| column | dtype | length / classes |
|---|---|---|
| commit | string | 40–40 |
| old_file | string | 4–118 |
| new_file | string | 4–118 |
| old_contents | string | 0–2.94k |
| new_contents | string | 1–4.43k |
| subject | string | 15–444 |
| message | string | 16–3.45k |
| lang | class label | 1 value |
| license | class label | 13 values |
| repos | string | 5–43.2k |
| prompt | string | 17–4.58k |
| response | string | 1–4.43k |
| prompt_tagged | string | 58–4.62k |
| response_tagged | string | 1–4.43k |
| text | string | 132–7.29k |
| text_tagged | string | 173–7.33k |
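Each record below is one commit following this schema; the `prompt`, `response`, and `text*` columns are derived from `message` and `new_contents`. A minimal sketch of loading and inspecting a dataset with this layout via the Hugging Face `datasets` library is shown here; the repository id is a placeholder, not the actual dataset name.

```python
# Minimal sketch: load a commit dataset with the schema above and inspect one record.
# Assumption: the table is published as a Hugging Face dataset; "user/commit-dataset"
# is a placeholder repository id, not the real one.
from datasets import load_dataset

ds = load_dataset("user/commit-dataset", split="train")
print(ds.column_names)              # commit, old_file, new_file, old_contents, new_contents, ...

row = ds[0]
print(row["subject"])               # one-line commit subject
print(row["lang"], row["license"])  # e.g. "Python", "bsd-3-clause"
print(row["new_contents"][:200])    # first 200 characters of the post-commit file
```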
623cdce39e1a14a2cbd80f116ab7e709c469e891
|
read-python-dependency-graph.py
|
read-python-dependency-graph.py
|
# IPython log file
import itertools as it
with open('pypi-deps.txt', 'r') as fin:
lines = fin.readlines()
edges = [line.rstrip().split() for line in lines]
packages = set(list(it.chain(*edges)))
len(edges)
len(packages)
'skimage' in packages
import toolz as tz
from toolz import curried as c
dep_count = tz.pipe(edges, c.pluck(1), tz.frequencies)
dep_count['skimage']
import networkx as nx
deps = nx.DiGraph()
'scikit-image' in packages
'scikit-learn' in packages
for u, v in edges:
u = u.replace('scikit-', 'sk')
v = v.replace('scikit-', 'sk')
deps.add_edge(u, v)
deps.number_of_edges()
deps.number_of_nodes()
deps.node['skimage']
deps.in_edges('skimage')
nodes = nx.katz_centrality(deps)
central = sorted(deps.nodes(), key=nodes.__getitem__, reverse=True)
central[:10]
central[:20]
central[:40]
central[40:80]
central.index('skimage')
central.index('scipy')
import pickle
stdlib = pickle.load(open('/Users/jni/projects/depsy/data/python_standard_libs.pickle', 'rb'))
central_nonstd = list(tz.filter(lambda x: x not in stdlib, central))
len(central_nonstd)
central_nonstd.index('scipy')
len(central)
central[:5]
nx.is_connected(deps.to_undirected())
len(packages)
deps_sym = deps.to_undirected()
import numpy as np
conncomps = list(nx.connected_component_subgraphs(deps_sym))
giant = conncomps[0]
giant_d = deps.subgraph(giant.nodes())
gpackages = giant_d.nodes()
A = nx.to_scipy_sparse_matrix(giant_d)
A.shape
A.dtype
n = A.shape[0]
c = (A + A.T) / 2
c.dtype
from scipy import sparse
d = sparse.diags([np.asarray(c.sum(axis=0)).ravel()], [0])
L = d.tocsr() - c
b = (c.multiply((A - A.T).sign())).sum(axis=1)
type(b)
|
Read in python dependency graph and clean up
|
Read in python dependency graph and clean up
|
Python
|
bsd-3-clause
|
jni/useful-histories
|
|
14d223068e2d8963dfe1f4e71854e9ea9c194bc5
|
Datasnakes/Tools/sge/qsubber.py
|
Datasnakes/Tools/sge/qsubber.py
|
import argparse
import textwrap
from qstat import Qstat
__author__ = 'Datasnakes'
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=textwrap.dedent('''\
This is a command line wrapper for the SGE module.
' '''))
parser.add_argument("-o", "--output", help="Qstat info output type",
required=True)
q = Qstat()
args = parser.parse_args(namespace=q)
|
Set up shell argparser for sge module
|
Set up shell argparser for sge module
|
Python
|
mit
|
datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts,datasnakes/Datasnakes-Scripts
|
|
c267818a28018a6f386c6f4d11eefc9987efe7bd
|
py/find-mode-in-binary-search-tree.py
|
py/find-mode-in-binary-search-tree.py
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def inOrder(self, cur):
if cur:
for x in self.inOrder(cur.left):
yield x
yield cur.val
for x in self.inOrder(cur.right):
yield x
def findMode(self, root):
"""
:type root: TreeNode
:rtype: List[int]
"""
if not root:
return []
modes = []
c = 0
m = 0
prev = None
for v in self.inOrder(root):
if prev == v:
c += 1
else:
c = 1
if c > m:
modes = [v]
m = c
elif c == m:
modes.append(v)
prev = v
return modes
|
Add py solution for 501. Find Mode in Binary Search Tree
|
Add py solution for 501. Find Mode in Binary Search Tree
501. Find Mode in Binary Search Tree: https://leetcode.com/problems/find-mode-in-binary-search-tree/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
|
7914c46189634e1c4abf713fc2c70fa81a72101d
|
simple-cipher/simple_cipher_better.py
|
simple-cipher/simple_cipher_better.py
|
import re
import time
import random
random.seed(int(time.time()))
length = 100
keyinit = "".join([chr(random.randint(97,122)) for i in range(0,100)])
class Cipher(object):
def __init__(self, key=keyinit):
if not key.isalpha():
raise ValueError
if not key.islower():
raise ValueError
numkey = []
for item in key:
numkey.append(ord(item)-97)
#self.caesar = Caesar(numkey)
self.key = key
self.jump = numkey
def encode(self, strs):
plain_code = re.sub("[^a-z]","",strs.lower())
ret=[]
for idx, item in enumerate(plain_code):
jump_idx = idx%len(self.jump)
ascii = ord(item)+self.jump[jump_idx]
if ascii > 122:
ascii -= 26
ret.append(chr(ascii))
return "".join(ret)
def decode(self, strs):
ret = []
for idx,item in enumerate(strs):
jump_idx = idx%len(self.jump)
ascii = ord(item)-self.jump[jump_idx]
if ascii < 97:
ascii += 26
ret.append(chr(ascii))
return "".join(ret)
class Caesar(Cipher):
def __init__(self):
super(Caesar,self).__init__('d')
|
Add better solution for simple cipher
|
Add better solution for simple cipher
|
Python
|
mit
|
always-waiting/exercism-python
|
|
210a9809eef6743107b459b91980158ea87e75dc
|
openstack/common/fixture/lockutils.py
|
openstack/common/fixture/lockutils.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
from openstack.common.lockutils import lock
class LockFixture(fixtures.Fixture):
"""External locking fixture.
This fixture is basically an alternative to the synchronized decorator with
the external flag so that tearDowns and addCleanups will be included in
the lock context for locking between tests. The fixture is recommended to
be the first line in a test method, like so::
def test_method(self):
self.useFixture(LockFixture)
...
or the first line in setUp if all the test methods in the class are
required to be serialized. Something like::
class TestCase(testtools.testcase):
def setUp(self):
self.useFixture(LockFixture)
super(TestCase, self).setUp()
...
This is because addCleanups are put on a LIFO queue that gets run after the
test method exits. (either by completing or raising an exception)
"""
def __init__(self, name, lock_file_prefix=None):
self.mgr = lock(name, lock_file_prefix, True)
def setUp(self):
super(LockFixture, self).setUp()
self.addCleanup(self.mgr.__exit__, None, None, None)
self.mgr.__enter__()
|
Move LockFixture into a fixtures module
|
Move LockFixture into a fixtures module
In Nova and other projects, fixtures are in test-requirements.txt and
NOT in requirements.txt so updating nova to use latest lockutils.py
is not possible. So let us move LockFixture out into it a separate
fixtures.py module where in theory we can add other fixtures as
well. See https://review.openstack.org/#/c/52153/ for an attempt to
try latest lockutils.py in Nova.
Change-Id: I12942d6390d06bb889d4145e708692fa886d99b5
|
Python
|
apache-2.0
|
openstack/oslotest,openstack/oslotest
|
|
ba9c1108a15bac713e7bda987865f8c4c1db92c7
|
timm/loss/binary_cross_entropy.py
|
timm/loss/binary_cross_entropy.py
|
import torch
import torch.nn as nn
import torch.nn.functional as F
class DenseBinaryCrossEntropy(nn.Module):
""" BCE using one-hot from dense targets w/ label smoothing
NOTE for experiments comparing CE to BCE /w label smoothing, may remove
"""
def __init__(self, smoothing=0.1):
super(DenseBinaryCrossEntropy, self).__init__()
assert 0. <= smoothing < 1.0
self.smoothing = smoothing
self.bce = nn.BCEWithLogitsLoss()
def forward(self, x, target):
num_classes = x.shape[-1]
off_value = self.smoothing / num_classes
on_value = 1. - self.smoothing + off_value
target = target.long().view(-1, 1)
target = torch.full(
(target.size()[0], num_classes), off_value, device=x.device, dtype=x.dtype).scatter_(1, target, on_value)
return self.bce(x, target)
|
Add a BCE loss impl that converts dense targets to sparse /w smoothing as an alternate to CE w/ smoothing. For training experiments.
|
Add a BCE loss impl that converts dense targets to sparse /w smoothing as an alternate to CE w/ smoothing. For training experiments.
|
Python
|
apache-2.0
|
rwightman/pytorch-image-models,rwightman/pytorch-image-models
|
|
8c7f9a8286d3d1b5172af2fdfb74d1f73756790b
|
tests/app/main/views/test_history.py
|
tests/app/main/views/test_history.py
|
from tests.conftest import SERVICE_ONE_ID
def test_history(
client_request,
mock_get_service_history,
):
client_request.get('main.history', service_id=SERVICE_ONE_ID)
|
Add a simple test for the history page
|
Add a simple test for the history page
|
Python
|
mit
|
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
|
|
b0243ac96d31693611cec20e60812739be92e3aa
|
tests/functional/test_connection.py
|
tests/functional/test_connection.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from sure import scenario
from pyeqs import QuerySet
from tests.helpers import prepare_data, cleanup_data, add_document
@scenario(prepare_data, cleanup_data)
def test_simple_search_with_host_string(context):
"""
Connect with host string
"""
# When create a queryset
t = QuerySet("localhost", index="foo")
# And there are records
add_document("foo", {"bar": "baz"})
# And I do a search
results = t[0:1]
# Then I get a the expected results
len(results).should.equal(1)
results[0]['_source'].should.equal({"bar": "baz"})
@scenario(prepare_data, cleanup_data)
def test_simple_search_with_host_dict(context):
"""
Connect with host dict
"""
# When create a queryset
connection_info = {"host": "localhost", "port": 9200}
t = QuerySet(connection_info, index="foo")
# And there are records
add_document("foo", {"bar": "baz"})
# And I do a search
results = t[0:1]
# Then I get a the expected results
len(results).should.equal(1)
results[0]['_source'].should.equal({"bar": "baz"})
@scenario(prepare_data, cleanup_data)
def test_simple_search_with_host_list(context):
"""
Connect with host list
"""
# When create a queryset
connection_info = [{"host": "localhost", "port": 9200}]
t = QuerySet(connection_info, index="foo")
# And there are records
add_document("foo", {"bar": "baz"})
# And I do a search
results = t[0:1]
# Then I get a the expected results
len(results).should.equal(1)
results[0]['_source'].should.equal({"bar": "baz"})
|
Add functional tests for connection change
|
Add functional tests for connection change
|
Python
|
mit
|
Yipit/pyeqs
|
|
bcd4ebd31f915825eb135420d56231e380589c5b
|
src/personalisation/migrations/0008_devicerule.py
|
src/personalisation/migrations/0008_devicerule.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-04-20 15:47
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import modelcluster.fields
class Migration(migrations.Migration):
dependencies = [
('personalisation', '0007_dayrule'),
]
operations = [
migrations.CreateModel(
name='DeviceRule',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mobile', models.BooleanField(default=False, verbose_name='Mobile phone')),
('tablet', models.BooleanField(default=False, verbose_name='Tablet')),
('desktop', models.BooleanField(default=False, verbose_name='Desktop')),
('segment', modelcluster.fields.ParentalKey(on_delete=django.db.models.deletion.CASCADE, related_name='personalisation_devicerule_related', related_query_name='personalisation_devicerules', to='personalisation.Segment')),
],
options={
'abstract': False,
},
),
]
|
Add missing migration for DeviceRule
|
Add missing migration for DeviceRule
|
Python
|
mit
|
LabD/wagtail-personalisation,LabD/wagtail-personalisation,LabD/wagtail-personalisation
|
|
4be3afba45b39a77595d3db11db364f7f0f3c5c5
|
test/buildbot/ensure_webcam_is_running.py
|
test/buildbot/ensure_webcam_is_running.py
|
#!/usr/bin/env python
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Checks if a virtual webcam is running and starts it if not.
Returns a non-zero return code if the webcam could not be started.
Prerequisites:
* Python must have the psutil package installed.
* Windows: a scheduled task named 'ManyCam' must exist and be configured to
launch ManyCam preconfigured to auto-play the test clip.
* Mac: ManyCam must be installed in the default location and be preconfigured
to auto-play the test clip.
* Linux: The v4l2loopback must be loaded to the kernel already (with the
devices=2 argument) and the v4l2_file_player application must be compiled and
put in the location specified below.
"""
import psutil
import subprocess
import sys
WEBCAM_WIN = ['schtasks', '/run', '/tn', 'ManyCam']
WEBCAM_MAC = ['open', '/Applications/ManyCam/ManyCam.app']
WEBCAM_LINUX = (
'$HOME/fake-webcam-driver/linux/v4l2_file_player/v4l2_file_player '
'$HOME/webrtc_video_quality/reference_video.yuv 640 480 /dev/video1 &')
def IsWebCamRunning():
if sys.platform == 'win32':
process_name = 'ManyCam.exe'
elif sys.platform.startswith('darwin'):
process_name = 'ManyCam'
elif sys.platform.startswith('linux'):
process_name = 'v4l2_file_player'
else:
raise Exception('Unsupported platform: %s' % sys.platform)
for p in psutil.get_process_list():
if process_name == p.name:
print 'Found a running virtual webcam (%s with PID %s)' % (p.name, p.pid)
return True
return False
def Main():
if IsWebCamRunning():
return 0
try:
if sys.platform == 'win32':
subprocess.check_call(WEBCAM_WIN)
elif sys.platform.startswith('darwin'):
subprocess.check_call(WEBCAM_MAC)
elif sys.platform.startswith('linux'):
subprocess.check_call(WEBCAM_LINUX, shell=True)
print 'Successfully launched virtual webcam.'
return 0
except Exception as e:
print 'Failed to launch virtual webcam: %s' % e
if __name__ == '__main__':
sys.exit(Main())
|
Add script to ensure virtual webcam is running.
|
Add script to ensure virtual webcam is running.
This script will check that a webcam is running and start it if it's
not currently running.
It's tailored to the way our buildbots are currently configured.
TEST=local execution on Windows, Mac and Linux.
BUG=none
R=phoglund@webrtc.org
Review URL: https://webrtc-codereview.appspot.com/1406005
git-svn-id: 03ae4fbe531b1eefc9d815f31e49022782c42458@3981 4adac7df-926f-26a2-2b94-8c16560cd09d
|
Python
|
bsd-3-clause
|
Alkalyne/webrtctrunk,bpsinc-native/src_third_party_webrtc,jgcaaprom/android_external_chromium_org_third_party_webrtc,krieger-od/nwjs_chromium_webrtc,xin3liang/platform_external_chromium_org_third_party_webrtc,geekboxzone/lollipop_external_chromium_org_third_party_webrtc,geekboxzone/lollipop_external_chromium_org_third_party_webrtc,CyanogenMod/android_external_chromium_org_third_party_webrtc,CyanogenMod/android_external_chromium_org_third_party_webrtc,krieger-od/nwjs_chromium_webrtc,xin3liang/platform_external_chromium_org_third_party_webrtc,geekboxzone/lollipop_external_chromium_org_third_party_webrtc,Omegaphora/external_chromium_org_third_party_webrtc,PersonifyInc/chromium_webrtc,MIPS/external-chromium_org-third_party-webrtc,Alkalyne/webrtctrunk,SlimXperiments/external_chromium_org_third_party_webrtc,MIPS/external-chromium_org-third_party-webrtc,bpsinc-native/src_third_party_webrtc,AOSPU/external_chromium_org_third_party_webrtc,AOSPU/external_chromium_org_third_party_webrtc,MIPS/external-chromium_org-third_party-webrtc,svn2github/webrtc-Revision-8758,Alkalyne/webrtctrunk,CyanogenMod/android_external_chromium_org_third_party_webrtc,jgcaaprom/android_external_chromium_org_third_party_webrtc,svn2github/webrtc-Revision-8758,bpsinc-native/src_third_party_webrtc,PersonifyInc/chromium_webrtc,svn2github/webrtc-Revision-8758,android-ia/platform_external_chromium_org_third_party_webrtc,android-ia/platform_external_chromium_org_third_party_webrtc,lukeweber/webrtc-src-override,krieger-od/webrtc,krieger-od/webrtc,jchavanton/webrtc,Alkalyne/webrtctrunk,sippet/webrtc,jchavanton/webrtc,xin3liang/platform_external_chromium_org_third_party_webrtc,bpsinc-native/src_third_party_webrtc,aleonliao/webrtc-trunk,jchavanton/webrtc,Omegaphora/external_chromium_org_third_party_webrtc,svn2github/webrtc-Revision-8758,geekboxzone/lollipop_external_chromium_org_third_party_webrtc,android-ia/platform_external_chromium_org_third_party_webrtc,Omegaphora/external_chromium_org_third_party_webrtc,lukeweber/webrtc-src-override,PersonifyInc/chromium_webrtc,Omegaphora/external_chromium_org_third_party_webrtc,CyanogenMod/android_external_chromium_org_third_party_webrtc,lukeweber/webrtc-src-override,Omegaphora/external_chromium_org_third_party_webrtc,sippet/webrtc,MIPS/external-chromium_org-third_party-webrtc,CyanogenMod/android_external_chromium_org_third_party_webrtc,krieger-od/nwjs_chromium_webrtc,jgcaaprom/android_external_chromium_org_third_party_webrtc,SlimXperiments/external_chromium_org_third_party_webrtc,SlimXperiments/external_chromium_org_third_party_webrtc,sippet/webrtc,aleonliao/webrtc-trunk,jgcaaprom/android_external_chromium_org_third_party_webrtc,aleonliao/webrtc-trunk,MIPS/external-chromium_org-third_party-webrtc,android-ia/platform_external_chromium_org_third_party_webrtc,android-ia/platform_external_chromium_org_third_party_webrtc,krieger-od/nwjs_chromium_webrtc,SlimXperiments/external_chromium_org_third_party_webrtc,sippet/webrtc,jgcaaprom/android_external_chromium_org_third_party_webrtc,sippet/webrtc,MIPS/external-chromium_org-third_party-webrtc,android-ia/platform_external_chromium_org_third_party_webrtc,lukeweber/webrtc-src-override,AOSPU/external_chromium_org_third_party_webrtc,PersonifyInc/chromium_webrtc,lukeweber/webrtc-src-override,jchavanton/webrtc,lukeweber/webrtc-src-override,AOSPU/external_chromium_org_third_party_webrtc,geekboxzone/lollipop_external_chromium_org_third_party_webrtc,AOSPU/external_chromium_org_third_party_webrtc,MIPS/external-chromium_org-third_party-webrtc,krieger-od/webrtc,jgcaaprom
/android_external_chromium_org_third_party_webrtc,android-ia/platform_external_chromium_org_third_party_webrtc,Alkalyne/webrtctrunk,jgcaaprom/android_external_chromium_org_third_party_webrtc,AOSPU/external_chromium_org_third_party_webrtc,geekboxzone/lollipop_external_chromium_org_third_party_webrtc,jgcaaprom/android_external_chromium_org_third_party_webrtc,sippet/webrtc,xin3liang/platform_external_chromium_org_third_party_webrtc,Alkalyne/webrtctrunk,krieger-od/webrtc,krieger-od/nwjs_chromium_webrtc,Alkalyne/webrtctrunk,geekboxzone/lollipop_external_chromium_org_third_party_webrtc,AOSPU/external_chromium_org_third_party_webrtc,aleonliao/webrtc-trunk,CyanogenMod/android_external_chromium_org_third_party_webrtc,jchavanton/webrtc,bpsinc-native/src_third_party_webrtc,aleonliao/webrtc-trunk,bpsinc-native/src_third_party_webrtc,xin3liang/platform_external_chromium_org_third_party_webrtc,MIPS/external-chromium_org-third_party-webrtc,PersonifyInc/chromium_webrtc,Alkalyne/webrtctrunk,jchavanton/webrtc,bpsinc-native/src_third_party_webrtc,lukeweber/webrtc-src-override,SlimXperiments/external_chromium_org_third_party_webrtc,Omegaphora/external_chromium_org_third_party_webrtc,CyanogenMod/android_external_chromium_org_third_party_webrtc,SlimXperiments/external_chromium_org_third_party_webrtc,geekboxzone/lollipop_external_chromium_org_third_party_webrtc,jchavanton/webrtc,PersonifyInc/chromium_webrtc,krieger-od/webrtc,xin3liang/platform_external_chromium_org_third_party_webrtc,bpsinc-native/src_third_party_webrtc,aleonliao/webrtc-trunk,krieger-od/webrtc,svn2github/webrtc-Revision-8758,krieger-od/nwjs_chromium_webrtc,SlimXperiments/external_chromium_org_third_party_webrtc,svn2github/webrtc-Revision-8758,Omegaphora/external_chromium_org_third_party_webrtc,android-ia/platform_external_chromium_org_third_party_webrtc,xin3liang/platform_external_chromium_org_third_party_webrtc,Omegaphora/external_chromium_org_third_party_webrtc
|
Add script to ensure virtual webcam is running.
This script will check that a webcam is running and start it if it's
not currently running.
It's tailored to the way our buildbots are currently configured.
TEST=local execution on Windows, Mac and Linux.
BUG=none
R=phoglund@webrtc.org
Review URL: https://webrtc-codereview.appspot.com/1406005
git-svn-id: 03ae4fbe531b1eefc9d815f31e49022782c42458@3981 4adac7df-926f-26a2-2b94-8c16560cd09d
|
#!/usr/bin/env python
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Checks if a virtual webcam is running and starts it if not.
Returns a non-zero return code if the webcam could not be started.
Prerequisites:
* Python must have the psutil package installed.
* Windows: a scheduled task named 'ManyCam' must exist and be configured to
launch ManyCam preconfigured to auto-play the test clip.
* Mac: ManyCam must be installed in the default location and be preconfigured
to auto-play the test clip.
* Linux: The v4l2loopback must be loaded to the kernel already (with the
devices=2 argument) and the v4l2_file_player application must be compiled and
put in the location specified below.
"""
import psutil
import subprocess
import sys
WEBCAM_WIN = ['schtasks', '/run', '/tn', 'ManyCam']
WEBCAM_MAC = ['open', '/Applications/ManyCam/ManyCam.app']
WEBCAM_LINUX = (
'$HOME/fake-webcam-driver/linux/v4l2_file_player/v4l2_file_player '
'$HOME/webrtc_video_quality/reference_video.yuv 640 480 /dev/video1 &')
def IsWebCamRunning():
if sys.platform == 'win32':
process_name = 'ManyCam.exe'
elif sys.platform.startswith('darwin'):
process_name = 'ManyCam'
elif sys.platform.startswith('linux'):
process_name = 'v4l2_file_player'
else:
raise Exception('Unsupported platform: %s' % sys.platform)
for p in psutil.get_process_list():
if process_name == p.name:
print 'Found a running virtual webcam (%s with PID %s)' % (p.name, p.pid)
return True
return False
def Main():
if IsWebCamRunning():
return 0
try:
if sys.platform == 'win32':
subprocess.check_call(WEBCAM_WIN)
elif sys.platform.startswith('darwin'):
subprocess.check_call(WEBCAM_MAC)
elif sys.platform.startswith('linux'):
subprocess.check_call(WEBCAM_LINUX, shell=True)
print 'Successfully launched virtual webcam.'
return 0
  except Exception as e:
    print 'Failed to launch virtual webcam: %s' % e
    return 1
if __name__ == '__main__':
sys.exit(Main())
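# --- Editor's note: illustrative sketch, not part of the original commit. ---
# Newer psutil releases drop get_process_list() in favour of process_iter(),
# and Process.name becomes a method, so an equivalent check on a current
# psutil could look roughly like this (psutil is already imported above):
def is_process_running(process_name):
  """Return True if any running process matches the given name (sketch)."""
  for proc in psutil.process_iter():
    try:
      if proc.name() == process_name:
        return True
    except (psutil.NoSuchProcess, psutil.AccessDenied):
      continue
  return False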
|
<commit_before><commit_msg>Add script to ensure virtual webcam is running.
This script will check that a webcam is running and start it if it's
not currently running.
It's tailored to the way our buildbots are currently configured.
TEST=local execution on Windows, Mac and Linux.
BUG=none
R=phoglund@webrtc.org
Review URL: https://webrtc-codereview.appspot.com/1406005
git-svn-id: 03ae4fbe531b1eefc9d815f31e49022782c42458@3981 4adac7df-926f-26a2-2b94-8c16560cd09d<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Checks if a virtual webcam is running and starts it if not.
Returns a non-zero return code if the webcam could not be started.
Prerequisites:
* Python must have the psutil package installed.
* Windows: a scheduled task named 'ManyCam' must exist and be configured to
launch ManyCam preconfigured to auto-play the test clip.
* Mac: ManyCam must be installed in the default location and be preconfigured
to auto-play the test clip.
* Linux: The v4l2loopback must be loaded to the kernel already (with the
devices=2 argument) and the v4l2_file_player application must be compiled and
put in the location specified below.
"""
import psutil
import subprocess
import sys
WEBCAM_WIN = ['schtasks', '/run', '/tn', 'ManyCam']
WEBCAM_MAC = ['open', '/Applications/ManyCam/ManyCam.app']
WEBCAM_LINUX = (
'$HOME/fake-webcam-driver/linux/v4l2_file_player/v4l2_file_player '
'$HOME/webrtc_video_quality/reference_video.yuv 640 480 /dev/video1 &')
def IsWebCamRunning():
if sys.platform == 'win32':
process_name = 'ManyCam.exe'
elif sys.platform.startswith('darwin'):
process_name = 'ManyCam'
elif sys.platform.startswith('linux'):
process_name = 'v4l2_file_player'
else:
raise Exception('Unsupported platform: %s' % sys.platform)
for p in psutil.get_process_list():
if process_name == p.name:
print 'Found a running virtual webcam (%s with PID %s)' % (p.name, p.pid)
return True
return False
def Main():
if IsWebCamRunning():
return 0
try:
if sys.platform == 'win32':
subprocess.check_call(WEBCAM_WIN)
elif sys.platform.startswith('darwin'):
subprocess.check_call(WEBCAM_MAC)
elif sys.platform.startswith('linux'):
subprocess.check_call(WEBCAM_LINUX, shell=True)
print 'Successfully launched virtual webcam.'
return 0
  except Exception as e:
    print 'Failed to launch virtual webcam: %s' % e
    return 1
if __name__ == '__main__':
sys.exit(Main())
|
Add script to ensure virtual webcam is running.
This script will check that a webcam is running and start it if it's
not currently running.
It's tailored to the way our buildbots are currently configured.
TEST=local execution on Windows, Mac and Linux.
BUG=none
R=phoglund@webrtc.org
Review URL: https://webrtc-codereview.appspot.com/1406005
git-svn-id: 03ae4fbe531b1eefc9d815f31e49022782c42458@3981 4adac7df-926f-26a2-2b94-8c16560cd09d#!/usr/bin/env python
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Checks if a virtual webcam is running and starts it if not.
Returns a non-zero return code if the webcam could not be started.
Prerequisites:
* Python must have the psutil package installed.
* Windows: a scheduled task named 'ManyCam' must exist and be configured to
launch ManyCam preconfigured to auto-play the test clip.
* Mac: ManyCam must be installed in the default location and be preconfigured
to auto-play the test clip.
* Linux: The v4l2loopback must be loaded to the kernel already (with the
devices=2 argument) and the v4l2_file_player application must be compiled and
put in the location specified below.
"""
import psutil
import subprocess
import sys
WEBCAM_WIN = ['schtasks', '/run', '/tn', 'ManyCam']
WEBCAM_MAC = ['open', '/Applications/ManyCam/ManyCam.app']
WEBCAM_LINUX = (
'$HOME/fake-webcam-driver/linux/v4l2_file_player/v4l2_file_player '
'$HOME/webrtc_video_quality/reference_video.yuv 640 480 /dev/video1 &')
def IsWebCamRunning():
if sys.platform == 'win32':
process_name = 'ManyCam.exe'
elif sys.platform.startswith('darwin'):
process_name = 'ManyCam'
elif sys.platform.startswith('linux'):
process_name = 'v4l2_file_player'
else:
raise Exception('Unsupported platform: %s' % sys.platform)
for p in psutil.get_process_list():
if process_name == p.name:
print 'Found a running virtual webcam (%s with PID %s)' % (p.name, p.pid)
return True
return False
def Main():
if IsWebCamRunning():
return 0
try:
if sys.platform == 'win32':
subprocess.check_call(WEBCAM_WIN)
elif sys.platform.startswith('darwin'):
subprocess.check_call(WEBCAM_MAC)
elif sys.platform.startswith('linux'):
subprocess.check_call(WEBCAM_LINUX, shell=True)
print 'Successfully launched virtual webcam.'
return 0
  except Exception as e:
    print 'Failed to launch virtual webcam: %s' % e
    return 1
if __name__ == '__main__':
sys.exit(Main())
|
<commit_before><commit_msg>Add script to ensure virtual webcam is running.
This script will check that a webcam is running and start it if it's
not currently running.
It's tailored to the way our buildbots are currently configured.
TEST=local execution on Windows, Mac and Linux.
BUG=none
R=phoglund@webrtc.org
Review URL: https://webrtc-codereview.appspot.com/1406005
git-svn-id: 03ae4fbe531b1eefc9d815f31e49022782c42458@3981 4adac7df-926f-26a2-2b94-8c16560cd09d<commit_after>#!/usr/bin/env python
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Checks if a virtual webcam is running and starts it if not.
Returns a non-zero return code if the webcam could not be started.
Prerequisites:
* Python must have the psutil package installed.
* Windows: a scheduled task named 'ManyCam' must exist and be configured to
launch ManyCam preconfigured to auto-play the test clip.
* Mac: ManyCam must be installed in the default location and be preconfigured
to auto-play the test clip.
* Linux: The v4l2loopback must be loaded to the kernel already (with the
devices=2 argument) and the v4l2_file_player application must be compiled and
put in the location specified below.
"""
import psutil
import subprocess
import sys
WEBCAM_WIN = ['schtasks', '/run', '/tn', 'ManyCam']
WEBCAM_MAC = ['open', '/Applications/ManyCam/ManyCam.app']
WEBCAM_LINUX = (
'$HOME/fake-webcam-driver/linux/v4l2_file_player/v4l2_file_player '
'$HOME/webrtc_video_quality/reference_video.yuv 640 480 /dev/video1 &')
def IsWebCamRunning():
if sys.platform == 'win32':
process_name = 'ManyCam.exe'
elif sys.platform.startswith('darwin'):
process_name = 'ManyCam'
elif sys.platform.startswith('linux'):
process_name = 'v4l2_file_player'
else:
raise Exception('Unsupported platform: %s' % sys.platform)
for p in psutil.get_process_list():
if process_name == p.name:
print 'Found a running virtual webcam (%s with PID %s)' % (p.name, p.pid)
return True
return False
def Main():
if IsWebCamRunning():
return 0
try:
if sys.platform == 'win32':
subprocess.check_call(WEBCAM_WIN)
elif sys.platform.startswith('darwin'):
subprocess.check_call(WEBCAM_MAC)
elif sys.platform.startswith('linux'):
subprocess.check_call(WEBCAM_LINUX, shell=True)
print 'Successfully launched virtual webcam.'
return 0
  except Exception as e:
    print 'Failed to launch virtual webcam: %s' % e
    return 1
if __name__ == '__main__':
sys.exit(Main())
|
|
d88a793badccb506b6afd62cf8fdce846ef7fe94
|
txircd/modules/extra/customprefix.py
|
txircd/modules/extra/customprefix.py
|
from twisted.plugin import IPlugin
from twisted.python import log
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
import logging
class CustomPrefix(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "CustomPrefix"
def hookIRCd(self, ircd):
self.ircd = ircd
def channelModes(self):
modes = []
prefixes = self.ircd.config.getWithDefault("custom_prefixes", { "h": { "level": 50, "char": "%" }, "a": { "level": 150, "char": "&" }, "q" : { "level": 200, "char": "~" } })
for prefix, prefixValue in prefixes.iteritems():
try:
statusLevel = int(prefixValue["level"])
modes.append((prefix, ModeType.Status, self, statusLevel, prefixValue["char"]))
except ValueError:
log.msg("CustomPrefix: Prefix {} does not specify a valid level; skipping prefix".format(prefix), logLevel=logging.WARNING)
except KeyError as e:
log.msg("CustomPrefix: Prefix {} is missing {}; skipping prefix".format(prefix, e. message), logLevel=logging.WARNING)
return modes
def checkSet(self, channel, param):
return param.split(",")
customPrefix = CustomPrefix()
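# --- Editor's note: illustrative example, not part of the original commit. ---
# The "custom_prefixes" config value read in channelModes() above is expected
# to map a mode letter to a status level and a prefix character. The dict
# below simply mirrors the defaults used in the code and shows the expected
# shape of that setting.
EXAMPLE_CUSTOM_PREFIXES = {
  "h": { "level": 50, "char": "%" },   # halfop-style prefix
  "a": { "level": 150, "char": "&" },  # admin-style prefix
  "q": { "level": 200, "char": "~" },  # owner-style prefix
}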
|
Implement the custom prefixes module
|
Implement the custom prefixes module
|
Python
|
bsd-3-clause
|
ElementalAlchemist/txircd,Heufneutje/txircd
|
Implement the custom prefixes module
|
from twisted.plugin import IPlugin
from twisted.python import log
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
import logging
class CustomPrefix(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "CustomPrefix"
def hookIRCd(self, ircd):
self.ircd = ircd
def channelModes(self):
modes = []
prefixes = self.ircd.config.getWithDefault("custom_prefixes", { "h": { "level": 50, "char": "%" }, "a": { "level": 150, "char": "&" }, "q" : { "level": 200, "char": "~" } })
for prefix, prefixValue in prefixes.iteritems():
try:
statusLevel = int(prefixValue["level"])
modes.append((prefix, ModeType.Status, self, statusLevel, prefixValue["char"]))
except ValueError:
log.msg("CustomPrefix: Prefix {} does not specify a valid level; skipping prefix".format(prefix), logLevel=logging.WARNING)
except KeyError as e:
log.msg("CustomPrefix: Prefix {} is missing {}; skipping prefix".format(prefix, e. message), logLevel=logging.WARNING)
return modes
def checkSet(self, channel, param):
return param.split(",")
customPrefix = CustomPrefix()
|
<commit_before><commit_msg>Implement the custom prefixes module<commit_after>
|
from twisted.plugin import IPlugin
from twisted.python import log
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
import logging
class CustomPrefix(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "CustomPrefix"
def hookIRCd(self, ircd):
self.ircd = ircd
def channelModes(self):
modes = []
prefixes = self.ircd.config.getWithDefault("custom_prefixes", { "h": { "level": 50, "char": "%" }, "a": { "level": 150, "char": "&" }, "q" : { "level": 200, "char": "~" } })
for prefix, prefixValue in prefixes.iteritems():
try:
statusLevel = int(prefixValue["level"])
modes.append((prefix, ModeType.Status, self, statusLevel, prefixValue["char"]))
except ValueError:
log.msg("CustomPrefix: Prefix {} does not specify a valid level; skipping prefix".format(prefix), logLevel=logging.WARNING)
except KeyError as e:
log.msg("CustomPrefix: Prefix {} is missing {}; skipping prefix".format(prefix, e. message), logLevel=logging.WARNING)
return modes
def checkSet(self, channel, param):
return param.split(",")
customPrefix = CustomPrefix()
|
Implement the custom prefixes modulefrom twisted.plugin import IPlugin
from twisted.python import log
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
import logging
class CustomPrefix(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "CustomPrefix"
def hookIRCd(self, ircd):
self.ircd = ircd
def channelModes(self):
modes = []
prefixes = self.ircd.config.getWithDefault("custom_prefixes", { "h": { "level": 50, "char": "%" }, "a": { "level": 150, "char": "&" }, "q" : { "level": 200, "char": "~" } })
for prefix, prefixValue in prefixes.iteritems():
try:
statusLevel = int(prefixValue["level"])
modes.append((prefix, ModeType.Status, self, statusLevel, prefixValue["char"]))
except ValueError:
log.msg("CustomPrefix: Prefix {} does not specify a valid level; skipping prefix".format(prefix), logLevel=logging.WARNING)
except KeyError as e:
log.msg("CustomPrefix: Prefix {} is missing {}; skipping prefix".format(prefix, e. message), logLevel=logging.WARNING)
return modes
def checkSet(self, channel, param):
return param.split(",")
customPrefix = CustomPrefix()
|
<commit_before><commit_msg>Implement the custom prefixes module<commit_after>from twisted.plugin import IPlugin
from twisted.python import log
from txircd.module_interface import IMode, IModuleData, Mode, ModuleData
from txircd.utils import ModeType
from zope.interface import implements
import logging
class CustomPrefix(ModuleData, Mode):
implements(IPlugin, IModuleData, IMode)
name = "CustomPrefix"
def hookIRCd(self, ircd):
self.ircd = ircd
def channelModes(self):
modes = []
prefixes = self.ircd.config.getWithDefault("custom_prefixes", { "h": { "level": 50, "char": "%" }, "a": { "level": 150, "char": "&" }, "q" : { "level": 200, "char": "~" } })
for prefix, prefixValue in prefixes.iteritems():
try:
statusLevel = int(prefixValue["level"])
modes.append((prefix, ModeType.Status, self, statusLevel, prefixValue["char"]))
except ValueError:
log.msg("CustomPrefix: Prefix {} does not specify a valid level; skipping prefix".format(prefix), logLevel=logging.WARNING)
except KeyError as e:
log.msg("CustomPrefix: Prefix {} is missing {}; skipping prefix".format(prefix, e. message), logLevel=logging.WARNING)
return modes
def checkSet(self, channel, param):
return param.split(",")
customPrefix = CustomPrefix()
|
|
04c201a9db9eb2523febb81f5fc5601b2d86624e
|
tcamp/reg/management/commands/lobby_day_report.py
|
tcamp/reg/management/commands/lobby_day_report.py
|
from django.core.management.base import BaseCommand, CommandError
from reg.models import *
from optparse import make_option
import cStringIO, csv
from collections import defaultdict
class Command(BaseCommand):
  help = 'Export lobby day participants for the current event to CSV'
option_list = BaseCommand.option_list + (
make_option('--staff-domain',
action='store',
dest='staff_domain',
default=None,
help='Exclude tickets with email addresses from this domain'),
make_option('--staff-coupon',
action='store',
dest='staff_coupon',
default=None,
help='Exclude tickets that used this coupon'),
make_option('--output',
action='store',
dest='output',
default=None,
help='Output CSV to a file'),
)
def handle(self, *args, **options):
from reg.views import CURRENT_EVENT
search = {
'success': True,
'event': CURRENT_EVENT
}
query = Ticket.objects.filter(**search)
fdomain = '@%s' % options['staff_domain'] if options['staff_domain'] else None
fcoupon = CouponCode.objects.get(code=options['staff_coupon']) if options['staff_coupon'] else CouponCode()
outs = cStringIO.StringIO()
outc = csv.DictWriter(outs, ['first_name', 'last_name', 'email', 'state', 'organization', 'ticket_type', 'is_staff'])
outc.writeheader()
for ticket in query.order_by('id').select_related():
if ticket.lobby_day:
staff = (ticket.email and fdomain and fdomain in ticket.email) or \
(ticket.sale.email and fdomain and fdomain in ticket.sale.email) or \
(ticket.sale.coupon_code and ticket.sale.coupon_code.id == fcoupon.id)
outc.writerow({
'first_name': ticket.first_name,
'last_name': ticket.last_name,
'email': ticket.email,
'state': ticket.sale.state,
'organization': ticket.organization,
'ticket_type': ticket.type.name,
'is_staff': 'Y' if staff else 'N',
})
if options['output']:
f = open(options['output'], 'wb')
f.write(outs.getvalue())
f.close()
else:
print outs.getvalue()
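# --- Editor's note: hypothetical usage sketch, not part of the original commit. ---
# The command is normally run through manage.py; it can also be driven from
# code with Django's call_command, passing the option dests as keyword
# arguments. The staff domain and output path below are made-up examples.
def example_export_lobby_day_csv():
  from django.core.management import call_command
  call_command('lobby_day_report',
               staff_domain='example.org',
               output='/tmp/lobby_day_report.csv')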
|
Add management command to export TCamp lobby day participants.
|
Add management command to export TCamp lobby day participants.
|
Python
|
bsd-3-clause
|
sunlightlabs/tcamp,sunlightlabs/tcamp,sunlightlabs/tcamp,sunlightlabs/tcamp
|
Add management command to export TCamp lobby day participants.
|
from django.core.management.base import BaseCommand, CommandError
from reg.models import *
from optparse import make_option
import cStringIO, csv
from collections import defaultdict
class Command(BaseCommand):
  help = 'Export lobby day participants for the current event to CSV'
option_list = BaseCommand.option_list + (
make_option('--staff-domain',
action='store',
dest='staff_domain',
default=None,
help='Exclude tickets with email addresses from this domain'),
make_option('--staff-coupon',
action='store',
dest='staff_coupon',
default=None,
help='Exclude tickets that used this coupon'),
make_option('--output',
action='store',
dest='output',
default=None,
help='Output CSV to a file'),
)
def handle(self, *args, **options):
from reg.views import CURRENT_EVENT
search = {
'success': True,
'event': CURRENT_EVENT
}
query = Ticket.objects.filter(**search)
fdomain = '@%s' % options['staff_domain'] if options['staff_domain'] else None
fcoupon = CouponCode.objects.get(code=options['staff_coupon']) if options['staff_coupon'] else CouponCode()
outs = cStringIO.StringIO()
outc = csv.DictWriter(outs, ['first_name', 'last_name', 'email', 'state', 'organization', 'ticket_type', 'is_staff'])
outc.writeheader()
for ticket in query.order_by('id').select_related():
if ticket.lobby_day:
staff = (ticket.email and fdomain and fdomain in ticket.email) or \
(ticket.sale.email and fdomain and fdomain in ticket.sale.email) or \
(ticket.sale.coupon_code and ticket.sale.coupon_code.id == fcoupon.id)
outc.writerow({
'first_name': ticket.first_name,
'last_name': ticket.last_name,
'email': ticket.email,
'state': ticket.sale.state,
'organization': ticket.organization,
'ticket_type': ticket.type.name,
'is_staff': 'Y' if staff else 'N',
})
if options['output']:
f = open(options['output'], 'wb')
f.write(outs.getvalue())
f.close()
else:
print outs.getvalue()
|
<commit_before><commit_msg>Add management command to export TCamp lobby day participants.<commit_after>
|
from django.core.management.base import BaseCommand, CommandError
from reg.models import *
from optparse import make_option
import cStringIO, csv
from collections import defaultdict
class Command(BaseCommand):
  help = 'Export lobby day participants for the current event to CSV'
option_list = BaseCommand.option_list + (
make_option('--staff-domain',
action='store',
dest='staff_domain',
default=None,
help='Exclude tickets with email addresses from this domain'),
make_option('--staff-coupon',
action='store',
dest='staff_coupon',
default=None,
help='Exclude tickets that used this coupon'),
make_option('--output',
action='store',
dest='output',
default=None,
help='Output CSV to a file'),
)
def handle(self, *args, **options):
from reg.views import CURRENT_EVENT
search = {
'success': True,
'event': CURRENT_EVENT
}
query = Ticket.objects.filter(**search)
fdomain = '@%s' % options['staff_domain'] if options['staff_domain'] else None
fcoupon = CouponCode.objects.get(code=options['staff_coupon']) if options['staff_coupon'] else CouponCode()
outs = cStringIO.StringIO()
outc = csv.DictWriter(outs, ['first_name', 'last_name', 'email', 'state', 'organization', 'ticket_type', 'is_staff'])
outc.writeheader()
for ticket in query.order_by('id').select_related():
if ticket.lobby_day:
staff = (ticket.email and fdomain and fdomain in ticket.email) or \
(ticket.sale.email and fdomain and fdomain in ticket.sale.email) or \
(ticket.sale.coupon_code and ticket.sale.coupon_code.id == fcoupon.id)
outc.writerow({
'first_name': ticket.first_name,
'last_name': ticket.last_name,
'email': ticket.email,
'state': ticket.sale.state,
'organization': ticket.organization,
'ticket_type': ticket.type.name,
'is_staff': 'Y' if staff else 'N',
})
if options['output']:
f = open(options['output'], 'wb')
f.write(outs.getvalue())
f.close()
else:
print outs.getvalue()
|
Add management command to export TCamp lobby day participants.from django.core.management.base import BaseCommand, CommandError
from reg.models import *
from optparse import make_option
import cStringIO, csv
from collections import defaultdict
class Command(BaseCommand):
  help = 'Export lobby day participants for the current event to CSV'
option_list = BaseCommand.option_list + (
make_option('--staff-domain',
action='store',
dest='staff_domain',
default=None,
help='Exclude tickets with email addresses from this domain'),
make_option('--staff-coupon',
action='store',
dest='staff_coupon',
default=None,
help='Exclude tickets that used this coupon'),
make_option('--output',
action='store',
dest='output',
default=None,
help='Output CSV to a file'),
)
def handle(self, *args, **options):
from reg.views import CURRENT_EVENT
search = {
'success': True,
'event': CURRENT_EVENT
}
query = Ticket.objects.filter(**search)
fdomain = '@%s' % options['staff_domain'] if options['staff_domain'] else None
fcoupon = CouponCode.objects.get(code=options['staff_coupon']) if options['staff_coupon'] else CouponCode()
outs = cStringIO.StringIO()
outc = csv.DictWriter(outs, ['first_name', 'last_name', 'email', 'state', 'organization', 'ticket_type', 'is_staff'])
outc.writeheader()
for ticket in query.order_by('id').select_related():
if ticket.lobby_day:
staff = (ticket.email and fdomain and fdomain in ticket.email) or \
(ticket.sale.email and fdomain and fdomain in ticket.sale.email) or \
(ticket.sale.coupon_code and ticket.sale.coupon_code.id == fcoupon.id)
outc.writerow({
'first_name': ticket.first_name,
'last_name': ticket.last_name,
'email': ticket.email,
'state': ticket.sale.state,
'organization': ticket.organization,
'ticket_type': ticket.type.name,
'is_staff': 'Y' if staff else 'N',
})
if options['output']:
f = open(options['output'], 'wb')
f.write(outs.getvalue())
f.close()
else:
print outs.getvalue()
|
<commit_before><commit_msg>Add management command to export TCamp lobby day participants.<commit_after>from django.core.management.base import BaseCommand, CommandError
from reg.models import *
from optparse import make_option
import cStringIO, csv
from collections import defaultdict
class Command(BaseCommand):
  help = 'Export lobby day participants for the current event to CSV'
option_list = BaseCommand.option_list + (
make_option('--staff-domain',
action='store',
dest='staff_domain',
default=None,
help='Exclude tickets with email addresses from this domain'),
make_option('--staff-coupon',
action='store',
dest='staff_coupon',
default=None,
help='Exclude tickets that used this coupon'),
make_option('--output',
action='store',
dest='output',
default=None,
help='Output CSV to a file'),
)
def handle(self, *args, **options):
from reg.views import CURRENT_EVENT
search = {
'success': True,
'event': CURRENT_EVENT
}
query = Ticket.objects.filter(**search)
fdomain = '@%s' % options['staff_domain'] if options['staff_domain'] else None
fcoupon = CouponCode.objects.get(code=options['staff_coupon']) if options['staff_coupon'] else CouponCode()
outs = cStringIO.StringIO()
outc = csv.DictWriter(outs, ['first_name', 'last_name', 'email', 'state', 'organization', 'ticket_type', 'is_staff'])
outc.writeheader()
for ticket in query.order_by('id').select_related():
if ticket.lobby_day:
staff = (ticket.email and fdomain and fdomain in ticket.email) or \
(ticket.sale.email and fdomain and fdomain in ticket.sale.email) or \
(ticket.sale.coupon_code and ticket.sale.coupon_code.id == fcoupon.id)
outc.writerow({
'first_name': ticket.first_name,
'last_name': ticket.last_name,
'email': ticket.email,
'state': ticket.sale.state,
'organization': ticket.organization,
'ticket_type': ticket.type.name,
'is_staff': 'Y' if staff else 'N',
})
if options['output']:
f = open(options['output'], 'wb')
f.write(outs.getvalue())
f.close()
else:
print outs.getvalue()
|
|
fb123e3751bd0736fbc44098ff435199ece3b849
|
tests/test_run_all_doctests.py
|
tests/test_run_all_doctests.py
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2007-2013 Parisson SARL
# This file is part of TimeSide.
# TimeSide is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# TimeSide is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with TimeSide. If not, see <http://www.gnu.org/licenses/>.
# Authors:
# Thomas Fillon <thomas at parisson.com>
import unittest
import doctest
import timeside
import pkgutil
def load_tests(loader, tests, ignore):
import fnmatch
import os
finder = doctest.DocTestFinder(exclude_empty=False)
timeside_path = os.path.dirname(timeside.__path__[0])
# Create tests for doctest ReST files
rst_files = []
for root, dirnames, filenames in os.walk(timeside_path):
for filename in fnmatch.filter(filenames, '*.rst'):
rst_files.append(os.path.join(root, filename))
for filename in rst_files:
tests.addTests(doctest.DocFileSuite(filename, module_relative=False))
# Create tests for doctest in timeside modules and sub-modules
modules_list = [modname for _, modname, _ in pkgutil.walk_packages(
path=timeside.__path__,
prefix=timeside.__name__ + '.',
onerror=lambda x: None)]
for module in modules_list:
tests.addTests(doctest.DocTestSuite(module, test_finder=finder))
return tests
if __name__ == '__main__':
unittest.main()
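# --- Editor's note: illustrative helper, not part of the original commit. ---
# Because this module defines the load_tests() hook, a plain unittest run of
# the module already collects every doctest. The helper below builds the same
# suite programmatically, which can be handy when debugging which doctests
# were actually discovered.
def build_doctest_suite():
  loader = unittest.TestLoader()
  return load_tests(loader, unittest.TestSuite(), None)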
|
Add a test collecting all doctests
|
Add a test collecting all doctests
|
Python
|
agpl-3.0
|
Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide
|
Add a test collecting all doctests
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2007-2013 Parisson SARL
# This file is part of TimeSide.
# TimeSide is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# TimeSide is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with TimeSide. If not, see <http://www.gnu.org/licenses/>.
# Authors:
# Thomas Fillon <thomas at parisson.com>
import unittest
import doctest
import timeside
import pkgutil
def load_tests(loader, tests, ignore):
import fnmatch
import os
finder = doctest.DocTestFinder(exclude_empty=False)
timeside_path = os.path.dirname(timeside.__path__[0])
# Create tests for doctest ReST files
rst_files = []
for root, dirnames, filenames in os.walk(timeside_path):
for filename in fnmatch.filter(filenames, '*.rst'):
rst_files.append(os.path.join(root, filename))
for filename in rst_files:
tests.addTests(doctest.DocFileSuite(filename, module_relative=False))
# Create tests for doctest in timeside modules and sub-modules
modules_list = [modname for _, modname, _ in pkgutil.walk_packages(
path=timeside.__path__,
prefix=timeside.__name__ + '.',
onerror=lambda x: None)]
for module in modules_list:
tests.addTests(doctest.DocTestSuite(module, test_finder=finder))
return tests
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a test collecting all doctests<commit_after>
|
# -*- coding: utf-8 -*-
#
# Copyright (c) 2007-2013 Parisson SARL
# This file is part of TimeSide.
# TimeSide is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# TimeSide is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with TimeSide. If not, see <http://www.gnu.org/licenses/>.
# Authors:
# Thomas Fillon <thomas at parisson.com>
import unittest
import doctest
import timeside
import pkgutil
def load_tests(loader, tests, ignore):
import fnmatch
import os
finder = doctest.DocTestFinder(exclude_empty=False)
timeside_path = os.path.dirname(timeside.__path__[0])
# Create tests for doctest ReST files
rst_files = []
for root, dirnames, filenames in os.walk(timeside_path):
for filename in fnmatch.filter(filenames, '*.rst'):
rst_files.append(os.path.join(root, filename))
for filename in rst_files:
tests.addTests(doctest.DocFileSuite(filename, module_relative=False))
# Create tests for doctest in timeside modules and sub-modules
modules_list = [modname for _, modname, _ in pkgutil.walk_packages(
path=timeside.__path__,
prefix=timeside.__name__ + '.',
onerror=lambda x: None)]
for module in modules_list:
tests.addTests(doctest.DocTestSuite(module, test_finder=finder))
return tests
if __name__ == '__main__':
unittest.main()
|
Add a test collecting all doctests# -*- coding: utf-8 -*-
#
# Copyright (c) 2007-2013 Parisson SARL
# This file is part of TimeSide.
# TimeSide is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# TimeSide is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with TimeSide. If not, see <http://www.gnu.org/licenses/>.
# Authors:
# Thomas Fillon <thomas at parisson.com>
import unittest
import doctest
import timeside
import pkgutil
def load_tests(loader, tests, ignore):
import fnmatch
import os
finder = doctest.DocTestFinder(exclude_empty=False)
timeside_path = os.path.dirname(timeside.__path__[0])
# Create tests for doctest ReST files
rst_files = []
for root, dirnames, filenames in os.walk(timeside_path):
for filename in fnmatch.filter(filenames, '*.rst'):
rst_files.append(os.path.join(root, filename))
for filename in rst_files:
tests.addTests(doctest.DocFileSuite(filename, module_relative=False))
# Create tests for doctest in timeside modules and sub-modules
modules_list = [modname for _, modname, _ in pkgutil.walk_packages(
path=timeside.__path__,
prefix=timeside.__name__ + '.',
onerror=lambda x: None)]
for module in modules_list:
tests.addTests(doctest.DocTestSuite(module, test_finder=finder))
return tests
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add a test collecting all doctests<commit_after># -*- coding: utf-8 -*-
#
# Copyright (c) 2007-2013 Parisson SARL
# This file is part of TimeSide.
# TimeSide is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
# TimeSide is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with TimeSide. If not, see <http://www.gnu.org/licenses/>.
# Authors:
# Thomas Fillon <thomas at parisson.com>
import unittest
import doctest
import timeside
import pkgutil
def load_tests(loader, tests, ignore):
import fnmatch
import os
finder = doctest.DocTestFinder(exclude_empty=False)
timeside_path = os.path.dirname(timeside.__path__[0])
# Create tests for doctest ReST files
rst_files = []
for root, dirnames, filenames in os.walk(timeside_path):
for filename in fnmatch.filter(filenames, '*.rst'):
rst_files.append(os.path.join(root, filename))
for filename in rst_files:
tests.addTests(doctest.DocFileSuite(filename, module_relative=False))
# Create tests for doctest in timeside modules and sub-modules
modules_list = [modname for _, modname, _ in pkgutil.walk_packages(
path=timeside.__path__,
prefix=timeside.__name__ + '.',
onerror=lambda x: None)]
for module in modules_list:
tests.addTests(doctest.DocTestSuite(module, test_finder=finder))
return tests
if __name__ == '__main__':
unittest.main()
|
|
fe9dd692d07e1924c88622e3e00c75c2fe5c65db
|
examples/listradio.py
|
examples/listradio.py
|
import gobject
import gtk
from kiwi.ui.widgets.list import Column, List
class Object:
def __init__(self, name, value):
self.name, self.value = name, value
columns = [Column('name'),
Column('value', data_type=bool, radio=True, editable=True)]
win = gtk.Window()
win.set_size_request(300, 120)
win.connect('delete-event', gtk.main_quit)
list = List(columns)
win.add(list)
for name, value in [('First', False),
('Second', False),
('Third', True),
('Fourth', False),
('Fifth', False)]:
list.add_instance(Object(name, value))
win.show_all()
gtk.main()
|
Test for radio buttons in lists
|
Test for radio buttons in lists
|
Python
|
lgpl-2.1
|
stoq/kiwi
|
Test for radio buttons in lists
|
import gobject
import gtk
from kiwi.ui.widgets.list import Column, List
class Object:
def __init__(self, name, value):
self.name, self.value = name, value
columns = [Column('name'),
Column('value', data_type=bool, radio=True, editable=True)]
win = gtk.Window()
win.set_size_request(300, 120)
win.connect('delete-event', gtk.main_quit)
list = List(columns)
win.add(list)
for name, value in [('First', False),
('Second', False),
('Third', True),
('Fourth', False),
('Fifth', False)]:
list.add_instance(Object(name, value))
win.show_all()
gtk.main()
|
<commit_before><commit_msg>Test for radio buttons in lists<commit_after>
|
import gobject
import gtk
from kiwi.ui.widgets.list import Column, List
class Object:
def __init__(self, name, value):
self.name, self.value = name, value
columns = [Column('name'),
Column('value', data_type=bool, radio=True, editable=True)]
win = gtk.Window()
win.set_size_request(300, 120)
win.connect('delete-event', gtk.main_quit)
list = List(columns)
win.add(list)
for name, value in [('First', False),
('Second', False),
('Third', True),
('Fourth', False),
('Fifth', False)]:
list.add_instance(Object(name, value))
win.show_all()
gtk.main()
|
Test for radio buttons in listsimport gobject
import gtk
from kiwi.ui.widgets.list import Column, List
class Object:
def __init__(self, name, value):
self.name, self.value = name, value
columns = [Column('name'),
Column('value', data_type=bool, radio=True, editable=True)]
win = gtk.Window()
win.set_size_request(300, 120)
win.connect('delete-event', gtk.main_quit)
list = List(columns)
win.add(list)
for name, value in [('First', False),
('Second', False),
('Third', True),
('Fourth', False),
('Fifth', False)]:
list.add_instance(Object(name, value))
win.show_all()
gtk.main()
|
<commit_before><commit_msg>Test for radio buttons in lists<commit_after>import gobject
import gtk
from kiwi.ui.widgets.list import Column, List
class Object:
def __init__(self, name, value):
self.name, self.value = name, value
columns = [Column('name'),
Column('value', data_type=bool, radio=True, editable=True)]
win = gtk.Window()
win.set_size_request(300, 120)
win.connect('delete-event', gtk.main_quit)
list = List(columns)
win.add(list)
for name, value in [('First', False),
('Second', False),
('Third', True),
('Fourth', False),
('Fifth', False)]:
list.add_instance(Object(name, value))
win.show_all()
gtk.main()
|
|
391de2d5920b4ebb12d488715cec4ef6839208d3
|
zproject/local_settings_template.py
|
zproject/local_settings_template.py
|
# Template for Django settings for the Zulip local servers
import os
import platform
import re
# TODO: Rewrite this file to be more or less self-documenting as to
# how to generate each token securely and what other setup is needed.
# For now, we'll do that piecewise by component.
# Make this unique, and don't share it with anybody.
SECRET_KEY = ''
# A fixed salt used for hashing in certain places, e.g. email-based
# username generation.
HASH_SALT = ''
# Use this salt to hash a user's email into a filename for their user-uploaded
# avatar. If this salt is discovered, attackers will only be able to determine
# that the owner of an email account has uploaded an avatar to Zulip, which isn't
# the end of the world. Don't use the salt where there is more security exposure.
AVATAR_SALT = ''
# A shared secret, used to authenticate different parts of the app to each other.
SHARED_SECRET = ''
# Password for rabbitmq
RABBITMQ_PASSWORD = ''
# TODO: Make USING_MAILCHIMP do something (and default to False)
USING_MAILCHIMP = False
# This can be filled in automatically from the database
FEEDBACK_BOT_KEY = ''
# TODO: Make USING_MANDRILL do something (and default to False)
USING_MANDRILL = False
# This needs to be synced with the camo installation
CAMO_KEY = ''
# TODO: Put in example values
EMAIL_USE_TLS = True
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT = 587
# Whether we're running in a production environment. Note that DEPLOYED does
# **not** mean hosted by us; customer sites are DEPLOYED and LOCALSERVER
# and as such should not for example assume they are the main Zulip site.
#
# TODO: Set these variables inside settings.py properly
DEPLOYED = os.path.exists('/etc/humbug-server')
STAGING_DEPLOYED = (platform.node() == 'staging.zulip.net')
TESTING_DEPLOYED = not not re.match(r'^test', platform.node())
LOCALSERVER = os.path.exists('/etc/zulip-local')
# TODO: Clean this up
if TESTING_DEPLOYED:
EXTERNAL_HOST = platform.node()
elif STAGING_DEPLOYED:
EXTERNAL_HOST = 'staging.zulip.com'
elif DEPLOYED:
EXTERNAL_HOST = 'zulip.com'
else:
EXTERNAL_HOST = 'localhost:9991'
# TODO: Make this not required
EMBEDLY_KEY=""
# TODO: Replace these
S3_KEY=""
S3_SECRET_KEY=""
S3_BUCKET=""
S3_AVATAR_BUCKET=""
# TODO: Replace these
MIXPANEL_TOKEN = ""
# TODO: Add twitter template variables below.
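# --- Editor's note: illustrative sketch, not part of the original template. ---
# SECRET_KEY, HASH_SALT, AVATAR_SALT and SHARED_SECRET above all need long,
# unpredictable values. One simple way to generate such a value with the
# standard library (os is already imported at the top of this file) is:
#
#   import base64
#   print base64.b64encode(os.urandom(48))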
|
Add local settings template for local server instances.
|
Add local settings template for local server instances.
(imported from commit 96f59e6a041992f5b2c467558a2fcc14a951a8a9)
|
Python
|
apache-2.0
|
dawran6/zulip,amyliu345/zulip,arpitpanwar/zulip,brainwane/zulip,jphilipsen05/zulip,synicalsyntax/zulip,Drooids/zulip,PaulPetring/zulip,ericzhou2008/zulip,codeKonami/zulip,paxapy/zulip,wdaher/zulip,swinghu/zulip,so0k/zulip,aps-sids/zulip,Diptanshu8/zulip,moria/zulip,ikasumiwt/zulip,seapasulli/zulip,mansilladev/zulip,hackerkid/zulip,arpitpanwar/zulip,zulip/zulip,zachallaun/zulip,brainwane/zulip,bssrdf/zulip,he15his/zulip,ApsOps/zulip,easyfmxu/zulip,wangdeshui/zulip,deer-hope/zulip,xuanhan863/zulip,ufosky-server/zulip,zwily/zulip,LAndreas/zulip,Suninus/zulip,KingxBanana/zulip,KJin99/zulip,calvinleenyc/zulip,ikasumiwt/zulip,jerryge/zulip,dawran6/zulip,tbutter/zulip,dnmfarrell/zulip,cosmicAsymmetry/zulip,wavelets/zulip,pradiptad/zulip,zorojean/zulip,amyliu345/zulip,PaulPetring/zulip,jessedhillon/zulip,wavelets/zulip,ashwinirudrappa/zulip,ryanbackman/zulip,jimmy54/zulip,Juanvulcano/zulip,itnihao/zulip,themass/zulip,easyfmxu/zulip,hayderimran7/zulip,codeKonami/zulip,Cheppers/zulip,lfranchi/zulip,joshisa/zulip,noroot/zulip,he15his/zulip,KJin99/zulip,MayB/zulip,karamcnair/zulip,adnanh/zulip,dotcool/zulip,tdr130/zulip,hengqujushi/zulip,EasonYi/zulip,Qgap/zulip,hafeez3000/zulip,MariaFaBella85/zulip,aps-sids/zulip,RobotCaleb/zulip,zachallaun/zulip,johnny9/zulip,KingxBanana/zulip,amanharitsh123/zulip,amyliu345/zulip,timabbott/zulip,SmartPeople/zulip,eeshangarg/zulip,easyfmxu/zulip,voidException/zulip,huangkebo/zulip,lfranchi/zulip,hj3938/zulip,amanharitsh123/zulip,praveenaki/zulip,sharmaeklavya2/zulip,johnny9/zulip,mohsenSy/zulip,ryansnowboarder/zulip,vakila/zulip,shaunstanislaus/zulip,dhcrzf/zulip,hackerkid/zulip,dattatreya303/zulip,aliceriot/zulip,hayderimran7/zulip,yuvipanda/zulip,Frouk/zulip,Drooids/zulip,grave-w-grave/zulip,levixie/zulip,amanharitsh123/zulip,ryansnowboarder/zulip,dxq-git/zulip,brockwhittaker/zulip,firstblade/zulip,hustlzp/zulip,jessedhillon/zulip,joyhchen/zulip,fw1121/zulip,littledogboy/zulip,bitemyapp/zulip,wavelets/zulip,qq1012803704/zulip,mahim97/zulip,Diptanshu8/zulip,xuanhan863/zulip,guiquanz/zulip,dnmfarrell/zulip,gigawhitlocks/zulip,johnnygaddarr/zulip,susansls/zulip,shrikrishnaholla/zulip,mohsenSy/zulip,babbage/zulip,jackrzhang/zulip,amallia/zulip,krtkmj/zulip,developerfm/zulip,johnnygaddarr/zulip,LAndreas/zulip,rht/zulip,jackrzhang/zulip,zorojean/zulip,akuseru/zulip,j831/zulip,adnanh/zulip,zorojean/zulip,mdavid/zulip,bitemyapp/zulip,swinghu/zulip,samatdav/zulip,dattatreya303/zulip,so0k/zulip,ryanbackman/zulip,qq1012803704/zulip,JanzTam/zulip,krtkmj/zulip,guiquanz/zulip,aps-sids/zulip,vaidap/zulip,gigawhitlocks/zulip,eastlhu/zulip,arpitpanwar/zulip,Gabriel0402/zulip,dxq-git/zulip,vaidap/zulip,EasonYi/zulip,tdr130/zulip,nicholasbs/zulip,glovebx/zulip,johnnygaddarr/zulip,vikas-parashar/zulip,hj3938/zulip,swinghu/zulip,eastlhu/zulip,ryansnowboarder/zulip,ahmadassaf/zulip,andersk/zulip,hustlzp/zulip,kokoar/zulip,aliceriot/zulip,saitodisse/zulip,dwrpayne/zulip,PhilSk/zulip,LeeRisk/zulip,KJin99/zulip,natanovia/zulip,lfranchi/zulip,littledogboy/zulip,m1ssou/zulip,RobotCaleb/zulip,jessedhillon/zulip,joshisa/zulip,levixie/zulip,RobotCaleb/zulip,Galexrt/zulip,krtkmj/zulip,rishig/zulip,hackerkid/zulip,xuxiao/zulip,tommyip/zulip,brockwhittaker/zulip,zwily/zulip,shaunstanislaus/zulip,glovebx/zulip,codeKonami/zulip,schatt/zulip,KJin99/zulip,zwily/zulip,niftynei/zulip,proliming/zulip,JanzTam/zulip,yocome/zulip,aps-sids/zulip,ufosky-server/zulip,armooo/zulip,mahim97/zulip,deer-hope/zulip,seapasulli/zulip,seapasulli/zulip,themass/zulip,peguin40/zulip,shaunstanislaus/zulip,vikas-parashar/zul
ip,Gabriel0402/zulip,isht3/zulip,bitemyapp/zulip,jainayush975/zulip,dhcrzf/zulip,bowlofstew/zulip,AZtheAsian/zulip,armooo/zulip,zorojean/zulip,adnanh/zulip,AZtheAsian/zulip,showell/zulip,gigawhitlocks/zulip,levixie/zulip,Qgap/zulip,jessedhillon/zulip,ApsOps/zulip,umkay/zulip,shubhamdhama/zulip,ericzhou2008/zulip,ahmadassaf/zulip,itnihao/zulip,Batterfii/zulip,bowlofstew/zulip,JanzTam/zulip,umkay/zulip,wangdeshui/zulip,dwrpayne/zulip,RobotCaleb/zulip,Drooids/zulip,tommyip/zulip,vakila/zulip,hengqujushi/zulip,zacps/zulip,bitemyapp/zulip,voidException/zulip,armooo/zulip,alliejones/zulip,jessedhillon/zulip,ipernet/zulip,PhilSk/zulip,Diptanshu8/zulip,johnny9/zulip,verma-varsha/zulip,amallia/zulip,niftynei/zulip,Frouk/zulip,johnnygaddarr/zulip,zulip/zulip,MariaFaBella85/zulip,isht3/zulip,fw1121/zulip,armooo/zulip,deer-hope/zulip,luyifan/zulip,amanharitsh123/zulip,shubhamdhama/zulip,johnny9/zulip,jimmy54/zulip,andersk/zulip,paxapy/zulip,amallia/zulip,Jianchun1/zulip,nicholasbs/zulip,huangkebo/zulip,MariaFaBella85/zulip,itnihao/zulip,samatdav/zulip,Cheppers/zulip,sonali0901/zulip,willingc/zulip,yocome/zulip,amallia/zulip,rishig/zulip,wweiradio/zulip,vikas-parashar/zulip,thomasboyt/zulip,Gabriel0402/zulip,ashwinirudrappa/zulip,dnmfarrell/zulip,aliceriot/zulip,avastu/zulip,Cheppers/zulip,bastianh/zulip,kou/zulip,willingc/zulip,dwrpayne/zulip,m1ssou/zulip,deer-hope/zulip,tdr130/zulip,suxinde2009/zulip,susansls/zulip,voidException/zulip,qq1012803704/zulip,ipernet/zulip,johnnygaddarr/zulip,KingxBanana/zulip,Batterfii/zulip,AZtheAsian/zulip,firstblade/zulip,Vallher/zulip,saitodisse/zulip,littledogboy/zulip,glovebx/zulip,stamhe/zulip,bastianh/zulip,schatt/zulip,levixie/zulip,yuvipanda/zulip,dotcool/zulip,littledogboy/zulip,yuvipanda/zulip,brainwane/zulip,eeshangarg/zulip,alliejones/zulip,hayderimran7/zulip,zacps/zulip,babbage/zulip,sonali0901/zulip,dhcrzf/zulip,peiwei/zulip,pradiptad/zulip,xuxiao/zulip,lfranchi/zulip,qq1012803704/zulip,Juanvulcano/zulip,dawran6/zulip,moria/zulip,wweiradio/zulip,dattatreya303/zulip,showell/zulip,jimmy54/zulip,bluesea/zulip,gkotian/zulip,swinghu/zulip,xuxiao/zulip,ufosky-server/zulip,rht/zulip,moria/zulip,synicalsyntax/zulip,vikas-parashar/zulip,eastlhu/zulip,sup95/zulip,yuvipanda/zulip,showell/zulip,susansls/zulip,zofuthan/zulip,jonesgithub/zulip,voidException/zulip,hengqujushi/zulip,EasonYi/zulip,Juanvulcano/zulip,mdavid/zulip,MayB/zulip,zachallaun/zulip,synicalsyntax/zulip,paxapy/zulip,arpith/zulip,fw1121/zulip,jonesgithub/zulip,andersk/zulip,bssrdf/zulip,KJin99/zulip,he15his/zulip,j831/zulip,kokoar/zulip,grave-w-grave/zulip,mahim97/zulip,levixie/zulip,bastianh/zulip,seapasulli/zulip,ryanbackman/zulip,codeKonami/zulip,MariaFaBella85/zulip,showell/zulip,Qgap/zulip,MariaFaBella85/zulip,LeeRisk/zulip,ahmadassaf/zulip,m1ssou/zulip,PaulPetring/zulip,wangdeshui/zulip,isht3/zulip,DazWorrall/zulip,firstblade/zulip,synicalsyntax/zulip,souravbadami/zulip,esander91/zulip,MariaFaBella85/zulip,levixie/zulip,DazWorrall/zulip,zulip/zulip,calvinleenyc/zulip,jphilipsen05/zulip,Suninus/zulip,zorojean/zulip,ipernet/zulip,gigawhitlocks/zulip,natanovia/zulip,zulip/zulip,PaulPetring/zulip,avastu/zulip,grave-w-grave/zulip,TigorC/zulip,vabs22/zulip,Drooids/zulip,schatt/zulip,gkotian/zulip,akuseru/zulip,paxapy/zulip,verma-varsha/zulip,armooo/zulip,jerryge/zulip,schatt/zulip,hustlzp/zulip,dnmfarrell/zulip,Galexrt/zulip,tiansiyuan/zulip,mansilladev/zulip,moria/zulip,itnihao/zulip,natanovia/zulip,bluesea/zulip,dattatreya303/zulip,dhcrzf/zulip,hj3938/zulip,hustlzp/zulip,verma-varsha/zulip,nicholasbs/
zulip,nicholasbs/zulip,LeeRisk/zulip,hackerkid/zulip,verma-varsha/zulip,zofuthan/zulip,qq1012803704/zulip,joyhchen/zulip,JPJPJPOPOP/zulip,LAndreas/zulip,hayderimran7/zulip,dhcrzf/zulip,jainayush975/zulip,vakila/zulip,brainwane/zulip,aps-sids/zulip,paxapy/zulip,mansilladev/zulip,ahmadassaf/zulip,aakash-cr7/zulip,developerfm/zulip,xuxiao/zulip,RobotCaleb/zulip,reyha/zulip,themass/zulip,synicalsyntax/zulip,m1ssou/zulip,Vallher/zulip,qq1012803704/zulip,huangkebo/zulip,udxxabp/zulip,Drooids/zulip,j831/zulip,hackerkid/zulip,zwily/zulip,suxinde2009/zulip,hj3938/zulip,Cheppers/zulip,Qgap/zulip,hustlzp/zulip,vakila/zulip,PaulPetring/zulip,gigawhitlocks/zulip,eastlhu/zulip,mdavid/zulip,vaidap/zulip,arpith/zulip,ApsOps/zulip,nicholasbs/zulip,avastu/zulip,wavelets/zulip,rishig/zulip,itnihao/zulip,brockwhittaker/zulip,saitodisse/zulip,suxinde2009/zulip,alliejones/zulip,ryansnowboarder/zulip,nicholasbs/zulip,jimmy54/zulip,Cheppers/zulip,jonesgithub/zulip,ApsOps/zulip,suxinde2009/zulip,dotcool/zulip,samatdav/zulip,vabs22/zulip,aakash-cr7/zulip,ufosky-server/zulip,itnihao/zulip,LAndreas/zulip,bluesea/zulip,fw1121/zulip,punchagan/zulip,amanharitsh123/zulip,jainayush975/zulip,tiansiyuan/zulip,tiansiyuan/zulip,tommyip/zulip,dnmfarrell/zulip,qq1012803704/zulip,christi3k/zulip,schatt/zulip,zacps/zulip,JPJPJPOPOP/zulip,peiwei/zulip,ikasumiwt/zulip,christi3k/zulip,ryanbackman/zulip,dxq-git/zulip,adnanh/zulip,umkay/zulip,Galexrt/zulip,rht/zulip,sonali0901/zulip,LAndreas/zulip,praveenaki/zulip,themass/zulip,peguin40/zulip,udxxabp/zulip,gkotian/zulip,JanzTam/zulip,joyhchen/zulip,mohsenSy/zulip,saitodisse/zulip,babbage/zulip,glovebx/zulip,yocome/zulip,blaze225/zulip,atomic-labs/zulip,bluesea/zulip,peguin40/zulip,niftynei/zulip,LAndreas/zulip,ryansnowboarder/zulip,zwily/zulip,amallia/zulip,niftynei/zulip,littledogboy/zulip,willingc/zulip,showell/zulip,tdr130/zulip,andersk/zulip,jonesgithub/zulip,andersk/zulip,atomic-labs/zulip,KJin99/zulip,m1ssou/zulip,nicholasbs/zulip,shrikrishnaholla/zulip,RobotCaleb/zulip,ahmadassaf/zulip,mohsenSy/zulip,he15his/zulip,ashwinirudrappa/zulip,alliejones/zulip,deer-hope/zulip,natanovia/zulip,m1ssou/zulip,Galexrt/zulip,ikasumiwt/zulip,kaiyuanheshang/zulip,verma-varsha/zulip,atomic-labs/zulip,wweiradio/zulip,Galexrt/zulip,dwrpayne/zulip,eeshangarg/zulip,jrowan/zulip,DazWorrall/zulip,calvinleenyc/zulip,tbutter/zulip,umkay/zulip,tommyip/zulip,yuvipanda/zulip,udxxabp/zulip,dxq-git/zulip,punchagan/zulip,timabbott/zulip,udxxabp/zulip,Juanvulcano/zulip,praveenaki/zulip,Qgap/zulip,susansls/zulip,ipernet/zulip,bowlofstew/zulip,Frouk/zulip,joyhchen/zulip,wavelets/zulip,bowlofstew/zulip,showell/zulip,voidException/zulip,stamhe/zulip,hayderimran7/zulip,souravbadami/zulip,xuxiao/zulip,vaidap/zulip,wavelets/zulip,wdaher/zulip,ericzhou2008/zulip,ryanbackman/zulip,ikasumiwt/zulip,schatt/zulip,ufosky-server/zulip,amyliu345/zulip,jphilipsen05/zulip,zachallaun/zulip,kaiyuanheshang/zulip,sup95/zulip,zacps/zulip,j831/zulip,TigorC/zulip,AZtheAsian/zulip,Frouk/zulip,jrowan/zulip,dattatreya303/zulip,JPJPJPOPOP/zulip,bowlofstew/zulip,Gabriel0402/zulip,hafeez3000/zulip,noroot/zulip,fw1121/zulip,peguin40/zulip,wdaher/zulip,avastu/zulip,kaiyuanheshang/zulip,krtkmj/zulip,bluesea/zulip,aliceriot/zulip,proliming/zulip,praveenaki/zulip,punchagan/zulip,LeeRisk/zulip,firstblade/zulip,Frouk/zulip,schatt/zulip,saitodisse/zulip,bitemyapp/zulip,Galexrt/zulip,MayB/zulip,Jianchun1/zulip,calvinleenyc/zulip,hengqujushi/zulip,noroot/zulip,thomasboyt/zulip,bastianh/zulip,alliejones/zulip,tbutter/zulip,wweiradio/zulip,LeeRisk/zulip
,luyifan/zulip,Gabriel0402/zulip,zofuthan/zulip,karamcnair/zulip,JanzTam/zulip,dwrpayne/zulip,esander91/zulip,zulip/zulip,levixie/zulip,kou/zulip,LAndreas/zulip,brainwane/zulip,krtkmj/zulip,zhaoweigg/zulip,aliceriot/zulip,timabbott/zulip,luyifan/zulip,tiansiyuan/zulip,gkotian/zulip,SmartPeople/zulip,brockwhittaker/zulip,willingc/zulip,Diptanshu8/zulip,adnanh/zulip,PaulPetring/zulip,ufosky-server/zulip,tommyip/zulip,zhaoweigg/zulip,esander91/zulip,j831/zulip,shaunstanislaus/zulip,cosmicAsymmetry/zulip,armooo/zulip,christi3k/zulip,xuanhan863/zulip,alliejones/zulip,rht/zulip,karamcnair/zulip,kokoar/zulip,Qgap/zulip,vabs22/zulip,jainayush975/zulip,SmartPeople/zulip,gkotian/zulip,glovebx/zulip,ashwinirudrappa/zulip,luyifan/zulip,christi3k/zulip,blaze225/zulip,akuseru/zulip,brockwhittaker/zulip,kou/zulip,atomic-labs/zulip,bowlofstew/zulip,ipernet/zulip,timabbott/zulip,ApsOps/zulip,arpith/zulip,ashwinirudrappa/zulip,jphilipsen05/zulip,easyfmxu/zulip,MayB/zulip,punchagan/zulip,niftynei/zulip,susansls/zulip,ipernet/zulip,mdavid/zulip,shaunstanislaus/zulip,peiwei/zulip,TigorC/zulip,TigorC/zulip,pradiptad/zulip,dattatreya303/zulip,TigorC/zulip,zhaoweigg/zulip,natanovia/zulip,hj3938/zulip,souravbadami/zulip,adnanh/zulip,hackerkid/zulip,proliming/zulip,tbutter/zulip,sonali0901/zulip,amanharitsh123/zulip,willingc/zulip,EasonYi/zulip,babbage/zulip,TigorC/zulip,sup95/zulip,ryanbackman/zulip,Suninus/zulip,guiquanz/zulip,natanovia/zulip,noroot/zulip,umkay/zulip,jerryge/zulip,noroot/zulip,arpith/zulip,Vallher/zulip,mansilladev/zulip,jerryge/zulip,yuvipanda/zulip,aliceriot/zulip,PhilSk/zulip,sonali0901/zulip,lfranchi/zulip,eeshangarg/zulip,zulip/zulip,yuvipanda/zulip,jonesgithub/zulip,esander91/zulip,seapasulli/zulip,Jianchun1/zulip,technicalpickles/zulip,suxinde2009/zulip,mdavid/zulip,bssrdf/zulip,dnmfarrell/zulip,Cheppers/zulip,willingc/zulip,dotcool/zulip,so0k/zulip,kou/zulip,zulip/zulip,samatdav/zulip,KingxBanana/zulip,bssrdf/zulip,MariaFaBella85/zulip,Drooids/zulip,themass/zulip,vaidap/zulip,bastianh/zulip,udxxabp/zulip,Qgap/zulip,showell/zulip,shrikrishnaholla/zulip,umkay/zulip,littledogboy/zulip,stamhe/zulip,moria/zulip,vakila/zulip,codeKonami/zulip,wdaher/zulip,bastianh/zulip,dxq-git/zulip,jrowan/zulip,developerfm/zulip,proliming/zulip,akuseru/zulip,bluesea/zulip,synicalsyntax/zulip,sup95/zulip,EasonYi/zulip,avastu/zulip,Diptanshu8/zulip,huangkebo/zulip,aakash-cr7/zulip,shubhamdhama/zulip,cosmicAsymmetry/zulip,he15his/zulip,moria/zulip,eeshangarg/zulip,xuanhan863/zulip,wangdeshui/zulip,krtkmj/zulip,timabbott/zulip,jonesgithub/zulip,yocome/zulip,he15his/zulip,souravbadami/zulip,Cheppers/zulip,Suninus/zulip,esander91/zulip,joshisa/zulip,EasonYi/zulip,suxinde2009/zulip,zofuthan/zulip,bitemyapp/zulip,babbage/zulip,noroot/zulip,technicalpickles/zulip,shrikrishnaholla/zulip,ericzhou2008/zulip,jeffcao/zulip,jerryge/zulip,tdr130/zulip,isht3/zulip,udxxabp/zulip,Frouk/zulip,Galexrt/zulip,eeshangarg/zulip,rishig/zulip,jrowan/zulip,codeKonami/zulip,PhilSk/zulip,xuanhan863/zulip,timabbott/zulip,Diptanshu8/zulip,sup95/zulip,Vallher/zulip,andersk/zulip,cosmicAsymmetry/zulip,atomic-labs/zulip,guiquanz/zulip,ahmadassaf/zulip,technicalpickles/zulip,blaze225/zulip,synicalsyntax/zulip,peguin40/zulip,guiquanz/zulip,christi3k/zulip,tdr130/zulip,joshisa/zulip,amallia/zulip,PaulPetring/zulip,Jianchun1/zulip,easyfmxu/zulip,pradiptad/zulip,sharmaeklavya2/zulip,mohsenSy/zulip,ApsOps/zulip,rht/zulip,adnanh/zulip,rht/zulip,natanovia/zulip,tiansiyuan/zulip,jackrzhang/zulip,guiquanz/zulip,zofuthan/zulip,jeffcao/zulip,kou/zulip,Smart
People/zulip,souravbadami/zulip,bssrdf/zulip,gkotian/zulip,aakash-cr7/zulip,zhaoweigg/zulip,developerfm/zulip,atomic-labs/zulip,shaunstanislaus/zulip,jackrzhang/zulip,aps-sids/zulip,jackrzhang/zulip,jphilipsen05/zulip,calvinleenyc/zulip,AZtheAsian/zulip,thomasboyt/zulip,ericzhou2008/zulip,bastianh/zulip,vikas-parashar/zulip,joshisa/zulip,m1ssou/zulip,babbage/zulip,xuxiao/zulip,zachallaun/zulip,isht3/zulip,MayB/zulip,KingxBanana/zulip,kaiyuanheshang/zulip,karamcnair/zulip,kokoar/zulip,dotcool/zulip,esander91/zulip,johnny9/zulip,atomic-labs/zulip,technicalpickles/zulip,arpitpanwar/zulip,so0k/zulip,ashwinirudrappa/zulip,Jianchun1/zulip,dawran6/zulip,johnnygaddarr/zulip,noroot/zulip,arpitpanwar/zulip,punchagan/zulip,dxq-git/zulip,brockwhittaker/zulip,eastlhu/zulip,sonali0901/zulip,yocome/zulip,dnmfarrell/zulip,jphilipsen05/zulip,cosmicAsymmetry/zulip,so0k/zulip,praveenaki/zulip,thomasboyt/zulip,JPJPJPOPOP/zulip,saitodisse/zulip,joshisa/zulip,wavelets/zulip,sup95/zulip,karamcnair/zulip,jeffcao/zulip,dotcool/zulip,Gabriel0402/zulip,mahim97/zulip,huangkebo/zulip,sharmaeklavya2/zulip,peiwei/zulip,joyhchen/zulip,Juanvulcano/zulip,hj3938/zulip,seapasulli/zulip,developerfm/zulip,udxxabp/zulip,kokoar/zulip,eeshangarg/zulip,themass/zulip,praveenaki/zulip,thomasboyt/zulip,voidException/zulip,niftynei/zulip,stamhe/zulip,wangdeshui/zulip,technicalpickles/zulip,hayderimran7/zulip,avastu/zulip,so0k/zulip,Jianchun1/zulip,zacps/zulip,karamcnair/zulip,shrikrishnaholla/zulip,reyha/zulip,j831/zulip,LeeRisk/zulip,firstblade/zulip,avastu/zulip,amyliu345/zulip,vakila/zulip,rishig/zulip,andersk/zulip,joyhchen/zulip,huangkebo/zulip,luyifan/zulip,Suninus/zulip,aps-sids/zulip,easyfmxu/zulip,hayderimran7/zulip,hafeez3000/zulip,ufosky-server/zulip,blaze225/zulip,peiwei/zulip,Vallher/zulip,Juanvulcano/zulip,zofuthan/zulip,voidException/zulip,akuseru/zulip,JanzTam/zulip,developerfm/zulip,kaiyuanheshang/zulip,thomasboyt/zulip,stamhe/zulip,jeffcao/zulip,kou/zulip,technicalpickles/zulip,vabs22/zulip,Batterfii/zulip,wangdeshui/zulip,mohsenSy/zulip,samatdav/zulip,MayB/zulip,Frouk/zulip,tbutter/zulip,PhilSk/zulip,ikasumiwt/zulip,hustlzp/zulip,Gabriel0402/zulip,Drooids/zulip,huangkebo/zulip,xuanhan863/zulip,he15his/zulip,deer-hope/zulip,zachallaun/zulip,krtkmj/zulip,vabs22/zulip,Suninus/zulip,hengqujushi/zulip,tbutter/zulip,stamhe/zulip,jeffcao/zulip,kou/zulip,shrikrishnaholla/zulip,hafeez3000/zulip,KingxBanana/zulip,ryansnowboarder/zulip,esander91/zulip,mansilladev/zulip,mdavid/zulip,gigawhitlocks/zulip,zhaoweigg/zulip,dotcool/zulip,reyha/zulip,eastlhu/zulip,firstblade/zulip,DazWorrall/zulip,zorojean/zulip,pradiptad/zulip,JPJPJPOPOP/zulip,aakash-cr7/zulip,jessedhillon/zulip,dawran6/zulip,dhcrzf/zulip,mdavid/zulip,codeKonami/zulip,verma-varsha/zulip,brainwane/zulip,JanzTam/zulip,eastlhu/zulip,zhaoweigg/zulip,Batterfii/zulip,bluesea/zulip,blaze225/zulip,brainwane/zulip,tommyip/zulip,bssrdf/zulip,AZtheAsian/zulip,thomasboyt/zulip,wangdeshui/zulip,susansls/zulip,tdr130/zulip,vaidap/zulip,Suninus/zulip,calvinleenyc/zulip,ipernet/zulip,tiansiyuan/zulip,tommyip/zulip,suxinde2009/zulip,akuseru/zulip,JPJPJPOPOP/zulip,wweiradio/zulip,gigawhitlocks/zulip,jeffcao/zulip,vakila/zulip,jainayush975/zulip,wdaher/zulip,aakash-cr7/zulip,alliejones/zulip,pradiptad/zulip,arpith/zulip,jessedhillon/zulip,ahmadassaf/zulip,armooo/zulip,vabs22/zulip,praveenaki/zulip,amyliu345/zulip,lfranchi/zulip,isht3/zulip,shubhamdhama/zulip,mansilladev/zulip,Vallher/zulip,hackerkid/zulip,technicalpickles/zulip,mahim97/zulip,grave-w-grave/zulip,PhilSk/zulip,reyha/zulip,
deer-hope/zulip,johnny9/zulip,jrowan/zulip,dwrpayne/zulip,timabbott/zulip,paxapy/zulip,ericzhou2008/zulip,shaunstanislaus/zulip,shubhamdhama/zulip,arpitpanwar/zulip,kaiyuanheshang/zulip,bowlofstew/zulip,so0k/zulip,DazWorrall/zulip,grave-w-grave/zulip,dhcrzf/zulip,RobotCaleb/zulip,hengqujushi/zulip,willingc/zulip,amallia/zulip,bssrdf/zulip,ryansnowboarder/zulip,fw1121/zulip,jimmy54/zulip,hengqujushi/zulip,guiquanz/zulip,kokoar/zulip,zhaoweigg/zulip,wweiradio/zulip,proliming/zulip,Vallher/zulip,blaze225/zulip,jrowan/zulip,aliceriot/zulip,ashwinirudrappa/zulip,dxq-git/zulip,lfranchi/zulip,babbage/zulip,ikasumiwt/zulip,EasonYi/zulip,sharmaeklavya2/zulip,rishig/zulip,kaiyuanheshang/zulip,hafeez3000/zulip,DazWorrall/zulip,seapasulli/zulip,christi3k/zulip,jackrzhang/zulip,easyfmxu/zulip,kokoar/zulip,hafeez3000/zulip,zwily/zulip,swinghu/zulip,zachallaun/zulip,Batterfii/zulip,zofuthan/zulip,shrikrishnaholla/zulip,fw1121/zulip,hustlzp/zulip,Batterfii/zulip,wdaher/zulip,grave-w-grave/zulip,Batterfii/zulip,jimmy54/zulip,firstblade/zulip,jainayush975/zulip,sharmaeklavya2/zulip,xuxiao/zulip,peguin40/zulip,reyha/zulip,jeffcao/zulip,punchagan/zulip,tbutter/zulip,SmartPeople/zulip,luyifan/zulip,developerfm/zulip,dwrpayne/zulip,itnihao/zulip,DazWorrall/zulip,pradiptad/zulip,LeeRisk/zulip,xuanhan863/zulip,reyha/zulip,shubhamdhama/zulip,rht/zulip,umkay/zulip,punchagan/zulip,joshisa/zulip,moria/zulip,swinghu/zulip,stamhe/zulip,karamcnair/zulip,swinghu/zulip,mansilladev/zulip,zacps/zulip,wweiradio/zulip,gkotian/zulip,glovebx/zulip,jackrzhang/zulip,johnny9/zulip,glovebx/zulip,zorojean/zulip,SmartPeople/zulip,littledogboy/zulip,akuseru/zulip,yocome/zulip,wdaher/zulip,themass/zulip,bitemyapp/zulip,arpitpanwar/zulip,proliming/zulip,mahim97/zulip,proliming/zulip,rishig/zulip,arpith/zulip,jimmy54/zulip,jerryge/zulip,johnnygaddarr/zulip,MayB/zulip,zwily/zulip,ericzhou2008/zulip,tiansiyuan/zulip,peiwei/zulip,yocome/zulip,hj3938/zulip,cosmicAsymmetry/zulip,ApsOps/zulip,saitodisse/zulip,KJin99/zulip,souravbadami/zulip,sharmaeklavya2/zulip,shubhamdhama/zulip,samatdav/zulip,dawran6/zulip,jerryge/zulip,vikas-parashar/zulip,luyifan/zulip,peiwei/zulip,hafeez3000/zulip,jonesgithub/zulip
|
Add local settings template for local server instances.
(imported from commit 96f59e6a041992f5b2c467558a2fcc14a951a8a9)
|
# Template for Django settings for the Zulip local servers
import os
import platform
import re
# TODO: Rewrite this file to be more or less self-documenting as to
# how to generate each token securely and what other setup is needed.
# For now, we'll do that piecewise by component.
# Make this unique, and don't share it with anybody.
SECRET_KEY = ''
# A fixed salt used for hashing in certain places, e.g. email-based
# username generation.
HASH_SALT = ''
# Use this salt to hash a user's email into a filename for their user-uploaded
# avatar. If this salt is discovered, attackers will only be able to determine
# that the owner of an email account has uploaded an avatar to Zulip, which isn't
# the end of the world. Don't use the salt where there is more security exposure.
AVATAR_SALT = ''
# A shared secret, used to authenticate different parts of the app to each other.
SHARED_SECRET = ''
# Password for rabbitmq
RABBITMQ_PASSWORD = ''
# TODO: Make USING_MAILCHIMP do something (and default to False)
USING_MAILCHIMP = False
# This can be filled in automatically from the database
FEEDBACK_BOT_KEY = ''
# TODO: Make USING_MANDRILL do something (and default to False)
USING_MANDRILL = False
# This needs to be synced with the camo installation
CAMO_KEY = ''
# TODO: Put in example values
EMAIL_USE_TLS = True
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT = 587
# Whether we're running in a production environment. Note that DEPLOYED does
# **not** mean hosted by us; customer sites are DEPLOYED and LOCALSERVER
# and as such should not for example assume they are the main Zulip site.
#
# TODO: Set these variables inside settings.py properly
DEPLOYED = os.path.exists('/etc/humbug-server')
STAGING_DEPLOYED = (platform.node() == 'staging.zulip.net')
TESTING_DEPLOYED = not not re.match(r'^test', platform.node())
LOCALSERVER = os.path.exists('/etc/zulip-local')
# TODO: Clean this up
if TESTING_DEPLOYED:
EXTERNAL_HOST = platform.node()
elif STAGING_DEPLOYED:
EXTERNAL_HOST = 'staging.zulip.com'
elif DEPLOYED:
EXTERNAL_HOST = 'zulip.com'
else:
EXTERNAL_HOST = 'localhost:9991'
# TODO: Make this not required
EMBEDLY_KEY=""
# TODO: Replace these
S3_KEY=""
S3_SECRET_KEY=""
S3_BUCKET=""
S3_AVATAR_BUCKET=""
# TODO: Replace these
MIXPANEL_TOKEN = ""
# TODO: Add twitter template variables below.
|
<commit_before><commit_msg>Add local settings template for local server instances.
(imported from commit 96f59e6a041992f5b2c467558a2fcc14a951a8a9)<commit_after>
|
# Template for Django settings for the Zulip local servers
import os
import platform
import re
# TODO: Rewrite this file to be more or less self-documenting as to
# how to generate each token securely and what other setup is needed.
# For now, we'll do that piecewise by component.
# Make this unique, and don't share it with anybody.
SECRET_KEY = ''
# A fixed salt used for hashing in certain places, e.g. email-based
# username generation.
HASH_SALT = ''
# Use this salt to hash a user's email into a filename for their user-uploaded
# avatar. If this salt is discovered, attackers will only be able to determine
# that the owner of an email account has uploaded an avatar to Zulip, which isn't
# the end of the world. Don't use the salt where there is more security exposure.
AVATAR_SALT = ''
# A shared secret, used to authenticate different parts of the app to each other.
SHARED_SECRET = ''
# Password for rabbitmq
RABBITMQ_PASSWORD = ''
# TODO: Make USING_MAILCHIMP do something (and default to False)
USING_MAILCHIMP = False
# This can be filled in automatically from the database
FEEDBACK_BOT_KEY = ''
# TODO: Make USING_MANDRILL do something (and default to False)
USING_MANDRILL = False
# This needs to be synced with the camo installation
CAMO_KEY = ''
# TODO: Put in example values
EMAIL_USE_TLS = True
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT = 587
# Whether we're running in a production environment. Note that DEPLOYED does
# **not** mean hosted by us; customer sites are DEPLOYED and LOCALSERVER
# and as such should not for example assume they are the main Zulip site.
#
# TODO: Set these variables inside settings.py properly
DEPLOYED = os.path.exists('/etc/humbug-server')
STAGING_DEPLOYED = (platform.node() == 'staging.zulip.net')
TESTING_DEPLOYED = not not re.match(r'^test', platform.node())
LOCALSERVER = os.path.exists('/etc/zulip-local')
# TODO: Clean this up
if TESTING_DEPLOYED:
EXTERNAL_HOST = platform.node()
elif STAGING_DEPLOYED:
EXTERNAL_HOST = 'staging.zulip.com'
elif DEPLOYED:
EXTERNAL_HOST = 'zulip.com'
else:
EXTERNAL_HOST = 'localhost:9991'
# TODO: Make this not required
EMBEDLY_KEY=""
# TODO: Replace these
S3_KEY=""
S3_SECRET_KEY=""
S3_BUCKET=""
S3_AVATAR_BUCKET=""
# TODO: Replace these
MIXPANEL_TOKEN = ""
# TODO: Add twitter template variables below.
|
Add local settings template for local server instances.
(imported from commit 96f59e6a041992f5b2c467558a2fcc14a951a8a9)# Template for Django settings for the Zulip local servers
import os
import platform
import re
# TODO: Rewrite this file to be more or less self-documenting as to
# how to generate each token securely and what other setup is needed.
# For now, we'll do that piecewise by component.
# Make this unique, and don't share it with anybody.
SECRET_KEY = ''
# A fixed salt used for hashing in certain places, e.g. email-based
# username generation.
HASH_SALT = ''
# Use this salt to hash a user's email into a filename for their user-uploaded
# avatar. If this salt is discovered, attackers will only be able to determine
# that the owner of an email account has uploaded an avatar to Zulip, which isn't
# the end of the world. Don't use the salt where there is more security exposure.
AVATAR_SALT = ''
# A shared secret, used to authenticate different parts of the app to each other.
SHARED_SECRET = ''
# Password for rabbitmq
RABBITMQ_PASSWORD = ''
# TODO: Make USING_MAILCHIMP do something (and default to False)
USING_MAILCHIMP = False
# This can be filled in automatically from the database
FEEDBACK_BOT_KEY = ''
# TODO: Make USING_MANDRILL do something (and default to False)
USING_MANDRILL = False
# This needs to be synced with the camo installation
CAMO_KEY = ''
# TODO: Put in example values
EMAIL_USE_TLS = True
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT = 587
# Whether we're running in a production environment. Note that DEPLOYED does
# **not** mean hosted by us; customer sites are DEPLOYED and LOCALSERVER
# and as such should not for example assume they are the main Zulip site.
#
# TODO: Set these variables inside settings.py properly
DEPLOYED = os.path.exists('/etc/humbug-server')
STAGING_DEPLOYED = (platform.node() == 'staging.zulip.net')
TESTING_DEPLOYED = not not re.match(r'^test', platform.node())
LOCALSERVER = os.path.exists('/etc/zulip-local')
# TODO: Clean this up
if TESTING_DEPLOYED:
EXTERNAL_HOST = platform.node()
elif STAGING_DEPLOYED:
EXTERNAL_HOST = 'staging.zulip.com'
elif DEPLOYED:
EXTERNAL_HOST = 'zulip.com'
else:
EXTERNAL_HOST = 'localhost:9991'
# TODO: Make this not required
EMBEDLY_KEY=""
# TODO: Replace these
S3_KEY=""
S3_SECRET_KEY=""
S3_BUCKET=""
S3_AVATAR_BUCKET=""
# TODO: Replace these
MIXPANEL_TOKEN = ""
# TODO: Add twitter template variables below.
|
<commit_before><commit_msg>Add local settings template for local server instances.
(imported from commit 96f59e6a041992f5b2c467558a2fcc14a951a8a9)<commit_after># Template for Django settings for the Zulip local servers
import os
import platform
import re
# TODO: Rewrite this file to be more or less self-documenting as to
# how to generate each token securely and what other setup is needed.
# For now, we'll do that piecewise by component.
# Make this unique, and don't share it with anybody.
SECRET_KEY = ''
# A fixed salt used for hashing in certain places, e.g. email-based
# username generation.
HASH_SALT = ''
# Use this salt to hash a user's email into a filename for their user-uploaded
# avatar. If this salt is discovered, attackers will only be able to determine
# that the owner of an email account has uploaded an avatar to Zulip, which isn't
# the end of the world. Don't use the salt where there is more security exposure.
AVATAR_SALT = ''
# A shared secret, used to authenticate different parts of the app to each other.
SHARED_SECRET = ''
# Password for rabbitmq
RABBITMQ_PASSWORD = ''
# TODO: Make USING_MAILCHIMP do something (and default to False)
USING_MAILCHIMP = False
# This can be filled in automatically from the database
FEEDBACK_BOT_KEY = ''
# TODO: Make USING_MANDRILL do something (and default to False)
USING_MANDRILL = False
# This needs to be synced with the camo installation
CAMO_KEY = ''
# TODO: Put in example values
EMAIL_USE_TLS = True
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT = 587
# Whether we're running in a production environment. Note that DEPLOYED does
# **not** mean hosted by us; customer sites are DEPLOYED and LOCALSERVER
# and as such should not for example assume they are the main Zulip site.
#
# TODO: Set these variables inside settings.py properly
DEPLOYED = os.path.exists('/etc/humbug-server')
STAGING_DEPLOYED = (platform.node() == 'staging.zulip.net')
TESTING_DEPLOYED = not not re.match(r'^test', platform.node())
LOCALSERVER = os.path.exists('/etc/zulip-local')
# TODO: Clean this up
if TESTING_DEPLOYED:
EXTERNAL_HOST = platform.node()
elif STAGING_DEPLOYED:
EXTERNAL_HOST = 'staging.zulip.com'
elif DEPLOYED:
EXTERNAL_HOST = 'zulip.com'
else:
EXTERNAL_HOST = 'localhost:9991'
# TODO: Make this not required
EMBEDLY_KEY=""
# TODO: Replace these
S3_KEY=""
S3_SECRET_KEY=""
S3_BUCKET=""
S3_AVATAR_BUCKET=""
# TODO: Replace these
MIXPANEL_TOKEN = ""
# TODO: Add twitter template variables below.
|
|
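The AVATAR_SALT comment in the settings template above describes hashing a user's email into the filename of their uploaded avatar. A rough sketch of that idea only — the digest algorithm, encoding, file extension, and helper name below are assumptions for illustration, not the actual Zulip implementation:

import hashlib

AVATAR_SALT = 'long-random-string'  # placeholder; a real deployment generates its own secret value

def user_avatar_path(email):
    # Hash salt + email so the stored filename does not reveal the address itself;
    # anyone who also holds the salt could still confirm that a given email has an avatar.
    return hashlib.sha1((AVATAR_SALT + email).encode('utf-8')).hexdigest() + '.png'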
36d23fb7a3353a621e177a044a6c64335e1a79fb
|
src/sentry/digests/utilities.py
|
src/sentry/digests/utilities.py
|
from collections import Counter
# TODO(tkaemming): This should probably just be part of `build_digest`.t
def get_digest_metadata(digest):
start = None
end = None
counts = Counter()
for rule, groups in digest.iteritems():
counts.update(groups.keys())
for group, records in groups.iteritems():
for record in records:
if start is None or record.datetime < start:
start = record.datetime
if end is None or record.datetime > end:
end = record.datetime
return start, end, counts
|
from collections import Counter
# TODO(tkaemming): This should probably just be part of `build_digest`.
def get_digest_metadata(digest):
start = None
end = None
counts = Counter()
for rule, groups in digest.iteritems():
counts.update(groups.keys())
for group, records in groups.iteritems():
for record in records:
if start is None or record.datetime < start:
start = record.datetime
if end is None or record.datetime > end:
end = record.datetime
return start, end, counts
|
Fix typo in digest comment.
|
Fix typo in digest comment.
|
Python
|
bsd-3-clause
|
ifduyue/sentry,JamesMura/sentry,JackDanger/sentry,mvaled/sentry,BuildingLink/sentry,nicholasserra/sentry,BuildingLink/sentry,alexm92/sentry,gencer/sentry,beeftornado/sentry,JamesMura/sentry,alexm92/sentry,jean/sentry,nicholasserra/sentry,mvaled/sentry,JackDanger/sentry,ifduyue/sentry,BuildingLink/sentry,fotinakis/sentry,zenefits/sentry,gencer/sentry,mitsuhiko/sentry,nicholasserra/sentry,beeftornado/sentry,fotinakis/sentry,fotinakis/sentry,daevaorn/sentry,daevaorn/sentry,BuildingLink/sentry,ifduyue/sentry,gencer/sentry,jean/sentry,zenefits/sentry,zenefits/sentry,ifduyue/sentry,beeftornado/sentry,looker/sentry,daevaorn/sentry,zenefits/sentry,jean/sentry,looker/sentry,mitsuhiko/sentry,jean/sentry,JamesMura/sentry,JamesMura/sentry,mvaled/sentry,alexm92/sentry,BuildingLink/sentry,gencer/sentry,fotinakis/sentry,ifduyue/sentry,looker/sentry,gencer/sentry,daevaorn/sentry,zenefits/sentry,JamesMura/sentry,mvaled/sentry,mvaled/sentry,mvaled/sentry,JackDanger/sentry,looker/sentry,jean/sentry,looker/sentry
|
from collections import Counter
# TODO(tkaemming): This should probably just be part of `build_digest`.t
def get_digest_metadata(digest):
start = None
end = None
counts = Counter()
for rule, groups in digest.iteritems():
counts.update(groups.keys())
for group, records in groups.iteritems():
for record in records:
if start is None or record.datetime < start:
start = record.datetime
if end is None or record.datetime > end:
end = record.datetime
return start, end, counts
Fix typo in digest comment.
|
from collections import Counter
# TODO(tkaemming): This should probably just be part of `build_digest`.
def get_digest_metadata(digest):
start = None
end = None
counts = Counter()
for rule, groups in digest.iteritems():
counts.update(groups.keys())
for group, records in groups.iteritems():
for record in records:
if start is None or record.datetime < start:
start = record.datetime
if end is None or record.datetime > end:
end = record.datetime
return start, end, counts
|
<commit_before>from collections import Counter
# TODO(tkaemming): This should probably just be part of `build_digest`.t
def get_digest_metadata(digest):
start = None
end = None
counts = Counter()
for rule, groups in digest.iteritems():
counts.update(groups.keys())
for group, records in groups.iteritems():
for record in records:
if start is None or record.datetime < start:
start = record.datetime
if end is None or record.datetime > end:
end = record.datetime
return start, end, counts
<commit_msg>Fix typo in digest comment.<commit_after>
|
from collections import Counter
# TODO(tkaemming): This should probably just be part of `build_digest`.
def get_digest_metadata(digest):
start = None
end = None
counts = Counter()
for rule, groups in digest.iteritems():
counts.update(groups.keys())
for group, records in groups.iteritems():
for record in records:
if start is None or record.datetime < start:
start = record.datetime
if end is None or record.datetime > end:
end = record.datetime
return start, end, counts
|
from collections import Counter
# TODO(tkaemming): This should probably just be part of `build_digest`.t
def get_digest_metadata(digest):
start = None
end = None
counts = Counter()
for rule, groups in digest.iteritems():
counts.update(groups.keys())
for group, records in groups.iteritems():
for record in records:
if start is None or record.datetime < start:
start = record.datetime
if end is None or record.datetime > end:
end = record.datetime
return start, end, counts
Fix typo in digest comment.from collections import Counter
# TODO(tkaemming): This should probably just be part of `build_digest`.
def get_digest_metadata(digest):
start = None
end = None
counts = Counter()
for rule, groups in digest.iteritems():
counts.update(groups.keys())
for group, records in groups.iteritems():
for record in records:
if start is None or record.datetime < start:
start = record.datetime
if end is None or record.datetime > end:
end = record.datetime
return start, end, counts
|
<commit_before>from collections import Counter
# TODO(tkaemming): This should probably just be part of `build_digest`.t
def get_digest_metadata(digest):
start = None
end = None
counts = Counter()
for rule, groups in digest.iteritems():
counts.update(groups.keys())
for group, records in groups.iteritems():
for record in records:
if start is None or record.datetime < start:
start = record.datetime
if end is None or record.datetime > end:
end = record.datetime
return start, end, counts
<commit_msg>Fix typo in digest comment.<commit_after>from collections import Counter
# TODO(tkaemming): This should probably just be part of `build_digest`.
def get_digest_metadata(digest):
start = None
end = None
counts = Counter()
for rule, groups in digest.iteritems():
counts.update(groups.keys())
for group, records in groups.iteritems():
for record in records:
if start is None or record.datetime < start:
start = record.datetime
if end is None or record.datetime > end:
end = record.datetime
return start, end, counts
|
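For context on the structure that get_digest_metadata consumes: the digest is a mapping of rule -> {group: [records]}, and each record carries a datetime. A minimal sketch of calling it — the Record namedtuple is a stand-in invented for illustration rather than Sentry's real record type, and Python 2 is assumed, as the iteritems() calls imply:

from collections import namedtuple
from datetime import datetime

Record = namedtuple('Record', ['datetime'])  # stand-in for Sentry's real record type

digest = {
    'rule-1': {
        'group-a': [Record(datetime(2016, 1, 1)), Record(datetime(2016, 1, 3))],
        'group-b': [Record(datetime(2016, 1, 2))],
    },
}

start, end, counts = get_digest_metadata(digest)
# start == datetime(2016, 1, 1); end == datetime(2016, 1, 3)
# counts == Counter({'group-a': 1, 'group-b': 1})  -- one increment per rule that mentions the group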
711aab80991f236232d3d1247f694d16e8ac0c4d
|
Charts/Testing/Python/TestLinePlot.py
|
Charts/Testing/Python/TestLinePlot.py
|
#!/usr/bin/env python
# Run this test like so:
# vtkpython TestLinePlot.py -D $VTK_DATA_ROOT \
# -B $VTK_DATA_ROOT/Baseline/Charts/
import os
import vtk
import vtk.test.Testing
import math
class TestLinePlot(vtk.test.Testing.vtkTest):
def testLinePlot(self):
"Test if line plots can be built with python"
# Set up a 2D scene, add an XY chart to it
view = vtk.vtkContextView()
view.GetRenderer().SetBackground(1.0,1.0,1.0)
view.GetRenderWindow().SetSize(400,300)
chart = vtk.vtkChartXY()
view.GetScene().AddItem(chart)
# Create a table with some points in it
table = vtk.vtkTable()
arrX = vtk.vtkFloatArray()
arrX.SetName("X Axis")
arrC = vtk.vtkFloatArray()
arrC.SetName("Cosine")
arrS = vtk.vtkFloatArray()
arrS.SetName("Sine")
arrS2 = vtk.vtkFloatArray()
arrS2.SetName("Sine2")
numPoints = 69
inc = 7.5 / (numPoints - 1)
for i in range(0,numPoints):
arrX.InsertNextValue(i*inc)
arrC.InsertNextValue(math.cos(i * inc) + 0.0)
arrS.InsertNextValue(math.sin(i * inc) + 0.0)
arrS2.InsertNextValue(math.sin(i * inc) + 0.5)
table.AddColumn(arrX)
table.AddColumn(arrC)
table.AddColumn(arrS)
table.AddColumn(arrS2)
# Now add the line plots with appropriate colors
line = chart.AddPlot(0)
line.SetInput(table,0,1)
line.SetColor(0,255,0,255)
line.SetWidth(1.0)
line = chart.AddPlot(0)
line.SetInput(table,0,2)
line.SetColor(255,0,0,255);
line.SetWidth(5.0)
line = chart.AddPlot(0)
line.SetInput(table,0,3)
line.SetColor(0,0,255,255);
line.SetWidth(4.0)
view.GetRenderWindow().SetMultiSamples(0)
img_file = "TestLinePlot.png"
vtk.test.Testing.compareImage(view.GetRenderWindow(),vtk.test.Testing.getAbsImagePath(img_file),threshold=25)
vtk.test.Testing.interact()
if __name__ == "__main__":
vtk.test.Testing.main([(TestLinePlot, 'test')])
|
Add Python LinePlot Charts Test
|
Add Python LinePlot Charts Test
Largely a clone of the Cxx version save that the data arrays must
be manipulated directly since the vtkVariant based methods on vtkTable
aren't available to Python.
|
Python
|
bsd-3-clause
|
ashray/VTK-EVM,berendkleinhaneveld/VTK,demarle/VTK,cjh1/VTK,collects/VTK,keithroe/vtkoptix,mspark93/VTK,gram526/VTK,SimVascular/VTK,spthaolt/VTK,msmolens/VTK,spthaolt/VTK,johnkit/vtk-dev,jeffbaumes/jeffbaumes-vtk,arnaudgelas/VTK,sumedhasingla/VTK,mspark93/VTK,ashray/VTK-EVM,biddisco/VTK,naucoin/VTKSlicerWidgets,jmerkow/VTK,gram526/VTK,spthaolt/VTK,spthaolt/VTK,johnkit/vtk-dev,gram526/VTK,biddisco/VTK,keithroe/vtkoptix,biddisco/VTK,spthaolt/VTK,naucoin/VTKSlicerWidgets,mspark93/VTK,gram526/VTK,sankhesh/VTK,hendradarwin/VTK,jeffbaumes/jeffbaumes-vtk,msmolens/VTK,sumedhasingla/VTK,hendradarwin/VTK,hendradarwin/VTK,demarle/VTK,jmerkow/VTK,demarle/VTK,johnkit/vtk-dev,sumedhasingla/VTK,naucoin/VTKSlicerWidgets,sumedhasingla/VTK,demarle/VTK,johnkit/vtk-dev,cjh1/VTK,candy7393/VTK,sumedhasingla/VTK,demarle/VTK,gram526/VTK,johnkit/vtk-dev,candy7393/VTK,naucoin/VTKSlicerWidgets,mspark93/VTK,jeffbaumes/jeffbaumes-vtk,daviddoria/PointGraphsPhase1,hendradarwin/VTK,SimVascular/VTK,jeffbaumes/jeffbaumes-vtk,gram526/VTK,biddisco/VTK,mspark93/VTK,keithroe/vtkoptix,jeffbaumes/jeffbaumes-vtk,aashish24/VTK-old,daviddoria/PointGraphsPhase1,sankhesh/VTK,ashray/VTK-EVM,keithroe/vtkoptix,hendradarwin/VTK,ashray/VTK-EVM,berendkleinhaneveld/VTK,collects/VTK,collects/VTK,berendkleinhaneveld/VTK,msmolens/VTK,msmolens/VTK,hendradarwin/VTK,sankhesh/VTK,candy7393/VTK,demarle/VTK,candy7393/VTK,aashish24/VTK-old,collects/VTK,jmerkow/VTK,daviddoria/PointGraphsPhase1,sumedhasingla/VTK,jeffbaumes/jeffbaumes-vtk,msmolens/VTK,gram526/VTK,demarle/VTK,berendkleinhaneveld/VTK,arnaudgelas/VTK,biddisco/VTK,cjh1/VTK,arnaudgelas/VTK,collects/VTK,sankhesh/VTK,berendkleinhaneveld/VTK,hendradarwin/VTK,jmerkow/VTK,ashray/VTK-EVM,SimVascular/VTK,SimVascular/VTK,arnaudgelas/VTK,aashish24/VTK-old,ashray/VTK-EVM,aashish24/VTK-old,sankhesh/VTK,ashray/VTK-EVM,cjh1/VTK,arnaudgelas/VTK,gram526/VTK,candy7393/VTK,berendkleinhaneveld/VTK,SimVascular/VTK,johnkit/vtk-dev,jmerkow/VTK,msmolens/VTK,sumedhasingla/VTK,sankhesh/VTK,SimVascular/VTK,cjh1/VTK,msmolens/VTK,jmerkow/VTK,candy7393/VTK,sankhesh/VTK,keithroe/vtkoptix,spthaolt/VTK,spthaolt/VTK,keithroe/vtkoptix,naucoin/VTKSlicerWidgets,candy7393/VTK,mspark93/VTK,biddisco/VTK,demarle/VTK,keithroe/vtkoptix,cjh1/VTK,SimVascular/VTK,johnkit/vtk-dev,ashray/VTK-EVM,daviddoria/PointGraphsPhase1,mspark93/VTK,keithroe/vtkoptix,msmolens/VTK,jmerkow/VTK,SimVascular/VTK,berendkleinhaneveld/VTK,mspark93/VTK,daviddoria/PointGraphsPhase1,collects/VTK,sankhesh/VTK,jmerkow/VTK,naucoin/VTKSlicerWidgets,sumedhasingla/VTK,aashish24/VTK-old,candy7393/VTK,arnaudgelas/VTK,biddisco/VTK,aashish24/VTK-old,daviddoria/PointGraphsPhase1
|
Add Python LinePlot Charts Test
Largely a clone of the Cxx version save that the data arrays must
be manipulated directly since the vtkVariant based methods on vtkTable
aren't available to Python.
|
#!/usr/bin/env python
# Run this test like so:
# vtkpython TestLinePlot.py -D $VTK_DATA_ROOT \
# -B $VTK_DATA_ROOT/Baseline/Charts/
import os
import vtk
import vtk.test.Testing
import math
class TestLinePlot(vtk.test.Testing.vtkTest):
def testLinePlot(self):
"Test if line plots can be built with python"
# Set up a 2D scene, add an XY chart to it
view = vtk.vtkContextView()
view.GetRenderer().SetBackground(1.0,1.0,1.0)
view.GetRenderWindow().SetSize(400,300)
chart = vtk.vtkChartXY()
view.GetScene().AddItem(chart)
# Create a table with some points in it
table = vtk.vtkTable()
arrX = vtk.vtkFloatArray()
arrX.SetName("X Axis")
arrC = vtk.vtkFloatArray()
arrC.SetName("Cosine")
arrS = vtk.vtkFloatArray()
arrS.SetName("Sine")
arrS2 = vtk.vtkFloatArray()
arrS2.SetName("Sine2")
numPoints = 69
inc = 7.5 / (numPoints - 1)
for i in range(0,numPoints):
arrX.InsertNextValue(i*inc)
arrC.InsertNextValue(math.cos(i * inc) + 0.0)
arrS.InsertNextValue(math.sin(i * inc) + 0.0)
arrS2.InsertNextValue(math.sin(i * inc) + 0.5)
table.AddColumn(arrX)
table.AddColumn(arrC)
table.AddColumn(arrS)
table.AddColumn(arrS2)
# Now add the line plots with appropriate colors
line = chart.AddPlot(0)
line.SetInput(table,0,1)
line.SetColor(0,255,0,255)
line.SetWidth(1.0)
line = chart.AddPlot(0)
line.SetInput(table,0,2)
line.SetColor(255,0,0,255);
line.SetWidth(5.0)
line = chart.AddPlot(0)
line.SetInput(table,0,3)
line.SetColor(0,0,255,255);
line.SetWidth(4.0)
view.GetRenderWindow().SetMultiSamples(0)
img_file = "TestLinePlot.png"
vtk.test.Testing.compareImage(view.GetRenderWindow(),vtk.test.Testing.getAbsImagePath(img_file),threshold=25)
vtk.test.Testing.interact()
if __name__ == "__main__":
vtk.test.Testing.main([(TestLinePlot, 'test')])
|
<commit_before><commit_msg>Add Python LinePlot Charts Test
Largely a clone of the Cxx version save that the data arrays must
be manipulated directly since the vtkVariant based methods on vtkTable
aren't available to Python.<commit_after>
|
#!/usr/bin/env python
# Run this test like so:
# vtkpython TestLinePlot.py -D $VTK_DATA_ROOT \
# -B $VTK_DATA_ROOT/Baseline/Charts/
import os
import vtk
import vtk.test.Testing
import math
class TestLinePlot(vtk.test.Testing.vtkTest):
def testLinePlot(self):
"Test if line plots can be built with python"
# Set up a 2D scene, add an XY chart to it
view = vtk.vtkContextView()
view.GetRenderer().SetBackground(1.0,1.0,1.0)
view.GetRenderWindow().SetSize(400,300)
chart = vtk.vtkChartXY()
view.GetScene().AddItem(chart)
# Create a table with some points in it
table = vtk.vtkTable()
arrX = vtk.vtkFloatArray()
arrX.SetName("X Axis")
arrC = vtk.vtkFloatArray()
arrC.SetName("Cosine")
arrS = vtk.vtkFloatArray()
arrS.SetName("Sine")
arrS2 = vtk.vtkFloatArray()
arrS2.SetName("Sine2")
numPoints = 69
inc = 7.5 / (numPoints - 1)
for i in range(0,numPoints):
arrX.InsertNextValue(i*inc)
arrC.InsertNextValue(math.cos(i * inc) + 0.0)
arrS.InsertNextValue(math.sin(i * inc) + 0.0)
arrS2.InsertNextValue(math.sin(i * inc) + 0.5)
table.AddColumn(arrX)
table.AddColumn(arrC)
table.AddColumn(arrS)
table.AddColumn(arrS2)
# Now add the line plots with appropriate colors
line = chart.AddPlot(0)
line.SetInput(table,0,1)
line.SetColor(0,255,0,255)
line.SetWidth(1.0)
line = chart.AddPlot(0)
line.SetInput(table,0,2)
line.SetColor(255,0,0,255);
line.SetWidth(5.0)
line = chart.AddPlot(0)
line.SetInput(table,0,3)
line.SetColor(0,0,255,255);
line.SetWidth(4.0)
view.GetRenderWindow().SetMultiSamples(0)
img_file = "TestLinePlot.png"
vtk.test.Testing.compareImage(view.GetRenderWindow(),vtk.test.Testing.getAbsImagePath(img_file),threshold=25)
vtk.test.Testing.interact()
if __name__ == "__main__":
vtk.test.Testing.main([(TestLinePlot, 'test')])
|
Add Python LinePlot Charts Test
Largely a clone of the Cxx version save that the data arrays must
be manipulated directly since the vtkVariant based methods on vtkTable
aren't available to Python.#!/usr/bin/env python
# Run this test like so:
# vtkpython TestLinePlot.py -D $VTK_DATA_ROOT \
# -B $VTK_DATA_ROOT/Baseline/Charts/
import os
import vtk
import vtk.test.Testing
import math
class TestLinePlot(vtk.test.Testing.vtkTest):
def testLinePlot(self):
"Test if line plots can be built with python"
# Set up a 2D scene, add an XY chart to it
view = vtk.vtkContextView()
view.GetRenderer().SetBackground(1.0,1.0,1.0)
view.GetRenderWindow().SetSize(400,300)
chart = vtk.vtkChartXY()
view.GetScene().AddItem(chart)
# Create a table with some points in it
table = vtk.vtkTable()
arrX = vtk.vtkFloatArray()
arrX.SetName("X Axis")
arrC = vtk.vtkFloatArray()
arrC.SetName("Cosine")
arrS = vtk.vtkFloatArray()
arrS.SetName("Sine")
arrS2 = vtk.vtkFloatArray()
arrS2.SetName("Sine2")
numPoints = 69
inc = 7.5 / (numPoints - 1)
for i in range(0,numPoints):
arrX.InsertNextValue(i*inc)
arrC.InsertNextValue(math.cos(i * inc) + 0.0)
arrS.InsertNextValue(math.sin(i * inc) + 0.0)
arrS2.InsertNextValue(math.sin(i * inc) + 0.5)
table.AddColumn(arrX)
table.AddColumn(arrC)
table.AddColumn(arrS)
table.AddColumn(arrS2)
# Now add the line plots with appropriate colors
line = chart.AddPlot(0)
line.SetInput(table,0,1)
line.SetColor(0,255,0,255)
line.SetWidth(1.0)
line = chart.AddPlot(0)
line.SetInput(table,0,2)
line.SetColor(255,0,0,255);
line.SetWidth(5.0)
line = chart.AddPlot(0)
line.SetInput(table,0,3)
line.SetColor(0,0,255,255);
line.SetWidth(4.0)
view.GetRenderWindow().SetMultiSamples(0)
img_file = "TestLinePlot.png"
vtk.test.Testing.compareImage(view.GetRenderWindow(),vtk.test.Testing.getAbsImagePath(img_file),threshold=25)
vtk.test.Testing.interact()
if __name__ == "__main__":
vtk.test.Testing.main([(TestLinePlot, 'test')])
|
<commit_before><commit_msg>Add Python LinePlot Charts Test
Largely a clone of the Cxx version save that the data arrays must
be manipulated directly since the vtkVariant based methods on vtkTable
aren't available to Python.<commit_after>#!/usr/bin/env python
# Run this test like so:
# vtkpython TestLinePlot.py -D $VTK_DATA_ROOT \
# -B $VTK_DATA_ROOT/Baseline/Charts/
import os
import vtk
import vtk.test.Testing
import math
class TestLinePlot(vtk.test.Testing.vtkTest):
def testLinePlot(self):
"Test if line plots can be built with python"
# Set up a 2D scene, add an XY chart to it
view = vtk.vtkContextView()
view.GetRenderer().SetBackground(1.0,1.0,1.0)
view.GetRenderWindow().SetSize(400,300)
chart = vtk.vtkChartXY()
view.GetScene().AddItem(chart)
# Create a table with some points in it
table = vtk.vtkTable()
arrX = vtk.vtkFloatArray()
arrX.SetName("X Axis")
arrC = vtk.vtkFloatArray()
arrC.SetName("Cosine")
arrS = vtk.vtkFloatArray()
arrS.SetName("Sine")
arrS2 = vtk.vtkFloatArray()
arrS2.SetName("Sine2")
numPoints = 69
inc = 7.5 / (numPoints - 1)
for i in range(0,numPoints):
arrX.InsertNextValue(i*inc)
arrC.InsertNextValue(math.cos(i * inc) + 0.0)
arrS.InsertNextValue(math.sin(i * inc) + 0.0)
arrS2.InsertNextValue(math.sin(i * inc) + 0.5)
table.AddColumn(arrX)
table.AddColumn(arrC)
table.AddColumn(arrS)
table.AddColumn(arrS2)
# Now add the line plots with appropriate colors
line = chart.AddPlot(0)
line.SetInput(table,0,1)
line.SetColor(0,255,0,255)
line.SetWidth(1.0)
line = chart.AddPlot(0)
line.SetInput(table,0,2)
line.SetColor(255,0,0,255);
line.SetWidth(5.0)
line = chart.AddPlot(0)
line.SetInput(table,0,3)
line.SetColor(0,0,255,255);
line.SetWidth(4.0)
view.GetRenderWindow().SetMultiSamples(0)
img_file = "TestLinePlot.png"
vtk.test.Testing.compareImage(view.GetRenderWindow(),vtk.test.Testing.getAbsImagePath(img_file),threshold=25)
vtk.test.Testing.interact()
if __name__ == "__main__":
vtk.test.Testing.main([(TestLinePlot, 'test')])
|
|
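The commit message explains why the Python test fills vtkFloatArray objects directly: the vtkVariant-based vtkTable setters used by the C++ version were not exposed to Python at the time. The pattern the test relies on is simply appending values to named arrays and adding them as columns; a condensed sketch, assuming vtk, table, numPoints and inc as defined in the test:

arrX = vtk.vtkFloatArray()
arrX.SetName("X Axis")
for i in range(numPoints):
    arrX.InsertNextValue(i * inc)  # append row by row instead of table.SetValue(row, col, vtkVariant(...))
table.AddColumn(arrX)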
88e4d0beaca3adc561fc624e4d878c90c2750a8f
|
DataSources/BluetoothDataSource.py
|
DataSources/BluetoothDataSource.py
|
from time import sleep
from bluetooth import *
class BluetoothDataSource:
def __init__(self, params):
self.params = params
self.socket = BluetoothSocket(RFCOMM)
pass
def open(self):
try:
self.socket.connect((self.params['device'], self.params['port']))
sleep(0.4)
return True
except BluetoothError as e:
return str(e)
def close(self):
self.socket.close()
def execute(self, command):
self.socket.send(command + '\r\n')
res = ''
c = ''
while (c != '>'):
res += c
c = self.socket.recv(1)
return res
|
Add Bluetooth support. Still buggy but works. Mostly. If you are lucky.
|
Add Bluetooth support. Still buggy but works. Mostly. If you are lucky.
|
Python
|
mit
|
Max-Might/Elmo-Scan
|
Add Bluetooth support. Still buggy but works. Mostly. If you are lucky.
|
from time import sleep
from bluetooth import *
class BluetoothDataSource:
def __init__(self, params):
self.params = params
self.socket = BluetoothSocket(RFCOMM)
pass
def open(self):
try:
self.socket.connect((self.params['device'], self.params['port']))
sleep(0.4)
return True
except BluetoothError as e:
return str(e)
def close(self):
self.socket.close()
def execute(self, command):
self.socket.send(command + '\r\n')
res = ''
c = ''
while (c != '>'):
res += c
c = self.socket.recv(1)
return res
|
<commit_before><commit_msg>Add Bluetooth support. Still buggy but works. Mostly. If you are lucky.<commit_after>
|
from time import sleep
from bluetooth import *
class BluetoothDataSource:
def __init__(self, params):
self.params = params
self.socket = BluetoothSocket(RFCOMM)
pass
def open(self):
try:
self.socket.connect((self.params['device'], self.params['port']))
sleep(0.4)
return True
except BluetoothError as e:
return str(e)
def close(self):
self.socket.close()
def execute(self, command):
self.socket.send(command + '\r\n')
res = ''
c = ''
while (c != '>'):
res += c
c = self.socket.recv(1)
return res
|
Add Bluetooth support. Still buggy but works. Mostly. If you are lucky.from time import sleep
from bluetooth import *
class BluetoothDataSource:
def __init__(self, params):
self.params = params
self.socket = BluetoothSocket(RFCOMM)
pass
def open(self):
try:
self.socket.connect((self.params['device'], self.params['port']))
sleep(0.4)
return True
except BluetoothError as e:
return str(e)
def close(self):
self.socket.close()
def execute(self, command):
self.socket.send(command + '\r\n')
res = ''
c = ''
while (c != '>'):
res += c
c = self.socket.recv(1)
return res
|
<commit_before><commit_msg>Add Bluetooth support. Still buggy but works. Mostly. If you are lucky.<commit_after>from time import sleep
from bluetooth import *
class BluetoothDataSource:
def __init__(self, params):
self.params = params
self.socket = BluetoothSocket(RFCOMM)
pass
def open(self):
try:
self.socket.connect((self.params['device'], self.params['port']))
sleep(0.4)
return True
except BluetoothError as e:
return str(e)
def close(self):
self.socket.close()
def execute(self, command):
self.socket.send(command + '\r\n')
res = ''
c = ''
while (c != '>'):
res += c
c = self.socket.recv(1)
return res
|
|
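BluetoothDataSource.execute above writes a CRLF-terminated command and then reads one byte at a time until it sees the '>' prompt that ELM327-style OBD adapters print, returning everything received before the prompt. A hypothetical usage sketch — the adapter address and RFCOMM channel below are made up:

source = BluetoothDataSource({'device': '00:11:22:33:44:55', 'port': 1})
result = source.open()            # returns True on success, or the error text on failure
if result is True:
    print(source.execute('ATZ'))  # reset the adapter and print its banner
    source.close()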
2a3c24b32e426e1cad65cdb72affeb81398f7b98
|
scripts/hdf5_to_csv.py
|
scripts/hdf5_to_csv.py
|
#!/usr/bin/env python
from __future__ import print_function
from os.path import join, splitext
import glob
import pandas as pd
#import matplotlib.pyplot as plt
import multi_tracker_analysis as mta
def main():
#experiment_dir = 'choice_20210129_162648'
experiment_dir = '.'
def find_file_via_suffix(suffix):
files = glob.glob(join(experiment_dir, '*' + suffix))
assert len(files) == 1
return files[0]
csv_fname = find_file_via_suffix('_stimuli.csv')
sdf = pd.read_csv(csv_fname)
for hdf5_fname in glob.glob(join(experiment_dir, '*_trackedobjects.hdf5')):
print(hdf5_fname)
# TODO suppress load message about config files we aren't using.
# seems the latter is None. but what else could it be?
df, _ = mta.read_hdf5_file_to_pandas.load_and_preprocess_data(
hdf5_fname
)
csv_fname = splitext(hdf5_fname)[0] + '.csv'
# TODO TODO TODO also add speed / velocity columns after checking they
# are correct / fixing if need be
df.to_csv(csv_fname, columns=['time_epoch', 'position_x', 'position_y'],
index=False
)
if __name__ == '__main__':
main()
|
Add script to convert (some of) hdf5 tracking data to csv
|
Add script to convert (some of) hdf5 tracking data to csv
|
Python
|
mit
|
tom-f-oconnell/multi_tracker,tom-f-oconnell/multi_tracker
|
Add script to convert (some of) hdf5 tracking data to csv
|
#!/usr/bin/env python
from __future__ import print_function
from os.path import join, splitext
import glob
import pandas as pd
#import matplotlib.pyplot as plt
import multi_tracker_analysis as mta
def main():
#experiment_dir = 'choice_20210129_162648'
experiment_dir = '.'
def find_file_via_suffix(suffix):
files = glob.glob(join(experiment_dir, '*' + suffix))
assert len(files) == 1
return files[0]
csv_fname = find_file_via_suffix('_stimuli.csv')
sdf = pd.read_csv(csv_fname)
for hdf5_fname in glob.glob(join(experiment_dir, '*_trackedobjects.hdf5')):
print(hdf5_fname)
# TODO suppress load message about config files we aren't using.
# seems the latter is None. but what else could it be?
df, _ = mta.read_hdf5_file_to_pandas.load_and_preprocess_data(
hdf5_fname
)
csv_fname = splitext(hdf5_fname)[0] + '.csv'
# TODO TODO TODO also add speed / velocity columns after checking they
# are correct / fixing if need be
df.to_csv(csv_fname, columns=['time_epoch', 'position_x', 'position_y'],
index=False
)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to convert (some of) hdf5 tracking data to csv<commit_after>
|
#!/usr/bin/env python
from __future__ import print_function
from os.path import join, splitext
import glob
import pandas as pd
#import matplotlib.pyplot as plt
import multi_tracker_analysis as mta
def main():
#experiment_dir = 'choice_20210129_162648'
experiment_dir = '.'
def find_file_via_suffix(suffix):
files = glob.glob(join(experiment_dir, '*' + suffix))
assert len(files) == 1
return files[0]
csv_fname = find_file_via_suffix('_stimuli.csv')
sdf = pd.read_csv(csv_fname)
for hdf5_fname in glob.glob(join(experiment_dir, '*_trackedobjects.hdf5')):
print(hdf5_fname)
# TODO suppress load message about config files we aren't using.
# seems the latter is None. but what else could it be?
df, _ = mta.read_hdf5_file_to_pandas.load_and_preprocess_data(
hdf5_fname
)
csv_fname = splitext(hdf5_fname)[0] + '.csv'
# TODO TODO TODO also add speed / velocity columns after checking they
# are correct / fixing if need be
df.to_csv(csv_fname, columns=['time_epoch', 'position_x', 'position_y'],
index=False
)
if __name__ == '__main__':
main()
|
Add script to convert (some of) hdf5 tracking data to csv#!/usr/bin/env python
from __future__ import print_function
from os.path import join, splitext
import glob
import pandas as pd
#import matplotlib.pyplot as plt
import multi_tracker_analysis as mta
def main():
#experiment_dir = 'choice_20210129_162648'
experiment_dir = '.'
def find_file_via_suffix(suffix):
files = glob.glob(join(experiment_dir, '*' + suffix))
assert len(files) == 1
return files[0]
csv_fname = find_file_via_suffix('_stimuli.csv')
sdf = pd.read_csv(csv_fname)
for hdf5_fname in glob.glob(join(experiment_dir, '*_trackedobjects.hdf5')):
print(hdf5_fname)
# TODO suppress load message about config files we aren't using.
# seems the latter is None. but what else could it be?
df, _ = mta.read_hdf5_file_to_pandas.load_and_preprocess_data(
hdf5_fname
)
csv_fname = splitext(hdf5_fname)[0] + '.csv'
# TODO TODO TODO also add speed / velocity columns after checking they
# are correct / fixing if need be
df.to_csv(csv_fname, columns=['time_epoch', 'position_x', 'position_y'],
index=False
)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to convert (some of) hdf5 tracking data to csv<commit_after>#!/usr/bin/env python
from __future__ import print_function
from os.path import join, splitext
import glob
import pandas as pd
#import matplotlib.pyplot as plt
import multi_tracker_analysis as mta
def main():
#experiment_dir = 'choice_20210129_162648'
experiment_dir = '.'
def find_file_via_suffix(suffix):
files = glob.glob(join(experiment_dir, '*' + suffix))
assert len(files) == 1
return files[0]
csv_fname = find_file_via_suffix('_stimuli.csv')
sdf = pd.read_csv(csv_fname)
for hdf5_fname in glob.glob(join(experiment_dir, '*_trackedobjects.hdf5')):
print(hdf5_fname)
# TODO suppress load message about config files we aren't using.
# seems the latter is None. but what else could it be?
df, _ = mta.read_hdf5_file_to_pandas.load_and_preprocess_data(
hdf5_fname
)
csv_fname = splitext(hdf5_fname)[0] + '.csv'
# TODO TODO TODO also add speed / velocity columns after checking they
# are correct / fixing if need be
df.to_csv(csv_fname, columns=['time_epoch', 'position_x', 'position_y'],
index=False
)
if __name__ == '__main__':
main()
|
|
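The script above is meant to be run from inside an experiment directory (experiment_dir = '.'), where it expects exactly one *_stimuli.csv plus one or more *_trackedobjects.hdf5 files, writing a CSV next to each HDF5 file; note that sdf, the stimuli table, is loaded but not yet used by this version. A sketch of reading one of the outputs back — the filename is illustrative only:

import pandas as pd

df = pd.read_csv('choice_20210129_162648_trackedobjects.csv')  # hypothetical output name
print(df.columns.tolist())  # ['time_epoch', 'position_x', 'position_y']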
e7ec8deb9fda8be9f85f1f26452646b6ddfe5367
|
fuel_test/test_openstack.py
|
fuel_test/test_openstack.py
|
from openstack_site_pp_base import OpenStackSitePPBaseTestCase
import unittest
class OpenStackCase(OpenStackSitePPBaseTestCase):
def test_deploy_open_stack(self):
self.validate(
[self.controller1,self.controller2,self.compute1,self.compute2],
'puppet agent --test')
if __name__ == '__main__':
unittest.main()
|
from openstack_site_pp_base import OpenStackSitePPBaseTestCase
import unittest
class OpenStackCase(OpenStackSitePPBaseTestCase):
def test_deploy_open_stack(self):
self.validate(
[self.controller1,self.controller2,self.compute1,self.compute2],
'puppet agent --test')
for node in self.environment.nodes:
node.save_snapshot('openstack')
if __name__ == '__main__':
unittest.main()
|
Create snapshot after deploy openstack
|
Create snapshot after deploy openstack
|
Python
|
apache-2.0
|
slystopad/fuel-lib,Metaswitch/fuel-library,zhaochao/fuel-library,eayunstack/fuel-library,huntxu/fuel-library,SmartInfrastructures/fuel-library-dev,SmartInfrastructures/fuel-library-dev,ddepaoli3/fuel-library-dev,zhaochao/fuel-library,eayunstack/fuel-library,Metaswitch/fuel-library,zhaochao/fuel-library,zhaochao/fuel-library,zhaochao/fuel-library,SmartInfrastructures/fuel-library-dev,ddepaoli3/fuel-library-dev,xarses/fuel-library,huntxu/fuel-library,huntxu/fuel-library,xarses/fuel-library,stackforge/fuel-library,Metaswitch/fuel-library,huntxu/fuel-library,eayunstack/fuel-library,SmartInfrastructures/fuel-library-dev,ddepaoli3/fuel-library-dev,SmartInfrastructures/fuel-library-dev,slystopad/fuel-lib,stackforge/fuel-library,slystopad/fuel-lib,Metaswitch/fuel-library,ddepaoli3/fuel-library-dev,eayunstack/fuel-library,stackforge/fuel-library,slystopad/fuel-lib,xarses/fuel-library,stackforge/fuel-library,ddepaoli3/fuel-library-dev,xarses/fuel-library,huntxu/fuel-library,eayunstack/fuel-library
|
from openstack_site_pp_base import OpenStackSitePPBaseTestCase
import unittest
class OpenStackCase(OpenStackSitePPBaseTestCase):
def test_deploy_open_stack(self):
self.validate(
[self.controller1,self.controller2,self.compute1,self.compute2],
'puppet agent --test')
if __name__ == '__main__':
unittest.main()
Create snapshot after deploy openstack
|
from openstack_site_pp_base import OpenStackSitePPBaseTestCase
import unittest
class OpenStackCase(OpenStackSitePPBaseTestCase):
def test_deploy_open_stack(self):
self.validate(
[self.controller1,self.controller2,self.compute1,self.compute2],
'puppet agent --test')
for node in self.environment.nodes:
node.save_snapshot('openstack')
if __name__ == '__main__':
unittest.main()
|
<commit_before>from openstack_site_pp_base import OpenStackSitePPBaseTestCase
import unittest
class OpenStackCase(OpenStackSitePPBaseTestCase):
def test_deploy_open_stack(self):
self.validate(
[self.controller1,self.controller2,self.compute1,self.compute2],
'puppet agent --test')
if __name__ == '__main__':
unittest.main()
<commit_msg>Create snapshot after deploy openstack<commit_after>
|
from openstack_site_pp_base import OpenStackSitePPBaseTestCase
import unittest
class OpenStackCase(OpenStackSitePPBaseTestCase):
def test_deploy_open_stack(self):
self.validate(
[self.controller1,self.controller2,self.compute1,self.compute2],
'puppet agent --test')
for node in self.environment.nodes:
node.save_snapshot('openstack')
if __name__ == '__main__':
unittest.main()
|
from openstack_site_pp_base import OpenStackSitePPBaseTestCase
import unittest
class OpenStackCase(OpenStackSitePPBaseTestCase):
def test_deploy_open_stack(self):
self.validate(
[self.controller1,self.controller2,self.compute1,self.compute2],
'puppet agent --test')
if __name__ == '__main__':
unittest.main()
Create snapshot after deploy openstackfrom openstack_site_pp_base import OpenStackSitePPBaseTestCase
import unittest
class OpenStackCase(OpenStackSitePPBaseTestCase):
def test_deploy_open_stack(self):
self.validate(
[self.controller1,self.controller2,self.compute1,self.compute2],
'puppet agent --test')
for node in self.environment.nodes:
node.save_snapshot('openstack')
if __name__ == '__main__':
unittest.main()
|
<commit_before>from openstack_site_pp_base import OpenStackSitePPBaseTestCase
import unittest
class OpenStackCase(OpenStackSitePPBaseTestCase):
def test_deploy_open_stack(self):
self.validate(
[self.controller1,self.controller2,self.compute1,self.compute2],
'puppet agent --test')
if __name__ == '__main__':
unittest.main()
<commit_msg>Create snapshot after deploy openstack<commit_after>from openstack_site_pp_base import OpenStackSitePPBaseTestCase
import unittest
class OpenStackCase(OpenStackSitePPBaseTestCase):
def test_deploy_open_stack(self):
self.validate(
[self.controller1,self.controller2,self.compute1,self.compute2],
'puppet agent --test')
for node in self.environment.nodes:
node.save_snapshot('openstack')
if __name__ == '__main__':
unittest.main()
|
4d065ff7fe1aab53dee472bfa4138f19ddc6774c
|
test/unittests/tts/test_espeak_tts.py
|
test/unittests/tts/test_espeak_tts.py
|
import unittest
from unittest import mock
from mycroft.tts.espeak_tts import ESpeak
@mock.patch('mycroft.tts.tts.PlaybackThread')
class TestMimic(unittest.TestCase):
@mock.patch('mycroft.tts.espeak_tts.subprocess')
def test_get_tts(self, mock_subprocess, _):
conf = {
"lang": "english-us",
"voice": "m1"
}
e = ESpeak('en-US', conf)
sentence = 'hello'
wav_filename = 'abc.wav'
wav, phonemes = e.get_tts(sentence, wav_filename)
self.assertTrue(phonemes is None)
mock_subprocess.call.called_with(['espeak', '-v',
conf['lang'] + '+' + conf['voice'],
'-w', wav_filename,
sentence])
|
Add basic unittest for espeak
|
Add basic unittest for espeak
|
Python
|
apache-2.0
|
forslund/mycroft-core,MycroftAI/mycroft-core,forslund/mycroft-core,MycroftAI/mycroft-core
|
Add basic unittest for espeak
|
import unittest
from unittest import mock
from mycroft.tts.espeak_tts import ESpeak
@mock.patch('mycroft.tts.tts.PlaybackThread')
class TestMimic(unittest.TestCase):
@mock.patch('mycroft.tts.espeak_tts.subprocess')
def test_get_tts(self, mock_subprocess, _):
conf = {
"lang": "english-us",
"voice": "m1"
}
e = ESpeak('en-US', conf)
sentence = 'hello'
wav_filename = 'abc.wav'
wav, phonemes = e.get_tts(sentence, wav_filename)
self.assertTrue(phonemes is None)
mock_subprocess.call.called_with(['espeak', '-v',
conf['lang'] + '+' + conf['voice'],
'-w', wav_filename,
sentence])
|
<commit_before><commit_msg>Add basic unittest for espeak<commit_after>
|
import unittest
from unittest import mock
from mycroft.tts.espeak_tts import ESpeak
@mock.patch('mycroft.tts.tts.PlaybackThread')
class TestMimic(unittest.TestCase):
@mock.patch('mycroft.tts.espeak_tts.subprocess')
def test_get_tts(self, mock_subprocess, _):
conf = {
"lang": "english-us",
"voice": "m1"
}
e = ESpeak('en-US', conf)
sentence = 'hello'
wav_filename = 'abc.wav'
wav, phonemes = e.get_tts(sentence, wav_filename)
self.assertTrue(phonemes is None)
mock_subprocess.call.called_with(['espeak', '-v',
conf['lang'] + '+' + conf['voice'],
'-w', wav_filename,
sentence])
|
Add basic unittest for espeakimport unittest
from unittest import mock
from mycroft.tts.espeak_tts import ESpeak
@mock.patch('mycroft.tts.tts.PlaybackThread')
class TestMimic(unittest.TestCase):
@mock.patch('mycroft.tts.espeak_tts.subprocess')
def test_get_tts(self, mock_subprocess, _):
conf = {
"lang": "english-us",
"voice": "m1"
}
e = ESpeak('en-US', conf)
sentence = 'hello'
wav_filename = 'abc.wav'
wav, phonemes = e.get_tts(sentence, wav_filename)
self.assertTrue(phonemes is None)
mock_subprocess.call.called_with(['espeak', '-v',
conf['lang'] + '+' + conf['voice'],
'-w', wav_filename,
sentence])
|
<commit_before><commit_msg>Add basic unittest for espeak<commit_after>import unittest
from unittest import mock
from mycroft.tts.espeak_tts import ESpeak
@mock.patch('mycroft.tts.tts.PlaybackThread')
class TestMimic(unittest.TestCase):
@mock.patch('mycroft.tts.espeak_tts.subprocess')
def test_get_tts(self, mock_subprocess, _):
conf = {
"lang": "english-us",
"voice": "m1"
}
e = ESpeak('en-US', conf)
sentence = 'hello'
wav_filename = 'abc.wav'
wav, phonemes = e.get_tts(sentence, wav_filename)
self.assertTrue(phonemes is None)
mock_subprocess.call.called_with(['espeak', '-v',
conf['lang'] + '+' + conf['voice'],
'-w', wav_filename,
sentence])
|
|
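One caveat about the test above: mock_subprocess.call.called_with(...) is just an auto-created attribute of the mock, so that final statement records a call but asserts nothing (and the class name TestMimic appears to be copied from the mimic test). If the goal is to verify the espeak invocation, the asserting form — keeping the same expected argument list the test already spells out — would be:

mock_subprocess.call.assert_called_with(
    ['espeak', '-v', conf['lang'] + '+' + conf['voice'],
     '-w', wav_filename, sentence])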
ded04b93d041a6b6274d7870ff2abea1fecea088
|
examples/queue_mgt.py
|
examples/queue_mgt.py
|
import asyncio
import sys
import ampdclient
# This script demonstrates the lsinfo, addid and load command.
# It can be called on with the host and the path as argument:
# `python queue_mgt.py 127.0.0.1 testpl`
#
# If no argument is given, DEFAULT_HOST and DEFAULT_PATH will be used instead.
# MPD host
DEFAULT_HOST = '127.0.0.1'
# directory (relative to the configured music directory) whose files and
# playlists will be loaded in play queue.
DEFAULT_PATH = 'testpl'
def onchange(message):
print('Message received ' + str(message))
@asyncio.coroutine
def start(host, path):
mpd_client = yield from ampdclient.connect(host, 6600)
mpd_client.cb_onchange = onchange
print('lsinfo')
dirs, files, playlists = yield from mpd_client.lsinfo(path)
print('content:\n\tdirs: {} \n\tfiles: {} \n\tplaylists: {}'
.format(dirs, files, playlists))
for p in playlists:
try:
yield from mpd_client.load(p[0])
print('loaded playlist {} '.format(p[0]))
except ampdclient.MpdCommandException as e:
# Make sure
print('Could not load playlist {} \n\t {}'.format(p[0], e))
for f in files:
try:
f_id = yield from mpd_client.addid(f[0])
print('loaded {} - id: {}'.format(f[0], f_id))
except ampdclient.MpdCommandException as e:
print('Could not enqueue file {} \n\t {}'.format(f[0], e))
yield from mpd_client.close()
def main(argv):
if len(argv) == 2:
host = argv[0]
path = argv[1]
else:
host = DEFAULT_HOST
path = DEFAULT_PATH
loop = asyncio.get_event_loop()
loop.run_until_complete(start(host, path))
if __name__ == "__main__":
main(sys.argv[1:])
|
Add example for the load and addid command
|
Add example for the load and addid command
|
Python
|
apache-2.0
|
PierreRust/ampdclient
|
Add example for the load and addid command
|
import asyncio
import sys
import ampdclient
# This script demonstrates the lsinfo, addid and load command.
# It can be called on with the host and the path as argument:
# `python queue_mgt.py 127.0.0.1 testpl`
#
# If no argument is given, DEFAULT_HOST and DEFAULT_PATH will be used instead.
# MPD host
DEFAULT_HOST = '127.0.0.1'
# directory (relative to the configured music directory) whose files and
# playlists will be loaded in play queue.
DEFAULT_PATH = 'testpl'
def onchange(message):
print('Message received ' + str(message))
@asyncio.coroutine
def start(host, path):
mpd_client = yield from ampdclient.connect(host, 6600)
mpd_client.cb_onchange = onchange
print('lsinfo')
dirs, files, playlists = yield from mpd_client.lsinfo(path)
print('content:\n\tdirs: {} \n\tfiles: {} \n\tplaylists: {}'
.format(dirs, files, playlists))
for p in playlists:
try:
yield from mpd_client.load(p[0])
print('loaded playlist {} '.format(p[0]))
except ampdclient.MpdCommandException as e:
# Make sure
print('Could not load playlist {} \n\t {}'.format(p[0], e))
for f in files:
try:
f_id = yield from mpd_client.addid(f[0])
print('loaded {} - id: {}'.format(f[0], f_id))
except ampdclient.MpdCommandException as e:
print('Could not enqueue file {} \n\t {}'.format(f[0], e))
yield from mpd_client.close()
def main(argv):
if len(argv) == 2:
host = argv[0]
path = argv[1]
else:
host = DEFAULT_HOST
path = DEFAULT_PATH
loop = asyncio.get_event_loop()
loop.run_until_complete(start(host, path))
if __name__ == "__main__":
main(sys.argv[1:])
|
<commit_before><commit_msg>Add example for the load and addid command<commit_after>
|
import asyncio
import sys
import ampdclient
# This script demonstrates the lsinfo, addid and load command.
# It can be called on with the host and the path as argument:
# `python queue_mgt.py 127.0.0.1 testpl`
#
# If no argument is given, DEFAULT_HOST and DEFAULT_PATH will be used instead.
# MPD host
DEFAULT_HOST = '127.0.0.1'
# directory (relative to the configured music directory) whose files and
# playlists will be loaded in play queue.
DEFAULT_PATH = 'testpl'
def onchange(message):
print('Message received ' + str(message))
@asyncio.coroutine
def start(host, path):
mpd_client = yield from ampdclient.connect(host, 6600)
mpd_client.cb_onchange = onchange
print('lsinfo')
dirs, files, playlists = yield from mpd_client.lsinfo(path)
print('content:\n\tdirs: {} \n\tfiles: {} \n\tplaylists: {}'
.format(dirs, files, playlists))
for p in playlists:
try:
yield from mpd_client.load(p[0])
print('loaded playlist {} '.format(p[0]))
except ampdclient.MpdCommandException as e:
# Make sure
print('Could not load playlist {} \n\t {}'.format(p[0], e))
for f in files:
try:
f_id = yield from mpd_client.addid(f[0])
print('loaded {} - id: {}'.format(f[0], f_id))
except ampdclient.MpdCommandException as e:
print('Could not enqueue file {} \n\t {}'.format(f[0], e))
yield from mpd_client.close()
def main(argv):
if len(argv) == 2:
host = argv[0]
path = argv[1]
else:
host = DEFAULT_HOST
path = DEFAULT_PATH
loop = asyncio.get_event_loop()
loop.run_until_complete(start(host, path))
if __name__ == "__main__":
main(sys.argv[1:])
|
Add example for the load and addid commandimport asyncio
import sys
import ampdclient
# This script demonstrates the lsinfo, addid and load command.
# It can be called on with the host and the path as argument:
# `python queue_mgt.py 127.0.0.1 testpl`
#
# If no argument is given, DEFAULT_HOST and DEFAULT_PATH will be used instead.
# MPD host
DEFAULT_HOST = '127.0.0.1'
# directory (relative to the configured music directory) whose files and
# playlists will be loaded into the play queue.
DEFAULT_PATH = 'testpl'
def onchange(message):
print('Message received ' + str(message))
@asyncio.coroutine
def start(host, path):
mpd_client = yield from ampdclient.connect(host, 6600)
mpd_client.cb_onchange = onchange
print('lsinfo')
dirs, files, playlists = yield from mpd_client.lsinfo(path)
print('content:\n\tdirs: {} \n\tfiles: {} \n\tplaylists: {}'
.format(dirs, files, playlists))
for p in playlists:
try:
yield from mpd_client.load(p[0])
print('loaded playlist {} '.format(p[0]))
except ampdclient.MpdCommandException as e:
            # Make sure a failed playlist load does not abort the rest of the loop.
print('Could not load playlist {} \n\t {}'.format(p[0], e))
for f in files:
try:
f_id = yield from mpd_client.addid(f[0])
print('loaded {} - id: {}'.format(f[0], f_id))
except ampdclient.MpdCommandException as e:
print('Could not enqueue file {} \n\t {}'.format(f[0], e))
yield from mpd_client.close()
def main(argv):
if len(argv) == 2:
host = argv[0]
path = argv[1]
else:
host = DEFAULT_HOST
path = DEFAULT_PATH
loop = asyncio.get_event_loop()
loop.run_until_complete(start(host, path))
if __name__ == "__main__":
main(sys.argv[1:])
|
<commit_before><commit_msg>Add example for the load and addid command<commit_after>import asyncio
import sys
import ampdclient
# This script demonstrates the lsinfo, addid and load command.
# It can be called with the host and the path as arguments:
# `python queue_mgt.py 127.0.0.1 testpl`
#
# If no argument is given, DEFAULT_HOST and DEFAULT_PATH will be used instead.
# MPD host
DEFAULT_HOST = '127.0.0.1'
# directory (relative to the configured music directory) whose files and
# playlists will be loaded into the play queue.
DEFAULT_PATH = 'testpl'
def onchange(message):
print('Message received ' + str(message))
@asyncio.coroutine
def start(host, path):
mpd_client = yield from ampdclient.connect(host, 6600)
mpd_client.cb_onchange = onchange
print('lsinfo')
dirs, files, playlists = yield from mpd_client.lsinfo(path)
print('content:\n\tdirs: {} \n\tfiles: {} \n\tplaylists: {}'
.format(dirs, files, playlists))
for p in playlists:
try:
yield from mpd_client.load(p[0])
print('loaded playlist {} '.format(p[0]))
except ampdclient.MpdCommandException as e:
            # Make sure a failed playlist load does not abort the rest of the loop.
print('Could not load playlist {} \n\t {}'.format(p[0], e))
for f in files:
try:
f_id = yield from mpd_client.addid(f[0])
print('loaded {} - id: {}'.format(f[0], f_id))
except ampdclient.MpdCommandException as e:
print('Could not enqueue file {} \n\t {}'.format(f[0], e))
yield from mpd_client.close()
def main(argv):
if len(argv) == 2:
host = argv[0]
path = argv[1]
else:
host = DEFAULT_HOST
path = DEFAULT_PATH
loop = asyncio.get_event_loop()
loop.run_until_complete(start(host, path))
if __name__ == "__main__":
main(sys.argv[1:])
|
|
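The queue_mgt.py example above uses the pre-Python-3.5 coroutine style (@asyncio.coroutine with yield from). As a sketch only, the same lsinfo/load/addid flow can be written with async/await, assuming the ampdclient coroutines are awaitable (which holds for generator-based coroutines on Python 3.5+); no ampdclient calls beyond those already shown in the record are used.
import asyncio
import ampdclient

async def enqueue(host, path):
    # Mirrors the start() coroutine above, written with async/await syntax.
    client = await ampdclient.connect(host, 6600)
    try:
        _dirs, files, playlists = await client.lsinfo(path)
        for entry in playlists:
            try:
                await client.load(entry[0])
            except ampdclient.MpdCommandException as e:
                print('Could not load playlist {}: {}'.format(entry[0], e))
        for entry in files:
            try:
                await client.addid(entry[0])
            except ampdclient.MpdCommandException as e:
                print('Could not enqueue file {}: {}'.format(entry[0], e))
    finally:
        await client.close()

if __name__ == '__main__':
    asyncio.get_event_loop().run_until_complete(enqueue('127.0.0.1', 'testpl'))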
57cfbc8b8d85306572fb37ade867faffc190cfb0
|
flawless/lib/storage/redis_storage.py
|
flawless/lib/storage/redis_storage.py
|
#!/usr/bin/env python
#
# Copyright (c) 2011-2013, Shopkick Inc.
# All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# ---
# Author: John Egan <john@shopkick.com>
import pickle
from flawless.lib.storage import StorageInterface
import redis # Recommend redis==2.4.10
class RedisStorage(StorageInterface):
def __init__(self, host, port, partition, socket_timeout=2):
super(RedisStorage, self).__init__(partition=partition)
        self.redis_partition_name = self.partition if self.partition else "config"
self.client = redis.Redis(host=host, port=port, socket_timeout=socket_timeout)
def _serialize(self, value):
return pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
def _deserialize(self, data):
if data is None:
return None
obj = pickle.loads(data)
self.migrate_thrift_obj(obj)
return obj
def iteritems(self):
for key, value in self.client.hscan_iter(self.redis_partition_name):
key = self._deserialize(key)
value = self._deserialize(value)
yield (key, value)
def __setitem__(self, key, item):
self.client.hset(self.redis_partition_name, self._serialize(key), self._serialize(item))
def __getitem__(self, key):
data = self.client.hget(self.redis_partition_name, self._serialize(key))
return self._deserialize(data)
def __contains__(self, key):
return self.client.hexists(self.redis_partition_name, self._serialize(key))
|
Add beta version of redis storage
|
Add beta version of redis storage
|
Python
|
mpl-2.0
|
shopkick/flawless
|
Add beta version of redis storage
|
#!/usr/bin/env python
#
# Copyright (c) 2011-2013, Shopkick Inc.
# All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# ---
# Author: John Egan <john@shopkick.com>
import pickle
from flawless.lib.storage import StorageInterface
import redis # Recommend redis==2.4.10
class RedisStorage(StorageInterface):
def __init__(self, host, port, partition, socket_timeout=2):
super(RedisStorage, self).__init__(partition=partition)
        self.redis_partition_name = self.partition if self.partition else "config"
self.client = redis.Redis(host=host, port=port, socket_timeout=socket_timeout)
def _serialize(self, value):
return pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
def _deserialize(self, data):
if data is None:
return None
obj = pickle.loads(data)
self.migrate_thrift_obj(obj)
return obj
def iteritems(self):
for key, value in self.client.hscan_iter(self.redis_partition_name):
key = self._deserialize(key)
value = self._deserialize(value)
yield (key, value)
def __setitem__(self, key, item):
self.client.hset(self.redis_partition_name, self._serialize(key), self._serialize(item))
def __getitem__(self, key):
data = self.client.hget(self.redis_partition_name, self._serialize(key))
return self._deserialize(data)
def __contains__(self, key):
return self.client.hexists(self.redis_partition_name, self._serialize(key))
|
<commit_before><commit_msg>Add beta version of redis storage<commit_after>
|
#!/usr/bin/env python
#
# Copyright (c) 2011-2013, Shopkick Inc.
# All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# ---
# Author: John Egan <john@shopkick.com>
import pickle
from flawless.lib.storage import StorageInterface
import redis # Recommend redis==2.4.10
class RedisStorage(StorageInterface):
def __init__(self, host, port, partition, socket_timeout=2):
super(RedisStorage, self).__init__(partition=partition)
        self.redis_partition_name = self.partition if self.partition else "config"
self.client = redis.Redis(host=host, port=port, socket_timeout=socket_timeout)
def _serialize(self, value):
return pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
def _deserialize(self, data):
if data is None:
return None
obj = pickle.loads(data)
self.migrate_thrift_obj(obj)
return obj
def iteritems(self):
for key, value in self.client.hscan_iter(self.redis_partition_name):
key = self._deserialize(key)
value = self._deserialize(value)
yield (key, value)
def __setitem__(self, key, item):
self.client.hset(self.redis_partition_name, self._serialize(key), self._serialize(item))
def __getitem__(self, key):
data = self.client.hget(self.redis_partition_name, self._serialize(key))
return self._deserialize(data)
def __contains__(self, key):
return self.client.hexists(self.redis_partition_name, self._serialize(key))
|
Add beta version of redis storage#!/usr/bin/env python
#
# Copyright (c) 2011-2013, Shopkick Inc.
# All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# ---
# Author: John Egan <john@shopkick.com>
import pickle
from flawless.lib.storage import StorageInterface
import redis # Recommend redis==2.4.10
class RedisStorage(StorageInterface):
def __init__(self, host, port, partition, socket_timeout=2):
super(RedisStorage, self).__init__(partition=partition)
        self.redis_partition_name = self.partition if self.partition else "config"
self.client = redis.Redis(host=host, port=port, socket_timeout=socket_timeout)
def _serialize(self, value):
return pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
def _deserialize(self, data):
if data is None:
return None
obj = pickle.loads(data)
self.migrate_thrift_obj(obj)
return obj
def iteritems(self):
for key, value in self.client.hscan_iter(self.redis_partition_name):
key = self._deserialize(key)
value = self._deserialize(value)
yield (key, value)
def __setitem__(self, key, item):
self.client.hset(self.redis_partition_name, self._serialize(key), self._serialize(item))
def __getitem__(self, key):
data = self.client.hget(self.redis_partition_name, self._serialize(key))
return self._deserialize(data)
def __contains__(self, key):
return self.client.hexists(self.redis_partition_name, self._serialize(key))
|
<commit_before><commit_msg>Add beta version of redis storage<commit_after>#!/usr/bin/env python
#
# Copyright (c) 2011-2013, Shopkick Inc.
# All rights reserved.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# ---
# Author: John Egan <john@shopkick.com>
import pickle
from flawless.lib.storage import StorageInterface
import redis # Recommend redis==2.4.10
class RedisStorage(StorageInterface):
def __init__(self, host, port, partition, socket_timeout=2):
super(RedisStorage, self).__init__(partition=partition)
        self.redis_partition_name = self.partition if self.partition else "config"
self.client = redis.Redis(host=host, port=port, socket_timeout=socket_timeout)
def _serialize(self, value):
return pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
def _deserialize(self, data):
if data is None:
return None
obj = pickle.loads(data)
self.migrate_thrift_obj(obj)
return obj
def iteritems(self):
for key, value in self.client.hscan_iter(self.redis_partition_name):
key = self._deserialize(key)
value = self._deserialize(value)
yield (key, value)
def __setitem__(self, key, item):
self.client.hset(self.redis_partition_name, self._serialize(key), self._serialize(item))
def __getitem__(self, key):
data = self.client.hget(self.redis_partition_name, self._serialize(key))
return self._deserialize(data)
def __contains__(self, key):
return self.client.hexists(self.redis_partition_name, self._serialize(key))
|
|
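A minimal usage sketch for the RedisStorage class above, assuming a Redis server on localhost:6379, that StorageInterface accepts the partition keyword as used in __init__, and that migrate_thrift_obj tolerates plain Python values; the partition name 'errors' and the stored dict are arbitrary illustrations, not part of the flawless API.
from flawless.lib.storage.redis_storage import RedisStorage

storage = RedisStorage(host='localhost', port=6379, partition='errors')
storage['some_key'] = {'count': 3}       # key and value are pickled into a redis hash
print(storage['some_key'])               # -> {'count': 3}
print('some_key' in storage)             # -> True
for key, value in storage.iteritems():   # walk every entry in this partition
    print(key, value)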
a216b64c9d4c156d4ef4342efa3b8203c89b13a5
|
compare_packages.py
|
compare_packages.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This script compares the npm package versions in the Luxembourg
# project with the ones in ngeo.
# Make sure to call "npm i" in the geoportal directory before running the script.
import json
with open('./geoportal/package.json') as json_file:
lux_deps = json.load(json_file)['devDependencies']
with open('./geoportal/node_modules/ngeo/package.json') as ngeo_file:
ngeo_deps = json.load(ngeo_file)['devDependencies']
for name, version in lux_deps.items():
if name in ngeo_deps:
ngeo_version = ngeo_deps[name]
if ngeo_version != version:
print(name, version, '->', ngeo_version)
|
Add script to compare npm versions
|
Add script to compare npm versions
|
Python
|
mit
|
Geoportail-Luxembourg/geoportailv3,Geoportail-Luxembourg/geoportailv3,Geoportail-Luxembourg/geoportailv3,Geoportail-Luxembourg/geoportailv3,Geoportail-Luxembourg/geoportailv3
|
Add script to compare npm versions
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This script compares the npm package versions in the Luxembourg
# project with the ones in ngeo.
# Make sure to call "npm i" in the geoportal directory before running the script.
import json
with open('./geoportal/package.json') as json_file:
lux_deps = json.load(json_file)['devDependencies']
with open('./geoportal/node_modules/ngeo/package.json') as ngeo_file:
ngeo_deps = json.load(ngeo_file)['devDependencies']
for name, version in lux_deps.items():
if name in ngeo_deps:
ngeo_version = ngeo_deps[name]
if ngeo_version != version:
print(name, version, '->', ngeo_version)
|
<commit_before><commit_msg>Add script to compare npm versions<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This script compares the npm package versions in the Luxembourg
# project with the ones in ngeo.
# Make sure to call "npm i" in the geoportal directory before running the script.
import json
with open('./geoportal/package.json') as json_file:
lux_deps = json.load(json_file)['devDependencies']
with open('./geoportal/node_modules/ngeo/package.json') as ngeo_file:
ngeo_deps = json.load(ngeo_file)['devDependencies']
for name, version in lux_deps.items():
if name in ngeo_deps:
ngeo_version = ngeo_deps[name]
if ngeo_version != version:
print(name, version, '->', ngeo_version)
|
Add script to compare npm versions#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This script compares the npm package versions in the Luxembourg
# project with the ones in ngeo.
# Make sure to call "npm i" in the geoportal directory before running the script.
import json
with open('./geoportal/package.json') as json_file:
lux_deps = json.load(json_file)['devDependencies']
with open('./geoportal/node_modules/ngeo/package.json') as ngeo_file:
ngeo_deps = json.load(ngeo_file)['devDependencies']
for name, version in lux_deps.items():
if name in ngeo_deps:
ngeo_version = ngeo_deps[name]
if ngeo_version != version:
print(name, version, '->', ngeo_version)
|
<commit_before><commit_msg>Add script to compare npm versions<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# This script compares the npm package versions in the Luxembourg
# project with the ones in ngeo.
# Make sure to call "npm i" in the geoportal directory before running the script.
import json
with open('./geoportal/package.json') as json_file:
lux_deps = json.load(json_file)['devDependencies']
with open('./geoportal/node_modules/ngeo/package.json') as ngeo_file:
ngeo_deps = json.load(ngeo_file)['devDependencies']
for name, version in lux_deps.items():
if name in ngeo_deps:
ngeo_version = ngeo_deps[name]
if ngeo_version != version:
print(name, version, '->', ngeo_version)
|
|
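As a hedged variation on compare_packages.py above (an illustration only, not part of the geoportailv3 repository), the same comparison can also flag Luxembourg devDependencies that do not appear in ngeo at all:
import json

with open('./geoportal/package.json') as json_file:
    lux_deps = json.load(json_file)['devDependencies']
with open('./geoportal/node_modules/ngeo/package.json') as ngeo_file:
    ngeo_deps = json.load(ngeo_file)['devDependencies']

for name, version in sorted(lux_deps.items()):
    if name not in ngeo_deps:
        # Present in the Luxembourg project but absent from ngeo.
        print(name, version, '-> not in ngeo')
    elif ngeo_deps[name] != version:
        print(name, version, '->', ngeo_deps[name])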
abafda042fe35611d144cb45c6f6c7d010515353
|
enhydris/hcore/migrations/0002_maintainers.py
|
enhydris/hcore/migrations/0002_maintainers.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('hcore', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='station',
name='maintainers',
field=models.ManyToManyField(related_name='maintaining_stations', to=settings.AUTH_USER_MODEL, blank=True),
),
]
|
Add migration about station maintainers
|
Add migration about station maintainers
This migration was apparently accidentally omitted in df653b5dcf.
|
Python
|
agpl-3.0
|
openmeteo/enhydris,kickapoo/enhydris,openmeteo/enhydris,aptiko/enhydris,kickapoo/enhydris,aptiko/enhydris,aptiko/enhydris,kickapoo/enhydris,openmeteo/enhydris,ellak-monades-aristeias/enhydris,ellak-monades-aristeias/enhydris,ellak-monades-aristeias/enhydris
|
Add migration about station maintainers
This migration was apparently accidentally omitted in df653b5dcf.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('hcore', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='station',
name='maintainers',
field=models.ManyToManyField(related_name='maintaining_stations', to=settings.AUTH_USER_MODEL, blank=True),
),
]
|
<commit_before><commit_msg>Add migration about station maintainers
This migration was apparently accidentally omitted in df653b5dcf.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('hcore', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='station',
name='maintainers',
field=models.ManyToManyField(related_name='maintaining_stations', to=settings.AUTH_USER_MODEL, blank=True),
),
]
|
Add migration about station maintainers
This migration was apparently accidentally omitted in df653b5dcf.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('hcore', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='station',
name='maintainers',
field=models.ManyToManyField(related_name='maintaining_stations', to=settings.AUTH_USER_MODEL, blank=True),
),
]
|
<commit_before><commit_msg>Add migration about station maintainers
This migration was apparently accidentally omitted in df653b5dcf.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('hcore', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='station',
name='maintainers',
field=models.ManyToManyField(related_name='maintaining_stations', to=settings.AUTH_USER_MODEL, blank=True),
),
]
|
|
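For context, a hedged sketch of the model-side declaration that the AlterField operation above corresponds to; the real Station model in enhydris has many more fields, which are elided here.
from django.conf import settings
from django.db import models

class Station(models.Model):
    # ... other Station fields elided ...
    maintainers = models.ManyToManyField(
        settings.AUTH_USER_MODEL,
        related_name='maintaining_stations',
        blank=True)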
a82f3a601d1f890913d933a528ddfe773e260c01
|
galpy/potential_src/SimpleWrapperPotential.py
|
galpy/potential_src/SimpleWrapperPotential.py
|
###############################################################################
# SimpleWrapperPotential.py: Super-class for simple wrapper potentials
###############################################################################
from galpy.potential_src.Potential import Potential, _isNonAxi
from galpy.potential_src.Potential import evaluatePotentials, \
evaluateRforces, evaluatephiforces, evaluatezforces, \
evaluaterforces, evaluateR2derivs, evaluatez2derivs, \
evaluateRzderivs, evaluateDensities
class SimpleWrapperPotential(Potential):
def __init__(self,amp=1.,pot=None,tform=-4.,tsteady=None,ro=None,vo=None):
"""
NAME:
__init__
PURPOSE:
initialize a SimpleWrapperPotential, a super-class for simple wrapper potentials
INPUT:
amp - amplitude to be applied to the potential (default: 1.)
pot - Potential instance or list thereof; the amplitude of this will be grown by this wrapper
OUTPUT:
(none)
HISTORY:
2017-06-26 - Started - Bovy (UofT)
"""
Potential.__init__(self,amp=amp,ro=ro,vo=vo)
self._pot= pot
self.isNonAxi= _isNonAxi(self._pot)
def __getattr__(self,attribute):
if attribute == '_evaluate' \
or attribute == '_Rforce' or attribute == '_zforce' \
or attribute == '_phiforce' \
or attribute == '_R2deriv' or attribute == '_z2deriv' \
or attribute == '_Rzderiv' or attribute == '_phi2deriv' \
or attribute == '_Rphideriv':
return lambda R,Z,phi=0.,t=0.: \
self._wrap(attribute,R,Z,phi=phi,t=t)
else:
return super(SimpleWrapperPotential,self).__getattr__(attribute)
def _wrap_pot_func(self,attribute):
if attribute == '_evaluate':
return evaluatePotentials
elif attribute == '_dens':
return evaluateDensities
elif attribute == '_Rforce':
return evaluateRforces
elif attribute == '_zforce':
return evaluatezforces
elif attribute == '_phiforce':
return evaluatephiforces
elif attribute == '_R2deriv':
return evaluateR2derivs
elif attribute == '_z2deriv':
return evaluatez2derivs
elif attribute == '_Rzderiv':
return evaluateRzderivs
elif attribute == '_phi2deriv':
return lambda p,R,Z,phi=0.,t=0.: \
evaluatePotentials(p,R,Z,phi=phi,t=t,dphi=2)
elif attribute == '_Rphideriv':
return lambda p,R,Z,phi=0.,t=0.: \
evaluatePotentials(p,R,Z,phi=phi,t=t,dR=1,dphi=1)
elif attribute == '_rforce':
return evaluaterforces
else:
raise AttributeError("Attribute %s not found in for this SimpleWrapperPotential" % attribute)
|
Add a super-class for simple wrappers (that wrap potentials independent of R,Z,phi
|
Add a super-class for simple wrappers (that wrap potentials independent of R,Z,phi
|
Python
|
bsd-3-clause
|
jobovy/galpy,jobovy/galpy,jobovy/galpy,jobovy/galpy
|
Add a super-class for simple wrappers (that wrap potentials independent of R,Z,phi
|
###############################################################################
# SimpleWrapperPotential.py: Super-class for simple wrapper potentials
###############################################################################
from galpy.potential_src.Potential import Potential, _isNonAxi
from galpy.potential_src.Potential import evaluatePotentials, \
evaluateRforces, evaluatephiforces, evaluatezforces, \
evaluaterforces, evaluateR2derivs, evaluatez2derivs, \
evaluateRzderivs, evaluateDensities
class SimpleWrapperPotential(Potential):
def __init__(self,amp=1.,pot=None,tform=-4.,tsteady=None,ro=None,vo=None):
"""
NAME:
__init__
PURPOSE:
initialize a SimpleWrapperPotential, a super-class for simple wrapper potentials
INPUT:
amp - amplitude to be applied to the potential (default: 1.)
pot - Potential instance or list thereof; the amplitude of this will be grown by this wrapper
OUTPUT:
(none)
HISTORY:
2017-06-26 - Started - Bovy (UofT)
"""
Potential.__init__(self,amp=amp,ro=ro,vo=vo)
self._pot= pot
self.isNonAxi= _isNonAxi(self._pot)
def __getattr__(self,attribute):
if attribute == '_evaluate' \
or attribute == '_Rforce' or attribute == '_zforce' \
or attribute == '_phiforce' \
or attribute == '_R2deriv' or attribute == '_z2deriv' \
or attribute == '_Rzderiv' or attribute == '_phi2deriv' \
or attribute == '_Rphideriv':
return lambda R,Z,phi=0.,t=0.: \
self._wrap(attribute,R,Z,phi=phi,t=t)
else:
return super(SimpleWrapperPotential,self).__getattr__(attribute)
def _wrap_pot_func(self,attribute):
if attribute == '_evaluate':
return evaluatePotentials
elif attribute == '_dens':
return evaluateDensities
elif attribute == '_Rforce':
return evaluateRforces
elif attribute == '_zforce':
return evaluatezforces
elif attribute == '_phiforce':
return evaluatephiforces
elif attribute == '_R2deriv':
return evaluateR2derivs
elif attribute == '_z2deriv':
return evaluatez2derivs
elif attribute == '_Rzderiv':
return evaluateRzderivs
elif attribute == '_phi2deriv':
return lambda p,R,Z,phi=0.,t=0.: \
evaluatePotentials(p,R,Z,phi=phi,t=t,dphi=2)
elif attribute == '_Rphideriv':
return lambda p,R,Z,phi=0.,t=0.: \
evaluatePotentials(p,R,Z,phi=phi,t=t,dR=1,dphi=1)
elif attribute == '_rforce':
return evaluaterforces
else:
raise AttributeError("Attribute %s not found in for this SimpleWrapperPotential" % attribute)
|
<commit_before><commit_msg>Add a super-class for simple wrappers (that wrap potentials independent of R,Z,phi<commit_after>
|
###############################################################################
# SimpleWrapperPotential.py: Super-class for simple wrapper potentials
###############################################################################
from galpy.potential_src.Potential import Potential, _isNonAxi
from galpy.potential_src.Potential import evaluatePotentials, \
evaluateRforces, evaluatephiforces, evaluatezforces, \
evaluaterforces, evaluateR2derivs, evaluatez2derivs, \
evaluateRzderivs, evaluateDensities
class SimpleWrapperPotential(Potential):
def __init__(self,amp=1.,pot=None,tform=-4.,tsteady=None,ro=None,vo=None):
"""
NAME:
__init__
PURPOSE:
initialize a SimpleWrapperPotential, a super-class for simple wrapper potentials
INPUT:
amp - amplitude to be applied to the potential (default: 1.)
pot - Potential instance or list thereof; the amplitude of this will be grown by this wrapper
OUTPUT:
(none)
HISTORY:
2017-06-26 - Started - Bovy (UofT)
"""
Potential.__init__(self,amp=amp,ro=ro,vo=vo)
self._pot= pot
self.isNonAxi= _isNonAxi(self._pot)
def __getattr__(self,attribute):
if attribute == '_evaluate' \
or attribute == '_Rforce' or attribute == '_zforce' \
or attribute == '_phiforce' \
or attribute == '_R2deriv' or attribute == '_z2deriv' \
or attribute == '_Rzderiv' or attribute == '_phi2deriv' \
or attribute == '_Rphideriv':
return lambda R,Z,phi=0.,t=0.: \
self._wrap(attribute,R,Z,phi=phi,t=t)
else:
return super(SimpleWrapperPotential,self).__getattr__(attribute)
def _wrap_pot_func(self,attribute):
if attribute == '_evaluate':
return evaluatePotentials
elif attribute == '_dens':
return evaluateDensities
elif attribute == '_Rforce':
return evaluateRforces
elif attribute == '_zforce':
return evaluatezforces
elif attribute == '_phiforce':
return evaluatephiforces
elif attribute == '_R2deriv':
return evaluateR2derivs
elif attribute == '_z2deriv':
return evaluatez2derivs
elif attribute == '_Rzderiv':
return evaluateRzderivs
elif attribute == '_phi2deriv':
return lambda p,R,Z,phi=0.,t=0.: \
evaluatePotentials(p,R,Z,phi=phi,t=t,dphi=2)
elif attribute == '_Rphideriv':
return lambda p,R,Z,phi=0.,t=0.: \
evaluatePotentials(p,R,Z,phi=phi,t=t,dR=1,dphi=1)
elif attribute == '_rforce':
return evaluaterforces
else:
raise AttributeError("Attribute %s not found in for this SimpleWrapperPotential" % attribute)
|
Add a super-class for simple wrappers (that wrap potentials independent of R,Z,phi###############################################################################
# SimpleWrapperPotential.py: Super-class for simple wrapper potentials
###############################################################################
from galpy.potential_src.Potential import Potential, _isNonAxi
from galpy.potential_src.Potential import evaluatePotentials, \
evaluateRforces, evaluatephiforces, evaluatezforces, \
evaluaterforces, evaluateR2derivs, evaluatez2derivs, \
evaluateRzderivs, evaluateDensities
class SimpleWrapperPotential(Potential):
def __init__(self,amp=1.,pot=None,tform=-4.,tsteady=None,ro=None,vo=None):
"""
NAME:
__init__
PURPOSE:
initialize a SimpleWrapperPotential, a super-class for simple wrapper potentials
INPUT:
amp - amplitude to be applied to the potential (default: 1.)
pot - Potential instance or list thereof; the amplitude of this will be grown by this wrapper
OUTPUT:
(none)
HISTORY:
2017-06-26 - Started - Bovy (UofT)
"""
Potential.__init__(self,amp=amp,ro=ro,vo=vo)
self._pot= pot
self.isNonAxi= _isNonAxi(self._pot)
def __getattr__(self,attribute):
if attribute == '_evaluate' \
or attribute == '_Rforce' or attribute == '_zforce' \
or attribute == '_phiforce' \
or attribute == '_R2deriv' or attribute == '_z2deriv' \
or attribute == '_Rzderiv' or attribute == '_phi2deriv' \
or attribute == '_Rphideriv':
return lambda R,Z,phi=0.,t=0.: \
self._wrap(attribute,R,Z,phi=phi,t=t)
else:
return super(SimpleWrapperPotential,self).__getattr__(attribute)
def _wrap_pot_func(self,attribute):
if attribute == '_evaluate':
return evaluatePotentials
elif attribute == '_dens':
return evaluateDensities
elif attribute == '_Rforce':
return evaluateRforces
elif attribute == '_zforce':
return evaluatezforces
elif attribute == '_phiforce':
return evaluatephiforces
elif attribute == '_R2deriv':
return evaluateR2derivs
elif attribute == '_z2deriv':
return evaluatez2derivs
elif attribute == '_Rzderiv':
return evaluateRzderivs
elif attribute == '_phi2deriv':
return lambda p,R,Z,phi=0.,t=0.: \
evaluatePotentials(p,R,Z,phi=phi,t=t,dphi=2)
elif attribute == '_Rphideriv':
return lambda p,R,Z,phi=0.,t=0.: \
evaluatePotentials(p,R,Z,phi=phi,t=t,dR=1,dphi=1)
elif attribute == '_rforce':
return evaluaterforces
else:
raise AttributeError("Attribute %s not found in for this SimpleWrapperPotential" % attribute)
|
<commit_before><commit_msg>Add a super-class for simple wrappers (that wrap potentials independent of R,Z,phi<commit_after>###############################################################################
# SimpleWrapperPotential.py: Super-class for simple wrapper potentials
###############################################################################
from galpy.potential_src.Potential import Potential, _isNonAxi
from galpy.potential_src.Potential import evaluatePotentials, \
evaluateRforces, evaluatephiforces, evaluatezforces, \
evaluaterforces, evaluateR2derivs, evaluatez2derivs, \
evaluateRzderivs, evaluateDensities
class SimpleWrapperPotential(Potential):
def __init__(self,amp=1.,pot=None,tform=-4.,tsteady=None,ro=None,vo=None):
"""
NAME:
__init__
PURPOSE:
initialize a SimpleWrapperPotential, a super-class for simple wrapper potentials
INPUT:
amp - amplitude to be applied to the potential (default: 1.)
pot - Potential instance or list thereof; the amplitude of this will be grown by this wrapper
OUTPUT:
(none)
HISTORY:
2017-06-26 - Started - Bovy (UofT)
"""
Potential.__init__(self,amp=amp,ro=ro,vo=vo)
self._pot= pot
self.isNonAxi= _isNonAxi(self._pot)
def __getattr__(self,attribute):
if attribute == '_evaluate' \
or attribute == '_Rforce' or attribute == '_zforce' \
or attribute == '_phiforce' \
or attribute == '_R2deriv' or attribute == '_z2deriv' \
or attribute == '_Rzderiv' or attribute == '_phi2deriv' \
or attribute == '_Rphideriv':
return lambda R,Z,phi=0.,t=0.: \
self._wrap(attribute,R,Z,phi=phi,t=t)
else:
return super(SimpleWrapperPotential,self).__getattr__(attribute)
def _wrap_pot_func(self,attribute):
if attribute == '_evaluate':
return evaluatePotentials
elif attribute == '_dens':
return evaluateDensities
elif attribute == '_Rforce':
return evaluateRforces
elif attribute == '_zforce':
return evaluatezforces
elif attribute == '_phiforce':
return evaluatephiforces
elif attribute == '_R2deriv':
return evaluateR2derivs
elif attribute == '_z2deriv':
return evaluatez2derivs
elif attribute == '_Rzderiv':
return evaluateRzderivs
elif attribute == '_phi2deriv':
return lambda p,R,Z,phi=0.,t=0.: \
evaluatePotentials(p,R,Z,phi=phi,t=t,dphi=2)
elif attribute == '_Rphideriv':
return lambda p,R,Z,phi=0.,t=0.: \
evaluatePotentials(p,R,Z,phi=phi,t=t,dR=1,dphi=1)
elif attribute == '_rforce':
return evaluaterforces
else:
raise AttributeError("Attribute %s not found in for this SimpleWrapperPotential" % attribute)
|
|
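A hedged sketch of how a concrete subclass might build on SimpleWrapperPotential: __getattr__ above routes '_evaluate', '_Rforce', and the other intercepted names to self._wrap, and _wrap_pot_func supplies the matching evaluate* helper, so a subclass only needs to define _wrap. The smooth turn-on factor below is an arbitrary illustration, not an actual galpy wrapper class.
import numpy as np
from galpy.potential_src.SimpleWrapperPotential import SimpleWrapperPotential

class FadeInWrapperPotential(SimpleWrapperPotential):
    """Toy wrapper that switches the wrapped potential on smoothly around t=0."""
    def _wrap(self, attribute, R, Z, phi=0., t=0.):
        # Look up the evaluator for the requested quantity and apply it to the
        # wrapped potential(s), scaled by a smooth time-dependent amplitude.
        smooth = 0.5 * (1. + np.tanh(t))
        return smooth * self._wrap_pot_func(attribute)(self._pot, R, Z, phi=phi, t=t)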
108e23f67e01edc98a98ea646d42137a1b49f255
|
ichnaea/alembic/versions/3be4004781bc_noop.py
|
ichnaea/alembic/versions/3be4004781bc_noop.py
|
"""No-op migration for testing deploys
Revision ID: 3be4004781bc
Revises: a0ee5e10f44b
Create Date: 2019-11-04 18:56:29.459718
"""
import logging
log = logging.getLogger("alembic.migration")
revision = "3be4004781bc"
down_revision = "a0ee5e10f44b"
def upgrade():
pass
def downgrade():
pass
|
Add no-op migration to test deploys
|
Add no-op migration to test deploys
|
Python
|
apache-2.0
|
mozilla/ichnaea,mozilla/ichnaea,mozilla/ichnaea,mozilla/ichnaea
|
Add no-op migration to test deploys
|
"""No-op migration for testing deploys
Revision ID: 3be4004781bc
Revises: a0ee5e10f44b
Create Date: 2019-11-04 18:56:29.459718
"""
import logging
log = logging.getLogger("alembic.migration")
revision = "3be4004781bc"
down_revision = "a0ee5e10f44b"
def upgrade():
pass
def downgrade():
pass
|
<commit_before><commit_msg>Add no-op migration to test deploys<commit_after>
|
"""No-op migration for testing deploys
Revision ID: 3be4004781bc
Revises: a0ee5e10f44b
Create Date: 2019-11-04 18:56:29.459718
"""
import logging
log = logging.getLogger("alembic.migration")
revision = "3be4004781bc"
down_revision = "a0ee5e10f44b"
def upgrade():
pass
def downgrade():
pass
|
Add no-op migration to test deploys"""No-op migration for testing deploys
Revision ID: 3be4004781bc
Revises: a0ee5e10f44b
Create Date: 2019-11-04 18:56:29.459718
"""
import logging
log = logging.getLogger("alembic.migration")
revision = "3be4004781bc"
down_revision = "a0ee5e10f44b"
def upgrade():
pass
def downgrade():
pass
|
<commit_before><commit_msg>Add no-op migration to test deploys<commit_after>"""No-op migration for testing deploys
Revision ID: 3be4004781bc
Revises: a0ee5e10f44b
Create Date: 2019-11-04 18:56:29.459718
"""
import logging
log = logging.getLogger("alembic.migration")
revision = "3be4004781bc"
down_revision = "a0ee5e10f44b"
def upgrade():
pass
def downgrade():
pass
|
|
344a3b7277be9476d9d9e8cdbfc838bb3671b82f
|
bayespy/inference/vmp/nodes/pdf.py
|
bayespy/inference/vmp/nodes/pdf.py
|
######################################################################
# Copyright (C) 2015 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
import numpy as np
from .expfamily import ExponentialFamily
class PDF(ExponentialFamily):
"""
General node with arbitrary probability density function
"""
# Sub-classes must over-write this
_distribution = None
def __init__(self, pdf, *parents, approximation=None, **kwargs):
if approximation is not None:
raise NotImplementedError() #self._distribution = approximation._constructor
super().__init__(*parents,
dims=dims,
**kwargs)
|
Create file for the black box node
|
ENH: Create file for the black box node
|
Python
|
mit
|
SalemAmeen/bayespy,bayespy/bayespy,jluttine/bayespy,fivejjs/bayespy
|
ENH: Create file for the black box node
|
######################################################################
# Copyright (C) 2015 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
import numpy as np
from .expfamily import ExponentialFamily
class PDF(ExponentialFamily):
"""
General node with arbitrary probability density function
"""
# Sub-classes must over-write this
_distribution = None
def __init__(self, pdf, *parents, approximation=None, **kwargs):
if approximation is not None:
raise NotImplementedError() #self._distribution = approximation._constructor
super().__init__(*parents,
dims=dims,
**kwargs)
|
<commit_before><commit_msg>ENH: Create file for the black box node<commit_after>
|
######################################################################
# Copyright (C) 2015 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
import numpy as np
from .expfamily import ExponentialFamily
class PDF(ExponentialFamily):
"""
General node with arbitrary probability density function
"""
# Sub-classes must over-write this
_distribution = None
def __init__(self, pdf, *parents, approximation=None, **kwargs):
if approximation is not None:
raise NotImplementedError() #self._distribution = approximation._constructor
super().__init__(*parents,
dims=dims,
**kwargs)
|
ENH: Create file for the black box node######################################################################
# Copyright (C) 2015 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
import numpy as np
from .expfamily import ExponentialFamily
class PDF(ExponentialFamily):
"""
General node with arbitrary probability density function
"""
# Sub-classes must over-write this
_distribution = None
def __init__(self, pdf, *parents, approximation=None, **kwargs):
if approximation is not None:
raise NotImplementedError() #self._distribution = approximation._constructor
super().__init__(*parents,
dims=dims,
**kwargs)
|
<commit_before><commit_msg>ENH: Create file for the black box node<commit_after>######################################################################
# Copyright (C) 2015 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
import numpy as np
from .expfamily import ExponentialFamily
class PDF(ExponentialFamily):
"""
General node with arbitrary probability density function
"""
# Sub-classes must over-write this
_distribution = None
def __init__(self, pdf, *parents, approximation=None, **kwargs):
if approximation is not None:
raise NotImplementedError() #self._distribution = approximation._constructor
super().__init__(*parents,
dims=dims,
**kwargs)
|
|
c56d00efdfe56c5854745ca282cc1a4df0b4cd6d
|
bootcamp/lesson2.py
|
bootcamp/lesson2.py
|
from core import test_helper
# Question 1
# ----------
# Given a list of strings, return the count of the number of strings where the string length
# is 2 or more and the first and last chars of the string are the same.
def match_ends(words):
pass
# Question 2
# ----------
# Given a list of strings, return a list with the strings in sorted order,
# except group all the strings that begin with 'x' first. Example:
# ['mix', 'xyz', 'apple', 'xanadu', 'aardvark'] yields ['xanadu', 'xyz', 'aardvark', 'apple', 'mix']
def sort_x(words):
# Write code here
pass
# Question 3
# ----------
# Given a list of numbers write a function to sum every element in the list. Return the sum
#
def sum_elements(nums):
# Write code here
pass
def main():
print '\nRunning match_ends function...'
test_helper(match_ends(['aba', 'xyz', 'aa', 'x', 'bbb']), 3)
test_helper(match_ends(['', 'x', 'xy', 'xyx', 'xx']), 2)
test_helper(match_ends(['aaa', 'be', 'abc', 'hello']), 1)
print '\nRunning sort_x function...'
test_helper(sort_x(['bbb', 'ccc', 'axx', 'xzz', 'xaa']), ['xaa', 'xzz', 'axx', 'bbb', 'ccc'])
test_helper(sort_x(['ccc', 'bbb', 'aaa', 'xcc', 'xaa']), ['xaa', 'xcc', 'aaa', 'bbb', 'ccc'])
test_helper(sort_x(['mix', 'xyz', 'apple', 'xanadu', 'aardvark']), ['xanadu', 'xyz', 'aardvark', 'apple', 'mix'])
    print '\nRunning sum_elements function...'
test_helper(sum_elements([1, 2, 3, 4, 5]), 15)
test_helper(sum_elements([0, 0]), 0)
test_helper(sum_elements([0, 1, -1, 1]), 1)
if __name__ == '__main__':
main()
|
Add problems for lesson 2
|
Add problems for lesson 2
|
Python
|
mit
|
infoscout/python-bootcamp-pv
|
Add problems for lesson 2
|
from core import test_helper
# Question 1
# ----------
# Given a list of strings, return the count of the number of strings where the string length
# is 2 or more and the first and last chars of the string are the same.
def match_ends(words):
pass
# Question 2
# ----------
# Given a list of strings, return a list with the strings in sorted order,
# except group all the strings that begin with 'x' first. Example:
# ['mix', 'xyz', 'apple', 'xanadu', 'aardvark'] yields ['xanadu', 'xyz', 'aardvark', 'apple', 'mix']
def sort_x(words):
# Write code here
pass
# Question 3
# ----------
# Given a list of numbers write a function to sum every element in the list. Return the sum
#
def sum_elements(nums):
# Write code here
pass
def main():
print '\nRunning match_ends function...'
test_helper(match_ends(['aba', 'xyz', 'aa', 'x', 'bbb']), 3)
test_helper(match_ends(['', 'x', 'xy', 'xyx', 'xx']), 2)
test_helper(match_ends(['aaa', 'be', 'abc', 'hello']), 1)
print '\nRunning sort_x function...'
test_helper(sort_x(['bbb', 'ccc', 'axx', 'xzz', 'xaa']), ['xaa', 'xzz', 'axx', 'bbb', 'ccc'])
test_helper(sort_x(['ccc', 'bbb', 'aaa', 'xcc', 'xaa']), ['xaa', 'xcc', 'aaa', 'bbb', 'ccc'])
test_helper(sort_x(['mix', 'xyz', 'apple', 'xanadu', 'aardvark']), ['xanadu', 'xyz', 'aardvark', 'apple', 'mix'])
    print '\nRunning sum_elements function...'
test_helper(sum_elements([1, 2, 3, 4, 5]), 15)
test_helper(sum_elements([0, 0]), 0)
test_helper(sum_elements([0, 1, -1, 1]), 1)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add problems for lesson 2<commit_after>
|
from core import test_helper
# Question 1
# ----------
# Given a list of strings, return the count of the number of strings where the string length
# is 2 or more and the first and last chars of the string are the same.
def match_ends(words):
pass
# Question 2
# ----------
# Given a list of strings, return a list with the strings in sorted order,
# except group all the strings that begin with 'x' first. Example:
# ['mix', 'xyz', 'apple', 'xanadu', 'aardvark'] yields ['xanadu', 'xyz', 'aardvark', 'apple', 'mix']
def sort_x(words):
# Write code here
pass
# Question 3
# ----------
# Given a list of numbers write a function to sum every element in the list. Return the sum
#
def sum_elements(nums):
# Write code here
pass
def main():
print '\nRunning match_ends function...'
test_helper(match_ends(['aba', 'xyz', 'aa', 'x', 'bbb']), 3)
test_helper(match_ends(['', 'x', 'xy', 'xyx', 'xx']), 2)
test_helper(match_ends(['aaa', 'be', 'abc', 'hello']), 1)
print '\nRunning sort_x function...'
test_helper(sort_x(['bbb', 'ccc', 'axx', 'xzz', 'xaa']), ['xaa', 'xzz', 'axx', 'bbb', 'ccc'])
test_helper(sort_x(['ccc', 'bbb', 'aaa', 'xcc', 'xaa']), ['xaa', 'xcc', 'aaa', 'bbb', 'ccc'])
test_helper(sort_x(['mix', 'xyz', 'apple', 'xanadu', 'aardvark']), ['xanadu', 'xyz', 'aardvark', 'apple', 'mix'])
    print '\nRunning sum_elements function...'
test_helper(sum_elements([1, 2, 3, 4, 5]), 15)
test_helper(sum_elements([0, 0]), 0)
test_helper(sum_elements([0, 1, -1, 1]), 1)
if __name__ == '__main__':
main()
|
Add problems for lesson 2from core import test_helper
# Question 1
# ----------
# Given a list of strings, return the count of the number of strings where the string length
# is 2 or more and the first and last chars of the string are the same.
def match_ends(words):
pass
# Question 2
# ----------
# Given a list of strings, return a list with the strings in sorted order,
# except group all the strings that begin with 'x' first. Example:
# ['mix', 'xyz', 'apple', 'xanadu', 'aardvark'] yields ['xanadu', 'xyz', 'aardvark', 'apple', 'mix']
def sort_x(words):
# Write code here
pass
# Question 3
# ----------
# Given a list of numbers write a function to sum every element in the list. Return the sum
#
def sum_elements(nums):
# Write code here
pass
def main():
print '\nRunning match_ends function...'
test_helper(match_ends(['aba', 'xyz', 'aa', 'x', 'bbb']), 3)
test_helper(match_ends(['', 'x', 'xy', 'xyx', 'xx']), 2)
test_helper(match_ends(['aaa', 'be', 'abc', 'hello']), 1)
print '\nRunning sort_x function...'
test_helper(sort_x(['bbb', 'ccc', 'axx', 'xzz', 'xaa']), ['xaa', 'xzz', 'axx', 'bbb', 'ccc'])
test_helper(sort_x(['ccc', 'bbb', 'aaa', 'xcc', 'xaa']), ['xaa', 'xcc', 'aaa', 'bbb', 'ccc'])
test_helper(sort_x(['mix', 'xyz', 'apple', 'xanadu', 'aardvark']), ['xanadu', 'xyz', 'aardvark', 'apple', 'mix'])
    print '\nRunning sum_elements function...'
test_helper(sum_elements([1, 2, 3, 4, 5]), 15)
test_helper(sum_elements([0, 0]), 0)
test_helper(sum_elements([0, 1, -1, 1]), 1)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add problems for lesson 2<commit_after>from core import test_helper
# Question 1
# ----------
# Given a list of strings, return the count of the number of strings where the string length
# is 2 or more and the first and last chars of the string are the same.
def match_ends(words):
pass
# Question 2
# ----------
# Given a list of strings, return a list with the strings in sorted order,
# except group all the strings that begin with 'x' first. Example:
# ['mix', 'xyz', 'apple', 'xanadu', 'aardvark'] yields ['xanadu', 'xyz', 'aardvark', 'apple', 'mix']
def sort_x(words):
# Write code here
pass
# Question 3
# ----------
# Given a list of numbers write a function to sum every element in the list. Return the sum
#
def sum_elements(nums):
# Write code here
pass
def main():
print '\nRunning match_ends function...'
test_helper(match_ends(['aba', 'xyz', 'aa', 'x', 'bbb']), 3)
test_helper(match_ends(['', 'x', 'xy', 'xyx', 'xx']), 2)
test_helper(match_ends(['aaa', 'be', 'abc', 'hello']), 1)
print '\nRunning sort_x function...'
test_helper(sort_x(['bbb', 'ccc', 'axx', 'xzz', 'xaa']), ['xaa', 'xzz', 'axx', 'bbb', 'ccc'])
test_helper(sort_x(['ccc', 'bbb', 'aaa', 'xcc', 'xaa']), ['xaa', 'xcc', 'aaa', 'bbb', 'ccc'])
test_helper(sort_x(['mix', 'xyz', 'apple', 'xanadu', 'aardvark']), ['xanadu', 'xyz', 'aardvark', 'apple', 'mix'])
    print '\nRunning sum_elements function...'
test_helper(sum_elements([1, 2, 3, 4, 5]), 15)
test_helper(sum_elements([0, 0]), 0)
test_helper(sum_elements([0, 1, -1, 1]), 1)
if __name__ == '__main__':
main()
|
|
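One possible set of reference implementations for the three lesson 2 exercises above; this is a sketch, not the bootcamp's official answer key, but each function passes the test_helper cases listed in main():
def match_ends(words):
    # Count strings with length >= 2 whose first and last characters match.
    return sum(1 for w in words if len(w) >= 2 and w[0] == w[-1])

def sort_x(words):
    # Strings beginning with 'x' come first; both groups are sorted alphabetically.
    x_words = sorted(w for w in words if w.startswith('x'))
    others = sorted(w for w in words if not w.startswith('x'))
    return x_words + others

def sum_elements(nums):
    # Accumulate every element of the list and return the total.
    total = 0
    for n in nums:
        total += n
    return total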
3cff7a8eb7dd1babeeb16da06239ad63d4f8c154
|
src/keybar/tests/models/test_vault.py
|
src/keybar/tests/models/test_vault.py
|
import pytest
from keybar.tests.factories.vault import VaultFactory
@pytest.mark.django_db
class TestVault:
def test_simple(self):
vault = VaultFactory.create()
assert str(vault) == '{} ({})'.format(vault.name, vault.slug)
def test_slug(self):
vault = VaultFactory.create(name='This is funny')
vault2 = VaultFactory.create(name='This is funny')
assert vault.slug == 'this-is-funny'
assert vault2.slug == 'this-is-funny-2'
|
Add simple tests for models.Vault
|
Add simple tests for models.Vault
|
Python
|
bsd-3-clause
|
keybar/keybar
|
Add simple tests for models.Vault
|
import pytest
from keybar.tests.factories.vault import VaultFactory
@pytest.mark.django_db
class TestVault:
def test_simple(self):
vault = VaultFactory.create()
assert str(vault) == '{} ({})'.format(vault.name, vault.slug)
def test_slug(self):
vault = VaultFactory.create(name='This is funny')
vault2 = VaultFactory.create(name='This is funny')
assert vault.slug == 'this-is-funny'
assert vault2.slug == 'this-is-funny-2'
|
<commit_before><commit_msg>Add simple tests for models.Vault<commit_after>
|
import pytest
from keybar.tests.factories.vault import VaultFactory
@pytest.mark.django_db
class TestVault:
def test_simple(self):
vault = VaultFactory.create()
assert str(vault) == '{} ({})'.format(vault.name, vault.slug)
def test_slug(self):
vault = VaultFactory.create(name='This is funny')
vault2 = VaultFactory.create(name='This is funny')
assert vault.slug == 'this-is-funny'
assert vault2.slug == 'this-is-funny-2'
|
Add simple tests for models.Vaultimport pytest
from keybar.tests.factories.vault import VaultFactory
@pytest.mark.django_db
class TestVault:
def test_simple(self):
vault = VaultFactory.create()
assert str(vault) == '{} ({})'.format(vault.name, vault.slug)
def test_slug(self):
vault = VaultFactory.create(name='This is funny')
vault2 = VaultFactory.create(name='This is funny')
assert vault.slug == 'this-is-funny'
assert vault2.slug == 'this-is-funny-2'
|
<commit_before><commit_msg>Add simple tests for models.Vault<commit_after>import pytest
from keybar.tests.factories.vault import VaultFactory
@pytest.mark.django_db
class TestVault:
def test_simple(self):
vault = VaultFactory.create()
assert str(vault) == '{} ({})'.format(vault.name, vault.slug)
def test_slug(self):
vault = VaultFactory.create(name='This is funny')
vault2 = VaultFactory.create(name='This is funny')
assert vault.slug == 'this-is-funny'
assert vault2.slug == 'this-is-funny-2'
|
|
3d9c3c63758bf52b22be8c56a50a1cba2a441d12
|
turbustat/tests/test_wrapper.py
|
turbustat/tests/test_wrapper.py
|
# Licensed under an MIT open source license - see LICENSE
from ..statistics import stats_wrapper
from ._testing_data import \
dataset1, dataset2
def test_wrapper():
run_wrapper = stats_wrapper(dataset1, dataset2)
|
Add a test for the wrapper
|
Add a test for the wrapper
|
Python
|
mit
|
Astroua/TurbuStat,e-koch/TurbuStat
|
Add a test for the wrapper
|
# Licensed under an MIT open source license - see LICENSE
from ..statistics import stats_wrapper
from ._testing_data import \
dataset1, dataset2
def test_wrapper():
run_wrapper = stats_wrapper(dataset1, dataset2)
|
<commit_before><commit_msg>Add a test for the wrapper<commit_after>
|
# Licensed under an MIT open source license - see LICENSE
from ..statistics import stats_wrapper
from ._testing_data import \
dataset1, dataset2
def test_wrapper():
run_wrapper = stats_wrapper(dataset1, dataset2)
|
Add a test for the wrapper# Licensed under an MIT open source license - see LICENSE
from ..statistics import stats_wrapper
from ._testing_data import \
dataset1, dataset2
def test_wrapper():
run_wrapper = stats_wrapper(dataset1, dataset2)
|
<commit_before><commit_msg>Add a test for the wrapper<commit_after># Licensed under an MIT open source license - see LICENSE
from ..statistics import stats_wrapper
from ._testing_data import \
dataset1, dataset2
def test_wrapper():
run_wrapper = stats_wrapper(dataset1, dataset2)
|
|
4ebf68bc3ae22f39be6f7c0a260323648537e65c
|
integration/experiment/common_args.py
|
integration/experiment/common_args.py
|
#
# Copyright (c) 2015, 2016, 2017, 2018, 2019, 2020, Intel Corporation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
'''
Common command line arguments for experiments.
'''
def add_output_dir(parser):
parser.add_argument('-o', '--output-dir', dest='output_dir',
action='store', default='.',
help='location for reports and other output files')
def add_nodes(parser):
parser.add_argument('--nodes', dest='nodes',
default=1, type=int,
help='number of nodes to use for launch')
def add_show_details(parser):
parser.add_argument('--show-details', dest='show_details',
action='store_true', default=False,
help='print additional data analysis details')
|
Add helper methods for commonly added commandline options for experiments
|
Add helper methods for commonly added commandline options for experiments
Signed-off-by: Diana Guttman <98d11df29868673c9a01b97a41154316626a31b0@intel.com>
|
Python
|
bsd-3-clause
|
geopm/geopm,cmcantalupo/geopm,cmcantalupo/geopm,cmcantalupo/geopm,geopm/geopm,geopm/geopm,cmcantalupo/geopm,geopm/geopm,geopm/geopm,cmcantalupo/geopm
|
Add helper methods for commonly added commandline options for experiments
Signed-off-by: Diana Guttman <98d11df29868673c9a01b97a41154316626a31b0@intel.com>
|
#
# Copyright (c) 2015, 2016, 2017, 2018, 2019, 2020, Intel Corporation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
'''
Common command line arguments for experiments.
'''
def add_output_dir(parser):
parser.add_argument('-o', '--output-dir', dest='output_dir',
action='store', default='.',
help='location for reports and other output files')
def add_nodes(parser):
parser.add_argument('--nodes', dest='nodes',
default=1, type=int,
help='number of nodes to use for launch')
def add_show_details(parser):
parser.add_argument('--show-details', dest='show_details',
action='store_true', default=False,
help='print additional data analysis details')
|
<commit_before><commit_msg>Add helper methods for commonly added commandline options for experiments
Signed-off-by: Diana Guttman <98d11df29868673c9a01b97a41154316626a31b0@intel.com><commit_after>
|
#
# Copyright (c) 2015, 2016, 2017, 2018, 2019, 2020, Intel Corporation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
'''
Common command line arguments for experiments.
'''
def add_output_dir(parser):
parser.add_argument('-o', '--output-dir', dest='output_dir',
action='store', default='.',
help='location for reports and other output files')
def add_nodes(parser):
parser.add_argument('--nodes', dest='nodes',
default=1, type=int,
help='number of nodes to use for launch')
def add_show_details(parser):
parser.add_argument('--show-details', dest='show_details',
action='store_true', default=False,
help='print additional data analysis details')
|
Add helper methods for commonly added commandline options for experiments
Signed-off-by: Diana Guttman <98d11df29868673c9a01b97a41154316626a31b0@intel.com>#
# Copyright (c) 2015, 2016, 2017, 2018, 2019, 2020, Intel Corporation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
'''
Common command line arguments for experiments.
'''
def add_output_dir(parser):
parser.add_argument('-o', '--output-dir', dest='output_dir',
action='store', default='.',
help='location for reports and other output files')
def add_nodes(parser):
parser.add_argument('--nodes', dest='nodes',
default=1, type=int,
help='number of nodes to use for launch')
def add_show_details(parser):
parser.add_argument('--show-details', dest='show_details',
action='store_true', default=False,
help='print additional data analysis details')
|
<commit_before><commit_msg>Add helper methods for commonly added commandline options for experiments
Signed-off-by: Diana Guttman <98d11df29868673c9a01b97a41154316626a31b0@intel.com><commit_after>#
# Copyright (c) 2015, 2016, 2017, 2018, 2019, 2020, Intel Corporation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
'''
Common command line arguments for experiments.
'''
def add_output_dir(parser):
parser.add_argument('-o', '--output-dir', dest='output_dir',
action='store', default='.',
help='location for reports and other output files')
def add_nodes(parser):
parser.add_argument('--nodes', dest='nodes',
default=1, type=int,
help='number of nodes to use for launch')
def add_show_details(parser):
parser.add_argument('--show-details', dest='show_details',
action='store_true', default=False,
help='print additional data analysis details')
|
|
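A minimal sketch of how these helpers are intended to compose on a single parser; the launcher below is hypothetical and re-declares two of the helpers so the snippet runs on its own:

import argparse

# Local copies of the helpers above, so this sketch is self-contained.
def add_output_dir(parser):
    parser.add_argument('-o', '--output-dir', dest='output_dir',
                        action='store', default='.',
                        help='location for reports and other output files')

def add_nodes(parser):
    parser.add_argument('--nodes', dest='nodes',
                        default=1, type=int,
                        help='number of nodes to use for launch')

# Hypothetical experiment launcher reusing the shared argument helpers.
parser = argparse.ArgumentParser(description='hypothetical experiment launcher')
add_output_dir(parser)
add_nodes(parser)
args = parser.parse_args(['--nodes', '4', '-o', '/tmp/reports'])
print(args.nodes, args.output_dir)  # -> 4 /tmp/reports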
ae580fb0dde2b6cce956733beb51dc4187183bab
|
flexget/plugins/cli/check_version.py
|
flexget/plugins/cli/check_version.py
|
from __future__ import unicode_literals, division, absolute_import
import re
from argparse import _VersionAction
import flexget
from flexget.utils import requests
from flexget.plugin import register_parser_option
class CheckVersion(_VersionAction):
def __call__(self, parser, namespace, values, option_string=None):
messages = []
try:
page = requests.get('http://download.flexget.com')
except requests.RequestException:
messages.append('Error getting latest version number from download.flexget.com')
else:
ver = re.search(r'FlexGet-([\d\.]*)\.tar\.gz', page.text).group(1)
if flexget.__version__ == ver:
messages.append('You are on the latest version. (%s)' % ver)
else:
messages.append('You are on: %s' % flexget.__version__)
messages.append('Latest release: %s' % ver)
parser.exit(message='\n'.join(messages))
register_parser_option('--check-version', action=CheckVersion, help='Check for latest version.')
|
Add --check-version to see if you are on latest release.
|
Add --check-version to see if you are on latest release.
|
Python
|
mit
|
dsemi/Flexget,tarzasai/Flexget,crawln45/Flexget,camon/Flexget,jawilson/Flexget,offbyone/Flexget,LynxyssCZ/Flexget,qvazzler/Flexget,ianstalk/Flexget,Danfocus/Flexget,grrr2/Flexget,cvium/Flexget,Flexget/Flexget,antivirtel/Flexget,ZefQ/Flexget,gazpachoking/Flexget,vfrc2/Flexget,qk4l/Flexget,tarzasai/Flexget,offbyone/Flexget,JorisDeRieck/Flexget,ratoaq2/Flexget,JorisDeRieck/Flexget,jacobmetrick/Flexget,voriux/Flexget,tarzasai/Flexget,tobinjt/Flexget,sean797/Flexget,poulpito/Flexget,ZefQ/Flexget,crawln45/Flexget,asm0dey/Flexget,Flexget/Flexget,tvcsantos/Flexget,thalamus/Flexget,grrr2/Flexget,malkavi/Flexget,patsissons/Flexget,oxc/Flexget,xfouloux/Flexget,ratoaq2/Flexget,Danfocus/Flexget,drwyrm/Flexget,JorisDeRieck/Flexget,jawilson/Flexget,tobinjt/Flexget,cvium/Flexget,v17al/Flexget,ibrahimkarahan/Flexget,cvium/Flexget,voriux/Flexget,drwyrm/Flexget,malkavi/Flexget,vfrc2/Flexget,tsnoam/Flexget,ibrahimkarahan/Flexget,spencerjanssen/Flexget,poulpito/Flexget,v17al/Flexget,sean797/Flexget,antivirtel/Flexget,poulpito/Flexget,OmgOhnoes/Flexget,grrr2/Flexget,oxc/Flexget,LynxyssCZ/Flexget,tobinjt/Flexget,ianstalk/Flexget,dsemi/Flexget,dsemi/Flexget,ibrahimkarahan/Flexget,X-dark/Flexget,Pretagonist/Flexget,tvcsantos/Flexget,Danfocus/Flexget,ianstalk/Flexget,jawilson/Flexget,sean797/Flexget,xfouloux/Flexget,offbyone/Flexget,LynxyssCZ/Flexget,jawilson/Flexget,ZefQ/Flexget,gazpachoking/Flexget,xfouloux/Flexget,Flexget/Flexget,oxc/Flexget,patsissons/Flexget,OmgOhnoes/Flexget,Pretagonist/Flexget,qk4l/Flexget,qk4l/Flexget,crawln45/Flexget,asm0dey/Flexget,vfrc2/Flexget,tobinjt/Flexget,camon/Flexget,spencerjanssen/Flexget,X-dark/Flexget,v17al/Flexget,ratoaq2/Flexget,spencerjanssen/Flexget,jacobmetrick/Flexget,qvazzler/Flexget,Danfocus/Flexget,OmgOhnoes/Flexget,malkavi/Flexget,antivirtel/Flexget,malkavi/Flexget,patsissons/Flexget,X-dark/Flexget,thalamus/Flexget,lildadou/Flexget,JorisDeRieck/Flexget,drwyrm/Flexget,crawln45/Flexget,Pretagonist/Flexget,tsnoam/Flexget,thalamus/Flexget,lildadou/Flexget,jacobmetrick/Flexget,qvazzler/Flexget,LynxyssCZ/Flexget,lildadou/Flexget,Flexget/Flexget,tsnoam/Flexget,asm0dey/Flexget
|
Add --check-version to see if you are on latest release.
|
from __future__ import unicode_literals, division, absolute_import
import re
from argparse import _VersionAction
import flexget
from flexget.utils import requests
from flexget.plugin import register_parser_option
class CheckVersion(_VersionAction):
def __call__(self, parser, namespace, values, option_string=None):
messages = []
try:
page = requests.get('http://download.flexget.com')
except requests.RequestException:
messages.append('Error getting latest version number from download.flexget.com')
else:
ver = re.search(r'FlexGet-([\d\.]*)\.tar\.gz', page.text).group(1)
if flexget.__version__ == ver:
messages.append('You are on the latest version. (%s)' % ver)
else:
messages.append('You are on: %s' % flexget.__version__)
messages.append('Latest release: %s' % ver)
parser.exit(message='\n'.join(messages))
register_parser_option('--check-version', action=CheckVersion, help='Check for latest version.')
|
<commit_before><commit_msg>Add --check-version to see if you are on latest release.<commit_after>
|
from __future__ import unicode_literals, division, absolute_import
import re
from argparse import _VersionAction
import flexget
from flexget.utils import requests
from flexget.plugin import register_parser_option
class CheckVersion(_VersionAction):
def __call__(self, parser, namespace, values, option_string=None):
messages = []
try:
page = requests.get('http://download.flexget.com')
except requests.RequestException:
messages.append('Error getting latest version number from download.flexget.com')
else:
ver = re.search(r'FlexGet-([\d\.]*)\.tar\.gz', page.text).group(1)
if flexget.__version__ == ver:
messages.append('You are on the latest version. (%s)' % ver)
else:
messages.append('You are on: %s' % flexget.__version__)
messages.append('Latest release: %s' % ver)
parser.exit(message='\n'.join(messages))
register_parser_option('--check-version', action=CheckVersion, help='Check for latest version.')
|
Add --check-version to see if you are on latest release.from __future__ import unicode_literals, division, absolute_import
import re
from argparse import _VersionAction
import flexget
from flexget.utils import requests
from flexget.plugin import register_parser_option
class CheckVersion(_VersionAction):
def __call__(self, parser, namespace, values, option_string=None):
messages = []
try:
page = requests.get('http://download.flexget.com')
except requests.RequestException:
messages.append('Error getting latest version number from download.flexget.com')
else:
ver = re.search(r'FlexGet-([\d\.]*)\.tar\.gz', page.text).group(1)
if flexget.__version__ == ver:
messages.append('You are on the latest version. (%s)' % ver)
else:
messages.append('You are on: %s' % flexget.__version__)
messages.append('Latest release: %s' % ver)
parser.exit(message='\n'.join(messages))
register_parser_option('--check-version', action=CheckVersion, help='Check for latest version.')
|
<commit_before><commit_msg>Add --check-version to see if you are on latest release.<commit_after>from __future__ import unicode_literals, division, absolute_import
import re
from argparse import _VersionAction
import flexget
from flexget.utils import requests
from flexget.plugin import register_parser_option
class CheckVersion(_VersionAction):
def __call__(self, parser, namespace, values, option_string=None):
messages = []
try:
page = requests.get('http://download.flexget.com')
except requests.RequestException:
messages.append('Error getting latest version number from download.flexget.com')
else:
ver = re.search(r'FlexGet-([\d\.]*)\.tar\.gz', page.text).group(1)
if flexget.__version__ == ver:
messages.append('You are on the latest version. (%s)' % ver)
else:
messages.append('You are on: %s' % flexget.__version__)
messages.append('Latest release: %s' % ver)
parser.exit(message='\n'.join(messages))
register_parser_option('--check-version', action=CheckVersion, help='Check for latest version.')
|
|
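The version check above boils down to a single regex over the download page; a self-contained sketch of that step, with a fabricated page body standing in for download.flexget.com:

import re

# Made-up HTML snippet in place of the real download page.
page_text = '<a href="FlexGet-1.2.337.tar.gz">FlexGet-1.2.337.tar.gz</a>'

match = re.search(r'FlexGet-([\d\.]*)\.tar\.gz', page_text)
latest = match.group(1) if match else None
print(latest)  # -> 1.2.337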
f0c2494aeec0040fab6276ba0ddbb0812d27e09a
|
scripts/plots.py
|
scripts/plots.py
|
"""
Plot user tweet activity.
"""
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import twitterproj
def plot_counts(collection, ax=None):
if ax is None:
ax = plt.gca()
tweets, db, conn = twitterproj.connect()
counts = []
for doc in collection.find():
counts.append(doc['count'])
mean, std = np.mean(counts), np.std(counts)
bincounted = np.bincount(counts)
out = ax.plot(bincounted)
return out, mean, std
def tweet_counts(hashtags=True, ax=None):
tweets, db, conn = twitterproj.connect()
if hashtags:
coll = db.userHashtagTweetCount
else:
coll = db.userTweetCount
out, mean, std = plot_counts(coll, ax)
ax = out[0].axes
ax.set_xlabel('A user tweeted $x$ times')
ax.set_ylabel('Number of users tweeting $x$ times')
ax.set_yscale('log')
ax.set_xscale('log')
if hashtags:
mod = 'Hashtag'
ax.set_title('Hashtagged Tweets')
else:
mod = ''
ax.set_title('All Tweets')
ax.figure.savefig('user{0}TweetCounts.pdf'.format(mod))
def hashtag_counts(ax=None):
tweets, db, conn = twitterproj.connect()
coll = db.hashtags
out, mean, std = plot_counts(coll, ax)
ax = out[0].axes
ax.set_xlabel('A hashtag appears $x$ times')
ax.set_ylabel('Number of hashtags appearing $x$ times')
ax.set_yscale('log')
ax.set_xscale('log')
ax.figure.savefig('hashtagCounts.pdf')
if __name__ == '__main__':
f, ax = plt.subplots()
tweet_counts(hashtags=True, ax=ax)
f, ax = plt.subplots()
tweet_counts(hashtags=False, ax=ax)
f, ax = plt.subplots()
hashtag_counts(ax=ax)
|
Add simple scripts to plot frequencies.
|
Add simple scripts to plot frequencies.
|
Python
|
unlicense
|
chebee7i/twitter,chebee7i/twitter,chebee7i/twitter
|
Add simple scripts to plot frequencies.
|
"""
Plot user tweet activity.
"""
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import twitterproj
def plot_counts(collection, ax=None):
if ax is None:
ax = plt.gca()
tweets, db, conn = twitterproj.connect()
counts = []
for doc in collection.find():
counts.append(doc['count'])
mean, std = np.mean(counts), np.std(counts)
bincounted = np.bincount(counts)
out = ax.plot(bincounted)
return out, mean, std
def tweet_counts(hashtags=True, ax=None):
tweets, db, conn = twitterproj.connect()
if hashtags:
coll = db.userHashtagTweetCount
else:
coll = db.userTweetCount
out, mean, std = plot_counts(coll, ax)
ax = out[0].axes
ax.set_xlabel('A user tweeted $x$ times')
ax.set_ylabel('Number of users tweeting $x$ times')
ax.set_yscale('log')
ax.set_xscale('log')
if hashtags:
mod = 'Hashtag'
ax.set_title('Hashtagged Tweets')
else:
mod = ''
ax.set_title('All Tweets')
ax.figure.savefig('user{0}TweetCounts.pdf'.format(mod))
def hashtag_counts(ax=None):
tweets, db, conn = twitterproj.connect()
coll = db.hashtags
out, mean, std = plot_counts(coll, ax)
ax = out[0].axes
ax.set_xlabel('A hashtag appears $x$ times')
ax.set_ylabel('Number of hashtags appearing $x$ times')
ax.set_yscale('log')
ax.set_xscale('log')
ax.figure.savefig('hashtagCounts.pdf')
if __name__ == '__main__':
f, ax = plt.subplots()
tweet_counts(hashtags=True, ax=ax)
f, ax = plt.subplots()
tweet_counts(hashtags=False, ax=ax)
f, ax = plt.subplots()
hashtag_counts(ax=ax)
|
<commit_before><commit_msg>Add simple scripts to plot frequencies.<commit_after>
|
"""
Plot user tweet activity.
"""
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import twitterproj
def plot_counts(collection, ax=None):
if ax is None:
ax = plt.gca()
tweets, db, conn = twitterproj.connect()
counts = []
for doc in collection.find():
counts.append(doc['count'])
mean, std = np.mean(counts), np.std(counts)
bincounted = np.bincount(counts)
out = ax.plot(bincounted)
return out, mean, std
def tweet_counts(hashtags=True, ax=None):
tweets, db, conn = twitterproj.connect()
if hashtags:
coll = db.userHashtagTweetCount
else:
coll = db.userTweetCount
out, mean, std = plot_counts(coll, ax)
ax = out[0].axes
ax.set_xlabel('A user tweeted $x$ times')
ax.set_ylabel('Number of users tweeting $x$ times')
ax.set_yscale('log')
ax.set_xscale('log')
if hashtags:
mod = 'Hashtag'
ax.set_title('Hashtagged Tweets')
else:
mod = ''
ax.set_title('All Tweets')
ax.figure.savefig('user{0}TweetCounts.pdf'.format(mod))
def hashtag_counts(ax=None):
tweets, db, conn = twitterproj.connect()
coll = db.hashtags
out, mean, std = plot_counts(coll, ax)
ax = out[0].axes
ax.set_xlabel('A hashtag appears $x$ times')
ax.set_ylabel('Number of hashtags appearing $x$ times')
ax.set_yscale('log')
ax.set_xscale('log')
ax.figure.savefig('hashtagCounts.pdf')
if __name__ == '__main__':
f, ax = plt.subplots()
tweet_counts(hashtags=True, ax=ax)
f, ax = plt.subplots()
tweet_counts(hashtags=False, ax=ax)
f, ax = plt.subplots()
hashtag_counts(ax=ax)
|
Add simple scripts to plot frequencies."""
Plot user tweet activity.
"""
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import twitterproj
def plot_counts(collection, ax=None):
if ax is None:
ax = plt.gca()
tweets, db, conn = twitterproj.connect()
counts = []
for doc in collection.find():
counts.append(doc['count'])
mean, std = np.mean(counts), np.std(counts)
bincounted = np.bincount(counts)
out = ax.plot(bincounted)
return out, mean, std
def tweet_counts(hashtags=True, ax=None):
tweets, db, conn = twitterproj.connect()
if hashtags:
coll = db.userHashtagTweetCount
else:
coll = db.userTweetCount
out, mean, std = plot_counts(coll, ax)
ax = out[0].axes
ax.set_xlabel('A user tweeted $x$ times')
ax.set_ylabel('Number of users tweeting $x$ times')
ax.set_yscale('log')
ax.set_xscale('log')
if hashtags:
mod = 'Hashtag'
ax.set_title('Hashtagged Tweets')
else:
mod = ''
ax.set_title('All Tweets')
ax.figure.savefig('user{0}TweetCounts.pdf'.format(mod))
def hashtag_counts(ax=None):
tweets, db, conn = twitterproj.connect()
coll = db.hashtags
out, mean, std = plot_counts(coll, ax)
ax = out[0].axes
ax.set_xlabel('A hashtag appears $x$ times')
ax.set_ylabel('Number of hashtags appearing $x$ times')
ax.set_yscale('log')
ax.set_xscale('log')
ax.figure.savefig('hashtagCounts.pdf')
if __name__ == '__main__':
f, ax = plt.subplots()
tweet_counts(hashtags=True, ax=ax)
f, ax = plt.subplots()
tweet_counts(hashtags=False, ax=ax)
f, ax = plt.subplots()
hashtag_counts(ax=ax)
|
<commit_before><commit_msg>Add simple scripts to plot frequencies.<commit_after>"""
Plot user tweet activity.
"""
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
import twitterproj
def plot_counts(collection, ax=None):
if ax is None:
ax = plt.gca()
tweets, db, conn = twitterproj.connect()
counts = []
for doc in collection.find():
counts.append(doc['count'])
mean, std = np.mean(counts), np.std(counts)
bincounted = np.bincount(counts)
out = ax.plot(bincounted)
return out, mean, std
def tweet_counts(hashtags=True, ax=None):
tweets, db, conn = twitterproj.connect()
if hashtags:
coll = db.userHashtagTweetCount
else:
coll = db.userTweetCount
out, mean, std = plot_counts(coll, ax)
ax = out[0].axes
ax.set_xlabel('A user tweeted $x$ times')
ax.set_ylabel('Number of users tweeting $x$ times')
ax.set_yscale('log')
ax.set_xscale('log')
if hashtags:
mod = 'Hashtag'
ax.set_title('Hashtagged Tweets')
else:
mod = ''
ax.set_title('All Tweets')
ax.figure.savefig('user{0}TweetCounts.pdf'.format(mod))
def hashtag_counts(ax=None):
tweets, db, conn = twitterproj.connect()
coll = db.hashtags
out, mean, std = plot_counts(coll, ax)
ax = out[0].axes
ax.set_xlabel('A hashtag appears $x$ times')
ax.set_ylabel('Number of hashtags appearing $x$ times')
ax.set_yscale('log')
ax.set_xscale('log')
ax.figure.savefig('hashtagCounts.pdf')
if __name__ == '__main__':
f, ax = plt.subplots()
tweet_counts(hashtags=True, ax=ax)
f, ax = plt.subplots()
tweet_counts(hashtags=False, ax=ax)
f, ax = plt.subplots()
hashtag_counts(ax=ax)
|
|
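The plots above reduce to np.bincount on a list of counts followed by a log-log line plot; a stand-alone sketch using synthetic counts instead of the MongoDB collection:

import numpy as np
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)
counts = rng.zipf(2.0, size=10_000)      # synthetic heavy-tailed activity counts
counts = counts[counts < 1_000]          # drop the extreme tail

bincounted = np.bincount(counts)         # bincounted[x] = number of users with count x
fig, ax = plt.subplots()
ax.plot(bincounted)
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_xlabel('A user tweeted $x$ times')
ax.set_ylabel('Number of users tweeting $x$ times')
fig.savefig('syntheticTweetCounts.pdf')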
9b4053dde1cd1baef2a71adbeb7ba1338a2b5093
|
scripts/consistency/find_bad_registrations.py
|
scripts/consistency/find_bad_registrations.py
|
"""
"""
from website.app import init_app
from website import models
from framework import Q
app = init_app()
known_schemas = [
'Open-Ended_Registration',
'OSF-Standard_Pre-Data_Collection_Registration',
'Replication_Recipe_(Brandt_et_al__!dot!__,_2013):_Pre-Registration',
'Replication_Recipe_(Brandt_et_al__!dot!__,_2013):_Post-Completion',
]
def find_bad_registrations():
"""Find registrations with unexpected numbers of template keys or
outdated templates.
"""
registrations = models.Node.find(
Q('is_registration', 'eq', True)
)
for registration in registrations:
meta = registration.registered_meta or {}
keys = meta.keys()
if len(keys) != 1:
print 'Inconsistency: Number of keys on project {} ({}) != 1'.format(
registration.title,
registration._primary_key,
)
continue
if keys[0] not in known_schemas:
print 'Inconsistency: Registration schema {} on project {} ({}) not in known schemas'.format(
keys[0],
registration.title,
registration._primary_key,
)
if __name__ == '__main__':
find_bad_registrations()
|
Add script to find bad / outdated registrations
|
Add script to find bad / outdated registrations
|
Python
|
apache-2.0
|
lamdnhan/osf.io,KAsante95/osf.io,jolene-esposito/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,cwisecarver/osf.io,revanthkolli/osf.io,fabianvf/osf.io,GaryKriebel/osf.io,danielneis/osf.io,RomanZWang/osf.io,CenterForOpenScience/osf.io,petermalcolm/osf.io,HalcyonChimera/osf.io,kch8qx/osf.io,GageGaskins/osf.io,hmoco/osf.io,ticklemepierce/osf.io,doublebits/osf.io,jinluyuan/osf.io,ticklemepierce/osf.io,arpitar/osf.io,samchrisinger/osf.io,doublebits/osf.io,MerlinZhang/osf.io,TomBaxter/osf.io,ckc6cz/osf.io,laurenrevere/osf.io,crcresearch/osf.io,reinaH/osf.io,samchrisinger/osf.io,cslzchen/osf.io,ticklemepierce/osf.io,danielneis/osf.io,adlius/osf.io,danielneis/osf.io,Nesiehr/osf.io,zkraime/osf.io,dplorimer/osf,GageGaskins/osf.io,mattclark/osf.io,TomHeatwole/osf.io,abought/osf.io,mluke93/osf.io,brandonPurvis/osf.io,billyhunt/osf.io,jnayak1/osf.io,samchrisinger/osf.io,acshi/osf.io,asanfilippo7/osf.io,mattclark/osf.io,Nesiehr/osf.io,fabianvf/osf.io,zkraime/osf.io,dplorimer/osf,GaryKriebel/osf.io,dplorimer/osf,jeffreyliu3230/osf.io,acshi/osf.io,zachjanicki/osf.io,himanshuo/osf.io,RomanZWang/osf.io,kwierman/osf.io,icereval/osf.io,hmoco/osf.io,mfraezz/osf.io,RomanZWang/osf.io,haoyuchen1992/osf.io,crcresearch/osf.io,zachjanicki/osf.io,cldershem/osf.io,baylee-d/osf.io,jolene-esposito/osf.io,asanfilippo7/osf.io,brandonPurvis/osf.io,jinluyuan/osf.io,jinluyuan/osf.io,saradbowman/osf.io,Ghalko/osf.io,kch8qx/osf.io,DanielSBrown/osf.io,aaxelb/osf.io,barbour-em/osf.io,jmcarp/osf.io,CenterForOpenScience/osf.io,lyndsysimon/osf.io,cosenal/osf.io,zkraime/osf.io,lamdnhan/osf.io,caseyrygt/osf.io,baylee-d/osf.io,caseyrygt/osf.io,amyshi188/osf.io,rdhyee/osf.io,kwierman/osf.io,felliott/osf.io,petermalcolm/osf.io,adlius/osf.io,rdhyee/osf.io,erinspace/osf.io,cldershem/osf.io,mluo613/osf.io,Ghalko/osf.io,SSJohns/osf.io,zamattiac/osf.io,sbt9uc/osf.io,jnayak1/osf.io,caseyrollins/osf.io,revanthkolli/osf.io,GaryKriebel/osf.io,fabianvf/osf.io,caneruguz/osf.io,brandonPurvis/osf.io,kch8qx/osf.io,sloria/osf.io,TomBaxter/osf.io,acshi/osf.io,acshi/osf.io,bdyetton/prettychart,samanehsan/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,chrisseto/osf.io,zamattiac/osf.io,Ghalko/osf.io,leb2dg/osf.io,bdyetton/prettychart,DanielSBrown/osf.io,chennan47/osf.io,barbour-em/osf.io,petermalcolm/osf.io,GaryKriebel/osf.io,jnayak1/osf.io,TomHeatwole/osf.io,asanfilippo7/osf.io,zamattiac/osf.io,mattclark/osf.io,acshi/osf.io,GageGaskins/osf.io,lamdnhan/osf.io,njantrania/osf.io,zamattiac/osf.io,saradbowman/osf.io,HarryRybacki/osf.io,doublebits/osf.io,cldershem/osf.io,sbt9uc/osf.io,jeffreyliu3230/osf.io,TomHeatwole/osf.io,mluo613/osf.io,adlius/osf.io,alexschiller/osf.io,mluke93/osf.io,arpitar/osf.io,AndrewSallans/osf.io,AndrewSallans/osf.io,mfraezz/osf.io,binoculars/osf.io,chrisseto/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,kushG/osf.io,aaxelb/osf.io,caseyrollins/osf.io,wearpants/osf.io,kch8qx/osf.io,billyhunt/osf.io,GageGaskins/osf.io,alexschiller/osf.io,ZobairAlijan/osf.io,mluo613/osf.io,doublebits/osf.io,aaxelb/osf.io,jolene-esposito/osf.io,sloria/osf.io,abought/osf.io,HarryRybacki/osf.io,icereval/osf.io,haoyuchen1992/osf.io,reinaH/osf.io,haoyuchen1992/osf.io,Nesiehr/osf.io,barbour-em/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,TomHeatwole/osf.io,leb2dg/osf.io,jinluyuan/osf.io,bdyetton/prettychart,HalcyonChimera/osf.io,KAsante95/osf.io,abought/osf.io,wearpants/osf.io,cosenal/osf.io,sbt9uc/osf.io,njantrania/osf.io,Nesiehr/osf.io,arpitar/osf.io,chennan47/osf.io,lyndsysimon/osf.io,RomanZWang/osf.io,mluo613/osf.io,cslzchen/osf.io,chennan47/osf.io,cwisecarver/osf
.io,mluke93/osf.io,hmoco/osf.io,brandonPurvis/osf.io,kushG/osf.io,jnayak1/osf.io,zachjanicki/osf.io,jeffreyliu3230/osf.io,danielneis/osf.io,doublebits/osf.io,alexschiller/osf.io,himanshuo/osf.io,caneruguz/osf.io,mluke93/osf.io,HalcyonChimera/osf.io,njantrania/osf.io,barbour-em/osf.io,erinspace/osf.io,emetsger/osf.io,samchrisinger/osf.io,MerlinZhang/osf.io,KAsante95/osf.io,laurenrevere/osf.io,monikagrabowska/osf.io,jmcarp/osf.io,binoculars/osf.io,zkraime/osf.io,reinaH/osf.io,samanehsan/osf.io,dplorimer/osf,CenterForOpenScience/osf.io,leb2dg/osf.io,bdyetton/prettychart,jeffreyliu3230/osf.io,ZobairAlijan/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,mluo613/osf.io,MerlinZhang/osf.io,KAsante95/osf.io,emetsger/osf.io,cslzchen/osf.io,mfraezz/osf.io,abought/osf.io,emetsger/osf.io,felliott/osf.io,brianjgeiger/osf.io,cwisecarver/osf.io,aaxelb/osf.io,KAsante95/osf.io,hmoco/osf.io,wearpants/osf.io,lyndsysimon/osf.io,arpitar/osf.io,CenterForOpenScience/osf.io,revanthkolli/osf.io,ZobairAlijan/osf.io,wearpants/osf.io,caseyrygt/osf.io,pattisdr/osf.io,kushG/osf.io,felliott/osf.io,binoculars/osf.io,jolene-esposito/osf.io,njantrania/osf.io,emetsger/osf.io,ckc6cz/osf.io,baylee-d/osf.io,monikagrabowska/osf.io,cosenal/osf.io,chrisseto/osf.io,brandonPurvis/osf.io,reinaH/osf.io,kch8qx/osf.io,icereval/osf.io,haoyuchen1992/osf.io,billyhunt/osf.io,sloria/osf.io,ckc6cz/osf.io,lyndsysimon/osf.io,caseyrygt/osf.io,cslzchen/osf.io,DanielSBrown/osf.io,laurenrevere/osf.io,himanshuo/osf.io,Johnetordoff/osf.io,fabianvf/osf.io,HarryRybacki/osf.io,Ghalko/osf.io,caneruguz/osf.io,amyshi188/osf.io,adlius/osf.io,cosenal/osf.io,leb2dg/osf.io,ckc6cz/osf.io,ticklemepierce/osf.io,erinspace/osf.io,kwierman/osf.io,samanehsan/osf.io,alexschiller/osf.io,monikagrabowska/osf.io,DanielSBrown/osf.io,pattisdr/osf.io,MerlinZhang/osf.io,billyhunt/osf.io,SSJohns/osf.io,Johnetordoff/osf.io,sbt9uc/osf.io,jmcarp/osf.io,jmcarp/osf.io,amyshi188/osf.io,amyshi188/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,crcresearch/osf.io,petermalcolm/osf.io,chrisseto/osf.io,RomanZWang/osf.io,zachjanicki/osf.io,kushG/osf.io,brianjgeiger/osf.io,rdhyee/osf.io,himanshuo/osf.io,SSJohns/osf.io,caseyrollins/osf.io,samanehsan/osf.io,ZobairAlijan/osf.io,lamdnhan/osf.io,kwierman/osf.io,asanfilippo7/osf.io,mfraezz/osf.io,revanthkolli/osf.io,billyhunt/osf.io,rdhyee/osf.io,cldershem/osf.io,felliott/osf.io,HarryRybacki/osf.io,brianjgeiger/osf.io
|
Add script to find bad / outdated registrations
|
"""
"""
from website.app import init_app
from website import models
from framework import Q
app = init_app()
known_schemas = [
'Open-Ended_Registration',
'OSF-Standard_Pre-Data_Collection_Registration',
'Replication_Recipe_(Brandt_et_al__!dot!__,_2013):_Pre-Registration',
'Replication_Recipe_(Brandt_et_al__!dot!__,_2013):_Post-Completion',
]
def find_bad_registrations():
"""Find registrations with unexpected numbers of template keys or
outdated templates.
"""
registrations = models.Node.find(
Q('is_registration', 'eq', True)
)
for registration in registrations:
meta = registration.registered_meta or {}
keys = meta.keys()
if len(keys) != 1:
print 'Inconsistency: Number of keys on project {} ({}) != 1'.format(
registration.title,
registration._primary_key,
)
continue
if keys[0] not in known_schemas:
print 'Inconsistency: Registration schema {} on project {} ({}) not in known schemas'.format(
keys[0],
registration.title,
registration._primary_key,
)
if __name__ == '__main__':
find_bad_registrations()
|
<commit_before><commit_msg>Add script to find bad / outdated registrations<commit_after>
|
"""
"""
from website.app import init_app
from website import models
from framework import Q
app = init_app()
known_schemas = [
'Open-Ended_Registration',
'OSF-Standard_Pre-Data_Collection_Registration',
'Replication_Recipe_(Brandt_et_al__!dot!__,_2013):_Pre-Registration',
'Replication_Recipe_(Brandt_et_al__!dot!__,_2013):_Post-Completion',
]
def find_bad_registrations():
"""Find registrations with unexpected numbers of template keys or
outdated templates.
"""
registrations = models.Node.find(
Q('is_registration', 'eq', True)
)
for registration in registrations:
meta = registration.registered_meta or {}
keys = meta.keys()
if len(keys) != 1:
print 'Inconsistency: Number of keys on project {} ({}) != 1'.format(
registration.title,
registration._primary_key,
)
continue
if keys[0] not in known_schemas:
print 'Inconsistency: Registration schema {} on project {} ({}) not in known schemas'.format(
keys[0],
registration.title,
registration._primary_key,
)
if __name__ == '__main__':
find_bad_registrations()
|
Add script to find bad / outdated registrations"""
"""
from website.app import init_app
from website import models
from framework import Q
app = init_app()
known_schemas = [
'Open-Ended_Registration',
'OSF-Standard_Pre-Data_Collection_Registration',
'Replication_Recipe_(Brandt_et_al__!dot!__,_2013):_Pre-Registration',
'Replication_Recipe_(Brandt_et_al__!dot!__,_2013):_Post-Completion',
]
def find_bad_registrations():
"""Find registrations with unexpected numbers of template keys or
outdated templates.
"""
registrations = models.Node.find(
Q('is_registration', 'eq', True)
)
for registration in registrations:
meta = registration.registered_meta or {}
keys = meta.keys()
if len(keys) != 1:
print 'Inconsistency: Number of keys on project {} ({}) != 1'.format(
registration.title,
registration._primary_key,
)
continue
if keys[0] not in known_schemas:
print 'Inconsistency: Registration schema {} on project {} ({}) not in known schemas'.format(
keys[0],
registration.title,
registration._primary_key,
)
if __name__ == '__main__':
find_bad_registrations()
|
<commit_before><commit_msg>Add script to find bad / outdated registrations<commit_after>"""
"""
from website.app import init_app
from website import models
from framework import Q
app = init_app()
known_schemas = [
'Open-Ended_Registration',
'OSF-Standard_Pre-Data_Collection_Registration',
'Replication_Recipe_(Brandt_et_al__!dot!__,_2013):_Pre-Registration',
'Replication_Recipe_(Brandt_et_al__!dot!__,_2013):_Post-Completion',
]
def find_bad_registrations():
"""Find registrations with unexpected numbers of template keys or
outdated templates.
"""
registrations = models.Node.find(
Q('is_registration', 'eq', True)
)
for registration in registrations:
meta = registration.registered_meta or {}
keys = meta.keys()
if len(keys) != 1:
print 'Inconsistency: Number of keys on project {} ({}) != 1'.format(
registration.title,
registration._primary_key,
)
continue
if keys[0] not in known_schemas:
print 'Inconsistency: Registration schema {} on project {} ({}) not in known schemas'.format(
keys[0],
registration.title,
registration._primary_key,
)
if __name__ == '__main__':
find_bad_registrations()
|
|
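The consistency rule itself needs no OSF app; a minimal sketch of the same check against plain dicts, with fabricated registered_meta documents:

known_schemas = [
    'Open-Ended_Registration',
    'OSF-Standard_Pre-Data_Collection_Registration',
]

# Fabricated stand-ins for Node.registered_meta.
registrations = [
    {'Open-Ended_Registration': {}},                    # consistent
    {},                                                 # no keys -> flagged
    {'Old_Schema': {}, 'Open-Ended_Registration': {}},  # two keys -> flagged
    {'Unknown_Schema': {}},                             # unknown schema -> flagged
]

for meta in registrations:
    keys = list(meta.keys())
    if len(keys) != 1:
        print('Inconsistency: expected exactly one schema key, got %d' % len(keys))
        continue
    if keys[0] not in known_schemas:
        print('Inconsistency: unknown registration schema %s' % keys[0])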
a93ad6ce9a264a82717a37230e48ff00d9c642fc
|
pywikibot/families/wikidata_family.py
|
pywikibot/families/wikidata_family.py
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The wikidata family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikidata'
self.langs = {
'wikidata': 'www.wikidata.org',
'repo': 'wikidata-test-repo.wikimedia.de',
'client': 'wikidata-test-client.wikimedia.de',
'test': 'test.wikidata.org',
}
def scriptpath(self, code):
if code == 'client':
return ''
return super(Family, self).scriptpath(code)
    def shared_data_repository(self, code, transcluded=False):
        """Always return a repository tuple. This enables testing whether
        the site object is the repository itself, see Site.is_data_repository()
"""
if transcluded:
return (None, None)
else:
if code == 'wikidata':
return ('wikidata', 'wikidata')
elif code == 'test':
return ('test', 'wikidata')
else:
return ('repo', 'wikidata')
def globes(self, code):
"""Supported globes for Coordinate datatype"""
return {'earth': 'http://www.wikidata.org/entity/Q2'}
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The wikidata family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikidata'
self.langs = {
'wikidata': 'www.wikidata.org',
'repo': 'wikidata-test-repo.wikimedia.de',
'client': 'wikidata-test-client.wikimedia.de',
'test': 'test.wikidata.org',
}
def scriptpath(self, code):
if code == 'client':
return ''
return super(Family, self).scriptpath(code)
    def shared_data_repository(self, code, transcluded=False):
        """Always return a repository tuple. This enables testing whether
        the site object is the repository itself, see Site.is_data_repository()
"""
if transcluded:
return (None, None)
else:
if code == 'wikidata':
return ('wikidata', 'wikidata')
elif code == 'test':
return ('test', 'wikidata')
else:
return ('repo', 'wikidata')
def globes(self, code):
"""Supported globes for Coordinate datatype"""
return {'earth': 'http://www.wikidata.org/entity/Q2',
'moon': 'http://www.wikidata.org/entity/Q405'}
|
Add moon (Q405) to the list of globes
|
Add moon (Q405) to the list of globes
Change-Id: I2dd9f87fcb1d748bff94328575f8439dc36035e3
|
Python
|
mit
|
magul/pywikibot-core,wikimedia/pywikibot-core,happy5214/pywikibot-core,jayvdb/pywikibot-core,Darkdadaah/pywikibot-core,trishnaguha/pywikibot-core,VcamX/pywikibot-core,hasteur/g13bot_tools_new,h4ck3rm1k3/pywikibot-core,hasteur/g13bot_tools_new,wikimedia/pywikibot-core,hasteur/g13bot_tools_new,PersianWikipedia/pywikibot-core,smalyshev/pywikibot-core,darthbhyrava/pywikibot-local,magul/pywikibot-core,TridevGuha/pywikibot-core,jayvdb/pywikibot-core,happy5214/pywikibot-core,h4ck3rm1k3/pywikibot-core,npdoty/pywikibot,valhallasw/pywikibot-core,icyflame/batman,Darkdadaah/pywikibot-core,emijrp/pywikibot-core,xZise/pywikibot-core,npdoty/pywikibot
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The wikidata family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikidata'
self.langs = {
'wikidata': 'www.wikidata.org',
'repo': 'wikidata-test-repo.wikimedia.de',
'client': 'wikidata-test-client.wikimedia.de',
'test': 'test.wikidata.org',
}
def scriptpath(self, code):
if code == 'client':
return ''
return super(Family, self).scriptpath(code)
    def shared_data_repository(self, code, transcluded=False):
        """Always return a repository tuple. This enables testing whether
        the site object is the repository itself, see Site.is_data_repository()
"""
if transcluded:
return (None, None)
else:
if code == 'wikidata':
return ('wikidata', 'wikidata')
elif code == 'test':
return ('test', 'wikidata')
else:
return ('repo', 'wikidata')
def globes(self, code):
"""Supported globes for Coordinate datatype"""
return {'earth': 'http://www.wikidata.org/entity/Q2'}
Add moon (Q405) to the list of globes
Change-Id: I2dd9f87fcb1d748bff94328575f8439dc36035e3
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The wikidata family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikidata'
self.langs = {
'wikidata': 'www.wikidata.org',
'repo': 'wikidata-test-repo.wikimedia.de',
'client': 'wikidata-test-client.wikimedia.de',
'test': 'test.wikidata.org',
}
def scriptpath(self, code):
if code == 'client':
return ''
return super(Family, self).scriptpath(code)
    def shared_data_repository(self, code, transcluded=False):
        """Always return a repository tuple. This enables testing whether
        the site object is the repository itself, see Site.is_data_repository()
"""
if transcluded:
return (None, None)
else:
if code == 'wikidata':
return ('wikidata', 'wikidata')
elif code == 'test':
return ('test', 'wikidata')
else:
return ('repo', 'wikidata')
def globes(self, code):
"""Supported globes for Coordinate datatype"""
return {'earth': 'http://www.wikidata.org/entity/Q2',
'moon': 'http://www.wikidata.org/entity/Q405'}
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The wikidata family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikidata'
self.langs = {
'wikidata': 'www.wikidata.org',
'repo': 'wikidata-test-repo.wikimedia.de',
'client': 'wikidata-test-client.wikimedia.de',
'test': 'test.wikidata.org',
}
def scriptpath(self, code):
if code == 'client':
return ''
return super(Family, self).scriptpath(code)
    def shared_data_repository(self, code, transcluded=False):
        """Always return a repository tuple. This enables testing whether
        the site object is the repository itself, see Site.is_data_repository()
"""
if transcluded:
return (None, None)
else:
if code == 'wikidata':
return ('wikidata', 'wikidata')
elif code == 'test':
return ('test', 'wikidata')
else:
return ('repo', 'wikidata')
def globes(self, code):
"""Supported globes for Coordinate datatype"""
return {'earth': 'http://www.wikidata.org/entity/Q2'}
<commit_msg>Add moon (Q405) to the list of globes
Change-Id: I2dd9f87fcb1d748bff94328575f8439dc36035e3<commit_after>
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The wikidata family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikidata'
self.langs = {
'wikidata': 'www.wikidata.org',
'repo': 'wikidata-test-repo.wikimedia.de',
'client': 'wikidata-test-client.wikimedia.de',
'test': 'test.wikidata.org',
}
def scriptpath(self, code):
if code == 'client':
return ''
return super(Family, self).scriptpath(code)
    def shared_data_repository(self, code, transcluded=False):
        """Always return a repository tuple. This enables testing whether
        the site object is the repository itself, see Site.is_data_repository()
"""
if transcluded:
return (None, None)
else:
if code == 'wikidata':
return ('wikidata', 'wikidata')
elif code == 'test':
return ('test', 'wikidata')
else:
return ('repo', 'wikidata')
def globes(self, code):
"""Supported globes for Coordinate datatype"""
return {'earth': 'http://www.wikidata.org/entity/Q2',
'moon': 'http://www.wikidata.org/entity/Q405'}
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The wikidata family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikidata'
self.langs = {
'wikidata': 'www.wikidata.org',
'repo': 'wikidata-test-repo.wikimedia.de',
'client': 'wikidata-test-client.wikimedia.de',
'test': 'test.wikidata.org',
}
def scriptpath(self, code):
if code == 'client':
return ''
return super(Family, self).scriptpath(code)
    def shared_data_repository(self, code, transcluded=False):
        """Always return a repository tuple. This enables testing whether
        the site object is the repository itself, see Site.is_data_repository()
"""
if transcluded:
return (None, None)
else:
if code == 'wikidata':
return ('wikidata', 'wikidata')
elif code == 'test':
return ('test', 'wikidata')
else:
return ('repo', 'wikidata')
def globes(self, code):
"""Supported globes for Coordinate datatype"""
return {'earth': 'http://www.wikidata.org/entity/Q2'}
Add moon (Q405) to the list of globes
Change-Id: I2dd9f87fcb1d748bff94328575f8439dc36035e3# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The wikidata family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikidata'
self.langs = {
'wikidata': 'www.wikidata.org',
'repo': 'wikidata-test-repo.wikimedia.de',
'client': 'wikidata-test-client.wikimedia.de',
'test': 'test.wikidata.org',
}
def scriptpath(self, code):
if code == 'client':
return ''
return super(Family, self).scriptpath(code)
    def shared_data_repository(self, code, transcluded=False):
        """Always return a repository tuple. This enables testing whether
        the site object is the repository itself, see Site.is_data_repository()
"""
if transcluded:
return (None, None)
else:
if code == 'wikidata':
return ('wikidata', 'wikidata')
elif code == 'test':
return ('test', 'wikidata')
else:
return ('repo', 'wikidata')
def globes(self, code):
"""Supported globes for Coordinate datatype"""
return {'earth': 'http://www.wikidata.org/entity/Q2',
'moon': 'http://www.wikidata.org/entity/Q405'}
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The wikidata family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikidata'
self.langs = {
'wikidata': 'www.wikidata.org',
'repo': 'wikidata-test-repo.wikimedia.de',
'client': 'wikidata-test-client.wikimedia.de',
'test': 'test.wikidata.org',
}
def scriptpath(self, code):
if code == 'client':
return ''
return super(Family, self).scriptpath(code)
    def shared_data_repository(self, code, transcluded=False):
        """Always return a repository tuple. This enables testing whether
        the site object is the repository itself, see Site.is_data_repository()
"""
if transcluded:
return (None, None)
else:
if code == 'wikidata':
return ('wikidata', 'wikidata')
elif code == 'test':
return ('test', 'wikidata')
else:
return ('repo', 'wikidata')
def globes(self, code):
"""Supported globes for Coordinate datatype"""
return {'earth': 'http://www.wikidata.org/entity/Q2'}
<commit_msg>Add moon (Q405) to the list of globes
Change-Id: I2dd9f87fcb1d748bff94328575f8439dc36035e3<commit_after># -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The wikidata family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'wikidata'
self.langs = {
'wikidata': 'www.wikidata.org',
'repo': 'wikidata-test-repo.wikimedia.de',
'client': 'wikidata-test-client.wikimedia.de',
'test': 'test.wikidata.org',
}
def scriptpath(self, code):
if code == 'client':
return ''
return super(Family, self).scriptpath(code)
    def shared_data_repository(self, code, transcluded=False):
        """Always return a repository tuple. This enables testing whether
        the site object is the repository itself, see Site.is_data_repository()
"""
if transcluded:
return (None, None)
else:
if code == 'wikidata':
return ('wikidata', 'wikidata')
elif code == 'test':
return ('test', 'wikidata')
else:
return ('repo', 'wikidata')
def globes(self, code):
"""Supported globes for Coordinate datatype"""
return {'earth': 'http://www.wikidata.org/entity/Q2',
'moon': 'http://www.wikidata.org/entity/Q405'}
|
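The change adds one entry to the globes() mapping; a tiny sketch of how a caller might resolve a globe name to its Wikidata entity (the helper below is illustrative, not pywikibot API):

GLOBES = {
    'earth': 'http://www.wikidata.org/entity/Q2',
    'moon': 'http://www.wikidata.org/entity/Q405',
}

def globe_entity(globe='earth'):
    """Return the Wikidata entity URI for a named globe."""
    try:
        return GLOBES[globe]
    except KeyError:
        raise KeyError('unsupported globe %r; known globes: %s'
                       % (globe, ', '.join(sorted(GLOBES))))

print(globe_entity('moon'))  # -> http://www.wikidata.org/entity/Q405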
4ed6e80128c9ea51e73be0eb4a3f840f8bb13787
|
scrapers/jyvaskyla.py
|
scrapers/jyvaskyla.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
import requests_cache
from lxml import html
from utils import ScrapeError, submit_council_members
from finland import PARTIES
def scrape_council_group(url):
# E.g. http://www.jyvaskyla.fi/hallinto/valtuusto/valtuusto09/sdp
party = url.split('/')[-1]
for p in PARTIES:
if p.lower().startswith(party):
party = p
break
else:
raise ScrapeError("Unknown party: %s" % party)
print url
r = requests.get(url)
doc = html.fromstring(r.text)
el_list = doc.xpath('//a[@name]')
members = []
for idx, el in enumerate(el_list):
# The first council member is sometimes encoded differently...
if idx == 0 and el.getnext() != None:
name = el.getnext().text_content()
else:
name = el.tail
name = name.strip()
members.append((name, party))
return members
requests_cache.configure('jyvaskyla')
members = []
BASE_URL = 'http://www.jyvaskyla.fi/hallinto/valtuusto/valtuusto09'
r = requests.get(BASE_URL)
doc = html.fromstring(r.text)
# We will be fetching linked pages, so relative paths must be
# converted into absolute URLs.
doc.make_links_absolute(BASE_URL)
# Find the p element that contains the text "Valtuustoryhmät"
el = doc.xpath(u"//h2[contains(., 'Valtuustoryhmät')]")[0]
# The links to the council groups follow
party_links = el.xpath("following-sibling::p/a")
for link_el in party_links:
url = link_el.attrib['href']
ret = scrape_council_group(url)
members += ret
# The city has exactly 75 council members
assert len(members) == 75
submit_council_members("Jyväskylä", members)
|
Add scraper for Jyväskylä city council members
|
Add scraper for Jyväskylä city council members
|
Python
|
agpl-3.0
|
kansanmuisti/datavaalit,kansanmuisti/datavaalit
|
Add scraper for Jyväskylä city council members
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
import requests_cache
from lxml import html
from utils import ScrapeError, submit_council_members
from finland import PARTIES
def scrape_council_group(url):
# E.g. http://www.jyvaskyla.fi/hallinto/valtuusto/valtuusto09/sdp
party = url.split('/')[-1]
for p in PARTIES:
if p.lower().startswith(party):
party = p
break
else:
raise ScrapeError("Unknown party: %s" % party)
print url
r = requests.get(url)
doc = html.fromstring(r.text)
el_list = doc.xpath('//a[@name]')
members = []
for idx, el in enumerate(el_list):
# The first council member is sometimes encoded differently...
if idx == 0 and el.getnext() != None:
name = el.getnext().text_content()
else:
name = el.tail
name = name.strip()
members.append((name, party))
return members
requests_cache.configure('jyvaskyla')
members = []
BASE_URL = 'http://www.jyvaskyla.fi/hallinto/valtuusto/valtuusto09'
r = requests.get(BASE_URL)
doc = html.fromstring(r.text)
# We will be fetching linked pages, so relative paths must be
# converted into absolute URLs.
doc.make_links_absolute(BASE_URL)
# Find the p element that contains the text "Valtuustoryhmät"
el = doc.xpath(u"//h2[contains(., 'Valtuustoryhmät')]")[0]
# The links to the council groups follow
party_links = el.xpath("following-sibling::p/a")
for link_el in party_links:
url = link_el.attrib['href']
ret = scrape_council_group(url)
members += ret
# The city has exactly 75 council members
assert len(members) == 75
submit_council_members("Jyväskylä", members)
|
<commit_before><commit_msg>Add scraper for Jyväskylä city council members<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
import requests_cache
from lxml import html
from utils import ScrapeError, submit_council_members
from finland import PARTIES
def scrape_council_group(url):
# E.g. http://www.jyvaskyla.fi/hallinto/valtuusto/valtuusto09/sdp
party = url.split('/')[-1]
for p in PARTIES:
if p.lower().startswith(party):
party = p
break
else:
raise ScrapeError("Unknown party: %s" % party)
print url
r = requests.get(url)
doc = html.fromstring(r.text)
el_list = doc.xpath('//a[@name]')
members = []
for idx, el in enumerate(el_list):
# The first council member is sometimes encoded differently...
if idx == 0 and el.getnext() != None:
name = el.getnext().text_content()
else:
name = el.tail
name = name.strip()
members.append((name, party))
return members
requests_cache.configure('jyvaskyla')
members = []
BASE_URL = 'http://www.jyvaskyla.fi/hallinto/valtuusto/valtuusto09'
r = requests.get(BASE_URL)
doc = html.fromstring(r.text)
# We will be fetching linked pages, so relative paths must be
# converted into absolute URLs.
doc.make_links_absolute(BASE_URL)
# Find the p element that contains the text "Valtuustoryhmät"
el = doc.xpath(u"//h2[contains(., 'Valtuustoryhmät')]")[0]
# The links to the council groups follow
party_links = el.xpath("following-sibling::p/a")
for link_el in party_links:
url = link_el.attrib['href']
ret = scrape_council_group(url)
members += ret
# The city has exactly 75 council members
assert len(members) == 75
submit_council_members("Jyväskylä", members)
|
Add scraper for Jyväskylä city council members#!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
import requests_cache
from lxml import html
from utils import ScrapeError, submit_council_members
from finland import PARTIES
def scrape_council_group(url):
# E.g. http://www.jyvaskyla.fi/hallinto/valtuusto/valtuusto09/sdp
party = url.split('/')[-1]
for p in PARTIES:
if p.lower().startswith(party):
party = p
break
else:
raise ScrapeError("Unknown party: %s" % party)
print url
r = requests.get(url)
doc = html.fromstring(r.text)
el_list = doc.xpath('//a[@name]')
members = []
for idx, el in enumerate(el_list):
# The first council member is sometimes encoded differently...
if idx == 0 and el.getnext() != None:
name = el.getnext().text_content()
else:
name = el.tail
name = name.strip()
members.append((name, party))
return members
requests_cache.configure('jyvaskyla')
members = []
BASE_URL = 'http://www.jyvaskyla.fi/hallinto/valtuusto/valtuusto09'
r = requests.get(BASE_URL)
doc = html.fromstring(r.text)
# We will be fetching linked pages, so relative paths must be
# converted into absolute URLs.
doc.make_links_absolute(BASE_URL)
# Find the p element that contains the text "Valtuustoryhmät"
el = doc.xpath(u"//h2[contains(., 'Valtuustoryhmät')]")[0]
# The links to the council groups follow
party_links = el.xpath("following-sibling::p/a")
for link_el in party_links:
url = link_el.attrib['href']
ret = scrape_council_group(url)
members += ret
# The city has exactly 75 council members
assert len(members) == 75
submit_council_members("Jyväskylä", members)
|
<commit_before><commit_msg>Add scraper for Jyväskylä city council members<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
import requests_cache
from lxml import html
from utils import ScrapeError, submit_council_members
from finland import PARTIES
def scrape_council_group(url):
# E.g. http://www.jyvaskyla.fi/hallinto/valtuusto/valtuusto09/sdp
party = url.split('/')[-1]
for p in PARTIES:
if p.lower().startswith(party):
party = p
break
else:
raise ScrapeError("Unknown party: %s" % party)
print url
r = requests.get(url)
doc = html.fromstring(r.text)
el_list = doc.xpath('//a[@name]')
members = []
for idx, el in enumerate(el_list):
# The first council member is sometimes encoded differently...
if idx == 0 and el.getnext() != None:
name = el.getnext().text_content()
else:
name = el.tail
name = name.strip()
members.append((name, party))
return members
requests_cache.configure('jyvaskyla')
members = []
BASE_URL = 'http://www.jyvaskyla.fi/hallinto/valtuusto/valtuusto09'
r = requests.get(BASE_URL)
doc = html.fromstring(r.text)
# We will be fetching linked pages, so relative paths must be
# converted into absolute URLs.
doc.make_links_absolute(BASE_URL)
# Find the p element that contains the text "Valtuustoryhmät"
el = doc.xpath(u"//h2[contains(., 'Valtuustoryhmät')]")[0]
# The links to the council groups follow
party_links = el.xpath("following-sibling::p/a")
for link_el in party_links:
url = link_el.attrib['href']
ret = scrape_council_group(url)
members += ret
# The city has exactly 75 council members
assert len(members) == 75
submit_council_members("Jyväskylä", members)
|
|
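The scraping pattern above (locate a heading, then walk its following siblings for links) can be exercised on an in-memory document; the HTML below is fabricated but has the same shape as the council listing page:

from lxml import html

page = html.fromstring(u"""
<body>
  <h2>Valtuustoryhmät</h2>
  <p><a href="/hallinto/valtuusto/valtuusto09/sdp">SDP</a></p>
  <p><a href="/hallinto/valtuusto/valtuusto09/kok">Kokoomus</a></p>
</body>
""")
page.make_links_absolute('http://www.jyvaskyla.fi')

heading = page.xpath(u"//h2[contains(., 'Valtuustoryhmät')]")[0]
for link in heading.xpath('following-sibling::p/a'):
    print(link.attrib['href'])
# -> http://www.jyvaskyla.fi/hallinto/valtuusto/valtuusto09/sdp  (and .../kok)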
a6cbb7e0914afdebcb4a03078575e55ce86b3224
|
extract_pairings.py
|
extract_pairings.py
|
import json
import re
from collections import defaultdict
def extract_ingredients_from_string(description):
if description:
cleaned_description = re.sub('[^a-z ]', '', description.lower())
return cleaned_description.split()
def make_pairing_store():
    return defaultdict(lambda: defaultdict(int))
def record_pairing(pairings, a, b):
if a and b:
pairings[a][b] += 1
pairings[b][a] += 1
if __name__ == '__main__':
num_items_seen = 10
venues_with_menus = json.loads(open('menus.json').read())
for venue in venues_with_menus:
for menu in venue['menus']:
for section in menu.get('sections', []):
# todo: section['name']
for subsection in section.get('subsections', []):
# todo: subsection['name'] ?
for item in subsection.get('contents', []):
text_to_inspect_for_item = []
text_to_inspect_for_item.append(item.get('name'))
text_to_inspect_for_item.append(item.get('description'))
for option_group in item.get('option_groups', []):
text_to_inspect_for_item.append(option_group.get('text'))
for option in option_group.get('options', []):
text_to_inspect_for_item.append(option.get('name'))
print json.dumps(map(extract_ingredients_from_string,
text_to_inspect_for_item),
indent=2, sort_keys=True)
num_items_seen += 1
if num_items_seen == 20:
import sys
sys.exit(0)
|
Add iteration over menu items
|
Add iteration over menu items
|
Python
|
mit
|
keir/tastypair,keir/tastypair
|
Add iteration over menu items
|
import json
import re
from collections import defaultdict
def extract_ingredients_from_string(description):
if description:
cleaned_description = re.sub('[^a-z ]', '', description.lower())
return cleaned_description.split()
def make_pairing_store():
    return defaultdict(lambda: defaultdict(int))
def record_pairing(pairings, a, b):
if a and b:
pairings[a][b] += 1
pairings[b][a] += 1
if __name__ == '__main__':
num_items_seen = 10
venues_with_menus = json.loads(open('menus.json').read())
for venue in venues_with_menus:
for menu in venue['menus']:
for section in menu.get('sections', []):
# todo: section['name']
for subsection in section.get('subsections', []):
# todo: subsection['name'] ?
for item in subsection.get('contents', []):
text_to_inspect_for_item = []
text_to_inspect_for_item.append(item.get('name'))
text_to_inspect_for_item.append(item.get('description'))
for option_group in item.get('option_groups', []):
text_to_inspect_for_item.append(option_group.get('text'))
for option in option_group.get('options', []):
text_to_inspect_for_item.append(option.get('name'))
print json.dumps(map(extract_ingredients_from_string,
text_to_inspect_for_item),
indent=2, sort_keys=True)
num_items_seen += 1
if num_items_seen == 20:
import sys
sys.exit(0)
|
<commit_before><commit_msg>Add iteration over menu items<commit_after>
|
import json
import re
from collections import defaultdict
def extract_ingredients_from_string(description):
if description:
cleaned_description = re.sub('[^a-z ]', '', description.lower())
return cleaned_description.split()
def make_pairing_store():
    return defaultdict(lambda: defaultdict(int))
def record_pairing(pairings, a, b):
if a and b:
pairings[a][b] += 1
pairings[b][a] += 1
if __name__ == '__main__':
num_items_seen = 10
venues_with_menus = json.loads(open('menus.json').read())
for venue in venues_with_menus:
for menu in venue['menus']:
for section in menu.get('sections', []):
# todo: section['name']
for subsection in section.get('subsections', []):
# todo: subsection['name'] ?
for item in subsection.get('contents', []):
text_to_inspect_for_item = []
text_to_inspect_for_item.append(item.get('name'))
text_to_inspect_for_item.append(item.get('description'))
for option_group in item.get('option_groups', []):
text_to_inspect_for_item.append(option_group.get('text'))
for option in option_group.get('options', []):
text_to_inspect_for_item.append(option.get('name'))
print json.dumps(map(extract_ingredients_from_string,
text_to_inspect_for_item),
indent=2, sort_keys=True)
num_items_seen += 1
if num_items_seen == 20:
import sys
sys.exit(0)
|
Add iteration over menu itemsimport json
import re
from collections import defaultdict
def extract_ingredients_from_string(description):
if description:
cleaned_description = re.sub('[^a-z ]', '', description.lower())
return cleaned_description.split()
def make_pairing_store():
    return defaultdict(lambda: defaultdict(int))
def record_pairing(pairings, a, b):
if a and b:
pairings[a][b] += 1
pairings[b][a] += 1
if __name__ == '__main__':
num_items_seen = 10
venues_with_menus = json.loads(open('menus.json').read())
for venue in venues_with_menus:
for menu in venue['menus']:
for section in menu.get('sections', []):
# todo: section['name']
for subsection in section.get('subsections', []):
# todo: subsection['name'] ?
for item in subsection.get('contents', []):
text_to_inspect_for_item = []
text_to_inspect_for_item.append(item.get('name'))
text_to_inspect_for_item.append(item.get('description'))
for option_group in item.get('option_groups', []):
text_to_inspect_for_item.append(option_group.get('text'))
for option in option_group.get('options', []):
text_to_inspect_for_item.append(option.get('name'))
print json.dumps(map(extract_ingredients_from_string,
text_to_inspect_for_item),
indent=2, sort_keys=True)
num_items_seen += 1
if num_items_seen == 20:
import sys
sys.exit(0)
|
<commit_before><commit_msg>Add iteration over menu items<commit_after>import json
import re
from collections import defaultdict
def extract_ingredients_from_string(description):
if description:
cleaned_description = re.sub('[^a-z ]', '', description.lower())
return cleaned_description.split()
def make_pairing_store():
    return defaultdict(lambda: defaultdict(int))
def record_pairing(pairings, a, b):
if a and b:
pairings[a][b] += 1
pairings[b][a] += 1
if __name__ == '__main__':
num_items_seen = 10
venues_with_menus = json.loads(open('menus.json').read())
for venue in venues_with_menus:
for menu in venue['menus']:
for section in menu.get('sections', []):
# todo: section['name']
for subsection in section.get('subsections', []):
# todo: subsection['name'] ?
for item in subsection.get('contents', []):
text_to_inspect_for_item = []
text_to_inspect_for_item.append(item.get('name'))
text_to_inspect_for_item.append(item.get('description'))
for option_group in item.get('option_groups', []):
text_to_inspect_for_item.append(option_group.get('text'))
for option in option_group.get('options', []):
text_to_inspect_for_item.append(option.get('name'))
print json.dumps(map(extract_ingredients_from_string,
text_to_inspect_for_item),
indent=2, sort_keys=True)
num_items_seen += 1
if num_items_seen == 20:
import sys
sys.exit(0)
|
|
ff80cf04452c85ff0b93666feb867afa6e4d94f0
|
examples/apc2016/train_fcn8s.py
|
examples/apc2016/train_fcn8s.py
|
#!/usr/bin/env python
import argparse
import os
import os.path as osp
import chainer
from chainer import cuda
import fcn
import datasets
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--fcn16s', required=True)
parser.add_argument('--gpu', type=int, default=0)
parser.add_argument('--out', required=True)
parser.add_argument('--dataset', default='v2', choices=['v1', 'v2'])
args = parser.parse_args()
fcn16s_path = args.fcn16s
gpu = args.gpu
out = args.out
if args.dataset == 'v1':
dataset_class = datasets.APC2016DatasetV1
else:
dataset_class = datasets.APC2016DatasetV2
if not osp.exists(out):
os.makedirs(out)
# 1. dataset
dataset_train = dataset_class('train')
dataset_val = dataset_class('val')
iter_train = chainer.iterators.SerialIterator(dataset_train, batch_size=1)
iter_val = chainer.iterators.SerialIterator(dataset_val, batch_size=1,
repeat=False, shuffle=False)
# 2. model
n_class = len(dataset_train.label_names)
fcn16s = fcn.models.FCN16s(n_class=n_class)
chainer.serializers.load_hdf5(fcn16s_path, fcn16s)
model = fcn.models.FCN8s(n_class=n_class)
model.train = True
fcn.utils.copy_chainermodel(fcn16s, model)
if gpu >= 0:
cuda.get_device(gpu).use()
model.to_gpu()
# 3. optimizer
optimizer = chainer.optimizers.Adam(alpha=1e-5)
optimizer.setup(model)
# training loop
trainer = fcn.Trainer(
device=gpu,
model=model,
optimizer=optimizer,
iter_train=iter_train,
iter_val=iter_val,
out=out,
)
trainer.train(
max_iter=150000,
interval_eval=5000,
)
if __name__ == '__main__':
main()
|
Add trainer for fcn8s on apc2016
|
Add trainer for fcn8s on apc2016
|
Python
|
mit
|
wkentaro/fcn
|
Add trainer for fcn8s on apc2016
|
#!/usr/bin/env python
import argparse
import os
import os.path as osp
import chainer
from chainer import cuda
import fcn
import datasets
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--fcn16s', required=True)
parser.add_argument('--gpu', type=int, default=0)
parser.add_argument('--out', required=True)
parser.add_argument('--dataset', default='v2', choices=['v1', 'v2'])
args = parser.parse_args()
fcn16s_path = args.fcn16s
gpu = args.gpu
out = args.out
if args.dataset == 'v1':
dataset_class = datasets.APC2016DatasetV1
else:
dataset_class = datasets.APC2016DatasetV2
if not osp.exists(out):
os.makedirs(out)
# 1. dataset
dataset_train = dataset_class('train')
dataset_val = dataset_class('val')
iter_train = chainer.iterators.SerialIterator(dataset_train, batch_size=1)
iter_val = chainer.iterators.SerialIterator(dataset_val, batch_size=1,
repeat=False, shuffle=False)
# 2. model
n_class = len(dataset_train.label_names)
fcn16s = fcn.models.FCN16s(n_class=n_class)
chainer.serializers.load_hdf5(fcn16s_path, fcn16s)
model = fcn.models.FCN8s(n_class=n_class)
model.train = True
fcn.utils.copy_chainermodel(fcn16s, model)
if gpu >= 0:
cuda.get_device(gpu).use()
model.to_gpu()
# 3. optimizer
optimizer = chainer.optimizers.Adam(alpha=1e-5)
optimizer.setup(model)
# training loop
trainer = fcn.Trainer(
device=gpu,
model=model,
optimizer=optimizer,
iter_train=iter_train,
iter_val=iter_val,
out=out,
)
trainer.train(
max_iter=150000,
interval_eval=5000,
)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add trainer for fcn8s on apc2016<commit_after>
|
#!/usr/bin/env python
import argparse
import os
import os.path as osp
import chainer
from chainer import cuda
import fcn
import datasets
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--fcn16s', required=True)
parser.add_argument('--gpu', type=int, default=0)
parser.add_argument('--out', required=True)
parser.add_argument('--dataset', default='v2', choices=['v1', 'v2'])
args = parser.parse_args()
fcn16s_path = args.fcn16s
gpu = args.gpu
out = args.out
if args.dataset == 'v1':
dataset_class = datasets.APC2016DatasetV1
else:
dataset_class = datasets.APC2016DatasetV2
if not osp.exists(out):
os.makedirs(out)
# 1. dataset
dataset_train = dataset_class('train')
dataset_val = dataset_class('val')
iter_train = chainer.iterators.SerialIterator(dataset_train, batch_size=1)
iter_val = chainer.iterators.SerialIterator(dataset_val, batch_size=1,
repeat=False, shuffle=False)
# 2. model
n_class = len(dataset_train.label_names)
fcn16s = fcn.models.FCN16s(n_class=n_class)
chainer.serializers.load_hdf5(fcn16s_path, fcn16s)
model = fcn.models.FCN8s(n_class=n_class)
model.train = True
fcn.utils.copy_chainermodel(fcn16s, model)
if gpu >= 0:
cuda.get_device(gpu).use()
model.to_gpu()
# 3. optimizer
optimizer = chainer.optimizers.Adam(alpha=1e-5)
optimizer.setup(model)
# training loop
trainer = fcn.Trainer(
device=gpu,
model=model,
optimizer=optimizer,
iter_train=iter_train,
iter_val=iter_val,
out=out,
)
trainer.train(
max_iter=150000,
interval_eval=5000,
)
if __name__ == '__main__':
main()
|
Add trainer for fcn8s on apc2016#!/usr/bin/env python
import argparse
import os
import os.path as osp
import chainer
from chainer import cuda
import fcn
import datasets
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--fcn16s', required=True)
parser.add_argument('--gpu', type=int, default=0)
parser.add_argument('--out', required=True)
parser.add_argument('--dataset', default='v2', choices=['v1', 'v2'])
args = parser.parse_args()
fcn16s_path = args.fcn16s
gpu = args.gpu
out = args.out
if args.dataset == 'v1':
dataset_class = datasets.APC2016DatasetV1
else:
dataset_class = datasets.APC2016DatasetV2
if not osp.exists(out):
os.makedirs(out)
# 1. dataset
dataset_train = dataset_class('train')
dataset_val = dataset_class('val')
iter_train = chainer.iterators.SerialIterator(dataset_train, batch_size=1)
iter_val = chainer.iterators.SerialIterator(dataset_val, batch_size=1,
repeat=False, shuffle=False)
# 2. model
n_class = len(dataset_train.label_names)
fcn16s = fcn.models.FCN16s(n_class=n_class)
chainer.serializers.load_hdf5(fcn16s_path, fcn16s)
model = fcn.models.FCN8s(n_class=n_class)
model.train = True
fcn.utils.copy_chainermodel(fcn16s, model)
if gpu >= 0:
cuda.get_device(gpu).use()
model.to_gpu()
# 3. optimizer
optimizer = chainer.optimizers.Adam(alpha=1e-5)
optimizer.setup(model)
# training loop
trainer = fcn.Trainer(
device=gpu,
model=model,
optimizer=optimizer,
iter_train=iter_train,
iter_val=iter_val,
out=out,
)
trainer.train(
max_iter=150000,
interval_eval=5000,
)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add trainer for fcn8s on apc2016<commit_after>#!/usr/bin/env python
import argparse
import os
import os.path as osp
import chainer
from chainer import cuda
import fcn
import datasets
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--fcn16s', required=True)
parser.add_argument('--gpu', type=int, default=0)
parser.add_argument('--out', required=True)
parser.add_argument('--dataset', default='v2', choices=['v1', 'v2'])
args = parser.parse_args()
fcn16s_path = args.fcn16s
gpu = args.gpu
out = args.out
if args.dataset == 'v1':
dataset_class = datasets.APC2016DatasetV1
else:
dataset_class = datasets.APC2016DatasetV2
if not osp.exists(out):
os.makedirs(out)
# 1. dataset
dataset_train = dataset_class('train')
dataset_val = dataset_class('val')
iter_train = chainer.iterators.SerialIterator(dataset_train, batch_size=1)
iter_val = chainer.iterators.SerialIterator(dataset_val, batch_size=1,
repeat=False, shuffle=False)
# 2. model
n_class = len(dataset_train.label_names)
fcn16s = fcn.models.FCN16s(n_class=n_class)
chainer.serializers.load_hdf5(fcn16s_path, fcn16s)
model = fcn.models.FCN8s(n_class=n_class)
model.train = True
fcn.utils.copy_chainermodel(fcn16s, model)
if gpu >= 0:
cuda.get_device(gpu).use()
model.to_gpu()
# 3. optimizer
optimizer = chainer.optimizers.Adam(alpha=1e-5)
optimizer.setup(model)
# training loop
trainer = fcn.Trainer(
device=gpu,
model=model,
optimizer=optimizer,
iter_train=iter_train,
iter_val=iter_val,
out=out,
)
trainer.train(
max_iter=150000,
interval_eval=5000,
)
if __name__ == '__main__':
main()
|
|
d0dcdf7ed663f9909f2a8e889bb972c5731aef0f
|
src/ggrc/migrations/versions/20170302155757_2127ea770285_add_audit_fk_to_assessments_and_issues.py
|
src/ggrc/migrations/versions/20170302155757_2127ea770285_add_audit_fk_to_assessments_and_issues.py
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Add audit FK to assessments and issues
Create Date: 2017-03-02 15:57:57.006126
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '2127ea770285'
down_revision = '1a5ec1ed04af'
def upgrade_table(table_name):
"""Add audit foreign key to a table."""
op.add_column(
table_name,
sa.Column("audit_id", sa.Integer(), nullable=True)
)
op.execute("""
UPDATE {table_name} AS t
JOIN contexts AS c ON
c.id = t.context_id AND
c.related_object_type = "Audit"
JOIN audits AS au ON
c.related_object_id = au.id
SET
t.audit_id = au.id
""".format(
table_name=table_name,
))
# Simple fix for testing with invalid objects
op.execute("""
DELETE FROM {table_name}
WHERE audit_id IS NULL
""".format(
table_name=table_name,
))
op.alter_column(
table_name,
"audit_id",
existing_type=sa.Integer(),
nullable=False
)
op.create_foreign_key(
"fk_{}_audits".format(table_name),
table_name,
"audits",
["audit_id"],
["id"],
ondelete="RESTRICT"
)
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
upgrade_table("assessments")
upgrade_table("issues")
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
op.drop_constraint("fk_assessments_audits", "assessments",
type_="foreignkey")
op.drop_constraint("fk_issues_audits", "issues", type_="foreignkey")
op.drop_column("assessments", "audit_id")
op.drop_column("issues", "audit_id")
|
Add audit foreign key to assessments and issues
|
Add audit foreign key to assessments and issues
|
Python
|
apache-2.0
|
AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core
|
Add audit foreign key to assessments and issues
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Add audit FK to assessments and issues
Create Date: 2017-03-02 15:57:57.006126
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '2127ea770285'
down_revision = '1a5ec1ed04af'
def upgrade_table(table_name):
"""Add audit foreign key to a table."""
op.add_column(
table_name,
sa.Column("audit_id", sa.Integer(), nullable=True)
)
op.execute("""
UPDATE {table_name} AS t
JOIN contexts AS c ON
c.id = t.context_id AND
c.related_object_type = "Audit"
JOIN audits AS au ON
c.related_object_id = au.id
SET
t.audit_id = au.id
""".format(
table_name=table_name,
))
# Simple fix for testing with invalid objects
op.execute("""
DELETE FROM {table_name}
WHERE audit_id IS NULL
""".format(
table_name=table_name,
))
op.alter_column(
table_name,
"audit_id",
existing_type=sa.Integer(),
nullable=False
)
op.create_foreign_key(
"fk_{}_audits".format(table_name),
table_name,
"audits",
["audit_id"],
["id"],
ondelete="RESTRICT"
)
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
upgrade_table("assessments")
upgrade_table("issues")
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
op.drop_constraint("fk_assessments_audits", "assessments",
type_="foreignkey")
op.drop_constraint("fk_issues_audits", "issues", type_="foreignkey")
op.drop_column("assessments", "audit_id")
op.drop_column("issues", "audit_id")
|
<commit_before><commit_msg>Add audit foreign key to assessments and issues<commit_after>
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Add audit FK to assessments and issues
Create Date: 2017-03-02 15:57:57.006126
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '2127ea770285'
down_revision = '1a5ec1ed04af'
def upgrade_table(table_name):
"""Add audit foreign key to a table."""
op.add_column(
table_name,
sa.Column("audit_id", sa.Integer(), nullable=True)
)
op.execute("""
UPDATE {table_name} AS t
JOIN contexts AS c ON
c.id = t.context_id AND
c.related_object_type = "Audit"
JOIN audits AS au ON
c.related_object_id = au.id
SET
t.audit_id = au.id
""".format(
table_name=table_name,
))
# Simple fix for testing with invalid objects
op.execute("""
DELETE FROM {table_name}
WHERE audit_id IS NULL
""".format(
table_name=table_name,
))
op.alter_column(
table_name,
"audit_id",
existing_type=sa.Integer(),
nullable=False
)
op.create_foreign_key(
"fk_{}_audits".format(table_name),
table_name,
"audits",
["audit_id"],
["id"],
ondelete="RESTRICT"
)
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
upgrade_table("assessments")
upgrade_table("issues")
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
op.drop_constraint("fk_assessments_audits", "assessments",
type_="foreignkey")
op.drop_constraint("fk_issues_audits", "issues", type_="foreignkey")
op.drop_column("assessments", "audit_id")
op.drop_column("issues", "audit_id")
|
Add audit foreign key to assessments and issues# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Add audit FK to assessments and issues
Create Date: 2017-03-02 15:57:57.006126
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '2127ea770285'
down_revision = '1a5ec1ed04af'
def upgrade_table(table_name):
"""Add audit foreign key to a table."""
op.add_column(
table_name,
sa.Column("audit_id", sa.Integer(), nullable=True)
)
op.execute("""
UPDATE {table_name} AS t
JOIN contexts AS c ON
c.id = t.context_id AND
c.related_object_type = "Audit"
JOIN audits AS au ON
c.related_object_id = au.id
SET
t.audit_id = au.id
""".format(
table_name=table_name,
))
# Simple fix for testing with invalid objects
op.execute("""
DELETE FROM {table_name}
WHERE audit_id IS NULL
""".format(
table_name=table_name,
))
op.alter_column(
table_name,
"audit_id",
existing_type=sa.Integer(),
nullable=False
)
op.create_foreign_key(
"fk_{}_audits".format(table_name),
table_name,
"audits",
["audit_id"],
["id"],
ondelete="RESTRICT"
)
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
upgrade_table("assessments")
upgrade_table("issues")
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
op.drop_constraint("fk_assessments_audits", "assessments",
type_="foreignkey")
op.drop_constraint("fk_issues_audits", "issues", type_="foreignkey")
op.drop_column("assessments", "audit_id")
op.drop_column("issues", "audit_id")
|
<commit_before><commit_msg>Add audit foreign key to assessments and issues<commit_after># Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Add audit FK to assessments and issues
Create Date: 2017-03-02 15:57:57.006126
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '2127ea770285'
down_revision = '1a5ec1ed04af'
def upgrade_table(table_name):
"""Add audit foreign key to a table."""
op.add_column(
table_name,
sa.Column("audit_id", sa.Integer(), nullable=True)
)
op.execute("""
UPDATE {table_name} AS t
JOIN contexts AS c ON
c.id = t.context_id AND
c.related_object_type = "Audit"
JOIN audits AS au ON
c.related_object_id = au.id
SET
t.audit_id = au.id
""".format(
table_name=table_name,
))
# Simple fix for testing with invalid objects
op.execute("""
DELETE FROM {table_name}
WHERE audit_id IS NULL
""".format(
table_name=table_name,
))
op.alter_column(
table_name,
"audit_id",
existing_type=sa.Integer(),
nullable=False
)
op.create_foreign_key(
"fk_{}_audits".format(table_name),
table_name,
"audits",
["audit_id"],
["id"],
ondelete="RESTRICT"
)
def upgrade():
"""Upgrade database schema and/or data, creating a new revision."""
upgrade_table("assessments")
upgrade_table("issues")
def downgrade():
"""Downgrade database schema and/or data back to the previous revision."""
op.drop_constraint("fk_assessments_audits", "assessments",
type_="foreignkey")
op.drop_constraint("fk_issues_audits", "issues", type_="foreignkey")
op.drop_column("assessments", "audit_id")
op.drop_column("issues", "audit_id")
|
|
e06642b2a3e679d9292bec6bd468d2cada2baf70
|
h2o-py/tests/testdir_persist/pyunit_import_s3_parquet.py
|
h2o-py/tests/testdir_persist/pyunit_import_s3_parquet.py
|
import h2o
import os
from h2o.persist import set_s3_credentials
from tests import pyunit_utils
from pandas.testing import assert_frame_equal
def test_import_parquet_from_s3():
access_key_id = os.environ['AWS_ACCESS_KEY_ID']
secret_access_key = os.environ['AWS_SECRET_ACCESS_KEY']
assert access_key_id is not None
assert secret_access_key is not None
set_s3_credentials(access_key_id, secret_access_key)
from_s3 = h2o.import_file("s3://h2o-public-test-data/smalldata/parser/parquet/airlines-simple.snappy.parquet")
from_local = h2o.import_file(pyunit_utils.locate("smalldata/parser/parquet/airlines-simple.snappy.parquet"))
print(from_s3.summary())
assert from_s3.shape == (24421, 12)
assert_frame_equal(from_local.as_data_frame(), from_s3.as_data_frame())
if __name__ == "__main__":
pyunit_utils.standalone_test(test_import_parquet_from_s3)
else:
test_import_parquet_from_s3()
|
Add python test of importing Parquet files from S3
|
Add python test of importing Parquet files from S3
|
Python
|
apache-2.0
|
h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3,h2oai/h2o-3
|
Add python test of importing Parquet files from S3
|
import h2o
import os
from h2o.persist import set_s3_credentials
from tests import pyunit_utils
from pandas.testing import assert_frame_equal
def test_import_parquet_from_s3():
access_key_id = os.environ['AWS_ACCESS_KEY_ID']
secret_access_key = os.environ['AWS_SECRET_ACCESS_KEY']
assert access_key_id is not None
assert secret_access_key is not None
set_s3_credentials(access_key_id, secret_access_key)
from_s3 = h2o.import_file("s3://h2o-public-test-data/smalldata/parser/parquet/airlines-simple.snappy.parquet")
from_local = h2o.import_file(pyunit_utils.locate("smalldata/parser/parquet/airlines-simple.snappy.parquet"))
print(from_s3.summary())
assert from_s3.shape == (24421, 12)
assert_frame_equal(from_local.as_data_frame(), from_s3.as_data_frame())
if __name__ == "__main__":
pyunit_utils.standalone_test(test_import_parquet_from_s3)
else:
test_import_parquet_from_s3()
|
<commit_before><commit_msg>Add python test of importing Parquet files from S3<commit_after>
|
import h2o
import os
from h2o.persist import set_s3_credentials
from tests import pyunit_utils
from pandas.testing import assert_frame_equal
def test_import_parquet_from_s3():
access_key_id = os.environ['AWS_ACCESS_KEY_ID']
secret_access_key = os.environ['AWS_SECRET_ACCESS_KEY']
assert access_key_id is not None
assert secret_access_key is not None
set_s3_credentials(access_key_id, secret_access_key)
from_s3 = h2o.import_file("s3://h2o-public-test-data/smalldata/parser/parquet/airlines-simple.snappy.parquet")
from_local = h2o.import_file(pyunit_utils.locate("smalldata/parser/parquet/airlines-simple.snappy.parquet"))
print(from_s3.summary())
assert from_s3.shape == (24421, 12)
assert_frame_equal(from_local.as_data_frame(), from_s3.as_data_frame())
if __name__ == "__main__":
pyunit_utils.standalone_test(test_import_parquet_from_s3)
else:
test_import_parquet_from_s3()
|
Add python test of importing Parquet files from S3import h2o
import os
from h2o.persist import set_s3_credentials
from tests import pyunit_utils
from pandas.testing import assert_frame_equal
def test_import_parquet_from_s3():
access_key_id = os.environ['AWS_ACCESS_KEY_ID']
secret_access_key = os.environ['AWS_SECRET_ACCESS_KEY']
assert access_key_id is not None
assert secret_access_key is not None
set_s3_credentials(access_key_id, secret_access_key)
from_s3 = h2o.import_file("s3://h2o-public-test-data/smalldata/parser/parquet/airlines-simple.snappy.parquet")
from_local = h2o.import_file(pyunit_utils.locate("smalldata/parser/parquet/airlines-simple.snappy.parquet"))
print(from_s3.summary())
assert from_s3.shape == (24421, 12)
assert_frame_equal(from_local.as_data_frame(), from_s3.as_data_frame())
if __name__ == "__main__":
pyunit_utils.standalone_test(test_import_parquet_from_s3)
else:
test_import_parquet_from_s3()
|
<commit_before><commit_msg>Add python test of importing Parquet files from S3<commit_after>import h2o
import os
from h2o.persist import set_s3_credentials
from tests import pyunit_utils
from pandas.testing import assert_frame_equal
def test_import_parquet_from_s3():
access_key_id = os.environ['AWS_ACCESS_KEY_ID']
secret_access_key = os.environ['AWS_SECRET_ACCESS_KEY']
assert access_key_id is not None
assert secret_access_key is not None
set_s3_credentials(access_key_id, secret_access_key)
from_s3 = h2o.import_file("s3://h2o-public-test-data/smalldata/parser/parquet/airlines-simple.snappy.parquet")
from_local = h2o.import_file(pyunit_utils.locate("smalldata/parser/parquet/airlines-simple.snappy.parquet"))
print(from_s3.summary())
assert from_s3.shape == (24421, 12)
assert_frame_equal(from_local.as_data_frame(), from_s3.as_data_frame())
if __name__ == "__main__":
pyunit_utils.standalone_test(test_import_parquet_from_s3)
else:
test_import_parquet_from_s3()
|
|
1094ba0a4ee9443e6317e7fba8db8e69f09cfaa3
|
pints/tests/test_transformation.py
|
pints/tests/test_transformation.py
|
#!/usr/bin/env python3
#
# Tests Transform functions in Pints
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
from __future__ import division
import unittest
import pints
import numpy as np
class TestTransform(unittest.TestCase):
def test_log_transform(self):
# Test input parameters
t1 = pints.LogTransform(1)
t4 = pints.LogTransform(4)
p = [0.1, 1., 10., 999.]
x = [-2.3025850929940455, 0., 2.3025850929940459, 6.9067547786485539]
# Test forward transform
for xi, pi in zip(x, p):
calc_xi = t1.to_search(pi)
self.assertAlmostEqual(calc_xi, xi)
# Test inverse transform
for xi, pi in zip(x, p):
calc_pi = t1.to_model(xi)
self.assertAlmostEqual(calc_pi, pi)
# Test n_parameters
self.assertEqual(t1.n_parameters(), 1)
self.assertEqual(t4.n_parameters(), 4)
# Test Jacobian
# Test log-Jacobian determinant
if __name__ == '__main__':
unittest.main()
|
Add first simple transformation tests
|
Add first simple transformation tests
|
Python
|
bsd-3-clause
|
martinjrobins/hobo,martinjrobins/hobo,martinjrobins/hobo,martinjrobins/hobo
|
Add first simple transformation tests
|
#!/usr/bin/env python3
#
# Tests Transform functions in Pints
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
from __future__ import division
import unittest
import pints
import numpy as np
class TestTransform(unittest.TestCase):
def test_log_transform(self):
# Test input parameters
t1 = pints.LogTransform(1)
t4 = pints.LogTransform(4)
p = [0.1, 1., 10., 999.]
x = [-2.3025850929940455, 0., 2.3025850929940459, 6.9067547786485539]
# Test forward transform
for xi, pi in zip(x, p):
calc_xi = t1.to_search(pi)
self.assertAlmostEqual(calc_xi, xi)
# Test inverse transform
for xi, pi in zip(x, p):
calc_pi = t1.to_model(xi)
self.assertAlmostEqual(calc_pi, pi)
# Test n_parameters
self.assertEqual(t1.n_parameters(), 1)
self.assertEqual(t4.n_parameters(), 4)
# Test Jacobian
# Test log-Jacobian determinant
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add first simple transformation tests<commit_after>
|
#!/usr/bin/env python3
#
# Tests Transform functions in Pints
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
from __future__ import division
import unittest
import pints
import numpy as np
class TestTransform(unittest.TestCase):
def test_log_transform(self):
# Test input parameters
t1 = pints.LogTransform(1)
t4 = pints.LogTransform(4)
p = [0.1, 1., 10., 999.]
x = [-2.3025850929940455, 0., 2.3025850929940459, 6.9067547786485539]
# Test forward transform
for xi, pi in zip(x, p):
calc_xi = t1.to_search(pi)
self.assertAlmostEqual(calc_xi, xi)
# Test inverse transform
for xi, pi in zip(x, p):
calc_pi = t1.to_model(xi)
self.assertAlmostEqual(calc_pi, pi)
# Test n_parameters
self.assertEqual(t1.n_parameters(), 1)
self.assertEqual(t4.n_parameters(), 4)
# Test Jacobian
# Test log-Jacobian determinant
if __name__ == '__main__':
unittest.main()
|
Add first simple transformation tests#!/usr/bin/env python3
#
# Tests Transform functions in Pints
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
from __future__ import division
import unittest
import pints
import numpy as np
class TestTransform(unittest.TestCase):
def test_log_transform(self):
# Test input parameters
t1 = pints.LogTransform(1)
t4 = pints.LogTransform(4)
p = [0.1, 1., 10., 999.]
x = [-2.3025850929940455, 0., 2.3025850929940459, 6.9067547786485539]
# Test forward transform
for xi, pi in zip(x, p):
calc_xi = t1.to_search(pi)
self.assertAlmostEqual(calc_xi, xi)
# Test inverse transform
for xi, pi in zip(x, p):
calc_pi = t1.to_model(xi)
self.assertAlmostEqual(calc_pi, pi)
# Test n_parameters
self.assertEqual(t1.n_parameters(), 1)
self.assertEqual(t4.n_parameters(), 4)
# Test Jacobian
# Test log-Jacobian determinant
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add first simple transformation tests<commit_after>#!/usr/bin/env python3
#
# Tests Transform functions in Pints
#
# This file is part of PINTS (https://github.com/pints-team/pints/) which is
# released under the BSD 3-clause license. See accompanying LICENSE.md for
# copyright notice and full license details.
#
from __future__ import division
import unittest
import pints
import numpy as np
class TestTransform(unittest.TestCase):
def test_log_transform(self):
# Test input parameters
t1 = pints.LogTransform(1)
t4 = pints.LogTransform(4)
p = [0.1, 1., 10., 999.]
x = [-2.3025850929940455, 0., 2.3025850929940459, 6.9067547786485539]
# Test forward transform
for xi, pi in zip(x, p):
calc_xi = t1.to_search(pi)
self.assertAlmostEqual(calc_xi, xi)
# Test inverse transform
for xi, pi in zip(x, p):
calc_pi = t1.to_model(xi)
self.assertAlmostEqual(calc_pi, pi)
# Test n_parameters
self.assertEqual(t1.n_parameters(), 1)
self.assertEqual(t4.n_parameters(), 4)
# Test Jacobian
# Test log-Jacobian determinant
if __name__ == '__main__':
unittest.main()
|
|
1bab2385382e188c332320b77b86cdf5ac214802
|
allegedb/allegedb/tests/test_window.py
|
allegedb/allegedb/tests/test_window.py
|
from allegedb.cache import WindowDict
from itertools import cycle
testvs = ['a', 99, ['spam', 'eggs', 'ham'], {'foo': 'bar', 0: 1, '💧': '🔑'}]
testdata = []
for k, v in zip(range(100), cycle(testvs)):
testdata.append((k, v))
windd = WindowDict(testdata)
assert list(range(100)) == list(windd.keys())
for item in testdata:
assert item in windd.items()
assert list(reversed(range(100))) == list(windd.past())
assert [] == list(windd.future())
windd.seek(-1)
assert list(range(100)) == list(windd.future())
for item in testdata:
assert item in windd.future().items()
windd.seek(50)
unseen = []
seen = []
assert list(reversed(range(51))) == list(windd.past())
for item in testdata:
if item not in windd.past().items():
unseen.append(item)
else:
seen.append(item)
assert list(range(51, 100)) == list(windd.future())
for item in unseen:
assert item in windd.future().items()
for item in windd.future().items():
assert item in unseen
for item in seen:
assert item in windd.past().items()
for item in windd.past().items():
assert item in seen
|
Add a new test for WindowDict
|
Add a new test for WindowDict
|
Python
|
agpl-3.0
|
LogicalDash/LiSE,LogicalDash/LiSE
|
Add a new test for WindowDict
|
from allegedb.cache import WindowDict
from itertools import cycle
testvs = ['a', 99, ['spam', 'eggs', 'ham'], {'foo': 'bar', 0: 1, '💧': '🔑'}]
testdata = []
for k, v in zip(range(100), cycle(testvs)):
testdata.append((k, v))
windd = WindowDict(testdata)
assert list(range(100)) == list(windd.keys())
for item in testdata:
assert item in windd.items()
assert list(reversed(range(100))) == list(windd.past())
assert [] == list(windd.future())
windd.seek(-1)
assert list(range(100)) == list(windd.future())
for item in testdata:
assert item in windd.future().items()
windd.seek(50)
unseen = []
seen = []
assert list(reversed(range(51))) == list(windd.past())
for item in testdata:
if item not in windd.past().items():
unseen.append(item)
else:
seen.append(item)
assert list(range(51, 100)) == list(windd.future())
for item in unseen:
assert item in windd.future().items()
for item in windd.future().items():
assert item in unseen
for item in seen:
assert item in windd.past().items()
for item in windd.past().items():
assert item in seen
|
<commit_before><commit_msg>Add a new test for WindowDict<commit_after>
|
from allegedb.cache import WindowDict
from itertools import cycle
testvs = ['a', 99, ['spam', 'eggs', 'ham'], {'foo': 'bar', 0: 1, '💧': '🔑'}]
testdata = []
for k, v in zip(range(100), cycle(testvs)):
testdata.append((k, v))
windd = WindowDict(testdata)
assert list(range(100)) == list(windd.keys())
for item in testdata:
assert item in windd.items()
assert list(reversed(range(100))) == list(windd.past())
assert [] == list(windd.future())
windd.seek(-1)
assert list(range(100)) == list(windd.future())
for item in testdata:
assert item in windd.future().items()
windd.seek(50)
unseen = []
seen = []
assert list(reversed(range(51))) == list(windd.past())
for item in testdata:
if item not in windd.past().items():
unseen.append(item)
else:
seen.append(item)
assert list(range(51, 100)) == list(windd.future())
for item in unseen:
assert item in windd.future().items()
for item in windd.future().items():
assert item in unseen
for item in seen:
assert item in windd.past().items()
for item in windd.past().items():
assert item in seen
|
Add a new test for WindowDictfrom allegedb.cache import WindowDict
from itertools import cycle
testvs = ['a', 99, ['spam', 'eggs', 'ham'], {'foo': 'bar', 0: 1, '💧': '🔑'}]
testdata = []
for k, v in zip(range(100), cycle(testvs)):
testdata.append((k, v))
windd = WindowDict(testdata)
assert list(range(100)) == list(windd.keys())
for item in testdata:
assert item in windd.items()
assert list(reversed(range(100))) == list(windd.past())
assert [] == list(windd.future())
windd.seek(-1)
assert list(range(100)) == list(windd.future())
for item in testdata:
assert item in windd.future().items()
windd.seek(50)
unseen = []
seen = []
assert list(reversed(range(51))) == list(windd.past())
for item in testdata:
if item not in windd.past().items():
unseen.append(item)
else:
seen.append(item)
assert list(range(51, 100)) == list(windd.future())
for item in unseen:
assert item in windd.future().items()
for item in windd.future().items():
assert item in unseen
for item in seen:
assert item in windd.past().items()
for item in windd.past().items():
assert item in seen
|
<commit_before><commit_msg>Add a new test for WindowDict<commit_after>from allegedb.cache import WindowDict
from itertools import cycle
testvs = ['a', 99, ['spam', 'eggs', 'ham'], {'foo': 'bar', 0: 1, '💧': '🔑'}]
testdata = []
for k, v in zip(range(100), cycle(testvs)):
testdata.append((k, v))
windd = WindowDict(testdata)
assert list(range(100)) == list(windd.keys())
for item in testdata:
assert item in windd.items()
assert list(reversed(range(100))) == list(windd.past())
assert [] == list(windd.future())
windd.seek(-1)
assert list(range(100)) == list(windd.future())
for item in testdata:
assert item in windd.future().items()
windd.seek(50)
unseen = []
seen = []
assert list(reversed(range(51))) == list(windd.past())
for item in testdata:
if item not in windd.past().items():
unseen.append(item)
else:
seen.append(item)
assert list(range(51, 100)) == list(windd.future())
for item in unseen:
assert item in windd.future().items()
for item in windd.future().items():
assert item in unseen
for item in seen:
assert item in windd.past().items()
for item in windd.past().items():
assert item in seen
|
|
2a8fe5b8293dc68628479f1223e2e564d7757b87
|
plyer/platforms/android/storagepath.py
|
plyer/platforms/android/storagepath.py
|
'''
Android Storage Path
--------------------
'''
from plyer.facades import StoragePath
from jnius import autoclass
from android import mActivity
Environment = autoclass('android.os.Environment')
Context = autoclass('android.content.Context')
class AndroidStoragePath(StoragePath):
def _get_home_dir(self):
return Environment.getDataDirectory().getAbsolutePath()
def _get_external_storage_dir(self):
return Environment.getExternalStorageDirectory().getAbsolutePath()
def _get_root_dir(self):
return Environment.getRootDirectory().getAbsolutePath()
def _get_documents_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
def _get_downloads_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_DOWNLOADS).getAbsolutePath()
def _get_movies_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_MOVIES).getAbsolutePath()
def _get_music_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_MUSIC).getAbsolutePath()
def _get_pictures_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES).getAbsolutePath()
def _get_application_dir(self):
return mActivity.getFilesDir().getParentFile().getParent()
def instance():
return AndroidStoragePath()
|
Add android implementation of storage path
|
Add android implementation of storage path
|
Python
|
mit
|
kivy/plyer,kivy/plyer,KeyWeeUsr/plyer,KeyWeeUsr/plyer,kivy/plyer,KeyWeeUsr/plyer
|
Add android implementation of storage path
|
'''
Android Storage Path
--------------------
'''
from plyer.facades import StoragePath
from jnius import autoclass
from android import mActivity
Environment = autoclass('android.os.Environment')
Context = autoclass('android.content.Context')
class AndroidStoragePath(StoragePath):
def _get_home_dir(self):
return Environment.getDataDirectory().getAbsolutePath()
def _get_external_storage_dir(self):
return Environment.getExternalStorageDirectory().getAbsolutePath()
def _get_root_dir(self):
return Environment.getRootDirectory().getAbsolutePath()
def _get_documents_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
def _get_downloads_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_DOWNLOADS).getAbsolutePath()
def _get_movies_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_MOVIES).getAbsolutePath()
def _get_music_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_MUSIC).getAbsolutePath()
def _get_pictures_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES).getAbsolutePath()
def _get_application_dir(self):
return mActivity.getFilesDir().getParentFile().getParent()
def instance():
return AndroidStoragePath()
|
<commit_before><commit_msg>Add android implementation of storage path<commit_after>
|
'''
Android Storage Path
--------------------
'''
from plyer.facades import StoragePath
from jnius import autoclass
from android import mActivity
Environment = autoclass('android.os.Environment')
Context = autoclass('android.content.Context')
class AndroidStoragePath(StoragePath):
def _get_home_dir(self):
return Environment.getDataDirectory().getAbsolutePath()
def _get_external_storage_dir(self):
return Environment.getExternalStorageDirectory().getAbsolutePath()
def _get_root_dir(self):
return Environment.getRootDirectory().getAbsolutePath()
def _get_documents_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
def _get_downloads_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_DOWNLOADS).getAbsolutePath()
def _get_movies_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_MOVIES).getAbsolutePath()
def _get_music_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_MUSIC).getAbsolutePath()
def _get_pictures_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES).getAbsolutePath()
def _get_application_dir(self):
return mActivity.getFilesDir().getParentFile().getParent()
def instance():
return AndroidStoragePath()
|
Add android implementation of storage path'''
Android Storage Path
--------------------
'''
from plyer.facades import StoragePath
from jnius import autoclass
from android import mActivity
Environment = autoclass('android.os.Environment')
Context = autoclass('android.content.Context')
class AndroidStoragePath(StoragePath):
def _get_home_dir(self):
return Environment.getDataDirectory().getAbsolutePath()
def _get_external_storage_dir(self):
return Environment.getExternalStorageDirectory().getAbsolutePath()
def _get_root_dir(self):
return Environment.getRootDirectory().getAbsolutePath()
def _get_documents_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
def _get_downloads_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_DOWNLOADS).getAbsolutePath()
def _get_movies_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_MOVIES).getAbsolutePath()
def _get_music_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_MUSIC).getAbsolutePath()
def _get_pictures_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES).getAbsolutePath()
def _get_application_dir(self):
return mActivity.getFilesDir().getParentFile().getParent()
def instance():
return AndroidStoragePath()
|
<commit_before><commit_msg>Add android implementation of storage path<commit_after>'''
Android Storage Path
--------------------
'''
from plyer.facades import StoragePath
from jnius import autoclass
from android import mActivity
Environment = autoclass('android.os.Environment')
Context = autoclass('android.content.Context')
class AndroidStoragePath(StoragePath):
def _get_home_dir(self):
return Environment.getDataDirectory().getAbsolutePath()
def _get_external_storage_dir(self):
return Environment.getExternalStorageDirectory().getAbsolutePath()
def _get_root_dir(self):
return Environment.getRootDirectory().getAbsolutePath()
def _get_documents_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
def _get_downloads_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_DOWNLOADS).getAbsolutePath()
def _get_movies_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_MOVIES).getAbsolutePath()
def _get_music_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_MUSIC).getAbsolutePath()
def _get_pictures_dir(self):
return Environment.getExternalStoragePublicDirectory(
Environment.DIRECTORY_PICTURES).getAbsolutePath()
def _get_application_dir(self):
return mActivity.getFilesDir().getParentFile().getParent()
def instance():
return AndroidStoragePath()
|
|
c97a0c3d5d5b5354cb4e5f6eb0e134eab89edc85
|
pylab/website/tests/test_about_page.py
|
pylab/website/tests/test_about_page.py
|
import datetime
from django_webtest import WebTest
from django.contrib.auth.models import User
from pylab.core.models import Event
class AboutPageTests(WebTest):
def setUp(self):
self.user = User.objects.create(username='u1')
def test_no_events_on_about_page(self):
resp = self.app.get('/about/')
self.assertEqual(resp.status_int, 200)
self.assertTrue(b'No events yet.' in resp.content)
def test_event_list_on_about_page(self):
Event.objects.create(
author=self.user,
starts=datetime.datetime(2015, 9, 3),
ends=datetime.datetime(2015, 9, 3),
title='Test title',
osm_map_link='http://openstreetmap.org/',
description='Test description',
)
resp = self.app.get('/about/')
self.assertEqual(resp.status_int, 200)
self.assertTrue(b'Test title' in resp.content)
|
Add tests for about page.
|
Add tests for about page.
|
Python
|
agpl-3.0
|
python-dirbtuves/website,python-dirbtuves/website,python-dirbtuves/website
|
Add tests for about page.
|
import datetime
from django_webtest import WebTest
from django.contrib.auth.models import User
from pylab.core.models import Event
class AboutPageTests(WebTest):
def setUp(self):
self.user = User.objects.create(username='u1')
def test_no_events_on_about_page(self):
resp = self.app.get('/about/')
self.assertEqual(resp.status_int, 200)
self.assertTrue(b'No events yet.' in resp.content)
def test_event_list_on_about_page(self):
Event.objects.create(
author=self.user,
starts=datetime.datetime(2015, 9, 3),
ends=datetime.datetime(2015, 9, 3),
title='Test title',
osm_map_link='http://openstreetmap.org/',
description='Test description',
)
resp = self.app.get('/about/')
self.assertEqual(resp.status_int, 200)
self.assertTrue(b'Test title' in resp.content)
|
<commit_before><commit_msg>Add tests for about page.<commit_after>
|
import datetime
from django_webtest import WebTest
from django.contrib.auth.models import User
from pylab.core.models import Event
class AboutPageTests(WebTest):
def setUp(self):
self.user = User.objects.create(username='u1')
def test_no_events_on_about_page(self):
resp = self.app.get('/about/')
self.assertEqual(resp.status_int, 200)
self.assertTrue(b'No events yet.' in resp.content)
def test_event_list_on_about_page(self):
Event.objects.create(
author=self.user,
starts=datetime.datetime(2015, 9, 3),
ends=datetime.datetime(2015, 9, 3),
title='Test title',
osm_map_link='http://openstreetmap.org/',
description='Test description',
)
resp = self.app.get('/about/')
self.assertEqual(resp.status_int, 200)
self.assertTrue(b'Test title' in resp.content)
|
Add tests for about page.import datetime
from django_webtest import WebTest
from django.contrib.auth.models import User
from pylab.core.models import Event
class AboutPageTests(WebTest):
def setUp(self):
self.user = User.objects.create(username='u1')
def test_no_events_on_about_page(self):
resp = self.app.get('/about/')
self.assertEqual(resp.status_int, 200)
self.assertTrue(b'No events yet.' in resp.content)
def test_event_list_on_about_page(self):
Event.objects.create(
author=self.user,
starts=datetime.datetime(2015, 9, 3),
ends=datetime.datetime(2015, 9, 3),
title='Test title',
osm_map_link='http://openstreetmap.org/',
description='Test description',
)
resp = self.app.get('/about/')
self.assertEqual(resp.status_int, 200)
self.assertTrue(b'Test title' in resp.content)
|
<commit_before><commit_msg>Add tests for about page.<commit_after>import datetime
from django_webtest import WebTest
from django.contrib.auth.models import User
from pylab.core.models import Event
class AboutPageTests(WebTest):
def setUp(self):
self.user = User.objects.create(username='u1')
def test_no_events_on_about_page(self):
resp = self.app.get('/about/')
self.assertEqual(resp.status_int, 200)
self.assertTrue(b'No events yet.' in resp.content)
def test_event_list_on_about_page(self):
Event.objects.create(
author=self.user,
starts=datetime.datetime(2015, 9, 3),
ends=datetime.datetime(2015, 9, 3),
title='Test title',
osm_map_link='http://openstreetmap.org/',
description='Test description',
)
resp = self.app.get('/about/')
self.assertEqual(resp.status_int, 200)
self.assertTrue(b'Test title' in resp.content)
|
|
a8c5aa8dba3a9c45ae5933ee53f895f0fa1b2c47
|
examples/crab_client.py
|
examples/crab_client.py
|
# -*- coding: utf-8 -*-
'''
This script prints all available information on the CRAB webservice.
'''
from crabpy.client import crab_factory
crab = crab_factory()
print crab
|
Add example that prints info on the CRAB service.
|
Add example that prints info on the CRAB service.
|
Python
|
mit
|
OnroerendErfgoed/crabpy
|
Add example that prints info on the CRAB service.
|
# -*- coding: utf-8 -*-
'''
This script prints all available information on the CRAB webservice.
'''
from crabpy.client import crab_factory
crab = crab_factory()
print crab
|
<commit_before><commit_msg>Add example that prints info on the CRAB service.<commit_after>
|
# -*- coding: utf-8 -*-
'''
This script prints all available information on the CRAB webservice.
'''
from crabpy.client import crab_factory
crab = crab_factory()
print crab
|
Add example that prints info on the CRAB service.# -*- coding: utf-8 -*-
'''
This script prints all available information on the CRAB webservice.
'''
from crabpy.client import crab_factory
crab = crab_factory()
print crab
|
<commit_before><commit_msg>Add example that prints info on the CRAB service.<commit_after># -*- coding: utf-8 -*-
'''
This script prints all available information on the CRAB webservice.
'''
from crabpy.client import crab_factory
crab = crab_factory()
print crab
|
|
59e6ad7ce222ad04ba2da64aa6fa20ba0133fac9
|
cg-static-condensation/profile_cgsc.py
|
cg-static-condensation/profile_cgsc.py
|
from firedrake import *
import sys
parameters["pyop2_options"]["lazy_evaluation"] = False
def is_intstring(s):
try:
int(s)
return True
except ValueError:
return False
# NOTE: ksp_monitor is on to monitor convergence of the
# preconditioned (AMG) Krylov method
if '--scpc' in sys.argv:
parameters = {'mat_type': 'matfree',
'ksp_type': 'preonly',
'pc_type': 'python',
'pc_python_type': 'firedrake.StaticCondensationPC',
'static_condensation': {'ksp_type': 'cg',
'pc_type': 'hypre',
'pc_hypre_type': 'boomeramg',
'pc_hypre_boomeramg_P_max': 4,
'ksp_monitor': True,
'ksp_rtol': 1e-10}}
else:
parameters = {'ksp_type': 'cg',
'pc_type': 'hypre',
'pc_hypre_type': 'boomeramg',
'pc_hypre_boomeramg_P_max': 4,
'ksp_monitor': True,
'ksp_rtol': 1e-10}
# Set up unit cube mesh with h = (1/2**r) in all spatial
# directions
if is_intstring(sys.argv[1]):
r = int(sys.argv[1])
else:
r = 3
print("Resolution parameter is: %d" % r)
mesh = UnitCubeMesh(2 ** r, 2 ** r, 2 ** r)
x = SpatialCoordinate(mesh)
# Set up H1 function space and test/trial functions
d = 4
V = FunctionSpace(mesh, "CG", degree=d)
u = TrialFunction(V)
v = TestFunction(V)
f = Function(FunctionSpace(mesh, "CG", degree=d+1))
f.interpolate((1 + 108*pi*pi)*cos(6*pi*x[0])*cos(6*pi*x[1])*cos(6*pi*x[2]))
# Set a(u, v) = L(v)
# NOTE: This problem has homogeneous Neumann conditions
# applied weakly on all sides of the cube
a = inner(grad(u), grad(v))*dx + u*v*dx
L = f*v*dx
Uh = Function(V, name="Approximate Solution")
solve(a == L, Uh, solver_parameters=parameters)
# Compare with exact solution
V_a = FunctionSpace(mesh, "CG", d + 2)
exact = Function(V_a, name="Exact Solution")
exact.interpolate(cos(6*pi*x[0])*cos(6*pi*x[1])*cos(6*pi*x[2]))
error = errornorm(Uh, exact)
print("Error between computed solution and exact: %0.8f" % error)
# Write output file
File("SCPC-3DHelmholtz-r%d.pvd" % r).write(Uh, exact)
|
Add profile script for comparisons
|
Add profile script for comparisons
|
Python
|
mit
|
thomasgibson/tabula-rasa
|
Add profile script for comparisons
|
from firedrake import *
import sys
parameters["pyop2_options"]["lazy_evaluation"] = False
def is_intstring(s):
try:
int(s)
return True
except ValueError:
return False
# NOTE: ksp_monitor is on to monitor convergence of the
# preconditioned (AMG) Krylov method
if '--scpc' in sys.argv:
parameters = {'mat_type': 'matfree',
'ksp_type': 'preonly',
'pc_type': 'python',
'pc_python_type': 'firedrake.StaticCondensationPC',
'static_condensation': {'ksp_type': 'cg',
'pc_type': 'hypre',
'pc_hypre_type': 'boomeramg',
'pc_hypre_boomeramg_P_max': 4,
'ksp_monitor': True,
'ksp_rtol': 1e-10}}
else:
parameters = {'ksp_type': 'cg',
'pc_type': 'hypre',
'pc_hypre_type': 'boomeramg',
'pc_hypre_boomeramg_P_max': 4,
'ksp_monitor': True,
'ksp_rtol': 1e-10}
# Set up unit cube mesh with h = (1/2**r) in all spatial
# directions
if is_intstring(sys.argv[1]):
r = int(sys.argv[1])
else:
r = 3
print("Resolution parameter is: %d" % r)
mesh = UnitCubeMesh(2 ** r, 2 ** r, 2 ** r)
x = SpatialCoordinate(mesh)
# Set up H1 function space and test/trial functions
d = 4
V = FunctionSpace(mesh, "CG", degree=d)
u = TrialFunction(V)
v = TestFunction(V)
f = Function(FunctionSpace(mesh, "CG", degree=d+1))
f.interpolate((1 + 108*pi*pi)*cos(6*pi*x[0])*cos(6*pi*x[1])*cos(6*pi*x[2]))
# Set a(u, v) = L(v)
# NOTE: This problem has homogeneous Neumann conditions
# applied weakly on all sides of the cube
a = inner(grad(u), grad(v))*dx + u*v*dx
L = f*v*dx
Uh = Function(V, name="Approximate Solution")
solve(a == L, Uh, solver_parameters=parameters)
# Compare with exact solution
V_a = FunctionSpace(mesh, "CG", d + 2)
exact = Function(V_a, name="Exact Solution")
exact.interpolate(cos(6*pi*x[0])*cos(6*pi*x[1])*cos(6*pi*x[2]))
error = errornorm(Uh, exact)
print("Error between computed solution and exact: %0.8f" % error)
# Write output file
File("SCPC-3DHelmholtz-r%d.pvd" % r).write(Uh, exact)
|
<commit_before><commit_msg>Add profile script for comparisons<commit_after>
|
from firedrake import *
import sys
parameters["pyop2_options"]["lazy_evaluation"] = False
def is_intstring(s):
try:
int(s)
return True
except ValueError:
return False
# NOTE: ksp_monitor is on to monitor convergence of the
# preconditioned (AMG) Krylov method
if '--scpc' in sys.argv:
parameters = {'mat_type': 'matfree',
'ksp_type': 'preonly',
'pc_type': 'python',
'pc_python_type': 'firedrake.StaticCondensationPC',
'static_condensation': {'ksp_type': 'cg',
'pc_type': 'hypre',
'pc_hypre_type': 'boomeramg',
'pc_hypre_boomeramg_P_max': 4,
'ksp_monitor': True,
'ksp_rtol': 1e-10}}
else:
parameters = {'ksp_type': 'cg',
'pc_type': 'hypre',
'pc_hypre_type': 'boomeramg',
'pc_hypre_boomeramg_P_max': 4,
'ksp_monitor': True,
'ksp_rtol': 1e-10}
# Set up unit cube mesh with h = (1/2**r) in all spatial
# directions
if is_intstring(sys.argv[1]):
r = int(sys.argv[1])
else:
r = 3
print("Resolution parameter is: %d" % r)
mesh = UnitCubeMesh(2 ** r, 2 ** r, 2 ** r)
x = SpatialCoordinate(mesh)
# Set up H1 function space and test/trial functions
d = 4
V = FunctionSpace(mesh, "CG", degree=d)
u = TrialFunction(V)
v = TestFunction(V)
f = Function(FunctionSpace(mesh, "CG", degree=d+1))
f.interpolate((1 + 108*pi*pi)*cos(6*pi*x[0])*cos(6*pi*x[1])*cos(6*pi*x[2]))
# Set a(u, v) = L(v)
# NOTE: This problem has homogeneous Neumann conditions
# applied weakly on all sides of the cube
a = inner(grad(u), grad(v))*dx + u*v*dx
L = f*v*dx
Uh = Function(V, name="Approximate Solution")
solve(a == L, Uh, solver_parameters=parameters)
# Compare with exact solution
V_a = FunctionSpace(mesh, "CG", d + 2)
exact = Function(V_a, name="Exact Solution")
exact.interpolate(cos(6*pi*x[0])*cos(6*pi*x[1])*cos(6*pi*x[2]))
error = errornorm(Uh, exact)
print("Error between computed solution and exact: %0.8f" % error)
# Write output file
File("SCPC-3DHelmholtz-r%d.pvd" % r).write(Uh, exact)
|
Add profile script for comparisonsfrom firedrake import *
import sys
parameters["pyop2_options"]["lazy_evaluation"] = False
def is_intstring(s):
try:
int(s)
return True
except ValueError:
return False
# NOTE: ksp_monitor is on to monitor convergence of the
# preconditioned (AMG) Krylov method
if '--scpc' in sys.argv:
parameters = {'mat_type': 'matfree',
'ksp_type': 'preonly',
'pc_type': 'python',
'pc_python_type': 'firedrake.StaticCondensationPC',
'static_condensation': {'ksp_type': 'cg',
'pc_type': 'hypre',
'pc_hypre_type': 'boomeramg',
'pc_hypre_boomeramg_P_max': 4,
'ksp_monitor': True,
'ksp_rtol': 1e-10}}
else:
parameters = {'ksp_type': 'cg',
'pc_type': 'hypre',
'pc_hypre_type': 'boomeramg',
'pc_hypre_boomeramg_P_max': 4,
'ksp_monitor': True,
'ksp_rtol': 1e-10}
# Set up unit cube mesh with h = (1/2**r) in all spatial
# directions
if is_intstring(sys.argv[1]):
r = int(sys.argv[1])
else:
r = 3
print("Resolution parameter is: %d" % r)
mesh = UnitCubeMesh(2 ** r, 2 ** r, 2 ** r)
x = SpatialCoordinate(mesh)
# Set up H1 function space and test/trial functions
d = 4
V = FunctionSpace(mesh, "CG", degree=d)
u = TrialFunction(V)
v = TestFunction(V)
f = Function(FunctionSpace(mesh, "CG", degree=d+1))
f.interpolate((1 + 108*pi*pi)*cos(6*pi*x[0])*cos(6*pi*x[1])*cos(6*pi*x[2]))
# Set a(u, v) = L(v)
# NOTE: This problem has homogeneous Neumann conditions
# applied weakly on all sides of the cube
a = inner(grad(u), grad(v))*dx + u*v*dx
L = f*v*dx
Uh = Function(V, name="Approximate Solution")
solve(a == L, Uh, solver_parameters=parameters)
# Compare with exact solution
V_a = FunctionSpace(mesh, "CG", d + 2)
exact = Function(V_a, name="Exact Solution")
exact.interpolate(cos(6*pi*x[0])*cos(6*pi*x[1])*cos(6*pi*x[2]))
error = errornorm(Uh, exact)
print("Error between computed solution and exact: %0.8f" % error)
# Write output file
File("SCPC-3DHelmholtz-r%d.pvd" % r).write(Uh, exact)
|
<commit_before><commit_msg>Add profile script for comparisons<commit_after>from firedrake import *
import sys
parameters["pyop2_options"]["lazy_evaluation"] = False
def is_intstring(s):
try:
int(s)
return True
except ValueError:
return False
# NOTE: ksp_monitor is on to monitor convergence of the
# preconditioned (AMG) Krylov method
if '--scpc' in sys.argv:
parameters = {'mat_type': 'matfree',
'ksp_type': 'preonly',
'pc_type': 'python',
'pc_python_type': 'firedrake.StaticCondensationPC',
'static_condensation': {'ksp_type': 'cg',
'pc_type': 'hypre',
'pc_hypre_type': 'boomeramg',
'pc_hypre_boomeramg_P_max': 4,
'ksp_monitor': True,
'ksp_rtol': 1e-10}}
else:
parameters = {'ksp_type': 'cg',
'pc_type': 'hypre',
'pc_hypre_type': 'boomeramg',
'pc_hypre_boomeramg_P_max': 4,
'ksp_monitor': True,
'ksp_rtol': 1e-10}
# Set up unit cube mesh with h = (1/2**r) in all spatial
# directions
if is_intstring(sys.argv[1]):
r = int(sys.argv[1])
else:
r = 3
print("Resolution parameter is: %d" % r)
mesh = UnitCubeMesh(2 ** r, 2 ** r, 2 ** r)
x = SpatialCoordinate(mesh)
# Set up H1 function space and test/trial functions
d = 4
V = FunctionSpace(mesh, "CG", degree=d)
u = TrialFunction(V)
v = TestFunction(V)
f = Function(FunctionSpace(mesh, "CG", degree=d+1))
f.interpolate((1 + 108*pi*pi)*cos(6*pi*x[0])*cos(6*pi*x[1])*cos(6*pi*x[2]))
# Set a(u, v) = L(v)
# NOTE: This problem has homogeneous Neumann conditions
# applied weakly on all sides of the cube
a = inner(grad(u), grad(v))*dx + u*v*dx
L = f*v*dx
Uh = Function(V, name="Approximate Solution")
solve(a == L, Uh, solver_parameters=parameters)
# Compare with exact solution
V_a = FunctionSpace(mesh, "CG", d + 2)
exact = Function(V_a, name="Exact Solution")
exact.interpolate(cos(6*pi*x[0])*cos(6*pi*x[1])*cos(6*pi*x[2]))
error = errornorm(Uh, exact)
print("Error between computed solution and exact: %0.8f" % error)
# Write output file
File("SCPC-3DHelmholtz-r%d.pvd" % r).write(Uh, exact)
|
|
6db2328cee6d26d0db7a6abf4a58adb40b583799
|
sym/tests/test_cse.py
|
sym/tests/test_cse.py
|
from .. import Backend
import pytest
backends = []
for bk in Backend.backends.keys():
try:
_be = Backend(bk)
except ImportError:
continue
_x = _be.Symbol('x')
try:
_be.cse([_x])
except:
continue
backends.append(bk)
def _inverse_cse(subs_cses, cse_exprs):
subs = dict(subs_cses)
return [expr.subs(subs) for expr in cse_exprs]
@pytest.mark.parametrize('key', backends)
def test_basic_cse(key):
be = Backend(key)
x, y = map(be.Symbol, "xy")
exprs = [x**2 + y**2 + 3, be.exp(x**2 + y**2)]
subs_cses, cse_exprs = be.cse(exprs)
subs, cses = zip(*subs_cses)
assert cses[0] == x**2 + y**2
for cse_expr in cse_exprs:
assert x not in cse_expr.atoms()
assert y not in cse_expr.atoms()
assert _inverse_cse(subs_cses, cse_exprs) == exprs
@pytest.mark.parametrize('key', backends)
def test_moot_cse(key):
be = Backend(key)
x, y = map(be.Symbol, "xy")
exprs = [x**2 + y**2, y]
subs_cses, cse_exprs = be.cse(exprs)
assert not subs_cses
assert _inverse_cse(subs_cses, cse_exprs) == exprs
@pytest.mark.parametrize('key', backends)
def test_cse_with_symbols(key):
be = Backend(key)
x = be.Symbol('x')
exprs = [x**2, 1/(1 + x**2), be.log(x + 2), be.exp(x + 2)]
subs_cses, cse_exprs = be.cse(exprs, symbols=be.numbered_symbols('y'))
subs, cses = zip(*subs_cses)
assert subs[0] == be.Symbol('y0')
assert subs[1] == be.Symbol('y1')
assert _inverse_cse(subs_cses, cse_exprs) == exprs
@pytest.mark.parametrize('key', backends)
def test_cse_with_symbols_overlap(key):
be = Backend(key)
x0, x1, y = map(be.Symbol, "x0 x1 y".split())
exprs = [x0**2, x0**2 + be.exp(y)**2 + 3, x1 * be.exp(y), be.sin(x1 * be.exp(y) + 1)]
subs_cses, cse_exprs = be.cse(exprs)
assert _inverse_cse(subs_cses, cse_exprs) == exprs
|
Add tests for cse for all supporting backends
|
Add tests for cse for all supporting backends
|
Python
|
bsd-2-clause
|
bjodah/sym,bjodah/sym
|
Add tests for cse for all supporting backends
|
from .. import Backend
import pytest
backends = []
for bk in Backend.backends.keys():
try:
_be = Backend(bk)
except ImportError:
continue
_x = _be.Symbol('x')
try:
_be.cse([_x])
except:
continue
backends.append(bk)
def _inverse_cse(subs_cses, cse_exprs):
subs = dict(subs_cses)
return [expr.subs(subs) for expr in cse_exprs]
@pytest.mark.parametrize('key', backends)
def test_basic_cse(key):
be = Backend(key)
x, y = map(be.Symbol, "xy")
exprs = [x**2 + y**2 + 3, be.exp(x**2 + y**2)]
subs_cses, cse_exprs = be.cse(exprs)
subs, cses = zip(*subs_cses)
assert cses[0] == x**2 + y**2
for cse_expr in cse_exprs:
assert x not in cse_expr.atoms()
assert y not in cse_expr.atoms()
assert _inverse_cse(subs_cses, cse_exprs) == exprs
@pytest.mark.parametrize('key', backends)
def test_moot_cse(key):
be = Backend(key)
x, y = map(be.Symbol, "xy")
exprs = [x**2 + y**2, y]
subs_cses, cse_exprs = be.cse(exprs)
assert not subs_cses
assert _inverse_cse(subs_cses, cse_exprs) == exprs
@pytest.mark.parametrize('key', backends)
def test_cse_with_symbols(key):
be = Backend(key)
x = be.Symbol('x')
exprs = [x**2, 1/(1 + x**2), be.log(x + 2), be.exp(x + 2)]
subs_cses, cse_exprs = be.cse(exprs, symbols=be.numbered_symbols('y'))
subs, cses = zip(*subs_cses)
assert subs[0] == be.Symbol('y0')
assert subs[1] == be.Symbol('y1')
assert _inverse_cse(subs_cses, cse_exprs) == exprs
@pytest.mark.parametrize('key', backends)
def test_cse_with_symbols_overlap(key):
be = Backend(key)
x0, x1, y = map(be.Symbol, "x0 x1 y".split())
exprs = [x0**2, x0**2 + be.exp(y)**2 + 3, x1 * be.exp(y), be.sin(x1 * be.exp(y) + 1)]
subs_cses, cse_exprs = be.cse(exprs)
assert _inverse_cse(subs_cses, cse_exprs) == exprs
|
<commit_before><commit_msg>Add tests for cse for all supporting backends<commit_after>
|
from .. import Backend
import pytest
backends = []
for bk in Backend.backends.keys():
try:
_be = Backend(bk)
except ImportError:
continue
_x = _be.Symbol('x')
try:
_be.cse([_x])
except:
continue
backends.append(bk)
def _inverse_cse(subs_cses, cse_exprs):
subs = dict(subs_cses)
return [expr.subs(subs) for expr in cse_exprs]
@pytest.mark.parametrize('key', backends)
def test_basic_cse(key):
be = Backend(key)
x, y = map(be.Symbol, "xy")
exprs = [x**2 + y**2 + 3, be.exp(x**2 + y**2)]
subs_cses, cse_exprs = be.cse(exprs)
subs, cses = zip(*subs_cses)
assert cses[0] == x**2 + y**2
for cse_expr in cse_exprs:
assert x not in cse_expr.atoms()
assert y not in cse_expr.atoms()
assert _inverse_cse(subs_cses, cse_exprs) == exprs
@pytest.mark.parametrize('key', backends)
def test_moot_cse(key):
be = Backend(key)
x, y = map(be.Symbol, "xy")
exprs = [x**2 + y**2, y]
subs_cses, cse_exprs = be.cse(exprs)
assert not subs_cses
assert _inverse_cse(subs_cses, cse_exprs) == exprs
@pytest.mark.parametrize('key', backends)
def test_cse_with_symbols(key):
be = Backend(key)
x = be.Symbol('x')
exprs = [x**2, 1/(1 + x**2), be.log(x + 2), be.exp(x + 2)]
subs_cses, cse_exprs = be.cse(exprs, symbols=be.numbered_symbols('y'))
subs, cses = zip(*subs_cses)
assert subs[0] == be.Symbol('y0')
assert subs[1] == be.Symbol('y1')
assert _inverse_cse(subs_cses, cse_exprs) == exprs
@pytest.mark.parametrize('key', backends)
def test_cse_with_symbols_overlap(key):
be = Backend(key)
x0, x1, y = map(be.Symbol, "x0 x1 y".split())
exprs = [x0**2, x0**2 + be.exp(y)**2 + 3, x1 * be.exp(y), be.sin(x1 * be.exp(y) + 1)]
subs_cses, cse_exprs = be.cse(exprs)
assert _inverse_cse(subs_cses, cse_exprs) == exprs
|
Add tests for cse for all supporting backendsfrom .. import Backend
import pytest
backends = []
for bk in Backend.backends.keys():
try:
_be = Backend(bk)
except ImportError:
continue
_x = _be.Symbol('x')
try:
_be.cse([_x])
except:
continue
backends.append(bk)
def _inverse_cse(subs_cses, cse_exprs):
subs = dict(subs_cses)
return [expr.subs(subs) for expr in cse_exprs]
@pytest.mark.parametrize('key', backends)
def test_basic_cse(key):
be = Backend(key)
x, y = map(be.Symbol, "xy")
exprs = [x**2 + y**2 + 3, be.exp(x**2 + y**2)]
subs_cses, cse_exprs = be.cse(exprs)
subs, cses = zip(*subs_cses)
assert cses[0] == x**2 + y**2
for cse_expr in cse_exprs:
assert x not in cse_expr.atoms()
assert y not in cse_expr.atoms()
assert _inverse_cse(subs_cses, cse_exprs) == exprs
@pytest.mark.parametrize('key', backends)
def test_moot_cse(key):
be = Backend(key)
x, y = map(be.Symbol, "xy")
exprs = [x**2 + y**2, y]
subs_cses, cse_exprs = be.cse(exprs)
assert not subs_cses
assert _inverse_cse(subs_cses, cse_exprs) == exprs
@pytest.mark.parametrize('key', backends)
def test_cse_with_symbols(key):
be = Backend(key)
x = be.Symbol('x')
exprs = [x**2, 1/(1 + x**2), be.log(x + 2), be.exp(x + 2)]
subs_cses, cse_exprs = be.cse(exprs, symbols=be.numbered_symbols('y'))
subs, cses = zip(*subs_cses)
assert subs[0] == be.Symbol('y0')
assert subs[1] == be.Symbol('y1')
assert _inverse_cse(subs_cses, cse_exprs) == exprs
@pytest.mark.parametrize('key', backends)
def test_cse_with_symbols_overlap(key):
be = Backend(key)
x0, x1, y = map(be.Symbol, "x0 x1 y".split())
exprs = [x0**2, x0**2 + be.exp(y)**2 + 3, x1 * be.exp(y), be.sin(x1 * be.exp(y) + 1)]
subs_cses, cse_exprs = be.cse(exprs)
assert _inverse_cse(subs_cses, cse_exprs) == exprs
|
<commit_before><commit_msg>Add tests for cse for all supporting backends<commit_after>from .. import Backend
import pytest
backends = []
for bk in Backend.backends.keys():
try:
_be = Backend(bk)
except ImportError:
continue
_x = _be.Symbol('x')
try:
_be.cse([_x])
except:
continue
backends.append(bk)
def _inverse_cse(subs_cses, cse_exprs):
subs = dict(subs_cses)
return [expr.subs(subs) for expr in cse_exprs]
@pytest.mark.parametrize('key', backends)
def test_basic_cse(key):
be = Backend(key)
x, y = map(be.Symbol, "xy")
exprs = [x**2 + y**2 + 3, be.exp(x**2 + y**2)]
subs_cses, cse_exprs = be.cse(exprs)
subs, cses = zip(*subs_cses)
assert cses[0] == x**2 + y**2
for cse_expr in cse_exprs:
assert x not in cse_expr.atoms()
assert y not in cse_expr.atoms()
assert _inverse_cse(subs_cses, cse_exprs) == exprs
@pytest.mark.parametrize('key', backends)
def test_moot_cse(key):
be = Backend(key)
x, y = map(be.Symbol, "xy")
exprs = [x**2 + y**2, y]
subs_cses, cse_exprs = be.cse(exprs)
assert not subs_cses
assert _inverse_cse(subs_cses, cse_exprs) == exprs
@pytest.mark.parametrize('key', backends)
def test_cse_with_symbols(key):
be = Backend(key)
x = be.Symbol('x')
exprs = [x**2, 1/(1 + x**2), be.log(x + 2), be.exp(x + 2)]
subs_cses, cse_exprs = be.cse(exprs, symbols=be.numbered_symbols('y'))
subs, cses = zip(*subs_cses)
assert subs[0] == be.Symbol('y0')
assert subs[1] == be.Symbol('y1')
assert _inverse_cse(subs_cses, cse_exprs) == exprs
@pytest.mark.parametrize('key', backends)
def test_cse_with_symbols_overlap(key):
be = Backend(key)
x0, x1, y = map(be.Symbol, "x0 x1 y".split())
exprs = [x0**2, x0**2 + be.exp(y)**2 + 3, x1 * be.exp(y), be.sin(x1 * be.exp(y) + 1)]
subs_cses, cse_exprs = be.cse(exprs)
assert _inverse_cse(subs_cses, cse_exprs) == exprs
|
|
ba09c70dd02747cead96232a5b51c7b56a640df2
|
docker/scripts/test-flask-mail-smtp.py
|
docker/scripts/test-flask-mail-smtp.py
|
import argparse, sys
from pybossa.core import create_app
from flask_mail import Mail
from flask_mail import Message
if __name__ == '__main__':
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-s', '--from', dest='sender')
arg_parser.add_argument('-t', '--to')
args = arg_parser.parse_args()
sender = "info@pybossa.com"
recipient = "info@pybossa.com"
if args.sender:
sender = args.sender
if args.to:
recipient = args.to
# next line is how to properly load PYBOSSA_SETTINGS=settings_from_env.py
app = create_app(run_as_server=False)
mail = Mail(app)
with app.app_context():
msg = Message("Hello", sender=sender, recipients=[recipient])
msg.body = "Hi, This is a test to see if Flask-Mail is able to send mail."
mail.send(msg)
print "Sent From: {} To: {}".format(sender, recipient)
|
Add script to test sending email with Pybossa Flask-Mail config.
|
Add script to test sending email with Pybossa Flask-Mail config.
|
Python
|
apache-2.0
|
Goodly/pybossa-build,Goodly/pybossa-build
|
Add script to test sending email with Pybossa Flask-Mail config.
|
import argparse, sys
from pybossa.core import create_app
from flask_mail import Mail
from flask_mail import Message
if __name__ == '__main__':
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-s', '--from', dest='sender')
arg_parser.add_argument('-t', '--to')
args = arg_parser.parse_args()
sender = "info@pybossa.com"
recipient = "info@pybossa.com"
if args.sender:
sender = args.sender
if args.to:
recipient = args.to
# next line is how to properly load PYBOSSA_SETTINGS=settings_from_env.py
app = create_app(run_as_server=False)
mail = Mail(app)
with app.app_context():
msg = Message("Hello", sender=sender, recipients=[recipient])
msg.body = "Hi, This is a test to see if Flask-Mail is able to send mail."
mail.send(msg)
print "Sent From: {} To: {}".format(sender, recipient)
|
<commit_before><commit_msg>Add script to test sending email with Pybossa Flask-Mail config.<commit_after>
|
import argparse, sys
from pybossa.core import create_app
from flask_mail import Mail
from flask_mail import Message
if __name__ == '__main__':
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-s', '--from', dest='sender')
arg_parser.add_argument('-t', '--to')
args = arg_parser.parse_args()
sender = "info@pybossa.com"
recipient = "info@pybossa.com"
if args.sender:
sender = args.sender
if args.to:
recipient = args.to
# next line is how to properly load PYBOSSA_SETTINGS=settings_from_env.py
app = create_app(run_as_server=False)
mail = Mail(app)
with app.app_context():
msg = Message("Hello", sender=sender, recipients=[recipient])
msg.body = "Hi, This is a test to see if Flask-Mail is able to send mail."
mail.send(msg)
print "Sent From: {} To: {}".format(sender, recipient)
|
Add script to test sending email with Pybossa Flask-Mail config.import argparse, sys
from pybossa.core import create_app
from flask_mail import Mail
from flask_mail import Message
if __name__ == '__main__':
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-s', '--from', dest='sender')
arg_parser.add_argument('-t', '--to')
args = arg_parser.parse_args()
sender = "info@pybossa.com"
recipient = "info@pybossa.com"
if args.sender:
sender = args.sender
if args.to:
recipient = args.to
# next line is how to properly load PYBOSSA_SETTINGS=settings_from_env.py
app = create_app(run_as_server=False)
mail = Mail(app)
with app.app_context():
msg = Message("Hello", sender=sender, recipients=[recipient])
msg.body = "Hi, This is a test to see if Flask-Mail is able to send mail."
mail.send(msg)
print "Sent From: {} To: {}".format(sender, recipient)
|
<commit_before><commit_msg>Add script to test sending email with Pybossa Flask-Mail config.<commit_after>import argparse, sys
from pybossa.core import create_app
from flask_mail import Mail
from flask_mail import Message
if __name__ == '__main__':
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument('-s', '--from', dest='sender')
arg_parser.add_argument('-t', '--to')
args = arg_parser.parse_args()
sender = "info@pybossa.com"
recipient = "info@pybossa.com"
if args.sender:
sender = args.sender
if args.to:
recipient = args.to
# next line is how to properly load PYBOSSA_SETTINGS=settings_from_env.py
app = create_app(run_as_server=False)
mail = Mail(app)
with app.app_context():
msg = Message("Hello", sender=sender, recipients=[recipient])
msg.body = "Hi, This is a test to see if Flask-Mail is able to send mail."
mail.send(msg)
print "Sent From: {} To: {}".format(sender, recipient)
|
|
3481f5d11a0ff51c510e599bb3ff72ddff9be622
|
scripts/Parallelize.py
|
scripts/Parallelize.py
|
#!/usr/bin/env python3
import argparse
import datetime
import multiprocessing
import os
import subprocess
import sys
import time
def main():
#Parse arguments
parser = argparse.ArgumentParser(description='Parallelize helper script.')
parser.add_argument('run_script', help='The script to run for each configuration.')
parser.add_argument('run_id', help='A unique identifier for the run')
parser.add_argument('configs', nargs='*', help='A set of configurations to run.')
arguments = parser.parse_args(sys.argv[1:])
maxProcesses = multiprocessing.cpu_count()
startTime = datetime.datetime.now()
print('[%s] Starting %d processes for %d configs' % (startTime, maxProcesses, len(arguments.configs)))
#Start processes
processes = {}
for i, config in enumerate(arguments.configs):
#Wait for a CPU to become available
while len(processes) >= maxProcesses:
time.sleep(5)
#Check for finished processes
finishedConfigs = []
for runningConfig, process in processes.items():
returnCode = process.poll()
if returnCode is not None:
if returnCode != 0:
sys.stdout.write('[%s] Config %s failed with return code %d' % (datetime.datetime.now(), runningConfig, returnCode))
sys.stdout.flush()
finishedConfigs.append(runningConfig)
for finishedConfig in finishedConfigs:
del processes[finishedConfig]
sys.stdout.write('[%s] Starting %.4d/%.4d (%5.1f%%)\n' % (datetime.datetime.now(), i + 1, len(arguments.configs), 100.0 * (i + 1) / len(arguments.configs)))
sys.stdout.flush()
process = subprocess.Popen([arguments.run_script, arguments.run_id, config], shell=False)
processes[config] = process
#Wait for processes to finish
for process in processes.values():
process.wait()
endTime = datetime.datetime.now()
print('[%s] Finished in %.2f seconds' % (endTime, (endTime - startTime).total_seconds()))
return 0
if __name__ == '__main__':
sys.exit(main())
|
Add single machine parallelism helper script
|
Add single machine parallelism helper script
|
Python
|
mit
|
AgalmicVentures/Environment,AgalmicVentures/Environment,AgalmicVentures/Environment
|
Add single machine parallelism helper script
|
#!/usr/bin/env python3
import argparse
import datetime
import multiprocessing
import os
import subprocess
import sys
import time
def main():
#Parse arguments
parser = argparse.ArgumentParser(description='Parallelize helper script.')
parser.add_argument('run_script', help='The script to run for each configuration.')
parser.add_argument('run_id', help='A unique identifier for the run')
parser.add_argument('configs', nargs='*', help='A set of configurations to run.')
arguments = parser.parse_args(sys.argv[1:])
maxProcesses = multiprocessing.cpu_count()
startTime = datetime.datetime.now()
print('[%s] Starting %d processes for %d configs' % (startTime, maxProcesses, len(arguments.configs)))
#Start processes
processes = {}
for i, config in enumerate(arguments.configs):
#Wait for a CPU to become available
while len(processes) >= maxProcesses:
time.sleep(5)
#Check for finished processes
finishedConfigs = []
for runningConfig, process in processes.items():
returnCode = process.poll()
if returnCode is not None:
if returnCode != 0:
sys.stdout.write('[%s] Config %s failed with return code %d' % (datetime.datetime.now(), runningConfig, returnCode))
sys.stdout.flush()
finishedConfigs.append(runningConfig)
for finishedConfig in finishedConfigs:
del processes[finishedConfig]
sys.stdout.write('[%s] Starting %.4d/%.4d (%5.1f%%)\n' % (datetime.datetime.now(), i + 1, len(arguments.configs), 100.0 * (i + 1) / len(arguments.configs)))
sys.stdout.flush()
process = subprocess.Popen([arguments.run_script, arguments.run_id, config], shell=False)
processes[config] = process
#Wait for processes to finish
for process in processes.values():
process.wait()
endTime = datetime.datetime.now()
print('[%s] Finished in %.2f seconds' % (endTime, (endTime - startTime).total_seconds()))
return 0
if __name__ == '__main__':
sys.exit(main())
|
<commit_before><commit_msg>Add single machine parallelism helper script<commit_after>
|
#!/usr/bin/env python3
import argparse
import datetime
import multiprocessing
import os
import subprocess
import sys
import time
def main():
#Parse arguments
parser = argparse.ArgumentParser(description='Parallelize helper script.')
parser.add_argument('run_script', help='The script to run for each configuration.')
parser.add_argument('run_id', help='A unique identifier for the run')
parser.add_argument('configs', nargs='*', help='A set of configurations to run.')
arguments = parser.parse_args(sys.argv[1:])
maxProcesses = multiprocessing.cpu_count()
startTime = datetime.datetime.now()
print('[%s] Starting %d processes for %d configs' % (startTime, maxProcesses, len(arguments.configs)))
#Start processes
processes = {}
for i, config in enumerate(arguments.configs):
#Wait for a CPU to become available
while len(processes) >= maxProcesses:
time.sleep(5)
#Check for finished processes
finishedConfigs = []
for runningConfig, process in processes.items():
returnCode = process.poll()
if returnCode is not None:
if returnCode != 0:
sys.stdout.write('[%s] Config %s failed with return code %d' % (datetime.datetime.now(), runningConfig, returnCode))
sys.stdout.flush()
finishedConfigs.append(runningConfig)
for finishedConfig in finishedConfigs:
del processes[finishedConfig]
sys.stdout.write('[%s] Starting %.4d/%.4d (%5.1f%%)\n' % (datetime.datetime.now(), i + 1, len(arguments.configs), 100.0 * (i + 1) / len(arguments.configs)))
sys.stdout.flush()
process = subprocess.Popen([arguments.run_script, arguments.run_id, config], shell=False)
processes[config] = process
#Wait for processes to finish
for process in processes.values():
process.wait()
endTime = datetime.datetime.now()
print('[%s] Finished in %.2f seconds' % (endTime, (endTime - startTime).total_seconds()))
return 0
if __name__ == '__main__':
sys.exit(main())
|
Add single machine parallelism helper script#!/usr/bin/env python3
import argparse
import datetime
import multiprocessing
import os
import subprocess
import sys
import time
def main():
#Parse arguments
parser = argparse.ArgumentParser(description='Parallelize helper script.')
parser.add_argument('run_script', help='The script to run for each configuration.')
parser.add_argument('run_id', help='A unique identifier for the run')
parser.add_argument('configs', nargs='*', help='A set of configurations to run.')
arguments = parser.parse_args(sys.argv[1:])
maxProcesses = multiprocessing.cpu_count()
startTime = datetime.datetime.now()
print('[%s] Starting %d processes for %d configs' % (startTime, maxProcesses, len(arguments.configs)))
#Start processes
processes = {}
for i, config in enumerate(arguments.configs):
#Wait for a CPU to become available
while len(processes) >= maxProcesses:
time.sleep(5)
#Check for finished processes
finishedConfigs = []
for runningConfig, process in processes.items():
returnCode = process.poll()
if returnCode is not None:
if returnCode != 0:
sys.stdout.write('[%s] Config %s failed with return code %d' % (datetime.datetime.now(), runningConfig, returnCode))
sys.stdout.flush()
finishedConfigs.append(runningConfig)
for finishedConfig in finishedConfigs:
del processes[finishedConfig]
sys.stdout.write('[%s] Starting %.4d/%.4d (%5.1f%%)\n' % (datetime.datetime.now(), i + 1, len(arguments.configs), 100.0 * (i + 1) / len(arguments.configs)))
sys.stdout.flush()
process = subprocess.Popen([arguments.run_script, arguments.run_id, config], shell=False)
processes[config] = process
#Wait for processes to finish
for process in processes.values():
process.wait()
endTime = datetime.datetime.now()
print('[%s] Finished in %.2f seconds' % (endTime, (endTime - startTime).total_seconds()))
return 0
if __name__ == '__main__':
sys.exit(main())
|
<commit_before><commit_msg>Add single machine parallelism helper script<commit_after>#!/usr/bin/env python3
import argparse
import datetime
import multiprocessing
import os
import subprocess
import sys
import time
def main():
#Parse arguments
parser = argparse.ArgumentParser(description='Parallelize helper script.')
parser.add_argument('run_script', help='The script to run for each configuration.')
parser.add_argument('run_id', help='A unique identifier for the run')
parser.add_argument('configs', nargs='*', help='A set of configurations to run.')
arguments = parser.parse_args(sys.argv[1:])
maxProcesses = multiprocessing.cpu_count()
startTime = datetime.datetime.now()
print('[%s] Starting %d processes for %d configs' % (startTime, maxProcesses, len(arguments.configs)))
#Start processes
processes = {}
for i, config in enumerate(arguments.configs):
#Wait for a CPU to become available
while len(processes) >= maxProcesses:
time.sleep(5)
#Check for finished processes
finishedConfigs = []
for runningConfig, process in processes.items():
returnCode = process.poll()
if returnCode is not None:
if returnCode != 0:
sys.stdout.write('[%s] Config %s failed with return code %d' % (datetime.datetime.now(), runningConfig, returnCode))
sys.stdout.flush()
finishedConfigs.append(runningConfig)
for finishedConfig in finishedConfigs:
del processes[finishedConfig]
sys.stdout.write('[%s] Starting %.4d/%.4d (%5.1f%%)\n' % (datetime.datetime.now(), i + 1, len(arguments.configs), 100.0 * (i + 1) / len(arguments.configs)))
sys.stdout.flush()
process = subprocess.Popen([arguments.run_script, arguments.run_id, config], shell=False)
processes[config] = process
#Wait for processes to finish
for process in processes.values():
process.wait()
endTime = datetime.datetime.now()
print('[%s] Finished in %.2f seconds' % (endTime, (endTime - startTime).total_seconds()))
return 0
if __name__ == '__main__':
sys.exit(main())
|
|
d82b1f9d7334c1cd976624788da785a87cd5db8a
|
functional/tests/volume/v1/test_qos.py
|
functional/tests/volume/v1/test_qos.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from functional.common import test
class VolumeQosTests(test.TestCase):
"""Functional tests for volume qos. """
NAME = uuid.uuid4().hex
HEADERS = ['Name']
FIELDS = ['id', 'name']
ID = None
@classmethod
def setUpClass(cls):
opts = cls.get_show_opts(cls.FIELDS)
raw_output = cls.openstack('volume qos create ' + cls.NAME + opts)
cls.ID, name, rol = raw_output.split('\n')
cls.assertOutput(cls.NAME, name)
@classmethod
def tearDownClass(cls):
raw_output = cls.openstack('volume qos delete ' + cls.ID)
cls.assertOutput('', raw_output)
def test_volume_qos_list(self):
opts = self.get_list_opts(self.HEADERS)
raw_output = self.openstack('volume qos list' + opts)
self.assertIn(self.NAME, raw_output)
def test_volume_qos_show(self):
opts = self.get_show_opts(self.FIELDS)
raw_output = self.openstack('volume qos show ' + self.ID + opts)
self.assertEqual(self.ID + "\n" + self.NAME + "\n", raw_output)
def test_volume_qos_metadata(self):
raw_output = self.openstack(
'volume qos set --property a=b --property c=d ' + self.ID)
self.assertEqual("", raw_output)
opts = self.get_show_opts(['name', 'specs'])
raw_output = self.openstack('volume qos show ' + self.ID + opts)
self.assertEqual(self.NAME + "\na='b', c='d'\n", raw_output)
|
Add functional tests for volume qos
|
Add functional tests for volume qos
Change-Id: I80010b56b399bc027ac864304be60a3ee53bda00
|
Python
|
apache-2.0
|
openstack/python-openstackclient,redhat-openstack/python-openstackclient,BjoernT/python-openstackclient,BjoernT/python-openstackclient,openstack/python-openstackclient,dtroyer/python-openstackclient,redhat-openstack/python-openstackclient,dtroyer/python-openstackclient
|
Add functional tests for volume qos
Change-Id: I80010b56b399bc027ac864304be60a3ee53bda00
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from functional.common import test
class VolumeQosTests(test.TestCase):
"""Functional tests for volume qos. """
NAME = uuid.uuid4().hex
HEADERS = ['Name']
FIELDS = ['id', 'name']
ID = None
@classmethod
def setUpClass(cls):
opts = cls.get_show_opts(cls.FIELDS)
raw_output = cls.openstack('volume qos create ' + cls.NAME + opts)
cls.ID, name, rol = raw_output.split('\n')
cls.assertOutput(cls.NAME, name)
@classmethod
def tearDownClass(cls):
raw_output = cls.openstack('volume qos delete ' + cls.ID)
cls.assertOutput('', raw_output)
def test_volume_qos_list(self):
opts = self.get_list_opts(self.HEADERS)
raw_output = self.openstack('volume qos list' + opts)
self.assertIn(self.NAME, raw_output)
def test_volume_qos_show(self):
opts = self.get_show_opts(self.FIELDS)
raw_output = self.openstack('volume qos show ' + self.ID + opts)
self.assertEqual(self.ID + "\n" + self.NAME + "\n", raw_output)
def test_volume_qos_metadata(self):
raw_output = self.openstack(
'volume qos set --property a=b --property c=d ' + self.ID)
self.assertEqual("", raw_output)
opts = self.get_show_opts(['name', 'specs'])
raw_output = self.openstack('volume qos show ' + self.ID + opts)
self.assertEqual(self.NAME + "\na='b', c='d'\n", raw_output)
|
<commit_before><commit_msg>Add functional tests for volume qos
Change-Id: I80010b56b399bc027ac864304be60a3ee53bda00<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from functional.common import test
class VolumeQosTests(test.TestCase):
"""Functional tests for volume qos. """
NAME = uuid.uuid4().hex
HEADERS = ['Name']
FIELDS = ['id', 'name']
ID = None
@classmethod
def setUpClass(cls):
opts = cls.get_show_opts(cls.FIELDS)
raw_output = cls.openstack('volume qos create ' + cls.NAME + opts)
cls.ID, name, rol = raw_output.split('\n')
cls.assertOutput(cls.NAME, name)
@classmethod
def tearDownClass(cls):
raw_output = cls.openstack('volume qos delete ' + cls.ID)
cls.assertOutput('', raw_output)
def test_volume_qos_list(self):
opts = self.get_list_opts(self.HEADERS)
raw_output = self.openstack('volume qos list' + opts)
self.assertIn(self.NAME, raw_output)
def test_volume_qos_show(self):
opts = self.get_show_opts(self.FIELDS)
raw_output = self.openstack('volume qos show ' + self.ID + opts)
self.assertEqual(self.ID + "\n" + self.NAME + "\n", raw_output)
def test_volume_qos_metadata(self):
raw_output = self.openstack(
'volume qos set --property a=b --property c=d ' + self.ID)
self.assertEqual("", raw_output)
opts = self.get_show_opts(['name', 'specs'])
raw_output = self.openstack('volume qos show ' + self.ID + opts)
self.assertEqual(self.NAME + "\na='b', c='d'\n", raw_output)
|
Add functional tests for volume qos
Change-Id: I80010b56b399bc027ac864304be60a3ee53bda00# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from functional.common import test
class VolumeQosTests(test.TestCase):
"""Functional tests for volume qos. """
NAME = uuid.uuid4().hex
HEADERS = ['Name']
FIELDS = ['id', 'name']
ID = None
@classmethod
def setUpClass(cls):
opts = cls.get_show_opts(cls.FIELDS)
raw_output = cls.openstack('volume qos create ' + cls.NAME + opts)
cls.ID, name, rol = raw_output.split('\n')
cls.assertOutput(cls.NAME, name)
@classmethod
def tearDownClass(cls):
raw_output = cls.openstack('volume qos delete ' + cls.ID)
cls.assertOutput('', raw_output)
def test_volume_qos_list(self):
opts = self.get_list_opts(self.HEADERS)
raw_output = self.openstack('volume qos list' + opts)
self.assertIn(self.NAME, raw_output)
def test_volume_qos_show(self):
opts = self.get_show_opts(self.FIELDS)
raw_output = self.openstack('volume qos show ' + self.ID + opts)
self.assertEqual(self.ID + "\n" + self.NAME + "\n", raw_output)
def test_volume_qos_metadata(self):
raw_output = self.openstack(
'volume qos set --property a=b --property c=d ' + self.ID)
self.assertEqual("", raw_output)
opts = self.get_show_opts(['name', 'specs'])
raw_output = self.openstack('volume qos show ' + self.ID + opts)
self.assertEqual(self.NAME + "\na='b', c='d'\n", raw_output)
|
<commit_before><commit_msg>Add functional tests for volume qos
Change-Id: I80010b56b399bc027ac864304be60a3ee53bda00<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from functional.common import test
class VolumeQosTests(test.TestCase):
"""Functional tests for volume qos. """
NAME = uuid.uuid4().hex
HEADERS = ['Name']
FIELDS = ['id', 'name']
ID = None
@classmethod
def setUpClass(cls):
opts = cls.get_show_opts(cls.FIELDS)
raw_output = cls.openstack('volume qos create ' + cls.NAME + opts)
cls.ID, name, rol = raw_output.split('\n')
cls.assertOutput(cls.NAME, name)
@classmethod
def tearDownClass(cls):
raw_output = cls.openstack('volume qos delete ' + cls.ID)
cls.assertOutput('', raw_output)
def test_volume_qos_list(self):
opts = self.get_list_opts(self.HEADERS)
raw_output = self.openstack('volume qos list' + opts)
self.assertIn(self.NAME, raw_output)
def test_volume_qos_show(self):
opts = self.get_show_opts(self.FIELDS)
raw_output = self.openstack('volume qos show ' + self.ID + opts)
self.assertEqual(self.ID + "\n" + self.NAME + "\n", raw_output)
def test_volume_qos_metadata(self):
raw_output = self.openstack(
'volume qos set --property a=b --property c=d ' + self.ID)
self.assertEqual("", raw_output)
opts = self.get_show_opts(['name', 'specs'])
raw_output = self.openstack('volume qos show ' + self.ID + opts)
self.assertEqual(self.NAME + "\na='b', c='d'\n", raw_output)
|
|
66f2a65ae393ffe1398c5d34f036940aebd170d3
|
tools/send-webhook.py
|
tools/send-webhook.py
|
#!/bin/env python3
# Copyright 2016 Ruud van Asseldonk
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3. See
# the licence file in the root of the repository.
from json import dumps
from sys import argv
from urllib.error import HTTPError
from urllib.request import Request, urlopen
def send_webhook(event_name, payload):
url = 'http://localhost:5261/hook/github'
headers = {'X-GitHub-Event': event_name}
payload_bytes = dumps(payload).encode('utf8')
request = Request(url, payload_bytes, headers, method='POST')
try:
response = urlopen(request)
except HTTPError as error:
response = error
print('{}: {}'.format(response.code, response.read().decode('utf8')))
def main():
"""
usage: tools/send-webhook.py <event_name> [<args>]
events:
pull_request <action> <number> <sha>
issue_comment <action> <number> <body>
"""
if argv[1] == 'pull_request':
action = argv[2]
number = int(argv[3])
sha = argv[4]
payload = {
'action': action,
'pull_request': {
'base': {
'repo': {
'owner': {'login': 'baxterthehacker'},
'name': 'public-repo'
}
},
'number': number,
'head': {'sha': sha},
'user': {'login': 'johnthenitter'}
}
}
send_webhook('pull_request', payload)
if argv[1] == 'issue_comment':
action = argv[2]
number = int(argv[3])
body = argv[4]
payload = {
'action': action,
'repository': {
'owner': {'login': 'baxterthehacker'},
'name': 'public-repo'
},
'issue': {'number': number},
'sender': {'login': 'johnthenitter'},
'comment': {'body': body}
}
send_webhook('issue_comment', payload)
main()
|
Add script to test webhooks
|
Add script to test webhooks
Searching for the right curl commands through my shell history gets
tedious. Let's add a bit of automation.
|
Python
|
apache-2.0
|
ruuda/hoff,ruuda/hoff
|
Add script to test webhooks
Searching for the right curl commands through my shell history gets
tedious. Let's add a bit of automation.
|
#!/bin/env python3
# Copyright 2016 Ruud van Asseldonk
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3. See
# the licence file in the root of the repository.
from json import dumps
from sys import argv
from urllib.error import HTTPError
from urllib.request import Request, urlopen
def send_webhook(event_name, payload):
url = 'http://localhost:5261/hook/github'
headers = {'X-GitHub-Event': event_name}
payload_bytes = dumps(payload).encode('utf8')
request = Request(url, payload_bytes, headers, method='POST')
try:
response = urlopen(request)
except HTTPError as error:
response = error
print('{}: {}'.format(response.code, response.read().decode('utf8')))
def main():
"""
usage: tools/send-webhook.py <event_name> [<args>]
events:
pull_request <action> <number> <sha>
issue_comment <action> <number> <body>
"""
if argv[1] == 'pull_request':
action = argv[2]
number = int(argv[3])
sha = argv[4]
payload = {
'action': action,
'pull_request': {
'base': {
'repo': {
'owner': {'login': 'baxterthehacker'},
'name': 'public-repo'
}
},
'number': number,
'head': {'sha': sha},
'user': {'login': 'johnthenitter'}
}
}
send_webhook('pull_request', payload)
if argv[1] == 'issue_comment':
action = argv[2]
number = int(argv[3])
body = argv[4]
payload = {
'action': action,
'repository': {
'owner': {'login': 'baxterthehacker'},
'name': 'public-repo'
},
'issue': {'number': number},
'sender': {'login': 'johnthenitter'},
'comment': {'body': body}
}
send_webhook('issue_comment', payload)
main()
|
<commit_before><commit_msg>Add script to test webhooks
Searching for the right curl commands through my shell history gets
tedious. Let's add a bit of automation.<commit_after>
|
#!/bin/env python3
# Copyright 2016 Ruud van Asseldonk
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3. See
# the licence file in the root of the repository.
from json import dumps
from sys import argv
from urllib.error import HTTPError
from urllib.request import Request, urlopen
def send_webhook(event_name, payload):
url = 'http://localhost:5261/hook/github'
headers = {'X-GitHub-Event': event_name}
payload_bytes = dumps(payload).encode('utf8')
request = Request(url, payload_bytes, headers, method='POST')
try:
response = urlopen(request)
except HTTPError as error:
response = error
print('{}: {}'.format(response.code, response.read().decode('utf8')))
def main():
"""
usage: tools/send-webhook.py <event_name> [<args>]
events:
pull_request <action> <number> <sha>
issue_comment <action> <number> <body>
"""
if argv[1] == 'pull_request':
action = argv[2]
number = int(argv[3])
sha = argv[4]
payload = {
'action': action,
'pull_request': {
'base': {
'repo': {
'owner': {'login': 'baxterthehacker'},
'name': 'public-repo'
}
},
'number': number,
'head': {'sha': sha},
'user': {'login': 'johnthenitter'}
}
}
send_webhook('pull_request', payload)
if argv[1] == 'issue_comment':
action = argv[2]
number = int(argv[3])
body = argv[4]
payload = {
'action': action,
'repository': {
'owner': {'login': 'baxterthehacker'},
'name': 'public-repo'
},
'issue': {'number': number},
'sender': {'login': 'johnthenitter'},
'comment': {'body': body}
}
send_webhook('issue_comment', payload)
main()
|
Add script to test webhooks
Searching for the right curl commands through my shell history gets
tedious. Let's add a bit of automation.#!/bin/env python3
# Copyright 2016 Ruud van Asseldonk
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3. See
# the licence file in the root of the repository.
from json import dumps
from sys import argv
from urllib.error import HTTPError
from urllib.request import Request, urlopen
def send_webhook(event_name, payload):
url = 'http://localhost:5261/hook/github'
headers = {'X-GitHub-Event': event_name}
payload_bytes = dumps(payload).encode('utf8')
request = Request(url, payload_bytes, headers, method='POST')
try:
response = urlopen(request)
except HTTPError as error:
response = error
print('{}: {}'.format(response.code, response.read().decode('utf8')))
def main():
"""
usage: tools/send-webhook.py <event_name> [<args>]
events:
pull_request <action> <number> <sha>
issue_comment <action> <number> <body>
"""
if argv[1] == 'pull_request':
action = argv[2]
number = int(argv[3])
sha = argv[4]
payload = {
'action': action,
'pull_request': {
'base': {
'repo': {
'owner': {'login': 'baxterthehacker'},
'name': 'public-repo'
}
},
'number': number,
'head': {'sha': sha},
'user': {'login': 'johnthenitter'}
}
}
send_webhook('pull_request', payload)
if argv[1] == 'issue_comment':
action = argv[2]
number = int(argv[3])
body = argv[4]
payload = {
'action': action,
'repository': {
'owner': {'login': 'baxterthehacker'},
'name': 'public-repo'
},
'issue': {'number': number},
'sender': {'login': 'johnthenitter'},
'comment': {'body': body}
}
send_webhook('issue_comment', payload)
main()
|
<commit_before><commit_msg>Add script to test webhooks
Searching for the right curl commands through my shell history gets
tedious. Let's add a bit of automation.<commit_after>#!/bin/env python3
# Copyright 2016 Ruud van Asseldonk
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3. See
# the licence file in the root of the repository.
from json import dumps
from sys import argv
from urllib.error import HTTPError
from urllib.request import Request, urlopen
def send_webhook(event_name, payload):
url = 'http://localhost:5261/hook/github'
headers = {'X-GitHub-Event': event_name}
payload_bytes = dumps(payload).encode('utf8')
request = Request(url, payload_bytes, headers, method='POST')
try:
response = urlopen(request)
except HTTPError as error:
response = error
print('{}: {}'.format(response.code, response.read().decode('utf8')))
def main():
"""
usage: tools/send-webhook.py <event_name> [<args>]
events:
pull_request <action> <number> <sha>
issue_comment <action> <number> <body>
"""
if argv[1] == 'pull_request':
action = argv[2]
number = int(argv[3])
sha = argv[4]
payload = {
'action': action,
'pull_request': {
'base': {
'repo': {
'owner': {'login': 'baxterthehacker'},
'name': 'public-repo'
}
},
'number': number,
'head': {'sha': sha},
'user': {'login': 'johnthenitter'}
}
}
send_webhook('pull_request', payload)
if argv[1] == 'issue_comment':
action = argv[2]
number = int(argv[3])
body = argv[4]
payload = {
'action': action,
'repository': {
'owner': {'login': 'baxterthehacker'},
'name': 'public-repo'
},
'issue': {'number': number},
'sender': {'login': 'johnthenitter'},
'comment': {'body': body}
}
send_webhook('issue_comment', payload)
main()
|
|
d98ea2f891940a0251a470a8373dfc1b7df3b036
|
test/test_sanity.py
|
test/test_sanity.py
|
#!/usr/bin/env python2
import unittest
class TestSanity(unittest.TestCase):
def setUp(self):
self.right_answer = 4
def test_sanity(self):
self.assertEqual(2 + 2, self.right_answer)
if __name__ == '__main__':
unittest.main()
|
Add an extremely simple "sanity test" to the test suite.
|
Add an extremely simple "sanity test" to the test suite.
|
Python
|
bsd-3-clause
|
blindsighttf2/Astron,pizcogirl/Astron,pizcogirl/Astron,ketoo/Astron,blindsighttf2/Astron,blindsighttf2/Astron,ketoo/Astron,pizcogirl/Astron,ketoo/Astron,blindsighttf2/Astron,ketoo/Astron,pizcogirl/Astron
|
Add an extremely simple "sanity test" to the test suite.
|
#!/usr/bin/env python2
import unittest
class TestSanity(unittest.TestCase):
def setUp(self):
self.right_answer = 4
def test_sanity(self):
self.assertEqual(2 + 2, self.right_answer)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add an extremely simple "sanity test" to the test suite.<commit_after>
|
#!/usr/bin/env python2
import unittest
class TestSanity(unittest.TestCase):
def setUp(self):
self.right_answer = 4
def test_sanity(self):
self.assertEqual(2 + 2, self.right_answer)
if __name__ == '__main__':
unittest.main()
|
Add an extremely simple "sanity test" to the test suite.#!/usr/bin/env python2
import unittest
class TestSanity(unittest.TestCase):
def setUp(self):
self.right_answer = 4
def test_sanity(self):
self.assertEqual(2 + 2, self.right_answer)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add an extremely simple "sanity test" to the test suite.<commit_after>#!/usr/bin/env python2
import unittest
class TestSanity(unittest.TestCase):
def setUp(self):
self.right_answer = 4
def test_sanity(self):
self.assertEqual(2 + 2, self.right_answer)
if __name__ == '__main__':
unittest.main()
|
|
ddcc269f0f3a7d0e2e2505b0700197e4a4500984
|
contentcuration/contentcuration/collectstatic_settings.py
|
contentcuration/contentcuration/collectstatic_settings.py
|
# Settings used by containers running collectstatic. Scope our services
# to the only ones needed to run collectstatic.
from .settings import *
CACHES['default']['BACKEND'] = "django_prometheus.cache.backends.locmem.LocMemCache"
|
Make a special settings file for collectstatic
|
Make a special settings file for collectstatic
This is useful to minimize the services needed by a collectstatic container
|
Python
|
mit
|
DXCanas/content-curation,DXCanas/content-curation,DXCanas/content-curation,DXCanas/content-curation
|
Make a special settings file for collectstatic
This is useful to minimize the services needed by a collectstatic container
|
# Settings used by containers running collectstatic. Scope our services
# to the only ones needed to run collectstatic.
from .settings import *
CACHES['default']['BACKEND'] = "django_prometheus.cache.backends.locmem.LocMemCache"
|
<commit_before><commit_msg>Make a special settings file for collectstatic
This is useful to minimize the services needed by a collectstatic container<commit_after>
|
# Settings used by containers running collectstatic. Scope our services
# to the only ones needed to run collectstatic.
from .settings import *
CACHES['default']['BACKEND'] = "django_prometheus.cache.backends.locmem.LocMemCache"
|
Make a special settings file for collectstatic
This is useful to minimize the services needed by a collectstatic container# Settings used by containers running collectstatic. Scope our services
# to the only ones needed to run collectstatic.
from .settings import *
CACHES['default']['BACKEND'] = "django_prometheus.cache.backends.locmem.LocMemCache"
|
<commit_before><commit_msg>Make a special settings file for collectstatic
This is useful to minimize the services needed by a collectstatic container<commit_after># Settings used by containers running collectstatic. Scope our services
# to the only ones needed to run collectstatic.
from .settings import *
CACHES['default']['BACKEND'] = "django_prometheus.cache.backends.locmem.LocMemCache"
|
|
0edf160f3f12697d896721eb86ff09052fdb1126
|
tests/test_flask.py
|
tests/test_flask.py
|
import os
import socket
from webtest import TestApp
from nose.tools import eq_, assert_raises
from mock import MagicMock, patch
from flask import Flask
from bugsnag.six import Iterator
from bugsnag.flask import handle_exceptions
import bugsnag.notification
bugsnag.configuration.api_key = '066f5ad3590596f9aa8d601ea89af845'
class SentinalError(RuntimeError):
pass
@patch('bugsnag.notification.deliver')
def test_bugsnag_middleware_working(deliver):
app = Flask("working")
@app.route("/hello")
def hello():
return "OK"
handle_exceptions(app)
resp = app.test_client().get('/hello')
eq_(resp.data, b'OK')
eq_(deliver.call_count, 0)
@patch('bugsnag.notification.deliver')
def test_bugsnag_crash(deliver):
app = Flask("crashing")
@app.route("/hello")
def hello():
raise SentinalError("oops")
handle_exceptions(app)
resp = app.test_client().get('/hello')
eq_(deliver.call_count, 1)
payload = deliver.call_args[0][0]
eq_(payload['events'][0]['exceptions'][0]['errorClass'], 'test_flask.SentinalError')
eq_(payload['events'][0]['metaData']['request']['url'], 'http://localhost/hello')
@patch('bugsnag.notification.deliver')
def test_bugsnag_notify(deliver):
app = Flask("notifying")
@app.route("/hello")
def hello():
bugsnag.notify(SentinalError("oops"))
return "OK"
handle_exceptions(app)
resp = app.test_client().get('/hello')
eq_(deliver.call_count, 1)
payload = deliver.call_args[0][0]
eq_(payload['events'][0]['metaData']['request']['url'], 'http://localhost/hello')
@patch('bugsnag.notification.deliver')
def test_bugsnag_custom_data(deliver):
meta_data = [{"hello":{"world":"once"}}, {"again":{"hello":"world"}}]
app = Flask("custom")
@app.route("/hello")
def hello():
bugsnag.configure_request(meta_data=meta_data.pop())
raise SentinalError("oops")
handle_exceptions(app)
resp = app.test_client().get('/hello')
resp = app.test_client().get('/hello')
eq_(deliver.call_count, 2)
payload = deliver.call_args_list[0][0][0]
eq_(payload['events'][0]['metaData'].get('hello'), None)
eq_(payload['events'][0]['metaData']['again']['hello'], 'world')
payload = deliver.call_args_list[1][0][0]
eq_(payload['events'][0]['metaData']['hello']['world'], 'once')
eq_(payload['events'][0]['metaData'].get('again'), None)
|
Add some tests for flask integration
|
Add some tests for flask integration
|
Python
|
mit
|
overplumbum/bugsnag-python,bugsnag/bugsnag-python,overplumbum/bugsnag-python,bugsnag/bugsnag-python
|
Add some tests for flask integration
|
import os
import socket
from webtest import TestApp
from nose.tools import eq_, assert_raises
from mock import MagicMock, patch
from flask import Flask
from bugsnag.six import Iterator
from bugsnag.flask import handle_exceptions
import bugsnag.notification
bugsnag.configuration.api_key = '066f5ad3590596f9aa8d601ea89af845'
class SentinalError(RuntimeError):
pass
@patch('bugsnag.notification.deliver')
def test_bugsnag_middleware_working(deliver):
app = Flask("working")
@app.route("/hello")
def hello():
return "OK"
handle_exceptions(app)
resp = app.test_client().get('/hello')
eq_(resp.data, b'OK')
eq_(deliver.call_count, 0)
@patch('bugsnag.notification.deliver')
def test_bugsnag_crash(deliver):
app = Flask("crashing")
@app.route("/hello")
def hello():
raise SentinalError("oops")
handle_exceptions(app)
resp = app.test_client().get('/hello')
eq_(deliver.call_count, 1)
payload = deliver.call_args[0][0]
eq_(payload['events'][0]['exceptions'][0]['errorClass'], 'test_flask.SentinalError')
eq_(payload['events'][0]['metaData']['request']['url'], 'http://localhost/hello')
@patch('bugsnag.notification.deliver')
def test_bugsnag_notify(deliver):
app = Flask("notifying")
@app.route("/hello")
def hello():
bugsnag.notify(SentinalError("oops"))
return "OK"
handle_exceptions(app)
resp = app.test_client().get('/hello')
eq_(deliver.call_count, 1)
payload = deliver.call_args[0][0]
eq_(payload['events'][0]['metaData']['request']['url'], 'http://localhost/hello')
@patch('bugsnag.notification.deliver')
def test_bugsnag_custom_data(deliver):
meta_data = [{"hello":{"world":"once"}}, {"again":{"hello":"world"}}]
app = Flask("custom")
@app.route("/hello")
def hello():
bugsnag.configure_request(meta_data=meta_data.pop())
raise SentinalError("oops")
handle_exceptions(app)
resp = app.test_client().get('/hello')
resp = app.test_client().get('/hello')
eq_(deliver.call_count, 2)
payload = deliver.call_args_list[0][0][0]
eq_(payload['events'][0]['metaData'].get('hello'), None)
eq_(payload['events'][0]['metaData']['again']['hello'], 'world')
payload = deliver.call_args_list[1][0][0]
eq_(payload['events'][0]['metaData']['hello']['world'], 'once')
eq_(payload['events'][0]['metaData'].get('again'), None)
|
<commit_before><commit_msg>Add some tests for flask integration<commit_after>
|
import os
import socket
from webtest import TestApp
from nose.tools import eq_, assert_raises
from mock import MagicMock, patch
from flask import Flask
from bugsnag.six import Iterator
from bugsnag.flask import handle_exceptions
import bugsnag.notification
bugsnag.configuration.api_key = '066f5ad3590596f9aa8d601ea89af845'
class SentinalError(RuntimeError):
pass
@patch('bugsnag.notification.deliver')
def test_bugsnag_middleware_working(deliver):
app = Flask("working")
@app.route("/hello")
def hello():
return "OK"
handle_exceptions(app)
resp = app.test_client().get('/hello')
eq_(resp.data, b'OK')
eq_(deliver.call_count, 0)
@patch('bugsnag.notification.deliver')
def test_bugsnag_crash(deliver):
app = Flask("crashing")
@app.route("/hello")
def hello():
raise SentinalError("oops")
handle_exceptions(app)
resp = app.test_client().get('/hello')
eq_(deliver.call_count, 1)
payload = deliver.call_args[0][0]
eq_(payload['events'][0]['exceptions'][0]['errorClass'], 'test_flask.SentinalError')
eq_(payload['events'][0]['metaData']['request']['url'], 'http://localhost/hello')
@patch('bugsnag.notification.deliver')
def test_bugsnag_notify(deliver):
app = Flask("notifying")
@app.route("/hello")
def hello():
bugsnag.notify(SentinalError("oops"))
return "OK"
handle_exceptions(app)
resp = app.test_client().get('/hello')
eq_(deliver.call_count, 1)
payload = deliver.call_args[0][0]
eq_(payload['events'][0]['metaData']['request']['url'], 'http://localhost/hello')
@patch('bugsnag.notification.deliver')
def test_bugsnag_custom_data(deliver):
meta_data = [{"hello":{"world":"once"}}, {"again":{"hello":"world"}}]
app = Flask("custom")
@app.route("/hello")
def hello():
bugsnag.configure_request(meta_data=meta_data.pop())
raise SentinalError("oops")
handle_exceptions(app)
resp = app.test_client().get('/hello')
resp = app.test_client().get('/hello')
eq_(deliver.call_count, 2)
payload = deliver.call_args_list[0][0][0]
eq_(payload['events'][0]['metaData'].get('hello'), None)
eq_(payload['events'][0]['metaData']['again']['hello'], 'world')
payload = deliver.call_args_list[1][0][0]
eq_(payload['events'][0]['metaData']['hello']['world'], 'once')
eq_(payload['events'][0]['metaData'].get('again'), None)
|
Add some tests for flask integrationimport os
import socket
from webtest import TestApp
from nose.tools import eq_, assert_raises
from mock import MagicMock, patch
from flask import Flask
from bugsnag.six import Iterator
from bugsnag.flask import handle_exceptions
import bugsnag.notification
bugsnag.configuration.api_key = '066f5ad3590596f9aa8d601ea89af845'
class SentinalError(RuntimeError):
pass
@patch('bugsnag.notification.deliver')
def test_bugsnag_middleware_working(deliver):
app = Flask("working")
@app.route("/hello")
def hello():
return "OK"
handle_exceptions(app)
resp = app.test_client().get('/hello')
eq_(resp.data, b'OK')
eq_(deliver.call_count, 0)
@patch('bugsnag.notification.deliver')
def test_bugsnag_crash(deliver):
app = Flask("crashing")
@app.route("/hello")
def hello():
raise SentinalError("oops")
handle_exceptions(app)
resp = app.test_client().get('/hello')
eq_(deliver.call_count, 1)
payload = deliver.call_args[0][0]
eq_(payload['events'][0]['exceptions'][0]['errorClass'], 'test_flask.SentinalError')
eq_(payload['events'][0]['metaData']['request']['url'], 'http://localhost/hello')
@patch('bugsnag.notification.deliver')
def test_bugsnag_notify(deliver):
app = Flask("notifying")
@app.route("/hello")
def hello():
bugsnag.notify(SentinalError("oops"))
return "OK"
handle_exceptions(app)
resp = app.test_client().get('/hello')
eq_(deliver.call_count, 1)
payload = deliver.call_args[0][0]
eq_(payload['events'][0]['metaData']['request']['url'], 'http://localhost/hello')
@patch('bugsnag.notification.deliver')
def test_bugsnag_custom_data(deliver):
meta_data = [{"hello":{"world":"once"}}, {"again":{"hello":"world"}}]
app = Flask("custom")
@app.route("/hello")
def hello():
bugsnag.configure_request(meta_data=meta_data.pop())
raise SentinalError("oops")
handle_exceptions(app)
resp = app.test_client().get('/hello')
resp = app.test_client().get('/hello')
eq_(deliver.call_count, 2)
payload = deliver.call_args_list[0][0][0]
eq_(payload['events'][0]['metaData'].get('hello'), None)
eq_(payload['events'][0]['metaData']['again']['hello'], 'world')
payload = deliver.call_args_list[1][0][0]
eq_(payload['events'][0]['metaData']['hello']['world'], 'once')
eq_(payload['events'][0]['metaData'].get('again'), None)
|
<commit_before><commit_msg>Add some tests for flask integration<commit_after>import os
import socket
from webtest import TestApp
from nose.tools import eq_, assert_raises
from mock import MagicMock, patch
from flask import Flask
from bugsnag.six import Iterator
from bugsnag.flask import handle_exceptions
import bugsnag.notification
bugsnag.configuration.api_key = '066f5ad3590596f9aa8d601ea89af845'
class SentinalError(RuntimeError):
pass
@patch('bugsnag.notification.deliver')
def test_bugsnag_middleware_working(deliver):
app = Flask("working")
@app.route("/hello")
def hello():
return "OK"
handle_exceptions(app)
resp = app.test_client().get('/hello')
eq_(resp.data, b'OK')
eq_(deliver.call_count, 0)
@patch('bugsnag.notification.deliver')
def test_bugsnag_crash(deliver):
app = Flask("crashing")
@app.route("/hello")
def hello():
raise SentinalError("oops")
handle_exceptions(app)
resp = app.test_client().get('/hello')
eq_(deliver.call_count, 1)
payload = deliver.call_args[0][0]
eq_(payload['events'][0]['exceptions'][0]['errorClass'], 'test_flask.SentinalError')
eq_(payload['events'][0]['metaData']['request']['url'], 'http://localhost/hello')
@patch('bugsnag.notification.deliver')
def test_bugsnag_notify(deliver):
app = Flask("notifying")
@app.route("/hello")
def hello():
bugsnag.notify(SentinalError("oops"))
return "OK"
handle_exceptions(app)
resp = app.test_client().get('/hello')
eq_(deliver.call_count, 1)
payload = deliver.call_args[0][0]
eq_(payload['events'][0]['metaData']['request']['url'], 'http://localhost/hello')
@patch('bugsnag.notification.deliver')
def test_bugsnag_custom_data(deliver):
meta_data = [{"hello":{"world":"once"}}, {"again":{"hello":"world"}}]
app = Flask("custom")
@app.route("/hello")
def hello():
bugsnag.configure_request(meta_data=meta_data.pop())
raise SentinalError("oops")
handle_exceptions(app)
resp = app.test_client().get('/hello')
resp = app.test_client().get('/hello')
eq_(deliver.call_count, 2)
payload = deliver.call_args_list[0][0][0]
eq_(payload['events'][0]['metaData'].get('hello'), None)
eq_(payload['events'][0]['metaData']['again']['hello'], 'world')
payload = deliver.call_args_list[1][0][0]
eq_(payload['events'][0]['metaData']['hello']['world'], 'once')
eq_(payload['events'][0]['metaData'].get('again'), None)
|
|
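The record above exercises the Flask integration by patching bugsnag.notification.deliver; outside a test suite the same hooks are wired straight into an application. A minimal sketch using only the calls that appear in the record (bugsnag.configuration.api_key, handle_exceptions, bugsnag.configure_request, bugsnag.notify); the API key and route are placeholders:

import bugsnag
from flask import Flask
from bugsnag.flask import handle_exceptions

# Placeholder key for illustration; a real key comes from the Bugsnag dashboard.
bugsnag.configuration.api_key = "00000000000000000000000000000000"

app = Flask("example")
handle_exceptions(app)  # unhandled exceptions raised in views are reported automatically

@app.route("/task")
def task():
    # attach request-scoped metadata, then report a handled error without raising
    bugsnag.configure_request(meta_data={"task": {"stage": "start"}})
    bugsnag.notify(RuntimeError("non-fatal condition"))
    return "OK"

if __name__ == "__main__":
    app.run()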
a75cecd0291067cf8ce7624e4c929b64e2388052
|
tests/unit/fakes.py
|
tests/unit/fakes.py
|
# Copyright 2012 Intel Inc, OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For filter and weight tests.
"""
from openstack.common.scheduler import weights
class FakeWeigher1(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeWeigher2(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeClass(object):
def __init__(self):
pass
|
Add common base weigher/weigher handler for filter scheduler
|
Add common base weigher/weigher handler for filter scheduler
Filter scheduler is being used for more than one core projects (Nova
and Cinder as of writing), the implementation shared a lot of common
code. This patch is to move base weigher (weighing function), weigher
handler for filter scheduler into oslo to reduce possible porting.
implement bp: common-weights
Change-Id: I2d1b37438663b53e035cc262875a283e5e2ee970
|
Python
|
apache-2.0
|
openstack/oslo.i18n,varunarya10/oslo.i18n
|
Add common base weigher/weigher handler for filter scheduler
Filter scheduler is being used for more than one core projects (Nova
and Cinder as of writing), the implementation shared a lot of common
code. This patch is to move base weigher (weighing function), weigher
handler for filter scheduler into oslo to reduce possible porting.
implement bp: common-weights
Change-Id: I2d1b37438663b53e035cc262875a283e5e2ee970
|
# Copyright 2012 Intel Inc, OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For filter and weight tests.
"""
from openstack.common.scheduler import weights
class FakeWeigher1(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeWeigher2(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeClass(object):
def __init__(self):
pass
|
<commit_before><commit_msg>Add common base weigher/weigher handler for filter scheduler
Filter scheduler is being used for more than one core projects (Nova
and Cinder as of writing), the implementation shared a lot of common
code. This patch is to move base weigher (weighing function), weigher
handler for filter scheduler into oslo to reduce possible porting.
implement bp: common-weights
Change-Id: I2d1b37438663b53e035cc262875a283e5e2ee970<commit_after>
|
# Copyright 2012 Intel Inc, OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For filter and weight tests.
"""
from openstack.common.scheduler import weights
class FakeWeigher1(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeWeigher2(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeClass(object):
def __init__(self):
pass
|
Add common base weigher/weigher handler for filter scheduler
Filter scheduler is being used for more than one core projects (Nova
and Cinder as of writing), the implementation shared a lot of common
code. This patch is to move base weigher (weighing function), weigher
handler for filter scheduler into oslo to reduce possible porting.
implement bp: common-weights
Change-Id: I2d1b37438663b53e035cc262875a283e5e2ee970
# Copyright 2012 Intel Inc, OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For filter and weight tests.
"""
from openstack.common.scheduler import weights
class FakeWeigher1(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeWeigher2(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeClass(object):
def __init__(self):
pass
|
<commit_before><commit_msg>Add common base weigher/weigher handler for filter scheduler
Filter scheduler is being used for more than one core projects (Nova
and Cinder as of writing), the implementation shared a lot of common
code. This patch is to move base weigher (weighing function), weigher
handler for filter scheduler into oslo to reduce possible porting.
implement bp: common-weights
Change-Id: I2d1b37438663b53e035cc262875a283e5e2ee970<commit_after># Copyright 2012 Intel Inc, OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For filter and weight tests.
"""
from openstack.common.scheduler import weights
class FakeWeigher1(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeWeigher2(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeClass(object):
def __init__(self):
pass
|
|
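The fakes in this record exist only to satisfy isinstance checks, so they subclass BaseHostWeigher without any weighing logic. For orientation, a concrete weigher under the same base class would normally override a single scoring hook; the hook name and host attribute below are assumptions carried over from the Nova filter scheduler this code was extracted from, not anything defined in the record itself:

from openstack.common.scheduler import weights

class FreeRamWeigher(weights.BaseHostWeigher):
    """Illustrative weigher that prefers hosts with more free RAM.

    Assumes the weigher handler calls _weigh_object(host_state, weight_properties)
    for each candidate host and ranks hosts by the returned score.
    """

    def _weigh_object(self, host_state, weight_properties):
        # free_ram_mb is assumed to be populated on host_state by the scheduler
        return host_state.free_ram_mb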
e3958525303be01b2800c48a494b586f62f74393
|
tests/test_command.py
|
tests/test_command.py
|
# -*- coding: utf-8 -*-
from tornado.httpclient import AsyncHTTPClient
from tornado.testing import gen_test, AsyncTestCase
from blackgate import Command, component
class TestCommand(AsyncTestCase):
def setUp(self):
super(TestCommand, self).setUp()
component.pools.register_pool('test_command', max_workers=1)
@gen_test
def test_queue(self):
class SimpleCommand(Command):
group_key = 'test_command'
def run(self):
return 'run'
command = SimpleCommand()
result = yield command.queue()
assert result == 'run'
@gen_test
def test_fallback(self):
class FallbackCommand(Command):
group_key = 'test_command'
def run(self):
raise Exception
def fallback(self):
return 'fallback'
command = FallbackCommand()
result = yield command.queue()
assert result == 'fallback'
|
Test command fallback and queue.
|
Test command fallback and queue.
|
Python
|
mit
|
soasme/blackgate
|
Test command fallback and queue.
|
# -*- coding: utf-8 -*-
from tornado.httpclient import AsyncHTTPClient
from tornado.testing import gen_test, AsyncTestCase
from blackgate import Command, component
class TestCommand(AsyncTestCase):
def setUp(self):
super(TestCommand, self).setUp()
component.pools.register_pool('test_command', max_workers=1)
@gen_test
def test_queue(self):
class SimpleCommand(Command):
group_key = 'test_command'
def run(self):
return 'run'
command = SimpleCommand()
result = yield command.queue()
assert result == 'run'
@gen_test
def test_fallback(self):
class FallbackCommand(Command):
group_key = 'test_command'
def run(self):
raise Exception
def fallback(self):
return 'fallback'
command = FallbackCommand()
result = yield command.queue()
assert result == 'fallback'
|
<commit_before><commit_msg>Test command fallback and queue.<commit_after>
|
# -*- coding: utf-8 -*-
from tornado.httpclient import AsyncHTTPClient
from tornado.testing import gen_test, AsyncTestCase
from blackgate import Command, component
class TestCommand(AsyncTestCase):
def setUp(self):
super(TestCommand, self).setUp()
component.pools.register_pool('test_command', max_workers=1)
@gen_test
def test_queue(self):
class SimpleCommand(Command):
group_key = 'test_command'
def run(self):
return 'run'
command = SimpleCommand()
result = yield command.queue()
assert result == 'run'
@gen_test
def test_fallback(self):
class FallbackCommand(Command):
group_key = 'test_command'
def run(self):
raise Exception
def fallback(self):
return 'fallback'
command = FallbackCommand()
result = yield command.queue()
assert result == 'fallback'
|
Test command fallback and queue.
# -*- coding: utf-8 -*-
from tornado.httpclient import AsyncHTTPClient
from tornado.testing import gen_test, AsyncTestCase
from blackgate import Command, component
class TestCommand(AsyncTestCase):
def setUp(self):
super(TestCommand, self).setUp()
component.pools.register_pool('test_command', max_workers=1)
@gen_test
def test_queue(self):
class SimpleCommand(Command):
group_key = 'test_command'
def run(self):
return 'run'
command = SimpleCommand()
result = yield command.queue()
assert result == 'run'
@gen_test
def test_fallback(self):
class FallbackCommand(Command):
group_key = 'test_command'
def run(self):
raise Exception
def fallback(self):
return 'fallback'
command = FallbackCommand()
result = yield command.queue()
assert result == 'fallback'
|
<commit_before><commit_msg>Test command fallback and queue.<commit_after># -*- coding: utf-8 -*-
from tornado.httpclient import AsyncHTTPClient
from tornado.testing import gen_test, AsyncTestCase
from blackgate import Command, component
class TestCommand(AsyncTestCase):
def setUp(self):
super(TestCommand, self).setUp()
component.pools.register_pool('test_command', max_workers=1)
@gen_test
def test_queue(self):
class SimpleCommand(Command):
group_key = 'test_command'
def run(self):
return 'run'
command = SimpleCommand()
result = yield command.queue()
assert result == 'run'
@gen_test
def test_fallback(self):
class FallbackCommand(Command):
group_key = 'test_command'
def run(self):
raise Exception
def fallback(self):
return 'fallback'
command = FallbackCommand()
result = yield command.queue()
assert result == 'fallback'
|
|
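Beyond the tests above, the same Command/fallback pattern can wrap a real remote call. A short sketch reusing only the API the tests exercise (component.pools.register_pool, a Command subclass with run and fallback, and a yielded queue()); the pool name and return values are placeholders:

from tornado import gen, ioloop
from blackgate import Command, component

component.pools.register_pool('upstream_api', max_workers=4)

class FetchStatus(Command):
    group_key = 'upstream_api'

    def run(self):
        # a real implementation would call the upstream service here
        return 'status: ok'

    def fallback(self):
        # returned when run() raises, mirroring test_fallback above
        return 'status: unknown'

@gen.coroutine
def main():
    result = yield FetchStatus().queue()
    print(result)

if __name__ == '__main__':
    ioloop.IOLoop.current().run_sync(main)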
62d1e162cdc34cf6b361b5334625323d9d13c7ed
|
possel/resources.py
|
possel/resources.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
possel.resources
----------------
This module defines a tornado-based RESTful (? - I don't know shit about REST) API for fetching the state of the possel
system over HTTP. This is coupled with a real time push mechanism that will be used to inform the client of new
resources.
"""
import json
import peewee
from pircel import model, tornado_adapter
import tornado.ioloop
import tornado.web
from tornado.web import url
class BaseAPIHandler(tornado.web.RequestHandler):
def initialize(self, controllers):
self.set_header('Content-Type', 'application/json')
self.controllers = controllers
class LinesHandler(BaseAPIHandler):
def get(self):
line_id = self.get_argument('id', None)
before = self.get_argument('before', None)
after = self.get_argument('after', None)
kind = self.get_argument('kind', None)
if not (line_id or before or after):
raise tornado.web.HTTPError(403)
lines = model.IRCLineModel.select()
if line_id is not None:
lines = lines.where(model.IRCLineModel.id == line_id)
if before is not None:
lines = lines.where(model.IRCLineModel.id <= before)
if after is not None:
lines = lines.where(model.IRCLineModel.id >= after)
if kind is not None:
lines = lines.where(model.IRCLineModel.kind == kind)
self.write(json.dumps([line.to_dict() for line in lines]))
def post(self):
buffer_id = self.get_body_argument('buffer')
content = self.get_body_argument('content')
buffer = model.IRCBufferModel.get(id=buffer_id)
controller = self.controllers[buffer.server_id]
controller.server_handler.send_message(buffer.name, content)
def main():
db = peewee.SqliteDatabase('imaginary.db')
model.database.initialize(db)
model.database.connect()
model.create_tables()
controllers = model.IRCServerController.get_all()
clients = {controller_id: tornado_adapter.IRCClient.from_controller(controller)
for controller_id, controller in controllers.items()}
for client in clients.values():
client.connect()
application = tornado.web.Application([url(r'/lines', LinesHandler, dict(controllers=controllers)),
])
application.listen(8080)
tornado.ioloop.IOLoop.current().start()
if __name__ == '__main__':
main()
|
Add basic web api server
|
Add basic web api server
So far allows you to get and send lines but that's about it. Still, this means I can actually make pircel say shit \o/
|
Python
|
bsd-3-clause
|
possel/possel,possel/possel,possel/possel,possel/possel
|
Add basic web api server
So far allows you to get and send lines but that's about it. Still, this means I can actually make pircel say shit \o/
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
possel.resources
----------------
This module defines a tornado-based RESTful (? - I don't know shit about REST) API for fetching the state of the possel
system over HTTP. This is coupled with a real time push mechanism that will be used to inform the client of new
resources.
"""
import json
import peewee
from pircel import model, tornado_adapter
import tornado.ioloop
import tornado.web
from tornado.web import url
class BaseAPIHandler(tornado.web.RequestHandler):
def initialize(self, controllers):
self.set_header('Content-Type', 'application/json')
self.controllers = controllers
class LinesHandler(BaseAPIHandler):
def get(self):
line_id = self.get_argument('id', None)
before = self.get_argument('before', None)
after = self.get_argument('after', None)
kind = self.get_argument('kind', None)
if not (line_id or before or after):
raise tornado.web.HTTPError(403)
lines = model.IRCLineModel.select()
if line_id is not None:
lines = lines.where(model.IRCLineModel.id == line_id)
if before is not None:
lines = lines.where(model.IRCLineModel.id <= before)
if after is not None:
lines = lines.where(model.IRCLineModel.id >= after)
if kind is not None:
lines = lines.where(model.IRCLineModel.kind == kind)
self.write(json.dumps([line.to_dict() for line in lines]))
def post(self):
buffer_id = self.get_body_argument('buffer')
content = self.get_body_argument('content')
buffer = model.IRCBufferModel.get(id=buffer_id)
controller = self.controllers[buffer.server_id]
controller.server_handler.send_message(buffer.name, content)
def main():
db = peewee.SqliteDatabase('imaginary.db')
model.database.initialize(db)
model.database.connect()
model.create_tables()
controllers = model.IRCServerController.get_all()
clients = {controller_id: tornado_adapter.IRCClient.from_controller(controller)
for controller_id, controller in controllers.items()}
for client in clients.values():
client.connect()
application = tornado.web.Application([url(r'/lines', LinesHandler, dict(controllers=controllers)),
])
application.listen(8080)
tornado.ioloop.IOLoop.current().start()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add basic web api server
So far allows you to get and send lines but that's about it. Still, this means I can actually make pircel say shit \o/<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
possel.resources
----------------
This module defines a tornado-based RESTful (? - I don't know shit about REST) API for fetching the state of the possel
system over HTTP. This is coupled with a real time push mechanism that will be used to inform the client of new
resources.
"""
import json
import peewee
from pircel import model, tornado_adapter
import tornado.ioloop
import tornado.web
from tornado.web import url
class BaseAPIHandler(tornado.web.RequestHandler):
def initialize(self, controllers):
self.set_header('Content-Type', 'application/json')
self.controllers = controllers
class LinesHandler(BaseAPIHandler):
def get(self):
line_id = self.get_argument('id', None)
before = self.get_argument('before', None)
after = self.get_argument('after', None)
kind = self.get_argument('kind', None)
if not (line_id or before or after):
raise tornado.web.HTTPError(403)
lines = model.IRCLineModel.select()
if line_id is not None:
lines = lines.where(model.IRCLineModel.id == line_id)
if before is not None:
lines = lines.where(model.IRCLineModel.id <= before)
if after is not None:
lines = lines.where(model.IRCLineModel.id >= after)
if kind is not None:
lines = lines.where(model.IRCLineModel.kind == kind)
self.write(json.dumps([line.to_dict() for line in lines]))
def post(self):
buffer_id = self.get_body_argument('buffer')
content = self.get_body_argument('content')
buffer = model.IRCBufferModel.get(id=buffer_id)
controller = self.controllers[buffer.server_id]
controller.server_handler.send_message(buffer.name, content)
def main():
db = peewee.SqliteDatabase('imaginary.db')
model.database.initialize(db)
model.database.connect()
model.create_tables()
controllers = model.IRCServerController.get_all()
clients = {controller_id: tornado_adapter.IRCClient.from_controller(controller)
for controller_id, controller in controllers.items()}
for client in clients.values():
client.connect()
application = tornado.web.Application([url(r'/lines', LinesHandler, dict(controllers=controllers)),
])
application.listen(8080)
tornado.ioloop.IOLoop.current().start()
if __name__ == '__main__':
main()
|
Add basic web api server
So far allows you to get and send lines but that's about it. Still, this means I can actually make pircel say shit \o/
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
possel.resources
----------------
This module defines a tornado-based RESTful (? - I don't know shit about REST) API for fetching the state of the possel
system over HTTP. This is coupled with a real time push mechanism that will be used to inform the client of new
resources.
"""
import json
import peewee
from pircel import model, tornado_adapter
import tornado.ioloop
import tornado.web
from tornado.web import url
class BaseAPIHandler(tornado.web.RequestHandler):
def initialize(self, controllers):
self.set_header('Content-Type', 'application/json')
self.controllers = controllers
class LinesHandler(BaseAPIHandler):
def get(self):
line_id = self.get_argument('id', None)
before = self.get_argument('before', None)
after = self.get_argument('after', None)
kind = self.get_argument('kind', None)
if not (line_id or before or after):
raise tornado.web.HTTPError(403)
lines = model.IRCLineModel.select()
if line_id is not None:
lines = lines.where(model.IRCLineModel.id == line_id)
if before is not None:
lines = lines.where(model.IRCLineModel.id <= before)
if after is not None:
lines = lines.where(model.IRCLineModel.id >= after)
if kind is not None:
lines = lines.where(model.IRCLineModel.kind == kind)
self.write(json.dumps([line.to_dict() for line in lines]))
def post(self):
buffer_id = self.get_body_argument('buffer')
content = self.get_body_argument('content')
buffer = model.IRCBufferModel.get(id=buffer_id)
controller = self.controllers[buffer.server_id]
controller.server_handler.send_message(buffer.name, content)
def main():
db = peewee.SqliteDatabase('imaginary.db')
model.database.initialize(db)
model.database.connect()
model.create_tables()
controllers = model.IRCServerController.get_all()
clients = {controller_id: tornado_adapter.IRCClient.from_controller(controller)
for controller_id, controller in controllers.items()}
for client in clients.values():
client.connect()
application = tornado.web.Application([url(r'/lines', LinesHandler, dict(controllers=controllers)),
])
application.listen(8080)
tornado.ioloop.IOLoop.current().start()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add basic web api server
So far allows you to get and send lines but that's about it. Still, this means I can actually make pircel say shit \o/<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
possel.resources
----------------
This module defines a tornado-based RESTful (? - I don't know shit about REST) API for fetching the state of the possel
system over HTTP. This is coupled with a real time push mechanism that will be used to inform the client of new
resources.
"""
import json
import peewee
from pircel import model, tornado_adapter
import tornado.ioloop
import tornado.web
from tornado.web import url
class BaseAPIHandler(tornado.web.RequestHandler):
def initialize(self, controllers):
self.set_header('Content-Type', 'application/json')
self.controllers = controllers
class LinesHandler(BaseAPIHandler):
def get(self):
line_id = self.get_argument('id', None)
before = self.get_argument('before', None)
after = self.get_argument('after', None)
kind = self.get_argument('kind', None)
if not (line_id or before or after):
raise tornado.web.HTTPError(403)
lines = model.IRCLineModel.select()
if line_id is not None:
lines = lines.where(model.IRCLineModel.id == line_id)
if before is not None:
lines = lines.where(model.IRCLineModel.id <= before)
if after is not None:
lines = lines.where(model.IRCLineModel.id >= after)
if kind is not None:
lines = lines.where(model.IRCLineModel.kind == kind)
self.write(json.dumps([line.to_dict() for line in lines]))
def post(self):
buffer_id = self.get_body_argument('buffer')
content = self.get_body_argument('content')
buffer = model.IRCBufferModel.get(id=buffer_id)
controller = self.controllers[buffer.server_id]
controller.server_handler.send_message(buffer.name, content)
def main():
db = peewee.SqliteDatabase('imaginary.db')
model.database.initialize(db)
model.database.connect()
model.create_tables()
controllers = model.IRCServerController.get_all()
clients = {controller_id: tornado_adapter.IRCClient.from_controller(controller)
for controller_id, controller in controllers.items()}
for client in clients.values():
client.connect()
application = tornado.web.Application([url(r'/lines', LinesHandler, dict(controllers=controllers)),
])
application.listen(8080)
tornado.ioloop.IOLoop.current().start()
if __name__ == '__main__':
main()
|
|
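The handler above serves GET /lines with id/before/after/kind query parameters (at least one of id, before or after is required) and a POST taking buffer and content form fields, listening on port 8080. A small client-side sketch against a locally running instance; the kind value and buffer id are placeholders:

import requests

BASE = 'http://localhost:8080'

# fetch stored lines with id >= 100, filtered to a hypothetical kind
lines = requests.get(BASE + '/lines', params={'after': 100, 'kind': 'privmsg'}).json()
for line in lines:
    print(line)

# send a message to buffer 1 through its IRC connection
requests.post(BASE + '/lines', data={'buffer': 1, 'content': 'hello from possel'})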
4a6073e6c84391d9e67e08603b3ce429a4f58db4
|
util/rom-ext-manifest-generator.py
|
util/rom-ext-manifest-generator.py
|
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
import argparse
from pathlib import Path
import hjson
from mako.template import Template
from topgen.c import MemoryRegion, Name
DESC = """ROM_EXT manifest generator"""
USAGE = """
rom-ext-manifest-generator --input-dir:
Directory where manifest.hjson and manifest.h.tpl reside.
rom-ext-manifest-generator --output-dir:
Directory where the generated manifest.h is written.
"""
def generate_cheader(fields, input_dir, output_dir):
""" Generates C header file from the `template_file`.
It produces a list of tuples with a field name and the `MemoryRegion`
object, which is used in the `template_path`. The resulting header file is
placed into `output_path`.
"""
template_path = input_dir / 'manifest.h.tpl'
output_path = output_dir / 'manifest.h'
base_name = Name.from_snake_case("ROM_EXT")
items = []
offset = 0
for field in fields:
assert field['size'] % 8 == 0
size_bytes = field['size'] // 8
if field['type'] == "field":
region_name = base_name + Name.from_snake_case(field['name'])
region = MemoryRegion(region_name, offset, size_bytes)
items.append((field['name'], region))
offset += size_bytes
with template_path.open('r') as f:
template = Template(f.read())
header = template.render(items=items)
with output_path.open('w') as f:
f.write(header)
print('Template successfully written to {}.'.format(output_path))
def main():
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
usage=USAGE,
description=DESC)
parser.add_argument('--input-dir',
required=True,
type=Path,
help='Manifest hjson and template directory.')
parser.add_argument('--output-dir',
required=True,
type=Path,
help='Manifest header output directory.')
args = parser.parse_args()
manifest_hjson_file = args.input_dir / 'manifest.hjson'
with manifest_hjson_file.open('r') as hjson_file:
obj = hjson.loads(hjson_file.read())
generate_cheader(obj['fields'], args.input_dir, args.output_dir)
if __name__ == '__main__':
main()
|
Add python ROM_EXT manifest generator
|
[util] Add python ROM_EXT manifest generator
This script takes .hjson input file that contains ROM_EXT manifest
metadata. This metadata is parsed and fed into the mako that uses it
together with the template file to produce C header.
In future we could add the asm template to generate the `.S` file.
Signed-off-by: Silvestrs Timofejevs <5a6466a3751cccdb6ee413d1e87698ed8baaba81@lowrisc.org>
|
Python
|
apache-2.0
|
lowRISC/opentitan,lowRISC/opentitan,lowRISC/opentitan,lowRISC/opentitan,lowRISC/opentitan,lowRISC/opentitan
|
[util] Add python ROM_EXT manifest generator
This script takes .hjson input file that contains ROM_EXT manifest
metadata. This metadata is parsed and fed into the mako that uses it
together with the template file to produce C header.
In future we could add the asm template to generate the `.S` file.
Signed-off-by: Silvestrs Timofejevs <5a6466a3751cccdb6ee413d1e87698ed8baaba81@lowrisc.org>
|
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
import argparse
from pathlib import Path
import hjson
from mako.template import Template
from topgen.c import MemoryRegion, Name
DESC = """ROM_EXT manifest generator"""
USAGE = """
rom-ext-manifest-generator --input-dir:
Directory where manifest.hjson and manifest.h.tpl reside.
rom-ext-manifest-generator --output-dir:
Directory where the generated manifest.h is written.
"""
def generate_cheader(fields, input_dir, output_dir):
""" Generates C header file from the `template_file`.
It produces a list of tuples with a field name and the `MemoryRegion`
object, which is used in the `template_path`. The resulting header file is
placed into `output_path`.
"""
template_path = input_dir / 'manifest.h.tpl'
output_path = output_dir / 'manifest.h'
base_name = Name.from_snake_case("ROM_EXT")
items = []
offset = 0
for field in fields:
assert field['size'] % 8 == 0
size_bytes = field['size'] // 8
if field['type'] == "field":
region_name = base_name + Name.from_snake_case(field['name'])
region = MemoryRegion(region_name, offset, size_bytes)
items.append((field['name'], region))
offset += size_bytes
with template_path.open('r') as f:
template = Template(f.read())
header = template.render(items=items)
with output_path.open('w') as f:
f.write(header)
print('Template successfully written to {}.'.format(output_path))
def main():
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
usage=USAGE,
description=DESC)
parser.add_argument('--input-dir',
required=True,
type=Path,
help='Manifest hjson and template directory.')
parser.add_argument('--output-dir',
required=True,
type=Path,
help='Manifest header output directory.')
args = parser.parse_args()
manifest_hjson_file = args.input_dir / 'manifest.hjson'
with manifest_hjson_file.open('r') as hjson_file:
obj = hjson.loads(hjson_file.read())
generate_cheader(obj['fields'], args.input_dir, args.output_dir)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>[util] Add python ROM_EXT manifest generator
This script takes .hjson input file that contains ROM_EXT manifest
metadata. This metadata is parsed and fed into the mako that uses it
together with the template file to produce C header.
In future we could add the asm template to generate the `.S` file.
Signed-off-by: Silvestrs Timofejevs <5a6466a3751cccdb6ee413d1e87698ed8baaba81@lowrisc.org><commit_after>
|
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
import argparse
from pathlib import Path
import hjson
from mako.template import Template
from topgen.c import MemoryRegion, Name
DESC = """ROM_EXT manifest generator"""
USAGE = """
rom-ext-manifest-generator --input-dir:
Directory where manifest.hjson and manifest.h.tpl reside.
rom-ext-manifest-generator --output-dir:
Directory where the generated manifest.h is written.
"""
def generate_cheader(fields, input_dir, output_dir):
""" Generates C header file from the `template_file`.
It produces a list of tuples with a field name and the `MemoryRegion`
object, which is used in the `template_path`. The resulting header file is
placed into `output_path`.
"""
template_path = input_dir / 'manifest.h.tpl'
output_path = output_dir / 'manifest.h'
base_name = Name.from_snake_case("ROM_EXT")
items = []
offset = 0
for field in fields:
assert field['size'] % 8 == 0
size_bytes = field['size'] // 8
if field['type'] == "field":
region_name = base_name + Name.from_snake_case(field['name'])
region = MemoryRegion(region_name, offset, size_bytes)
items.append((field['name'], region))
offset += size_bytes
with template_path.open('r') as f:
template = Template(f.read())
header = template.render(items=items)
with output_path.open('w') as f:
f.write(header)
print('Template successfully written to {}.'.format(output_path))
def main():
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
usage=USAGE,
description=DESC)
parser.add_argument('--input-dir',
required=True,
type=Path,
help='Manifest hjson and template directory.')
parser.add_argument('--output-dir',
required=True,
type=Path,
help='Manifest header output directory.')
args = parser.parse_args()
manifest_hjson_file = args.input_dir / 'manifest.hjson'
with manifest_hjson_file.open('r') as hjson_file:
obj = hjson.loads(hjson_file.read())
generate_cheader(obj['fields'], args.input_dir, args.output_dir)
if __name__ == '__main__':
main()
|
[util] Add python ROM_EXT manifest generator
This script takes .hjson input file that contains ROM_EXT manifest
metadata. This metadata is parsed and fed into the mako that uses it
together with the template file to produce C header.
In future we could add the asm template to generate the `.S` file.
Signed-off-by: Silvestrs Timofejevs <5a6466a3751cccdb6ee413d1e87698ed8baaba81@lowrisc.org>
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
import argparse
from pathlib import Path
import hjson
from mako.template import Template
from topgen.c import MemoryRegion, Name
DESC = """ROM_EXT manifest generator"""
USAGE = """
rom-ext-manifest-generator --input-dir:
Directory where manifest.hjson and manifest.h.tpl reside.
rom-ext-manifest-generator --output-dir:
Directory where the generated manifest.h is written.
"""
def generate_cheader(fields, input_dir, output_dir):
""" Generates C header file from the `template_file`.
It produces a list of tuples with a field name and the `MemoryRegion`
object, which is used in the `template_path`. The resulting header file is
placed into `output_path`.
"""
template_path = input_dir / 'manifest.h.tpl'
output_path = output_dir / 'manifest.h'
base_name = Name.from_snake_case("ROM_EXT")
items = []
offset = 0
for field in fields:
assert field['size'] % 8 == 0
size_bytes = field['size'] // 8
if field['type'] == "field":
region_name = base_name + Name.from_snake_case(field['name'])
region = MemoryRegion(region_name, offset, size_bytes)
items.append((field['name'], region))
offset += size_bytes
with template_path.open('r') as f:
template = Template(f.read())
header = template.render(items=items)
with output_path.open('w') as f:
f.write(header)
print('Template successfully written to {}.'.format(output_path))
def main():
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
usage=USAGE,
description=DESC)
parser.add_argument('--input-dir',
required=True,
type=Path,
help='Manifest hjson and template directory.')
parser.add_argument('--output-dir',
required=True,
type=Path,
help='Manifest header output directory.')
args = parser.parse_args()
manifest_hjson_file = args.input_dir / 'manifest.hjson'
with manifest_hjson_file.open('r') as hjson_file:
obj = hjson.loads(hjson_file.read())
generate_cheader(obj['fields'], args.input_dir, args.output_dir)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>[util] Add python ROM_EXT manifest generator
This script takes .hjson input file that contains ROM_EXT manifest
metadata. This metadata is parsed and fed into the mako that uses it
together with the template file to produce C header.
In future we could add the asm template to generate the `.S` file.
Signed-off-by: Silvestrs Timofejevs <5a6466a3751cccdb6ee413d1e87698ed8baaba81@lowrisc.org><commit_after>#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
import argparse
from pathlib import Path
import hjson
from mako.template import Template
from topgen.c import MemoryRegion, Name
DESC = """ROM_EXT manifest generator"""
USAGE = """
rom-ext-manifest-generator --input-dir:
Directory where manifest.hjson and manifest.h.tpl reside.
rom-ext-manifest-generator --output-dir:
Directory where the generated manifest.h is written.
"""
def generate_cheader(fields, input_dir, output_dir):
""" Generates C header file from the `template_file`.
It produces a list of tuples with a field name and the `MemoryRegion`
object, which is used in the `template_path`. The resulting header file is
placed into `output_path`.
"""
template_path = input_dir / 'manifest.h.tpl'
output_path = output_dir / 'manifest.h'
base_name = Name.from_snake_case("ROM_EXT")
items = []
offset = 0
for field in fields:
assert field['size'] % 8 == 0
size_bytes = field['size'] // 8
if field['type'] == "field":
region_name = base_name + Name.from_snake_case(field['name'])
region = MemoryRegion(region_name, offset, size_bytes)
items.append((field['name'], region))
offset += size_bytes
with template_path.open('r') as f:
template = Template(f.read())
header = template.render(items=items)
with output_path.open('w') as f:
f.write(header)
print('Template successfully written to {}.'.format(output_path))
def main():
parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
usage=USAGE,
description=DESC)
parser.add_argument('--input-dir',
required=True,
type=Path,
help='Manifest hjson and template directory.')
parser.add_argument('--output-dir',
required=True,
type=Path,
help='Manifest header output directory.')
args = parser.parse_args()
manifest_hjson_file = args.input_dir / 'manifest.hjson'
with manifest_hjson_file.open('r') as hjson_file:
obj = hjson.loads(hjson_file.read())
generate_cheader(obj['fields'], args.input_dir, args.output_dir)
if __name__ == '__main__':
main()
|
|
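generate_cheader walks the field list, converts each size from bits to bytes, and hands consecutive byte offsets to MemoryRegion, so field order in manifest.hjson fixes the layout. A standalone sketch of that offset arithmetic with invented field names (only the name/size/type keys and the bits-to-bytes rule come from the script above):

# Illustrative only: mirrors the offset calculation in generate_cheader().
fields = [
    {'name': 'image_signature', 'size': 3072, 'type': 'field'},    # hypothetical field
    {'name': 'image_length',    'size': 32,   'type': 'field'},    # hypothetical field
    {'name': 'reserved',        'size': 32,   'type': 'padding'},  # not emitted, still advances offset
]

offset = 0
for field in fields:
    assert field['size'] % 8 == 0
    size_bytes = field['size'] // 8
    if field['type'] == 'field':
        print('{:<16} offset={:>4} size={:>4}'.format(field['name'], offset, size_bytes))
    offset += size_bytes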
ebd34d996dc49679c232b40db44114f10fdb6c58
|
scripts/tests/test_preprint_summary.py
|
scripts/tests/test_preprint_summary.py
|
import datetime
from tests.base import OsfTestCase
from osf_tests.factories import PreprintFactory, PreprintProviderFactory
from osf.models import PreprintService
from nose.tools import * # PEP8 asserts
from django.utils import timezone
from scripts.analytics.preprint_summary import PreprintSummary
class TestPreprintCount(OsfTestCase):
def setUp(self):
super(TestPreprintCount, self).setUp()
field = PreprintService._meta.get_field('date_created')
field.auto_now_add = False # We have to fudge the time because Keen doesn't allow same day queries.
self.preprint_provider = PreprintProviderFactory(name='Test 1')
self.preprint = PreprintFactory._build(PreprintService, provider=self.preprint_provider)
self.date = datetime.datetime.utcnow() - datetime.timedelta(1)
self.preprint.date_created = self.date - datetime.timedelta(0.1)
self.preprint.save()
field.auto_now_add = True
self.results = PreprintSummary().get_events(self.date.date())
def test_get_preprint_count(self):
assert_equal(len(self.results), 1)
data = self.results[0]
assert_equal(data['provider']['name'], 'Test 1')
assert_equal(data['provider']['total'], 1)
|
Add test for Keen script for Preprint providers.
|
Add test for Keen script for Preprint providers.
|
Python
|
apache-2.0
|
HalcyonChimera/osf.io,TomBaxter/osf.io,adlius/osf.io,mfraezz/osf.io,caneruguz/osf.io,mattclark/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,chennan47/osf.io,leb2dg/osf.io,felliott/osf.io,adlius/osf.io,chrisseto/osf.io,sloria/osf.io,caseyrollins/osf.io,icereval/osf.io,erinspace/osf.io,felliott/osf.io,caseyrollins/osf.io,caseyrollins/osf.io,erinspace/osf.io,binoculars/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,chennan47/osf.io,mfraezz/osf.io,saradbowman/osf.io,laurenrevere/osf.io,pattisdr/osf.io,baylee-d/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,laurenrevere/osf.io,chrisseto/osf.io,HalcyonChimera/osf.io,binoculars/osf.io,mfraezz/osf.io,sloria/osf.io,crcresearch/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,baylee-d/osf.io,mattclark/osf.io,laurenrevere/osf.io,baylee-d/osf.io,binoculars/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,mfraezz/osf.io,cslzchen/osf.io,cslzchen/osf.io,crcresearch/osf.io,adlius/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,icereval/osf.io,chennan47/osf.io,crcresearch/osf.io,aaxelb/osf.io,leb2dg/osf.io,caneruguz/osf.io,cslzchen/osf.io,icereval/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,chrisseto/osf.io,sloria/osf.io,felliott/osf.io,chrisseto/osf.io,caneruguz/osf.io,aaxelb/osf.io,pattisdr/osf.io,erinspace/osf.io,cslzchen/osf.io
|
Add test for Keen script for Preprint providers.
|
import datetime
from tests.base import OsfTestCase
from osf_tests.factories import PreprintFactory, PreprintProviderFactory
from osf.models import PreprintService
from nose.tools import * # PEP8 asserts
from django.utils import timezone
from scripts.analytics.preprint_summary import PreprintSummary
class TestPreprintCount(OsfTestCase):
def setUp(self):
super(TestPreprintCount, self).setUp()
field = PreprintService._meta.get_field('date_created')
field.auto_now_add = False # We have to fudge the time because Keen doesn't allow same day queries.
self.preprint_provider = PreprintProviderFactory(name='Test 1')
self.preprint = PreprintFactory._build(PreprintService, provider=self.preprint_provider)
self.date = datetime.datetime.utcnow() - datetime.timedelta(1)
self.preprint.date_created = self.date - datetime.timedelta(0.1)
self.preprint.save()
field.auto_now_add = True
self.results = PreprintSummary().get_events(self.date.date())
def test_get_preprint_count(self):
assert_equal(len(self.results), 1)
data = self.results[0]
assert_equal(data['provider']['name'], 'Test 1')
assert_equal(data['provider']['total'], 1)
|
<commit_before><commit_msg>Add test for Keen script for Preprint providers.<commit_after>
|
import datetime
from tests.base import OsfTestCase
from osf_tests.factories import PreprintFactory, PreprintProviderFactory
from osf.models import PreprintService
from nose.tools import * # PEP8 asserts
from django.utils import timezone
from scripts.analytics.preprint_summary import PreprintSummary
class TestPreprintCount(OsfTestCase):
def setUp(self):
super(TestPreprintCount, self).setUp()
field = PreprintService._meta.get_field('date_created')
field.auto_now_add = False # We have to fudge the time because Keen doesn't allow same day queries.
self.preprint_provider = PreprintProviderFactory(name='Test 1')
self.preprint = PreprintFactory._build(PreprintService, provider=self.preprint_provider)
self.date = datetime.datetime.utcnow() - datetime.timedelta(1)
self.preprint.date_created = self.date - datetime.timedelta(0.1)
self.preprint.save()
field.auto_now_add = True
self.results = PreprintSummary().get_events(self.date.date())
def test_get_preprint_count(self):
assert_equal(len(self.results), 1)
data = self.results[0]
assert_equal(data['provider']['name'], 'Test 1')
assert_equal(data['provider']['total'], 1)
|
Add test for Keen script for Preprint providers.
import datetime
from tests.base import OsfTestCase
from osf_tests.factories import PreprintFactory, PreprintProviderFactory
from osf.models import PreprintService
from nose.tools import * # PEP8 asserts
from django.utils import timezone
from scripts.analytics.preprint_summary import PreprintSummary
class TestPreprintCount(OsfTestCase):
def setUp(self):
super(TestPreprintCount, self).setUp()
field = PreprintService._meta.get_field('date_created')
field.auto_now_add = False # We have to fudge the time because Keen doesn't allow same day queries.
self.preprint_provider = PreprintProviderFactory(name='Test 1')
self.preprint = PreprintFactory._build(PreprintService, provider=self.preprint_provider)
self.date = datetime.datetime.utcnow() - datetime.timedelta(1)
self.preprint.date_created = self.date - datetime.timedelta(0.1)
self.preprint.save()
field.auto_now_add = True
self.results = PreprintSummary().get_events(self.date.date())
def test_get_preprint_count(self):
assert_equal(len(self.results), 1)
data = self.results[0]
assert_equal(data['provider']['name'], 'Test 1')
assert_equal(data['provider']['total'], 1)
|
<commit_before><commit_msg>Add test for Keen script for Preprint providers.<commit_after>import datetime
from tests.base import OsfTestCase
from osf_tests.factories import PreprintFactory, PreprintProviderFactory
from osf.models import PreprintService
from nose.tools import * # PEP8 asserts
from django.utils import timezone
from scripts.analytics.preprint_summary import PreprintSummary
class TestPreprintCount(OsfTestCase):
def setUp(self):
super(TestPreprintCount, self).setUp()
field = PreprintService._meta.get_field('date_created')
field.auto_now_add = False # We have to fudge the time because Keen doesn't allow same day queries.
self.preprint_provider = PreprintProviderFactory(name='Test 1')
self.preprint = PreprintFactory._build(PreprintService, provider=self.preprint_provider)
self.date = datetime.datetime.utcnow() - datetime.timedelta(1)
self.preprint.date_created = self.date - datetime.timedelta(0.1)
self.preprint.save()
field.auto_now_add = True
self.results = PreprintSummary().get_events(self.date.date())
def test_get_preprint_count(self):
assert_equal(len(self.results), 1)
data = self.results[0]
assert_equal(data['provider']['name'], 'Test 1')
assert_equal(data['provider']['total'], 1)
|
|
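The setUp above sidesteps auto_now_add by switching the flag off on the model field, backdating the record, then switching it back. The same trick in isolation, with a generic model class standing in for PreprintService (assumes a Django DateTimeField named date_created with auto_now_add=True):

import datetime

def create_backdated(model_cls, days_ago=1, **kwargs):
    """Create and save a model instance whose date_created lies in the past."""
    field = model_cls._meta.get_field('date_created')
    field.auto_now_add = False            # allow setting the timestamp explicitly
    try:
        obj = model_cls(**kwargs)
        obj.date_created = datetime.datetime.utcnow() - datetime.timedelta(days_ago)
        obj.save()
    finally:
        field.auto_now_add = True         # restore normal behaviour for other tests
    return obj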
fabcc39144e727c5c840554e229eb8f7ddd1e944
|
Software/Python/grove_thumb_joystick.py
|
Software/Python/grove_thumb_joystick.py
|
# GrovePi + Grove Thumb Joystick
# http://www.seeedstudio.com/wiki/Grove_-_Thumb_Joystick
import time
import grovepi
# Connect the Thumb Joystick to analog port A0
# Uses two pins - one for the X axis and one for the Y axis
grovepi.pinMode(0,"INPUT")
grovepi.pinMode(1,"INPUT")
# The Thumb Joystick is an analog device that outputs analog signal ranging from 0 to 1023
# The X and Y axes are two ~10k potentiometers and a momentary push button which shorts the x axis
# My joystick produces slightly different results to the specifications found on the url above
# I've listed both here:
# Specifications
# Min Typ Max Click
# X 206 516 798 1023
# Y 203 507 797
# My Joystick
# Min Typ Max Click
# X 253 513 766 1020-1023
# Y 250 505 769
while True:
try:
# Get X/Y coordinates
x = grovepi.analogRead(0)
y = grovepi.analogRead(1)
# Calculate X/Y resistance
Rx = (float)(1023 - x) * 10 / x
Ry = (float)(1023 - y) * 10 / y
# Was a click detected on the X axis?
click = 1 if x >= 1020 else 0
print x,y,Rx,Ry,click
print ''
time.sleep(.5)
except IOError:
print "Error"
|
Add Grove Thumb Joystick example
|
Add Grove Thumb Joystick example
|
Python
|
mit
|
NeuroRoboticTech/Jetduino,stwolny/GrovePi,karan259/GrovePi,karan259/GrovePi,karan259/GrovePi,NeuroRoboticTech/Jetduino,stwolny/GrovePi,stwolny/GrovePi,penoud/GrovePi,NeuroRoboticTech/Jetduino,rpedersen/GrovePi,karan259/GrovePi,penoud/GrovePi,rpedersen/GrovePi,stwolny/GrovePi,rpedersen/GrovePi,karan259/GrovePi,stwolny/GrovePi,rpedersen/GrovePi,penoud/GrovePi,nerginer/GrovePi,nerginer/GrovePi,rpedersen/GrovePi,karan259/GrovePi,NeuroRoboticTech/Jetduino,penoud/GrovePi,rpedersen/GrovePi,nerginer/GrovePi,NeuroRoboticTech/Jetduino,nerginer/GrovePi,penoud/GrovePi,nerginer/GrovePi,stwolny/GrovePi,nerginer/GrovePi,NeuroRoboticTech/Jetduino,rpedersen/GrovePi,stwolny/GrovePi,penoud/GrovePi,karan259/GrovePi,stwolny/GrovePi,karan259/GrovePi,NeuroRoboticTech/Jetduino,nerginer/GrovePi,nerginer/GrovePi,penoud/GrovePi,penoud/GrovePi,rpedersen/GrovePi,karan259/GrovePi
|
Add Grove Thumb Joystick example
|
# GrovePi + Grove Thumb Joystick
# http://www.seeedstudio.com/wiki/Grove_-_Thumb_Joystick
import time
import grovepi
# Connect the Thumb Joystick to analog port A0
# Uses two pins - one for the X axis and one for the Y axis
grovepi.pinMode(0,"INPUT")
grovepi.pinMode(1,"INPUT")
# The Thumb Joystick is an analog device that outputs analog signal ranging from 0 to 1023
# The X and Y axes are two ~10k potentiometers and a momentary push button which shorts the x axis
# My joystick produces slightly different results to the specifications found on the url above
# I've listed both here:
# Specifications
# Min Typ Max Click
# X 206 516 798 1023
# Y 203 507 797
# My Joystick
# Min Typ Max Click
# X 253 513 766 1020-1023
# Y 250 505 769
while True:
try:
# Get X/Y coordinates
x = grovepi.analogRead(0)
y = grovepi.analogRead(1)
# Calculate X/Y resistance
Rx = (float)(1023 - x) * 10 / x
Ry = (float)(1023 - y) * 10 / y
# Was a click detected on the X axis?
click = 1 if x >= 1020 else 0
print x,y,Rx,Ry,click
print ''
time.sleep(.5)
except IOError:
print "Error"
|
<commit_before><commit_msg>Add Grove Thumb Joystick example<commit_after>
|
# GrovePi + Grove Thumb Joystick
# http://www.seeedstudio.com/wiki/Grove_-_Thumb_Joystick
import time
import grovepi
# Connect the Thumb Joystick to analog port A0
# Uses two pins - one for the X axis and one for the Y axis
grovepi.pinMode(0,"INPUT")
grovepi.pinMode(1,"INPUT")
# The Thumb Joystick is an analog device that outputs analog signal ranging from 0 to 1023
# The X and Y axes are two ~10k potentiometers and a momentary push button which shorts the x axis
# My joystick produces slightly different results to the specifications found on the url above
# I've listed both here:
# Specifications
# Min Typ Max Click
# X 206 516 798 1023
# Y 203 507 797
# My Joystick
# Min Typ Max Click
# X 253 513 766 1020-1023
# Y 250 505 769
while True:
try:
# Get X/Y coordinates
x = grovepi.analogRead(0)
y = grovepi.analogRead(1)
# Calculate X/Y resistance
Rx = (float)(1023 - x) * 10 / x
Ry = (float)(1023 - y) * 10 / y
# Was a click detected on the X axis?
click = 1 if x >= 1020 else 0
print x,y,Rx,Ry,click
print ''
time.sleep(.5)
except IOError:
print "Error"
|
Add Grove Thumb Joystick example
# GrovePi + Grove Thumb Joystick
# http://www.seeedstudio.com/wiki/Grove_-_Thumb_Joystick
import time
import grovepi
# Connect the Thumb Joystick to analog port A0
# Uses two pins - one for the X axis and one for the Y axis
grovepi.pinMode(0,"INPUT")
grovepi.pinMode(1,"INPUT")
# The Thumb Joystick is an analog device that outputs analog signal ranging from 0 to 1023
# The X and Y axes are two ~10k potentiometers and a momentary push button which shorts the x axis
# My joystick produces slightly different results to the specifications found on the url above
# I've listed both here:
# Specifications
# Min Typ Max Click
# X 206 516 798 1023
# Y 203 507 797
# My Joystick
# Min Typ Max Click
# X 253 513 766 1020-1023
# Y 250 505 769
while True:
try:
# Get X/Y coordinates
x = grovepi.analogRead(0)
y = grovepi.analogRead(1)
# Calculate X/Y resistance
Rx = (float)(1023 - x) * 10 / x
Ry = (float)(1023 - y) * 10 / y
# Was a click detected on the X axis?
click = 1 if x >= 1020 else 0
print x,y,Rx,Ry,click
print ''
time.sleep(.5)
except IOError:
print "Error"
|
<commit_before><commit_msg>Add Grove Thumb Joystick example<commit_after># GrovePi + Grove Thumb Joystick
# http://www.seeedstudio.com/wiki/Grove_-_Thumb_Joystick
import time
import grovepi
# Connect the Thumb Joystick to analog port A0
# Uses two pins - one for the X axis and one for the Y axis
grovepi.pinMode(0,"INPUT")
grovepi.pinMode(1,"INPUT")
# The Thumb Joystick is an analog device that outputs analog signal ranging from 0 to 1023
# The X and Y axes are two ~10k potentiometers and a momentary push button which shorts the x axis
# My joystick produces slightly different results to the specifications found on the url above
# I've listed both here:
# Specifications
# Min Typ Max Click
# X 206 516 798 1023
# Y 203 507 797
# My Joystick
# Min Typ Max Click
# X 253 513 766 1020-1023
# Y 250 505 769
while True:
try:
# Get X/Y coordinates
x = grovepi.analogRead(0)
y = grovepi.analogRead(1)
# Calculate X/Y resistance
Rx = (float)(1023 - x) * 10 / x
Ry = (float)(1023 - y) * 10 / y
# Was a click detected on the X axis?
click = 1 if x >= 1020 else 0
print x,y,Rx,Ry,click
print ''
time.sleep(.5)
except IOError:
print "Error"
|
|
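Using the calibration quoted in the comments (roughly 250 at one extreme, ~510 at rest, ~770 at the other, and a reading near 1023 when the stick is clicked), a raw value can be normalised to a -1.0..1.0 deflection. The constants below are the "My Joystick" numbers from the record; real hardware will differ:

def normalise(raw, lo=250, mid=510, hi=770):
    """Map a raw 0-1023 joystick reading onto -1.0 .. 1.0 around its rest point."""
    if raw >= 1020:       # the push button shorts the X axis to ~1023
        return None       # treat as a click rather than a deflection
    if raw >= mid:
        return min((raw - mid) / float(hi - mid), 1.0)
    return max((raw - mid) / float(mid - lo), -1.0)

# typical readings from the comment table above
for reading in (250, 510, 770, 1023):
    print('{} -> {}'.format(reading, normalise(reading)))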
856cbe9049375ce277c4b3de2efa70fc4d68af4b
|
dakotathon/tests/test_run_component.py
|
dakotathon/tests/test_run_component.py
|
#!/usr/bin/env python
import os
import sys
import shutil
from nose.tools import raises, with_setup
from dakotathon.run_component import run_component, main
from dakotathon.dakota import Dakota
from . import start_dir, data_dir
run_dir = os.getcwd()
local_config_file = 'config.yaml'
config_file = os.path.join(data_dir, local_config_file)
local_params_file = 'params.in'
params_file = os.path.join(data_dir, local_params_file)
results_file = 'results.out'
def setup_module():
"""Called before any tests are performed."""
print('\n*** ' + __name__)
def setup():
"""Called at start of any test using it @with_setup()"""
global d
d = Dakota.from_file_like(config_file)
def teardown():
"""Called at end of any test using it @with_setup()"""
if os.path.exists(results_file):
os.remove(results_file)
if os.path.exists(local_config_file):
os.remove(local_config_file)
if os.path.exists(local_params_file):
os.remove(local_params_file)
def teardown_module():
"""Called after all tests have completed."""
pass
@raises(IOError)
def test_run_component_unknown_config_file():
"""Tests run_component() fails with unknown config file."""
run_component(params_file, results_file)
@raises(AttributeError)
@with_setup(setup, teardown)
def test_run_component_unknown_module():
"""Tests run_component() fails with unknown module."""
d.component = 'foo'
d.serialize(local_config_file)
run_component(params_file, results_file)
@raises(TypeError)
@with_setup(setup, teardown)
def test_run_component_uninstalled_module():
"""Tests run_component() fails with module that's not installed."""
d.serialize(local_config_file)
os.environ['PATH'] = '.'
run_component(params_file, results_file)
@raises(IndexError)
def test_main_no_args():
"""Tests main() fails without args."""
sys.argv = []
main()
|
Create initial unit tests for run_component module
|
Create initial unit tests for run_component module
|
Python
|
mit
|
csdms/dakota,csdms/dakota
|
Create initial unit tests for run_component module
|
#!/usr/bin/env python
import os
import sys
import shutil
from nose.tools import raises, with_setup
from dakotathon.run_component import run_component, main
from dakotathon.dakota import Dakota
from . import start_dir, data_dir
run_dir = os.getcwd()
local_config_file = 'config.yaml'
config_file = os.path.join(data_dir, local_config_file)
local_params_file = 'params.in'
params_file = os.path.join(data_dir, local_params_file)
results_file = 'results.out'
def setup_module():
"""Called before any tests are performed."""
print('\n*** ' + __name__)
def setup():
"""Called at start of any test using it @with_setup()"""
global d
d = Dakota.from_file_like(config_file)
def teardown():
"""Called at end of any test using it @with_setup()"""
if os.path.exists(results_file):
os.remove(results_file)
if os.path.exists(local_config_file):
os.remove(local_config_file)
if os.path.exists(local_params_file):
os.remove(local_params_file)
def teardown_module():
"""Called after all tests have completed."""
pass
@raises(IOError)
def test_run_component_unknown_config_file():
"""Tests run_component() fails with unknown config file."""
run_component(params_file, results_file)
@raises(AttributeError)
@with_setup(setup, teardown)
def test_run_component_unknown_module():
"""Tests run_component() fails with unknown module."""
d.component = 'foo'
d.serialize(local_config_file)
run_component(params_file, results_file)
@raises(TypeError)
@with_setup(setup, teardown)
def test_run_component_uninstalled_module():
"""Tests run_component() fails with module that's not installed."""
d.serialize(local_config_file)
os.environ['PATH'] = '.'
run_component(params_file, results_file)
@raises(IndexError)
def test_main_no_args():
"""Tests main() fails without args."""
sys.argv = []
main()
|
<commit_before><commit_msg>Create initial unit tests for run_component module<commit_after>
|
#!/usr/bin/env python
import os
import sys
import shutil
from nose.tools import raises, with_setup
from dakotathon.run_component import run_component, main
from dakotathon.dakota import Dakota
from . import start_dir, data_dir
run_dir = os.getcwd()
local_config_file = 'config.yaml'
config_file = os.path.join(data_dir, local_config_file)
local_params_file = 'params.in'
params_file = os.path.join(data_dir, local_params_file)
results_file = 'results.out'
def setup_module():
"""Called before any tests are performed."""
print('\n*** ' + __name__)
def setup():
"""Called at start of any test using it @with_setup()"""
global d
d = Dakota.from_file_like(config_file)
def teardown():
"""Called at end of any test using it @with_setup()"""
if os.path.exists(results_file):
os.remove(results_file)
if os.path.exists(local_config_file):
os.remove(local_config_file)
if os.path.exists(local_params_file):
os.remove(local_params_file)
def teardown_module():
"""Called after all tests have completed."""
pass
@raises(IOError)
def test_run_component_unknown_config_file():
"""Tests run_component() fails with unknown config file."""
run_component(params_file, results_file)
@raises(AttributeError)
@with_setup(setup, teardown)
def test_run_component_unknown_module():
"""Tests run_component() fails with unknown module."""
d.component = 'foo'
d.serialize(local_config_file)
run_component(params_file, results_file)
@raises(TypeError)
@with_setup(setup, teardown)
def test_run_component_uninstalled_module():
"""Tests run_component() fails with module that's not installed."""
d.serialize(local_config_file)
os.environ['PATH'] = '.'
run_component(params_file, results_file)
@raises(IndexError)
def test_main_no_args():
"""Tests main() fails without args."""
sys.argv = []
main()
|
Create initial unit tests for run_component module#!/usr/bin/env python
import os
import sys
import shutil
from nose.tools import raises, with_setup
from dakotathon.run_component import run_component, main
from dakotathon.dakota import Dakota
from . import start_dir, data_dir
run_dir = os.getcwd()
local_config_file = 'config.yaml'
config_file = os.path.join(data_dir, local_config_file)
local_params_file = 'params.in'
params_file = os.path.join(data_dir, local_params_file)
results_file = 'results.out'
def setup_module():
"""Called before any tests are performed."""
print('\n*** ' + __name__)
def setup():
"""Called at start of any test using it @with_setup()"""
global d
d = Dakota.from_file_like(config_file)
def teardown():
"""Called at end of any test using it @with_setup()"""
if os.path.exists(results_file):
os.remove(results_file)
if os.path.exists(local_config_file):
os.remove(local_config_file)
if os.path.exists(local_params_file):
os.remove(local_params_file)
def teardown_module():
"""Called after all tests have completed."""
pass
@raises(IOError)
def test_run_component_unknown_config_file():
"""Tests run_component() fails with unknown config file."""
run_component(params_file, results_file)
@raises(AttributeError)
@with_setup(setup, teardown)
def test_run_component_unknown_module():
"""Tests run_component() fails with unknown module."""
d.component = 'foo'
d.serialize(local_config_file)
run_component(params_file, results_file)
@raises(TypeError)
@with_setup(setup, teardown)
def test_run_component_uninstalled_module():
"""Tests run_component() fails with module that's not installed."""
d.serialize(local_config_file)
os.environ['PATH'] = '.'
run_component(params_file, results_file)
@raises(IndexError)
def test_main_no_args():
"""Tests main() fails without args."""
sys.argv = []
main()
|
<commit_before><commit_msg>Create initial unit tests for run_component module<commit_after>#!/usr/bin/env python
import os
import sys
import shutil
from nose.tools import raises, with_setup
from dakotathon.run_component import run_component, main
from dakotathon.dakota import Dakota
from . import start_dir, data_dir
run_dir = os.getcwd()
local_config_file = 'config.yaml'
config_file = os.path.join(data_dir, local_config_file)
local_params_file = 'params.in'
params_file = os.path.join(data_dir, local_params_file)
results_file = 'results.out'
def setup_module():
"""Called before any tests are performed."""
print('\n*** ' + __name__)
def setup():
"""Called at start of any test using it @with_setup()"""
global d
d = Dakota.from_file_like(config_file)
def teardown():
"""Called at end of any test using it @with_setup()"""
if os.path.exists(results_file):
os.remove(results_file)
if os.path.exists(local_config_file):
os.remove(local_config_file)
if os.path.exists(local_params_file):
os.remove(local_params_file)
def teardown_module():
"""Called after all tests have completed."""
pass
@raises(IOError)
def test_run_component_unknown_config_file():
"""Tests run_component() fails with unknown config file."""
run_component(params_file, results_file)
@raises(AttributeError)
@with_setup(setup, teardown)
def test_run_component_unknown_module():
"""Tests run_component() fails with unknown module."""
d.component = 'foo'
d.serialize(local_config_file)
run_component(params_file, results_file)
@raises(TypeError)
@with_setup(setup, teardown)
def test_run_component_uninstalled_module():
"""Tests run_component() fails with module that's not installed."""
d.serialize(local_config_file)
os.environ['PATH'] = '.'
run_component(params_file, results_file)
@raises(IndexError)
def test_main_no_args():
"""Tests main() fails without args."""
sys.argv = []
main()
|
|
383784ca4533314d3313ca4901f70dcac40e776a
|
migrations/0.0.1.1/pre-0001_delete_inactive_investments.py
|
migrations/0.0.1.1/pre-0001_delete_inactive_investments.py
|
# coding=utf-8
from oopgrade import oopgrade
import netsvc
def up(cursor, installed_version):
logger= netsvc.Logger()
print "somenergia-generationkwh_0.0.1.1: Hem entrat al UP"
if not installed_version:
return
logger.notifyChannel('migration', netsvc.LOG_INFO, 'Changing ir_model_data from giscedata_facturacio_comer to giscedata_facturacio')
'''cursor.execute("delete
from generationkwh_investment
where id in (
select
inv.id as id
from
generationkwh_investment as inv
left join
account_move_line as ml
on
inv.move_line_id = ml.id
left join
account_period as p
on
p.id = ml.period_id
where
p.special and
not inv.active")'''
logger.notifyChannel('migration', netsvc.LOG_INFO, 'Successfully changed')
def down(cursor):
print "somenergia-generationkwh_0.0.1.1: Hem entrat al down"
pass
# vim: ts=4 sw=4 et
|
Add pre-script migration generationkwh module
|
Add pre-script migration generationkwh module
|
Python
|
agpl-3.0
|
Som-Energia/somenergia-generationkwh,Som-Energia/somenergia-generationkwh
|
Add pre-script migration generationkwh module
|
# coding=utf-8
from oopgrade import oopgrade
import netsvc
def up(cursor, installed_version):
logger= netsvc.Logger()
print "somenergia-generationkwh_0.0.1.1: Hem entrat al UP"
if not installed_version:
return
logger.notifyChannel('migration', netsvc.LOG_INFO, 'Changing ir_model_data from giscedata_facturacio_comer to giscedata_facturacio')
'''cursor.execute("delete
from generationkwh_investment
where id in (
select
inv.id as id
from
generationkwh_investment as inv
left join
account_move_line as ml
on
inv.move_line_id = ml.id
left join
account_period as p
on
p.id = ml.period_id
where
p.special and
not inv.active")'''
logger.notifyChannel('migration', netsvc.LOG_INFO, 'Successfully changed')
def down(cursor):
print "somenergia-generationkwh_0.0.1.1: Hem entrat al down"
pass
# vim: ts=4 sw=4 et
|
<commit_before><commit_msg>Add pre-script migration generationkwh module<commit_after>
|
# coding=utf-8
from oopgrade import oopgrade
import netsvc
def up(cursor, installed_version):
logger= netsvc.Logger()
print "somenergia-generationkwh_0.0.1.1: Hem entrat al UP"
if not installed_version:
return
logger.notifyChannel('migration', netsvc.LOG_INFO, 'Changing ir_model_data from giscedata_facturacio_comer to giscedata_facturacio')
'''cursor.execute("delete
from generationkwh_investment
where id in (
select
inv.id as id
from
generationkwh_investment as inv
left join
account_move_line as ml
on
inv.move_line_id = ml.id
left join
account_period as p
on
p.id = ml.period_id
where
p.special and
not inv.active")'''
logger.notifyChannel('migration', netsvc.LOG_INFO, 'Successfully changed')
def down(cursor):
print "somenergia-generationkwh_0.0.1.1: Hem entrat al down"
pass
# vim: ts=4 sw=4 et
|
Add pre-script migration generationkwh module# coding=utf-8
from oopgrade import oopgrade
import netsvc
def up(cursor, installed_version):
logger= netsvc.Logger()
print "somenergia-generationkwh_0.0.1.1: Hem entrat al UP"
if not installed_version:
return
logger.notifyChannel('migration', netsvc.LOG_INFO, 'Changing ir_model_data from giscedata_facturacio_comer to giscedata_facturacio')
'''cursor.execute("delete
from generationkwh_investment
where id in (
select
inv.id as id
from
generationkwh_investment as inv
left join
account_move_line as ml
on
inv.move_line_id = ml.id
left join
account_period as p
on
p.id = ml.period_id
where
p.special and
not inv.active")'''
logger.notifyChannel('migration', netsvc.LOG_INFO, 'Successfully changed')
def down(cursor):
print "somenergia-generationkwh_0.0.1.1: Hem entrat al down"
pass
# vim: ts=4 sw=4 et
|
<commit_before><commit_msg>Add pre-script migration generationkwh module<commit_after># coding=utf-8
from oopgrade import oopgrade
import netsvc
def up(cursor, installed_version):
logger= netsvc.Logger()
print "somenergia-generationkwh_0.0.1.1: Hem entrat al UP"
if not installed_version:
return
logger.notifyChannel('migration', netsvc.LOG_INFO, 'Changing ir_model_data from giscedata_facturacio_comer to giscedata_facturacio')
'''cursor.execute("delete
from generationkwh_investment
where id in (
select
inv.id as id
from
generationkwh_investment as inv
left join
account_move_line as ml
on
inv.move_line_id = ml.id
left join
account_period as p
on
p.id = ml.period_id
where
p.special and
not inv.active")'''
logger.notifyChannel('migration', netsvc.LOG_INFO, 'Successfully changed')
def down(cursor):
print "somenergia-generationkwh_0.0.1.1: Hem entrat al down"
pass
# vim: ts=4 sw=4 et
|
|
4226b7d1df34d61e948d435272964d4665c63836
|
tmp_gen.py
|
tmp_gen.py
|
import joblib
path_dict = joblib.load('data/filepath_dict.txt')
def tmp_gen():
for i in range(len(path_dict)):
path = path_dict[i]
with open(path) as document_file:
yield document_file.read()
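As a brief editorial illustration (not taken from the commit), the generator above is presumably consumed lazily by whatever builds the corpus; a minimal usage sketch:
# Hypothetical consumption of tmp_gen(); purely illustrative.
for i, text in enumerate(tmp_gen()):
    if i >= 3:          # peek at just the first few documents
        break
    print(i, len(text), 'characters')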
|
Add temporary corpus document generator - toolset need to be reworked.
|
Add temporary corpus document generator - toolset need to be reworked.
|
Python
|
mit
|
theovasi/browsewiki,theovasi/browsewiki,theovasi/browsewiki
|
Add temporary corpus document generator - toolset need to be reworked.
|
import joblib
path_dict = joblib.load('data/filepath_dict.txt')
def tmp_gen():
for i in range(len(path_dict)):
path = path_dict[i]
with open(path) as document_file:
yield document_file.read()
|
<commit_before><commit_msg>Add temporary corpus document generator - toolset need to be reworked.<commit_after>
|
import joblib
path_dict = joblib.load('data/filepath_dict.txt')
def tmp_gen():
for i in range(len(path_dict)):
path = path_dict[i]
with open(path) as document_file:
yield document_file.read()
|
Add temporary corpus document generator - toolset need to be reworked.import joblib
path_dict = joblib.load('data/filepath_dict.txt')
def tmp_gen():
for i in range(len(path_dict)):
path = path_dict[i]
with open(path) as document_file:
yield document_file.read()
|
<commit_before><commit_msg>Add temporary corpus document generator - toolset need to be reworked.<commit_after>import joblib
path_dict = joblib.load('data/filepath_dict.txt')
def tmp_gen():
for i in range(len(path_dict)):
path = path_dict[i]
with open(path) as document_file:
yield document_file.read()
|
|
b864b771f57dbef9a26fbb7acd864f113bdeab96
|
tests/test_missingmigrations.py
|
tests/test_missingmigrations.py
|
import cStringIO
from django.test import TestCase
from django.core.management import call_command
class MissingMigrationTest(TestCase):
def test_for_missing_migrations(self):
out = cStringIO.StringIO()
call_command('makemigrations', '--dry-run',
verbosity=3, interactive=False, stdout=out)
self.assertEquals(out.getvalue(), 'No changes detected\n')
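As an editorial sketch rather than part of the original commit, the same guard on Python 3 would swap cStringIO for io.StringIO; everything else about the makemigrations --dry-run trick carries over unchanged.
# Hypothetical Python 3 variant of the missing-migrations check (sketch only).
import io
from django.core.management import call_command
from django.test import TestCase
class MissingMigrationTest(TestCase):
    def test_for_missing_migrations(self):
        out = io.StringIO()
        call_command('makemigrations', '--dry-run',
                     verbosity=3, interactive=False, stdout=out)
        self.assertIn('No changes detected', out.getvalue())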
|
Add a test for missing migrations.
|
Add a test for missing migrations.
This commit detects missing migrations by running makemigrations
with the --dry-run flag and checking for the 'No changes detected'
message on stdout. Otherwise, the error includes information on
the migrations that are missing.
|
Python
|
mit
|
jrief/djangocms-cascade,jrief/djangocms-cascade,haricot/djangocms-bs4forcascade,haricot/djangocms-bs4forcascade,jrief/djangocms-cascade
|
Add a test for missing migrations.
This commit detects missing migrations by running makemigrations
with the --dry-run flag and checking for the 'No changes detected'
message on stdout. Otherwise, the error includes information on
the migrations that are missing.
|
import cStringIO
from django.test import TestCase
from django.core.management import call_command
class MissingMigrationTest(TestCase):
def test_for_missing_migrations(self):
out = cStringIO.StringIO()
call_command('makemigrations', '--dry-run',
verbosity=3, interactive=False, stdout=out)
self.assertEquals(out.getvalue(), 'No changes detected\n')
|
<commit_before><commit_msg>Add a test for missing migrations.
This commit detects missing migrations by running makemigrations
with the --dry-run flag and checking for the 'No changes detected'
message on stdout. Otherwise, the error includes information on
the migrations that are missing.<commit_after>
|
import cStringIO
from django.test import TestCase
from django.core.management import call_command
class MissingMigrationTest(TestCase):
def test_for_missing_migrations(self):
out = cStringIO.StringIO()
call_command('makemigrations', '--dry-run',
verbosity=3, interactive=False, stdout=out)
self.assertEquals(out.getvalue(), 'No changes detected\n')
|
Add a test for missing migrations.
This commit detects missing migrations by running makemigrations
with the --dry-run flag and checking for the 'No changes detected'
message on stdout. Otherwise, the error includes information on
the migrations that are missing.import cStringIO
from django.test import TestCase
from django.core.management import call_command
class MissingMigrationTest(TestCase):
def test_for_missing_migrations(self):
out = cStringIO.StringIO()
call_command('makemigrations', '--dry-run',
verbosity=3, interactive=False, stdout=out)
self.assertEquals(out.getvalue(), 'No changes detected\n')
|
<commit_before><commit_msg>Add a test for missing migrations.
This commit detects missing migrations by running makemigrations
with the --dry-run flag and checking for the 'No changes detected'
message on stdout. Otherwise, the error includes information on
the migrations that are missing.<commit_after>import cStringIO
from django.test import TestCase
from django.core.management import call_command
class MissingMigrationTest(TestCase):
def test_for_missing_migrations(self):
out = cStringIO.StringIO()
call_command('makemigrations', '--dry-run',
verbosity=3, interactive=False, stdout=out)
self.assertEquals(out.getvalue(), 'No changes detected\n')
|
|
c7a1b733ab274381fa01c96ffc5bf16967a721e5
|
tests/unit/modules/test_ansiblegate.py
|
tests/unit/modules/test_ansiblegate.py
|
# -*- coding: utf-8 -*-
#
# Author: Bo Maryniuk <bo@suse.de>
#
# Copyright 2017 SUSE LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
Mock,
MagicMock,
call,
patch,
NO_MOCK,
NO_MOCK_REASON
)
import salt.modules.ansiblegate as ansible
@skipIf(NO_MOCK, NO_MOCK_REASON)
class AnsiblegateTestCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
return {ansible: {}}
def test_ansible_modules_listing(self):
pass
def test_ansible_module_help(self):
pass
|
Add scaffold for the unit test
|
Add scaffold for the unit test
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
Add scaffold for the unit test
|
# -*- coding: utf-8 -*-
#
# Author: Bo Maryniuk <bo@suse.de>
#
# Copyright 2017 SUSE LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
Mock,
MagicMock,
call,
patch,
NO_MOCK,
NO_MOCK_REASON
)
import salt.modules.ansiblegate as ansible
@skipIf(NO_MOCK, NO_MOCK_REASON)
class AnsiblegateTestCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
return {ansible: {}}
def test_ansible_modules_listing(self):
pass
def test_ansible_module_help(self):
pass
|
<commit_before><commit_msg>Add scaffold for the unit test<commit_after>
|
# -*- coding: utf-8 -*-
#
# Author: Bo Maryniuk <bo@suse.de>
#
# Copyright 2017 SUSE LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
Mock,
MagicMock,
call,
patch,
NO_MOCK,
NO_MOCK_REASON
)
import salt.modules.ansiblegate as ansible
@skipIf(NO_MOCK, NO_MOCK_REASON)
class AnsiblegateTestCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
return {ansible: {}}
def test_ansible_modules_listing(self):
pass
def test_ansible_module_help(self):
pass
|
Add scaffold for the unit test# -*- coding: utf-8 -*-
#
# Author: Bo Maryniuk <bo@suse.de>
#
# Copyright 2017 SUSE LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
Mock,
MagicMock,
call,
patch,
NO_MOCK,
NO_MOCK_REASON
)
import salt.modules.ansiblegate as ansible
@skipIf(NO_MOCK, NO_MOCK_REASON)
class AnsiblegateTestCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
return {ansible: {}}
def test_ansible_modules_listing(self):
pass
def test_ansible_module_help(self):
pass
|
<commit_before><commit_msg>Add scaffold for the unit test<commit_after># -*- coding: utf-8 -*-
#
# Author: Bo Maryniuk <bo@suse.de>
#
# Copyright 2017 SUSE LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
from tests.support.mock import (
Mock,
MagicMock,
call,
patch,
NO_MOCK,
NO_MOCK_REASON
)
import salt.modules.ansiblegate as ansible
@skipIf(NO_MOCK, NO_MOCK_REASON)
class AnsiblegateTestCase(TestCase, LoaderModuleMockMixin):
def setup_loader_modules(self):
return {ansible: {}}
def test_ansible_modules_listing(self):
pass
def test_ansible_module_help(self):
pass
|
|
336f3ab701dc5b50e5ba65b7789029e6cf403d4e
|
fetsy/migrations/0005_ticket_reminder.py
|
fetsy/migrations/0005_ticket_reminder.py
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fetsy', '0004_tags'),
]
operations = [
migrations.AddField(
model_name='ticket',
name='reminder',
field=models.PositiveIntegerField(verbose_name='Remind me in ... minutes', default=120),
preserve_default=True,
),
]
|
Add migrations for reminder field.
|
Add migrations for reminder field.
|
Python
|
mit
|
normanjaeckel/FeTSy,normanjaeckel/FeTSy,normanjaeckel/FeTSy
|
Add migrations for reminder field.
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fetsy', '0004_tags'),
]
operations = [
migrations.AddField(
model_name='ticket',
name='reminder',
field=models.PositiveIntegerField(verbose_name='Remind me in ... minutes', default=120),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add migrations for reminder field.<commit_after>
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fetsy', '0004_tags'),
]
operations = [
migrations.AddField(
model_name='ticket',
name='reminder',
field=models.PositiveIntegerField(verbose_name='Remind me in ... minutes', default=120),
preserve_default=True,
),
]
|
Add migrations for reminder field.from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fetsy', '0004_tags'),
]
operations = [
migrations.AddField(
model_name='ticket',
name='reminder',
field=models.PositiveIntegerField(verbose_name='Remind me in ... minutes', default=120),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add migrations for reminder field.<commit_after>from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fetsy', '0004_tags'),
]
operations = [
migrations.AddField(
model_name='ticket',
name='reminder',
field=models.PositiveIntegerField(verbose_name='Remind me in ... minutes', default=120),
preserve_default=True,
),
]
|
|
3d6519dd5f03e5ac87c4b6c45941199b8a12e3e7
|
tests/test_model.py
|
tests/test_model.py
|
import uuid
from bloop import Column, UUID, Boolean, DateTime, String
missing = object()
def test_default_model_init(User):
''' Missing attributes aren't set to `None` or any other placeholder '''
user = User(id=uuid.uuid4(), email='user@domain.com')
assert user.email == 'user@domain.com'
assert getattr(user, 'name', missing) is missing
def test_load_default_init(engine, local_bind):
''' The default model loader uses the model's __init__ method '''
loader_calls = 0
class CustomUser(engine.model):
id = Column(UUID, hash_key=True)
admin = Column(Boolean)
joined = Column(DateTime)
email = Column(String)
engine.bind()
def load_user(**kwargs):
nonlocal loader_calls
loader_calls += 1
user = CustomUser()
for key, value in kwargs.items():
setattr(user, key, value)
return user
CustomUser.Meta.bloop_init = load_user
user_id = uuid.uuid4()
user = {
'id': {'S': str(user_id)},
'admin': {'BOOL': False},
'extra_field': {'N': '0.125'}
}
loaded_user = CustomUser.__load__(user)
assert loader_calls == 1
assert loaded_user.id == user_id
assert loaded_user.admin is False
# Values that aren't explicitly described by the model aren't passed to
# the custom loader
assert getattr(loaded_user, 'extra_field', missing) is missing
def test_load_dump(User):
''' __load__ and __dump__ should be symmetric '''
user_id = uuid.uuid4()
user = User(id=user_id, name='name', email='user@domain.com', age=25)
serialized_user = {
'id': {'S': str(user_id)},
'age': {'N': '25'},
'name': {'S': 'name'},
'email': {'S': 'user@domain.com'}
}
assert User.__load__(serialized_user) == user
assert User.__dump__(user) == serialized_user
def test_equality(User):
user_id = uuid.uuid4()
user = User(id=user_id, name='name', email='user@domain.com', age=25)
same = User(id=user_id, name='name', email='user@domain.com', age=25)
other = User(id=user_id, name='wrong', email='user@domain.com', age=25)
another = User(id=user_id, email='user@domain.com', age=25)
# Wrong type
assert not(user == 'foo')
assert user != 'foo'
# Attr with different value
assert not(user == other)
assert user != other
# Missing an attr
assert not(user == another)
assert user != another
assert user == same
|
Add unit tests for BaseModel
|
Add unit tests for BaseModel
|
Python
|
mit
|
numberoverzero/bloop,numberoverzero/bloop
|
Add unit tests for BaseModel
|
import uuid
from bloop import Column, UUID, Boolean, DateTime, String
missing = object()
def test_default_model_init(User):
''' Missing attributes aren't set to `None` or any other placeholder '''
user = User(id=uuid.uuid4(), email='user@domain.com')
assert user.email == 'user@domain.com'
assert getattr(user, 'name', missing) is missing
def test_load_default_init(engine, local_bind):
''' The default model loader uses the model's __init__ method '''
loader_calls = 0
class CustomUser(engine.model):
id = Column(UUID, hash_key=True)
admin = Column(Boolean)
joined = Column(DateTime)
email = Column(String)
engine.bind()
def load_user(**kwargs):
nonlocal loader_calls
loader_calls += 1
user = CustomUser()
for key, value in kwargs.items():
setattr(user, key, value)
return user
CustomUser.Meta.bloop_init = load_user
user_id = uuid.uuid4()
user = {
'id': {'S': str(user_id)},
'admin': {'BOOL': False},
'extra_field': {'N': '0.125'}
}
loaded_user = CustomUser.__load__(user)
assert loader_calls == 1
assert loaded_user.id == user_id
assert loaded_user.admin is False
# Values that aren't explicitly described by the model aren't passed to
# the custom loader
assert getattr(loaded_user, 'extra_field', missing) is missing
def test_load_dump(User):
''' __load__ and __dump__ should be symmetric '''
user_id = uuid.uuid4()
user = User(id=user_id, name='name', email='user@domain.com', age=25)
serialized_user = {
'id': {'S': str(user_id)},
'age': {'N': '25'},
'name': {'S': 'name'},
'email': {'S': 'user@domain.com'}
}
assert User.__load__(serialized_user) == user
assert User.__dump__(user) == serialized_user
def test_equality(User):
user_id = uuid.uuid4()
user = User(id=user_id, name='name', email='user@domain.com', age=25)
same = User(id=user_id, name='name', email='user@domain.com', age=25)
other = User(id=user_id, name='wrong', email='user@domain.com', age=25)
another = User(id=user_id, email='user@domain.com', age=25)
# Wrong type
assert not(user == 'foo')
assert user != 'foo'
# Attr with different value
assert not(user == other)
assert user != other
# Missing an attr
assert not(user == another)
assert user != another
assert user == same
|
<commit_before><commit_msg>Add unit tests for BaseModel<commit_after>
|
import uuid
from bloop import Column, UUID, Boolean, DateTime, String
missing = object()
def test_default_model_init(User):
''' Missing attributes aren't set to `None` or any other placeholder '''
user = User(id=uuid.uuid4(), email='user@domain.com')
assert user.email == 'user@domain.com'
assert getattr(user, 'name', missing) is missing
def test_load_default_init(engine, local_bind):
''' The default model loader uses the model's __init__ method '''
loader_calls = 0
class CustomUser(engine.model):
id = Column(UUID, hash_key=True)
admin = Column(Boolean)
joined = Column(DateTime)
email = Column(String)
engine.bind()
def load_user(**kwargs):
nonlocal loader_calls
loader_calls += 1
user = CustomUser()
for key, value in kwargs.items():
setattr(user, key, value)
return user
CustomUser.Meta.bloop_init = load_user
user_id = uuid.uuid4()
user = {
'id': {'S': str(user_id)},
'admin': {'BOOL': False},
'extra_field': {'N': '0.125'}
}
loaded_user = CustomUser.__load__(user)
assert loader_calls == 1
assert loaded_user.id == user_id
assert loaded_user.admin is False
# Values that aren't explicitly described by the model aren't passed to
# the custom loader
assert getattr(loaded_user, 'extra_field', missing) is missing
def test_load_dump(User):
''' __load__ and __dump__ should be symmetric '''
user_id = uuid.uuid4()
user = User(id=user_id, name='name', email='user@domain.com', age=25)
serialized_user = {
'id': {'S': str(user_id)},
'age': {'N': '25'},
'name': {'S': 'name'},
'email': {'S': 'user@domain.com'}
}
assert User.__load__(serialized_user) == user
assert User.__dump__(user) == serialized_user
def test_equality(User):
user_id = uuid.uuid4()
user = User(id=user_id, name='name', email='user@domain.com', age=25)
same = User(id=user_id, name='name', email='user@domain.com', age=25)
other = User(id=user_id, name='wrong', email='user@domain.com', age=25)
another = User(id=user_id, email='user@domain.com', age=25)
# Wrong type
assert not(user == 'foo')
assert user != 'foo'
# Attr with different value
assert not(user == other)
assert user != other
# Missing an attr
assert not(user == another)
assert user != another
assert user == same
|
Add unit tests for BaseModelimport uuid
from bloop import Column, UUID, Boolean, DateTime, String
missing = object()
def test_default_model_init(User):
''' Missing attributes aren't set to `None` or any other placeholder '''
user = User(id=uuid.uuid4(), email='user@domain.com')
assert user.email == 'user@domain.com'
assert getattr(user, 'name', missing) is missing
def test_load_default_init(engine, local_bind):
''' The default model loader uses the model's __init__ method '''
loader_calls = 0
class CustomUser(engine.model):
id = Column(UUID, hash_key=True)
admin = Column(Boolean)
joined = Column(DateTime)
email = Column(String)
engine.bind()
def load_user(**kwargs):
nonlocal loader_calls
loader_calls += 1
user = CustomUser()
for key, value in kwargs.items():
setattr(user, key, value)
return user
CustomUser.Meta.bloop_init = load_user
user_id = uuid.uuid4()
user = {
'id': {'S': str(user_id)},
'admin': {'BOOL': False},
'extra_field': {'N': '0.125'}
}
loaded_user = CustomUser.__load__(user)
assert loader_calls == 1
assert loaded_user.id == user_id
assert loaded_user.admin is False
# Values that aren't explicitly described by the model aren't passed to
# the custom loader
assert getattr(loaded_user, 'extra_field', missing) is missing
def test_load_dump(User):
''' __load__ and __dump__ should be symmetric '''
user_id = uuid.uuid4()
user = User(id=user_id, name='name', email='user@domain.com', age=25)
serialized_user = {
'id': {'S': str(user_id)},
'age': {'N': '25'},
'name': {'S': 'name'},
'email': {'S': 'user@domain.com'}
}
assert User.__load__(serialized_user) == user
assert User.__dump__(user) == serialized_user
def test_equality(User):
user_id = uuid.uuid4()
user = User(id=user_id, name='name', email='user@domain.com', age=25)
same = User(id=user_id, name='name', email='user@domain.com', age=25)
other = User(id=user_id, name='wrong', email='user@domain.com', age=25)
another = User(id=user_id, email='user@domain.com', age=25)
# Wrong type
assert not(user == 'foo')
assert user != 'foo'
# Attr with different value
assert not(user == other)
assert user != other
# Missing an attr
assert not(user == another)
assert user != another
assert user == same
|
<commit_before><commit_msg>Add unit tests for BaseModel<commit_after>import uuid
from bloop import Column, UUID, Boolean, DateTime, String
missing = object()
def test_default_model_init(User):
''' Missing attributes aren't set to `None` or any other placeholder '''
user = User(id=uuid.uuid4(), email='user@domain.com')
assert user.email == 'user@domain.com'
assert getattr(user, 'name', missing) is missing
def test_load_default_init(engine, local_bind):
''' The default model loader uses the model's __init__ method '''
loader_calls = 0
class CustomUser(engine.model):
id = Column(UUID, hash_key=True)
admin = Column(Boolean)
joined = Column(DateTime)
email = Column(String)
engine.bind()
def load_user(**kwargs):
nonlocal loader_calls
loader_calls += 1
user = CustomUser()
for key, value in kwargs.items():
setattr(user, key, value)
return user
CustomUser.Meta.bloop_init = load_user
user_id = uuid.uuid4()
user = {
'id': {'S': str(user_id)},
'admin': {'BOOL': False},
'extra_field': {'N': '0.125'}
}
loaded_user = CustomUser.__load__(user)
assert loader_calls == 1
assert loaded_user.id == user_id
assert loaded_user.admin is False
# Values that aren't explicitly described by the model aren't passed to
# the custom loader
assert getattr(loaded_user, 'extra_field', missing) is missing
def test_load_dump(User):
''' __load__ and __dump__ should be symmetric '''
user_id = uuid.uuid4()
user = User(id=user_id, name='name', email='user@domain.com', age=25)
serialized_user = {
'id': {'S': str(user_id)},
'age': {'N': '25'},
'name': {'S': 'name'},
'email': {'S': 'user@domain.com'}
}
assert User.__load__(serialized_user) == user
assert User.__dump__(user) == serialized_user
def test_equality(User):
user_id = uuid.uuid4()
user = User(id=user_id, name='name', email='user@domain.com', age=25)
same = User(id=user_id, name='name', email='user@domain.com', age=25)
other = User(id=user_id, name='wrong', email='user@domain.com', age=25)
another = User(id=user_id, email='user@domain.com', age=25)
# Wrong type
assert not(user == 'foo')
assert user != 'foo'
# Attr with different value
assert not(user == other)
assert user != other
# Missing an attr
assert not(user == another)
assert user != another
assert user == same
|
|
dbc18d07160a0f08485234a3ffe766031144d951
|
src/test_theme.py
|
src/test_theme.py
|
# This module is part of the GeoTag-X project builder.
# Copyright (C) 2015 UNITAR.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
from theme import Theme
class TestTheme(unittest.TestCase):
pass
if __name__ == "__main__":
unittest.main()
|
Add a unit testing stub for the Theme object
|
Add a unit testing stub for the Theme object
|
Python
|
agpl-3.0
|
geotagx/geotagx-project-template,geotagx/geotagx-project-template
|
Add a unit testing stub for the Theme object
|
# This module is part of the GeoTag-X project builder.
# Copyright (C) 2015 UNITAR.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
from theme import Theme
class TestTheme(unittest.TestCase):
pass
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add a unit testing stub for the Theme object<commit_after>
|
# This module is part of the GeoTag-X project builder.
# Copyright (C) 2015 UNITAR.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
from theme import Theme
class TestTheme(unittest.TestCase):
pass
if __name__ == "__main__":
unittest.main()
|
Add a unit testing stub for the Theme object# This module is part of the GeoTag-X project builder.
# Copyright (C) 2015 UNITAR.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
from theme import Theme
class TestTheme(unittest.TestCase):
pass
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add a unit testing stub for the Theme object<commit_after># This module is part of the GeoTag-X project builder.
# Copyright (C) 2015 UNITAR.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
from theme import Theme
class TestTheme(unittest.TestCase):
pass
if __name__ == "__main__":
unittest.main()
|
|
a08604f85b82300a4a3b4f2e70f91c3ee129859b
|
openelex/tests/test_fetch.py
|
openelex/tests/test_fetch.py
|
from unittest import TestCase
from openelex.base.fetch import ErrorHandlingURLopener, HTTPError
class TestErrorHandlingURLopener(TestCase):
def setUp(self):
self.opener = ErrorHandlingURLopener()
def test_404(self):
url = "http://example.com/test.csv"
self.assertRaises(HTTPError, self.opener.retrieve, url)
|
Add missing test for ErrorHandlingURLopener
|
Add missing test for ErrorHandlingURLopener
I forgot to add this with commit
df9cfda52e952bb4d69cc0ee724e713cd6f468d5
|
Python
|
mit
|
cathydeng/openelections-core,datamade/openelections-core,openelections/openelections-core,datamade/openelections-core,openelections/openelections-core,cathydeng/openelections-core
|
Add missing test for ErrorHandlingURLopener
I forgot to add this with commit
df9cfda52e952bb4d69cc0ee724e713cd6f468d5
|
from unittest import TestCase
from openelex.base.fetch import ErrorHandlingURLopener, HTTPError
class TestErrorHandlingURLopener(TestCase):
def setUp(self):
self.opener = ErrorHandlingURLopener()
def test_404(self):
url = "http://example.com/test.csv"
self.assertRaises(HTTPError, self.opener.retrieve, url)
|
<commit_before><commit_msg>Add missing test for ErrorHandlingURLopener
I forgot to add this with commit
df9cfda52e952bb4d69cc0ee724e713cd6f468d5<commit_after>
|
from unittest import TestCase
from openelex.base.fetch import ErrorHandlingURLopener, HTTPError
class TestErrorHandlingURLopener(TestCase):
def setUp(self):
self.opener = ErrorHandlingURLopener()
def test_404(self):
url = "http://example.com/test.csv"
self.assertRaises(HTTPError, self.opener.retrieve, url)
|
Add missing test for ErrorHandlingURLopener
I forgot to add this with commit
df9cfda52e952bb4d69cc0ee724e713cd6f468d5from unittest import TestCase
from openelex.base.fetch import ErrorHandlingURLopener, HTTPError
class TestErrorHandlingURLopener(TestCase):
def setUp(self):
self.opener = ErrorHandlingURLopener()
def test_404(self):
url = "http://example.com/test.csv"
self.assertRaises(HTTPError, self.opener.retrieve, url)
|
<commit_before><commit_msg>Add missing test for ErrorHandlingURLopener
I forgot to add this with commit
df9cfda52e952bb4d69cc0ee724e713cd6f468d5<commit_after>from unittest import TestCase
from openelex.base.fetch import ErrorHandlingURLopener, HTTPError
class TestErrorHandlingURLopener(TestCase):
def setUp(self):
self.opener = ErrorHandlingURLopener()
def test_404(self):
url = "http://example.com/test.csv"
self.assertRaises(HTTPError, self.opener.retrieve, url)
|
|
cc23bfe9980525c90ae32460cd8231458f8880a5
|
lib/results_analysis/sim_result_tools.py
|
lib/results_analysis/sim_result_tools.py
|
__all__ = ['extract_ssi']
from datetime import datetime, timedelta
import h5py
import numpy as np
import numpy.ma as ma
# gzip compression flag
comp = 6
def extract_ssi(sim_fname, param_fname, result_fname, start_dt):
"""
Read a TOPKAPI simulation file and its associated parameter file
and compute the SSI for each timestep. Store the results in a new
HDF5 file, grouped by date and containing datasets of latitude,
longitude and SSI value.
"""
params = np.loadtxt(param_fname)
lon = params[:, 1]
lat = params[:, 2]
soil_depth = params[:, 8]
factor = params[:, 11] - params[:, 10]
cell_area = 1000.0**2 # m^2
soil_depth = ma.masked_values(soil_depth, 0.0)
factor = ma.array(factor, mask=soil_depth.mask)
lon = ma.array(lon, mask=soil_depth.mask).compressed()
lat = ma.array(lat, mask=soil_depth.mask).compressed()
div = factor*soil_depth*cell_area
tkpi_file = h5py.File(sim_fname)
result_file = h5py.File(result_fname, 'w')
soil_vol = tkpi_file['/Soil/V_s'][...]
tkpi_file.close()
rows, cols = soil_vol.shape
# lat
dset = result_file.require_dataset('lat', shape=lat.shape,
dtype=np.float32, compression=comp)
dset[...] = lat
dset.attrs['name'] = 'latitude'
dset.attrs['units'] = 'Decimal degrees'
# lon
dset = result_file.require_dataset('lon', shape=lon.shape,
dtype=np.float32, compression=comp)
dset[...] = lon
dset.attrs['name'] = 'longitude'
dset.attrs['units'] = 'Decimal degrees'
curr_dt = start_dt
for k in range(rows):
print curr_dt
# ssi = (Vs/cell_vol)*100
# cell_vol = (theta_s - theta_r)*soil_depth*cell_area
sv = ma.array(soil_vol[k], mask=soil_depth.mask)
ssi = (sv/(div))*100.0
ssi = ssi.compressed()
# ssi
dset = result_file.require_dataset(curr_dt.strftime('%Y%m%d%H00'),
shape=ssi.shape,
dtype=np.float32, compression=comp)
dset[...] = ssi
dset.attrs['name'] = 'TOPKAPI soil saturation index'
dset.attrs['units'] = '% saturation'
curr_dt += timedelta(hours=3)
result_file.close()
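For readers new to the soil saturation index computed in the loop above, a small worked example with made-up numbers (not taken from the commit) shows the arithmetic behind ssi = (Vs / cell_vol) * 100:
# Illustrative values only: theta_s - theta_r = 0.3, soil depth = 0.5 m,
# cell area = 1 km x 1 km, and an assumed stored soil water volume V_s.
factor = 0.3
soil_depth = 0.5                              # metres
cell_area = 1000.0**2                         # m^2
cell_vol = factor * soil_depth * cell_area    # 150000 m^3 of drainable pore space
v_s = 60000.0                                 # assumed stored volume, m^3
ssi = (v_s / cell_vol) * 100.0
print(ssi)                                    # -> 40.0 (% saturation)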
|
Add tool to extract SSI from TOPKAPI simulation results
|
ENH: Add tool to extract SSI from TOPKAPI simulation results
|
Python
|
bsd-3-clause
|
sahg/PyTOPKAPI,scottza/PyTOPKAPI
|
ENH: Add tool to extract SSI from TOPKAPI simulation results
|
__all__ = ['extract_ssi']
from datetime import datetime, timedelta
import h5py
import numpy as np
import numpy.ma as ma
# gzip compression flag
comp = 6
def extract_ssi(sim_fname, param_fname, result_fname, start_dt):
"""
Read a TOPKAPI simulation file and its associated parameter file
and compute the SSI for each timestep. Store the results in a new
HDF5 file, grouped by date and containing datasets of latitude,
longitude and SSI value.
"""
params = np.loadtxt(param_fname)
lon = params[:, 1]
lat = params[:, 2]
soil_depth = params[:, 8]
factor = params[:, 11] - params[:, 10]
cell_area = 1000.0**2 # m^2
soil_depth = ma.masked_values(soil_depth, 0.0)
factor = ma.array(factor, mask=soil_depth.mask)
lon = ma.array(lon, mask=soil_depth.mask).compressed()
lat = ma.array(lat, mask=soil_depth.mask).compressed()
div = factor*soil_depth*cell_area
tkpi_file = h5py.File(sim_fname)
result_file = h5py.File(result_fname, 'w')
soil_vol = tkpi_file['/Soil/V_s'][...]
tkpi_file.close()
rows, cols = soil_vol.shape
# lat
dset = result_file.require_dataset('lat', shape=lat.shape,
dtype=np.float32, compression=comp)
dset[...] = lat
dset.attrs['name'] = 'latitude'
dset.attrs['units'] = 'Decimal degrees'
# lon
dset = result_file.require_dataset('lon', shape=lon.shape,
dtype=np.float32, compression=comp)
dset[...] = lon
dset.attrs['name'] = 'longitude'
dset.attrs['units'] = 'Decimal degrees'
curr_dt = start_dt
for k in range(rows):
print curr_dt
# ssi = (Vs/cell_vol)*100
# cell_vol = (theta_s - theta_r)*soil_depth*cell_area
sv = ma.array(soil_vol[k], mask=soil_depth.mask)
ssi = (sv/(div))*100.0
ssi = ssi.compressed()
# ssi
dset = result_file.require_dataset(curr_dt.strftime('%Y%m%d%H00'),
shape=ssi.shape,
dtype=np.float32, compression=comp)
dset[...] = ssi
dset.attrs['name'] = 'TOPKAPI soil saturation index'
dset.attrs['units'] = '% saturation'
curr_dt += timedelta(hours=3)
result_file.close()
|
<commit_before><commit_msg>ENH: Add tool to extract SSI from TOPKAPI simulation results<commit_after>
|
__all__ = ['extract_ssi']
from datetime import datetime, timedelta
import h5py
import numpy as np
import numpy.ma as ma
# gzip compression flag
comp = 6
def extract_ssi(sim_fname, param_fname, result_fname, start_dt):
"""
Read a TOPKAPI simulation file and its associated parameter file
and compute the SSI for each timestep. Store the results in a new
HDF5 file, grouped by date and containing datasets of latitude,
longitude and SSI value.
"""
params = np.loadtxt(param_fname)
lon = params[:, 1]
lat = params[:, 2]
soil_depth = params[:, 8]
factor = params[:, 11] - params[:, 10]
cell_area = 1000.0**2 # m^2
soil_depth = ma.masked_values(soil_depth, 0.0)
factor = ma.array(factor, mask=soil_depth.mask)
lon = ma.array(lon, mask=soil_depth.mask).compressed()
lat = ma.array(lat, mask=soil_depth.mask).compressed()
div = factor*soil_depth*cell_area
tkpi_file = h5py.File(sim_fname)
result_file = h5py.File(result_fname, 'w')
soil_vol = tkpi_file['/Soil/V_s'][...]
tkpi_file.close()
rows, cols = soil_vol.shape
# lat
dset = result_file.require_dataset('lat', shape=lat.shape,
dtype=np.float32, compression=comp)
dset[...] = lat
dset.attrs['name'] = 'latitude'
dset.attrs['units'] = 'Decimal degrees'
# lon
dset = result_file.require_dataset('lon', shape=lon.shape,
dtype=np.float32, compression=comp)
dset[...] = lon
dset.attrs['name'] = 'longitude'
dset.attrs['units'] = 'Decimal degrees'
curr_dt = start_dt
for k in range(rows):
print curr_dt
# ssi = (Vs/cell_vol)*100
# cell_vol = (theta_s - theta_r)*soil_depth*cell_area
sv = ma.array(soil_vol[k], mask=soil_depth.mask)
ssi = (sv/(div))*100.0
ssi = ssi.compressed()
# ssi
dset = result_file.require_dataset(curr_dt.strftime('%Y%m%d%H00'),
shape=ssi.shape,
dtype=np.float32, compression=comp)
dset[...] = ssi
dset.attrs['name'] = 'TOPKAPI soil saturation index'
dset.attrs['units'] = '% saturation'
curr_dt += timedelta(hours=3)
result_file.close()
|
ENH: Add tool to extract SSI from TOPKAPI simulation results__all__ = ['extract_ssi']
from datetime import datetime, timedelta
import h5py
import numpy as np
import numpy.ma as ma
# gzip compression flag
comp = 6
def extract_ssi(sim_fname, param_fname, result_fname, start_dt):
"""
Read a TOPKAPI simulation file and its associated parameter file
and compute the SSI for each timestep. Store the results in a new
HDF5 file, grouped by date and containing datasets of latitude,
longitude and SSI value.
"""
params = np.loadtxt(param_fname)
lon = params[:, 1]
lat = params[:, 2]
soil_depth = params[:, 8]
factor = params[:, 11] - params[:, 10]
cell_area = 1000.0**2 # m^2
soil_depth = ma.masked_values(soil_depth, 0.0)
factor = ma.array(factor, mask=soil_depth.mask)
lon = ma.array(lon, mask=soil_depth.mask).compressed()
lat = ma.array(lat, mask=soil_depth.mask).compressed()
div = factor*soil_depth*cell_area
tkpi_file = h5py.File(sim_fname)
result_file = h5py.File(result_fname, 'w')
soil_vol = tkpi_file['/Soil/V_s'][...]
tkpi_file.close()
rows, cols = soil_vol.shape
# lat
dset = result_file.require_dataset('lat', shape=lat.shape,
dtype=np.float32, compression=comp)
dset[...] = lat
dset.attrs['name'] = 'latitude'
dset.attrs['units'] = 'Decimal degrees'
# lon
dset = result_file.require_dataset('lon', shape=lon.shape,
dtype=np.float32, compression=comp)
dset[...] = lon
dset.attrs['name'] = 'longitude'
dset.attrs['units'] = 'Decimal degrees'
curr_dt = start_dt
for k in range(rows):
print curr_dt
# ssi = (Vs/cell_vol)*100
# cell_vol = (theta_s - theta_r)*soil_depth*cell_area
sv = ma.array(soil_vol[k], mask=soil_depth.mask)
ssi = (sv/(div))*100.0
ssi = ssi.compressed()
# ssi
dset = result_file.require_dataset(curr_dt.strftime('%Y%m%d%H00'),
shape=ssi.shape,
dtype=np.float32, compression=comp)
dset[...] = ssi
dset.attrs['name'] = 'TOPKAPI soil saturation index'
dset.attrs['units'] = '% saturation'
curr_dt += timedelta(hours=3)
result_file.close()
|
<commit_before><commit_msg>ENH: Add tool to extract SSI from TOPKAPI simulation results<commit_after>__all__ = ['extract_ssi']
from datetime import datetime, timedelta
import h5py
import numpy as np
import numpy.ma as ma
# gzip compression flag
comp = 6
def extract_ssi(sim_fname, param_fname, result_fname, start_dt):
"""
Read a TOPKAPI simulation file and its associated parameter file
and compute the SSI for each timestep. Store the results in a new
HDF5 file, grouped by date and containing datasets of latitude,
longitude and SSI value.
"""
params = np.loadtxt(param_fname)
lon = params[:, 1]
lat = params[:, 2]
soil_depth = params[:, 8]
factor = params[:, 11] - params[:, 10]
cell_area = 1000.0**2 # m^2
soil_depth = ma.masked_values(soil_depth, 0.0)
factor = ma.array(factor, mask=soil_depth.mask)
lon = ma.array(lon, mask=soil_depth.mask).compressed()
lat = ma.array(lat, mask=soil_depth.mask).compressed()
div = factor*soil_depth*cell_area
tkpi_file = h5py.File(sim_fname)
result_file = h5py.File(result_fname, 'w')
soil_vol = tkpi_file['/Soil/V_s'][...]
tkpi_file.close()
rows, cols = soil_vol.shape
# lat
dset = result_file.require_dataset('lat', shape=lat.shape,
dtype=np.float32, compression=comp)
dset[...] = lat
dset.attrs['name'] = 'latitude'
dset.attrs['units'] = 'Decimal degrees'
# lon
dset = result_file.require_dataset('lon', shape=lon.shape,
dtype=np.float32, compression=comp)
dset[...] = lon
dset.attrs['name'] = 'longitude'
dset.attrs['units'] = 'Decimal degrees'
curr_dt = start_dt
for k in range(rows):
print curr_dt
# ssi = (Vs/cell_vol)*100
# cell_vol = (theta_s - theta_r)*soil_depth*cell_area
sv = ma.array(soil_vol[k], mask=soil_depth.mask)
ssi = (sv/(div))*100.0
ssi = ssi.compressed()
# ssi
dset = result_file.require_dataset(curr_dt.strftime('%Y%m%d%H00'),
shape=ssi.shape,
dtype=np.float32, compression=comp)
dset[...] = ssi
dset.attrs['name'] = 'TOPKAPI soil saturation index'
dset.attrs['units'] = '% saturation'
curr_dt += timedelta(hours=3)
result_file.close()
|
|
287855c67b8008a589e5009ba689cd0d9b35124a
|
backoff_retry_async.py
|
backoff_retry_async.py
|
import asyncio
import logging
import aiohttp
import backoff
@backoff.on_exception(backoff.expo,
aiohttp.errors.ClientError,
max_tries=8)
async def get_url(url):
async with aiohttp.ClientSession() as session:
async with session.get(url) as response:
return await response.text()
@backoff.on_exception(backoff.expo,
aiohttp.errors.ClientError,
max_tries=8)
@asyncio.coroutine
def get_url_py34(url):
with aiohttp.ClientSession() as session:
response = yield from session.get(url)
try:
return (yield from response.text())
except Exception:
response.close()
raise
finally:
yield from response.release()
format_string = '%(asctime)-15s %(name)s %(levelname)s: %(message)s'
logging.basicConfig(format=format_string, level=logging.DEBUG)
url = 'http://python.org/'
#url = 'http://localhost:34534/'
loop = asyncio.get_event_loop()
print(loop.run_until_complete(get_url_py34(url))[:100])
print(loop.run_until_complete(get_url(url))[:100])
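One caveat worth flagging as an editorial note: aiohttp.errors.ClientError exists only in old aiohttp releases; the errors module was later removed, and on aiohttp 3.x the equivalent base exception is aiohttp.ClientError. A minimal sketch of the retry wrapper under that assumption (and a backoff release with native coroutine support):
# Sketch for aiohttp 3.x; aiohttp.ClientError replaces aiohttp.errors.ClientError.
import aiohttp
import backoff
@backoff.on_exception(backoff.expo, aiohttp.ClientError, max_tries=8)
async def get_url(url):
    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            return await response.text()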
|
Add backoff retry async example
|
Add backoff retry async example
|
Python
|
mit
|
voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts
|
Add backoff retry async example
|
import asyncio
import logging
import aiohttp
import backoff
@backoff.on_exception(backoff.expo,
aiohttp.errors.ClientError,
max_tries=8)
async def get_url(url):
async with aiohttp.ClientSession() as session:
async with session.get(url) as response:
return await response.text()
@backoff.on_exception(backoff.expo,
aiohttp.errors.ClientError,
max_tries=8)
@asyncio.coroutine
def get_url_py34(url):
with aiohttp.ClientSession() as session:
response = yield from session.get(url)
try:
return (yield from response.text())
except Exception:
response.close()
raise
finally:
yield from response.release()
format_string = '%(asctime)-15s %(name)s %(levelname)s: %(message)s'
logging.basicConfig(format=format_string, level=logging.DEBUG)
url = 'http://python.org/'
#url = 'http://localhost:34534/'
loop = asyncio.get_event_loop()
print(loop.run_until_complete(get_url_py34(url))[:100])
print(loop.run_until_complete(get_url(url))[:100])
|
<commit_before><commit_msg>Add backoff retry async example<commit_after>
|
import asyncio
import logging
import aiohttp
import backoff
@backoff.on_exception(backoff.expo,
aiohttp.errors.ClientError,
max_tries=8)
async def get_url(url):
async with aiohttp.ClientSession() as session:
async with session.get(url) as response:
return await response.text()
@backoff.on_exception(backoff.expo,
aiohttp.errors.ClientError,
max_tries=8)
@asyncio.coroutine
def get_url_py34(url):
with aiohttp.ClientSession() as session:
response = yield from session.get(url)
try:
return (yield from response.text())
except Exception:
response.close()
raise
finally:
yield from response.release()
format_string = '%(asctime)-15s %(name)s %(levelname)s: %(message)s'
logging.basicConfig(format=format_string, level=logging.DEBUG)
url = 'http://python.org/'
#url = 'http://localhost:34534/'
loop = asyncio.get_event_loop()
print(loop.run_until_complete(get_url_py34(url))[:100])
print(loop.run_until_complete(get_url(url))[:100])
|
Add backoff retry async exampleimport asyncio
import logging
import aiohttp
import backoff
@backoff.on_exception(backoff.expo,
aiohttp.errors.ClientError,
max_tries=8)
async def get_url(url):
async with aiohttp.ClientSession() as session:
async with session.get(url) as response:
return await response.text()
@backoff.on_exception(backoff.expo,
aiohttp.errors.ClientError,
max_tries=8)
@asyncio.coroutine
def get_url_py34(url):
with aiohttp.ClientSession() as session:
response = yield from session.get(url)
try:
return (yield from response.text())
except Exception:
response.close()
raise
finally:
yield from response.release()
format_string = '%(asctime)-15s %(name)s %(levelname)s: %(message)s'
logging.basicConfig(format=format_string, level=logging.DEBUG)
url = 'http://python.org/'
#url = 'http://localhost:34534/'
loop = asyncio.get_event_loop()
print(loop.run_until_complete(get_url_py34(url))[:100])
print(loop.run_until_complete(get_url(url))[:100])
|
<commit_before><commit_msg>Add backoff retry async example<commit_after>import asyncio
import logging
import aiohttp
import backoff
@backoff.on_exception(backoff.expo,
aiohttp.errors.ClientError,
max_tries=8)
async def get_url(url):
async with aiohttp.ClientSession() as session:
async with session.get(url) as response:
return await response.text()
@backoff.on_exception(backoff.expo,
aiohttp.errors.ClientError,
max_tries=8)
@asyncio.coroutine
def get_url_py34(url):
with aiohttp.ClientSession() as session:
response = yield from session.get(url)
try:
return (yield from response.text())
except Exception:
response.close()
raise
finally:
yield from response.release()
format_string = '%(asctime)-15s %(name)s %(levelname)s: %(message)s'
logging.basicConfig(format=format_string, level=logging.DEBUG)
url = 'http://python.org/'
#url = 'http://localhost:34534/'
loop = asyncio.get_event_loop()
print(loop.run_until_complete(get_url_py34(url))[:100])
print(loop.run_until_complete(get_url(url))[:100])
|
|
d9be46134972e88a01e74cefe61ff8ccd51b3fe2
|
util/check-links.py
|
util/check-links.py
|
#!/usr/bin/env python2
from subprocess import Popen, PIPE
import os
import urllib2
import sys
utilDir = os.path.dirname(os.path.realpath(__file__))
ignores = ['localhost', '127.0.0.1', 'your-server', 'docker-ip',
'ghbtns', 'sphinx-doc']
def ignoreURL(url):
for ignore in ignores:
if ignore in url:
return True
return False
hdr = {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
'Accept-Encoding': 'none',
'Accept-Language': 'en-US,en;q=0.8',
'Connection': 'keep-alive'
}
dirs = ['api-docs', 'batch-represent', 'docs', 'evaluation',
'openface', 'training', 'util']
dirs = [os.path.join(utilDir, '..', d) for d in dirs]
cmd = ['grep', '-I', '--no-filename',
'-o', '\(http\|https\)://[^"\')}`<> ]*',
'-R'] + dirs + \
['--exclude-dir=_build']
p = Popen(cmd, stdout=PIPE)
out = p.communicate()[0]
urls = set(out.split())
badURLs = []
for url in urls:
if not ignoreURL(url):
if url.endswith('.'):
url = url[:-1]
print("+ {}".format(url))
try:
req = urllib2.Request(url, headers=hdr)
resp = urllib2.urlopen(req)
except Exception as e:
print(" + Error:\n\n")
print(e)
print("\n\n")
badURLs.append(url)
print('\nFound {} bad of {} URLs'.format(len(badURLs), len(urls)))
if len(badURLs) > 0:
print("\n\n=== Bad URLs.\n")
for url in badURLs:
print("+ {}".format(url))
sys.exit(-1)
|
Add util script to check for broken links.
|
Add util script to check for broken links.
|
Python
|
apache-2.0
|
nhzandi/openface,nmabhi/Webface,xinfang/face-recognize,nmabhi/Webface,nhzandi/openface,xinfang/face-recognize,Alexx-G/openface,francisleunggie/openface,cmusatyalab/openface,Alexx-G/openface,francisleunggie/openface,nmabhi/Webface,xinfang/face-recognize,nmabhi/Webface,cmusatyalab/openface,cmusatyalab/openface,Alexx-G/openface,nhzandi/openface,Alexx-G/openface,francisleunggie/openface
|
Add util script to check for broken links.
|
#!/usr/bin/env python2
from subprocess import Popen, PIPE
import os
import urllib2
import sys
utilDir = os.path.dirname(os.path.realpath(__file__))
ignores = ['localhost', '127.0.0.1', 'your-server', 'docker-ip',
'ghbtns', 'sphinx-doc']
def ignoreURL(url):
for ignore in ignores:
if ignore in url:
return True
return False
hdr = {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
'Accept-Encoding': 'none',
'Accept-Language': 'en-US,en;q=0.8',
'Connection': 'keep-alive'
}
dirs = ['api-docs', 'batch-represent', 'docs', 'evaluation',
'openface', 'training', 'util']
dirs = [os.path.join(utilDir, '..', d) for d in dirs]
cmd = ['grep', '-I', '--no-filename',
'-o', '\(http\|https\)://[^"\')}`<> ]*',
'-R'] + dirs + \
['--exclude-dir=_build']
p = Popen(cmd, stdout=PIPE)
out = p.communicate()[0]
urls = set(out.split())
badURLs = []
for url in urls:
if not ignoreURL(url):
if url.endswith('.'):
url = url[:-1]
print("+ {}".format(url))
try:
req = urllib2.Request(url, headers=hdr)
resp = urllib2.urlopen(req)
except Exception as e:
print(" + Error:\n\n")
print(e)
print("\n\n")
badURLs.append(url)
print('\nFound {} bad of {} URLs'.format(len(badURLs), len(urls)))
if len(badURLs) > 0:
print("\n\n=== Bad URLs.\n")
for url in badURLs:
print("+ {}".format(url))
sys.exit(-1)
|
<commit_before><commit_msg>Add util script to check for broken links.<commit_after>
|
#!/usr/bin/env python2
from subprocess import Popen, PIPE
import os
import urllib2
import sys
utilDir = os.path.dirname(os.path.realpath(__file__))
ignores = ['localhost', '127.0.0.1', 'your-server', 'docker-ip',
'ghbtns', 'sphinx-doc']
def ignoreURL(url):
for ignore in ignores:
if ignore in url:
return True
return False
hdr = {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
'Accept-Encoding': 'none',
'Accept-Language': 'en-US,en;q=0.8',
'Connection': 'keep-alive'
}
dirs = ['api-docs', 'batch-represent', 'docs', 'evaluation',
'openface', 'training', 'util']
dirs = [os.path.join(utilDir, '..', d) for d in dirs]
cmd = ['grep', '-I', '--no-filename',
'-o', '\(http\|https\)://[^"\')}`<> ]*',
'-R'] + dirs + \
['--exclude-dir=_build']
p = Popen(cmd, stdout=PIPE)
out = p.communicate()[0]
urls = set(out.split())
badURLs = []
for url in urls:
if not ignoreURL(url):
if url.endswith('.'):
url = url[:-1]
print("+ {}".format(url))
try:
req = urllib2.Request(url, headers=hdr)
resp = urllib2.urlopen(req)
except Exception as e:
print(" + Error:\n\n")
print(e)
print("\n\n")
badURLs.append(url)
print('\nFound {} bad of {} URLs'.format(len(badURLs), len(urls)))
if len(badURLs) > 0:
print("\n\n=== Bad URLs.\n")
for url in badURLs:
print("+ {}".format(url))
sys.exit(-1)
|
Add util script to check for broken links.#!/usr/bin/env python2
from subprocess import Popen, PIPE
import os
import urllib2
import sys
utilDir = os.path.dirname(os.path.realpath(__file__))
ignores = ['localhost', '127.0.0.1', 'your-server', 'docker-ip',
'ghbtns', 'sphinx-doc']
def ignoreURL(url):
for ignore in ignores:
if ignore in url:
return True
return False
hdr = {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
'Accept-Encoding': 'none',
'Accept-Language': 'en-US,en;q=0.8',
'Connection': 'keep-alive'
}
dirs = ['api-docs', 'batch-represent', 'docs', 'evaluation',
'openface', 'training', 'util']
dirs = [os.path.join(utilDir, '..', d) for d in dirs]
cmd = ['grep', '-I', '--no-filename',
'-o', '\(http\|https\)://[^"\')}`<> ]*',
'-R'] + dirs + \
['--exclude-dir=_build']
p = Popen(cmd, stdout=PIPE)
out = p.communicate()[0]
urls = set(out.split())
badURLs = []
for url in urls:
if not ignoreURL(url):
if url.endswith('.'):
url = url[:-1]
print("+ {}".format(url))
try:
req = urllib2.Request(url, headers=hdr)
resp = urllib2.urlopen(req)
except Exception as e:
print(" + Error:\n\n")
print(e)
print("\n\n")
badURLs.append(url)
print('\nFound {} bad of {} URLs'.format(len(badURLs), len(urls)))
if len(badURLs) > 0:
print("\n\n=== Bad URLs.\n")
for url in badURLs:
print("+ {}".format(url))
sys.exit(-1)
|
<commit_before><commit_msg>Add util script to check for broken links.<commit_after>#!/usr/bin/env python2
from subprocess import Popen, PIPE
import os
import urllib2
import sys
utilDir = os.path.dirname(os.path.realpath(__file__))
ignores = ['localhost', '127.0.0.1', 'your-server', 'docker-ip',
'ghbtns', 'sphinx-doc']
def ignoreURL(url):
for ignore in ignores:
if ignore in url:
return True
return False
hdr = {
'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11',
'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3',
'Accept-Encoding': 'none',
'Accept-Language': 'en-US,en;q=0.8',
'Connection': 'keep-alive'
}
dirs = ['api-docs', 'batch-represent', 'docs', 'evaluation',
'openface', 'training', 'util']
dirs = [os.path.join(utilDir, '..', d) for d in dirs]
cmd = ['grep', '-I', '--no-filename',
'-o', '\(http\|https\)://[^"\')}`<> ]*',
'-R'] + dirs + \
['--exclude-dir=_build']
p = Popen(cmd, stdout=PIPE)
out = p.communicate()[0]
urls = set(out.split())
badURLs = []
for url in urls:
if not ignoreURL(url):
if url.endswith('.'):
url = url[:-1]
print("+ {}".format(url))
try:
req = urllib2.Request(url, headers=hdr)
resp = urllib2.urlopen(req)
except Exception as e:
print(" + Error:\n\n")
print(e)
print("\n\n")
badURLs.append(url)
print('\nFound {} bad of {} URLs'.format(len(badURLs), len(urls)))
if len(badURLs) > 0:
print("\n\n=== Bad URLs.\n")
for url in badURLs:
print("+ {}".format(url))
sys.exit(-1)
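The script above targets Python 2 (urllib2). For readers on Python 3, a hedged sketch of the same single-URL check with the standard-library equivalent, urllib.request; the header set is trimmed to just the user agent:
import urllib.request
HDR = {'User-Agent': 'Mozilla/5.0'}
def url_is_alive(url, timeout=10):
    """Return True if the URL answers, False (with the error printed) otherwise."""
    req = urllib.request.Request(url, headers=HDR)
    try:
        urllib.request.urlopen(req, timeout=timeout)
        return True
    except Exception as e:
        print("  + Error: {}".format(e))
        return False
if __name__ == '__main__':
    print(url_is_alive('https://example.com/'))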
|
|
9ff6932eb887e4c1e05dd94107aa0c3438ae26a9
|
Lib/gftools/actions/getlatestversion.py
|
Lib/gftools/actions/getlatestversion.py
|
import argparse
import subprocess
import os
from github import Github
import re
g = Github(os.environ["GITHUB_TOKEN"])
parser = argparse.ArgumentParser(description="Return the URL of a font's latest release artefact")
parser.add_argument('--user', help='the repository username', default="notofonts")
parser.add_argument('--repo', help='the repository name')
parser.add_argument('family', help='the font family name')
args = parser.parse_args()
if not (args.user and args.repo):
repo_url = subprocess.check_output(["git", "remote", "get-url", "origin"]).decode("utf8").strip()
url_split = repo_url.split("/")
args.user, args.repo = url_split[3], url_split[4]
repo = g.get_repo(args.user + '/' + args.repo)
for release in repo.get_releases():
m = re.match(r"^(.*)-(v[\d.]+)", release.tag_name)
if not m:
print(f"Unparsable release {release.tag_name} in {repo_name}")
continue
family, version = m[1], m[2]
if family != args.family:
continue
assets = release.get_assets()
download_url = assets[0].browser_download_url
print(f"::set-output name=version::{version}")
print(f"::set-output name=url::{download_url}")
break
|
Add another module useful for GitHub actions: get a repo's latest release URL
|
Add another module useful for GitHub actions: get a repo's latest release URL
|
Python
|
apache-2.0
|
googlefonts/gftools,googlefonts/gftools
|
Add another module useful for GitHub actions: get a repo's latest release URL
|
import argparse
import subprocess
import os
from github import Github
import re
g = Github(os.environ["GITHUB_TOKEN"])
parser = argparse.ArgumentParser(description="Return the URL of a font's latest release artefact")
parser.add_argument('--user', help='the repository username', default="notofonts")
parser.add_argument('--repo', help='the repository name')
parser.add_argument('family', help='the font family name')
args = parser.parse_args()
if not (args.user and args.repo):
repo_url = subprocess.check_output(["git", "remote", "get-url", "origin"]).decode("utf8").strip()
url_split = repo_url.split("/")
args.user, args.repo = url_split[3], url_split[4]
repo = g.get_repo(args.user + '/' + args.repo)
for release in repo.get_releases():
m = re.match(r"^(.*)-(v[\d.]+)", release.tag_name)
if not m:
print(f"Unparsable release {release.tag_name} in {repo_name}")
continue
family, version = m[1], m[2]
if family != args.family:
continue
assets = release.get_assets()
download_url = assets[0].browser_download_url
print(f"::set-output name=version::{version}")
print(f"::set-output name=url::{download_url}")
break
|
<commit_before><commit_msg>Add another module useful for GitHub actions: get a repo's latest release URL<commit_after>
|
import argparse
import subprocess
import os
from github import Github
import re
g = Github(os.environ["GITHUB_TOKEN"])
parser = argparse.ArgumentParser(description="Return the URL of a font's latest release artefact")
parser.add_argument('--user', help='the repository username', default="notofonts")
parser.add_argument('--repo', help='the repository name')
parser.add_argument('family', help='the font family name')
args = parser.parse_args()
if not (args.user and args.repo):
repo_url = subprocess.check_output(["git", "remote", "get-url", "origin"]).decode("utf8").strip()
url_split = repo_url.split("/")
args.user, args.repo = url_split[3], url_split[4]
repo = g.get_repo(args.user + '/' + args.repo)
for release in repo.get_releases():
m = re.match(r"^(.*)-(v[\d.]+)", release.tag_name)
if not m:
print(f"Unparsable release {release.tag_name} in {repo_name}")
continue
family, version = m[1], m[2]
if family != args.family:
continue
assets = release.get_assets()
download_url = assets[0].browser_download_url
print(f"::set-output name=version::{version}")
print(f"::set-output name=url::{download_url}")
break
|
Add another module useful for GitHub actions: get a repo's latest release URLimport argparse
import subprocess
import os
from github import Github
import re
g = Github(os.environ["GITHUB_TOKEN"])
parser = argparse.ArgumentParser(description="Return the URL of a font's latest release artefact")
parser.add_argument('--user', help='the repository username', default="notofonts")
parser.add_argument('--repo', help='the repository name')
parser.add_argument('family', help='the font family name')
args = parser.parse_args()
if not (args.user and args.repo):
repo_url = subprocess.check_output(["git", "remote", "get-url", "origin"]).decode("utf8").strip()
url_split = repo_url.split("/")
args.user, args.repo = url_split[3], url_split[4]
repo = g.get_repo(args.user + '/' + args.repo)
for release in repo.get_releases():
m = re.match(r"^(.*)-(v[\d.]+)", release.tag_name)
if not m:
print(f"Unparsable release {release.tag_name} in {repo_name}")
continue
family, version = m[1], m[2]
if family != args.family:
continue
assets = release.get_assets()
download_url = assets[0].browser_download_url
print(f"::set-output name=version::{version}")
print(f"::set-output name=url::{download_url}")
break
|
<commit_before><commit_msg>Add another module useful for GitHub actions: get a repo's latest release URL<commit_after>import argparse
import subprocess
import os
from github import Github
import re
g = Github(os.environ["GITHUB_TOKEN"])
parser = argparse.ArgumentParser(description="Return the URL of a font's latest release artefact")
parser.add_argument('--user', help='the repository username', default="notofonts")
parser.add_argument('--repo', help='the repository name')
parser.add_argument('family', help='the font family name')
args = parser.parse_args()
if not (args.user and args.repo):
repo_url = subprocess.check_output(["git", "remote", "get-url", "origin"]).decode("utf8").strip()
url_split = repo_url.split("/")
args.user, args.repo = url_split[3], url_split[4]
repo = g.get_repo(args.user + '/' + args.repo)
for release in repo.get_releases():
m = re.match(r"^(.*)-(v[\d.]+)", release.tag_name)
if not m:
print(f"Unparsable release {release.tag_name} in {repo_name}")
continue
family, version = m[1], m[2]
if family != args.family:
continue
assets = release.get_assets()
download_url = assets[0].browser_download_url
print(f"::set-output name=version::{version}")
print(f"::set-output name=url::{download_url}")
break
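One caveat worth flagging alongside the commit: the ::set-output workflow command printed above has since been deprecated by GitHub Actions in favour of appending to the file named by GITHUB_OUTPUT. A small helper covering both runner generations, offered as a hypothetical addition rather than anything in the repository:
import os
def set_action_output(name, value):
    # Newer runners expose GITHUB_OUTPUT; append "name=value" lines to it.
    output_file = os.environ.get('GITHUB_OUTPUT')
    if output_file:
        with open(output_file, 'a') as fh:
            fh.write(f"{name}={value}\n")
    else:
        # Legacy fallback for runners that still parse workflow commands.
        print(f"::set-output name={name}::{value}")
set_action_output('version', 'v2.004')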
|
|
d79607f320579b2b9d98f219c35d7c878173f10e
|
acoustics/doppler.py
|
acoustics/doppler.py
|
"""
Doppler
=======
Doppler shift module.
"""
from __future__ import division
def velocity_from_doppler_shift(c, f1, f2):
"""
Calculate velocity based on measured frequency shifts due to Doppler shift.
The assumption is made that the velocity is constant between the observation times.
.. math:: v = c \cdot \\left( \\frac{f_2 - f_1}{f_2 + f_1} \\right)
:param c: Speed of sound :math:`c`.
:param f1: Lower frequency :math:`f_1`.
:param f2: Upper frequency :math:`f_2`.
"""
return c * (f2 - f1) / (f2 + f1)
|
"""
Doppler
=======
Doppler shift module.
"""
from __future__ import division
SOUNDSPEED = 343.0
"""Speed of sound
"""
def velocity_from_doppler_shift(f1, f2, c=SOUNDSPEED):
"""Calculate velocity based on measured frequency shifts due to Doppler shift.
:param c: Speed of sound :math:`c`.
:param f1: Lower frequency :math:`f_1`.
:param f2: Upper frequency :math:`f_2`.
.. math:: v = c \cdot \\left( \\frac{f_2 - f_1}{f_2 + f_1} \\right)
The assumption is made that the velocity is constant between the observation times.
"""
return c * (f2 - f1) / (f2 + f1)
def frequency_shift(frequency, velocity_source, velocity_receiver, soundspeed=SOUNDSPEED):
"""Frequency shift due to Doppler effect.
:param frequency: Emitted frequency :math:`f`.
:param velocity_source: Velocity of source :math:`v_s`. Positive if the source is moving away from the receiver (and negative in the other direction).
:param velocity_receiver: Velocity of receiver :math:`v_r`. Positive if the receiver is moving towards the source (and negative in the other direction);
:param soundspeed: Speed of sound :math:`c`.
.. math:: f = \\frac{c + v_r}{c + v_s} f_0
"""
return (soundspeed + velocity_receiver) / (soundspeed + velocity_source) * frequency
|
Add simple equation to calculate Doppler shift
|
Add simple equation to calculate Doppler shift
|
Python
|
bsd-3-clause
|
felipeacsi/python-acoustics,FRidh/python-acoustics,antiface/python-acoustics,python-acoustics/python-acoustics,giumas/python-acoustics
|
"""
Doppler
=======
Doppler shift module.
"""
from __future__ import division
def velocity_from_doppler_shift(c, f1, f2):
"""
Calculate velocity based on measured frequency shifts due to Doppler shift.
The assumption is made that the velocity is constant between the observation times.
.. math:: v = c \cdot \\left( \\frac{f_2 - f_1}{f_2 + f_1} \\right)
:param c: Speed of sound :math:`c`.
:param f1: Lower frequency :math:`f_1`.
:param f2: Upper frequency :math:`f_2`.
"""
return c * (f2 - f1) / (f2 + f1)
Add simple equation to calculate Doppler shift
|
"""
Doppler
=======
Doppler shift module.
"""
from __future__ import division
SOUNDSPEED = 343.0
"""Speed of sound
"""
def velocity_from_doppler_shift(f1, f2, c=SOUNDSPEED):
"""Calculate velocity based on measured frequency shifts due to Doppler shift.
:param c: Speed of sound :math:`c`.
:param f1: Lower frequency :math:`f_1`.
:param f2: Upper frequency :math:`f_2`.
.. math:: v = c \cdot \\left( \\frac{f_2 - f_1}{f_2 + f_1} \\right)
The assumption is made that the velocity is constant between the observation times.
"""
return c * (f2 - f1) / (f2 + f1)
def frequency_shift(frequency, velocity_source, velocity_receiver, soundspeed=SOUNDSPEED):
"""Frequency shift due to Doppler effect.
:param frequency: Emitted frequency :math:`f`.
:param velocity_source: Velocity of source :math:`v_s`. Positive if the source is moving away from the receiver (and negative in the other direction).
:param velocity_receiver: Velocity of receiver :math:`v_r`. Positive if the receiver is moving towards the source (and negative in the other direction);
:param soundspeed: Speed of sound :math:`c`.
.. math:: f = \\frac{c + v_r}{c + v_s} f_0
"""
return (soundspeed + velocity_receiver) / (soundspeed + velocity_source) * frequency
|
<commit_before>"""
Doppler
=======
Doppler shift module.
"""
from __future__ import division
def velocity_from_doppler_shift(c, f1, f2):
"""
Calculate velocity based on measured frequency shifts due to Doppler shift.
The assumption is made that the velocity is constant between the observation times.
.. math:: v = c \cdot \\left( \\frac{f_2 - f_1}{f_2 + f_1} \\right)
:param c: Speed of sound :math:`c`.
:param f1: Lower frequency :math:`f_1`.
:param f2: Upper frequency :math:`f_2`.
"""
return c * (f2 - f1) / (f2 + f1)
<commit_msg>Add simple equation to calculate Doppler shift<commit_after>
|
"""
Doppler
=======
Doppler shift module.
"""
from __future__ import division
SOUNDSPEED = 343.0
"""Speed of sound
"""
def velocity_from_doppler_shift(f1, f2, c=SOUNDSPEED):
"""Calculate velocity based on measured frequency shifts due to Doppler shift.
:param c: Speed of sound :math:`c`.
:param f1: Lower frequency :math:`f_1`.
:param f2: Upper frequency :math:`f_2`.
.. math:: v = c \cdot \\left( \\frac{f_2 - f_1}{f_2 + f_1} \\right)
The assumption is made that the velocity is constant between the observation times.
"""
return c * (f2 - f1) / (f2 + f1)
def frequency_shift(frequency, velocity_source, velocity_receiver, soundspeed=SOUNDSPEED):
"""Frequency shift due to Doppler effect.
:param frequency: Emitted frequency :math:`f`.
:param velocity_source: Velocity of source :math:`v_s`. Positive if the source is moving away from the receiver (and negative in the other direction).
:param velocity_receiver: Velocity of receiver :math:`v_r`. Positive if the receiver is moving towards the source (and negative in the other direction);
:param soundspeed: Speed of sound :math:`c`.
.. math:: f = \\frac{c + v_r}{c + v_s} f_0
"""
return (soundspeed + velocity_receiver) / (soundspeed + velocity_source) * frequency
|
"""
Doppler
=======
Doppler shift module.
"""
from __future__ import division
def velocity_from_doppler_shift(c, f1, f2):
"""
Calculate velocity based on measured frequency shifts due to Doppler shift.
The assumption is made that the velocity is constant between the observation times.
.. math:: v = c \cdot \\left( \\frac{f_2 - f_1}{f_2 + f_1} \\right)
:param c: Speed of sound :math:`c`.
:param f1: Lower frequency :math:`f_1`.
:param f2: Upper frequency :math:`f_2`.
"""
return c * (f2 - f1) / (f2 + f1)
Add simple equation to calculate Doppler shift"""
Doppler
=======
Doppler shift module.
"""
from __future__ import division
SOUNDSPEED = 343.0
"""Speed of sound
"""
def velocity_from_doppler_shift(f1, f2, c=SOUNDSPEED):
"""Calculate velocity based on measured frequency shifts due to Doppler shift.
:param c: Speed of sound :math:`c`.
:param f1: Lower frequency :math:`f_1`.
:param f2: Upper frequency :math:`f_2`.
.. math:: v = c \cdot \\left( \\frac{f_2 - f_1}{f_2 + f_1} \\right)
The assumption is made that the velocity is constant between the observation times.
"""
return c * (f2 - f1) / (f2 + f1)
def frequency_shift(frequency, velocity_source, velocity_receiver, soundspeed=SOUNDSPEED):
"""Frequency shift due to Doppler effect.
:param frequency: Emitted frequency :math:`f`.
:param velocity_source: Velocity of source :math:`v_s`. Positive if the source is moving away from the receiver (and negative in the other direction).
:param velocity_receiver: Velocity of receiver :math:`v_r`. Positive if the receiver is moving towards the source (and negative in the other direction);
:param soundspeed: Speed of sound :math:`c`.
.. math:: f = \\frac{c + v_r}{c + v_s} f_0
"""
return (soundspeed + velocity_receiver) / (soundspeed + velocity_source) * frequency
|
<commit_before>"""
Doppler
=======
Doppler shift module.
"""
from __future__ import division
def velocity_from_doppler_shift(c, f1, f2):
"""
Calculate velocity based on measured frequency shifts due to Doppler shift.
The assumption is made that the velocity is constant between the observation times.
.. math:: v = c \cdot \\left( \\frac{f_2 - f_1}{f_2 + f_1} \\right)
:param c: Speed of sound :math:`c`.
:param f1: Lower frequency :math:`f_1`.
:param f2: Upper frequency :math:`f_2`.
"""
return c * (f2 - f1) / (f2 + f1)
<commit_msg>Add simple equation to calculate Doppler shift<commit_after>"""
Doppler
=======
Doppler shift module.
"""
from __future__ import division
SOUNDSPEED = 343.0
"""Speed of sound
"""
def velocity_from_doppler_shift(f1, f2, c=SOUNDSPEED):
"""Calculate velocity based on measured frequency shifts due to Doppler shift.
:param c: Speed of sound :math:`c`.
:param f1: Lower frequency :math:`f_1`.
:param f2: Upper frequency :math:`f_2`.
.. math:: v = c \cdot \\left( \\frac{f_2 - f_1}{f_2 + f_1} \\right)
The assumption is made that the velocity is constant between the observation times.
"""
return c * (f2 - f1) / (f2 + f1)
def frequency_shift(frequency, velocity_source, velocity_receiver, soundspeed=SOUNDSPEED):
"""Frequency shift due to Doppler effect.
:param frequency: Emitted frequency :math:`f`.
:param velocity_source: Velocity of source :math:`v_s`. Positive if the source is moving away from the receiver (and negative in the other direction).
:param velocity_receiver: Velocity of receiver :math:`v_r`. Positive if the receiver is moving towards the source (and negative in the other direction);
:param soundspeed: Speed of sound :math:`c`.
.. math:: f = \\frac{c + v_r}{c + v_s} f_0
"""
return (soundspeed + velocity_receiver) / (soundspeed + velocity_source) * frequency
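A quick numerical sanity check of the two functions, assuming the package imports as acoustics (as the repository layout suggests) and using the module's 343 m/s default:
from acoustics.doppler import velocity_from_doppler_shift, frequency_shift
# 343 * (1020 - 980) / (1020 + 980) = 6.86 m/s
print(velocity_from_doppler_shift(980.0, 1020.0))
# Source approaching the receiver (negative v_s): 343 / 323 * 1000 ~ 1061.9 Hz
print(frequency_shift(1000.0, velocity_source=-20.0, velocity_receiver=0.0))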
|
3114c6d3dfde0c4e0f39b006bc212dd4cebc6acc
|
solr-external/update_zookeeper_config.py
|
solr-external/update_zookeeper_config.py
|
#!/usr/bin/env python
import argparse
from mc_solr.constants import *
from mc_solr.solr import update_zookeeper_solr_configuration
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Update Solr's configuration on ZooKeeper.",
epilog="This script does not reload Solr shards! " +
"Run 'reload_solr_shards.py' afterwards.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-zh", "--zookeeper_host", type=str, required=False, default=MC_SOLR_ZOOKEEPER_HOST,
help="ZooKeeper host to connect to.")
parser.add_argument("-zp", "--zookeeper_port", type=int, required=False, default=MC_SOLR_ZOOKEEPER_PORT,
help="ZooKeeper port to connect to.")
args = parser.parse_args()
update_zookeeper_solr_configuration(zookeeper_host=args.zookeeper_host, zookeeper_port=args.zookeeper_port)
|
Add script that updates Solr configuration on ZooKeeper
|
Add script that updates Solr configuration on ZooKeeper
|
Python
|
agpl-3.0
|
berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud
|
Add script that updates Solr configuration on ZooKeeper
|
#!/usr/bin/env python
import argparse
from mc_solr.constants import *
from mc_solr.solr import update_zookeeper_solr_configuration
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Update Solr's configuration on ZooKeeper.",
epilog="This script does not reload Solr shards! " +
"Run 'reload_solr_shards.py' afterwards.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-zh", "--zookeeper_host", type=str, required=False, default=MC_SOLR_ZOOKEEPER_HOST,
help="ZooKeeper host to connect to.")
parser.add_argument("-zp", "--zookeeper_port", type=int, required=False, default=MC_SOLR_ZOOKEEPER_PORT,
help="ZooKeeper port to connect to.")
args = parser.parse_args()
update_zookeeper_solr_configuration(zookeeper_host=args.zookeeper_host, zookeeper_port=args.zookeeper_port)
|
<commit_before><commit_msg>Add script that updates Solr configuration on ZooKeeper<commit_after>
|
#!/usr/bin/env python
import argparse
from mc_solr.constants import *
from mc_solr.solr import update_zookeeper_solr_configuration
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Update Solr's configuration on ZooKeeper.",
epilog="This script does not reload Solr shards! " +
"Run 'reload_solr_shards.py' afterwards.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-zh", "--zookeeper_host", type=str, required=False, default=MC_SOLR_ZOOKEEPER_HOST,
help="ZooKeeper host to connect to.")
parser.add_argument("-zp", "--zookeeper_port", type=int, required=False, default=MC_SOLR_ZOOKEEPER_PORT,
help="ZooKeeper port to connect to.")
args = parser.parse_args()
update_zookeeper_solr_configuration(zookeeper_host=args.zookeeper_host, zookeeper_port=args.zookeeper_port)
|
Add script that updates Solr configuration on ZooKeeper#!/usr/bin/env python
import argparse
from mc_solr.constants import *
from mc_solr.solr import update_zookeeper_solr_configuration
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Update Solr's configuration on ZooKeeper.",
epilog="This script does not reload Solr shards! " +
"Run 'reload_solr_shards.py' afterwards.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-zh", "--zookeeper_host", type=str, required=False, default=MC_SOLR_ZOOKEEPER_HOST,
help="ZooKeeper host to connect to.")
parser.add_argument("-zp", "--zookeeper_port", type=int, required=False, default=MC_SOLR_ZOOKEEPER_PORT,
help="ZooKeeper port to connect to.")
args = parser.parse_args()
update_zookeeper_solr_configuration(zookeeper_host=args.zookeeper_host, zookeeper_port=args.zookeeper_port)
|
<commit_before><commit_msg>Add script that updates Solr configuration on ZooKeeper<commit_after>#!/usr/bin/env python
import argparse
from mc_solr.constants import *
from mc_solr.solr import update_zookeeper_solr_configuration
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Update Solr's configuration on ZooKeeper.",
epilog="This script does not reload Solr shards! " +
"Run 'reload_solr_shards.py' afterwards.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("-zh", "--zookeeper_host", type=str, required=False, default=MC_SOLR_ZOOKEEPER_HOST,
help="ZooKeeper host to connect to.")
parser.add_argument("-zp", "--zookeeper_port", type=int, required=False, default=MC_SOLR_ZOOKEEPER_PORT,
help="ZooKeeper port to connect to.")
args = parser.parse_args()
update_zookeeper_solr_configuration(zookeeper_host=args.zookeeper_host, zookeeper_port=args.zookeeper_port)
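Since the script is a thin argparse wrapper, code inside the same codebase could call the underlying helper directly; a sketch using only the names visible in the script, assuming they are importable individually:
from mc_solr.constants import MC_SOLR_ZOOKEEPER_HOST, MC_SOLR_ZOOKEEPER_PORT
from mc_solr.solr import update_zookeeper_solr_configuration
# Push the Solr configuration to the default ZooKeeper host/port.
update_zookeeper_solr_configuration(
    zookeeper_host=MC_SOLR_ZOOKEEPER_HOST,
    zookeeper_port=MC_SOLR_ZOOKEEPER_PORT,
)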
|
|
562d23ecebaafcac3edf0662b4957f671a874a06
|
recipes/skia.py
|
recipes/skia.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import recipe_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Skia(recipe_util.Recipe):
"""Basic Recipe class for the Skia repository."""
@staticmethod
def fetch_spec(_props):
solution = {
'name' : 'skia',
'url' : 'https://skia.googlesource.com/skia.git',
'deps_file': 'DEPS',
'managed' : False,
}
spec = {
'solutions': [solution]
}
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'skia'
def main(argv=None):
return Skia().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Add fetch recipe for the Skia repository.
|
Add fetch recipe for the Skia repository.
Tested with the following command lines:
$ cd somewhere
$ mkdir some-test-dir
$ cd some-test-dir
$ fetch skia
$ cd skia
# confirm it is what we expected.
BUG=None
TEST=see above
R=agable@chromium.org
Review URL: https://codereview.chromium.org/746363003
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@293135 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
|
Python
|
bsd-3-clause
|
svn2github/chromium-depot-tools,svn2github/chromium-depot-tools,svn2github/chromium-depot-tools
|
Add fetch recipe for the Skia repository.
Tested with the following command lines:
$ cd somewhere
$ mkdir some-test-dir
$ cd some-test-dir
$ fetch skia
$ cd skia
# confirm it is what we expected.
BUG=None
TEST=see above
R=agable@chromium.org
Review URL: https://codereview.chromium.org/746363003
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@293135 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import recipe_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Skia(recipe_util.Recipe):
"""Basic Recipe class for the Skia repository."""
@staticmethod
def fetch_spec(_props):
solution = {
'name' : 'skia',
'url' : 'https://skia.googlesource.com/skia.git',
'deps_file': 'DEPS',
'managed' : False,
}
spec = {
'solutions': [solution]
}
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'skia'
def main(argv=None):
return Skia().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<commit_before><commit_msg>Add fetch recipe for the Skia repository.
Tested with the following command lines:
$ cd somewhere
$ mkdir some-test-dir
$ cd some-test-dir
$ fetch skia
$ cd skia
# confirm it is what we expected.
BUG=None
TEST=see above
R=agable@chromium.org
Review URL: https://codereview.chromium.org/746363003
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@293135 4ff67af0-8c30-449e-8e8b-ad334ec8d88c<commit_after>
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import recipe_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Skia(recipe_util.Recipe):
"""Basic Recipe class for the Skia repository."""
@staticmethod
def fetch_spec(_props):
solution = {
'name' : 'skia',
'url' : 'https://skia.googlesource.com/skia.git',
'deps_file': 'DEPS',
'managed' : False,
}
spec = {
'solutions': [solution]
}
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'skia'
def main(argv=None):
return Skia().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Add fetch recipe for the Skia repository.
Tested with the following command lines:
$ cd somewhere
$ mkdir some-test-dir
$ cd some-test-dir
$ fetch skia
$ cd skia
# confirm it is what we expected.
BUG=None
TEST=see above
R=agable@chromium.org
Review URL: https://codereview.chromium.org/746363003
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@293135 4ff67af0-8c30-449e-8e8b-ad334ec8d88c# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import recipe_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Skia(recipe_util.Recipe):
"""Basic Recipe class for the Skia repository."""
@staticmethod
def fetch_spec(_props):
solution = {
'name' : 'skia',
'url' : 'https://skia.googlesource.com/skia.git',
'deps_file': 'DEPS',
'managed' : False,
}
spec = {
'solutions': [solution]
}
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'skia'
def main(argv=None):
return Skia().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<commit_before><commit_msg>Add fetch recipe for the Skia repository.
Tested with the following command lines:
$ cd somewhere
$ mkdir some-test-dir
$ cd some-test-dir
$ fetch skia
$ cd skia
# confirm it is what we expected.
BUG=None
TEST=see above
R=agable@chromium.org
Review URL: https://codereview.chromium.org/746363003
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@293135 4ff67af0-8c30-449e-8e8b-ad334ec8d88c<commit_after># Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import recipe_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Skia(recipe_util.Recipe):
"""Basic Recipe class for the Skia repository."""
@staticmethod
def fetch_spec(_props):
solution = {
'name' : 'skia',
'url' : 'https://skia.googlesource.com/skia.git',
'deps_file': 'DEPS',
'managed' : False,
}
spec = {
'solutions': [solution]
}
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'skia'
def main(argv=None):
return Skia().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
|
ddacad6879d955f46f58bbfefd3363246c256193
|
examples/subclassing2.py
|
examples/subclassing2.py
|
from flask_table import Table, Col
class RawCol(Col):
"""Class that will just output whatever it is given and will not
escape it.
"""
def td_format(self, content):
return content
class ItemTable(Table):
name = Col('Name')
raw = RawCol('Raw')
def main():
items = [{'name': 'A', 'raw': '<span>a</span>'},
{'name': 'B', 'raw': '<span>b</span>'}]
tab = ItemTable(items)
# or {{ tab }} in jinja
print(tab.__html__())
if __name__ == '__main__':
main()
|
Add subclassing example for RawCol
|
Add subclassing example for RawCol
|
Python
|
bsd-3-clause
|
plumdog/flask_table,plumdog/flask_table,plumdog/flask_table
|
Add subclassing example for RawCol
|
from flask_table import Table, Col
class RawCol(Col):
"""Class that will just output whatever it is given and will not
escape it.
"""
def td_format(self, content):
return content
class ItemTable(Table):
name = Col('Name')
raw = RawCol('Raw')
def main():
items = [{'name': 'A', 'raw': '<span>a</span>'},
{'name': 'B', 'raw': '<span>b</span>'}]
tab = ItemTable(items)
# or {{ tab }} in jinja
print(tab.__html__())
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add subclassing example for RawCol<commit_after>
|
from flask_table import Table, Col
class RawCol(Col):
"""Class that will just output whatever it is given and will not
escape it.
"""
def td_format(self, content):
return content
class ItemTable(Table):
name = Col('Name')
raw = RawCol('Raw')
def main():
items = [{'name': 'A', 'raw': '<span>a</span>'},
{'name': 'B', 'raw': '<span>b</span>'}]
tab = ItemTable(items)
# or {{ tab }} in jinja
print(tab.__html__())
if __name__ == '__main__':
main()
|
Add subclassing example for RawColfrom flask_table import Table, Col
class RawCol(Col):
"""Class that will just output whatever it is given and will not
escape it.
"""
def td_format(self, content):
return content
class ItemTable(Table):
name = Col('Name')
raw = RawCol('Raw')
def main():
items = [{'name': 'A', 'raw': '<span>a</span>'},
{'name': 'B', 'raw': '<span>b</span>'}]
tab = ItemTable(items)
# or {{ tab }} in jinja
print(tab.__html__())
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add subclassing example for RawCol<commit_after>from flask_table import Table, Col
class RawCol(Col):
"""Class that will just output whatever it is given and will not
escape it.
"""
def td_format(self, content):
return content
class ItemTable(Table):
name = Col('Name')
raw = RawCol('Raw')
def main():
items = [{'name': 'A', 'raw': '<span>a</span>'},
{'name': 'B', 'raw': '<span>b</span>'}]
tab = ItemTable(items)
# or {{ tab }} in jinja
print(tab.__html__())
if __name__ == '__main__':
main()
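Worth noting next to the example: because RawCol returns its content unmodified, anything user-supplied should be sanitised before it reaches the table. A minimal sketch with markupsafe (already a Flask dependency), shown as one possible way a caller might guard the raw column:
from markupsafe import escape
untrusted = '<script>alert(1)</script>'
items = [{'name': 'A', 'raw': escape(untrusted)}]  # escaped before RawCol sees it
print(items[0]['raw'])  # <script>alert(1)</script>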
|
|
aca69251d17bd76302c0d4c1403f54c9f8da4949
|
mica/archive/tests/test_aca_hdr3.py
|
mica/archive/tests/test_aca_hdr3.py
|
"""
Basic functionality and regression tests for ACA hdr3 (diagnostic) telemetry.
"""
import numpy as np
from .. import aca_hdr3
def test_MSIDset():
"""
Read all available MSIDs into a single MSIDset. Use the empirically determined
lengths as regression tests.
"""
msids = [hdr3['msid'] for hdr3 in aca_hdr3.HDR3_DEF.values() if 'value' in hdr3]
# Read all MSIDs as a set
dat = aca_hdr3.MSIDset(msids, '2010:001', '2010:003')
val_lengths = np.array([len(dat[msid].vals) for msid in msids])
time_lengths = np.array([len(dat[msid].times) for msid in msids])
assert np.all(val_lengths == time_lengths)
assert np.all(val_lengths == 44432)
for msid in msids:
dat[msid].filter_bad()
val_lengths = np.array([len(dat[msid].vals) for msid in msids])
time_lengths = np.array([len(dat[msid].times) for msid in msids])
assert np.all(val_lengths == time_lengths)
assert np.all(val_lengths == [40528, 44432, 44432, 10679, 44432, 10760, 10731, 44432,
44432, 10679, 44432, 44432, 44432, 44432, 40991, 44432,
40991, 44432, 44432, 40991, 10679])
|
Add minimal regression tests of reading ACA hdr3 data
|
Add minimal regression tests of reading ACA hdr3 data
|
Python
|
bsd-3-clause
|
sot/mica,sot/mica
|
Add minimal regression tests of reading ACA hdr3 data
|
"""
Basic functionality and regression tests for ACA hdr3 (diagnostic) telemetry.
"""
import numpy as np
from .. import aca_hdr3
def test_MSIDset():
"""
Read all available MSIDs into a single MSIDset. Use the empirically determined
lengths as regression tests.
"""
msids = [hdr3['msid'] for hdr3 in aca_hdr3.HDR3_DEF.values() if 'value' in hdr3]
# Read all MSIDs as a set
dat = aca_hdr3.MSIDset(msids, '2010:001', '2010:003')
val_lengths = np.array([len(dat[msid].vals) for msid in msids])
time_lengths = np.array([len(dat[msid].times) for msid in msids])
assert np.all(val_lengths == time_lengths)
assert np.all(val_lengths == 44432)
for msid in msids:
dat[msid].filter_bad()
val_lengths = np.array([len(dat[msid].vals) for msid in msids])
time_lengths = np.array([len(dat[msid].times) for msid in msids])
assert np.all(val_lengths == time_lengths)
assert np.all(val_lengths == [40528, 44432, 44432, 10679, 44432, 10760, 10731, 44432,
44432, 10679, 44432, 44432, 44432, 44432, 40991, 44432,
40991, 44432, 44432, 40991, 10679])
|
<commit_before><commit_msg>Add minimal regression tests of reading ACA hdr3 data<commit_after>
|
"""
Basic functionality and regression tests for ACA hdr3 (diagnostic) telemetry.
"""
import numpy as np
from .. import aca_hdr3
def test_MSIDset():
"""
Read all available MSIDs into a single MSIDset. Use the empirically determined
lengths as regression tests.
"""
msids = [hdr3['msid'] for hdr3 in aca_hdr3.HDR3_DEF.values() if 'value' in hdr3]
# Read all MSIDs as a set
dat = aca_hdr3.MSIDset(msids, '2010:001', '2010:003')
val_lengths = np.array([len(dat[msid].vals) for msid in msids])
time_lengths = np.array([len(dat[msid].times) for msid in msids])
assert np.all(val_lengths == time_lengths)
assert np.all(val_lengths == 44432)
for msid in msids:
dat[msid].filter_bad()
val_lengths = np.array([len(dat[msid].vals) for msid in msids])
time_lengths = np.array([len(dat[msid].times) for msid in msids])
assert np.all(val_lengths == time_lengths)
assert np.all(val_lengths == [40528, 44432, 44432, 10679, 44432, 10760, 10731, 44432,
44432, 10679, 44432, 44432, 44432, 44432, 40991, 44432,
40991, 44432, 44432, 40991, 10679])
|
Add minimal regression tests of reading ACA hdr3 data"""
Basic functionality and regression tests for ACA hdr3 (diagnostic) telemetry.
"""
import numpy as np
from .. import aca_hdr3
def test_MSIDset():
"""
Read all available MSIDs into a single MSIDset. Use the empirically determined
lengths as regression tests.
"""
msids = [hdr3['msid'] for hdr3 in aca_hdr3.HDR3_DEF.values() if 'value' in hdr3]
# Read all MSIDs as a set
dat = aca_hdr3.MSIDset(msids, '2010:001', '2010:003')
val_lengths = np.array([len(dat[msid].vals) for msid in msids])
time_lengths = np.array([len(dat[msid].times) for msid in msids])
assert np.all(val_lengths == time_lengths)
assert np.all(val_lengths == 44432)
for msid in msids:
dat[msid].filter_bad()
val_lengths = np.array([len(dat[msid].vals) for msid in msids])
time_lengths = np.array([len(dat[msid].times) for msid in msids])
assert np.all(val_lengths == time_lengths)
assert np.all(val_lengths == [40528, 44432, 44432, 10679, 44432, 10760, 10731, 44432,
44432, 10679, 44432, 44432, 44432, 44432, 40991, 44432,
40991, 44432, 44432, 40991, 10679])
|
<commit_before><commit_msg>Add minimal regression tests of reading ACA hdr3 data<commit_after>"""
Basic functionality and regression tests for ACA hdr3 (diagnostic) telemetry.
"""
import numpy as np
from .. import aca_hdr3
def test_MSIDset():
"""
Read all available MSIDs into a single MSIDset. Use the empirically determined
lengths as regression tests.
"""
msids = [hdr3['msid'] for hdr3 in aca_hdr3.HDR3_DEF.values() if 'value' in hdr3]
# Read all MSIDs as a set
dat = aca_hdr3.MSIDset(msids, '2010:001', '2010:003')
val_lengths = np.array([len(dat[msid].vals) for msid in msids])
time_lengths = np.array([len(dat[msid].times) for msid in msids])
assert np.all(val_lengths == time_lengths)
assert np.all(val_lengths == 44432)
for msid in msids:
dat[msid].filter_bad()
val_lengths = np.array([len(dat[msid].vals) for msid in msids])
time_lengths = np.array([len(dat[msid].times) for msid in msids])
assert np.all(val_lengths == time_lengths)
assert np.all(val_lengths == [40528, 44432, 44432, 10679, 44432, 10760, 10731, 44432,
44432, 10679, 44432, 44432, 44432, 44432, 40991, 44432,
40991, 44432, 44432, 40991, 10679])
|
|
2598b189bf2b7f968ded928100132f301a07f1e5
|
hours_slept_histogram.py
|
hours_slept_histogram.py
|
import plotly as py, plotly.graph_objs as go
from csvparser import parse
from os.path import basename, splitext
from sys import argv
data_file = argv[1]
raw_data = parse(data_file)
sleep_durations = []
for date, sleeps in raw_data.items():
total = 0
for s in sleeps:
sleep, wake, is_nap = s
delta_h = round((wake - sleep).seconds / 3600)
total += delta_h
sleep_durations.append(total)
min = min(sleep_durations)
max = max(sleep_durations)
range = range(min, max + 1)
hist = [sleep_durations.count(i) for i in range]
data = [go.Bar(x=list(range), y=hist)]
dates = list(raw_data.keys())
fmt = '%m-%d-%y'
start = dates[0].strftime(fmt)
end = dates[-1].strftime(fmt)
name = splitext(basename(__file__))[0]
path = '{}_{}--{}.html'.format(name, start, end)
py.offline.plot(data, filename=path)
|
Add sleep duration histogram plotter
|
Add sleep duration histogram plotter
|
Python
|
mit
|
f-jiang/sleep-pattern-grapher
|
Add sleep duration histogram plotter
|
import plotly as py, plotly.graph_objs as go
from csvparser import parse
from os.path import basename, splitext
from sys import argv
data_file = argv[1]
raw_data = parse(data_file)
sleep_durations = []
for date, sleeps in raw_data.items():
total = 0
for s in sleeps:
sleep, wake, is_nap = s
delta_h = round((wake - sleep).seconds / 3600)
total += delta_h
sleep_durations.append(total)
min = min(sleep_durations)
max = max(sleep_durations)
range = range(min, max + 1)
hist = [sleep_durations.count(i) for i in range]
data = [go.Bar(x=list(range), y=hist)]
dates = list(raw_data.keys())
fmt = '%m-%d-%y'
start = dates[0].strftime(fmt)
end = dates[-1].strftime(fmt)
name = splitext(basename(__file__))[0]
path = '{}_{}--{}.html'.format(name, start, end)
py.offline.plot(data, filename=path)
|
<commit_before><commit_msg>Add sleep duration histogram plotter<commit_after>
|
import plotly as py, plotly.graph_objs as go
from csvparser import parse
from os.path import basename, splitext
from sys import argv
data_file = argv[1]
raw_data = parse(data_file)
sleep_durations = []
for date, sleeps in raw_data.items():
total = 0
for s in sleeps:
sleep, wake, is_nap = s
delta_h = round((wake - sleep).seconds / 3600)
total += delta_h
sleep_durations.append(total)
min = min(sleep_durations)
max = max(sleep_durations)
range = range(min, max + 1)
hist = [sleep_durations.count(i) for i in range]
data = [go.Bar(x=list(range), y=hist)]
dates = list(raw_data.keys())
fmt = '%m-%d-%y'
start = dates[0].strftime(fmt)
end = dates[-1].strftime(fmt)
name = splitext(basename(__file__))[0]
path = '{}_{}--{}.html'.format(name, start, end)
py.offline.plot(data, filename=path)
|
Add sleep duration histogram plotterimport plotly as py, plotly.graph_objs as go
from csvparser import parse
from os.path import basename, splitext
from sys import argv
data_file = argv[1]
raw_data = parse(data_file)
sleep_durations = []
for date, sleeps in raw_data.items():
total = 0
for s in sleeps:
sleep, wake, is_nap = s
delta_h = round((wake - sleep).seconds / 3600)
total += delta_h
sleep_durations.append(total)
min = min(sleep_durations)
max = max(sleep_durations)
range = range(min, max + 1)
hist = [sleep_durations.count(i) for i in range]
data = [go.Bar(x=list(range), y=hist)]
dates = list(raw_data.keys())
fmt = '%m-%d-%y'
start = dates[0].strftime(fmt)
end = dates[-1].strftime(fmt)
name = splitext(basename(__file__))[0]
path = '{}_{}--{}.html'.format(name, start, end)
py.offline.plot(data, filename=path)
|
<commit_before><commit_msg>Add sleep duration histogram plotter<commit_after>import plotly as py, plotly.graph_objs as go
from csvparser import parse
from os.path import basename, splitext
from sys import argv
data_file = argv[1]
raw_data = parse(data_file)
sleep_durations = []
for date, sleeps in raw_data.items():
total = 0
for s in sleeps:
sleep, wake, is_nap = s
delta_h = round((wake - sleep).seconds / 3600)
total += delta_h
sleep_durations.append(total)
min = min(sleep_durations)
max = max(sleep_durations)
range = range(min, max + 1)
hist = [sleep_durations.count(i) for i in range]
data = [go.Bar(x=list(range), y=hist)]
dates = list(raw_data.keys())
fmt = '%m-%d-%y'
start = dates[0].strftime(fmt)
end = dates[-1].strftime(fmt)
name = splitext(basename(__file__))[0]
path = '{}_{}--{}.html'.format(name, start, end)
py.offline.plot(data, filename=path)
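The plotter runs as written, but it shadows the builtins min, max and range, which makes later edits fragile. A hedged, self-contained rewrite of just the binning step with neutral names:
import plotly.graph_objs as go
def duration_histogram(sleep_durations):
    """Return a one-trace bar chart of integer-hour sleep durations."""
    lo, hi = min(sleep_durations), max(sleep_durations)
    bins = list(range(lo, hi + 1))
    counts = [sleep_durations.count(i) for i in bins]
    return [go.Bar(x=bins, y=counts)]
print(duration_histogram([6, 7, 7, 8, 9, 7]))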
|
|
483c334a6272ee8fa19a43c353fa18b4c1a76fec
|
src/bindings/pygaia/scripts/classification/retrain_model.py
|
src/bindings/pygaia/scripts/classification/retrain_model.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2019 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Gaia
#
# Gaia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
import argparse
from gaia2.fastyaml import yaml
from generate_svm_history_from_config import trainSVMHistory
def retrainModel(project_file, params_file, output_file):
project = yaml.load(open(project_file, 'r'))
class_name = project['className']
trainSVMHistory(project_file, params_file, output_file, class_name)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Retrains a model with all the available data given the '
'project configuration file and a set of parameters. '
'The model is saved in the output file.')
parser.add_argument('project_file', help='configuration file (.project)')
parser.add_argument('params_file', help='parameters (.param)')
parser.add_argument('output_file', help='history file (the model)')
args = parser.parse_args()
retrainModel(args.project_file, args.params_file, args.output_file)
|
Add script to retrain a model for a given param set
|
Add script to retrain a model for a given param set
|
Python
|
agpl-3.0
|
MTG/gaia,MTG/gaia,MTG/gaia,MTG/gaia
|
Add script to retrain a model for a given param set
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2019 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Gaia
#
# Gaia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
import argparse
from gaia2.fastyaml import yaml
from generate_svm_history_from_config import trainSVMHistory
def retrainModel(project_file, params_file, output_file):
project = yaml.load(open(project_file, 'r'))
class_name = project['className']
trainSVMHistory(project_file, params_file, output_file, class_name)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Retrains a model with all the available data given the '
'project configuration file and a set of parameters. '
'The model is saved in the output file.')
parser.add_argument('project_file', help='configuration file (.project)')
parser.add_argument('params_file', help='parameters (.param)')
parser.add_argument('output_file', help='history file (the model)')
args = parser.parse_args()
retrainModel(args.project_file, args.params_file, args.output_file)
|
<commit_before><commit_msg>Add script to retrain a model for a given param set<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2019 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Gaia
#
# Gaia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
import argparse
from gaia2.fastyaml import yaml
from generate_svm_history_from_config import trainSVMHistory
def retrainModel(project_file, params_file, output_file):
project = yaml.load(open(project_file, 'r'))
class_name = project['className']
trainSVMHistory(project_file, params_file, output_file, class_name)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Retrains a model with all the available data given the '
'project configuration file and a set of parameters. '
'The model is saved in the output file.')
parser.add_argument('project_file', help='configuration file (.project)')
parser.add_argument('params_file', help='parameters (.param)')
parser.add_argument('output_file', help='history file (the model)')
args = parser.parse_args()
retrainModel(args.project_file, args.params_file, args.output_file)
|
Add script to retrain a model for a given param set#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2019 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Gaia
#
# Gaia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
import argparse
from gaia2.fastyaml import yaml
from generate_svm_history_from_config import trainSVMHistory
def retrainModel(project_file, params_file, output_file):
project = yaml.load(open(project_file, 'r'))
class_name = project['className']
trainSVMHistory(project_file, params_file, output_file, class_name)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Retrains a model with all the available data given the '
'project configuration file and a set of parameters. '
'The model is saved in the output file.')
parser.add_argument('project_file', help='configuration file (.project)')
parser.add_argument('params_file', help='parameters (.param)')
parser.add_argument('output_file', help='history file (the model)')
args = parser.parse_args()
retrainModel(args.project_file, args.params_file, args.output_file)
|
<commit_before><commit_msg>Add script to retrain a model for a given param set<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2019 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Gaia
#
# Gaia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
import argparse
from gaia2.fastyaml import yaml
from generate_svm_history_from_config import trainSVMHistory
def retrainModel(project_file, params_file, output_file):
project = yaml.load(open(project_file, 'r'))
class_name = project['className']
trainSVMHistory(project_file, params_file, output_file, class_name)
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Retrains a model with all the available data given the '
'project configuration file and a set of parameters. '
'The model is saved in the output file.')
parser.add_argument('project_file', help='configuration file (.project)')
parser.add_argument('params_file', help='parameters (.param)')
parser.add_argument('output_file', help='history file (the model)')
args = parser.parse_args()
retrainModel(args.project_file, args.params_file, args.output_file)
|
|
10db4c582f0e849807380ac763e002421f6604a1
|
tests/api/views/aircraft_models_test.py
|
tests/api/views/aircraft_models_test.py
|
# coding=utf-8
from skylines.model import AircraftModel
def test_list_empty(db_session, client):
res = client.get('/aircraft-models')
assert res.status_code == 200
assert res.json == {
'models': [],
}
def test_list(db_session, client):
models = [
AircraftModel(name='Nimeta', kind=1, igc_index=142, dmst_index=100),
AircraftModel(name='ASK 13', igc_index=42, dmst_index=17),
AircraftModel(name='Dimona', kind=2),
AircraftModel(name='EPSILON', kind=3),
AircraftModel(name=u'Δ', kind=4),
AircraftModel(name='Falcon 9', kind=5),
]
db_session.add_all(models)
db_session.commit()
res = client.get('/aircraft-models')
print res.json
assert res.status_code == 200
assert res.json == {
'models': [{
'id': models[1].id,
'name': 'ASK 13',
'index': 17,
'type': 'unspecified',
}, {
'id': models[0].id,
'index': 100,
'name': 'Nimeta',
'type': 'glider'
}, {
'id': models[2].id,
'index': None,
'name': 'Dimona',
'type': 'motorglider'
}, {
'id': models[3].id,
'index': None,
'name': 'EPSILON',
'type': 'paraglider'
}, {
'id': models[4].id,
'index': None,
'name': u'Δ',
'type': 'hangglider'
}, {
'id': models[5].id,
'index': None,
'name': 'Falcon 9',
'type': 'ul'
}]
}
|
Add tests for "GET /aircraft-models"
|
tests/api: Add tests for "GET /aircraft-models"
|
Python
|
agpl-3.0
|
RBE-Avionik/skylines,skylines-project/skylines,skylines-project/skylines,Harry-R/skylines,Harry-R/skylines,shadowoneau/skylines,shadowoneau/skylines,shadowoneau/skylines,Harry-R/skylines,RBE-Avionik/skylines,Turbo87/skylines,RBE-Avionik/skylines,skylines-project/skylines,skylines-project/skylines,Turbo87/skylines,Turbo87/skylines,RBE-Avionik/skylines,Harry-R/skylines,shadowoneau/skylines,Turbo87/skylines
|
tests/api: Add tests for "GET /aircraft-models"
|
# coding=utf-8
from skylines.model import AircraftModel
def test_list_empty(db_session, client):
res = client.get('/aircraft-models')
assert res.status_code == 200
assert res.json == {
'models': [],
}
def test_list(db_session, client):
models = [
AircraftModel(name='Nimeta', kind=1, igc_index=142, dmst_index=100),
AircraftModel(name='ASK 13', igc_index=42, dmst_index=17),
AircraftModel(name='Dimona', kind=2),
AircraftModel(name='EPSILON', kind=3),
AircraftModel(name=u'Δ', kind=4),
AircraftModel(name='Falcon 9', kind=5),
]
db_session.add_all(models)
db_session.commit()
res = client.get('/aircraft-models')
print res.json
assert res.status_code == 200
assert res.json == {
'models': [{
'id': models[1].id,
'name': 'ASK 13',
'index': 17,
'type': 'unspecified',
}, {
'id': models[0].id,
'index': 100,
'name': 'Nimeta',
'type': 'glider'
}, {
'id': models[2].id,
'index': None,
'name': 'Dimona',
'type': 'motorglider'
}, {
'id': models[3].id,
'index': None,
'name': 'EPSILON',
'type': 'paraglider'
}, {
'id': models[4].id,
'index': None,
'name': u'Δ',
'type': 'hangglider'
}, {
'id': models[5].id,
'index': None,
'name': 'Falcon 9',
'type': 'ul'
}]
}
|
<commit_before><commit_msg>tests/api: Add tests for "GET /aircraft-models"<commit_after>
|
# coding=utf-8
from skylines.model import AircraftModel
def test_list_empty(db_session, client):
res = client.get('/aircraft-models')
assert res.status_code == 200
assert res.json == {
'models': [],
}
def test_list(db_session, client):
models = [
AircraftModel(name='Nimeta', kind=1, igc_index=142, dmst_index=100),
AircraftModel(name='ASK 13', igc_index=42, dmst_index=17),
AircraftModel(name='Dimona', kind=2),
AircraftModel(name='EPSILON', kind=3),
AircraftModel(name=u'Δ', kind=4),
AircraftModel(name='Falcon 9', kind=5),
]
db_session.add_all(models)
db_session.commit()
res = client.get('/aircraft-models')
print res.json
assert res.status_code == 200
assert res.json == {
'models': [{
'id': models[1].id,
'name': 'ASK 13',
'index': 17,
'type': 'unspecified',
}, {
'id': models[0].id,
'index': 100,
'name': 'Nimeta',
'type': 'glider'
}, {
'id': models[2].id,
'index': None,
'name': 'Dimona',
'type': 'motorglider'
}, {
'id': models[3].id,
'index': None,
'name': 'EPSILON',
'type': 'paraglider'
}, {
'id': models[4].id,
'index': None,
'name': u'Δ',
'type': 'hangglider'
}, {
'id': models[5].id,
'index': None,
'name': 'Falcon 9',
'type': 'ul'
}]
}
|
tests/api: Add tests for "GET /aircraft-models"# coding=utf-8
from skylines.model import AircraftModel
def test_list_empty(db_session, client):
res = client.get('/aircraft-models')
assert res.status_code == 200
assert res.json == {
'models': [],
}
def test_list(db_session, client):
models = [
AircraftModel(name='Nimeta', kind=1, igc_index=142, dmst_index=100),
AircraftModel(name='ASK 13', igc_index=42, dmst_index=17),
AircraftModel(name='Dimona', kind=2),
AircraftModel(name='EPSILON', kind=3),
AircraftModel(name=u'Δ', kind=4),
AircraftModel(name='Falcon 9', kind=5),
]
db_session.add_all(models)
db_session.commit()
res = client.get('/aircraft-models')
print res.json
assert res.status_code == 200
assert res.json == {
'models': [{
'id': models[1].id,
'name': 'ASK 13',
'index': 17,
'type': 'unspecified',
}, {
'id': models[0].id,
'index': 100,
'name': 'Nimeta',
'type': 'glider'
}, {
'id': models[2].id,
'index': None,
'name': 'Dimona',
'type': 'motorglider'
}, {
'id': models[3].id,
'index': None,
'name': 'EPSILON',
'type': 'paraglider'
}, {
'id': models[4].id,
'index': None,
'name': u'Δ',
'type': 'hangglider'
}, {
'id': models[5].id,
'index': None,
'name': 'Falcon 9',
'type': 'ul'
}]
}
|
<commit_before><commit_msg>tests/api: Add tests for "GET /aircraft-models"<commit_after># coding=utf-8
from skylines.model import AircraftModel
def test_list_empty(db_session, client):
res = client.get('/aircraft-models')
assert res.status_code == 200
assert res.json == {
'models': [],
}
def test_list(db_session, client):
models = [
AircraftModel(name='Nimeta', kind=1, igc_index=142, dmst_index=100),
AircraftModel(name='ASK 13', igc_index=42, dmst_index=17),
AircraftModel(name='Dimona', kind=2),
AircraftModel(name='EPSILON', kind=3),
AircraftModel(name=u'Δ', kind=4),
AircraftModel(name='Falcon 9', kind=5),
]
db_session.add_all(models)
db_session.commit()
res = client.get('/aircraft-models')
print res.json
assert res.status_code == 200
assert res.json == {
'models': [{
'id': models[1].id,
'name': 'ASK 13',
'index': 17,
'type': 'unspecified',
}, {
'id': models[0].id,
'index': 100,
'name': 'Nimeta',
'type': 'glider'
}, {
'id': models[2].id,
'index': None,
'name': 'Dimona',
'type': 'motorglider'
}, {
'id': models[3].id,
'index': None,
'name': 'EPSILON',
'type': 'paraglider'
}, {
'id': models[4].id,
'index': None,
'name': u'Δ',
'type': 'hangglider'
}, {
'id': models[5].id,
'index': None,
'name': 'Falcon 9',
'type': 'ul'
}]
}
|
|
8fb8b19c75e4a331733b79f9d1d9384a0b3080be
|
karabo_data/tests/test_lsxfel.py
|
karabo_data/tests/test_lsxfel.py
|
from karabo_data import lsxfel
from karabo_data import H5File
def test_lsxfel_file(mock_lpd_data, capsys):
with H5File(mock_lpd_data) as f:
img_ds, index = lsxfel.find_image(f)
assert img_ds.ndim == 4
assert index['first'].shape == (480,)
lsxfel.summarise_file(mock_lpd_data)
out, err = capsys.readouterr()
assert "480 trains, 128 frames/train" in out
def test_lsxfel_run(mock_fxe_run, capsys):
lsxfel.summarise_run(mock_fxe_run)
out, err = capsys.readouterr()
assert "480 trains" in out
assert "16 detector files" in out
|
Add a couple of tests for lsxfel
|
Add a couple of tests for lsxfel
|
Python
|
bsd-3-clause
|
European-XFEL/h5tools-py
|
Add a couple of tests for lsxfel
|
from karabo_data import lsxfel
from karabo_data import H5File
def test_lsxfel_file(mock_lpd_data, capsys):
with H5File(mock_lpd_data) as f:
img_ds, index = lsxfel.find_image(f)
assert img_ds.ndim == 4
assert index['first'].shape == (480,)
lsxfel.summarise_file(mock_lpd_data)
out, err = capsys.readouterr()
assert "480 trains, 128 frames/train" in out
def test_lsxfel_run(mock_fxe_run, capsys):
lsxfel.summarise_run(mock_fxe_run)
out, err = capsys.readouterr()
assert "480 trains" in out
assert "16 detector files" in out
|
<commit_before><commit_msg>Add a couple of tests for lsxfel<commit_after>
|
from karabo_data import lsxfel
from karabo_data import H5File
def test_lsxfel_file(mock_lpd_data, capsys):
with H5File(mock_lpd_data) as f:
img_ds, index = lsxfel.find_image(f)
assert img_ds.ndim == 4
assert index['first'].shape == (480,)
lsxfel.summarise_file(mock_lpd_data)
out, err = capsys.readouterr()
assert "480 trains, 128 frames/train" in out
def test_lsxfel_run(mock_fxe_run, capsys):
lsxfel.summarise_run(mock_fxe_run)
out, err = capsys.readouterr()
assert "480 trains" in out
assert "16 detector files" in out
|
Add a couple of tests for lsxfelfrom karabo_data import lsxfel
from karabo_data import H5File
def test_lsxfel_file(mock_lpd_data, capsys):
with H5File(mock_lpd_data) as f:
img_ds, index = lsxfel.find_image(f)
assert img_ds.ndim == 4
assert index['first'].shape == (480,)
lsxfel.summarise_file(mock_lpd_data)
out, err = capsys.readouterr()
assert "480 trains, 128 frames/train" in out
def test_lsxfel_run(mock_fxe_run, capsys):
lsxfel.summarise_run(mock_fxe_run)
out, err = capsys.readouterr()
assert "480 trains" in out
assert "16 detector files" in out
|
<commit_before><commit_msg>Add a couple of tests for lsxfel<commit_after>from karabo_data import lsxfel
from karabo_data import H5File
def test_lsxfel_file(mock_lpd_data, capsys):
with H5File(mock_lpd_data) as f:
img_ds, index = lsxfel.find_image(f)
assert img_ds.ndim == 4
assert index['first'].shape == (480,)
lsxfel.summarise_file(mock_lpd_data)
out, err = capsys.readouterr()
assert "480 trains, 128 frames/train" in out
def test_lsxfel_run(mock_fxe_run, capsys):
lsxfel.summarise_run(mock_fxe_run)
out, err = capsys.readouterr()
assert "480 trains" in out
assert "16 detector files" in out
|
|
fab3d35e7e2bd633f48d16a08914832a36eabb5e
|
numba/cuda/tests/cudapy/test_casting.py
|
numba/cuda/tests/cudapy/test_casting.py
|
from numba import unittest_support as unittest
import numpy as np
from numba import cuda, types
import struct
def float_to_int(x):
return np.int32(x)
def int_to_float(x):
return np.float64(x) / 2
def float_to_unsigned(x):
return types.uint32(x)
def float_to_complex(x):
return np.complex128(x)
class TestCasting(unittest.TestCase):
def _create_wrapped(self, pyfunc, intype, outtype):
wrapped_func = cuda.jit(device=True)(pyfunc)
@cuda.jit
def cuda_wrapper_fn(arg, res):
res[0] = wrapped_func(arg[0])
def wrapper_fn(arg):
argarray = np.zeros(1, dtype=intype)
argarray[0] = arg
resarray = np.zeros(1, dtype=outtype)
cuda_wrapper_fn(argarray, resarray)
return resarray[0]
return wrapper_fn
def test_float_to_int(self):
pyfunc = float_to_int
cfunc = self._create_wrapped(pyfunc, np.float32, np.int32)
self.assertEqual(cfunc(12.3), pyfunc(12.3))
self.assertEqual(cfunc(12.3), int(12.3))
def test_int_to_float(self):
pyfunc = int_to_float
cfunc = self._create_wrapped(pyfunc, np.int64, np.float64)
self.assertEqual(cfunc(321), pyfunc(321))
self.assertEqual(cfunc(321), 321. / 2)
def test_float_to_unsigned(self):
pyfunc = float_to_unsigned
cfunc = self._create_wrapped(pyfunc, np.float32, np.uint32)
self.assertEqual(cfunc(3.21), pyfunc(3.21))
self.assertEqual(cfunc(3.21), struct.unpack('I', struct.pack('i',
3))[0])
def test_float_to_complex(self):
pyfunc = float_to_complex
cfunc = self._create_wrapped(pyfunc, np.float64, np.complex128)
self.assertEqual(cfunc(-3.21), pyfunc(-3.21))
self.assertEqual(cfunc(-3.21), -3.21 + 0j)
if __name__ == '__main__':
unittest.main()
|
Create CUDA cast tests based on CPU cast tests
|
Create CUDA cast tests based on CPU cast tests
|
Python
|
bsd-2-clause
|
gdementen/numba,stuartarchibald/numba,stuartarchibald/numba,ssarangi/numba,stuartarchibald/numba,IntelLabs/numba,seibert/numba,pombredanne/numba,gmarkall/numba,ssarangi/numba,numba/numba,pombredanne/numba,pombredanne/numba,jriehl/numba,stefanseefeld/numba,numba/numba,numba/numba,gmarkall/numba,stefanseefeld/numba,stefanseefeld/numba,sklam/numba,jriehl/numba,cpcloud/numba,cpcloud/numba,stuartarchibald/numba,gdementen/numba,ssarangi/numba,gdementen/numba,stefanseefeld/numba,numba/numba,GaZ3ll3/numba,pitrou/numba,numba/numba,seibert/numba,stefanseefeld/numba,GaZ3ll3/numba,ssarangi/numba,cpcloud/numba,seibert/numba,cpcloud/numba,stonebig/numba,gmarkall/numba,seibert/numba,sklam/numba,jriehl/numba,IntelLabs/numba,GaZ3ll3/numba,gmarkall/numba,stonebig/numba,pitrou/numba,stonebig/numba,IntelLabs/numba,stonebig/numba,jriehl/numba,sklam/numba,IntelLabs/numba,GaZ3ll3/numba,pombredanne/numba,cpcloud/numba,pombredanne/numba,gdementen/numba,seibert/numba,GaZ3ll3/numba,pitrou/numba,IntelLabs/numba,gmarkall/numba,sklam/numba,pitrou/numba,stonebig/numba,stuartarchibald/numba,jriehl/numba,ssarangi/numba,sklam/numba,gdementen/numba,pitrou/numba
|
Create CUDA cast tests based on CPU cast tests
|
from numba import unittest_support as unittest
import numpy as np
from numba import cuda, types
import struct
def float_to_int(x):
return np.int32(x)
def int_to_float(x):
return np.float64(x) / 2
def float_to_unsigned(x):
return types.uint32(x)
def float_to_complex(x):
return np.complex128(x)
class TestCasting(unittest.TestCase):
def _create_wrapped(self, pyfunc, intype, outtype):
wrapped_func = cuda.jit(device=True)(pyfunc)
@cuda.jit
def cuda_wrapper_fn(arg, res):
res[0] = wrapped_func(arg[0])
def wrapper_fn(arg):
argarray = np.zeros(1, dtype=intype)
argarray[0] = arg
resarray = np.zeros(1, dtype=outtype)
cuda_wrapper_fn(argarray, resarray)
return resarray[0]
return wrapper_fn
def test_float_to_int(self):
pyfunc = float_to_int
cfunc = self._create_wrapped(pyfunc, np.float32, np.int32)
self.assertEqual(cfunc(12.3), pyfunc(12.3))
self.assertEqual(cfunc(12.3), int(12.3))
def test_int_to_float(self):
pyfunc = int_to_float
cfunc = self._create_wrapped(pyfunc, np.int64, np.float64)
self.assertEqual(cfunc(321), pyfunc(321))
self.assertEqual(cfunc(321), 321. / 2)
def test_float_to_unsigned(self):
pyfunc = float_to_unsigned
cfunc = self._create_wrapped(pyfunc, np.float32, np.uint32)
self.assertEqual(cfunc(3.21), pyfunc(3.21))
self.assertEqual(cfunc(3.21), struct.unpack('I', struct.pack('i',
3))[0])
def test_float_to_complex(self):
pyfunc = float_to_complex
cfunc = self._create_wrapped(pyfunc, np.float64, np.complex128)
self.assertEqual(cfunc(-3.21), pyfunc(-3.21))
self.assertEqual(cfunc(-3.21), -3.21 + 0j)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Create CUDA cast tests based on CPU cast tests<commit_after>
|
from numba import unittest_support as unittest
import numpy as np
from numba import cuda, types
import struct
def float_to_int(x):
return np.int32(x)
def int_to_float(x):
return np.float64(x) / 2
def float_to_unsigned(x):
return types.uint32(x)
def float_to_complex(x):
return np.complex128(x)
class TestCasting(unittest.TestCase):
def _create_wrapped(self, pyfunc, intype, outtype):
wrapped_func = cuda.jit(device=True)(pyfunc)
@cuda.jit
def cuda_wrapper_fn(arg, res):
res[0] = wrapped_func(arg[0])
def wrapper_fn(arg):
argarray = np.zeros(1, dtype=intype)
argarray[0] = arg
resarray = np.zeros(1, dtype=outtype)
cuda_wrapper_fn(argarray, resarray)
return resarray[0]
return wrapper_fn
def test_float_to_int(self):
pyfunc = float_to_int
cfunc = self._create_wrapped(pyfunc, np.float32, np.int32)
self.assertEqual(cfunc(12.3), pyfunc(12.3))
self.assertEqual(cfunc(12.3), int(12.3))
def test_int_to_float(self):
pyfunc = int_to_float
cfunc = self._create_wrapped(pyfunc, np.int64, np.float64)
self.assertEqual(cfunc(321), pyfunc(321))
self.assertEqual(cfunc(321), 321. / 2)
def test_float_to_unsigned(self):
pyfunc = float_to_unsigned
cfunc = self._create_wrapped(pyfunc, np.float32, np.uint32)
self.assertEqual(cfunc(3.21), pyfunc(3.21))
self.assertEqual(cfunc(3.21), struct.unpack('I', struct.pack('i',
3))[0])
def test_float_to_complex(self):
pyfunc = float_to_complex
cfunc = self._create_wrapped(pyfunc, np.float64, np.complex128)
self.assertEqual(cfunc(-3.21), pyfunc(-3.21))
self.assertEqual(cfunc(-3.21), -3.21 + 0j)
if __name__ == '__main__':
unittest.main()
|
Create CUDA cast tests based on CPU cast testsfrom numba import unittest_support as unittest
import numpy as np
from numba import cuda, types
import struct
def float_to_int(x):
return np.int32(x)
def int_to_float(x):
return np.float64(x) / 2
def float_to_unsigned(x):
return types.uint32(x)
def float_to_complex(x):
return np.complex128(x)
class TestCasting(unittest.TestCase):
def _create_wrapped(self, pyfunc, intype, outtype):
wrapped_func = cuda.jit(device=True)(pyfunc)
@cuda.jit
def cuda_wrapper_fn(arg, res):
res[0] = wrapped_func(arg[0])
def wrapper_fn(arg):
argarray = np.zeros(1, dtype=intype)
argarray[0] = arg
resarray = np.zeros(1, dtype=outtype)
cuda_wrapper_fn(argarray, resarray)
return resarray[0]
return wrapper_fn
def test_float_to_int(self):
pyfunc = float_to_int
cfunc = self._create_wrapped(pyfunc, np.float32, np.int32)
self.assertEqual(cfunc(12.3), pyfunc(12.3))
self.assertEqual(cfunc(12.3), int(12.3))
def test_int_to_float(self):
pyfunc = int_to_float
cfunc = self._create_wrapped(pyfunc, np.int64, np.float64)
self.assertEqual(cfunc(321), pyfunc(321))
self.assertEqual(cfunc(321), 321. / 2)
def test_float_to_unsigned(self):
pyfunc = float_to_unsigned
cfunc = self._create_wrapped(pyfunc, np.float32, np.uint32)
self.assertEqual(cfunc(3.21), pyfunc(3.21))
self.assertEqual(cfunc(3.21), struct.unpack('I', struct.pack('i',
3))[0])
def test_float_to_complex(self):
pyfunc = float_to_complex
cfunc = self._create_wrapped(pyfunc, np.float64, np.complex128)
self.assertEqual(cfunc(-3.21), pyfunc(-3.21))
self.assertEqual(cfunc(-3.21), -3.21 + 0j)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Create CUDA cast tests based on CPU cast tests<commit_after>from numba import unittest_support as unittest
import numpy as np
from numba import cuda, types
import struct
def float_to_int(x):
return np.int32(x)
def int_to_float(x):
return np.float64(x) / 2
def float_to_unsigned(x):
return types.uint32(x)
def float_to_complex(x):
return np.complex128(x)
class TestCasting(unittest.TestCase):
def _create_wrapped(self, pyfunc, intype, outtype):
wrapped_func = cuda.jit(device=True)(pyfunc)
@cuda.jit
def cuda_wrapper_fn(arg, res):
res[0] = wrapped_func(arg[0])
def wrapper_fn(arg):
argarray = np.zeros(1, dtype=intype)
argarray[0] = arg
resarray = np.zeros(1, dtype=outtype)
cuda_wrapper_fn(argarray, resarray)
return resarray[0]
return wrapper_fn
def test_float_to_int(self):
pyfunc = float_to_int
cfunc = self._create_wrapped(pyfunc, np.float32, np.int32)
self.assertEqual(cfunc(12.3), pyfunc(12.3))
self.assertEqual(cfunc(12.3), int(12.3))
def test_int_to_float(self):
pyfunc = int_to_float
cfunc = self._create_wrapped(pyfunc, np.int64, np.float64)
self.assertEqual(cfunc(321), pyfunc(321))
self.assertEqual(cfunc(321), 321. / 2)
def test_float_to_unsigned(self):
pyfunc = float_to_unsigned
cfunc = self._create_wrapped(pyfunc, np.float32, np.uint32)
self.assertEqual(cfunc(3.21), pyfunc(3.21))
self.assertEqual(cfunc(3.21), struct.unpack('I', struct.pack('i',
3))[0])
def test_float_to_complex(self):
pyfunc = float_to_complex
cfunc = self._create_wrapped(pyfunc, np.float64, np.complex128)
self.assertEqual(cfunc(-3.21), pyfunc(-3.21))
self.assertEqual(cfunc(-3.21), -3.21 + 0j)
if __name__ == '__main__':
unittest.main()
|
|
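A note on the pattern in the record above: CUDA kernels cannot return values, so the `_create_wrapped` helper wraps each cast function in a kernel that writes its scalar result into a one-element array. Stripped of the test scaffolding, the same idea looks like the sketch below. This is illustrative only, not part of the record, and assumes a working CUDA toolkit and device.
# Minimal form of the pattern used by _create_wrapped: a device function's
# scalar result is carried out of the kernel through a 1-element array.
import numpy as np
from numba import cuda

@cuda.jit(device=True)
def as_int(x):
    return np.int32(x)

@cuda.jit
def kernel(arg, res):
    res[0] = as_int(arg[0])

arg = np.array([12.3], dtype=np.float32)
res = np.zeros(1, dtype=np.int32)
kernel[1, 1](arg, res)   # one block, one thread
print(res[0])            # -> 12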
2636bf8f010de273c9713269a3402b16c83c912e
|
fellowms/migrations/0021_blog_status.py
|
fellowms/migrations/0021_blog_status.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-06-02 16:10
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fellowms', '0020_auto_20160602_1607'),
]
operations = [
migrations.AddField(
model_name='blog',
name='status',
field=models.CharField(choices=[('U', 'Unprocessed'), ('R', 'On Google Drive (for review)'), ('L', 'On pipeline to be published'), ('P', 'Published'), ('D', 'Declined'), ('O', 'Out of date')], default='U', max_length=1),
),
]
|
Create migration for blog status
|
Create migration for blog status
|
Python
|
bsd-3-clause
|
softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat
|
Create migration for blog status
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-06-02 16:10
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fellowms', '0020_auto_20160602_1607'),
]
operations = [
migrations.AddField(
model_name='blog',
name='status',
field=models.CharField(choices=[('U', 'Unprocessed'), ('R', 'On Google Drive (for review)'), ('L', 'On pipeline to be published'), ('P', 'Published'), ('D', 'Declined'), ('O', 'Out of date')], default='U', max_length=1),
),
]
|
<commit_before><commit_msg>Create migration for blog status<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-06-02 16:10
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fellowms', '0020_auto_20160602_1607'),
]
operations = [
migrations.AddField(
model_name='blog',
name='status',
field=models.CharField(choices=[('U', 'Unprocessed'), ('R', 'On Google Drive (for review)'), ('L', 'On pipeline to be published'), ('P', 'Published'), ('D', 'Declined'), ('O', 'Out of date')], default='U', max_length=1),
),
]
|
Create migration for blog status# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-06-02 16:10
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fellowms', '0020_auto_20160602_1607'),
]
operations = [
migrations.AddField(
model_name='blog',
name='status',
field=models.CharField(choices=[('U', 'Unprocessed'), ('R', 'On Google Drive (for review)'), ('L', 'On pipeline to be published'), ('P', 'Published'), ('D', 'Declined'), ('O', 'Out of date')], default='U', max_length=1),
),
]
|
<commit_before><commit_msg>Create migration for blog status<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-06-02 16:10
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('fellowms', '0020_auto_20160602_1607'),
]
operations = [
migrations.AddField(
model_name='blog',
name='status',
field=models.CharField(choices=[('U', 'Unprocessed'), ('R', 'On Google Drive (for review)'), ('L', 'On pipeline to be published'), ('P', 'Published'), ('D', 'Declined'), ('O', 'Out of date')], default='U', max_length=1),
),
]
|
|
1e361a431fc95304553852531427016215002cb0
|
src/Harry_get_Citations_BlockingApps.py
|
src/Harry_get_Citations_BlockingApps.py
|
#!/bin/env python
# Get the number of apps blocked by each Motorola patent
# and plot agains the number of citations to that patent
import pandas as pd
import matplotlib as plt
Blocking = pd.read_csv("../data/Motorola_blocking_patents_1114.csv")
MotoPatents = pd.read_csv("../data/Motorola_Patents_Blocking_Citations.csv", index=False)
del Blocking['Unnamed: 0']
for i in range(0, len(Blocking)):
Blocking['Blocking_patent_str'][i] = str(Blocking['Blocking_patent'][i])
for i in range(0, len(MotoPatents)):
MotoPatents['Blocking_appnumber'][i] = len(set(Blocking['Blocked_app'][Blocking['Blocking_patent_str']==MotoPatents['PatentID'][i]]))
X = MotoPatents['Blocking_appnumber']
Y = MotoPatents['Citations']
plt.plot(X,Y)
plt.show()
|
Add script to get number of apps blocked without duplicates
|
Add script to get number of apps blocked without duplicates
|
Python
|
bsd-2-clause
|
PatentBlocker/Motorola_Patent_Citations,PatentBlocker/Motorola_Patent_Citations
|
Add script to get number of apps blocked without duplicates
|
#!/bin/env python
# Get the number of apps blocked by each Motorola patent
# and plot agains the number of citations to that patent
import pandas as pd
import matplotlib as plt
Blocking = pd.read_csv("../data/Motorola_blocking_patents_1114.csv")
MotoPatents = pd.read_csv("../data/Motorola_Patents_Blocking_Citations.csv", index=False)
del Blocking['Unnamed: 0']
for i in range(0, len(Blocking)):
Blocking['Blocking_patent_str'][i] = str(Blocking['Blocking_patent'][i])
for i in range(0, len(MotoPatents)):
MotoPatents['Blocking_appnumber'][i] = len(set(Blocking['Blocked_app'][Blocking['Blocking_patent_str']==MotoPatents['PatentID'][i]]))
X = MotoPatents['Blocking_appnumber']
Y = MotoPatents['Citations']
plt.plot(X,Y)
plt.show()
|
<commit_before><commit_msg>Add script to get number of apps blocked without duplicates<commit_after>
|
#!/bin/env python
# Get the number of apps blocked by each Motorola patent
# and plot agains the number of citations to that patent
import pandas as pd
import matplotlib as plt
Blocking = pd.read_csv("../data/Motorola_blocking_patents_1114.csv")
MotoPatents = pd.read_csv("../data/Motorola_Patents_Blocking_Citations.csv", index=False)
del Blocking['Unnamed: 0']
for i in range(0, len(Blocking)):
Blocking['Blocking_patent_str'][i] = str(Blocking['Blocking_patent'][i])
for i in range(0, len(MotoPatents)):
MotoPatents['Blocking_appnumber'][i] = len(set(Blocking['Blocked_app'][Blocking['Blocking_patent_str']==MotoPatents['PatentID'][i]]))
X = MotoPatents['Blocking_appnumber']
Y = MotoPatents['Citations']
plt.plot(X,Y)
plt.show()
|
Add script to get number of apps blocked without duplicates#!/bin/env python
# Get the number of apps blocked by each Motorola patent
# and plot agains the number of citations to that patent
import pandas as pd
import matplotlib as plt
Blocking = pd.read_csv("../data/Motorola_blocking_patents_1114.csv")
MotoPatents = pd.read_csv("../data/Motorola_Patents_Blocking_Citations.csv", index=False)
del Blocking['Unnamed: 0']
for i in range(0, len(Blocking)):
Blocking['Blocking_patent_str'][i] = str(Blocking['Blocking_patent'][i])
for i in range(0, len(MotoPatents)):
MotoPatents['Blocking_appnumber'][i] = len(set(Blocking['Blocked_app'][Blocking['Blocking_patent_str']==MotoPatents['PatentID'][i]]))
X = MotoPatents['Blocking_appnumber']
Y = MotoPatents['Citations']
plt.plot(X,Y)
plt.show()
|
<commit_before><commit_msg>Add script to get number of apps blocked without duplicates<commit_after>#!/bin/env python
# Get the number of apps blocked by each Motorola patent
# and plot agains the number of citations to that patent
import pandas as pd
import matplotlib as plt
Blocking = pd.read_csv("../data/Motorola_blocking_patents_1114.csv")
MotoPatents = pd.read_csv("../data/Motorola_Patents_Blocking_Citations.csv", index=False)
del Blocking['Unnamed: 0']
for i in range(0, len(Blocking)):
Blocking['Blocking_patent_str'][i] = str(Blocking['Blocking_patent'][i])
for i in range(0, len(MotoPatents)):
MotoPatents['Blocking_appnumber'][i] = len(set(Blocking['Blocked_app'][Blocking['Blocking_patent_str']==MotoPatents['PatentID'][i]]))
X = MotoPatents['Blocking_appnumber']
Y = MotoPatents['Citations']
plt.plot(X,Y)
plt.show()
|
|
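The script recorded above would not run as committed: matplotlib is imported as `plt` without going through `pyplot`, `pd.read_csv` has no `index` argument, and the per-row loops rely on chained assignment. A minimal sketch of the same per-patent blocked-app count follows; it is not part of the record and assumes the two CSVs exist with the columns referenced there (Blocking_patent, Blocked_app, PatentID, Citations).
# Illustrative corrected sketch, not the committed code.
import pandas as pd
import matplotlib.pyplot as plt  # pyplot, not the bare matplotlib module

blocking = pd.read_csv("../data/Motorola_blocking_patents_1114.csv")
patents = pd.read_csv("../data/Motorola_Patents_Blocking_Citations.csv")

# Count distinct blocked apps per blocking patent in one pass
# instead of looping row by row.
blocking["Blocking_patent"] = blocking["Blocking_patent"].astype(str)
app_counts = blocking.groupby("Blocking_patent")["Blocked_app"].nunique()

patents["PatentID"] = patents["PatentID"].astype(str)
patents["Blocking_appnumber"] = patents["PatentID"].map(app_counts).fillna(0)

# A scatter plot suits a per-patent comparison better than a line plot.
plt.scatter(patents["Blocking_appnumber"], patents["Citations"])
plt.xlabel("Distinct apps blocked")
plt.ylabel("Citations")
plt.show()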
ad3f121f43f07a2fa27525e25ef2afdca94475f9
|
greatbigcrane/job_server/job_processor.py
|
greatbigcrane/job_server/job_processor.py
|
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'greatbigcrane.development'
import zmq
import time
import json
addr = 'tcp://127.0.0.1:5555'
context = zmq.Context()
socket = context.socket(zmq.REQ)
socket.connect(addr)
def bootstrap(project):
print("processing %s" % project)
command_map = {
'BOOTSTRAP': bootstrap,
}
while True:
socket.send("GET")
job = socket.recv()
if job == "EMPTY":
time.sleep(1)
continue
job = json.loads(job)
command = command_map[job['command']]
del job['command']
command(**job)
|
Add a simple boilerplate to process jobs.
|
Add a simple boilerplate to process jobs.
|
Python
|
apache-2.0
|
pnomolos/greatbigcrane,pnomolos/greatbigcrane
|
Add a simple boilerplate to process jobs.
|
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'greatbigcrane.development'
import zmq
import time
import json
addr = 'tcp://127.0.0.1:5555'
context = zmq.Context()
socket = context.socket(zmq.REQ)
socket.connect(addr)
def bootstrap(project):
print("processing %s" % project)
command_map = {
'BOOTSTRAP': bootstrap,
}
while True:
socket.send("GET")
job = socket.recv()
if job == "EMPTY":
time.sleep(1)
continue
job = json.loads(job)
command = command_map[job['command']]
del job['command']
command(**job)
|
<commit_before><commit_msg>Add a simple boilerplate to process jobs.<commit_after>
|
import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'greatbigcrane.development'
import zmq
import time
import json
addr = 'tcp://127.0.0.1:5555'
context = zmq.Context()
socket = context.socket(zmq.REQ)
socket.connect(addr)
def bootstrap(project):
print("processing %s" % project)
command_map = {
'BOOTSTRAP': bootstrap,
}
while True:
socket.send("GET")
job = socket.recv()
if job == "EMPTY":
time.sleep(1)
continue
job = json.loads(job)
command = command_map[job['command']]
del job['command']
command(**job)
|
Add a simple boilerplate to process jobs.import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'greatbigcrane.development'
import zmq
import time
import json
addr = 'tcp://127.0.0.1:5555'
context = zmq.Context()
socket = context.socket(zmq.REQ)
socket.connect(addr)
def bootstrap(project):
print("processing %s" % project)
command_map = {
'BOOTSTRAP': bootstrap,
}
while True:
socket.send("GET")
job = socket.recv()
if job == "EMPTY":
time.sleep(1)
continue
job = json.loads(job)
command = command_map[job['command']]
del job['command']
command(**job)
|
<commit_before><commit_msg>Add a simple boilerplate to process jobs.<commit_after>import os
os.environ['DJANGO_SETTINGS_MODULE'] = 'greatbigcrane.development'
import zmq
import time
import json
addr = 'tcp://127.0.0.1:5555'
context = zmq.Context()
socket = context.socket(zmq.REQ)
socket.connect(addr)
def bootstrap(project):
print("processing %s" % project)
command_map = {
'BOOTSTRAP': bootstrap,
}
while True:
socket.send("GET")
job = socket.recv()
if job == "EMPTY":
time.sleep(1)
continue
job = json.loads(job)
command = command_map[job['command']]
del job['command']
command(**job)
|
|
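The worker in the record above connects a ZeroMQ REQ socket, polls with "GET", and expects either the string "EMPTY" or a JSON job with a 'command' key plus keyword arguments. The matching job server is not part of the record; a minimal REP-side sketch honouring that protocol might look like the following. The address and the EMPTY sentinel come from the client, everything else (the in-memory queue, Python 3 byte handling) is an assumption.
# Hypothetical counterpart to the worker above, not taken from greatbigcrane.
import json
import zmq

addr = 'tcp://127.0.0.1:5555'
jobs = [{"command": "BOOTSTRAP", "project": "demo"}]  # stand-in queue

context = zmq.Context()
socket = context.socket(zmq.REP)
socket.bind(addr)

while True:
    # REP sockets must alternate recv/send, which this loop does.
    message = socket.recv()
    if message == b"GET" and jobs:
        socket.send(json.dumps(jobs.pop(0)).encode())
    else:
        socket.send(b"EMPTY")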
f0d501bec97e77b7f7efb427bdcbb051230a5e5a
|
store/tests/test_context_processor.py
|
store/tests/test_context_processor.py
|
"""
Tests for Custom context processors.
"""
import os
from django.conf import settings
from django.test import TestCase, override_settings
class FooterCategoriesContextProcessorTests(TestCase):
"""
Tests for the ``store.context_processors.footer_categories`` processor.
"""
def test_custom_context_exists(self):
# Get the homepage
response = self.client.get('/')
self.assertIn('all_categories', response.context)
@override_settings(
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(settings.BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
)
def test_custom_context_does_not_exist_if_not_included_in_settings(self):
# Get the homepage
response = self.client.get('/')
self.assertNotIn('all_categories', response.context)
|
Add tests for custom context processor
|
Add tests for custom context processor
|
Python
|
bsd-3-clause
|
kevgathuku/compshop,kevgathuku/compshop,kevgathuku/compshop,kevgathuku/compshop,andela-kndungu/compshop,andela-kndungu/compshop,andela-kndungu/compshop,andela-kndungu/compshop
|
Add tests for custom context processor
|
"""
Tests for Custom context processors.
"""
import os
from django.conf import settings
from django.test import TestCase, override_settings
class FooterCategoriesContextProcessorTests(TestCase):
"""
Tests for the ``store.context_processors.footer_categories`` processor.
"""
def test_custom_context_exists(self):
# Get the homepage
response = self.client.get('/')
self.assertIn('all_categories', response.context)
@override_settings(
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(settings.BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
)
def test_custom_context_does_not_exist_if_not_included_in_settings(self):
# Get the homepage
response = self.client.get('/')
self.assertNotIn('all_categories', response.context)
|
<commit_before><commit_msg>Add tests for custom context processor<commit_after>
|
"""
Tests for Custom context processors.
"""
import os
from django.conf import settings
from django.test import TestCase, override_settings
class FooterCategoriesContextProcessorTests(TestCase):
"""
Tests for the ``store.context_processors.footer_categories`` processor.
"""
def test_custom_context_exists(self):
# Get the homepage
response = self.client.get('/')
self.assertIn('all_categories', response.context)
@override_settings(
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(settings.BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
)
def test_custom_context_does_not_exist_if_not_included_in_settings(self):
# Get the homepage
response = self.client.get('/')
self.assertNotIn('all_categories', response.context)
|
Add tests for custom context processor"""
Tests for Custom context processors.
"""
import os
from django.conf import settings
from django.test import TestCase, override_settings
class FooterCategoriesContextProcessorTests(TestCase):
"""
Tests for the ``store.context_processors.footer_categories`` processor.
"""
def test_custom_context_exists(self):
# Get the homepage
response = self.client.get('/')
self.assertIn('all_categories', response.context)
@override_settings(
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(settings.BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
)
def test_custom_context_does_not_exist_if_not_included_in_settings(self):
# Get the homepage
response = self.client.get('/')
self.assertNotIn('all_categories', response.context)
|
<commit_before><commit_msg>Add tests for custom context processor<commit_after>"""
Tests for Custom context processors.
"""
import os
from django.conf import settings
from django.test import TestCase, override_settings
class FooterCategoriesContextProcessorTests(TestCase):
"""
Tests for the ``store.context_processors.footer_categories`` processor.
"""
def test_custom_context_exists(self):
# Get the homepage
response = self.client.get('/')
self.assertIn('all_categories', response.context)
@override_settings(
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(settings.BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
)
def test_custom_context_does_not_exist_if_not_included_in_settings(self):
# Get the homepage
response = self.client.get('/')
self.assertNotIn('all_categories', response.context)
|
|
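The tests in the record above exercise a `store.context_processors.footer_categories` processor that is not itself included. A Django context processor is just a callable that takes the request and returns a dict merged into every RequestContext, which is why the tests look for 'all_categories' in response.context. A sketch of what the tested function presumably looks like follows; the Category model and its query are assumptions, only the 'all_categories' key is taken from the tests.
# Hypothetical sketch of the processor under test.
from store.models import Category

def footer_categories(request):
    # Returned keys become available in every template rendered with a
    # RequestContext, provided this processor is listed in TEMPLATES.
    return {'all_categories': Category.objects.all()}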
b0b3e28df3807de5bb6b534bf21c2d2d027340e9
|
osf/migrations/0002_auto_20170329_1251.py
|
osf/migrations/0002_auto_20170329_1251.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-03-29 17:51
from __future__ import unicode_literals
from django.db import migrations
import django.utils.timezone
import osf.utils.fields
class Migration(migrations.Migration):
dependencies = [
('osf', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='preprintservice',
name='date_modified',
field=osf.utils.fields.NonNaiveDateTimeField(default=django.utils.timezone.now),
),
]
|
Add migration for auto_now removal
|
Add migration for auto_now removal
|
Python
|
apache-2.0
|
mfraezz/osf.io,icereval/osf.io,Johnetordoff/osf.io,icereval/osf.io,HalcyonChimera/osf.io,adlius/osf.io,Nesiehr/osf.io,HalcyonChimera/osf.io,saradbowman/osf.io,hmoco/osf.io,aaxelb/osf.io,laurenrevere/osf.io,cslzchen/osf.io,caseyrollins/osf.io,TomBaxter/osf.io,chrisseto/osf.io,cslzchen/osf.io,crcresearch/osf.io,adlius/osf.io,erinspace/osf.io,adlius/osf.io,sloria/osf.io,mattclark/osf.io,mfraezz/osf.io,baylee-d/osf.io,chennan47/osf.io,erinspace/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,chennan47/osf.io,HalcyonChimera/osf.io,icereval/osf.io,cwisecarver/osf.io,TomBaxter/osf.io,caseyrollins/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,hmoco/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,caneruguz/osf.io,cslzchen/osf.io,laurenrevere/osf.io,caneruguz/osf.io,chrisseto/osf.io,binoculars/osf.io,cslzchen/osf.io,leb2dg/osf.io,binoculars/osf.io,caneruguz/osf.io,saradbowman/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,CenterForOpenScience/osf.io,cwisecarver/osf.io,sloria/osf.io,felliott/osf.io,hmoco/osf.io,cwisecarver/osf.io,crcresearch/osf.io,brianjgeiger/osf.io,mattclark/osf.io,leb2dg/osf.io,mfraezz/osf.io,Nesiehr/osf.io,erinspace/osf.io,caseyrollins/osf.io,pattisdr/osf.io,leb2dg/osf.io,brianjgeiger/osf.io,felliott/osf.io,crcresearch/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,pattisdr/osf.io,chennan47/osf.io,TomBaxter/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,felliott/osf.io,Nesiehr/osf.io,baylee-d/osf.io,leb2dg/osf.io,sloria/osf.io,cwisecarver/osf.io,binoculars/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,adlius/osf.io,mattclark/osf.io,aaxelb/osf.io
|
Add migration for auto_now removal
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-03-29 17:51
from __future__ import unicode_literals
from django.db import migrations
import django.utils.timezone
import osf.utils.fields
class Migration(migrations.Migration):
dependencies = [
('osf', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='preprintservice',
name='date_modified',
field=osf.utils.fields.NonNaiveDateTimeField(default=django.utils.timezone.now),
),
]
|
<commit_before><commit_msg>Add migration for auto_now removal<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-03-29 17:51
from __future__ import unicode_literals
from django.db import migrations
import django.utils.timezone
import osf.utils.fields
class Migration(migrations.Migration):
dependencies = [
('osf', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='preprintservice',
name='date_modified',
field=osf.utils.fields.NonNaiveDateTimeField(default=django.utils.timezone.now),
),
]
|
Add migration for auto_now removal# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-03-29 17:51
from __future__ import unicode_literals
from django.db import migrations
import django.utils.timezone
import osf.utils.fields
class Migration(migrations.Migration):
dependencies = [
('osf', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='preprintservice',
name='date_modified',
field=osf.utils.fields.NonNaiveDateTimeField(default=django.utils.timezone.now),
),
]
|
<commit_before><commit_msg>Add migration for auto_now removal<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-03-29 17:51
from __future__ import unicode_literals
from django.db import migrations
import django.utils.timezone
import osf.utils.fields
class Migration(migrations.Migration):
dependencies = [
('osf', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='preprintservice',
name='date_modified',
field=osf.utils.fields.NonNaiveDateTimeField(default=django.utils.timezone.now),
),
]
|
|
73bf27a95944f67feb254d90b90cfa31165dc4cb
|
tests/UselessSymbolsRemove/CycleTest.py
|
tests/UselessSymbolsRemove/CycleTest.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 19.08.2017 16:13
:Licence GNUv3
Part of grammpy-transforms
"""
from unittest import main, TestCase
from grammpy import *
from grammpy_transforms import *
class S(Nonterminal): pass
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class D(Nonterminal): pass
class Rules(Rule):
rules = [([S], [A, B]),
([S], [C]),
([A], ['a', A]),
([A], ['a']),
([B], ['b', B]),
([C], ['c']),
([D], ['b', 'c'])]
class CycleTest(TestCase):
def test_cycleTest(self):
g = Grammar(terminals=['a', 'b', 'c'],
nonterminals=[S, A, B, C, D],
rules=[Rules],
start_symbol=S)
self.assertEqual(g.rules_count(), 7)
com = ContextFree.remove_useless_symbols(g)
self.assertTrue(com.have_term('c'))
self.assertFalse(com.have_term('a'))
self.assertFalse(com.have_term('b'))
self.assertTrue(com.have_nonterm([S, C]))
self.assertFalse(com.have_nonterm(A))
self.assertFalse(com.have_nonterm(B))
self.assertFalse(com.have_nonterm(D))
self.assertEqual(com.rules_count(), 2)
def test_cycleTestShouldNotChange(self):
g = Grammar(terminals=['a', 'b', 'c'],
nonterminals=[S, A, B, C, D],
rules=[Rules],
start_symbol=S)
self.assertEqual(g.rules_count(), 7)
ContextFree.remove_useless_symbols(g)
self.assertTrue(g.have_term(['a', 'b', 'c']))
self.assertTrue(g.have_nonterm([S, A, B, C, D]))
self.assertEqual(g.rules_count(), 7)
def test_cycleTestShouldChange(self):
g = Grammar(terminals=['a', 'b', 'c'],
nonterminals=[S, A, B, C, D],
rules=[Rules],
start_symbol=S)
self.assertEqual(g.rules_count(), 7)
ContextFree.remove_useless_symbols(g, transform_grammar=True)
self.assertTrue(g.have_term('c'))
self.assertFalse(g.have_term('a'))
self.assertFalse(g.have_term('b'))
self.assertTrue(g.have_nonterm([S, C]))
self.assertFalse(g.have_nonterm(A))
self.assertFalse(g.have_nonterm(B))
self.assertFalse(g.have_nonterm(D))
self.assertEqual(g.rules_count(), 2)
|
Add cycle test of remove useless symbols
|
Add cycle test of remove useless symbols
|
Python
|
mit
|
PatrikValkovic/grammpy
|
Add cycle test of remove useless symbols
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 19.08.2017 16:13
:Licence GNUv3
Part of grammpy-transforms
"""
from unittest import main, TestCase
from grammpy import *
from grammpy_transforms import *
class S(Nonterminal): pass
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class D(Nonterminal): pass
class Rules(Rule):
rules = [([S], [A, B]),
([S], [C]),
([A], ['a', A]),
([A], ['a']),
([B], ['b', B]),
([C], ['c']),
([D], ['b', 'c'])]
class CycleTest(TestCase):
def test_cycleTest(self):
g = Grammar(terminals=['a', 'b', 'c'],
nonterminals=[S, A, B, C, D],
rules=[Rules],
start_symbol=S)
self.assertEqual(g.rules_count(), 7)
com = ContextFree.remove_useless_symbols(g)
self.assertTrue(com.have_term('c'))
self.assertFalse(com.have_term('a'))
self.assertFalse(com.have_term('b'))
self.assertTrue(com.have_nonterm([S, C]))
self.assertFalse(com.have_nonterm(A))
self.assertFalse(com.have_nonterm(B))
self.assertFalse(com.have_nonterm(D))
self.assertEqual(com.rules_count(), 2)
def test_cycleTestShouldNotChange(self):
g = Grammar(terminals=['a', 'b', 'c'],
nonterminals=[S, A, B, C, D],
rules=[Rules],
start_symbol=S)
self.assertEqual(g.rules_count(), 7)
ContextFree.remove_useless_symbols(g)
self.assertTrue(g.have_term(['a', 'b', 'c']))
self.assertTrue(g.have_nonterm([S, A, B, C, D]))
self.assertEqual(g.rules_count(), 7)
def test_cycleTestShouldChange(self):
g = Grammar(terminals=['a', 'b', 'c'],
nonterminals=[S, A, B, C, D],
rules=[Rules],
start_symbol=S)
self.assertEqual(g.rules_count(), 7)
ContextFree.remove_useless_symbols(g, transform_grammar=True)
self.assertTrue(g.have_term('c'))
self.assertFalse(g.have_term('a'))
self.assertFalse(g.have_term('b'))
self.assertTrue(g.have_nonterm([S, C]))
self.assertFalse(g.have_nonterm(A))
self.assertFalse(g.have_nonterm(B))
self.assertFalse(g.have_nonterm(D))
self.assertEqual(g.rules_count(), 2)
|
<commit_before><commit_msg>Add cycle test of remove useless symbols<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 19.08.2017 16:13
:Licence GNUv3
Part of grammpy-transforms
"""
from unittest import main, TestCase
from grammpy import *
from grammpy_transforms import *
class S(Nonterminal): pass
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class D(Nonterminal): pass
class Rules(Rule):
rules = [([S], [A, B]),
([S], [C]),
([A], ['a', A]),
([A], ['a']),
([B], ['b', B]),
([C], ['c']),
([D], ['b', 'c'])]
class CycleTest(TestCase):
def test_cycleTest(self):
g = Grammar(terminals=['a', 'b', 'c'],
nonterminals=[S, A, B, C, D],
rules=[Rules],
start_symbol=S)
self.assertEqual(g.rules_count(), 7)
com = ContextFree.remove_useless_symbols(g)
self.assertTrue(com.have_term('c'))
self.assertFalse(com.have_term('a'))
self.assertFalse(com.have_term('b'))
self.assertTrue(com.have_nonterm([S, C]))
self.assertFalse(com.have_nonterm(A))
self.assertFalse(com.have_nonterm(B))
self.assertFalse(com.have_nonterm(D))
self.assertEqual(com.rules_count(), 2)
def test_cycleTestShouldNotChange(self):
g = Grammar(terminals=['a', 'b', 'c'],
nonterminals=[S, A, B, C, D],
rules=[Rules],
start_symbol=S)
self.assertEqual(g.rules_count(), 7)
ContextFree.remove_useless_symbols(g)
self.assertTrue(g.have_term(['a', 'b', 'c']))
self.assertTrue(g.have_nonterm([S, A, B, C, D]))
self.assertEqual(g.rules_count(), 7)
def test_cycleTestShouldChange(self):
g = Grammar(terminals=['a', 'b', 'c'],
nonterminals=[S, A, B, C, D],
rules=[Rules],
start_symbol=S)
self.assertEqual(g.rules_count(), 7)
ContextFree.remove_useless_symbols(g, transform_grammar=True)
self.assertTrue(g.have_term('c'))
self.assertFalse(g.have_term('a'))
self.assertFalse(g.have_term('b'))
self.assertTrue(g.have_nonterm([S, C]))
self.assertFalse(g.have_nonterm(A))
self.assertFalse(g.have_nonterm(B))
self.assertFalse(g.have_nonterm(D))
self.assertEqual(g.rules_count(), 2)
|
Add cycle test of remove useless symbols#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 19.08.2017 16:13
:Licence GNUv3
Part of grammpy-transforms
"""
from unittest import main, TestCase
from grammpy import *
from grammpy_transforms import *
class S(Nonterminal): pass
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class D(Nonterminal): pass
class Rules(Rule):
rules = [([S], [A, B]),
([S], [C]),
([A], ['a', A]),
([A], ['a']),
([B], ['b', B]),
([C], ['c']),
([D], ['b', 'c'])]
class CycleTest(TestCase):
def test_cycleTest(self):
g = Grammar(terminals=['a', 'b', 'c'],
nonterminals=[S, A, B, C, D],
rules=[Rules],
start_symbol=S)
self.assertEqual(g.rules_count(), 7)
com = ContextFree.remove_useless_symbols(g)
self.assertTrue(com.have_term('c'))
self.assertFalse(com.have_term('a'))
self.assertFalse(com.have_term('b'))
self.assertTrue(com.have_nonterm([S, C]))
self.assertFalse(com.have_nonterm(A))
self.assertFalse(com.have_nonterm(B))
self.assertFalse(com.have_nonterm(D))
self.assertEqual(com.rules_count(), 2)
def test_cycleTestShouldNotChange(self):
g = Grammar(terminals=['a', 'b', 'c'],
nonterminals=[S, A, B, C, D],
rules=[Rules],
start_symbol=S)
self.assertEqual(g.rules_count(), 7)
ContextFree.remove_useless_symbols(g)
self.assertTrue(g.have_term(['a', 'b', 'c']))
self.assertTrue(g.have_nonterm([S, A, B, C, D]))
self.assertEqual(g.rules_count(), 7)
def test_cycleTestShouldChange(self):
g = Grammar(terminals=['a', 'b', 'c'],
nonterminals=[S, A, B, C, D],
rules=[Rules],
start_symbol=S)
self.assertEqual(g.rules_count(), 7)
ContextFree.remove_useless_symbols(g, transform_grammar=True)
self.assertTrue(g.have_term('c'))
self.assertFalse(g.have_term('a'))
self.assertFalse(g.have_term('b'))
self.assertTrue(g.have_nonterm([S, C]))
self.assertFalse(g.have_nonterm(A))
self.assertFalse(g.have_nonterm(B))
self.assertFalse(g.have_nonterm(D))
self.assertEqual(g.rules_count(), 2)
|
<commit_before><commit_msg>Add cycle test of remove useless symbols<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 19.08.2017 16:13
:Licence GNUv3
Part of grammpy-transforms
"""
from unittest import main, TestCase
from grammpy import *
from grammpy_transforms import *
class S(Nonterminal): pass
class A(Nonterminal): pass
class B(Nonterminal): pass
class C(Nonterminal): pass
class D(Nonterminal): pass
class Rules(Rule):
rules = [([S], [A, B]),
([S], [C]),
([A], ['a', A]),
([A], ['a']),
([B], ['b', B]),
([C], ['c']),
([D], ['b', 'c'])]
class CycleTest(TestCase):
def test_cycleTest(self):
g = Grammar(terminals=['a', 'b', 'c'],
nonterminals=[S, A, B, C, D],
rules=[Rules],
start_symbol=S)
self.assertEqual(g.rules_count(), 7)
com = ContextFree.remove_useless_symbols(g)
self.assertTrue(com.have_term('c'))
self.assertFalse(com.have_term('a'))
self.assertFalse(com.have_term('b'))
self.assertTrue(com.have_nonterm([S, C]))
self.assertFalse(com.have_nonterm(A))
self.assertFalse(com.have_nonterm(B))
self.assertFalse(com.have_nonterm(D))
self.assertEqual(com.rules_count(), 2)
def test_cycleTestShouldNotChange(self):
g = Grammar(terminals=['a', 'b', 'c'],
nonterminals=[S, A, B, C, D],
rules=[Rules],
start_symbol=S)
self.assertEqual(g.rules_count(), 7)
ContextFree.remove_useless_symbols(g)
self.assertTrue(g.have_term(['a', 'b', 'c']))
self.assertTrue(g.have_nonterm([S, A, B, C, D]))
self.assertEqual(g.rules_count(), 7)
def test_cycleTestShouldChange(self):
g = Grammar(terminals=['a', 'b', 'c'],
nonterminals=[S, A, B, C, D],
rules=[Rules],
start_symbol=S)
self.assertEqual(g.rules_count(), 7)
ContextFree.remove_useless_symbols(g, transform_grammar=True)
self.assertTrue(g.have_term('c'))
self.assertFalse(g.have_term('a'))
self.assertFalse(g.have_term('b'))
self.assertTrue(g.have_nonterm([S, C]))
self.assertFalse(g.have_nonterm(A))
self.assertFalse(g.have_nonterm(B))
self.assertFalse(g.have_nonterm(D))
self.assertEqual(g.rules_count(), 2)
|
|
2ae16c880d6b38c00b5eb48d99facacc5a3ccf7e
|
python/second-largest.py
|
python/second-largest.py
|
# You are given as input an unsorted array of n distinct numbers,
# where n is a power of 2.
# Give an algorithm that identifies the second-largest number in the array,
# and that uses at most n + log2n - 2 comparisons.
A = [4, 512, 8, 64, 16, 2, 32, 256] # Uses at most 9 comparisons
# def second_largest (Array):
# divided = merge_split(Array)
# print divided
# def merge_split (Array) :
# left = Array[:len(Array)/2]
# right = Array[len(Array)/2:]
# if(len(left) > 1):
# left = merge_split(left)
# if(len(right) > 1):
# right = merge_split(right)
# return [left, right]
# def merge (left, right, result) :
# if left[0] < right[0]:
# result.append(left[0])
# else:
# result.append(right[0])
# return result
def mergesort(aList):
return _mergesort( aList, 0, len( aList ) - 1 )
def _mergesort( aList, first, last ):
# break problem into smaller structurally identical pieces
mid = ( first + last ) / 2
if first < last:
_mergesort( aList, first, mid )
_mergesort( aList, mid + 1, last )
# merge solved pieces to get solution to original problem
a, f, l = 0, first, mid + 1
tmp = [None] * ( last - first + 1 )
while f <= mid and l <= last:
if aList[f] < aList[l] :
tmp[a] = aList[f]
f += 1
else:
tmp[a] = aList[l]
l += 1
a += 1
if f <= mid :
tmp[a:] = aList[f:mid + 1]
if l <= last:
tmp[a:] = aList[l:last + 1]
a = 0
while first <= last:
aList[first] = tmp[a]
first += 1
a += 1
return aList
# second_largest(A)
print mergesort(A)
|
Add solution to hamming numbers kata
|
Add solution to hamming numbers kata
|
Python
|
mit
|
HiccupinGminor/tidbits,HiccupinGminor/tidbits
|
Add solution to hamming numbers kata
|
# You are given as input an unsorted array of n distinct numbers,
# where n is a power of 2.
# Give an algorithm that identifies the second-largest number in the array,
# and that uses at most n + log2n - 2 comparisons.
A = [4, 512, 8, 64, 16, 2, 32, 256] # Uses at most 9 comparisons
# def second_largest (Array):
# divided = merge_split(Array)
# print divided
# def merge_split (Array) :
# left = Array[:len(Array)/2]
# right = Array[len(Array)/2:]
# if(len(left) > 1):
# left = merge_split(left)
# if(len(right) > 1):
# right = merge_split(right)
# return [left, right]
# def merge (left, right, result) :
# if left[0] < right[0]:
# result.append(left[0])
# else:
# result.append(right[0])
# return result
def mergesort(aList):
return _mergesort( aList, 0, len( aList ) - 1 )
def _mergesort( aList, first, last ):
# break problem into smaller structurally identical pieces
mid = ( first + last ) / 2
if first < last:
_mergesort( aList, first, mid )
_mergesort( aList, mid + 1, last )
# merge solved pieces to get solution to original problem
a, f, l = 0, first, mid + 1
tmp = [None] * ( last - first + 1 )
while f <= mid and l <= last:
if aList[f] < aList[l] :
tmp[a] = aList[f]
f += 1
else:
tmp[a] = aList[l]
l += 1
a += 1
if f <= mid :
tmp[a:] = aList[f:mid + 1]
if l <= last:
tmp[a:] = aList[l:last + 1]
a = 0
while first <= last:
aList[first] = tmp[a]
first += 1
a += 1
return aList
# second_largest(A)
print mergesort(A)
|
<commit_before><commit_msg>Add solution to hamming numbers kata<commit_after>
|
# You are given as input an unsorted array of n distinct numbers,
# where n is a power of 2.
# Give an algorithm that identifies the second-largest number in the array,
# and that uses at most n + log2n - 2 comparisons.
A = [4, 512, 8, 64, 16, 2, 32, 256] # Uses at most 9 comparisons
# def second_largest (Array):
# divided = merge_split(Array)
# print divided
# def merge_split (Array) :
# left = Array[:len(Array)/2]
# right = Array[len(Array)/2:]
# if(len(left) > 1):
# left = merge_split(left)
# if(len(right) > 1):
# right = merge_split(right)
# return [left, right]
# def merge (left, right, result) :
# if left[0] < right[0]:
# result.append(left[0])
# else:
# result.append(right[0])
# return result
def mergesort(aList):
return _mergesort( aList, 0, len( aList ) - 1 )
def _mergesort( aList, first, last ):
# break problem into smaller structurally identical pieces
mid = ( first + last ) / 2
if first < last:
_mergesort( aList, first, mid )
_mergesort( aList, mid + 1, last )
# merge solved pieces to get solution to original problem
a, f, l = 0, first, mid + 1
tmp = [None] * ( last - first + 1 )
while f <= mid and l <= last:
if aList[f] < aList[l] :
tmp[a] = aList[f]
f += 1
else:
tmp[a] = aList[l]
l += 1
a += 1
if f <= mid :
tmp[a:] = aList[f:mid + 1]
if l <= last:
tmp[a:] = aList[l:last + 1]
a = 0
while first <= last:
aList[first] = tmp[a]
first += 1
a += 1
return aList
# second_largest(A)
print mergesort(A)
|
Add solution to hamming numbers kata# You are given as input an unsorted array of n distinct numbers,
# where n is a power of 2.
# Give an algorithm that identifies the second-largest number in the array,
# and that uses at most n + log2n - 2 comparisons.
A = [4, 512, 8, 64, 16, 2, 32, 256] # Uses at most 9 comparisons
# def second_largest (Array):
# divided = merge_split(Array)
# print divided
# def merge_split (Array) :
# left = Array[:len(Array)/2]
# right = Array[len(Array)/2:]
# if(len(left) > 1):
# left = merge_split(left)
# if(len(right) > 1):
# right = merge_split(right)
# return [left, right]
# def merge (left, right, result) :
# if left[0] < right[0]:
# result.append(left[0])
# else:
# result.append(right[0])
# return result
def mergesort(aList):
return _mergesort( aList, 0, len( aList ) - 1 )
def _mergesort( aList, first, last ):
# break problem into smaller structurally identical pieces
mid = ( first + last ) / 2
if first < last:
_mergesort( aList, first, mid )
_mergesort( aList, mid + 1, last )
# merge solved pieces to get solution to original problem
a, f, l = 0, first, mid + 1
tmp = [None] * ( last - first + 1 )
while f <= mid and l <= last:
if aList[f] < aList[l] :
tmp[a] = aList[f]
f += 1
else:
tmp[a] = aList[l]
l += 1
a += 1
if f <= mid :
tmp[a:] = aList[f:mid + 1]
if l <= last:
tmp[a:] = aList[l:last + 1]
a = 0
while first <= last:
aList[first] = tmp[a]
first += 1
a += 1
return aList
# second_largest(A)
print mergesort(A)
|
<commit_before><commit_msg>Add solution to hamming numbers kata<commit_after># You are given as input an unsorted array of n distinct numbers,
# where n is a power of 2.
# Give an algorithm that identifies the second-largest number in the array,
# and that uses at most n + log2n - 2 comparisons.
A = [4, 512, 8, 64, 16, 2, 32, 256] # Uses at most 9 comparisons
# def second_largest (Array):
# divided = merge_split(Array)
# print divided
# def merge_split (Array) :
# left = Array[:len(Array)/2]
# right = Array[len(Array)/2:]
# if(len(left) > 1):
# left = merge_split(left)
# if(len(right) > 1):
# right = merge_split(right)
# return [left, right]
# def merge (left, right, result) :
# if left[0] < right[0]:
# result.append(left[0])
# else:
# result.append(right[0])
# return result
def mergesort(aList):
return _mergesort( aList, 0, len( aList ) - 1 )
def _mergesort( aList, first, last ):
# break problem into smaller structurally identical pieces
mid = ( first + last ) / 2
if first < last:
_mergesort( aList, first, mid )
_mergesort( aList, mid + 1, last )
# merge solved pieces to get solution to original problem
a, f, l = 0, first, mid + 1
tmp = [None] * ( last - first + 1 )
while f <= mid and l <= last:
if aList[f] < aList[l] :
tmp[a] = aList[f]
f += 1
else:
tmp[a] = aList[l]
l += 1
a += 1
if f <= mid :
tmp[a:] = aList[f:mid + 1]
if l <= last:
tmp[a:] = aList[l:last + 1]
a = 0
while first <= last:
aList[first] = tmp[a]
first += 1
a += 1
return aList
# second_largest(A)
print mergesort(A)
|
|
8137d5ae66dc6f6d08a674466a5d7c84efb12b6c
|
pyrsistent/typing.py
|
pyrsistent/typing.py
|
import six
class SubscriptableType(type):
def __getitem__(self, key):
return self
@six.add_metaclass(SubscriptableType)
class CheckedPMap(object):
pass
@six.add_metaclass(SubscriptableType)
class CheckedPSet(object):
pass
@six.add_metaclass(SubscriptableType)
class CheckedPVector(object):
pass
@six.add_metaclass(SubscriptableType)
class PBag(object):
pass
@six.add_metaclass(SubscriptableType)
class PDeque(object):
pass
@six.add_metaclass(SubscriptableType)
class PList(object):
pass
@six.add_metaclass(SubscriptableType)
class PMap(object):
pass
@six.add_metaclass(SubscriptableType)
class PSet(object):
pass
@six.add_metaclass(SubscriptableType)
class PVector(object):
pass
|
Add shell class for type checkers
|
Add shell class for type checkers
|
Python
|
mit
|
tobgu/pyrsistent,tobgu/pyrsistent,tobgu/pyrsistent
|
Add shell class for type checkers
|
import six
class SubscriptableType(type):
def __getitem__(self, key):
return self
@six.add_metaclass(SubscriptableType)
class CheckedPMap(object):
pass
@six.add_metaclass(SubscriptableType)
class CheckedPSet(object):
pass
@six.add_metaclass(SubscriptableType)
class CheckedPVector(object):
pass
@six.add_metaclass(SubscriptableType)
class PBag(object):
pass
@six.add_metaclass(SubscriptableType)
class PDeque(object):
pass
@six.add_metaclass(SubscriptableType)
class PList(object):
pass
@six.add_metaclass(SubscriptableType)
class PMap(object):
pass
@six.add_metaclass(SubscriptableType)
class PSet(object):
pass
@six.add_metaclass(SubscriptableType)
class PVector(object):
pass
|
<commit_before><commit_msg>Add shell class for type checkers<commit_after>
|
import six
class SubscriptableType(type):
def __getitem__(self, key):
return self
@six.add_metaclass(SubscriptableType)
class CheckedPMap(object):
pass
@six.add_metaclass(SubscriptableType)
class CheckedPSet(object):
pass
@six.add_metaclass(SubscriptableType)
class CheckedPVector(object):
pass
@six.add_metaclass(SubscriptableType)
class PBag(object):
pass
@six.add_metaclass(SubscriptableType)
class PDeque(object):
pass
@six.add_metaclass(SubscriptableType)
class PList(object):
pass
@six.add_metaclass(SubscriptableType)
class PMap(object):
pass
@six.add_metaclass(SubscriptableType)
class PSet(object):
pass
@six.add_metaclass(SubscriptableType)
class PVector(object):
pass
|
Add shell class for type checkersimport six
class SubscriptableType(type):
def __getitem__(self, key):
return self
@six.add_metaclass(SubscriptableType)
class CheckedPMap(object):
pass
@six.add_metaclass(SubscriptableType)
class CheckedPSet(object):
pass
@six.add_metaclass(SubscriptableType)
class CheckedPVector(object):
pass
@six.add_metaclass(SubscriptableType)
class PBag(object):
pass
@six.add_metaclass(SubscriptableType)
class PDeque(object):
pass
@six.add_metaclass(SubscriptableType)
class PList(object):
pass
@six.add_metaclass(SubscriptableType)
class PMap(object):
pass
@six.add_metaclass(SubscriptableType)
class PSet(object):
pass
@six.add_metaclass(SubscriptableType)
class PVector(object):
pass
|
<commit_before><commit_msg>Add shell class for type checkers<commit_after>import six
class SubscriptableType(type):
def __getitem__(self, key):
return self
@six.add_metaclass(SubscriptableType)
class CheckedPMap(object):
pass
@six.add_metaclass(SubscriptableType)
class CheckedPSet(object):
pass
@six.add_metaclass(SubscriptableType)
class CheckedPVector(object):
pass
@six.add_metaclass(SubscriptableType)
class PBag(object):
pass
@six.add_metaclass(SubscriptableType)
class PDeque(object):
pass
@six.add_metaclass(SubscriptableType)
class PList(object):
pass
@six.add_metaclass(SubscriptableType)
class PMap(object):
pass
@six.add_metaclass(SubscriptableType)
class PSet(object):
pass
@six.add_metaclass(SubscriptableType)
class PVector(object):
pass
|
|
f6e384886d679d238b42e0fccd2185a07670353f
|
UM/VersionUpgrade.py
|
UM/VersionUpgrade.py
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Logger import Logger
from UM.PluginObject import PluginObject
## A type of plug-in that upgrades the preferences from an old file format to
# a newer one.
#
# Each version upgrade plug-in can convert machine instances, preferences and
# profiles from one version to one other version. Which versions that are is
# specified in the metadata of the plug-in.
def VersionUpgrade(PluginObject):
## Initialises a version upgrade plugin instance.
def __init__(self):
super().__init__(self)
## Upgrades a machine instance file from one file format to another.
#
# This parses the serialised data of a machine instance and converts it to
# a serialised form of the new file format.
#
# \param serialised A machine instance, serialised in an old file format.
# \return A machine instance, serialised in a newer file format.
def upgradeMachineInstance(self, serialised):
Logger.log("w", "This version upgrade plug-in defines no way to upgrade machine instances.") #A subclass should implement this.
## Upgrades a preferences file from one file format to another.
#
# This parses the serialised data of a preferences file and converts it to
# a serialised form of the new file format.
#
# \param serialised A preferences file, serialised in an old file format.
# \return A preferences file, serialised in a newer file format.
def upgradePreferences(self, serialised):
Logger.log("w", "This version upgrade plug-in defines no way to upgrade preferences.") #A subclass should implement this.
## Upgrades a profile from one file format to another.
#
# This parses the serialised data of a profile and converts it to a
# serialised form of the new file format.
#
# \param serialised A profile, serialised in an old file format.
# \return A profile, serialised in a newer file format.
def upgradeProfile(self, serialised):
Logger.log("w", "This version upgrade plug-in defines no way to upgrade profiles.") #A subclass should implement this.
|
Add version upgrade plug-in object
|
Add version upgrade plug-in object
These are for plug-ins that upgrade preference files to newer versions of the application.
Contributes to issue CURA-844.
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
Add version upgrade plug-in object
These are for plug-ins that upgrade preference files to newer versions of the application.
Contributes to issue CURA-844.
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Logger import Logger
from UM.PluginObject import PluginObject
## A type of plug-in that upgrades the preferences from an old file format to
# a newer one.
#
# Each version upgrade plug-in can convert machine instances, preferences and
# profiles from one version to one other version. Which versions that are is
# specified in the metadata of the plug-in.
def VersionUpgrade(PluginObject):
## Initialises a version upgrade plugin instance.
def __init__(self):
super().__init__(self)
## Upgrades a machine instance file from one file format to another.
#
# This parses the serialised data of a machine instance and converts it to
# a serialised form of the new file format.
#
# \param serialised A machine instance, serialised in an old file format.
# \return A machine instance, serialised in a newer file format.
def upgradeMachineInstance(self, serialised):
Logger.log("w", "This version upgrade plug-in defines no way to upgrade machine instances.") #A subclass should implement this.
## Upgrades a preferences file from one file format to another.
#
# This parses the serialised data of a preferences file and converts it to
# a serialised form of the new file format.
#
# \param serialised A preferences file, serialised in an old file format.
# \return A preferences file, serialised in a newer file format.
def upgradePreferences(self, serialised):
Logger.log("w", "This version upgrade plug-in defines no way to upgrade preferences.") #A subclass should implement this.
## Upgrades a profile from one file format to another.
#
# This parses the serialised data of a profile and converts it to a
# serialised form of the new file format.
#
# \param serialised A profile, serialised in an old file format.
# \return A profile, serialised in a newer file format.
def upgradeProfile(self, serialised):
Logger.log("w", "This version upgrade plug-in defines no way to upgrade profiles.") #A subclass should implement this.
|
<commit_before><commit_msg>Add version upgrade plug-in object
These are for plug-ins that upgrade preference files to newer versions of the application.
Contributes to issue CURA-844.<commit_after>
|
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Logger import Logger
from UM.PluginObject import PluginObject
## A type of plug-in that upgrades the preferences from an old file format to
# a newer one.
#
# Each version upgrade plug-in can convert machine instances, preferences and
# profiles from one version to one other version. Which versions that are is
# specified in the metadata of the plug-in.
def VersionUpgrade(PluginObject):
## Initialises a version upgrade plugin instance.
def __init__(self):
super().__init__(self)
## Upgrades a machine instance file from one file format to another.
#
# This parses the serialised data of a machine instance and converts it to
# a serialised form of the new file format.
#
# \param serialised A machine instance, serialised in an old file format.
# \return A machine instance, serialised in a newer file format.
def upgradeMachineInstance(self, serialised):
Logger.log("w", "This version upgrade plug-in defines no way to upgrade machine instances.") #A subclass should implement this.
## Upgrades a preferences file from one file format to another.
#
# This parses the serialised data of a preferences file and converts it to
# a serialised form of the new file format.
#
# \param serialised A preferences file, serialised in an old file format.
# \return A preferences file, serialised in a newer file format.
def upgradePreferences(self, serialised):
Logger.log("w", "This version upgrade plug-in defines no way to upgrade preferences.") #A subclass should implement this.
## Upgrades a profile from one file format to another.
#
# This parses the serialised data of a profile and converts it to a
# serialised form of the new file format.
#
# \param serialised A profile, serialised in an old file format.
# \return A profile, serialised in a newer file format.
def upgradeProfile(self, serialised):
Logger.log("w", "This version upgrade plug-in defines no way to upgrade profiles.") #A subclass should implement this.
|
Add version upgrade plug-in object
These are for plug-ins that upgrade preference files to newer versions of the application.
Contributes to issue CURA-844.# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Logger import Logger
from UM.PluginObject import PluginObject
## A type of plug-in that upgrades the preferences from an old file format to
# a newer one.
#
# Each version upgrade plug-in can convert machine instances, preferences and
# profiles from one version to one other version. Which versions that are is
# specified in the metadata of the plug-in.
def VersionUpgrade(PluginObject):
## Initialises a version upgrade plugin instance.
def __init__(self):
super().__init__(self)
## Upgrades a machine instance file from one file format to another.
#
# This parses the serialised data of a machine instance and converts it to
# a serialised form of the new file format.
#
# \param serialised A machine instance, serialised in an old file format.
# \return A machine instance, serialised in a newer file format.
def upgradeMachineInstance(self, serialised):
Logger.log("w", "This version upgrade plug-in defines no way to upgrade machine instances.") #A subclass should implement this.
## Upgrades a preferences file from one file format to another.
#
# This parses the serialised data of a preferences file and converts it to
# a serialised form of the new file format.
#
# \param serialised A preferences file, serialised in an old file format.
# \return A preferences file, serialised in a newer file format.
def upgradePreferences(self, serialised):
Logger.log("w", "This version upgrade plug-in defines no way to upgrade preferences.") #A subclass should implement this.
## Upgrades a profile from one file format to another.
#
# This parses the serialised data of a profile and converts it to a
# serialised form of the new file format.
#
# \param serialised A profile, serialised in an old file format.
# \return A profile, serialised in a newer file format.
def upgradeProfile(self, serialised):
Logger.log("w", "This version upgrade plug-in defines no way to upgrade profiles.") #A subclass should implement this.
|
<commit_before><commit_msg>Add version upgrade plug-in object
These are for plug-ins that upgrade preference files to newer versions of the application.
Contributes to issue CURA-844.<commit_after># Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from UM.Logger import Logger
from UM.PluginObject import PluginObject
## A type of plug-in that upgrades the preferences from an old file format to
# a newer one.
#
# Each version upgrade plug-in can convert machine instances, preferences and
# profiles from one version to one other version. Which versions that are is
# specified in the metadata of the plug-in.
def VersionUpgrade(PluginObject):
## Initialises a version upgrade plugin instance.
def __init__(self):
super().__init__(self)
## Upgrades a machine instance file from one file format to another.
#
# This parses the serialised data of a machine instance and converts it to
# a serialised form of the new file format.
#
# \param serialised A machine instance, serialised in an old file format.
# \return A machine instance, serialised in a newer file format.
def upgradeMachineInstance(self, serialised):
Logger.log("w", "This version upgrade plug-in defines no way to upgrade machine instances.") #A subclass should implement this.
## Upgrades a preferences file from one file format to another.
#
# This parses the serialised data of a preferences file and converts it to
# a serialised form of the new file format.
#
# \param serialised A preferences file, serialised in an old file format.
# \return A preferences file, serialised in a newer file format.
def upgradePreferences(self, serialised):
Logger.log("w", "This version upgrade plug-in defines no way to upgrade preferences.") #A subclass should implement this.
## Upgrades a profile from one file format to another.
#
# This parses the serialised data of a profile and converts it to a
# serialised form of the new file format.
#
# \param serialised A profile, serialised in an old file format.
# \return A profile, serialised in a newer file format.
def upgradeProfile(self, serialised):
Logger.log("w", "This version upgrade plug-in defines no way to upgrade profiles.") #A subclass should implement this.
|
|
d729656703940304555a2d638d02c71f5a872434
|
tests/unit/output/test_table_out.py
|
tests/unit/output/test_table_out.py
|
# -*- coding: utf-8 -*-
'''
unittests for table outputter
'''
# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase
# Import Salt Libs
import salt.output.table_out as table_out
import salt.utils.stringutils
class TableTestCase(TestCase, LoaderModuleMockMixin):
'''
Test cases for salt.output.table_out
'''
def setup_loader_modules(self):
return {table_out: {}}
# The test data should include unicode chars, and in Python 2 there should
# be an example both of an encoded str type and an actual unicode type.
# Since unicode_literals is imported, we will achieve the former using
# salt.utils.stringutils.to_str and the latter by simply using a string
# literal.
data = [
{'Food': salt.utils.stringutils.to_str('яйца, бекон, колбаса и спам'),
'Price': 5.99},
{'Food': 'спам, спам, спам, яйца и спам',
'Price': 3.99},
]
def test_output(self):
ret = table_out.output(self.data)
self.assertEqual(
ret,
(' -----------------------------------------\n'
' | Food | Price |\n'
' -----------------------------------------\n'
' | яйца, бекон, колбаса и спам | 5.99 |\n'
' -----------------------------------------\n'
' | спам, спам, спам, яйца и спам | 3.99 |\n'
' -----------------------------------------')
)
|
Add unit tests for table outputter
|
Add unit tests for table outputter
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
Add unit tests for table outputter
|
# -*- coding: utf-8 -*-
'''
unittests for table outputter
'''
# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase
# Import Salt Libs
import salt.output.table_out as table_out
import salt.utils.stringutils
class TableTestCase(TestCase, LoaderModuleMockMixin):
'''
Test cases for salt.output.table_out
'''
def setup_loader_modules(self):
return {table_out: {}}
# The test data should include unicode chars, and in Python 2 there should
# be an example both of an encoded str type and an actual unicode type.
# Since unicode_literals is imported, we will achieve the former using
# salt.utils.stringutils.to_str and the latter by simply using a string
# literal.
data = [
{'Food': salt.utils.stringutils.to_str('яйца, бекон, колбаса и спам'),
'Price': 5.99},
{'Food': 'спам, спам, спам, яйца и спам',
'Price': 3.99},
]
def test_output(self):
ret = table_out.output(self.data)
self.assertEqual(
ret,
(' -----------------------------------------\n'
' | Food | Price |\n'
' -----------------------------------------\n'
' | яйца, бекон, колбаса и спам | 5.99 |\n'
' -----------------------------------------\n'
' | спам, спам, спам, яйца и спам | 3.99 |\n'
' -----------------------------------------')
)
|
<commit_before><commit_msg>Add unit tests for table outputter<commit_after>
|
# -*- coding: utf-8 -*-
'''
unittests for table outputter
'''
# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase
# Import Salt Libs
import salt.output.table_out as table_out
import salt.utils.stringutils
class TableTestCase(TestCase, LoaderModuleMockMixin):
'''
Test cases for salt.output.table_out
'''
def setup_loader_modules(self):
return {table_out: {}}
# The test data should include unicode chars, and in Python 2 there should
# be an example both of an encoded str type and an actual unicode type.
# Since unicode_literals is imported, we will achieve the former using
# salt.utils.stringutils.to_str and the latter by simply using a string
# literal.
data = [
{'Food': salt.utils.stringutils.to_str('яйца, бекон, колбаса и спам'),
'Price': 5.99},
{'Food': 'спам, спам, спам, яйца и спам',
'Price': 3.99},
]
def test_output(self):
ret = table_out.output(self.data)
self.assertEqual(
ret,
(' -----------------------------------------\n'
' | Food | Price |\n'
' -----------------------------------------\n'
' | яйца, бекон, колбаса и спам | 5.99 |\n'
' -----------------------------------------\n'
' | спам, спам, спам, яйца и спам | 3.99 |\n'
' -----------------------------------------')
)
|
Add unit tests for table outputter# -*- coding: utf-8 -*-
'''
unittests for table outputter
'''
# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase
# Import Salt Libs
import salt.output.table_out as table_out
import salt.utils.stringutils
class TableTestCase(TestCase, LoaderModuleMockMixin):
'''
Test cases for salt.output.table_out
'''
def setup_loader_modules(self):
return {table_out: {}}
# The test data should include unicode chars, and in Python 2 there should
# be an example both of an encoded str type and an actual unicode type.
# Since unicode_literals is imported, we will achieve the former using
# salt.utils.stringutils.to_str and the latter by simply using a string
# literal.
data = [
{'Food': salt.utils.stringutils.to_str('яйца, бекон, колбаса и спам'),
'Price': 5.99},
{'Food': 'спам, спам, спам, яйца и спам',
'Price': 3.99},
]
def test_output(self):
ret = table_out.output(self.data)
self.assertEqual(
ret,
(' -----------------------------------------\n'
' | Food | Price |\n'
' -----------------------------------------\n'
' | яйца, бекон, колбаса и спам | 5.99 |\n'
' -----------------------------------------\n'
' | спам, спам, спам, яйца и спам | 3.99 |\n'
' -----------------------------------------')
)
|
<commit_before><commit_msg>Add unit tests for table outputter<commit_after># -*- coding: utf-8 -*-
'''
unittests for table outputter
'''
# Import Python Libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Testing Libs
from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase
# Import Salt Libs
import salt.output.table_out as table_out
import salt.utils.stringutils
class TableTestCase(TestCase, LoaderModuleMockMixin):
'''
Test cases for salt.output.table_out
'''
def setup_loader_modules(self):
return {table_out: {}}
# The test data should include unicode chars, and in Python 2 there should
# be an example both of an encoded str type and an actual unicode type.
# Since unicode_literals is imported, we will achieve the former using
# salt.utils.stringutils.to_str and the latter by simply using a string
# literal.
data = [
{'Food': salt.utils.stringutils.to_str('яйца, бекон, колбаса и спам'),
'Price': 5.99},
{'Food': 'спам, спам, спам, яйца и спам',
'Price': 3.99},
]
def test_output(self):
ret = table_out.output(self.data)
self.assertEqual(
ret,
(' -----------------------------------------\n'
' | Food | Price |\n'
' -----------------------------------------\n'
' | яйца, бекон, колбаса и спам | 5.99 |\n'
' -----------------------------------------\n'
' | спам, спам, спам, яйца и спам | 3.99 |\n'
' -----------------------------------------')
)
|
|
c64b2e468f19b96c49d2a79e8f9165beb7ab55a2
|
apps/competition/migrations/0015_auto_20190308_0215.py
|
apps/competition/migrations/0015_auto_20190308_0215.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2019-03-08 02:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('competition', '0014_auto_20180910_1931'),
]
operations = [
migrations.AlterField(
model_name='competition',
name='require_alias',
field=models.BooleanField(default=False, help_text=b'If checked, players will need to register an alias for the Activity that the competition belongs to.', verbose_name=b'require alias'),
),
]
|
Add migration that has been pending for a long time
|
Add migration that has been pending for a long time
|
Python
|
mit
|
CasualGaming/studlan,dotKom/studlan,CasualGaming/studlan,CasualGaming/studlan,dotKom/studlan,CasualGaming/studlan,dotKom/studlan,dotKom/studlan
|
Add migration that has been pending for a long time
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2019-03-08 02:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('competition', '0014_auto_20180910_1931'),
]
operations = [
migrations.AlterField(
model_name='competition',
name='require_alias',
field=models.BooleanField(default=False, help_text=b'If checked, players will need to register an alias for the Activity that the competition belongs to.', verbose_name=b'require alias'),
),
]
|
<commit_before><commit_msg>Add migration that has been pending for a long time<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2019-03-08 02:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('competition', '0014_auto_20180910_1931'),
]
operations = [
migrations.AlterField(
model_name='competition',
name='require_alias',
field=models.BooleanField(default=False, help_text=b'If checked, players will need to register an alias for the Activity that the competition belongs to.', verbose_name=b'require alias'),
),
]
|
Add migration that has been pending for a long time# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2019-03-08 02:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('competition', '0014_auto_20180910_1931'),
]
operations = [
migrations.AlterField(
model_name='competition',
name='require_alias',
field=models.BooleanField(default=False, help_text=b'If checked, players will need to register an alias for the Activity that the competition belongs to.', verbose_name=b'require alias'),
),
]
|
<commit_before><commit_msg>Add migration that has been pending for a long time<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2019-03-08 02:15
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('competition', '0014_auto_20180910_1931'),
]
operations = [
migrations.AlterField(
model_name='competition',
name='require_alias',
field=models.BooleanField(default=False, help_text=b'If checked, players will need to register an alias for the Activity that the competition belongs to.', verbose_name=b'require alias'),
),
]
|
|
3c8f25edfe6fb87c971fd1d60ee7cf0e2d18f36b
|
sahgutils/mpl_helpers.py
|
sahgutils/mpl_helpers.py
|
"""
A collection of helper functions to aid the construction of custom
figures using Matplotlib and Basemap.
"""
from matplotlib.colors import ListedColormap
from _color_brewer import cdict
def _search_key(cmap_name):
cat_range = range(3, 13).reverse()
for cat in cat_range:
pal_name = '_%s_cat%s_data' % (cmap_name, cat)
if pal_name in cdict:
break
return pal_name
def categorical_cmap(cmap_name, num_cat=None, reverse=False):
"""
Construct a Matplotlib ListedColormap using ColorBrewer palettes.
This function returns a Matplotlib ListedColormap using the color
specifications from the ColorBrewer (http://colorbrewer.org)
palettes.
Parameters
----------
cmap_name : string
The name of the ColorBrewer palette to use for constructing
the ListedColormap e.g. 'Purples', 'RdBu' etc.
num_cat : int, optional
The number of distinct colors (categories) to use in creating
the colourmap. If `num_cat` is unspecified or larger than the
maximum size of the colourmap, then a colormap with the
maximum number of categories is returned. The number of
catergories in the returned `cmap` can be determined using
`cmap.N`.
reverse : bool, optional
Whether to reverse the entries in the colormap. Default is
False.
Returns
-------
cmap : matplotlib.colors.ListedColormap
A ListedColormap constructed using the given parameters is
returned.
"""
if num_cat is not None:
pal_name = '_%s_cat%s_data' % (cmap_name, int(num_cat))
if pal_name not in cdict:
pal_name = _search_key(cmap_name)
else:
pal_name = _search_key(cmap_name)
clist = cdict[pal_name]
if reverse is True:
clist.reverse()
cmap = ListedColormap(clist)
return cmap
|
Add a Matplotlib helper functions module
|
ENH: Add a Matplotlib helper functions module
* So far this only contains a function to create a ListedColormap from
ColorBrewer palette specs.
|
Python
|
bsd-3-clause
|
sahg/SAHGutils
|
ENH: Add a Matplotlib helper functions module
* So far this only contains a function to create a ListedColormap from
ColorBrewer palette specs.
|
"""
A collection of helper functions to aid the construction of custom
figures using Matplotlib and Basemap.
"""
from matplotlib.colors import ListedColormap
from _color_brewer import cdict
def _search_key(cmap_name):
cat_range = range(3, 13).reverse()
for cat in cat_range:
pal_name = '_%s_cat%s_data' % (cmap_name, cat)
if pal_name in cdict:
break
return pal_name
def categorical_cmap(cmap_name, num_cat=None, reverse=False):
"""
Construct a Matplotlib ListedColormap using ColorBrewer palettes.
This function returns a Matplotlib ListedColormap using the color
specifications from the ColorBrewer (http://colorbrewer.org)
palettes.
Parameters
----------
cmap_name : string
The name of the ColorBrewer palette to use for constructing
the ListedColormap e.g. 'Purples', 'RdBu' etc.
num_cat : int, optional
The number of distinct colors (categories) to use in creating
the colourmap. If `num_cat` is unspecified or larger than the
maximum size of the colourmap, then a colormap with the
maximum number of categories is returned. The number of
catergories in the returned `cmap` can be determined using
`cmap.N`.
reverse : bool, optional
Whether to reverse the entries in the colormap. Default is
False.
Returns
-------
cmap : matplotlib.colors.ListedColormap
A ListedColormap constructed using the given parameters is
returned.
"""
if num_cat is not None:
pal_name = '_%s_cat%s_data' % (cmap_name, int(num_cat))
if pal_name not in cdict:
pal_name = _search_key(cmap_name)
else:
pal_name = _search_key(cmap_name)
clist = cdict[pal_name]
if reverse is True:
clist.reverse()
cmap = ListedColormap(clist)
return cmap
|
<commit_before><commit_msg>ENH: Add a Matplotlib helper functions module
* So far this only contains a function to create a ListedColormap from
ColorBrewer palette specs.<commit_after>
|
"""
A collection of helper functions to aid the construction of custom
figures using Matplotlib and Basemap.
"""
from matplotlib.colors import ListedColormap
from _color_brewer import cdict
def _search_key(cmap_name):
cat_range = range(3, 13).reverse()
for cat in cat_range:
pal_name = '_%s_cat%s_data' % (cmap_name, cat)
if pal_name in cdict:
break
return pal_name
def categorical_cmap(cmap_name, num_cat=None, reverse=False):
"""
Construct a Matplotlib ListedColormap using ColorBrewer palettes.
This function returns a Matplotlib ListedColormap using the color
specifications from the ColorBrewer (http://colorbrewer.org)
palettes.
Parameters
----------
cmap_name : string
The name of the ColorBrewer palette to use for constructing
the ListedColormap e.g. 'Purples', 'RdBu' etc.
num_cat : int, optional
The number of distinct colors (categories) to use in creating
the colourmap. If `num_cat` is unspecified or larger than the
maximum size of the colourmap, then a colormap with the
maximum number of categories is returned. The number of
catergories in the returned `cmap` can be determined using
`cmap.N`.
reverse : bool, optional
Whether to reverse the entries in the colormap. Default is
False.
Returns
-------
cmap : matplotlib.colors.ListedColormap
A ListedColormap constructed using the given parameters is
returned.
"""
if num_cat is not None:
pal_name = '_%s_cat%s_data' % (cmap_name, int(num_cat))
if pal_name not in cdict:
pal_name = _search_key(cmap_name)
else:
pal_name = _search_key(cmap_name)
clist = cdict[pal_name]
if reverse is True:
clist.reverse()
cmap = ListedColormap(clist)
return cmap
|
ENH: Add a Matplotlib helper functions module
* So far this only contains a function to create a ListedColormap from
ColorBrewer palette specs."""
A collection of helper functions to aid the construction of custom
figures using Matplotlib and Basemap.
"""
from matplotlib.colors import ListedColormap
from _color_brewer import cdict
def _search_key(cmap_name):
cat_range = range(3, 13).reverse()
for cat in cat_range:
pal_name = '_%s_cat%s_data' % (cmap_name, cat)
if pal_name in cdict:
break
return pal_name
def categorical_cmap(cmap_name, num_cat=None, reverse=False):
"""
Construct a Matplotlib ListedColormap using ColorBrewer palettes.
This function returns a Matplotlib ListedColormap using the color
specifications from the ColorBrewer (http://colorbrewer.org)
palettes.
Parameters
----------
cmap_name : string
The name of the ColorBrewer palette to use for constructing
the ListedColormap e.g. 'Purples', 'RdBu' etc.
num_cat : int, optional
The number of distinct colors (categories) to use in creating
the colourmap. If `num_cat` is unspecified or larger than the
maximum size of the colourmap, then a colormap with the
maximum number of categories is returned. The number of
catergories in the returned `cmap` can be determined using
`cmap.N`.
reverse : bool, optional
Whether to reverse the entries in the colormap. Default is
False.
Returns
-------
cmap : matplotlib.colors.ListedColormap
A ListedColormap constructed using the given parameters is
returned.
"""
if num_cat is not None:
pal_name = '_%s_cat%s_data' % (cmap_name, int(num_cat))
if pal_name not in cdict:
pal_name = _search_key(cmap_name)
else:
pal_name = _search_key(cmap_name)
clist = cdict[pal_name]
if reverse is True:
clist.reverse()
cmap = ListedColormap(clist)
return cmap
|
<commit_before><commit_msg>ENH: Add a Matplotlib helper functions module
* So far this only contains a function to create a ListedColormap from
ColorBrewer palette specs.<commit_after>"""
A collection of helper functions to aid the construction of custom
figures using Matplotlib and Basemap.
"""
from matplotlib.colors import ListedColormap
from _color_brewer import cdict
def _search_key(cmap_name):
cat_range = range(3, 13).reverse()
for cat in cat_range:
pal_name = '_%s_cat%s_data' % (cmap_name, cat)
if pal_name in cdict:
break
return pal_name
def categorical_cmap(cmap_name, num_cat=None, reverse=False):
"""
Construct a Matplotlib ListedColormap using ColorBrewer palettes.
This function returns a Matplotlib ListedColormap using the color
specifications from the ColorBrewer (http://colorbrewer.org)
palettes.
Parameters
----------
cmap_name : string
The name of the ColorBrewer palette to use for constructing
the ListedColormap e.g. 'Purples', 'RdBu' etc.
num_cat : int, optional
The number of distinct colors (categories) to use in creating
the colourmap. If `num_cat` is unspecified or larger than the
maximum size of the colourmap, then a colormap with the
maximum number of categories is returned. The number of
catergories in the returned `cmap` can be determined using
`cmap.N`.
reverse : bool, optional
Whether to reverse the entries in the colormap. Default is
False.
Returns
-------
cmap : matplotlib.colors.ListedColormap
A ListedColormap constructed using the given parameters is
returned.
"""
if num_cat is not None:
pal_name = '_%s_cat%s_data' % (cmap_name, int(num_cat))
if pal_name not in cdict:
pal_name = _search_key(cmap_name)
else:
pal_name = _search_key(cmap_name)
clist = cdict[pal_name]
if reverse is True:
clist.reverse()
cmap = ListedColormap(clist)
return cmap
|
|
a046376067c7e3055dc327246660b161b505dce6
|
py/reverse-vowels-of-a-string.py
|
py/reverse-vowels-of-a-string.py
|
class Solution(object):
def reverseVowels(self, s):
"""
:type s: str
:rtype: str
"""
vowels = list(filter(lambda x:x in 'aeiouAEIOU', s))
ans = []
for c in s:
if c in 'aeiouAEIOU':
ans.append(vowels.pop())
else:
ans.append(c)
return ''.join(ans)
|
Add py solution for 345. Reverse Vowels of a String
|
Add py solution for 345. Reverse Vowels of a String
345. Reverse Vowels of a String: https://leetcode.com/problems/reverse-vowels-of-a-string/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 345. Reverse Vowels of a String
345. Reverse Vowels of a String: https://leetcode.com/problems/reverse-vowels-of-a-string/
|
class Solution(object):
def reverseVowels(self, s):
"""
:type s: str
:rtype: str
"""
vowels = list(filter(lambda x:x in 'aeiouAEIOU', s))
ans = []
for c in s:
if c in 'aeiouAEIOU':
ans.append(vowels.pop())
else:
ans.append(c)
return ''.join(ans)
|
<commit_before><commit_msg>Add py solution for 345. Reverse Vowels of a String
345. Reverse Vowels of a String: https://leetcode.com/problems/reverse-vowels-of-a-string/<commit_after>
|
class Solution(object):
def reverseVowels(self, s):
"""
:type s: str
:rtype: str
"""
vowels = list(filter(lambda x:x in 'aeiouAEIOU', s))
ans = []
for c in s:
if c in 'aeiouAEIOU':
ans.append(vowels.pop())
else:
ans.append(c)
return ''.join(ans)
|
Add py solution for 345. Reverse Vowels of a String
345. Reverse Vowels of a String: https://leetcode.com/problems/reverse-vowels-of-a-string/class Solution(object):
def reverseVowels(self, s):
"""
:type s: str
:rtype: str
"""
vowels = list(filter(lambda x:x in 'aeiouAEIOU', s))
ans = []
for c in s:
if c in 'aeiouAEIOU':
ans.append(vowels.pop())
else:
ans.append(c)
return ''.join(ans)
|
<commit_before><commit_msg>Add py solution for 345. Reverse Vowels of a String
345. Reverse Vowels of a String: https://leetcode.com/problems/reverse-vowels-of-a-string/<commit_after>class Solution(object):
def reverseVowels(self, s):
"""
:type s: str
:rtype: str
"""
vowels = list(filter(lambda x:x in 'aeiouAEIOU', s))
ans = []
for c in s:
if c in 'aeiouAEIOU':
ans.append(vowels.pop())
else:
ans.append(c)
return ''.join(ans)
|
|
d423c5cc0756f621660598a4df2e331c6e5d97db
|
new-em-subvolumes.py
|
new-em-subvolumes.py
|
# IPython log file
from gala import imio
import numpy as np
slices = [(slice(None), slice(None, 625), slice(None, 625)),
(slice(None), slice(None, 625), slice(625, None)),
(slice(None), slice(625, None), slice(None, 625)),
(slice(None), slice(625, None), slice(625, None))]
gt = imio.read_h5_stack('ground-truth.h5', group='bodies')
gts = [gt[s] for s in slices]
from skimage.measure import label
for i, vol in enumerate(gts):
fn = 'ground-truth-%i.lzf.h5' % i
vol_relabel = label(vol)
print(np.max(vol_relabel))
imio.write_h5_stack(vol.astype(np.uint16), fn, compression='lzf')
pr = imio.read_image_stack('membrane/*.tiff')
prs = [pr[s] for s in slices]
for i, vol in enumerate(prs):
fn = 'probabilities-%i.lzf.h5' % i
imio.write_h5_stack(vol.astype(np.uint8), fn, compression='lzf')
|
Add script used to subvolume new EM volumes
|
Add script used to subvolume new EM volumes
|
Python
|
bsd-3-clause
|
jni/gala-scripts
|
Add script used to subvolume new EM volumes
|
# IPython log file
from gala import imio
import numpy as np
slices = [(slice(None), slice(None, 625), slice(None, 625)),
(slice(None), slice(None, 625), slice(625, None)),
(slice(None), slice(625, None), slice(None, 625)),
(slice(None), slice(625, None), slice(625, None))]
gt = imio.read_h5_stack('ground-truth.h5', group='bodies')
gts = [gt[s] for s in slices]
from skimage.measure import label
for i, vol in enumerate(gts):
fn = 'ground-truth-%i.lzf.h5' % i
vol_relabel = label(vol)
print(np.max(vol_relabel))
imio.write_h5_stack(vol.astype(np.uint16), fn, compression='lzf')
pr = imio.read_image_stack('membrane/*.tiff')
prs = [pr[s] for s in slices]
for i, vol in enumerate(prs):
fn = 'probabilities-%i.lzf.h5' % i
imio.write_h5_stack(vol.astype(np.uint8), fn, compression='lzf')
|
<commit_before><commit_msg>Add script used to subvolume new EM volumes<commit_after>
|
# IPython log file
from gala import imio
import numpy as np
slices = [(slice(None), slice(None, 625), slice(None, 625)),
(slice(None), slice(None, 625), slice(625, None)),
(slice(None), slice(625, None), slice(None, 625)),
(slice(None), slice(625, None), slice(625, None))]
gt = imio.read_h5_stack('ground-truth.h5', group='bodies')
gts = [gt[s] for s in slices]
from skimage.measure import label
for i, vol in enumerate(gts):
fn = 'ground-truth-%i.lzf.h5' % i
vol_relabel = label(vol)
print(np.max(vol_relabel))
imio.write_h5_stack(vol.astype(np.uint16), fn, compression='lzf')
pr = imio.read_image_stack('membrane/*.tiff')
prs = [pr[s] for s in slices]
for i, vol in enumerate(prs):
fn = 'probabilities-%i.lzf.h5' % i
imio.write_h5_stack(vol.astype(np.uint8), fn, compression='lzf')
|
Add script used to subvolume new EM volumes# IPython log file
from gala import imio
import numpy as np
slices = [(slice(None), slice(None, 625), slice(None, 625)),
(slice(None), slice(None, 625), slice(625, None)),
(slice(None), slice(625, None), slice(None, 625)),
(slice(None), slice(625, None), slice(625, None))]
gt = imio.read_h5_stack('ground-truth.h5', group='bodies')
gts = [gt[s] for s in slices]
from skimage.measure import label
for i, vol in enumerate(gts):
fn = 'ground-truth-%i.lzf.h5' % i
vol_relabel = label(vol)
print(np.max(vol_relabel))
imio.write_h5_stack(vol.astype(np.uint16), fn, compression='lzf')
pr = imio.read_image_stack('membrane/*.tiff')
prs = [pr[s] for s in slices]
for i, vol in enumerate(prs):
fn = 'probabilities-%i.lzf.h5' % i
imio.write_h5_stack(vol.astype(np.uint8), fn, compression='lzf')
|
<commit_before><commit_msg>Add script used to subvolume new EM volumes<commit_after># IPython log file
from gala import imio
import numpy as np
slices = [(slice(None), slice(None, 625), slice(None, 625)),
(slice(None), slice(None, 625), slice(625, None)),
(slice(None), slice(625, None), slice(None, 625)),
(slice(None), slice(625, None), slice(625, None))]
gt = imio.read_h5_stack('ground-truth.h5', group='bodies')
gts = [gt[s] for s in slices]
from skimage.measure import label
for i, vol in enumerate(gts):
fn = 'ground-truth-%i.lzf.h5' % i
vol_relabel = label(vol)
print(np.max(vol_relabel))
imio.write_h5_stack(vol.astype(np.uint16), fn, compression='lzf')
pr = imio.read_image_stack('membrane/*.tiff')
prs = [pr[s] for s in slices]
for i, vol in enumerate(prs):
fn = 'probabilities-%i.lzf.h5' % i
imio.write_h5_stack(vol.astype(np.uint8), fn, compression='lzf')
|
|
bf8e308c9ebab2a2a8a14f9fff747d5a303f6f68
|
sympy/parsing/tests/test_sympy_parser.py
|
sympy/parsing/tests/test_sympy_parser.py
|
from sympy.parsing.sympy_parser import parse_expr
from sympy import *
def test_implicit_multiplication_application():
x = Symbol('x')
inputs = {
'2*x': 2 * x,
'3.00': Float(3),
'22/7': Rational(22, 7),
'2+3j': 2 + 3*I,
'exp(x)': exp(x),
'x!': factorial(x),
'Symbol("x").free_symbols': x.free_symbols,
"S('S(3).n(n=3)')": 3.00,
'factorint(12, visual=True)': Mul(
Pow(2, 2, evaluate=False),
Pow(3, 1, evaluate=False),
evaluate=False),
'Limit(sin(x), x, 0, dir="-")': Limit(sin(x), x, 0, dir='-'),
}
for text, result in inputs.items():
print text, parse_expr(text), result
assert(parse_expr(text) == result)
|
Add some tests for sympy_parser
|
Add some tests for sympy_parser
|
Python
|
bsd-3-clause
|
Mitchkoens/sympy,saurabhjn76/sympy,kmacinnis/sympy,pandeyadarsh/sympy,mafiya69/sympy,MechCoder/sympy,cccfran/sympy,liangjiaxing/sympy,hargup/sympy,mcdaniel67/sympy,jamesblunt/sympy,garvitr/sympy,ahhda/sympy,skidzo/sympy,Designist/sympy,hrashk/sympy,rahuldan/sympy,toolforger/sympy,grevutiu-gabriel/sympy,kaichogami/sympy,yashsharan/sympy,grevutiu-gabriel/sympy,kumarkrishna/sympy,kaushik94/sympy,yashsharan/sympy,lidavidm/sympy,ChristinaZografou/sympy,MechCoder/sympy,shikil/sympy,shipci/sympy,cccfran/sympy,Mitchkoens/sympy,Shaswat27/sympy,kevalds51/sympy,sahilshekhawat/sympy,Designist/sympy,vipulroxx/sympy,Titan-C/sympy,Titan-C/sympy,Vishluck/sympy,skidzo/sympy,VaibhavAgarwalVA/sympy,amitjamadagni/sympy,souravsingh/sympy,shikil/sympy,souravsingh/sympy,AunShiLord/sympy,yukoba/sympy,atreyv/sympy,dqnykamp/sympy,jaimahajan1997/sympy,moble/sympy,diofant/diofant,yukoba/sympy,jerli/sympy,ChristinaZografou/sympy,mcdaniel67/sympy,postvakje/sympy,iamutkarshtiwari/sympy,meghana1995/sympy,wanglongqi/sympy,Vishluck/sympy,beni55/sympy,saurabhjn76/sympy,jerli/sympy,maniteja123/sympy,pandeyadarsh/sympy,atreyv/sympy,drufat/sympy,mafiya69/sympy,debugger22/sympy,wyom/sympy,Curious72/sympy,Curious72/sympy,kmacinnis/sympy,saurabhjn76/sympy,beni55/sympy,liangjiaxing/sympy,aktech/sympy,sahilshekhawat/sympy,AkademieOlympia/sympy,sunny94/temp,Sumith1896/sympy,emon10005/sympy,postvakje/sympy,abhiii5459/sympy,drufat/sympy,toolforger/sympy,abloomston/sympy,AunShiLord/sympy,iamutkarshtiwari/sympy,jamesblunt/sympy,cswiercz/sympy,maniteja123/sympy,kaushik94/sympy,lindsayad/sympy,postvakje/sympy,asm666/sympy,farhaanbukhsh/sympy,debugger22/sympy,Davidjohnwilson/sympy,hargup/sympy,shipci/sympy,pandeyadarsh/sympy,emon10005/sympy,sahmed95/sympy,kevalds51/sympy,chaffra/sympy,souravsingh/sympy,sahmed95/sympy,wyom/sympy,Mitchkoens/sympy,cccfran/sympy,chaffra/sympy,Curious72/sympy,ga7g08/sympy,ga7g08/sympy,VaibhavAgarwalVA/sympy,sahmed95/sympy,pbrady/sympy,wanglongqi/sympy,asm666/sympy,emon10005/sympy,AkademieOlympia/sympy,moble/sympy,kumarkrishna/sympy,lidavidm/sympy,wanglongqi/sympy,pbrady/sympy,wyom/sympy,Gadal/sympy,lidavidm/sympy,Arafatk/sympy,iamutkarshtiwari/sympy,kumarkrishna/sympy,madan96/sympy,abloomston/sympy,VaibhavAgarwalVA/sympy,dqnykamp/sympy,jbbskinny/sympy,amitjamadagni/sympy,Arafatk/sympy,atreyv/sympy,rahuldan/sympy,abhiii5459/sympy,atsao72/sympy,skidzo/sympy,sampadsaha5/sympy,jamesblunt/sympy,farhaanbukhsh/sympy,aktech/sympy,sunny94/temp,drufat/sympy,meghana1995/sympy,Sumith1896/sympy,Gadal/sympy,MridulS/sympy,hrashk/sympy,dqnykamp/sympy,Gadal/sympy,cswiercz/sympy,abhiii5459/sympy,moble/sympy,sunny94/temp,shipci/sympy,MechCoder/sympy,sampadsaha5/sympy,pbrady/sympy,kaichogami/sympy,lindsayad/sympy,madan96/sympy,liangjiaxing/sympy,rahuldan/sympy,MridulS/sympy,MridulS/sympy,aktech/sympy,sahilshekhawat/sympy,Davidjohnwilson/sympy,vipulroxx/sympy,beni55/sympy,mcdaniel67/sympy,jaimahajan1997/sympy,mafiya69/sympy,meghana1995/sympy,kaichogami/sympy,sampadsaha5/sympy,ChristinaZografou/sympy,asm666/sympy,bukzor/sympy,jbbskinny/sympy,kevalds51/sympy,yukoba/sympy,garvitr/sympy,Shaswat27/sympy,atsao72/sympy,Titan-C/sympy,vipulroxx/sympy,garvitr/sympy,maniteja123/sympy,Davidjohnwilson/sympy,atsao72/sympy,Shaswat27/sympy,oliverlee/sympy,hargup/sympy,hrashk/sympy,abloomston/sympy,skirpichev/omg,AkademieOlympia/sympy,oliverlee/sympy,yashsharan/sympy,madan96/sympy,ga7g08/sympy,debugger22/sympy,jerli/sympy,farhaanbukhsh/sympy,grevutiu-gabriel/sympy,kmacinnis/sympy,kaushik94/sympy,chaffra/sympy,jbbskinny/sympy,AunShiLord/sympy,toolfor
ger/sympy,ahhda/sympy,Vishluck/sympy,ahhda/sympy,jaimahajan1997/sympy,bukzor/sympy,cswiercz/sympy,bukzor/sympy,Sumith1896/sympy,Designist/sympy,lindsayad/sympy,Arafatk/sympy,shikil/sympy,oliverlee/sympy
|
Add some tests for sympy_parser
|
from sympy.parsing.sympy_parser import parse_expr
from sympy import *
def test_implicit_multiplication_application():
x = Symbol('x')
inputs = {
'2*x': 2 * x,
'3.00': Float(3),
'22/7': Rational(22, 7),
'2+3j': 2 + 3*I,
'exp(x)': exp(x),
'x!': factorial(x),
'Symbol("x").free_symbols': x.free_symbols,
"S('S(3).n(n=3)')": 3.00,
'factorint(12, visual=True)': Mul(
Pow(2, 2, evaluate=False),
Pow(3, 1, evaluate=False),
evaluate=False),
'Limit(sin(x), x, 0, dir="-")': Limit(sin(x), x, 0, dir='-'),
}
for text, result in inputs.items():
print text, parse_expr(text), result
assert(parse_expr(text) == result)
|
<commit_before><commit_msg>Add some tests for sympy_parser<commit_after>
|
from sympy.parsing.sympy_parser import parse_expr
from sympy import *
def test_implicit_multiplication_application():
x = Symbol('x')
inputs = {
'2*x': 2 * x,
'3.00': Float(3),
'22/7': Rational(22, 7),
'2+3j': 2 + 3*I,
'exp(x)': exp(x),
'x!': factorial(x),
'Symbol("x").free_symbols': x.free_symbols,
"S('S(3).n(n=3)')": 3.00,
'factorint(12, visual=True)': Mul(
Pow(2, 2, evaluate=False),
Pow(3, 1, evaluate=False),
evaluate=False),
'Limit(sin(x), x, 0, dir="-")': Limit(sin(x), x, 0, dir='-'),
}
for text, result in inputs.items():
print text, parse_expr(text), result
assert(parse_expr(text) == result)
|
Add some tests for sympy_parserfrom sympy.parsing.sympy_parser import parse_expr
from sympy import *
def test_implicit_multiplication_application():
x = Symbol('x')
inputs = {
'2*x': 2 * x,
'3.00': Float(3),
'22/7': Rational(22, 7),
'2+3j': 2 + 3*I,
'exp(x)': exp(x),
'x!': factorial(x),
'Symbol("x").free_symbols': x.free_symbols,
"S('S(3).n(n=3)')": 3.00,
'factorint(12, visual=True)': Mul(
Pow(2, 2, evaluate=False),
Pow(3, 1, evaluate=False),
evaluate=False),
'Limit(sin(x), x, 0, dir="-")': Limit(sin(x), x, 0, dir='-'),
}
for text, result in inputs.items():
print text, parse_expr(text), result
assert(parse_expr(text) == result)
|
<commit_before><commit_msg>Add some tests for sympy_parser<commit_after>from sympy.parsing.sympy_parser import parse_expr
from sympy import *
def test_implicit_multiplication_application():
x = Symbol('x')
inputs = {
'2*x': 2 * x,
'3.00': Float(3),
'22/7': Rational(22, 7),
'2+3j': 2 + 3*I,
'exp(x)': exp(x),
'x!': factorial(x),
'Symbol("x").free_symbols': x.free_symbols,
"S('S(3).n(n=3)')": 3.00,
'factorint(12, visual=True)': Mul(
Pow(2, 2, evaluate=False),
Pow(3, 1, evaluate=False),
evaluate=False),
'Limit(sin(x), x, 0, dir="-")': Limit(sin(x), x, 0, dir='-'),
}
for text, result in inputs.items():
print text, parse_expr(text), result
assert(parse_expr(text) == result)
|
|
f8e949019c7339a73bdd48e9b351843d458d99aa
|
gaphor/ui/tests/test_namespace.py
|
gaphor/ui/tests/test_namespace.py
|
import pytest
from gaphor import UML
from gaphor.core.modeling import Diagram
from gaphor.ui.namespace import Namespace, popup_model
@pytest.fixture
def namespace(event_manager, element_factory):
ns = Namespace(event_manager, element_factory)
scrolled_window = ns.open() # noqa: F841
assert ns.model
assert ns.view
yield ns
ns.shutdown()
def test_popup_model(namespace, diagram, element_factory):
item = diagram.create(
UML.classes.ClassItem, subject=element_factory.create(UML.Class)
)
namespace.select_element(item.subject)
popup = popup_model(namespace.view)
assert popup
def test_create_diagram(namespace, element_factory):
package = element_factory.create(UML.Package)
namespace.select_element(package)
namespace.tree_view_create_diagram()
assert element_factory.lselect(Diagram)
def test_create_package(namespace, element_factory):
package = element_factory.create(UML.Package)
namespace.select_element(package)
namespace.tree_view_create_package()
assert element_factory.lselect(lambda e: e.namespace is package)
def test_delete_package(namespace, element_factory):
package = element_factory.create(UML.Package)
namespace.select_element(package)
namespace.tree_view_delete()
assert element_factory.lselect() == []
|
Add tests for Namespace component
|
Add tests for Namespace component
|
Python
|
lgpl-2.1
|
amolenaar/gaphor,amolenaar/gaphor
|
Add tests for Namespace component
|
import pytest
from gaphor import UML
from gaphor.core.modeling import Diagram
from gaphor.ui.namespace import Namespace, popup_model
@pytest.fixture
def namespace(event_manager, element_factory):
ns = Namespace(event_manager, element_factory)
scrolled_window = ns.open() # noqa: F841
assert ns.model
assert ns.view
yield ns
ns.shutdown()
def test_popup_model(namespace, diagram, element_factory):
item = diagram.create(
UML.classes.ClassItem, subject=element_factory.create(UML.Class)
)
namespace.select_element(item.subject)
popup = popup_model(namespace.view)
assert popup
def test_create_diagram(namespace, element_factory):
package = element_factory.create(UML.Package)
namespace.select_element(package)
namespace.tree_view_create_diagram()
assert element_factory.lselect(Diagram)
def test_create_package(namespace, element_factory):
package = element_factory.create(UML.Package)
namespace.select_element(package)
namespace.tree_view_create_package()
assert element_factory.lselect(lambda e: e.namespace is package)
def test_delete_package(namespace, element_factory):
package = element_factory.create(UML.Package)
namespace.select_element(package)
namespace.tree_view_delete()
assert element_factory.lselect() == []
|
<commit_before><commit_msg>Add tests for Namespace component<commit_after>
|
import pytest
from gaphor import UML
from gaphor.core.modeling import Diagram
from gaphor.ui.namespace import Namespace, popup_model
@pytest.fixture
def namespace(event_manager, element_factory):
ns = Namespace(event_manager, element_factory)
scrolled_window = ns.open() # noqa: F841
assert ns.model
assert ns.view
yield ns
ns.shutdown()
def test_popup_model(namespace, diagram, element_factory):
item = diagram.create(
UML.classes.ClassItem, subject=element_factory.create(UML.Class)
)
namespace.select_element(item.subject)
popup = popup_model(namespace.view)
assert popup
def test_create_diagram(namespace, element_factory):
package = element_factory.create(UML.Package)
namespace.select_element(package)
namespace.tree_view_create_diagram()
assert element_factory.lselect(Diagram)
def test_create_package(namespace, element_factory):
package = element_factory.create(UML.Package)
namespace.select_element(package)
namespace.tree_view_create_package()
assert element_factory.lselect(lambda e: e.namespace is package)
def test_delete_package(namespace, element_factory):
package = element_factory.create(UML.Package)
namespace.select_element(package)
namespace.tree_view_delete()
assert element_factory.lselect() == []
|
Add tests for Namespace componentimport pytest
from gaphor import UML
from gaphor.core.modeling import Diagram
from gaphor.ui.namespace import Namespace, popup_model
@pytest.fixture
def namespace(event_manager, element_factory):
ns = Namespace(event_manager, element_factory)
scrolled_window = ns.open() # noqa: F841
assert ns.model
assert ns.view
yield ns
ns.shutdown()
def test_popup_model(namespace, diagram, element_factory):
item = diagram.create(
UML.classes.ClassItem, subject=element_factory.create(UML.Class)
)
namespace.select_element(item.subject)
popup = popup_model(namespace.view)
assert popup
def test_create_diagram(namespace, element_factory):
package = element_factory.create(UML.Package)
namespace.select_element(package)
namespace.tree_view_create_diagram()
assert element_factory.lselect(Diagram)
def test_create_package(namespace, element_factory):
package = element_factory.create(UML.Package)
namespace.select_element(package)
namespace.tree_view_create_package()
assert element_factory.lselect(lambda e: e.namespace is package)
def test_delete_package(namespace, element_factory):
package = element_factory.create(UML.Package)
namespace.select_element(package)
namespace.tree_view_delete()
assert element_factory.lselect() == []
|
<commit_before><commit_msg>Add tests for Namespace component<commit_after>import pytest
from gaphor import UML
from gaphor.core.modeling import Diagram
from gaphor.ui.namespace import Namespace, popup_model
@pytest.fixture
def namespace(event_manager, element_factory):
ns = Namespace(event_manager, element_factory)
scrolled_window = ns.open() # noqa: F841
assert ns.model
assert ns.view
yield ns
ns.shutdown()
def test_popup_model(namespace, diagram, element_factory):
item = diagram.create(
UML.classes.ClassItem, subject=element_factory.create(UML.Class)
)
namespace.select_element(item.subject)
popup = popup_model(namespace.view)
assert popup
def test_create_diagram(namespace, element_factory):
package = element_factory.create(UML.Package)
namespace.select_element(package)
namespace.tree_view_create_diagram()
assert element_factory.lselect(Diagram)
def test_create_package(namespace, element_factory):
package = element_factory.create(UML.Package)
namespace.select_element(package)
namespace.tree_view_create_package()
assert element_factory.lselect(lambda e: e.namespace is package)
def test_delete_package(namespace, element_factory):
package = element_factory.create(UML.Package)
namespace.select_element(package)
namespace.tree_view_delete()
assert element_factory.lselect() == []
|