_id
stringlengths 2
7
| title
stringlengths 1
88
| partition
stringclasses 3
values | text
stringlengths 75
19.8k
| language
stringclasses 1
value | meta_information
dict |
|---|---|---|---|---|---|
q20800
|
find_neighbor_sites
|
train
|
def find_neighbor_sites(sites, am, flatten=True, include_input=False,
                        logic='or'):
    r"""
    Given a symmetric adjacency matrix, finds all sites that are connected
    to the input sites.

    Parameters
    ----------
    sites : array_like
        The indices of the sites whose neighbors are sought
    am : scipy.sparse matrix
        The adjacency matrix of the network. Must be symmetrical such that if
        sites *i* and *j* are connected, the matrix contains non-zero values
        at locations (i, j) and (j, i).
    flatten : boolean
        If ``True`` (default) the returned result is a compressed array of all
        neighbors, or a list of lists with each sub-list containing the
        neighbors for each input site. Note that an *unflattened* list might
        be slow to generate since it is a Python ``list`` rather than a Numpy
        array.
    include_input : boolean
        If ``False`` (default) the input sites will be removed from the result.
    logic : string
        Specifies logic to filter the resulting list. Options are:

        **'or'** : (default) All neighbors of the input sites. This is also
        known as the 'union' in set theory or 'any' in boolean logic. Both
        keywords are accepted and treated as 'or'.

        **'xor'** : Only neighbors of one and only one input site. This is
        useful for finding the sites that are not shared by any of the input
        sites. 'exclusive_or' is also accepted.

        **'xnor'** : Neighbors that are shared by two or more input sites.
        This is equivalent to finding all neighbors with 'or', minus those
        found with 'xor', and is useful for finding neighbors that the inputs
        have in common. 'nxor' is also accepted.

        **'and'** : Only neighbors shared by all input sites. This is also
        known as 'intersection' in set theory and (sometimes) as 'all' in
        boolean logic. Both keywords are accepted and treated as 'and'.

    Returns
    -------
    An array containing the neighboring sites filtered by the given logic. If
    ``flatten`` is ``False`` then the result is a list of lists containing the
    neighbors of each input site.

    See Also
    --------
    find_complement

    Notes
    -----
    The ``logic`` options are applied to neighboring sites only, thus it is
    not possible to obtain sites that are part of the global set but not
    neighbors. This is because (a) the list of global sites might be very
    large, and (b) it is not possible to return a list of neighbors for each
    input site if global sites are considered.
    """
    # LIL format exposes per-row column indices directly via ``am.rows``
    if am.format != 'lil':
        am = am.tolil(copy=False)
    n_sites = am.shape[0]
    # One list of neighbor indices per input site
    rows = [am.rows[i] for i in sp.array(sites, ndmin=1)]
    if len(rows) == 0:
        return []
    neighbors = sp.hstack(rows).astype(sp.int64)  # Flatten list to apply logic
    if logic in ['or', 'union', 'any']:
        neighbors = sp.unique(neighbors)
    elif logic in ['xor', 'exclusive_or']:
        # Sites occurring exactly once are neighbors of only one input site
        neighbors = sp.unique(sp.where(sp.bincount(neighbors) == 1)[0])
    elif logic in ['xnor', 'nxor']:
        # Sites occurring more than once are shared by several input sites
        neighbors = sp.unique(sp.where(sp.bincount(neighbors) > 1)[0])
    elif logic in ['and', 'all', 'intersection']:
        neighbors = set(neighbors)
        # Successively intersect with each input site's neighbor list
        [neighbors.intersection_update(i) for i in rows]
        neighbors = sp.array(list(neighbors), dtype=sp.int64, ndmin=1)
    else:
        raise Exception('Specified logic is not implemented')
    # Deal with removing inputs or not
    mask = sp.zeros(shape=n_sites, dtype=bool)
    mask[neighbors] = True
    if not include_input:
        mask[sites] = False
    # Finally flatten or not
    if flatten:
        neighbors = sp.where(mask)[0]
    else:
        if (neighbors.size > 0):
            # Filter each per-site neighbor list through the combined mask
            for i in range(len(rows)):
                vals = sp.array(rows[i], dtype=sp.int64)
                rows[i] = vals[mask[vals]]
            neighbors = rows
        else:
            neighbors = [sp.array([], dtype=int) for i in range(len(sites))]
    return neighbors
|
python
|
{
"resource": ""
}
|
q20801
|
find_connected_sites
|
train
|
def find_connected_sites(bonds, am, flatten=True, logic='or'):
    r"""
    Given an adjacency matrix, finds which sites are connected to the input
    bonds.

    Parameters
    ----------
    bonds : array_like
        The indices of the bonds whose connected sites are sought
    am : scipy.sparse matrix
        The adjacency matrix of the network. Must be symmetrical such that if
        sites *i* and *j* are connected, the matrix contains non-zero values
        at locations (i, j) and (j, i).
    flatten : boolean (default is ``True``)
        Indicates whether the returned result is a compressed array of all
        neighbors, or a list of lists with each sub-list containing the
        neighbors for each input site. Note that an *unflattened* list might
        be slow to generate since it is a Python ``list`` rather than a Numpy
        array.
    logic : string
        Specifies logic to filter the resulting list. Options are:

        **'or'** : (default) All neighbors of the input bonds. This is also
        known as the 'union' in set theory or (sometimes) 'any' in boolean
        logic. Both keywords are accepted and treated as 'or'.

        **'xor'** : Only neighbors of one and only one input bond. This is
        useful for finding the sites that are not shared by any of the input
        bonds. 'exclusive_or' is also accepted.

        **'xnor'** : Neighbors that are shared by two or more input bonds.
        This is equivalent to finding all neighbors with 'or', minus those
        found with 'xor', and is useful for finding neighbors that the inputs
        have in common. 'nxor' is also accepted.

        **'and'** : Only neighbors shared by all input bonds. This is also
        known as 'intersection' in set theory and (sometimes) as 'all' in
        boolean logic. Both keywords are accepted and treated as 'and'.

    Returns
    -------
    An array containing the connected sites, filtered by the given logic. If
    ``flatten`` is ``False`` then the result is a list of lists containing the
    neighbors of each given input bond. In this latter case, sites that
    have been removed by the given logic are indicated by ``nans``, thus the
    array is of type ``float`` and is not suitable for indexing.

    See Also
    --------
    find_complement
    """
    if am.format != 'coo':
        raise Exception('Adjacency matrix must be in COO format')
    bonds = sp.array(bonds, ndmin=1)
    if len(bonds) == 0:
        return []
    # Each bond contributes its two endpoint sites
    neighbors = sp.hstack((am.row[bonds], am.col[bonds])).astype(sp.int64)
    if neighbors.size:
        n_sites = sp.amax(neighbors)
    if logic in ['or', 'union', 'any']:
        neighbors = sp.unique(neighbors)
    elif logic in ['xor', 'exclusive_or']:
        # Sites occurring exactly once belong to only one input bond
        neighbors = sp.unique(sp.where(sp.bincount(neighbors) == 1)[0])
    elif logic in ['xnor', 'nxor']:
        # BUGFIX: 'nxor' was documented as accepted but previously raised
        neighbors = sp.unique(sp.where(sp.bincount(neighbors) > 1)[0])
    elif logic in ['and', 'all', 'intersection']:
        temp = sp.vstack((am.row[bonds], am.col[bonds])).T.tolist()
        temp = [set(pair) for pair in temp]
        neighbors = temp[0]
        [neighbors.intersection_update(pair) for pair in temp[1:]]
        neighbors = sp.array(list(neighbors), dtype=sp.int64, ndmin=1)
    else:
        raise Exception('Specified logic is not implemented')
    if flatten is False:
        if neighbors.size:
            mask = sp.zeros(shape=n_sites+1, dtype=bool)
            mask[neighbors] = True
            temp = sp.hstack((am.row[bonds], am.col[bonds])).astype(sp.int64)
            # Mark sites removed by the logic with -1, then convert to nan
            temp[~mask[temp]] = -1
            inds = sp.where(temp == -1)[0]
            if len(inds):
                temp = temp.astype(float)
                temp[inds] = sp.nan
            temp = sp.reshape(a=temp, newshape=[len(bonds), 2], order='F')
            neighbors = temp
        else:
            neighbors = [sp.array([], dtype=sp.int64) for i in range(len(bonds))]
    return neighbors
|
python
|
{
"resource": ""
}
|
q20802
|
find_connecting_bonds
|
train
|
def find_connecting_bonds(sites, am):
    r"""
    Given pairs of sites, finds the bonds which connects each pair.

    Parameters
    ----------
    sites : array_like
        A 2-column vector containing pairs of site indices on each row.
    am : scipy.sparse matrix
        The adjacency matrix of the network. Must be symmetrical such that if
        sites *i* and *j* are connected, the matrix contains non-zero values
        at locations (i, j) and (j, i).

    Returns
    -------
    Returns a list the same length as P1 (and P2) with each element
    containing the throat number that connects the corresponding pores,
    or ``None`` if pores are not connected.

    Notes
    -----
    The returned list can be converted to an ND-array, which will convert
    the ``None`` values to ``nan``. These can then be found using
    ``scipy.isnan``.
    """
    # DOK format supports O(1) lookup of individual (row, col) entries
    if am.format != 'dok':
        am = am.todok(copy=False)
    pairs = sp.array(sites, ndmin=2)
    if pairs.size == 0:
        return []
    # ``dict.get`` returns None for pairs with no stored entry
    return [am.get((i, j), None) for i, j in zip(pairs[:, 0], pairs[:, 1])]
|
python
|
{
"resource": ""
}
|
q20803
|
istriu
|
train
|
def istriu(am):
    r"""
    Returns ``True`` if the sparse adjacency matrix is upper triangular

    Parameters
    ----------
    am : scipy.sparse matrix
        The adjacency matrix to inspect

    Returns
    -------
    ``True`` if every stored entry lies on or above the main diagonal,
    otherwise ``False``. Non-square matrices always return ``False``.
    """
    if am.shape[0] != am.shape[1]:
        # Use the module logger for consistency with ``issymmetric`` (was a
        # bare ``print``)
        logger.warning('Matrix is not square, triangularity is irrelevant')
        return False
    if am.format != 'coo':
        am = am.tocoo(copy=False)
    # Upper triangular means every entry's row index <= its column index
    return sp.all(am.row <= am.col)
|
python
|
{
"resource": ""
}
|
q20804
|
istriangular
|
train
|
def istriangular(am):
    r"""
    Returns ``True`` if the sparse adjacency matrix is either upper or lower
    triangular
    """
    # COO format exposes the row/col arrays that istril/istriu inspect
    if am.format != 'coo':
        am = am.tocoo(copy=False)
    return istril(am) or istriu(am)
|
python
|
{
"resource": ""
}
|
q20805
|
issymmetric
|
train
|
def issymmetric(am):
    r"""
    A method to check if a square matrix is symmetric
    Returns ``True`` if the sparse adjacency matrix is symmetric

    NOTE(review): triangular matrices are reported as non-symmetric by
    design (triangular storage is treated as a half-representation here).
    This also classifies matrices with only diagonal entries as
    non-symmetric, even though they are mathematically symmetric —
    confirm this is intended.
    """
    if am.shape[0] != am.shape[1]:
        logger.warning('Matrix is not square, symmetrical is irrelevant')
        return False
    if am.format != 'coo':
        am = am.tocoo(copy=False)
    # Triangular matrices are treated as asymmetric (see NOTE above)
    if istril(am) or istriu(am):
        return False
    # Compare am with its transpose, element wise; ``am != am.T`` is a sparse
    # matrix of mismatches, so zero stored entries means symmetric
    sym = ((am != am.T).size) == 0
    return sym
|
python
|
{
"resource": ""
}
|
q20806
|
am_to_im
|
train
|
def am_to_im(am):
    r"""
    Convert an adjacency matrix into an incidence matrix

    The incidence matrix has one row per site and one column per bond, with
    a non-zero entry wherever a site is an endpoint of a bond.
    """
    if am.shape[0] != am.shape[1]:
        raise Exception('Adjacency matrices must be square')
    if am.format != 'coo':
        am = am.tocoo(copy=False)
    site_inds = am.row
    other_ends = am.col
    vals = am.data
    # Number the bonds consecutively in storage order
    bond_inds = sp.arange(sp.size(am.data))
    if istriangular(am):
        # Triangular storage lists each bond once; mirror the entries so
        # both endpoint sites reference the same bond column
        site_inds = sp.append(site_inds, other_ends)
        vals = sp.append(vals, vals)
        bond_inds = sp.append(bond_inds, bond_inds)
    im = sprs.coo.coo_matrix((vals, (site_inds, bond_inds)),
                             (site_inds.max()+1, bond_inds.max()+1))
    return im
|
python
|
{
"resource": ""
}
|
q20807
|
im_to_am
|
train
|
def im_to_am(im):
    r"""
    Convert an incidence matrix into an adjacency matrix

    NOTE(review): this function appears incomplete — it validates the shape
    and converts the input to COO format but never builds or returns the
    adjacency matrix (it implicitly returns ``None``). TODO: confirm
    against the upstream implementation.
    """
    # An incidence matrix should be sites x bonds, hence non-square and wide
    if im.shape[0] == im.shape[1]:
        print('Warning: Received matrix is square which is unlikely')
    if im.shape[0] > im.shape[1]:
        print('Warning: Received matrix has more sites than bonds')
    if im.format != 'coo':
        im = im.tocoo(copy=False)
|
python
|
{
"resource": ""
}
|
q20808
|
tri_to_am
|
train
|
def tri_to_am(tri):
    r"""
    Given a Delaunay Triangulation object from Scipy's ``spatial`` module,
    converts to a sparse adjacency matrix network representation.

    Parameters
    ----------
    tri : Delaunay Triangulation Object
        This object is produced by ``scipy.spatial.Delaunay``

    Returns
    -------
    A sparse adjacency matrix in COO format. The network is undirected
    and unweighted, so the adjacency matrix is upper-triangular and all the
    weights are set to 1.
    """
    # Create an empty list-of-list matrix
    lil = sprs.lil_matrix((tri.npoints, tri.npoints))
    # Scan through Delaunay triangulation to retrieve pairs
    indices, indptr = tri.vertex_neighbor_vertices
    for k in range(tri.npoints):
        # Assign each vertex's neighbor list directly into the LIL rows
        lil.rows[k] = indptr[indices[k]:indices[k+1]]
    # Convert to coo format
    lil.data = lil.rows  # Just a dummy array to make things work properly
    coo = lil.tocoo()
    # Set weights to 1's
    coo.data = sp.ones_like(coo.data)
    # Remove diagonal, and convert to csr to remove duplicates
    am = sp.sparse.triu(A=coo, k=1, format='csr')
    # Then convert back to COO and return
    am = am.tocoo()
    return am
|
python
|
{
"resource": ""
}
|
q20809
|
vor_to_am
|
train
|
def vor_to_am(vor):
    r"""
    Given a Voronoi tessellation object from Scipy's ``spatial`` module,
    converts to a sparse adjacency matrix network representation in COO
    format.

    Parameters
    ----------
    vor : Voronoi Tessellation object
        This object is produced by ``scipy.spatial.Voronoi``

    Returns
    -------
    A sparse adjacency matrix in COO format. The network is undirected
    and unweighted, so the adjacency matrix is upper-triangular and all the
    weights are set to 1.
    """
    # Create adjacency matrix in lil format for quick matrix construction
    N = vor.vertices.shape[0]
    rc = [[], []]
    for ij in vor.ridge_dict.keys():
        row = vor.ridge_dict[ij].copy()
        # Make sure voronoi cell closes upon itself, so the last->first
        # vertex pair of the ridge polygon is also recorded
        row.append(row[0])
        # Add connections to rc list as consecutive vertex pairs
        rc[0].extend(row[:-1])
        rc[1].extend(row[1:])
    rc = sp.vstack(rc).T
    # Make adj mat upper triangular
    rc = sp.sort(rc, axis=1)
    # Remove any pairs with ends at infinity (-1)
    keep = ~sp.any(rc == -1, axis=1)
    rc = rc[keep]
    data = sp.ones_like(rc[:, 0])
    # Build adj mat in COO format
    # NOTE(review): this overwrites the N computed from vor.vertices above;
    # if the highest-index vertices only touch infinite ridges the matrix
    # will be smaller than the vertex count — confirm intended
    M = N = sp.amax(rc) + 1
    am = sprs.coo_matrix((data, (rc[:, 0], rc[:, 1])), shape=(M, N))
    # Remove diagonal, and convert to csr to remove duplicates
    am = sp.sparse.triu(A=am, k=1, format='csr')
    # Then convert back to COO and return
    am = am.tocoo()
    return am
|
python
|
{
"resource": ""
}
|
q20810
|
conns_to_am
|
train
|
def conns_to_am(conns, shape=None, force_triu=True, drop_diag=True,
                drop_dupes=True, drop_negs=True):
    r"""
    Converts a list of connections into a Scipy sparse adjacency matrix

    Parameters
    ----------
    conns : array_like, N x 2
        The list of site-to-site connections
    shape : list, optional
        The shape of the array. If none is given then it is inferred from the
        maximum value in ``conns`` array.
    force_triu : boolean
        If True (default), then all connections are assumed undirected, and
        moved to the upper triangular portion of the array
    drop_diag : boolean
        If True (default), then connections from a site and itself are
        removed.
    drop_dupes : boolean
        If True (default), then all pairs of sites sharing multiple
        connections are reduced to a single connection.
    drop_negs : boolean
        If True (default), then all connections with one or both ends
        pointing to a negative number are removed.

    Returns
    -------
    A scipy.sparse adjacency matrix in COO format. A warning is issued if
    any site index within ``shape`` appears in no connection.
    """
    if force_triu:  # Sort connections to [low, high]
        conns = sp.sort(conns, axis=1)
    if drop_negs:  # Remove connections to -1
        keep = ~sp.any(conns < 0, axis=1)
        conns = conns[keep]
    if drop_diag:  # Remove connections of [self, self]
        keep = sp.where(conns[:, 0] != conns[:, 1])[0]
        conns = conns[keep]
    # Now convert to actual sparse array in COO format
    data = sp.ones_like(conns[:, 0], dtype=int)
    if shape is None:
        N = conns.max() + 1
        shape = (N, N)
    am = sprs.coo_matrix((data, (conns[:, 0], conns[:, 1])), shape=shape)
    if drop_dupes:  # Convert to csr and back to coo, which merges duplicates
        am = am.tocsr()
        am = am.tocoo()
    # Warn about isolated nodes. BUGFIX: using minlength=shape[0] makes the
    # bincount cover trailing indices too, so a single check suffices; the
    # previous extra ``am.col.max() < shape[0] - 1`` test (wrapped in a no-op
    # ``sp.any`` on a scalar) could fire falsely when a trailing node
    # appeared only in the row entries of a non-triangular matrix.
    if sp.any(sp.bincount(conns.flatten(), minlength=shape[0]) == 0):
        warnings.warn('Some nodes are not connected to any bonds')
    return am
|
python
|
{
"resource": ""
}
|
q20811
|
isoutside
|
train
|
def isoutside(coords, shape):
    r"""
    Identifies points that lie outside the specified region.

    Parameters
    ----------
    coords : array_like
        The Np x 3 coordinates of the points to classify
    shape : array_like
        The size and shape of the domain beyond which points should be
        trimmed. The argument is treated as follows:

        **sphere** : If a scalar or single element list is received, it's
        treated as the radius [r] of a sphere centered on [0, 0, 0].

        **cylinder** : If a two-element list is received it's treated as
        the radius and height of a cylinder [r, z] whose central axis
        starts at [0, 0, 0] and extends in the positive z-direction.

        **rectangle** : If a three element list is received, it's treated
        as the outer corner of rectangle [x, y, z] whose opposite corner
        lies at [0, 0, 0].

    Returns
    -------
    An Np-long mask of True values indicating pores that lie outside the
    domain.
    """
    if len(shape) == 1:  # Spherical domain: shape = [r]
        dist = sp.sqrt(sp.sum(coords**2, axis=1))
        outside = dist > shape[0]
    elif len(shape) == 2:  # Cylindrical domain: shape = [r, z]
        # Radial distance in the x-y plane only
        dist = sp.sqrt(sp.sum(coords[:, [0, 1]]**2, axis=1))
        outside = dist > shape[0]
        if shape[1] > 0:
            # Also flag points above the top or below the bottom of the
            # cylinder (boolean + acts as logical or)
            outside = outside + (coords[:, 2] > shape[1])
            outside = outside + (coords[:, 2] < 0)
    elif len(shape) == 3:  # Rectilinear domain
        limits = sp.array(shape, dtype=float)
        try:
            # A 3 x 2 array gives explicit [lo, hi] bounds per axis
            lo_lim = limits[:, 0]
            hi_lim = limits[:, 1]
        except IndexError:
            # A flat 3-vector means the opposite corner sits at the origin
            lo_lim = sp.array([0, 0, 0])
            hi_lim = limits
        too_high = sp.any(coords > hi_lim, axis=1)
        too_low = sp.any(coords < lo_lim, axis=1)
        outside = too_high + too_low
    return outside
|
python
|
{
"resource": ""
}
|
q20812
|
ispercolating
|
train
|
def ispercolating(am, inlets, outlets, mode='site'):
    r"""
    Determines if a percolating clusters exists in the network spanning
    the given inlet and outlet sites

    Parameters
    ----------
    am : adjacency_matrix
        The adjacency matrix with the ``data`` attribute indicating
        if a bond is occupied or not
    inlets : array_like
        An array of indices indicating which sites are part of the inlets
    outlets : array_like
        An array of indices indicating which sites are part of the outlets
    mode : string
        Indicates which type of percolation to apply, either `'site'` or
        `'bond'`

    Returns
    -------
    A boolean indicating whether any occupied cluster contains both an
    inlet and an outlet site.
    """
    # BUGFIX: was ``am.format is not 'coo'`` (identity comparison against a
    # string literal) and ``am.to_coo()`` (scipy sparse spells it ``tocoo``)
    if am.format != 'coo':
        am = am.tocoo()
    ij = sp.vstack((am.col, am.row)).T
    if mode.startswith('site'):
        occupied_sites = sp.zeros(shape=am.shape[0], dtype=bool)
        # A site is occupied if it touches any occupied bond
        occupied_sites[ij[am.data].flatten()] = True
        clusters = site_percolation(ij, occupied_sites)
    elif mode.startswith('bond'):
        occupied_bonds = am.data
        clusters = bond_percolation(ij, occupied_bonds)
    # Cluster labels touching the inlets/outlets; a leading -1 means
    # unoccupied and is discarded. Guard against empty arrays before
    # peeking at element [0].
    ins = sp.unique(clusters.sites[inlets])
    if ins.size and ins[0] == -1:
        ins = ins[1:]
    outs = sp.unique(clusters.sites[outlets])
    if outs.size and outs[0] == -1:
        outs = outs[1:]
    # Percolating iff some cluster label appears on both sides
    hits = sp.in1d(ins, outs)
    return sp.any(hits)
|
python
|
{
"resource": ""
}
|
q20813
|
site_percolation
|
train
|
def site_percolation(ij, occupied_sites):
    r"""
    Calculates the site and bond occupancy status for a site percolation
    process given a list of occupied sites.

    Parameters
    ----------
    ij : array_like
        An N x 2 array of [site_A, site_B] connections. If two connected
        sites are both occupied they are part of the same cluster, as is
        the bond connecting them.
    occupied_sites : boolean
        A list indicating whether sites are occupied or not

    Returns
    -------
    A tuple containing a list of site and bond labels, indicating which
    cluster each belongs to. A value of -1 indicates unoccupied.

    Notes
    -----
    The ``connected_components`` function of scipy.csgraph will give ALL
    sites a cluster number whether they are occupied or not, so this
    function essentially adjusts the cluster numbers to represent a
    percolation process.
    """
    from collections import namedtuple
    Np = sp.size(occupied_sites)
    # A bond is occupied only if both of its endpoint sites are occupied
    occupied_bonds = sp.all(occupied_sites[ij], axis=1)
    adj_mat = sprs.csr_matrix((occupied_bonds, (ij[:, 0], ij[:, 1])),
                              shape=(Np, Np))
    # Drop the False (unoccupied) entries so they don't form edges
    adj_mat.eliminate_zeros()
    clusters = csgraph.connected_components(csgraph=adj_mat, directed=False)[1]
    clusters[~occupied_sites] = -1
    # Renumber cluster labels to be contiguous; ``ps`` is presumably
    # porespy's tools module — external helper, TODO confirm
    s_labels = ps.tools.make_contiguous(clusters + 1)
    if sp.any(~occupied_sites):
        # Shift back so unoccupied sites end up labelled -1 again
        s_labels -= 1
    # A bond inherits the smaller of its two endpoint labels, so bonds with
    # an unoccupied end get -1
    b_labels = sp.amin(s_labels[ij], axis=1)
    tup = namedtuple('cluster_labels', ('sites', 'bonds'))
    return tup(s_labels, b_labels)
|
python
|
{
"resource": ""
}
|
q20814
|
bond_percolation
|
train
|
def bond_percolation(ij, occupied_bonds):
    r"""
    Calculates the site and bond occupancy status for a bond percolation
    process given a list of occupied bonds.

    Parameters
    ----------
    ij : array_like
        An N x 2 array of [site_A, site_B] connections. A site is
        considered occupied if any of its connecting bonds are occupied.
    occupied_bonds: boolean
        A list indicating whether a bond is occupied or not

    Returns
    -------
    A tuple contain a list of site and bond labels, indicating which
    cluster each belongs to. A value of -1 indicates uninvaded.

    Notes
    -----
    The ``connected_components`` function of scipy.csgraph will give ALL
    sites a cluster number whether they are occupied or not, so this
    function essentially adjusts the cluster numbers to represent a
    percolation process.
    """
    from collections import namedtuple
    Np = sp.amax(ij) + 1
    # Build an adjacency matrix holding only the occupied bonds
    adj_mat = sprs.csr_matrix((occupied_bonds, (ij[:, 0], ij[:, 1])),
                              shape=(Np, Np))
    adj_mat.eliminate_zeros()
    clusters = csgraph.connected_components(csgraph=adj_mat,
                                            directed=False)[1]
    # Singleton components are isolated sites -> mark as uninvaded (-1)
    keep = sp.bincount(clusters) > 1
    relabel = -sp.ones(shape=(clusters.max()+1, ), dtype=int)
    relabel[keep] = sp.arange(0, keep.sum())
    s_labels = relabel[clusters]
    # A bond inherits the smaller of its endpoint labels, so any bond with
    # an uninvaded end gets -1
    b_labels = sp.amin(s_labels[ij], axis=1)
    cluster_labels = namedtuple('cluster_labels', ('sites', 'bonds'))
    return cluster_labels(s_labels, b_labels)
|
python
|
{
"resource": ""
}
|
q20815
|
trim
|
train
|
def trim(network, pores=[], throats=[]):
    '''
    Remove pores or throats from the network.

    Parameters
    ----------
    network : OpenPNM Network Object
        The Network from which pores or throats should be removed
    pores (or throats) : array_like
        The indices of the pores or throats to be removed from the
        network.

    Notes
    -----
    This is an in-place operation, meaning the received Network object will
    be altered directly.

    Examples
    --------
    >>> import openpnm as op
    >>> pn = op.network.Cubic(shape=[5, 5, 5])
    >>> pn.Np
    125
    >>> pn.Nt
    300
    >>> op.topotools.trim(network=pn, pores=[1])
    >>> pn.Np
    124
    >>> pn.Nt
    296
    '''
    # NOTE(review): the [] defaults are safe because they are immediately
    # rebound (never mutated), but keyword ``None`` defaults would be safer
    pores = sp.array(pores, ndmin=1)
    throats = sp.array(throats, ndmin=1)
    Pkeep = sp.copy(network['pore.all'])
    Tkeep = sp.copy(network['throat.all'])
    if sp.size(pores) > 0:
        Pkeep[pores] = False
        if not sp.any(Pkeep):
            raise Exception('Cannot delete ALL pores')
        # Performing customized find_neighbor_throats which is much faster,
        # but not general for other types of queries
        # temp = sp.in1d(network['throat.conns'].flatten(), pores)
        # temp = sp.reshape(temp, (network.Nt, 2))
        # Ts = sp.any(temp, axis=1)
        # Ts = network.Ts[Ts]
        # tic()
        # Throats touching any trimmed pore must be trimmed as well
        Ts = network.find_neighbor_throats(pores=~Pkeep, mode='union')
        # toc()
        if len(Ts) > 0:
            Tkeep[Ts] = False
    if sp.size(throats) > 0:
        Tkeep[throats] = False
    # The following IF catches the special case of deleting ALL throats
    # It removes all throat props, adds 'all', and skips rest of function
    if not sp.any(Tkeep):
        logger.info('Removing ALL throats from network')
        for item in network.keys():
            if item.split('.')[0] == 'throat':
                del network[item]
        network['throat.all'] = sp.array([], ndmin=1)
        return
    # Temporarily store throat conns and pore map for processing later
    Np_old = network.Np
    Nt_old = network.Nt
    Pkeep_inds = sp.where(Pkeep)[0]
    Tkeep_inds = sp.where(Tkeep)[0]
    Pmap = sp.ones((network.Np,), dtype=int)*-1
    tpore1 = network['throat.conns'][:, 0]
    tpore2 = network['throat.conns'][:, 1]
    # Delete specified pores and throats from all objects
    for obj in network.project[::-1]:
        if (obj.Np == Np_old) and (obj.Nt == Nt_old):
            # Object spans the whole network: indices can be reused directly
            Ps = Pkeep_inds
            Ts = Tkeep_inds
        else:
            # Object covers a subset: translate indices into its local frame
            Ps = obj.map_pores(pores=Pkeep, origin=network)
            Ts = obj.map_throats(throats=Tkeep, origin=network)
        for key in list(obj.keys()):
            temp = obj.pop(key)
            if key.split('.')[0] == 'throat':
                obj.update({key: temp[Ts]})
            if key.split('.')[0] == 'pore':
                obj.update({key: temp[Ps]})
    # Remap throat connections onto the renumbered (compacted) pore indices
    Pmap[Pkeep] = sp.arange(0, sp.sum(Pkeep))
    Tnew1 = Pmap[tpore1[Tkeep]]
    Tnew2 = Pmap[tpore2[Tkeep]]
    network.update({'throat.conns': sp.vstack((Tnew1, Tnew2)).T})
    # Clear adjacency and incidence matrices which will be out of date now
    network._am.clear()
    network._im.clear()
|
python
|
{
"resource": ""
}
|
q20816
|
find_surface_pores
|
train
|
def find_surface_pores(network, markers=None, label='surface'):
    r"""
    Find the pores on the surface of the domain by performing a Delaunay
    triangulation between the network pores and some external ``markers``. All
    pores connected to these external marker points are considered surface
    pores.

    Parameters
    ----------
    network: OpenPNM Network Object
        The network for which the surface pores are to be found
    markers: array_like
        N x 3 array of the marker coordinates to use in the triangulation. The
        labeling is performed in one step, so all points are added, and then
        any pores connected to at least one marker is given the provided
        label. By default, this function will automatically generate 6 points
        outside each axis of the network domain.
        Users may wish to specify a single external marker point and provide
        an appropriate label in order to identify specific faces. For
        instance, the marker may be *above* the domain, and the label might
        be 'top_surface'.
    label : string
        The label to apply to the pores. The default is 'surface'.

    Notes
    -----
    This function does not check whether the given markers actually lie
    outside the domain, allowing the labeling of *internal* surfaces.
    If this method fails to mark some surface pores, consider sending more
    markers on each face.

    Examples
    --------
    >>> import openpnm as op
    >>> net = op.network.Cubic(shape=[5, 5, 5])
    >>> op.topotools.find_surface_pores(network=net)
    >>> net.num_pores('surface')
    98
    When cubic networks are created, the surfaces are already labeled:
    >>> net.num_pores(['top','bottom', 'left', 'right', 'front','back'])
    98
    This function is mostly useful for unique networks such as spheres,
    random topology, or networks that have been subdivided.
    """
    import scipy.spatial as sptl
    if markers is None:
        # Generate one marker well beyond each face of the bounding box
        (xmax, ymax, zmax) = sp.amax(network['pore.coords'], axis=0)
        (xmin, ymin, zmin) = sp.amin(network['pore.coords'], axis=0)
        xave = (xmin+xmax)/2
        yave = (ymin+ymax)/2
        zave = (zmin+zmax)/2
        markers = [[xmax + xave, yave, zave],
                   [xmin - xave, yave, zave],
                   [xave, ymax + yave, zave],
                   [xave, ymin - yave, zave],
                   [xave, yave, zmax + zave],
                   [xave, yave, zmin - zave]]
    markers = sp.atleast_2d(markers)
    # Incremental mode lets the markers be appended after the pores, so
    # marker vertices are exactly those with index >= network.Np
    tri = sptl.Delaunay(network['pore.coords'], incremental=True)
    tri.add_points(markers)
    (indices, indptr) = tri.vertex_neighbor_vertices
    for k in range(network.Np, tri.npoints):
        # Pores adjacent to this marker vertex (marker-marker links dropped)
        neighbors = indptr[indices[k]:indices[k+1]]
        inds = sp.where(neighbors < network.Np)
        neighbors = neighbors[inds]
        if 'pore.'+label not in network.keys():
            network['pore.'+label] = False
        network['pore.'+label][neighbors] = True
|
python
|
{
"resource": ""
}
|
q20817
|
clone_pores
|
train
|
def clone_pores(network, pores, labels=['clone'], mode='parents'):
    r'''
    Clones the specified pores and adds them to the network

    Parameters
    ----------
    network : OpenPNM Network Object
        The Network object to which the new pores are to be added
    pores : array_like
        List of pores to clone
    labels : string, or list of strings
        The labels to apply to the clones, default is 'clone'
    mode : string
        Controls the connections between parents and clones. Options are:

        - 'parents': (Default) Each clone is connected only to its parent
        - 'siblings': Clones are only connected to each other in the same
          manner as parents were connected
        - 'isolated': No connections between parents or siblings
    '''
    if len(network.project.geometries()) > 0:
        logger.warning('Network has active Geometries, new pores must be \
                       assigned a Geometry')
    if len(network.project.phases()) > 0:
        raise Exception('Network has active Phases, cannot proceed')
    if type(labels) == str:
        labels = [labels]
    # Remember the pre-clone counts so the new entities can be labelled later
    Np = network.Np
    Nt = network.Nt
    # Clone pores
    parents = sp.array(pores, ndmin=1)
    pcurrent = network['pore.coords']
    pclone = pcurrent[pores, :]
    pnew = sp.concatenate((pcurrent, pclone), axis=0)
    Npnew = sp.shape(pnew)[0]
    # Clones get the indices immediately after the existing pores
    clones = sp.arange(Np, Npnew)
    # Add clone labels to network
    for item in labels:
        network['pore.'+item] = False
        network['throat.'+item] = False
    # Add connections between parents and clones
    if mode == 'parents':
        tclone = sp.vstack((parents, clones)).T
        extend(network=network, pore_coords=pclone, throat_conns=tclone)
    if mode == 'siblings':
        # Throats with both ends in ``pores`` are duplicated between the
        # corresponding clones (offset by the current pore count)
        ts = network.find_neighbor_throats(pores=pores, mode='xnor')
        tclone = network['throat.conns'][ts] + network.num_pores()
        extend(network=network, pore_coords=pclone, throat_conns=tclone)
    if mode == 'isolated':
        extend(network=network, pore_coords=pclone)
    # Apply provided labels to cloned pores
    for item in labels:
        network['pore.'+item][network.pores('all') >= Np] = True
        network['throat.'+item][network.throats('all') >= Nt] = True
    # Clear adjacency and incidence matrices which will be out of date now
    network._am.clear()
    network._im.clear()
|
python
|
{
"resource": ""
}
|
q20818
|
stitch
|
train
|
def stitch(network, donor, P_network, P_donor, method='nearest',
           len_max=sp.inf, len_min=0, label_suffix=''):
    r'''
    Stitches a second network to the current network.

    Parameters
    ----------
    network : OpenPNM Network Object
        The Network to which the donor Network will be attached
    donor : OpenPNM Network Object
        The Network to stitch on to the current Network
    P_network : array_like
        The pores on the current Network
    P_donor : array_like
        The pores on the donor Network
    label_suffix : string or None
        Some text to append to each label in the donor Network before
        inserting them into the recipient. The default is to append no
        text, but a common option would be to append the donor Network's
        name. To insert none of the donor labels, use None.
    len_max : float
        Set a length limit on length of new throats
    method : string (default = 'nearest')
        The method to use when making pore to pore connections. Options are:

        - 'delaunay' : Use a Delaunay tessellation
        - 'nearest' : Connects each pore on the receptor network to its
          nearest pore on the donor network

        NOTE(review): only 'nearest' is implemented below; 'delaunay'
        raises. ``len_min`` is accepted but never used — confirm both
        against upstream.

    Notes
    -----
    Before stitching it is necessary to translate the pore coordinates of
    one of the Networks so that it is positioned correctly relative to the
    other.

    Examples
    --------
    >>> import openpnm as op
    >>> pn = op.network.Cubic(shape=[5, 5, 5])
    >>> pn2 = op.network.Cubic(shape=[5, 5, 5])
    >>> [pn.Np, pn.Nt]
    [125, 300]
    >>> [pn2.Np, pn2.Nt]
    [125, 300]
    >>> pn2['pore.coords'][:, 2] += 5.0
    >>> op.topotools.stitch(network=pn, donor=pn2, P_network=pn.pores('top'),
    ...                     P_donor=pn2.pores('bottom'), method='nearest',
    ...                     len_max=1.0)
    >>> [pn.Np, pn.Nt]
    [250, 625]
    '''
    # Ensure Networks have no associated objects yet
    if (len(network.project) > 1) or (len(donor.project) > 1):
        raise Exception('Cannot stitch a Network with active objects')
    network['throat.stitched'] = False
    # Get the initial number of pores and throats
    N_init = {}
    N_init['pore'] = network.Np
    N_init['throat'] = network.Nt
    if method == 'nearest':
        P1 = P_network
        P2 = P_donor + N_init['pore']  # Increment pores on donor
        C1 = network['pore.coords'][P_network]
        C2 = donor['pore.coords'][P_donor]
        # All candidate pairs within len_max of each other
        D = sp.spatial.distance.cdist(C1, C2)
        [P1_ind, P2_ind] = sp.where(D <= len_max)
        conns = sp.vstack((P1[P1_ind], P2[P2_ind])).T
    else:
        raise Exception('<{}> method not supported'.format(method))
    # Enter donor's pores into the Network
    extend(network=network, pore_coords=donor['pore.coords'])
    # Enter donor's throats into the Network
    extend(network=network, throat_conns=donor['throat.conns'] +
           N_init['pore'])
    # Trim throats that are longer then given len_max
    C1 = network['pore.coords'][conns[:, 0]]
    C2 = network['pore.coords'][conns[:, 1]]
    L = sp.sum((C1 - C2)**2, axis=1)**0.5
    conns = conns[L <= len_max]
    # Add donor labels to recipient network
    if label_suffix is not None:
        if label_suffix != '':
            label_suffix = '_'+label_suffix
        for label in donor.labels():
            element = label.split('.')[0]
            # Donor entities occupy the indices added above N_init
            locations = sp.where(network._get_indices(element) >=
                                 N_init[element])[0]
            if label + label_suffix not in network.keys():
                network[label + label_suffix] = False
            network[label+label_suffix][locations] = donor[label]
    # Add the new stitch throats to the Network
    extend(network=network, throat_conns=conns, labels='stitched')
    # Remove donor from Workspace, if present
    # This check allows for the reuse of a donor Network multiple times
    for sim in list(ws.values()):
        if donor in sim:
            del ws[sim.name]
|
python
|
{
"resource": ""
}
|
q20819
|
connect_pores
|
train
|
def connect_pores(network, pores1, pores2, labels=[], add_conns=True):
    r'''
    Generate (and optionally add) the throat connections linking two groups
    of pores in an all-to-all fashion.

    Parameters
    ----------
    network : OpenPNM Network Object
    pores1 : array_like
        The first group of pores on the network
    pores2 : array_like
        The second group of pores on the network
    labels : list of strings
        The labels applied to the new throats.  Only used when ``add_conns``
        is True.
    add_conns : bool
        If True (default), the connections are added to the supplied
        network.  Otherwise an Nt x 2 array is returned which can be passed
        directly to ``extend``.

    Notes
    -----
    ``pores1`` and ``pores2`` may also be lists of lists (of equal length),
    in which case each corresponding pair of sub-lists is connected
    member-to-member.  Each sub-element must then be a list or ndarray.
    '''
    # Promote flat inputs to the batch (list-of-lists) form; a scalar first
    # element has no len(), and an empty input raises IndexError.
    try:
        len(pores1[0])
    except (TypeError, IndexError):
        pores1 = [pores1]
    try:
        len(pores2[0])
    except (TypeError, IndexError):
        pores2 = [pores2]
    if len(pores1) != len(pores2):
        raise Exception('Running in batch mode! pores1 and pores2 must be' +
                        ' of the same length.')
    # For each batch, pair every pore in group 1 with every pore in group 2
    heads, tails = [], []
    for group1, group2 in zip(pores1, pores2):
        n1 = sp.size(group1)
        n2 = sp.size(group2)
        heads.append(sp.repeat(group1, n2))
        tails.append(sp.tile(group2, n1))
    conns = sp.vstack([sp.concatenate(heads), sp.concatenate(tails)]).T
    if not add_conns:
        return conns
    extend(network=network, throat_conns=conns, labels=labels)
|
python
|
{
"resource": ""
}
|
q20820
|
find_pore_to_pore_distance
|
train
|
def find_pore_to_pore_distance(network, pores1=None, pores2=None):
    r'''
    Compute the pairwise distances between two sets of pores.

    Parameters
    ----------
    network : OpenPNM Network Object
        The network object containing the pore coordinates
    pores1 : array_like
        The pore indices of the first set
    pores2 : array_like
        The pore indices of the second set.  Overlap with ``pores1`` is
        allowed.

    Returns
    -------
    A ``len(pores1)`` x ``len(pores2)`` distance matrix, where entry
    *(i, j)* is the distance between pore *i* of ``pores1`` and pore *j*
    of ``pores2``.
    '''
    from scipy.spatial.distance import cdist
    idx1 = sp.array(pores1, ndmin=1)
    idx2 = sp.array(pores2, ndmin=1)
    pts = network['pore.coords']
    return cdist(pts[idx1], pts[idx2])
|
python
|
{
"resource": ""
}
|
q20821
|
merge_pores
|
train
|
def merge_pores(network, pores, labels=['merged']):
    r"""
    Combines a selection of pores into a new single pore located at the
    centroid of the selected pores and connected to all of their neighbors.

    Parameters
    ----------
    network : OpenPNM Network Object

    pores : array_like
        The list of pores which are to be combined into a new single pore

    labels : string or list of strings
        The labels to apply to the new pore and new throat connections

    Notes
    -----
    (1) The method also works if a list of lists is passed, in which case
    it consecutively merges the given selections of pores.

    (2) The selection of pores should be chosen carefully, preferrable so that
    they all form a continuous cluster. For instance, it is recommended
    to use the ``find_nearby_pores`` method to find all pores within a
    certain distance of a given pore, and these can then be merged without
    causing any abnormal connections.

    Examples
    --------
    >>> import openpnm as op
    >>> pn = op.network.Cubic(shape=[20, 20, 1])
    >>> Ps = pn.find_nearby_pores(pores=111, r=5, flatten=True)
    >>> op.topotools.merge_pores(network=pn, pores=Ps, labels=['merged'])
    >>> print(pn.Np)
    321
    >>> pn.pores('merged')
    array([320])
    >>> pn.num_throats('merged')
    32
    """
    # Assert that `pores` is list of lists
    try:
        len(pores[0])
    except (TypeError, IndexError):
        pores = [pores]
    N = len(pores)
    NBs, XYZs = [], []
    # For each group, record its outside neighbors (the pores the merged
    # pore must connect to) and its centroid (the merged pore's location)
    for Ps in pores:
        NBs.append(network.find_neighbor_pores(pores=Ps,
                                               mode='union',
                                               flatten=True,
                                               include_input=False))
        XYZs.append(network['pore.coords'][Ps].mean(axis=0))
    # Add one new pore per group, located at that group's centroid
    extend(network, pore_coords=XYZs, labels=labels)
    # The new pores are the last N pores added to the network
    Pnew = network.Ps[-N::]
    # Possible throats between new pores: This only happens when running in
    # batch mode, i.e. multiple groups of pores are to be merged. In case
    # some of these groups share elements, possible throats between the
    # intersecting elements is not captured and must be added manually.
    pores_set = [set(items) for items in pores]
    NBs_set = [set(items) for items in NBs]
    ps1, ps2 = [], []
    from itertools import combinations
    # New pores i and j must be connected if a neighbor of group i lies
    # inside group j (i.e. the two groups touch each other)
    for i, j in combinations(range(N), 2):
        if not NBs_set[i].isdisjoint(pores_set[j]):
            ps1.append([network.Ps[-N+i]])
            ps2.append([network.Ps[-N+j]])
    # Add (possible) connections between the new pores
    connect_pores(network, pores1=ps1, pores2=ps2, labels=labels)
    # Add connections between the new pores and the rest of the network
    connect_pores(network, pores2=sp.split(Pnew, N), pores1=NBs, labels=labels)
    # Trim merged pores from the network (must happen last, after all new
    # connections are in place, since trimming renumbers pores)
    trim(network=network, pores=sp.concatenate(pores))
|
python
|
{
"resource": ""
}
|
q20822
|
template_sphere_shell
|
train
|
def template_sphere_shell(outer_radius, inner_radius=0):
    r"""
    Generate an image array of a spherical shell, suitable for passing to a
    Cubic network as a ``template`` to produce sphere-shaped networks.

    Parameters
    ----------
    outer_radius : int
        Number of nodes in the outer radius of the sphere.
    inner_radius : int
        Number of nodes in the inner radius of the shell.  A value of 0
        (default) gives a solid sphere.

    Returns
    -------
    A Numpy array with 1's marking the sphere-shell and 0's elsewhere.
    """
    # The 3D case of the shared sphere/disc template helper
    return _template_sphere_disc(dim=3, outer_radius=outer_radius,
                                 inner_radius=inner_radius)
|
python
|
{
"resource": ""
}
|
q20823
|
template_cylinder_annulus
|
train
|
def template_cylinder_annulus(height, outer_radius, inner_radius=0):
    r"""
    Generate an image array of an annular cylinder (disc-ring), suitable for
    passing to a Cubic network as a ``template`` to produce circular 2D
    networks.

    Parameters
    ----------
    height : int
        The height of the cylinder
    outer_radius : int
        Number of nodes in the outer radius of the cylinder
    inner_radius : int
        Number of nodes in the inner radius of the annulus.  A value of 0
        (default) gives a solid cylinder.

    Returns
    -------
    A Numpy array with 1's marking the disc-ring and 0's elsewhere.
    """
    # Build the 2D disc, then stack it along the third axis to the
    # requested height
    disc = _template_sphere_disc(dim=2, outer_radius=outer_radius,
                                 inner_radius=inner_radius)
    return sp.tile(sp.atleast_3d(disc), reps=height)
|
python
|
{
"resource": ""
}
|
q20824
|
plot_connections
|
train
|
def plot_connections(network, throats=None, fig=None, **kwargs):
    r"""
    Produces a 3D plot of the network topology showing how throats connect
    for quick visualization without having to export data to veiw in Paraview.

    Parameters
    ----------
    network : OpenPNM Network Object
        The network whose topological connections to plot

    throats : array_like (optional)
        The list of throats to plot if only a sub-sample is desired.  This is
        useful for inspecting a small region of the network.  If no throats are
        specified then all throats are shown.

    fig : Matplotlib figure handle and line property arguments
        If a ``fig`` is supplied, then the topology will be overlaid on this
        plot.  This makes it possible to combine coordinates and connections,
        and to color different throats differently (see ``kwargs``)

    kwargs : other named arguments
        By also in different line properties such as ``color`` it's possible to
        plot several different sets of connections with unique colors.

        For information on available line style options, visit the Matplotlib
        documentation on the `web
        <http://matplotlib.org/api/lines_api.html#matplotlib.lines.Line2D>`_

    Notes
    -----
    The figure handle returned by this method can be passed into
    ``plot_coordinates`` to create a plot that combines pore coordinates and
    throat connections, and vice versa.

    See Also
    --------
    plot_coordinates

    Examples
    --------
    >>> import openpnm as op
    >>> pn = op.network.Cubic(shape=[10, 10, 3])
    >>> pn.add_boundary_pores()
    >>> Ts = pn.throats('*boundary', mode='nor')
    >>> # Create figure showing boundary throats
    >>> fig = op.topotools.plot_connections(network=pn, throats=Ts)
    >>> Ts = pn.throats('*boundary')
    >>> # Pass existing fig back into function to plot additional throats
    >>> fig = op.topotools.plot_connections(network=pn, throats=Ts,
    ...                                     fig=fig, color='r')
    """
    import matplotlib.pyplot as plt
    from mpl_toolkits.mplot3d import Axes3D
    if throats is None:
        Ts = network.Ts
    else:
        Ts = network._parse_indices(indices=throats)
    # Treat the network as 2D when all pores share a single z-coordinate
    if len(sp.unique(network['pore.coords'][:, 2])) == 1:
        ThreeD = False
    else:
        ThreeD = True
    if fig is None:
        fig = plt.figure()
        if ThreeD:
            ax = fig.add_subplot(111, projection='3d')
        else:
            ax = fig.gca()
    else:
        # Overlay onto the caller's existing axes
        ax = fig.gca()
    # Create dummy indexing to sp.inf
    # Each throat contributes three indices: its two endpoint pores plus -1.
    # The -1 entries point at the sp.inf sentinel appended to the coordinate
    # arrays below, which causes Matplotlib to break the line between
    # separate throats so all segments can be drawn in a single plot call.
    i = -1*sp.ones((sp.size(Ts)*3, ), dtype=int)
    i[0::3] = network['throat.conns'][Ts, 0]
    i[1::3] = network['throat.conns'][Ts, 1]
    # Collect coordinates and scale axes to fit
    Ps = sp.unique(network['throat.conns'][Ts])
    X = network['pore.coords'][Ps, 0]
    Y = network['pore.coords'][Ps, 1]
    Z = network['pore.coords'][Ps, 2]
    _scale_3d_axes(ax=ax, X=X, Y=Y, Z=Z)
    # Add sp.inf to the last element of pore.coords (i.e. -1)
    inf = sp.array((sp.inf,))
    X = sp.hstack([network['pore.coords'][:, 0], inf])
    Y = sp.hstack([network['pore.coords'][:, 1], inf])
    Z = sp.hstack([network['pore.coords'][:, 2], inf])
    if ThreeD:
        ax.plot(xs=X[i], ys=Y[i], zs=Z[i], **kwargs)
    else:
        ax.plot(X[i], Y[i], **kwargs)
    return fig
|
python
|
{
"resource": ""
}
|
q20825
|
plot_coordinates
|
train
|
def plot_coordinates(network, pores=None, fig=None, **kwargs):
    r"""
    Produce a plot showing the specified pore coordinates as markers, in 3D
    when the network has depth and in 2D otherwise.

    Parameters
    ----------
    network : OpenPNM Network Object
        The network whose pore coordinates to plot
    pores : array_like (optional)
        The pores to plot if only a sub-sample is desired.  If omitted, all
        pores are shown.
    fig : Matplotlib figure handle
        If given, the coordinates are overlaid on this existing figure,
        allowing several pore sets (or throat connections from
        ``plot_connections``) to be combined.
    kwargs : dict
        Marker properties such as size (``s``) and color (``c``); see the
        Matplotlib `Line2D documentation
        <http://matplotlib.org/api/lines_api.html#matplotlib.lines.Line2D>`_

    Notes
    -----
    The returned figure handle can be passed to ``plot_connections`` to
    combine pore coordinates and throat connections, and vice versa.

    See Also
    --------
    plot_connections

    Examples
    --------
    >>> import openpnm as op
    >>> pn = op.network.Cubic(shape=[10, 10, 3])
    >>> pn.add_boundary_pores()
    >>> Ps = pn.pores('internal')
    >>> fig = op.topotools.plot_coordinates(network=pn, pores=Ps, c='b')
    >>> Ps = pn.pores('*boundary')
    >>> fig = op.topotools.plot_coordinates(network=pn, pores=Ps, fig=fig,
    ...                                     c='r')
    """
    import matplotlib.pyplot as plt
    from mpl_toolkits.mplot3d import Axes3D
    Ps = network.Ps if pores is None else network._parse_indices(indices=pores)
    # A single unique z-plane means the network is effectively 2D
    is_3d = len(sp.unique(network['pore.coords'][:, 2])) > 1
    if fig is None:
        fig = plt.figure()
        if is_3d:
            ax = fig.add_subplot(111, projection='3d')
        else:
            ax = fig.add_subplot(111)
    else:
        # Overlay onto the caller's existing axes
        ax = fig.gca()
    # Collect the requested coordinates
    X, Y, Z = network['pore.coords'][Ps].T
    if is_3d:
        _scale_3d_axes(ax=ax, X=X, Y=Y, Z=Z)
        ax.scatter(xs=X, ys=Y, zs=Z, **kwargs)
    else:
        ax.scatter(X, Y, **kwargs)
    return fig
|
python
|
{
"resource": ""
}
|
q20826
|
plot_networkx
|
train
|
def plot_networkx(network, plot_throats=True, labels=None, colors=None,
                  scale=10):
    r'''
    Returns a pretty 2d plot for 2d OpenPNM networks.

    Parameters
    ----------
    network : OpenPNM Network object
    plot_throats : boolean
        Plots throats as well as pores, if True.
    labels : list
        List of OpenPNM labels
    colors : list
        List of corresponding colors to the given `labels`.
    scale : float
        Scale factor for size of pores.

    Returns
    -------
    The ``networkx.Graph`` object used for drawing.

    Raises
    ------
    Exception
        If ``labels`` and ``colors`` have different lengths.
    '''
    import networkx as nx
    x, y, z = network['pore.coords'].T
    # Keep the two axes that actually vary, dropping the flat one, so a 2D
    # projection of the (assumed planar) network remains
    x, y = [j for j in [x, y, z] if not sp.allclose(j, j.mean())]
    G = nx.Graph()
    pos = {network.Ps[i]: [x[i], y[i]] for i in range(network.Np)}
    if 'pore.diameter' in network.keys():
        node_size = scale * network['pore.diameter']
    else:
        node_size = scale
    node_color = sp.array(['r'] * len(network.Ps))
    if labels:
        if type(labels) is not list:
            labels = [labels]
        if type(colors) is not list:
            colors = [colors]
        if len(labels) != len(colors):
            # BUG FIX: previously `raise('...')` raised a plain string, which
            # is itself a TypeError; raise a proper exception instead
            raise Exception('len(colors) must be equal to len(labels)!')
        for label, color in zip(labels, colors):
            node_color[network.pores(label)] = color
    nx.draw_networkx_nodes(G, pos=pos, nodelist=network.Ps.tolist(),
                           node_color=node_color, edge_color='r',
                           node_size=node_size)
    if plot_throats:
        nx.draw_networkx_edges(G, pos=pos, edge_color='k', alpha=0.8,
                               edgelist=network['throat.conns'].tolist())
    return G
|
python
|
{
"resource": ""
}
|
q20827
|
reflect_base_points
|
train
|
def reflect_base_points(base_pts, domain_size):
    r'''
    Reflect a set of points about the faces of a given domain.

    Parameters
    ----------
    base_pts : array_like
        The coordinates of the base_pts to be reflected, expressed in the
        coordinate system matching the domain:

        **spherical** : [r, theta, phi]

        **cylindrical** or **circular** : [r, theta, z]

        **rectangular** or **square** : [x, y, z]

    domain_size : list or array
        Controls the size and shape of the domain:

        **sphere** : a single value is treated as the radius [r] of a sphere
        centered on [0, 0, 0].

        **cylinder** : a two-element list is treated as the radius and
        height [r, z] of a cylinder at [0, 0, 0] extending in the positive
        z-direction.  A z of 0 gives a disk of radius r.

        **rectangle** : a three-element list is treated as the outer corner
        [x, y, z] of a rectangle whose opposite corner lies at [0, 0, 0].
        A z of 0 gives an X-by-Y rectangle.
    '''
    domain_size = sp.array(domain_size)
    if len(domain_size) == 1:
        # Spherical domain: mirror radially about the shell r = R
        r, theta, phi = base_pts
        r = sp.hstack([r, 2*domain_size[0] - r])
        theta = sp.hstack([theta, theta])
        phi = sp.hstack([phi, phi])
        base_pts = sp.vstack((r, theta, phi))
    elif len(domain_size) == 2:
        # Cylindrical domain: mirror radially, then (if not a disk) about
        # the top and bottom faces
        r, theta, z = base_pts
        r = sp.hstack([r, 2*domain_size[0] - r])
        theta = sp.hstack([theta, theta])
        z = sp.hstack([z, z])
        if domain_size[1] != 0:  # If not a disk
            r = sp.hstack([r, r, r])
            theta = sp.hstack([theta, theta, theta])
            z = sp.hstack([z, -z, 2-z])
        base_pts = sp.vstack((r, theta, z))
    elif len(domain_size) == 3:
        # Rectangular domain: reflect about all 6 faces (4 if z is 0)
        Nx, Ny, Nz = domain_size
        orig_pts = base_pts
        pieces = [orig_pts,
                  [-1, 1, 1]*orig_pts + [2.0*Nx, 0, 0],
                  [-1, 1, 1]*orig_pts,
                  [1, -1, 1]*orig_pts + [0, 2.0*Ny, 0],
                  [1, -1, 1]*orig_pts]
        if Nz != 0:
            pieces.append([1, 1, -1]*orig_pts + [0, 0, 2.0*Nz])
            pieces.append([1, 1, -1]*orig_pts)
        base_pts = sp.vstack(pieces)
    return base_pts
|
python
|
{
"resource": ""
}
|
q20828
|
find_clusters
|
train
|
def find_clusters(network, mask=[], t_labels=False):
    r"""
    Identify connected clusters of pores in the network.  Either site or
    bond percolation is performed, chosen by the length of the supplied
    ``mask``.

    Parameters
    ----------
    network : OpenPNM Network Object
        The network

    mask : array_like, boolean
        A list of active bonds or sites (throats or pores).  An Np-long
        mask triggers site percolation; an Nt-long mask triggers bond
        percolation.

    Returns
    -------
    A tuple of (Np-long pore cluster labels, Nt-long throat cluster
    labels).  Pores and throats with the same label number belong to the
    same cluster.

    Examples
    --------
    >>> import openpnm as op
    >>> from scipy import rand
    >>> pn = op.network.Cubic(shape=[25, 25, 1])
    >>> pn['pore.seed'] = rand(pn.Np)
    >>> pn['throat.seed'] = rand(pn.Nt)
    """
    # Validate that a boolean mask was supplied
    active = sp.array(mask, ndmin=1)
    if active.dtype != bool:
        raise Exception('Mask must be a boolean array of Np or Nt length')
    # Dispatch on mask length: pore mask -> site, throat mask -> bond
    n = sp.size(active)
    if n == network.Np:
        return _site_percolation(network, active)
    if n == network.Nt:
        return _bond_percolation(network, active)
    raise Exception('Mask received was neither Nt nor Np long')
|
python
|
{
"resource": ""
}
|
q20829
|
add_boundary_pores
|
train
|
def add_boundary_pores(network, pores, offset, apply_label='boundary'):
    r"""
    This method uses ``clone_pores`` to clone the input pores, then shifts
    them the specified amount and direction, then applies the given label.

    Parameters
    ----------
    network : OpenPNM Network Object
        The network to which the boundary pores are added
    pores : array_like
        List of pores to offset, as indices or as a boolean mask.
    offset : 3 x 1 array
        The distance in vector form which the cloned boundary pores should
        be offset.
    apply_label : string
        This label is applied to the boundary pores.  Default is
        'boundary'.  A leading 'pore.' or 'throat.' prefix is stripped.

    Examples
    --------
    >>> import openpnm as op
    >>> pn = op.network.Cubic(shape=[5, 5, 5])
    >>> print(pn.Np)  # Confirm initial Network size
    125
    >>> Ps = pn.pores('top')  # Select pores on top face
    >>> op.topotools.add_boundary_pores(network=pn, pores=Ps,
    ...                                 offset=[0, 0, 1])
    >>> print(pn.Np)  # Confirm addition of 25 new pores
    150
    """
    # Parse the input pores; a boolean mask is converted to indices
    Ps = sp.array(pores, ndmin=1)
    # BUG FIX: was `Ps.dtype is bool`, which is always False for a numpy
    # dtype object, so boolean masks were never converted to indices
    if Ps.dtype == bool:
        Ps = network.toindices(Ps)
    if sp.size(Ps) == 0:  # Handle an empty selection if given
        return sp.array([], dtype=sp.int64)
    # Clone the specifed pores
    clone_pores(network=network, pores=Ps)
    newPs = network.pores('pore.clone')
    del network['pore.clone']
    newTs = network.throats('clone')
    del network['throat.clone']
    # Offset the cloned pores by the given displacement vector
    network['pore.coords'][newPs] += offset
    # Apply labels to boundary pores (trim leading 'pore.'/'throat.' if
    # present)
    label = apply_label.split('.')[-1]
    plabel = 'pore.' + label
    tlabel = 'throat.' + label
    network[plabel] = False
    network[plabel][newPs] = True
    network[tlabel] = False
    network[tlabel][newTs] = True
|
python
|
{
"resource": ""
}
|
q20830
|
find_path
|
train
|
def find_path(network, pore_pairs, weights=None):
    r"""
    Find the shortest path between pairs of pores.

    Parameters
    ----------
    network : OpenPNM Network Object
        The Network object on which the search should be performed

    pore_pairs : array_like
        An N x 2 array containing N pairs of pores for which the shortest
        path is sought.

    weights : array_like, optional
        An Nt-long list of throat weights for the search.  Typically this
        would be the throat lengths, but could also be used to represent
        the phase configuration.  If no weights are given then the
        standard topological connections of the Network are used.

    Returns
    -------
    A dictionary containing both the pores and throats that define the
    shortest path connecting each pair of input pores.

    Notes
    -----
    The shortest path is found using Dijkstra's algorithm included in the
    scipy.sparse.csgraph module

    TODO: The returned throat path contains the correct values, but not
    necessarily in the true order

    Examples
    --------
    >>> import openpnm as op
    >>> pn = op.network.Cubic(shape=[3, 3, 3])
    >>> a = op.topotools.find_path(network=pn, pore_pairs=[[0, 4], [0, 10]])
    >>> a['pores']
    [array([0, 1, 4]), array([ 0,  1, 10])]
    >>> a['throats']
    [array([ 0, 19]), array([ 0, 37])]
    """
    Ps = sp.array(pore_pairs, ndmin=2)
    if weights is None:
        # Unweighted search: every throat costs 1 hop
        weights = sp.ones_like(network.Ts)
    graph = network.create_adjacency_matrix(weights=weights, fmt='csr',
                                            drop_zeros=False)
    # Run Dijkstra from each source pore; [1] keeps only the predecessor
    # matrix returned alongside the distances
    paths = csgraph.dijkstra(csgraph=graph, indices=Ps[:, 0],
                             return_predecessors=True)[1]
    pores = []
    throats = []
    for row in range(0, sp.shape(Ps)[0]):
        j = Ps[row][1]
        ans = []
        # Walk the predecessor chain backwards from the destination pore;
        # csgraph marks "no predecessor" (the source) with -9999
        while paths[row][j] > -9999:
            ans.append(j)
            j = paths[row][j]
        ans.append(Ps[row][0])
        ans.reverse()
        pores.append(sp.array(ans, dtype=int))
        # Throats shared by at least two pores on the path ('xnor' mode);
        # see the TODO above regarding their ordering
        Ts = network.find_neighbor_throats(pores=ans, mode='xnor')
        throats.append(sp.array(Ts, dtype=int))
    pdict = PrintableDict
    dict_ = pdict(**{'pores': pores, 'throats': throats})
    return dict_
|
python
|
{
"resource": ""
}
|
q20831
|
iscoplanar
|
train
|
def iscoplanar(coords):
    r'''
    Determines if given pores are coplanar with each other

    Parameters
    ----------
    coords : array_like
        An N x 3 list of pore coords to check for coplanarity.  At least 3
        pores are required.

    Returns
    -------
    A boolean value of whether given points are coplanar (True) or not
    (False)
    '''
    coords = sp.array(coords, ndmin=1)
    if sp.shape(coords)[0] < 3:
        raise Exception('At least 3 input pores are required')
    Px = coords[:, 0]
    Py = coords[:, 1]
    Pz = coords[:, 2]
    # Do easy check first, for common coordinate
    if sp.shape(sp.unique(Px))[0] == 1:
        return True
    if sp.shape(sp.unique(Py))[0] == 1:
        return True
    if sp.shape(sp.unique(Pz))[0] == 1:
        return True
    # Perform rigorous check using vector algebra: all points must lie in
    # the plane through point 0 with normal n
    n1 = sp.array((Px[1] - Px[0], Py[1] - Py[0], Pz[1] - Pz[0])).T
    n2 = sp.array((Px[2] - Px[1], Py[2] - Py[1], Pz[2] - Pz[1])).T
    # NOTE(review): if the first three points are colinear, n is the zero
    # vector and this test degenerates to True -- confirm inputs avoid this
    n = sp.cross(n1, n2)
    # BUG FIX: include the final point (was Px[1:-1], which skipped it) and
    # require *every* dot product with the normal to vanish; the previous
    # `sum(n_dot) == 0` accepted non-coplanar sets whose dots cancelled out
    r = sp.array((Px[1:] - Px[0], Py[1:] - Py[0], Pz[1:] - Pz[0]))
    n_dot = sp.dot(n, r)
    return bool(sp.allclose(n_dot, 0))
|
python
|
{
"resource": ""
}
|
q20832
|
OhmicConduction.calc_effective_conductivity
|
train
|
def calc_effective_conductivity(self, inlets=None, outlets=None,
                                domain_area=None, domain_length=None):
    r"""
    Calculate the effective electrical conductivity of the network.

    Parameters
    ----------
    inlets : array_like
        The pores where the inlet voltage boundary conditions were applied.
        If not given, an attempt is made to infer them from the algorithm.
    outlets : array_like
        The pores where the outlet voltage boundary conditions were applied.
        If not given, an attempt is made to infer them from the algorithm.
    domain_area : scalar, optional
        The area of the inlet (and outlet) boundary faces.  If not given, an
        estimate is made, but it is usually an underestimate.
    domain_length : scalar, optional
        The length of the domain between the inlet and outlet boundary
        faces.  If not given, an estimate is made, but it is usually an
        underestimate.

    Notes
    -----
    The estimated area and length come from the bounding box around the
    inlet and outlet pores, which do not necessarily lie on the edge of the
    domain, hence the underestimation.
    """
    # Gather the geometry arguments and delegate to the generic
    # effective-property calculation shared by transport algorithms
    options = {'inlets': inlets,
               'outlets': outlets,
               'domain_area': domain_area,
               'domain_length': domain_length}
    return self._calc_eff_prop(**options)
|
python
|
{
"resource": ""
}
|
q20833
|
washburn
|
train
|
def washburn(target, surface_tension='pore.surface_tension',
             contact_angle='pore.contact_angle',
             diameter='throat.diameter'):
    r"""
    Compute the capillary entry pressure assuming each throat is a
    cylindrical tube (Washburn equation).

    Parameters
    ----------
    target : OpenPNM Object
        The object for which these values are being calculated.  Controls
        the length of the calculated array and provides access to the
        necessary thermofluid properties.
    surface_tension : string
        Dictionary key of the surface tension values.  A pore property is
        interpolated to a throat list.
    contact_angle : string
        Dictionary key of the contact angle values.  A pore property is
        interpolated to a throat list.
    diameter : string
        Dictionary key of the throat diameter values.

    Notes
    -----
    The Washburn equation is:

    .. math::
        P_c = -\frac{2\sigma(cos(\theta))}{r}

    This is the most basic approach to calculating entry pressure and is
    suitable for highly non-wetting invading phases in most materials.
    """
    network = target.project.network
    phase = target.project.find_phase(target)
    element, sigma, theta = _get_key_props(phase=phase,
                                           diameter=diameter,
                                           surface_tension=surface_tension,
                                           contact_angle=contact_angle)
    radii = network[diameter]/2
    pc = -2*sigma*_sp.cos(_sp.radians(theta))/radii
    # Map the full-network values onto the target's own elements
    if diameter.split('.', 1)[0] == 'throat':
        pc = pc[phase.throats(target.name)]
    else:
        pc = pc[phase.pores(target.name)]
    # Zero-sized elements give infinite entry pressures; zero them out
    pc[_sp.absolute(pc) == _sp.inf] = 0
    return pc
|
python
|
{
"resource": ""
}
|
q20834
|
purcell
|
train
|
def purcell(target, r_toroid, surface_tension='pore.surface_tension',
            contact_angle='pore.contact_angle',
            diameter='throat.diameter'):
    r"""
    Compute the throat capillary entry pressure assuming the throat is a
    toroid (Purcell model).

    Parameters
    ----------
    target : OpenPNM Object
        The object for which these values are being calculated.  Controls
        the length of the calculated array and provides access to the
        necessary thermofluid properties.
    r_toroid : float or array_like
        The radius of the toroid surrounding the pore
    surface_tension : dict key (string)
        Dictionary key of the surface tension values.  A pore property is
        interpolated to a throat list.
    contact_angle : dict key (string)
        Dictionary key of the contact angle values.  A pore property is
        interpolated to a throat list.
    diameter : dict key (string)
        Dictionary key of the throat diameter values.

    Notes
    -----
    This approach accounts for the converging-diverging nature of many
    throat types.  Advancing the meniscus beyond the apex of the toroid
    requires a capillary pressure greater than that of a cylindrical tube
    of the same radius.  See Mason and Morrow [1]_ and Gostick [2]_.

    References
    ----------
    .. [1] G. Mason, N. R. Morrow, Effect of contact angle on capillary
           displacement curvatures in pore throats formed by spheres. J.
           Colloid Interface Sci. 168, 130 (1994).
    .. [2] J. Gostick, Random pore network modeling of fibrous PEMFC gas
           diffusion media using Voronoi and Delaunay tessellations. J.
           Electrochem. Soc. 160, F731 (2013).
    """
    network = target.project.network
    phase = target.project.find_phase(target)
    element, sigma, theta = _get_key_props(phase=phase,
                                           diameter=diameter,
                                           surface_tension=surface_tension,
                                           contact_angle=contact_angle)
    r = network[diameter]/2
    R = r_toroid
    # Filling angle at the point of meniscus instability (Mason & Morrow)
    alpha = theta - 180 + _sp.rad2deg(
        _sp.arcsin(_sp.sin(_sp.radians(theta))/(1 + r/R)))
    value = (-2*sigma/r) * (
        _sp.cos(_sp.radians(theta - alpha)) /
        (1 + R/r*(1 - _sp.cos(_sp.radians(alpha)))))
    # Map the full-network values onto the target's own elements
    if diameter.split('.', 1)[0] == 'throat':
        return value[phase.throats(target.name)]
    return value[phase.pores(target.name)]
|
python
|
{
"resource": ""
}
|
q20835
|
ransohoff_snap_off
|
train
|
def ransohoff_snap_off(target,
                       shape_factor=2.0,
                       wavelength=5e-6,
                       require_pair=False,
                       surface_tension='pore.surface_tension',
                       contact_angle='pore.contact_angle',
                       diameter='throat.diameter',
                       vertices='throat.offset_vertices',
                       **kwargs):
    r"""
    Computes the capillary snap-off pressure assuming the throat is cylindrical
    with converging-diverging change in diamater - like the Purcell model.
    The wavelength of the change in diamater is the fiber radius.

    Parameters
    ----------
    target : OpenPNM Object
        The object for which these values are being calculated. This
        controls the length of the calculated array, and also provides
        access to other necessary thermofluid properties.

    shape_factor :
        constant dependent on the shape of throat cross-section 1.75 - 2.0, see
        Ref

    wavelength : float or array like
        The transverse interfacial radius of curvature at the neck
        (fiber radius in fibrous media)

    require_pair : bool
        Controls whether snap-off requires a pair of arc meniscii to occur.

    surface_tension : dict key (string)
        The dictionary key containing the surface tension values to be used.
        If a pore property is given, it is interpolated to a throat list.

    contact_angle : dict key (string)
        The dictionary key containing the contact angle values to be used.
        If a pore property is given, it is interpolated to a throat list.

    throat_diameter : dict key (string)
        The dictionary key containing the throat diameter values to be used.

    Notes
    -----
    This equation should be used to calculate the snap off capillary pressure
    in fribrous media.  Throats that cannot support arc menisci are assigned
    ``nan``.

    References
    ----------
    [1]: Ransohoff, T.C., Gauglitz, P.A. and Radke, C.J., 1987. Snap‐off of gas
    bubbles in smoothly constricted noncircular capillaries. AIChE Journal,
    33(5), pp.753-765.
    """
    # NOTE(review): `shape_factor` is accepted but never used below --
    # confirm whether it should enter the curvature expression
    phase = target.project.find_phase(target)
    geometry = target.project.find_geometry(target)
    element, sigma, theta = _get_key_props(phase=phase,
                                           diameter=diameter,
                                           surface_tension=surface_tension,
                                           contact_angle=contact_angle)
    try:
        all_verts = geometry[vertices]
        # Work out whether throat geometry can support at least one pair of
        # adjacent arc menisci that can grow and merge to form snap-off
        # Only works if throat vertices are in convex hull order
        angles_ok = np.zeros(geometry.Nt, dtype=bool)
        for T in range(geometry.Nt):
            verts = all_verts[T]
            x = verts[:, 0]
            y = verts[:, 1]
            z = verts[:, 2]
            # Vertices rolled one position forward (p) and backward (m) so
            # each vertex can be compared with both of its neighbors
            p = 1
            m = -1
            verts_p = np.vstack((np.roll(x, p),
                                 np.roll(y, p),
                                 np.roll(z, p))).T
            verts_m = np.vstack((np.roll(x, m),
                                 np.roll(y, m),
                                 np.roll(z, m))).T
            v1 = verts_p - verts
            v2 = verts_m - verts
            corner_angles = np.rad2deg(tr.angle_between_vectors(v1,
                                                                v2,
                                                                axis=1))
            # Logical test for existence of arc menisci in each corner
            am = theta[T] <= 90 - corner_angles/2
            if require_pair:
                # Logical test for two adjacent arc menisci
                pair_p = np.logical_and(am, np.roll(am, + p))
                pair_m = np.logical_and(am, np.roll(am, + m))
                am_pair = np.any(np.logical_or(pair_p, pair_m))
                angles_ok[T] = am_pair
            else:
                # Logical test for any arc menisci
                angles_ok[T] = np.any(am)
    except Exception:
        # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; fall back to assuming all throats qualify when
        # the vertex property is missing or malformed
        logger.warning("Model is designed to work with property: " +
                       vertices)
        angles_ok = np.ones(geometry.Nt, dtype=bool)
    # Condition for arc menisci to form in corners
    rad_Ts = geometry[diameter]/2
    # Ransohoff and Radke eq. 4
    C = 1/rad_Ts - 1/wavelength
    value = sigma[phase.throats(target.name)]*C
    # Only throats that can support arc menisci can snap-off
    value[~angles_ok] = np.nan
    logger.info("Snap off pressures calculated for " +
                str(np.around(100*np.sum(angles_ok)/np.size(angles_ok), 0)) +
                "% of throats")
    return value
|
python
|
{
"resource": ""
}
|
q20836
|
purcell_bidirectional
|
train
|
def purcell_bidirectional(target, r_toroid,
                          num_points=1e2,
                          surface_tension='pore.surface_tension',
                          contact_angle='pore.contact_angle',
                          throat_diameter='throat.diameter',
                          pore_diameter='pore.diameter'):
    r"""
    Computes the throat capillary entry pressure assuming the throat is a
    toroid, using the toroidal meniscus model with mode 'touch'.

    This model accounts for meniscus protrusion into the adjacent pores and
    touching of solid features.  It is bidirectional because the two
    connected pores generally have different sizes, which determines how far
    the meniscus can protrude in each direction.

    Parameters
    ----------
    target : OpenPNM Object
        The object for which these values are being calculated.  This
        controls the length of the calculated array, and also provides
        access to other necessary thermofluid properties.
    r_toroid : float or array_like
        The radius of the toroid surrounding the pore.
    num_points : float (Default 100)
        The number of divisions to make along the profile length to assess
        the meniscus properties in order to find the touch length.
    surface_tension : dict key (string)
        The dictionary key containing the surface tension values to be used.
        If a pore property is given, it is interpolated to a throat list.
    contact_angle : dict key (string)
        The dictionary key containing the contact angle values to be used.
        If a pore property is given, it is interpolated to a throat list.
    throat_diameter : dict key (string)
        The dictionary key containing the throat diameter values to be used.
    pore_diameter : dict key (string)
        The dictionary key containing the pore diameter values to be used.

    Returns
    -------
    Nt-by-2 array of entry pressures, one column per direction (meniscus
    protruding toward pore 0 and toward pore 1 of each throat's conns).

    Notes
    -----
    Temporarily adds (and then removes) a meniscus model on ``target`` and a
    ``'throat.temp_diameter'`` array on the network; both are cleaned up
    before returning.
    """
    network = target.project.network
    conns = network['throat.conns']
    values = {}
    # Evaluate the 'touch' meniscus model once per throat end, using the
    # diameter of the pore at that end as the allowed protrusion length
    for p in range(2):
        network['throat.temp_diameter'] = network[pore_diameter][conns[:, p]]
        key = 'throat.touch_pore_'+str(p)
        target.add_model(propname=key,
                         model=pm.meniscus.toroidal,
                         mode='touch',
                         r_toroid=r_toroid,
                         num_points=num_points,
                         throat_diameter=throat_diameter,
                         surface_tension=surface_tension,
                         contact_angle=contact_angle,
                         touch_length='throat.temp_diameter')
        values[p] = target[key]
        # Remove the temporary model so the target is left unchanged
        target.remove_model(key)
    del network['throat.temp_diameter']
    # Stack the two directional results as columns
    return np.vstack((values[0], values[1])).T
|
python
|
{
"resource": ""
}
|
q20837
|
sinusoidal_bidirectional
|
train
|
def sinusoidal_bidirectional(target,
                             num_points=1e2,
                             surface_tension='pore.surface_tension',
                             contact_angle='pore.contact_angle',
                             throat_diameter='throat.diameter',
                             throat_amplitude='throat.amplitude',
                             throat_length='throat.length',
                             pore_diameter='pore.diameter'):
    r"""
    Computes the throat capillary entry pressure assuming the throat has a
    sinusoidal profile, using the sinusoidal meniscus model with mode
    'touch'.

    This model accounts for meniscus protrusion into the adjacent pores and
    touching of solid features.  It is bidirectional because the two
    connected pores generally have different sizes, which determines how far
    the meniscus can protrude in each direction.

    Parameters
    ----------
    target : OpenPNM Object
        The object for which these values are being calculated.  This
        controls the length of the calculated array, and also provides
        access to other necessary thermofluid properties.
    num_points : float (Default 100)
        The number of divisions to make along the profile length to assess
        the meniscus properties in order to find the touch length.
    surface_tension : dict key (string)
        The dictionary key containing the surface tension values to be used.
        If a pore property is given, it is interpolated to a throat list.
    contact_angle : dict key (string)
        The dictionary key containing the contact angle values to be used.
        If a pore property is given, it is interpolated to a throat list.
    throat_diameter : dict key (string)
        The dictionary key containing the throat diameter values to be used.
    throat_amplitude : dict key (string)
        The dictionary key containing the amplitude of variation in the
        throat diameter about the mean.
    throat_length : dict key (string)
        The dictionary key containing the throat length values to be used.
    pore_diameter : dict key (string)
        The dictionary key containing the pore diameter values to be used.

    Returns
    -------
    Nt-by-2 array of entry pressures, one column per direction (meniscus
    protruding toward pore 0 and toward pore 1 of each throat's conns).

    Notes
    -----
    Temporarily adds (and then removes) a meniscus model on ``target`` and a
    ``'throat.temp_diameter'`` array on the network; both are cleaned up
    before returning.
    """
    network = target.project.network
    conns = network['throat.conns']
    values = {}
    # Evaluate the 'touch' meniscus model once per throat end, using the
    # diameter of the pore at that end as the allowed protrusion length
    for p in range(2):
        network['throat.temp_diameter'] = network[pore_diameter][conns[:, p]]
        key = 'throat.touch_pore_'+str(p)
        target.add_model(propname=key,
                         model=pm.meniscus.sinusoidal,
                         mode='touch',
                         num_points=num_points,
                         surface_tension=surface_tension,
                         contact_angle=contact_angle,
                         throat_diameter=throat_diameter,
                         throat_amplitude=throat_amplitude,
                         throat_length=throat_length,
                         touch_length='throat.temp_diameter')
        values[p] = target[key]
        # Remove the temporary model so the target is left unchanged
        target.remove_model(key)
    del network['throat.temp_diameter']
    # Stack the two directional results as columns
    return np.vstack((values[0], values[1])).T
|
python
|
{
"resource": ""
}
|
q20838
|
NetworkX.to_networkx
|
train
|
def to_networkx(cls, network):
    r"""
    Write OpenPNM Network to a NetworkX object.

    Parameters
    ----------
    network : OpenPNM Network Object
        The OpenPNM Network to be converted to a NetworkX object

    Returns
    -------
    A NetworkX ``Graph`` object with all pore/throat properties and labels
    attached as node/edge attributes (with the 'pore.'/'throat.' prefixes
    stripped from the attribute names).

    Raises
    ------
    Exception
        If ``network`` is not an OpenPNM GenericNetwork instance.
    """
    # Ensure network is an OpenPNM Network object.
    if not isinstance(network, GenericNetwork):
        # Bug fix: the original ``raise('...')`` raised a bare string, which
        # produces "TypeError: exceptions must derive from BaseException"
        # instead of the intended message.
        raise Exception('Provided network is not an OpenPNM Network.')
    G = nx.Graph()
    # Extracting node list and connectivity matrix from Network
    nodes = map(int, network.Ps)
    conns = network['throat.conns']
    # Explicitly add nodes and connectivity matrix
    G.add_nodes_from(nodes)
    G.add_edges_from(conns)
    # Attach Network properties to G
    for prop in network.props(deep=True) + network.labels():
        if 'pore.' in prop:
            if len(network[prop].shape) > 1:
                # Multi-column data (e.g. coords) becomes a list per node
                val = {i: list(network[prop][i]) for i in network.Ps}
            else:
                val = {i: network[prop][i] for i in network.Ps}
            # Strip the 'pore.' prefix from the attribute name
            nx.set_node_attributes(G, name=prop[5:], values=val)
        if 'throat.' in prop:
            # Edge attributes are keyed by the (pore1, pore2) tuple
            val = {tuple(conn): network[prop][i] for i, conn
                   in enumerate(conns)}
            # Strip the 'throat.' prefix from the attribute name
            nx.set_edge_attributes(G, name=prop[7:], values=val)
    return G
|
python
|
{
"resource": ""
}
|
q20839
|
conduit_conductance
|
train
|
def conduit_conductance(target, throat_conductance,
                        throat_occupancy='throat.occupancy',
                        pore_occupancy='pore.occupancy',
                        mode='strict', factor=1e-6):
    r"""
    Compute the conductance of each pore-throat-pore conduit, penalizing
    conduits whose elements are invaded by the other phase.

    Parameters
    ----------
    target : OpenPNM Object
        The OpenPNM object where the model is attached (a Physics or a
        Phase).
    throat_conductance : string
        The transport conductance of the phase associated with ``target``
        at single-phase conditions.
    pore_occupancy : string
        Property name holding the pore occupancy of the associated phase;
        an occupancy of 1 means the pore is completely filled and fully
        conducts.
    throat_occupancy : string
        Property name holding the throat occupancy of the associated phase;
        an occupancy of 1 means the throat is completely filled and fully
        conducts.
    mode : 'strict' or 'medium' or 'loose'
        How aggressively a conduit is declared closed:

        **'strict'** : closed if ANY pore or the throat is unoccupied.

        **'medium'** : closed if the throat or BOTH pores are unoccupied.

        **'loose'** : closed only if the throat is unoccupied.
    factor : float (default 1e-6)
        Multiplier applied to a closed conduit's conductance so transport
        is severely limited but not exactly zero.
    """
    project = target.project
    network = project.network
    phase = project.find_phase(target)
    # An element counts as unoccupied when less than half filled
    throat_blocked = phase[throat_occupancy] < 0.5
    conn_pores = network['throat.conns']
    pore_blocked = phase[pore_occupancy][conn_pores] < 0.5
    if mode == 'loose':
        closed = throat_blocked
    elif mode == 'medium':
        closed = throat_blocked + sp.all(pore_blocked, axis=1)
    elif mode == 'strict':
        closed = throat_blocked + sp.any(pore_blocked, axis=1)
    else:
        raise Exception('Unrecongnized mode '+mode)
    conductance = phase[throat_conductance].copy()
    # Scale down (rather than zero) the closed conduits
    conductance[closed] = conductance[closed]*factor
    # Map the full-phase throat list onto the target object
    Ts = network.map_throats(throats=target.Ts, origin=target)
    return conductance[Ts]
|
python
|
{
"resource": ""
}
|
q20840
|
pore_coords
|
train
|
def pore_coords(target):
    r"""
    Compute throat coordinates as the midpoint of each throat's two pore
    coordinates.
    """
    net = target.project.network
    throats = net.throats(target.name)
    # Index pore coords by conns to get an Nt-by-2-by-3 array, then average
    # over the pair axis
    endpoints = net['pore.coords'][net['throat.conns']]
    return _sp.mean(endpoints, axis=1)[throats]
|
python
|
{
"resource": ""
}
|
q20841
|
ModelsDict.dependency_list
|
train
|
def dependency_list(self):
    r'''
    Returns a list of model names in the order with which they should be
    called to ensure data is calculated by one model before it's asked for
    by another.

    Returns
    -------
    List of model property names in a valid evaluation order.

    Raises
    ------
    Exception
        If the dependency graph contains a cycle, meaning the dependencies
        are unresolvable (there is no calling order that works).  In this
        case the graph can be inspected visually using
        ``dependency_graph``.

    See Also
    --------
    dependency_graph
    dependency_map
    '''
    dtree = self.dependency_graph()
    # A cycle means no valid ordering exists; report the first cycle found,
    # closing the loop by appending its first member again
    cycles = list(nx.simple_cycles(dtree))
    if cycles:
        raise Exception('Cyclic dependency found: ' + ' -> '.join(
            cycles[0] + [cycles[0][0]]))
    # ``sorted`` as the tie-breaker makes the ordering deterministic
    d = nx.algorithms.dag.lexicographical_topological_sort(dtree, sorted)
    return list(d)
|
python
|
{
"resource": ""
}
|
q20842
|
ModelsDict.dependency_graph
|
train
|
def dependency_graph(self):
    r"""
    Build and return a NetworkX ``DiGraph`` of the model dependencies,
    with an edge from each dependency to the model that consumes it.

    See Also
    --------
    dependency_list
    dependency_map

    Notes
    -----
    To visualize the dependencies, the following NetworkX function and
    settings is helpful:

    nx.draw_spectral(d, arrowsize=50, font_size=32, with_labels=True,
                     node_size=2000, width=3.0, edge_color='lightgrey',
                     font_weight='bold')
    """
    graph = nx.DiGraph()
    known_props = list(self.keys())
    for prop in known_props:
        graph.add_node(prop)
        # Only values that are themselves model names count as dependencies
        for dep in self[prop].values():
            if dep in known_props:
                graph.add_edge(dep, prop)
    return graph
|
python
|
{
"resource": ""
}
|
q20843
|
ModelsDict.dependency_map
|
train
|
def dependency_map(self):
    r"""
    Draw the model dependency graph with a readable spectral layout.

    See Also
    --------
    dependency_graph
    dependency_list
    """
    graph = self.dependency_graph()
    # Large labels/nodes and light edges keep the plot legible
    drawing = nx.draw_spectral(graph,
                               with_labels=True,
                               arrowsize=50,
                               node_size=2000,
                               edge_color='lightgrey',
                               width=3.0,
                               font_size=32,
                               font_weight='bold')
    return drawing
|
python
|
{
"resource": ""
}
|
q20844
|
ModelsMixin.regenerate_models
|
train
|
def regenerate_models(self, propnames=None, exclude=[], deep=False):
    r"""
    Re-runs the specified model or models.

    Parameters
    ----------
    propnames : string or list of strings
        The list of property names to be regenerated.  If None are given
        then ALL models are re-run (except for those whose ``regen_mode``
        is 'constant').
    exclude : list of strings
        Since the default behavior is to run ALL models, this can be used
        to exclude specific models.  It may be more convenient to supply
        as list of 2 models to exclude than to specify 8 models to include.
        NOTE: the mutable default ``[]`` is never mutated here, so sharing
        it between calls is safe.
    deep : boolean
        Specifies whether or not to regenerate models on all associated
        objects.  For instance, if ``True``, then all Physics models will
        be regenerated when method is called on the corresponding Phase.
        The default is ``False``.  The method does not work in reverse,
        so regenerating models on a Physics will not update a Phase.
    """
    # If empty list of propnames was given, do nothing and return
    if type(propnames) is list and len(propnames) == 0:
        return
    if type(propnames) is str:  # Convert string to list if necessary
        propnames = [propnames]
    if propnames is None:  # If no props given, then regenerate them all
        propnames = self.models.dependency_list()
    # If some props are to be excluded, remove them from list
    if len(exclude) > 0:
        propnames = [i for i in propnames if i not in exclude]
    # Re-order given propnames according to dependency tree
    self_models = self.models.dependency_list()
    propnames = [i for i in self_models if i in propnames]
    if deep:
        other_models = None  # Will trigger regen of ALL models
    else:
        # Make list of given propnames that are not in self
        other_models = list(set(propnames).difference(set(self_models)))
    # The following has some redundant lines, but is easier to understand
    if self._isa('phase'):
        # Start be regenerating models on self
        for item in propnames:
            self._regen(item)
        # Then regen models on associated objects, if any in other_models
        for phys in self.project.find_physics(phase=self):
            phys.regenerate_models(propnames=other_models, deep=False)
    elif self._isa('network'):  # Repeat for other object types
        for item in propnames:
            self._regen(item)
        for geom in self.project.geometries().values():
            geom.regenerate_models(propnames=other_models, deep=False)
    else:
        # Geometry/Physics objects only regenerate their own models
        for item in propnames:
            self._regen(item)
|
python
|
{
"resource": ""
}
|
q20845
|
ModelsMixin.remove_model
|
train
|
def remove_model(self, propname=None, mode=['model', 'data']):
    r"""
    Removes model and data from object.

    Parameters
    ----------
    propname : string or list of strings
        The property or list of properties to remove.  If ``None``
        (default) the method does nothing and returns.  (Previously a
        ``None`` value raised a TypeError when iterated.)
    mode : list of strings
        Controls what is removed.  Options are:

        *'model'* : Removes the model but not any numerical data that may
        already exist.

        *'data'* : Removes the data but leaves the model.

        The default is both.  NOTE: the mutable default is never mutated
        here, so sharing it between calls is safe.
    """
    # Bug fix: gracefully handle the documented default of propname=None
    if propname is None:
        return
    if type(propname) is str:
        propname = [propname]
    for item in propname:
        # Remove the model entry, if requested and present
        if 'model' in mode and item in self.models.keys():
            del self.models[item]
        # Remove the produced data array, if requested and present
        if 'data' in mode and item in self.keys():
            del self[item]
|
python
|
{
"resource": ""
}
|
q20846
|
equivalent_diameter
|
train
|
def equivalent_diameter(target, throat_area='throat.area',
                        throat_shape='circle'):
    r"""
    Calculates the diameter of a circle or edge-length of a square with the
    same area as the throat.

    Parameters
    ----------
    target : OpenPNM Object
        The object which this model is associated with.  This controls the
        length of the calculated array, and also provides access to other
        necessary properties.
    throat_area : string
        The dictionary key to the throat area values
    throat_shape : string
        The cross-sectional shape of the throat to assume when
        back-calculating from the area.  Options are 'circle' (default) or
        'square'.

    Raises
    ------
    Exception
        If ``throat_shape`` is not recognized.  (Previously an unknown
        shape caused an UnboundLocalError at the return statement.)
    """
    area = target[throat_area]
    if throat_shape.startswith('circ'):
        # A = pi*(d/2)^2  =>  d = 2*sqrt(A/pi)
        value = 2*_np.sqrt(area/_np.pi)
    elif throat_shape.startswith('square'):
        # A = L^2  =>  L = sqrt(A)
        value = _np.sqrt(area)
    else:
        raise Exception('Unrecognized throat_shape: ' + throat_shape)
    return value
|
python
|
{
"resource": ""
}
|
q20847
|
TransientReactiveTransport.set_IC
|
train
|
def set_IC(self, values):
    r"""
    Set the simulation's initial condition field.

    Parameters
    ----------
    values : ND-array or scalar
        Initial conditions as an 'Np'-long array ('Np' being the number of
        pores).  A scalar imposes the same value in every pore.
    """
    quantity = self.settings['quantity']
    self[quantity] = values
    # Re-read and cast so the stored field is always float64, regardless of
    # the dtype the caller supplied
    self[quantity] = self[quantity].astype('float64')
|
python
|
{
"resource": ""
}
|
q20848
|
TransientReactiveTransport._t_update_A
|
train
|
def _t_update_A(self):
    r"""
    A method to update the 'A' matrix at each time step according to the
    time-stepping scheme in ``settings['t_scheme']``.

    Returns
    -------
    A : sparse matrix (COO format)
        The updated coefficient matrix; also stored on ``self._A``.
    """
    network = self.project.network
    Vi = network['pore.volume']
    dt = self.settings['t_step']
    s = self.settings['t_scheme']
    # f1 weights the steady-state operator, f2 the transient (accumulation)
    # term; 'steady' drops the transient term entirely
    if (s == 'implicit'):
        f1, f2 = 1, 1
    elif (s == 'cranknicolson'):
        f1, f2 = 0.5, 1
    elif (s == 'steady'):
        f1, f2 = 1, 0
    # Compute A (operations involve conversion to 'csr')
    # NOTE(review): shape=(self.Np,) combined with data reshaped to
    # (Np, 1) looks inconsistent -- confirm the intended diagonal
    # volume/dt construction
    A = ((f2/dt) * sprs.coo_matrix.multiply(
        sprs.coo_matrix(np.reshape(Vi, (self.Np, 1)), shape=(self.Np,)),
        sprs.identity(self.Np, format='coo')) + f1 * self._A_steady)
    # Convert A to 'coo' format to apply BCs
    A = sprs.coo_matrix(A)
    self._A = A
    return A
|
python
|
{
"resource": ""
}
|
q20849
|
TransientReactiveTransport._t_update_b
|
train
|
def _t_update_b(self):
    r"""
    A method to update the 'b' array at each time step according to
    ``settings['t_scheme']`` and the source term value.

    Returns
    -------
    b : ND-array
        The updated right-hand-side vector; also stored on ``self._b``.
    """
    network = self.project.network
    phase = self.project.phases()[self.settings['phase']]
    Vi = network['pore.volume']
    dt = self.settings['t_step']
    s = self.settings['t_scheme']
    # f1 weights the explicit part of the old solution, f2 the transient
    # term, f3 selects the (zero) steady-state contribution
    if (s == 'implicit'):
        f1, f2, f3 = 1, 1, 0
    elif (s == 'cranknicolson'):
        f1, f2, f3 = 0.5, 1, 0
    elif (s == 'steady'):
        f1, f2, f3 = 1, 0, 1
    x_old = self[self.settings['quantity']]
    # Explicit contribution of the previous time step's solution
    b = (f2*(1-f1)*(-self._A_steady)*x_old +
         f2*(Vi/dt)*x_old +
         f3*np.zeros(shape=(self.Np, ), dtype=float))
    self._update_physics()
    # Fold the (explicit share of the) source-term rates into b
    for item in self.settings['sources']:
        Ps = self.pores(item)
        # Update b
        b[Ps] = b[Ps] - f2*(1-f1)*(phase[item+'.'+'rate'][Ps])
    self._b = b
    return b
|
python
|
{
"resource": ""
}
|
q20850
|
TransientReactiveTransport._t_run_reactive
|
train
|
def _t_run_reactive(self, x):
    r"""
    Repeatedly updates transient 'A', 'b', and the solution guess within
    each time step according to the applied source term then calls '_solve'
    to solve the resulting system of linear equations.  Stops when the
    residual falls below 'r_tolerance'.

    Parameters
    ----------
    x : ND-array
        Initial guess of unknown variable

    Returns
    -------
    x_new : ND-array
        Solution array.

    Notes
    -----
    Description of 'relaxation_quantity' and 'max_iter' settings can be
    found in the parent class 'ReactiveTransport' documentation.
    """
    if x is None:
        x = np.zeros(shape=[self.Np, ], dtype=float)
    self[self.settings['quantity']] = x
    relax = self.settings['relaxation_quantity']
    # Start with a large residual so at least one iteration runs
    res = 1e+06
    for itr in range(int(self.settings['max_iter'])):
        if res >= self.settings['r_tolerance']:
            logger.info('Tolerance not met: ' + str(res))
            self[self.settings['quantity']] = x
            # Restore the cached transient matrices before re-applying the
            # (solution-dependent) source terms
            self._A = (self._A_t).copy()
            self._b = (self._b_t).copy()
            self._apply_sources()
            x_new = self._solve()
            # Relaxation
            x_new = relax*x_new + (1-relax)*self[self.settings['quantity']]
            self[self.settings['quantity']] = x_new
            # Residual based on change in squared values between iterations
            # NOTE(review): looks like a nonstandard residual metric --
            # confirm |x^2 - x_new^2| is intended rather than |x - x_new|
            res = np.sum(np.absolute(x**2 - x_new**2))
            x = x_new
        if (res < self.settings['r_tolerance'] or
                self.settings['sources'] == []):
            logger.info('Solution converged: ' + str(res))
            break
    return x_new
|
python
|
{
"resource": ""
}
|
q20851
|
general_symbolic
|
train
|
def general_symbolic(target, eqn=None, arg_map=None):
    r'''
    A general function to interpret a sympy equation and evaluate the linear
    components of the source term.

    Parameters
    ----------
    target : OpenPNM object
        The OpenPNM object where the result will be applied.
    eqn : sympy symbolic expression for the source terms
        e.g. y = a*x**b + c
    arg_map : Dict mapping the symbols in the expression to OpenPNM data
        on the target. Must contain 'x' which is the independent variable.
        e.g. arg_map={'a':'pore.a', 'b':'pore.b', 'c':'pore.c', 'x':'pore.x'}

    Example
    ----------
    >>> import openpnm as op
    >>> from openpnm.models.physics import generic_source_term as gst
    >>> import scipy as sp
    >>> import sympy as _syp
    >>> pn = op.network.Cubic(shape=[5, 5, 5], spacing=0.0001)
    >>> water = op.phases.Water(network=pn)
    >>> water['pore.a'] = 1
    >>> water['pore.b'] = 2
    >>> water['pore.c'] = 3
    >>> water['pore.x'] = sp.random.random(water.Np)
    >>> a, b, c, x = _syp.symbols('a,b,c,x')
    >>> y = a*x**b + c
    >>> arg_map = {'a':'pore.a', 'b':'pore.b', 'c':'pore.c', 'x':'pore.x'}
    >>> water.add_model(propname='pore.general',
    ...                 model=gst.general_symbolic,
    ...                 eqn=y, arg_map=arg_map,
    ...                 regen_mode='normal')
    >>> assert 'pore.general.rate' in water.props()
    >>> assert 'pore.general.S1' in water.props()
    >>> assert 'pore.general.S1' in water.props()
    '''
    # First make sure all the symbols have been allocated dict items
    # (walk the expression tree and check every Symbol against arg_map)
    for arg in _syp.postorder_traversal(eqn):
        if _syp.srepr(arg)[:6] == 'Symbol':
            # Extract the symbol name from its srepr, e.g. Symbol('a') -> a
            key = _syp.srepr(arg)[7:].strip('(').strip(')').strip("'")
            if key not in arg_map.keys():
                raise Exception('argument mapping incomplete, missing '+key)
    if 'x' not in arg_map.keys():
        raise Exception('argument mapping must contain "x" for the ' +
                        'independent variable')
    # Get the data
    data = {}
    args = {}
    for key in arg_map.keys():
        data[key] = target[arg_map[key]]
        # Callable functions
        args[key] = _syp.symbols(key)
    # Build the rate and the two linearized source-term functions
    r, s1, s2 = _build_func(eqn, **args)
    r_val = r(*data.values())
    s1_val = s1(*data.values())
    s2_val = s2(*data.values())
    values = {'S1': s1_val, 'S2': s2_val, 'rate': r_val}
    return values
|
python
|
{
"resource": ""
}
|
q20852
|
toc
|
train
|
def toc(quiet=False):
    r"""
    Homemade version of the matlab tic/toc pair: report the time elapsed
    since the last call to ``tic``.

    Parameters
    ----------
    quiet : Boolean
        If False (default) a message is printed to the console.  If True
        nothing is printed and the elapsed time is returned instead.

    See Also
    --------
    tic
    """
    # Guard clause: ``tic`` stores the start time in module globals
    if '_startTime_for_tictoc' not in globals():
        raise Exception('Start time not set, call tic first')
    elapsed = _time.time() - _startTime_for_tictoc
    if quiet is False:
        print('Elapsed time in seconds: ', elapsed)
    else:
        return elapsed
|
python
|
{
"resource": ""
}
|
q20853
|
flat_list
|
train
|
def flat_list(input_list):
    r"""
    Given a list of nested lists of arbitrary depth, returns a single level
    or 'flat' list, preserving element order.  A non-list input is wrapped
    in a one-element list.
    """
    if not isinstance(input_list, list):
        return [input_list]
    flattened = []
    for element in input_list:
        # Recurse into each element; non-lists come back as [element]
        flattened.extend(flat_list(element))
    return flattened
|
python
|
{
"resource": ""
}
|
q20854
|
sanitize_dict
|
train
|
def sanitize_dict(input_dict):
    r"""
    Recursively copy a nested mapping into plain Python dicts.  This is
    necessary for pickling, or just converting an 'auto-vivifying' dict to
    something that acts normal.
    """
    plain = dict()
    for key in input_dict.keys():
        entry = input_dict[key]
        # Anything mapping-like (has .keys) is recursed; leaves are copied
        # by reference
        plain[key] = sanitize_dict(entry) if hasattr(entry, 'keys') else entry
    return plain
|
python
|
{
"resource": ""
}
|
q20855
|
models_to_table
|
train
|
def models_to_table(obj, params=True):
    r"""
    Converts a ModelsDict object to a ReST compatible table

    Parameters
    ----------
    obj : OpenPNM object
        Any object that has a ``models`` attribute
    params : boolean
        Indicates whether or not to include a list of parameter
        values in the table.  Set to False for just a list of models, and
        True for a more verbose table with all parameter values.
    """
    if not hasattr(obj, 'models'):
        raise Exception('Received object does not have any models')
    # Horizontal rule matching the four column widths used below
    divider = '+' + '-'*4 + '+' + '-'*22 + '+' + '-'*18 + '+' + '-'*26 + '+'
    fmt = '{0:1s} {1:2s} {2:1s} {3:20s} {4:1s} {5:16s} {6:1s} {7:24s} {8:1s}'
    table = [divider,
             fmt.format('|', '#', '|', 'Property Name', '|', 'Parameter',
                        '|', 'Value', '|'),
             divider.replace('-', '=')]
    for index, propname in enumerate(obj.models.keys()):
        shown_name = propname
        if len(shown_name) > 20:
            # Truncate long property names with an ellipsis
            shown_name = propname[:17] + "..."
        entry = obj.models[propname].copy()
        # The function repr is '<function name at 0x...>'; keep the name
        model_name = str(entry.pop('model')).split(' ')[1]
        table.append(fmt.format('|', str(index+1), '|', shown_name, '|',
                                'model:', '|', model_name, '|'))
        table.append(divider)
        if not params:
            continue
        for arg in entry.keys():
            col3 = arg
            if len(col3) > 16:
                col3 = col3[:14] + '...'
            col4 = str(entry[arg])
            if len(col4) > 24:
                col4 = col4[:21] + '...'
            table.append(fmt.format('|', '', '|', '', '|', col3, '|',
                                    col4, '|'))
            table.append(divider)
    return '\n'.join(table)
|
python
|
{
"resource": ""
}
|
q20856
|
conduit_lengths
|
train
|
def conduit_lengths(network, throats=None, mode='pore'):
    r"""
    Return the respective lengths of the conduit components defined by the
    throat conns: P1 - T - P2.

    Parameters
    ----------
    network : OpenPNM Network object
        The network containing the geometry arrays used here.
    throats : array_like, optional
        Throat indices to return; all throats if not given.
    mode : string
        'pore' (default) uses pore coordinates; 'centroid' uses pore and
        throat centroids (falling back to 'pore' when centroid data is
        missing or contains NaNs).

    Returns
    -------
    Nt-by-3 array of [pore1 length, throat length, pore2 length], sliced to
    the requested throats.
    """
    if throats is None:
        throats = network.throats()
    Ps = network['throat.conns']
    pdia = network['pore.diameter']
    if mode == 'centroid':
        try:
            pcentroids = network['pore.centroid']
            tcentroids = network['throat.centroid']
            # Any NaN in the centroid data invalidates this mode entirely
            if _sp.sum(_sp.isnan(pcentroids)) + _sp.sum(_sp.isnan(tcentroids)) > 0:
                mode = 'pore'
            else:
                # Half-lengths: pore-centroid to throat-centroid distance,
                # minus half the throat length
                plen1 = _sp.sqrt(_sp.sum(_sp.square(pcentroids[Ps[:, 0]] -
                                 tcentroids), 1))-network['throat.length']/2
                plen2 = _sp.sqrt(_sp.sum(_sp.square(pcentroids[Ps[:, 1]] -
                                 tcentroids), 1))-network['throat.length']/2
        except KeyError:
            # Centroid arrays not present; fall back to coordinates
            mode = 'pore'
    if mode == 'pore':
        # Find half-lengths of each pore
        pcoords = network['pore.coords']
        # Find the pore-to-pore distance, minus the throat length
        lengths = _sp.sqrt(_sp.sum(_sp.square(pcoords[Ps[:, 0]] -
                           pcoords[Ps[:, 1]]), 1)) - network['throat.length']
        # Clamp negative (overlapping) conduits to a tiny positive length
        lengths[lengths < 0.0] = 2e-9
        # Calculate the fraction of that distance from the first pore
        try:
            # Split by relative pore diameters
            fractions = pdia[Ps[:, 0]]/(pdia[Ps[:, 0]] + pdia[Ps[:, 1]])
            # Don't allow zero lengths
            # fractions[fractions == 0.0] = 0.5
            # fractions[fractions == 1.0] = 0.5
        except:
            # NOTE(review): bare except silently falls back to an even
            # split -- confirm which failures this is meant to absorb
            fractions = 0.5
        plen1 = lengths*fractions
        plen2 = lengths*(1-fractions)
    return _sp.vstack((plen1, network['throat.length'], plen2)).T[throats]
|
python
|
{
"resource": ""
}
|
q20857
|
FickianDiffusion.calc_effective_diffusivity
|
train
|
def calc_effective_diffusivity(self, inlets=None, outlets=None,
                               domain_area=None, domain_length=None):
    r"""
    This calculates the effective diffusivity in this linear transport
    algorithm.

    Parameters
    ----------
    inlets : array_like
        The pores where the inlet composition boundary conditions were
        applied.  If not given an attempt is made to infer them from the
        algorithm.
    outlets : array_like
        The pores where the outlet composition boundary conditions were
        applied.  If not given an attempt is made to infer them from the
        algorithm.
    domain_area : scalar, optional
        The area of the inlet (and outlet) boundary faces.  If not given
        then an attempt is made to estimate it, but it is usually
        underestimated.
    domain_length : scalar, optional
        The length of the domain between the inlet and outlet boundary
        faces.  If not given then an attempt is made to estimate it, but it
        is usually underestimated.

    Notes
    -----
    The area and length of the domain are found using the bounding box
    around the inlet and outlet pores which do not necessarily lie on the
    edge of the domain, resulting in underestimation of sizes.
    """
    # Delegate to the generic effective-property calculator
    return self._calc_eff_prop(inlets=inlets, outlets=outlets,
                               domain_area=domain_area,
                               domain_length=domain_length)
|
python
|
{
"resource": ""
}
|
q20858
|
Subdomain._set_locations
|
train
|
def _set_locations(self, element, indices, mode, complete=False):
    r"""
    This private method is called by ``set_locations`` and
    ``remove_locations`` as needed.

    Parameters
    ----------
    element : string
        'pore' or 'throat' (parsed/validated below).
    indices : array_like
        Network-scale indices to add to or drop from this subdomain.
    mode : string
        'add' to assign the locations to this object, 'drop' to release
        them.
    complete : boolean
        When dropping and no locations remain, ``True`` deletes this
        object's label array from the full domain entirely; ``False``
        (default) keeps it as an all-False array.
    """
    boss = self.project.find_full_domain(self)
    element = self._parse_element(element=element, single=True)
    # Make sure label array exists in boss
    if (element+'.'+self.name) not in boss.keys():
        boss[element+'.'+self.name] = False
    # Check to ensure indices aren't already assigned
    if mode == 'add':
        # A location may belong to at most one geometry (or physics)
        if self._isa('geometry'):
            objs = self.project.geometries().keys()
        else:
            objs = self.project.physics().keys()
        for name in objs:
            if element+'.'+name in boss.keys():
                if np.any(boss[element+'.'+name][indices]):
                    raise Exception('Given indices are already assigned ' +
                                    'to ' + name)
    # Find mask of existing locations (network indexing)
    mask = boss[element+'.'+self.name]
    # Update mask with new locations (either add or remove)
    if mode == 'add':
        mask = mask + boss._tomask(indices=indices, element=element)
    elif mode == 'drop':
        mask = mask * (~boss._tomask(indices=indices, element=element))
    # Change size of all arrays on self
    # (re-slice every array from the boss so lengths stay consistent)
    for item in self.keys(element=element, mode='all'):
        self.update({item: boss[item][mask]})
    # Update label array in network
    boss[element+'.'+self.name] = mask
    # Remove label from boss if ALL locations are removed
    if mode == 'drop':
        if ~np.any(boss[element+'.'+self.name]):
            if complete:
                del boss[element+'.'+self.name]
            else:
                boss[element+'.'+self.name] = False
|
python
|
{
"resource": ""
}
|
q20859
|
ctc
|
train
|
def ctc(target, pore_diameter='pore.diameter'):
    r"""
    Calculate throat length as the center-to-center distance between the
    two pores of each throat, i.e. assuming point-like pores.  This model
    also assumes that pore and throat centroids are colinear.

    Parameters
    ----------
    target : OpenPNM Object
        The object which this model is associated with.  This controls the
        length of the calculated array, and also provides access to other
        necessary properties.
    pore_diameter : string
        Dictionary key of the pore diameter values (kept for interface
        compatibility).
    """
    _np.warnings.filterwarnings('ignore', category=RuntimeWarning)
    network = target.project.network
    Ts = network.map_throats(throats=target.Ts, origin=target)
    conns = network['throat.conns'][Ts]
    coords = network['pore.coords']
    head = coords[conns[:, 0]]
    tail = coords[conns[:, 1]]
    # Euclidean distance between the two pore centers
    distance = _sqrt(((head - tail)**2).sum(axis=1))
    _np.warnings.filterwarnings('default', category=RuntimeWarning)
    return distance
|
python
|
{
"resource": ""
}
|
q20860
|
piecewise
|
train
|
def piecewise(target, throat_endpoints='throat.endpoints',
              throat_centroid='throat.centroid'):
    r"""
    Calculate throat length from its end points, optionally via a centroid.

    Parameters
    ----------
    target : OpenPNM Object
        The object which this model is associated with.  This controls the
        length of the calculated array, and also provides access to other
        necessary properties.
    throat_endpoints : string
        Dictionary key of the throat endpoint values.
    throat_centroid : string
        Dictionary key of the throat centroid values, optional.

    Returns
    -------
    Lt : ndarray
        Array containing throat lengths for the given geometry.

    Notes
    -----
    (1) By default, the model assumes that the centroids of pores and the
    connecting throat in each conduit are colinear.

    (2) If `throat_centroid` is present on the network, the model accounts
    for the extra length via the two endpoint-to-centroid segments.  This
    could be useful for Voronoi or extracted networks.
    """
    _np.warnings.filterwarnings('ignore', category=RuntimeWarning)
    network = target.project.network
    Ts = network.map_throats(throats=target.Ts, origin=target)
    head = network[throat_endpoints + '.head'][Ts]
    tail = network[throat_endpoints + '.tail'][Ts]
    # Default: straight-line distance between the two endpoints
    Lt = _sqrt(((head - tail)**2).sum(axis=1))
    try:
        # When a centroid exists, sum the two segment lengths instead
        centroid = network[throat_centroid][Ts]
        Lt = (_sqrt(((centroid - head)**2).sum(axis=1)) +
              _sqrt(((centroid - tail)**2).sum(axis=1)))
    except KeyError:
        pass
    _np.warnings.filterwarnings('default', category=RuntimeWarning)
    return Lt
|
python
|
{
"resource": ""
}
|
q20861
|
conduit_lengths
|
train
|
def conduit_lengths(target, throat_endpoints='throat.endpoints',
                    throat_length='throat.length'):
    r"""
    Calculate conduit lengths, a conduit being half pore + throat + half
    pore.

    Parameters
    ----------
    target : OpenPNM Object
        The object which this model is associated with.  This controls the
        length of the calculated array, and also provides access to other
        necessary properties.
    throat_endpoints : string
        Dictionary key of the throat endpoint values.
    throat_length : string (optional)
        Dictionary key of the throat length values.  If not present on the
        network, the direct distance between the two throat end points is
        used instead.

    Returns
    -------
    Dictionary containing conduit lengths, which can be accessed via the
    dict keys 'pore1', 'pore2', and 'throat'.
    """
    _np.warnings.filterwarnings('ignore', category=RuntimeWarning)
    network = target.project.network
    Ts = network.map_throats(throats=target.Ts, origin=target)
    conns = network['throat.conns'][Ts]
    # Pore center coordinates at each end of the conduit
    coords1 = network['pore.coords'][conns[:, 0]]
    coords2 = network['pore.coords'][conns[:, 1]]
    # Throat endpoints
    head = network[throat_endpoints + '.head'][Ts]
    tail = network[throat_endpoints + '.tail'][Ts]
    try:
        # Look up throat length if given
        Lt = network[throat_length][Ts]
    except KeyError:
        # Otherwise fall back to the endpoint-to-endpoint distance
        Lt = _sqrt(((head - tail)**2).sum(axis=1))
    # Pore segments run from each pore center to its throat endpoint
    L1 = _sqrt(((coords1 - head)**2).sum(axis=1))
    L2 = _sqrt(((coords2 - tail)**2).sum(axis=1))
    _np.warnings.filterwarnings('default', category=RuntimeWarning)
    return {'pore1': L1, 'throat': Lt, 'pore2': L2}
|
python
|
{
"resource": ""
}
|
q20862
|
standard
|
train
|
def standard(target, mol_weight='pore.molecular_weight',
             molar_density='pore.molar_density'):
    r"""
    Calculates the mass density as the product of molar density and
    molecular weight.

    Parameters
    ----------
    mol_weight : string
        The dictionary key containing the molecular weight values
    molar_density : string
        The dictionary key containing the molar density values
    """
    # rho [mol/m^3] * MW [kg/mol] -> mass density [kg/m^3]
    return target[molar_density]*target[mol_weight]
|
python
|
{
"resource": ""
}
|
q20863
|
Dict.save
|
train
|
def save(cls, dct, filename):
    r"""
    Pickle the given dictionary into the specified file.

    Parameters
    ----------
    dct : dictionary
        A dictionary to save to file, presumably obtained from the
        ``to_dict`` method of this class.
    filename : string or path object
        The filename to store the dictionary.
    """
    path = cls._parse_filename(filename=filename, ext='dct')
    # Convert any exotic nested mappings to plain dicts so pickling works
    cleaned = sanitize_dict(dct)
    with open(path, 'wb') as f:
        pickle.dump(cleaned, f)
|
python
|
{
"resource": ""
}
|
q20864
|
Dict.load
|
train
|
def load(cls, filename):
    r"""
    Reads a pickled dictionary back from the specified file.

    Parameters
    ----------
    filename : string
        The path to the file to be opened

    Notes
    -----
    The returned plain Python dictionary can be converted into OpenPNM
    objects using the ``from_dict`` method of this class.
    """
    path = cls._parse_filename(filename)
    with open(path, 'rb') as fh:
        return pickle.load(fh)
|
python
|
{
"resource": ""
}
|
q20865
|
OrdinaryPercolation.set_inlets
|
train
|
def set_inlets(self, pores=[], overwrite=False):
    r"""
    Defines the pores through which the invading phase enters the network.

    Parameters
    ----------
    pores : array_like
        Locations that are initially filled with invader, from which
        clusters grow and invade into the network
    overwrite : boolean
        If ``True`` then all existing inlet locations will be removed and
        then the supplied locations will be added.  If ``False`` (default)
        the supplied locations are added to any already existing inlets.
    """
    inds = self._parse_indices(pores)
    # Inlets and outlets must remain disjoint sets
    if np.sum(self['pore.outlets'][inds]) > 0:
        raise Exception('Some inlets are already defined as outlets')
    if overwrite:
        self['pore.inlets'] = False
    self['pore.inlets'][inds] = True
    # Inlet pores count as invaded from the very start
    self['pore.invasion_pressure'][inds] = 0
    self['pore.invasion_sequence'][inds] = 0
|
python
|
{
"resource": ""
}
|
q20866
|
OrdinaryPercolation.set_residual
|
train
|
def set_residual(self, pores=[], throats=[], overwrite=False):
    r"""
    Marks the given pores and throats as containing residual invader,
    i.e. locations that are already invaded when the simulation starts.

    Parameters
    ----------
    pores : array_like
        The pores locations that are to be filled with invader at the
        beginning of the simulation.
    throats : array_like
        The throat locations that are to be filled with invader at the
        beginning of the simulation.
    overwrite : boolean
        If ``True`` then all existing residual locations will be removed
        and then the supplied locations will be added.  If ``False``,
        supplied locations are added to any already existing locations.
    """
    # Handle pores and throats with the same logic
    for element, locs in (('pore', pores), ('throat', throats)):
        inds = self._parse_indices(locs)
        if overwrite:
            self[element + '.residual'] = False
        self[element + '.residual'][inds] = True
|
python
|
{
"resource": ""
}
|
q20867
|
OrdinaryPercolation.get_percolation_threshold
|
train
|
def get_percolation_threshold(self):
    r"""
    Returns the invasion threshold at which a cluster first spans from
    the inlet to the outlet sites.
    """
    if np.sum(self['pore.inlets']) == 0:
        raise Exception('Inlet pores must be specified first')
    if np.sum(self['pore.outlets']) == 0:
        raise Exception('Outlet pores must be specified first')
    else:
        Pout = self['pore.outlets']
    # For access-limited invasion the lowest invasion pressure recorded
    # on any outlet pore defines the spanning threshold
    if self.settings['access_limited']:
        return np.amin(self['pore.invasion_pressure'][Pout])
    raise Exception('This is currently only implemented for access ' +
                    'limited simulations')
|
python
|
{
"resource": ""
}
|
q20868
|
OrdinaryPercolation.is_percolating
|
train
|
def is_percolating(self, applied_pressure):
    r"""
    Determines whether a percolating cluster spans between the inlet and
    outlet pores at the given applied pressure.

    Parameters
    ----------
    applied_pressure : scalar, float
        The pressure at which percolation should be checked

    Returns
    -------
    A simple boolean True or False if percolation has occured or not.
    """
    if np.sum(self['pore.inlets']) == 0:
        raise Exception('Inlet pores must be specified first')
    else:
        Pin = self['pore.inlets']
    if np.sum(self['pore.outlets']) == 0:
        raise Exception('Outlet pores must be specified first')
    else:
        Pout = self['pore.outlets']
    # Quick rejection: if no outlet was invaded at or below this pressure
    # there cannot be a spanning cluster
    if np.amin(self['pore.invasion_pressure'][Pout]) > applied_pressure:
        return False
    # Otherwise confirm with a full connectivity check over the throats
    # invaded below the applied pressure
    mask = self['throat.invasion_pressure'] < applied_pressure
    am = self.project.network.create_adjacency_matrix(weights=mask,
                                                      fmt='coo')
    return ispercolating(am=am, mode=self.settings['mode'],
                         inlets=Pin, outlets=Pout)
|
python
|
{
"resource": ""
}
|
q20869
|
OrdinaryPercolation.run
|
train
|
def run(self, points=25, start=None, stop=None):
    r"""
    Runs the percolation algorithm to determine which pores and throats
    will be invaded at each given pressure point.

    Parameters
    ----------
    points: int or array_like
        An array containing the pressure points to apply.  If a scalar is
        given then an array will be generated with the given number of
        points spaced between the lowest and highest values of throat
        entry pressures using logarithmic spacing.  To specify low and
        high pressure points use the ``start`` and ``stop`` arguments.
    start : int
        The optional starting point to use when generating pressure points.
    stop : int
        The optional stopping point to use when generating pressure points.
    """
    phase = self.project.find_phase(self)
    # Parse inputs and generate list of invasion points if necessary
    if self.settings['mode'] == 'bond':
        # Bond percolation: entry thresholds live on the throats
        self['throat.entry_pressure'] = \
            phase[self.settings['throat_entry_threshold']]
        # Pad the pressure range by 5% so extreme thresholds are included
        if start is None:
            start = sp.amin(self['throat.entry_pressure'])*0.95
        if stop is None:
            stop = sp.amax(self['throat.entry_pressure'])*1.05
    elif self.settings['mode'] == 'site':
        # Site percolation: entry thresholds live on the pores
        self['pore.entry_pressure'] = \
            phase[self.settings['pore_entry_threshold']]
        if start is None:
            start = sp.amin(self['pore.entry_pressure'])*0.95
        if stop is None:
            stop = sp.amax(self['pore.entry_pressure'])*1.05
    else:
        raise Exception('Percolation type has not been set')
    # A scalar ``points`` means "generate this many log-spaced pressures"
    # NOTE(review): max(1, start) guards log10 against non-positive start
    # values but silently shifts the lower bound when start < 1 — confirm
    if type(points) is int:
        points = sp.logspace(start=sp.log10(max(1, start)),
                             stop=sp.log10(stop), num=points)
    # Ensure pore inlets have been set IF access limitations is True
    if self.settings['access_limited']:
        if sp.sum(self['pore.inlets']) == 0:
            raise Exception('Inlet pores must be specified first')
        else:
            Pin = self['pore.inlets']
    # Generate curve from points
    conns = self.project.network['throat.conns']
    for inv_val in points:
        # Label the clusters formed by all elements whose entry pressure
        # is at or below the current applied pressure
        if self.settings['mode'] == 'bond':
            t_invaded = self['throat.entry_pressure'] <= inv_val
            labels = bond_percolation(conns, t_invaded)
        elif self.settings['mode'] == 'site':
            p_invaded = self['pore.entry_pressure'] <= inv_val
            labels = site_percolation(conns, p_invaded)
        # Optionally remove clusters not connected to the inlets
        if self.settings['access_limited']:
            labels = remove_isolated_clusters(labels=labels,
                                              inlets=Pin)
        # Store current applied pressure in newly invaded pores
        # (only elements still at inf have not been invaded before)
        pinds = (self['pore.invasion_pressure'] == sp.inf) * \
            (labels.sites >= 0)
        self['pore.invasion_pressure'][pinds] = inv_val
        # Store current applied pressure in newly invaded throats
        tinds = (self['throat.invasion_pressure'] == sp.inf) * \
            (labels.bonds >= 0)
        self['throat.invasion_pressure'][tinds] = inv_val
    # Convert invasion pressures in sequence values: searchsorted against
    # the sorted unique pressures ranks each element by invasion order
    Pinv = self['pore.invasion_pressure']
    Tinv = self['throat.invasion_pressure']
    Pseq = sp.searchsorted(sp.unique(Pinv), Pinv)
    Tseq = sp.searchsorted(sp.unique(Tinv), Tinv)
    self['pore.invasion_sequence'] = Pseq
    self['throat.invasion_sequence'] = Tseq
|
python
|
{
"resource": ""
}
|
q20870
|
OrdinaryPercolation.get_intrusion_data
|
train
|
def get_intrusion_data(self, Pc=None):
    r"""
    Computes the intrusion curve: invading phase saturation versus
    applied capillary pressure.

    Parameters
    ----------
    Pc : array_like, optional
        Capillary pressures at which to evaluate the curve.  If not
        given, the unique invasion pressures found during ``run`` are
        used.

    Returns
    -------
    A named-tuple with fields ``Pcap`` (applied capillary pressures) and
    ``Snwp`` (invading phase saturation at each pressure).
    """
    net = self.project.network
    if Pc is None:
        # Use the pressures actually applied during the simulation
        points = np.unique(self['throat.invasion_pressure'])
        # Prepend a low pressure point to improve the resulting graph
        points = np.concatenate(([0], points))
        # Uninvaded elements carry inf; drop it from the point list
        if points[-1] == np.inf:
            points = points[:-1]
    else:
        points = np.array(Pc)
    # Total void volume, used to normalize saturations
    Pvol = net[self.settings['pore_volume']]
    Tvol = net[self.settings['throat_volume']]
    Total_vol = np.sum(Pvol) + np.sum(Tvol)
    # Accumulate the invaded volume at each applied pressure
    Vnwp_all = []
    for p in points:
        p_inv, t_inv = self.results(p).values()
        Vnwp_all.append(np.sum(Pvol*p_inv) + np.sum(Tvol*t_inv))
    # Normalize volumes to saturations
    Snwp_all = [V/Total_vol for V in Vnwp_all]
    pc_curve = namedtuple('pc_curve', ('Pcap', 'Snwp'))
    return pc_curve(points, Snwp_all)
|
python
|
{
"resource": ""
}
|
q20871
|
OrdinaryPercolation.plot_intrusion_curve
|
train
|
def plot_intrusion_curve(self, fig=None):
    r"""
    Plots invading phase saturation against applied capillary pressure
    on a semi-log axis.
    """
    pressures, saturations = self.get_intrusion_data()
    if fig is None:
        fig = plt.figure()
    plt.semilogx(pressures, saturations, 'ko-')
    plt.ylabel('Invading Phase Saturation')
    plt.xlabel('Capillary Pressure')
    plt.grid(True)
    return fig
|
python
|
{
"resource": ""
}
|
q20872
|
OrdinaryPercolation.results
|
train
|
def results(self, Pc):
    r"""
    Builds occupancy arrays describing which pores and throats contain
    invading phase at the specified capillary pressure.

    Parameters
    ----------
    Pc : scalar
        The capillary pressure for which an invading phase configuration
        is desired.

    Returns
    -------
    A dictionary with the following entries:

    **'pore.occupancy'** : A value between 0 and 1 indicating the
    fractional volume of each pore that is invaded.  If no late pore
    filling model was applied, then this will only be integer values
    (either filled or not).

    **'throat.occupancy'** : The same as 'pore.occupancy' but for throats.

    This dictionary can be passed directly to the ``update`` method of
    the *Phase* object, making the values accessible to models or
    algorithms.
    """
    occupancy = {}
    occupancy['pore.occupancy'] = sp.array(
        self['pore.invasion_pressure'] <= Pc, dtype=float)
    occupancy['throat.occupancy'] = sp.array(
        self['throat.invasion_pressure'] <= Pc, dtype=float)
    return occupancy
|
python
|
{
"resource": ""
}
|
q20873
|
percolating_continua
|
train
|
def percolating_continua(target, phi_crit, tau,
                         volume_fraction='pore.volume_fraction',
                         bulk_property='pore.intrinsic_conductivity'):
    r'''
    Calculates the effective property of a continua using percolation theory

    Parameters
    ----------
    target : OpenPNM Object
        The object for which these values are being calculated. This
        controls the length of the calculated array, and also provides
        access to other necessary thermofluid properties.
    phi_crit : float
        The volume fraction below which percolation does NOT occur
    tau : float
        The exponent of the percolation relationship
    volume_fraction : string
        The dictionary key in the Phase object containing the volume fraction
        of the conducting component
    bulk_property : string
        The dictionary key in the Phase object containing the intrinsic
        property of the conducting component

    Notes
    -----
    This model uses the following standard percolation relationship:

    .. math::

        \sigma_{effective}=\sigma_{bulk}(\phi - \phi_{critical})^\tau

    Below the critical volume fraction the effective property is zero.
    '''
    sigma = target[bulk_property]
    phi = target[volume_fraction]
    # Clip at zero so sub-critical volume fractions yield exactly zero
    # instead of a complex/NaN power for non-integer tau
    diff_phi = _sp.clip(phi - phi_crit, a_min=0, a_max=_sp.inf)
    sigma_eff = sigma*(diff_phi)**tau
    return sigma_eff
|
python
|
{
"resource": ""
}
|
q20874
|
ReactiveTransport.set_source
|
train
|
def set_source(self, propname, pores):
    r"""
    Assigns the given source term model to the specified pores.

    Parameters
    ----------
    propname : string
        The property name of the source term model to be applied
    pores : array_like
        The pore indices where the source term should be applied

    Notes
    -----
    Source terms cannot be applied in pores where boundary conditions
    have already been set.  Attempting to do so raises an exception.
    """
    mask = self.tomask(pores=pores)
    # A pore may carry either a BC or a source term, never both
    has_value_bc = not np.all(np.isnan(self['pore.bc_value'][mask]))
    has_rate_bc = not np.all(np.isnan(self['pore.bc_rate'][mask]))
    if has_value_bc or has_rate_bc:
        raise Exception('Boundary conditions already present in given ' +
                        'pores, cannot also assign source terms')
    self[propname] = mask
    self.settings['sources'].append(propname)
|
python
|
{
"resource": ""
}
|
q20875
|
ReactiveTransport._set_BC
|
train
|
def _set_BC(self, pores, bctype, bcvalues=None, mode='merge'):
    r"""
    Apply boundary conditions to specified pores if no source terms are
    already assigned to these pores.  Otherwise, raise an error.

    Parameters
    ----------
    pores : array_like
        The pores where the boundary conditions should be applied
    bctype : string
        Specifies the type or the name of boundary condition to apply.
        The types can be one one of the following:

        - *'value'* : Specify the value of the quantity in each location
        - *'rate'* : Specify the flow rate into each location
    bcvalues : int or array_like
        The boundary value to apply, such as concentration or rate.  If
        a single value is given, it's assumed to apply to all locations.
        Different values can be applied to all pores in the form of an
        array of the same length as ``pores``.
    mode : string, optional
        Controls how the conditions are applied.  Options are:

        *'merge'*: (Default) Adds supplied boundary conditions to already
        existing conditions.

        *'overwrite'*: Deletes all boundary condition on object then add
        the given ones

    Notes
    -----
    It is not possible to have multiple boundary conditions for a
    specified location in one algorithm.  Use ``remove_BCs`` to clear
    existing BCs before applying new ones or ``mode='overwrite'`` which
    removes all existing BC's before applying the new ones.
    """
    # First check that given pores do not have source terms already set;
    # a pore may carry either a source term or a BC, never both
    for item in self.settings['sources']:
        if np.any(self[item][pores]):
            raise Exception('Source term already present in given ' +
                            'pores, cannot also assign boundary ' +
                            'conditions')
    # Then call parent class function if above check passes
    super()._set_BC(pores=pores, bctype=bctype, bcvalues=bcvalues,
                    mode=mode)
|
python
|
{
"resource": ""
}
|
q20876
|
ReactiveTransport._update_physics
|
train
|
def _update_physics(self):
    r"""
    Regenerates the source term models using the current value of
    'quantity'.

    Notes
    -----
    The algorithm directly writes the value of 'quantity' into the phase
    so the physics models can pick it up when regenerating.  This method
    was implemented relaxing one of the OpenPNM rules of algorithms not
    being able to write into phases.
    """
    phase = self.project.phases()[self.settings['phase']]
    physics = self.project.find_physics(phase=phase)
    quantity = self.settings['quantity']
    for source in self.settings['sources']:
        # Put the latest guess on the phase so models can find it
        phase[quantity] = self[quantity]
        # Regenerate the source model on the phase and on every physics
        phase.regenerate_models(propnames=source)
        for phys in physics:
            phys.regenerate_models(propnames=source)
|
python
|
{
"resource": ""
}
|
q20877
|
ReactiveTransport._apply_sources
|
train
|
def _apply_sources(self):
    r"""
    Update 'A' and 'b' applying source terms to specified pores.

    Notes
    -----
    Applying source terms to 'A' and 'b' is performed after (optionally)
    under-relaxing the source term to improve numerical stability.
    Physics are also updated before applying source terms to ensure that
    source terms values are associated with the current value of
    'quantity'.  In the case of a transient simulation, the updates in
    'A' and 'b' also depend on the time scheme.
    """
    # Crank-Nicolson weights the source contribution by one half
    if self.settings['t_scheme'] == 'cranknicolson':
        f1 = 0.5
    else:
        f1 = 1
    phase = self.project.phases()[self.settings['phase']]
    relax = self.settings['relaxation_source']
    for item in self.settings['sources']:
        Ps = self.pores(item)
        # Add S1 to diagonal of A
        # TODO: We need this to NOT overwrite the A and b, but create
        # copy, otherwise we have to regenerate A and b on each loop
        datadiag = self._A.diagonal().copy()
        # Source term relaxation: blend freshly regenerated S1/S2 with
        # their previous values to damp oscillations between iterations
        S1_old = phase[item+'.'+'S1'][Ps].copy()
        S2_old = phase[item+'.'+'S2'][Ps].copy()
        self._update_physics()
        S1 = phase[item+'.'+'S1'][Ps]
        S2 = phase[item+'.'+'S2'][Ps]
        S1 = relax*S1 + (1-relax)*S1_old
        S2 = relax*S2 + (1-relax)*S2_old
        # Write the relaxed values back so the next iteration sees them
        phase[item+'.'+'S1'][Ps] = S1
        phase[item+'.'+'S2'][Ps] = S2
        datadiag[Ps] = datadiag[Ps] - f1*S1
        # Add S1 to A
        self._A.setdiag(datadiag)
        # Add S2 to b
        self._b[Ps] = self._b[Ps] + f1*S2
|
python
|
{
"resource": ""
}
|
q20878
|
ReactiveTransport.run
|
train
|
def run(self, x=None):
    r"""
    Assembles the A and b matrices and iterates the solver specified in
    the ``settings`` attribute.

    Parameters
    ----------
    x : ND-array
        Initial guess of unknown variable; defaults to zeros.
    """
    logger.info('Running ReactiveTransport')
    # Seed the quantity with the initial guess so S1 & S2 can be
    # evaluated for the first Picard iteration
    if x is None:
        x = np.zeros(shape=[self.Np, ], dtype=float)
    self[self.settings['quantity']] = x
    self._update_physics()
    solution = self._run_reactive(x=x)
    self[self.settings['quantity']] = solution
|
python
|
{
"resource": ""
}
|
q20879
|
ReactiveTransport._run_reactive
|
train
|
def _run_reactive(self, x):
    r"""
    Repeatedly updates 'A', 'b', and the solution guess within according
    to the applied source term then calls '_solve' to solve the resulting
    system of linear equations.

    Stops when the residual falls below 'r_tolerance' or when the maximum
    number of iterations is reached.

    Parameters
    ----------
    x : ND-array
        Initial guess of unknown variable

    Returns
    -------
    x_new : ND-array
        Solution array.
    """
    if x is None:
        x = np.zeros(shape=[self.Np, ], dtype=float)
    self[self.settings['quantity']] = x
    relax = self.settings['relaxation_quantity']
    res = 1e+06  # Initialize the residual above any sensible tolerance
    for itr in range(int(self.settings['max_iter'])):
        if res >= self.settings['r_tolerance']:
            logger.info('Tolerance not met: ' + str(res))
            self[self.settings['quantity']] = x
            # Rebuild the linear system around the current guess
            self._build_A(force=True)
            self._build_b(force=True)
            self._apply_BCs()
            self._apply_sources()
            x_new = self._solve()
            # Relaxation: blend the new solution with the previous one
            x_new = relax*x_new + (1-relax)*self[self.settings['quantity']]
            self[self.settings['quantity']] = x_new
            # NOTE(review): residual is sum(|x^2 - x_new^2|) rather than a
            # norm of the difference — confirm this is intentional
            res = np.sum(np.absolute(x**2 - x_new**2))
            x = x_new
        # Converge when the residual drops below tolerance, or exit
        # immediately when there are no source terms at all
        if (res < self.settings['r_tolerance'] or
                self.settings['sources'] == []):
            logger.info('Solution converged: ' + str(res))
            break
    return x_new
|
python
|
{
"resource": ""
}
|
q20880
|
sphere
|
train
|
def sphere(target, pore_diameter='pore.diameter'):
    r"""
    Computes pore volume assuming each pore is a sphere with the given
    diameter.

    Parameters
    ----------
    target : OpenPNM Object
        The object which this model is associated with. This controls
        the length of the calculated array, and also provides access to
        other necessary geometric properties.
    pore_diameter : string
        The dictionary key of the pore diameter values
    """
    d = target[pore_diameter]
    # V = (pi/6) * d^3 for a sphere
    return (_pi/6) * d**3
|
python
|
{
"resource": ""
}
|
q20881
|
cylinder
|
train
|
def cylinder(target, throat_length='throat.length',
             throat_diameter='throat.diameter'):
    r"""
    Computes throat volume assuming a cylindrical shape.

    Parameters
    ----------
    target : OpenPNM Object
        The object which this model is associated with. This controls
        the length of the calculated array, and also provides access to
        other necessary properties.
    throat_length and throat_diameter : strings
        The dictionary keys containing the arrays with the throat
        diameter and length values.

    Notes
    -----
    At present this models does NOT account for the volume reprsented by
    the intersection of the throat with a spherical pore body.
    """
    # V = (pi/4) * d^2 * L for a cylinder
    return _sp.pi/4 * target[throat_diameter]**2 * target[throat_length]
|
python
|
{
"resource": ""
}
|
q20882
|
cuboid
|
train
|
def cuboid(target, throat_length='throat.length',
           throat_diameter='throat.diameter'):
    r"""
    Computes throat volume assuming a square cross-section.

    Parameters
    ----------
    target : OpenPNM Object
        The object which this model is associated with. This controls
        the length of the calculated array, and also provides access to
        other necessary properties.
    throat_length and throat_diameter : strings
        The dictionary keys containing the arrays with the throat
        diameter and length values.

    Notes
    -----
    At present this models does NOT account for the volume reprsented by
    the intersection of the throat with a spherical pore body.
    """
    # Square cross-section of side 'diam' extruded over 'length'
    return target[throat_diameter]**2 * target[throat_length]
|
python
|
{
"resource": ""
}
|
q20883
|
extrusion
|
train
|
def extrusion(target, throat_length='throat.length',
              throat_area='throat.area'):
    r"""
    Computes throat volume from the throat area and the throat length.
    This method is useful for abnormal shaped throats.

    Parameters
    ----------
    target : OpenPNM Object
        The object which this model is associated with. This controls
        the length of the calculated array, and also provides access to
        other necessary properties.
    throat_length and throat_area : strings
        The dictionary keys containing the arrays with the throat area
        and length values.

    Notes
    -----
    At present this models does NOT account for the volume reprsented by
    the intersection of the throat with a spherical pore body.
    """
    # Arbitrary cross-section extruded along the throat length
    return target[throat_area] * target[throat_length]
|
python
|
{
"resource": ""
}
|
q20884
|
HDF5.to_hdf5
|
train
|
def to_hdf5(cls, network=None, phases=[], element=['pore', 'throat'],
            filename='', interleave=True, flatten=False, categorize_by=[]):
    r"""
    Creates an HDF5 file containing data from the specified objects,
    and categorized according to the given arguments.

    Parameters
    ----------
    network : OpenPNM Network Object
        The network containing the desired data
    phases : list of OpenPNM Phase Objects (optional, default is none)
        A list of phase objects whose data are to be included
    element : string or list of strings
        An indication of whether 'pore' and/or 'throat' data are desired.
        The default is both.
    interleave : boolean (default is ``True``)
        When ``True`` (default) the data from all Geometry objects (and
        Physics objects if ``phases`` are given) is interleaved into
        a single array and stored as a network property (or Phase
        property for Physics data). When ``False``, the data for each
        object are stored under their own dictionary key, the structuring
        of which depends on the value of the ``flatten`` argument.
    flatten : boolean (default is ``True``)
        When ``True``, all objects are accessible from the top level
        of the dictionary. When ``False`` objects are nested under their
        parent object. If ``interleave`` is ``True`` this argument is
        ignored.
    categorize_by : string or list of strings
        Indicates how the dictionaries should be organized. The list can
        contain any, all or none of the following strings:

        **'objects'** : If specified the dictionary keys will be stored
        under a general level corresponding to their type (e.g.
        'network/net_01/pore.all'). If ``interleave`` is ``True`` then
        only the only categories are *network* and *phase*, since
        *geometry* and *physics* data get stored under their respective
        *network* and *phase*.

        **'data'** : If specified the data arrays are additionally
        categorized by ``label`` and ``property`` to separate *boolean*
        from *numeric* data.

        **'elements'** : If specified the data arrays are additionally
        categorized by ``pore`` and ``throat``, meaning that the propnames
        are no longer prepended by a 'pore.' or 'throat.'
    """
    project, network, phases = cls._parse_args(network=network,
                                               phases=phases)
    if filename == '':
        filename = project.name
    filename = cls._parse_filename(filename, ext='hdf')
    # Flatten the nested dictionary into 'a/b/c' style keys which map
    # directly onto HDF5 group/dataset paths
    dct = Dict.to_dict(network=network, phases=phases, element=element,
                       interleave=interleave, flatten=flatten,
                       categorize_by=categorize_by)
    d = FlatDict(dct, delimiter='/')
    f = h5py.File(filename, "w")
    # Iterate over a snapshot of the keys: entries with object dtype are
    # deleted inside the loop, and deleting while iterating the live key
    # view raises a RuntimeError
    for item in list(d.keys()):
        tempname = '_'.join(item.split('.'))
        arr = d[item]
        if arr.dtype == 'O':
            # Object arrays cannot be written as HDF5 datasets
            logger.warning(item + ' has dtype object,' +
                           ' will not write to file')
            del d[item]
        elif 'U' in str(arr[0].dtype):
            # Unicode string arrays are also skipped
            pass
        else:
            f.create_dataset(name='/'+tempname, shape=arr.shape,
                             dtype=arr.dtype, data=arr)
    return f
|
python
|
{
"resource": ""
}
|
q20885
|
get_objects_in_sequence
|
train
|
def get_objects_in_sequence(brain_or_object, ctype, cref):
    """Return the list of items counted for a sequence-type counter
    """
    obj = api.get_object(brain_or_object)
    # Dispatch on the counter type
    handlers = {
        "backreference": get_backreferences,
        "contained": get_contained_items,
    }
    handler = handlers.get(ctype)
    if handler is None:
        raise ValueError("Reference value is mandatory for sequence type counter")
    return handler(obj, cref)
|
python
|
{
"resource": ""
}
|
q20886
|
get_backreferences
|
train
|
def get_backreferences(obj, relationship):
    """Returns the backreferences
    """
    # Prefer the UIDReferenceField-based lookup
    refs = get_backuidreferences(obj, relationship)
    if refs:
        return refs
    # TODO remove after all ReferenceField get ported to UIDReferenceField
    # Some content types still use the old ReferenceField, so fall back
    # to the traditional getBackReferences for those.
    return obj.getBackReferences(relationship)
|
python
|
{
"resource": ""
}
|
q20887
|
get_type_id
|
train
|
def get_type_id(context, **kw):
    """Returns the type id for the context passed in
    """
    # An explicit portal_type keyword always wins
    portal_type = kw.get("portal_type", None)
    if portal_type:
        return portal_type
    # Otherwise infer the type from the marker interfaces of the context
    if IAnalysisRequestPartition.providedBy(context):
        return "AnalysisRequestPartition"
    if IAnalysisRequestRetest.providedBy(context):
        return "AnalysisRequestRetest"
    if IAnalysisRequestSecondary.providedBy(context):
        return "AnalysisRequestSecondary"
    return api.get_portal_type(context)
|
python
|
{
"resource": ""
}
|
q20888
|
strip_suffix
|
train
|
def strip_suffix(id):
    """Split off any suffix from ID

    This mimics the old behavior of the Sample ID.
    """
    suffix = get_suffix(id)
    # Without a suffix the ID is returned unchanged
    if suffix:
        return re.split(suffix, id)[0]
    return id
|
python
|
{
"resource": ""
}
|
q20889
|
get_partition_count
|
train
|
def get_partition_count(context, default=0):
    """Returns the number of partitions of this AR
    """
    # Only Analysis Requests can have partitions
    if not is_ar(context):
        return default
    parent = context.getParentAnalysisRequest()
    if not parent:
        return default
    # Partitions are the descendants of the parent AR
    return len(parent.getDescendants())
|
python
|
{
"resource": ""
}
|
q20890
|
get_secondary_count
|
train
|
def get_secondary_count(context, default=0):
    """Returns the number of secondary ARs of this AR
    """
    # Only Analysis Requests can have secondaries
    if not is_ar(context):
        return default
    primary = context.getPrimaryAnalysisRequest()
    if not primary:
        return default
    # Secondaries hang off the primary AR
    return len(primary.getSecondaryAnalysisRequests())
|
python
|
{
"resource": ""
}
|
q20891
|
get_config
|
train
|
def get_config(context, **kw):
    """Fetch the config dict from the Bika Setup for the given portal_type
    """
    # ID formatting configuration entries from the setup
    config_map = api.get_bika_setup().getIDFormatting()
    # portal_type may be overridden through kw
    portal_type = get_type_id(context, **kw)
    wanted = portal_type.lower()
    # Return the first entry that matches this portal type
    for config in config_map:
        if config['portal_type'].lower() == wanted:
            return config
    # No entry found -> fall back to a sensible default config
    return {
        'form': '%s-{seq}' % wanted,
        'sequence_type': 'generated',
        'prefix': '%s' % wanted,
    }
|
python
|
{
"resource": ""
}
|
q20892
|
get_variables
|
train
|
def get_variables(context, **kw):
    """Prepares a dictionary of key->value pairs usable for ID formatting
    """
    # allow portal_type override
    portal_type = get_type_id(context, **kw)
    # The variables map hold the values that might get into the constructed id
    variables = {
        "context": context,
        "id": api.get_id(context),
        "portal_type": portal_type,
        "year": get_current_year(),
        "parent": api.get_parent(context),
        "seq": 0,
        "alpha": Alphanumber(0),
    }
    # Augment the variables map depending on the portal type
    if portal_type in AR_TYPES:
        # Fall back to "now" when sampling/sampled dates are not yet set
        now = DateTime()
        sampling_date = context.getSamplingDate()
        sampling_date = sampling_date and DT2dt(sampling_date) or DT2dt(now)
        date_sampled = context.getDateSampled()
        date_sampled = date_sampled and DT2dt(date_sampled) or DT2dt(now)
        test_count = 1
        variables.update({
            "clientId": context.getClientID(),
            "dateSampled": date_sampled,
            "samplingDate": sampling_date,
            "sampleType": context.getSampleType().getPrefix(),
            "test_count": test_count
        })
        # Partition
        if portal_type == "AnalysisRequestPartition":
            parent_ar = context.getParentAnalysisRequest()
            parent_ar_id = api.get_id(parent_ar)
            parent_base_id = strip_suffix(parent_ar_id)
            partition_count = get_partition_count(context)
            variables.update({
                "parent_analysisrequest": parent_ar,
                "parent_ar_id": parent_ar_id,
                "parent_base_id": parent_base_id,
                "partition_count": partition_count,
            })
        # Retest
        elif portal_type == "AnalysisRequestRetest":
            # Note: we use "parent" instead of "invalidated" for simplicity
            parent_ar = context.getInvalidated()
            parent_ar_id = api.get_id(parent_ar)
            parent_base_id = strip_suffix(parent_ar_id)
            # keep the full ID if the retracted AR is a partition
            if context.isPartition():
                parent_base_id = parent_ar_id
            # Retests continue the test counter of the original AR
            retest_count = get_retest_count(context)
            test_count = test_count + retest_count
            variables.update({
                "parent_analysisrequest": parent_ar,
                "parent_ar_id": parent_ar_id,
                "parent_base_id": parent_base_id,
                "retest_count": retest_count,
                "test_count": test_count,
            })
        # Secondary
        elif portal_type == "AnalysisRequestSecondary":
            primary_ar = context.getPrimaryAnalysisRequest()
            primary_ar_id = api.get_id(primary_ar)
            parent_base_id = strip_suffix(primary_ar_id)
            secondary_count = get_secondary_count(context)
            variables.update({
                "parent_analysisrequest": primary_ar,
                "parent_ar_id": primary_ar_id,
                "parent_base_id": parent_base_id,
                "secondary_count": secondary_count,
            })
    elif portal_type == "ARReport":
        # Reports live inside a client folder; take the ID from the parent
        variables.update({
            "clientId": context.aq_parent.getClientID(),
        })
    return variables
|
python
|
{
"resource": ""
}
|
q20893
|
slice
|
train
|
def slice(string, separator="-", start=None, end=None):
    """Slice out a segment of a string, which is splitted on both the wildcards
    and the separator passed in, if any

    ``start`` is the index of the first segment to keep and ``end`` is
    the number of segments to join, counted over the segments that are
    not bare separators.

    NOTE(review): this relies on ``map``/``filter`` returning lists
    (Python 2 semantics); under Python 3 the ``.index`` calls below would
    fail on iterator objects — confirm the runtime before porting.
    """
    # split by wildcards/keywords first
    # AR-{sampleType}-{parentId}{alpha:3a2d}
    segments = filter(None, re.split('(\{.+?\})', string))
    # ['AR-', '{sampleType}', '-', '{parentId}', '{alpha:3a2d}']
    if separator:
        # Keep track of singleton separators as empties
        # We need to do this to prevent duplicates later, when splitting
        segments = map(lambda seg: seg!=separator and seg or "", segments)
        # ['AR-', '{sampleType}', '', '{parentId}', '{alpha:3a2d}']
        # Split each segment at the given separator
        segments = map(lambda seg: split(seg, separator), segments)
        # [['AR', ''], ['{sampleType}'], [''], ['{parentId}'], ['{alpha:3a2d}']]
        # Flatten the list
        segments = list(itertools.chain.from_iterable(segments))
        # ['AR', '', '{sampleType}', '', '{parentId}', '{alpha:3a2d}']
        # And replace empties with separator
        segments = map(lambda seg: seg!="" and seg or separator, segments)
        # ['AR', '-', '{sampleType}', '-', '{parentId}', '{alpha:3a2d}']
    # Get the start and end positions from the segments without separator
    cleaned_segments = filter(lambda seg: seg!=separator, segments)
    start_pos = to_int(start, 0)
    # Note "end" is not a position, but the number of elements to join!
    end_pos = to_int(end, len(cleaned_segments) - start_pos) + start_pos - 1
    # Map the positions against the segments with separator
    start = segments.index(cleaned_segments[start_pos])
    end = segments.index(cleaned_segments[end_pos]) + 1
    # Return all segments joined
    sliced_parts = segments[start:end]
    return "".join(sliced_parts)
|
python
|
{
"resource": ""
}
|
q20894
|
search_by_prefix
|
train
|
def search_by_prefix(portal_type, prefix):
    """Returns brains which share the same portal_type and ID prefix
    """
    catalog = api.get_tool("uid_catalog")
    brains = catalog({"portal_type": portal_type})

    # Keep only the brains whose ID starts with the wanted prefix
    def has_prefix(brain):
        return api.get_id(brain).startswith(prefix)

    return filter(has_prefix, brains)
|
python
|
{
"resource": ""
}
|
q20895
|
get_ids_with_prefix
|
train
|
def get_ids_with_prefix(portal_type, prefix):
    """Return a list of ids sharing the same portal type and prefix
    """
    # Extract just the IDs from the matching catalog brains
    return map(api.get_id, search_by_prefix(portal_type, prefix))
|
python
|
{
"resource": ""
}
|
q20896
|
get_seq_number_from_id
|
train
|
def get_seq_number_from_id(id, id_template, prefix, **kw):
    """Extract the sequence number encoded in the given ID.

    The static prefix is stripped from the ID and the remaining tail is
    split at the separator; the last alphanumeric segment is taken as the
    sequence number, converted through the template (alpha-number aware)
    and finally coerced to an integer. Falls back to 0 when no candidate
    segment is found.
    """
    separator = kw.get("separator", "-")
    # strip the static prefix, leaving only the variable tail of the ID
    tail = id.replace(prefix, "").strip(separator)
    segments = tail.split(separator)
    # candidate sequence numbers are the alphanumeric segments of the tail
    candidates = filter(lambda segment: segment.isalnum(), segments)
    seq_number = candidates[-1] if candidates else 0
    # express as an Alphanumber when the template demands it
    seq_number = get_alpha_or_number(seq_number, id_template)
    return to_int(seq_number)
|
python
|
{
"resource": ""
}
|
q20897
|
get_alpha_or_number
|
train
|
def get_alpha_or_number(number, template):
    """Returns an Alphanumber that represents the number passed in, expressed
    as defined in the template. Otherwise, returns the number
    """
    # the template encodes an alpha-number as a trailing {alpha:NaMd} marker
    matched = re.match(r".*\{alpha:(\d+a\d+d)\}$", template.strip())
    if matched is None or not matched.groups():
        # no alpha marker: hand the number back untouched
        return number
    alpha_format = matched.group(1)
    return to_alpha(number, alpha_format)
|
python
|
{
"resource": ""
}
|
q20898
|
get_counted_number
|
train
|
def get_counted_number(context, config, variables, **kw):
    """Compute the number for the sequence type "Counter"
    """
    # "context" here is a user-defined name from the Setup (e.g. "sample");
    # resolve the named object, falling back to the current context
    context_name = config.get("context")
    counted_obj = variables.get(context_name, context)
    # the counter type is either "backreference" or "contained"
    counter_type = config.get("counter_type")
    # the relationship name (backreference) or the meta type (contained)
    counter_reference = config.get("counter_reference")
    # all items of the sequence, the current context object included
    items = get_objects_in_sequence(counted_obj, counter_type, counter_reference)
    return len(items)
|
python
|
{
"resource": ""
}
|
q20899
|
get_generated_number
|
train
|
def get_generated_number(context, config, variables, **kw):
    """Generate a new persistent number with the number generator for the
    sequence type "Generated"

    :param context: the object the ID is being generated for
    :param config: ID formatting configuration (keys "form", "split_length")
    :param variables: mapping used to interpolate the ID template
    :param kw: supports "separator" (default "-"), "dry_run" and the
               portal-type override consumed by ``get_type_id``
    :returns: an int or Alphanumber with the next sequence number
    """
    # separator where to split the ID
    separator = kw.get('separator', '-')
    # allow portal_type override
    portal_type = get_type_id(context, **kw)
    # The ID format for string interpolation, e.g. WS-{seq:03d}
    id_template = config.get("form", "")
    # The split length defines where the key is splitted from the value
    split_length = config.get("split_length", 1)
    # The prefix template is the static part of the ID
    prefix_template = slice(id_template, separator=separator, end=split_length)
    # get the number generator
    number_generator = getUtility(INumberGenerator)
    # generate the key for the number generator storage
    prefix = prefix_template.format(**variables)
    # normalize out any unicode characters like Ö, É, etc. from the prefix
    prefix = api.normalize_filename(prefix)
    # The key used for the storage
    key = make_storage_key(portal_type, prefix)
    # Handle flushed storage
    if key not in number_generator:
        max_num = 0
        existing = get_ids_with_prefix(portal_type, prefix)
        # NOTE: forward the separator so that custom separators are honored
        # when extracting the sequence number from the existing IDs
        numbers = map(lambda id: get_seq_number_from_id(
            id, id_template, prefix, separator=separator), existing)
        # figure out the highest number in the sequence
        if numbers:
            max_num = max(numbers)
        # re-seed the number generator with the highest existing number
        logger.info("*** SEEDING Prefix '{}' to {}".format(prefix, max_num))
        number_generator.set_number(key, max_num)
    if not kw.get("dry_run", False):
        # Generate a new number
        # NOTE Even when the number exceeds the given ID sequence format,
        # it will overflow gracefully, e.g.
        # >>> '{sampleId}-R{seq:03d}'.format(sampleId="Water", seq=999999)
        # 'Water-R999999'
        number = number_generator.generate_number(key=key)
    else:
        # => This allows us to "preview" the next generated ID in the UI
        # TODO Show the user the next generated number somewhere in the UI
        number = number_generator.get(key, 1)
    # Return an int or Alphanumber
    return get_alpha_or_number(number, id_template)
|
python
|
{
"resource": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.