sentence1
stringlengths 52
3.87M
| sentence2
stringlengths 1
47.2k
| label
stringclasses 1
value |
|---|---|---|
def _get_k_p_a(font, left, right):
    """Compute kerning + advance for a glyph pair via CoreText.

    Lays out the two-character string ``left + right`` with *font* and
    returns the caret offset at character index 1, i.e. the advance of
    ``left`` including any pair kerning.
    See http://lists.apple.com/archives/coretext-dev/2010/Dec/msg00020.html
    """
    pair = left + right
    # 1) build a CFAttributedString carrying the font, feed a CTTypesetter
    dict_args = [None, 1, cf.kCFTypeDictionaryKeyCallBacks,
                 cf.kCFTypeDictionaryValueCallBacks]
    attributes = cf.CFDictionaryCreateMutable(*dict_args)
    cf.CFDictionaryAddValue(attributes, kCTFontAttributeName, font)
    attr_string = cf.CFAttributedStringCreate(None, CFSTR(pair), attributes)
    typesetter = ct.CTTypesetterCreateWithAttributedString(attr_string)
    cf.CFRelease(attr_string)
    cf.CFRelease(attributes)
    # 2) extract a CTLine from it
    cf_range = CFRange(0, 1)
    line = ct.CTTypesetterCreateLine(typesetter, cf_range)
    # 3) caret position of string index 1 gives the kerned advance
    offset = ct.CTLineGetOffsetForStringIndex(line, 1, None)
    cf.CFRelease(line)
    cf.CFRelease(typesetter)
    return offset
|
This actually calculates the kerning + advance
|
entailment
|
def set_data(self, xs=None, ys=None, zs=None, colors=None):
    '''Update the mesh data.

    Parameters
    ----------
    xs : ndarray | None
        A 2d array of x coordinates for the vertices of the mesh.
        If None, the previously stored x coordinates are reused.
    ys : ndarray | None
        A 2d array of y coordinates for the vertices of the mesh.
        If None, the previously stored y coordinates are reused.
    zs : ndarray | None
        A 2d array of z coordinates for the vertices of the mesh.
        If None, the previously stored z coordinates are reused.
    colors : ndarray | None
        The color at each point of the mesh. Must have shape
        (width, height, 4) or (width, height, 3) for rgba or rgb
        color definitions respectively.
    '''
    # NOTE(review): the "dirty" flag (__vertices = None) is set when a
    # coordinate argument is *omitted*, not when a new one is supplied,
    # so a call passing all three of xs/ys/zs leaves __vertices
    # untouched. This mirrors the code as-is -- confirm against the
    # class invariants before changing.
    if xs is None:
        xs = self._xs
        self.__vertices = None
    if ys is None:
        ys = self._ys
        self.__vertices = None
    if zs is None:
        zs = self._zs
        self.__vertices = None
    # Rebuild the grid mesh and remember the coordinate arrays.
    if self.__vertices is None:
        vertices, indices = create_grid_mesh(xs, ys, zs)
        self._xs = xs
        self._ys = ys
        self._zs = zs
    # NOTE(review): this second block recomputes the same mesh from the
    # just-stored arrays and pushes it into the meshdata -- presumably
    # the first create_grid_mesh call is redundant; verify before
    # simplifying.
    if self.__vertices is None:
        vertices, indices = create_grid_mesh(self._xs, self._ys, self._zs)
        self.__meshdata.set_vertices(vertices)
        self.__meshdata.set_faces(indices)
    if colors is not None:
        # Flatten (width, height, channels) -> (width*height, channels)
        self.__meshdata.set_vertex_colors(colors.reshape(
            colors.shape[0] * colors.shape[1], colors.shape[2]))
    MeshVisual.set_data(self, meshdata=self.__meshdata)
|
Update the mesh data.
Parameters
----------
xs : ndarray | None
A 2d array of x coordinates for the vertices of the mesh.
ys : ndarray | None
A 2d array of y coordinates for the vertices of the mesh.
zs : ndarray | None
A 2d array of z coordinates for the vertices of the mesh.
colors : ndarray | None
The color at each point of the mesh. Must have shape
(width, height, 4) or (width, height, 3) for rgba or rgb
color definitions respectively.
|
entailment
|
def _make_png(data, level=6):
"""Convert numpy array to PNG byte array.
Parameters
----------
data : numpy.ndarray
Data must be (H, W, 3 | 4) with dtype = np.ubyte (np.uint8)
level : int
https://docs.python.org/2/library/zlib.html#zlib.compress
An integer from 0 to 9 controlling the level of compression:
* 1 is fastest and produces the least compression,
* 9 is slowest and produces the most.
* 0 is no compression.
The default value is 6.
Returns
-------
png : array
PNG formatted array
"""
# Eventually we might want to use ext/png.py for this, but this
# routine *should* be faster b/c it's speacialized for our use case
def mkchunk(data, name):
if isinstance(data, np.ndarray):
size = data.nbytes
else:
size = len(data)
chunk = np.empty(size + 12, dtype=np.ubyte)
chunk.data[0:4] = np.array(size, '>u4').tostring()
chunk.data[4:8] = name.encode('ASCII')
chunk.data[8:8 + size] = data
# and-ing may not be necessary, but is done for safety:
# https://docs.python.org/3/library/zlib.html#zlib.crc32
chunk.data[-4:] = np.array(zlib.crc32(chunk[4:-4]) & 0xffffffff,
'>u4').tostring()
return chunk
if data.dtype != np.ubyte:
raise TypeError('data.dtype must be np.ubyte (np.uint8)')
dim = data.shape[2] # Dimension
if dim not in (3, 4):
raise TypeError('data.shape[2] must be in (3, 4)')
# www.libpng.org/pub/png/spec/1.2/PNG-Chunks.html#C.IHDR
if dim == 4:
ctyp = 0b0110 # RGBA
else:
ctyp = 0b0010 # RGB
# www.libpng.org/pub/png/spec/1.2/PNG-Structure.html
header = b'\x89PNG\x0d\x0a\x1a\x0a' # header
h, w = data.shape[:2]
depth = data.itemsize * 8
ihdr = struct.pack('!IIBBBBB', w, h, depth, ctyp, 0, 0, 0)
c1 = mkchunk(ihdr, 'IHDR')
# www.libpng.org/pub/png/spec/1.2/PNG-Chunks.html#C.IDAT
# insert filter byte at each scanline
idat = np.empty((h, w * dim + 1), dtype=np.ubyte)
idat[:, 1:] = data.reshape(h, w * dim)
idat[:, 0] = 0
comp_data = zlib.compress(idat, level)
c2 = mkchunk(comp_data, 'IDAT')
c3 = mkchunk(np.empty((0,), dtype=np.ubyte), 'IEND')
# concatenate
lh = len(header)
png = np.empty(lh + c1.nbytes + c2.nbytes + c3.nbytes, dtype=np.ubyte)
png.data[:lh] = header
p = lh
for chunk in (c1, c2, c3):
png[p:p + len(chunk)] = chunk
p += chunk.nbytes
return png
|
Convert numpy array to PNG byte array.
Parameters
----------
data : numpy.ndarray
Data must be (H, W, 3 | 4) with dtype = np.ubyte (np.uint8)
level : int
https://docs.python.org/2/library/zlib.html#zlib.compress
An integer from 0 to 9 controlling the level of compression:
* 1 is fastest and produces the least compression,
* 9 is slowest and produces the most.
* 0 is no compression.
The default value is 6.
Returns
-------
png : array
PNG formatted array
|
entailment
|
def read_png(filename):
    """Read a PNG file to RGB8 or RGBA8
    Unlike imread, this requires no external dependencies.

    Parameters
    ----------
    filename : str
        File to read.

    Returns
    -------
    data : array
        Image data of shape (H, W, 3) or (H, W, 4), dtype uint8.

    See also
    --------
    write_png, imread, imsave
    """
    reader = Reader(filename)
    try:
        # asDirect() metadata tells us whether an alpha channel is present
        has_alpha = reader.asDirect()[3]['alpha']
        if has_alpha:
            rows = reader.asRGBA8()[2]
            n_channels = 4
        else:
            rows = reader.asRGB8()[2]
            n_channels = 3
        flat = np.array([row for row in rows], np.uint8)
    finally:
        reader.file.close()
    # un-flatten each scanline into (pixels, channels)
    flat.shape = (flat.shape[0], flat.shape[1] // n_channels, n_channels)
    return flat
|
Read a PNG file to RGB8 or RGBA8
Unlike imread, this requires no external dependencies.
Parameters
----------
filename : str
File to read.
Returns
-------
data : array
Image data.
See also
--------
write_png, imread, imsave
|
entailment
|
def write_png(filename, data):
    """Write a PNG file
    Unlike imsave, this requires no external dependencies.

    Parameters
    ----------
    filename : str
        File to save to.
    data : array
        Image data; must be 3D with last dimension 3 (RGB) or 4 (RGBA).

    Raises
    ------
    ValueError
        If ``data`` is not a 3D array with last dimension 3 or 4.

    See also
    --------
    read_png, imread, imsave
    """
    data = np.asarray(data)
    # fix: the original check `not data.ndim == 3 and data.shape[-1] in
    # (3, 4)` had inverted precedence and never rejected a 3D array with a
    # bad channel count; validate both conditions explicitly.
    if data.ndim != 3 or data.shape[-1] not in (3, 4):
        raise ValueError('data must be a 3D array with last dimension 3 or 4')
    with open(filename, 'wb') as f:
        f.write(_make_png(data))
|
Write a PNG file
Unlike imsave, this requires no external dependencies.
Parameters
----------
filename : str
File to save to.
data : array
Image data.
See also
--------
read_png, imread, imsave
|
entailment
|
def imread(filename, format=None):
    """Read image data from disk
    Requires imageio or PIL.

    Parameters
    ----------
    filename : str
        Filename to read.
    format : str | None
        Format of the file. If None, it will be inferred from the filename.

    Returns
    -------
    data : array
        Image data.

    See also
    --------
    imsave, read_png, write_png
    """
    imageio, PIL = _check_img_lib()
    if imageio is not None:
        return imageio.imread(filename, format)
    if PIL is None:
        raise RuntimeError("imread requires the imageio or PIL package.")
    image = PIL.Image.open(filename)
    # palette images must be expanded before conversion to an array
    if image.mode == 'P':
        image = image.convert()
    arr = np.asarray(image)
    if len(arr.shape) == 0:
        raise MemoryError("Too little memory to convert PIL image to "
                          "array")
    return arr
|
Read image data from disk
Requires imageio or PIL.
Parameters
----------
filename : str
Filename to read.
format : str | None
Format of the file. If None, it will be inferred from the filename.
Returns
-------
data : array
Image data.
See also
--------
imsave, read_png, write_png
|
entailment
|
def imsave(filename, im, format=None):
    """Save image data to disk
    Requires imageio or PIL.

    Parameters
    ----------
    filename : str
        Filename to write.
    im : array
        Image data.
    format : str | None
        Format of the file. If None, it will be inferred from the filename.

    See also
    --------
    imread, read_png, write_png
    """
    # Import imageio or PIL
    imageio, PIL = _check_img_lib()
    if imageio is not None:
        return imageio.imsave(filename, im, format)
    if PIL is None:
        raise RuntimeError("imsave requires the imageio or PIL package.")
    pil_image = PIL.Image.fromarray(im)
    pil_image.save(filename, format)
|
Save image data to disk
Requires imageio or PIL.
Parameters
----------
filename : str
Filename to write.
im : array
Image data.
format : str | None
Format of the file. If None, it will be inferred from the filename.
See also
--------
imread, read_png, write_png
|
entailment
|
def _check_img_lib():
"""Utility to search for imageio or PIL"""
# Import imageio or PIL
imageio = PIL = None
try:
import imageio
except ImportError:
try:
import PIL.Image
except ImportError:
pass
return imageio, PIL
|
Utility to search for imageio or PIL
|
entailment
|
def read_from_user(input_type, *args, **kwargs):
    '''Prompt the user for input of a specific type (e.g. float, str, int).

    Re-prompts until the entered text can be converted to ``input_type``.
    Designed to work with both python 2 and 3.

    Parameters
    ----------
    input_type : callable
        Type/converter applied to the raw input (e.g. ``int``, ``float``).
    *args, **kwargs
        Passed through to ``raw_input``/``input`` (typically the prompt).

    Returns
    -------
    The converted value.
    '''
    def _read_in(*args, **kwargs):
        while True:
            try:
                tmp = raw_input(*args, **kwargs)  # Python 2
            except NameError:
                tmp = input(*args, **kwargs)  # Python 3
            try:
                return input_type(tmp)
            # fix: the original bare `except:` also swallowed
            # KeyboardInterrupt/SystemExit; only conversion failures
            # should trigger a re-prompt
            except (ValueError, TypeError):
                print('Expected type', input_type)
    return _read_in(*args, **kwargs)
|
Helper function to prompt user for input of a specific type
e.g. float, str, int
Designed to work with both python 2 and 3
Yes I know this is ugly.
|
entailment
|
def load_builtin_slots():
    '''Load builtin slots from the data location.

    Reads the tab-separated file at ``BUILTIN_SLOTS_LOCATION``; each line
    is ``name<TAB>description``.

    Returns
    -------
    dict
        Maps line index -> {'name': ..., 'description': ...}.
    '''
    builtin_slots = {}
    # fix: use a context manager so the file handle is closed (the
    # original iterated an open() result and leaked it)
    with open(BUILTIN_SLOTS_LOCATION) as f:
        for index, line in enumerate(f):
            fields = line.strip().split('\t')
            builtin_slots[index] = {'name': fields[0],
                                    'description': fields[1]}
    return builtin_slots
|
Helper function to load builtin slots from the data location
|
entailment
|
def timer(logger=None, level=logging.INFO,
          fmt="function %(function_name)s execution time: %(execution_time).3f",
          *func_or_func_args, **timer_kwargs):
    """Function decorator displaying the function execution time.

    All kwargs are the arguments taken by the Timer class constructor.

    Parameters
    ----------
    logger : logging.Logger | None
        If given, the timing message is logged at *level*; otherwise it
        is printed.
    level : int
        Logging level used when *logger* is set.
    fmt : str
        %-style format with ``function_name`` and ``execution_time`` keys.
    """
    # store Timer kwargs in local variable so the namespace isn't polluted
    # by different level args and kwargs
    def wrapped_f(f):
        @functools.wraps(f)
        def wrapped(*args, **kwargs):
            with Timer(**timer_kwargs) as t:
                out = f(*args, **kwargs)
            context = {
                'function_name': f.__name__,
                'execution_time': t.elapsed,
            }
            if logger:
                logger.log(
                    level,
                    fmt % context,
                    extra=context)
            else:
                print(fmt % context)
            return out
        return wrapped
    # Support use both as a decorator factory and with a function passed
    # positionally. fix: collections.Callable was removed in Python 3.10;
    # the callable() builtin is the portable check.
    if len(func_or_func_args) == 1 and callable(func_or_func_args[0]):
        return wrapped_f(func_or_func_args[0])
    else:
        return wrapped_f
|
Function decorator displaying the function execution time
All kwargs are the arguments taken by the Timer class constructor.
|
entailment
|
def append(self, points, indices, **kwargs):
    """
    Append a new set of vertices to the collection.

    For kwargs argument, n is the number of vertices (local) or the number
    of item (shared)

    Parameters
    ----------
    points : np.array
        Vertices composing the triangles
    indices : np.array
        Indices describing triangles
    color : list, array or 4-tuple
        Path color
    """
    n_vertices = len(points)
    n_items = 1

    # Per-vertex data: every non-special field comes from kwargs or the
    # collection defaults.
    verts = np.empty(n_items * n_vertices, dtype=self.vtype)
    for field in self.vtype.names:
        if field not in ('collection_index', 'position'):
            verts[field] = kwargs.get(field, self._defaults[field])
    verts["position"] = points

    # Per-item (shared) data, stored as "uniforms".
    if self.utype:
        uniforms = np.zeros(n_items, dtype=self.utype)
        for field in self.utype.names:
            if field != "__unused__":
                uniforms[field] = kwargs.get(field, self._defaults[field])
    else:
        uniforms = None

    flat_indices = np.array(indices).ravel()
    Collection.append(self, vertices=verts, uniforms=uniforms,
                      indices=flat_indices, itemsize=n_vertices)
|
Append a new set of vertices to the collection.
For kwargs argument, n is the number of vertices (local) or the number
of item (shared)
Parameters
----------
points : np.array
Vertices composing the triangles
indices : np.array
Indices describing triangles
color : list, array or 4-tuple
Path color
|
entailment
|
def _mpl_to_vispy(fig):
    """Convert a given matplotlib figure to vispy
    This function is experimental and subject to change!
    Requires matplotlib and mplexporter.

    Parameters
    ----------
    fig : instance of matplotlib Figure
        The populated figure to display.

    Returns
    -------
    canvas : instance of Canvas
        The resulting vispy Canvas.
    """
    vispy_renderer = VispyRenderer()
    exporter = Exporter(vispy_renderer)
    # silence the py3k matplotlib warning during export
    with warnings.catch_warnings(record=True):
        exporter.run(fig)
    vispy_renderer._vispy_done()
    return vispy_renderer.canvas
|
Convert a given matplotlib figure to vispy
This function is experimental and subject to change!
Requires matplotlib and mplexporter.
Parameters
----------
fig : instance of matplotlib Figure
The populated figure to display.
Returns
-------
canvas : instance of Canvas
The resulting vispy Canvas.
|
entailment
|
def show(block=False):
    """Show current figures using vispy

    Parameters
    ----------
    block : bool
        If True, blocking mode will be used. If False, then non-blocking
        / interactive mode will be used.

    Returns
    -------
    canvases : list
        List of the vispy canvases that were created.
    """
    if not has_matplotlib():
        raise ImportError('Requires matplotlib version >= 1.2')
    canvases = []
    for fignum in plt.get_fignums():
        canvases.append(_mpl_to_vispy(plt.figure(fignum)))
    # in blocking mode, run the app loop on the first canvas
    if block and len(canvases) > 0:
        canvases[0].app.run()
    return canvases
|
Show current figures using vispy
Parameters
----------
block : bool
If True, blocking mode will be used. If False, then non-blocking
/ interactive mode will be used.
Returns
-------
canvases : list
List of the vispy canvases that were created.
|
entailment
|
def _mpl_ax_to(self, mplobj, output='vb'):
"""Helper to get the parent axes of a given mplobj"""
for ax in self._axs.values():
if ax['ax'] is mplobj.axes:
return ax[output]
raise RuntimeError('Parent axes could not be found!')
|
Helper to get the parent axes of a given mplobj
|
entailment
|
def random(adjacency_mat, directed=False, random_state=None):
    """
    Place the graph nodes at random places.

    Parameters
    ----------
    adjacency_mat : matrix or sparse
        The graph adjacency matrix
    directed : bool
        Whether the graph is directed. If this is True, is will also
        generate the vertices for arrows, which can be passed to an
        ArrowVisual.
    random_state : instance of RandomState | int | None
        Random state to use. Can be None to use ``np.random``.

    Yields
    ------
    (node_vertices, line_vertices, arrow_vertices) : tuple
        Yields the node and line vertices in a tuple. This layout only yields a
        single time, and has no builtin animation
    """
    # Normalize random_state to something with a .rand() method
    if random_state is None:
        rng = np.random
    elif isinstance(random_state, np.random.RandomState):
        rng = random_state
    else:
        rng = np.random.RandomState(random_state)

    if issparse(adjacency_mat):
        adjacency_mat = adjacency_mat.tocoo()

    # Randomly place nodes, visual coordinate system is between 0 and 1
    num_nodes = adjacency_mat.shape[0]
    node_coords = rng.rand(num_nodes, 2)

    line_vertices, arrows = _straight_line_vertices(adjacency_mat,
                                                    node_coords, directed)
    yield node_coords, line_vertices, arrows
|
Place the graph nodes at random places.
Parameters
----------
adjacency_mat : matrix or sparse
The graph adjacency matrix
directed : bool
Whether the graph is directed. If this is True, is will also
generate the vertices for arrows, which can be passed to an
ArrowVisual.
random_state : instance of RandomState | int | None
Random state to use. Can be None to use ``np.random``.
Yields
------
(node_vertices, line_vertices, arrow_vertices) : tuple
Yields the node and line vertices in a tuple. This layout only yields a
single time, and has no builtin animation
|
entailment
|
def isocurve(data, level, connected=False, extend_to_edge=False):
    """
    Generate isocurve from 2D data using marching squares algorithm.

    Parameters
    ----------
    data : ndarray
        2D numpy array of scalar values
    level : float
        The level at which to generate an isosurface
    connected : bool
        If False, return a single long list of point pairs
        If True, return multiple long lists of connected point
        locations. (This is slower but better for drawing
        continuous lines)
    extend_to_edge : bool
        If True, extend the curves to reach the exact edges of
        the data.

    Returns
    -------
    lines : list
        If *connected* is False, a list of 2-point segments; otherwise a
        list of point lists, one per connected curve.
    """
    # This function is SLOW; plenty of room for optimization here.
    if extend_to_edge:
        # Pad the data by one cell on every side, replicating edge values
        # (corners copied from adjacent cells), so curves can reach the
        # exact array boundary; the +0.5/-1 shifts below undo the padding.
        d2 = np.empty((data.shape[0]+2, data.shape[1]+2), dtype=data.dtype)
        d2[1:-1, 1:-1] = data
        d2[0, 1:-1] = data[0]
        d2[-1, 1:-1] = data[-1]
        d2[1:-1, 0] = data[:, 0]
        d2[1:-1, -1] = data[:, -1]
        d2[0, 0] = d2[0, 1]
        d2[0, -1] = d2[1, -1]
        d2[-1, 0] = d2[-1, 1]
        d2[-1, -1] = d2[-1, -2]
        data = d2

    # Marching-squares lookup: for each of the 16 corner below/above
    # patterns, the cell edges (taken in pairs) crossed by the curve.
    side_table = [
        [],
        [0, 1],
        [1, 2],
        [0, 2],
        [0, 3],
        [1, 3],
        [0, 1, 2, 3],
        [2, 3],
        [2, 3],
        [0, 1, 2, 3],
        [1, 3],
        [0, 3],
        [0, 2],
        [1, 2],
        [0, 1],
        []
    ]

    # Corner offsets (row, col) delimiting each of the four cell edges.
    edge_key = [
        [(0, 1), (0, 0)],
        [(0, 0), (1, 0)],
        [(1, 0), (1, 1)],
        [(1, 1), (0, 1)]
    ]
    level = float(level)
    lines = []

    # mark everything below the isosurface level
    mask = data < level

    # make four sub-fields (one per cell corner) and compute the 4-bit
    # marching-squares index for every grid cell
    index = np.zeros([x-1 for x in data.shape], dtype=np.ubyte)
    fields = np.empty((2, 2), dtype=object)
    slices = [slice(0, -1), slice(1, None)]
    for i in [0, 1]:
        for j in [0, 1]:
            fields[i, j] = mask[slices[i], slices[j]]
            vertIndex = i+2*j
            index += (fields[i, j] * 2**vertIndex).astype(np.ubyte)

    # add lines
    for i in range(index.shape[0]):  # data x-axis
        for j in range(index.shape[1]):  # data y-axis
            sides = side_table[index[i, j]]
            for l in range(0, len(sides), 2):  # faces for this grid cell
                edges = sides[l:l+2]
                pts = []
                for m in [0, 1]:  # points in this face
                    # p1, p2 are points at either side of an edge
                    p1 = edge_key[edges[m]][0]
                    p2 = edge_key[edges[m]][1]
                    # v1 and v2 are the values at p1 and p2
                    v1 = data[i+p1[0], j+p1[1]]
                    v2 = data[i+p2[0], j+p2[1]]
                    # fractional position of the level crossing on the edge
                    f = (level-v1) / (v2-v1)
                    fi = 1.0 - f
                    # interpolate between corners
                    p = (p1[0]*fi + p2[0]*f + i + 0.5,
                         p1[1]*fi + p2[1]*f + j + 0.5)
                    if extend_to_edge:
                        # check bounds (and undo the one-cell padding)
                        p = (min(data.shape[0]-2, max(0, p[0]-1)),
                             min(data.shape[1]-2, max(0, p[1]-1)))
                    if connected:
                        gridKey = (i + (1 if edges[m] == 2 else 0),
                                   j + (1 if edges[m] == 3 else 0),
                                   edges[m] % 2)
                        # give the actual position and a key identifying the
                        # grid location (for connecting segments)
                        pts.append((p, gridKey))
                    else:
                        pts.append(p)
                lines.append(pts)

    if not connected:
        return lines

    # turn disjoint list of segments into continuous lines
    points = {}  # maps each point (by grid key) to its connections
    for a, b in lines:
        if a[1] not in points:
            points[a[1]] = []
        points[a[1]].append([a, b])
        if b[1] not in points:
            points[b[1]] = []
        points[b[1]].append([b, a])

    # rearrange into chains: repeatedly extend each chain at its tail by
    # the segment continuing past the endpoint, consuming used points
    for k in list(points.keys()):
        try:
            chains = points[k]
        except KeyError:  # already used this point elsewhere
            continue
        for chain in chains:
            x = None
            while True:
                if x == chain[-1][1]:
                    break  # nothing left to do on this chain
                x = chain[-1][1]
                if x == k:
                    # chain has looped; we're done and can ignore the opposite
                    # chain
                    break
                y = chain[-2][1]
                connects = points[x]
                for conn in connects[:]:
                    if conn[1][1] != y:
                        chain.extend(conn[1:])
                del points[x]
                if chain[0][1] == chain[-1][1]:
                    # looped chain; no need to continue the other direction
                    chains.pop()
                    break

    # extract point locations
    lines = []
    for chain in points.values():
        if len(chain) == 2:
            # join together ends of chain
            chain = chain[1][1:][::-1] + chain[0]
        else:
            chain = chain[0]
        lines.append([pt[0] for pt in chain])

    return lines
|
Generate isocurve from 2D data using marching squares algorithm.
Parameters
----------
data : ndarray
2D numpy array of scalar values
level : float
The level at which to generate an isosurface
connected : bool
If False, return a single long list of point pairs
If True, return multiple long lists of connected point
locations. (This is slower but better for drawing
continuous lines)
extend_to_edge : bool
If True, extend the curves to reach the exact edges of
the data.
|
entailment
|
def pos(self):
    """Position of this event in the visual's local coordinate system.

    Computed lazily from the canvas-space mouse position on first
    access, then cached.
    """
    if self._pos is not None:
        return self._pos
    transform = self.visual.get_transform('canvas', 'visual')
    self._pos = transform.map(self.mouse_event.pos)
    return self._pos
|
The position of this event in the local coordinate system of the
visual.
|
entailment
|
def last_event(self):
    """The mouse event immediately prior to this one.

    This property is None when no mouse buttons are pressed.
    """
    prior = self.mouse_event.last_event
    if prior is None:
        return None
    clone = self.copy()
    clone.mouse_event = prior
    return clone
|
The mouse event immediately prior to this one. This
property is None when no mouse buttons are pressed.
|
entailment
|
def press_event(self):
    """The mouse press event that initiated a mouse drag, if any."""
    press = self.mouse_event.press_event
    if press is None:
        return None
    clone = self.copy()
    clone.mouse_event = press
    return clone
|
The mouse press event that initiated a mouse drag, if any.
|
entailment
|
def check_enum(enum, name=None, valid=None):
    """ Get lowercase string representation of enum.

    Parameters
    ----------
    enum : int | str
        Either a GL-style enum (an int whose ``name`` attribute starts
        with ``'GL_'``) or a string.
    name : str | None
        Name used in error messages; defaults to 'enum'.
    valid : list | None
        If given, the result must be one of these values.

    Returns
    -------
    str
        The lowercase string representation.

    Raises
    ------
    ValueError
        If no string representation can be derived, or the result is not
        in *valid*.
    """
    name = name or 'enum'
    # Try to convert
    res = None
    if isinstance(enum, int):
        if hasattr(enum, 'name') and enum.name.startswith('GL_'):
            res = enum.name[3:].lower()
    elif isinstance(enum, string_types):
        res = enum.lower()
    # Check
    if res is None:
        # fix: original message read "...represenatation forenum %r"
        # (spelling error plus a missing space between the two literals)
        raise ValueError('Could not determine string representation for '
                         'enum %r' % enum)
    elif valid and res not in valid:
        raise ValueError('Value of %s must be one of %r, not %r' %
                         (name, valid, enum))
    return res
|
Get lowercase string representation of enum.
|
entailment
|
def draw_texture(tex):
    """Draw a 2D texture to the current viewport

    Parameters
    ----------
    tex : instance of Texture2D
        The texture to draw.
    """
    from .program import Program
    prog = Program(vert_draw, frag_draw)
    prog['u_texture'] = tex
    # full-viewport quad as a triangle strip, with flipped v texcoords
    corners = [[-1., -1.], [-1., 1.], [1., -1.], [1., 1.]]
    texcoords = [[0., 1.], [0., 0.], [1., 1.], [1., 0.]]
    prog['a_position'] = corners
    prog['a_texcoord'] = texcoords
    prog.draw('triangle_strip')
|
Draw a 2D texture to the current viewport
Parameters
----------
tex : instance of Texture2D
The texture to draw.
|
entailment
|
def _get_dpi_from(cmd, pattern, func):
    """Match pattern against the output of func, passing the results as
    floats to func. If anything fails, return None.
    """
    try:
        out, _ = run_subprocess([cmd])
    except (OSError, CalledProcessError):
        # command missing or failed -> fall through to implicit None
        return None
    match = re.search(pattern, out)
    if match is None:
        return None
    return func(*map(float, match.groups()))
|
Match pattern against the output of func, passing the results as
floats to func. If anything fails, return None.
|
entailment
|
def get_dpi(raise_error=True):
    """Get screen DPI from the OS

    Parameters
    ----------
    raise_error : bool
        If True, raise an error if DPI could not be determined.

    Returns
    -------
    dpi : float
        Dots per inch of the primary screen.
    """
    # If we are running without an X server (e.g. OSMesa), use a fixed DPI
    if 'DISPLAY' not in os.environ:
        return 96.
    from_xdpyinfo = _get_dpi_from(
        'xdpyinfo', r'(\d+)x(\d+) dots per inch',
        lambda x_dpi, y_dpi: (x_dpi + y_dpi) / 2)
    if from_xdpyinfo is not None:
        return from_xdpyinfo
    from_xrandr = _get_dpi_from(
        'xrandr', r'(\d+)x(\d+).*?(\d+)mm x (\d+)mm',
        lambda x_px, y_px, x_mm, y_mm: 25.4 * (x_px / x_mm + y_px / y_mm) / 2)
    if from_xrandr is not None:
        return from_xrandr
    if raise_error:
        raise RuntimeError('could not determine DPI')
    else:
        logger.warning('could not determine DPI')
    # fix: return a float for consistency with the docstring and the
    # no-DISPLAY fallback above (was the int 96)
    return 96.
|
Get screen DPI from the OS
Parameters
----------
raise_error : bool
If True, raise an error if DPI could not be determined.
Returns
-------
dpi : float
Dots per inch of the primary screen.
|
entailment
|
def set_data(self, adjacency_mat=None, **kwargs):
    """Set the data

    Parameters
    ----------
    adjacency_mat : ndarray | None
        The adjacency matrix.
    **kwargs : dict
        Keyword arguments to pass to the arrows.

    Raises
    ------
    ValueError
        If the adjacency matrix is not square.
    TypeError
        If unknown keyword arguments remain after dispatching.
    """
    if adjacency_mat is not None:
        if adjacency_mat.shape[0] != adjacency_mat.shape[1]:
            raise ValueError("Adjacency matrix should be square.")
        self._adjacency_mat = adjacency_mat
    # Attributes applied directly to the edge (arrow) visual
    for k in self._arrow_attributes:
        if k in kwargs:
            translated = (self._arrow_kw_trans[k] if k in
                          self._arrow_kw_trans else k)
            setattr(self._edges, translated, kwargs.pop(k))
    # Keyword arguments forwarded to the arrows later
    arrow_kwargs = {}
    for k in self._arrow_kwargs:
        if k in kwargs:
            translated = (self._arrow_kw_trans[k] if k in
                          self._arrow_kw_trans else k)
            arrow_kwargs[translated] = kwargs.pop(k)
    # Keyword arguments forwarded to the node markers later
    node_kwargs = {}
    for k in self._node_kwargs:
        if k in kwargs:
            translated = (self._node_kw_trans[k] if k in
                          self._node_kw_trans else k)
            node_kwargs[translated] = kwargs.pop(k)
    if len(kwargs) > 0:
        # fix: the format string used an invalid '%S' specifier, which made
        # this raise ValueError instead of the intended TypeError message
        raise TypeError("%s.set_data() got invalid keyword arguments: %s"
                        % (self.__class__.__name__, list(kwargs.keys())))
    # The actual data is set in GraphVisual.animate_layout or
    # GraphVisual.set_final_layout
    self._arrow_data = arrow_kwargs
    self._node_data = node_kwargs
    if not self._animate:
        self.set_final_layout()
|
Set the data
Parameters
----------
adjacency_mat : ndarray | None
The adjacency matrix.
**kwargs : dict
Keyword arguments to pass to the arrows.
|
entailment
|
def calc_size(rect, orientation):
    """Calculate a size

    Parameters
    ----------
    rect : rectangle
        The rectangle.
    orientation : str
        Either "bottom" or "top".
    """
    total_halfx, total_halfy = rect.center
    # horizontal bars take their major axis along x, vertical along y
    if orientation in ("bottom", "top"):
        total_major_axis, total_minor_axis = total_halfx, total_halfy
    else:
        total_major_axis, total_minor_axis = total_halfy, total_halfx
    major_axis = total_major_axis * (1.0 -
                                     ColorBarWidget.major_axis_padding)
    minor_axis = major_axis * ColorBarWidget.minor_axis_ratio
    # if the minor axis is "leaking" from the padding, then clamp
    minor_limit = (total_minor_axis *
                   (1.0 - ColorBarWidget.minor_axis_padding))
    minor_axis = np.minimum(minor_axis, minor_limit)
    return (major_axis, minor_axis)
|
Calculate a size
Parameters
----------
rect : rectangle
The rectangle.
orientation : str
Either "bottom" or "top".
|
entailment
|
def dtype_reduce(dtype, level=0, depth=0):
    """
    Try to reduce dtype up to a given level when it is possible

    dtype = [ ('vertex', [('x', 'f4'), ('y', 'f4'), ('z', 'f4')]),
              ('normal', [('x', 'f4'), ('y', 'f4'), ('z', 'f4')]),
              ('color', [('r', 'f4'), ('g', 'f4'), ('b', 'f4'),
                         ('a', 'f4')])]

    level 0: ['color,vertex,normal,', 10, 'float32']
    level 1: [['color', 4, 'float32']
              ['normal', 3, 'float32']
              ['vertex', 3, 'float32']]
    """
    dtype = np.dtype(dtype)
    fields = dtype.fields

    # Leaf case: scalar or sub-array dtype with no named fields.
    if fields is None:
        count = reduce(mul, dtype.shape) if len(dtype.shape) else 1
        # size = dtype.itemsize / count
        if dtype.subdtype:
            name = str(dtype.subdtype[0])
        else:
            name = str(dtype)
        return ['', count, name]

    # Recurse into each field, collecting reduced descriptions.
    items = []
    name = ''
    for key, value in fields.items():
        reduced = dtype_reduce(value[0], level, depth + 1)
        if type(reduced[0]) is str:
            items.append([key, reduced[1], reduced[2]])
        else:
            items.append(reduced)
        name += key + ','

    # Merge the items only when all share a single base type.
    ctype = None
    count = 0
    for i, item in enumerate(items):
        # One item is a list, we cannot reduce
        if type(item[0]) is not str:
            return items
        if i == 0:
            ctype = item[2]
        elif item[2] != ctype:
            return items
        count += item[1]
    if depth >= level:
        return [name, count, ctype]
    return items
|
Try to reduce dtype up to a given level when it is possible
dtype = [ ('vertex', [('x', 'f4'), ('y', 'f4'), ('z', 'f4')]),
('normal', [('x', 'f4'), ('y', 'f4'), ('z', 'f4')]),
('color', [('r', 'f4'), ('g', 'f4'), ('b', 'f4'),
('a', 'f4')])]
level 0: ['color,vertex,normal,', 10, 'float32']
level 1: [['color', 4, 'float32']
['normal', 3, 'float32']
['vertex', 3, 'float32']]
|
entailment
|
def fetchcode(utype, prefix=""):
    """
    Generate the GLSL code needed to retrieve fake uniform values from a
    texture.

    uniforms : sampler2D
        Texture to fetch uniforms from
    uniforms_shape: vec3
        Size of texture (width,height,count) where count is the number of float
        to be fetched.
    collection_index: float
        Attribute giving the index of the uniforms to be fetched. This index
        relates to the index in the uniform array from python side.

    Returns the generated GLSL source (header declarations plus a
    ``fetch_uniforms()`` body) as a single string.
    """
    utype = np.dtype(utype)
    # Reduced description: list of [field_name, float_count, base_type]
    _utype = dtype_reduce(utype, level=1)
    header = """
uniform sampler2D uniforms;
uniform vec3 uniforms_shape;
attribute float collection_index;
"""
    # Header generation (easy): one varying per field, typed by its
    # component count.
    types = {1: 'float', 2: 'vec2 ', 3: 'vec3 ',
             4: 'vec4 ', 9: 'mat3 ', 16: 'mat4 '}
    for name, count, _ in _utype:
        if name != '__unused__':
            header += "varying %s %s%s;\n" % (types[count], prefix, name)
    # Body generation (not so easy): walk the uniform texture four floats
    # (one vec4 texel) at a time and scatter components into the varyings.
    body = """\nvoid fetch_uniforms() {
    float rows = uniforms_shape.x;
    float cols = uniforms_shape.y;
    float count = uniforms_shape.z;
    float index = collection_index;
    int index_x = int(mod(index, (floor(cols/(count/4.0))))) * int(count/4.0);
    int index_y = int(floor(index / (floor(cols/(count/4.0)))));
    float size_x = cols - 1.0;
    float size_y = rows - 1.0;
    float ty = 0.0;
    if (size_y > 0.0)
        ty = float(index_y)/size_y;
    int i = index_x;
    vec4 _uniform;\n"""
    _utype = dict([(name, count) for name, count, _ in _utype])
    # `store` tracks how many components of the current texel are unread.
    store = 0
    # Be very careful with utype name order (_utype.keys is wrong)
    for name in utype.names:
        if name == '__unused__':
            continue
        # `shift` tracks how many components of this field are filled.
        count, shift = _utype[name], 0
        size = count
        while count:
            if store == 0:
                # current texel exhausted: fetch the next vec4
                body += "\n    _uniform = texture2D(uniforms, vec2(float(i++)/size_x,ty));\n"  # noqa
                store = 4
            # swizzle of still-unread texel components
            if store == 4:
                a = "xyzw"
            elif store == 3:
                a = "yzw"
            elif store == 2:
                a = "zw"
            elif store == 1:
                a = "w"
            # swizzle of still-unfilled destination components
            if shift == 0:
                b = "xyzw"
            elif shift == 1:
                b = "yzw"
            elif shift == 2:
                b = "zw"
            elif shift == 3:
                b = "w"
            # copy as many components as both sides allow
            i = min(min(len(b), count), len(a))
            if size > 1:
                body += "    %s%s.%s = _uniform.%s;\n" % (prefix, name, b[:i], a[:i])  # noqa
            else:
                body += "    %s%s = _uniform.%s;\n" % (prefix, name, a[:i])
            count -= i
            shift += i
            store -= i
    body += """}\n\n"""
    return header + body
|
Generate the GLSL code needed to retrieve fake uniform values from a
texture.
uniforms : sampler2D
Texture to fetch uniforms from
uniforms_shape: vec3
Size of texture (width,height,count) where count is the number of float
to be fetched.
collection_index: float
Attribute giving the index of the uniforms to be fetched. This index
relates to the index in the uniform array from python side.
|
entailment
|
def create_cube():
    """ Generate vertices & indices for a filled and outlined cube

    Returns
    -------
    vertices : array
        Array of vertices suitable for use as a VertexBuffer.
    filled : array
        Indices to use to produce a filled cube.
    outline : array
        Indices to use to produce an outline of the cube.
    """
    vtype = [('position', np.float32, 3),
             ('texcoord', np.float32, 2),
             ('normal', np.float32, 3),
             ('color', np.float32, 4)]
    itype = np.uint32

    # Vertices positions
    p = np.array([[1, 1, 1], [-1, 1, 1], [-1, -1, 1], [1, -1, 1],
                  [1, -1, -1], [1, 1, -1], [-1, 1, -1], [-1, -1, -1]])

    # Face Normals -- one unit axis vector per face.
    # fix: the -x face normal was [-1, 0, 1] (non-unit, not axis-aligned);
    # it should be [-1, 0, 0]
    n = np.array([[0, 0, 1], [1, 0, 0], [0, 1, 0],
                  [-1, 0, 0], [0, -1, 0], [0, 0, -1]])

    # Vertice colors
    c = np.array([[1, 1, 1, 1], [0, 1, 1, 1], [0, 0, 1, 1], [1, 0, 1, 1],
                  [1, 0, 0, 1], [1, 1, 0, 1], [0, 1, 0, 1], [0, 0, 0, 1]])

    # Texture coords
    t = np.array([[0, 0], [0, 1], [1, 1], [1, 0]])

    # Per-face index lists into the arrays above (4 corners x 6 faces)
    faces_p = [0, 1, 2, 3,
               0, 3, 4, 5,
               0, 5, 6, 1,
               1, 6, 7, 2,
               7, 4, 3, 2,
               4, 7, 6, 5]
    faces_c = [0, 1, 2, 3,
               0, 3, 4, 5,
               0, 5, 6, 1,
               1, 6, 7, 2,
               7, 4, 3, 2,
               4, 7, 6, 5]
    faces_n = [0, 0, 0, 0,
               1, 1, 1, 1,
               2, 2, 2, 2,
               3, 3, 3, 3,
               4, 4, 4, 4,
               5, 5, 5, 5]
    faces_t = [0, 1, 2, 3,
               0, 1, 2, 3,
               0, 1, 2, 3,
               3, 2, 1, 0,
               0, 1, 2, 3,
               0, 1, 2, 3]

    vertices = np.zeros(24, vtype)
    vertices['position'] = p[faces_p]
    vertices['normal'] = n[faces_n]
    vertices['color'] = c[faces_c]
    vertices['texcoord'] = t[faces_t]

    # Two triangles per face (6 indices), offset by 4 vertices per face
    filled = np.resize(
        np.array([0, 1, 2, 0, 2, 3], dtype=itype), 6 * (2 * 3))
    filled += np.repeat(4 * np.arange(6, dtype=itype), 6)
    filled = filled.reshape((len(filled) // 3, 3))

    # Four edges per face (8 indices), same per-face offset
    outline = np.resize(
        np.array([0, 1, 1, 2, 2, 3, 3, 0], dtype=itype), 6 * (2 * 4))
    outline += np.repeat(4 * np.arange(6, dtype=itype), 8)

    return vertices, filled, outline
|
Generate vertices & indices for a filled and outlined cube
Returns
-------
vertices : array
Array of vertices suitable for use as a VertexBuffer.
filled : array
Indices to use to produce a filled cube.
outline : array
Indices to use to produce an outline of the cube.
|
entailment
|
def create_plane(width=1, height=1, width_segments=1, height_segments=1,
                 direction='+z'):
    """ Generate vertices & indices for a filled and outlined plane.

    Parameters
    ----------
    width : float
        Plane width.
    height : float
        Plane height.
    width_segments : int
        Plane segments count along the width.
    height_segments : float
        Plane segments count along the height.
    direction: unicode
        ``{'-x', '+x', '-y', '+y', '-z', '+z'}``
        Direction the plane will be facing.

    Returns
    -------
    vertices : array
        Array of vertices suitable for use as a VertexBuffer.
    faces : array
        Indices to use to produce a filled plane.
    outline : array
        Indices to use to produce an outline of the plane.

    Raises
    ------
    ValueError
        If *direction* is not one of the six recognized axis directions.

    References
    ----------
    .. [1] Cabello, R. (n.d.). PlaneBufferGeometry.js. Retrieved May 12, 2015,
        from http://git.io/vU1Fh
    """
    x_grid = width_segments
    y_grid = height_segments
    x_grid1 = x_grid + 1
    y_grid1 = y_grid + 1
    # Positions, normals and texcoords, laid out flat and centered on the
    # origin in the xy plane (x varies fastest), facing +z initially.
    positions = np.zeros(x_grid1 * y_grid1 * 3)
    normals = np.zeros(x_grid1 * y_grid1 * 3)
    texcoords = np.zeros(x_grid1 * y_grid1 * 2)
    y = np.arange(y_grid1) * height / y_grid - height / 2
    x = np.arange(x_grid1) * width / x_grid - width / 2
    positions[::3] = np.tile(x, y_grid1)
    positions[1::3] = -np.repeat(y, x_grid1)
    normals[2::3] = 1
    texcoords[::2] = np.tile(np.arange(x_grid1) / x_grid, y_grid1)
    texcoords[1::2] = np.repeat(1 - np.arange(y_grid1) / y_grid, x_grid1)
    # Faces and outline: two triangles and four edges per grid cell.
    faces, outline = [], []
    for i_y in range(y_grid):
        for i_x in range(x_grid):
            a = i_x + x_grid1 * i_y
            b = i_x + x_grid1 * (i_y + 1)
            c = (i_x + 1) + x_grid1 * (i_y + 1)
            d = (i_x + 1) + x_grid1 * i_y
            faces.extend(((a, b, d), (b, c, d)))
            outline.extend(((a, b), (b, c), (c, d), (d, a)))
    positions = np.reshape(positions, (-1, 3))
    texcoords = np.reshape(texcoords, (-1, 2))
    normals = np.reshape(normals, (-1, 3))
    faces = np.reshape(faces, (-1, 3)).astype(np.uint32)
    outline = np.reshape(outline, (-1, 2)).astype(np.uint32)
    # Reorient the +z-facing plane toward the requested direction by
    # cyclically permuting the coordinate axes and flipping the normals.
    direction = direction.lower()
    if direction in ('-x', '+x'):
        shift, neutral_axis = 1, 0
    elif direction in ('-y', '+y'):
        shift, neutral_axis = -1, 1
    elif direction in ('-z', '+z'):
        shift, neutral_axis = 0, 2
    else:
        # BUGFIX: an unrecognized direction previously fell through and
        # crashed later with a NameError on ``shift``; fail fast instead.
        raise ValueError("direction must be one of "
                         "'-x', '+x', '-y', '+y', '-z', '+z', got %r"
                         % (direction,))
    sign = -1 if '-' in direction else 1
    positions = np.roll(positions, shift, -1)
    normals = np.roll(normals, shift, -1) * sign
    # Color each vertex by its normalized position (rgb in [0, 1],
    # alpha 1); the constant axis gets channel value 0.
    colors = np.ravel(positions)
    colors = np.hstack((np.reshape(np.interp(colors,
                                             (np.min(colors),
                                              np.max(colors)),
                                             (0, 1)),
                                   positions.shape),
                        np.ones((positions.shape[0], 1))))
    colors[..., neutral_axis] = 0
    vertices = np.zeros(positions.shape[0],
                        [('position', np.float32, 3),
                         ('texcoord', np.float32, 2),
                         ('normal', np.float32, 3),
                         ('color', np.float32, 4)])
    vertices['position'] = positions
    vertices['texcoord'] = texcoords
    vertices['normal'] = normals
    vertices['color'] = colors
    return vertices, faces, outline
|
Generate vertices & indices for a filled and outlined plane.
Parameters
----------
width : float
Plane width.
height : float
Plane height.
width_segments : int
Plane segments count along the width.
height_segments : float
Plane segments count along the height.
direction: unicode
``{'-x', '+x', '-y', '+y', '-z', '+z'}``
Direction the plane will be facing.
Returns
-------
vertices : array
Array of vertices suitable for use as a VertexBuffer.
faces : array
Indices to use to produce a filled plane.
outline : array
Indices to use to produce an outline of the plane.
References
----------
.. [1] Cabello, R. (n.d.). PlaneBufferGeometry.js. Retrieved May 12, 2015,
from http://git.io/vU1Fh
|
entailment
|
def create_box(width=1, height=1, depth=1, width_segments=1, height_segments=1,
               depth_segments=1, planes=None):
    """ Generate vertices & indices for a filled and outlined box.
    Parameters
    ----------
    width : float
        Box width.
    height : float
        Box height.
    depth : float
        Box depth.
    width_segments : int
        Box segments count along the width.
    height_segments : float
        Box segments count along the height.
    depth_segments : float
        Box segments count along the depth.
    planes: array_like
        Any combination of ``{'-x', '+x', '-y', '+y', '-z', '+z'}``
        Included planes in the box construction.
    Returns
    -------
    vertices : array
        Array of vertices suitable for use as a VertexBuffer.
    faces : array
        Indices to use to produce a filled box.
    outline : array
        Indices to use to produce an outline of the box.
    """
    # Default to all six planes; plane names are case-insensitive.
    planes = (('+x', '-x', '+y', '-y', '+z', '-z')
              if planes is None else
              [d.lower() for d in planes])
    w_s, h_s, d_s = width_segments, height_segments, depth_segments
    # Build each requested face as an oriented plane, then translate it to
    # the matching side of the box.
    # NOTE(review): the plane extents and offsets mix width/height/depth
    # (e.g. the z-facing planes are built width x depth but offset by
    # height / 2) -- confirm against the intended axis conventions before
    # relying on non-cubic boxes.
    planes_m = []
    if '-z' in planes:
        planes_m.append(create_plane(width, depth, w_s, d_s, '-z'))
        planes_m[-1][0]['position'][..., 2] -= height / 2
    if '+z' in planes:
        planes_m.append(create_plane(width, depth, w_s, d_s, '+z'))
        planes_m[-1][0]['position'][..., 2] += height / 2
    if '-y' in planes:
        planes_m.append(create_plane(height, width, h_s, w_s, '-y'))
        planes_m[-1][0]['position'][..., 1] -= depth / 2
    if '+y' in planes:
        planes_m.append(create_plane(height, width, h_s, w_s, '+y'))
        planes_m[-1][0]['position'][..., 1] += depth / 2
    if '-x' in planes:
        planes_m.append(create_plane(depth, height, d_s, h_s, '-x'))
        planes_m[-1][0]['position'][..., 0] -= width / 2
    if '+x' in planes:
        planes_m.append(create_plane(depth, height, d_s, h_s, '+x'))
        planes_m[-1][0]['position'][..., 0] += width / 2
    # Concatenate the per-plane arrays, offsetting each plane's indices by
    # the number of vertices emitted so far.
    positions = np.zeros((0, 3), dtype=np.float32)
    texcoords = np.zeros((0, 2), dtype=np.float32)
    normals = np.zeros((0, 3), dtype=np.float32)
    faces = np.zeros((0, 3), dtype=np.uint32)
    outline = np.zeros((0, 2), dtype=np.uint32)
    offset = 0
    for vertices_p, faces_p, outline_p in planes_m:
        positions = np.vstack((positions, vertices_p['position']))
        texcoords = np.vstack((texcoords, vertices_p['texcoord']))
        normals = np.vstack((normals, vertices_p['normal']))
        faces = np.vstack((faces, faces_p + offset))
        outline = np.vstack((outline, outline_p + offset))
        offset += vertices_p['position'].shape[0]
    vertices = np.zeros(positions.shape[0],
                        [('position', np.float32, 3),
                         ('texcoord', np.float32, 2),
                         ('normal', np.float32, 3),
                         ('color', np.float32, 4)])
    # Color each vertex by its normalized position (rgb in [0, 1], alpha 1).
    colors = np.ravel(positions)
    colors = np.hstack((np.reshape(np.interp(colors,
                                             (np.min(colors),
                                              np.max(colors)),
                                             (0, 1)),
                                   positions.shape),
                        np.ones((positions.shape[0], 1))))
    vertices['position'] = positions
    vertices['texcoord'] = texcoords
    vertices['normal'] = normals
    vertices['color'] = colors
    return vertices, faces, outline
|
Generate vertices & indices for a filled and outlined box.
Parameters
----------
width : float
Box width.
height : float
Box height.
depth : float
Box depth.
width_segments : int
Box segments count along the width.
height_segments : float
Box segments count along the height.
depth_segments : float
Box segments count along the depth.
planes: array_like
Any combination of ``{'-x', '+x', '-y', '+y', '-z', '+z'}``
Included planes in the box construction.
Returns
-------
vertices : array
Array of vertices suitable for use as a VertexBuffer.
faces : array
Indices to use to produce a filled box.
outline : array
Indices to use to produce an outline of the box.
|
entailment
|
def create_sphere(rows=10, cols=10, depth=10, radius=1.0, offset=True,
                  subdivisions=3, method='latitude'):
    """Create a sphere

    Parameters
    ----------
    rows : int
        Number of rows (for method='latitude' and 'cube').
    cols : int
        Number of columns (for method='latitude' and 'cube').
    depth : int
        Number of depth segments (for method='cube').
    radius : float
        Sphere radius.
    offset : bool
        Rotate each row by half a column (for method='latitude').
    subdivisions : int
        Number of subdivisions to perform (for method='ico')
    method : str
        Method for generating sphere. Accepts 'latitude' for latitude-
        longitude, 'ico' for icosahedron, and 'cube' for cube based
        tessellation.

    Returns
    -------
    sphere : MeshData
        Vertices and faces computed for a spherical surface.
    """
    # Dispatch to the tessellation strategy selected by ``method``.
    if method == 'latitude':
        return _latitude(rows, cols, radius, offset)
    if method == 'ico':
        return _ico(radius, subdivisions)
    if method == 'cube':
        return _cube(rows, cols, depth, radius)
    raise Exception("Invalid method. Accepts: 'latitude', 'ico', 'cube'")
|
Create a sphere
Parameters
----------
rows : int
Number of rows (for method='latitude' and 'cube').
cols : int
Number of columns (for method='latitude' and 'cube').
depth : int
Number of depth segments (for method='cube').
radius : float
Sphere radius.
offset : bool
Rotate each row by half a column (for method='latitude').
subdivisions : int
Number of subdivisions to perform (for method='ico')
method : str
Method for generating sphere. Accepts 'latitude' for latitude-
longitude, 'ico' for icosahedron, and 'cube' for cube based
tessellation.
Returns
-------
sphere : MeshData
Vertices and faces computed for a spherical surface.
|
entailment
|
def create_cylinder(rows, cols, radius=[1.0, 1.0], length=1.0, offset=False):
    """Create a cylinder

    Parameters
    ----------
    rows : int
        Number of rows.
    cols : int
        Number of columns.
    radius : float | tuple of float
        Cylinder radii as (bottom, top). A scalar gives a constant radius.
    length : float
        Length of the cylinder.
    offset : bool
        Rotate each row by half a column.

    Returns
    -------
    cylinder : MeshData
        Vertices and faces computed for a cylindrical surface.
    """
    verts = np.empty((rows+1, cols, 3), dtype=np.float32)
    # BUGFIX: expand any scalar radius to (bottom, top). Previously only
    # ``int`` scalars were expanded, so a float scalar (e.g. radius=2.5)
    # crashed on ``radius[0]`` below.
    if np.isscalar(radius):
        radius = [radius, radius]  # convert to list
    # compute vertices
    th = np.linspace(2 * np.pi, 0, cols).reshape(1, cols)
    # radius as a function of z
    r = np.linspace(radius[0], radius[1], num=rows+1,
                    endpoint=True).reshape(rows+1, 1)
    verts[..., 2] = np.linspace(0, length, num=rows+1,
                                endpoint=True).reshape(rows+1, 1)  # z
    if offset:
        # rotate each row by 1/2 column
        th = th + ((np.pi / cols) * np.arange(rows+1).reshape(rows+1, 1))
    verts[..., 0] = r * np.cos(th)  # x = r cos(th)
    verts[..., 1] = r * np.sin(th)  # y = r sin(th)
    # just reshape: no redundant vertices...
    verts = verts.reshape((rows+1)*cols, 3)
    # compute faces: two triangles per grid cell, wrapping around in theta
    # (hence the ``% cols``).
    faces = np.empty((rows*cols*2, 3), dtype=np.uint32)
    rowtemplate1 = (((np.arange(cols).reshape(cols, 1) +
                      np.array([[0, 1, 0]])) % cols) +
                    np.array([[0, 0, cols]]))
    rowtemplate2 = (((np.arange(cols).reshape(cols, 1) +
                      np.array([[0, 1, 1]])) % cols) +
                    np.array([[cols, 0, cols]]))
    for row in range(rows):
        start = row * cols * 2
        faces[start:start+cols] = rowtemplate1 + row * cols
        faces[start+cols:start+(cols*2)] = rowtemplate2 + row * cols
    return MeshData(vertices=verts, faces=faces)
|
Create a cylinder
Parameters
----------
rows : int
Number of rows.
cols : int
Number of columns.
radius : tuple of float
Cylinder radii.
length : float
Length of the cylinder.
offset : bool
Rotate each row by half a column.
Returns
-------
cylinder : MeshData
Vertices and faces computed for a cylindrical surface.
|
entailment
|
def create_cone(cols, radius=1.0, length=1.0):
    """Create a cone
    Parameters
    ----------
    cols : int
        Number of faces.
    radius : float
        Base cone radius.
    length : float
        Length of the cone.
    Returns
    -------
    cone : MeshData
        Vertices and faces computed for a cone surface.
    """
    # ``cols`` rim vertices in the z=0 plane plus one apex vertex.
    verts = np.empty((cols+1, 3), dtype=np.float32)
    # compute vertexes
    th = np.linspace(2 * np.pi, 0, cols+1).reshape(1, cols+1)
    verts[:-1, 2] = 0.0
    verts[:-1, 0] = radius * np.cos(th[0, :-1])  # x = r cos(th)
    verts[:-1, 1] = radius * np.sin(th[0, :-1])  # y = r sin(th)
    # Add the extremity (apex at z = length)
    verts[-1, 0] = 0.0
    verts[-1, 1] = 0.0
    verts[-1, 2] = length
    verts = verts.reshape((cols+1), 3)  # just reshape: no redundant vertices
    # compute faces: a triangle fan joining consecutive rim vertices to
    # the apex (index ``cols``).
    faces = np.empty((cols, 3), dtype=np.uint32)
    template = np.array([[0, 1]])
    for pos in range(cols):
        faces[pos, :-1] = template + pos
    faces[:, 2] = cols
    # Close the fan: the last face wraps back to rim vertex 0.
    faces[-1, 1] = 0
    return MeshData(vertices=verts, faces=faces)
|
Create a cone
Parameters
----------
cols : int
Number of faces.
radius : float
Base cone radius.
length : float
Length of the cone.
Returns
-------
cone : MeshData
Vertices and faces computed for a cone surface.
|
entailment
|
def create_arrow(rows, cols, radius=0.1, length=1.0,
                 cone_radius=None, cone_length=None):
    """Create a 3D arrow using a cylinder plus cone
    Parameters
    ----------
    rows : int
        Number of rows.
    cols : int
        Number of columns.
    radius : float
        Base cylinder radius.
    length : float
        Length of the arrow.
    cone_radius : float
        Radius of the cone base.
        If None, then this defaults to 2x the cylinder radius.
    cone_length : float
        Length of the cone.
        If None, then this defaults to 1/3 of the arrow length.
    Returns
    -------
    arrow : MeshData
        Vertices and faces computed for a cone surface.
    """
    # create the cylinder
    md_cyl = None
    if cone_radius is None:
        cone_radius = radius*2.0
    if cone_length is None:
        # Default split: cone takes 1/3 of the length, cylinder the rest.
        con_L = length/3.0
        cyl_L = length*2.0/3.0
    else:
        # Clamp so the cone never exceeds the total arrow length.
        cyl_L = max(0, length - cone_length)
        con_L = min(cone_length, length)
    if cyl_L != 0:
        md_cyl = create_cylinder(rows, cols, radius=[radius, radius],
                                 length=cyl_L)
    # create the cone
    md_con = create_cone(cols, radius=cone_radius, length=con_L)
    verts = md_con.get_vertices()
    nbr_verts_con = verts.size//3
    faces = md_con.get_faces()
    if md_cyl is not None:
        # Shift the cone to the cylinder's far end and merge both meshes,
        # offsetting the cylinder's face indices past the cone's vertices.
        trans = np.array([[0.0, 0.0, cyl_L]])
        verts = np.vstack((verts+trans, md_cyl.get_vertices()))
        faces = np.vstack((faces, md_cyl.get_faces()+nbr_verts_con))
    return MeshData(vertices=verts, faces=faces)
|
Create a 3D arrow using a cylinder plus cone
Parameters
----------
rows : int
Number of rows.
cols : int
Number of columns.
radius : float
Base cylinder radius.
length : float
Length of the arrow.
cone_radius : float
Radius of the cone base.
If None, then this defaults to 2x the cylinder radius.
cone_length : float
Length of the cone.
If None, then this defaults to 1/3 of the arrow length.
Returns
-------
arrow : MeshData
Vertices and faces computed for a cone surface.
|
entailment
|
def create_grid_mesh(xs, ys, zs):
    '''Generate vertices and indices for an implicitly connected mesh.

    The intention is that this makes it simple to generate a mesh
    from meshgrid data.

    Parameters
    ----------
    xs : ndarray
        A 2d array of x coordinates for the vertices of the mesh. Must
        have the same dimensions as ys and zs.
    ys : ndarray
        A 2d array of y coordinates for the vertices of the mesh. Must
        have the same dimensions as xs and zs.
    zs : ndarray
        A 2d array of z coordinates for the vertices of the mesh. Must
        have the same dimensions as xs and ys.

    Returns
    -------
    vertices : ndarray
        The array of vertices in the mesh.
    indices : ndarray
        The array of indices for the mesh.
    '''
    n_rows, n_cols = xs.shape
    n_verts = n_rows * n_cols
    # Flatten the three coordinate grids into one (n, 3) vertex array.
    vertices = np.zeros((n_verts, 3))
    vertices[:, 0] = xs.reshape(n_verts)
    vertices[:, 1] = ys.reshape(n_verts)
    vertices[:, 2] = zs.reshape(n_verts)
    # Index pattern for one grid cell: two triangles sharing a diagonal.
    quad = np.array([0, 1, 1 + n_cols, 0, n_cols, 1 + n_cols],
                    dtype=np.uint32)
    # Flat index of each cell's top-left vertex; the extra repeat skips
    # the last column of every row (it starts no cell).
    n_cells = (n_rows - 1) * (n_cols - 1)
    cell_starts = np.arange(n_cells)
    cell_starts += np.repeat(np.arange(n_rows - 1), n_cols - 1)
    cell_starts = np.repeat(cell_starts, 6)
    # Tile the cell pattern over every cell and shift by the cell origin.
    indices = np.resize(quad, len(cell_starts)) + cell_starts
    indices = indices.reshape((len(indices) // 3, 3))
    return vertices, indices
|
Generate vertices and indices for an implicitly connected mesh.
The intention is that this makes it simple to generate a mesh
from meshgrid data.
Parameters
----------
xs : ndarray
A 2d array of x coordinates for the vertices of the mesh. Must
have the same dimensions as ys and zs.
ys : ndarray
A 2d array of y coordinates for the vertices of the mesh. Must
have the same dimensions as xs and zs.
zs : ndarray
A 2d array of z coordinates for the vertices of the mesh. Must
have the same dimensions as xs and ys.
Returns
-------
vertices : ndarray
The array of vertices in the mesh.
indices : ndarray
The array of indices for the mesh.
|
entailment
|
def _straight_line_vertices(adjacency_mat, node_coords, directed=False):
    """
    Generate the vertices for straight lines between nodes.

    If it is a directed graph, it also generates the vertices which can be
    passed to an :class:`ArrowVisual`.

    Parameters
    ----------
    adjacency_mat : array
        The adjacency matrix of the graph
    node_coords : array
        The current coordinates of all nodes in the graph
    directed : bool
        Whether the graph is directed. If this is true it will also generate
        the vertices for arrows which can be passed to :class:`ArrowVisual`.

    Returns
    -------
    vertices : tuple
        Returns a tuple containing (`line_vertices`, `arrow_vertices`)
    """
    if not issparse(adjacency_mat):
        adjacency_mat = np.asarray(adjacency_mat, float)
    if (adjacency_mat.ndim != 2 or adjacency_mat.shape[0] !=
            adjacency_mat.shape[1]):
        raise ValueError("Adjacency matrix should be square.")
    arrow_vertices = np.array([])
    edges = _get_edges(adjacency_mat)
    line_vertices = node_coords[edges.ravel()]
    if directed:
        arrows = np.array(list(_get_directed_edges(adjacency_mat)))
        arrow_vertices = node_coords[arrows.ravel()]
        # BUGFIX: use integer division -- ``len(...) / 2`` is a float
        # under Python 3 and makes ``reshape`` raise a TypeError.
        arrow_vertices = arrow_vertices.reshape((len(arrow_vertices) // 2, 4))
    return line_vertices, arrow_vertices
|
Generate the vertices for straight lines between nodes.
If it is a directed graph, it also generates the vertices which can be
passed to an :class:`ArrowVisual`.
Parameters
----------
adjacency_mat : array
The adjacency matrix of the graph
node_coords : array
The current coordinates of all nodes in the graph
directed : bool
Wether the graph is directed. If this is true it will also generate
the vertices for arrows which can be passed to :class:`ArrowVisual`.
Returns
-------
vertices : tuple
Returns a tuple containing containing (`line_vertices`,
`arrow_vertices`)
|
entailment
|
def _rescale_layout(pos, scale=1):
"""
Normalize the given coordinate list to the range [0, `scale`].
Parameters
----------
pos : array
Coordinate list
scale : number
The upperbound value for the coordinates range
Returns
-------
pos : array
The rescaled (normalized) coordinates in the range [0, `scale`].
Notes
-----
Changes `pos` in place.
"""
pos -= pos.min(axis=0)
pos *= scale / pos.max()
return pos
|
Normalize the given coordinate list to the range [0, `scale`].
Parameters
----------
pos : array
Coordinate list
scale : number
The upperbound value for the coordinates range
Returns
-------
pos : array
The rescaled (normalized) coordinates in the range [0, `scale`].
Notes
-----
Changes `pos` in place.
|
entailment
|
def get_handle():
    '''
    Get unique FT_Library handle
    '''
    # Lazily initialise the module-level FreeType library handle so the
    # whole process shares a single FT_Library instance.
    global __handle__
    if not __handle__:
        __handle__ = FT_Library()
        error = FT_Init_FreeType(byref(__handle__))
        if error:
            # FT_Init_FreeType returns a non-zero FreeType error code on
            # failure; surface it in hex for lookup in fterrdef.h.
            raise RuntimeError(hex(error))
    return __handle__
|
Get unique FT_Library handle
|
entailment
|
def version():
    '''
    Return the version of the FreeType library being used as a tuple of
    ( major version number, minor version number, patch version number )
    '''
    # FT_Library_Version fills the three FT_Int out-parameters in place.
    amajor = FT_Int()
    aminor = FT_Int()
    apatch = FT_Int()
    library = get_handle()
    FT_Library_Version(library, byref(amajor), byref(aminor), byref(apatch))
    return (amajor.value, aminor.value, apatch.value)
|
Return the version of the FreeType library being used as a tuple of
( major version number, minor version number, patch version number )
|
entailment
|
def make_camera(cam_type, *args, **kwargs):
    """ Factory function for creating new cameras using a string name.

    Parameters
    ----------
    cam_type : str
        May be one of:

        * 'panzoom' : Creates :class:`PanZoomCamera`
        * 'turntable' : Creates :class:`TurntableCamera`
        * None : Creates :class:`Camera`

    Notes
    -----
    All extra arguments are passed to the __init__ method of the selected
    Camera class.
    """
    # Build the lookup table: each class registers under its name minus
    # the trailing 'Camera' suffix, lower-cased (e.g. 'panzoom').
    cam_types = {None: BaseCamera}
    for klass in (BaseCamera, PanZoomCamera, PerspectiveCamera,
                  TurntableCamera, FlyCamera, ArcballCamera):
        key = klass.__name__[:-6].lower()
        cam_types[key] = klass
    try:
        return cam_types[cam_type](*args, **kwargs)
    except KeyError:
        raise KeyError('Unknown camera type "%s". Options are: %s' %
                       (cam_type, cam_types.keys()))
|
Factory function for creating new cameras using a string name.
Parameters
----------
cam_type : str
May be one of:
* 'panzoom' : Creates :class:`PanZoomCamera`
* 'turntable' : Creates :class:`TurntableCamera`
* None : Creates :class:`Camera`
Notes
-----
All extra arguments are passed to the __init__ method of the selected
Camera class.
|
entailment
|
def set_data_values(self, label, x, y, z):
    """
    Set the position of the datapoints
    """
    # TODO: avoid re-allocating an array every time
    # Store the coordinates as an (n, 3) array of (x, y, z) rows on the
    # named layer, then push the change to the display.
    self.layers[label]['data'] = np.array([x, y, z]).transpose()
    self._update()
|
Set the position of the datapoints
|
entailment
|
def SegmentCollection(mode="agg-fast", *args, **kwargs):
    """
    mode: string
    - "raw" (speed: fastest, size: small, output: ugly, no dash,
      no thickness)
    - "agg" (speed: slower, size: medium, output: perfect, no dash)
    """
    # Pick the implementation: "raw" selects the fast/ugly renderer,
    # anything else falls back to the anti-aliased one.
    collection_cls = RawSegmentCollection if mode == "raw" \
        else AggSegmentCollection
    return collection_cls(*args, **kwargs)
|
mode: string
- "raw" (speed: fastest, size: small, output: ugly, no dash,
no thickness)
- "agg" (speed: slower, size: medium, output: perfect, no dash)
|
entailment
|
def surface(func, umin=0, umax=2 * np.pi, ucount=64, urepeat=1.0,
            vmin=0, vmax=2 * np.pi, vcount=64, vrepeat=1.0):
    """
    Computes the parameterization of a parametric surface

    func: function(u,v)
        Parametric function used to build the surface
    """
    vtype = [('position', np.float32, 3),
             ('texcoord', np.float32, 2),
             ('normal', np.float32, 3)]
    itype = np.uint32
    # umin, umax, ucount = 0, 2*np.pi, 64
    # vmin, vmax, vcount = 0, 2*np.pi, 64
    # One extra sample in each direction so the surface reaches umax/vmax.
    vcount += 1
    ucount += 1
    n = vcount * ucount
    # Normalized (u, v) grids in [0, 1], then mapped to the parameter
    # ranges; Un/Vn are reused for texture coordinates below.
    Un = np.repeat(np.linspace(0, 1, ucount, endpoint=True), vcount)
    Vn = np.tile(np.linspace(0, 1, vcount, endpoint=True), ucount)
    U = umin + Un * (umax - umin)
    V = vmin + Vn * (vmax - vmin)
    vertices = np.zeros(n, dtype=vtype)
    for i, (u, v) in enumerate(zip(U, V)):
        vertices["position"][i] = func(u, v)
    vertices["texcoord"][:, 0] = Un * urepeat
    vertices["texcoord"][:, 1] = Vn * vrepeat
    # Two triangles per grid cell.
    indices = []
    for i in range(ucount - 1):
        for j in range(vcount - 1):
            indices.append(i * (vcount) + j)
            indices.append(i * (vcount) + j + 1)
            indices.append(i * (vcount) + j + vcount + 1)
            indices.append(i * (vcount) + j + vcount)
            indices.append(i * (vcount) + j + vcount + 1)
            indices.append(i * (vcount) + j)
    indices = np.array(indices, dtype=itype)
    # BUGFIX: use integer division -- ``len(indices) / 3`` is a float
    # under Python 3 and makes ``reshape`` raise a TypeError.
    vertices["normal"] = normals(vertices["position"],
                                 indices.reshape(len(indices) // 3, 3))
    return vertices, indices
|
Computes the parameterization of a parametric surface
func: function(u,v)
Parametric function used to build the surface
|
entailment
|
def PointCollection(mode="raw", *args, **kwargs):
    """
    mode: string
    - "raw" (speed: fastest, size: small, output: ugly)
    - "agg" (speed: fast, size: small, output: beautiful)
    """
    # Pick the implementation: "raw" selects the fast/ugly renderer,
    # anything else falls back to the anti-aliased one.
    collection_cls = RawPointCollection if mode == "raw" \
        else AggPointCollection
    return collection_cls(*args, **kwargs)
|
mode: string
- "raw" (speed: fastest, size: small, output: ugly)
- "agg" (speed: fast, size: small, output: beautiful)
|
entailment
|
def roll_data(self, data):
    """Append new data to the right side of every line strip and remove
    as much data from the left.
    Parameters
    ----------
    data : array-like
        A data array to append.
    """
    # The position texture acts as a circular buffer along axis 1;
    # ``self._offset`` is the current write head.
    data = data.astype('float32')[..., np.newaxis]
    s1 = self._data_shape[1] - self._offset
    if data.shape[1] > s1:
        # Incoming data wraps past the end of the buffer: fill up to the
        # end first, then write the remainder at the start.
        self._pos_tex[:, self._offset:] = data[:, :s1]
        self._pos_tex[:, :data.shape[1] - s1] = data[:, s1:]
        self._offset = (self._offset + data.shape[1]) % self._data_shape[1]
    else:
        self._pos_tex[:, self._offset:self._offset+data.shape[1]] = data
        self._offset += data.shape[1]
    # Tell the shader where the logical start of the buffer now is.
    self.shared_program['offset'] = self._offset
    self.update()
|
Append new data to the right side of every line strip and remove
as much data from the left.
Parameters
----------
data : array-like
A data array to append.
|
entailment
|
def set_data(self, index, data):
    """Set the complete data for a single line strip.
    Parameters
    ----------
    index : int
        The index of the line strip to be replaced.
    data : array-like
        The data to assign to the selected line strip.
    """
    # Overwrite the entire row for this strip in the position texture
    # and schedule a redraw.
    self._pos_tex[index, :] = data
    self.update()
|
Set the complete data for a single line strip.
Parameters
----------
index : int
The index of the line strip to be replaced.
data : array-like
The data to assign to the selected line strip.
|
entailment
|
def add_program(self, name=None):
    """Create a program and add it to this MultiProgram.
    It is the caller's responsibility to keep a reference to the returned
    program.
    The *name* must be unique, but is otherwise arbitrary and used for
    debugging purposes.
    """
    # Auto-generate a unique debug name when none was supplied.
    if name is None:
        name = 'program' + str(self._next_prog_id)
        self._next_prog_id += 1
    if name in self._programs:
        raise KeyError("Program named '%s' already exists." % name)
    # create a program and update it to look like the rest
    prog = ModularProgram(self._vcode, self._fcode)
    # Replay every program-level item already set on this MultiProgram.
    for key, val in self._set_items.items():
        prog[key] = val
    # Replay the shader-level (vertex/fragment) items as well.
    self.frag._new_program(prog)
    self.vert._new_program(prog)
    self._programs[name] = prog
    return prog
|
Create a program and add it to this MultiProgram.
It is the caller's responsibility to keep a reference to the returned
program.
The *name* must be unique, but is otherwise arbitrary and used for
debugging purposes.
|
entailment
|
def _new_program(self, p):
    """New program was added to the multiprogram; update items in the
    shader.
    """
    # Replay every item previously assigned through this shader proxy
    # onto the matching shader (``self._shader``) of the new program.
    for k, v in self._set_items.items():
        getattr(p, self._shader)[k] = v
|
New program was added to the multiprogram; update items in the
shader.
|
entailment
|
def attach(self, canvas):
    """Attach this tranform to a canvas
    Parameters
    ----------
    canvas : instance of Canvas
        The canvas.
    """
    # Keep a reference for later coordinate mapping, and hook the
    # resize / wheel / move handlers into the canvas event loop.
    self._canvas = canvas
    canvas.events.resize.connect(self.on_resize)
    canvas.events.mouse_wheel.connect(self.on_mouse_wheel)
    canvas.events.mouse_move.connect(self.on_mouse_move)
|
Attach this tranform to a canvas
Parameters
----------
canvas : instance of Canvas
The canvas.
|
entailment
|
def on_resize(self, event):
    """Resize handler
    Parameters
    ----------
    event : instance of Event
        The event.
    """
    # Only act when an aspect ratio is enforced.
    if self._aspect is None:
        return
    w, h = self._canvas.size
    # Re-derive the y scale from the x scale so the configured aspect
    # ratio is preserved at the new canvas size.
    aspect = self._aspect / (w / h)
    self.scale = (self.scale[0], self.scale[0] / aspect)
    self.shader_map()
|
Resize handler
Parameters
----------
event : instance of Event
The event.
|
entailment
|
def on_mouse_move(self, event):
    """Mouse move handler
    Parameters
    ----------
    event : instance of Event
        The event.
    """
    if event.is_dragging:
        dxy = event.pos - event.last_event.pos
        button = event.press_event.button
        if button == 1:
            # Left drag: pan. Map the pixel delta through the canvas
            # transform and subtract the mapped origin to get a pure
            # translation vector.
            dxy = self.canvas_tr.map(dxy)
            o = self.canvas_tr.map([0, 0])
            t = dxy - o
            self.move(t)
        elif button == 2:
            # Right drag: exponential zoom about the press position.
            center = self.canvas_tr.map(event.press_event.pos)
            if self._aspect is None:
                # Free aspect: zoom x and y independently.
                self.zoom(np.exp(dxy * (0.01, -0.01)), center)
            else:
                # Locked aspect: vertical motion zooms both axes equally.
                s = dxy[1] * -0.01
                self.zoom(np.exp(np.array([s, s])), center)
        self.shader_map()
|
Mouse move handler
Parameters
----------
event : instance of Event
The event.
|
entailment
|
def on_mouse_wheel(self, event):
    """Mouse wheel handler
    Parameters
    ----------
    event : instance of Event
        The event.
    """
    # Exponential zoom about the cursor position; the y wheel axis is
    # inverted relative to the x axis.
    self.zoom(np.exp(event.delta * (0.01, -0.01)), event.pos)
|
Mouse wheel handler
Parameters
----------
event : instance of Event
The event.
|
entailment
|
def _frenet_frames(points, closed):
    '''Calculates and returns the tangents, normals and binormals for
    the tube.'''
    # NOTE(review): this initial ``tangents`` array is overwritten by the
    # np.roll expression below; it is effectively dead.
    tangents = np.zeros((len(points), 3))
    normals = np.zeros((len(points), 3))
    epsilon = 0.0001
    # Compute tangent vectors for each segment (central differences).
    tangents = np.roll(points, -1, axis=0) - np.roll(points, 1, axis=0)
    if not closed:
        # Open curve: use one-sided differences at the two end points.
        tangents[0] = points[1] - points[0]
        tangents[-1] = points[-1] - points[-2]
    mags = np.sqrt(np.sum(tangents * tangents, axis=1))
    tangents /= mags[:, np.newaxis]
    # Get initial normal and binormal: start from the unit axis closest
    # to perpendicular to the first tangent.
    t = np.abs(tangents[0])
    smallest = np.argmin(t)
    normal = np.zeros(3)
    normal[smallest] = 1.
    vec = np.cross(tangents[0], normal)
    normals[0] = np.cross(tangents[0], vec)
    # Compute normal and binormal vectors along the path by transporting
    # the previous normal: rotate it around the axis between consecutive
    # tangents by the angle between them.
    for i in range(1, len(points)):
        normals[i] = normals[i-1]
        vec = np.cross(tangents[i-1], tangents[i])
        if norm(vec) > epsilon:
            vec /= norm(vec)
            theta = np.arccos(np.clip(tangents[i-1].dot(tangents[i]), -1, 1))
            normals[i] = rotate(-np.degrees(theta),
                                vec)[:3, :3].dot(normals[i])
    if closed:
        # Spread the residual twist between the first and last frames
        # evenly along the path so the frames match up at the seam.
        theta = np.arccos(np.clip(normals[0].dot(normals[-1]), -1, 1))
        theta /= len(points) - 1
        if tangents[0].dot(np.cross(normals[0], normals[-1])) > 0:
            theta *= -1.
        for i in range(1, len(points)):
            normals[i] = rotate(-np.degrees(theta*i),
                                tangents[i])[:3, :3].dot(normals[i])
    binormals = np.cross(tangents, normals)
    return tangents, normals, binormals
|
Calculates and returns the tangents, normals and binormals for
the tube.
|
entailment
|
def max_order(self):
    """
    Depth of the smallest HEALPix cells found in the MOC instance.
    """
    # TODO: cache value
    # OR together every interval bound; the number of trailing zero bits
    # of the combined value tells how far the deepest cell lies below
    # HPY_MAX_NORDER (two bits per order).
    combo = int(0)
    for iv in self._interval_set._intervals:
        combo |= iv[0] | iv[1]
    ret = AbstractMOC.HPY_MAX_NORDER - (utils.number_trailing_zeros(combo) // 2)
    if ret < 0:
        # Clamp to the root order when the trailing-zero count exceeds
        # 2 * HPY_MAX_NORDER.
        ret = 0
    return ret
|
Depth of the smallest HEALPix cells found in the MOC instance.
|
entailment
|
def intersection(self, another_moc, *args):
    """
    Intersection between the MOC instance and other MOCs.

    Parameters
    ----------
    another_moc : `~mocpy.moc.MOC`
        The MOC used for performing the intersection with self.
    args : `~mocpy.moc.MOC`
        Other additional MOCs to perform the intersection with.

    Returns
    -------
    result : `~mocpy.moc.MOC`/`~mocpy.tmoc.TimeMOC`
        The resulting MOC.
    """
    # Fold the interval-set intersection over every operand in turn.
    result = self._interval_set
    for moc in (another_moc,) + args:
        result = result.intersection(moc._interval_set)
    return self.__class__(result)
|
Intersection between the MOC instance and other MOCs.
Parameters
----------
another_moc : `~mocpy.moc.MOC`
The MOC used for performing the intersection with self.
args : `~mocpy.moc.MOC`
Other additional MOCs to perform the intersection with.
Returns
-------
result : `~mocpy.moc.MOC`/`~mocpy.tmoc.TimeMOC`
The resulting MOC.
|
entailment
|
def union(self, another_moc, *args):
    """
    Union between the MOC instance and other MOCs.

    Parameters
    ----------
    another_moc : `~mocpy.moc.MOC`
        The MOC used for performing the union with self.
    args : `~mocpy.moc.MOC`
        Other additional MOCs to perform the union with.

    Returns
    -------
    result : `~mocpy.moc.MOC`/`~mocpy.tmoc.TimeMOC`
        The resulting MOC.
    """
    # Fold the interval-set union over every operand in turn.
    result = self._interval_set
    for moc in (another_moc,) + args:
        result = result.union(moc._interval_set)
    return self.__class__(result)
|
Union between the MOC instance and other MOCs.
Parameters
----------
another_moc : `~mocpy.moc.MOC`
The MOC used for performing the union with self.
args : `~mocpy.moc.MOC`
Other additional MOCs to perform the union with.
Returns
-------
result : `~mocpy.moc.MOC`/`~mocpy.tmoc.TimeMOC`
The resulting MOC.
|
entailment
|
def difference(self, another_moc, *args):
    """
    Difference between the MOC instance and other MOCs.

    Parameters
    ----------
    another_moc : `~mocpy.moc.MOC`
        The MOC used that will be substracted to self.
    args : `~mocpy.moc.MOC`
        Other additional MOCs to perform the difference with.

    Returns
    -------
    result : `~mocpy.moc.MOC` or `~mocpy.tmoc.TimeMOC`
        The resulting MOC.
    """
    # Subtract each operand's interval set from self, left to right.
    result = self._interval_set
    for moc in (another_moc,) + args:
        result = result.difference(moc._interval_set)
    return self.__class__(result)
|
Difference between the MOC instance and other MOCs.
Parameters
----------
another_moc : `~mocpy.moc.MOC`
The MOC used that will be substracted to self.
args : `~mocpy.moc.MOC`
Other additional MOCs to perform the difference with.
Returns
-------
result : `~mocpy.moc.MOC` or `~mocpy.tmoc.TimeMOC`
The resulting MOC.
|
entailment
|
def _neighbour_pixels(hp, ipix):
"""
Returns all the pixels neighbours of ``ipix``
"""
neigh_ipix = np.unique(hp.neighbours(ipix).ravel())
# Remove negative pixel values returned by `~astropy_healpix.HEALPix.neighbours`
return neigh_ipix[np.where(neigh_ipix >= 0)]
|
Returns all the pixels neighbours of ``ipix``
|
entailment
|
def from_cells(cls, cells):
    """
    Creates a MOC from a numpy array representing the HEALPix cells.
    Parameters
    ----------
    cells : `numpy.ndarray`
        Must be a numpy structured array (See https://docs.scipy.org/doc/numpy-1.15.0/user/basics.rec.html).
        The structure of a cell contains 3 attributes:
        - A `ipix` value being a np.uint64
        - A `depth` value being a np.uint32
        - A `fully_covered` flag bit stored in a np.uint8
    Returns
    -------
    moc : `~mocpy.moc.MOC`
        The MOC.
    """
    # Degrade each cell [ipix, ipix+1) at its depth to a half-open
    # interval at the deepest norder (two bits per order of difference).
    shift = (AbstractMOC.HPY_MAX_NORDER - cells["depth"]) << 1
    p1 = cells["ipix"]
    p2 = cells["ipix"] + 1
    intervals = np.vstack((p1 << shift, p2 << shift)).T
    return cls(IntervalSet(intervals))
|
Creates a MOC from a numpy array representing the HEALPix cells.
Parameters
----------
cells : `numpy.ndarray`
Must be a numpy structured array (See https://docs.scipy.org/doc/numpy-1.15.0/user/basics.rec.html).
The structure of a cell contains 3 attributes:
- A `ipix` value being a np.uint64
- A `depth` value being a np.uint32
- A `fully_covered` flag bit stored in a np.uint8
Returns
-------
moc : `~mocpy.moc.MOC`
The MOC.
|
entailment
|
def from_json(cls, json_moc):
    """
    Creates a MOC from a dictionary of HEALPix cell arrays indexed by their depth.

    Parameters
    ----------
    json_moc : dict(str : [int])
        A dictionary of HEALPix cell arrays indexed by their depth.

    Returns
    -------
    moc : `~mocpy.moc.MOC` or `~mocpy.tmoc.TimeMOC`
        The MOC.
    """
    # Collect the per-depth interval blocks and stack them once at the end:
    # calling np.vstack inside the loop re-copies the accumulated array on
    # every iteration (quadratic in the number of depths).
    interval_blocks = []
    for order, pix_l in json_moc.items():
        if len(pix_l) == 0:
            continue
        pix = np.array(pix_l)
        shift = 2 * (AbstractMOC.HPY_MAX_NORDER - int(order))
        interval_blocks.append(np.vstack((pix << shift, (pix + 1) << shift)).T)

    if interval_blocks:
        intervals = np.vstack(interval_blocks)
    else:
        # Preserve the historical empty representation.
        intervals = np.array([])
    return cls(IntervalSet(intervals))
|
Creates a MOC from a dictionary of HEALPix cell arrays indexed by their depth.
Parameters
----------
json_moc : dict(str : [int]
A dictionary of HEALPix cell arrays indexed by their depth.
Returns
-------
moc : `~mocpy.moc.MOC` or `~mocpy.tmoc.TimeMOC`
the MOC.
|
entailment
|
def _uniq_pixels_iterator(self):
    """Generator giving the NUNIQ HEALPix pixels of the MOC.

    Yields
    ------
    int
        Each NUNIQ-encoded pixel, interval by interval.
    """
    nuniq_intervals = IntervalSet.to_nuniq_interval_set(self._interval_set)._intervals
    for start, stop in nuniq_intervals:
        yield from range(start, stop)
|
Generator giving the NUNIQ HEALPix pixels of the MOC.
Returns
-------
uniq :
the NUNIQ HEALPix pixels iterator
|
entailment
|
def from_fits(cls, filename):
    """
    Loads a MOC from a FITS file.

    The FITS file must store the MOC (the list of NUNIQ HEALPix cells) in a
    binary HDU table with a ``UNIQ`` column.

    Parameters
    ----------
    filename : str
        The path to the FITS file.

    Returns
    -------
    result : `~mocpy.moc.MOC` or `~mocpy.tmoc.TimeMOC`
        The resulting MOC.
    """
    table = Table.read(filename)
    uniq = table['UNIQ']
    # Each NUNIQ cell is a unit-length half-open interval [uniq, uniq+1).
    nuniq_intervals = IntervalSet(np.vstack((uniq, uniq + 1)).T)
    return cls(IntervalSet.from_nuniq_interval_set(nuniq_intervals))
|
Loads a MOC from a FITS file.
The specified FITS file must store the MOC (i.e. the list of HEALPix cells it contains) in a binary HDU table.
Parameters
----------
filename : str
The path to the FITS file.
Returns
-------
result : `~mocpy.moc.MOC` or `~mocpy.tmoc.TimeMOC`
The resulting MOC.
|
entailment
|
def from_str(cls, value):
    """
    Create a MOC from a str.

    This grammar is expressed is the `MOC IVOA <http://ivoa.net/documents/MOC/20190215/WD-MOC-1.1-20190215.pdf>`__
    specification at section 2.3.2.

    Parameters
    ----------
    value : str
        The MOC as a string following the grammar rules.

    Returns
    -------
    moc : `~mocpy.moc.MOC` or `~mocpy.tmoc.TimeMOC`
        The resulting MOC

    Examples
    --------
    >>> from mocpy import MOC
    >>> moc = MOC.from_str("2/2-25,28,29 4/0 6/")
    """
    # Import lark parser when from_str is called
    # at least one time
    from lark import Lark, Transformer

    class ParsingException(Exception):
        # Raised when `value` does not follow the IVOA string grammar.
        pass

    class TreeToJson(Transformer):
        # Collapses the lark parse tree into the {depth: [ipix]} dict
        # accepted by ``from_json``.
        def value(self, items):
            res = {}
            for item in items:
                if item is not None:  # Do not take into account the "sep" branches
                    res.update(item)
            return res

        def sep(self, items):
            # Separators carry no information; map them to None.
            pass

        def depthpix(self, items):
            depth = str(items[0])
            pixs_l = items[1:][0]
            return {depth: pixs_l}

        def uniq_pix(self, pix):
            # A single pixel; an empty match (e.g. "6/") yields None.
            if pix:
                return [int(pix[0])]

        def range_pix(self, range_pix):
            # "a-b" expands to the inclusive list of pixels [a, b].
            lower_bound = int(range_pix[0])
            upper_bound = int(range_pix[1])
            return np.arange(lower_bound, upper_bound + 1, dtype=int)

        def pixs(self, items):
            ipixs = []
            for pix in items:
                if pix is not None:  # Do not take into account the "sep" branches
                    ipixs.extend(pix)
            return ipixs

    # Initialize the parser when from_str is called
    # for the first time (cached on the class afterwards)
    if AbstractMOC.LARK_PARSER_STR is None:
        AbstractMOC.LARK_PARSER_STR = Lark(r"""
            value: depthpix (sep+ depthpix)*
            depthpix : INT "/" sep* pixs
            pixs : pix (sep+ pix)*
            pix : INT? -> uniq_pix
                | (INT "-" INT) -> range_pix
            sep : " " | "," | "\n" | "\r"
            %import common.INT
            """, start='value')

    try:
        tree = AbstractMOC.LARK_PARSER_STR.parse(value)
    except Exception as err:
        raise ParsingException("Could not parse {0}. \n Check the grammar section 2.3.2 of http://ivoa.net/documents/MOC/20190215/WD-MOC-1.1-20190215.pdf to see the correct syntax for writing a MOC from a str".format(value))

    moc_json = TreeToJson().transform(tree)
    return cls.from_json(moc_json)
|
Create a MOC from a str.
This grammar is expressed is the `MOC IVOA <http://ivoa.net/documents/MOC/20190215/WD-MOC-1.1-20190215.pdf>`__
specification at section 2.3.2.
Parameters
----------
value : str
The MOC as a string following the grammar rules.
Returns
-------
moc : `~mocpy.moc.MOC` or `~mocpy.tmoc.TimeMOC`
The resulting MOC
Examples
--------
>>> from mocpy import MOC
>>> moc = MOC.from_str("2/2-25,28,29 4/0 6/")
|
entailment
|
def _to_json(uniq):
    """
    Serializes a MOC to the JSON format.

    Parameters
    ----------
    uniq : `~numpy.ndarray`
        The array of HEALPix cells representing the MOC to serialize.

    Returns
    -------
    result_json : {str : [int]}
        A dictionary of HEALPix cell lists indexed by their depth.
    """
    result_json = {}

    depth, ipix = utils.uniq2orderipix(uniq)
    # Use the global min/max instead of the first/last entries so the
    # result does not silently depend on ``uniq`` being sorted.
    min_depth = np.min(depth)
    max_depth = np.max(depth)

    for d in range(min_depth, max_depth + 1):
        pix_index = np.where(depth == d)[0]
        if pix_index.size:
            # there are pixels belonging to the current order
            ipix_depth = ipix[pix_index]
            result_json[str(d)] = ipix_depth.tolist()

    return result_json
|
Serializes a MOC to the JSON format.
Parameters
----------
uniq : `~numpy.ndarray`
The array of HEALPix cells representing the MOC to serialize.
Returns
-------
result_json : {str : [int]}
A dictionary of HEALPix cell lists indexed by their depth.
|
entailment
|
def _to_str(uniq):
    """
    Serializes a MOC to the STRING format.

    HEALPix cells are separated by a comma. The HEALPix cell at order 0 and
    number 10 is encoded by the string: "0/10", the first digit representing
    the depth and the second the HEALPix cell number for this depth. HEALPix
    cells next to each other within a specific depth can be expressed as a
    range and therefore written like that: "12/10-150". This encodes the list
    of HEALPix cells from 10 to 150 at the depth 12.

    Parameters
    ----------
    uniq : `~numpy.ndarray`
        The array of HEALPix cells representing the MOC to serialize.

    Returns
    -------
    result : str
        The serialized MOC.
    """
    def write_cells(serial, a, b, sep=''):
        # Append a single cell ("a") or an inclusive range ("a-b").
        if a == b:
            serial += '{0}{1}'.format(a, sep)
        else:
            serial += '{0}-{1}{2}'.format(a, b, sep)
        return serial

    res = ''
    if uniq.size == 0:
        return res

    depth, ipixels = utils.uniq2orderipix(uniq)
    # Use the global min/max instead of the first/last entries so the
    # result does not silently depend on ``uniq`` being sorted.
    min_depth = np.min(depth)
    max_depth = np.max(depth)

    for d in range(min_depth, max_depth + 1):
        pix_index = np.where(depth == d)[0]
        if pix_index.size > 0:
            # Serialize the depth followed by a slash
            res += '{0}/'.format(d)
            # Retrieve the pixel(s) for this depth
            ipix_depth = ipixels[pix_index]
            if ipix_depth.size == 1:
                # If there is only one pixel we serialize it and
                # go to the next depth
                res = write_cells(res, ipix_depth[0], ipix_depth[0])
            else:
                # Sort them in case there are several
                ipix_depth = np.sort(ipix_depth)
                beg_range = ipix_depth[0]
                last_range = beg_range
                # Loop over the sorted pixels by tracking the lower bound of
                # the current range and the last pixel.
                for ipix in ipix_depth[1:]:
                    # If the current pixel does not follow the previous one
                    # then we can end a range and serializes it
                    if ipix > last_range + 1:
                        res = write_cells(res, beg_range, last_range, sep=',')
                        # The current pixel is the beginning of a new range
                        beg_range = ipix
                    last_range = ipix
                # Write the last range
                res = write_cells(res, beg_range, last_range)
            # Add a ' ' separator before writing serializing the pixels of the next depth
            res += ' '
    # Remove the last ' ' character
    res = res[:-1]
    return res
|
Serializes a MOC to the STRING format.
HEALPix cells are separated by a comma. The HEALPix cell at order 0 and number 10 is encoded
by the string: "0/10", the first digit representing the depth and the second the HEALPix cell number
for this depth. HEALPix cells next to each other within a specific depth can be expressed as a range and
therefore written like that: "12/10-150". This encodes the list of HEALPix cells from 10 to 150 at the
depth 12.
Parameters
----------
uniq : `~numpy.ndarray`
The array of HEALPix cells representing the MOC to serialize.
Returns
-------
result : str
The serialized MOC.
|
entailment
|
def _to_fits(self, uniq, optional_kw_dict=None):
    """
    Serializes a MOC to the FITS format.

    Parameters
    ----------
    uniq : `numpy.ndarray`
        The array of NUNIQ HEALPix cells to store in the binary table.
    optional_kw_dict : dict, optional
        Optional keywords arguments added to the FITS header.

    Returns
    -------
    thdulist : `astropy.io.fits.HDUList`
        A primary HDU followed by the binary table HDU holding the MOC.
    """
    depth = self.max_order
    # 32-bit column ('1J') is enough up to depth 13; deeper MOCs need 64 bits.
    fits_format = '1J' if depth <= 13 else '1K'

    uniq_col = fits.Column(name='UNIQ', format=fits_format, array=uniq)
    tbhdu = fits.BinTableHDU.from_columns(fits.ColDefs([uniq_col]))

    header = tbhdu.header
    header['PIXTYPE'] = 'HEALPIX'
    header['ORDERING'] = 'NUNIQ'
    header.update(self._fits_header_keywords)
    header['MOCORDER'] = depth
    header['MOCTOOL'] = 'MOCPy'
    if optional_kw_dict:
        for key, val in optional_kw_dict.items():
            header[key] = val

    return fits.HDUList([fits.PrimaryHDU(), tbhdu])
|
Serializes a MOC to the FITS format.
Parameters
----------
uniq : `numpy.ndarray`
The array of HEALPix cells representing the MOC to serialize.
optional_kw_dict : dict
Optional keywords arguments added to the FITS header.
Returns
-------
thdulist : `astropy.io.fits.HDUList`
The list of HDU tables.
|
entailment
|
def serialize(self, format='fits', optional_kw_dict=None):
    """
    Serializes the MOC into a specific format.

    Possible formats are FITS, JSON and STRING.

    Parameters
    ----------
    format : str
        'fits' by default. The other possible choice is 'json' or 'str'.
    optional_kw_dict : dict
        Optional keywords arguments added to the FITS header. Only used if
        ``format`` equals to 'fits'.

    Returns
    -------
    result : `astropy.io.fits.HDUList` or JSON dictionary
        The result of the serialization.
    """
    formats = ('fits', 'json', 'str')
    if format not in formats:
        raise ValueError('format should be one of %s' % (str(formats)))

    # Materialize the NUNIQ pixels once, whatever the output format.
    uniq = np.array([u for u in self._uniq_pixels_iterator()])

    if format == 'fits':
        return self._to_fits(uniq=uniq, optional_kw_dict=optional_kw_dict)
    if format == 'str':
        return self.__class__._to_str(uniq=uniq)
    # json format serialization
    return self.__class__._to_json(uniq=uniq)
|
Serializes the MOC into a specific format.
Possible formats are FITS, JSON and STRING
Parameters
----------
format : str
'fits' by default. The other possible choice is 'json' or 'str'.
optional_kw_dict : dict
Optional keywords arguments added to the FITS header. Only used if ``format`` equals to 'fits'.
Returns
-------
result : `astropy.io.fits.HDUList` or JSON dictionary
The result of the serialization.
|
entailment
|
def write(self, path, format='fits', overwrite=False, optional_kw_dict=None):
    """
    Writes the MOC to a file.

    Format can be 'fits' or 'json', though only the fits format is officially
    supported by the IVOA.

    Parameters
    ----------
    path : str, optional
        The path to the file to save the MOC in.
    format : str, optional
        Serialization format, "fits" (default) or "json".
    overwrite : bool, optional
        If the file already exists and you want to overwrite it, then set the
        ``overwrite`` keyword. Default to False.
    optional_kw_dict : optional
        Optional keywords arguments added to the FITS header. Only used if
        ``format`` equals to 'fits'.
    """
    serialization = self.serialize(format=format, optional_kw_dict=optional_kw_dict)
    if format == 'fits':
        serialization.writeto(path, overwrite=overwrite)
        return
    import json
    with open(path, 'w') as out:
        json.dump(serialization, out, sort_keys=True, indent=2)
|
Writes the MOC to a file.
Format can be 'fits' or 'json', though only the fits format is officially supported by the IVOA.
Parameters
----------
path : str, optional
The path to the file to save the MOC in.
format : str, optional
The format in which the MOC will be serialized before being saved. Possible formats are "fits" or "json".
By default, ``format`` is set to "fits".
overwrite : bool, optional
If the file already exists and you want to overwrite it, then set the ``overwrite`` keyword. Default to False.
optional_kw_dict : optional
Optional keywords arguments added to the FITS header. Only used if ``format`` equals to 'fits'.
|
entailment
|
def degrade_to_order(self, new_order):
    """
    Degrades the MOC instance to a new, less precise, MOC.

    The maximum depth (i.e. the depth of the smallest HEALPix cells that can
    be found in the MOC) of the degraded MOC is set to ``new_order``.

    Parameters
    ----------
    new_order : int
        Maximum depth of the output degraded MOC.

    Returns
    -------
    moc : `~mocpy.moc.MOC` or `~mocpy.tmoc.TimeMOC`
        The degraded MOC.
    """
    shift = 2 * (AbstractMOC.HPY_MAX_NORDER - new_order)
    # ``mask`` snaps interval bounds onto cell boundaries at ``new_order``;
    # the lower bound is rounded down and the upper bound rounded up.
    offset = (int(1) << shift) - 1
    mask = ~offset

    degraded = []
    for lo, hi in self._interval_set._intervals:
        a = lo & mask
        b = (hi + offset) & mask
        if b > a:
            degraded.append((a, b))

    return self.__class__(IntervalSet(np.asarray(degraded)))
|
Degrades the MOC instance to a new, less precise, MOC.
The maximum depth (i.e. the depth of the smallest HEALPix cells that can be found in the MOC) of the
degraded MOC is set to ``new_order``.
Parameters
----------
new_order : int
Maximum depth of the output degraded MOC.
Returns
-------
moc : `~mocpy.moc.MOC` or `~mocpy.tmoc.TimeMOC`
The degraded MOC.
|
entailment
|
def set_name(self, name):
    """Rename the screen and push the new name to the LCDd server."""
    self.name = name
    command = "screen_set %s name %s" % (self.ref, self.name)
    self.server.request(command)
|
Set Screen Name
|
entailment
|
def set_width(self, width):
    """Set the screen width if it is within the server's supported range."""
    max_width = self.server.server_info.get("screen_width")
    if not (0 < width <= max_width):
        return
    self.width = width
    self.server.request("screen_set %s wid %i" % (self.ref, self.width))
|
Set Screen Width
|
entailment
|
def set_height(self, height):
    """Set the screen height if it is within the server's supported range."""
    max_height = self.server.server_info.get("screen_height")
    if not (0 < height <= max_height):
        return
    self.height = height
    self.server.request("screen_set %s hgt %i" % (self.ref, self.height))
|
Set Screen Height
|
entailment
|
def set_cursor_x(self, x):
    """Move the screen cursor to column ``x`` if within the screen width."""
    limit = self.server.server_info.get("screen_width")
    if 0 <= x <= limit:
        self.cursor_x = x
        self.server.request("screen_set %s cursor_x %i" % (self.ref, self.cursor_x))
|
Set Screen Cursor X Position
|
entailment
|
def set_cursor_y(self, y):
    """Move the screen cursor to row ``y`` if within the screen height."""
    limit = self.server.server_info.get("screen_height")
    if 0 <= y <= limit:
        self.cursor_y = y
        self.server.request("screen_set %s cursor_y %i" % (self.ref, self.cursor_y))
|
Set Screen Cursor Y Position
|
entailment
|
def set_duration(self, duration):
    """Set the screen rotation interval; the wire value is in 1/8 seconds."""
    if duration <= 0:
        return
    self.duration = duration
    self.server.request("screen_set %s duration %i" % (self.ref, self.duration * 8))
|
Set Screen Change Interval Duration
|
entailment
|
def set_timeout(self, timeout):
    """Set the screen timeout; the wire value is in 1/8 seconds."""
    if timeout <= 0:
        return
    self.timeout = timeout
    self.server.request("screen_set %s timeout %i" % (self.ref, self.timeout * 8))
|
Set Screen Timeout Duration
|
entailment
|
def set_priority(self, priority):
    """Set the screen's priority class if it is one LCDd understands."""
    valid_classes = ("hidden", "background", "info", "foreground", "alert", "input")
    if priority not in valid_classes:
        return
    self.priority = priority
    self.server.request("screen_set %s priority %s" % (self.ref, self.priority))
|
Set Screen Priority Class
|
entailment
|
def set_backlight(self, state):
    """Set the screen backlight mode if ``state`` is a recognised mode."""
    if state not in ("on", "off", "toggle", "open", "blink", "flash"):
        return
    self.backlight = state
    self.server.request("screen_set %s backlight %s" % (self.ref, self.backlight))
|
Set Screen Backlight Mode
|
entailment
|
def set_heartbeat(self, state):
    """Set how the heartbeat indicator is displayed on this screen."""
    if state not in ("on", "off", "open"):
        return
    self.heartbeat = state
    self.server.request("screen_set %s heartbeat %s" % (self.ref, self.heartbeat))
|
Set Screen Heartbeat Display Mode
|
entailment
|
def set_cursor(self, cursor):
    """Set the cursor rendering mode for this screen."""
    if cursor not in ("on", "off", "under", "block"):
        return
    self.cursor = cursor
    self.server.request("screen_set %s cursor %s" % (self.ref, self.cursor))
|
Set Screen Cursor Mode
|
entailment
|
def clear(self):
    """Blank the screen by overwriting all four rows with spaces."""
    blank = " " * 20
    for row in (1, 2, 3, 4):
        widgets.StringWidget(self, ref="_w%i_" % row, text=blank, x=1, y=row)
|
Clear Screen
|
entailment
|
def add_string_widget(self, ref, text="Text", x=1, y=1):
    """Create (if absent) and return the string widget named ``ref``."""
    if ref not in self.widgets:
        self.widgets[ref] = widgets.StringWidget(screen=self, ref=ref, text=text, x=x, y=y)
    return self.widgets[ref]
|
Add String Widget
|
entailment
|
def add_title_widget(self, ref, text="Title"):
    """Create (if absent) and return the title widget named ``ref``."""
    if ref not in self.widgets:
        self.widgets[ref] = widgets.TitleWidget(screen=self, ref=ref, text=text)
    return self.widgets[ref]
|
Add Title Widget
|
entailment
|
def add_hbar_widget(self, ref, x=1, y=1, length=10):
    """Create (if absent) and return the horizontal bar widget named ``ref``."""
    if ref not in self.widgets:
        self.widgets[ref] = widgets.HBarWidget(screen=self, ref=ref, x=x, y=y, length=length)
    return self.widgets[ref]
|
Add Horizontal Bar Widget
|
entailment
|
def add_vbar_widget(self, ref, x=1, y=1, length=10):
    """Create (if absent) and return the vertical bar widget named ``ref``."""
    if ref not in self.widgets:
        self.widgets[ref] = widgets.VBarWidget(screen=self, ref=ref, x=x, y=y, length=length)
    return self.widgets[ref]
|
Add Vertical Bar Widget
|
entailment
|
def add_frame_widget(self, ref, left=1, top=1, right=20, bottom=1, width=20, height=4, direction="h", speed=1):
    """Create (if absent) and return the frame widget named ``ref``."""
    if ref not in self.widgets:
        self.widgets[ref] = widgets.FrameWidget(
            screen=self, ref=ref, left=left, top=top, right=right,
            bottom=bottom, width=width, height=height,
            direction=direction, speed=speed,
        )
    return self.widgets[ref]
|
Add Frame Widget
|
entailment
|
def add_number_widget(self, ref, x=1, value=1):
    """Create (if absent) and return the big-number widget named ``ref``."""
    if ref not in self.widgets:
        self.widgets[ref] = widgets.NumberWidget(screen=self, ref=ref, x=x, value=value)
    return self.widgets[ref]
|
Add Number Widget
|
entailment
|
def del_widget(self, ref):
    """Delete/Remove A Widget.

    Sends the ``widget_del`` command to the LCDd server and drops the widget
    from this screen's registry.
    """
    # Address the screen by its server-side reference (``self.ref``), as every
    # other screen command here does; ``self.name`` is only the display name
    # and is not guaranteed to match the reference LCDd knows the screen by.
    self.server.request("widget_del %s %s" % (self.ref, ref))
    del self.widgets[ref]
|
Delete/Remove A Widget
|
entailment
|
def orbit(self, azim, elev):
    """Orbits the camera around the center position.

    Parameters
    ----------
    azim : float
        Angle in degrees to rotate horizontally around the center point.
    elev : float
        Angle in degrees to rotate vertically around the center point; the
        resulting elevation is clamped to [-90, 90].
    """
    self.azimuth = self.azimuth + azim
    self.elevation = np.clip(self.elevation + elev, -90, 90)
    self.view_changed()
|
Orbits the camera around the center position.
Parameters
----------
azim : float
Angle in degrees to rotate horizontally around the center point.
elev : float
Angle in degrees to rotate vertically around the center point.
|
entailment
|
def _update_rotation(self, event):
"""Update rotation parmeters based on mouse movement"""
p1 = event.mouse_event.press_event.pos
p2 = event.mouse_event.pos
if self._event_value is None:
self._event_value = self.azimuth, self.elevation
self.azimuth = self._event_value[0] - (p2 - p1)[0] * 0.5
self.elevation = self._event_value[1] + (p2 - p1)[1] * 0.5
|
Update rotation parmeters based on mouse movement
|
entailment
|
def _rotate_tr(self):
"""Rotate the transformation matrix based on camera parameters"""
up, forward, right = self._get_dim_vectors()
self.transform.rotate(self.elevation, -right)
self.transform.rotate(self.azimuth, up)
|
Rotate the transformation matrix based on camera parameters
|
entailment
|
def _dist_to_trans(self, dist):
"""Convert mouse x, y movement into x, y, z translations"""
rae = np.array([self.roll, self.azimuth, self.elevation]) * np.pi / 180
sro, saz, sel = np.sin(rae)
cro, caz, cel = np.cos(rae)
dx = (+ dist[0] * (cro * caz + sro * sel * saz)
+ dist[1] * (sro * caz - cro * sel * saz))
dy = (+ dist[0] * (cro * saz - sro * sel * caz)
+ dist[1] * (sro * saz + cro * sel * caz))
dz = (- dist[0] * sro * cel + dist[1] * cro * cel)
return dx, dy, dz
|
Convert mouse x, y movement into x, y, z translations
|
entailment
|
def _set_config(c):
    """Set gl configuration for GLFW.

    Translates the GL config dict ``c`` into ``glfwWindowHint`` calls, which
    must be issued before the window is created.
    """
    glfw.glfwWindowHint(glfw.GLFW_RED_BITS, c['red_size'])
    glfw.glfwWindowHint(glfw.GLFW_GREEN_BITS, c['green_size'])
    glfw.glfwWindowHint(glfw.GLFW_BLUE_BITS, c['blue_size'])
    glfw.glfwWindowHint(glfw.GLFW_ALPHA_BITS, c['alpha_size'])
    # Accumulation buffers are explicitly not requested.
    glfw.glfwWindowHint(glfw.GLFW_ACCUM_RED_BITS, 0)
    glfw.glfwWindowHint(glfw.GLFW_ACCUM_GREEN_BITS, 0)
    glfw.glfwWindowHint(glfw.GLFW_ACCUM_BLUE_BITS, 0)
    glfw.glfwWindowHint(glfw.GLFW_ACCUM_ALPHA_BITS, 0)
    glfw.glfwWindowHint(glfw.GLFW_DEPTH_BITS, c['depth_size'])
    glfw.glfwWindowHint(glfw.GLFW_STENCIL_BITS, c['stencil_size'])
    # glfw.glfwWindowHint(glfw.GLFW_CONTEXT_VERSION_MAJOR, c['major_version'])
    # glfw.glfwWindowHint(glfw.GLFW_CONTEXT_VERSION_MINOR, c['minor_version'])
    # glfw.glfwWindowHint(glfw.GLFW_SRGB_CAPABLE, c['srgb'])
    glfw.glfwWindowHint(glfw.GLFW_SAMPLES, c['samples'])
    glfw.glfwWindowHint(glfw.GLFW_STEREO, c['stereo'])
    if not c['double_buffer']:
        # No single-buffer hint is issued; single buffering is unsupported here.
        raise RuntimeError('GLFW must double buffer, consider using a '
                           'different backend, or using double buffering')
|
Set gl configuration for GLFW
|
entailment
|
def _process_mod(self, key, down):
    """Process (possible) keyboard modifiers.

    GLFW provides "mod" with many callbacks, but not (critically) the scroll
    callback, so we keep track on our own here.

    Returns the updated list of currently-held modifier keys.
    """
    if key in MOD_KEYS:
        if down and key not in self._mod:
            self._mod.append(key)
        elif not down and key in self._mod:
            self._mod.remove(key)
    return self._mod
|
Process (possible) keyboard modifiers
GLFW provides "mod" with many callbacks, but not (critically) the
scroll callback, so we keep track on our own here.
|
entailment
|
def _patch():
    """Monkey-patch pyopengl to fix a bug in glBufferSubData.

    On Python 3 the wrapper module behind ``glBufferSubData`` still references
    the removed ``long`` builtin; alias it to ``int``. Also restores a GL 2.0
    enum name that may be missing.
    """
    import sys
    from OpenGL import GL
    if sys.version_info > (3,):
        buffersubdatafunc = GL.glBufferSubData
        # Unwrap to the underlying wrapper function when PyOpenGL wraps it.
        if hasattr(buffersubdatafunc, 'wrapperFunction'):
            buffersubdatafunc = buffersubdatafunc.wrapperFunction
        _m = sys.modules[buffersubdatafunc.__module__]
        # The wrapper's module still uses the Py2-only ``long`` name.
        _m.long = int

    # Fix missing enum
    try:
        from OpenGL.GL.VERSION import GL_2_0
        GL_2_0.GL_OBJECT_SHADER_SOURCE_LENGTH = GL_2_0.GL_SHADER_SOURCE_LENGTH
    except Exception:
        # Best-effort: other PyOpenGL versions may lay this out differently.
        pass
|
Monkey-patch pyopengl to fix a bug in glBufferSubData.
|
entailment
|
def _get_function_from_pyopengl(funcname):
    """Try getting the given function from PyOpenGL, return
    a dummy function (that shows a warning when called) if it
    could not be found.
    """
    func = None
    # Get function from GL
    try:
        func = getattr(_GL, funcname)
    except AttributeError:
        # Get function from FBO
        try:
            func = getattr(_FBO, funcname)
        except AttributeError:
            func = None

    # Try using "alias"
    # NOTE(review): ``bool(func)`` is presumably used because PyOpenGL
    # function objects override truthiness to reflect runtime availability
    # (a found-but-unavailable function is falsy) — confirm against PyOpenGL.
    if not bool(func):
        # Some functions are known by a slightly different name
        # e.g. glDepthRangef, glClearDepthf
        if funcname.endswith('f'):
            try:
                func = getattr(_GL, funcname[:-1])
            except AttributeError:
                pass

    # Set dummy function if we could not find it. A non-None but unavailable
    # function found above is deliberately left as-is.
    if func is None:
        func = _make_unavailable_func(funcname)
        logger.warning('warning: %s not available' % funcname)
    return func
|
Try getting the given function from PyOpenGL, return
a dummy function (that shows a warning when called) if it
could not be found.
|
entailment
|
def _inject():
    """Populate the _pyopengl2 namespace with functions resolved from PyOpenGL."""
    target = _pyopengl2.__dict__
    for gl_name, our_name in _pyopengl2._functions_to_import:
        target[our_name] = _get_function_from_pyopengl(gl_name)
|
Copy functions from OpenGL.GL into _pyopengl namespace.
|
entailment
|
def _get_vispy_font_filename(face, bold, italic):
    """Fetch a remote vispy font.

    The filename is ``<face>-<variant>.ttf`` where the variant is Regular,
    Bold, Italic or BoldItalic.
    """
    if bold and italic:
        variant = 'BoldItalic'
    elif bold:
        variant = 'Bold'
    elif italic:
        variant = 'Italic'
    else:
        variant = 'Regular'
    return load_data_file('fonts/%s-%s.ttf' % (face, variant))
|
Fetch a remote vispy font
|
entailment
|
def _check_color_dim(val):
"""Ensure val is Nx(n_col), usually Nx3"""
val = np.atleast_2d(val)
if val.shape[1] not in (3, 4):
raise RuntimeError('Value must have second dimension of size 3 or 4')
return val, val.shape[1]
|
Ensure val is Nx(n_col), usually Nx3
|
entailment
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.