| sentence1 (string, lengths 52 to 3.87M) | sentence2 (string, lengths 1 to 47.2k) | label (string, 1 class) |
|---|---|---|
def disable_gui(self):
"""Disable GUI event loop integration.
If an application was registered, this sets its ``_in_event_loop``
attribute to False. It then calls :meth:`clear_inputhook`.
"""
gui = self._current_gui
if gui in self.apps:
self.apps[gui]._in_event_loop = False
return self.clear_inputhook()
|
Disable GUI event loop integration.
If an application was registered, this sets its ``_in_event_loop``
attribute to False. It then calls :meth:`clear_inputhook`.
|
entailment
|
def set_current_canvas(canvas):
""" Make a canvas active. Used primarily by the canvas itself.
"""
# Notify glir
canvas.context._do_CURRENT_command = True
# Try to be quick
if canvasses and canvasses[-1]() is canvas:
return
# Make this the current canvas
cc = [c() for c in canvasses if c() is not None]
while canvas in cc:
cc.remove(canvas)
cc.append(canvas)
canvasses[:] = [weakref.ref(c) for c in cc]
|
Make a canvas active. Used primarily by the canvas itself.
|
entailment
|
def forget_canvas(canvas):
""" Forget about the given canvas. Used by the canvas when closed.
"""
cc = [c() for c in canvasses if c() is not None]
while canvas in cc:
cc.remove(canvas)
canvasses[:] = [weakref.ref(c) for c in cc]
|
Forget about the given canvas. Used by the canvas when closed.
|
entailment
|
def create_shared(self, name, ref):
""" For the app backends to create the GLShared object.
Parameters
----------
name : str
The name.
ref : object
The reference.
"""
if self._shared is not None:
raise RuntimeError('Can only set_shared once.')
self._shared = GLShared(name, ref)
|
For the app backends to create the GLShared object.
Parameters
----------
name : str
The name.
ref : object
The reference.
|
entailment
|
def flush_commands(self, event=None):
""" Flush
Parameters
----------
event : instance of Event
The event.
"""
if self._do_CURRENT_command:
self._do_CURRENT_command = False
canvas = get_current_canvas()
if canvas and hasattr(canvas, '_backend'):
fbo = canvas._backend._vispy_get_fb_bind_location()
else:
fbo = 0
self.shared.parser.parse([('CURRENT', 0, fbo)])
self.glir.flush(self.shared.parser)
|
Flush the queued GLIR commands.
Parameters
----------
event : instance of Event
The event.
|
entailment
|
def add_ref(self, name, ref):
""" Add a reference for the backend object that gives access
to the low level context. Used in vispy.app.canvas.backends.
The given name must match with that of previously added
references.
"""
if self._name is None:
self._name = name
elif name != self._name:
raise RuntimeError('Contexts can only share between backends of '
'the same type')
self._refs.append(weakref.ref(ref))
|
Add a reference for the backend object that gives access
to the low level context. Used in vispy.app.canvas.backends.
The given name must match with that of previously added
references.
|
entailment
|
def ref(self):
""" A reference (stored internally via a weakref) to an object
that the backend system can use to obtain the low-level
information of the "reference context". In Vispy this will
typically be the CanvasBackend object.
"""
# Clean
self._refs = [r for r in self._refs if (r() is not None)]
# Get ref
ref = self._refs[0]() if self._refs else None
if ref is not None:
return ref
else:
raise RuntimeError('No reference available for GLShared')
|
A reference (stored internally via a weakref) to an object
that the backend system can use to obtain the low-level
information of the "reference context". In Vispy this will
typically be the CanvasBackend object.
|
entailment
|
def get_dpi(raise_error=True):
"""Get screen DPI from the OS
Parameters
----------
raise_error : bool
If True, raise an error if DPI could not be determined.
Returns
-------
dpi : float
Dots per inch of the primary screen.
"""
display = quartz.CGMainDisplayID()
mm = quartz.CGDisplayScreenSize(display)
px = quartz.CGDisplayBounds(display).size
return (px.width/mm.width + px.height/mm.height) * 0.5 * 25.4
|
Get screen DPI from the OS
Parameters
----------
raise_error : bool
If True, raise an error if DPI could not be determined.
Returns
-------
dpi : float
Dots per inch of the primary screen.
|
entailment
|
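The DPI formula above is easy to check by hand. A minimal sketch, using hypothetical numbers for a 2560x1440 panel measuring 596x335 mm (quartz would normally supply these):
px_w, px_h = 2560.0, 1440.0   # pixel size of the display
mm_w, mm_h = 596.0, 335.0     # physical size in millimetres
dpi = (px_w / mm_w + px_h / mm_h) * 0.5 * 25.4
print(round(dpi, 1))          # ~109.1 dots per inch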
def link(self, var):
""" Link this Varying to another object from which it will derive its
dtype. This method is used internally when assigning an attribute to
a varying using syntax ``Function[varying] = attr``.
"""
assert self._dtype is not None or hasattr(var, 'dtype')
self._link = var
self.changed()
|
Link this Varying to another object from which it will derive its
dtype. This method is used internally when assigning an attribute to
a varying using syntax ``Function[varying] = attr``.
|
entailment
|
def obj(x):
"""Two Dimensional Shubert Function"""
j = np.arange(1, 6)
tmp1 = np.dot(j, np.cos((j+1)*x[0] + j))
tmp2 = np.dot(j, np.cos((j+1)*x[1] + j))
return tmp1 * tmp2
|
Two Dimensional Shubert Function
|
entailment
|
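A minimal usage sketch for the objective above, assuming numpy is imported as np and obj is in scope; the test point is purely illustrative:
import numpy as np
x = np.array([-0.5, 1.0])   # hypothetical 2-D test point
print(obj(x))               # product of the two 1-D Shubert sums at x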
def besj(self, x, n):
'''
Function BESJ calculates Bessel function of first kind of order n
Arguments:
n - an integer (>=0), the order
x - value at which the Bessel function is required
--------------------
C++ Mathematical Library
Converted from equivalent FORTRAN library
Converted by Gareth Walker for use by course 392 computational project
All functions tested and yield the same results as the corresponding
FORTRAN versions.
If you have any problems using these functions please report them to
M.Muldoon@UMIST.ac.uk
Documentation available on the web
http://www.ma.umist.ac.uk/mrm/Teaching/392/libs/392.html
Version 1.0 8/98
29 October, 1999
--------------------
Adapted for use in AGG library by
Andy Wilk (castor.vulgaris@gmail.com)
Adapted for use in vispy library by
Nicolas P. Rougier (Nicolas.Rougier@inria.fr)
-----------------------------------------------------------------------
'''
if n < 0:
return 0.0
d = 1e-6
b = 0
if math.fabs(x) <= d:
if n != 0:
return 0
return 1
b1 = 0 # b1 is the value from the previous iteration
# Set up a starting order for recurrence
m1 = int(math.fabs(x)) + 6
if math.fabs(x) > 5:
m1 = int(math.fabs(1.4 * x + 60 / x))
m2 = int(n + 2 + math.fabs(x) / 4)
if m1 > m2:
m2 = m1
# Apply recurrence down from current max order
while True:
c3 = 0
c2 = 1e-30
c4 = 0
m8 = 1
if m2 % 2 == 0:  # starting order is even
m8 = -1
imax = m2 - 2
for i in range(1, imax+1):
c6 = 2 * (m2 - i) * c2 / x - c3
c3 = c2
c2 = c6
if m2 - i - 1 == n:
b = c6
m8 = -1 * m8
if m8 > 0:
c4 = c4 + 2 * c6
c6 = 2 * c2 / x - c3
if n == 0:
b = c6
c4 += c6
b /= c4
if math.fabs(b - b1) < d:
return b
b1 = b
m2 += 3
|
Function BESJ calculates Bessel function of first kind of order n
Arguments:
n - an integer (>=0), the order
x - value at which the Bessel function is required
--------------------
C++ Mathematical Library
Converted from equivalent FORTRAN library
Converted by Gareth Walker for use by course 392 computational project
All functions tested and yield the same results as the corresponding
FORTRAN versions.
If you have any problems using these functions please report them to
M.Muldoon@UMIST.ac.uk
Documentation available on the web
http://www.ma.umist.ac.uk/mrm/Teaching/392/libs/392.html
Version 1.0 8/98
29 October, 1999
--------------------
Adapted for use in AGG library by
Andy Wilk (castor.vulgaris@gmail.com)
Adapted for use in vispy library by
Nicolas P. Rougier (Nicolas.Rougier@inria.fr)
-----------------------------------------------------------------------
|
entailment
|
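A hedged sanity check for the routine above: since `self` is never used, the function can be exercised directly (here with None in its place) and compared against tabulated Bessel values:
print(round(besj(None, 1.0, 0), 4))   # ~0.7652, i.e. J0(1.0)
print(round(besj(None, 1.0, 1), 4))   # ~0.4401, i.e. J1(1.0)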
def copy(self):
""" Create an exact copy of this quaternion.
"""
return Quaternion(self.w, self.x, self.y, self.z, False)
|
Create an exact copy of this quaternion.
|
entailment
|
def norm(self):
""" Returns the norm of the quaternion
norm = sqrt(w**2 + x**2 + y**2 + z**2)
"""
tmp = self.w**2 + self.x**2 + self.y**2 + self.z**2
return tmp**0.5
|
Returns the norm of the quaternion
norm = sqrt(w**2 + x**2 + y**2 + z**2)
|
entailment
|
def _normalize(self):
""" Make the quaternion unit length.
"""
# Get length
L = self.norm()
if not L:
raise ValueError('Quaternion cannot have 0-length.')
# Correct
self.w /= L
self.x /= L
self.y /= L
self.z /= L
|
Make the quaternion unit length.
|
entailment
|
def conjugate(self):
""" Obtain the conjugate of the quaternion.
This is simply the same quaternion but with the sign of the
imaginary (vector) parts reversed.
"""
new = self.copy()
new.x *= -1
new.y *= -1
new.z *= -1
return new
|
Obtain the conjugate of the quaternion.
This is simply the same quaternion but with the sign of the
imaginary (vector) parts reversed.
|
entailment
|
def inverse(self):
""" returns q.conjugate()/q.norm()**2
So if the quaternion is unit length, it is the same
as the conjugate.
"""
new = self.conjugate()
tmp = self.norm()**2
new.w /= tmp
new.x /= tmp
new.y /= tmp
new.z /= tmp
return new
|
returns q.conjugate()/q.norm()**2
So if the quaternion is unit length, it is the same
as the conjugate.
|
entailment
|
def exp(self):
""" Returns the exponent of the quaternion.
(not tested)
"""
# Init
vecNorm = (self.x**2 + self.y**2 + self.z**2) ** 0.5  # length of the vector part
wPart = np.exp(self.w)
q = Quaternion()
# Calculate
q.w = wPart * np.cos(vecNorm)
q.x = wPart * self.x * np.sin(vecNorm) / vecNorm
q.y = wPart * self.y * np.sin(vecNorm) / vecNorm
q.z = wPart * self.z * np.sin(vecNorm) / vecNorm
return q
|
Returns the exponential of the quaternion.
(not tested)
|
entailment
|
def log(self):
""" Returns the natural logarithm of the quaternion.
(not tested)
"""
# Init
norm = self.norm()
vecNorm = (self.x**2 + self.y**2 + self.z**2) ** 0.5  # length of the vector part
tmp = self.w / norm
q = Quaternion()
# Calculate
q.w = np.log(norm)
q.x = self.x * np.arccos(tmp) / vecNorm
q.y = self.y * np.arccos(tmp) / vecNorm
q.z = self.z * np.arccos(tmp) / vecNorm
return q
|
Returns the natural logarithm of the quaternion.
(not tested)
|
entailment
|
def rotate_point(self, p):
""" Rotate a Point instance using this quaternion.
"""
# Prepare
p = Quaternion(0, p[0], p[1], p[2], False) # Do not normalize!
q1 = self.normalize()
q2 = self.inverse()
# Apply rotation
r = (q1*p)*q2
# Make point and return
return r.x, r.y, r.z
|
Rotate a Point instance using this quaternion.
|
entailment
|
def get_matrix(self):
""" Create a 4x4 homography matrix that represents the rotation
of the quaternion.
"""
# Init matrix (remember, a matrix, not an array)
a = np.zeros((4, 4), dtype=np.float32)
w, x, y, z = self.w, self.x, self.y, self.z
# First row
a[0, 0] = - 2.0 * (y * y + z * z) + 1.0
a[1, 0] = + 2.0 * (x * y + z * w)
a[2, 0] = + 2.0 * (x * z - y * w)
a[3, 0] = 0.0
# Second row
a[0, 1] = + 2.0 * (x * y - z * w)
a[1, 1] = - 2.0 * (x * x + z * z) + 1.0
a[2, 1] = + 2.0 * (z * y + x * w)
a[3, 1] = 0.0
# Third row
a[0, 2] = + 2.0 * (x * z + y * w)
a[1, 2] = + 2.0 * (y * z - x * w)
a[2, 2] = - 2.0 * (x * x + y * y) + 1.0
a[3, 2] = 0.0
# Fourth row
a[0, 3] = 0.0
a[1, 3] = 0.0
a[2, 3] = 0.0
a[3, 3] = 1.0
return a
|
Create a 4x4 homography matrix that represents the rotation
of the quaternion.
|
entailment
|
def get_axis_angle(self):
""" Get the axis-angle representation of the quaternion.
(The angle is in radians)
"""
# Init
angle = 2 * np.arccos(max(min(self.w, 1.), -1.))
scale = (self.x**2 + self.y**2 + self.z**2)**0.5
# Calc axis
if scale:
ax = self.x / scale
ay = self.y / scale
az = self.z / scale
else:
# No rotation, so arbitrary axis
ax, ay, az = 1, 0, 0
# Return
return angle, ax, ay, az
|
Get the axis-angle representation of the quaternion.
(The angle is in radians)
|
entailment
|
def create_from_axis_angle(cls, angle, ax, ay, az, degrees=False):
""" Classmethod to create a quaternion from an axis-angle representation.
(angle should be in radians).
"""
if degrees:
angle = np.radians(angle)
while angle < 0:
angle += np.pi*2
angle2 = angle/2.0
sinang2 = np.sin(angle2)
return Quaternion(np.cos(angle2), ax*sinang2, ay*sinang2, az*sinang2)
|
Classmethod to create a quaternion from an axis-angle representation.
(angle should be in radians).
|
entailment
|
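A short sketch tying the quaternion methods above together; the import path assumes VisPy's vispy.util.quaternion module and a default constructor that yields the identity rotation:
import numpy as np
from vispy.util.quaternion import Quaternion   # assumed import path
q = Quaternion.create_from_axis_angle(np.pi / 2, 0, 0, 1)   # 90 deg about +z
print(round(q.norm(), 6))                 # ~1.0, a unit quaternion
print(q.rotate_point((1.0, 0.0, 0.0)))    # ~(0, 1, 0) after the rotation
angle, ax, ay, az = q.get_axis_angle()
print(round(np.degrees(angle), 1), (ax, ay, az))   # ~90.0 (0.0, 0.0, 1.0)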
def create_from_euler_angles(cls, rx, ry, rz, degrees=False):
""" Classmethod to create a quaternion given the euler angles.
"""
if degrees:
rx, ry, rz = np.radians([rx, ry, rz])
# Obtain quaternions
qx = Quaternion(np.cos(rx/2), 0, 0, np.sin(rx/2))
qy = Quaternion(np.cos(ry/2), 0, np.sin(ry/2), 0)
qz = Quaternion(np.cos(rz/2), np.sin(rz/2), 0, 0)
# Almost done
return qx*qy*qz
|
Classmethod to create a quaternion given the euler angles.
|
entailment
|
def as_enum(enum):
""" Turn a possibly string enum into an integer enum.
"""
if isinstance(enum, string_types):
try:
enum = getattr(gl, 'GL_' + enum.upper())
except AttributeError:
try:
enum = _internalformats['GL_' + enum.upper()]
except KeyError:
raise ValueError('Could not find int value for enum %r' % enum)
return enum
|
Turn a possibly string enum into an integer enum.
|
entailment
|
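A small sketch of the helper above, assuming vispy.gloo.gl is importable and as_enum is in scope (in VisPy it lives in the GLIR module):
from vispy.gloo import gl   # assumed import path
print(as_enum('triangles') == gl.GL_TRIANGLES)   # True: string mapped to a GL enum
print(as_enum(gl.GL_TRIANGLES))                  # non-strings pass through unchanged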
def convert_shaders(convert, shaders):
""" Modify shading code so that we can write code once
and make it run "everywhere".
"""
# New version of the shaders
out = []
if convert == 'es2':
for isfragment, shader in enumerate(shaders):
has_version = False
has_prec_float = False
has_prec_int = False
lines = []
# Iterate over lines
for line in shader.lstrip().splitlines():
if line.startswith('#version'):
has_version = True
continue
if line.startswith('precision '):
has_prec_float = has_prec_float or 'float' in line
has_prec_int = has_prec_int or 'int' in line
lines.append(line.rstrip())
# Write
# BUG: fails on WebGL (Chrome)
# if True:
# lines.insert(has_version, '#line 0')
if not has_prec_float:
lines.insert(has_version, 'precision highp float;')
if not has_prec_int:
lines.insert(has_version, 'precision highp int;')
# BUG: fails on WebGL (Chrome)
# if not has_version:
# lines.insert(has_version, '#version 100')
out.append('\n'.join(lines))
elif convert == 'desktop':
for isfragment, shader in enumerate(shaders):
has_version = False
lines = []
# Iterate over lines
for line in shader.lstrip().splitlines():
has_version = has_version or line.startswith('#version')
if line.startswith('precision '):
line = ''
for prec in (' highp ', ' mediump ', ' lowp '):
line = line.replace(prec, ' ')
lines.append(line.rstrip())
# Write
if not has_version:
lines.insert(0, '#version 120\n')
out.append('\n'.join(lines))
else:
raise ValueError('Cannot convert shaders to %r.' % convert)
return tuple(out)
|
Modify shading code so that we can write code once
and make it run "everywhere".
|
entailment
|
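A quick sketch of the 'es2' conversion above on a hypothetical desktop vertex shader; with no #version or precision lines present, the two precision qualifiers are inserted at the top:
vert = """
attribute vec2 a_position;
void main() {
    gl_Position = vec4(a_position, 0.0, 1.0);
}
"""
es2_vert, = convert_shaders('es2', [vert])
print(es2_vert.splitlines()[0])   # 'precision highp int;'
print(es2_vert.splitlines()[1])   # 'precision highp float;'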
def as_es2_command(command):
""" Modify a desktop command so it works on es2.
"""
if command[0] == 'FUNC':
return (command[0], re.sub(r'^gl([A-Z])',
lambda m: m.group(1).lower(), command[1])) + command[2:]
if command[0] == 'SHADERS':
return command[:2] + convert_shaders('es2', command[2:])
if command[0] == 'UNIFORM':
return command[:-1] + (command[-1].tolist(),)
return command
|
Modify a desktop command so it works on es2.
|
entailment
|
def _check_pyopengl_3D():
"""Helper to ensure users have OpenGL for 3D texture support (for now)"""
global USE_TEX_3D
USE_TEX_3D = True
try:
import OpenGL.GL as _gl
except ImportError:
raise ImportError('PyOpenGL is required for 3D texture support')
return _gl
|
Helper to ensure users have OpenGL for 3D texture support (for now)
|
entailment
|
def show(self, filter=None):
""" Print the list of commands currently in the queue. If filter is
given, print only commands that match the filter.
"""
for command in self._commands:
if command[0] is None: # or command[1] in self._invalid_objects:
continue  # Skip null commands
if filter and command[0] != filter:
continue
t = []
for e in command:
if isinstance(e, np.ndarray):
t.append('array %s' % str(e.shape))
elif isinstance(e, str):
s = e.strip()
if len(s) > 20:
s = s[:18] + '... %i lines' % (e.count('\n')+1)
t.append(s)
else:
t.append(e)
print(tuple(t))
|
Print the list of commands currently in the queue. If filter is
given, print only commands that match the filter.
|
entailment
|
def flush(self, parser):
""" Flush all current commands to the GLIR interpreter.
"""
if self._verbose:
show = self._verbose if isinstance(self._verbose, str) else None
self.show(show)
parser.parse(self._filter(self.clear(), parser))
|
Flush all current commands to the GLIR interpreter.
|
entailment
|
def _filter(self, commands, parser):
""" Filter DATA/SIZE commands that are overridden by a
SIZE command.
"""
resized = set()
commands2 = []
for command in reversed(commands):
if command[0] == 'SHADERS':
convert = parser.convert_shaders()
if convert:
shaders = self._convert_shaders(convert, command[2:])
command = command[:2] + shaders
elif command[1] in resized:
if command[0] in ('SIZE', 'DATA'):
continue # remove this command
elif command[0] == 'SIZE':
resized.add(command[1])
commands2.append(command)
return list(reversed(commands2))
|
Filter DATA/SIZE commands that are overridden by a
SIZE command.
|
entailment
|
def associate(self, queue):
"""Merge this queue with another.
Both queues will use a shared command list and either one can be used
to fill or flush the shared queue.
"""
assert isinstance(queue, GlirQueue)
if queue._shared is self._shared:
return
# merge commands
self._shared._commands.extend(queue.clear())
self._shared._verbose |= queue._shared._verbose
self._shared._associations[queue] = None
# update queue and all related queues to use the same _shared object
for ch in queue._shared._associations:
ch._shared = self._shared
self._shared._associations[ch] = None
queue._shared = self._shared
|
Merge this queue with another.
Both queues will use a shared command list and either one can be used
to fill or flush the shared queue.
|
entailment
|
def _parse(self, command):
""" Parse a single command.
"""
cmd, id_, args = command[0], command[1], command[2:]
if cmd == 'CURRENT':
# This context is made current
self.env.clear()
self._gl_initialize()
self.env['fbo'] = args[0]
gl.glBindFramebuffer(gl.GL_FRAMEBUFFER, args[0])
elif cmd == 'FUNC':
# GL function call
args = [as_enum(a) for a in args]
try:
getattr(gl, id_)(*args)
except AttributeError:
logger.warning('Invalid gl command: %r' % id_)
elif cmd == 'CREATE':
# Creating an object
if args[0] is not None:
klass = self._classmap[args[0]]
self._objects[id_] = klass(self, id_)
else:
self._invalid_objects.add(id_)
elif cmd == 'DELETE':
# Deleting an object
ob = self._objects.get(id_, None)
if ob is not None:
self._objects[id_] = JUST_DELETED
ob.delete()
else:
# Doing something to an object
ob = self._objects.get(id_, None)
if ob == JUST_DELETED:
return
if ob is None:
if id_ not in self._invalid_objects:
raise RuntimeError('Cannot %s object %i because it '
'does not exist' % (cmd, id_))
return
# Triage over command. Order of commands is set so most
# common ones occur first.
if cmd == 'DRAW': # Program
ob.draw(*args)
elif cmd == 'TEXTURE': # Program
ob.set_texture(*args)
elif cmd == 'UNIFORM': # Program
ob.set_uniform(*args)
elif cmd == 'ATTRIBUTE': # Program
ob.set_attribute(*args)
elif cmd == 'DATA': # VertexBuffer, IndexBuffer, Texture
ob.set_data(*args)
elif cmd == 'SIZE': # VertexBuffer, IndexBuffer,
ob.set_size(*args) # Texture[1D, 2D, 3D], RenderBuffer
elif cmd == 'ATTACH': # FrameBuffer
ob.attach(*args)
elif cmd == 'FRAMEBUFFER': # FrameBuffer
ob.set_framebuffer(*args)
elif cmd == 'SHADERS': # Program
ob.set_shaders(*args)
elif cmd == 'WRAPPING': # Texture1D, Texture2D, Texture3D
ob.set_wrapping(*args)
elif cmd == 'INTERPOLATION': # Texture1D, Texture2D, Texture3D
ob.set_interpolation(*args)
else:
logger.warning('Invalid GLIR command %r' % cmd)
|
Parse a single command.
|
entailment
|
def parse(self, commands):
""" Parse a list of commands.
"""
# Get rid of dummy objects that represented deleted objects in
# the last parsing round.
to_delete = []
for id_, val in self._objects.items():
if val == JUST_DELETED:
to_delete.append(id_)
for id_ in to_delete:
self._objects.pop(id_)
for command in commands:
self._parse(command)
|
Parse a list of commands.
|
entailment
|
def _gl_initialize(self):
""" Deal with compatibility; desktop does not have sprites
enabled by default. ES has.
"""
if '.es' in gl.current_backend.__name__:
pass # ES2: no action required
else:
# Desktop, enable sprites
GL_VERTEX_PROGRAM_POINT_SIZE = 34370
GL_POINT_SPRITE = 34913
gl.glEnable(GL_VERTEX_PROGRAM_POINT_SIZE)
gl.glEnable(GL_POINT_SPRITE)
if self.capabilities['max_texture_size'] is None: # only do once
self.capabilities['gl_version'] = gl.glGetParameter(gl.GL_VERSION)
self.capabilities['max_texture_size'] = \
gl.glGetParameter(gl.GL_MAX_TEXTURE_SIZE)
this_version = self.capabilities['gl_version'].split(' ')[0]
this_version = LooseVersion(this_version)
|
Deal with compatibility; desktop does not have sprites
enabled by default. ES has.
|
entailment
|
def activate(self):
""" Avoid overhead in calling glUseProgram with same arg.
Warning: this will break if glUseProgram is used somewhere else.
Per context we keep track of one current program.
"""
if self._handle != self._parser.env.get('current_program', False):
self._parser.env['current_program'] = self._handle
gl.glUseProgram(self._handle)
|
Avoid overhead in calling glUseProgram with same arg.
Warning: this will break if glUseProgram is used somewhere else.
Per context we keep track of one current program.
|
entailment
|
def deactivate(self):
""" Avoid overhead in calling glUseProgram with same arg.
Warning: this will break if glUseProgram is used somewhere else.
Per context we keep track of one current program.
"""
if self._parser.env.get('current_program', 0) != 0:
self._parser.env['current_program'] = 0
gl.glUseProgram(0)
|
Avoid overhead in calling glUseProgram with same arg.
Warning: this will break if glUseProgram is used somewhere else.
Per context we keep track of one current program.
|
entailment
|
def set_shaders(self, vert, frag):
""" This function takes care of setting the shading code and
compiling+linking it into a working program object that is ready
to use.
"""
self._linked = False
# Create temporary shader objects
vert_handle = gl.glCreateShader(gl.GL_VERTEX_SHADER)
frag_handle = gl.glCreateShader(gl.GL_FRAGMENT_SHADER)
# For both vertex and fragment shader: set source, compile, check
for code, handle, type_ in [(vert, vert_handle, 'vertex'),
(frag, frag_handle, 'fragment')]:
gl.glShaderSource(handle, code)
gl.glCompileShader(handle)
status = gl.glGetShaderParameter(handle, gl.GL_COMPILE_STATUS)
if not status:
errors = gl.glGetShaderInfoLog(handle)
errormsg = self._get_error(code, errors, 4)
raise RuntimeError("Shader compilation error in %s:\n%s" %
(type_ + ' shader', errormsg))
# Attach shaders
gl.glAttachShader(self._handle, vert_handle)
gl.glAttachShader(self._handle, frag_handle)
# Link the program and check
gl.glLinkProgram(self._handle)
if not gl.glGetProgramParameter(self._handle, gl.GL_LINK_STATUS):
raise RuntimeError('Program linking error:\n%s'
% gl.glGetProgramInfoLog(self._handle))
# Now we can remove the shaders. We no longer need them and it
# frees up precious GPU memory:
# http://gamedev.stackexchange.com/questions/47910
gl.glDetachShader(self._handle, vert_handle)
gl.glDetachShader(self._handle, frag_handle)
gl.glDeleteShader(vert_handle)
gl.glDeleteShader(frag_handle)
# Now we know what variables will be used by the program
self._unset_variables = self._get_active_attributes_and_uniforms()
self._handles = {}
self._known_invalid = set()
self._linked = True
|
This function takes care of setting the shading code and
compiling+linking it into a working program object that is ready
to use.
|
entailment
|
def _get_active_attributes_and_uniforms(self):
""" Retrieve active attributes and uniforms to be able to check that
all uniforms/attributes are set by the user.
Other GLIR implementations may omit this.
"""
# This match a name of the form "name[size]" (= array)
regex = re.compile(r"""(?P<name>\w+)\s*(\[(?P<size>\d+)\])\s*""")
# Get how many active attributes and uniforms there are
cu = gl.glGetProgramParameter(self._handle, gl.GL_ACTIVE_UNIFORMS)
ca = gl.glGetProgramParameter(self.handle, gl.GL_ACTIVE_ATTRIBUTES)
# Get info on each one
attributes = []
uniforms = []
for container, count, func in [(attributes, ca, gl.glGetActiveAttrib),
(uniforms, cu, gl.glGetActiveUniform)]:
for i in range(count):
name, size, gtype = func(self._handle, i)
m = regex.match(name) # Check if xxx[0] instead of xx
if m:
name = m.group('name')
for i in range(size):
container.append(('%s[%d]' % (name, i), gtype))
else:
container.append((name, gtype))
#return attributes, uniforms
return set([v[0] for v in attributes] + [v[0] for v in uniforms])
|
Retrieve active attributes and uniforms to be able to check that
all uniforms/attributes are set by the user.
Other GLIR implementations may omit this.
|
entailment
|
def _parse_error(self, error):
""" Parses a single GLSL error and extracts the linenr and description
Other GLIR implementations may omit this.
"""
error = str(error)
# Nvidia
# 0(7): error C1008: undefined variable "MV"
m = re.match(r'(\d+)\((\d+)\)\s*:\s(.*)', error)
if m:
return int(m.group(2)), m.group(3)
# ATI / Intel
# ERROR: 0:131: '{' : syntax error parse error
m = re.match(r'ERROR:\s(\d+):(\d+):\s(.*)', error)
if m:
return int(m.group(2)), m.group(3)
# Nouveau
# 0:28(16): error: syntax error, unexpected ')', expecting '('
m = re.match(r'(\d+):(\d+)\((\d+)\):\s(.*)', error)
if m:
return int(m.group(2)), m.group(4)
# Other ...
return None, error
|
Parses a single GLSL error and extracts the linenr and description
Other GLIR implementations may omit this.
|
entailment
|
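The same regexes can be exercised standalone; a hedged sketch with hypothetical Nvidia-style and ATI/Intel-style messages:
import re
nv = '0(7): error C1008: undefined variable "MV"'
m = re.match(r'(\d+)\((\d+)\)\s*:\s(.*)', nv)
print(int(m.group(2)), m.group(3))   # 7 error C1008: undefined variable "MV"
ati = "ERROR: 0:131: '{' : syntax error parse error"
m = re.match(r'ERROR:\s(\d+):(\d+):\s(.*)', ati)
print(int(m.group(2)), m.group(3))   # 131 '{' : syntax error parse error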
def _get_error(self, code, errors, indentation=0):
"""Get error and show the faulty line + some context
Other GLIR implementations may omit this.
"""
# Init
results = []
lines = None
if code is not None:
lines = [line.strip() for line in code.split('\n')]
for error in errors.split('\n'):
# Strip; skip empty lines
error = error.strip()
if not error:
continue
# Separate line number from description (if we can)
linenr, error = self._parse_error(error)
if None in (linenr, lines):
results.append('%s' % error)
else:
results.append('on line %i: %s' % (linenr, error))
if linenr > 0 and linenr < len(lines):
results.append(' %s' % lines[linenr - 1])
# Add indentation and return
results = [' ' * indentation + r for r in results]
return '\n'.join(results)
|
Get error and show the faulty line + some context
Other GLIR implementations may omit this.
|
entailment
|
def set_texture(self, name, value):
""" Set a texture sampler. Value is the id of the texture to link.
"""
if not self._linked:
raise RuntimeError('Cannot set uniform when program has no code')
# Get handle for the uniform, first try cache
handle = self._handles.get(name, -1)
if handle < 0:
if name in self._known_invalid:
return
handle = gl.glGetUniformLocation(self._handle, name)
self._unset_variables.discard(name) # Mark as set
self._handles[name] = handle # Store in cache
if handle < 0:
self._known_invalid.add(name)
logger.info('Variable %s is not an active uniform' % name)
return
# Program needs to be active in order to set uniforms
self.activate()
if True:
# Sampler: the value is the id of the texture
tex = self._parser.get_object(value)
if tex == JUST_DELETED:
return
if tex is None:
raise RuntimeError('Could not find texture with id %i' % value)
unit = len(self._samplers)
if name in self._samplers:
unit = self._samplers[name][-1] # Use existing unit
self._samplers[name] = tex._target, tex.handle, unit
gl.glUniform1i(handle, unit)
|
Set a texture sampler. Value is the id of the texture to link.
|
entailment
|
def set_uniform(self, name, type_, value):
""" Set a uniform value. Value is assumed to have been checked.
"""
if not self._linked:
raise RuntimeError('Cannot set uniform when program has no code')
# Get handle for the uniform, first try cache
handle = self._handles.get(name, -1)
count = 1
if handle < 0:
if name in self._known_invalid:
return
handle = gl.glGetUniformLocation(self._handle, name)
self._unset_variables.discard(name) # Mark as set
# if we set a uniform_array, mark all as set
if not type_.startswith('mat'):
count = value.nbytes // (4 * self.ATYPEINFO[type_][0])
if count > 1:
for ii in range(count):
if '%s[%s]' % (name, ii) in self._unset_variables:
self._unset_variables.discard('%s[%s]' % (name, ii))
self._handles[name] = handle # Store in cache
if handle < 0:
self._known_invalid.add(name)
logger.info('Variable %s is not an active uniform' % name)
return
# Look up function to call
funcname = self.UTYPEMAP[type_]
func = getattr(gl, funcname)
# Program needs to be active in order to set uniforms
self.activate()
# Triage depending on type
if type_.startswith('mat'):
# Value is matrix, these gl funcs have alternative signature
transpose = False # OpenGL ES 2.0 does not support transpose
func(handle, 1, transpose, value)
else:
# Regular uniform
func(handle, count, value)
|
Set a uniform value. Value is assumed to have been checked.
|
entailment
|
def set_attribute(self, name, type_, value):
""" Set an attribute value. Value is assumed to have been checked.
"""
if not self._linked:
raise RuntimeError('Cannot set attribute when program has no code')
# Get handle for the attribute, first try cache
handle = self._handles.get(name, -1)
if handle < 0:
if name in self._known_invalid:
return
handle = gl.glGetAttribLocation(self._handle, name)
self._unset_variables.discard(name) # Mark as set
self._handles[name] = handle # Store in cache
if handle < 0:
self._known_invalid.add(name)
if value[0] != 0 and value[2] > 0: # VBO with offset
return # Probably an unused element in a structured VBO
logger.info('Variable %s is not an active attribute' % name)
return
# Program needs to be active in order to set uniforms
self.activate()
# Triage depending on VBO or tuple data
if value[0] == 0:
# Look up function call
funcname = self.ATYPEMAP[type_]
func = getattr(gl, funcname)
# Set data
self._attributes[name] = 0, handle, func, value[1:]
else:
# Get meta data
vbo_id, stride, offset = value
size, gtype, dtype = self.ATYPEINFO[type_]
# Get associated VBO
vbo = self._parser.get_object(vbo_id)
if vbo == JUST_DELETED:
return
if vbo is None:
raise RuntimeError('Could not find VBO with id %i' % vbo_id)
# Set data
func = gl.glVertexAttribPointer
args = size, gtype, gl.GL_FALSE, stride, offset
self._attributes[name] = vbo.handle, handle, func, args
|
Set an attribute value. Value is assumed to have been checked.
|
entailment
|
def draw(self, mode, selection):
""" Draw program in given mode, with given selection (IndexBuffer or
first, count).
"""
if not self._linked:
raise RuntimeError('Cannot draw program if code has not been set')
# Init
gl.check_error('Check before draw')
mode = as_enum(mode)
# Draw
if len(selection) == 3:
# Selection based on indices
id_, gtype, count = selection
if count:
self._pre_draw()
ibuf = self._parser.get_object(id_)
ibuf.activate()
gl.glDrawElements(mode, count, as_enum(gtype), None)
ibuf.deactivate()
else:
# Selection based on start and count
first, count = selection
if count:
self._pre_draw()
gl.glDrawArrays(mode, first, count)
# Wrap up
gl.check_error('Check after draw')
self._post_draw()
|
Draw program in given mode, with given selection (IndexBuffer or
first, count).
|
entailment
|
def as_matrix_transform(transform):
"""
Simplify a transform to a single matrix transform, which makes it a lot
faster to compute transformations.
Raises a TypeError if the transform cannot be simplified.
"""
if isinstance(transform, ChainTransform):
matrix = np.identity(4)
for tr in transform.transforms:
# We need to do the matrix multiplication manually because VisPy
# somehow doesn't multiply matrices if there is a perspective
# component. The equation below looks like it's the wrong way
# around, but the VisPy matrices are transposed.
matrix = np.matmul(as_matrix_transform(tr).matrix, matrix)
return MatrixTransform(matrix)
elif isinstance(transform, InverseTransform):
matrix = as_matrix_transform(transform._inverse)
return MatrixTransform(matrix.inv_matrix)
elif isinstance(transform, NullTransform):
return MatrixTransform()
elif isinstance(transform, STTransform):
return transform.as_matrix()
elif isinstance(transform, MatrixTransform):
return transform
else:
raise TypeError("Could not simplify transform of type {0}".format(type(transform)))
|
Simplify a transform to a single matrix transform, which makes it a lot
faster to compute transformations.
Raises a TypeError if the transform cannot be simplified.
|
entailment
|
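A minimal sketch of the simplification above, assuming the VisPy transform classes are importable from vispy.visuals.transforms and as_matrix_transform is in scope:
from vispy.visuals.transforms import STTransform   # assumed import path
st = STTransform(scale=(2.0, 2.0, 1.0), translate=(1.0, 0.0, 0.0))
m = as_matrix_transform(st)     # STTransform simplifies via its as_matrix()
print(type(m).__name__)         # MatrixTransform
print(m.matrix.shape)           # (4, 4)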
def circular(adjacency_mat, directed=False):
"""Places all nodes on a single circle.
Parameters
----------
adjacency_mat : matrix or sparse
The graph adjacency matrix
directed : bool
Whether the graph is directed. If this is True, it will also
generate the vertices for arrows, which can be passed to an
ArrowVisual.
Yields
------
(node_vertices, line_vertices, arrow_vertices) : tuple
Yields the node and line vertices in a tuple. This layout only yields a
single time, and has no builtin animation
"""
if issparse(adjacency_mat):
adjacency_mat = adjacency_mat.tocoo()
num_nodes = adjacency_mat.shape[0]
t = np.linspace(0, 2 * np.pi, num_nodes, endpoint=False, dtype=np.float32)
# Visual coordinate system is between 0 and 1, so generate a circle with
# radius 0.5 and center it at the point (0.5, 0.5).
node_coords = (0.5 * np.array([np.cos(t), np.sin(t)]) + 0.5).T
line_vertices, arrows = _straight_line_vertices(adjacency_mat,
node_coords, directed)
yield node_coords, line_vertices, arrows
|
Places all nodes on a single circle.
Parameters
----------
adjacency_mat : matrix or sparse
The graph adjacency matrix
directed : bool
Whether the graph is directed. If this is True, it will also
generate the vertices for arrows, which can be passed to an
ArrowVisual.
Yields
------
(node_vertices, line_vertices, arrow_vertices) : tuple
Yields the node and line vertices in a tuple. This layout only yields a
single time, and has no builtin animation
|
entailment
|
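A hedged sketch of the generator above on a hypothetical 3-node graph; it assumes circular and its _straight_line_vertices helper are importable (in VisPy they live under vispy.visuals.graphs.layouts):
import numpy as np
adj = np.array([[0, 1, 0],
                [1, 0, 1],
                [0, 1, 0]], dtype=float)   # hypothetical undirected graph
node_coords, line_vertices, arrows = next(circular(adj))
print(node_coords)   # three points on a radius-0.5 circle centred at (0.5, 0.5)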
def append(self, P, closed=False, itemsize=None, **kwargs):
"""
Append a new set of vertices to the collection.
For the kwargs arguments, n is the number of vertices (local) or the number
of items (shared)
Parameters
----------
P : np.array
Vertices positions of the path(s) to be added
closed: bool
Whether path(s) is/are closed
itemsize: int or None
Size of an individual path
caps : list, array or 2-tuple
Path start/end cap
join : list, array or float
path segment join
color : list, array or 4-tuple
Path color
miter_limit : list, array or float
Miter limit for join
linewidth : list, array or float
Path linewidth
antialias : list, array or float
Path antialias area
"""
itemsize = itemsize or len(P)
itemcount = len(P) // itemsize
# Computes the adjacency information
n, p = len(P), P.shape[-1]
Z = np.tile(P, 2).reshape(2 * len(P), p)
V = np.empty(n, dtype=self.vtype)
V['p0'][1:-1] = Z[0::2][:-2]
V['p1'][:-1] = Z[1::2][:-1]
V['p2'][:-1] = Z[1::2][+1:]
V['p3'][:-2] = Z[0::2][+2:]
# Apply default values on vertices
for name in self.vtype.names:
if name not in ['collection_index', 'p0', 'p1', 'p2', 'p3']:
V[name] = kwargs.get(name, self._defaults[name])
# Extract relevant segments only
V = (V.reshape(n // itemsize, itemsize)[:, :-1])
if closed:
V['p0'][:, 0] = V['p2'][:, -1]
V['p3'][:, -1] = V['p1'][:, 0]
else:
V['p0'][:, 0] = V['p1'][:, 0]
V['p3'][:, -1] = V['p2'][:, -1]
V = V.ravel()
# Quadruple each point (we're using 2 triangles / segment)
# No shared vertices between segment because of joins
V = np.repeat(V, 4, axis=0).reshape((len(V), 4))
V['uv'] = (-1, -1), (-1, +1), (+1, -1), (+1, +1)
V = V.ravel()
n = itemsize
if closed:
# uint16 for WebGL
I = np.resize(
np.array([0, 1, 2, 1, 2, 3], dtype=np.uint32), n * 2 * 3)
I += np.repeat(4 * np.arange(n, dtype=np.uint32), 6)
I[-6:] = 4 * n - 6, 4 * n - 5, 0, 4 * n - 5, 0, 1
else:
I = np.resize(
np.array([0, 1, 2, 1, 2, 3], dtype=np.uint32), (n - 1) * 2 * 3)
I += np.repeat(4 * np.arange(n - 1, dtype=np.uint32), 6)
I = I.ravel()
# Uniforms
if self.utype:
U = np.zeros(itemcount, dtype=self.utype)
for name in self.utype.names:
if name not in ["__unused__"]:
U[name] = kwargs.get(name, self._defaults[name])
else:
U = None
Collection.append(self, vertices=V, uniforms=U,
indices=I, itemsize=itemsize * 4 - 4)
|
Append a new set of vertices to the collection.
For the kwargs arguments, n is the number of vertices (local) or the number
of items (shared)
Parameters
----------
P : np.array
Vertices positions of the path(s) to be added
closed: bool
Whether path(s) is/are closed
itemsize: int or None
Size of an individual path
caps : list, array or 2-tuple
Path start/end cap
join : list, array or float
path segment join
color : list, array or 4-tuple
Path color
miter_limit : list, array or float
Miter limit for join
linewidth : list, array or float
Path linewidth
antialias : list, array or float
Path antialias area
|
entailment
|
def draw(self, mode="triangles"):
""" Draw collection """
gl.glDepthMask(0)
Collection.draw(self, mode)
gl.glDepthMask(1)
|
Draw collection
|
entailment
|
def set_data(self, pos=None, symbol='o', size=10., edge_width=1.,
edge_width_rel=None, edge_color='black', face_color='white',
scaling=False):
""" Set the data used to display this visual.
Parameters
----------
pos : array
The array of locations to display each symbol.
symbol : str
The style of symbol to draw (see Notes).
size : float or array
The symbol size in px.
edge_width : float | None
The width of the symbol outline in pixels.
edge_width_rel : float | None
The width as a fraction of marker size. Exactly one of
`edge_width` and `edge_width_rel` must be supplied.
edge_color : Color | ColorArray
The color used to draw each symbol outline.
face_color : Color | ColorArray
The color used to draw each symbol interior.
scaling : bool
If set to True, the marker scales when zooming.
Notes
-----
Allowed style strings are: disc, arrow, ring, clobber, square, diamond,
vbar, hbar, cross, tailed_arrow, x, triangle_up, triangle_down,
and star.
"""
assert (isinstance(pos, np.ndarray) and
pos.ndim == 2 and pos.shape[1] in (2, 3))
if (edge_width is not None) + (edge_width_rel is not None) != 1:
raise ValueError('exactly one of edge_width and edge_width_rel '
'must be non-None')
if edge_width is not None:
if edge_width < 0:
raise ValueError('edge_width cannot be negative')
else:
if edge_width_rel < 0:
raise ValueError('edge_width_rel cannot be negative')
self.symbol = symbol
self.scaling = scaling
edge_color = ColorArray(edge_color).rgba
if len(edge_color) == 1:
edge_color = edge_color[0]
face_color = ColorArray(face_color).rgba
if len(face_color) == 1:
face_color = face_color[0]
n = len(pos)
data = np.zeros(n, dtype=[('a_position', np.float32, 3),
('a_fg_color', np.float32, 4),
('a_bg_color', np.float32, 4),
('a_size', np.float32, 1),
('a_edgewidth', np.float32, 1)])
data['a_fg_color'] = edge_color
data['a_bg_color'] = face_color
if edge_width is not None:
data['a_edgewidth'] = edge_width
else:
data['a_edgewidth'] = size*edge_width_rel
data['a_position'][:, :pos.shape[1]] = pos
data['a_size'] = size
self.shared_program['u_antialias'] = self.antialias # XXX make prop
self._data = data
self._vbo.set_data(data)
self.shared_program.bind(self._vbo)
self.update()
|
Set the data used to display this visual.
Parameters
----------
pos : array
The array of locations to display each symbol.
symbol : str
The style of symbol to draw (see Notes).
size : float or array
The symbol size in px.
edge_width : float | None
The width of the symbol outline in pixels.
edge_width_rel : float | None
The width as a fraction of marker size. Exactly one of
`edge_width` and `edge_width_rel` must be supplied.
edge_color : Color | ColorArray
The color used to draw each symbol outline.
face_color : Color | ColorArray
The color used to draw each symbol interior.
scaling : bool
If set to True, the marker scales when zooming.
Notes
-----
Allowed style strings are: disc, arrow, ring, clobber, square, diamond,
vbar, hbar, cross, tailed_arrow, x, triangle_up, triangle_down,
and star.
|
entailment
|
def hook_changed(self, hook_name, widget, new_data):
"""Handle a hook upate."""
if hook_name == 'song':
self.song_changed(widget, new_data)
elif hook_name == 'state':
self.state_changed(widget, new_data)
elif hook_name == 'elapsed_and_total':
elapsed, total = new_data
self.time_changed(widget, elapsed, total)
|
Handle a hook update.
|
entailment
|
def update(self, func, **kw):
"Update the signature of func with the data in self"
func.__name__ = self.name
func.__doc__ = getattr(self, 'doc', None)
func.__dict__ = getattr(self, 'dict', {})
func.__defaults__ = getattr(self, 'defaults', ())
func.__kwdefaults__ = getattr(self, 'kwonlydefaults', None)
func.__annotations__ = getattr(self, 'annotations', None)
callermodule = sys._getframe(3).f_globals.get('__name__', '?')
func.__module__ = getattr(self, 'module', callermodule)
func.__dict__.update(kw)
|
Update the signature of func with the data in self
|
entailment
|
def make(self, src_templ, evaldict=None, addsource=False, **attrs):
"Make a new function from a given template and update the signature"
src = src_templ % vars(self) # expand name and signature
evaldict = evaldict or {}
mo = DEF.match(src)
if mo is None:
raise SyntaxError('not a valid function template\n%s' % src)
name = mo.group(1) # extract the function name
names = set([name] + [arg.strip(' *') for arg in
self.shortsignature.split(',')])
for n in names:
if n in ('_func_', '_call_'):
raise NameError('%s is overridden in\n%s' % (n, src))
if not src.endswith('\n'): # add a newline just for safety
src += '\n' # this is needed in old versions of Python
try:
code = compile(src, '<string>', 'single')
# print >> sys.stderr, 'Compiling %s' % src
exec(code, evaldict)
except:
print('Error in generated code:', file=sys.stderr)
print(src, file=sys.stderr)
raise
func = evaldict[name]
if addsource:
attrs['__source__'] = src
self.update(func, **attrs)
return func
|
Make a new function from a given template and update the signature
|
entailment
|
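The core mechanism of make() above, compiling a templated def and pulling the function out of the eval dictionary, can be sketched standalone (this illustrates the compile/exec step, not the class API itself):
src = "def add_one(x):\n    return x + 1\n"
evaldict = {}
code = compile(src, '<string>', 'single')
exec(code, evaldict)
print(evaldict['add_one'](41))   # 42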
def create(cls, obj, body, evaldict, defaults=None,
doc=None, module=None, addsource=True, **attrs):
"""
Create a function from the strings name, signature and body.
evaldict is the evaluation dictionary. If addsource is true an attribute
__source__ is added to the result. The attributes attrs are added,
if any.
"""
if isinstance(obj, str): # "name(signature)"
name, rest = obj.strip().split('(', 1)
signature = rest[:-1]  # strip the trailing right parenthesis
func = None
else: # a function
name = None
signature = None
func = obj
self = cls(func, name, signature, defaults, doc, module)
ibody = '\n'.join(' ' + line for line in body.splitlines())
return self.make('def %(name)s(%(signature)s):\n' + ibody,
evaldict, addsource, **attrs)
|
Create a function from the strings name, signature and body.
evaldict is the evaluation dictionary. If addsource is true an attribute
__source__ is added to the result. The attributes attrs are added,
if any.
|
entailment
|
def set_data(self, data=None, vertex_colors=None, face_colors=None,
color=None):
""" Set the scalar array data
Parameters
----------
data : ndarray
A 3D array of scalar values. The isosurface is constructed to show
all locations in the scalar field equal to ``self.level``.
vertex_colors : array-like | None
Colors to use for each vertex.
face_colors : array-like | None
Colors to use for each face.
color : instance of Color
The color to use.
"""
# We only change the internal variables if they are provided
if data is not None:
self._data = data
self._recompute = True
if vertex_colors is not None:
self._vertex_colors = vertex_colors
self._update_meshvisual = True
if face_colors is not None:
self._face_colors = face_colors
self._update_meshvisual = True
if color is not None:
self._color = Color(color)
self._update_meshvisual = True
self.update()
|
Set the scalar array data
Parameters
----------
data : ndarray
A 3D array of scalar values. The isosurface is constructed to show
all locations in the scalar field equal to ``self.level``.
vertex_colors : array-like | None
Colors to use for each vertex.
face_colors : array-like | None
Colors to use for each face.
color : instance of Color
The color to use.
|
entailment
|
def get_scene_bounds(self, dim=None):
"""Get the total bounds based on the visuals present in the scene
Parameters
----------
dim : int | None
Dimension to return.
Returns
-------
bounds : list | tuple
If ``dim is None``, returns a list of 3 tuples, otherwise
the bounds for the requested dimension.
"""
# todo: handle sub-children
# todo: handle transformations
# Init
bounds = [(np.inf, -np.inf), (np.inf, -np.inf), (np.inf, -np.inf)]
# Get bounds of all children
for ob in self.scene.children:
if hasattr(ob, 'bounds'):
for axis in (0, 1, 2):
if (dim is not None) and dim != axis:
continue
b = ob.bounds(axis)
if b is not None:
b = min(b), max(b) # Ensure correct order
bounds[axis] = (min(bounds[axis][0], b[0]),
max(bounds[axis][1], b[1]))
# Set defaults
for axis in (0, 1, 2):
if any(np.isinf(bounds[axis])):
bounds[axis] = -1, 1
if dim is not None:
return bounds[dim]
else:
return bounds
|
Get the total bounds based on the visuals present in the scene
Parameters
----------
dim : int | None
Dimension to return.
Returns
-------
bounds : list | tuple
If ``dim is None``, returns a list of 3 tuples, otherwise
the bounds for the requested dimension.
|
entailment
|
def _string_to_rgb(color):
"""Convert user string or hex color to color array (length 3 or 4)"""
if not color.startswith('#'):
if color.lower() not in _color_dict:
raise ValueError('Color "%s" unknown' % color)
color = _color_dict[color]
assert color[0] == '#'
# hex color
color = color[1:]
lc = len(color)
if lc in (3, 4):
color = ''.join(c + c for c in color)
lc = len(color)
if lc not in (6, 8):
raise ValueError('Hex color must have exactly six or eight '
'elements following the # sign')
color = np.array([int(color[i:i+2], 16) / 255. for i in range(0, lc, 2)])
return color
|
Convert user string or hex color to color array (length 3 or 4)
|
entailment
|
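A couple of hedged examples for the helper above; the named-color case assumes 'red' is present in _color_dict:
print(_string_to_rgb('#ff8000'))   # array([1. , 0.50196078, 0. ])
print(_string_to_rgb('#f80'))      # the short form expands to 'ff8800' first
print(_string_to_rgb('red'))       # named colors are looked up in _color_dict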
def _user_to_rgba(color, expand=True, clip=False):
"""Convert color(s) from any set of fmts (str/hex/arr) to RGB(A) array"""
if color is None:
color = np.zeros(4, np.float32)
if isinstance(color, string_types):
color = _string_to_rgb(color)
elif isinstance(color, ColorArray):
color = color.rgba
# We have to treat this specially
elif isinstance(color, (list, tuple)):
if any(isinstance(c, string_types) for c in color):
color = [_user_to_rgba(c, expand=expand, clip=clip) for c in color]
if any(len(c) > 1 for c in color):
raise RuntimeError('could not parse colors, are they nested?')
color = [c[0] for c in color]
color = np.atleast_2d(color).astype(np.float32)
if color.shape[1] not in (3, 4):
raise ValueError('color must have three or four elements')
if expand and color.shape[1] == 3: # only expand if requested
color = np.concatenate((color, np.ones((color.shape[0], 1))),
axis=1)
if color.min() < 0 or color.max() > 1:
if clip:
color = np.clip(color, 0, 1)
else:
raise ValueError("Color values must be between 0 and 1 (or use "
"clip=True to automatically clip the values).")
return color
|
Convert color(s) from any set of fmts (str/hex/arr) to RGB(A) array
|
entailment
|
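A short sketch of the conversion above; RGB tuples gain an alpha of 1.0 when expand is True, and hex strings go through _string_to_rgb first:
print(_user_to_rgba((0.2, 0.4, 0.6)))               # [[0.2 0.4 0.6 1. ]]
print(_user_to_rgba('#00ff00', expand=False))       # [[0. 1. 0.]], no alpha added
print(_user_to_rgba((0.5, 2.0, -1.0), clip=True))   # out-of-range values clipped to [0, 1]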
def _array_clip_val(val):
"""Helper to turn val into array and clip between 0 and 1"""
val = np.array(val)
if val.max() > 1 or val.min() < 0:
logger.warning('value will be clipped between 0 and 1')
val[...] = np.clip(val, 0, 1)
return val
|
Helper to turn val into array and clip between 0 and 1
|
entailment
|
def extend(self, colors):
"""Extend a ColorArray with new colors
Parameters
----------
colors : instance of ColorArray
The new colors.
"""
colors = ColorArray(colors)
self._rgba = np.vstack((self._rgba, colors._rgba))
return self
|
Extend a ColorArray with new colors
Parameters
----------
colors : instance of ColorArray
The new colors.
|
entailment
|
def rgba(self, val):
"""Set the color using an Nx4 array of RGBA floats"""
# Note: all other attribute sets get routed here!
# This method is meant to do the heavy lifting of setting data
rgba = _user_to_rgba(val, expand=False)
if self._rgba is None:
self._rgba = rgba # only on init
else:
self._rgba[:, :rgba.shape[1]] = rgba
|
Set the color using an Nx4 array of RGBA floats
|
entailment
|
def RGBA(self, val):
"""Set the color using an Nx4 array of RGBA uint8 values"""
# need to convert to normalized float
val = np.atleast_1d(val).astype(np.float32) / 255
self.rgba = val
|
Set the color using an Nx4 array of RGBA uint8 values
|
entailment
|
def RGB(self, val):
"""Set the color using an Nx3 array of RGB uint8 values"""
# need to convert to normalized float
val = np.atleast_1d(val).astype(np.float32) / 255.
self.rgba = val
|
Set the color using an Nx3 array of RGB uint8 values
|
entailment
|
def value(self, val):
"""Set the color using length-N array of (from HSV)"""
hsv = self._hsv
hsv[:, 2] = _array_clip_val(val)
self.rgba = _hsv_to_rgb(hsv)
|
Set the color value (the V channel of HSV) using a length-N array
|
entailment
|
def lighter(self, dv=0.1, copy=True):
"""Produce a lighter color (if possible)
Parameters
----------
dv : float
Amount to increase the color value by.
copy : bool
If False, operation will be carried out in-place.
Returns
-------
color : instance of ColorArray
The lightened Color.
"""
color = self.copy() if copy else self
color.value += dv
return color
|
Produce a lighter color (if possible)
Parameters
----------
dv : float
Amount to increase the color value by.
copy : bool
If False, operation will be carried out in-place.
Returns
-------
color : instance of ColorArray
The lightened Color.
|
entailment
|
def darker(self, dv=0.1, copy=True):
"""Produce a darker color (if possible)
Parameters
----------
dv : float
Amount to decrease the color value by.
copy : bool
If False, operation will be carried out in-place.
Returns
-------
color : instance of ColorArray
The darkened Color.
"""
color = self.copy() if copy else self
color.value -= dv
return color
|
Produce a darker color (if possible)
Parameters
----------
dv : float
Amount to decrease the color value by.
copy : bool
If False, operation will be carried out in-place.
Returns
-------
color : instance of ColorArray
The darkened Color.
|
entailment
|
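A quick sketch of lighter()/darker() above, assuming ColorArray is importable from vispy.color:
from vispy.color import ColorArray   # assumed import path
c = ColorArray('#808080')
print(c.darker(0.2).rgba)    # HSV value channel reduced by 0.2
print(c.lighter(0.2).rgba)   # HSV value channel increased by 0.2
print(c.rgba)                # the original is untouched because copy=True by default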
def viewbox_mouse_event(self, event):
""" The ViewBox received a mouse event; update transform
accordingly.
Default implementation adjusts scale factor when scrolling.
Parameters
----------
event : instance of Event
The event.
"""
BaseCamera.viewbox_mouse_event(self, event)
if event.type == 'mouse_wheel':
s = 1.1 ** - event.delta[1]
self._scale_factor *= s
if self._distance is not None:
self._distance *= s
self.view_changed()
|
The ViewBox received a mouse event; update transform
accordingly.
Default implementation adjusts scale factor when scrolling.
Parameters
----------
event : instance of Event
The event.
|
entailment
|
def _set_range(self, init):
""" Reset the camera view using the known limits.
"""
if init and (self._scale_factor is not None):
return # We don't have to set our scale factor
# Get window size (and store factor now to sync with resizing)
w, h = self._viewbox.size
w, h = float(w), float(h)
# Get range and translation for x and y
x1, y1, z1 = self._xlim[0], self._ylim[0], self._zlim[0]
x2, y2, z2 = self._xlim[1], self._ylim[1], self._zlim[1]
rx, ry, rz = (x2 - x1), (y2 - y1), (z2 - z1)
# Correct ranges for window size. Note that the window width
# influences the x and y data range, while the height influences
# the z data range.
if w / h > 1:
rx /= w / h
ry /= w / h
else:
rz /= h / w
# Convert to screen coordinates. In screen x, only x and y have effect.
# In screen y, all three dimensions have effect. The idea of the lines
# below is to calculate the range on screen so that it will fit the
# data under any rotation.
rxs = (rx**2 + ry**2)**0.5
rys = (rx**2 + ry**2 + rz**2)**0.5
self.scale_factor = max(rxs, rys) * 1.04
|
Reset the camera view using the known limits.
|
entailment
|
def viewbox_mouse_event(self, event):
"""
The viewbox received a mouse event; update transform
accordingly.
Parameters
----------
event : instance of Event
The event.
"""
if event.handled or not self.interactive:
return
PerspectiveCamera.viewbox_mouse_event(self, event)
if event.type == 'mouse_release':
self._event_value = None # Reset
elif event.type == 'mouse_press':
event.handled = True
elif event.type == 'mouse_move':
if event.press_event is None:
return
modifiers = event.mouse_event.modifiers
p1 = event.mouse_event.press_event.pos
p2 = event.mouse_event.pos
d = p2 - p1
if 1 in event.buttons and not modifiers:
# Rotate
self._update_rotation(event)
elif 2 in event.buttons and not modifiers:
# Zoom
if self._event_value is None:
self._event_value = (self._scale_factor, self._distance)
zoomy = (1 + self.zoom_factor) ** d[1]
self.scale_factor = self._event_value[0] * zoomy
# Modify distance if its given
if self._distance is not None:
self._distance = self._event_value[1] * zoomy
self.view_changed()
elif 1 in event.buttons and keys.SHIFT in modifiers:
# Translate
norm = np.mean(self._viewbox.size)
if self._event_value is None or len(self._event_value) == 2:
self._event_value = self.center
dist = (p1 - p2) / norm * self._scale_factor
dist[1] *= -1
# Black magic part 1: turn 2D into 3D translations
dx, dy, dz = self._dist_to_trans(dist)
# Black magic part 2: take up-vector and flipping into account
ff = self._flip_factors
up, forward, right = self._get_dim_vectors()
dx, dy, dz = right * dx + forward * dy + up * dz
dx, dy, dz = ff[0] * dx, ff[1] * dy, dz * ff[2]
c = self._event_value
self.center = c[0] + dx, c[1] + dy, c[2] + dz
elif 2 in event.buttons and keys.SHIFT in modifiers:
# Change fov
if self._event_value is None:
self._event_value = self._fov
fov = self._event_value - d[1] / 5.0
self.fov = min(180.0, max(0.0, fov))
|
The viewbox received a mouse event; update transform
accordingly.
Parameters
----------
event : instance of Event
The event.
|
entailment
|
def _update_camera_pos(self):
""" Set the camera position and orientation"""
# transform will be updated several times; do not update camera
# transform until we are done.
ch_em = self.events.transform_change
with ch_em.blocker(self._update_transform):
tr = self.transform
tr.reset()
up, forward, right = self._get_dim_vectors()
# Create mapping so correct dim is up
pp1 = np.array([(0, 0, 0), (0, 0, -1), (1, 0, 0), (0, 1, 0)])
pp2 = np.array([(0, 0, 0), forward, right, up])
tr.set_mapping(pp1, pp2)
tr.translate(-self._actual_distance * forward)
self._rotate_tr()
tr.scale([1.0/a for a in self._flip_factors])
tr.translate(np.array(self.center))
|
Set the camera position and orientation
|
entailment
|
def is_interactive(self):
""" Determine if the user requested interactive mode.
"""
# The Python interpreter sets sys.flags correctly, so use them!
if sys.flags.interactive:
return True
# IPython does not set sys.flags when -i is specified, so first
# check whether it has already been imported.
if '__IPYTHON__' not in dir(six.moves.builtins):
return False
# Then we check the application singleton and determine based on
# a variable it sets.
try:
from IPython.config.application import Application as App
return App.initialized() and App.instance().interact
except (ImportError, AttributeError):
return False
|
Determine if the user requested interactive mode.
|
entailment
|
def run(self, allow_interactive=True):
""" Enter the native GUI event loop.
Parameters
----------
allow_interactive : bool
Is the application allowed to handle interactive mode for console
terminals? By default, typing ``python -i main.py`` results in
an interactive shell that also regularly calls the VisPy event
loop. In this specific case, the run() function will terminate
immediately and rely on the interpreter's input loop to be run
after script execution.
"""
if allow_interactive and self.is_interactive():
inputhook.set_interactive(enabled=True, app=self)
else:
return self._backend._vispy_run()
|
Enter the native GUI event loop.
Parameters
----------
allow_interactive : bool
Is the application allowed to handle interactive mode for console
terminals? By default, typing ``python -i main.py`` results in
an interactive shell that also regularly calls the VisPy event
loop. In this specific case, the run() function will terminate
immediately and rely on the interpreter's input loop to be run
after script execution.
|
entailment
|
def _use(self, backend_name=None):
"""Select a backend by name. See class docstring for details.
"""
# See if we're in a specific testing mode, if so DONT check to see
# if it's a valid backend. If it isn't, it's a good thing we
# get an error later because we should have decorated our test
# with requires_application()
test_name = os.getenv('_VISPY_TESTING_APP', None)
# Check whether the given name is valid
if backend_name is not None:
if backend_name.lower() == 'default':
backend_name = None # Explicitly use default, avoid using test
elif backend_name.lower() not in BACKENDMAP:
raise ValueError('backend_name must be one of %s or None, not '
'%r' % (BACKEND_NAMES, backend_name))
elif test_name is not None:
backend_name = test_name.lower()
assert backend_name in BACKENDMAP
# Should we try and load any backend, or just this specific one?
try_others = backend_name is None
# Get backends to try ...
imported_toolkits = [] # Backends for which the native lib is imported
backends_to_try = []
if not try_others:
# We should never hit this, since we check above
assert backend_name.lower() in BACKENDMAP.keys()
# Add it
backends_to_try.append(backend_name.lower())
else:
# See if a backend is loaded
for name, module_name, native_module_name in CORE_BACKENDS:
if native_module_name and native_module_name in sys.modules:
imported_toolkits.append(name.lower())
backends_to_try.append(name.lower())
# See if a default is given
default_backend = config['default_backend'].lower()
if default_backend.lower() in BACKENDMAP.keys():
if default_backend not in backends_to_try:
backends_to_try.append(default_backend)
# After this, try each one
for name, module_name, native_module_name in CORE_BACKENDS:
name = name.lower()
if name not in backends_to_try:
backends_to_try.append(name)
# Now try each one
for key in backends_to_try:
name, module_name, native_module_name = BACKENDMAP[key]
TRIED_BACKENDS.append(name)
mod_name = 'backends.' + module_name
__import__(mod_name, globals(), level=1)
mod = getattr(backends, module_name)
if not mod.available:
msg = ('Could not import backend "%s":\n%s'
% (name, str(mod.why_not)))
if not try_others:
# Fail if user wanted to use a specific backend
raise RuntimeError(msg)
elif key in imported_toolkits:
                # Warn if we were unable to use an already imported toolkit
msg = ('Although %s is already imported, the %s backend '
'could not\nbe used ("%s"). \nNote that running '
'multiple GUI toolkits simultaneously can cause '
'side effects.' %
(native_module_name, name, str(mod.why_not)))
logger.warning(msg)
else:
# Inform otherwise
logger.info(msg)
else:
# Success!
self._backend_module = mod
logger.debug('Selected backend %s' % module_name)
break
else:
raise RuntimeError('Could not import any of the backends. '
'You need to install any of %s. We recommend '
'PyQt' % [b[0] for b in CORE_BACKENDS])
# Store classes for app backend and canvas backend
self._backend = self.backend_module.ApplicationBackend()
|
Select a backend by name. See class docstring for details.
|
entailment
|
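The selection above boils down to building an ordered candidate list (the requested backend, already-imported toolkits, the configured default, then the rest) and importing each one until a backend reports itself available. A minimal standalone sketch of that try-in-order pattern; the backend names and the load_backend helper are hypothetical stand-ins, not VisPy's actual backend machinery:

CANDIDATES = ['pyqt5', 'pyglet', 'glfw']  # hypothetical candidate order

def load_backend(name):
    # Pretend only 'glfw' imports cleanly in this environment.
    if name != 'glfw':
        raise ImportError('%s is not installed' % name)
    return {'name': name}

def select_backend(requested=None):
    order = [requested] if requested else list(CANDIDATES)
    for name in order:
        try:
            return load_backend(name)
        except ImportError as exc:
            if requested:
                # Fail hard when a specific backend was asked for
                raise RuntimeError('Could not import backend %r: %s'
                                   % (name, exc))
    raise RuntimeError('Could not import any of %r' % (order,))

print(select_backend())  # -> {'name': 'glfw'}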
def _set_config(c):
"""Set gl configuration"""
gl_attribs = [glcanvas.WX_GL_RGBA,
glcanvas.WX_GL_DEPTH_SIZE, c['depth_size'],
glcanvas.WX_GL_STENCIL_SIZE, c['stencil_size'],
glcanvas.WX_GL_MIN_RED, c['red_size'],
glcanvas.WX_GL_MIN_GREEN, c['green_size'],
glcanvas.WX_GL_MIN_BLUE, c['blue_size'],
glcanvas.WX_GL_MIN_ALPHA, c['alpha_size']]
gl_attribs += [glcanvas.WX_GL_DOUBLEBUFFER] if c['double_buffer'] else []
gl_attribs += [glcanvas.WX_GL_STEREO] if c['stereo'] else []
return gl_attribs
|
Set gl configuration
|
entailment
|
def _get_mods(evt):
"""Helper to extract list of mods from event"""
mods = []
mods += [keys.CONTROL] if evt.ControlDown() else []
mods += [keys.ALT] if evt.AltDown() else []
mods += [keys.SHIFT] if evt.ShiftDown() else []
mods += [keys.META] if evt.MetaDown() else []
return mods
|
Helper to extract list of mods from event
|
entailment
|
def _process_key(evt):
"""Helper to convert from wx keycode to vispy keycode"""
key = evt.GetKeyCode()
if key in KEYMAP:
return KEYMAP[key], ''
if 97 <= key <= 122:
key -= 32
if key >= 32 and key <= 127:
return keys.Key(chr(key)), chr(key)
else:
return None, None
|
Helper to convert from wx keycode to vispy keycode
|
entailment
|
def is_child(self, node):
"""Check if a node is a child of the current node
Parameters
----------
node : instance of Node
The potential child.
Returns
-------
child : bool
Whether or not the node is a child.
"""
if node in self.children:
return True
for c in self.children:
if c.is_child(node):
return True
return False
|
Check if a node is a child of the current node
Parameters
----------
node : instance of Node
The potential child.
Returns
-------
child : bool
Whether or not the node is a child.
|
entailment
|
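is_child above is a plain depth-first search over the children lists. A self-contained sketch with a toy node class (ToyNode is an illustrative stand-in, not the VisPy Node class):

class ToyNode:
    def __init__(self, name):
        self.name = name
        self.children = []

    def add(self, child):
        self.children.append(child)
        return child

    def is_child(self, node):
        # Same recursion as the method above: direct check, then descend.
        if node in self.children:
            return True
        for c in self.children:
            if c.is_child(node):
                return True
        return False

root = ToyNode('root')
a = root.add(ToyNode('a'))
b = a.add(ToyNode('b'))
print(root.is_child(b))  # True: found through the grandchild recursion
print(b.is_child(root))  # False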
def scene_node(self):
"""The first ancestor of this node that is a SubScene instance, or self
if no such node exists.
"""
if self._scene_node is None:
from .subscene import SubScene
p = self.parent
while True:
if isinstance(p, SubScene) or p is None:
self._scene_node = p
break
p = p.parent
if self._scene_node is None:
self._scene_node = self
return self._scene_node
|
The first ancestor of this node that is a SubScene instance, or self
if no such node exists.
|
entailment
|
def update(self):
"""
Emit an event to inform listeners that properties of this Node have
changed. Also request a canvas update.
"""
self.events.update()
c = getattr(self, 'canvas', None)
if c is not None:
c.update(node=self)
|
Emit an event to inform listeners that properties of this Node have
changed. Also request a canvas update.
|
entailment
|
def set_transform(self, type_, *args, **kwargs):
""" Create a new transform of *type* and assign it to this node.
All extra arguments are used in the construction of the transform.
Parameters
----------
type_ : str
The transform type.
*args : tuple
Arguments.
**kwargs : dict
            Keyword arguments.
"""
self.transform = create_transform(type_, *args, **kwargs)
|
Create a new transform of *type* and assign it to this node.
All extra arguments are used in the construction of the transform.
Parameters
----------
type_ : str
The transform type.
*args : tuple
Arguments.
**kwargs : dict
Keyword arguments.
|
entailment
|
def _update_trsys(self, event):
"""Called when has changed.
This allows the node and its children to react (notably, VisualNode
uses this to update its TransformSystem).
Note that this method is only called when one transform is replaced by
another; it is not called if an existing transform internally changes
its state.
"""
for ch in self.children:
ch._update_trsys(event)
self.events.transform_change()
self.update()
|
Called when the transform has changed.
This allows the node and its children to react (notably, VisualNode
uses this to update its TransformSystem).
Note that this method is only called when one transform is replaced by
another; it is not called if an existing transform internally changes
its state.
|
entailment
|
def parent_chain(self):
"""
Return the list of parents starting from this node. The chain ends
at the first node with no parents.
"""
chain = [self]
while True:
try:
parent = chain[-1].parent
except Exception:
break
if parent is None:
break
chain.append(parent)
return chain
|
Return the list of parents starting from this node. The chain ends
at the first node with no parents.
|
entailment
|
def _describe_tree(self, prefix, with_transform):
"""Helper function to actuall construct the tree"""
extra = ': "%s"' % self.name if self.name is not None else ''
if with_transform:
extra += (' [%s]' % self.transform.__class__.__name__)
output = ''
if len(prefix) > 0:
output += prefix[:-3]
output += ' +--'
output += '%s%s\n' % (self.__class__.__name__, extra)
n_children = len(self.children)
for ii, child in enumerate(self.children):
sub_prefix = prefix + (' ' if ii+1 == n_children else ' |')
output += child._describe_tree(sub_prefix, with_transform)
return output
|
Helper function to actually construct the tree
|
entailment
|
def common_parent(self, node):
"""
Return the common parent of two entities
If the entities have no common parent, return None.
Parameters
----------
node : instance of Node
The other node.
Returns
-------
parent : instance of Node | None
The parent.
"""
p1 = self.parent_chain()
p2 = node.parent_chain()
for p in p1:
if p in p2:
return p
return None
|
Return the common parent of two entities
If the entities have no common parent, return None.
Parameters
----------
node : instance of Node
The other node.
Returns
-------
parent : instance of Node | None
The parent.
|
entailment
|
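common_parent simply intersects the two parent chains and returns the first ancestor of this node that also appears in the other node's chain. A standalone sketch with plain lists standing in for parent_chain() results (the node names are hypothetical):

# Chains are ordered from the node up to the root, like parent_chain().
chain_d = ['D', 'C', 'B', 'A']  # parent chain of node D
chain_f = ['F', 'E', 'B', 'A']  # parent chain of node F

def common_parent(p1, p2):
    for p in p1:
        if p in p2:
            return p  # first ancestor of p1's node also present in p2
    return None

print(common_parent(chain_d, chain_f))  # 'B'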
def node_path_to_child(self, node):
"""Return a list describing the path from this node to a child node
If *node* is not a (grand)child of this node, then raise RuntimeError.
Parameters
----------
node : instance of Node
The child node.
Returns
-------
path : list | None
The path.
"""
if node is self:
return []
# Go up from the child node as far as we can
path1 = [node]
child = node
while child.parent is not None:
child = child.parent
path1.append(child)
# Early exit
if child is self:
return list(reversed(path1))
# Verify that we're not cut off
if path1[-1].parent is None:
raise RuntimeError('%r is not a child of %r' % (node, self))
def _is_child(path, parent, child):
path.append(parent)
if child in parent.children:
return path
else:
for c in parent.children:
possible_path = _is_child(path[:], c, child)
if possible_path:
return possible_path
return None
# Search from the parent towards the child
path2 = _is_child([], self, path1[-1])
if not path2:
raise RuntimeError('%r is not a child of %r' % (node, self))
# Return
return path2 + list(reversed(path1))
|
Return a list describing the path from this node to a child node
If *node* is not a (grand)child of this node, then raise RuntimeError.
Parameters
----------
node : instance of Node
The child node.
Returns
-------
path : list | None
The path.
|
entailment
|
def node_path(self, node):
"""Return two lists describing the path from this node to another
Parameters
----------
node : instance of Node
The other node.
Returns
-------
p1 : list
First path (see below).
p2 : list
Second path (see below).
Notes
-----
The first list starts with this node and ends with the common parent
between the endpoint nodes. The second list contains the remainder of
the path from the common parent to the specified ending node.
For example, consider the following scenegraph::
            A --- B --- C --- D
                   \
                    --- E --- F
Calling `D.node_path(F)` will return::
([D, C, B], [E, F])
"""
p1 = self.parent_chain()
p2 = node.parent_chain()
cp = None
for p in p1:
if p in p2:
cp = p
break
if cp is None:
raise RuntimeError("No single-path common parent between nodes %s "
"and %s." % (self, node))
p1 = p1[:p1.index(cp)+1]
p2 = p2[:p2.index(cp)][::-1]
return p1, p2
|
Return two lists describing the path from this node to another
Parameters
----------
node : instance of Node
The other node.
Returns
-------
p1 : list
First path (see below).
p2 : list
Second path (see below).
Notes
-----
The first list starts with this node and ends with the common parent
between the endpoint nodes. The second list contains the remainder of
the path from the common parent to the specified ending node.
For example, consider the following scenegraph::
            A --- B --- C --- D
                   \
                    --- E --- F
Calling `D.node_path(F)` will return::
([D, C, B], [E, F])
|
entailment
|
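Using the A--F scenegraph from the docstring, the two returned lists can be reproduced by applying the same slicing to plain parent chains (strings stand in for Node instances here):

# Reproducing node_path() on the docstring's example graph with plain lists.
chain_d = ['D', 'C', 'B', 'A']  # D.parent_chain()
chain_f = ['F', 'E', 'B', 'A']  # F.parent_chain()

cp = next(p for p in chain_d if p in chain_f)  # common parent: 'B'
p1 = chain_d[:chain_d.index(cp) + 1]           # ['D', 'C', 'B']
p2 = chain_f[:chain_f.index(cp)][::-1]         # ['E', 'F']
print(p1, p2)  # ['D', 'C', 'B'] ['E', 'F']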
def node_path_transforms(self, node):
"""Return the list of transforms along the path to another node.
The transforms are listed in reverse order, such that the last
transform should be applied first when mapping from this node to
the other.
Parameters
----------
node : instance of Node
The other node.
Returns
-------
transforms : list
A list of Transform instances.
"""
a, b = self.node_path(node)
return ([n.transform for n in a[:-1]] +
[n.transform.inverse for n in b])[::-1]
|
Return the list of transforms along the path to another node.
The transforms are listed in reverse order, such that the last
transform should be applied first when mapping from this node to
the other.
Parameters
----------
node : instance of Node
The other node.
Returns
-------
transforms : list
A list of Transform instances.
|
entailment
|
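In the composition above, the transforms on the way up to the common parent are applied as-is (the common parent's own transform is skipped) and those on the way down to the target node are inverted. A purely illustrative sketch with scalar scale factors standing in for Transform objects, just to show the ordering:

# Illustrative only: scalar "transforms" (multiplications) stand in for
# Transform objects. Each node's factor maps local coords to its parent.
tr = {'D': 2.0, 'C': 3.0, 'E': 5.0, 'F': 7.0}

# Path D -> F (common parent B): up through D and C, down through E and F.
upward = [tr['D'], tr['C']]                # applied as-is
downward = [1.0 / tr['E'], 1.0 / tr['F']]  # inverses
transforms = (upward + downward)[::-1]     # listed in reverse order

x = 1.0  # a coordinate in D's local frame
for t in reversed(transforms):  # last transform is applied first
    x = x * t
print(x)  # 2*3 / (5*7) = 6/35 ~ 0.1714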
def read(cls, fname):
""" read(fname, fmt)
This classmethod is the entry point for reading OBJ files.
Parameters
----------
fname : str
The name of the file to read.
fmt : str
Can be "obj" or "gz" to specify the file format.
"""
# Open file
fmt = op.splitext(fname)[1].lower()
assert fmt in ('.obj', '.gz')
opener = open if fmt == '.obj' else gzip_open
with opener(fname, 'rb') as f:
try:
reader = WavefrontReader(f)
while True:
reader.readLine()
except EOFError:
pass
# Done
t0 = time.time()
mesh = reader.finish()
logger.debug('reading mesh took ' +
str(time.time() - t0) +
' seconds')
return mesh
|
read(fname)
This classmethod is the entry point for reading OBJ files.
The file format (".obj" or ".gz") is inferred from the extension.
Parameters
----------
fname : str
The name of the file to read.
|
entailment
|
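A hedged usage sketch of the classmethod above; 'teapot.obj' is a placeholder path, and the import location is assumed to match current VisPy (vispy.io.wavefront):

# Assumed import location; read() returns the tuple produced by finish():
# (vertices, faces, normals, texcoords).
from vispy.io.wavefront import WavefrontReader

vertices, faces, normals, texcoords = WavefrontReader.read('teapot.obj')
print(vertices.shape, None if faces is None else faces.shape)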
def readLine(self):
""" The method that reads a line and processes it.
"""
# Read line
line = self._f.readline().decode('ascii', 'ignore')
if not line:
raise EOFError()
line = line.strip()
if line.startswith('v '):
# self._vertices.append( *self.readTuple(line) )
self._v.append(self.readTuple(line))
elif line.startswith('vt '):
self._vt.append(self.readTuple(line, 3))
elif line.startswith('vn '):
self._vn.append(self.readTuple(line))
elif line.startswith('f '):
self._faces.append(self.readFace(line))
elif line.startswith('#'):
pass # Comment
elif line.startswith('mtllib '):
logger.warning('Notice reading .OBJ: material properties are '
'ignored.')
elif any(line.startswith(x) for x in ('g ', 's ', 'o ', 'usemtl ')):
pass # Ignore groups and smoothing groups, obj names, material
elif not line.strip():
pass
else:
logger.warning('Notice reading .OBJ: ignoring %s command.'
% line.strip())
|
The method that reads a line and processes it.
|
entailment
|
def readTuple(self, line, n=3):
""" Reads a tuple of numbers. e.g. vertices, normals or teture coords.
"""
numbers = [num for num in line.split(' ') if num]
return [float(num) for num in numbers[1:n + 1]]
|
Reads a tuple of numbers, e.g. vertices, normals or texture coords.
|
entailment
|
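For instance, a vertex line like 'v 1.0 2.0 3.0' splits into whitespace-separated fields, the keyword is dropped, and at most n values are converted. A standalone sketch of the same slicing:

# Standalone sketch of the readTuple() slicing on a typical OBJ vertex line.
line = 'v 1.0 2.0 3.0'
n = 3
numbers = [num for num in line.split(' ') if num]
values = [float(num) for num in numbers[1:n + 1]]  # skip the 'v' keyword
print(values)  # [1.0, 2.0, 3.0]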
def readFace(self, line):
""" Each face consists of three or more sets of indices. Each set
consists of 1, 2 or 3 indices to vertices/normals/texcords.
"""
# Get parts (skip first)
indexSets = [num for num in line.split(' ') if num][1:]
final_face = []
for indexSet in indexSets:
# Did we see this exact index earlier? If so, it's easy
final_index = self._facemap.get(indexSet)
if final_index is not None:
final_face.append(final_index)
continue
# If not, we need to sync the vertices/normals/texcords ...
# Get and store final index
final_index = len(self._vertices)
final_face.append(final_index)
self._facemap[indexSet] = final_index
# What indices were given?
indices = [i for i in indexSet.split('/')]
# Store new set of vertex/normal/texcords.
# If there is a single face that does not specify the texcord
# index, the texcords are ignored. Likewise for the normals.
if True:
vertex_index = self._absint(indices[0], len(self._v))
self._vertices.append(self._v[vertex_index])
if self._texcords is not None:
if len(indices) > 1 and indices[1]:
texcord_index = self._absint(indices[1], len(self._vt))
self._texcords.append(self._vt[texcord_index])
else:
if self._texcords:
                        logger.warning('Ignoring texture coordinates because '
                                       'they are not specified for all faces.')
self._texcords = None
if self._normals is not None:
if len(indices) > 2 and indices[2]:
normal_index = self._absint(indices[2], len(self._vn))
self._normals.append(self._vn[normal_index])
else:
if self._normals:
                        logger.warning('Ignoring normals because they are not '
                                       'specified for all faces.')
self._normals = None
# Check face
if self._faces and len(self._faces[0]) != len(final_face):
raise RuntimeError(
'Vispy requires that all faces are either triangles or quads.')
# Done
return final_face
|
Each face consists of three or more sets of indices. Each set
consists of 1, 2 or 3 indices to vertices/normals/texcords.
|
entailment
|
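Each whitespace-separated set on an 'f' line is a '/'-joined group of 1-based indices (vertex, optional texcoord, optional normal), and the _facemap cache ensures an identical set is only expanded once. A standalone sketch of splitting those index sets (the full re-indexing and negative-index handling lives in readFace above):

# Standalone sketch: splitting the index sets on an OBJ face line.
line = 'f 1/1/1 2/2/2 3/3/3'
index_sets = [s for s in line.split(' ') if s][1:]  # drop the 'f' keyword
for s in index_sets:
    v, vt, vn = (s.split('/') + ['', ''])[:3]
    print(v, vt or None, vn or None)
# 1 1 1
# 2 2 2
# 3 3 3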
def finish(self):
""" Converts gathere lists to numpy arrays and creates
BaseMesh instance.
"""
self._vertices = np.array(self._vertices, 'float32')
if self._faces:
self._faces = np.array(self._faces, 'uint32')
else:
# Use vertices only
self._vertices = np.array(self._v, 'float32')
self._faces = None
if self._normals:
self._normals = np.array(self._normals, 'float32')
else:
self._normals = self._calculate_normals()
if self._texcords:
self._texcords = np.array(self._texcords, 'float32')
else:
self._texcords = None
return self._vertices, self._faces, self._normals, self._texcords
|
Converts gathered lists to numpy arrays and creates a
BaseMesh instance.
|
entailment
|
def write(cls, fname, vertices, faces, normals,
texcoords, name='', reshape_faces=True):
""" This classmethod is the entry point for writing mesh data to OBJ.
Parameters
----------
fname : string
The filename to write to. Must end with ".obj" or ".gz".
vertices : numpy array
The vertex data
faces : numpy array
            The face data
        normals : numpy array
            The normal data per vertex
texcoords : numpy array
The texture coordinate per vertex
name : str
The name of the object (e.g. 'teapot')
reshape_faces : bool
Reshape the `faces` array to (Nf, 3). Set to `False`
if you need to write a mesh with non triangular faces.
"""
# Open file
fmt = op.splitext(fname)[1].lower()
if fmt not in ('.obj', '.gz'):
raise ValueError('Filename must end with .obj or .gz, not "%s"'
% (fmt,))
opener = open if fmt == '.obj' else gzip_open
f = opener(fname, 'wb')
try:
writer = WavefrontWriter(f)
writer.writeMesh(vertices, faces, normals,
texcoords, name, reshape_faces=reshape_faces)
except EOFError:
pass
finally:
f.close()
|
This classmethod is the entry point for writing mesh data to OBJ.
Parameters
----------
fname : string
The filename to write to. Must end with ".obj" or ".gz".
vertices : numpy array
The vertex data
faces : numpy array
The face data
normals : numpy array
The normal data per vertex
texcoords : numpy array
The texture coordinate per vertex
name : str
The name of the object (e.g. 'teapot')
reshape_faces : bool
Reshape the `faces` array to (Nf, 3). Set to `False`
if you need to write a mesh with non triangular faces.
|
entailment
|
def writeTuple(self, val, what):
""" Writes a tuple of numbers (on one line).
"""
        # Limit to three values, so RGBA data drops the alpha channel
# Format can handle up to 3 texcords
val = val[:3]
# Make string
val = ' '.join([str(v) for v in val])
# Write line
self.writeLine('%s %s' % (what, val))
|
Writes a tuple of numbers (on one line).
|
entailment
|
def writeFace(self, val, what='f'):
""" Write the face info to the net line.
"""
# OBJ counts from 1
val = [v + 1 for v in val]
# Make string
if self._hasValues and self._hasNormals:
val = ' '.join(['%i/%i/%i' % (v, v, v) for v in val])
elif self._hasNormals:
val = ' '.join(['%i//%i' % (v, v) for v in val])
elif self._hasValues:
val = ' '.join(['%i/%i' % (v, v) for v in val])
else:
val = ' '.join(['%i' % v for v in val])
# Write line
self.writeLine('%s %s' % (what, val))
|
Write the face info to the next line.
|
entailment
|
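The four string layouts correspond to which per-vertex attributes are present. A standalone sketch showing the output for a triangle with 0-based vertex indices [0, 1, 2] (OBJ output is 1-based); face_line is a hypothetical helper that mirrors the branches above:

# Standalone sketch of the four OBJ face layouts produced by writeFace().
face = [v + 1 for v in [0, 1, 2]]  # OBJ counts from 1

def face_line(vals, has_texcoords, has_normals):
    if has_texcoords and has_normals:
        return ' '.join('%i/%i/%i' % (v, v, v) for v in vals)
    elif has_normals:
        return ' '.join('%i//%i' % (v, v) for v in vals)
    elif has_texcoords:
        return ' '.join('%i/%i' % (v, v) for v in vals)
    return ' '.join('%i' % v for v in vals)

print('f', face_line(face, True, True))    # f 1/1/1 2/2/2 3/3/3
print('f', face_line(face, False, True))   # f 1//1 2//2 3//3
print('f', face_line(face, True, False))   # f 1/1 2/2 3/3
print('f', face_line(face, False, False))  # f 1 2 3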
def writeMesh(self, vertices, faces, normals, values,
name='', reshape_faces=True):
""" Write the given mesh instance.
"""
# Store properties
self._hasNormals = normals is not None
self._hasValues = values is not None
self._hasFaces = faces is not None
# Get faces and number of vertices
if faces is None:
faces = np.arange(len(vertices))
reshape_faces = True
if reshape_faces:
Nfaces = faces.size // 3
faces = faces.reshape((Nfaces, 3))
else:
is_triangular = np.array([len(f) == 3
for f in faces])
            if not np.all(is_triangular):
                logger.warning('Not all faces appear to be triangular; note '
                               'that the file cannot be read back in vispy.')
# Number of vertices
N = vertices.shape[0]
# Get string with stats
stats = []
stats.append('%i vertices' % N)
if self._hasValues:
stats.append('%i texcords' % N)
else:
stats.append('no texcords')
if self._hasNormals:
stats.append('%i normals' % N)
else:
stats.append('no normals')
stats.append('%i faces' % faces.shape[0])
# Write header
self.writeLine('# Wavefront OBJ file')
self.writeLine('# Created by vispy.')
self.writeLine('#')
if name:
self.writeLine('# object %s' % name)
else:
self.writeLine('# unnamed object')
self.writeLine('# %s' % ', '.join(stats))
self.writeLine('')
# Write data
if True:
for i in range(N):
self.writeTuple(vertices[i], 'v')
if self._hasNormals:
for i in range(N):
self.writeTuple(normals[i], 'vn')
if self._hasValues:
for i in range(N):
self.writeTuple(values[i], 'vt')
if True:
for i in range(faces.shape[0]):
self.writeFace(faces[i])
|
Write the given mesh instance.
|
entailment
|
def _fast_cross_3d(x, y):
"""Compute cross product between list of 3D vectors
Much faster than np.cross() when the number of cross products
becomes large (>500). This is because np.cross() methods become
less memory efficient at this stage.
Parameters
----------
x : array
Input array 1.
y : array
Input array 2.
Returns
-------
z : array
Cross product of x and y.
Notes
-----
x and y must both be 2D row vectors. One must have length 1, or both
lengths must match.
"""
assert x.ndim == 2
assert y.ndim == 2
assert x.shape[1] == 3
assert y.shape[1] == 3
assert (x.shape[0] == 1 or y.shape[0] == 1) or x.shape[0] == y.shape[0]
if max([x.shape[0], y.shape[0]]) >= 500:
return np.c_[x[:, 1] * y[:, 2] - x[:, 2] * y[:, 1],
x[:, 2] * y[:, 0] - x[:, 0] * y[:, 2],
x[:, 0] * y[:, 1] - x[:, 1] * y[:, 0]]
else:
return np.cross(x, y)
|
Compute cross product between list of 3D vectors
Much faster than np.cross() when the number of cross products
becomes large (>500). This is because np.cross() methods become
less memory efficient at this stage.
Parameters
----------
x : array
Input array 1.
y : array
Input array 2.
Returns
-------
z : array
Cross product of x and y.
Notes
-----
x and y must both be 2D row vectors. One must have length 1, or both
lengths must match.
|
entailment
|
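The column expression is just the component-wise cross product written out. A quick NumPy check that it matches np.cross on random data, with array sizes above the 500-row threshold mentioned in the docstring:

import numpy as np

# Verify that the manual column formula matches np.cross.
rng = np.random.RandomState(0)
x = rng.randn(1000, 3)
y = rng.randn(1000, 3)
manual = np.c_[x[:, 1] * y[:, 2] - x[:, 2] * y[:, 1],
               x[:, 2] * y[:, 0] - x[:, 0] * y[:, 2],
               x[:, 0] * y[:, 1] - x[:, 1] * y[:, 0]]
print(np.allclose(manual, np.cross(x, y)))  # True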
def _calculate_normals(rr, tris):
"""Efficiently compute vertex normals for triangulated surface"""
# ensure highest precision for our summation/vectorization "trick"
rr = rr.astype(np.float64)
# first, compute triangle normals
r1 = rr[tris[:, 0], :]
r2 = rr[tris[:, 1], :]
r3 = rr[tris[:, 2], :]
tri_nn = _fast_cross_3d((r2 - r1), (r3 - r1))
# Triangle normals and areas
size = np.sqrt(np.sum(tri_nn * tri_nn, axis=1))
size[size == 0] = 1.0 # prevent ugly divide-by-zero
tri_nn /= size[:, np.newaxis]
npts = len(rr)
# the following code replaces this, but is faster (vectorized):
#
# for p, verts in enumerate(tris):
# nn[verts, :] += tri_nn[p, :]
#
nn = np.zeros((npts, 3))
for verts in tris.T: # note this only loops 3x (number of verts per tri)
for idx in range(3): # x, y, z
nn[:, idx] += np.bincount(verts.astype(np.int32),
tri_nn[:, idx], minlength=npts)
size = np.sqrt(np.sum(nn * nn, axis=1))
size[size == 0] = 1.0 # prevent ugly divide-by-zero
nn /= size[:, np.newaxis]
return nn
|
Efficiently compute vertex normals for triangulated surface
|
entailment
|
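The np.bincount calls accumulate each triangle's (unnormalized) normal onto its three vertices, which is equivalent to the commented-out loop. A small standalone check on a two-triangle square (an illustrative sketch, not VisPy's API):

import numpy as np

# Compare the bincount accumulation against the explicit per-triangle loop.
rr = np.array([[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0]], float)
tris = np.array([[0, 1, 2], [0, 2, 3]])
r1, r2, r3 = rr[tris[:, 0]], rr[tris[:, 1]], rr[tris[:, 2]]
tri_nn = np.cross(r2 - r1, r3 - r1)  # one (unnormalized) normal per triangle

npts = len(rr)
nn_loop = np.zeros((npts, 3))
for p, verts in enumerate(tris):
    nn_loop[verts, :] += tri_nn[p, :]

nn_binc = np.zeros((npts, 3))
for verts in tris.T:  # loops 3x: one pass per vertex slot of the triangles
    for idx in range(3):  # x, y, z
        nn_binc[:, idx] += np.bincount(verts, tri_nn[:, idx], minlength=npts)

print(np.allclose(nn_loop, nn_binc))  # True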
def resize(image, shape, kind='linear'):
"""Resize an image
Parameters
----------
image : ndarray
Array of shape (N, M, ...).
shape : tuple
2-element shape.
kind : str
Interpolation, either "linear" or "nearest".
Returns
-------
scaled_image : ndarray
New image, will have dtype np.float64.
"""
image = np.array(image, float)
shape = np.array(shape, int)
if shape.ndim != 1 or shape.size != 2:
raise ValueError('shape must have two elements')
if image.ndim < 2:
raise ValueError('image must have two dimensions')
if not isinstance(kind, string_types) or kind not in ('nearest', 'linear'):
        raise ValueError('kind must be "nearest" or "linear"')
r = np.linspace(0, image.shape[0] - 1, shape[0])
c = np.linspace(0, image.shape[1] - 1, shape[1])
if kind == 'linear':
r_0 = np.floor(r).astype(int)
c_0 = np.floor(c).astype(int)
r_1 = r_0 + 1
c_1 = c_0 + 1
top = (r_1 - r)[:, np.newaxis]
bot = (r - r_0)[:, np.newaxis]
lef = (c - c_0)[np.newaxis, :]
rig = (c_1 - c)[np.newaxis, :]
c_1 = np.minimum(c_1, image.shape[1] - 1)
r_1 = np.minimum(r_1, image.shape[0] - 1)
for arr in (top, bot, lef, rig):
arr.shape = arr.shape + (1,) * (image.ndim - 2)
out = top * rig * image[r_0][:, c_0, ...]
out += bot * rig * image[r_1][:, c_0, ...]
out += top * lef * image[r_0][:, c_1, ...]
out += bot * lef * image[r_1][:, c_1, ...]
else: # kind == 'nearest'
r = np.round(r).astype(int)
c = np.round(c).astype(int)
out = image[r][:, c, ...]
return out
|
Resize an image
Parameters
----------
image : ndarray
Array of shape (N, M, ...).
shape : tuple
2-element shape.
kind : str
Interpolation, either "linear" or "nearest".
Returns
-------
scaled_image : ndarray
New image, will have dtype np.float64.
|
entailment
|
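A brief usage sketch, assuming the resize() function above is in scope: a 2x2 image upsampled with both interpolation kinds.

import numpy as np

img = np.array([[0.0, 1.0],
                [2.0, 3.0]])
print(resize(img, (4, 4), kind='nearest'))
# each source pixel becomes a 2x2 block
print(resize(img, (3, 3), kind='linear'))
# the centre value is the average of all four pixels (1.5)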
def append(self, P0, P1, itemsize=None, **kwargs):
"""
        Append a new set of segments to the collection.
        For each kwargs entry, the expected length is the number of vertices
        (local attribute) or the number of items (shared attribute).
        Parameters
        ----------
        P0 : np.array
            Start points of the segments to be added
        P1 : np.array
            End points of the segments to be added
        itemsize : int or None
            Size of an individual segment
        color : list, array or 4-tuple
            Segment color
"""
itemsize = itemsize or 1
        itemcount = len(P0) // itemsize
V = np.empty(itemcount, dtype=self.vtype)
# Apply default values on vertices
for name in self.vtype.names:
if name not in ['collection_index', 'P']:
V[name] = kwargs.get(name, self._defaults[name])
V = np.repeat(V, 2, axis=0)
V['P'][0::2] = P0
V['P'][1::2] = P1
# Uniforms
if self.utype:
U = np.zeros(itemcount, dtype=self.utype)
for name in self.utype.names:
if name not in ["__unused__"]:
U[name] = kwargs.get(name, self._defaults[name])
else:
U = None
Collection.append(self, vertices=V, uniforms=U, itemsize=itemsize)
|
Append a new set of segments to the collection.
For each kwargs entry, the expected length is the number of vertices
(local attribute) or the number of items (shared attribute).
Parameters
----------
P0 : np.array
Start points of the segments to be added
P1 : np.array
End points of the segments to be added
itemsize : int or None
Size of an individual segment
color : list, array or 4-tuple
Segment color
|
entailment
|
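The start and end points are interleaved two vertices per segment via the strided assignments above. A standalone NumPy sketch of that interleaving; the structured dtype is a simplified stand-in for the collection's real vtype:

import numpy as np

P0 = np.array([[0, 0, 0], [1, 0, 0]], float)  # segment start points
P1 = np.array([[0, 1, 0], [1, 1, 0]], float)  # segment end points

vtype = np.dtype([('P', np.float32, 3), ('color', np.float32, 4)])
V = np.empty(len(P0), dtype=vtype)
V['color'] = (0, 0, 0, 1)    # default applied before interleaving
V = np.repeat(V, 2, axis=0)  # two vertices per segment
V['P'][0::2] = P0            # even slots: starts
V['P'][1::2] = P1            # odd slots: ends
print(V['P'])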
def parse(self):
"""Parse the lines, and fill self.line_fields accordingly."""
for line in self.lines:
# Parse the line
field_defs = self.parse_line(line)
fields = []
# Convert field parameters into Field objects
for (kind, options) in field_defs:
logger.debug("Creating field %s(%r)", kind, options)
fields.append(self.field_registry.create(kind, **options))
# Add the list of Field objects to the 'fields per line'.
self.line_fields.append(fields)
# Pre-fill the list of widgets
for field in fields:
self.widgets[field] = None
|
Parse the lines, and fill self.line_fields accordingly.
|
entailment
|