code stringlengths 3 1.18M | language stringclasses 1 value |
|---|---|
package org.poly2tri.transform.coordinate;
/**
 * A transform that aligns a given source normal with the XY plane normal [0,0,1].
 *
 * The matrix is built per Moller &amp; Hughes, "Efficiently Building a Matrix to
 * Rotate One Vector to Another": the rotation axis n x [0,0,1] has no z
 * component, so the general formula collapses to the form below.
 *
 * @author thahlen@gmail.com
 */
public class AnyToXYTransform extends Matrix3Transform
{
    /**
     * Assumes the source normal (nx,ny,nz) is normalized.
     */
    public AnyToXYTransform( double nx, double ny, double nz )
    {
        setSourceNormal( nx, ny, nz );
    }

    /**
     * Computes the rotation matrix mapping the given source normal onto [0,0,1].
     * Assumes the source normal is normalized.
     *
     * @param nx x component of the source normal
     * @param ny y component of the source normal
     * @param nz z component of the source normal
     */
    public void setSourceNormal( double nx, double ny, double nz )
    {
        // Rotation axis v = n x [0,0,1] = (-ny, nx, 0); c = cos(angle) = n . [0,0,1] = nz.
        final double vx = -ny;
        final double vy = nx;
        final double c = nz;

        if( Math.abs( c ) < 1.0 - 1.0E-4 )
        {
            // h = (1-c)/(1-c*c) simplifies algebraically to 1/(1+c).
            // Computing it only inside this branch also avoids the 0/0 -> NaN
            // evaluation the previous code performed when c was exactly +-1.
            final double h = 1.0 / ( 1.0 + c );
            final double hvx = h * vx;
            m00 = c + hvx * vx;
            m01 = hvx * vy;
            m02 = -vy;
            m10 = hvx * vy;
            m11 = c + h * vy * vy;
            m12 = vx;
            m20 = vy;
            m21 = -vx;
            m22 = c;
        }
        else
        {
            // Source normal is (nearly) parallel to [0,0,1]: use the identity,
            // or a 180 degree flip of the z axis when it points the other way.
            m00 = 1;
            m01 = 0;
            m02 = 0;
            m10 = 0;
            m11 = 1;
            m12 = 0;
            m20 = 0;
            m21 = 0;
            m22 = ( c > 0 ) ? 1 : -1;
        }
    }
}
| Java |
package org.poly2tri.transform.coordinate;
import java.util.List;
import org.poly2tri.geometry.primitives.Point;
/**
 * Base class for coordinate transforms implemented as a 3x3 matrix.
 * Subclasses populate the protected element fields; this class applies them.
 */
public abstract class Matrix3Transform implements CoordinateTransform
{
    /** Row-major 3x3 matrix elements, written by subclasses. */
    protected double m00, m01, m02, m10, m11, m12, m20, m21, m22;

    /** Multiplies the matrix with {@code p} and writes the result into {@code store}. */
    public void transform( Point p, Point store )
    {
        multiply( p, store );
    }

    /** Multiplies the matrix with {@code p}, overwriting {@code p} with the result. */
    public void transform( Point p )
    {
        multiply( p, p );
    }

    /** Transforms every point in the given list in place. */
    public void transform( List<? extends Point> list )
    {
        for( Point point : list )
        {
            multiply( point, point );
        }
    }

    // Shared matrix-vector multiply. All source coordinates are read into
    // locals before the target is written, so source and target may alias.
    private void multiply( Point source, Point target )
    {
        final double x = source.getX();
        final double y = source.getY();
        final double z = source.getZ();
        target.set( m00 * x + m01 * y + m02 * z,
                    m10 * x + m11 * y + m12 * z,
                    m20 * x + m21 * y + m22 * z );
    }
}
| Java |
package org.poly2tri.transform.coordinate;
import java.util.List;
import org.poly2tri.geometry.primitives.Point;
/**
 * Identity transform: points are already in the target coordinate system.
 */
public class NoTransform implements CoordinateTransform
{
    /** Copies the coordinates of {@code p} into {@code store} unchanged. */
    public void transform( Point p, Point store )
    {
        final double x = p.getX();
        final double y = p.getY();
        final double z = p.getZ();
        store.set( x, y, z );
    }

    /** Identity transform in place - nothing to do. */
    public void transform( Point p )
    {
        // intentionally empty
    }

    /** Identity transform of a list - nothing to do. */
    public void transform( List<? extends Point> list )
    {
        // intentionally empty
    }
}
| Java |
package org.poly2tri.transform.coordinate;
import java.util.List;
import org.poly2tri.geometry.primitives.Point;
/**
 * Transforms point coordinates from one coordinate system to another.
 *
 * Interface members are implicitly public and abstract (JLS 9.1.1, 9.4),
 * so the redundant modifiers have been dropped.
 */
public interface CoordinateTransform
{
    /**
     * Transforms {@code p} and writes the result into {@code store},
     * leaving {@code p} untouched.
     */
    void transform( Point p, Point store );

    /** Transforms {@code p} in place. */
    void transform( Point p );

    /** Transforms every point in the given list in place. */
    void transform( List<? extends Point> list );
}
| Java |
package org.poly2tri.transform.coordinate;
/**
 * A transform that aligns the XY plane normal [0,0,1] with any given target normal.
 *
 * The matrix is built per Moller &amp; Hughes:
 * http://www.cs.brown.edu/~jfh/papers/Moller-EBA-1999/paper.pdf
 * The rotation axis [0,0,1] x n has no z component, so the general formula
 * collapses to the form below.
 *
 * @author thahlen@gmail.com
 */
public class XYToAnyTransform extends Matrix3Transform
{
    /**
     * Assumes the target normal (nx,ny,nz) is normalized.
     */
    public XYToAnyTransform( double nx, double ny, double nz )
    {
        setTargetNormal( nx, ny, nz );
    }

    /**
     * Computes the rotation matrix mapping [0,0,1] onto the given target normal.
     * Assumes the target normal is normalized.
     *
     * @param nx x component of the target normal
     * @param ny y component of the target normal
     * @param nz z component of the target normal
     */
    public void setTargetNormal( double nx, double ny, double nz )
    {
        // Rotation axis v = [0,0,1] x n = (ny, -nx, 0); c = cos(angle) = nz.
        final double vx = ny;
        final double vy = -nx;
        final double c = nz;

        if( Math.abs( c ) < 1.0 - 1.0E-4 )
        {
            // h = (1-c)/(1-c*c) simplifies algebraically to 1/(1+c).
            // Computing it only inside this branch also avoids the 0/0 -> NaN
            // evaluation the previous code performed when c was exactly +-1.
            final double h = 1.0 / ( 1.0 + c );
            final double hvx = h * vx;
            m00 = c + hvx * vx;
            m01 = hvx * vy;
            m02 = -vy;
            m10 = hvx * vy;
            m11 = c + h * vy * vy;
            m12 = vx;
            m20 = vy;
            m21 = -vx;
            m22 = c;
        }
        else
        {
            // Target normal is (nearly) parallel to [0,0,1]: use the identity,
            // or a 180 degree flip of the z axis when it points the other way.
            m00 = 1;
            m01 = 0;
            m02 = 0;
            m10 = 0;
            m11 = 1;
            m12 = 0;
            m20 = 0;
            m21 = 0;
            m22 = ( c > 0 ) ? 1 : -1;
        }
    }
}
| Java |
/* Poly2Tri
* Copyright (c) 2009-2010, Poly2Tri Contributors
* http://code.google.com/p/poly2tri/
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Poly2Tri nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.poly2tri;
import org.poly2tri.geometry.polygon.Polygon;
import org.poly2tri.geometry.polygon.PolygonSet;
import org.poly2tri.triangulation.Triangulatable;
import org.poly2tri.triangulation.TriangulationAlgorithm;
import org.poly2tri.triangulation.TriangulationContext;
import org.poly2tri.triangulation.TriangulationMode;
import org.poly2tri.triangulation.TriangulationProcess;
import org.poly2tri.triangulation.delaunay.sweep.DTSweep;
import org.poly2tri.triangulation.delaunay.sweep.DTSweepContext;
import org.poly2tri.triangulation.sets.ConstrainedPointSet;
import org.poly2tri.triangulation.sets.PointSet;
import org.poly2tri.triangulation.util.PolygonGenerator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Static facade for running a triangulation over polygons and point sets.
 */
public class Poly2Tri
{
    private final static Logger logger = LoggerFactory.getLogger( Poly2Tri.class );

    private static final TriangulationAlgorithm _defaultAlgorithm = TriangulationAlgorithm.DTSweep;

    /**
     * Triangulates every polygon in the given set with the default algorithm.
     * A single context is reused and cleared between polygons.
     */
    public static void triangulate( PolygonSet ps )
    {
        final TriangulationContext<?> context = createContext( _defaultAlgorithm );
        for( Polygon polygon : ps.getPolygons() )
        {
            context.prepareTriangulation( polygon );
            triangulate( context );
            context.clear();
        }
    }

    /** Triangulates a single polygon with the default algorithm. */
    public static void triangulate( Polygon p )
    {
        triangulate( _defaultAlgorithm, p );
    }

    /** Triangulates a constrained point set with the default algorithm. */
    public static void triangulate( ConstrainedPointSet cps )
    {
        triangulate( _defaultAlgorithm, cps );
    }

    /** Triangulates a plain point set with the default algorithm. */
    public static void triangulate( PointSet ps )
    {
        triangulate( _defaultAlgorithm, ps );
    }

    /**
     * Creates a fresh triangulation context for the requested algorithm.
     * DTSweep is currently the only implementation.
     */
    public static TriangulationContext<?> createContext( TriangulationAlgorithm algorithm )
    {
        switch( algorithm )
        {
            case DTSweep:
            default:
                return new DTSweepContext();
        }
    }

    /** Prepares a fresh context for the given unit and runs the chosen algorithm. */
    public static void triangulate( TriangulationAlgorithm algorithm,
                                    Triangulatable t )
    {
        final TriangulationContext<?> context = createContext( algorithm );
        context.prepareTriangulation( t );
        triangulate( context );
    }

    /** Runs the algorithm selected by the given, already prepared, context. */
    public static void triangulate( TriangulationContext<?> tcx )
    {
        switch( tcx.algorithm() )
        {
            case DTSweep:
            default:
                DTSweep.triangulate( (DTSweepContext)tcx );
        }
    }

    /**
     * Will do a warmup run to let the JVM optimize the triangulation code.
     */
    public static void warmup()
    {
        /*
         * After a method is run 10000 times, the Hotspot compiler will compile
         * it into native code. Periodically, the Hotspot compiler may recompile
         * the method. After an unspecified amount of time, the compilation
         * system should become quiet.
         */
        final Polygon poly = PolygonGenerator.RandomCircleSweep2( 50, 50000 );
        final TriangulationProcess process = new TriangulationProcess();
        process.triangulate( poly );
    }
}
| Java |
/* Poly2Tri
* Copyright (c) 2009-2010, Poly2Tri Contributors
* http://code.google.com/p/poly2tri/
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Poly2Tri nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.poly2tri.triangulation;
import java.util.ArrayList;
import org.poly2tri.geometry.primitives.Point;
import org.poly2tri.triangulation.delaunay.sweep.DTSweepConstraint;
/**
 * Base class for points taking part in a triangulation. Adds the list of
 * constraint edges for which this point is the upper ending point (CDT).
 */
public abstract class TriangulationPoint extends Point
{
    // List of edges this point constitutes an upper ending point (CDT).
    // Lazily created by addEdge(); null until the first edge is added.
    private ArrayList<DTSweepConstraint> edges;

    @Override
    public String toString()
    {
        return "[" + getX() + "," + getY() + "]";
    }

    public abstract double getX();

    public abstract double getY();

    public abstract double getZ();

    public abstract float getXf();

    public abstract float getYf();

    public abstract float getZf();

    public abstract void set( double x, double y, double z );

    /** @return the edges ending at this point, or null if none were added */
    public ArrayList<DTSweepConstraint> getEdges()
    {
        return edges;
    }

    /** Registers a constraint edge that has this point as its upper ending point. */
    public void addEdge( DTSweepConstraint e )
    {
        if( edges == null )
        {
            edges = new ArrayList<DTSweepConstraint>();
        }
        edges.add( e );
    }

    public boolean hasEdges()
    {
        return edges != null;
    }

    /**
     * @param p - edge destination point
     * @return the edge from this point to the given point, or null if no such
     *         edge exists (including when no edges were ever added - the old
     *         code threw a NullPointerException in that case)
     */
    public DTSweepConstraint getEdge( TriangulationPoint p )
    {
        if( edges == null )
        {
            return null;
        }
        for( DTSweepConstraint c : edges )
        {
            if( c.p == p )
            {
                return c;
            }
        }
        return null;
    }

    /**
     * Two triangulation points are equal when their x and y coordinates match;
     * z is deliberately ignored (triangulation happens in the XY plane).
     */
    @Override
    public boolean equals(Object obj)
    {
        if( obj instanceof TriangulationPoint )
        {
            TriangulationPoint p = (TriangulationPoint)obj;
            return getX() == p.getX() && getY() == p.getY();
        }
        return super.equals( obj );
    }

    /** Hash derived from x and y only, consistent with {@link #equals(Object)}. */
    @Override
    public int hashCode()
    {
        long bits = java.lang.Double.doubleToLongBits(getX());
        bits ^= java.lang.Double.doubleToLongBits(getY()) * 31;
        return (((int) bits) ^ ((int) (bits >> 32)));
    }
}
| Java |
package org.poly2tri.triangulation.delaunay.sweep;
/**
 * Signals that a point was found lying exactly on an existing edge during
 * the sweep, a configuration the algorithm cannot process.
 */
public class PointOnEdgeException extends RuntimeException
{
    private static final long serialVersionUID = 1L;

    /**
     * @param msg detail message describing the offending point and edge
     */
    public PointOnEdgeException( String msg )
    {
        super( msg );
    }

    /**
     * @param msg detail message describing the offending point and edge
     * @param cause underlying cause, preserved for diagnostics
     */
    public PointOnEdgeException( String msg, Throwable cause )
    {
        super( msg, cause );
    }
}
| Java |
/* Poly2Tri
* Copyright (c) 2009-2010, Poly2Tri Contributors
* http://code.google.com/p/poly2tri/
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Poly2Tri nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.poly2tri.triangulation.delaunay.sweep;
import org.poly2tri.triangulation.TriangulationPoint;
import org.poly2tri.triangulation.delaunay.DelaunayTriangle;
/**
 * A node in the advancing front: wraps a triangulation point together with
 * its neighbors in the front and the triangle attached to it.
 */
public class AdvancingFrontNode
{
    protected AdvancingFrontNode next = null;
    protected AdvancingFrontNode prev = null;
    protected final Double key; // XXX: BST
    protected final double value;
    protected final TriangulationPoint point;
    protected DelaunayTriangle triangle;

    /** Creates a node for the given point, keyed on the point's x coordinate. */
    public AdvancingFrontNode( TriangulationPoint point )
    {
        this.point = point;
        this.value = point.getX();
        this.key = Double.valueOf( this.value ); // XXX: BST
    }

    /** @return the following node in the front, or null if there is none */
    public AdvancingFrontNode getNext()
    {
        return this.next;
    }

    /** @return the preceding node in the front, or null if there is none */
    public AdvancingFrontNode getPrevious()
    {
        return this.prev;
    }

    /** @return the point this node wraps */
    public TriangulationPoint getPoint()
    {
        return this.point;
    }

    /** @return the triangle attached to this node, or null if none is set */
    public DelaunayTriangle getTriangle()
    {
        return this.triangle;
    }

    public boolean hasNext()
    {
        return null != this.next;
    }

    public boolean hasPrevious()
    {
        return null != this.prev;
    }
}
| Java |
package org.poly2tri.triangulation.delaunay.sweep;
/**
 * Skeleton of a fixed-depth binary index over the advancing front.
 *
 * NOTE(review): the fetch methods are unimplemented stubs that always return
 * null; this class appears to be a work in progress.
 */
public class AdvancingFrontIndex<A>
{
    /** Hard cap on the depth of the pre-built index tree. */
    private static final int MAX_DEPTH = 5;

    double _min, _max;
    IndexNode<A> _root;

    /**
     * @param min lower bound of the indexed range
     * @param max upper bound of the indexed range
     * @param depth requested tree depth, clamped to {@link #MAX_DEPTH}
     */
    public AdvancingFrontIndex( double min, double max, int depth )
    {
        // The old code silently discarded min and max; keep the range.
        _min = min;
        _max = max;
        if( depth > MAX_DEPTH )
        {
            depth = MAX_DEPTH;
        }
        _root = createIndex( depth );
    }

    // Recursively pre-builds a complete binary tree of the given depth.
    private IndexNode<A> createIndex( int n )
    {
        IndexNode<A> node = null;
        if( n > 0 )
        {
            node = new IndexNode<A>();
            node.bigger = createIndex( n - 1 );
            node.smaller = createIndex( n - 1 );
        }
        return node;
    }

    /** TODO: not implemented - always returns null. */
    public A fetchAndRemoveIndex( A key )
    {
        return null;
    }

    /** TODO: not implemented - always returns null. */
    public A fetchAndInsertIndex( A key )
    {
        return null;
    }

    /**
     * Simple binary index node. Declared static so it carries no hidden
     * reference to the enclosing instance, and given its own type parameter
     * name so it no longer hides the outer {@code A}.
     */
    static class IndexNode<T>
    {
        T value;
        IndexNode<T> smaller;
        IndexNode<T> bigger;
        double range;
    }
}
| Java |
package org.poly2tri.triangulation.delaunay.sweep;
import java.util.Comparator;
import org.poly2tri.triangulation.TriangulationPoint;
/**
 * Orders triangulation points in sweep order: bottom-up by y, then
 * left-to-right by x.
 *
 * Uses {@link Double#compare(double, double)} so the comparator provides a
 * total order even for NaN coordinates - the previous hand-rolled
 * {@code <}/{@code >} chain returned 0 for any NaN comparison, violating
 * the {@link Comparator} contract.
 */
public class DTSweepPointComparator implements Comparator<TriangulationPoint>
{
    public int compare( TriangulationPoint p1, TriangulationPoint p2 )
    {
        int cmp = Double.compare( p1.getY(), p2.getY() );
        if( cmp == 0 )
        {
            cmp = Double.compare( p1.getX(), p2.getX() );
        }
        return cmp;
    }
}
| Java |
package org.poly2tri.triangulation.delaunay.sweep;
import org.poly2tri.triangulation.TriangulationContext;
import org.poly2tri.triangulation.TriangulationDebugContext;
import org.poly2tri.triangulation.TriangulationPoint;
import org.poly2tri.triangulation.delaunay.DelaunayTriangle;
/**
 * Debug companion for {@link DTSweepContext}: holds a snapshot of the
 * entities currently being worked on so a visual debugger can render the
 * triangulation state, and notifies the context whenever that state changes.
 */
public class DTSweepDebugContext extends TriangulationDebugContext
{
    // Snapshot fields for visual representation of the current triangulation.
    protected DelaunayTriangle _primaryTriangle;
    protected DelaunayTriangle _secondaryTriangle;
    protected TriangulationPoint _activePoint;
    protected AdvancingFrontNode _activeNode;
    protected DTSweepConstraint _activeConstraint;

    public DTSweepDebugContext( DTSweepContext tcx )
    {
        super( tcx );
    }

    /** Always true - this context carries debug state. */
    public boolean isDebugContext()
    {
        return true;
    }

    public DelaunayTriangle getPrimaryTriangle()
    {
        return this._primaryTriangle;
    }

    public DelaunayTriangle getSecondaryTriangle()
    {
        return this._secondaryTriangle;
    }

    public AdvancingFrontNode getActiveNode()
    {
        return this._activeNode;
    }

    public DTSweepConstraint getActiveConstraint()
    {
        return this._activeConstraint;
    }

    public TriangulationPoint getActivePoint()
    {
        return this._activePoint;
    }

    /** Records the triangle in focus and notifies the context. */
    public void setPrimaryTriangle( DelaunayTriangle triangle )
    {
        this._primaryTriangle = triangle;
        _tcx.update("setPrimaryTriangle");
    }

    /** Records the secondary triangle in focus and notifies the context. */
    public void setSecondaryTriangle( DelaunayTriangle triangle )
    {
        this._secondaryTriangle = triangle;
        _tcx.update("setSecondaryTriangle");
    }

    /** Records the active point. Note: does not notify the context. */
    public void setActivePoint( TriangulationPoint point )
    {
        this._activePoint = point;
    }

    /** Records the active constraint and notifies the context. */
    public void setActiveConstraint( DTSweepConstraint e )
    {
        this._activeConstraint = e;
        _tcx.update("setWorkingSegment");
    }

    /** Records the active front node and notifies the context. */
    public void setActiveNode( AdvancingFrontNode node )
    {
        this._activeNode = node;
        _tcx.update("setWorkingNode");
    }

    /** Drops all snapshot references. */
    @Override
    public void clear()
    {
        this._primaryTriangle = null;
        this._secondaryTriangle = null;
        this._activePoint = null;
        this._activeNode = null;
        this._activeConstraint = null;
    }
}
| Java |
/* Poly2Tri
* Copyright (c) 2009-2010, Poly2Tri Contributors
* http://code.google.com/p/poly2tri/
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Poly2Tri nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.poly2tri.triangulation.delaunay;
import java.util.ArrayList;
import org.poly2tri.triangulation.TriangulationPoint;
import org.poly2tri.triangulation.delaunay.sweep.DTSweepConstraint;
import org.poly2tri.triangulation.point.TPoint;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * A triangle in the Delaunay triangulation, with neighbor pointers and
 * per-edge flags. Edge i is the edge opposite points[i]; neighbors[i],
 * cEdge[i] and dEdge[i] all refer to that edge.
 */
public class DelaunayTriangle
{
    private final static Logger logger = LoggerFactory.getLogger( DelaunayTriangle.class );

    /** Neighbor pointers; neighbors[i] shares the edge opposite points[i]. */
    public final DelaunayTriangle[] neighbors = new DelaunayTriangle[3];
    /** Flags to determine if an edge is a Constrained edge */
    public final boolean[] cEdge = new boolean[] { false, false, false };
    /** Flags to determine if an edge is a Delauney edge */
    public final boolean[] dEdge = new boolean[] { false, false, false };
    /** Has this triangle been marked as an interior triangle? */
    protected boolean interior = false;

    public final TriangulationPoint[] points = new TriangulationPoint[3];

    public DelaunayTriangle( TriangulationPoint p1, TriangulationPoint p2, TriangulationPoint p3 )
    {
        points[0] = p1;
        points[1] = p2;
        points[2] = p3;
    }

    /**
     * @return the index (0..2) of the given point in this triangle
     * @throws RuntimeException if the point is not a corner of this triangle
     */
    public int index( TriangulationPoint p )
    {
        if( p == points[0] )
        {
            return 0;
        }
        else if( p == points[1] )
        {
            return 1;
        }
        else if( p == points[2] )
        {
            return 2;
        }
        throw new RuntimeException("Calling index with a point that doesn't exist in triangle");
    }

    /** @return the index of the point clockwise to the given point */
    public int indexCW( TriangulationPoint p )
    {
        int index = index(p);
        switch( index )
        {
            case 0: return 2;
            case 1: return 0;
            default: return 1;
        }
    }

    /** @return the index of the point counter-clockwise to the given point */
    public int indexCCW( TriangulationPoint p )
    {
        int index = index(p);
        switch( index )
        {
            case 0: return 1;
            case 1: return 2;
            default: return 0;
        }
    }

    /** @return true if the given point is a corner of this triangle (identity compare) */
    public boolean contains( TriangulationPoint p )
    {
        return ( p == points[0] || p == points[1] || p == points[2] );
    }

    /** @return true if both end points of the edge are corners of this triangle */
    public boolean contains( DTSweepConstraint e )
    {
        return ( contains( e.p ) && contains( e.q ) );
    }

    public boolean contains( TriangulationPoint p, TriangulationPoint q )
    {
        return ( contains( p ) && contains( q ) );
    }

    // Update neighbor pointers for the edge (p1,p2); order of p1/p2 is irrelevant.
    private void markNeighbor( TriangulationPoint p1,
                               TriangulationPoint p2,
                               DelaunayTriangle t )
    {
        if( ( p1 == points[2] && p2 == points[1] ) || ( p1 == points[1] && p2 == points[2] ) )
        {
            neighbors[0] = t;
        }
        else if( ( p1 == points[0] && p2 == points[2] ) || ( p1 == points[2] && p2 == points[0] ) )
        {
            neighbors[1] = t;
        }
        else if( ( p1 == points[0] && p2 == points[1] ) || ( p1 == points[1] && p2 == points[0] ) )
        {
            neighbors[2] = t;
        }
        else
        {
            logger.error( "Neighbor error, please report!" );
        }
    }

    /* Exhaustive search to update neighbor pointers on both triangles */
    public void markNeighbor( DelaunayTriangle t )
    {
        if( t.contains( points[1], points[2] ) )
        {
            neighbors[0] = t;
            t.markNeighbor( points[1], points[2], this );
        }
        else if( t.contains( points[0], points[2] ) )
        {
            neighbors[1] = t;
            t.markNeighbor( points[0], points[2], this );
        }
        else if( t.contains( points[0], points[1] ) )
        {
            neighbors[2] = t;
            t.markNeighbor( points[0], points[1], this );
        }
        else
        {
            logger.error( "markNeighbor failed" );
        }
    }

    public void clearNeighbors()
    {
        neighbors[0] = neighbors[1] = neighbors[2] = null;
    }

    /** Removes the pointer to the given neighbor, if it is one. */
    public void clearNeighbor( DelaunayTriangle triangle )
    {
        if( neighbors[0] == triangle )
        {
            neighbors[0] = null;
        }
        else if( neighbors[1] == triangle )
        {
            neighbors[1] = null;
        }
        else if( neighbors[2] == triangle )
        {
            // Guarded: the old unconditional else cleared neighbors[2] even
            // when the argument was not a neighbor of this triangle.
            neighbors[2] = null;
        }
    }

    /**
     * Clears all references to all other triangles and points
     */
    public void clear()
    {
        DelaunayTriangle t;
        for( int i=0; i<3; i++ )
        {
            t = neighbors[i];
            if( t != null )
            {
                t.clearNeighbor( this );
            }
        }
        clearNeighbors();
        points[0]=points[1]=points[2]=null;
    }

    /**
     * @param t - opposite triangle
     * @param p - the point in t that isn't shared between the triangles
     * @return the point in this triangle that isn't shared with t
     */
    public TriangulationPoint oppositePoint( DelaunayTriangle t, TriangulationPoint p )
    {
        assert t != this : "self-pointer error";
        return pointCW( t.pointCW(p) );
    }

    /** The neighbor clockwise to given point */
    public DelaunayTriangle neighborCW( TriangulationPoint point )
    {
        if( point == points[0] )
        {
            return neighbors[1];
        }
        else if( point == points[1] )
        {
            return neighbors[2];
        }
        return neighbors[0];
    }

    /** The neighbor counter-clockwise to given point */
    public DelaunayTriangle neighborCCW( TriangulationPoint point )
    {
        if( point == points[0] )
        {
            return neighbors[2];
        }
        else if( point == points[1] )
        {
            return neighbors[0];
        }
        return neighbors[1];
    }

    /** The neighbor across from given point */
    public DelaunayTriangle neighborAcross( TriangulationPoint opoint )
    {
        if( opoint == points[0] )
        {
            return neighbors[0];
        }
        else if( opoint == points[1] )
        {
            return neighbors[1];
        }
        return neighbors[2];
    }

    /** The point counter-clockwise to given point */
    public TriangulationPoint pointCCW( TriangulationPoint point )
    {
        if( point == points[0] )
        {
            return points[1];
        }
        else if( point == points[1] )
        {
            return points[2];
        }
        else if( point == points[2] )
        {
            return points[0];
        }
        logger.error( "point location error" );
        throw new RuntimeException("[FIXME] point location error");
    }

    /** The point clockwise to given point */
    public TriangulationPoint pointCW( TriangulationPoint point )
    {
        if( point == points[0] )
        {
            return points[2];
        }
        else if( point == points[1] )
        {
            return points[0];
        }
        else if( point == points[2] )
        {
            return points[1];
        }
        logger.error( "point location error" );
        throw new RuntimeException("[FIXME] point location error");
    }

    /** Legalize triangle by rotating clockwise around oPoint, inserting nPoint */
    public void legalize( TriangulationPoint oPoint, TriangulationPoint nPoint )
    {
        if( oPoint == points[0] )
        {
            points[1] = points[0];
            points[0] = points[2];
            points[2] = nPoint;
        }
        else if( oPoint == points[1] )
        {
            points[2] = points[1];
            points[1] = points[0];
            points[0] = nPoint;
        }
        else if( oPoint == points[2] )
        {
            points[0] = points[2];
            points[2] = points[1];
            points[1] = nPoint;
        }
        else
        {
            logger.error( "legalization error" );
            throw new RuntimeException("legalization bug");
        }
    }

    public void printDebug()
    {
        System.out.println( points[0] + "," + points[1] + "," + points[2] );
    }

    /** Propagates constrained-edge flags to the corresponding neighbor triangles. */
    public void markNeighborEdges()
    {
        for( int i = 0; i < 3; i++ )
        {
            if( cEdge[i] )
            {
                switch( i )
                {
                    case 0:
                        if( neighbors[0] != null )
                            neighbors[0].markConstrainedEdge( points[1], points[2] );
                        break;
                    case 1:
                        if( neighbors[1] != null )
                            neighbors[1].markConstrainedEdge( points[0], points[2] );
                        break;
                    case 2:
                        if( neighbors[2] != null )
                            neighbors[2].markConstrainedEdge( points[0], points[1] );
                        break;
                }
            }
        }
    }

    /** Copies this triangle's constrained-edge flags onto the given triangle. */
    public void markEdge( DelaunayTriangle triangle )
    {
        for( int i = 0; i < 3; i++ )
        {
            if( cEdge[i] )
            {
                switch( i )
                {
                    case 0:
                        triangle.markConstrainedEdge( points[1], points[2] );
                        break;
                    case 1:
                        triangle.markConstrainedEdge( points[0], points[2] );
                        break;
                    case 2:
                        triangle.markConstrainedEdge( points[0], points[1] );
                        break;
                }
            }
        }
    }

    /** Copies constrained-edge flags from every triangle in the list onto this one. */
    public void markEdge( ArrayList<DelaunayTriangle> tList )
    {
        for( DelaunayTriangle t : tList )
        {
            for( int i = 0; i < 3; i++ )
            {
                if( t.cEdge[i] )
                {
                    switch( i )
                    {
                        case 0:
                            markConstrainedEdge( t.points[1], t.points[2] );
                            break;
                        case 1:
                            markConstrainedEdge( t.points[0], t.points[2] );
                            break;
                        case 2:
                            markConstrainedEdge( t.points[0], t.points[1] );
                            break;
                    }
                }
            }
        }
    }

    public void markConstrainedEdge( int index )
    {
        cEdge[index] = true;
    }

    /** Marks the edge matching the given constraint, if this triangle has it. */
    public void markConstrainedEdge( DTSweepConstraint edge )
    {
        // The two-point overload already performs the full edge lookup; the
        // old code duplicated its entire if-chain inline after this call.
        markConstrainedEdge( edge.p, edge.q );
    }

    /** Mark edge (p,q) as constrained; order of p and q is irrelevant. */
    public void markConstrainedEdge( TriangulationPoint p, TriangulationPoint q )
    {
        if( ( q == points[0] && p == points[1] ) || ( q == points[1] && p == points[0] ) )
        {
            cEdge[2] = true;
        }
        else if( ( q == points[0] && p == points[2] ) || ( q == points[2] && p == points[0] ) )
        {
            cEdge[1] = true;
        }
        else if( ( q == points[1] && p == points[2] ) || ( q == points[2] && p == points[1] ) )
        {
            cEdge[0] = true;
        }
    }

    /** @return the area of this triangle (shoelace formula) */
    public double area()
    {
        double a = (points[0].getX() - points[2].getX())*(points[1].getY() - points[0].getY());
        double b = (points[0].getX() - points[1].getX())*(points[2].getY() - points[0].getY());
        return 0.5*Math.abs( a - b );
    }

    /** @return the centroid of this triangle (z ignored) */
    public TPoint centroid()
    {
        double cx = ( points[0].getX() + points[1].getX() + points[2].getX() ) / 3d;
        double cy = ( points[0].getY() + points[1].getY() + points[2].getY() ) / 3d;
        return new TPoint( cx, cy );
    }

    /**
     * @param p1 one end point of the edge
     * @param p2 other end point of the edge
     * @return index of the edge (p1,p2) in this triangle, or -1 if the edge
     *         isn't part of this triangle
     */
    public int edgeIndex( TriangulationPoint p1, TriangulationPoint p2 )
    {
        if( points[0] == p1 )
        {
            if( points[1] == p2 )
            {
                return 2;
            }
            else if( points[2] == p2 )
            {
                return 1;
            }
        }
        else if( points[1] == p1 )
        {
            if( points[2] == p2 )
            {
                return 0;
            }
            else if( points[0] == p2 )
            {
                return 2;
            }
        }
        else if( points[2] == p1 )
        {
            if( points[0] == p2 )
            {
                return 1;
            }
            else if( points[1] == p2 )
            {
                return 0;
            }
        }
        return -1;
    }

    public boolean getConstrainedEdgeCCW( TriangulationPoint p )
    {
        if( p == points[0] )
        {
            return cEdge[2];
        }
        else if( p == points[1] )
        {
            return cEdge[0];
        }
        return cEdge[1];
    }

    public boolean getConstrainedEdgeCW( TriangulationPoint p )
    {
        if( p == points[0] )
        {
            return cEdge[1];
        }
        else if( p == points[1] )
        {
            return cEdge[2];
        }
        return cEdge[0];
    }

    public boolean getConstrainedEdgeAcross( TriangulationPoint p )
    {
        if( p == points[0] )
        {
            return cEdge[0];
        }
        else if( p == points[1] )
        {
            return cEdge[1];
        }
        return cEdge[2];
    }

    public void setConstrainedEdgeCCW( TriangulationPoint p, boolean ce )
    {
        if( p == points[0] )
        {
            cEdge[2] = ce;
        }
        else if( p == points[1] )
        {
            cEdge[0] = ce;
        }
        else
        {
            cEdge[1] = ce;
        }
    }

    public void setConstrainedEdgeCW( TriangulationPoint p, boolean ce )
    {
        if( p == points[0] )
        {
            cEdge[1] = ce;
        }
        else if( p == points[1] )
        {
            cEdge[2] = ce;
        }
        else
        {
            cEdge[0] = ce;
        }
    }

    public void setConstrainedEdgeAcross( TriangulationPoint p, boolean ce )
    {
        if( p == points[0] )
        {
            cEdge[0] = ce;
        }
        else if( p == points[1] )
        {
            cEdge[1] = ce;
        }
        else
        {
            cEdge[2] = ce;
        }
    }

    public boolean getDelunayEdgeCCW( TriangulationPoint p )
    {
        if( p == points[0] )
        {
            return dEdge[2];
        }
        else if( p == points[1] )
        {
            return dEdge[0];
        }
        return dEdge[1];
    }

    public boolean getDelunayEdgeCW( TriangulationPoint p )
    {
        if( p == points[0] )
        {
            return dEdge[1];
        }
        else if( p == points[1] )
        {
            return dEdge[2];
        }
        return dEdge[0];
    }

    public boolean getDelunayEdgeAcross( TriangulationPoint p )
    {
        if( p == points[0] )
        {
            return dEdge[0];
        }
        else if( p == points[1] )
        {
            return dEdge[1];
        }
        return dEdge[2];
    }

    public void setDelunayEdgeCCW( TriangulationPoint p, boolean e )
    {
        if( p == points[0] )
        {
            dEdge[2] = e;
        }
        else if( p == points[1] )
        {
            dEdge[0] = e;
        }
        else
        {
            dEdge[1] = e;
        }
    }

    public void setDelunayEdgeCW( TriangulationPoint p, boolean e )
    {
        if( p == points[0] )
        {
            dEdge[1] = e;
        }
        else if( p == points[1] )
        {
            dEdge[2] = e;
        }
        else
        {
            dEdge[0] = e;
        }
    }

    public void setDelunayEdgeAcross( TriangulationPoint p, boolean e )
    {
        if( p == points[0] )
        {
            dEdge[0] = e;
        }
        else if( p == points[1] )
        {
            dEdge[1] = e;
        }
        else
        {
            dEdge[2] = e;
        }
    }

    public void clearDelunayEdges()
    {
        dEdge[0] = false;
        dEdge[1] = false;
        dEdge[2] = false;
    }

    public boolean isInterior()
    {
        return interior;
    }

    public void isInterior( boolean b )
    {
        interior = b;
    }
}
| Java |
package org.poly2tri.triangulation;
/** The kinds of input a triangulation can be run on. */
public enum TriangulationMode
{
    UNCONSTRAINED,
    CONSTRAINED,
    POLYGON
}
| Java |
/* Poly2Tri
* Copyright (c) 2009-2010, Poly2Tri Contributors
* http://code.google.com/p/poly2tri/
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Poly2Tri nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.poly2tri.triangulation;
/**
 * Callback interface for observing the progress of a triangulation process.
 *
 * Interface members are implicitly public and abstract, so the redundant
 * modifier has been dropped.
 */
public interface TriangulationProcessListener
{
    /**
     * Called when the triangulation process reports an event.
     *
     * @param e the event that occurred
     * @param unit the triangulatable unit the event applies to
     */
    void triangulationEvent( TriangulationProcessEvent e, Triangulatable unit );
}
| Java |
/* Poly2Tri
* Copyright (c) 2009-2010, Poly2Tri Contributors
* http://code.google.com/p/poly2tri/
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Poly2Tri nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.poly2tri.triangulation;
/**
 * Enumerates the triangulation algorithms available in this library.
 * Currently only the sweep-line implementation is provided.
 */
public enum TriangulationAlgorithm
{
    /** Delaunay triangulation via the sweep-line (DTSweep) algorithm. */
    DTSweep
}
| Java |
/* Poly2Tri
* Copyright (c) 2009-2010, Poly2Tri Contributors
* http://code.google.com/p/poly2tri/
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Poly2Tri nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.poly2tri.triangulation.point;
import org.poly2tri.triangulation.TriangulationPoint;
/**
 * Basic mutable triangulation point storing x, y and z as doubles.
 * The z coordinate defaults to 0, so the class also serves as a 2D point.
 */
public class TPoint extends TriangulationPoint
{
    private double _x;
    private double _y;
    private double _z;

    /**
     * Creates a point in the z = 0 plane.
     *
     * @param x x coordinate
     * @param y y coordinate
     */
    public TPoint( double x, double y )
    {
        this( x, y, 0 );
    }

    /**
     * Creates a point with explicit x, y and z coordinates.
     */
    public TPoint( double x, double y, double z )
    {
        _x = x;
        _y = y;
        _z = z;
    }

    public double getX()
    {
        return _x;
    }

    public double getY()
    {
        return _y;
    }

    public double getZ()
    {
        return _z;
    }

    /** @return x narrowed to float */
    public float getXf()
    {
        return (float) _x;
    }

    /** @return y narrowed to float */
    public float getYf()
    {
        return (float) _y;
    }

    /** @return z narrowed to float */
    public float getZf()
    {
        return (float) _z;
    }

    /** Overwrites all three coordinates at once. */
    @Override
    public void set( double x, double y, double z )
    {
        _x = x;
        _y = y;
        _z = z;
    }
}
| Java |
/* Poly2Tri
* Copyright (c) 2009-2010, Poly2Tri Contributors
* http://code.google.com/p/poly2tri/
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Poly2Tri nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.poly2tri.triangulation.point;
import java.nio.FloatBuffer;
import org.poly2tri.triangulation.TriangulationPoint;
/**
 * A TriangulationPoint view over three consecutive floats in a FloatBuffer.
 * Reads and writes go straight through to the backing buffer, so changes
 * made via set(...) are visible to every other user of the buffer.
 */
public class FloatBufferPoint extends TriangulationPoint
{
    private final FloatBuffer _fb;
    private final int _ix, _iy, _iz;

    /**
     * @param fb    backing buffer
     * @param index absolute buffer position of the x component; y and z
     *              follow at index+1 and index+2
     */
    public FloatBufferPoint( FloatBuffer fb, int index )
    {
        _fb = fb;
        _ix = index;
        _iy = index + 1;
        _iz = index + 2;
    }

    public final double getX()
    {
        return _fb.get( _ix );
    }

    public final double getY()
    {
        return _fb.get( _iy );
    }

    public final double getZ()
    {
        return _fb.get( _iz );
    }

    public final float getXf()
    {
        return _fb.get( _ix );
    }

    public final float getYf()
    {
        return _fb.get( _iy );
    }

    public final float getZf()
    {
        return _fb.get( _iz );
    }

    /** Writes the coordinates back into the buffer, narrowing to float. */
    @Override
    public void set( double x, double y, double z )
    {
        _fb.put( _ix, (float) x );
        _fb.put( _iy, (float) y );
        _fb.put( _iz, (float) z );
    }

    /**
     * Wraps an entire buffer as points, three floats per point.
     * Any trailing floats beyond the last complete triple are ignored.
     */
    public static TriangulationPoint[] toPoints( FloatBuffer fb )
    {
        final int count = fb.limit() / 3;
        FloatBufferPoint[] points = new FloatBufferPoint[count];
        for( int i = 0; i < count; i++ )
        {
            points[i] = new FloatBufferPoint( fb, 3 * i );
        }
        return points;
    }
}
| Java |
package org.poly2tri.triangulation.util;
import java.util.ArrayList;
import java.util.List;
import org.poly2tri.triangulation.TriangulationPoint;
import org.poly2tri.triangulation.point.TPoint;
/**
 * Utility factories that produce point sets for demos and benchmarks.
 */
public class PointGenerator
{
    /**
     * Returns n points drawn uniformly at random from the axis-aligned
     * square [-scale/2, scale/2] x [-scale/2, scale/2].
     */
    public static List<TriangulationPoint> uniformDistribution( int n, double scale )
    {
        List<TriangulationPoint> points = new ArrayList<TriangulationPoint>( n );
        for( int i = 0; i < n; i++ )
        {
            double px = scale * (0.5 - Math.random());
            double py = scale * (0.5 - Math.random());
            points.add( new TPoint( px, py ) );
        }
        return points;
    }

    /**
     * Returns an (n+1) x (n+1) regular grid of points spanning the square
     * [-scale/2, scale/2] in both axes.
     */
    public static List<TriangulationPoint> uniformGrid( int n, double scale )
    {
        final double step = scale / n;
        final double half = 0.5 * scale;
        List<TriangulationPoint> points = new ArrayList<TriangulationPoint>( (n + 1) * (n + 1) );
        for( int row = 0; row <= n; row++ )
        {
            double x = half - row * step;
            for( int col = 0; col <= n; col++ )
            {
                points.add( new TPoint( x, half - col * step ) );
            }
        }
        return points;
    }
}
| Java |
/* Poly2Tri
* Copyright (c) 2009-2010, Poly2Tri Contributors
* http://code.google.com/p/poly2tri/
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Poly2Tri nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.poly2tri.triangulation.util;
/**
 * A minimal generic container holding three public, mutable values.
 */
public class Tuple3<A,B,C>
{
    public A a;
    public B b;
    public C c;

    /**
     * @param a first value
     * @param b second value
     * @param c third value
     */
    public Tuple3(A a,B b,C c)
    {
        this.a = a;
        this.b = b;
        this.c = c;
    }
}
| Java |
/* Poly2Tri
* Copyright (c) 2009-2010, Poly2Tri Contributors
* http://code.google.com/p/poly2tri/
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Poly2Tri nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.poly2tri.triangulation.util;
/**
 * A minimal generic container holding two public, mutable values.
 */
public class Tuple2<A,B>
{
    public A a;
    public B b;

    /**
     * @param a first value
     * @param b second value
     */
    public Tuple2(A a,B b)
    {
        this.a = a;
        this.b = b;
    }
}
| Java |
/* Poly2Tri
* Copyright (c) 2009-2010, Poly2Tri Contributors
* http://code.google.com/p/poly2tri/
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Poly2Tri nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.poly2tri.triangulation.util;
import org.poly2tri.geometry.polygon.Polygon;
import org.poly2tri.geometry.polygon.PolygonPoint;
/**
 * Factories for randomized closed polygons used by the demos.
 */
public class PolygonGenerator
{
    private static final double PI_2 = 2.0 * Math.PI;

    /**
     * Builds a polygon by sweeping a full circle in vertexCount steps while
     * randomly perturbing the radius, with occasional larger jumps every 50th
     * and 250th vertex. The radius is clamped to [scale/10, scale/2].
     */
    public static Polygon RandomCircleSweep( double scale, int vertexCount )
    {
        PolygonPoint[] points = new PolygonPoint[vertexCount];
        double radius = scale / 4;
        for( int i = 0; i < vertexCount; i++ )
        {
            do
            {
                // Larger, rarer jumps keep the outline irregular.
                if( i % 250 == 0 )
                {
                    radius += scale / 2 * (0.5 - Math.random());
                }
                else if( i % 50 == 0 )
                {
                    radius += scale / 5 * (0.5 - Math.random());
                }
                else
                {
                    radius += 25 * scale / vertexCount * (0.5 - Math.random());
                }
                radius = Math.min( radius, scale / 2 );
                radius = Math.max( radius, scale / 10 );
            } while( radius < scale / 10 || radius > scale / 2 );
            double angle = (PI_2 * i) / vertexCount;
            points[i] = new PolygonPoint( radius * Math.cos( angle ),
                                          radius * Math.sin( angle ) );
        }
        return new Polygon( points );
    }

    /**
     * Like RandomCircleSweep but with a single, uniform perturbation size,
     * again clamped to [scale/10, scale/2].
     */
    public static Polygon RandomCircleSweep2( double scale, int vertexCount )
    {
        PolygonPoint[] points = new PolygonPoint[vertexCount];
        double radius = scale / 4;
        for( int i = 0; i < vertexCount; i++ )
        {
            do
            {
                radius += scale / 5 * (0.5 - Math.random());
                radius = Math.min( radius, scale / 2 );
                radius = Math.max( radius, scale / 10 );
            } while( radius < scale / 10 || radius > scale / 2 );
            double angle = (PI_2 * i) / vertexCount;
            points[i] = new PolygonPoint( radius * Math.cos( angle ),
                                          radius * Math.sin( angle ) );
        }
        return new Polygon( points );
    }
}
| Java |
package org.poly2tri.triangulation;
/**
 * Base class for algorithm-specific debug state attached to a
 * TriangulationContext.
 */
public abstract class TriangulationDebugContext
{
    protected TriangulationContext<?> _tcx;

    public TriangulationDebugContext( TriangulationContext<?> tcx )
    {
        _tcx = tcx;
    }

    /** Resets all debug state held by this context. */
    public abstract void clear();
}
| Java |
/* Poly2Tri
* Copyright (c) 2009-2010, Poly2Tri Contributors
* http://code.google.com/p/poly2tri/
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Poly2Tri nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.poly2tri.triangulation.sets;
import java.util.ArrayList;
import java.util.List;
import org.poly2tri.triangulation.Triangulatable;
import org.poly2tri.triangulation.TriangulationContext;
import org.poly2tri.triangulation.TriangulationMode;
import org.poly2tri.triangulation.TriangulationPoint;
import org.poly2tri.triangulation.delaunay.DelaunayTriangle;
/**
 * A plain set of points to be triangulated without constraints.
 */
public class PointSet implements Triangulatable
{
    // NOTE(review): fields left package-private; subclasses in this package
    // appear to rely on direct access — confirm before tightening visibility.
    List<TriangulationPoint> _points;
    List<DelaunayTriangle> _triangles;

    /**
     * @param points points to triangulate; the list is copied, the point
     *               objects themselves are shared
     */
    public PointSet( List<TriangulationPoint> points )
    {
        _points = new ArrayList<TriangulationPoint>( points );
        // Fix: initialize eagerly so addTriangle/addTriangles/clearTriangulation
        // no longer throw NullPointerException when called before
        // prepareTriangulation.
        _triangles = new ArrayList<DelaunayTriangle>();
    }

    public TriangulationMode getTriangulationMode()
    {
        return TriangulationMode.UNCONSTRAINED;
    }

    public List<TriangulationPoint> getPoints()
    {
        return _points;
    }

    public List<DelaunayTriangle> getTriangles()
    {
        return _triangles;
    }

    public void addTriangle( DelaunayTriangle t )
    {
        _triangles.add( t );
    }

    public void addTriangles( List<DelaunayTriangle> list )
    {
        _triangles.addAll( list );
    }

    public void clearTriangulation()
    {
        _triangles.clear();
    }

    /** Resets the triangle list and registers all points with the context. */
    public void prepareTriangulation( TriangulationContext<?> tcx )
    {
        if( _triangles == null )
        {
            // Defensive: can only happen if a subclass nulled the field.
            _triangles = new ArrayList<DelaunayTriangle>( _points.size() );
        }
        else
        {
            _triangles.clear();
        }
        tcx.addPoints( _points );
    }
}
| Java |
/* Poly2Tri
* Copyright (c) 2009-2010, Poly2Tri Contributors
* http://code.google.com/p/poly2tri/
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Poly2Tri nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.poly2tri.triangulation;
import java.util.ArrayList;
import java.util.List;
import org.poly2tri.triangulation.delaunay.DelaunayTriangle;
/**
 * Shared state for a triangulation run: the input points, the produced
 * triangles, the unit being triangulated, and debug/stepping support.
 *
 * @param <A> concrete debug-context type used by the implementing algorithm
 */
public abstract class TriangulationContext<A extends TriangulationDebugContext>
{
    protected A _debug;
    protected boolean _debugEnabled = false;
    // Triangles accumulated during the run via addToList(...).
    protected ArrayList<DelaunayTriangle> _triList = new ArrayList<DelaunayTriangle>();
    protected ArrayList<TriangulationPoint> _points = new ArrayList<TriangulationPoint>(200);
    protected TriangulationMode _triangulationMode;
    protected Triangulatable _triUnit;
    // Set by terminateTriangulation(); checked in update() to abort the run.
    private boolean _terminated = false;
    private boolean _waitUntilNotified;
    // Milliseconds to pause per debug step; <= 0 means wait until notified.
    private int _stepTime = -1;
    private int _stepCount = 0;

    /** @return number of steps executed so far (debug stepping) */
    public int getStepCount() { return _stepCount; }

    public void done()
    {
        _stepCount++;
    }

    /** @return which triangulation algorithm this context drives */
    public abstract TriangulationAlgorithm algorithm();

    /**
     * Stores the unit and its mode, then lets the unit register its points
     * with this context.
     */
    public void prepareTriangulation( Triangulatable t )
    {
        _triUnit = t;
        _triangulationMode = t.getTriangulationMode();
        t.prepareTriangulation( this );
    }

    /** Creates an algorithm-specific constraint edge between two points. */
    public abstract TriangulationConstraint newConstraint( TriangulationPoint a, TriangulationPoint b );

    public void addToList( DelaunayTriangle triangle )
    {
        _triList.add( triangle );
    }

    public List<DelaunayTriangle> getTriangles()
    {
        return _triList;
    }

    public Triangulatable getTriangulatable()
    {
        return _triUnit;
    }

    public List<TriangulationPoint> getPoints()
    {
        return _points;
    }

    /**
     * Debug hook called between algorithm steps. When debugging is enabled it
     * pauses the triangulation thread: either a timed wait of _stepTime ms
     * (optionally followed by an indefinite wait if waitUntilNotified(true)
     * was requested), or an indefinite wait until notified. Throws a
     * RuntimeException if terminateTriangulation() has been called.
     *
     * NOTE(review): the method is already synchronized, so the inner
     * synchronized(this) block is redundant (harmless — the lock is reentrant).
     * NOTE(review): InterruptedException is handled by recursing into update()
     * without restoring the interrupt flag — confirm this is intended.
     */
    public synchronized void update(String message)
    {
        if( _debugEnabled )
        {
            try
            {
                synchronized( this )
                {
                    _stepCount++;
                    if( _stepTime > 0 )
                    {
                        wait( (int)_stepTime );
                        /** Can we resume execution or are we expected to wait? */
                        if( _waitUntilNotified )
                        {
                            wait();
                        }
                    }
                    else
                    {
                        wait();
                    }
                    // We have been notified
                    _waitUntilNotified = false;
                }
            }
            catch( InterruptedException e )
            {
                update("Triangulation was interrupted");
            }
        }
        if( _terminated )
        {
            throw new RuntimeException( "Triangulation process terminated before completion");
        }
    }

    /**
     * Clears the input points and resets termination, debug and step state.
     * NOTE(review): _triList is not cleared here — confirm whether callers
     * rely on triangles surviving a clear().
     */
    public void clear()
    {
        _points.clear();
        _terminated = false;
        if( _debug != null )
        {
            _debug.clear();
        }
        _stepCount=0;
    }

    public TriangulationMode getTriangulationMode()
    {
        return _triangulationMode;
    }

    /** Requests that the next timed debug step also wait for a notify. */
    public synchronized void waitUntilNotified(boolean b)
    {
        _waitUntilNotified = b;
    }

    /** Flags the run for termination; the next update() call will throw. */
    public void terminateTriangulation()
    {
        _terminated=true;
    }

    public boolean isDebugEnabled()
    {
        return _debugEnabled;
    }

    public abstract void isDebugEnabled( boolean b );

    /** @return the algorithm-specific debug context, or null if none */
    public A getDebugContext()
    {
        return _debug;
    }

    public void addPoints( List<TriangulationPoint> points )
    {
        _points.addAll( points );
    }
}
| Java |
/* Poly2Tri
* Copyright (c) 2009-2010, Poly2Tri Contributors
* http://code.google.com/p/poly2tri/
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Poly2Tri nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.poly2tri.triangulation;
/**
 * Lifecycle events emitted while a triangulation process runs.
 */
public enum TriangulationProcessEvent
{
    Started,
    Waiting,
    Failed,
    Aborted,
    Done
}
| Java |
package org.poly2tri.triangulation;
import java.util.List;
import org.poly2tri.triangulation.delaunay.DelaunayTriangle;
/**
 * Something that can be triangulated: supplies the input points, receives
 * the resulting triangles, and declares which triangulation mode to use.
 */
public interface Triangulatable
{
    /**
     * Preparations needed before triangulation start should be handled here
     * @param tcx the context driving the triangulation
     */
    public void prepareTriangulation( TriangulationContext<?> tcx );

    /** @return triangles produced by the last triangulation, if any */
    public List<DelaunayTriangle> getTriangles();

    /** @return the points to triangulate */
    public List<TriangulationPoint> getPoints();

    /** Adds a single result triangle. */
    public void addTriangle( DelaunayTriangle t );

    /** Adds a batch of result triangles. */
    public void addTriangles( List<DelaunayTriangle> list );

    /** Discards any previously stored triangulation result. */
    public void clearTriangulation();

    /** @return the triangulation mode this unit requires */
    public TriangulationMode getTriangulationMode();
}
| Java |
package org.poly2tri.examples.ardor3d;
import java.util.List;
import org.poly2tri.Poly2Tri;
import org.poly2tri.examples.ardor3d.base.P2TSimpleExampleBase;
import org.poly2tri.triangulation.TriangulationPoint;
import org.poly2tri.triangulation.point.TPoint;
import org.poly2tri.triangulation.sets.ConstrainedPointSet;
import org.poly2tri.triangulation.tools.ardor3d.ArdorMeshMapper;
import org.poly2tri.triangulation.util.PointGenerator;
import com.ardor3d.framework.FrameHandler;
import com.ardor3d.input.logical.LogicalLayer;
import com.ardor3d.math.ColorRGBA;
import com.ardor3d.scenegraph.Mesh;
import com.google.inject.Inject;
/**
 * Demo: constrained Delaunay triangulation of uniformly distributed random
 * points, with a single constraint edge splitting the set in half.
 */
public class CDTUniformDistributionExample extends P2TSimpleExampleBase
{
    public static void main(final String[] args)
    {
        start(CDTUniformDistributionExample.class);
    }

    @Inject
    public CDTUniformDistributionExample( LogicalLayer logicalLayer, FrameHandler frameHandler )
    {
        super( logicalLayer, frameHandler );
    }

    @Override
    protected void initExample()
    {
        super.initExample();
        Mesh mesh = new Mesh();
        mesh.setDefaultColor( ColorRGBA.BLUE );
        _node.attachChild( mesh );

        double scale = 100;
        int size = 1000;
        List<TriangulationPoint> points = PointGenerator.uniformDistribution( size, scale );
        // Lets add a constraint that cuts the uniformDistribution in half.
        // (Fix: the previous random initializer of `index` was dead code —
        // the value was overwritten before use.)
        points.add( new TPoint( 0, scale / 2 ) );
        points.add( new TPoint( 0, -scale / 2 ) );
        // The two constraint points were appended at indices size and size+1.
        int index = size;
        ConstrainedPointSet cps = new ConstrainedPointSet( points, new int[]{ index, index + 1 } );
        Poly2Tri.triangulate( cps );
        ArdorMeshMapper.updateTriangleMesh( mesh, cps );
    }
}
| Java |
package org.poly2tri.examples.ardor3d;
import java.util.ArrayList;
import org.poly2tri.Poly2Tri;
import org.poly2tri.examples.ardor3d.base.P2TSimpleExampleBase;
import org.poly2tri.geometry.polygon.Polygon;
import org.poly2tri.geometry.polygon.PolygonPoint;
import org.poly2tri.triangulation.tools.ardor3d.ArdorMeshMapper;
import com.ardor3d.framework.FrameHandler;
import com.ardor3d.input.logical.LogicalLayer;
import com.ardor3d.math.ColorRGBA;
import com.ardor3d.renderer.state.WireframeState;
import com.ardor3d.scenegraph.Mesh;
import com.ardor3d.scenegraph.Node;
import com.google.inject.Inject;
/**
 * Demo: constrained Delaunay triangulation of a circle outline with two
 * circular holes, rendered both as a wireframe and as a filled mesh.
 */
public class CDTHoleExample extends P2TSimpleExampleBase
{
    public static void main(final String[] args)
    {
        start(CDTHoleExample.class);
    }

    @Inject
    public CDTHoleExample( LogicalLayer logicalLayer, FrameHandler frameHandler )
    {
        super( logicalLayer, frameHandler );
    }

    @Override
    protected void initExample()
    {
        super.initExample();

        Node wireNode = new Node();
        wireNode.setRenderState( new WireframeState() );
        _node.attachChild( wireNode );

        // Outline with two circular holes punched out of it.
        Polygon outline = createCirclePolygon( 64, 25, 1 );
        outline.addHole( createCirclePolygon( 32, 25, 0.25, -0.5, -0.5 ) );
        outline.addHole( createCirclePolygon( 64, 25, 0.5, 0.25, 0.25 ) );

        Mesh wireMesh = new Mesh();
        wireMesh.setDefaultColor( ColorRGBA.RED );
        wireMesh.setTranslation( 0, 0, 0.01 ); // small z offset, presumably to avoid z-fighting
        wireNode.attachChild( wireMesh );

        Mesh fillMesh = new Mesh();
        fillMesh.setDefaultColor( ColorRGBA.BLUE );
        _node.attachChild( fillMesh );

        Poly2Tri.triangulate( outline );
        ArdorMeshMapper.updateTriangleMesh( wireMesh, outline );
        ArdorMeshMapper.updateTriangleMesh( fillMesh, outline );
    }

    /** Circle polygon centered on the origin. */
    private Polygon createCirclePolygon( int n, double scale, double radius )
    {
        return createCirclePolygon( n, scale, radius, 0, 0 );
    }

    /**
     * Builds a regular n-gon approximating a circle of the given radius
     * centered at (x, y), with every coordinate multiplied by scale.
     * n is raised to at least 3 so the result is a valid polygon.
     */
    private Polygon createCirclePolygon( int n,
                                         double scale,
                                         double radius,
                                         double x,
                                         double y )
    {
        if( n < 3 )
        {
            n = 3;
        }
        PolygonPoint[] points = new PolygonPoint[n];
        for( int i = 0; i < n; i++ )
        {
            double angle = (2.0 * Math.PI * i) / n;
            points[i] = new PolygonPoint( scale * (x + radius * Math.cos( angle )),
                                          scale * (y + radius * Math.sin( angle )) );
        }
        return new Polygon( points );
    }
}
| Java |
package org.poly2tri.examples.ardor3d;
import java.io.IOException;
import java.util.ArrayList;
import org.poly2tri.Poly2Tri;
import org.poly2tri.examples.ardor3d.base.P2TSimpleExampleBase;
import org.poly2tri.examples.ardor3d.misc.DataLoader;
import org.poly2tri.examples.ardor3d.misc.ExampleSets;
import org.poly2tri.triangulation.TriangulationAlgorithm;
import org.poly2tri.triangulation.TriangulationContext;
import org.poly2tri.triangulation.TriangulationPoint;
import org.poly2tri.triangulation.point.TPoint;
import org.poly2tri.triangulation.sets.PointSet;
import org.poly2tri.triangulation.tools.ardor3d.ArdorMeshMapper;
import org.poly2tri.triangulation.util.PointGenerator;
import com.ardor3d.framework.FrameHandler;
import com.ardor3d.input.logical.LogicalLayer;
import com.ardor3d.math.ColorRGBA;
import com.ardor3d.renderer.state.WireframeState;
import com.ardor3d.scenegraph.Mesh;
import com.google.inject.Inject;
/**
 * Demo: plain Delaunay triangulation of 10000 uniformly distributed random
 * points, rendered as a wireframe mesh.
 */
public class DTUniformDistributionExample extends P2TSimpleExampleBase
{
    public static void main(final String[] args)
    {
        start(DTUniformDistributionExample.class);
    }

    @Inject
    public DTUniformDistributionExample( LogicalLayer logicalLayer, FrameHandler frameHandler )
    {
        super( logicalLayer, frameHandler );
    }

    @Override
    protected void initExample()
    {
        super.initExample();

        Mesh mesh = new Mesh();
        mesh.setDefaultColor( ColorRGBA.BLUE );
        mesh.setRenderState( new WireframeState() );
        _node.attachChild( mesh );

        // Fix: the original loaded a point set from disk via DataLoader and
        // immediately discarded it (the result was overwritten on the next
        // line) while silently swallowing any IOException in an empty catch.
        // The dead load and the empty catch block have been removed.
        PointSet ps = new PointSet( PointGenerator.uniformDistribution( 10000, 60 ) );
        Poly2Tri.triangulate( ps );
        ArdorMeshMapper.updateTriangleMesh( mesh, ps );
    }
}
| Java |
/**
* Copyright (c) 2008-2009 Ardor Labs, Inc.
*
* This file is part of Ardor3D.
*
* Ardor3D is free software: you can redistribute it and/or modify it
* under the terms of its license which may be found in the accompanying
* LICENSE file or at <http://www.ardor3d.com/LICENSE>.
*/
package org.poly2tri.examples.ardor3d.misc;
import java.nio.FloatBuffer;
import com.ardor3d.math.Vector3;
import com.ardor3d.math.type.ReadOnlyVector3;
import com.ardor3d.scenegraph.Mesh;
import com.ardor3d.util.geom.BufferUtils;
/**
 * A single-triangle Mesh with positions, one flat face normal, texture
 * coordinates and indices. The default triangle has its corners at 90, 210
 * and 330 degrees on the unit circle in the z = 0 plane.
 */
public class Triangle extends Mesh
{
    private static final long serialVersionUID = 1L;

    /** Creates the default triangle named "Triangle". */
    public Triangle()
    {
        this( "Triangle" );
    }

    /**
     * Creates the default triangle (corners at 90, 210 and 330 degrees on the
     * unit circle) with the given node name.
     */
    public Triangle(final String name )
    {
        this( name,
              new Vector3( Math.cos( Math.toRadians( 90 ) ), Math.sin( Math.toRadians( 90 ) ), 0 ),
              new Vector3( Math.cos( Math.toRadians( 210 ) ), Math.sin( Math.toRadians( 210 ) ), 0 ),
              new Vector3( Math.cos( Math.toRadians( 330 ) ), Math.sin( Math.toRadians( 330 ) ), 0 ));
    }

    /**
     * Creates a triangle from three explicit corners.
     *
     * @param name node name
     * @param a    first corner
     * @param b    second corner
     * @param c    third corner
     */
    public Triangle(final String name,
                    ReadOnlyVector3 a,
                    ReadOnlyVector3 b,
                    ReadOnlyVector3 c )
    {
        super(name);
        initialize(a,b,c);
    }

    /**
     * Fills the mesh buffers: three vertices, one flat normal
     * (normalize((b - a) x (c - a))) repeated for each corner, simple corner
     * texture coordinates and the index triple 0,1,2.
     */
    private void initialize(ReadOnlyVector3 a, ReadOnlyVector3 b, ReadOnlyVector3 c )
    {
        final int verts = 3; // NOTE(review): unused local, kept as-is
        _meshData.setVertexBuffer(BufferUtils.createVector3Buffer(3));
        _meshData.setNormalBuffer(BufferUtils.createVector3Buffer(3));
        final FloatBuffer tbuf = BufferUtils.createVector2Buffer(3);
        _meshData.setTextureBuffer(tbuf, 0);
        _meshData.setIndexBuffer(BufferUtils.createIntBuffer(3));

        // Flat face normal shared by all three vertices.
        Vector3 ba = Vector3.fetchTempInstance();
        Vector3 ca = Vector3.fetchTempInstance();
        ba.set( b ).subtractLocal( a );
        ca.set( c ).subtractLocal( a );
        ba.crossLocal( ca ).normalizeLocal();
        _meshData.getNormalBuffer().put(ba.getXf()).put(ba.getYf()).put(ba.getZf());
        _meshData.getNormalBuffer().put(ba.getXf()).put(ba.getYf()).put(ba.getZf());
        _meshData.getNormalBuffer().put(ba.getXf()).put(ba.getYf()).put(ba.getZf());
        Vector3.releaseTempInstance( ba );
        Vector3.releaseTempInstance( ca );

        // One texture coordinate pair per corner.
        tbuf.put(0).put(1);
        tbuf.put(0).put(0);
        tbuf.put(1).put(0);

        _meshData.getIndexBuffer().put(0);
        _meshData.getIndexBuffer().put(1);
        _meshData.getIndexBuffer().put(2);

        _meshData.getVertexBuffer().put(a.getXf()).put(a.getYf()).put(a.getZf());
        _meshData.getVertexBuffer().put(b.getXf()).put(b.getYf()).put(b.getZf());
        _meshData.getVertexBuffer().put(c.getXf()).put(c.getYf()).put(c.getZf());
    }
}
package org.poly2tri.examples.ardor3d.misc;
/**
 * Example polygon model files bundled with the poly2tri Ardor3D examples.
 * Each constant names a data file plus presentation hints: a render scale,
 * an x/y offset, and whether the file was authored with an inverted y-axis
 * (y pointing down) that loaders must flip.
 */
public enum ExampleModels
{
    Test         ( "test.dat",         1,   0,    0,    true  ),
    Two          ( "2.dat",            1,   0,    0,    true  ),
    Debug        ( "debug.dat",        1,   0,    0,    false ),
    Debug2       ( "debug2.dat",       1,   0,    0,    false ),
    Bird         ( "bird.dat",         1,   0,    0,    false ),
    Custom       ( "funny.dat",        1,   0,    0,    false ),
    Diamond      ( "diamond.dat",      1,   0,    0,    false ),
    Dude         ( "dude.dat",         1,   -0.1, 0,    true  ),
    Nazca_heron  ( "nazca_heron.dat",  1.3, 0,    0.35, false ),
    Nazca_monkey ( "nazca_monkey.dat", 1,   0,    0,    false ),
    Star         ( "star.dat",         1,   0,    0,    false ),
    Strange      ( "strange.dat",      1,   0,    0,    true  ),
    Tank         ( "tank.dat",         1.3, 0,    0,    true  );

    /** Classpath prefix shared by all model data files. */
    private final static String m_basePath = "org/poly2tri/examples/data/";

    // Enum constants are immutable; all fields are final.
    // "_invertedYAxis" renamed to match the m_* convention of its siblings.
    private final String m_filename;
    private final double m_scale;
    private final double m_x;
    private final double m_y;
    private final boolean m_invertedYAxis;

    ExampleModels( String filename, double scale, double x, double y, boolean invertedY )
    {
        m_filename = filename;
        m_scale = scale;
        m_x = x;
        m_y = y;
        m_invertedYAxis = invertedY;
    }

    /** @return full classpath resource path of the data file */
    public String getFilename()
    {
        return m_basePath + m_filename;
    }

    /** @return scale factor to apply when presenting the model */
    public double getScale()
    {
        return m_scale;
    }

    /** @return x offset hint (not used by every example) */
    public double getX()
    {
        return m_x;
    }

    /** @return y offset hint (not used by every example) */
    public double getY()
    {
        return m_y;
    }

    /** @return true if the file's y-axis points down and must be flipped on load */
    public boolean invertedYAxis()
    {
        return m_invertedYAxis;
    }
}
package org.poly2tri.examples.ardor3d.misc;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.StringTokenizer;
import org.poly2tri.geometry.polygon.Polygon;
import org.poly2tri.polygon.ardor3d.ArdorPolygon;
import org.poly2tri.triangulation.TriangulationPoint;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ardor3d.math.Vector3;
/**
 * Loads example polygon data files into {@link Polygon} instances and can
 * dump point arrays back to disk.
 *
 * NOTE(review): this class duplicates most of DataLoader in the same
 * package; consider keeping only one of the two.
 */
public class PolygonLoader
{
    private final static Logger logger = LoggerFactory.getLogger( PolygonLoader.class );

    /**
     * Reads "x y" coordinate pairs (space or comma separated, one point per
     * line) from the model's classpath resource, flips the y-axis if the
     * model requests it, then centers the outline on the origin and
     * uniformly rescales it to fit a square of side {@code scale}.
     *
     * @param model the bundled example model to load
     * @param scale side length of the target bounding square
     * @return the loaded, centered and scaled polygon
     * @throws FileNotFoundException if the resource cannot be found
     * @throws IOException if the file is empty or cannot be read
     */
    public static Polygon loadModel( ExampleModels model, double scale ) throws FileNotFoundException, IOException
    {
        String line;
        ArrayList<Vector3> points = new ArrayList<Vector3>();

        InputStream istream = PolygonLoader.class.getClassLoader().getResourceAsStream( model.getFilename() );
        if( istream == null )
        {
            throw new FileNotFoundException( "Couldn't find " + model );
        }
        BufferedReader reader = new BufferedReader( new InputStreamReader( istream ) );
        try
        {
            while( ( line = reader.readLine() ) != null )
            {
                StringTokenizer tokens = new StringTokenizer( line, " ," );
                points.add( new Vector3( Float.parseFloat( tokens.nextToken() ),
                                         Float.parseFloat( tokens.nextToken() ),
                                         0f ) );
            }
        }
        finally
        {
            reader.close(); // previously leaked on every call
        }
        if( points.isEmpty() )
        {
            throw new IOException( "no data in file " + model );
        }

        // Compute the axis-aligned bounding box, flipping y in place first
        // when the model was authored with an inverted y-axis.
        double maxX, maxY, minX, minY;
        maxX = minX = points.get( 0 ).getX();
        if( model.invertedYAxis() )
        {
            maxY = minY = -points.get( 0 ).getY();
        }
        else
        {
            maxY = minY = points.get( 0 ).getY();
        }
        for( Vector3 p : points )
        {
            if( model.invertedYAxis() )
            {
                p.setY( -p.getY() );
            }
            maxX = Math.max( p.getX(), maxX );
            maxY = Math.max( p.getY(), maxY );
            minX = Math.min( p.getX(), minX );
            minY = Math.min( p.getY(), minY );
        }

        double width = maxX - minX;
        double height = maxY - minY;
        double xScale = scale / width;
        double yScale = scale / height;

        // Translate the bounding-box center to the origin, then scale
        // uniformly by the tighter axis factor so the model stays inside
        // the target square. (The original code had an if/else here whose
        // branches were identical; collapsed.)
        double centerX = maxX - width / 2;
        double centerY = maxY - height / 2;
        double uniform = Math.min( xScale, yScale );
        for( Vector3 p : points )
        {
            p.subtractLocal( centerX, centerY, 0 );
            p.multiplyLocal( uniform );
        }
        return new ArdorPolygon( points );
    }

    /**
     * Writes the given points as "x y" lines to
     * {@code path<timestamp>.dat}. Failures are logged, not thrown.
     *
     * @param path directory/file prefix for the output file
     * @param points points to dump, one per line
     */
    public static void saveModel( String path, TriangulationPoint[] points )
    {
        BufferedWriter w = null;
        String file = path + System.currentTimeMillis() + ".dat";
        try
        {
            w = new BufferedWriter( new FileWriter( file ) );
            for( TriangulationPoint p : points )
            {
                w.write( Float.toString( p.getXf() ) + " " + Float.toString( p.getYf() ) );
                w.newLine();
            }
            logger.info( "Saved polygon\n" + file );
        }
        catch( IOException e )
        {
            // Include the target file and the cause instead of dropping them.
            logger.error( "Failed to save model " + file, e );
        }
        finally
        {
            if( w != null )
            {
                try
                {
                    w.close();
                }
                catch( IOException ignored )
                {
                    // best-effort close; nothing sensible to do here
                }
            }
        }
    }

    // The large commented-out saveTriLine() SVG dump that used to live here
    // was dead code; a live version exists in DataLoader.saveTriLine().
}
| Java |
package org.poly2tri.examples.ardor3d.misc;
/**
 * Example point-set data files bundled with the poly2tri Ardor3D examples.
 * Each constant names a data file plus presentation hints: a render scale,
 * an x/y offset, and whether the file was authored with an inverted y-axis.
 */
public enum ExampleSets
{
    Example1 ( "example1.dat", 1, 0, 0, true  ),
    Example2 ( "example2.dat", 1, 0, 0, true  ),
    Example3 ( "example3.dat", 1, 0, 0, false ),
    Example4 ( "example4.dat", 1, 0, 0, false );

    /** Classpath prefix shared by all point-set data files. */
    private final static String m_basePath = "org/poly2tri/examples/data/pointsets/";

    // Enum constants are immutable; all fields are final.
    // "_invertedYAxis" renamed to match the m_* convention of its siblings.
    private final String m_filename;
    private final double m_scale;
    private final double m_x;
    private final double m_y;
    private final boolean m_invertedYAxis;

    ExampleSets( String filename, double scale, double x, double y, boolean invertedY )
    {
        m_filename = filename;
        m_scale = scale;
        m_x = x;
        m_y = y;
        m_invertedYAxis = invertedY;
    }

    /** @return full classpath resource path of the data file */
    public String getFilename()
    {
        return m_basePath + m_filename;
    }

    /** @return scale factor to apply when presenting the point set */
    public double getScale()
    {
        return m_scale;
    }

    /** @return x offset hint (not used by every example) */
    public double getX()
    {
        return m_x;
    }

    /** @return y offset hint (not used by every example) */
    public double getY()
    {
        return m_y;
    }

    /** @return true if the file's y-axis points down and must be flipped on load */
    public boolean invertedYAxis()
    {
        return m_invertedYAxis;
    }
}
package org.poly2tri.examples.ardor3d.misc;
import org.poly2tri.triangulation.point.TPoint;
public class MyPoint extends TPoint
{
int index;
public MyPoint( double x, double y )
{
super( x, y );
}
public void setIndex(int i)
{
index = i;
}
public int getIndex()
{
return index;
}
public boolean equals(Object other)
{
if (!(other instanceof MyPoint)) return false;
MyPoint p = (MyPoint)other;
return getX() == p.getX() && getY() == p.getY();
}
public int hashCode() { return (int)getX() + (int)getY(); }
}
| Java |
package org.poly2tri.examples.ardor3d.misc;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileNotFoundException;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.StringTokenizer;
import org.poly2tri.geometry.polygon.Polygon;
import org.poly2tri.geometry.polygon.PolygonSet;
import org.poly2tri.polygon.ardor3d.ArdorPolygon;
import org.poly2tri.triangulation.TriangulationPoint;
import org.poly2tri.triangulation.delaunay.DelaunayTriangle;
import org.poly2tri.triangulation.point.TPoint;
import org.poly2tri.triangulation.sets.PointSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ardor3d.math.Vector3;
/**
 * Loads example polygon and point-set data files, and can dump point arrays
 * and triangle lines back to disk.
 *
 * NOTE(review): loadModel/saveModel duplicate PolygonLoader in this
 * package; consider keeping only one of the two.
 */
public class DataLoader
{
    private final static Logger logger = LoggerFactory.getLogger( DataLoader.class );

    /**
     * Reads "x y" coordinate pairs (space or comma separated, one point per
     * line) from the model's classpath resource, flips the y-axis if the
     * model requests it, then centers the outline on the origin and
     * uniformly rescales it to fit a square of side {@code scale}.
     *
     * @param model the bundled example model to load
     * @param scale side length of the target bounding square
     * @return the loaded, centered and scaled polygon
     * @throws FileNotFoundException if the resource cannot be found
     * @throws IOException if the file is empty or cannot be read
     */
    public static Polygon loadModel( ExampleModels model, double scale )
        throws FileNotFoundException, IOException
    {
        String line;
        ArrayList<Vector3> points = new ArrayList<Vector3>();

        InputStream istream = DataLoader.class.getClassLoader().getResourceAsStream( model.getFilename() );
        if( istream == null )
        {
            throw new FileNotFoundException( "Couldn't find " + model );
        }
        BufferedReader reader = new BufferedReader( new InputStreamReader( istream ) );
        try
        {
            while( ( line = reader.readLine() ) != null )
            {
                StringTokenizer tokens = new StringTokenizer( line, " ," );
                points.add( new Vector3( Double.parseDouble( tokens.nextToken() ),
                                         Double.parseDouble( tokens.nextToken() ),
                                         0f ) );
            }
        }
        finally
        {
            reader.close(); // previously leaked on every call
        }
        if( points.isEmpty() )
        {
            throw new IOException( "no data in file " + model );
        }

        // Compute the axis-aligned bounding box, flipping y in place first
        // when the model was authored with an inverted y-axis.
        double maxX, maxY, minX, minY;
        maxX = minX = points.get( 0 ).getX();
        if( model.invertedYAxis() )
        {
            maxY = minY = -points.get( 0 ).getY();
        }
        else
        {
            maxY = minY = points.get( 0 ).getY();
        }
        for( Vector3 p : points )
        {
            if( model.invertedYAxis() )
            {
                p.setY( -p.getY() );
            }
            maxX = Math.max( p.getX(), maxX );
            maxY = Math.max( p.getY(), maxY );
            minX = Math.min( p.getX(), minX );
            minY = Math.min( p.getY(), minY );
        }

        double width = maxX - minX;
        double height = maxY - minY;
        double xScale = scale / width;
        double yScale = scale / height;

        // Translate the bounding-box center to the origin, then scale
        // uniformly by the tighter axis factor so the model stays inside
        // the target square. (The original code had an if/else here whose
        // branches were identical; collapsed.)
        double centerX = maxX - width / 2;
        double centerY = maxY - height / 2;
        double uniform = Math.min( xScale, yScale );
        for( Vector3 p : points )
        {
            p.subtractLocal( centerX, centerY, 0 );
            p.multiplyLocal( uniform );
        }
        return new ArdorPolygon( points );
    }

    /**
     * Reads "x y" pairs from the point-set resource, scaling each
     * coordinate by {@code scale} as it is read.
     *
     * @param set the bundled example point set to load
     * @param scale factor applied to every coordinate
     * @return the loaded point set
     * @throws FileNotFoundException if the resource cannot be found
     * @throws IOException if the file is empty or cannot be read
     */
    public static PointSet loadPointSet( ExampleSets set, double scale )
        throws FileNotFoundException, IOException
    {
        String line;
        ArrayList<TriangulationPoint> points = new ArrayList<TriangulationPoint>();

        InputStream istream = DataLoader.class.getClassLoader().getResourceAsStream( set.getFilename() );
        if( istream == null )
        {
            throw new FileNotFoundException( "Couldn't find " + set );
        }
        BufferedReader reader = new BufferedReader( new InputStreamReader( istream ) );
        try
        {
            while( ( line = reader.readLine() ) != null )
            {
                StringTokenizer tokens = new StringTokenizer( line, " ," );
                points.add( new TPoint( scale * Float.parseFloat( tokens.nextToken() ),
                                        scale * Float.parseFloat( tokens.nextToken() ) ) );
            }
        }
        finally
        {
            reader.close(); // previously leaked on every call
        }
        if( points.isEmpty() )
        {
            throw new IOException( "no data in file " + set );
        }
        // Unlike loadModel, point sets are not re-centered or normalized;
        // a commented-out centering pass that used to live here was removed
        // as dead code.
        return new PointSet( points );
    }

    /**
     * Writes the given points as "x y" lines to
     * {@code path<timestamp>.dat}. Failures are logged, not thrown.
     *
     * @param path directory/file prefix for the output file
     * @param points points to dump, one per line
     */
    public static void saveModel( String path, TriangulationPoint[] points )
    {
        BufferedWriter w = null;
        String file = path + System.currentTimeMillis() + ".dat";
        try
        {
            w = new BufferedWriter( new FileWriter( file ) );
            for( TriangulationPoint p : points )
            {
                w.write( Float.toString( p.getXf() ) + " " + Float.toString( p.getYf() ) );
                w.newLine();
            }
            logger.info( "Saved polygon\n" + file );
        }
        catch( IOException e )
        {
            // Include the target file and the cause instead of dropping them.
            logger.error( "Failed to save model " + file, e );
        }
        finally
        {
            if( w != null )
            {
                try
                {
                    w.close();
                }
                catch( IOException ignored )
                {
                    // best-effort close; nothing sensible to do here
                }
            }
        }
    }

    /**
     * This is a very unoptimal dump of the triangles as absolute lines.
     * For manual importation to an SVG.<br>
     * Only the first polygon of the set is dumped; failures are logged,
     * not thrown.
     *
     * @param path directory/file prefix for the output file
     * @param ps polygon set whose first polygon's triangles are dumped
     */
    public static void saveTriLine( String path, PolygonSet ps )
    {
        BufferedWriter w = null;
        String file = path + System.currentTimeMillis() + ".tri";
        if( ps.getPolygons() == null || ps.getPolygons().isEmpty() )
        {
            return;
        }
        try
        {
            w = new BufferedWriter( new FileWriter( file ) );
            for( DelaunayTriangle t : ps.getPolygons().get( 0 ).getTriangles() )
            {
                for( int i = 0; i < 3; i++ )
                {
                    w.write( Float.toString( t.points[i].getXf() ) + "," + Float.toString( t.points[i].getYf() ) + " " );
                }
            }
            logger.info( "Saved triangle lines\n" + file );
        }
        catch( IOException e )
        {
            // Keep the cause; the old code only appended e.getMessage().
            logger.error( "Failed to save triangle lines " + file, e );
        }
        finally
        {
            if( w != null )
            {
                try
                {
                    w.close();
                }
                catch( IOException ignored )
                {
                    // best-effort close; nothing sensible to do here
                }
            }
        }
    }
}
| Java |
package org.poly2tri.examples.ardor3d;
import java.util.ArrayList;
import org.poly2tri.Poly2Tri;
import org.poly2tri.examples.ardor3d.base.P2TSimpleExampleBase;
import org.poly2tri.geometry.polygon.Polygon;
import org.poly2tri.geometry.polygon.PolygonPoint;
import org.poly2tri.triangulation.point.TPoint;
import org.poly2tri.triangulation.tools.ardor3d.ArdorMeshMapper;
import org.poly2tri.triangulation.util.PolygonGenerator;
import com.ardor3d.framework.FrameHandler;
import com.ardor3d.input.logical.LogicalLayer;
import com.ardor3d.math.ColorRGBA;
import com.ardor3d.renderer.state.WireframeState;
import com.ardor3d.scenegraph.Mesh;
import com.ardor3d.scenegraph.Node;
import com.google.inject.Inject;
/**
 * Demonstrates the effect of a Steiner point on a constrained Delaunay
 * triangulation: each polygon is triangulated as-is (blue) and again after
 * adding an extra interior point at the origin (red).
 */
public class CDTSteinerPointExample extends P2TSimpleExampleBase
{
public static void main(final String[] args)
{
start(CDTSteinerPointExample.class);
}
@Inject
public CDTSteinerPointExample( LogicalLayer logicalLayer, FrameHandler frameHandler )
{
super( logicalLayer, frameHandler );
}
/**
 * Builds four meshes in a 2x2 layout: a circle polygon (top/bottom left)
 * and a random sweep polygon (top/bottom right), each shown without and
 * with a Steiner point at (0,0).
 */
@Override
protected void initExample()
{
super.initExample();
// Render everything in this node as wireframe so the triangulation is visible.
Node node = new Node();
node.setRenderState( new WireframeState() );
_node.attachChild( node );
Polygon poly;
poly = createCirclePolygon( 32, 1.5 );
// top left
Mesh mesh = new Mesh();
mesh.setDefaultColor( ColorRGBA.BLUE );
mesh.setTranslation( -2, 2, 0 );
node.attachChild( mesh );
Poly2Tri.triangulate( poly );
ArdorMeshMapper.updateTriangleMesh( mesh, poly );
// bottom left
mesh = new Mesh();
mesh.setDefaultColor( ColorRGBA.RED );
mesh.setTranslation( -2, -2, 0 );
node.attachChild( mesh );
// Same polygon, re-triangulated after adding an interior Steiner point.
poly.addSteinerPoint( new TPoint(0,0) );
Poly2Tri.triangulate( poly );
ArdorMeshMapper.updateTriangleMesh( mesh, poly );
poly = PolygonGenerator.RandomCircleSweep2( 4, 200 );
// top right
mesh = new Mesh();
mesh.setDefaultColor( ColorRGBA.BLUE );
mesh.setTranslation( 2, 2, 0 );
node.attachChild( mesh );
Poly2Tri.triangulate( poly );
ArdorMeshMapper.updateTriangleMesh( mesh, poly );
// bottom right
mesh = new Mesh();
mesh.setDefaultColor( ColorRGBA.RED );
mesh.setTranslation( 2, -2, 0 );
node.attachChild( mesh );
poly.addSteinerPoint( new TPoint(0,0) );
Poly2Tri.triangulate( poly );
ArdorMeshMapper.updateTriangleMesh( mesh, poly );
}
/**
 * Creates a regular n-gon approximating a circle of the given radius,
 * centered on the origin, vertices in counter-clockwise order.
 *
 * @param n number of vertices; values below 3 are clamped to 3
 * @param radius circle radius
 * @return the circle polygon
 */
private Polygon createCirclePolygon( int n, double radius )
{
if( n < 3 ) n=3;
PolygonPoint[] points = new PolygonPoint[n];
for( int i=0; i<n; i++ )
{
points[i] = new PolygonPoint( radius*Math.cos( (2.0*Math.PI*i)/n ),
radius*Math.sin( (2.0*Math.PI*i)/n ) );
}
return new Polygon( points );
}
}
| Java |
package org.poly2tri.examples.ardor3d.base;
import java.net.URISyntaxException;
import org.lwjgl.opengl.Display;
import com.ardor3d.example.ExampleBase;
import com.ardor3d.framework.FrameHandler;
import com.ardor3d.image.Texture;
import com.ardor3d.image.Image.Format;
import com.ardor3d.input.logical.LogicalLayer;
import com.ardor3d.renderer.queue.RenderBucketType;
import com.ardor3d.renderer.state.BlendState;
import com.ardor3d.renderer.state.TextureState;
import com.ardor3d.renderer.state.WireframeState;
import com.ardor3d.scenegraph.Node;
import com.ardor3d.scenegraph.hint.LightCombineMode;
import com.ardor3d.scenegraph.shape.Quad;
import com.ardor3d.util.TextureManager;
import com.ardor3d.util.resource.ResourceLocatorTool;
import com.ardor3d.util.resource.SimpleResourceLocator;
import com.google.inject.Inject;
/**
 * Minimal base class for poly2tri Ardor3D examples: sets up the camera,
 * resource locators for example data/textures, a scene node for subclasses
 * to populate, and an alpha-blended poly2tri logotype in the corner.
 */
public abstract class P2TSimpleExampleBase extends ExampleBase
{
// Scene node subclasses attach their content to.
protected Node _node;
// Ortho quad showing the poly2tri logotype.
protected Quad _logotype;
// Display dimensions in pixels, captured at init time.
protected int _width,_height;
@Inject
public P2TSimpleExampleBase( LogicalLayer logicalLayer, FrameHandler frameHandler )
{
super( logicalLayer, frameHandler );
}
/**
 * Configures the canvas, registers resource locators for the bundled
 * example data and textures, and attaches the logotype overlay.
 * Subclasses must call super.initExample() first.
 */
@Override
protected void initExample()
{
_canvas.setVSyncEnabled( true );
_canvas.getCanvasRenderer().getCamera().setLocation(0, 0, 65);
_width = Display.getDisplayMode().getWidth();
_height = Display.getDisplayMode().getHeight();
// Disable lighting so vertex/default colors are shown as-is.
_root.getSceneHints().setLightCombineMode( LightCombineMode.Off );
_node = new Node();
_node.getSceneHints().setLightCombineMode( LightCombineMode.Off );
// _node.setRenderState( new WireframeState() );
_root.attachChild( _node );
// Make bundled model data and textures findable by the resource tools.
try {
SimpleResourceLocator srl = new SimpleResourceLocator(ExampleBase.class.getClassLoader().getResource("org/poly2tri/examples/data/"));
ResourceLocatorTool.addResourceLocator(ResourceLocatorTool.TYPE_MODEL, srl);
SimpleResourceLocator sr2 = new SimpleResourceLocator(ExampleBase.class.getClassLoader().getResource("org/poly2tri/examples/textures/"));
ResourceLocatorTool.addResourceLocator(ResourceLocatorTool.TYPE_TEXTURE, sr2);
} catch (final URISyntaxException ex) {
ex.printStackTrace();
}
// Logotype: 128x128 ortho quad in the top-left corner with alpha blending.
_logotype = new Quad("box", 128, 128 );
_logotype.setTranslation( 74, _height - 74, 0 );
_logotype.getSceneHints().setRenderBucketType( RenderBucketType.Ortho );
BlendState bs = new BlendState();
bs.setBlendEnabled( true );
bs.setEnabled( true );
bs.setSourceFunction(BlendState.SourceFunction.SourceAlpha);
bs.setDestinationFunction(BlendState.DestinationFunction.OneMinusSourceAlpha);
_logotype.setRenderState( bs );
TextureState ts = new TextureState();
ts.setEnabled(true);
ts.setTexture(TextureManager.load("poly2tri_logotype_256x256.png",
Texture.MinificationFilter.Trilinear,
Format.GuessNoCompression, true));
_logotype.setRenderState(ts);
_root.attachChild( _logotype );
}
}
| Java |
package org.poly2tri.examples.ardor3d.base;
import java.nio.FloatBuffer;
import java.util.List;
import org.poly2tri.geometry.polygon.PolygonSet;
import org.poly2tri.triangulation.TriangulationAlgorithm;
import org.poly2tri.triangulation.TriangulationPoint;
import org.poly2tri.triangulation.TriangulationProcess;
import org.poly2tri.triangulation.delaunay.DelaunayTriangle;
import org.poly2tri.triangulation.delaunay.sweep.DTSweepContext;
import org.poly2tri.triangulation.tools.ardor3d.ArdorMeshMapper;
import com.ardor3d.framework.Canvas;
import com.ardor3d.framework.FrameHandler;
import com.ardor3d.input.Key;
import com.ardor3d.input.logical.InputTrigger;
import com.ardor3d.input.logical.KeyPressedCondition;
import com.ardor3d.input.logical.LogicalLayer;
import com.ardor3d.input.logical.TriggerAction;
import com.ardor3d.input.logical.TwoInputStates;
import com.ardor3d.math.ColorRGBA;
import com.ardor3d.math.Vector3;
import com.ardor3d.renderer.IndexMode;
import com.ardor3d.renderer.queue.RenderBucketType;
import com.ardor3d.renderer.state.WireframeState;
import com.ardor3d.scenegraph.Mesh;
import com.ardor3d.scenegraph.MeshData;
import com.ardor3d.scenegraph.Node;
import com.ardor3d.scenegraph.Point;
import com.ardor3d.scenegraph.hint.LightCombineMode;
import com.ardor3d.ui.text.BasicText;
import com.ardor3d.util.ReadOnlyTimer;
import com.ardor3d.util.geom.BufferUtils;
/**
 * Base class for examples that run a triangulation on a background
 * {@link TriangulationProcess} and visualize the result as a triangle mesh
 * plus vertex points, with on-screen help text and HOME/END key toggles.
 */
public abstract class P2TExampleBase extends P2TSimpleExampleBase
{
// Background process that runs the DTSweep triangulation.
protected TriangulationProcess _process;
protected CDTSweepMesh _cdtSweepMesh;
protected CDTSweepPoints _cdtSweepPoints;
// Input for triangulate(); set by subclasses before calling it.
protected PolygonSet _polygonSet;
// Timestamp of the last process result we rendered; used to detect new results.
private long _processTimestamp;
/** Text fields used to present info about the example. */
protected final BasicText _exampleInfo[] = new BasicText[7];
public P2TExampleBase( LogicalLayer logicalLayer, FrameHandler frameHandler )
{
super( logicalLayer, frameHandler );
}
/**
 * Creates the triangulation process and the mesh/point scene elements,
 * then builds the ortho text overlay. Subclasses must call
 * super.initExample() first.
 */
@Override
protected void initExample()
{
super.initExample();
// Warmup the triangulation code for better performance
// when we need triangulation during runtime
// Poly2Tri.warmup();
_process = new TriangulationProcess(TriangulationAlgorithm.DTSweep);
_cdtSweepPoints = new CDTSweepPoints();
_cdtSweepMesh = new CDTSweepMesh();
_node.attachChild( _cdtSweepPoints.getSceneNode() );
_node.attachChild( _cdtSweepMesh.getSceneNode() );
// Help/info text lines, stacked bottom-up in ortho space.
final Node textNodes = new Node("Text");
textNodes.getSceneHints().setRenderBucketType(RenderBucketType.Ortho);
textNodes.getSceneHints().setLightCombineMode(LightCombineMode.Off);
_root.attachChild( textNodes );
for (int i = 0; i < _exampleInfo.length; i++)
{
_exampleInfo[i] = BasicText.createDefaultTextLabel("Text", "", 16);
_exampleInfo[i].setTranslation(new Vector3(10, (_exampleInfo.length-i-1) * 20 + 10, 0));
textNodes.attachChild(_exampleInfo[i]);
}
updateText();
}
/** @return the process's triangulation context, cast to the DTSweep type. */
protected DTSweepContext getContext()
{
return (DTSweepContext)_process.getContext();
}
/**
 * Update text information.
 */
protected void updateText()
{
_exampleInfo[0].setText("");
_exampleInfo[1].setText("[Home] Toggle wireframe");
_exampleInfo[2].setText("[End] Toggle vertex points");
}
/**
 * Per-frame hook: when the background process has produced a new result
 * (timestamp changed), refresh the meshes and the stats line.
 */
@Override
protected void updateExample(final ReadOnlyTimer timer)
{
if( _process.isDone() && _processTimestamp != _process.getTimestamp() )
{
_processTimestamp = _process.getTimestamp();
updateMesh();
_exampleInfo[0].setText("[" + _process.getTriangulationTime() + "ms] " + _process.getPointCount() + " points" );
}
}
// Shut the background process down before exiting.
public void exit()
{
super.exit();
_process.shutdown();
}
/** Kicks off an asynchronous triangulation of the current polygon set. */
protected void triangulate()
{
_process.triangulate( _polygonSet );
}
/**
 * Pushes the current triangulation state into the scene elements. In debug
 * mode the (possibly partial) state is read from the context; otherwise
 * the finished result is read from the first polygon of the set.
 */
protected void updateMesh()
{
if( _process.getContext().getTriangulatable() != null )
{
if( _process.getContext().isDebugEnabled() )
{
if( _process.isDone() )
{
_cdtSweepMesh.update( _process.getContext().getTriangulatable().getTriangles() );
_cdtSweepPoints.update( _process.getContext().getTriangulatable().getPoints() );
}
else
{
_cdtSweepMesh.update( _process.getContext().getTriangles() );
_cdtSweepPoints.update( _process.getContext().getPoints() );
}
}
else
{
_cdtSweepMesh.update( _polygonSet.getPolygons().get(0).getTriangles() );
_cdtSweepPoints.update( _polygonSet.getPolygons().get(0).getPoints() );
}
}
}
/**
 * Registers HOME (wireframe toggle) and END (vertex point toggle).
 * NOTE(review): "toogle" is a typo for "toggle" in the method names below,
 * but renaming would break external callers.
 */
@Override
public void registerInputTriggers()
{
super.registerInputTriggers();
_controlHandle.setMoveSpeed( 10 );
// HOME - toogleWireframe
_logicalLayer.registerTrigger( new InputTrigger( new KeyPressedCondition( Key.HOME ), new TriggerAction() {
public void perform( final Canvas canvas, final TwoInputStates inputState, final double tpf )
{
_cdtSweepMesh.toogleWireframe();
}
} ) );
// END - tooglePoints
_logicalLayer.registerTrigger( new InputTrigger( new KeyPressedCondition( Key.END ), new TriggerAction() {
public void perform( final Canvas canvas, final TwoInputStates inputState, final double tpf )
{
_cdtSweepPoints.toogleVisibile();
}
} ) );
}
/**
 * A named scene-graph node wrapper that can be refreshed from a
 * triangulation element of type A.
 */
protected abstract class SceneElement<A>
{
protected Node _node;
public SceneElement(String name)
{
_node = new Node(name);
// No picking needed for visualization-only geometry.
_node.getSceneHints().setAllPickingHints( false );
}
// Refreshes the scene representation from the given element.
public abstract void update( A element );
public Node getSceneNode()
{
return _node;
}
}
/** Renders triangulation vertices as red points; toggleable via END. */
protected class CDTSweepPoints extends SceneElement<List<TriangulationPoint>>
{
private Point m_point = new Point();
private boolean _pointsVisible = true;
public CDTSweepPoints()
{
super("Mesh");
m_point.setDefaultColor( ColorRGBA.RED );
m_point.setPointSize( 1 );
// Small z offset so points draw on top of the triangle mesh.
m_point.setTranslation( 0, 0, 0.01 );
_node.attachChild( m_point );
MeshData md = m_point.getMeshData();
// Pre-allocated vertex buffer; size is an initial capacity.
int size = 1000;
FloatBuffer vertBuf = BufferUtils.createFloatBuffer( (int)size*3 );
md.setVertexBuffer( vertBuf );
}
// Shows/hides the point cloud by (de)attaching it from the scene.
public void toogleVisibile()
{
if( _pointsVisible )
{
m_point.removeFromParent();
_pointsVisible = false;
}
else
{
_node.attachChild( m_point );
_pointsVisible = true;
}
}
@Override
public void update( List<TriangulationPoint> list )
{
ArdorMeshMapper.updateVertexBuffer( m_point, list );
}
}
/** Renders the triangulation as a blue triangle mesh; HOME toggles wireframe. */
protected class CDTSweepMesh extends SceneElement<List<DelaunayTriangle>>
{
private Mesh m_mesh = new Mesh();
private WireframeState _ws = new WireframeState();
public CDTSweepMesh()
{
super("Mesh");
MeshData md;
m_mesh.setDefaultColor( ColorRGBA.BLUE );
m_mesh.setRenderState( _ws );
_node.attachChild( m_mesh );
md = m_mesh.getMeshData();
// Pre-allocated buffer for triangles (3 vertices * 3 floats each).
int size = 1000;
FloatBuffer vertBuf = BufferUtils.createFloatBuffer( (int)size*3*3 );
md.setVertexBuffer( vertBuf );
md.setIndexMode( IndexMode.Triangles );
}
// Flips the wireframe render state on/off.
public void toogleWireframe()
{
if( _ws.isEnabled() )
{
_ws.setEnabled( false );
}
else
{
_ws.setEnabled( true );
}
}
@Override
public void update( List<DelaunayTriangle> triangles )
{
ArdorMeshMapper.updateTriangleMesh( m_mesh, triangles );
}
}
}
| Java |
/* Poly2Tri
* Copyright (c) 2009-2010, Poly2Tri Contributors
* http://code.google.com/p/poly2tri/
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of Poly2Tri nor the names of its contributors may be
* used to endorse or promote products derived from this software without specific
* prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.poly2tri.examples.ardor3d;
import java.io.IOException;
import java.nio.FloatBuffer;
import java.util.ArrayList;
import java.util.List;
import org.poly2tri.examples.ardor3d.base.P2TExampleBase;
import org.poly2tri.examples.ardor3d.misc.DataLoader;
import org.poly2tri.examples.ardor3d.misc.ExampleModels;
import org.poly2tri.examples.ardor3d.misc.Triangle;
import org.poly2tri.geometry.polygon.Polygon;
import org.poly2tri.geometry.polygon.PolygonSet;
import org.poly2tri.triangulation.TriangulationPoint;
import org.poly2tri.triangulation.delaunay.DelaunayTriangle;
import org.poly2tri.triangulation.delaunay.sweep.AdvancingFront;
import org.poly2tri.triangulation.delaunay.sweep.AdvancingFrontNode;
import org.poly2tri.triangulation.delaunay.sweep.DTSweepConstraint;
import org.poly2tri.triangulation.delaunay.sweep.DTSweepContext;
import org.poly2tri.triangulation.point.TPoint;
import org.poly2tri.triangulation.sets.ConstrainedPointSet;
import org.poly2tri.triangulation.sets.PointSet;
import org.poly2tri.triangulation.util.PolygonGenerator;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.ardor3d.framework.Canvas;
import com.ardor3d.framework.FrameHandler;
import com.ardor3d.input.Key;
import com.ardor3d.input.logical.InputTrigger;
import com.ardor3d.input.logical.KeyPressedCondition;
import com.ardor3d.input.logical.LogicalLayer;
import com.ardor3d.input.logical.TriggerAction;
import com.ardor3d.input.logical.TwoInputStates;
import com.ardor3d.math.ColorRGBA;
import com.ardor3d.renderer.IndexMode;
import com.ardor3d.renderer.state.WireframeState;
import com.ardor3d.scenegraph.Line;
import com.ardor3d.scenegraph.Point;
import com.ardor3d.util.ReadOnlyTimer;
import com.ardor3d.util.geom.BufferUtils;
import com.google.inject.Inject;
/**
* Toggle Model with PageUp and PageDown<br>
* Toggle Wireframe with Home<br>
* Toggle Vertex points with End<br>
* Use 1 and 2 to generate random polygons<br>
*
* @author Thomas
*
*/
public class CDTModelExample extends P2TExampleBase
{
private final static Logger logger = LoggerFactory.getLogger( CDTModelExample.class );
// Model shown at startup; PageUp/PageDown cycle through ExampleModels.
private ExampleModels m_currentModel = ExampleModels.Two;
// Side length of the square models are normalized into by DataLoader.
private static double SCALE = 50;
// NOTE(review): m_line appears unused in the visible portion of this class — confirm before removing.
private Line m_line;
// Build parameters
private int m_vertexCount = 10000;
// Scene components
private CDTSweepAdvancingFront _cdtSweepAdvancingFront;
private CDTSweepActiveNode _cdtSweepActiveNode;
private CDTSweepActiveTriangles _cdtSweepActiveTriangle;
private CDTSweepActiveEdge _cdtSweepActiveEdge;
// private GUICircumCircle m_circumCircle;
// Last debug step rendered; compared against the process step counter.
private int m_stepCount = 0;
// When true, debug stepping auto-resumes after each rendered step.
private boolean m_autoStep = true;
// NOTE(review): m_dataPath appears unused in the visible portion of this class — confirm before removing.
private final String m_dataPath = "src/main/resources/org/poly2tri/examples/data/";
public static void main(final String[] args)
{
start(CDTModelExample.class);
}
@Inject
public CDTModelExample( LogicalLayer logicalLayer, FrameHandler frameHandler )
{
super( logicalLayer, frameHandler );
}
/**
 * Per-frame hook. In debug mode, advances the step-by-step visualization:
 * when the process has taken a new step, requests read access, refreshes
 * the meshes once readable, and (if auto-stepping) resumes the process.
 */
protected void updateExample(final ReadOnlyTimer timer)
{
super.updateExample( timer );
if( getContext().isDebugEnabled() )
{
int count = _process.getStepCount();
if( m_stepCount < count )
{
_process.requestRead();
if( _process.isReadable() )
{
updateMesh();
m_stepCount = count;
if( m_autoStep )
{
// Let the triangulation continue to the next step.
_process.resume();
}
}
}
}
}
@Override
protected void initExample()
{
super.initExample();
// getContext().isDebugEnabled( true );
// When debug is enabled, attach the extra visualizations of the
// DTSweep internals (advancing front, active node/triangle/edge).
if( getContext().isDebugEnabled() )
{
_cdtSweepAdvancingFront = new CDTSweepAdvancingFront();
_node.attachChild( _cdtSweepAdvancingFront.getSceneNode() );
_cdtSweepActiveNode = new CDTSweepActiveNode();
_node.attachChild( _cdtSweepActiveNode.getSceneNode() );
_cdtSweepActiveTriangle = new CDTSweepActiveTriangles();
_node.attachChild( _cdtSweepActiveTriangle.getSceneNode() );
_cdtSweepActiveEdge = new CDTSweepActiveEdge();
_node.attachChild( _cdtSweepActiveEdge.getSceneNode() );
// m_circumCircle = new GUICircumCircle();
// m_node.attachChild( m_circumCircle.getSceneNode() );
}
// Load the initial model and start its triangulation.
buildModel(m_currentModel);
triangulate();
}
/**
 * Update text information.
 */
protected void updateText()
{
    super.updateText();
    // Help lines specific to this example; slots 0-2 are set by the base class.
    final String[] helpLines = {
        "[PageUp] Next model",
        "[PageDown] Previous model",
        "[1] Generate polygon type A ",
        "[2] Generate polygon type B "
    };
    for( int i = 0; i < helpLines.length; i++ )
    {
        _exampleInfo[3 + i].setText( helpLines[i] );
    }
}
/**
 * Loads the given example model into _polygonSet, falling back to a
 * randomly generated polygon when the model is null or fails to load.
 *
 * @param model the model to load, or null to generate a random polygon
 */
private void buildModel( ExampleModels model )
{
Polygon poly;
if( model != null )
{
try
{
poly = DataLoader.loadModel( model, SCALE );
_polygonSet = new PolygonSet( poly );
}
catch( IOException e )
{
// Fall through to the random polygon below.
logger.info( "Failed to load model {}", e.getMessage() );
model = null;
}
}
if( model == null )
{
_polygonSet = new PolygonSet( PolygonGenerator.RandomCircleSweep( SCALE, m_vertexCount ) );
}
}
/**
 * Builds a small hard-coded constrained point set.
 * NOTE(review): the index array {3,5} presumably constrains an edge
 * between points 3 and 5 — confirm against ConstrainedPointSet's contract.
 *
 * @return the constrained point set
 */
private ConstrainedPointSet buildCustom()
{
    final double[][] coords = {
        { 2.2715518, -4.5233157 },
        { 3.4446202, -3.5232647 },
        { 4.7215156, -4.5233157 },
        { 6.0311967, -3.5232647 },
        { 3.4446202, -7.2578132 },
        { .81390847, -3.5232647 },
    };
    ArrayList<TriangulationPoint> list = new ArrayList<TriangulationPoint>( coords.length );
    for( double[] xy : coords )
    {
        list.add( new TPoint( xy[0], xy[1] ) );
    }
    int[] index = new int[]{ 3, 5 };
    return new ConstrainedPointSet( list, index );
}
/** Starts a triangulation and resets the debug step counter for it. */
protected void triangulate()
{
super.triangulate();
m_stepCount = 0;
}
/**
 * Refreshes the base meshes, plus the debug visualizations of the sweep
 * internals when debug mode is enabled.
 */
protected void updateMesh()
{
super.updateMesh();
DTSweepContext tcx = getContext();
if( tcx.isDebugEnabled() )
{
_cdtSweepActiveTriangle.update( tcx );
_cdtSweepActiveEdge.update( tcx );
_cdtSweepActiveNode.update( tcx );
_cdtSweepAdvancingFront.update( tcx );
// m_circumCircle.update( tcx.getCircumCircle() );
}
}
/**
 * Registers this example's keyboard triggers on top of those added by the
 * superclass: PageUp/PageDown cycle through the example models, 1/2 generate
 * random polygons, C resumes a paused step-debug triangulation, Z toggles
 * auto-stepping and SPACE triangulates a small hard-coded point set.
 */
@Override
public void registerInputTriggers()
{
super.registerInputTriggers();
// PAGEUP - switch to the next example model and re-triangulate
_logicalLayer.registerTrigger( new InputTrigger( new KeyPressedCondition( Key.PAGEUP_PRIOR ), new TriggerAction() {
public void perform( final Canvas canvas, final TwoInputStates inputState, final double tpf )
{
int index;
index = (m_currentModel.ordinal()+1)%ExampleModels.values().length;
m_currentModel = ExampleModels.values()[index];
buildModel(m_currentModel);
_node.setScale( m_currentModel.getScale() );
triangulate();
}
} ) );
// PAGEDOWN - switch to the previous example model (wraps around)
_logicalLayer.registerTrigger( new InputTrigger( new KeyPressedCondition( Key.PAGEDOWN_NEXT ), new TriggerAction() {
public void perform( final Canvas canvas, final TwoInputStates inputState, final double tpf )
{
int index;
index = ((m_currentModel.ordinal()-1)%ExampleModels.values().length + ExampleModels.values().length)%ExampleModels.values().length;
m_currentModel = ExampleModels.values()[index];
buildModel(m_currentModel);
_node.setScale( m_currentModel.getScale() );
triangulate();
}
} ) );
// 1 - generate a random type-A polygon and re-triangulate
_logicalLayer.registerTrigger( new InputTrigger( new KeyPressedCondition( Key.ONE ), new TriggerAction() {
public void perform( final Canvas canvas, final TwoInputStates inputState, final double tpf )
{
_polygonSet = new PolygonSet( PolygonGenerator.RandomCircleSweep( SCALE, m_vertexCount ) );
triangulate();
}
} ) );
// 2 - generate a random type-B polygon (fixed 200 vertices) and re-triangulate
_logicalLayer.registerTrigger( new InputTrigger( new KeyPressedCondition( Key.TWO ), new TriggerAction() {
public void perform( final Canvas canvas, final TwoInputStates inputState, final double tpf )
{
_polygonSet = new PolygonSet( PolygonGenerator.RandomCircleSweep2( SCALE, 200 ) );
triangulate();
}
} ) );
// X - restart a step-debug triangulation (currently disabled; body commented out)
_logicalLayer.registerTrigger( new InputTrigger( new KeyPressedCondition( Key.X ), new TriggerAction() {
public void perform( final Canvas canvas, final TwoInputStates inputState, final double tpf )
{
// Lets create a TriangulationProcess that allows you to step thru the TriangulationAlgorithm
// _process.getContext().isDebugEnabled( true );
// _process.triangulate();
// m_stepCount = 0;
}
} ) );
// C - resume a paused step-debug triangulation
_logicalLayer.registerTrigger( new InputTrigger( new KeyPressedCondition( Key.C ), new TriggerAction() {
public void perform( final Canvas canvas, final TwoInputStates inputState, final double tpf )
{
// updateMesh();
_process.resume();
}
} ) );
// Z - toggle automatic stepping of the triangulation
_logicalLayer.registerTrigger( new InputTrigger( new KeyPressedCondition( Key.Z ), new TriggerAction() {
public void perform( final Canvas canvas, final TwoInputStates inputState, final double tpf )
{
m_autoStep = m_autoStep ? false : true;
}
} ) );
// SPACE - triangulate the small hard-coded constrained point set
// (the original "save triangle lines" behavior is commented out below)
_logicalLayer.registerTrigger( new InputTrigger( new KeyPressedCondition( Key.SPACE ), new TriggerAction() {
public void perform( final Canvas canvas, final TwoInputStates inputState, final double tpf )
{
// PolygonLoader.saveTriLine( m_dataPath, _polygonSet );
m_stepCount = 0;
_process.triangulate( buildCustom() );
}
} ) );
}
/**
 * Debug visualization of the advancing front during the CDT sweep: an orange
 * poly-line (with point markers) through the front nodes, plus yellow lines
 * from each front node to the centroid of its associated triangle.
 */
class CDTSweepAdvancingFront extends SceneElement<DTSweepContext>
{
    protected Line m_nodeLines;
    protected Point m_frontPoints;
    protected Line m_frontLine;

    public CDTSweepAdvancingFront()
    {
        super("AdvancingFront");
        // Front outline: a LineStrip through the front nodes.
        m_frontLine = new Line();
        m_frontLine.getMeshData().setIndexMode( IndexMode.LineStrip );
        m_frontLine.getMeshData().setVertexBuffer( BufferUtils.createVector3Buffer( 800 ) );
        m_frontLine.setDefaultColor( ColorRGBA.ORANGE );
        m_frontLine.setTranslation( 0, 0.05, 0 );
        _node.attachChild( m_frontLine );
        // Point markers drawn on top of the outline; shares the line's buffer.
        m_frontPoints = new Point();
        m_frontPoints.getMeshData().setVertexBuffer( m_frontLine.getMeshData().getVertexBuffer() );
        m_frontPoints.setPointSize( 6 );
        m_frontPoints.setDefaultColor( ColorRGBA.ORANGE );
        m_frontPoints.setTranslation( 0, 0.05, 0 );
        _node.attachChild( m_frontPoints );
        // One segment per node: node position -> centroid of its triangle.
        m_nodeLines = new Line();
        m_nodeLines.getMeshData().setIndexMode( IndexMode.Lines );
        m_nodeLines.getMeshData().setVertexBuffer( BufferUtils.createVector3Buffer( 2*800 ) );
        m_nodeLines.setDefaultColor( ColorRGBA.YELLOW );
        m_nodeLines.setTranslation( 0, 0.05, 0 );
        _node.attachChild( m_nodeLines );
    }

    @Override
    public void update( DTSweepContext tcx )
    {
        AdvancingFront front = tcx.getAdvancingFront();
        if( front == null ) return;

        FloatBuffer fb = m_frontLine.getMeshData().getVertexBuffer();
        FloatBuffer nodeVert = m_nodeLines.getMeshData().getVertexBuffer();
        // Open each buffer to its own full capacity before writing. The old
        // code limited nodeVert to fb's capacity, which is half of nodeVert's
        // and silently truncated the node-to-triangle lines.
        fb.limit( fb.capacity() );
        nodeVert.limit( nodeVert.capacity() );
        fb.rewind();
        nodeVert.rewind();

        int nodeCount = 0;
        int lineCount = 0;
        AdvancingFrontNode node = front.head;
        TriangulationPoint point;
        DelaunayTriangle tri;
        do
        {
            point = node.getPoint();
            fb.put( point.getXf() ).put( point.getYf() ).put( point.getZf() );
            tri = node.getTriangle();
            if( tri != null )
            {
                // Segment from the node to the centroid of its triangle.
                nodeVert.put( point.getXf() ).put( point.getYf() ).put( point.getZf() );
                nodeVert.put( ( tri.points[0].getXf() + tri.points[1].getXf() + tri.points[2].getXf() )/3 );
                nodeVert.put( ( tri.points[0].getYf() + tri.points[1].getYf() + tri.points[2].getYf() )/3 );
                nodeVert.put( ( tri.points[0].getZf() + tri.points[1].getZf() + tri.points[2].getZf() )/3 );
                lineCount++;
            }
            nodeCount++;
        } while( (node = node.getNext()) != null );
        // Trim to what was actually written: 3 floats per front point and two
        // 3-float endpoints per node line. The old code limited nodeVert by the
        // total node count, leaving stale vertices visible whenever some nodes
        // had no triangle attached.
        fb.limit( 3*nodeCount );
        nodeVert.limit( 2*3*lineCount );
    }
}
// class GUICircumCircle extends SceneElement<Tuple2<TriangulationPoint,Double>>
// {
// private int VCNT = 64;
// private Line m_circle = new Line();
//
// public GUICircumCircle()
// {
// super("CircumCircle");
// m_circle.getMeshData().setIndexMode( IndexMode.LineLoop );
// m_circle.getMeshData().setVertexBuffer( BufferUtils.createVector3Buffer( VCNT ) );
// m_circle.setDefaultColor( ColorRGBA.WHITE );
// m_circle.setLineWidth( 1 );
// m_node.attachChild( m_circle );
// }
//
// @Override
// public void update( Tuple2<TriangulationPoint,Double> circle )
// {
// float x,y;
// if( circle.a != null )
// {
// FloatBuffer fb = m_circle.getMeshData().getVertexBuffer();
// fb.rewind();
// for( int i=0; i < VCNT; i++ )
// {
// x = (float)circle.a.getX() + (float)(circle.b*Math.cos( 2*Math.PI*((double)i%VCNT)/VCNT ));
// y = (float)circle.a.getY() + (float)(circle.b*Math.sin( 2*Math.PI*((double)i%VCNT)/VCNT ));
// fb.put( x ).put( y ).put( 0 );
// }
// }
// else
// {
// m_node.detachAllChildren();
// }
// }
// }
/**
 * Extension point over {@code CDTSweepMesh}. Currently adds no behavior of
 * its own - a disabled experiment that drew red lines from each triangle's
 * centroid to its neighbor edges was removed from this body (recoverable
 * from version control if ever needed again).
 */
class CDTSweepMeshExtended extends CDTSweepMesh
{
    public CDTSweepMeshExtended()
    {
        super();
    }

    @Override
    public void update( List<DelaunayTriangle> triangles )
    {
        super.update( triangles );
    }
}
/**
 * Debug visualization of the constraint edge currently being processed,
 * drawn as a thick yellow line between the edge's two endpoints.
 */
class CDTSweepActiveEdge extends SceneElement<DTSweepContext>
{
private Line m_edgeLine = new Line();
public CDTSweepActiveEdge()
{
super("ActiveEdge");
m_edgeLine.getMeshData().setIndexMode( IndexMode.Lines );
m_edgeLine.getMeshData().setVertexBuffer( BufferUtils.createVector3Buffer( 2 ) );
m_edgeLine.setDefaultColor( ColorRGBA.YELLOW );
m_edgeLine.setLineWidth( 3 );
}
@Override
public void update( DTSweepContext tcx )
{
DTSweepConstraint edge = tcx.getDebugContext().getActiveConstraint();
if( edge != null )
{
FloatBuffer fb = m_edgeLine.getMeshData().getVertexBuffer();
fb.rewind();
// NOTE(review): z is hard-coded to 0 here while the other overlays use the
// points' z values - confirm this is intentional for the 2D debug view.
fb.put( edge.getP().getXf() ).put( edge.getP().getYf() ).put( 0 );
fb.put( edge.getQ().getXf() ).put( edge.getQ().getYf() ).put( 0 );
_node.attachChild( m_edgeLine );
}
else
{
// No active constraint: hide the line entirely.
_node.detachAllChildren();
}
}
}
/**
 * Debug visualization of the primary and secondary triangles currently being
 * operated on, rendered as two flat-shaded gray triangles.
 */
class CDTSweepActiveTriangles extends SceneElement<DTSweepContext>
{
    private Triangle m_a = new Triangle();
    private Triangle m_b = new Triangle();

    public CDTSweepActiveTriangles()
    {
        super("ActiveTriangles");
        _node.getSceneHints().setAllPickingHints( false );
        m_a.setDefaultColor( new ColorRGBA( 0.8f,0.8f,0.8f,1.0f ) );
        m_b.setDefaultColor( new ColorRGBA( 0.5f,0.5f,0.5f,1.0f ) );
    }

    /** Scales both triangle overlays uniformly. */
    public void setScale( double scale )
    {
        m_a.setScale( scale );
        m_b.setScale( scale );
    }

    @Override
    public void update( DTSweepContext tcx )
    {
        _node.detachAllChildren();
        attachIfPresent( tcx.getDebugContext().getPrimaryTriangle(), m_a );
        attachIfPresent( tcx.getDebugContext().getSecondaryTriangle(), m_b );
    }

    // Copies the source triangle's corner coordinates into the overlay mesh
    // and attaches it; does nothing when there is no source triangle.
    private void attachIfPresent( DelaunayTriangle source, Triangle mesh )
    {
        if( source == null )
        {
            return;
        }
        FloatBuffer vertices = mesh.getMeshData().getVertexBuffer();
        vertices.rewind();
        for( TriangulationPoint corner : source.points )
        {
            vertices.put( corner.getXf() ).put( corner.getYf() ).put( corner.getZf() );
        }
        _node.attachChild( mesh );
    }
}
/**
 * Debug visualization of the advancing-front node currently being worked on:
 * three small wireframe triangle markers placed on the previous, current and
 * next node positions (dark / light / dark gray).
 */
class CDTSweepActiveNode extends SceneElement<DTSweepContext>
{
private Triangle m_a = new Triangle();
private Triangle m_b = new Triangle();
private Triangle m_c = new Triangle();
public CDTSweepActiveNode()
{
super("WorkingNode");
_node.setRenderState( new WireframeState() );
m_a.setDefaultColor( ColorRGBA.DARK_GRAY );
m_b.setDefaultColor( ColorRGBA.LIGHT_GRAY );
m_c.setDefaultColor( ColorRGBA.DARK_GRAY );
setScale( 0.5 );
}
// Scales all three markers uniformly.
public void setScale( double scale )
{
m_a.setScale( scale );
m_b.setScale( scale );
m_c.setScale( scale );
}
@Override
public void update( DTSweepContext tcx )
{
AdvancingFrontNode node = tcx.getDebugContext().getActiveNode();
TriangulationPoint p;
if( node != null )
{
if( node.getPrevious() != null )
{
p = node.getPrevious().getPoint();
m_a.setTranslation( p.getXf(), p.getYf(), p.getZf() );
}
p = node.getPoint();
m_b.setTranslation( p.getXf(), p.getYf(), p.getZf() );
if( node.getNext() != null )
{
p = node.getNext().getPoint();
m_c.setTranslation( p.getXf(), p.getYf(), p.getZf() );
}
// NOTE(review): when previous/next is null the corresponding marker is
// still attached at its last translation - confirm that is acceptable.
_node.attachChild( m_a );
_node.attachChild( m_b );
_node.attachChild( m_c );
}
else
{
// No active node: hide all markers.
_node.detachAllChildren();
}
}
}
}
| Java |
package org.poly2tri.examples;
import org.poly2tri.Poly2Tri;
import org.poly2tri.triangulation.sets.PointSet;
import org.poly2tri.triangulation.util.PointGenerator;
/**
 * Ad-hoc profiling harness: triangulates a large generated point set, then
 * keeps the JVM alive so an attached profiler can inspect the results.
 */
public class ProfilingExample
{
public static void main(final String[] args)
throws Exception
{
// Generate a uniformly distributed point set.
// NOTE(review): assumes the arguments are (scale, pointCount) - confirm
// against PointGenerator.uniformDistribution.
PointSet ps = new PointSet( PointGenerator.uniformDistribution( 50, 500000 ) );
// Loop kept so the iteration count is easy to bump when measuring warm-up.
for( int i=0; i<1; i++ )
{
Poly2Tri.triangulate( ps );
}
// Keep the process alive (~3 hours) so a profiler can stay attached.
Thread.sleep( 10000000 );
}
// NOTE(review): unimplemented placeholder - presumably intended to start
// profiling programmatically; confirm before removing.
public void startProfiling()
throws Exception
{
}
}
| Java |
package sorting;
/**
 * In-place heap sort over an int array, implemented to match the structure
 * of the other sorters in this package (SelectionSort, InsertionSort,
 * MergeSort): constructor takes the data, main() drives a small demo.
 * The class was previously an empty stub.
 */
public class HeapSort
{
    private int[] myData;

    public HeapSort(int[] data)
    {
        myData = data;
    }

    public static void main(String[] args)
    {
        if (args.length > 20) {
            System.err.println("Not valid for more than 20 entries");
            return;
        }
        int[] data = new int[args.length];
        int i = 0;
        for (String s : args) {
            data[i++] = Integer.parseInt(s);
        }
        HeapSort sorter = new HeapSort(data);
        System.out.println("Before sorting: " + sorter);
        sorter.sort();
        System.out.println("After sorting: " + sorter);
        System.out.println(HeapSort.getNotes());
    }

    /** Sorts the array in ascending order using a binary max-heap. */
    public void sort()
    {
        int n = myData.length;
        // Build the max-heap bottom-up from the last internal node.
        for (int i = n / 2 - 1; i >= 0; i--) {
            siftDown(i, n);
        }
        // Repeatedly move the current maximum to the end and shrink the heap.
        for (int end = n - 1; end > 0; end--) {
            swap(0, end);
            siftDown(0, end);
        }
    }

    // Restores the max-heap property for the subtree rooted at index root,
    // considering only elements in [0, size).
    private void siftDown(int root, int size)
    {
        int parent = root;
        while (2 * parent + 1 < size) {
            int child = 2 * parent + 1;
            // Pick the larger of the two children.
            if (child + 1 < size && myData[child + 1] > myData[child]) {
                child++;
            }
            if (myData[parent] >= myData[child]) {
                break;
            }
            swap(parent, child);
            parent = child;
        }
    }

    private void swap(int a, int b)
    {
        int tmp = myData[a];
        myData[a] = myData[b];
        myData[b] = tmp;
    }

    @Override
    public String toString()
    {
        StringBuilder sb = new StringBuilder();
        for (int i : myData) {
            sb.append(i).append(' ');
        }
        return sb.toString();
    }

    public static String getNotes()
    {
        return "The worst case complexity is O(nlogn)";
    }
}
| Java |
package sorting;
/**
 * Selection sort over an int array supplied on the command line.
 */
public class SelectionSort
{
    private int[] myData;

    public SelectionSort(int[] data)
    {
        myData = data;
    }

    public static void main(String[] args)
    {
        if (args.length > 20) {
            System.err.println("Not valid for more than 20 entries");
            // Fix: the old code printed the error but fell through and then
            // overran the fixed 20-slot array with an AIOOBE.
            return;
        }
        // Size the array to the input so missing entries no longer show up
        // as phantom zeros in the sorted output.
        int[] data = new int[args.length];
        int i = 0;
        for (String s : args) {
            data[i++] = Integer.parseInt(s);
        }
        SelectionSort sorter = new SelectionSort(data);
        System.out.println("Before sorting: " + sorter);
        sorter.sort();
        System.out.println("After sorting: " + sorter);
        System.out.println(SelectionSort.getNotes());
    }

    /** Sorts ascending by repeatedly selecting the minimum of the unsorted tail. */
    private void sort()
    {
        for (int i = 0; i < myData.length; i++) {
            int min = i;
            for (int j = i + 1; j < myData.length; j++) {
                if (myData[j] < myData[min]) {
                    min = j;
                }
            }
            if (min != i) {
                SortUtils.swap(myData, i, min);
            }
        }
    }

    /** Space-separated rendering of the current array contents. */
    @Override
    public String toString()
    {
        StringBuilder sb = new StringBuilder();
        for (int i : myData) {
            sb.append(i).append(' ');
        }
        return sb.toString();
    }

    public static String getNotes()
    {
        return "The worst case complexity is O(n^2)";
    }
}
| Java |
package sorting;
public class SortUtils
{
public static void swap(int[] array, int pos1, int pos2)
{
int temp = array[pos1];
array[pos1] = array[pos2];
array[pos2] = temp;
}
public static void main(String[] args)
{
int[] array = new int [2];
array[0] = 1;
array[1] = 2;
System.out.println("Array: " + array[0] + " " + array[1]);
swap(array, 0, 1);
System.out.println("Array: " + array[0] + " " + array[1]);
}
}
| Java |
package sorting;
/**
 * Insertion sort over an int array supplied on the command line.
 */
public class InsertionSort
{
    private int[] myData;

    public InsertionSort(int[] data)
    {
        myData = data;
    }

    public static void main(String[] args)
    {
        if (args.length > 20) {
            System.err.println("Not valid for more than 20 entries");
            // Fix: the old code printed the error but fell through and then
            // overran the fixed 20-slot array with an AIOOBE.
            return;
        }
        // Size the array to the input so missing entries no longer show up
        // as phantom zeros in the sorted output.
        int[] data = new int[args.length];
        int i = 0;
        for (String s : args) {
            data[i++] = Integer.parseInt(s);
        }
        InsertionSort sorter = new InsertionSort(data);
        System.out.println("Before sorting: " + sorter);
        sorter.sort();
        System.out.println("After sorting: " + sorter);
        System.out.println(InsertionSort.getNotes());
    }

    /** Sorts ascending by inserting each element into the sorted prefix. */
    private void sort()
    {
        for (int i = 1; i < myData.length; i++) {
            int key = myData[i];
            int j = i - 1;
            // Shift larger elements right to open a slot for key.
            while (j >= 0 && myData[j] > key) {
                myData[j + 1] = myData[j];
                j--;
            }
            myData[j + 1] = key;
        }
    }

    /** Space-separated rendering of the current array contents. */
    @Override
    public String toString()
    {
        StringBuilder sb = new StringBuilder();
        for (int i : myData) {
            sb.append(i).append(' ');
        }
        return sb.toString();
    }

    public static String getNotes()
    {
        return "The worst case complexity is O(n^2)";
    }
}
| Java |
package sorting;
/**
 * Merge sort over an int array supplied on the command line. Prints each
 * recursive step and merge for instructional purposes.
 */
public class MergeSort
{
    private int[] myData;

    public MergeSort(int[] data)
    {
        myData = data;
    }

    public static void main(String[] args)
    {
        if (args.length > 20) {
            System.err.println("Not valid for more than 20 entries");
            // Fix: the old code printed the error but fell through and then
            // overran the fixed 20-slot array with an AIOOBE.
            return;
        }
        // Size the array to the input so missing entries no longer show up
        // as phantom zeros in the sorted output.
        int[] data = new int[args.length];
        int i = 0;
        for (String s : args) {
            data[i++] = Integer.parseInt(s);
        }
        MergeSort sorter = new MergeSort(data);
        System.out.println("Before sorting: " + sorter);
        System.out.println("length: " + data.length);
        sorter.sort(0, data.length - 1);
        System.out.println("After sorting: " + sorter);
        System.out.println(MergeSort.getNotes());
    }

    /**
     * Recursively sorts myData[min..max] (both inclusive), printing the
     * sub-array being processed at every step.
     */
    private void sort(int min, int max)
    {
        if (min < max) {
            StringBuilder sb = new StringBuilder("Sorting: ");
            for (int i = min; i <= max; i++) {
                sb.append(myData[i]).append(' ');
            }
            System.out.println(sb.toString());
            // Overflow-safe midpoint; (min + max) / 2 can wrap for very
            // large indices.
            int mid = min + (max - min) / 2;
            sort(min, mid);
            sort(mid + 1, max);
            merge(min, mid, max);
        }
    }

    /**
     * Merges the two sorted halves myData[min..mid] and myData[mid+1..max]
     * back into myData[min..max].
     * Note: on equal keys the right half's element is taken first, so this
     * merge is not stable (preserved from the original implementation).
     */
    private void merge(int min, int mid, int max)
    {
        StringBuilder sb = new StringBuilder("Merging: ");
        for (int i = min; i <= mid; i++) {
            sb.append(myData[i]).append(' ');
        }
        StringBuilder sb1 = new StringBuilder();
        for (int i = mid + 1; i <= max; i++) {
            sb1.append(myData[i]).append(' ');
        }
        System.out.println(sb.toString() + " and " + sb1.toString());
        int[] arr1 = new int[mid - min + 1];
        int[] arr2 = new int[max - mid];
        // Copy the first half.
        for (int k = 0; k <= mid - min; k++) {
            arr1[k] = myData[min + k];
        }
        // Copy the second half.
        for (int k = 0; k <= max - mid - 1; k++) {
            arr2[k] = myData[k + mid + 1];
        }
        int i = 0, j = 0, k = min;
        // Interleave the two halves in ascending order.
        while (i < arr1.length && j < arr2.length) {
            if (arr1[i] < arr2[j]) {
                myData[k++] = arr1[i];
                i++;
            }
            else {
                myData[k++] = arr2[j];
                j++;
            }
        }
        // Drain whichever half still has elements.
        while (i < arr1.length) {
            myData[k++] = arr1[i++];
        }
        while (j < arr2.length) {
            myData[k++] = arr2[j++];
        }
        System.out.println("After merge: ");
        sb = new StringBuilder();
        for (int km = min; km <= max; km++) {
            sb.append(myData[km]).append(' ');
        }
        System.out.println(sb.toString());
    }

    /** Space-separated rendering of the current array contents. */
    @Override
    public String toString()
    {
        StringBuilder sb = new StringBuilder();
        for (int i : myData) {
            sb.append(i).append(' ');
        }
        return sb.toString();
    }

    public static String getNotes()
    {
        return "The worst case complexity is O(nlogn)";
    }
}
| Java |
package misc;
/**
 * Naive substring search of a fixed paragraph for a user-supplied pattern.
 * (Class name typo "Patterm" is preserved for source compatibility.)
 */
public class PattermMatch
{
    private String myPara;
    private String myPattern;

    public PattermMatch(String pattern)
    {
        StringBuilder sb = new StringBuilder();
        sb.append("In computer science, pattern matching is the act of ");
        sb.append("checking some sequence of tokens for the presence of the ");
        sb.append("constituents of some pattern. In contrast to pattern");
        sb.append("The patterns generally have the form of either sequences ");
        sb.append("or tree structures.");
        myPara = sb.toString();
        myPattern = pattern;
    }

    /**
     * Returns the index of the first occurrence of the pattern in the
     * paragraph, or -1 when absent. Brute-force O(n*m) scan with an
     * explicit found flag.
     */
    public int match()
    {
        for (int i = 0; i < myPara.length() - myPattern.length() + 1; i++) {
            boolean found = true;
            for (int j = 0; j < myPattern.length(); j++) {
                if (myPara.charAt(i + j) != myPattern.charAt(j)) {
                    found = false;
                    break;
                }
            }
            if (found) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Alternative brute-force scan; same contract as {@link #match()}.
     */
    public int matchPattern()
    {
        int n = myPara.length();
        int m = myPattern.length();
        // Fix: only start positions where the whole pattern can fit. The old
        // bound (i < n) let i + k run past the end of the paragraph and threw
        // StringIndexOutOfBoundsException on a partial match near the end.
        for (int i = 0; i + m <= n; i++) {
            int k = 0;
            while (k < m && myPattern.charAt(k) == myPara.charAt(i + k)) {
                k++; // Run at most m times
            }
            if (k == m) {
                return i;
            }
        }
        return -1;
    }

    public static void main(String[] args)
    {
        // Fix: guard against a missing argument instead of crashing on args[0].
        if (args.length == 0) {
            System.err.println("Usage: PattermMatch <pattern>");
            return;
        }
        System.out.println("Enter Pattern: ");
        PattermMatch pm = new PattermMatch(args[0]);
        int index = pm.match();
        if (index == -1) {
            System.out.println("Pattern did not match :(");
        }
        else {
            System.out.println("Pattern matched at index " + index + " :)");
        }
    }
}
| Java |
package structures;
public class LinkedListTests
{
public static class Node
{
int myData;
Node myNext;
Node(int data)
{
myData = data;
myNext = null;
}
public int getData()
{
return myData;
}
public Node getNext()
{
return myNext;
}
public void setNext(Node next)
{
myNext = next;
}
}
public static void main(String[] args)
{
System.out.println("Lets create the linked list: ");
if (args.length > 20) {
System.err.println("Not valid for more than 20 entries");
}
Node headerNode= null;
Node prev = null;
for (String s : args) {
Node node = new Node(Integer.parseInt(s));
if (prev != null) {
prev.setNext(node);
}
else {
// to mark the first node to hold the reference
headerNode = node;
}
prev = node;
}
// Print the list created.
LinkedListTests.printList(headerNode);
// Insert an element at first.
headerNode = LinkedListTests.insertFirst(headerNode, 0);
LinkedListTests.printList(headerNode);
// Insert an element at last
headerNode = LinkedListTests.insertLast(headerNode, 100);
LinkedListTests.printList(headerNode);
// Insert one element to null list.
Node node = LinkedListTests.insertLast(null, 100);
LinkedListTests.printList(node);
// Delete a data item not present
headerNode = LinkedListTests.deleteNode(headerNode, 444);
// Delete a valid item
System.out.println("Deleting element with data 98");
headerNode = LinkedListTests.deleteNode(headerNode, 98);
LinkedListTests.printList(headerNode);
}
private static void printList(Node headerNode)
{
Node node = headerNode;
if (headerNode == null) {
System.out.println("List is empty");
return;
}
StringBuilder sb = new StringBuilder();
while(node != null) {
sb.append(node.getData() + " ");
node = node.getNext();
}
System.out.println(sb.toString());
}
private static Node insertFirst(Node headerNode, int num)
{
Node node = new Node(num);
node.setNext(headerNode);
return node;
}
private static Node insertLast(Node headerNode, int num)
{
if (headerNode == null) {
headerNode = new Node(num);
return headerNode;
}
Node node = headerNode;
while(node.getNext() != null) {
node = node.getNext();
}
node.setNext(new Node(num));
return headerNode;
}
private static Node deleteNode(Node headerNode, int num)
{
if (headerNode == null) {
System.out.println("Cannot delete data from empty list");
return null;
}
Node prev = null;
Node node = headerNode;
while(node != null && node.getData() != num) {
prev = node;
node = node.getNext();
}
if (node == null) {
System.out.println("Did not find element with data " + num);
return headerNode;
}
else {
prev.setNext(node.getNext());
return headerNode;
}
}
}
| Java |
package practicejava.threads;
/**
 * Prints even and odd numbers below 30 in order using two threads that take
 * turns on a shared monitor.
 */
public class EvenOddPrinter
{
    // Turn flag shared by both printers; only read/written while holding the
    // monitor passed to the NumPrinter instances.
    // NOTE(review): being static, the flag persists across runs - a second
    // pair of printers in the same JVM starts from whatever state the first
    // pair left behind.
    private static boolean myShouldPrintEven = true;

    /**
     * Runnable to be instantiated once for even printing and once for odd
     * printing; the two alternate via wait/notify on the shared monitor.
     */
    public static class NumPrinter implements Runnable
    {
        private boolean myIsEven;
        private int myNum;
        private Object myMonitor;

        public NumPrinter(boolean isEven, Object monitor)
        {
            myIsEven = isEven;
            myMonitor = monitor;
            // Even printer starts at 0, odd printer at 1.
            myNum = isEven ? 0 : 1;
        }

        @Override
        public void run()
        {
            while (myNum < 30) {
                synchronized (myMonitor) {
                    // Wait while it is the other printer's turn.
                    while (myIsEven != myShouldPrintEven) {
                        try {
                            myMonitor.wait();
                        }
                        catch (InterruptedException e) {
                            // Fix: restore the interrupt flag and stop, instead
                            // of the auto-generated printStackTrace() swallow.
                            Thread.currentThread().interrupt();
                            return;
                        }
                    }
                    System.out.println("Num: " + myNum);
                    myNum += 2;
                    // Hand the turn to the other printer and wake it.
                    myShouldPrintEven = !myShouldPrintEven;
                    myMonitor.notify();
                }
            }
        }
    }

    public static void main(String[] args)
    {
        Object monitor = new Object();
        Thread evenTh = new Thread(new NumPrinter(true, monitor));
        Thread oddTh = new Thread(new NumPrinter(false, monitor));
        evenTh.start();
        oddTh.start();
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.cn;
import java.io.StringReader;
import java.util.List;
import pt.inesc.id.l2f.annotation.document.laf.Classification;
import pt.inesc.id.l2f.annotation.document.laf.Feature;
import pt.inesc.id.l2f.annotation.document.laf.FeatureStructure;
import pt.inesc.id.l2f.annotation.document.laf.MorphoSyntacticAnnotation;
import pt.inesc.id.l2f.annotation.document.laf.Segment;
import pt.inesc.id.l2f.annotation.document.laf.Segmentation;
import pt.inesc.id.l2f.annotation.tool.Classifier;
import pt.inesc.id.l2f.annotation.tool.execution.JavaExecutionMode;
import edu.stanford.nlp.ling.HasWord;
import edu.stanford.nlp.ling.Sentence;
import edu.stanford.nlp.ling.TaggedWord;
import edu.stanford.nlp.ling.Word;
import edu.stanford.nlp.tagger.maxent.MaxentTagger;
/**
 * POS tagger for Chinese text backed by the Stanford MaxentTagger.
 * Concatenates the input segments, tokenizes and tags every word, and
 * records one Segment plus one "pos" Classification per token.
 *
 * @author Tiago Luis
 */
public class ChinesePOSTagger extends Classifier {

    // Stanford model file for Chinese.
    private static final String DICTIONARY = "en/stanford-tagger/stanford-chinese-tagger-2008-07-07/chinese.tagger";

    static {
        try {
            // The Stanford tagger keeps global state; initialize it once.
            MaxentTagger.init(DICTIONARY);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public ChinesePOSTagger() {
        super(new JavaExecutionMode());
    }

    /**
     * Tags the concatenated {@code input} segments, adding the resulting
     * segmentation and morpho-syntactic annotation to this classifier.
     * NOTE(review): the {@code a} and {@code output} parameters are unused
     * here - presumably required by the Classifier contract; confirm.
     */
    @SuppressWarnings("unchecked")
    public void tagg(MorphoSyntacticAnnotation a, List<String> input, List<String> output) {
        Segmentation segmentation = new Segmentation();
        MorphoSyntacticAnnotation annotation = new MorphoSyntacticAnnotation();
        long i = 0;
        try {
            // StringBuilder avoids the O(n^2) cost of the old += concatenation.
            StringBuilder text = new StringBuilder();
            for (String segment : input) {
                text.append(segment);
            }
            List<Sentence> sentences = MaxentTagger.tokenizeText(new StringReader(text.toString()));
            for (Sentence sentence : sentences) {
                Sentence<TaggedWord> taggedSentence = MaxentTagger.tagSentence(sentence);
                for (int j = 0; j < sentence.length(); j++) {
                    TaggedWord word = taggedSentence.getHasWord(j);
                    // Character offsets of the token within the text.
                    String from = Integer.toString(((Word) sentence.getHasWord(j)).beginPosition());
                    String to = Integer.toString(((Word) sentence.getHasWord(j)).endPosition());
                    Segment s = new Segment("t" + i++, from, to, ((HasWord) word).word());
                    segmentation.addSegment(s);
                    annotation.addClassification(this.createClassification(s, word.tag()));
                }
            }
            this.addSegmentation(segmentation);
            this.addMorphoSyntacticAnnotation(annotation);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Builds a classification carrying a single "pos" feature for the segment. */
    private Classification createClassification(Segment segment, String c) {
        Classification classification = new Classification();
        classification.addSegment(segment.getId());
        FeatureStructure fs = new FeatureStructure();
        Feature pos = new Feature();
        pos.setName("pos");
        pos.setValue(c);
        fs.addFeature("pos", pos);
        classification.addFeatureStructure(fs);
        return classification;
    }

    /** Nothing to release; the tagger is a global singleton. */
    @Override
    public void close() {}

    /** Nothing to start; initialization happens in the static block. */
    @Override
    public void start() {}
}
| Java |
package pt.inesc.id.l2f.annotation.tools.cn;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.util.ArrayList;
import java.util.List;
import pt.inesc.id.l2f.annotation.input.TextElement;
import pt.inesc.id.l2f.annotation.tool.execution.ToolExecutionModeUnit;
import pt.inesc.id.l2f.annotation.tool.execution.ExternalProcessExecutionMode;
import pt.inesc.id.l2f.annotation.unit.InputDocumentProcessUnit;
import pt.inesc.id.l2f.annotation.unit.LinguisticAnnotationProcessUnit;
/**
 * External-process execution mode for the Chinese tools: feeds each input
 * document's non-blank text elements to the process and collects the
 * process output line by line.
 *
 * @author Tiago Luis
 */
public class ChineseExecutionMode extends ExternalProcessExecutionMode {

    public ChineseExecutionMode(String[] command, String[][] environment, String charset) {
        super(command, environment, charset);
    }

    /**
     * Collects the non-whitespace text elements of the unit's input document
     * as the lines to send to the external process.
     */
    @Override
    public ToolExecutionModeUnit setInput(InputDocumentProcessUnit unit) {
        List<String> input = new ArrayList<String>();
        TextElement node = null;
        while ((node = unit.getInputDocument().next()) != null) {
            String text = node.getText();
            // Skip whitespace-only elements.
            if (text.matches("\\s+")) {
                continue;
            }
            input.add(text);
        }
        // TODO: Fix this
        input.add("\n");
        return new ToolExecutionModeUnit(input, unit);
    }

    /** Not supported for annotation units. */
    @Override
    public ToolExecutionModeUnit setInput(LinguisticAnnotationProcessUnit unit) {
        return null;
    }

    /**
     * Reads every line produced by the external process into the unit's
     * output list.
     * NOTE(review): the reader is presumably owned by the caller, so it is
     * intentionally not closed here - confirm against the base class.
     */
    @Override
    public ToolExecutionModeUnit setOutput(ToolExecutionModeUnit unit, InputStream is, Reader reader) {
        BufferedReader br = new BufferedReader(reader);
        List<String> output = new ArrayList<String>();
        try {
            String line;
            // Idiomatic read loop replaces the old while(true)/break form.
            while ((line = br.readLine()) != null) {
                output.add(line);
            }
            unit.setOutput(output);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return unit;
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.en;
import java.io.StringReader;
import java.util.List;
import pt.inesc.id.l2f.annotation.document.laf.Classification;
import pt.inesc.id.l2f.annotation.document.laf.Feature;
import pt.inesc.id.l2f.annotation.document.laf.FeatureStructure;
import pt.inesc.id.l2f.annotation.document.laf.MorphoSyntacticAnnotation;
import pt.inesc.id.l2f.annotation.document.laf.Segment;
import pt.inesc.id.l2f.annotation.document.laf.Segmentation;
import pt.inesc.id.l2f.annotation.tool.Classifier;
import pt.inesc.id.l2f.annotation.tool.execution.JavaExecutionMode;
import edu.stanford.nlp.ling.HasWord;
import edu.stanford.nlp.ling.Sentence;
import edu.stanford.nlp.ling.TaggedWord;
import edu.stanford.nlp.ling.Word;
import edu.stanford.nlp.tagger.maxent.MaxentTagger;
/**
 * POS tagger for English text backed by the Stanford MaxentTagger.
 * Concatenates the input segments, tokenizes and tags every word, and
 * records one Segment plus one "pos" Classification per token.
 *
 * @author Tiago Luis
 */
public class StanfordEnglishPOSTagger extends Classifier {

    // Stanford bidirectional WSJ model for English.
    private static final String DICTIONARY = "tagger/models/en/bidirectional-wsj-0-18.tagger";

    static {
        try {
            // The Stanford tagger keeps global state; initialize it once.
            MaxentTagger.init(DICTIONARY);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public StanfordEnglishPOSTagger() {
        super(new JavaExecutionMode());
    }

    /**
     * Tags the concatenated {@code input} segments, adding the resulting
     * segmentation and morpho-syntactic annotation to this classifier.
     * NOTE(review): the {@code a} and {@code output} parameters are unused
     * here - presumably required by the Classifier contract; confirm.
     */
    @SuppressWarnings("unchecked")
    public void tagg(MorphoSyntacticAnnotation a, List<String> input, List<String> output) {
        Segmentation segmentation = new Segmentation();
        MorphoSyntacticAnnotation annotation = new MorphoSyntacticAnnotation();
        long i = 0;
        try {
            // StringBuilder avoids the O(n^2) cost of the old += concatenation.
            StringBuilder text = new StringBuilder();
            for (String segment : input) {
                text.append(segment);
            }
            List<Sentence> sentences = MaxentTagger.tokenizeText(new StringReader(text.toString()));
            for (Sentence sentence : sentences) {
                Sentence<TaggedWord> taggedSentence = MaxentTagger.tagSentence(sentence);
                for (int j = 0; j < sentence.length(); j++) {
                    TaggedWord word = taggedSentence.getHasWord(j);
                    // Character offsets of the token within the text.
                    String from = Integer.toString(((Word) sentence.getHasWord(j)).beginPosition());
                    String to = Integer.toString(((Word) sentence.getHasWord(j)).endPosition());
                    Segment s = new Segment("t" + i++, from, to, ((HasWord) word).word());
                    segmentation.addSegment(s);
                    annotation.addClassification(this.createClassification(s, word.tag()));
                }
            }
            this.addSegmentation(segmentation);
            this.addMorphoSyntacticAnnotation(annotation);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Builds a classification carrying a single "pos" feature for the segment. */
    private Classification createClassification(Segment segment, String c) {
        Classification classification = new Classification();
        classification.addSegment(segment.getId());
        FeatureStructure fs = new FeatureStructure();
        Feature pos = new Feature();
        pos.setName("pos");
        pos.setValue(c);
        fs.addFeature("pos", pos);
        classification.addFeatureStructure(fs);
        return classification;
    }

    /** Nothing to release; the tagger is a global singleton. */
    @Override
    public void close() {}

    /** Nothing to start; initialization happens in the static block. */
    @Override
    public void start() {}
}
| Java |
package pt.inesc.id.l2f.annotation.tools.kr;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import pt.inesc.id.l2f.annotation.tool.execution.ToolExecutionModeUnit;
import pt.inesc.id.l2f.annotation.tool.execution.ExternalProcessExecutionMode;
import pt.inesc.id.l2f.annotation.unit.InputDocumentProcessUnit;
import pt.inesc.id.l2f.annotation.unit.LinguisticAnnotationProcessUnit;
/**
 * External-process execution mode for the Korean POS tagger. Most of the
 * input/output plumbing is still unimplemented (the overrides return null);
 * only a debug echo of the process output exists so far.
 *
 * @author Tiago Luis
 */
public class KoreanExecutionMode extends ExternalProcessExecutionMode {
public KoreanExecutionMode(String[] command, String[][] environment, String charset) {
super(command, environment, charset);
}
// Debug helper: echoes the first line read from the external process next
// to the first input line of the unit.
public void process(BufferedReader br, ToolExecutionModeUnit unit) {
try {
System.out.println("DEBUG: " + br.readLine() + " " + unit.getInput().get(0));
} catch (IOException e) {
e.printStackTrace();
}
}
// Stub: input wiring not implemented yet.
@Override
public ToolExecutionModeUnit setInput(InputDocumentProcessUnit unit) {
return null;
}
// Stub: input wiring not implemented yet.
@Override
public ToolExecutionModeUnit setInput(LinguisticAnnotationProcessUnit unit) {
return null;
}
// @Override
// public ToolExecutionModeUnit setInput(SyntacticProcessUnit unit) {
// return null;
// }
// Stub: output parsing not implemented yet.
@Override
public ToolExecutionModeUnit setOutput(ToolExecutionModeUnit unit, InputStream is, Reader reader) {
return null;
}
}
| Java |
package pt.inesc.id.l2f.annotation.tools.kr;
import java.util.List;
import pt.inesc.id.l2f.annotation.document.laf.MorphoSyntacticAnnotation;
import pt.inesc.id.l2f.annotation.tool.Classifier;
/**
 * POS tagger driven through an external process.
 *
 * NOTE(review): the configuration below (/usr/bin/chasen with charset EUC-JP)
 * points at the Japanese ChaSen analyzer rather than a Korean tool, and the
 * commented-out line in start() mentions a Japanese execution mode - confirm
 * whether this class was cloned from a Japanese tagger and never finished.
 *
 * @author Tiago Luis
 */
public class KoreanPOSTagger extends Classifier {
// command used to launch the external tagger process
private static final String[] COMMAND = {"/usr/bin/chasen"};
// environment variables passed to the external process (none)
private static final String[][] ENVIRONMENT = {};
// charset used when talking to the external process
private static final String CHARSET = "EUC-JP";
public KoreanPOSTagger() {
super(new KoreanExecutionMode(COMMAND, ENVIRONMENT, CHARSET));
// HACK: Cannot refer to 'this' while explicitly invoking a constructor
_mode.setTool(this);
}
// Initializes and launches the external process via the execution mode.
@Override
public void start() {
// _mode = new JapaneseMultiThreadedExecutionMode(this, "EUC-JP", builder, null, out);
_mode.init();
_mode.start();
}
@Override
public void close() {
_mode.close();
}
// Tagging is handled entirely by the external process; nothing to do here.
@Override
public void tagg(MorphoSyntacticAnnotation annotation, List<String> input, List<String> output) {}
}
| Java |
package pt.inesc.id.l2f.annotation.tools.ar;
import java.io.StringReader;
import java.util.HashSet;
import java.util.List;
import gpl.pierrick.brihaye.aramorph.AraMorph;
import gpl.pierrick.brihaye.aramorph.Solution;
import gpl.pierrick.brihaye.aramorph.lucene.ArabicTokenizer;
import org.apache.lucene.analysis.Token;
import pt.inesc.id.l2f.annotation.document.laf.Classification;
import pt.inesc.id.l2f.annotation.document.laf.Feature;
import pt.inesc.id.l2f.annotation.document.laf.FeatureStructure;
import pt.inesc.id.l2f.annotation.document.laf.MorphoSyntacticAnnotation;
import pt.inesc.id.l2f.annotation.document.laf.Segment;
import pt.inesc.id.l2f.annotation.document.laf.Segmentation;
import pt.inesc.id.l2f.annotation.tool.Classifier;
import pt.inesc.id.l2f.annotation.tool.execution.JavaExecutionMode;
/**
*
*
* @author Tiago Luis
*
*/
/**
 * POS tagger for Arabic based on the AraMorph morphological analyzer,
 * running entirely in-process (no external tool).
 *
 * @author Tiago Luis
 */
public class ArabicPOSTagger extends Classifier {

    /** In-process AraMorph analyzer. */
    private AraMorph araMorph;

    public ArabicPOSTagger() {
        super(new JavaExecutionMode());
        // TODO: move into an init()/start() method
        araMorph = new AraMorph();
    }

    /**
     * Tokenizes each input segment with Lucene's ArabicTokenizer, records one
     * Segment per token (character offsets taken from the tokenizer) and one
     * Classification per AraMorph solution found for that token.
     */
    @SuppressWarnings({ "deprecation", "unchecked" })
    public void tagg(MorphoSyntacticAnnotation annotation, List<String> input, List<String> output) {
        Segmentation segmentation = new Segmentation();
        MorphoSyntacticAnnotation morphoSyntacticAnnotation = new MorphoSyntacticAnnotation();
        long i = 0;
        try {
            for (String segment : input) {
                ArabicTokenizer tokenizer = new ArabicTokenizer(new StringReader(segment));
                Token word = null;
                while ((word = tokenizer.next()) != null) {
                    // read the term text once instead of three times
                    String term = word.termText();
                    String from = Integer.toString(word.startOffset());
                    String to = Integer.toString(word.endOffset());
                    Segment seg = new Segment("t" + i++, from, to, term);
                    segmentation.addSegment(seg);
                    if (araMorph.analyzeToken(term)) {
                        HashSet<Solution> solutions = araMorph.getWordSolutions(term);
                        for (Solution solution : solutions) {
                            morphoSyntacticAnnotation.addClassification(this.createClassification(seg, solution));
                        }
                    }
                }
            }
            this.addSegmentation(segmentation);
            this.addMorphoSyntacticAnnotation(morphoSyntacticAnnotation);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Builds a Classification for the given segment holding the AraMorph
     * solution's lemma and stem POS as features.
     */
    private Classification createClassification(Segment segment, Solution solution) {
        Classification classification = new Classification();
        classification.addSegment(segment.getId());
        FeatureStructure fs = new FeatureStructure();
        // lemma feature
        Feature lemma = new Feature();
        lemma.setName("lemma");
        lemma.setValue(solution.getLemma());
        // stem POS feature
        Feature stemPOS = new Feature();
        stemPOS.setName("stemPOS");
        stemPOS.setValue(solution.getStemPOS());
        fs.addFeature("lemma", lemma);
        fs.addFeature("stemPOS", stemPOS);
        classification.addFeatureStructure(fs);
        return classification;
    }

    @Override
    public void close() {}

    @Override
    public void start() {}
}
| Java |
package pt.inesc.id.l2f.annotation.tools.jp;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.util.ArrayList;
import java.util.List;
import pt.inesc.id.l2f.annotation.input.TextElement;
import pt.inesc.id.l2f.annotation.tool.execution.ToolExecutionModeUnit;
import pt.inesc.id.l2f.annotation.tool.execution.ExternalProcessExecutionMode;
import pt.inesc.id.l2f.annotation.unit.InputDocumentProcessUnit;
import pt.inesc.id.l2f.annotation.unit.LinguisticAnnotationProcessUnit;
/**
*
*
* @author Tiago Luis
*
*/
/**
 * Execution mode for the Japanese ChaSen POS tagger, run as an external
 * process: feeds text elements in, reads lines out until the "EOS" marker.
 *
 * @author Tiago Luis
 */
public class JapaneseExecutionMode extends ExternalProcessExecutionMode {

    /**
     * @param command     command line of the external tagger
     * @param environment environment variables for the external process
     * @param charset     charset used to communicate with the process
     */
    public JapaneseExecutionMode(String[] command, String[][] environment, String charset) {
        super(command, environment, charset);
    }

    /**
     * Collects all non-blank text elements of the input document as the
     * process input.
     */
    @Override
    public ToolExecutionModeUnit setInput(InputDocumentProcessUnit unit) {
        List<String> input = new ArrayList<String>();
        TextElement node = null;
        while ((node = unit.getInputDocument().next()) != null) {
            String text = node.getText();
            // skip whitespace-only elements
            if (text.matches("\\s+")) {
                continue;
            }
            input.add(text);
        }
        return new ToolExecutionModeUnit(input, unit);
    }

    /** Not implemented: no conversion from linguistic annotations. */
    @Override
    public ToolExecutionModeUnit setInput(LinguisticAnnotationProcessUnit unit) {
        return null;
    }

    /**
     * Reads tool output lines until the ChaSen end-of-sentence marker "EOS".
     * BUG FIX: also stops at end of stream, so a truncated stream no longer
     * triggers a NullPointerException on the null readLine() result.
     */
    @Override
    public ToolExecutionModeUnit setOutput(ToolExecutionModeUnit unit, InputStream is, Reader reader) {
        BufferedReader br = new BufferedReader(reader);
        List<String> output = new ArrayList<String>();
        try {
            String line;
            while ((line = br.readLine()) != null) {
                if (line.equals("EOS")) {
                    break;
                }
                output.add(line);
            }
            unit.setOutput(output);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return unit;
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.jp;
import java.util.List;
import pt.inesc.id.l2f.annotation.document.laf.MorphoSyntacticAnnotation;
import pt.inesc.id.l2f.annotation.document.laf.Segment;
import pt.inesc.id.l2f.annotation.document.laf.Segmentation;
import pt.inesc.id.l2f.annotation.tool.Classifier;
/**
*
*
* @author Tiago Luis
*
*/
public class JapanesePOSTagger extends Classifier {
// japanese POSTagger command
private static final String[] COMMAND = {"/usr/bin/chasen", "-F", "%m\t%ps\t%pe\t%P-\t%B-\n"};
// japanese POSTagger environment variables
private static final String[][] ENVIRONMENT = {};
// japanese POSTagger charset
private static final String CHARSET = "EUC-JP";
public JapanesePOSTagger() {
super(new JapaneseExecutionMode(COMMAND, ENVIRONMENT, CHARSET));
// HACK: Cannot refer to 'this' while explicitly invoking a constructor
_mode.setTool(this);
}
@Override
public void start() {
_mode.init();
_mode.start();
}
@Override
public void close() {
_mode.close();
}
@Override
public void tagg(MorphoSyntacticAnnotation annotation, List<String> input, List<String> output) {
Segmentation segmentation = new Segmentation();
for (int i = 0; i < output.size(); i++) {
String line = output.get(i);
// get token
String[] elements = line.split("\\s+");
segmentation.addSegment(new Segment("t" + i, elements[1], elements[2], elements[0]));
}
this.addSegmentation(segmentation);
}
} | Java |
package pt.inesc.id.l2f.annotation.tools.pt.jmarv;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import pt.inesc.id.l2f.annotation.document.laf.Classification;
import pt.inesc.id.l2f.annotation.document.laf.FeatureStructure;
import pt.inesc.id.l2f.annotation.document.laf.MorphoSyntacticAnnotation;
import pt.inesc.id.l2f.annotation.document.util.Pair;
import pt.inesc.id.l2f.annotation.tool.Classifier;
import pt.inesc.id.l2f.annotation.tool.execution.JavaExecutionMode;
import pt.inesc.id.l2f.jmarv.Disambiguator;
import pt.inesc.id.l2f.jmarv.Element;
import pt.inesc.id.l2f.jmarv.MarvToken;
import pt.inesc.id.l2f.jmarv.Segment;
/**
*
*
*
* @author Tiago Luis
*
*/
/**
 * Morpho-syntactic disambiguator based on JMARv. Converts the document's
 * classifications into JMARv tokens, runs the disambiguator, and keeps, per
 * token, only the feature structures whose tag matches the disambiguated
 * class.
 *
 * @author Tiago Luis
 */
public class JMARv extends Classifier {

    private Disambiguator _disambiguator;

    /** Maps annotation CAT feature values to single-letter JMARv categories. */
    private static Map<String, String> _cat = new HashMap<String, String>();
    /** Maps annotation SCT feature values to single-letter JMARv subcategories. */
    private static Map<String, String> _subcat = new HashMap<String, String>();

    static {
        // initialize CAT hash table
        _cat.put("nou", "N");
        _cat.put("ver", "V");
        _cat.put("adj", "A");
        _cat.put("pro", "P");
        _cat.put("pre", "S");
        _cat.put("art", "T");
        _cat.put("adv", "R");
        _cat.put("con", "C");
        _cat.put("num", "M");
        _cat.put("int", "I");
        _cat.put("pmk", "U");
        _cat.put("res", "X");
        _cat.put("pun", "O");
        // initialize SCT hash table
        _subcat.put("dem", "d");
    }

    public JMARv() {
        super(new JavaExecutionMode());
        try {
            _disambiguator = new Disambiguator("jmarv/dict/");
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Override
    public void close() {}

    @Override
    public void start() {}

    /**
     * Runs JMARv disambiguation over the classifications of {@code annotation}
     * and records a new MorphoSyntacticAnnotation holding only the selected
     * readings.
     */
    @Override
    public void tagg(MorphoSyntacticAnnotation annotation, List<String> input, List<String> output) {
        MorphoSyntacticAnnotation morphoSyntacticAnnotation = new MorphoSyntacticAnnotation();
        List<Pair<String, Classification>> classifications = new ArrayList<Pair<String, Classification>>();
        // JMARv segment: the sequence of ambiguous tokens to disambiguate
        Segment segment = new Segment();
        for (Classification classification : annotation.getClassifications()) {
            pt.inesc.id.l2f.annotation.document.laf.Segment s = this.getSegment(classification.getSegments().get(0));
            MarvToken marvToken = new MarvToken(s.getWord());
            // FIXME: may span more than one token
            if (classification.getSegments() != null) {
                for (FeatureStructure fs : classification.getFeatureStructures()) {
                    marvToken.addClassification(new pt.inesc.id.l2f.jmarv.Classification(tagOf(fs)));
                }
                classifications.add(new Pair<String, Classification>(s.getWord(), classification));
                if (marvToken.getClassifications().size() > 0) {
                    segment.add(marvToken);
                } else {
                    // no known tag: fall back to the "unknown" class so the
                    // token still participates in disambiguation
                    marvToken.addClassification(new pt.inesc.id.l2f.jmarv.Classification("U."));
                    segment.add(marvToken);
                }
            }
        }
        if (segment.size() > 1) {
            List<Element<String, String>> result = _disambiguator.disambiguate(segment);
            for (Element<String, String> value : result) {
                String token = value.getFirst();
                String classification = value.getSecond();
                Classification c = this.getClassification(classifications, token);
                if (c != null) {
                    for (FeatureStructure fs : c.getFeatureStructures()) {
                        // keep only the reading the disambiguator selected
                        if (classification.equals(tagOf(fs))) {
                            Classification c2 = new Classification();
                            c2.setSegments(c.getSegments());
                            c2.addFeatureStructure(fs);
                            morphoSyntacticAnnotation.addClassification(c2);
                        }
                    }
                }
            }
        }
        this.addMorphoSyntacticAnnotation(morphoSyntacticAnnotation);
    }

    /**
     * Builds the JMARv tag string (category letter + subcategory letter, "."
     * when the feature structure has no SCT feature). Extracted so the two
     * call sites in tagg() cannot drift apart.
     */
    private String tagOf(FeatureStructure fs) {
        String cat = _cat.get(fs.getFeature("CAT").getValue());
        String subcat = ".";
        if (fs.getFeature("SCT") != null) {
            subcat = _subcat.get(fs.getFeature("SCT").getValue());
        }
        return cat + subcat;
    }

    /**
     * Consumes entries from the head of {@code classifications} until one
     * whose word matches is found, and returns its classification.
     * BUG FIX: returns null when nothing matches — the previous version
     * returned an arbitrary non-matching entry, and threw a
     * NullPointerException when called with an empty list. The caller already
     * null-checks the result.
     */
    private Classification getClassification(List<Pair<String, Classification>> classifications, String word) {
        while (!classifications.isEmpty()) {
            Pair<String, Classification> candidate = classifications.remove(0);
            if (candidate.getFirst().equals(word)) {
                return candidate.getSecond();
            }
        }
        return null;
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt;
import java.util.HashMap;
import java.util.Map;
/**
 * A lemma: a grammatical stem together with a set of named morphological
 * features (e.g. "CAT" for category, "SCT" for subcategory).
 */
public class Lemma {

    /** Stem of the lemma. */
    private String _gstem;
    // private String _pstem;
    /** Feature name -> feature value. */
    private Map<String, String> _features;

    public Lemma() {
        _features = new HashMap<String, String>();
    }

    public Lemma(String stem) {
        _features = new HashMap<String, String>();
        _gstem = stem;
    }

    /** @return the stem */
    public String getGStem() {
        return _gstem;
    }

    /** @param stem the stem to set */
    public void setGStem(String stem) {
        _gstem = stem;
    }

    /** @return the value of the "CAT" (category) feature, or null if unset */
    public String get_cat() {
        return _features.get("CAT");
    }

    /** @return the value of the "SCT" (subcategory) feature, or null if unset */
    public String get_subcat() {
        return _features.get("SCT");
    }

    /** @return the feature map (live reference, not a copy) */
    public java.util.Map<String, String> getFeatures() {
        return _features;
    }

    /** @param features the feature map to use from now on */
    public void set_features(java.util.Map<String, String> features) {
        _features = features;
    }

    /**
     * Sets a single feature value.
     *
     * @param key   feature name (e.g. "CAT")
     * @param value feature value (e.g. "ver")
     */
    public void setFeature(String key, String value) {
        _features.put(key, value);
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt;
import java.util.StringTokenizer;
import pt.inesc.id.l2f.annotation.document.laf.Segment;
import pt.inesc.id.l2f.annotation.document.laf.Segmentation;
import pt.inesc.id.l2f.annotation.tool.Tokenizer;
import pt.inesc.id.l2f.annotation.tool.execution.JavaExecutionMode;
/**
*
*
* @author Tiago Luis
*
*/
/**
 * Whitespace tokenizer: splits the input on whitespace and emits one Segment
 * per token, with XPointer-style from/to character offsets into the source
 * element identified by {@code id}.
 *
 * @author Tiago Luis
 */
public class SpaceTokenizer extends Tokenizer {

    /** Running token id counter ("t0", "t1", ...), shared across calls. */
    long i = 0;

    public SpaceTokenizer() {
        super(new JavaExecutionMode());
    }

    @Override
    protected Segmentation tokenize(String input, String id) {
        Segmentation segmentation = new Segmentation();
        int offset = 0;
        StringTokenizer words = new StringTokenizer(input);
        while (words.hasMoreElements()) {
            String word = words.nextToken();
            // locate the token once (the original scanned the input twice per
            // token and advanced the offset assuming single-space separators)
            int start = input.indexOf(word, offset);
            int end = start + word.length();
            String xpointer = "xpointer(id('" + id + "')";
            String from = xpointer + "/point()[" + Integer.toString(start) + "])";
            String to = xpointer + "/point()[" + Integer.toString(end) + "])";
            segmentation.addSegment(new Segment("t" + i++, from, to, word));
            // continue the search right after this occurrence, which stays
            // correct for repeated words and runs of whitespace
            offset = end;
        }
        return segmentation;
    }

    @Override
    public void close() {
    }

    @Override
    public void start() {}
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt;
import java.util.ArrayList;
import java.util.List;
import pt.inesc.id.l2f.annotation.tools.pt.palavroso.SegmentVisitor;
/**
 * A surface form together with its candidate lemmas.
 */
public abstract class MorphologicalUnit {

    // BUG FIX: initialized eagerly so getLemmas() no longer throws a
    // NullPointerException when called before setForm()
    private List<Lemma> _lemmas = new ArrayList<Lemma>();
    private String _form;

    /**
     * Sets the surface form and resets the lemma list: a new form invalidates
     * any lemmas gathered for the previous one.
     */
    public void setForm(String form) {
        _form = form;
        _lemmas = new ArrayList<Lemma>();
    }

    /**
     * @return the lemmas (never null)
     */
    public List<Lemma> getLemmas() {
        return _lemmas;
    }

    /**
     * @param lemmas the lemmas to set
     */
    public void setLemmas(List<Lemma> lemmas) {
        _lemmas = lemmas;
    }

    /**
     * @return the surface form, or null if setForm() has not been called
     */
    public String getForm() {
        return _form;
    }

    /** Visitor hook: dispatches to {@code visitToken}. */
    public void accept(SegmentVisitor visitor) {
        visitor.visitToken(this);
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt.rudrico;
import java.io.InputStream;
import java.io.Reader;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.List;
import pt.inesc.id.l2f.annotation.document.laf.Classification;
import pt.inesc.id.l2f.annotation.document.laf.Feature;
import pt.inesc.id.l2f.annotation.document.laf.FeatureStructure;
import pt.inesc.id.l2f.annotation.document.laf.Segment;
import pt.inesc.id.l2f.annotation.document.xml.XMLWriter;
import pt.inesc.id.l2f.annotation.tool.execution.ToolExecutionModeUnit;
import pt.inesc.id.l2f.annotation.tool.execution.ExternalProcessExecutionMode;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.AnnotationDocument;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.Id;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.Sentence;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.Word;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.Class;
import pt.inesc.id.l2f.annotation.unit.InputDocumentProcessUnit;
import pt.inesc.id.l2f.annotation.unit.LinguisticAnnotationProcessUnit;
/**
*
*
* @author Tiago Luis
*
* @param <Unit>
*/
/**
 * Execution mode that runs the RuDriCo rule engine as an external process,
 * exchanging documents in RuDriCo's own XML format.
 *
 * @author Tiago Luis
 */
public class RudricoExecutionMode extends ExternalProcessExecutionMode {
    /**
     * @param command     command line of the external RuDriCo process
     * @param environment environment variables for the external process
     * @param charset     charset used to communicate with the process
     */
    public RudricoExecutionMode(String[] command, String[][] environment, String charset) {
        super(command, environment, charset);
    }
    /** Not supported: RuDriCo consumes prior annotations, not raw input documents. */
    @Override
    public ToolExecutionModeUnit setInput(InputDocumentProcessUnit unit) {
        return null;
    }
    /**
     * Serializes the unit's latest morpho-syntactic annotation into RuDriCo's
     * XML document format (one word element per classification, one class
     * element per feature structure) and wraps the XML string as process input.
     */
    @Override
    public ToolExecutionModeUnit setInput(LinguisticAnnotationProcessUnit unit) {
        AnnotationDocument document = new AnnotationDocument();
        Sentence sentence = new Sentence();
        // List<WordFormAlternatives> wfAlternatives = unit.getDocument().getWordFormalternatives();
        for (Classification classification : unit.getDocument().getLastMorphoSyntacticAnnotation().getClassifications()) {
            Segment segment = unit.getDocument().getSegment(classification.getSegments().get(0));
            Word word = new Word(segment.getWord());
            for (FeatureStructure fs : classification.getFeatureStructures()) {
                Class c = new Class();
                // FeatureStructure fs = wordForm.getFeatureStructure();
                for (Feature f : fs.getFeatures().values()) {
                    // NOTE(review): the root is re-set once per feature; the net
                    // effect is just the lower-cased word — confirm intended
                    c.setRoot(segment.getWord().toLowerCase());
                    c.addId(new Id(f.getName(), f.getValue()));
                }
                // segment character offsets travel as LOW/HIG id pairs
                c.addId(new Id("LOW", segment.getFrom()));
                c.addId(new Id("HIG", segment.getTo()));
                word.addClass(c);
            }
            sentence.addWord(word);
        }
        document.addSentence(sentence);
        // serialize the whole document to a single string for the process
        StringWriter w = new StringWriter();
        XMLWriter xmlw = new XMLWriter(w);
        document.writeTo(xmlw);
        List<String> input = new ArrayList<String>();
        input.add(w.getBuffer().toString());
        return new ToolExecutionModeUnit(input, unit);
    }
    // @Override
    // public ToolExecutionModeUnit setInput(SyntacticProcessUnit unit) {
    // return null;
    // }
    /**
     * Parses the process output stream back into an AnnotationDocument and
     * re-serializes it as a single output string on the unit.
     */
    @Override
    public ToolExecutionModeUnit setOutput(ToolExecutionModeUnit unit, InputStream is, Reader reader) {
        AnnotationDocument document = new AnnotationDocument();
        document.readFrom(is);
        // AnnotationXMLReader r = new AnnotationXMLReader(document);
        // InputSource inputSource = new InputSource(reader);
        // inputSource.setEncoding(_charset);
        // r.process(inputSource);
        StringWriter w = new StringWriter();
        XMLWriter xmlw = new XMLWriter(w);
        document.writeTo(xmlw);
        List<String> output = new ArrayList<String>();
        output.add(w.getBuffer().toString());
        unit.setOutput(output);
        return unit;
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt.rudrico.jni;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.List;
import pt.inesc.id.l2f.annotation.document.laf.Classification;
import pt.inesc.id.l2f.annotation.document.laf.Feature;
import pt.inesc.id.l2f.annotation.document.laf.FeatureStructure;
import pt.inesc.id.l2f.annotation.document.laf.Segment;
import pt.inesc.id.l2f.annotation.document.xml.XMLWriter;
import pt.inesc.id.l2f.annotation.tool.execution.JNIExecutionMode;
import pt.inesc.id.l2f.annotation.tool.execution.ToolExecutionModeUnit;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.AnnotationDocument;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.Class;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.Id;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.Sentence;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.Word;
import pt.inesc.id.l2f.annotation.unit.InputDocumentProcessUnit;
import pt.inesc.id.l2f.annotation.unit.LinguisticAnnotationProcessUnit;
/**
 * Execution mode that runs the RuDriCo rule engine in-process through a JNI
 * bridge (libRudricoJNI): annotations are serialized to RuDriCo XML, handed
 * to the native engine, and the engine's XML output is returned.
 */
public class RudricoJNIExecutionMode extends JNIExecutionMode {
    /** Native: initializes the RuDriCo engine. */
    private native void initialize();
    /** Native: processes one XML document string and returns the result XML. */
    private native String process(String input);
    // private native void release();
    static {
        // load the JNI bridge library at class-load time
        System.loadLibrary("RudricoJNI");
    }
    // NOTE(review): appears unused in this class — possibly a handle written
    // from the native side; confirm before removing
    public long _l;
    public RudricoJNIExecutionMode() {}
    /** Initializes the native engine; must run before getOutput(). */
    @Override
    public void init() {
        // initialize
        this.initialize();
    }
    /** Not supported: RuDriCo consumes prior annotations, not raw input documents. */
    @Override
    public ToolExecutionModeUnit getOutput(InputDocumentProcessUnit unit) {
        return null;
    }
    /**
     * Serializes the unit's latest morpho-syntactic annotation to RuDriCo XML
     * (one word per classification, one class per feature structure), feeds it
     * through the native engine, and returns a unit carrying the engine's XML
     * output.
     */
    @Override
    public ToolExecutionModeUnit getOutput(LinguisticAnnotationProcessUnit unit) {
        List<String> output = new ArrayList<String>();
        AnnotationDocument document = new AnnotationDocument();
        Sentence sentence = new Sentence();
        for (Classification classification : unit.getDocument().getLastMorphoSyntacticAnnotation().getClassifications()) {
            Segment segment = unit.getDocument().getSegment(classification.getSegments().get(0));
            Word word = new Word(segment.getWord());
            // TODO: port this behavior back to the command-line execution mode
            for (FeatureStructure fs : classification.getFeatureStructures()) {
                Class c = new Class();
                for (Feature f : fs.getFeatures().values()) {
                    // POS is dropped; LEMMA becomes the class root; everything
                    // else is carried as an atrib/value id pair
                    if (f.getName().equals("POS")) {
                        continue;
                    }
                    if (f.getName().equals("LEMMA")) {
                        c.setRoot(f.getValue());
                        continue;
                    }
                    c.addId(new Id(f.getName(), f.getValue()));
                }
                // segment character offsets travel as LOW/HIG id pairs
                c.addId(new Id("LOW", segment.getFrom()));
                c.addId(new Id("HIG", segment.getTo()));
                // UPC flags whether the surface form starts with an uppercase
                // letter (including accented Portuguese uppercase)
                if (segment.getWord().matches("^[A-ZÀÁÉÍÓÚÇÂÊÔÛÃÕ].*")) {
                    c.addId(new Id("UPC", "true"));
                } else {
                    c.addId(new Id("UPC", "false"));
                }
                word.addClass(c);
            }
            sentence.addWord(word);
        }
        document.addSentence(sentence);
        // serialize the document and run it through the native engine
        StringWriter w = new StringWriter();
        XMLWriter xmlw = new XMLWriter(w);
        document.writeTo(xmlw);
        List<String> input = new ArrayList<String>();
        input.add(w.getBuffer().toString());
        ToolExecutionModeUnit eunit = new ToolExecutionModeUnit(input, unit);
        for (String s : eunit.getInput()) {
            String o = this.process(s);
            output.add(o);
        }
        eunit.setOutput(output);
        return eunit;
    }
    @Override
    public void close() {
        super.close();
        // free resources
        // this.release();
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt.rudrico.thrift;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.List;
import com.facebook.thrift.TException;
import pt.inesc.id.l2f.annotation.tool.execution.ThriftExecutionMode;
import pt.inesc.id.l2f.annotation.tool.execution.ToolExecutionModeUnit;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.thrift.gen.RudricoServer;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.thrift.gen.RudricoServer.Client;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.AnnotationDocument;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.Class;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.Id;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.Sentence;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.Word;
import pt.inesc.id.l2f.annotation.unit.InputDocumentProcessUnit;
import pt.inesc.id.l2f.annotation.unit.LinguisticAnnotationProcessUnit;
import pt.inesc.id.l2f.annotation.document.laf.Classification;
import pt.inesc.id.l2f.annotation.document.laf.Feature;
import pt.inesc.id.l2f.annotation.document.laf.FeatureStructure;
import pt.inesc.id.l2f.annotation.document.laf.Segment;
import pt.inesc.id.l2f.annotation.document.xml.XMLWriter;
/**
 * Execution mode that talks to a RuDriCo server over Thrift. Annotations are
 * serialized to RuDriCo XML and sent to the remote (or locally spawned)
 * server; the server's XML output is returned.
 */
public class RudricoThriftClientExecutionMode extends ThriftExecutionMode {
    /** Command used to spawn a local RuDriCo Thrift server. */
    private static final String COMMAND = "/usr/share/split/rudrico/thrift/bin/server";
    /** Environment variables for the spawned server process (none). */
    private static final String[][] ENVIRONMENT = {};
    /** Thrift client stub; created by createClient() once the transport is open. */
    private Client _client;
    /** Spawns a local server on the given port and connects to it. */
    public RudricoThriftClientExecutionMode(int port) {
        super(new String[] { COMMAND, String.valueOf(port) }, port, ENVIRONMENT);
    }
    /** Connects to an already running server at hostname:port. */
    public RudricoThriftClientExecutionMode(String hostname, int port) {
        super(hostname, port);
    }
    /** Not supported: RuDriCo consumes prior annotations, not raw input documents. */
    @Override
    public ToolExecutionModeUnit getOutput(InputDocumentProcessUnit unit) {
        return null;
    }
    /**
     * Serializes the unit's latest morpho-syntactic annotation to RuDriCo XML
     * (one word per classification, one class per feature structure), sends it
     * to the server, and returns a unit carrying the server's XML output.
     */
    @Override
    public ToolExecutionModeUnit getOutput(LinguisticAnnotationProcessUnit unit) {
        List<String> output = new ArrayList<String>();
        AnnotationDocument document = new AnnotationDocument();
        Sentence sentence = new Sentence();
        for (Classification classification : unit.getDocument().getLastMorphoSyntacticAnnotation().getClassifications()) {
            Segment segment = unit.getDocument().getSegment(classification.getSegments().get(0));
            Word word = new Word(segment.getWord());
            // TODO: port this behavior back to the command-line execution mode
            for (FeatureStructure fs : classification.getFeatureStructures()) {
                Class c = new Class();
                for (Feature f : fs.getFeatures().values()) {
                    // POS is dropped; LEMMA becomes the class root; everything
                    // else is carried as an atrib/value id pair
                    if (f.getName().equals("POS")) {
                        continue;
                    }
                    if (f.getName().equals("LEMMA")) {
                        c.setRoot(f.getValue());
                        continue;
                    }
                    c.addId(new Id(f.getName(), f.getValue()));
                }
                // segment character offsets travel as LOW/HIG id pairs
                c.addId(new Id("LOW", segment.getFrom()));
                c.addId(new Id("HIG", segment.getTo()));
                // UPC flags whether the surface form starts with an uppercase
                // letter (including accented Portuguese uppercase)
                if (segment.getWord().matches("^[A-ZÀÁÉÍÓÚÇÂÊÔÛÃÕ].*")) {
                    c.addId(new Id("UPC", "true"));
                } else {
                    c.addId(new Id("UPC", "false"));
                }
                word.addClass(c);
            }
            sentence.addWord(word);
        }
        document.addSentence(sentence);
        // serialize the document and send it through the Thrift client
        StringWriter w = new StringWriter();
        XMLWriter xmlw = new XMLWriter(w);
        document.writeTo(xmlw);
        List<String> input = new ArrayList<String>();
        input.add(w.getBuffer().toString());
        ToolExecutionModeUnit eunit = new ToolExecutionModeUnit(input, unit);
        try {
            for (String s : eunit.getInput()) {
                String o = _client.process(s);
                output.add(o);
            }
        } catch (TException e) {
            e.printStackTrace();
        }
        eunit.setOutput(output);
        return eunit;
    }
    /** Creates the Thrift client stub over the already established protocol. */
    @Override
    public void createClient() {
        _client = new RudricoServer.Client(_protocol);
        //
        // Thread t = new Thread(this);
        // t.start();
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml;
import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import pt.inesc.id.l2f.annotation.document.Document;
import pt.inesc.id.l2f.annotation.document.xml.XMLReader;
import pt.inesc.id.l2f.annotation.document.xml.XMLWriter;
/**
 * RuDriCo's XML document: a list of sentences that can be read from / written
 * to the tool's "l2f_annotation" XML format.
 */
public class AnnotationDocument implements Document {
    /** Sentences of this document, in order. */
    private List<Sentence> _sentences;
    public AnnotationDocument() {
        _sentences = new ArrayList<Sentence>();
    }
    /**
     * Reads the document from a UTF-8 XML stream, replacing nothing and
     * appending every parsed sentence to this document.
     *
     * @param is stream positioned at the start of the XML document
     */
    public void readFrom(InputStream is) {
        XMLReader xmlr = new XMLReader(new DataInputStream(is), "UTF-8");
        this.readFrom(xmlr);
        // AnnotationXMLReader xmlr = new AnnotationXMLReader(this);
        //
        // InputSource s = new InputSource(is);
        //
        // s.setEncoding("ISO-8859-1");
        //
        // xmlr.process(s);
    }
    /**
     * Appends a sentence to the document.
     *
     * @param sentence the sentence to add
     */
    public void addSentence(Sentence sentence) {
        _sentences.add(sentence);
    }
    /**
     * @return the sentences of this document (live list, not a copy)
     */
    public List<Sentence> getSentences() {
        return _sentences;
    }
    // public void readFrom(Reader reader) {
    // XMLReader xmlr = new XMLReader(reader);
    // this.readFrom(xmlr);
    // }
    /**
     * Pull-parses the whole document: consumes events until document end,
     * delegating each "sentence" start element to Sentence.readFrom.
     *
     * @param xmlr the pull reader to consume
     */
    public void readFrom(XMLReader xmlr) {
        int event = -1;
        while (true) {
            event = xmlr.next();
            if (xmlr.isDocumentEnd(event)) {
                break;
            }
            if (xmlr.isElementStart(event)) {
                String name = xmlr.getElementName();
                if (name.equals("sentence")) {
                    Sentence sentence = new Sentence();
                    sentence.readFrom(xmlr);
                    _sentences.add(sentence);
                }
            }
        }
    }
    // /**
    // *
    // *
    // * @param writer
    // * @param include
    // */
    // public void writeTo(Writer writer) {
    // XMLWriter xmlw = new XMLWriter(writer);
    // this.writeTo(xmlw);
    // }
    /**
     * Writes the whole document (prologue, DTD reference, "l2f_annotation"
     * root and all sentences) to the given writer.
     *
     * @param xmlw the writer to emit to
     */
    public void writeTo(XMLWriter xmlw) {
        // write XML prologue
        xmlw.writeStartDocument("UTF-8", "1.0");
        xmlw.writeDTD("<!DOCTYPE l2f_annotation SYSTEM \"/afs/l2f/home/tmcl/rudrico/l2f_annotation.dtd\">");
        // write root element
        xmlw.writeStartElement("l2f_annotation");
        // write sentences
        for (Sentence sentence : _sentences) {
            sentence.writeTo(xmlw);
        }
        xmlw.writeEndElement();
        // write document end (closes all open structures)
        xmlw.writeEndDocument();
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import pt.inesc.id.l2f.annotation.document.DocumentElement;
import pt.inesc.id.l2f.annotation.document.xml.XMLReader;
import pt.inesc.id.l2f.annotation.document.xml.XMLWriter;
/**
 * XML-level sentence element: an ordered list of words.
 */
public class Sentence implements DocumentElement {

    /** Words of this sentence, in order. */
    private List<Word> _words;

    public Sentence() {
        _words = new ArrayList<Word>();
    }

    public Sentence(List<Word> words) {
        _words = words;
    }

    /** Copy constructor (shallow copy of the word list). */
    public Sentence(Sentence sentence) {
        if (sentence.getWords() != null) {
            _words = new ArrayList<Word>(sentence.getWords());
        }
    }

    /**
     * @return an unmodifiable view of the words
     */
    public List<Word> getWords() {
        return Collections.unmodifiableList(_words);
    }

    /**
     * @param word the word to append
     */
    public void addWord(Word word) {
        _words.add(word);
    }

    /** Pull-parses word children until the matching sentence end event. */
    public void readFrom(XMLReader xmlr) {
        for (;;) {
            int event = xmlr.next();
            if (xmlr.isElementEnd(event, "sentence")) {
                return;
            }
            if (xmlr.isElementStart(event)) {
                if (xmlr.getElementName().equals("word")) {
                    Word word = new Word();
                    word.setName(xmlr.getAttributes().get("name"));
                    word.readFrom(xmlr);
                    _words.add(word);
                }
            }
        }
    }

    /** Writes this sentence and all contained words. */
    public void writeTo(XMLWriter xmlw) {
        xmlw.writeStartElement("sentence");
        for (Word word : _words) {
            word.writeTo(xmlw);
        }
        xmlw.writeEndElement();
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml;
import java.util.ArrayList;
import java.util.List;
import pt.inesc.id.l2f.annotation.document.DocumentElement;
import pt.inesc.id.l2f.annotation.document.xml.XMLReader;
import pt.inesc.id.l2f.annotation.document.xml.XMLWriter;
/**
 * XML-level word element: a surface form plus its candidate class readings.
 */
public class Word implements DocumentElement {

    /** Surface form of the word. */
    private String _name;
    /** Candidate morphological classes for this word. */
    private List<Class> _classes;

    public Word() {
        _classes = new ArrayList<Class>();
    }

    public Word(String name) {
        this();
        _name = name;
    }

    public Word(String name, List<Class> classes) {
        _name = name;
        _classes = classes;
    }

    /**
     * Copy constructor (shallow copy of the class list).
     * BUG FIX: the second guard used to re-test getName() instead of
     * getClasses(), so _classes stayed null whenever the source name was null;
     * it is now also initialized via this() so it can never be null.
     */
    public Word(Word word) {
        this();
        if (word.getName() != null) {
            _name = word.getName();
        }
        if (word.getClasses() != null) {
            _classes = new ArrayList<Class>(word.getClasses());
        }
    }

    /**
     * @return the name
     */
    public String getName() {
        return _name;
    }

    /**
     * @param name the name to set
     */
    public void setName(String name) {
        _name = name;
    }

    /**
     * @return the classes
     */
    public List<Class> getClasses() {
        return _classes;
    }

    /**
     * @param c the class to add
     */
    public void addClass(Class c) {
        _classes.add(c);
    }

    /** Pull-parses class children until the matching word end event. */
    public void readFrom(XMLReader xmlr) {
        int event = -1;
        while (true) {
            event = xmlr.next();
            if (xmlr.isElementEnd(event, "word")) {
                break;
            }
            if (xmlr.isElementStart(event)) {
                String name = xmlr.getElementName();
                if (name.equals("class")) {
                    Class c = new Class();
                    c.setRoot(xmlr.getAttributes().get("root"));
                    c.readFrom(xmlr);
                    _classes.add(c);
                }
            }
        }
    }

    /** Writes this word and all contained classes. */
    public void writeTo(XMLWriter xmlw) {
        xmlw.writeStartElement("word");
        xmlw.writeAttribute("name", _name);
        for (Class c : _classes) {
            c.writeTo(xmlw);
        }
        xmlw.writeEndElement();
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml;
import java.util.ArrayList;
import java.util.List;
import pt.inesc.id.l2f.annotation.document.DocumentElement;
import pt.inesc.id.l2f.annotation.document.xml.XMLReader;
import pt.inesc.id.l2f.annotation.document.xml.XMLWriter;
/**
 * XML-level class element: a root form plus a list of atrib/value id pairs.
 */
public class Class implements DocumentElement {

    /** Root (lemma/base) form of this reading. */
    private String _root;
    /** Attribute/value pairs describing this reading. */
    private List<Id> _ids;

    public Class() {
        _ids = new ArrayList<Id>();
    }

    public Class(String root, List<Id> ids) {
        _root = root;
        _ids = ids;
    }

    /** Copy constructor (shallow copy of the id list). */
    public Class(Class c) {
        if (c.getRoot() != null) {
            _root = new String(c.getRoot());
        }
        if (c.getIds() != null) {
            _ids = new ArrayList<Id>(c.getIds());
        }
    }

    /**
     * @return the root
     */
    public String getRoot() {
        return _root;
    }

    /**
     * @param root the root to set
     */
    public void setRoot(String root) {
        _root = root;
    }

    /**
     * @return the ids
     */
    public List<Id> getIds() {
        return _ids;
    }

    /**
     * @param id the id to add
     */
    public void addId(Id id) {
        _ids.add(id);
    }

    /** Pull-parses id children until the matching class end event. */
    public void readFrom(XMLReader xmlr) {
        for (;;) {
            int event = xmlr.next();
            if (xmlr.isElementEnd(event, "class")) {
                return;
            }
            if (xmlr.isElementStart(event)) {
                if (xmlr.getElementName().equals("id")) {
                    Id id = new Id();
                    id.readFrom(xmlr);
                    _ids.add(id);
                }
            }
        }
    }

    /** Writes this class (root attribute plus all id children). */
    public void writeTo(XMLWriter xmlw) {
        xmlw.writeStartElement("class");
        xmlw.writeAttribute("root", _root);
        for (Id id : _ids) {
            id.writeTo(xmlw);
        }
        xmlw.writeEndElement();
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml;
import java.util.Map;
import pt.inesc.id.l2f.annotation.document.DocumentElement;
import pt.inesc.id.l2f.annotation.document.xml.XMLReader;
import pt.inesc.id.l2f.annotation.document.xml.XMLWriter;
/**
 * XML-level id element: a single attribute-name/value pair.
 */
public class Id implements DocumentElement {

    /** Attribute name. */
    private String _atrib;
    /** Attribute value. */
    private String _value;

    public Id() {}

    public Id(String atrib, String value) {
        _atrib = atrib;
        _value = value;
    }

    /** Copy constructor. */
    public Id(Id id) {
        if (id.getAtrib() != null) {
            _atrib = id.getAtrib();
        }
        if (id.getValue() != null) {
            _value = id.getValue();
        }
    }

    /**
     * @return the atrib
     */
    public String getAtrib() {
        return _atrib;
    }

    /**
     * @param atrib the atrib to set
     */
    public void setAtrib(String atrib) {
        _atrib = atrib;
    }

    /**
     * @return the value
     */
    public String getValue() {
        return _value;
    }

    /**
     * @param value the value to set
     */
    public void setValue(String value) {
        _value = value;
    }

    /** Populates atrib/value from the current element's attributes. */
    public void readFrom(XMLReader xmlr) {
        Map<String, String> attributes = xmlr.getAttributes();
        _atrib = attributes.get("atrib");
        _value = attributes.get("value");
    }

    /** Writes this pair as an id element with atrib/value attributes. */
    public void writeTo(XMLWriter xmlw) {
        xmlw.writeStartElement("id");
        xmlw.writeAttribute("atrib", _atrib);
        xmlw.writeAttribute("value", _value);
        xmlw.writeEndElement();
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt.rudrico;
import java.io.ByteArrayInputStream;
import java.util.List;
import pt.inesc.id.l2f.annotation.document.laf.Classification;
import pt.inesc.id.l2f.annotation.document.laf.Feature;
import pt.inesc.id.l2f.annotation.document.laf.FeatureStructure;
import pt.inesc.id.l2f.annotation.document.laf.MorphoSyntacticAnnotation;
import pt.inesc.id.l2f.annotation.document.laf.Segment;
import pt.inesc.id.l2f.annotation.document.laf.Segmentation;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.jni.RudricoJNIExecutionMode;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.AnnotationDocument;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.Class;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.Id;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.Sentence;
import pt.inesc.id.l2f.annotation.tools.pt.rudrico.xml.Word;
import pt.inesc.id.l2f.annotation.tool.Classifier;
/**
 * Classifier that wraps the RuDriCo morphological disambiguation tool
 * (run through JNI) and converts its XML output into a segmentation plus
 * a morpho-syntactic annotation layer stored on this tool.
 *
 * @author Tiago Luis
 */
public class Rudrico extends Classifier {
// rudrico command
// private static final String[] COMMAND = {"/usr/bin/RuDriCo", "-R", "/usr/share/split/rudrico/rules/rules.pl"};
// rudrico environment variables
// private static final String[][] ENVIRONMENT = {};
// rudrico charset
// private static final String CHARSET = "ISO-8859-1";
/** Creates the classifier backed by the JNI execution mode. */
public Rudrico() {
// super(new RudricoThriftClientExecutionMode("wc09", 9091));
// super(new RudricoThriftClientExecutionMode(ThriftExecutionMode.findFreePort()));
super(new RudricoJNIExecutionMode());
// HACK: Cannot refer to 'this' while explicitly invoking a constructor
_mode.setTool(this);
}
/** Starts the underlying execution mode. */
@Override
public void start() {
_mode.start();
}
/** Shuts down the underlying execution mode. */
@Override
public void close() {
_mode.close();
}
/**
 * Parses the RuDriCo XML output documents into segments and
 * classifications, then stores both layers on this tool.
 *
 * NOTE(review): the incoming annotation and input parameters are never
 * read here; results are accumulated via addSegmentation /
 * addMorphoSyntacticAnnotation instead.
 *
 * @param annotation incoming annotation (unused)
 * @param input the input lines sent to the tool (unused)
 * @param output the tool's XML output, one document per string
 */
@Override
public void tagg(MorphoSyntacticAnnotation annotation, List<String> input, List<String> output) {
Segmentation segmentation = new Segmentation();
MorphoSyntacticAnnotation morphoSyntacticAnnotation = new MorphoSyntacticAnnotation();
// running counter used to generate unique segment ids ("t0", "t1", ...)
int id = 0;
// get rudrico output document
for (String string : output) {
AnnotationDocument document = new AnnotationDocument();
// NOTE(review): getBytes() uses the platform default charset — confirm
// it matches the encoding declared by the XML document
document.readFrom(new ByteArrayInputStream(string.getBytes()));
for (Sentence sentence : document.getSentences()) {
for (Word word : sentence.getWords()) {
// every <word> of the output becomes one segment
Segment segment = new Segment();
segment.setWord(word.getName());
// TODO: fix this
segment.setId("t" + id++);
// span defaults to "none" until LOW/HIG ids are seen below
segment.setFrom("none");
segment.setTo("none");
segmentation.addSegment(segment);
Classification classification = new Classification();
classification.addSegment(segment.getId());
// WordFormAlternatives wfAlt = new WordFormAlternatives();
// each <class> of the word becomes one feature structure
for (Class c : word.getClasses()) {
FeatureStructure fs = new FeatureStructure();
for (Id i : c.getIds()) {
// LOW/HIG ids carry the segment's character span; every
// other id becomes a named linguistic feature
if (i.getAtrib().equals("LOW")) {
segment.setFrom(i.getValue());
} else if (i.getAtrib().equals("HIG")) {
segment.setTo(i.getValue());
} else {
Feature f = new Feature();
f.setName(i.getAtrib());
f.setValue(i.getValue());
fs.addFeature(i.getAtrib(), f);
}
}
classification.addFeatureStructure(fs);
// wfAlt.addWordForm(wf);
}
// wordFormAlternatives.add(wfAlt);
morphoSyntacticAnnotation.addClassification(classification);
}
}
}
this.addSegmentation(segmentation);
this.addMorphoSyntacticAnnotation(morphoSyntacticAnnotation);
}
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt.palavroso;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import pt.inesc.id.l2f.annotation.tools.pt.Lemma;
import pt.inesc.id.l2f.annotation.tools.pt.MorphologicalUnit;
/**
 * One token produced by the Palavroso morphological analyser.
 *
 * <p>Each raw analysis has the form {@code [lemma]TAG}, where TAG is a
 * positional classification string (category, subcategory, mood, tense,
 * person, number, gender, degree, case, formation). The constructor parses
 * every analysis into a {@link Lemma} whose positional codes are expanded
 * into named features via the static translation tables below. A '=' at a
 * position means "unspecified" and produces no feature.</p>
 */
public class PalavrosoToken extends MorphologicalUnit {

    // Matches one analysis: "[lemma]" followed by an OPTIONAL tag string;
    // group 2 is therefore null when the tag is missing.
    private static Pattern pattern = Pattern.compile("\\[([^\\s]+)\\]([^\\s]+)?");

    // MOOD translation hash table
    private static Map<String, String> _pos3 = new HashMap<String, String>();
    // TENSE translation hash table
    private static Map<String, String> _pos4 = new HashMap<String, String>();
    // PERSON translation hash table
    private static Map<String, String> _pos5 = new HashMap<String, String>();
    // NUMBER translation hash table
    private static Map<String, String> _pos6 = new HashMap<String, String>();
    // GENDER translation hash table
    private static Map<String, String> _pos7 = new HashMap<String, String>();
    // DEGREE translation hash table
    private static Map<String, String> _pos8 = new HashMap<String, String>();
    // CASE translation hash table
    private static Map<String, String> _pos9 = new HashMap<String, String>();
    // FORMATION translation hash table
    private static Map<String, String> _pos10 = new HashMap<String, String>();
    // NOUN subcategory values
    private static Map<String, String> _pos2nou = new HashMap<String, String>();
    // PRONOUN subcategory values
    private static Map<String, String> _pos2pro = new HashMap<String, String>();
    // ARTICLE subcategory values
    private static Map<String, String> _pos2art = new HashMap<String, String>();
    // CONJUNCTION subcategory values
    private static Map<String, String> _pos2con = new HashMap<String, String>();
    // NUMERAL subcategory values
    private static Map<String, String> _pos2num = new HashMap<String, String>();
    // RESIDUAL subcategory values
    private static Map<String, String> _pos2res = new HashMap<String, String>();

    static {
        // initialize MOOD hash table
        _pos3.put("i", "ind");
        _pos3.put("s", "sbj");
        _pos3.put("m", "imp");
        _pos3.put("c", "cnd");
        _pos3.put("n", "inf");
        _pos3.put("f", "iif");
        _pos3.put("p", "par");
        _pos3.put("g", "ger");
        _pos3.put("=", "=");
        // initialize TENSE hash table
        _pos4.put("p", "prs");
        _pos4.put("i", "pim");
        _pos4.put("f", "fut");
        _pos4.put("s", "ppe");
        _pos4.put("q", "pmp");
        _pos4.put("=", "=");
        // initialize PERSON hash table
        _pos5.put("1", "1");
        _pos5.put("2", "2");
        _pos5.put("3", "3");
        _pos5.put("=", "=");
        // initialize NUMBER hash table
        _pos6.put("s", "s");
        _pos6.put("p", "p");
        _pos6.put("=", "=");
        // initialize GENDER hash table
        _pos7.put("m", "m");
        _pos7.put("f", "f");
        _pos7.put("=", "=");
        // initialize DEGREE hash table
        _pos8.put("p", "pst");
        _pos8.put("c", "cmp");
        _pos8.put("s", "sup");
        _pos8.put("=", "=");
        // initialize CASE hash table
        _pos9.put("n", "nom");
        _pos9.put("a", "acc");
        _pos9.put("d", "dat");
        _pos9.put("=", "=");
        // initialize FORMATION hash table
        _pos10.put("s", "sim");
        _pos10.put("f", "fus");
        _pos10.put("=", "=");
        // initialize NOUN subcategory values
        _pos2nou.put("c", "com");
        _pos2nou.put("p", "prp");
        // initialize PRONOUN subcategory values
        _pos2pro.put("p", "per");
        _pos2pro.put("d", "dem");
        _pos2pro.put("i", "idf");
        _pos2pro.put("o", "pos");
        _pos2pro.put("t", "itr");
        _pos2pro.put("r", "rel");
        _pos2pro.put("e", "exc");
        _pos2pro.put("f", "ref");
        // initialize ARTICLE subcategory values
        _pos2art.put("d", "def");
        _pos2art.put("i", "idf");
        // initialize CONJUNCTION subcategory values
        _pos2con.put("c", "coo");
        _pos2con.put("s", "sub");
        // initialize NUMERAL subcategory values
        _pos2num.put("c", "car");
        _pos2num.put("o", "ord");
        _pos2num.put("r", "roc");
        _pos2num.put("z", "roo");
        _pos2num.put("=", "=");
        // initialize RESIDUAL subcategory values
        _pos2res.put("f", "lwr");
        _pos2res.put("a", "abb");
        _pos2res.put("y", "acr");
        _pos2res.put("s", "sym");
        _pos2res.put("e", "ema");
        _pos2res.put("h", "htt");
        _pos2res.put("i", "ipp");
    }

    /**
     * Builds a token for the given surface form from a list of raw
     * Palavroso analyses (one "[lemma]TAG" string per reading).
     *
     * @param form the surface form of the token
     * @param lemmas the raw analysis strings for this form
     */
    public PalavrosoToken(String form, List<String> lemmas) {
        super();
        super.setForm(form);
        for (String lstring : lemmas) {
            Matcher matcher = pattern.matcher(lstring);
            if (!matcher.find()) {
                continue;
            }
            String tag = matcher.group(2);
            // BUG FIX: the tag group is optional in the pattern, so group(2)
            // may be null; also guard against tags too short to carry the
            // category + subcategory read below (avoids NPE / AIOOBE).
            if (tag == null || tag.length() < 2) {
                continue;
            }
            Lemma lemma = new Lemma(matcher.group(1));
            char[] classification = tag.toCharArray();
            lemma.setFeature("POS", String.valueOf(classification[0]) + String.valueOf(classification[1]));
            // expand the positional tag into named features
            this.processFeatures(lemma, classification);
            // add lemma
            super.getLemmas().add(lemma);
        }
    }

    /**
     * Expands a positional classification tag into named PROP/value
     * features on the given lemma. Position 0 selects the category; the
     * remaining positions are looked up in the translation tables that
     * apply to that category. A '=' means "unspecified" and is skipped.
     *
     * @param lemma the lemma to receive the features
     * @param tagg the classification tag characters
     * @throws RuntimeException if the category character is unknown
     */
    private void processFeatures(Lemma lemma, char[] tagg) {
        // transforms a tag in a multiple PROP/value elements
        if (tagg[0] == 'N') {
            lemma.setFeature("CAT", "nou");
            if (tagg[1] != '=') { lemma.setFeature("SCT", _pos2nou.get(String.valueOf(tagg[1]))); }
            if (tagg[5] != '=') { lemma.setFeature("NUM", _pos6.get(String.valueOf(tagg[5]))); }
            if (tagg[6] != '=') { lemma.setFeature("GEN", _pos7.get(String.valueOf(tagg[6]))); }
        } else if (tagg[0] == 'V') {
            lemma.setFeature("CAT", "ver");
            if (tagg[2] != '=') { lemma.setFeature("MOD", _pos3.get(String.valueOf(tagg[2]))); }
            if (tagg[3] != '=') { lemma.setFeature("TEN", _pos4.get(String.valueOf(tagg[3]))); }
            if (tagg[4] != '=') { lemma.setFeature("PER", _pos5.get(String.valueOf(tagg[4]))); }
            if (tagg[5] != '=') { lemma.setFeature("NUM", _pos6.get(String.valueOf(tagg[5]))); }
            if (tagg[6] != '=') { lemma.setFeature("GEN", _pos7.get(String.valueOf(tagg[6]))); }
        } else if (tagg[0] == 'A') {
            lemma.setFeature("CAT", "adj");
            if (tagg[5] != '=') { lemma.setFeature("NUM", _pos6.get(String.valueOf(tagg[5]))); }
            if (tagg[6] != '=') { lemma.setFeature("GEN", _pos7.get(String.valueOf(tagg[6]))); }
            if (tagg[7] != '=') { lemma.setFeature("DEG", _pos8.get(String.valueOf(tagg[7]))); }
        } else if (tagg[0] == 'P') {
            lemma.setFeature("CAT", "pro");
            if (tagg[1] != '=') { lemma.setFeature("SCT", _pos2pro.get(String.valueOf(tagg[1]))); }
            if (tagg[4] != '=') { lemma.setFeature("PER", _pos5.get(String.valueOf(tagg[4]))); }
            if (tagg[5] != '=') { lemma.setFeature("NUM", _pos6.get(String.valueOf(tagg[5]))); }
            if (tagg[6] != '=') { lemma.setFeature("GEN", _pos7.get(String.valueOf(tagg[6]))); }
            if (tagg[8] != '=') { lemma.setFeature("CAS", _pos9.get(String.valueOf(tagg[8]))); }
            if (tagg[9] != '=') { lemma.setFeature("FOR", _pos10.get(String.valueOf(tagg[9]))); }
        } else if (tagg[0] == 'T') {
            lemma.setFeature("CAT", "art");
            if (tagg[1] != '=') { lemma.setFeature("SCT", _pos2art.get(String.valueOf(tagg[1]))); }
            if (tagg[5] != '=') { lemma.setFeature("NUM", _pos6.get(String.valueOf(tagg[5]))); }
            if (tagg[6] != '=') { lemma.setFeature("GEN", _pos7.get(String.valueOf(tagg[6]))); }
        } else if (tagg[0] == 'R') {
            lemma.setFeature("CAT", "adv");
            if (tagg[7] != '=') { lemma.setFeature("DEG", _pos8.get(String.valueOf(tagg[7]))); }
        } else if (tagg[0] == 'S') {
            lemma.setFeature("CAT", "pre");
            if (tagg[5] != '=') { lemma.setFeature("NUM", _pos6.get(String.valueOf(tagg[5]))); }
            if (tagg[6] != '=') { lemma.setFeature("GEN", _pos7.get(String.valueOf(tagg[6]))); }
            if (tagg[9] != '=') { lemma.setFeature("FOR", _pos10.get(String.valueOf(tagg[9]))); }
        } else if (tagg[0] == 'C') {
            lemma.setFeature("CAT", "con");
            // BUG FIX: the guard used to test tagg[6] while reading tagg[1];
            // the subcategory lives at position 1 (cf. every other category
            // and the original Perl: $POS2CON{tagg[1]}).
            if (tagg[1] != '=') { lemma.setFeature("SCT", _pos2con.get(String.valueOf(tagg[1]))); }
        } else if (tagg[0] == 'M') {
            lemma.setFeature("CAT", "num");
            if (tagg[1] != '=') { lemma.setFeature("SCT", _pos2num.get(String.valueOf(tagg[1]))); }
            if (tagg[5] != '=') { lemma.setFeature("NUM", _pos6.get(String.valueOf(tagg[5]))); }
            if (tagg[6] != '=') { lemma.setFeature("GEN", _pos7.get(String.valueOf(tagg[6]))); }
        } else if (tagg[0] == 'I') {
            lemma.setFeature("CAT", "int");
        } else if (tagg[0] == 'U') {
            lemma.setFeature("CAT", "pmk");
        } else if (tagg[0] == 'X') {
            lemma.setFeature("CAT", "res");
            // BUG FIX: same guard/index mismatch as the 'C' branch — the
            // subcategory is read from position 1, so test position 1.
            if (tagg[1] != '=') {
                lemma.setFeature("SCT", _pos2res.get(String.valueOf(tagg[1])));
            }
        } else if (tagg[0] == 'O') {
            lemma.setFeature("CAT", "pun");
        } else {
            // TODO: define a dedicated exception type
            throw new RuntimeException("Error in CAT");
        }
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt.palavroso;
import java.util.ArrayList;
import java.util.List;
/**
 * Groups the Palavroso tokens of one analysed segment and parses the raw
 * analyser output line into {@link PalavrosoToken} instances.
 */
public class PalavrosoSegment {
// tokens parsed so far for this segment
List<PalavrosoToken> _tokens;
public PalavrosoSegment() {
_tokens = new ArrayList<PalavrosoToken>();
}
// List<MorphologicalUnit> _tokens = new ArrayList<MorphologicalUnit>();
//
// public MorphologicalUnit token(int i) {
// return _tokens.get(i);
// }
//
// public void add(MorphologicalUnit t) {
// _tokens.add(t);
// }
//
// public int size() {
// return _tokens.size();
// }
//
// public MorphologicalUnit get(int i) {
// return _tokens.get(i);
// }
//
// public void accept(SegmentVisitor visitor) {
// visitor.visitPalavrosoSegment(this);
// }
//
// public void reset() {
// _tokens = new ArrayList<MorphologicalUnit>();
// }
/**
 * Parses one line of Palavroso output of the form
 * "word (analysis analysis ...) word (...) ..." into tokens, one per
 * word together with its parenthesised analyses.
 *
 * @param analysis the raw output line
 * @return the accumulated token list (also retained by this segment)
 */
public List<PalavrosoToken> process(String analysis) {
analysis = analysis.replaceAll("\n", "");
String[] elements = analysis.split("\\s");
// i is advanced in three places: past the word (i++ just below), across
// the classification tokens (inner while), and past the element that
// closes the ")" group (the outer for's own i++ after the inner break)
for (int i = 0; i < elements.length; i++) {
String word = elements[i++];
// System.out.println("word: " + word);
List<String> list = new ArrayList<String>();
// collect the "(...)"-delimited analyses that follow the word
while (i < elements.length) {
String element = elements[i];
// System.out.println("classification: " + element);
// strip the opening parenthesis from the first analysis
if (element.startsWith("(")) {
element = element.substring("(".length());
}
// NOTE(review): the trailing ")" stays attached to the last analysis
// string; confirm PalavrosoToken's regex tolerates it
list.add(element);
if (element.endsWith(")")) {
// group closed: build the token; the outer for advances i
// past this closing element
_tokens.add(new PalavrosoToken(word, list));
break;
}
i++;
}
}
// String pstring = "";
// String[] elements = analysis.split(" ");
//
// int i;
//
// for (i = 0; i < elements.length; i++) {
// pstring += elements[i] + " ";
//
// if (elements[i].startsWith("(")) {
//
// if (elements[i].length() == 1) {
// break;
// }
//
// for (i = i + 1; i < elements.length; i++) {
// pstring += elements[i] + " ";
//
// if (elements[i].endsWith(")")) {
//// System.out.println("DEBUG: " + pstring);
//
// PalavrosoToken ptoken = new PalavrosoToken(pstring);
// _tokens.add(ptoken);
//
// pstring = "";
//
// break;
// }
// }
// }
// }
return _tokens;
}
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt.palavroso;
import pt.inesc.id.l2f.annotation.tools.pt.MorphologicalUnit;
/**
 * Visitor over Palavroso segment structures (visitor pattern).
 */
public abstract class SegmentVisitor {
/** Visits a whole Palavroso segment. */
public abstract void visitPalavrosoSegment(PalavrosoSegment palavrosoSegment);
/** Visits a single morphological unit (token). */
public abstract void visitToken(MorphologicalUnit token);
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt.palavroso.thrift;
import java.util.ArrayList;
import java.util.List;
import com.facebook.thrift.TException;
import pt.inesc.id.l2f.annotation.tool.execution.ThriftExecutionMode;
import pt.inesc.id.l2f.annotation.tool.execution.ToolExecutionModeUnit;
import pt.inesc.id.l2f.annotation.tools.pt.palavroso.thrift.gen.PalavrosoServer;
import pt.inesc.id.l2f.annotation.tools.pt.palavroso.thrift.gen.PalavrosoServer.Client;
import pt.inesc.id.l2f.annotation.unit.InputDocumentProcessUnit;
import pt.inesc.id.l2f.annotation.unit.LinguisticAnnotationProcessUnit;
import pt.inesc.id.l2f.annotation.input.InputDocument;
import pt.inesc.id.l2f.annotation.input.TextElement;
/**
 * Execution mode that talks to a Palavroso Thrift server — either an
 * already-running remote one (hostname + port) or one launched locally on
 * the given port.
 */
public class PalavrosoThriftClientExecutionMode extends ThriftExecutionMode {

    // path of the locally launched Thrift server binary
    private static final String COMMAND = "/usr/share/split/palavroso/thrift/bin/server";
    // extra environment variables for the locally launched server
    private static final String[][] ENVIRONMENT = {};
    // Thrift client stub used to send text to the server
    private Client _client;

    /**
     * Connects to an already-running server.
     *
     * @param hostname the server host
     * @param port the server port
     */
    public PalavrosoThriftClientExecutionMode(String hostname, int port) {
        super(hostname, port);
    }

    /**
     * Launches a local server on the given port and connects to it.
     *
     * @param port the port the server will listen on
     */
    public PalavrosoThriftClientExecutionMode(int port) {
        super(new String[] { COMMAND, String.valueOf(port) }, port, ENVIRONMENT);
    }

    /** Builds the Thrift client stub over the established protocol. */
    @Override
    public void createClient() {
        _client = new PalavrosoServer.Client(_protocol);
    }

    /**
     * Sends every text element of the unit's input document to the server
     * and collects one output string per element.
     *
     * @param unit the unit holding the input document
     * @return an execution unit carrying the server output
     */
    @Override
    public ToolExecutionModeUnit getOutput(InputDocumentProcessUnit unit) {
        final List<String> input = new ArrayList<String>();
        final List<String> output = new ArrayList<String>();
        final InputDocument document = unit.getInputDocument();
        for (TextElement node = document.next(); node != null; node = document.next()) {
            try {
                output.add(_client.process(node.getText()));
            } catch (TException e) {
                e.printStackTrace();
            }
        }
        final ToolExecutionModeUnit eunit = new ToolExecutionModeUnit(input, unit);
        eunit.setOutput(output);
        return eunit;
    }

    /**
     * Not supported for annotation units.
     *
     * @param unit ignored
     * @return always null
     */
    @Override
    public ToolExecutionModeUnit getOutput(LinguisticAnnotationProcessUnit unit) {
        return null;
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt.palavroso;
/**
 * Stand-in for the Palavroso analyser on Windows (where the native tool is
 * unavailable): echoes the input followed by one fixed sample analysis.
 */
public class PalavrosoWindows extends Thread {

    /**
     * Returns the given input followed by a canned morphological analysis.
     *
     * @param input the text to "annotate"
     * @return the input, a space, and the fixed sample analysis
     */
    public String input(String input) {
        final StringBuilder result = new StringBuilder(input);
        result.append(' ');
        result.append("([maria]Nc...sf... [maria]A....sfp..)");
        return result.toString();
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt.palavroso;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.util.ArrayList;
import java.util.List;
import pt.inesc.id.l2f.annotation.input.InputDocument;
import pt.inesc.id.l2f.annotation.input.TextElement;
import pt.inesc.id.l2f.annotation.tool.execution.ToolExecutionModeUnit;
import pt.inesc.id.l2f.annotation.tool.execution.ExternalProcessExecutionMode;
import pt.inesc.id.l2f.annotation.unit.InputDocumentProcessUnit;
import pt.inesc.id.l2f.annotation.unit.LinguisticAnnotationProcessUnit;
/**
 * Execution mode that runs Palavroso as an external process, feeding it
 * input lines followed by a sentinel line and reading back output until
 * that sentinel reappears.
 *
 * @author Tiago Luis
 */
public class PalavrosoExecutionMode extends ExternalProcessExecutionMode {

    public PalavrosoExecutionMode(String[] command, String[][] environment, String charset) {
        super(command, environment, charset);
    }

    /**
     * Collects all text elements of the unit's input document and appends
     * the sentinel marker that delimits the end of the batch.
     *
     * @param unit the unit holding the input document
     * @return an execution unit carrying the input lines
     */
    @Override
    public ToolExecutionModeUnit setInput(InputDocumentProcessUnit unit) {
        final List<String> input = new ArrayList<String>();
        final InputDocument document = unit.getInputDocument();
        for (TextElement node = document.next(); node != null; node = document.next()) {
            input.add(node.getText());
        }
        // sentinel: tells setOutput below where this batch's output ends
        input.add("\nKKAAPPUUTT\n");
        return new ToolExecutionModeUnit(input, unit);
    }

    /**
     * Not supported for annotation units.
     *
     * @param unit ignored
     * @return always null
     */
    @Override
    public ToolExecutionModeUnit setInput(LinguisticAnnotationProcessUnit unit) {
        // TODO: throw an exception
        return null;
    }

    /**
     * Reads the process output line by line until end-of-stream or the
     * sentinel line, storing the collected lines on the unit.
     *
     * @param unit the execution unit to receive the output
     * @param is the process output stream (unused; the reader wraps it)
     * @param reader a BufferedReader over the process output
     * @return the same unit, with its output set
     */
    @Override
    public ToolExecutionModeUnit setOutput(ToolExecutionModeUnit unit, InputStream is, Reader reader) {
        final List<String> output = new ArrayList<String>();
        final BufferedReader br = (BufferedReader) reader;
        try {
            for (String line = br.readLine(); line != null; line = br.readLine()) {
                if (line.startsWith("KKAAPPUUTT")) {
                    break;
                }
                output.add(line);
            }
            unit.setOutput(output);
        } catch (IOException e) {
            e.printStackTrace();
        }
        return unit;
    }
}
| Java |
package pt.inesc.id.l2f.annotation.tools.pt.palavroso;
import java.util.List;
import pt.inesc.id.l2f.annotation.document.laf.Classification;
import pt.inesc.id.l2f.annotation.document.laf.Feature;
import pt.inesc.id.l2f.annotation.document.laf.FeatureStructure;
import pt.inesc.id.l2f.annotation.document.laf.MorphoSyntacticAnnotation;
import pt.inesc.id.l2f.annotation.document.laf.Segment;
import pt.inesc.id.l2f.annotation.document.laf.Segmentation;
import pt.inesc.id.l2f.annotation.tools.pt.Lemma;
import pt.inesc.id.l2f.annotation.tool.Classifier;
/**
 * Classifier that wraps the Palavroso morphological analyser and converts
 * its textual output into a segmentation plus a morpho-syntactic
 * annotation layer stored on this tool.
 *
 * <p>Each output line holds the analyses for the corresponding input line;
 * segment character offsets are computed by locating each token's surface
 * form inside that input line.</p>
 *
 * @author Tiago Luis
 */
public class Palavroso extends Classifier {

    // palavroso command
    private static final String[] COMMAND = {"/usr/bin/morfolog", "-spu", "-D", "/usr/share/split/palavroso/dict/palav.lex"};
    // palavroso environment variables
    private static final String[][] ENVIRONMENT = {{"PALAVROSO", "palavroso/services/lemmatization/"}};
    // palavroso charset
    private static final String CHARSET = "ISO-8859-1";

    /** Creates the classifier backed by the external-process execution mode. */
    public Palavroso() {
        super(new PalavrosoExecutionMode(COMMAND, ENVIRONMENT, CHARSET));
        // super(new PalavrosoThriftClientExecutionMode(ThriftExecutionMode.findFreePort()));
        // super(new PalavrosoThriftClientExecutionMode("wc09", 9090));
        // HACK: cannot refer to 'this' while explicitly invoking a constructor
        _mode.setTool(this);
    }

    /** Initializes and starts the underlying execution mode. */
    public void start() {
        _mode.init();
        _mode.start();
    }

    /** Shuts down the underlying execution mode. */
    @Override
    public void close() {
        _mode.close();
    }

    /**
     * Converts the Palavroso output lines into segments (with character
     * spans located in the matching input line) and classifications, then
     * stores both layers on this tool.
     *
     * @param a incoming annotation (unused)
     * @param input the lines sent to the analyser
     * @param output the analyser's output, one line per input line
     */
    @Override
    public void tagg(MorphoSyntacticAnnotation a, List<String> input, List<String> output) {
        Segmentation segmentation = new Segmentation();
        MorphoSyntacticAnnotation annotation = new MorphoSyntacticAnnotation();
        int i = 0;
        // running counter used to generate unique segment ids ("t0", "t1", ...)
        int id = 0;
        for (String line : output) {
            // BUG FIX: walk through the input lines in step with the output
            // lines; the index was previously never incremented, so every
            // line's offsets were computed against input.get(0).
            String current = input.get(i++);
            int offset = 0;
            PalavrosoSegment pseg = new PalavrosoSegment();
            List<PalavrosoToken> ptokens = pseg.process(line);
            for (PalavrosoToken ptoken : ptokens) {
                int from = current.indexOf(ptoken.getForm(), offset);
                // BUG FIX: indexOf(s, fromIndex) returns an ABSOLUTE index,
                // so the next search must start just past this match
                // ("offset = ..."); the old "offset += from + length"
                // overshot from the second repeated token onwards. When the
                // form is not found (from == -1) the search position is
                // left unchanged.
                if (from >= 0) {
                    offset = from + ptoken.getForm().length();
                }
                Segment segment = new Segment("t" + id++, String.valueOf(from), String.valueOf(from + ptoken.getForm().length()), ptoken.getForm());
                segmentation.addSegment(segment);
                if (ptoken.getLemmas() == null) {
                    continue;
                }
                Classification classification = new Classification();
                classification.addSegment(segment.getId());
                for (Lemma lemma : ptoken.getLemmas()) {
                    FeatureStructure fs = new FeatureStructure();
                    // the lemma itself is stored as a LEMMA feature
                    fs.addFeature("LEMMA", new Feature("LEMMA", lemma.getGStem()));
                    // copy every non-null feature of the lemma
                    for (String name : lemma.getFeatures().keySet()) {
                        String value = lemma.getFeatures().get(name);
                        if (value != null) {
                            Feature feature = new Feature();
                            feature.setName(name);
                            feature.setValue(value);
                            fs.addFeature(name, feature);
                        }
                    }
                    classification.addFeatureStructure(fs);
                }
                annotation.addClassification(classification);
            }
        }
        this.addSegmentation(segmentation);
        this.addMorphoSyntacticAnnotation(annotation);
    }
}
| Java |
package pt.inesc.id.l2f.annotation.output;
/**
 * Base type for documents produced by the annotation pipeline.
 * Currently a marker class with no behaviour of its own.
 */
public abstract class OutputDocument {
}
| Java |
package pt.inesc.id.l2f.annotation.stage;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import pt.inesc.id.l2f.annotation.tool.Tool;
import pt.inesc.id.l2f.annotation.unit.ProcessUnit;
/**
 * One stage of the annotation pipeline: takes process units from an input
 * queue, runs them through its tool, and hands the results to an output
 * queue consumed by the next stage. Runs on its own thread.
 *
 * @author Tiago Luis
 */
public class Stage implements Runnable {

    // tool that processes the units passing through this stage
    protected Tool _tool;
    // queue of units waiting to be processed
    protected BlockingQueue<ProcessUnit> _input;
    // queue of processed units for the next stage
    protected BlockingQueue<ProcessUnit> _output;

    /**
     * Creates a stage around the given tool with bounded (100-element)
     * input and output queues.
     *
     * @param tool the tool run by this stage
     */
    public Stage(Tool tool) {
        _tool = tool;
        _input = new LinkedBlockingQueue<ProcessUnit>(100);
        _output = new LinkedBlockingQueue<ProcessUnit>(100);
        // set current stage
        _tool.setStage(this);
    }

    /**
     * Creates a stage around the given tool using the supplied queues.
     *
     * @param input the queue this stage consumes from
     * @param output the queue this stage produces to
     * @param tool the tool run by this stage
     */
    public Stage(BlockingQueue<ProcessUnit> input, BlockingQueue<ProcessUnit> output, Tool tool) {
        _input = input;
        _output = output;
        _tool = tool;
        // set current stage
        _tool.setStage(this);
    }

    /**
     * Hook for subclasses that need configuration before running.
     */
    public void configure() {}

    /**
     * Consumes units from the input queue until the terminal unit arrives.
     * The terminal unit closes the tool and is forwarded downstream so the
     * following stages terminate too.
     */
    public void run() {
        try {
            while (true) {
                ProcessUnit unit = _input.take();
                // the terminal unit shuts this stage down and is propagated
                if (unit.isLast()) {
                    _tool.close();
                    _output.put(unit);
                    break;
                }
                // process unit
                unit.accept(_tool);
            }
        } catch (InterruptedException e) {
            // BUG FIX: restore the interrupt status so code further up the
            // stack can still observe the interruption
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }

    /**
     * Passes the unit (to be processed) to input queue.
     *
     * @param unit the unit to enqueue for processing
     */
    public void process(ProcessUnit unit) {
        try {
            _input.put(unit);
        } catch (InterruptedException e) {
            // BUG FIX: restore the interrupt status instead of swallowing it
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }

    /**
     * Passes the processed unit to next stage.
     *
     * @param unit the unit to pass to next stage (after being processed)
     */
    public void collect(ProcessUnit unit) {
        try {
            _output.put(unit);
        } catch (InterruptedException e) {
            // BUG FIX: restore the interrupt status instead of swallowing it
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }

    /**
     * Hook for subclasses that need cleanup when the stage ends.
     */
    public void close() {}

    /**
     * @return the tool
     */
    public Tool getTool() {
        return _tool;
    }

    /**
     * @param tool the tool to set
     */
    public void setTool(Tool tool) {
        _tool = tool;
    }

    /**
     * @return the input queue
     */
    public BlockingQueue<ProcessUnit> getInputQueue() {
        return _input;
    }

    /**
     * @param input the input queue to set
     */
    public void setInputQueue(BlockingQueue<ProcessUnit> input) {
        _input = input;
    }

    /**
     * @return the output queue
     */
    public BlockingQueue<ProcessUnit> getOutputQueue() {
        return _output;
    }

    /**
     * @param output the output queue to set
     */
    public void setOutputQueue(BlockingQueue<ProcessUnit> output) {
        _output = output;
    }
}
| Java |
package pt.inesc.id.l2f.annotation.stage;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.util.zip.GZIPOutputStream;
import pt.inesc.id.l2f.annotation.document.xml.XMLWriter;
import pt.inesc.id.l2f.annotation.tool.IdentityTool;
import pt.inesc.id.l2f.annotation.unit.FinalProcessUnit;
import pt.inesc.id.l2f.annotation.unit.ProcessUnit;
/**
 * Terminal pipeline stage: serializes every unit it receives into a single
 * XML file (optionally gzip-compressed) until the final unit arrives.
 */
public class FinalStage extends Stage {

    // whether the output file is gzip-compressed
    private boolean _compressed;
    // path of the output file (".gz" is appended when compressed)
    private String _filename;

    /**
     * Creates an uncompressed final stage writing to the given file.
     *
     * @param filename path of the output XML file
     */
    public FinalStage(String filename) {
        super(new IdentityTool());
        _filename = filename;
        _compressed = false;
    }

    /**
     * Creates a final stage writing to the given file.
     *
     * @param filename path of the output XML file
     * @param compressed whether to gzip the output (".gz" is appended)
     */
    public FinalStage(String filename, boolean compressed) {
        super(new IdentityTool());
        _filename = filename;
        _compressed = compressed;
    }

    /**
     * Drains the input queue, writing each unit inside a root
     * {@code <units>} element, and stops at the first FinalProcessUnit.
     */
    public void run() {
        OutputStream fos = null;
        XMLWriter xmlw = null;
        try {
            if (_compressed) {
                fos = new GZIPOutputStream(new FileOutputStream(_filename + ".gz"));
            } else {
                fos = new FileOutputStream(_filename);
            }
            xmlw = new XMLWriter(fos);
            // write document start
            xmlw.writeStartDocument("UTF-8", "1.0");
            // write root element start
            xmlw.writeStartElement("units");
            while (true) {
                ProcessUnit unit = _input.take();
                // the terminal unit ends the document
                if (unit instanceof FinalProcessUnit) {
                    break;
                }
                unit.writeTo(xmlw);
            }
            // write root element end
            xmlw.writeEndElement();
            // write document end
            xmlw.writeEndDocument();
            // NOTE(review): if XMLWriter buffers internally it should be
            // flushed/closed here before the stream is closed — confirm its API
        } catch (InterruptedException e) {
            // BUG FIX: restore the interrupt status instead of swallowing it
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            if (fos != null) {
                try {
                    fos.close();
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * @return the filename
     */
    public String getFilename() {
        return _filename;
    }

    /**
     * @param filename the filename to set
     */
    public void setFilename(String filename) {
        _filename = filename;
    }
}
| Java |
package pt.inesc.id.l2f.annotation.stage;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import pt.inesc.id.l2f.annotation.unit.FinalProcessUnit;
import pt.inesc.id.l2f.annotation.unit.ProcessUnit;
/**
 * Entry point of the pipeline: units fed through {@link #input} are copied
 * to the output queue consumed by the first processing stage. Closing the
 * stage injects a FinalProcessUnit that terminates the whole pipeline.
 */
public class InitialStage extends Thread {

    // units handed to this stage but not yet forwarded
    private BlockingQueue<ProcessUnit> _in;
    // queue consumed by the first processing stage
    private BlockingQueue<ProcessUnit> _out;

    /**
     * Creates the initial stage feeding the given downstream queue.
     *
     * @param out the queue consumed by the first processing stage
     */
    public InitialStage(BlockingQueue<ProcessUnit> out) {
        _out = out;
        _in = new LinkedBlockingQueue<ProcessUnit>();
    }

    /**
     * Feeds one unit into the pipeline.
     *
     * @param input the unit to process
     */
    public void input(ProcessUnit input) {
        try {
            _in.put(input);
        } catch (InterruptedException e) {
            // BUG FIX: restore the interrupt status instead of swallowing it
            Thread.currentThread().interrupt();
            // TODO: throw a dedicated exception
            e.printStackTrace();
        }
    }

    /**
     * Copies units from the internal queue to the downstream queue until
     * the terminal unit has been forwarded.
     */
    public void run() {
        try {
            while (true) {
                ProcessUnit s = _in.take();
                // copy element to consumers
                _out.put(s);
                // stop after forwarding the terminal unit
                if (s instanceof FinalProcessUnit) {
                    break;
                }
            }
        } catch (InterruptedException e) {
            // BUG FIX: restore the interrupt status instead of swallowing it
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }

    /** Injects the terminal unit that shuts the whole pipeline down. */
    public void close() {
        try {
            _in.put(new FinalProcessUnit());
        } catch (InterruptedException e) {
            // BUG FIX: restore the interrupt status instead of swallowing it
            Thread.currentThread().interrupt();
            e.printStackTrace();
        }
    }
}
| Java |
package pt.inesc.id.l2f.annotation.input;
import java.io.FileNotFoundException;
import java.io.FileReader;
import pt.inesc.id.l2f.annotation.document.xml.XMLReader;
/**
 * Input document backed by a TEI XML file: each run of non-whitespace
 * character data in the XML becomes one text element.
 */
public class TEIDocument extends InputDocument {
// pull-style XML reader over the TEI file
private XMLReader _xmlr;
/**
 * Opens the given TEI file for reading.
 *
 * @param filename path to the TEI XML file
 * @throws FileNotFoundException if the file does not exist
 */
public TEIDocument(String filename) throws FileNotFoundException {
_xmlr = new XMLReader(new FileReader(filename));
}
/**
 * Advances to the next non-whitespace character-data event.
 *
 * @return the next text element, or null at the end of the document
 */
@Override
public TextElement next() {
int event = -1;
while (true) {
event = _xmlr.next();
if (_xmlr.isDocumentEnd(event)) {
break;
}
if (_xmlr.isCharacters(event)) {
String text = _xmlr.getcharacters();
// skip whitespace-only character runs (indentation between tags)
if (text.matches("\\s+")) {
continue;
}
return new TextElement(text);
}
}
return null;
}
}
| Java |
package pt.inesc.id.l2f.annotation.input;
import java.util.ArrayList;
import java.util.List;
/**
 * In-memory input document: a fixed list of text elements served one at a
 * time by {@link #next()}.
 */
public class TextDocument extends InputDocument {

    // text elements of this document, in order
    private List<TextElement> _nodes;
    // index of the next element to hand out
    private int _current = 0;

    /** Creates an empty document. */
    public TextDocument() {
        _nodes = new ArrayList<TextElement>();
    }

    /**
     * Creates a document with one element per string.
     *
     * @param text the element texts, in order
     */
    public TextDocument(List<String> text) {
        this();
        for (String element : text) {
            _nodes.add(new TextElement(element));
        }
    }

    /**
     * Creates a document holding a single element.
     *
     * @param text the element text
     */
    public TextDocument(String text) {
        this();
        _nodes.add(new TextElement(text));
    }

    /**
     * Returns the next element, or null once every element has been served.
     * Reaching the end rewinds the cursor, so the document can be iterated
     * again from the start.
     */
    @Override
    public TextElement next() {
        if (_current >= _nodes.size()) {
            _current = 0;
            return null;
        }
        TextElement node = _nodes.get(_current);
        _current++;
        return node;
    }
}
| Java |
package pt.inesc.id.l2f.annotation.input;
/**
 * A single piece of input text handed to the annotation tools.
 */
public class TextElement {

    // wrapped text content of this element
    private String _content;

    /**
     * Creates a text element holding the given text.
     *
     * @param text the text content
     */
    public TextElement(String text) {
        _content = text;
    }

    /**
     * @return the text
     */
    public String getText() {
        return _content;
    }

    /**
     * @param text the text to set
     */
    public void setText(String text) {
        _content = text;
    }
}
| Java |
package pt.inesc.id.l2f.annotation.input;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
/**
 * Input document backed by a plain text file: each line becomes one text
 * element. The underlying reader is closed once the file is exhausted.
 */
public class RawDocument extends InputDocument {

    // line reader over the file; null once exhausted and closed
    private BufferedReader _br;

    /**
     * Opens the given text file for line-by-line reading.
     *
     * @param filename path to the text file
     * @throws FileNotFoundException if the file does not exist
     */
    public RawDocument(String filename) throws FileNotFoundException {
        _br = new BufferedReader(new FileReader(filename));
    }

    /**
     * Reads the next line of the file.
     *
     * @return the next line wrapped in a text element, or null at
     *         end-of-file (or on a read error)
     */
    @Override
    public TextElement next() {
        if (_br == null) {
            // already exhausted and closed
            return null;
        }
        try {
            String line = _br.readLine();
            if (line != null) {
                return new TextElement(line);
            }
            // BUG FIX: release the file handle once the file is exhausted;
            // the reader was previously never closed
            _br.close();
            _br = null;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }
}
| Java |
package pt.inesc.id.l2f.annotation.input;
/**
 * Base type for documents fed into the annotation pipeline; subclasses
 * serve their content one text element at a time.
 */
public abstract class InputDocument {
/**
 * Advances to the next unit of text in this document.
 *
 * @return the next text element, or null when there are no more units
 */
public abstract TextElement next();
}
| Java |
package pt.inesc.id.l2f.annotation.execution;
import java.util.ArrayList;
import java.util.List;
import pt.inesc.id.l2f.annotation.document.laf.LinguisticAnnotationDocument;
import pt.inesc.id.l2f.annotation.input.InputDocument;
import pt.inesc.id.l2f.annotation.input.TextDocument;
import pt.inesc.id.l2f.annotation.stage.Stage;
import pt.inesc.id.l2f.annotation.tool.Tool;
import pt.inesc.id.l2f.annotation.unit.InputDocumentProcessUnit;
import pt.inesc.id.l2f.annotation.unit.LinguisticAnnotationProcessUnit;
/**
 * Execution mode that runs the annotation pipeline in the local JVM,
 * feeding process units straight into the first stage.
 */
public class LocalExecutionMode extends ExecutionMode {
public LocalExecutionMode(Stage ... stages) {
super(stages);
}
public LocalExecutionMode(List<Stage> stages) {
super(stages);
}
public LocalExecutionMode(Tool ... tools) {
super(tools);
}
@Override
public void start() {
super.start();
}
@Override
public void close() {
super.close();
}
/**
 * Annotates a single text by wrapping it in a TextDocument and feeding
 * it to the first stage.
 *
 * NOTE(review): the output path parameter is not used by this method.
 */
@Override
public void annotateText(Text input, Path output) {
Stage first = _stages.getFirst();
// add ....
first.process(new InputDocumentProcessUnit(new TextDocument(input.getText())));
}
/**
 * Annotates several texts as one multi-element TextDocument fed to the
 * first stage as a single process unit.
 */
@Override
public void annotateText(List<Text> input, Path output) {
Stage first = _stages.getFirst();
// add ....
List<String> list = new ArrayList<String>();
for (Text text : input) {
list.add(text.getText());
}
first.process(new InputDocumentProcessUnit(new TextDocument(list)));
}
// NOTE(review): empty implementation — single-file annotation not supported yet
@Override
public void annotateFile(File input, Path output) {
}
// NOTE(review): empty implementation — directory annotation not supported yet
@Override
public void annotateText(Directory input, Path output) {
}
// NOTE(review): disabled implementation kept below for reference
@Override
public void annotate(List<LinguisticAnnotationDocument> annotations, Path output) {
// Stage first = _stages.getFirst();
//
// // create document that will contain all....
// LinguisticAnnotationDocument document = new LinguisticAnnotationDocument();
//
// for (LinguisticAnnotationDocument doc : annotations) {
// document.merge(doc);
// }
//
// first.process(new LinguisticAnnotationProcessUnit(document, new LinguisticAnnotationDocument()));
}
/** Feeds each input document to the first stage as its own process unit. */
@Override
public void annotateInputDocuments(List<InputDocument> documents, Path output) {
Stage first = _stages.getFirst();
for (InputDocument doc : documents) {
first.process(new InputDocumentProcessUnit(doc));
}
}
/** Feeds an existing annotation unit to the first stage. */
@Override
public void annotate(LinguisticAnnotationProcessUnit annotation, Path output) {
Stage first = _stages.getFirst();
first.process(annotation);
}
// NOTE(review): empty implementation — multi-file annotation not supported yet
@Override
public void annotateFile(List<File> input, Path output) {
}
}
| Java |
package pt.inesc.id.l2f.annotation.execution.hadoop;
import java.io.IOException;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.FileSplit;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
/**
* Reads records that are delimited by a specifc begin/end tag.
*/
/**
 * Reads records that are delimited by a specific begin/end tag pair.
 * Each record handed to the mapper is the byte run from the start tag
 * through (and including) the matching end tag.
 */
public class XmlInputFormat extends TextInputFormat {

    /** Configuration key for the opening tag (e.g. {@code "<p "}). */
    public static final String START_TAG_KEY = "xmlinput.start";
    /** Configuration key for the closing tag (e.g. {@code "</p>"}). */
    public static final String END_TAG_KEY = "xmlinput.end";

    public void configure(JobConf jobConf) {
        super.configure(jobConf);
    }

    @SuppressWarnings("unchecked")
    public RecordReader getRecordReader(InputSplit inputSplit, JobConf jobConf, Reporter reporter) throws IOException {
        return new XmlRecordReader((FileSplit) inputSplit, jobConf);
    }

    /** Record reader that scans a split for start-tag/end-tag framed records. */
    @SuppressWarnings("unchecked")
    public static class XmlRecordReader implements RecordReader {

        private byte[] startTag;
        private byte[] endTag;
        private long start;                 // first byte of this split
        private long end;                   // first byte past this split
        private FSDataInputStream fsin;
        private DataOutputBuffer buffer = new DataOutputBuffer();

        public XmlRecordReader(FileSplit split, JobConf jobConf) throws IOException {
            // Use the declared constants instead of repeating the literals,
            // so the keys cannot silently drift apart.
            startTag = jobConf.get(START_TAG_KEY).getBytes("utf-8");
            endTag = jobConf.get(END_TAG_KEY).getBytes("utf-8");
            // open the file and seek to the start of the split
            start = split.getStart();
            end = start + split.getLength();
            Path file = split.getPath();
            FileSystem fs = file.getFileSystem(jobConf);
            fsin = fs.open(split.getPath());
            fsin.seek(start);
        }

        /**
         * Fills key (stream position) and value (the framed XML fragment)
         * with the next record, returning false when the split is exhausted.
         */
        public boolean next(Object key, Object value) throws IOException {
            if (fsin.getPos() < end) {
                if (readUntilMatch(startTag, false)) {
                    try {
                        buffer.write(startTag);
                        if (readUntilMatch(endTag, true)) {
                            ((Text) key).set(Long.toString(fsin.getPos()));
                            ((Text) value).set(buffer.getData(), 0, buffer.getLength());
                            return true;
                        }
                    } finally {
                        buffer.reset();
                    }
                }
            }
            return false;
        }

        public WritableComparable createKey() {
            return new Text();
        }

        public Writable createValue() {
            return new Text();
        }

        public long getPos() throws IOException {
            return fsin.getPos();
        }

        public void close() throws IOException {
            fsin.close();
        }

        public float getProgress() throws IOException {
            return ((float) (fsin.getPos() - start)) / ((float) (end - start));
        }

        /**
         * Advances the stream until {@code match} is found; when
         * {@code withinBlock} is true every consumed byte is copied into the
         * record buffer. Returns false at EOF or (outside a block) at the
         * split boundary.
         */
        private boolean readUntilMatch(byte[] match, boolean withinBlock) throws IOException {
            int i = 0;
            while (true) {
                int b = fsin.read();
                // end of file
                if (b == -1) return false;
                // save to buffer
                if (withinBlock) buffer.write(b);
                // check if we're matching
                if (b == match[i]) {
                    i++;
                    if (i >= match.length) return true;
                } else {
                    // Mismatch: restart the match, but re-test the current
                    // byte against the first tag byte — otherwise inputs
                    // like "<<p" would miss the tag (the original reset to 0
                    // and discarded the byte entirely).
                    i = (b == match[0]) ? 1 : 0;
                }
                // see if we've passed the stop point
                if (!withinBlock && i == 0 && fsin.getPos() >= end) return false;
            }
        }
    }
}
| Java |
package pt.inesc.id.l2f.annotation.execution.hadoop;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ToolRunner;
import pt.inesc.id.l2f.annotation.document.laf.LinguisticAnnotationDocument;
import pt.inesc.id.l2f.annotation.execution.Directory;
import pt.inesc.id.l2f.annotation.execution.File;
import pt.inesc.id.l2f.annotation.execution.LafFile;
import pt.inesc.id.l2f.annotation.execution.ParallelExecutionMode;
import pt.inesc.id.l2f.annotation.execution.Path;
import pt.inesc.id.l2f.annotation.execution.RawFile;
import pt.inesc.id.l2f.annotation.execution.TeiFile;
import pt.inesc.id.l2f.annotation.execution.Text;
import pt.inesc.id.l2f.annotation.input.InputDocument;
import pt.inesc.id.l2f.annotation.stage.Stage;
import pt.inesc.id.l2f.annotation.tool.Tool;
import pt.inesc.id.l2f.annotation.unit.LinguisticAnnotationProcessUnit;
/**
 * Execution mode that submits the annotation pipeline as a Hadoop
 * map/reduce job. Job parameters (mappers, reducers, compression, paths,
 * tool classes) are carried through the Hadoop {@link Configuration} under
 * {@code split.framework.*} keys and consumed by {@link ParallelAnnotation}.
 */
public class HadoopExecutionMode extends ParallelExecutionMode {

    /** Hadoop configuration that parameterizes the submitted job. */
    private Configuration _conf;

    public HadoopExecutionMode(Stage ... stages) {
        super(stages);
        _conf = new Configuration();
    }

    public HadoopExecutionMode(Tool ... tools) {
        super(tools);
        _conf = new Configuration();
    }

    @Override
    public void start() {
        // _annotation.start();
    }

    @Override
    public void close() {
        // _annotation.close();
    }

    // NOTE(review): single-text annotation is not supported in Hadoop mode.
    @Override
    public void annotateText(Text input, Path output) {
    }

    /** Sets the requested number of map tasks. */
    public void setMappers(int mappers) {
        _conf.setInt("split.framework.mappers", mappers);
    }

    /** Sets the requested number of reduce tasks. */
    public void setReducers(int reducers) {
        _conf.setInt("split.framework.reducers", reducers);
    }

    /** Enables compression of intermediate map output. */
    public void compressMapOutput() {
        _conf.setBoolean("split.framework.compress.map", true);
    }

    /** Enables compression of the final job output. */
    public void compressFinalOutput() {
        _conf.setBoolean("split.framework.compress.output", true);
    }

    /** Appends a path to the newline-separated list of job input paths. */
    public void addInputPath(String path) {
        String previous = _conf.get("split.framework.input", "");
        // add path
        _conf.set("split.framework.input", path + "\n" + previous);
    }

    /** Sets the job output path (a single path; overwrites any previous one). */
    public void addOutputPath(String path) {
        _conf.set("split.framework.output", path);
    }

    @Override
    public void annotateText(List<Text> input, Path output) {}

    @Override
    public void annotateFile(File input, Path output) {
        List<File> file = new ArrayList<File>();
        file.add(input);
        this.annotateFile(file, output);
    }

    /**
     * Submits a job annotating the given files. The input type (raw/tei/laf)
     * is derived from the concrete class of the last file in the list.
     */
    @Override
    public void annotateFile(List<File> input, Path output) {
        try {
            File last = null;
            for (File file : input) {
                this.addInputPath(file.getPath());
                last = file;
            }
            this.addOutputPath(output.getPath());
            if (last instanceof RawFile) {
                _conf.set("split.framework.input.type", "raw");
            } else if (last instanceof TeiFile) {
                _conf.set("split.framework.input.type", "tei");
            } else if (last instanceof LafFile) {
                _conf.set("split.framework.input.type", "laf");
            }
            this.configureTools();
            ToolRunner.run(_conf, new ParallelAnnotation(), null);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /** Submits a job annotating every file under the given directory. */
    @Override
    public void annotateText(Directory input, Path output) {
        try {
            this.addInputPath(input.getPath());
            this.addOutputPath(output.getPath());
            this.configureTools();
            ToolRunner.run(_conf, new ParallelAnnotation(), null);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Stores the space-separated list of tool class names in the
     * configuration. Extracted from the two annotate methods, which carried
     * duplicated copies of this code; also avoids the original
     * {@code substring(0, length - 1)}, which threw on an empty stage list.
     */
    private void configureTools() {
        StringBuilder tools = new StringBuilder();
        for (Stage stage : _stages) {
            if (tools.length() > 0) {
                tools.append(' ');
            }
            tools.append(stage.getTool().getClass().getCanonicalName());
        }
        _conf.set("split.framework.tools", tools.toString());
    }

    @Override
    public void annotate(List<LinguisticAnnotationDocument> annotations, Path output) {
    }

    @Override
    public void annotateInputDocuments(List<InputDocument> documents, Path output) {
        // Stage first = _stages.getFirst();
        //
        // for (InputDocument doc : documents) {
        // first.process(new InputDocumentProcessUnit(doc));
        // }
    }

    @Override
    public void annotate(LinguisticAnnotationProcessUnit annotation, Path output) {
    }
}
| Java |
package pt.inesc.id.l2f.annotation.execution.hadoop;
import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
import pt.inesc.id.l2f.annotation.execution.hadoop.laf.LafMapClass;
import pt.inesc.id.l2f.annotation.execution.hadoop.laf.LafReduceClass;
import pt.inesc.id.l2f.annotation.execution.hadoop.raw.RawMapClass;
import pt.inesc.id.l2f.annotation.execution.hadoop.raw.RawReduceClass;
import pt.inesc.id.l2f.annotation.execution.hadoop.tei.TeiMapClass;
import pt.inesc.id.l2f.annotation.execution.hadoop.tei.TeiReduceClass;
import pt.inesc.id.l2f.annotation.unit.LinguisticAnnotationProcessUnit;
/**
*
*
* @author Tiago Luis
*/
/**
 * Hadoop job driver for the SPLiT annotation pipeline. Defines the abstract
 * {@code Map}/{@code Reduce} bases used by the raw/tei/laf flavors and the
 * {@link #run(String[])} method that configures and submits the job.
 *
 * @author Tiago Luis
 */
public class ParallelAnnotation extends Configured implements org.apache.hadoop.util.Tool {
    /**
     * Base mapper: captures the output collector and reporter on the first
     * map call and delegates lifecycle/processing to abstract hooks.
     */
    public abstract static class Map extends MapReduceBase implements Mapper<WritableComparable, Writable, WritableComparable, Writable> {
        // True once the collector/reporter were handed to the subclass.
        private boolean _hasCollector = false;
        public void configure(JobConf job) {
            this.start(job);
        }
        public void map(WritableComparable key, Writable value, OutputCollector<WritableComparable, Writable> output, Reporter reporter) throws IOException {
            // store output collector and reporter
            if (!_hasCollector) {
                this.setOutputCollector(output);
                this.setReporter(reporter);
                _hasCollector = true;
            }
            this.process(key, value, output, reporter);
        }
        public void close() throws IOException {
            this.stop();
        }
        /**
         * Lifecycle hook invoked once per task, before any record is mapped.
         *
         * @param job the task configuration
         */
        public abstract void start(JobConf job);
        /**
         * Processes one input record.
         *
         * @param key record key
         * @param value record value
         * @param output collector for emitted pairs
         * @param reporter progress/counter reporter
         * @throws IOException on emission failure
         */
        public abstract void process(WritableComparable key, Writable value, OutputCollector<WritableComparable, Writable> output, Reporter reporter) throws IOException;
        /**
         * Lifecycle hook invoked once per task, after the last record.
         */
        public abstract void stop();
        /**
         * Receives the task's output collector (called once, lazily).
         *
         * @param output the collector
         */
        public abstract void setOutputCollector(OutputCollector<WritableComparable, Writable> output);
        /**
         * Receives the task's reporter (called once, lazily).
         *
         * @param reporter the reporter
         */
        public abstract void setReporter(Reporter reporter);
    }
    // -----------------------------------------------------------------
    /**
     * Base reducer: mirrors {@code Map} — captures collector/reporter on the
     * first reduce call and delegates to abstract hooks.
     */
    public abstract static class Reduce extends MapReduceBase implements Reducer<WritableComparable, Writable, WritableComparable, Writable> {
        // True once the collector/reporter were handed to the subclass.
        private boolean _hasCollector = false;
        public void configure(JobConf job) {
            this.start(job);
        }
        public void reduce(WritableComparable key, Iterator<Writable> values, OutputCollector<WritableComparable, Writable> output, Reporter reporter) throws IOException {
            // store output collector and reporter
            if (!_hasCollector) {
                this.setOutputCollector(output);
                this.setReporter(reporter);
                _hasCollector = true;
            }
            this.process(key, values, output, reporter);
        }
        public void close() throws IOException {
            this.stop();
        }
        /**
         * Lifecycle hook invoked once per task, before any group is reduced.
         *
         * @param job the task configuration
         */
        public abstract void start(JobConf job);
        /**
         * Processes one key and its values.
         *
         * @param key group key
         * @param values values grouped under the key
         * @param output collector for emitted pairs
         * @param reporter progress/counter reporter
         * @throws IOException on emission failure
         */
        public abstract void process(WritableComparable key, Iterator<Writable> values, OutputCollector<WritableComparable, Writable> output, Reporter reporter) throws IOException;
        /**
         * Lifecycle hook invoked once per task, after the last group.
         */
        public abstract void stop();
        /**
         * Receives the task's output collector (called once, lazily).
         *
         * @param output the collector
         */
        public abstract void setOutputCollector(OutputCollector<WritableComparable, Writable> output);
        /**
         * Receives the task's reporter (called once, lazily).
         *
         * @param reporter the reporter
         */
        public abstract void setReporter(Reporter reporter);
    }
    // -----------------------------------------------------------------
    /**
     * The main driver for the annotation map/reduce program. Invoke this
     * method to submit the map/reduce job. Mapper/reducer classes and input
     * format are picked by the {@code split.framework.input.type} setting
     * (raw, tei or laf).
     *
     * @throws IOException
     * in case of communication problems with the job tracker.
     */
    public int run(String[] args) throws Exception {
        Configuration conf = super.getConf();
        JobConf job = new JobConf(conf, ParallelAnnotation.class);
        job.setJobName("SPLiT");
        job.setOutputFormat(ParallelAnnotationOutputFormat.class);
        job.setOutputValueClass(LinguisticAnnotationProcessUnit.class);
        String type = conf.get("split.framework.input.type", "raw");
        if (type.equals("raw")) {
            job.setInputFormat(TextInputFormat.class);
            job.setMapperClass(RawMapClass.class);
            // job.setCombinerClass(RawReduceClass.class);
            job.setReducerClass(RawReduceClass.class);
            job.setOutputKeyClass(Text.class);
        } else if (type.equals("tei")) {
            job.set("xmlinput.start", "<p ");
            job.set("xmlinput.end", "</p>");
            job.setInputFormat(XmlInputFormat.class);
            job.setMapperClass(TeiMapClass.class);
            // job.setCombinerClass(TeiReduceClass.class);
            job.setReducerClass(TeiReduceClass.class);
            job.setOutputKeyClass(Text.class);
        } else if (type.equals("laf")) {
            job.set("xmlinput.start", "<unit ");
            job.set("xmlinput.end", "</unit>");
            job.setInputFormat(XmlInputFormat.class);
            job.setMapperClass(LafMapClass.class);
            // job.setCombinerClass(LafReduceClass.class);
            job.setReducerClass(LafReduceClass.class);
            job.setOutputKeyClass(Text.class);
        }
        job.setNumMapTasks(conf.getInt("split.framework.mappers", 2));
        job.setNumReduceTasks(conf.getInt("split.framework.reducers", 1));
        // add input paths (newline-separated list built by HadoopExecutionMode)
        String[] inputs = conf.get("split.framework.input", "input").split("\n");
        for (String input : inputs) {
            if (input.equals("")) {
                continue;
            }
            FileInputFormat.addInputPath(job, new Path(input));
        }
        FileOutputFormat.setOutputPath(job, new Path(conf.get("split.framework.output", "output")));
        if (conf.getBoolean("split.framework.compress.map", false)) {
            // FIXME: this is not working
            job.setCompressMapOutput(true);
            job.setMapOutputCompressorClass(GzipCodec.class);
        }
        if (conf.getBoolean("split.framework.compress.output", false)) {
            FileOutputFormat.setCompressOutput(job, true);
            FileOutputFormat.setOutputCompressorClass(job, GzipCodec.class);
        }
        // uncomment to run locally in a single process
        // conf.set("mapred.job.tracker", "local");
        JobClient.runJob(job);
        return 0;
    }
}
| Java |
package pt.inesc.id.l2f.annotation.execution.hadoop.tei;
import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import pt.inesc.id.l2f.annotation.execution.hadoop.ParallelAnnotation.Reduce;
/**
 * Identity reducer for TEI annotation units: every value is forwarded
 * unchanged under its original key.
 */
public class TeiReduceClass extends Reduce {

    /** Creates a pass-through reducer. */
    public TeiReduceClass() {}

    @Override
    public void start(JobConf job) {}

    /** Forwards each grouped value as-is. */
    @Override
    public void process(WritableComparable key, Iterator<Writable> values, OutputCollector<WritableComparable, Writable> output, Reporter reporter) throws IOException {
        for (Iterator<Writable> it = values; it.hasNext(); ) {
            output.collect(key, it.next());
        }
    }

    @Override
    public void setOutputCollector(OutputCollector<WritableComparable, Writable> output) {}

    @Override
    public void setReporter(Reporter reporter) {}

    @Override
    public void stop() {}
}
| Java |
package pt.inesc.id.l2f.annotation.execution.hadoop.tei;
import java.io.IOException;
import java.io.StringReader;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import pt.inesc.id.l2f.annotation.document.xml.XMLReader;
import pt.inesc.id.l2f.annotation.execution.LocalExecutionMode;
import pt.inesc.id.l2f.annotation.execution.hadoop.HadoopFinalStage;
import pt.inesc.id.l2f.annotation.execution.hadoop.ParallelAnnotation.Map;
import pt.inesc.id.l2f.annotation.tool.Tool;
/**
 * Mapper for TEI input: extracts the character content of each {@code <p>}
 * fragment and runs the local annotation pipeline over it.
 */
public class TeiMapClass extends Map {
    // these are just for testing counters
    private enum Counter {
        SKIPPED
    }
    public TeiMapClass() {}
    // Local (in-process) pipeline that performs the actual annotation.
    private LocalExecutionMode _local;
    // Maximum input length; -1 disables the limit.
    private long _maximum;
    /**
     * Instantiates the configured tool classes and starts a local pipeline
     * terminated by a {@link HadoopFinalStage}.
     */
    @Override
    public void start(JobConf job) {
        // get maximum
        _maximum = job.getInt("split.framework.maximum", -1);
        try {
            // get tools
            String[] toolsProperty = job.get("split.framework.tools").split(" ");
            Tool[] tools = new Tool[toolsProperty.length];
            for (int i = 0; i < toolsProperty.length; i++) {
                tools[i] = (Tool) job.getClassLoader().loadClass(toolsProperty[i]).newInstance();
            }
            _local = new LocalExecutionMode(tools);
            _local.addFinalStage(new HadoopFinalStage());
            _local.start();
        } catch (InstantiationException e) {
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
    }
    /**
     * Extracts the text of one TEI paragraph and annotates it.
     *
     * NOTE(review): the length limit is checked against the raw XML length,
     * not the extracted character length — confirm this is intended. Also,
     * {@link #getCharacters} may return null for an empty paragraph; verify
     * that annotateText/Text tolerate a null string.
     */
    @Override
    public void process(WritableComparable key, Writable value, OutputCollector<WritableComparable, Writable> output, Reporter reporter) throws IOException {
        String input = ((Text) value).toString();
        XMLReader xmlr = new XMLReader(new StringReader(input));
        String characters = this.getCharacters(xmlr);
        if (_maximum != -1) {
            if (input.length() < _maximum) {
                _local.annotateText(new pt.inesc.id.l2f.annotation.execution.Text(characters), null);
            } else {
                reporter.incrCounter(Counter.SKIPPED, 1);
            }
        } else {
            _local.annotateText(new pt.inesc.id.l2f.annotation.execution.Text(characters), null);
        }
    }
    @Override
    public void stop() {
        _local.close();
    }
    @Override
    public void setOutputCollector(OutputCollector<WritableComparable, Writable> output) {
        ((HadoopFinalStage) _local.getFinalStage()).setOutputCollector(output);
    }
    @Override
    public void setReporter(Reporter reporter) {
        ((HadoopFinalStage) _local.getFinalStage()).setReporter(reporter);
    }
    /**
     * Returns the first run of character data inside the {@code <p>} element,
     * or null when the element closes without any character event.
     */
    private String getCharacters(XMLReader xmlr) {
        int event = -1;
        while (true) {
            event = xmlr.next();
            if (xmlr.isElementEnd(event, "p")) {
                break;
            }
            if (xmlr.isCharacters(event)) {
                return xmlr.getcharacters();
            }
        }
        return null;
    }
}
| Java |
package pt.inesc.id.l2f.annotation.execution.hadoop;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputFormatBase;
import org.apache.hadoop.mapred.RecordWriter;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ReflectionUtils;
import pt.inesc.id.l2f.annotation.document.xml.XMLWriter;
import pt.inesc.id.l2f.annotation.unit.ProcessUnit;
/**
 * Output format that serializes annotated {@link ProcessUnit}s as an XML
 * document with a single {@code <units>} root element, optionally gzip
 * compressed.
 */
public class ParallelAnnotationOutputFormat extends OutputFormatBase<WritableComparable, ProcessUnit> {

    /**
     * Creates the record writer, choosing a plain or codec-compressed
     * stream based on the job's compress-output setting.
     */
    public RecordWriter<WritableComparable, ProcessUnit> getRecordWriter(FileSystem ignored, JobConf job, String name, Progressable progress) throws IOException {
        Path dir = job.getOutputPath();
        FileSystem fs = dir.getFileSystem(job);
        boolean isCompressed = getCompressOutput(job);
        if (!isCompressed) {
            FSDataOutputStream out = fs.create(new Path(dir, name), progress);
            return createXmlRecordWriter(out);
        }
        Class<? extends CompressionCodec> codecClass = getOutputCompressorClass(job, GzipCodec.class);
        // create the named codec
        CompressionCodec codec = (CompressionCodec)
            ReflectionUtils.newInstance(codecClass, job);
        // build the filename including the extension
        Path filename = new Path(dir, name + codec.getDefaultExtension());
        FSDataOutputStream fileOut = fs.create(filename, progress);
        DataOutputStream out = new DataOutputStream(codec.createOutputStream(fileOut));
        return createXmlRecordWriter(out);
    }

    /**
     * Builds the XML writer over the given byte stream and returns a record
     * writer that streams each unit under the {@code <units>} root.
     * Extracted: the compressed and uncompressed branches previously held
     * two identical copies of this code.
     */
    private RecordWriter<WritableComparable, ProcessUnit> createXmlRecordWriter(java.io.OutputStream stream) throws IOException {
        // Encode with UTF-8 explicitly: the XML prolog declares UTF-8, but
        // the original used the platform default charset.
        final OutputStreamWriter osr = new OutputStreamWriter(stream, "UTF-8");
        final XMLWriter xmlw = new XMLWriter(osr);
        // write document start
        xmlw.writeStartDocument("UTF-8", "1.0");
        // write root element start
        xmlw.writeStartElement("units");
        return new RecordWriter<WritableComparable, ProcessUnit>() {
            public void write(WritableComparable key, ProcessUnit value) throws IOException {
                if (key != null) {
                    if (value != null) {
                        value.writeTo(xmlw);
                    }
                }
            }
            public void close(Reporter reporter) throws IOException {
                // write root element end (the original called
                // writeEndDocument twice, never closing <units> explicitly)
                xmlw.writeEndElement();
                // write document end
                xmlw.writeEndDocument();
                // close output stream
                osr.close();
            }
        };
    }
}
| Java |
package pt.inesc.id.l2f.annotation.execution.hadoop.raw;
import java.io.IOException;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import pt.inesc.id.l2f.annotation.execution.LocalExecutionMode;
import pt.inesc.id.l2f.annotation.execution.hadoop.HadoopFinalStage;
import pt.inesc.id.l2f.annotation.execution.hadoop.ParallelAnnotation.Map;
import pt.inesc.id.l2f.annotation.tool.Tool;
/**
 * Mapper for raw text input: each input line is pushed through a local
 * annotation pipeline; lines longer than the configured maximum are
 * skipped and counted.
 */
public class RawMapClass extends Map {

    // these are just for testing counters
    private enum Counter {
        SKIPPED
    }

    /** Local (in-process) pipeline that performs the actual annotation. */
    private LocalExecutionMode _local;

    /** Maximum accepted line length; -1 disables the limit. */
    private long _maximum;

    /**
     * Instantiates the configured tool classes and starts a local pipeline
     * terminated by a HadoopFinalStage.
     */
    @Override
    public void start(JobConf job) {
        // get maximum
        _maximum = job.getInt("split.framework.maximum", -1);
        try {
            // get tools
            String[] toolNames = job.get("split.framework.tools").split(" ");
            Tool[] tools = new Tool[toolNames.length];
            for (int index = 0; index < toolNames.length; index++) {
                tools[index] = (Tool) job.getClassLoader().loadClass(toolNames[index]).newInstance();
            }
            _local = new LocalExecutionMode(tools);
            _local.addFinalStage(new HadoopFinalStage());
            _local.start();
        } catch (InstantiationException e) {
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
    }

    /** Annotates one line, or bumps the SKIPPED counter when it is too long. */
    @Override
    public void process(WritableComparable key, Writable value, OutputCollector<WritableComparable, Writable> output, Reporter reporter) throws IOException {
        String line = ((Text) value).toString();
        boolean withinLimit = (_maximum == -1) || (line.length() < _maximum);
        if (withinLimit) {
            _local.annotateText(new pt.inesc.id.l2f.annotation.execution.Text(line), null);
        } else {
            reporter.incrCounter(Counter.SKIPPED, 1);
        }
    }

    @Override
    public void stop() {
        _local.close();
    }

    @Override
    public void setOutputCollector(OutputCollector<WritableComparable, Writable> output) {
        ((HadoopFinalStage) _local.getFinalStage()).setOutputCollector(output);
    }

    @Override
    public void setReporter(Reporter reporter) {
        ((HadoopFinalStage) _local.getFinalStage()).setReporter(reporter);
    }
}
| Java |
package pt.inesc.id.l2f.annotation.execution.hadoop.raw;
import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import pt.inesc.id.l2f.annotation.execution.hadoop.ParallelAnnotation.Reduce;
/**
 * Identity reducer for raw-text annotation units: forwards every value
 * unchanged under its original key.
 */
public class RawReduceClass extends Reduce {
    @Override
    public void start(JobConf job) {}
    /** Forwards each grouped value as-is. */
    @Override
    public void process(WritableComparable key, Iterator<Writable> values, OutputCollector<WritableComparable, Writable> output, Reporter reporter) throws IOException {
        while (values.hasNext()) {
            output.collect(key, values.next());
        }
    }
    @Override
    public void setOutputCollector(OutputCollector<WritableComparable, Writable> output) {}
    @Override
    public void setReporter(Reporter reporter) {}
    @Override
    public void stop() {}
}
| Java |
package pt.inesc.id.l2f.annotation.execution.hadoop;
/**
* A description of an example program based on its class and a human-readable description.
*
* @author Tiago Luis
*/
/**
 * A description of an example program based on its class and a human-readable description.
 *
 * NOTE(review): the driver main is currently disabled (commented out); the
 * class is kept as a placeholder for a ProgramDriver-based entry point.
 *
 * @author Tiago Luis
 */
public class MorphoSyntacticAnnotationDriver {
    // public static void main(String argv[]){
    // ProgramDriver pgd = new ProgramDriver();
    //
    // try {
    // pgd.addClass("maf", ParallelAnnotation.class, "A map/reduce program that makes morpho-syntatic annotations to input format.");
    // pgd.driver(argv);
    // } catch(Throwable e) {
    // e.printStackTrace();
    // }
    // }
}
| Java |
package pt.inesc.id.l2f.annotation.execution.hadoop.laf;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import pt.inesc.id.l2f.annotation.document.laf.LinguisticAnnotationDocument;
import pt.inesc.id.l2f.annotation.execution.LocalExecutionMode;
import pt.inesc.id.l2f.annotation.execution.hadoop.HadoopFinalStage;
import pt.inesc.id.l2f.annotation.execution.hadoop.ParallelAnnotation.Reduce;
import pt.inesc.id.l2f.annotation.tool.Tool;
import pt.inesc.id.l2f.annotation.unit.Dependencies;
import pt.inesc.id.l2f.annotation.unit.LinguisticAnnotationProcessUnit;
/**
 * Reducer for LAF input: gathers all annotation units sharing an annotation
 * id, sorts them, merges their documents into one, and hands the merged
 * unit to a local pipeline for the next annotation stage.
 */
public class LafReduceClass extends Reduce {

    /** Local (in-process) pipeline that performs the next annotation stage. */
    private LocalExecutionMode _local;

    /**
     * Instantiates the configured tool classes and starts a local pipeline
     * terminated by a {@link HadoopFinalStage}.
     */
    @Override
    public void start(JobConf job) {
        try {
            // get tools
            String[] toolsProperty = job.get("split.framework.tools").split(" ");
            Tool[] tools = new Tool[toolsProperty.length];
            for (int i = 0; i < toolsProperty.length; i++) {
                tools[i] = (Tool) job.getClassLoader().loadClass(toolsProperty[i]).newInstance();
            }
            _local = new LocalExecutionMode(tools);
            _local.addFinalStage(new HadoopFinalStage());
            _local.start();
        } catch (InstantiationException e) {
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
    }

    /**
     * Merges all units under the key into one document (recording each unit
     * id as a dependency) and submits the merged unit for the next stage.
     */
    @Override
    public void process(WritableComparable key, Iterator<Writable> values, OutputCollector<WritableComparable, Writable> output, Reporter reporter) throws IOException {
        List<LinguisticAnnotationProcessUnit> units = new ArrayList<LinguisticAnnotationProcessUnit>();
        // sort linguistic annotations
        while (values.hasNext()) {
            units.add((LinguisticAnnotationProcessUnit) values.next());
        }
        // Guard: with no values there is nothing to merge, and the code
        // below would dereference a null "last" unit.
        if (units.isEmpty()) {
            return;
        }
        Collections.sort(units);
        // merged document accumulating every unit's annotations
        LinguisticAnnotationDocument annotation = new LinguisticAnnotationDocument();
        // last linguistic annotation (supplies annotation id / stage number)
        LinguisticAnnotationProcessUnit last = null;
        // dependencies on the merged unit ids
        Dependencies dep = new Dependencies();
        for (LinguisticAnnotationProcessUnit unit : units) {
            annotation.merge(unit.getDocument());
            dep.addDependency(unit.getId());
            last = unit;
        }
        _local.annotate(new LinguisticAnnotationProcessUnit(annotation, new LinguisticAnnotationDocument(), dep, last.getAnnotationId(), last.getStageNumber() + 1), null);
    }

    @Override
    public void setOutputCollector(OutputCollector<WritableComparable, Writable> output) {
        ((HadoopFinalStage) _local.getFinalStage()).setOutputCollector(output);
    }

    @Override
    public void setReporter(Reporter reporter) {
        ((HadoopFinalStage) _local.getFinalStage()).setReporter(reporter);
    }

    @Override
    public void stop() {
        _local.close();
    }
}
| Java |
package pt.inesc.id.l2f.annotation.execution.hadoop.laf;
import java.io.IOException;
import java.io.StringReader;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import pt.inesc.id.l2f.annotation.document.xml.XMLReader;
import pt.inesc.id.l2f.annotation.execution.hadoop.ParallelAnnotation.Map;
import pt.inesc.id.l2f.annotation.unit.LinguisticAnnotationProcessUnit;
/**
 * Mapper for LAF input: parses one {@code <unit>} XML fragment into a
 * {@link LinguisticAnnotationProcessUnit} and emits it keyed by its
 * annotation id so the reducer can regroup units of the same annotation.
 */
public class LafMapClass extends Map {

    @Override
    public void start(JobConf job) {}

    /**
     * Parses the fragment and emits (annotationId, unit). Fragments that do
     * not contain a {@code <unit>} element are silently dropped.
     */
    @Override
    public void process(WritableComparable key, Writable value, OutputCollector<WritableComparable, Writable> output, Reporter reporter) throws IOException {
        String input = ((Text) value).toString();
        XMLReader xmlr = new XMLReader(new StringReader(input));
        LinguisticAnnotationProcessUnit unit = this.readLinguisticAnnotationUnit(xmlr);
        // Guard: the reader returns null when no <unit> element is found;
        // the original dereferenced it unconditionally (NPE on malformed
        // records). Debug System.out prints were removed as well.
        if (unit == null) {
            return;
        }
        output.collect(new Text(unit.getAnnotationId()), unit);
    }

    @Override
    public void stop() {}

    @Override
    public void setOutputCollector(OutputCollector<WritableComparable, Writable> output) {}

    @Override
    public void setReporter(Reporter reporter) {}

    // TODO: remove (temporary hand-rolled parsing of the <unit> wrapper)
    /**
     * Scans the stream for the first {@code <unit>} element, builds a unit
     * from its id/annotation/stage attributes and lets the unit read its own
     * body; returns null when the document ends without a {@code <unit>}.
     */
    private LinguisticAnnotationProcessUnit readLinguisticAnnotationUnit(XMLReader xmlr) {
        int event = -1;
        while (true) {
            event = xmlr.next();
            if (xmlr.isDocumentEnd(event)) {
                break;
            }
            if (xmlr.isElementStart(event)) {
                String name = xmlr.getElementName();
                if (name.equals("unit")) {
                    java.util.Map<String, String> feature = xmlr.getAttributes();
                    LinguisticAnnotationProcessUnit unit = new LinguisticAnnotationProcessUnit(feature.get("id"), feature.get("annotation"), Integer.valueOf(feature.get("stage")));
                    unit.readFrom(xmlr);
                    return unit;
                }
            }
        }
        return null;
    }
}
| Java |
package pt.inesc.id.l2f.annotation.execution.hadoop;
import java.io.IOException;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import pt.inesc.id.l2f.annotation.stage.Stage;
import pt.inesc.id.l2f.annotation.tool.IdentityTool;
import pt.inesc.id.l2f.annotation.unit.FinalProcessUnit;
import pt.inesc.id.l2f.annotation.unit.ProcessUnit;
/**
 * Terminal pipeline stage for Hadoop tasks: drains the stage's input queue
 * and forwards every finished process unit to the Hadoop output collector,
 * keyed by an increasing sequence number. Draining stops when a
 * {@link FinalProcessUnit} sentinel arrives.
 */
public class HadoopFinalStage extends Stage {

    /** Reporter of the current Hadoop task (may be null until set). */
    private Reporter _reporter;

    /** Collector receiving the finished units. */
    private OutputCollector<WritableComparable, Writable> _output;

    public HadoopFinalStage() {
        super(new IdentityTool());
    }

    /**
     * Drains the input queue until the sentinel unit arrives, emitting each
     * unit under a sequential numeric key.
     */
    public void run() {
        long i = 0;
        try {
            while (true) {
                ProcessUnit unit = _input.take();
                if (unit instanceof FinalProcessUnit) {
                    break;
                }
                _output.collect(new Text(Long.toString(i++)), unit);
            }
        } catch (InterruptedException e) {
            // Restore the interrupt status so the owning thread/executor can
            // observe the interruption (the original swallowed it).
            Thread.currentThread().interrupt();
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * @return the reporter of the current task
     */
    public Reporter getReporter() {
        return _reporter;
    }

    /**
     * @param reporter the reporter to set
     */
    public void setReporter(Reporter reporter) {
        _reporter = reporter;
    }

    /**
     * @return true when a reporter has been set
     */
    public boolean hasReporter() {
        return _reporter != null;
    }

    /**
     * @return the output collector
     */
    public OutputCollector<WritableComparable, Writable> getOutputCollector() {
        return _output;
    }

    /**
     * @param output the output collector to set
     */
    public void setOutputCollector(OutputCollector<WritableComparable, Writable> output) {
        _output = output;
    }
}
| Java |
package pt.inesc.id.l2f.annotation.execution;
/**
 * Marker file type for raw (plain text) input; its concrete class is used
 * by HadoopExecutionMode to select the "raw" input format.
 *
 * @author Tiago Luis
 *
 */
public class RawFile extends File {
    /**
     * Creates a handle for a raw text input file.
     *
     * @param file path of the file
     */
    public RawFile(String file) {
        super(file);
    }
}
| Java |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.