Add lighting and global contour lines; fix layer issues

2025-07-17 18:54:05 +08:00
parent c781d38c0c
commit b274b62671
4594 changed files with 791769 additions and 4921 deletions

@@ -0,0 +1,344 @@
// Original src: https://github.com/zz85/threejs-path-flow
const CHANNELS = 4;
const TEXTURE_WIDTH = 1024;
const TEXTURE_HEIGHT = 4;
import {
DataTexture,
RGBAFormat,
FloatType,
RepeatWrapping,
Mesh,
InstancedMesh,
NearestFilter,
DynamicDrawUsage,
Matrix4
} from 'three';
/**
* Make a new DataTexture to store the descriptions of the curves.
*
* @param { number } numberOfCurves the number of curves needed to be described by this texture.
*/
export function initSplineTexture( numberOfCurves = 1 ) {
const dataArray = new Float32Array( TEXTURE_WIDTH * TEXTURE_HEIGHT * numberOfCurves * CHANNELS );
const dataTexture = new DataTexture(
dataArray,
TEXTURE_WIDTH,
TEXTURE_HEIGHT * numberOfCurves,
RGBAFormat,
FloatType
);
dataTexture.wrapS = RepeatWrapping;
dataTexture.wrapT = RepeatWrapping;
dataTexture.magFilter = NearestFilter;
dataTexture.needsUpdate = true;
return dataTexture;
}
/**
* Write the curve description to the data texture
*
* @param { DataTexture } texture The DataTexture to write to
* @param { Curve } splineCurve The curve to describe
* @param { number } offset Which curve slot to write to
*/
export function updateSplineTexture( texture, splineCurve, offset = 0 ) {
const numberOfPoints = Math.floor( TEXTURE_WIDTH * ( TEXTURE_HEIGHT / 4 ) );
splineCurve.arcLengthDivisions = numberOfPoints / 2;
splineCurve.updateArcLengths();
const points = splineCurve.getSpacedPoints( numberOfPoints );
const frenetFrames = splineCurve.computeFrenetFrames( numberOfPoints, true );
for ( let i = 0; i < numberOfPoints; i ++ ) {
const rowOffset = Math.floor( i / TEXTURE_WIDTH );
const rowIndex = i % TEXTURE_WIDTH;
let pt = points[ i ];
setTextureValue( texture, rowIndex, pt.x, pt.y, pt.z, 0 + rowOffset + ( TEXTURE_HEIGHT * offset ) );
pt = frenetFrames.tangents[ i ];
setTextureValue( texture, rowIndex, pt.x, pt.y, pt.z, 1 + rowOffset + ( TEXTURE_HEIGHT * offset ) );
pt = frenetFrames.normals[ i ];
setTextureValue( texture, rowIndex, pt.x, pt.y, pt.z, 2 + rowOffset + ( TEXTURE_HEIGHT * offset ) );
pt = frenetFrames.binormals[ i ];
setTextureValue( texture, rowIndex, pt.x, pt.y, pt.z, 3 + rowOffset + ( TEXTURE_HEIGHT * offset ) );
}
texture.needsUpdate = true;
}
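// Resulting layout: each curve occupies TEXTURE_HEIGHT (4) consecutive rows of the texture.
// For the curve written at slot `offset`, row 4 * offset + 0 holds the sampled positions,
// + 1 the tangents, + 2 the normals and + 3 the binormals, one sample per texel across
// the TEXTURE_WIDTH columns.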
function setTextureValue( texture, index, x, y, z, o ) {
const image = texture.image;
const { data } = image;
const i = CHANNELS * TEXTURE_WIDTH * o; // Row Offset
data[ index * CHANNELS + i + 0 ] = x;
data[ index * CHANNELS + i + 1 ] = y;
data[ index * CHANNELS + i + 2 ] = z;
data[ index * CHANNELS + i + 3 ] = 1;
}
/**
* Create a new set of uniforms for describing the curve modifier
*
* @param { DataTexture } splineTexture The data texture which holds the curve description
*/
export function getUniforms( splineTexture ) {
const uniforms = {
spineTexture: { value: splineTexture },
pathOffset: { type: 'f', value: 0 }, // time of path curve
pathSegment: { type: 'f', value: 1 }, // fractional length of path
spineOffset: { type: 'f', value: 161 },
spineLength: { type: 'f', value: 400 },
flow: { type: 'i', value: 1 },
};
return uniforms;
}
export function modifyShader( material, uniforms, numberOfCurves = 1 ) {
if ( material.__ok ) return;
material.__ok = true;
material.onBeforeCompile = ( shader ) => {
if ( shader.__modified ) return;
shader.__modified = true;
Object.assign( shader.uniforms, uniforms );
const vertexShader = `
uniform sampler2D spineTexture;
uniform float pathOffset;
uniform float pathSegment;
uniform float spineOffset;
uniform float spineLength;
uniform int flow;
float textureLayers = ${TEXTURE_HEIGHT * numberOfCurves}.;
float textureStacks = ${TEXTURE_HEIGHT / 4}.;
${shader.vertexShader}
`
// chunk import moved in front of modified shader below
.replace( '#include <beginnormal_vertex>', '' )
// vec3 transformedNormal declaration overriden below
.replace( '#include <defaultnormal_vertex>', '' )
// vec3 transformed declaration overriden below
.replace( '#include <begin_vertex>', '' )
// shader override
.replace(
/void\s*main\s*\(\)\s*\{/,
`
void main() {
#include <beginnormal_vertex>
vec4 worldPos = modelMatrix * vec4(position, 1.);
bool bend = flow > 0;
float xWeight = bend ? 0. : 1.;
#ifdef USE_INSTANCING
// The instance matrix translation carries per-instance curve data written by
// InstancedFlow.writeChanges(): [3][0] = curve length, [3][1] = curve index, [3][2] = offset along the curve.
float pathOffsetFromInstanceMatrix = instanceMatrix[3][2];
float spineLengthFromInstanceMatrix = instanceMatrix[3][0];
float spinePortion = bend ? (worldPos.x + spineOffset) / spineLengthFromInstanceMatrix : 0.;
float mt = (spinePortion * pathSegment + pathOffset + pathOffsetFromInstanceMatrix)*textureStacks;
#else
float spinePortion = bend ? (worldPos.x + spineOffset) / spineLength : 0.;
float mt = (spinePortion * pathSegment + pathOffset)*textureStacks;
#endif
mt = mod(mt, textureStacks);
float rowOffset = floor(mt);
#ifdef USE_INSTANCING
rowOffset += instanceMatrix[3][1] * ${TEXTURE_HEIGHT}.;
#endif
vec3 spinePos = texture2D(spineTexture, vec2(mt, (0. + rowOffset + 0.5) / textureLayers)).xyz;
vec3 a = texture2D(spineTexture, vec2(mt, (1. + rowOffset + 0.5) / textureLayers)).xyz;
vec3 b = texture2D(spineTexture, vec2(mt, (2. + rowOffset + 0.5) / textureLayers)).xyz;
vec3 c = texture2D(spineTexture, vec2(mt, (3. + rowOffset + 0.5) / textureLayers)).xyz;
mat3 basis = mat3(a, b, c);
vec3 transformed = basis
* vec3(worldPos.x * xWeight, worldPos.y * 1., worldPos.z * 1.)
+ spinePos;
vec3 transformedNormal = normalMatrix * (basis * objectNormal);
` ).replace(
'#include <project_vertex>',
`vec4 mvPosition = modelViewMatrix * vec4( transformed, 1.0 );
gl_Position = projectionMatrix * mvPosition;`
);
shader.vertexShader = vertexShader;
};
}
/**
* A helper class for making meshes bend around curves
*/
export class Flow {
/**
* @param {Mesh} mesh The mesh to clone and modify to bend around the curve
* @param {number} numberOfCurves The amount of space that should be preallocated for additional curves
*/
constructor( mesh, numberOfCurves = 1 ) {
const obj3D = mesh.clone();
const splineTexure = initSplineTexture( numberOfCurves );
const uniforms = getUniforms( splineTexure );
obj3D.traverse( function ( child ) {
if (
child instanceof Mesh ||
child instanceof InstancedMesh
) {
if ( Array.isArray( child.material ) ) {
const materials = [];
for ( const material of child.material ) {
const newMaterial = material.clone();
modifyShader( newMaterial, uniforms, numberOfCurves );
materials.push( newMaterial );
}
child.material = materials;
} else {
child.material = child.material.clone();
modifyShader( child.material, uniforms, numberOfCurves );
}
}
} );
this.curveArray = new Array( numberOfCurves );
this.curveLengthArray = new Array( numberOfCurves );
this.object3D = obj3D;
this.splineTexure = splineTexure;
this.uniforms = uniforms;
}
updateCurve( index, curve ) {
if ( index >= this.curveArray.length ) throw Error( 'Index out of range for Flow' );
const curveLength = curve.getLength();
this.uniforms.spineLength.value = curveLength;
this.curveLengthArray[ index ] = curveLength;
this.curveArray[ index ] = curve;
updateSplineTexture( this.splineTexure, curve, index );
}
moveAlongCurve( amount ) {
this.uniforms.pathOffset.value += amount;
}
}
const matrix = new Matrix4();
/**
* A helper class for creating instanced versions of flow, where the instances are placed on the curve.
*/
export class InstancedFlow extends Flow {
/**
*
* @param {number} count The number of instanced elements
* @param {number} curveCount The number of curves to preallocate for
* @param {Geometry} geometry The geometry to use for the instanced mesh
* @param {Material} material The material to use for the instanced mesh
*/
constructor( count, curveCount, geometry, material ) {
const mesh = new InstancedMesh(
geometry,
material,
count
);
mesh.instanceMatrix.setUsage( DynamicDrawUsage );
mesh.frustumCulled = false;
super( mesh, curveCount );
this.offsets = new Array( count ).fill( 0 );
this.whichCurve = new Array( count ).fill( 0 );
}
/**
* Which curve an instance follows and its position along that curve are stored in the
* translation components of that instance's matrix. This method writes that information
* to the matrix and marks it as needing an update.
*
* @param {number} index The index of the instanced element to update
*/
writeChanges( index ) {
matrix.makeTranslation(
this.curveLengthArray[ this.whichCurve[ index ] ], // x -> instanceMatrix[3][0]: length of the curve this instance follows
this.whichCurve[ index ], // y -> instanceMatrix[3][1]: index of the curve (selects its rows in the spline texture)
this.offsets[ index ] // z -> instanceMatrix[3][2]: offset of this instance along the curve
);
this.object3D.setMatrixAt( index, matrix );
this.object3D.instanceMatrix.needsUpdate = true;
}
/**
* Move an individual element along the curve by a specific amount
*
* @param {number} index Which element to update
* @param {number} offset Move by how much
*/
moveIndividualAlongCurve( index, offset ) {
this.offsets[ index ] += offset;
this.writeChanges( index );
}
/**
* Select which curve to use for an element
*
* @param {number} index the index of the instanced element to update
* @param {number} curveNo the index of the curve it should use
*/
setCurve( index, curveNo ) {
if ( isNaN( curveNo ) ) throw Error( 'curve index being set is Not a Number (NaN)' );
this.whichCurve[ index ] = curveNo;
this.writeChanges( index );
}
}
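A minimal usage sketch (not part of this commit): bend an existing mesh along a closed curve with Flow, and spread several instances over the same curve with InstancedFlow. The import path, mesh, geometry, material and scene are illustrative assumptions.

import { CatmullRomCurve3, Vector3 } from 'three';
import { Flow, InstancedFlow } from './CurveModifier.js'; // assumed file name

// A closed curve for the meshes to follow; the control points are arbitrary.
const curve = new CatmullRomCurve3( [
	new Vector3( - 3, 0, 0 ),
	new Vector3( 0, 0, 3 ),
	new Vector3( 3, 0, 0 ),
	new Vector3( 0, 0, - 3 )
], true );

// Bend a single mesh around the curve.
const flow = new Flow( mesh ); // `mesh` is any existing THREE.Mesh
flow.updateCurve( 0, curve ); // write the curve into slot 0 of the spline texture
scene.add( flow.object3D );
flow.moveAlongCurve( 0.001 ); // call per frame to advance the mesh along the curve

// Place ten instances of a geometry on the same curve, spread evenly.
const instancedFlow = new InstancedFlow( 10, 1, geometry, material );
instancedFlow.updateCurve( 0, curve );
scene.add( instancedFlow.object3D );
for ( let i = 0; i < 10; i ++ ) {
	instancedFlow.setCurve( i, 0 );
	instancedFlow.moveIndividualAlongCurve( i, i / 10 );
}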

@@ -0,0 +1,279 @@
import {
BufferAttribute,
BufferGeometry,
Vector3
} from 'three';
import * as BufferGeometryUtils from '../utils/BufferGeometryUtils.js';
const _A = new Vector3();
const _B = new Vector3();
const _C = new Vector3();
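/**
* Splits vertices along sharp edges: vertices shared by faces whose normals differ
* by more than cutOffAngle (in radians) are duplicated so that each group of faces
* gets its own normal; when tryKeepNormals is true the original normals are restored
* on vertices that were not split.
*/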
class EdgeSplitModifier {
modify( geometry, cutOffAngle, tryKeepNormals = true ) {
function computeNormals() {
normals = new Float32Array( indexes.length * 3 );
for ( let i = 0; i < indexes.length; i += 3 ) {
let index = indexes[ i ];
_A.set(
positions[ 3 * index ],
positions[ 3 * index + 1 ],
positions[ 3 * index + 2 ] );
index = indexes[ i + 1 ];
_B.set(
positions[ 3 * index ],
positions[ 3 * index + 1 ],
positions[ 3 * index + 2 ] );
index = indexes[ i + 2 ];
_C.set(
positions[ 3 * index ],
positions[ 3 * index + 1 ],
positions[ 3 * index + 2 ] );
_C.sub( _B );
_A.sub( _B );
const normal = _C.cross( _A ).normalize();
for ( let j = 0; j < 3; j ++ ) {
normals[ 3 * ( i + j ) ] = normal.x;
normals[ 3 * ( i + j ) + 1 ] = normal.y;
normals[ 3 * ( i + j ) + 2 ] = normal.z;
}
}
}
function mapPositionsToIndexes() {
pointToIndexMap = Array( positions.length / 3 );
for ( let i = 0; i < indexes.length; i ++ ) {
const index = indexes[ i ];
if ( pointToIndexMap[ index ] == null ) {
pointToIndexMap[ index ] = [];
}
pointToIndexMap[ index ].push( i );
}
}
function edgeSplitToGroups( indexes, cutOff, firstIndex ) {
_A.set( normals[ 3 * firstIndex ], normals[ 3 * firstIndex + 1 ], normals[ 3 * firstIndex + 2 ] ).normalize();
const result = {
splitGroup: [],
currentGroup: [ firstIndex ]
};
for ( const j of indexes ) {
if ( j !== firstIndex ) {
_B.set( normals[ 3 * j ], normals[ 3 * j + 1 ], normals[ 3 * j + 2 ] ).normalize();
if ( _B.dot( _A ) < cutOff ) {
result.splitGroup.push( j );
} else {
result.currentGroup.push( j );
}
}
}
return result;
}
function edgeSplit( indexes, cutOff, original = null ) {
if ( indexes.length === 0 ) return;
const groupResults = [];
for ( const index of indexes ) {
groupResults.push( edgeSplitToGroups( indexes, cutOff, index ) );
}
let result = groupResults[ 0 ];
for ( const groupResult of groupResults ) {
if ( groupResult.currentGroup.length > result.currentGroup.length ) {
result = groupResult;
}
}
if ( original != null ) {
splitIndexes.push( {
original: original,
indexes: result.currentGroup
} );
}
if ( result.splitGroup.length ) {
edgeSplit( result.splitGroup, cutOff, original || result.currentGroup[ 0 ] );
}
}
let hadNormals = false;
let oldNormals = null;
if ( geometry.attributes.normal ) {
hadNormals = true;
geometry = geometry.clone();
if ( tryKeepNormals === true && geometry.index !== null ) {
oldNormals = geometry.attributes.normal.array;
}
geometry.deleteAttribute( 'normal' );
}
if ( geometry.index == null ) {
geometry = BufferGeometryUtils.mergeVertices( geometry );
}
const indexes = geometry.index.array;
const positions = geometry.getAttribute( 'position' ).array;
let normals;
let pointToIndexMap;
computeNormals();
mapPositionsToIndexes();
const splitIndexes = [];
for ( const vertexIndexes of pointToIndexMap ) {
edgeSplit( vertexIndexes, Math.cos( cutOffAngle ) - 0.001 );
}
const newAttributes = {};
for ( const name of Object.keys( geometry.attributes ) ) {
const oldAttribute = geometry.attributes[ name ];
const newArray = new oldAttribute.array.constructor( ( indexes.length + splitIndexes.length ) * oldAttribute.itemSize );
newArray.set( oldAttribute.array );
newAttributes[ name ] = new BufferAttribute( newArray, oldAttribute.itemSize, oldAttribute.normalized );
}
const newIndexes = new Uint32Array( indexes.length );
newIndexes.set( indexes );
for ( let i = 0; i < splitIndexes.length; i ++ ) {
const split = splitIndexes[ i ];
const index = indexes[ split.original ];
for ( const attribute of Object.values( newAttributes ) ) {
for ( let j = 0; j < attribute.itemSize; j ++ ) {
attribute.array[ ( indexes.length + i ) * attribute.itemSize + j ] =
attribute.array[ index * attribute.itemSize + j ];
}
}
for ( const j of split.indexes ) {
newIndexes[ j ] = indexes.length + i;
}
}
geometry = new BufferGeometry();
geometry.setIndex( new BufferAttribute( newIndexes, 1 ) );
for ( const name of Object.keys( newAttributes ) ) {
geometry.setAttribute( name, newAttributes[ name ] );
}
if ( hadNormals ) {
geometry.computeVertexNormals();
if ( oldNormals !== null ) {
const changedNormals = new Array( oldNormals.length / 3 ).fill( false );
for ( const splitData of splitIndexes )
changedNormals[ splitData.original ] = true;
for ( let i = 0; i < changedNormals.length; i ++ ) {
if ( changedNormals[ i ] === false ) {
for ( let j = 0; j < 3; j ++ )
geometry.attributes.normal.array[ 3 * i + j ] = oldNormals[ 3 * i + j ];
}
}
}
}
return geometry;
}
}
export { EdgeSplitModifier };
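A usage sketch (illustrative; the file name is an assumption): duplicate vertices along edges sharper than 30 degrees.

import { IcosahedronGeometry } from 'three';
import { EdgeSplitModifier } from './EdgeSplitModifier.js'; // assumed file name

const modifier = new EdgeSplitModifier();
// Split edges whose adjacent face normals differ by more than 30 degrees,
// keeping the original normals on vertices that are not affected.
const splitGeometry = modifier.modify( new IcosahedronGeometry( 1, 1 ), 30 * Math.PI / 180, true );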

@@ -0,0 +1,617 @@
import {
BufferGeometry,
Color,
Float32BufferAttribute,
Vector2,
Vector3,
Vector4
} from '../../three.module.min.js';
import * as BufferGeometryUtils from '../utils/BufferGeometryUtils.js';
/**
* Simplification Geometry Modifier
* - based on code and technique
* - by Stan Melax in 1998
* - Progressive Mesh type Polygon Reduction Algorithm
* - http://www.melax.com/polychop/
*/
const _cb = new Vector3(), _ab = new Vector3();
class SimplifyModifier {
modify( geometry, count ) {
geometry = geometry.clone();
// currently morphAttributes are not supported
delete geometry.morphAttributes.position;
delete geometry.morphAttributes.normal;
const attributes = geometry.attributes;
// this modifier can only process indexed and non-indexed geometries with at least a position attribute
for ( const name in attributes ) {
if ( name !== 'position' && name !== 'uv' && name !== 'normal' && name !== 'tangent' && name !== 'color' ) geometry.deleteAttribute( name );
}
geometry = BufferGeometryUtils.mergeVertices( geometry );
//
// put data of original geometry in different data structures
//
const vertices = [];
const faces = [];
// add vertices
const positionAttribute = geometry.getAttribute( 'position' );
const uvAttribute = geometry.getAttribute( 'uv' );
const normalAttribute = geometry.getAttribute( 'normal' );
const tangentAttribute = geometry.getAttribute( 'tangent' );
const colorAttribute = geometry.getAttribute( 'color' );
let t = null;
let v2 = null;
let nor = null;
let col = null;
for ( let i = 0; i < positionAttribute.count; i ++ ) {
const v = new Vector3().fromBufferAttribute( positionAttribute, i );
if ( uvAttribute ) {
v2 = new Vector2().fromBufferAttribute( uvAttribute, i );
}
if ( normalAttribute ) {
nor = new Vector3().fromBufferAttribute( normalAttribute, i );
}
if ( tangentAttribute ) {
t = new Vector4().fromBufferAttribute( tangentAttribute, i );
}
if ( colorAttribute ) {
col = new Color().fromBufferAttribute( colorAttribute, i );
}
const vertex = new Vertex( v, v2, nor, t, col );
vertices.push( vertex );
}
// add faces
let index = geometry.getIndex();
if ( index !== null ) {
for ( let i = 0; i < index.count; i += 3 ) {
const a = index.getX( i );
const b = index.getX( i + 1 );
const c = index.getX( i + 2 );
const triangle = new Triangle( vertices[ a ], vertices[ b ], vertices[ c ], a, b, c );
faces.push( triangle );
}
} else {
for ( let i = 0; i < positionAttribute.count; i += 3 ) {
const a = i;
const b = i + 1;
const c = i + 2;
const triangle = new Triangle( vertices[ a ], vertices[ b ], vertices[ c ], a, b, c );
faces.push( triangle );
}
}
// compute all edge collapse costs
for ( let i = 0, il = vertices.length; i < il; i ++ ) {
computeEdgeCostAtVertex( vertices[ i ] );
}
let nextVertex;
let z = count;
while ( z -- ) {
nextVertex = minimumCostEdge( vertices );
if ( ! nextVertex ) {
console.log( 'THREE.SimplifyModifier: No next vertex' );
break;
}
collapse( vertices, faces, nextVertex, nextVertex.collapseNeighbor );
}
//
const simplifiedGeometry = new BufferGeometry();
const position = [];
const uv = [];
const normal = [];
const tangent = [];
const color = [];
index = [];
//
for ( let i = 0; i < vertices.length; i ++ ) {
const vertex = vertices[ i ];
position.push( vertex.position.x, vertex.position.y, vertex.position.z );
if ( vertex.uv ) {
uv.push( vertex.uv.x, vertex.uv.y );
}
if ( vertex.normal ) {
normal.push( vertex.normal.x, vertex.normal.y, vertex.normal.z );
}
if ( vertex.tangent ) {
tangent.push( vertex.tangent.x, vertex.tangent.y, vertex.tangent.z, vertex.tangent.w );
}
if ( vertex.color ) {
color.push( vertex.color.r, vertex.color.g, vertex.color.b );
}
// cache final index to GREATLY speed up faces reconstruction
vertex.id = i;
}
//
for ( let i = 0; i < faces.length; i ++ ) {
const face = faces[ i ];
index.push( face.v1.id, face.v2.id, face.v3.id );
}
simplifiedGeometry.setAttribute( 'position', new Float32BufferAttribute( position, 3 ) );
if ( uv.length > 0 ) simplifiedGeometry.setAttribute( 'uv', new Float32BufferAttribute( uv, 2 ) );
if ( normal.length > 0 ) simplifiedGeometry.setAttribute( 'normal', new Float32BufferAttribute( normal, 3 ) );
if ( tangent.length > 0 ) simplifiedGeometry.setAttribute( 'tangent', new Float32BufferAttribute( tangent, 4 ) );
if ( color.length > 0 ) simplifiedGeometry.setAttribute( 'color', new Float32BufferAttribute( color, 3 ) );
simplifiedGeometry.setIndex( index );
return simplifiedGeometry;
}
}
function pushIfUnique( array, object ) {
if ( array.indexOf( object ) === - 1 ) array.push( object );
}
function removeFromArray( array, object ) {
const k = array.indexOf( object );
if ( k > - 1 ) array.splice( k, 1 );
}
function computeEdgeCollapseCost( u, v ) {
// if we collapse edge uv by moving u onto v, how much
// will the model change, i.e. what is the "error"?
const edgelength = v.position.distanceTo( u.position );
let curvature = 0;
const sideFaces = [];
// find the "sides" triangles that are on the edge uv
for ( let i = 0, il = u.faces.length; i < il; i ++ ) {
const face = u.faces[ i ];
if ( face.hasVertex( v ) ) {
sideFaces.push( face );
}
}
// use the triangle facing most away from the sides
// to determine our curvature term
for ( let i = 0, il = u.faces.length; i < il; i ++ ) {
let minCurvature = 1;
const face = u.faces[ i ];
for ( let j = 0; j < sideFaces.length; j ++ ) {
const sideFace = sideFaces[ j ];
// use dot product of face normals.
const dotProd = face.normal.dot( sideFace.normal );
minCurvature = Math.min( minCurvature, ( 1.001 - dotProd ) / 2 );
}
curvature = Math.max( curvature, minCurvature );
}
// crude approach in an attempt to preserve borders,
// though it seems not to be totally correct
const borders = 0;
if ( sideFaces.length < 2 ) {
// we add some arbitrary cost for borders,
// borders += 10;
curvature = 1;
}
const amt = edgelength * curvature + borders;
return amt;
}
function computeEdgeCostAtVertex( v ) {
// compute the edge collapse cost for all edges that start
// from vertex v. Since we are only interested in reducing
// the object by selecting the min cost edge at each step, we
// only cache the cost of the least cost edge at this vertex
// (in member variable collapse) as well as the value of the
// cost (in member variable collapseCost).
if ( v.neighbors.length === 0 ) {
// collapse if no neighbors.
v.collapseNeighbor = null;
v.collapseCost = - 0.01;
return;
}
v.collapseCost = 100000;
v.collapseNeighbor = null;
// search all neighboring edges for "least cost" edge
for ( let i = 0; i < v.neighbors.length; i ++ ) {
const collapseCost = computeEdgeCollapseCost( v, v.neighbors[ i ] );
if ( ! v.collapseNeighbor ) {
v.collapseNeighbor = v.neighbors[ i ];
v.collapseCost = collapseCost;
v.minCost = collapseCost;
v.totalCost = 0;
v.costCount = 0;
}
v.costCount ++;
v.totalCost += collapseCost;
if ( collapseCost < v.minCost ) {
v.collapseNeighbor = v.neighbors[ i ];
v.minCost = collapseCost;
}
}
// we average the cost of collapsing at this vertex
v.collapseCost = v.totalCost / v.costCount;
// v.collapseCost = v.minCost;
}
function removeVertex( v, vertices ) {
console.assert( v.faces.length === 0 );
while ( v.neighbors.length ) {
const n = v.neighbors.pop();
removeFromArray( n.neighbors, v );
}
removeFromArray( vertices, v );
}
function removeFace( f, faces ) {
removeFromArray( faces, f );
if ( f.v1 ) removeFromArray( f.v1.faces, f );
if ( f.v2 ) removeFromArray( f.v2.faces, f );
if ( f.v3 ) removeFromArray( f.v3.faces, f );
// TODO optimize this!
const vs = [ f.v1, f.v2, f.v3 ];
for ( let i = 0; i < 3; i ++ ) {
const v1 = vs[ i ];
const v2 = vs[ ( i + 1 ) % 3 ];
if ( ! v1 || ! v2 ) continue;
v1.removeIfNonNeighbor( v2 );
v2.removeIfNonNeighbor( v1 );
}
}
function collapse( vertices, faces, u, v ) {
// Collapse the edge uv by moving vertex u onto v
if ( ! v ) {
// u is a vertex all by itself, so just delete it.
removeVertex( u, vertices );
return;
}
if ( v.uv ) {
u.uv.copy( v.uv );
}
if ( v.normal ) {
v.normal.add( u.normal ).normalize();
}
if ( v.tangent ) {
v.tangent.add( u.tangent ).normalize();
}
const tmpVertices = [];
for ( let i = 0; i < u.neighbors.length; i ++ ) {
tmpVertices.push( u.neighbors[ i ] );
}
// delete triangles on edge uv:
for ( let i = u.faces.length - 1; i >= 0; i -- ) {
if ( u.faces[ i ] && u.faces[ i ].hasVertex( v ) ) {
removeFace( u.faces[ i ], faces );
}
}
// update remaining triangles to have v instead of u
for ( let i = u.faces.length - 1; i >= 0; i -- ) {
u.faces[ i ].replaceVertex( u, v );
}
removeVertex( u, vertices );
// recompute the edge collapse costs in neighborhood
for ( let i = 0; i < tmpVertices.length; i ++ ) {
computeEdgeCostAtVertex( tmpVertices[ i ] );
}
}
function minimumCostEdge( vertices ) {
// O(n * n) approach. TODO optimize this
let least = vertices[ 0 ];
for ( let i = 0; i < vertices.length; i ++ ) {
if ( vertices[ i ].collapseCost < least.collapseCost ) {
least = vertices[ i ];
}
}
return least;
}
// we use a Triangle class to represent the structure of a face slightly differently
class Triangle {
constructor( v1, v2, v3, a, b, c ) {
this.a = a;
this.b = b;
this.c = c;
this.v1 = v1;
this.v2 = v2;
this.v3 = v3;
this.normal = new Vector3();
this.computeNormal();
v1.faces.push( this );
v1.addUniqueNeighbor( v2 );
v1.addUniqueNeighbor( v3 );
v2.faces.push( this );
v2.addUniqueNeighbor( v1 );
v2.addUniqueNeighbor( v3 );
v3.faces.push( this );
v3.addUniqueNeighbor( v1 );
v3.addUniqueNeighbor( v2 );
}
computeNormal() {
const vA = this.v1.position;
const vB = this.v2.position;
const vC = this.v3.position;
_cb.subVectors( vC, vB );
_ab.subVectors( vA, vB );
_cb.cross( _ab ).normalize();
this.normal.copy( _cb );
}
hasVertex( v ) {
return v === this.v1 || v === this.v2 || v === this.v3;
}
replaceVertex( oldv, newv ) {
if ( oldv === this.v1 ) this.v1 = newv;
else if ( oldv === this.v2 ) this.v2 = newv;
else if ( oldv === this.v3 ) this.v3 = newv;
removeFromArray( oldv.faces, this );
newv.faces.push( this );
oldv.removeIfNonNeighbor( this.v1 );
this.v1.removeIfNonNeighbor( oldv );
oldv.removeIfNonNeighbor( this.v2 );
this.v2.removeIfNonNeighbor( oldv );
oldv.removeIfNonNeighbor( this.v3 );
this.v3.removeIfNonNeighbor( oldv );
this.v1.addUniqueNeighbor( this.v2 );
this.v1.addUniqueNeighbor( this.v3 );
this.v2.addUniqueNeighbor( this.v1 );
this.v2.addUniqueNeighbor( this.v3 );
this.v3.addUniqueNeighbor( this.v1 );
this.v3.addUniqueNeighbor( this.v2 );
this.computeNormal();
}
}
class Vertex {
constructor( v, uv, normal, tangent, color ) {
this.position = v;
this.uv = uv;
this.normal = normal;
this.tangent = tangent;
this.color = color;
this.id = - 1; // external use position in vertices list (e.g. for face generation)
this.faces = []; // faces this vertex is connected to
this.neighbors = []; // neighbouring vertices, aka "adjacentVertices"
// these will be computed in computeEdgeCostAtVertex()
this.collapseCost = 0; // cost of collapsing this vertex, the lower the better. aka objdist
this.collapseNeighbor = null; // best candidate for collapsing
}
addUniqueNeighbor( vertex ) {
pushIfUnique( this.neighbors, vertex );
}
removeIfNonNeighbor( n ) {
const neighbors = this.neighbors;
const faces = this.faces;
const offset = neighbors.indexOf( n );
if ( offset === - 1 ) return;
for ( let i = 0; i < faces.length; i ++ ) {
if ( faces[ i ].hasVertex( n ) ) return;
}
neighbors.splice( offset, 1 );
}
}
export { SimplifyModifier };
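A usage sketch (illustrative; the file name is an assumption): collapse roughly half the vertices of a torus knot.

import { TorusKnotGeometry } from 'three';
import { SimplifyModifier } from './SimplifyModifier.js'; // assumed file name

const geometry = new TorusKnotGeometry( 1, 0.3, 64, 8 );
const modifier = new SimplifyModifier();
const count = Math.floor( geometry.attributes.position.count * 0.5 ); // number of vertices to remove
const simplified = modifier.modify( geometry, count );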

@@ -0,0 +1,307 @@
import {
BufferGeometry,
Color,
Float32BufferAttribute,
Vector2,
Vector3
} from 'three';
/**
* Break faces with edges longer than maxEdgeLength
*/
class TessellateModifier {
constructor( maxEdgeLength = 0.1, maxIterations = 6 ) {
this.maxEdgeLength = maxEdgeLength;
this.maxIterations = maxIterations;
}
modify( geometry ) {
if ( geometry.index !== null ) {
geometry = geometry.toNonIndexed();
}
//
const maxIterations = this.maxIterations;
const maxEdgeLengthSquared = this.maxEdgeLength * this.maxEdgeLength;
const va = new Vector3();
const vb = new Vector3();
const vc = new Vector3();
const vm = new Vector3();
const vs = [ va, vb, vc, vm ];
const na = new Vector3();
const nb = new Vector3();
const nc = new Vector3();
const nm = new Vector3();
const ns = [ na, nb, nc, nm ];
const ca = new Color();
const cb = new Color();
const cc = new Color();
const cm = new Color();
const cs = [ ca, cb, cc, cm ];
const ua = new Vector2();
const ub = new Vector2();
const uc = new Vector2();
const um = new Vector2();
const us = [ ua, ub, uc, um ];
const u2a = new Vector2();
const u2b = new Vector2();
const u2c = new Vector2();
const u2m = new Vector2();
const u2s = [ u2a, u2b, u2c, u2m ];
const attributes = geometry.attributes;
const hasNormals = attributes.normal !== undefined;
const hasColors = attributes.color !== undefined;
const hasUVs = attributes.uv !== undefined;
const hasUV1s = attributes.uv1 !== undefined;
let positions = attributes.position.array;
let normals = hasNormals ? attributes.normal.array : null;
let colors = hasColors ? attributes.color.array : null;
let uvs = hasUVs ? attributes.uv.array : null;
let uv1s = hasUV1s ? attributes.uv1.array : null;
let positions2 = positions;
let normals2 = normals;
let colors2 = colors;
let uvs2 = uvs;
let uv1s2 = uv1s;
let iteration = 0;
let tessellating = true;
function addTriangle( a, b, c ) {
const v1 = vs[ a ];
const v2 = vs[ b ];
const v3 = vs[ c ];
positions2.push( v1.x, v1.y, v1.z );
positions2.push( v2.x, v2.y, v2.z );
positions2.push( v3.x, v3.y, v3.z );
if ( hasNormals ) {
const n1 = ns[ a ];
const n2 = ns[ b ];
const n3 = ns[ c ];
normals2.push( n1.x, n1.y, n1.z );
normals2.push( n2.x, n2.y, n2.z );
normals2.push( n3.x, n3.y, n3.z );
}
if ( hasColors ) {
const c1 = cs[ a ];
const c2 = cs[ b ];
const c3 = cs[ c ];
colors2.push( c1.r, c1.g, c1.b );
colors2.push( c2.r, c2.g, c2.b );
colors2.push( c3.r, c3.g, c3.b );
}
if ( hasUVs ) {
const u1 = us[ a ];
const u2 = us[ b ];
const u3 = us[ c ];
uvs2.push( u1.x, u1.y );
uvs2.push( u2.x, u2.y );
uvs2.push( u3.x, u3.y );
}
if ( hasUV1s ) {
const u21 = u2s[ a ];
const u22 = u2s[ b ];
const u23 = u2s[ c ];
uv1s2.push( u21.x, u21.y );
uv1s2.push( u22.x, u22.y );
uv1s2.push( u23.x, u23.y );
}
}
while ( tessellating && iteration < maxIterations ) {
iteration ++;
tessellating = false;
positions = positions2;
positions2 = [];
if ( hasNormals ) {
normals = normals2;
normals2 = [];
}
if ( hasColors ) {
colors = colors2;
colors2 = [];
}
if ( hasUVs ) {
uvs = uvs2;
uvs2 = [];
}
if ( hasUV1s ) {
uv1s = uv1s2;
uv1s2 = [];
}
for ( let i = 0, i2 = 0, il = positions.length; i < il; i += 9, i2 += 6 ) {
va.fromArray( positions, i + 0 );
vb.fromArray( positions, i + 3 );
vc.fromArray( positions, i + 6 );
if ( hasNormals ) {
na.fromArray( normals, i + 0 );
nb.fromArray( normals, i + 3 );
nc.fromArray( normals, i + 6 );
}
if ( hasColors ) {
ca.fromArray( colors, i + 0 );
cb.fromArray( colors, i + 3 );
cc.fromArray( colors, i + 6 );
}
if ( hasUVs ) {
ua.fromArray( uvs, i2 + 0 );
ub.fromArray( uvs, i2 + 2 );
uc.fromArray( uvs, i2 + 4 );
}
if ( hasUV1s ) {
u2a.fromArray( uv1s, i2 + 0 );
u2b.fromArray( uv1s, i2 + 2 );
u2c.fromArray( uv1s, i2 + 4 );
}
const dab = va.distanceToSquared( vb );
const dbc = vb.distanceToSquared( vc );
const dac = va.distanceToSquared( vc );
if ( dab > maxEdgeLengthSquared || dbc > maxEdgeLengthSquared || dac > maxEdgeLengthSquared ) {
tessellating = true;
if ( dab >= dbc && dab >= dac ) {
vm.lerpVectors( va, vb, 0.5 );
if ( hasNormals ) nm.lerpVectors( na, nb, 0.5 );
if ( hasColors ) cm.lerpColors( ca, cb, 0.5 );
if ( hasUVs ) um.lerpVectors( ua, ub, 0.5 );
if ( hasUV1s ) u2m.lerpVectors( u2a, u2b, 0.5 );
addTriangle( 0, 3, 2 );
addTriangle( 3, 1, 2 );
} else if ( dbc >= dab && dbc >= dac ) {
vm.lerpVectors( vb, vc, 0.5 );
if ( hasNormals ) nm.lerpVectors( nb, nc, 0.5 );
if ( hasColors ) cm.lerpColors( cb, cc, 0.5 );
if ( hasUVs ) um.lerpVectors( ub, uc, 0.5 );
if ( hasUV1s ) u2m.lerpVectors( u2b, u2c, 0.5 );
addTriangle( 0, 1, 3 );
addTriangle( 3, 2, 0 );
} else {
vm.lerpVectors( va, vc, 0.5 );
if ( hasNormals ) nm.lerpVectors( na, nc, 0.5 );
if ( hasColors ) cm.lerpColors( ca, cc, 0.5 );
if ( hasUVs ) um.lerpVectors( ua, uc, 0.5 );
if ( hasUV1s ) u2m.lerpVectors( u2a, u2c, 0.5 );
addTriangle( 0, 1, 3 );
addTriangle( 3, 1, 2 );
}
} else {
addTriangle( 0, 1, 2 );
}
}
}
const geometry2 = new BufferGeometry();
geometry2.setAttribute( 'position', new Float32BufferAttribute( positions2, 3 ) );
if ( hasNormals ) {
geometry2.setAttribute( 'normal', new Float32BufferAttribute( normals2, 3 ) );
}
if ( hasColors ) {
geometry2.setAttribute( 'color', new Float32BufferAttribute( colors2, 3 ) );
}
if ( hasUVs ) {
geometry2.setAttribute( 'uv', new Float32BufferAttribute( uvs2, 2 ) );
}
if ( hasUV1s ) {
geometry2.setAttribute( 'uv1', new Float32BufferAttribute( uv1s2, 2 ) );
}
return geometry2;
}
}
export { TessellateModifier };
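A usage sketch (illustrative; the file name is an assumption): subdivide a box until no edge is longer than 0.2 units, with at most 8 passes.

import { BoxGeometry } from 'three';
import { TessellateModifier } from './TessellateModifier.js'; // assumed file name

const modifier = new TessellateModifier( 0.2, 8 );
const tessellated = modifier.modify( new BoxGeometry( 1, 1, 1 ) );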