In game projects we often need to soften (smooth) the edges of a mesh. In the usual case an artist does this in an external DCC tool (approach one). But for meshes generated at runtime (such as dynamic terrain), or when the hard edges must be kept while soft edges are generated separately (for example, a hard-edged look on screen, but an outline extruded along smoothed normals), we have to look elsewhere.
Fortunately, Unity's ModelImporter settings offer an edge-softening option at import time (approach two): by setting normalCalculationMode and normalSmoothingAngle you can have Unity smooth the model's normals. This approach obviously cannot support meshes generated dynamically at runtime, and it likewise cannot preserve the original hard-edge information.
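As a side note, those import settings can also be applied from an editor script. Below is a minimal sketch, assuming an AssetPostprocessor placed in an Editor folder; the class name and the 60-degree threshold are arbitrary examples, not part of any referenced code:

using UnityEditor;

// Editor-only: place in an Editor folder. Makes Unity recalculate (and
// therefore smooth) normals for every imported model.
public class SmoothNormalsPostprocessor : AssetPostprocessor
{
    void OnPreprocessModel()
    {
        var importer = (ModelImporter)assetImporter;
        importer.importNormals = ModelImporterNormals.Calculate;    // recalculate instead of importing
        importer.normalCalculationMode = ModelImporterNormalCalculationMode.AreaAndAngleWeighted;
        importer.normalSmoothingAngle = 60f;                        // example threshold: edges under 60° are softened
    }
}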
In addition, at runtime Unity provides mesh.RecalculateNormals() (approach three), but this method has an awkward quirk: a vertex participates in smoothing only when several faces share that vertex index. This means that distinct vertices sitting at the same position do not get their normals smoothed.
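To see the quirk concretely, consider a hypothetical mesh made of two triangles that meet along an edge but do not share vertex indices; the geometry and component below are made up purely for illustration:

using UnityEngine;

public class RecalcNormalsDemo : MonoBehaviour
{
    void Start()
    {
        // Two triangles meeting along the edge (0,0,1)-(1,0,0), but each
        // triangle owns its own copies of those positions (6 vertices, not 4).
        var mesh = new Mesh
        {
            vertices = new[]
            {
                new Vector3(0, 0, 0), new Vector3(0, 0, 1), new Vector3(1, 0, 0), // triangle A (flat)
                new Vector3(0, 0, 1), new Vector3(1, 1, 1), new Vector3(1, 0, 0)  // triangle B (tilted)
            },
            triangles = new[] { 0, 1, 2, 3, 4, 5 }
        };

        mesh.RecalculateNormals();

        // Indices 1 and 3 are the same position, yet end up with different
        // normals: Unity only averages faces that share a vertex index.
        Debug.Log(mesh.normals[1] + " vs " + mesh.normals[3]);
    }
}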
To make up for the shortcomings of these three approaches, the article A BETTER METHOD TO RECALCULATE NORMALS IN UNITY proposes a solution that combines approaches two and three (approach four). The basic idea is to cluster distinct vertices at the same position via hashing, then smooth across each cluster. That way, even edges belonging to two different faces are smoothed as long as they sit at the same (or nearly the same) position, which is exactly what Unity's runtime smoothing lacks.
Approach four only considers overwriting the normals in place, but sometimes we need the smoothed normals as extra data (for example, stored in uv3), so its code is modified slightly here to return the result instead; a short usage sketch follows the listing.
The code is as follows:
using System.Collections.Generic;
using UnityEngine;

public static class MeshExtends
{
    /// <summary>
    /// Smooths the mesh normals by an angle threshold. This algorithm also smooths
    /// across distinct vertices at the same position (Unity's built-in recalculation does not).
    /// </summary>
    public static Vector3[] RecalculateNormals(this Mesh mesh, float angle)
    {
        var cosineThreshold = Mathf.Cos(angle * Mathf.Deg2Rad);

        var vertices = mesh.vertices;
        var normals = new Vector3[vertices.Length];

        // Face normals, stored per submesh.
        var triNormals = new Vector3[mesh.subMeshCount][];

        // Clusters every vertex entry that sits at (almost) the same position.
        var dictionary = new Dictionary<VertexKey, List<VertexEntry>>(vertices.Length);

        for (var subMeshIndex = 0; subMeshIndex < mesh.subMeshCount; ++subMeshIndex)
        {
            var triangles = mesh.GetTriangles(subMeshIndex);
            triNormals[subMeshIndex] = new Vector3[triangles.Length / 3];

            for (var i = 0; i < triangles.Length; i += 3)
            {
                int i1 = triangles[i];
                int i2 = triangles[i + 1];
                int i3 = triangles[i + 2];

                // Face normal from the cross product of two edges.
                Vector3 p1 = vertices[i2] - vertices[i1];
                Vector3 p2 = vertices[i3] - vertices[i1];
                Vector3 normal = Vector3.Cross(p1, p2).normalized;

                int triIndex = i / 3;
                triNormals[subMeshIndex][triIndex] = normal;

                List<VertexEntry> entry;
                VertexKey key;

                if (!dictionary.TryGetValue(key = new VertexKey(vertices[i1]), out entry))
                {
                    entry = new List<VertexEntry>(4);
                    dictionary.Add(key, entry);
                }
                entry.Add(new VertexEntry(subMeshIndex, triIndex, i1));

                if (!dictionary.TryGetValue(key = new VertexKey(vertices[i2]), out entry))
                {
                    entry = new List<VertexEntry>();
                    dictionary.Add(key, entry);
                }
                entry.Add(new VertexEntry(subMeshIndex, triIndex, i2));

                if (!dictionary.TryGetValue(key = new VertexKey(vertices[i3]), out entry))
                {
                    entry = new List<VertexEntry>();
                    dictionary.Add(key, entry);
                }
                entry.Add(new VertexEntry(subMeshIndex, triIndex, i3));
            }
        }

        // Each vertex averages the face normals of every entry in its position
        // cluster whose face lies within the angle threshold.
        foreach (var vertList in dictionary.Values)
        {
            for (var i = 0; i < vertList.Count; ++i)
            {
                var sum = new Vector3();
                var lhsEntry = vertList[i];

                for (var j = 0; j < vertList.Count; ++j)
                {
                    var rhsEntry = vertList[j];

                    if (lhsEntry.VertexIndex == rhsEntry.VertexIndex)
                    {
                        sum += triNormals[rhsEntry.MeshIndex][rhsEntry.TriangleIndex];
                    }
                    else
                    {
                        // The dot product is the cosine of the angle between the two
                        // face normals; the larger it is, the smaller the angle.
                        var dot = Vector3.Dot(
                            triNormals[lhsEntry.MeshIndex][lhsEntry.TriangleIndex],
                            triNormals[rhsEntry.MeshIndex][rhsEntry.TriangleIndex]);
                        if (dot >= cosineThreshold)
                        {
                            sum += triNormals[rhsEntry.MeshIndex][rhsEntry.TriangleIndex];
                        }
                    }
                }

                normals[lhsEntry.VertexIndex] = sum.normalized;
            }
        }

        return normals;
    }

    private struct VertexKey
    {
        private readonly long _x;
        private readonly long _y;
        private readonly long _z;

        // Positions are quantized so that nearly identical positions map to the same key.
        private const int Tolerance = 100000;

        private const long FNV32Init = 0x811c9dc5;
        private const long FNV32Prime = 0x01000193;

        public VertexKey(Vector3 position)
        {
            _x = (long)(Mathf.Round(position.x * Tolerance));
            _y = (long)(Mathf.Round(position.y * Tolerance));
            _z = (long)(Mathf.Round(position.z * Tolerance));
        }

        public override bool Equals(object obj)
        {
            var key = (VertexKey)obj;
            return _x == key._x && _y == key._y && _z == key._z;
        }

        public override int GetHashCode()
        {
            // FNV-style hash of the quantized coordinates.
            long rv = FNV32Init;
            rv ^= _x;
            rv *= FNV32Prime;
            rv ^= _y;
            rv *= FNV32Prime;
            rv ^= _z;
            rv *= FNV32Prime;
            return rv.GetHashCode();
        }
    }

    private struct VertexEntry
    {
        public int MeshIndex;     // submesh index
        public int TriangleIndex; // triangle index within that submesh
        public int VertexIndex;   // index into mesh.vertices

        public VertexEntry(int meshIndex, int triIndex, int vertIndex)
        {
            MeshIndex = meshIndex;
            TriangleIndex = triIndex;
            VertexIndex = vertIndex;
        }
    }
}
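For completeness, here is the promised usage sketch of the uv3 idea mentioned above (the component name is hypothetical; with Mesh.SetUVs, channel 2 corresponds to uv3, and the 60-degree threshold is just an example):

using System.Collections.Generic;
using UnityEngine;

public class SmoothedNormalBaker : MonoBehaviour
{
    void Awake()
    {
        var mesh = GetComponent<MeshFilter>().mesh;

        // Smooth across a 60-degree threshold without touching mesh.normals...
        Vector3[] smoothed = mesh.RecalculateNormals(60f);

        // ...and stash the result in uv3 (SetUVs channel 2), e.g. for an
        // outline shader that extrudes along the smoothed normals.
        mesh.SetUVs(2, new List<Vector3>(smoothed));
    }
}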