1.局部坐标转世界坐标
transform.TransformPoint(Vector3 position)
//获取本地转世界矩阵乘以局部坐标
transform.localToWorldMatrix * localPosition
2.世界坐标转局部坐标
transform.InverseTransformPoint(Vector3 position)
//获取世界转局部矩阵乘以世界坐标
transform.worldToLocalMatrix * worldPosition
3.世界坐标转UV坐标
a:射线获取物体上的坐标
b:坐标转换到物体的局部坐标
c:获取当前坐标所在的三角面的三个点的局部坐标和uv坐标
d:把点的坐标转换到ndc空间
e:在ndc坐标系下,构建重心坐标系,计算三个点的重心坐标系的值
f:通过重心坐标系,三个点的权重值,计算射线碰撞点的uv坐标值
Vector3 p = transform.InverseTransformPoint(worldPos);
Matrix4x4 mvp = renderCamera.projectionMatrix * renderCamera.worldToCameraMatrix * transform.localToWorldMatrix;
/// <summary>
/// Convert a local-space point on the mesh to texture (UV) coordinates.
/// Scans the mesh triangles for the face containing the point, then performs
/// perspective-correct barycentric interpolation of the vertex UVs.
/// </summary>
/// <param name="localPoint">Point in the mesh's local space (e.g. from a raycast hit converted via InverseTransformPoint).</param>
/// <param name="matrixMVP">Model-View-Projection transformation matrix.</param>
/// <param name="uv">UV coordinates after conversion; default when no containing triangle is found.</param>
/// <returns>Whether the conversion was successful (the point lies on some triangle of the mesh).</returns>
public bool LocalPointToUV(Vector3 localPoint, Matrix4x4 matrixMVP, out Vector2 uv)
{
    Vector3 p = localPoint;
    // meshTriangles stores 3 vertex indices per face.
    for (var i = 0; i < meshTriangles.Length; i += 3)
    {
        var t1 = meshVertices[meshTriangles[i + 0]];
        var t2 = meshVertices[meshTriangles[i + 1]];
        var t3 = meshVertices[meshTriangles[i + 2]];
        // Reject triangles whose plane does not contain the point.
        if (!Math.ExistPointInPlane(p, t1, t2, t3))
            continue;
        // The point must lie on an edge of the triangle or strictly inside it.
        if (!Math.ExistPointOnTriangleEdge(p, t1, t2, t3) && !Math.ExistPointInTriangle(p, t1, t2, t3))
            continue;
        var uv1 = meshUV[meshTriangles[i + 0]];
        var uv2 = meshUV[meshTriangles[i + 1]];
        var uv3 = meshUV[meshTriangles[i + 2]];
        uv = Math.TextureCoordinateCalculation(p, t1, uv1, t2, uv2, t3, uv3, matrixMVP);
        return true;
    }
    // Fix: original assigned default(Vector3) to the Vector2 out parameter,
    // relying on Unity's implicit Vector3->Vector2 narrowing conversion.
    uv = default(Vector2);
    return false;
}
/// <summary>
/// Calculate the UV coordinates of a point lying inside a triangle, using
/// perspective-correct barycentric interpolation of the three vertex UVs.
/// The point to be investigated needs to be a point inside the triangle.
/// </summary>
/// <param name="p">Points to investigate.</param>
/// <param name="t1">Vertex of triangle.</param>
/// <param name="t1UV">UV coordinates of t1.</param>
/// <param name="t2">Vertex of triangle.</param>
/// <param name="t2UV">UV coordinates of t2.</param>
/// <param name="t3">Vertex of triangle.</param>
/// <param name="t3UV">UV coordinates of t3.</param>
/// <param name="transformMatrix">MVP transformation matrix.</param>
/// <returns>UV coordinates of the point to be investigated.</returns>
public static Vector2 TextureCoordinateCalculation(Vector3 p, Vector3 t1, Vector2 t1UV, Vector3 t2, Vector2 t2UV, Vector3 t3, Vector2 t3UV, Matrix4x4 transformMatrix)
{
    // Clip-space positions of the triangle vertices and the query point.
    Vector4 clip1 = transformMatrix * new Vector4(t1.x, t1.y, t1.z, 1);
    Vector4 clip2 = transformMatrix * new Vector4(t2.x, t2.y, t2.z, 1);
    Vector4 clip3 = transformMatrix * new Vector4(t3.x, t3.y, t3.z, 1);
    Vector4 clipP = transformMatrix * new Vector4(p.x, p.y, p.z, 1);
    // Perspective divide to 2D NDC coordinates (z is not needed for areas).
    Vector2 ndc1 = new Vector2(clip1.x, clip1.y) / clip1.w;
    Vector2 ndc2 = new Vector2(clip2.x, clip2.y) / clip2.w;
    Vector2 ndc3 = new Vector2(clip3.x, clip3.y) / clip3.w;
    Vector2 ndcP = new Vector2(clipP.x, clipP.y) / clipP.w;
    // Signed areas for barycentric weights: the full triangle, and the
    // sub-triangles opposite t2 and t3 respectively.
    var area = 0.5f * ((ndc2.x - ndc1.x) * (ndc3.y - ndc1.y) - (ndc2.y - ndc1.y) * (ndc3.x - ndc1.x));
    var area1 = 0.5f * ((ndc3.x - ndcP.x) * (ndc1.y - ndcP.y) - (ndc3.y - ndcP.y) * (ndc1.x - ndcP.x));
    var area2 = 0.5f * ((ndc1.x - ndcP.x) * (ndc2.y - ndcP.y) - (ndc1.y - ndcP.y) * (ndc2.x - ndcP.x));
    var u = area1 / area;
    var v = area2 / area;
    // Perspective correction: interpolate UV/w and 1/w, then divide.
    var invW = 1 / ((1 - u - v) * 1 / clip1.w + u * 1 / clip2.w + v * 1 / clip3.w);
    return invW * ((1 - u - v) * t1UV / clip1.w + u * t2UV / clip2.w + v * t3UV / clip3.w);
}
碰到过的坐标系:局部坐标系,世界坐标系,相机坐标系,ndc坐标系,屏幕坐标系,视口坐标系,极坐标系,重心坐标系
通过坐标系的描述类型分为:左手坐标系,右手坐标系,极坐标系,重心坐标系
嗯!大概就这么多,之后会继续补充,解释一下以上都是自己的理解!!!