EDIT: After a brief exchange with the Assimp developer, I was pointed towards the import process. Since I took over the code from someone else, I had not thought to look at that part:
// Import via AssimpNet. Triangulate splits quads/ngons into triangles
// (the source FBX uses quads — see note below), JoinIdenticalVertices
// deduplicates shared vertices, and FlipUVs flips the V coordinate so
// the UV origin moves to the top-left corner.
using (var importer = new AssimpContext())
{
scene = importer.ImportFile(file, PostProcessSteps.Triangulate | PostProcessSteps.FlipUVs | PostProcessSteps.JoinIdenticalVertices);
}
FlipUVs does exactly what it says, it flips on the y axis so the origin is now top left corner. So now I am able to get the model with proper UV but still mirrored mesh. Setting the parent object with scale x = -1 flips it back to normal and makes it look fine but I guess this is not meant to be. So I keep looking.
See the picture, there are two crane models. The one on the left is loaded at runtime via serialization and reconstruction while the right one is the original one simply dragged to the scene. Serialization happens with Assimp library.
The floor happens to be created first and seems to get the right uv map. While the other items get wrong uv map. Though I am printing the values of the uv maps and they seem to match the original one as they should.
This is the serialization code. Note that Mesh here is the Assimp Mesh class, not the Unity Mesh class; the application doing the serialization is a Windows application built on UWP:
/// <summary>
/// Writes every Assimp mesh to the stream as: header, vertex positions,
/// first UV channel, triangle indices — in that exact order, because the
/// deserializer reads the stream back in the same sequence.
/// </summary>
private static void SerializeMeshes(BinaryWriter writer, IEnumerable<Mesh> meshes)
{
    foreach (Mesh current in meshes)
    {
        ICollection<int> indices = MeshLoadTriangles(current);
        MeshSerializeHeader(writer, current.Name, current.VertexCount, indices.Count, current.MaterialIndex);
        MeshSerializeVertices(writer, current.Vertices);
        MeshSerializeUVCoordinate(writer, current.TextureCoordinateChannels);
        MeshSerializeTriangleIndices(writer, indices);
    }
}
/// <summary>
/// Serializes the first UV channel as pairs of 32-bit floats (U, V).
/// The Z component is discarded because Unity's Mesh.uv is a Vector2 array.
/// </summary>
/// <remarks>
/// BUG FIX: the matching reader (MeshReadUVCoordinate) begins by reading a
/// boolean "has UV" flag, but this writer never emitted one — so the first
/// byte of the first U coordinate was being consumed as the flag and the
/// whole stream drifted out of alignment. We now write the flag, and also
/// guard against a mesh that has no UV channel at all.
/// </remarks>
private static void MeshSerializeUVCoordinate(BinaryWriter writer, List<Vector3D>[] textureCoordinateChannels)
{
    List<Vector3D> channel =
        (textureCoordinateChannels != null && textureCoordinateChannels.Length > 0)
            ? textureCoordinateChannels[0]
            : null;

    bool hasUv = channel != null && channel.Count > 0;
    writer.Write(hasUv); // must mirror reader.ReadBoolean() in MeshReadUVCoordinate
    if (!hasUv)
    {
        return;
    }

    foreach (Vector3D v in channel)
    {
        writer.Write(v.X);
        writer.Write(v.Y);
    }
}
/// <summary>
/// Writes each vertex as three consecutive 32-bit floats (X, Y, Z).
/// </summary>
private static void MeshSerializeVertices(BinaryWriter writer, IEnumerable<Vector3D> vertices)
{
    foreach (Vector3D position in vertices)
    {
        writer.Write(position.X);
        writer.Write(position.Y);
        writer.Write(position.Z);
    }
}
/// <summary>
/// Writes every triangle index as a raw little-endian Int32.
/// </summary>
private static void MeshSerializeTriangleIndices(BinaryWriter writer, IEnumerable<int> triangleIndices)
{
    foreach (int triangleIndex in triangleIndices)
    {
        writer.Write(triangleIndex);
    }
}
And this is the invert process:
/// <summary>
/// Reads back the meshes in the exact order SerializeMeshes wrote them:
/// header, vertices, UV coordinates, then triangle indices.
/// </summary>
private static void DeserializeMeshes(BinaryReader reader, SceneGraph scene)
{
    MeshData[] meshes = new MeshData[scene.meshCount];
    for (int i = 0; i < scene.meshCount; i++)
    {
        MeshData meshData = new MeshData();
        MeshReadHeader(reader, meshData);
        MeshReadVertices(reader, meshData);
        MeshReadUVCoordinate(reader, meshData);
        MeshReadTriangleIndices(reader, meshData);
        meshes[i] = meshData;
    }
    // T[] already implements IEnumerable<T>, so a plain assignment works.
    // The previous "meshes as IEnumerable<MeshData>" soft cast was redundant
    // and would have silently yielded null instead of failing loudly if the
    // types ever diverged.
    scene.meshes = meshes;
}
/// <summary>
/// Reads the UV channel, if present. The stream starts with a boolean flag
/// telling whether UV pairs follow; when absent, meshData.uvs is left unset.
/// </summary>
private static void MeshReadUVCoordinate(BinaryReader reader, MeshData meshData)
{
    if (reader.ReadBoolean() == false)
    {
        return; // no UV channel was serialized for this mesh
    }

    Vector2[] uvs = new Vector2[meshData.vertexCount];
    for (int i = 0; i < uvs.Length; i++)
    {
        float u = reader.ReadSingle();
        float v = reader.ReadSingle();
        uvs[i] = new Vector2();
        uvs[i].x = u;
        uvs[i].y = v;
    }
    meshData.uvs = uvs;
}
/// <summary>
/// Reads the per-mesh header. Field order must mirror MeshSerializeHeader:
/// name, vertex count, triangle-index count, material index.
/// </summary>
private static void MeshReadHeader(BinaryReader reader, MeshData meshData)
{
    string name = reader.ReadString();
    int vertexCount = reader.ReadInt32();
    int triangleCount = reader.ReadInt32();
    int materialIndex = reader.ReadInt32();

    meshData.name = name;
    meshData.vertexCount = vertexCount;
    meshData.triangleCount = triangleCount;
    meshData.materialIndex = materialIndex;
}
/// <summary>
/// Reads vertexCount positions, each stored as three 32-bit floats (X, Y, Z).
/// </summary>
private static void MeshReadVertices(BinaryReader reader, MeshData meshData)
{
    int count = meshData.vertexCount;
    Vector3[] positions = new Vector3[count];
    for (int i = 0; i < count; i++)
    {
        float x = reader.ReadSingle();
        float y = reader.ReadSingle();
        float z = reader.ReadSingle();
        positions[i] = new Vector3();
        positions[i].x = x;
        positions[i].y = y;
        positions[i].z = z;
    }
    meshData.vertices = positions;
}
/// <summary>
/// Reads triangleCount raw Int32 indices into meshData.triangles.
/// Note: triangleCount is the number of indices, not the number of triangles.
/// </summary>
private static void MeshReadTriangleIndices(BinaryReader reader, MeshData meshData)
{
    int[] indices = new int[meshData.triangleCount];
    int position = 0;
    while (position < indices.Length)
    {
        indices[position] = reader.ReadInt32();
        position++;
    }
    meshData.triangles = indices;
}
MeshData is just a temporary container with the deserialized values from the fbx. Then, meshes are created:
/// <summary>
/// Builds Unity Mesh objects from the deserialized MeshData containers.
/// Vertices are assigned before triangles so Unity can validate the indices.
/// </summary>
/// <remarks>
/// BUG FIX: the original assigned meshData.normals and then unconditionally
/// called RecalculateNormals(), which immediately overwrote the normals it
/// had just set. We now recompute only when no normals were deserialized.
/// </remarks>
private static Mesh[] CreateMeshes(SceneGraph scene)
{
    Mesh[] meshes = new Mesh[scene.meshCount];
    int index = 0;
    foreach (MeshData meshData in scene.meshes)
    {
        Mesh mesh = new Mesh();
        mesh.vertices = meshData.vertices;
        mesh.triangles = meshData.triangles;
        mesh.uv = meshData.uvs;
        if (meshData.normals != null)
        {
            mesh.normals = meshData.normals;
        }
        else
        {
            mesh.RecalculateNormals();
        }
        meshes[index] = mesh;
        index++;
    }
    return meshes;
}
I don't see any reason in the code that should result in this kind of behaviour, I'd say it would totally screw the mesh if the values were wrong.
I can see that the fbx files I have are using quad instead of triangle for the indexing.
Could it be that Assimp does not cope well with this?
In the end I did not solve the issue properly on the Assimp side.
The basic solution we used was to scale negatively the axis that was flipped in the object transform.
A more appropriate solution would have been to feed all the vertices to a matrix in the Unity side so it resolves the position of the vertices properly.