How to fix:
Replace the existing BuildMeshFromNodeHandleForColoredCubesVolume method in MeshConversion.cs with the following:
Code:
#if CUBIQUITY_USE_UNSAFE
unsafe public static void BuildMeshFromNodeHandleForColoredCubesVolume(Mesh mesh, uint nodeHandle, bool onlyPositions)
{
    // Get the data from Cubiquity.
    ushort noOfVertices; ColoredCubesVertex* vertices; uint noOfIndices; ushort* indices;
    CubiquityDLL.GetColoredCubesMesh(nodeHandle, &noOfVertices, &vertices, &noOfIndices, &indices);
#else
public static void BuildMeshFromNodeHandleForColoredCubesVolume(Mesh mesh, uint nodeHandle, bool onlyPositions)
{
    // Get the data from Cubiquity.
    ColoredCubesVertex[] vertices;
    ushort[] indices;
    CubiquityDLL.GetMesh<ColoredCubesVertex>(nodeHandle, out vertices, out indices);
    int noOfVertices = vertices.Length;
    int noOfIndices = indices.Length;
#endif

    // Clear any previous mesh data.
    mesh.Clear(true);

    // Cubiquity uses 16-bit index arrays to save space, and it appears Unity does the same (at least, there is
    // a limit of 65535 vertices per mesh). However, the Mesh.triangles property is of the signed 32-bit int[]
    // type rather than the unsigned 16-bit ushort[] type. Perhaps this is so they can switch to 32-bit index
    // buffers in the future? At any rate, it means we have to perform a conversion.
    /*int[] indicesAsInt = new int[noOfIndices];
    for (int ct = 0; ct < noOfIndices; ct++)
    {
        indicesAsInt[ct] = indices[ct];
    }*/

    // Give each corner of every triangle its own vertex instead of sharing vertices
    // between triangles. With nothing shared, RecalculateNormals() below produces
    // flat per-face normals, which is what we want for cubes.
    int[] indicesAsInt = new int[noOfIndices];
    Vector3[] positions = new Vector3[noOfIndices];
    Color32[] colors32 = new Color32[noOfIndices];

    // Required for the CubicVertex decoding process.
    Vector3 offset = new Vector3(0.5f, 0.5f, 0.5f);

    for (int i = 0; i < noOfIndices; i++)
    {
        // The new index buffer is simply 0, 1, 2, ... because no vertices are shared.
        indicesAsInt[i] = i;

        // Get and decode the position of the vertex this index referred to.
        ColoredCubesVertex vertex = vertices[indices[i]];
        positions[i].Set(vertex.x, vertex.y, vertex.z);
        positions[i] -= offset;

        // Get and decode the color.
        if (!onlyPositions)
        {
            colors32[i] = (Color32)vertex.color;
        }
    }

    // Assign vertex data to the mesh. For collision meshes the vertex positions
    // are enough, but for meshes which are rendered we want all vertex attributes.
    mesh.vertices = positions;
    if (!onlyPositions)
    {
        mesh.colors32 = colors32;
    }

    /*
    // Required for the CubicVertex decoding process.
    Vector3 offset = new Vector3(0.5f, 0.5f, 0.5f);

    // Copy the vertex positions from Cubiquity into the Unity mesh.
    Vector3[] positions = new Vector3[noOfVertices];
    for (int ct = 0; ct < noOfVertices; ct++)
    {
        // Get and decode the position
        positions[ct].Set(vertices[ct].x, vertices[ct].y, vertices[ct].z);
        positions[ct] -= offset;
    }

    // For collision meshes the vertex positions are enough, but
    // for meshes which are rendered we want all vertex attributes.
    if (!onlyPositions)
    {
        Color32[] colors32 = new Color32[noOfVertices];
        for (int ct = 0; ct < noOfVertices; ct++)
        {
            // Get and decode the color
            colors32[ct] = (Color32)vertices[ct].color;
        }

        // Assign vertex data to the mesh.
        mesh.colors32 = colors32;
    }
    */

    // Assign index data to the mesh.
    mesh.triangles = indicesAsInt;
    mesh.RecalculateNormals();
}
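For anyone wondering why this works: because no vertices are shared between triangles any more, RecalculateNormals() computes one flat normal per face instead of smoothed normals. Below is a minimal standalone sketch of the same idea applied to a single quad. It uses only the plain Unity Mesh API, nothing Cubiquity-specific, and the class and method names are just made up for illustration:
Code:
using UnityEngine;

// Minimal sketch (not part of Cubiquity): build a single quad where every index
// gets its own copy of its vertex. Because nothing is shared, RecalculateNormals()
// produces one flat normal per triangle - the same effect the method above
// achieves for the colored cube meshes.
public static class FlatShadedQuadExample
{
    public static Mesh BuildFlatQuad()
    {
        // Four shared corner positions of a unit quad in the XY plane.
        Vector3[] corners =
        {
            new Vector3(0, 0, 0), new Vector3(1, 0, 0),
            new Vector3(1, 1, 0), new Vector3(0, 1, 0)
        };

        // Two triangles referencing the shared corners.
        int[] sharedIndices = { 0, 1, 2, 0, 2, 3 };

        // Un-share: one vertex per index, so the new index buffer is just 0..n-1.
        Vector3[] positions = new Vector3[sharedIndices.Length];
        int[] indices = new int[sharedIndices.Length];
        for (int i = 0; i < sharedIndices.Length; i++)
        {
            positions[i] = corners[sharedIndices[i]];
            indices[i] = i;
        }

        Mesh mesh = new Mesh();
        mesh.vertices = positions;
        mesh.triangles = indices;
        mesh.RecalculateNormals(); // flat normals, since no vertex is shared
        return mesh;
    }
}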
Then, in ColoredCubes.shader, comment out the following line:
Code:
v.normal = float3 (0.0f, 0.0f, 1.0f);
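In other words, the line should simply end up as:
Code:
// v.normal = float3 (0.0f, 0.0f, 1.0f);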
It seems that the normal set in the surface shader isn't actually being written to the normal buffer (or at least isn't being written correctly).
In any case, Unity's behavior here seems very bizarre. Could it be a bug in Unity itself?