diff --git a/main.cpp b/main.cpp
index 545b7e3..d48f8cf 100644
--- a/main.cpp
+++ b/main.cpp
@@ -181,24 +181,24 @@ static unsigned char sample_lightmap(const world_t* world, const face_t *face, c
         // Towards X
         width = (int)(ceil(bounds.max.y / 16) - floor(bounds.min.y / 16)) * 16;
         height = (int)(ceil(bounds.max.z / 16) - floor(bounds.min.z / 16)) * 16;
-        u = (point.y - bounds.min.y) / width;
-        v = (point.z - bounds.min.z) / height;
+        u = (point.y - bounds.min.y) / (bounds.max.y - bounds.min.y);
+        v = (point.z - bounds.min.z) / (bounds.max.z - bounds.min.z);
         break;
     case 1:
     case 4:
         // Towards Y
         width = (int)(ceil(bounds.max.x / 16) - floor(bounds.min.x / 16)) * 16;
         height = (int)(ceil(bounds.max.z / 16) - floor(bounds.min.z / 16)) * 16;
-        u = (point.x - bounds.min.x) / width;
-        v = (point.z - bounds.min.z) / height;
+        u = (point.x - bounds.min.x) / (bounds.max.x - bounds.min.x);
+        v = (point.z - bounds.min.z) / (bounds.max.z - bounds.min.z);
         break;
     case 2:
     case 5:
         // Towards Z
         width = (int)(ceil(bounds.max.x / 16) - floor(bounds.min.x / 16)) * 16;
         height = (int)(ceil(bounds.max.y / 16) - floor(bounds.min.y / 16)) * 16;
-        u = (point.x - bounds.min.x) / width;
-        v = (point.y - bounds.min.y) / height;
+        u = (point.x - bounds.min.x) / (bounds.max.x - bounds.min.x);
+        v = (point.y - bounds.min.y) / (bounds.max.y - bounds.min.y);
         break;
     default:
         printf("Error: unknown plane type %d\n", plane->type);
@@ -352,7 +352,7 @@ int process_faces(const world_t* world)
         //if (face->ledge_num >= 10)
         //    export_lightmap(world, face, bounds, faceIdx);
 
-        outFace.numFaceVertices = (unsigned short)(outFaceVertices.size() - outFace.firstFaceVertex);
+        outFace.numFaceVertices = (unsigned char)(outFaceVertices.size() - outFace.firstFaceVertex);
         outFaces.push_back(outFace);
     }
 
diff --git a/ps1bsp.h b/ps1bsp.h
index b1e27d3..7154fc8 100644
--- a/ps1bsp.h
+++ b/ps1bsp.h
@@ -60,7 +60,7 @@ typedef struct
 typedef struct
 {
     unsigned short firstFaceVertex;
-    unsigned short numFaceVertices;
+    unsigned char numFaceVertices;
 } ps1bsp_face_t;
 
 // Pre-parsed and encoded entity data (this runs the risk of becoming too bloated)
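
Context for the first hunk: previously u and v were divided by width and height, the lightmap extents rounded up to multiples of 16. Since those padded extents are usually larger than the face's true bounds, a point at bounds.max mapped to a UV short of 1.0, squashing sampling toward the lightmap's min corner. The fix divides by the face's actual extent on the two projected axes so UVs span the full [0, 1] range. A minimal standalone sketch of the idea, using hypothetical vec3/bounds_t stand-ins rather than the real types from main.cpp:

#include <cstdio>

// Hypothetical stand-ins for the project's real vector/bounds types.
struct vec3 { float x, y, z; };
struct bounds_t { vec3 min, max; };

// Plane type 0/3 ("towards X"): the face projects onto the YZ plane, so
// UVs come from the Y and Z extents. Dividing by the true extent maps
// bounds.min to 0 and bounds.max to exactly 1.
static void project_towards_x(const bounds_t& b, const vec3& p, float& u, float& v)
{
    u = (p.y - b.min.y) / (b.max.y - b.min.y);
    v = (p.z - b.min.z) / (b.max.z - b.min.z);
}

int main()
{
    bounds_t b = { { 0.0f, 0.0f, 0.0f }, { 0.0f, 24.0f, 40.0f } };
    vec3 corner = { 0.0f, 24.0f, 40.0f }; // far corner of the face

    float u, v;
    project_towards_x(b, corner, u, v);
    printf("fixed: u=%.3f v=%.3f\n", u, v); // 1.000, 1.000

    // Old behavior: the 16-aligned lightmap extents round 24 up to 32 and
    // 40 up to 48, so the same corner landed at u=0.750, v=0.833.
    int width = 32, height = 48;
    printf("old:   u=%.3f v=%.3f\n",
           (corner.y - b.min.y) / width, (corner.z - b.min.z) / height);
    return 0;
}

The remaining hunks narrow numFaceVertices from unsigned short to unsigned char, which presumes every processed face has at most 255 vertices; if that invariant is not guaranteed elsewhere, an assert(outFaceVertices.size() - outFace.firstFaceVertex <= 0xFF) before the cast would make any truncation loud instead of silent.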