I have a std::vector&lt;Triangle*&gt; that I am trying to render in OpenGL. The Triangle struct it stores is defined as:
#pragma once
#include <glm/glm.hpp>

struct Triangle
{
    glm::vec3 _vertex0, _vertex1, _vertex2;

    Triangle(glm::vec3 vertex0, glm::vec3 vertex1, glm::vec3 vertex2)
        : _vertex0(vertex0), _vertex1(vertex1), _vertex2(vertex2)
    {
    }

    // glm::vec3's vec4 constructor truncates to xyz.
    Triangle(glm::vec4 vertex0, glm::vec4 vertex1, glm::vec4 vertex2)
        : _vertex0(glm::vec3(vertex0)), _vertex1(glm::vec3(vertex1)), _vertex2(glm::vec3(vertex2))
    {
    }
};
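Each Triangle should therefore be nine contiguous floats (three vertices of three components each). Assuming glm::vec3 introduces no padding, that layout can be checked at compile time:

// Sanity check: a contiguous array of Triangle is only a valid GL_FLOAT
// stream if neither glm::vec3 nor Triangle contains padding.
static_assert(sizeof(glm::vec3) == 3 * sizeof(float), "glm::vec3 is padded");
static_assert(sizeof(Triangle) == 9 * sizeof(float), "Triangle is padded");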
My Asset struct is:
struct Asset
{
    Asset() { }

    Asset(const std::string& assetOBJFile)
    {
        glGenVertexArrays(1, &vertexArrayID);
        glBindVertexArray(vertexArrayID);

        programID = LoadShaders("vertexShader.txt", "fragmentShader.txt");

        // Read our .obj file
        loadOBJ(assetOBJFile.c_str(), originalTriangles, vertices, faces);
        boundingSphere = BoundingSphere(vertices);

        // Load it into a VBO
        glGenBuffers(1, &vertexbuffer);
        glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
        glBufferData(GL_ARRAY_BUFFER, originalTriangles.size() * sizeof(Triangle),
                     &originalTriangles[0], GL_STATIC_DRAW);

        //velocity = glm::vec3(0.0, 1.0, 1.0);
        velocity = glm::vec3(0.0, 0.0, 0.0);
        position = glm::vec3(0.0, 0.0, 0.0);
        lastTime = glfwGetTime();
    }

    GLuint vertexArrayID;
    GLuint programID;
    GLuint vertexbuffer;
    std::vector<GLuint> faces;
    std::vector<glm::vec3> vertices;
    std::vector<Triangle*> originalTriangles;
    std::vector<Triangle*> triangles;
    BoundingSphere boundingSphere;
    glm::vec3 velocity;
    double lastTime;
    glm::vec3 position;
};
and my code to render an asset is:
void renderAsset(const Asset& asset)
{
    glUseProgram(asset.programID);

    GLint cameraID = glGetUniformLocation(asset.programID, "camera");
    glUniformMatrix4fv(cameraID, 1, GL_FALSE, &camera[0][0]);

    GLint positionID = glGetUniformLocation(asset.programID, "position");
    glm::mat4 model = glm::translate(glm::mat4(1.0f), asset.position); // explicit identity
    glUniformMatrix4fv(positionID, 1, GL_FALSE, &model[0][0]);

    glEnableVertexAttribArray(0);
    glBindBuffer(GL_ARRAY_BUFFER, asset.vertexbuffer);
    glVertexAttribPointer(0, 3, GL_FLOAT, GL_FALSE, 0, (void*)0);
    // glDrawArrays counts vertices: each Triangle contributes 3.
    glDrawArrays(GL_TRIANGLES, 0, asset.originalTriangles.size() * 3);
    glDisableVertexAttribArray(0);
}
However, when I try to render an asset I just end up with gibberish on the screen. If I change the vector from Triangle* to Triangle, everything renders fine, but I need the pointers to construct my spatial hash.
A comment on the question: what does &originalTriangles[0] point to? A list of pointers to Triangle; the contents of the Triangles themselves live ... somewhere else. Could you expand on why "but I need pointers to construct my spatial hash" is relevant? OpenGL needs those triangles in a consecutive piece of memory, that's just the way it is.
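To expand on that comment: glBufferData copies raw bytes starting at the address it is given. With a std::vector&lt;Triangle*&gt;, &originalTriangles[0] is the address of an array of pointers, so the call uploads originalTriangles.size() * sizeof(Triangle) bytes of pointer values (plus whatever memory happens to follow them), and the vertex shader then interprets those bytes as positions. That is the gibberish. The contiguous-memory requirement does not conflict with the spatial hash: store the triangles by value in one vector for the GPU, and give the hash pointers (or indices) into that same storage. Below is a minimal sketch of that layout; it assumes loadOBJ is changed to fill a std::vector&lt;Triangle&gt; by value, and AssetData, buildTrianglePointers, and uploadTriangles are hypothetical names, not part of the question's code:

#include <vector>
#include <glm/glm.hpp>
// The Triangle struct from above and a GL loader (e.g. GLEW) are assumed
// to be available, as they already are in the question's code.

struct AssetData
{
    std::vector<Triangle>  triangles;        // owns the vertex data, contiguous
    std::vector<Triangle*> trianglePointers; // views into `triangles` for the hash
};

// Build the pointer list once loading is finished. The pointers stay
// valid only as long as `triangles` is never resized or destroyed.
void buildTrianglePointers(AssetData& a)
{
    a.trianglePointers.clear();
    a.trianglePointers.reserve(a.triangles.size());
    for (Triangle& t : a.triangles)
        a.trianglePointers.push_back(&t);
}

// Upload the triangle values, not the pointers: data() points at
// size() * sizeof(Triangle) bytes of tightly packed floats.
void uploadTriangles(const AssetData& a, GLuint vertexbuffer)
{
    glBindBuffer(GL_ARRAY_BUFFER, vertexbuffer);
    glBufferData(GL_ARRAY_BUFFER,
                 a.triangles.size() * sizeof(Triangle),
                 a.triangles.data(),
                 GL_STATIC_DRAW);
}

If keeping std::vector&lt;Triangle*&gt; everywhere is unavoidable, the alternative is to copy through the pointers into a temporary std::vector&lt;Triangle&gt; immediately before glBufferData; storing by value just avoids that per-upload copy. And if the vector can ever be resized after the hash is built, indices are safer hash entries than raw pointers, since a reallocation invalidates every Triangle*.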