c++ - Triangle Grid Not Rendering correctly [DirectX 11] -


I'm trying to make a grid of triangles for a terrain-generation project in DirectX 11, but when it gets drawn to the screen it draws across all 3 axes, instead of only the X and Z axes.

enter image description here enter image description here

I have the correct amount of vertices and indices, but in the vector of indices, which has a size of 972, the first 486 of them are set to 0 instead of the actual values.

enter image description here

I was wondering if I could get clarification on whether I am setting up the vertex/index buffers correctly.

Below is an example with a 10 x 10 grid.

generateterrain method

void application::generateterrain(int vertrows, int vertcols) {     hresult hr;      // ------------------------------------- create vertex buffer --------------------------------------      totalcellrows = vertrows - 1;     totalcellcols = vertcols - 1;      // width , total width     float dx = 1.0f;     float totalwidth = totalcellcols * dx;      // depth , total depth     float dz = 1.0f;     float totaldepth = totalcellrows * dz;      // x , z offsets     float xoffset = -totalwidth * 0.5f;     float zoffset = totaldepth * 0.5f;      totalvertices = vertrows * vertcols;     totaltriangles = (totalcellrows * totalcellcols) * 2;     totalindices = totaltriangles * 3;      terrainvertices = new simplevertex[totalvertices];      // array version     int k = 0;     (int = 0; < vertrows; i++)     {         (int j = 0; j < vertcols; j++)         {             simplevertex newvertex;             terrainvertices[k].posl = xmfloat3(j * dx + xoffset, 0.0f, -(i * dz) + zoffset);             terrainvertices[k].norml = xmfloat3(0.0f, 1.0f, 0.0f);             terrainvertices[k].tex = xmfloat2(0.0f, 0.0f);             k++;         }     }      d3d11_buffer_desc bd;     zeromemory(&bd, sizeof(bd));     bd.usage = d3d11_usage_default;     bd.bytewidth = sizeof(simplevertex) * totalvertices;     bd.bindflags = d3d11_bind_vertex_buffer;     bd.cpuaccessflags = 0;      d3d11_subresource_data initdata;     zeromemory(&initdata, sizeof(initdata));     initdata.psysmem = &terrainvertices;      hr = _pd3ddevice->createbuffer(&bd, &initdata, &_pgridvertexbuffer);      // ------------------------------------- create index buffer --------------------------------------      // vector version      indices.resize(totalindices);     (word = 0; < (word)vertrows - 1; i++)     {         (word j = 0; j < (word)vertcols - 1; j++)         {             indices.push_back(i * vertcols + j);             indices.push_back(i * vertcols + (j + 1));             indices.push_back((i + 1) * vertcols + j);  
            indices.push_back((i + 1) * vertcols + j);             indices.push_back((i * vertcols + (j + 1)));             indices.push_back((i + 1) * vertcols + (j + 1));         }     }      zeromemory(&bd, sizeof(bd));     bd.usage = d3d11_usage_default;     bd.bytewidth = sizeof(word) * totalindices;     bd.bindflags = d3d11_bind_index_buffer;     bd.cpuaccessflags = 0;      zeromemory(&initdata, sizeof(initdata));     initdata.psysmem = &indices;     hr = _pd3ddevice->createbuffer(&bd, &initdata, &_pgridindexbuffer); } 

indices.resize(totalindices); // <----- !!!error!!! you mean reserve
for (word i = 0; i < (word)vertrows - 1; i++)
{
    for (word j = 0; j < (word)vertcols - 1; j++)
    {
        indices.push_back(i * vertcols + j);
        indices.push_back(i * vertcols + (j + 1));
        indices.push_back((i + 1) * vertcols + j);

        indices.push_back((i + 1) * vertcols + j);
        indices.push_back((i * vertcols + (j + 1)));
        indices.push_back((i + 1) * vertcols + (j + 1));
    }
}

You wrote resize, so the vector is resized (to totalindices zero-valued elements), and then you add more indices with push_back on top of those. I think you want to reserve the memory with "reserve" and then add the indices with push_back.

I also recommend using a pointer to the first value, instead of a pointer to the vector object itself.

you:

initdata.psysmem = &indices; 

better:

initdata.psysmem = &indices[0]; 

buffer initialization seems ok.

good luck


Comments

Popular posts from this blog

Hatching array of circles in AutoCAD using c# -

ios - UITEXTFIELD InputView Uipicker not working in swift -

Python Pig Latin Translator -