Newer
Older
LLMemType mt2(LLMemType::MTYPE_VERTEX_DESTRUCTOR);
destroyGLBuffer();
destroyGLIndices();
if (mGLArray)
{
David Parks
committed
releaseVAOName(mGLArray);
}
Xiaohong Bao
committed
David Parks
committed
if (mFence)
{
delete mFence;
}
mFence = NULL;
sVertexCount -= mNumVerts;
sIndexCount -= mNumIndices;
llassert_always(!mMappedData && !mMappedIndexData);
David Parks
committed
1028
1029
1030
1031
1032
1033
1034
1035
1036
1037
1038
1039
1040
1041
1042
1043
1044
1045
1046
1047
1048
1049
1050
1051
// Insert a GPU sync fence after commands touching this buffer.
// NOTE: intentionally a no-op — the fence logic below is commented out
// (would lazily create an LLGLSyncFence when GL sync objects are available).
// Kept so callers of placeFence()/waitFence() need no changes.
void LLVertexBuffer::placeFence() const
{
/*if (!mFence && useVBOs())
{
if (gGLManager.mHasSync)
{
mFence = new LLGLSyncFence();
}
}
if (mFence)
{
mFence->placeFence();
}*/
}
// Block until the fence placed by placeFence() has been reached by the GPU.
// NOTE: intentionally a no-op — body commented out together with placeFence().
void LLVertexBuffer::waitFence() const
{
/*if (mFence)
{
mFence->wait();
}*/
}
//----------------------------------------------------------------------------
// Acquire a vertex buffer object (plus its client-side mapping) from the
// pool matching this buffer's usage, rounding the request up to a pool
// block size.  Bumps the global GL buffer count.
void LLVertexBuffer::genBuffer(U32 size)
{
	mSize = vbo_block_size(size);

	// Stream-usage buffers have a dedicated pool; everything else is
	// served from the dynamic pool.
	if (mUsage != GL_STREAM_DRAW_ARB)
	{
		mMappedData = sDynamicVBOPool.allocate(mGLBuffer, mSize);
	}
	else
	{
		mMappedData = sStreamVBOPool.allocate(mGLBuffer, mSize);
	}

	++sGLCount;
}
// Acquire an index buffer object (plus its client-side mapping) from the
// pool matching this buffer's usage, rounding the request up to a pool
// block size.  Bumps the global GL buffer count.
void LLVertexBuffer::genIndices(U32 size)
{
	mIndicesSize = vbo_block_size(size);

	// Same pool split as genBuffer(): stream usage vs. everything else.
	if (mUsage != GL_STREAM_DRAW_ARB)
	{
		mMappedIndexData = sDynamicIBOPool.allocate(mGLIndices, mIndicesSize);
	}
	else
	{
		mMappedIndexData = sStreamIBOPool.allocate(mGLIndices, mIndicesSize);
	}

	++sGLCount;
}
// Return this buffer's VBO handle and client copy to the pool that
// allocated them, then clear the local handle/pointer and drop the
// global GL buffer count.
void LLVertexBuffer::releaseBuffer()
{
	// Release back to whichever pool genBuffer() drew from.
	if (mUsage != GL_STREAM_DRAW_ARB)
	{
		sDynamicVBOPool.release(mGLBuffer, mMappedData, mSize);
	}
	else
	{
		sStreamVBOPool.release(mGLBuffer, mMappedData, mSize);
	}

	mGLBuffer = 0;
	mMappedData = NULL;

	--sGLCount;
}
// Return this buffer's IBO handle and client copy to the pool that
// allocated them, then clear the local handle/pointer and drop the
// global GL buffer count.  Mirrors releaseBuffer().
void LLVertexBuffer::releaseIndices()
{
	if (mUsage == GL_STREAM_DRAW_ARB)
	{
		sStreamIBOPool.release(mGLIndices, mMappedIndexData, mIndicesSize);
	}
	// BUGFIX: 'else' was missing, so stream-usage buffers were released to
	// BOTH pools — the dynamic pool received an already-released handle.
	else
	{
		sDynamicIBOPool.release(mGLIndices, mMappedIndexData, mIndicesSize);
	}
	mGLIndices = 0;
	mMappedIndexData = NULL;
	sGLCount--;
}
// Allocate backing storage of at least 'size' bytes for vertex data.
// Destroys any existing storage first.  When VBOs are in use the storage
// comes from a pool (genBuffer); otherwise it is a plain heap block with a
// fake, monotonically increasing "name" so the rest of the code can treat
// both paths uniformly.
// NOTE(review): reconstructed from a corrupted source — brace structure
// restored from the visible statements; verify against repository history.
void LLVertexBuffer::createGLBuffer(U32 size)
{
	LLMemType mt2(LLMemType::MTYPE_VERTEX_CREATE_VERTICES);

	if (mGLBuffer)
	{
		destroyGLBuffer();
	}

	if (size == 0)
	{
		return;
	}

	mEmpty = true;	// freshly created storage holds no valid data yet

	mMappedDataUsingVBOs = useVBOs();

	if (mMappedDataUsingVBOs)
	{
		genBuffer(size);
	}
	else
	{
		static int gl_buffer_idx = 0;
		mGLBuffer = ++gl_buffer_idx;	// fake handle for non-VBO path
		mMappedData = (U8*)ALLOCATE_MEM(sPrivatePoolp, size);
		mSize = size;
	}
}
// Allocate backing storage of at least 'size' bytes for index data.
// Destroys any existing storage first.  Pads the request by 16 bytes for
// aligned copies (plus another 16 on the VBO path for pointer adjustment).
// NOTE(review): reconstructed from a corrupted source — brace structure
// restored from the visible statements; verify against repository history.
void LLVertexBuffer::createGLIndices(U32 size)
{
	LLMemType mt2(LLMemType::MTYPE_VERTEX_CREATE_INDICES);

	if (mGLIndices)
	{
		destroyGLIndices();
	}

	if (size == 0)
	{
		return;
	}

	mEmpty = true;	// freshly created storage holds no valid data yet

	//pad by 16 bytes for aligned copies
	size += 16;

	mMappedIndexDataUsingVBOs = useVBOs();

	if (mMappedIndexDataUsingVBOs)
	{
		//pad by another 16 bytes for VBO pointer adjustment
		size += 16;
		genIndices(size);
	}
	else
	{
		mMappedIndexData = (U8*)ALLOCATE_MEM(sPrivatePoolp, size);
		static int gl_buffer_idx = 0;
		mGLIndices = ++gl_buffer_idx;	// fake handle for non-VBO path
		mIndicesSize = size;
	}
}
void LLVertexBuffer::destroyGLBuffer()
{
LLMemType mt2(LLMemType::MTYPE_VERTEX_DESTROY_BUFFER);
if (mGLBuffer)
{
Leslie Linden
committed
if (mMappedDataUsingVBOs)
releaseBuffer();
}
else
{
Leslie Linden
committed
FREE_MEM(sPrivatePoolp, (void*) mMappedData);
mMappedData = NULL;
Leslie Linden
committed
mEmpty = true;
}
}
mGLBuffer = 0;
//unbind();
}
void LLVertexBuffer::destroyGLIndices()
{
LLMemType mt2(LLMemType::MTYPE_VERTEX_DESTROY_INDICES);
if (mGLIndices)
{
Leslie Linden
committed
if (mMappedIndexDataUsingVBOs)
Loren Shih
committed
releaseIndices();
}
else
{
Leslie Linden
committed
FREE_MEM(sPrivatePoolp, (void*) mMappedIndexData);
mMappedIndexData = NULL;
Leslie Linden
committed
mEmpty = true;
}
}
mGLIndices = 0;
//unbind();
}
// Set the vertex count, (re)allocating backing storage when the required
// size grows past the current allocation or shrinks below half of it.
// Keeps the global sVertexCount statistic in sync.
void LLVertexBuffer::updateNumVerts(S32 nverts)
{
	LLMemType mt2(LLMemType::MTYPE_VERTEX_UPDATE_VERTS);

	// NOTE(review): the guard condition was lost in the corrupted source;
	// 65536 matches the limit enforced in allocateBuffer() — confirm the
	// exact threshold/clamp against repository history.
	if (nverts > 65536)
	{
		llwarns << "Vertex buffer overflow!" << llendl;
		nverts = 65536;
	}

	U32 needed_size = calcOffsets(mTypeMask, mOffsets, nverts);

	// Grow when too small; also reallocate (shrink) when less than half used.
	if (needed_size > mSize || needed_size <= mSize/2)
	{
		createGLBuffer(needed_size);
	}

	sVertexCount -= mNumVerts;
	mNumVerts = nverts;
	sVertexCount += mNumVerts;
}
// Set the index count, (re)allocating backing storage when the required
// byte size grows past the current allocation or shrinks below half of it.
// Keeps the global sIndexCount statistic in sync.
void LLVertexBuffer::updateNumIndices(S32 nindices)
{
	LLMemType mt2(LLMemType::MTYPE_VERTEX_UPDATE_INDICES);

	// Bytes needed for nindices 16-bit indices.
	U32 needed_size = sizeof(U16) * nindices;

	const bool grow = needed_size > mIndicesSize;
	const bool shrink = needed_size <= mIndicesSize/2;
	if (grow || shrink)
	{
		createGLIndices(needed_size);
	}

	sIndexCount -= mNumIndices;
	mNumIndices = nindices;
	sIndexCount += mNumIndices;
}
// Size the buffer for 'nverts' vertices and 'nindices' indices; when
// 'create' is set, flush to materialize the storage and, if supported,
// wrap the buffer in a vertex array object.
// NOTE(review): reconstructed from a corrupted source — the function's
// closing brace was lost; verify against repository history.
void LLVertexBuffer::allocateBuffer(S32 nverts, S32 nindices, bool create)
{
	LLMemType mt2(LLMemType::MTYPE_VERTEX_ALLOCATE_BUFFER);

	stop_glerror();

	// 16-bit indices cap the addressable vertex count at 65536.
	if (nverts < 0 || nindices < 0 ||
		nverts > 65536)
	{
		llerrs << "Bad vertex buffer allocation: " << nverts << " : " << nindices << llendl;
	}

	updateNumVerts(nverts);
	updateNumIndices(nindices);

	if (create && (nverts || nindices))
	{
		//actually allocate space for the vertex buffer if using VBO mapping
		flush();

		if (gGLManager.mHasVertexArrayObject && useVBOs() && (LLRender::sGLCoreProfile || sUseVAO))
		{
			mGLArray = getVAOName();
			setupVertexArray();
		}
	}
}
static LLFastTimer::DeclareTimer FTM_SETUP_VERTEX_ARRAY("Setup VAO");
David Parks
committed
void LLVertexBuffer::setupVertexArray()
{
if (!mGLArray)
{
return;
}
#if GL_ARB_vertex_array_object
glBindVertexArray(mGLArray);
#endif
sGLRenderArray = mGLArray;
David Parks
committed
U32 attrib_size[] =
{
3, //TYPE_VERTEX,
3, //TYPE_NORMAL,
2, //TYPE_TEXCOORD0,
2, //TYPE_TEXCOORD1,
2, //TYPE_TEXCOORD2,
2, //TYPE_TEXCOORD3,
4, //TYPE_COLOR,
David Parks
committed
4, //TYPE_EMISSIVE,
David Parks
committed
3, //TYPE_BINORMAL,
1, //TYPE_WEIGHT,
4, //TYPE_WEIGHT4,
4, //TYPE_CLOTHWEIGHT,
David Parks
committed
1, //TYPE_TEXTURE_INDEX
David Parks
committed
};
U32 attrib_type[] =
{
GL_FLOAT, //TYPE_VERTEX,
GL_FLOAT, //TYPE_NORMAL,
GL_FLOAT, //TYPE_TEXCOORD0,
GL_FLOAT, //TYPE_TEXCOORD1,
GL_FLOAT, //TYPE_TEXCOORD2,
GL_FLOAT, //TYPE_TEXCOORD3,
GL_UNSIGNED_BYTE, //TYPE_COLOR,
GL_UNSIGNED_BYTE, //TYPE_EMISSIVE,
GL_FLOAT, //TYPE_BINORMAL,
GL_FLOAT, //TYPE_WEIGHT,
GL_FLOAT, //TYPE_WEIGHT4,
GL_FLOAT, //TYPE_CLOTHWEIGHT,
David Parks
committed
GL_UNSIGNED_INT, //TYPE_TEXTURE_INDEX
David Parks
committed
};
bool attrib_integer[] =
{
false, //TYPE_VERTEX,
false, //TYPE_NORMAL,
false, //TYPE_TEXCOORD0,
false, //TYPE_TEXCOORD1,
false, //TYPE_TEXCOORD2,
false, //TYPE_TEXCOORD3,
false, //TYPE_COLOR,
false, //TYPE_EMISSIVE,
false, //TYPE_BINORMAL,
false, //TYPE_WEIGHT,
false, //TYPE_WEIGHT4,
false, //TYPE_CLOTHWEIGHT,
true, //TYPE_TEXTURE_INDEX
David Parks
committed
};
U32 attrib_normalized[] =
{
GL_FALSE, //TYPE_VERTEX,
GL_FALSE, //TYPE_NORMAL,
GL_FALSE, //TYPE_TEXCOORD0,
GL_FALSE, //TYPE_TEXCOORD1,
GL_FALSE, //TYPE_TEXCOORD2,
GL_FALSE, //TYPE_TEXCOORD3,
GL_TRUE, //TYPE_COLOR,
GL_TRUE, //TYPE_EMISSIVE,
GL_FALSE, //TYPE_BINORMAL,
GL_FALSE, //TYPE_WEIGHT,
GL_FALSE, //TYPE_WEIGHT4,
GL_FALSE, //TYPE_CLOTHWEIGHT,
GL_FALSE, //TYPE_TEXTURE_INDEX
David Parks
committed
};
bindGLBuffer(true);
bindGLIndices(true);
for (U32 i = 0; i < TYPE_MAX; ++i)
{
if (mTypeMask & (1 << i))
{
glEnableVertexAttribArrayARB(i);
David Parks
committed
David Parks
committed
{
David Parks
committed
//glVertexattribIPointer requires GLSL 1.30 or later
if (gGLManager.mGLSLVersionMajor > 1 || gGLManager.mGLSLVersionMinor >= 30)
{
glVertexAttribIPointer(i, attrib_size[i], attrib_type[i], sTypeSize[i], (void*) mOffsets[i]);
}
David Parks
committed
}
else
{
glVertexAttribPointerARB(i, attrib_size[i], attrib_type[i], attrib_normalized[i], sTypeSize[i], (void*) mOffsets[i]);
}
David Parks
committed
}
else
{
glDisableVertexAttribArrayARB(i);
}
}
//draw a dummy triangle to set index array pointer
//glDrawElements(GL_TRIANGLES, 0, GL_UNSIGNED_SHORT, NULL);
David Parks
committed
unbind();
David Parks
committed
}
// Resize to the new vertex/index counts; on the VBO path, flush so storage
// is reallocated, and re-record the VAO since attribute offsets may have
// moved.
// NOTE(review): reconstructed from a corrupted source — the function's
// closing brace was lost; verify against repository history.
void LLVertexBuffer::resizeBuffer(S32 newnverts, S32 newnindices)
{
	LLMemType mt2(LLMemType::MTYPE_VERTEX_RESIZE_BUFFER);

	updateNumVerts(newnverts);
	updateNumIndices(newnindices);

	if (useVBOs())
	{
		flush();

		if (mGLArray)
		{ //if size changed, offsets changed
			setupVertexArray();
		}
	}
}
Leslie Linden
committed
bool LLVertexBuffer::useVBOs() const
//it's generally ineffective to use VBO for things that are streaming on apple
Leslie Linden
committed
return (mUsage != 0);
}
//----------------------------------------------------------------------------
// Try to merge the span [index, index+count) into 'region'.  Returns false
// (region untouched) when the spans are disjoint; otherwise grows the
// region to the union of the two spans and returns true.
bool expand_region(LLVertexBuffer::MappedRegion& region, S32 index, S32 count)
{
	const S32 span_end = index + count;
	const S32 region_end = region.mIndex + region.mCount;

	// Disjoint (a gap exists) — caller must track a separate region.
	if (span_end < region.mIndex || index > region_end)
	{
		return false;
	}

	const S32 merged_start = llmin(index, region.mIndex);
	const S32 merged_end = llmax(span_end, region_end);

	region.mIndex = merged_start;
	region.mCount = merged_end - merged_start;

	return true;
}
static LLFastTimer::DeclareTimer FTM_VBO_MAP_BUFFER_RANGE("VBO Map Range");
static LLFastTimer::DeclareTimer FTM_VBO_MAP_BUFFER("VBO Map");
// Map for data access
// Map (part of) the vertex data for CPU writes and return a pointer to the
// region covering 'count' vertices of attribute 'type' starting at 'index'
// (count == -1 maps through the end).  Tracks the mapped span in
// mMappedVertexRegions so unmapBuffer() can flush only what changed.
// NOTE(review): reconstructed from a corrupted source — several dropped
// conditions ('if (mFinal)', 'if (!mMappable)', 'if (mMappable)') and
// '#ifdef GL_ARB_map_buffer_range' guards restored; verify against
// repository history.
volatile U8* LLVertexBuffer::mapVertexBuffer(S32 type, S32 index, S32 count, bool map_range)
{
	bindGLBuffer(true);
	LLMemType mt2(LLMemType::MTYPE_VERTEX_MAP_BUFFER);
	if (mFinal)
	{
		llerrs << "LLVertexBuffer::mapVeretxBuffer() called on a finalized buffer." << llendl;
	}
	if (!useVBOs() && !mMappedData && !mMappedIndexData)
	{
		llerrs << "LLVertexBuffer::mapVertexBuffer() called on unallocated buffer." << llendl;
	}

	if (useVBOs())
	{
		if (!mMappable || gGLManager.mHasMapBufferRange || gGLManager.mHasFlushBufferRange)
		{
			if (count == -1)
			{
				count = mNumVerts-index;	// map through end of buffer
			}

			bool mapped = false;
			//see if range is already mapped
			for (U32 i = 0; i < mMappedVertexRegions.size(); ++i)
			{
				MappedRegion& region = mMappedVertexRegions[i];
				if (region.mType == type)
				{
					if (expand_region(region, index, count))
					{
						mapped = true;
						break;
					}
				}
			}

			if (!mapped)
			{
				//not already mapped, map new region
				MappedRegion region(type, mMappable && map_range ? -1 : index, count);
				mMappedVertexRegions.push_back(region);
			}
		}

		if (mVertexLocked && map_range)
		{
			llerrs << "Attempted to map a specific range of a buffer that was already mapped." << llendl;
		}

		if (!mVertexLocked)
		{
			LLMemType mt_v(LLMemType::MTYPE_VERTEX_MAP_BUFFER_VERTICES);
			mVertexLocked = true;
			sMappedCount++;
			stop_glerror();

			if (!mMappable)
			{
				// Non-mappable: writes go to the client copy; whole-buffer
				// upload happens at unmap time.
				map_range = false;
			}
			else
			{
				volatile U8* src = NULL;
				waitFence();
				if (gGLManager.mHasMapBufferRange)
				{
					if (map_range)
					{
#ifdef GL_ARB_map_buffer_range
						LLFastTimer t(FTM_VBO_MAP_BUFFER_RANGE);
						S32 offset = mOffsets[type] + sTypeSize[type]*index;
						S32 length = (sTypeSize[type]*count+0xF) & ~0xF;	// round length up to 16
						src = (U8*) glMapBufferRange(GL_ARRAY_BUFFER_ARB, offset, length,
							GL_MAP_WRITE_BIT |
							GL_MAP_FLUSH_EXPLICIT_BIT |
							GL_MAP_INVALIDATE_RANGE_BIT);
#endif
					}
					else
					{
#ifdef GL_ARB_map_buffer_range
						if (gDebugGL)
						{
							GLint size = 0;
							glGetBufferParameterivARB(GL_ARRAY_BUFFER_ARB, GL_BUFFER_SIZE_ARB, &size);

							if (size < mSize)
							{
								llerrs << "Invalid buffer size." << llendl;
							}
						}

						LLFastTimer t(FTM_VBO_MAP_BUFFER);
						src = (U8*) glMapBufferRange(GL_ARRAY_BUFFER_ARB, 0, mSize,
							GL_MAP_WRITE_BIT |
							GL_MAP_FLUSH_EXPLICIT_BIT);
#endif
					}
				}
				else if (gGLManager.mHasFlushBufferRange)
				{
					if (map_range)
					{
						glBufferParameteriAPPLE(GL_ARRAY_BUFFER_ARB, GL_BUFFER_SERIALIZED_MODIFY_APPLE, GL_FALSE);
						glBufferParameteriAPPLE(GL_ARRAY_BUFFER_ARB, GL_BUFFER_FLUSHING_UNMAP_APPLE, GL_FALSE);
						src = (U8*) glMapBufferARB(GL_ARRAY_BUFFER_ARB, GL_WRITE_ONLY_ARB);
					}
					else
					{
						src = (U8*) glMapBufferARB(GL_ARRAY_BUFFER_ARB, GL_WRITE_ONLY_ARB);
					}
				}
				else
				{
					map_range = false;
					src = (U8*) glMapBufferARB(GL_ARRAY_BUFFER_ARB, GL_WRITE_ONLY_ARB);
				}

				mMappedData = LL_NEXT_ALIGNED_ADDRESS<volatile U8>(src);
				mAlignedOffset = mMappedData - src;	// presumably mirrors mAlignedIndexOffset — TODO confirm

				stop_glerror();
			}

			if (!mMappedData)
			{
				log_glerror();

				//check the availability of memory
				LLMemory::logMemoryInfo(true);

				if (mMappable)
				{
					//--------------------
					//print out more debug info before crash
					llinfos << "vertex buffer size: (num verts : num indices) = " << getNumVerts() << " : " << getNumIndices() << llendl;
					GLint size;
					glGetBufferParameterivARB(GL_ARRAY_BUFFER_ARB, GL_BUFFER_SIZE_ARB, &size);
					llinfos << "GL_ARRAY_BUFFER_ARB size is " << size << llendl;
					//--------------------

					GLint buff;
					glGetIntegerv(GL_ARRAY_BUFFER_BINDING_ARB, &buff);
					if ((GLuint)buff != mGLBuffer)
					{
						llerrs << "Invalid GL vertex buffer bound: " << buff << llendl;
					}

					llerrs << "glMapBuffer returned NULL (no vertex data)" << llendl;
				}
				else
				{
					llerrs << "memory allocation for vertex data failed." << llendl;
				}
			}
		}
	}
	else
	{
		map_range = false;
	}

	if (map_range && gGLManager.mHasMapBufferRange && mMappable)
	{
		return mMappedData;	// pointer already addresses the requested range
	}
	else
	{
		return mMappedData+mOffsets[type]+sTypeSize[type]*index;
	}
}
static LLFastTimer::DeclareTimer FTM_VBO_MAP_INDEX_RANGE("IBO Map Range");
static LLFastTimer::DeclareTimer FTM_VBO_MAP_INDEX("IBO Map");
// Map (part of) the 16-bit index data for CPU writes and return a pointer
// to 'count' indices starting at 'index' (count == -1 maps through the
// end).  Tracks the mapped span in mMappedIndexRegions for flushing at
// unmap time.  Mirrors mapVertexBuffer().
// NOTE(review): reconstructed from a corrupted source — dropped
// 'if (!mMappable)' / 'if (mMappable)' guards and
// '#ifdef GL_ARB_map_buffer_range' guards restored; verify against
// repository history.
volatile U8* LLVertexBuffer::mapIndexBuffer(S32 index, S32 count, bool map_range)
{
	LLMemType mt2(LLMemType::MTYPE_VERTEX_MAP_BUFFER);
	bindGLIndices(true);
	if (mFinal)
	{
		llerrs << "LLVertexBuffer::mapIndexBuffer() called on a finalized buffer." << llendl;
	}
	if (!useVBOs() && !mMappedData && !mMappedIndexData)
	{
		llerrs << "LLVertexBuffer::mapIndexBuffer() called on unallocated buffer." << llendl;
	}

	if (useVBOs())
	{
		if (!mMappable || gGLManager.mHasMapBufferRange || gGLManager.mHasFlushBufferRange)
		{
			if (count == -1)
			{
				count = mNumIndices-index;	// map through end of buffer
			}

			bool mapped = false;
			//see if range is already mapped
			for (U32 i = 0; i < mMappedIndexRegions.size(); ++i)
			{
				MappedRegion& region = mMappedIndexRegions[i];
				if (expand_region(region, index, count))
				{
					mapped = true;
					break;
				}
			}

			if (!mapped)
			{
				//not already mapped, map new region
				MappedRegion region(TYPE_INDEX, mMappable && map_range ? -1 : index, count);
				mMappedIndexRegions.push_back(region);
			}
		}

		if (mIndexLocked && map_range)
		{
			llerrs << "Attempted to map a specific range of a buffer that was already mapped." << llendl;
		}

		if (!mIndexLocked)
		{
			LLMemType mt_v(LLMemType::MTYPE_VERTEX_MAP_BUFFER_INDICES);
			mIndexLocked = true;
			sMappedCount++;
			stop_glerror();

			if (gDebugGL && useVBOs())
			{
				GLint elem = 0;
				glGetIntegerv(GL_ELEMENT_ARRAY_BUFFER_BINDING_ARB, &elem);

				if (elem != mGLIndices)
				{
					llerrs << "Wrong index buffer bound!" << llendl;
				}
			}

			if (!mMappable)
			{
				// Non-mappable: writes go to the client copy; upload
				// happens at unmap time.
				map_range = false;
			}
			else
			{
				volatile U8* src = NULL;
				waitFence();
				if (gGLManager.mHasMapBufferRange)
				{
					if (map_range)
					{
#ifdef GL_ARB_map_buffer_range
						LLFastTimer t(FTM_VBO_MAP_INDEX_RANGE);
						S32 offset = sizeof(U16)*index;
						S32 length = sizeof(U16)*count;
						src = (U8*) glMapBufferRange(GL_ELEMENT_ARRAY_BUFFER_ARB, offset, length,
							GL_MAP_WRITE_BIT |
							GL_MAP_FLUSH_EXPLICIT_BIT |
							GL_MAP_INVALIDATE_RANGE_BIT);
#endif
					}
					else
					{
#ifdef GL_ARB_map_buffer_range
						LLFastTimer t(FTM_VBO_MAP_INDEX);
						src = (U8*) glMapBufferRange(GL_ELEMENT_ARRAY_BUFFER_ARB, 0, sizeof(U16)*mNumIndices,
							GL_MAP_WRITE_BIT |
							GL_MAP_FLUSH_EXPLICIT_BIT);
#endif
					}
				}
				else if (gGLManager.mHasFlushBufferRange)
				{
					if (map_range)
					{
						glBufferParameteriAPPLE(GL_ELEMENT_ARRAY_BUFFER_ARB, GL_BUFFER_SERIALIZED_MODIFY_APPLE, GL_FALSE);
						glBufferParameteriAPPLE(GL_ELEMENT_ARRAY_BUFFER_ARB, GL_BUFFER_FLUSHING_UNMAP_APPLE, GL_FALSE);
						src = (U8*) glMapBufferARB(GL_ELEMENT_ARRAY_BUFFER_ARB, GL_WRITE_ONLY_ARB);
					}
					else
					{
						src = (U8*) glMapBufferARB(GL_ELEMENT_ARRAY_BUFFER_ARB, GL_WRITE_ONLY_ARB);
					}
				}
				else
				{
					LLFastTimer t(FTM_VBO_MAP_INDEX);
					map_range = false;
					src = (U8*) glMapBufferARB(GL_ELEMENT_ARRAY_BUFFER_ARB, GL_WRITE_ONLY_ARB);
				}

				mMappedIndexData = src; //LL_NEXT_ALIGNED_ADDRESS<U8>(src);
				mAlignedIndexOffset = mMappedIndexData - src;
				stop_glerror();
			}
		}

		if (!mMappedIndexData)
		{
			log_glerror();
			LLMemory::logMemoryInfo(true);

			if (mMappable)
			{
				GLint buff;
				glGetIntegerv(GL_ELEMENT_ARRAY_BUFFER_BINDING_ARB, &buff);
				if ((GLuint)buff != mGLIndices)
				{
					llerrs << "Invalid GL index buffer bound: " << buff << llendl;
				}

				llerrs << "glMapBuffer returned NULL (no index data)" << llendl;
			}
			else
			{
				llerrs << "memory allocation for Index data failed. " << llendl;
			}
		}
	}
	else
	{
		map_range = false;
	}

	if (map_range && gGLManager.mHasMapBufferRange && mMappable)
	{
		return mMappedIndexData;	// pointer already addresses the requested range
	}
	else
	{
		return mMappedIndexData + sizeof(U16)*index;
	}
}
static LLFastTimer::DeclareTimer FTM_VBO_UNMAP("VBO Unmap");
static LLFastTimer::DeclareTimer FTM_VBO_FLUSH_RANGE("Flush VBO Range");
static LLFastTimer::DeclareTimer FTM_IBO_UNMAP("IBO Unmap");
static LLFastTimer::DeclareTimer FTM_IBO_FLUSH_RANGE("Flush IBO Range");
David Parks
committed
void LLVertexBuffer::unmapBuffer()
LLMemType mt2(LLMemType::MTYPE_VERTEX_UNMAP_BUFFER);
David Parks
committed
if (!useVBOs())
Xiaohong Bao
committed
{
return; //nothing to unmap
Xiaohong Bao
committed
}
bool updated_all = false;
David Parks
committed
if (mMappedData && mVertexLocked)
LLFastTimer t(FTM_VBO_UNMAP);
David Parks
committed
bindGLBuffer(true);
updated_all = mIndexLocked; //both vertex and index buffers done updating
Xiaohong Bao
committed
Xiaohong Bao
committed
{
David Parks
committed
if (!mMappedVertexRegions.empty())
{
stop_glerror();
for (U32 i = 0; i < mMappedVertexRegions.size(); ++i)
{
const MappedRegion& region = mMappedVertexRegions[i];
S32 offset = region.mIndex >= 0 ? mOffsets[region.mType]+sTypeSize[region.mType]*region.mIndex : 0;
S32 length = sTypeSize[region.mType]*region.mCount;
glBufferSubDataARB(GL_ARRAY_BUFFER_ARB, offset, length, (U8*) mMappedData+offset);
David Parks
committed
stop_glerror();
}
mMappedVertexRegions.clear();
}
else
{
stop_glerror();
glBufferSubDataARB(GL_ARRAY_BUFFER_ARB, 0, getSize(), (U8*) mMappedData);
David Parks
committed
stop_glerror();
}
Xiaohong Bao
committed
}
else
{
if (gGLManager.mHasMapBufferRange || gGLManager.mHasFlushBufferRange)
{
if (!mMappedVertexRegions.empty())
{
stop_glerror();
for (U32 i = 0; i < mMappedVertexRegions.size(); ++i)
{
const MappedRegion& region = mMappedVertexRegions[i];
S32 offset = region.mIndex >= 0 ? mOffsets[region.mType]+sTypeSize[region.mType]*region.mIndex : 0;
S32 length = sTypeSize[region.mType]*region.mCount;
if (gGLManager.mHasMapBufferRange)
{
LLFastTimer t(FTM_VBO_FLUSH_RANGE);
#ifdef GL_ARB_map_buffer_range
glFlushMappedBufferRange(GL_ARRAY_BUFFER_ARB, offset, length);
#endif
}
else if (gGLManager.mHasFlushBufferRange)
{
glFlushMappedBufferRangeAPPLE(GL_ARRAY_BUFFER_ARB, offset, length);
}
stop_glerror();
}
mMappedVertexRegions.clear();
}
}
Xiaohong Bao
committed
stop_glerror();
glUnmapBufferARB(GL_ARRAY_BUFFER_ARB);
stop_glerror();
Xiaohong Bao
committed
mMappedData = NULL;
}
Leslie Linden
committed
mVertexLocked = false;
Xiaohong Bao
committed
sMappedCount--;
}
David Parks
committed
if (mMappedIndexData && mIndexLocked)
Xiaohong Bao
committed
{
LLFastTimer t(FTM_IBO_UNMAP);
David Parks
committed
bindGLIndices();
Xiaohong Bao
committed
{
David Parks
committed
if (!mMappedIndexRegions.empty())
{
for (U32 i = 0; i < mMappedIndexRegions.size(); ++i)
{
const MappedRegion& region = mMappedIndexRegions[i];
S32 offset = region.mIndex >= 0 ? sizeof(U16)*region.mIndex : 0;
S32 length = sizeof(U16)*region.mCount;
glBufferSubDataARB(GL_ELEMENT_ARRAY_BUFFER_ARB, offset, length, (U8*) mMappedIndexData+offset);
David Parks
committed
stop_glerror();
}
mMappedIndexRegions.clear();
}
else
{
stop_glerror();
glBufferSubDataARB(GL_ELEMENT_ARRAY_BUFFER_ARB, 0, getIndicesSize(), (U8*) mMappedIndexData);
David Parks
committed
stop_glerror();
}
Xiaohong Bao
committed
}
else
{
if (gGLManager.mHasMapBufferRange || gGLManager.mHasFlushBufferRange)
{
if (!mMappedIndexRegions.empty())
{
for (U32 i = 0; i < mMappedIndexRegions.size(); ++i)
{
const MappedRegion& region = mMappedIndexRegions[i];
S32 offset = region.mIndex >= 0 ? sizeof(U16)*region.mIndex : 0;
S32 length = sizeof(U16)*region.mCount;
if (gGLManager.mHasMapBufferRange)
{
LLFastTimer t(FTM_IBO_FLUSH_RANGE);
#ifdef GL_ARB_map_buffer_range
glFlushMappedBufferRange(GL_ELEMENT_ARRAY_BUFFER_ARB, offset, length);
#endif
}
else if (gGLManager.mHasFlushBufferRange)
{
David Parks
committed
#ifdef GL_APPLE_flush_buffer_range
glFlushMappedBufferRangeAPPLE(GL_ELEMENT_ARRAY_BUFFER_ARB, offset, length);
David Parks
committed
#endif
stop_glerror();
}
mMappedIndexRegions.clear();
}
}
Xiaohong Bao
committed
stop_glerror();
glUnmapBufferARB(GL_ELEMENT_ARRAY_BUFFER_ARB);
stop_glerror();
mMappedIndexData = NULL;
Xiaohong Bao
committed
}
mIndexLocked = false;
Xiaohong Bao
committed
sMappedCount--;
}
if(updated_all)
{
Leslie Linden
committed
mEmpty = false;
}
}
//----------------------------------------------------------------------------
template <class T,S32 type> struct VertexBufferStrider
{
typedef LLStrider<T> strider_t;
static bool get(LLVertexBuffer& vbo,
strider_t& strider,
S32 index, S32 count, bool map_range)
{
if (type == LLVertexBuffer::TYPE_INDEX)
{
volatile U8* ptr = vbo.mapIndexBuffer(index, count, map_range);
if (ptr == NULL)
Xiaohong Bao
committed
{
llwarns << "mapIndexBuffer failed!" << llendl;
Leslie Linden
committed
return false;
Xiaohong Bao
committed
}
strider = (T*)ptr;
strider.setStride(0);
Leslie Linden
committed
return true;
}
else if (vbo.hasDataType(type))
{
S32 stride = LLVertexBuffer::sTypeSize[type];