[Vk] Wrap mCurrentCmdBuffer in accessor
eugenegff committed Sep 20, 2024
1 parent 081c763 commit 8b3ddcc
Showing 15 changed files with 65 additions and 58 deletions.
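The change is mechanical: the raw VkCommandBuffer handle in VulkanQueue moves from a public field to the protected section, and every call site goes through a new accessor that asserts the handle is non-null before handing it out. Condensed from the OgreVulkanQueue.h hunks below (class trimmed to just the members involved; everything shown is taken from the diff):

    class VulkanQueue
    {
    protected:
        // No longer reachable from outside the class; callers must use the accessor.
        VkCommandBuffer mCurrentCmdBuffer;

    public:
        VkCommandBuffer getCurrentCmdBuffer()
        {
            // Fires in builds whose OGRE debug level compiles in low-cost asserts,
            // catching requests for the command buffer while the handle is still null.
            OGRE_ASSERT_LOW( mCurrentCmdBuffer );
            return mCurrentCmdBuffer;
        }
    };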
2 changes: 1 addition & 1 deletion CMakeLists.txt
@@ -559,7 +559,7 @@ set( OGRE_DEBUG_LEVEL_DEBUG 3 CACHE STRING
3 - High. We perform intensive validation without concerns for performance."
)
set( OGRE_DEBUG_LEVEL_RELEASE 0 CACHE STRING
"Specify debug level for Release, RelWithDebInfo and MinSizeRel builds. See OGRE_MAX_DEBUG_LEVEL_DEBUG" )
"Specify debug level for Release, RelWithDebInfo and MinSizeRel builds. See OGRE_DEBUG_LEVEL_DEBUG" )

cmake_dependent_option(OGRE_CONFIG_CONTAINERS_USE_CUSTOM_ALLOCATOR "STL containers in Ogre use the custom allocator" TRUE "" FALSE)
if( OGRE_CONFIG_ALLOCATOR EQUAL 0 )
7 changes: 6 additions & 1 deletion RenderSystems/Vulkan/include/OgreVulkanQueue.h
@@ -77,7 +77,6 @@ namespace Ogre
uint32 mQueueIdx;

VkQueue mQueue;
VkCommandBuffer mCurrentCmdBuffer;

VulkanDevice *mOwnerDevice;

@@ -116,6 +115,7 @@ namespace Ogre
FastArray<VulkanWindowSwapChainBased *> mWindowsPendingSwap;

protected:
VkCommandBuffer mCurrentCmdBuffer;
FastArray<VkCommandBuffer> mPendingCmds;

VulkanVaoManager *mVaoManager;
@@ -220,6 +220,11 @@ namespace Ogre
void endCommandBuffer();

public:
VkCommandBuffer getCurrentCmdBuffer()
{
OGRE_ASSERT_LOW( mCurrentCmdBuffer );
return mCurrentCmdBuffer;
}
EncoderState getEncoderState() const { return mEncoderState; }

void getGraphicsEncoder();
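Every consumer then switches from direct member access to the accessor. The pattern, taken from the hunks that follow (arguments elided for brevity), looks like this:

    // Before: reading a possibly-null handle compiled without complaint.
    vkCmdPipelineBarrier( mQueue->mCurrentCmdBuffer, /* ... */ );

    // After: when low-level asserts are compiled in, the accessor verifies that
    // a command buffer actually exists before it is passed to Vulkan.
    vkCmdPipelineBarrier( mQueue->getCurrentCmdBuffer(), /* ... */ );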
4 changes: 2 additions & 2 deletions RenderSystems/Vulkan/src/OgreVulkanAsyncTextureTicket.cpp
@@ -132,7 +132,7 @@ namespace Ogre
memBarrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;

// GPU must stop using this buffer before we can write into it
vkCmdPipelineBarrier( mQueue->mCurrentCmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT,
vkCmdPipelineBarrier( mQueue->getCurrentCmdBuffer(), VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1u, &memBarrier, 0u, 0, 0u, 0 );
}

@@ -168,7 +168,7 @@ namespace Ogre
region.imageExtent.height = srcTextureBox.height;
region.imageExtent.depth = srcTextureBox.depth;

vkCmdCopyImageToBuffer( mQueue->mCurrentCmdBuffer, srcTextureVk->getFinalTextureName(),
vkCmdCopyImageToBuffer( mQueue->getCurrentCmdBuffer(), srcTextureVk->getFinalTextureName(),
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, mVboName.mVboName, 1u, &region );

if( accurateTracking )
@@ -96,7 +96,7 @@ namespace Ogre
region.srcOffset = ( *itor )->getBlockStart() + oldBuffer.mInternalBufferStart;
region.dstOffset = ( *itor )->getBlockStart() + mBuffer.mInternalBufferStart;
region.size = ( *itor )->getBlockSize();
vkCmdCopyBuffer( mDevice->mGraphicsQueue.mCurrentCmdBuffer, oldBuffer.mVboName,
vkCmdCopyBuffer( mDevice->mGraphicsQueue.getCurrentCmdBuffer(), oldBuffer.mVboName,
mBuffer.mVboName, 1u, &region );
( *itor )->mLastFrameUsed = currentFrame;
}
@@ -297,7 +297,7 @@ namespace Ogre
region.srcOffset = srcOffset + srcOffsetStart;
region.dstOffset = dstOffset + dstOffsetStart;
region.size = alignToNextMultiple<size_t>( length, 4u );
vkCmdCopyBuffer( mDevice->mGraphicsQueue.mCurrentCmdBuffer, srcBuf, dstBuf, 1u,
vkCmdCopyBuffer( mDevice->mGraphicsQueue.getCurrentCmdBuffer(), srcBuf, dstBuf, 1u,
&region );

if( this->mDiscardBuffer )
10 changes: 5 additions & 5 deletions RenderSystems/Vulkan/src/OgreVulkanQueue.cpp
@@ -54,8 +54,8 @@ namespace Ogre
mFamilyIdx( 0u ),
mQueueIdx( 0u ),
mQueue( 0 ),
mCurrentCmdBuffer( 0 ),
mOwnerDevice( 0 ),
mCurrentCmdBuffer( 0 ),
mVaoManager( 0 ),
mRenderSystem( 0 ),
mCurrentFence( 0 ),
@@ -635,7 +635,7 @@ namespace Ogre

// Wait until earlier render, compute and transfers are done so we can copy what
// they wrote (unless we're only here for a texture transition)
vkCmdPipelineBarrier( mCurrentCmdBuffer, srcStage & mOwnerDevice->mSupportedStages,
vkCmdPipelineBarrier( getCurrentCmdBuffer(), srcStage & mOwnerDevice->mSupportedStages,
VK_PIPELINE_STAGE_TRANSFER_BIT, 0, numMemBarriers, &memBarrier, 0u, 0,
numImageMemBarriers, &imageMemBarrier );
}
@@ -839,7 +839,7 @@ namespace Ogre

// Wait until earlier render, compute and transfers are done so we can copy what
// they wrote (unless we're only here for a texture transition)
vkCmdPipelineBarrier( mCurrentCmdBuffer, srcStage & mOwnerDevice->mSupportedStages,
vkCmdPipelineBarrier( getCurrentCmdBuffer(), srcStage & mOwnerDevice->mSupportedStages,
VK_PIPELINE_STAGE_TRANSFER_BIT, 0, numMemBarriers, &memBarrier, 0u, 0,
numImageMemBarriers, &imageMemBarrier );
}
@@ -937,7 +937,7 @@ namespace Ogre
numMemBarriers = 1u;

// GPU must stop using this buffer before we can write into it
vkCmdPipelineBarrier( mCurrentCmdBuffer, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
vkCmdPipelineBarrier( getCurrentCmdBuffer(), VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
VK_PIPELINE_STAGE_TRANSFER_BIT, 0, numMemBarriers, &memBarrier, 0u,
0, 0u, 0 );
}
@@ -997,7 +997,7 @@ namespace Ogre

// Wait until earlier render, compute and transfers are done
// Block render, compute and transfers until we're done
vkCmdPipelineBarrier( mCurrentCmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT,
vkCmdPipelineBarrier( getCurrentCmdBuffer(), VK_PIPELINE_STAGE_TRANSFER_BIT,
dstStage & mOwnerDevice->mSupportedStages, 0, numMemBarriers,
&memBarrier, 0u, 0, static_cast<uint32_t>( mImageMemBarriers.size() ),
mImageMemBarriers.begin() );
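A small side effect shows up in the VulkanQueue constructor hunk above: because mCurrentCmdBuffer is now declared after mOwnerDevice (it moved to the protected section of the header), its initializer moves as well so the member-initializer list stays in declaration order. C++ initializes members in declaration order no matter how the list is written, and a mismatched list draws a -Wreorder warning on GCC/Clang. A minimal illustration of the rule, not OGRE code:

    struct Widget
    {
        int first;
        int second;

        // Members are initialized in declaration order (first, then second);
        // listing the initializers the other way round would warn under -Wreorder.
        Widget() : first( 1 ), second( 2 ) {}
    };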
4 changes: 2 additions & 2 deletions RenderSystems/Vulkan/src/OgreVulkanRenderPassDescriptor.cpp
@@ -904,7 +904,7 @@ namespace Ogre
if( mInformationOnly )
return;

VkCommandBuffer cmdBuffer = mQueue->mCurrentCmdBuffer;
VkCommandBuffer cmdBuffer = mQueue->getCurrentCmdBuffer();

const VulkanFrameBufferDescValue &fboDesc = mSharedFboItor->second;

@@ -967,7 +967,7 @@ namespace Ogre
if( mQueue->getEncoderState() != VulkanQueue::EncoderGraphicsOpen )
return;

vkCmdEndRenderPass( mQueue->mCurrentCmdBuffer );
vkCmdEndRenderPass( mQueue->getCurrentCmdBuffer() );

if( isInterruptingRendering )
{
48 changes: 25 additions & 23 deletions RenderSystems/Vulkan/src/OgreVulkanRenderSystem.cpp
@@ -2182,7 +2182,7 @@ namespace Ogre
if( mPso )
oldRootLayout = reinterpret_cast<VulkanHlmsPso *>( mPso->rsData )->rootLayout;

VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
OGRE_ASSERT_LOW( pso->rsData );
VulkanHlmsPso *vulkanPso = reinterpret_cast<VulkanHlmsPso *>( pso->rsData );
vkCmdBindPipeline( cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, vulkanPso->pso );
@@ -2212,7 +2212,7 @@ namespace Ogre
{
OGRE_ASSERT_LOW( pso->rsData );
vulkanPso = reinterpret_cast<VulkanHlmsPso *>( pso->rsData );
VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
vkCmdBindPipeline( cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, vulkanPso->pso );

if( vulkanPso->rootLayout != oldRootLayout )
@@ -2232,7 +2232,7 @@ namespace Ogre
{
flushRootLayoutCS();

vkCmdDispatch( mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer, pso.mNumThreadGroups[0],
vkCmdDispatch( mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer(), pso.mNumThreadGroups[0],
pso.mNumThreadGroups[1], pso.mNumThreadGroups[2] );
}
//-------------------------------------------------------------------------
@@ -2258,7 +2258,7 @@ namespace Ogre

OGRE_ASSERT_LOW( numVertexBuffers < 15u );

VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
if( numVertexBuffers > 0u )
{
vkCmdBindVertexBuffers( cmdBuffer, 0, static_cast<uint32>( numVertexBuffers ),
@@ -2304,7 +2304,7 @@ namespace Ogre
{
flushRootLayout();

VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
vkCmdDrawIndexedIndirect( cmdBuffer, mIndirectBuffer,
reinterpret_cast<VkDeviceSize>( cmd->indirectBufferOffset ),
cmd->numDraws, sizeof( CbDrawIndexed ) );
@@ -2314,7 +2314,7 @@ namespace Ogre
{
flushRootLayout();

VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
vkCmdDrawIndirect( cmdBuffer, mIndirectBuffer,
reinterpret_cast<VkDeviceSize>( cmd->indirectBufferOffset ), cmd->numDraws,
sizeof( CbDrawStrip ) );
@@ -2327,7 +2327,7 @@ namespace Ogre
CbDrawIndexed *drawCmd = reinterpret_cast<CbDrawIndexed *>( mSwIndirectBufferPtr +
(size_t)cmd->indirectBufferOffset );

VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();

for( uint32 i = cmd->numDraws; i--; )
{
@@ -2345,7 +2345,7 @@ namespace Ogre
CbDrawStrip *drawCmd =
reinterpret_cast<CbDrawStrip *>( mSwIndirectBufferPtr + (size_t)cmd->indirectBufferOffset );

VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();

for( uint32 i = cmd->numDraws; i--; )
{
@@ -2359,7 +2359,7 @@ namespace Ogre
{
VulkanVaoManager *vaoManager = static_cast<VulkanVaoManager *>( mVaoManager );

VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();

VkBuffer vulkanVertexBuffers[16];
VkDeviceSize offsets[16];
@@ -2420,7 +2420,7 @@ namespace Ogre
{
flushRootLayout();

VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
vkCmdDrawIndexed( cmdBuffer, cmd->primCount, cmd->instanceCount, cmd->firstVertexIndex,
(int32_t)mCurrentVertexBuffer->vertexStart, cmd->baseInstance );
}
@@ -2429,7 +2429,7 @@ namespace Ogre
{
flushRootLayout();

VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
vkCmdDraw( cmdBuffer, cmd->primCount, cmd->instanceCount, cmd->firstVertexIndex,
cmd->baseInstance );
}
@@ -2443,7 +2443,7 @@ namespace Ogre

const size_t numberOfInstances = op.numberOfInstances;

VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();

// Render to screen!
if( op.useIndexes )
@@ -2692,7 +2692,7 @@ namespace Ogre
#if OGRE_DEBUG_MODE >= OGRE_DEBUG_MEDIUM
if( !CmdBeginDebugUtilsLabelEXT )
return; // VK_EXT_debug_utils not available
VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
VkDebugUtilsLabelEXT markerInfo;
makeVkStruct( markerInfo, VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT );
markerInfo.pLabelName = event.c_str();
@@ -2705,7 +2705,7 @@ namespace Ogre
#if OGRE_DEBUG_MODE >= OGRE_DEBUG_MEDIUM
if( !CmdEndDebugUtilsLabelEXT )
return; // VK_EXT_debug_utils not available
VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
CmdEndDebugUtilsLabelEXT( cmdBuffer );
#endif
}
@@ -2905,11 +2905,11 @@ namespace Ogre
mActiveDevice->mGraphicsQueue.getGraphicsEncoder();

VulkanVaoManager *vaoManager = static_cast<VulkanVaoManager *>( mVaoManager );
vaoManager->bindDrawIdVertexBuffer( mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer );
vaoManager->bindDrawIdVertexBuffer( mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer() );

if( mStencilEnabled )
{
vkCmdSetStencilReference( mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer,
vkCmdSetStencilReference( mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer(),
VK_STENCIL_FACE_FRONT_AND_BACK, mStencilRefValue );
}

@@ -2943,7 +2943,8 @@ namespace Ogre
#endif
}

vkCmdSetViewport( mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer, 0u, numViewports, vkVp );
vkCmdSetViewport( mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer(), 0u, numViewports,
vkVp );
}

if( mVpChanged || numViewports > 1u )
Expand All @@ -2966,7 +2967,7 @@ namespace Ogre
#endif
}

vkCmdSetScissor( mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer, 0u, numViewports,
vkCmdSetScissor( mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer(), 0u, numViewports,
scissorRect );
}

Expand Down Expand Up @@ -3340,10 +3341,11 @@ namespace Ogre
if( dstStage == 0 )
dstStage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;

vkCmdPipelineBarrier(
mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer, srcStage & mActiveDevice->mSupportedStages,
dstStage & mActiveDevice->mSupportedStages, 0, numMemBarriers, &memBarrier, 0u, 0,
static_cast<uint32>( mImageBarriers.size() ), mImageBarriers.begin() );
vkCmdPipelineBarrier( mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer(),
srcStage & mActiveDevice->mSupportedStages,
dstStage & mActiveDevice->mSupportedStages, 0, numMemBarriers, &memBarrier,
0u, 0, static_cast<uint32>( mImageBarriers.size() ),
mImageBarriers.begin() );
mImageBarriers.clear();
}
//-------------------------------------------------------------------------
Expand Down Expand Up @@ -3864,7 +3866,7 @@ namespace Ogre

if( mActiveDevice->mGraphicsQueue.getEncoderState() == VulkanQueue::EncoderGraphicsOpen )
{
vkCmdSetStencilReference( mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer,
vkCmdSetStencilReference( mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer(),
VK_STENCIL_FACE_FRONT_AND_BACK, mStencilRefValue );
}
}
2 changes: 1 addition & 1 deletion RenderSystems/Vulkan/src/OgreVulkanRootLayout.cpp
@@ -611,7 +611,7 @@ namespace Ogre
if( firstDirtySet < mSets.size() )
{
vkCmdBindDescriptorSets(
device->mGraphicsQueue.mCurrentCmdBuffer,
device->mGraphicsQueue.getCurrentCmdBuffer(),
mCompute ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS, mRootLayout,
firstDirtySet, static_cast<uint32_t>( mSets.size() ) - firstDirtySet,
&descSets[firstDirtySet], 0u, 0 );
2 changes: 1 addition & 1 deletion RenderSystems/Vulkan/src/OgreVulkanStagingTexture.cpp
@@ -183,7 +183,7 @@ namespace Ogre
region.imageExtent.height = srcBox.height;
region.imageExtent.depth = srcBox.depth;

vkCmdCopyBufferToImage( device->mGraphicsQueue.mCurrentCmdBuffer, mVboName,
vkCmdCopyBufferToImage( device->mGraphicsQueue.getCurrentCmdBuffer(), mVboName,
dstTextureVulkan->getFinalTextureName(),
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &region );
}