From 2ee1c83db2fe07c0ebdc8cab4e666b137c36b722 Mon Sep 17 00:00:00 2001
From: Eugene Golushkov
Date: Fri, 20 Sep 2024 22:18:20 +0200
Subject: [PATCH] [Vk] Wrap mCurrentCmdBuffer in accessor

---
 CMakeLists.txt | 2 +-
 .../Vulkan/include/OgreVulkanQueue.h | 7 ++-
 .../src/OgreVulkanAsyncTextureTicket.cpp | 4 +-
 .../src/OgreVulkanDiscardBufferManager.cpp | 2 +-
 .../src/OgreVulkanHardwareBufferCommon.cpp | 2 +-
 RenderSystems/Vulkan/src/OgreVulkanQueue.cpp | 8 ++--
 .../src/OgreVulkanRenderPassDescriptor.cpp | 4 +-
 .../Vulkan/src/OgreVulkanRenderSystem.cpp | 48 ++++++++++---------
 .../Vulkan/src/OgreVulkanRootLayout.cpp | 2 +-
 .../Vulkan/src/OgreVulkanStagingTexture.cpp | 2 +-
 .../Vulkan/src/OgreVulkanTextureGpu.cpp | 16 +++----
 .../src/OgreVulkanTextureGpuManager.cpp | 10 ++--
 .../src/Vao/OgreVulkanBufferInterface.cpp | 4 +-
 .../src/Vao/OgreVulkanStagingBuffer.cpp | 8 ++--
 .../Vulkan/src/Vao/OgreVulkanVaoManager.cpp | 2 +-
 15 files changed, 64 insertions(+), 57 deletions(-)

diff --git a/CMakeLists.txt b/CMakeLists.txt
index dac665e58c7..dacb8578545 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -559,7 +559,7 @@ set( OGRE_DEBUG_LEVEL_DEBUG 3 CACHE STRING
     3 - High. We perform intensive validation without concerns for performance." )
 set( OGRE_DEBUG_LEVEL_RELEASE 0 CACHE STRING
-    "Specify debug level for Release, RelWithDebInfo and MinSizeRel builds. See OGRE_MAX_DEBUG_LEVEL_DEBUG" )
+    "Specify debug level for Release, RelWithDebInfo and MinSizeRel builds. See OGRE_DEBUG_LEVEL_DEBUG" )
 cmake_dependent_option(OGRE_CONFIG_CONTAINERS_USE_CUSTOM_ALLOCATOR "STL containers in Ogre use the custom allocator" TRUE "" FALSE)

 if( OGRE_CONFIG_ALLOCATOR EQUAL 0 )
diff --git a/RenderSystems/Vulkan/include/OgreVulkanQueue.h b/RenderSystems/Vulkan/include/OgreVulkanQueue.h
index 37b6dad1c14..a7dfa0a45fd 100644
--- a/RenderSystems/Vulkan/include/OgreVulkanQueue.h
+++ b/RenderSystems/Vulkan/include/OgreVulkanQueue.h
@@ -77,7 +77,6 @@ namespace Ogre
         uint32 mQueueIdx;

         VkQueue mQueue;
-        VkCommandBuffer mCurrentCmdBuffer;

         VulkanDevice *mOwnerDevice;

@@ -116,6 +115,7 @@ namespace Ogre
         FastArray mWindowsPendingSwap;

     protected:
+        VkCommandBuffer mCurrentCmdBuffer;
         FastArray mPendingCmds;

         VulkanVaoManager *mVaoManager;
@@ -220,6 +220,11 @@ namespace Ogre
         void endCommandBuffer();

     public:
+        VkCommandBuffer getCurrentCmdBuffer()
+        {
+            OGRE_ASSERT_LOW( mCurrentCmdBuffer );
+            return mCurrentCmdBuffer;
+        }
         EncoderState getEncoderState() const { return mEncoderState; }

         void getGraphicsEncoder();
diff --git a/RenderSystems/Vulkan/src/OgreVulkanAsyncTextureTicket.cpp b/RenderSystems/Vulkan/src/OgreVulkanAsyncTextureTicket.cpp
index f19d91eb87e..7f23f57e19c 100644
--- a/RenderSystems/Vulkan/src/OgreVulkanAsyncTextureTicket.cpp
+++ b/RenderSystems/Vulkan/src/OgreVulkanAsyncTextureTicket.cpp
@@ -132,7 +132,7 @@ namespace Ogre
             memBarrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;

             // GPU must stop using this buffer before we can write into it
-            vkCmdPipelineBarrier( mQueue->mCurrentCmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT,
+            vkCmdPipelineBarrier( mQueue->getCurrentCmdBuffer(), VK_PIPELINE_STAGE_TRANSFER_BIT,
                                   VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 1u, &memBarrier, 0u, 0, 0u, 0 );
         }

@@ -168,7 +168,7 @@ namespace Ogre
         region.imageExtent.height = srcTextureBox.height;
         region.imageExtent.depth = srcTextureBox.depth;

-        vkCmdCopyImageToBuffer( mQueue->mCurrentCmdBuffer, srcTextureVk->getFinalTextureName(),
+        vkCmdCopyImageToBuffer( mQueue->getCurrentCmdBuffer(), srcTextureVk->getFinalTextureName(),
                                 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, mVboName.mVboName, 1u, &region );

         if( accurateTracking )
diff --git a/RenderSystems/Vulkan/src/OgreVulkanDiscardBufferManager.cpp b/RenderSystems/Vulkan/src/OgreVulkanDiscardBufferManager.cpp
index 78a35c689fc..641757cf33f 100644
--- a/RenderSystems/Vulkan/src/OgreVulkanDiscardBufferManager.cpp
+++ b/RenderSystems/Vulkan/src/OgreVulkanDiscardBufferManager.cpp
@@ -96,7 +96,7 @@ namespace Ogre
             region.srcOffset = ( *itor )->getBlockStart() + oldBuffer.mInternalBufferStart;
             region.dstOffset = ( *itor )->getBlockStart() + mBuffer.mInternalBufferStart;
             region.size = ( *itor )->getBlockSize();
-            vkCmdCopyBuffer( mDevice->mGraphicsQueue.mCurrentCmdBuffer, oldBuffer.mVboName,
+            vkCmdCopyBuffer( mDevice->mGraphicsQueue.getCurrentCmdBuffer(), oldBuffer.mVboName,
                              mBuffer.mVboName, 1u, &region );
             ( *itor )->mLastFrameUsed = currentFrame;
         }
diff --git a/RenderSystems/Vulkan/src/OgreVulkanHardwareBufferCommon.cpp b/RenderSystems/Vulkan/src/OgreVulkanHardwareBufferCommon.cpp
index 67bca529ccc..1224070574e 100644
--- a/RenderSystems/Vulkan/src/OgreVulkanHardwareBufferCommon.cpp
+++ b/RenderSystems/Vulkan/src/OgreVulkanHardwareBufferCommon.cpp
@@ -297,7 +297,7 @@ namespace Ogre
         region.srcOffset = srcOffset + srcOffsetStart;
         region.dstOffset = dstOffset + dstOffsetStart;
         region.size = alignToNextMultiple( length, 4u );
-        vkCmdCopyBuffer( mDevice->mGraphicsQueue.mCurrentCmdBuffer, srcBuf, dstBuf, 1u,
+        vkCmdCopyBuffer( mDevice->mGraphicsQueue.getCurrentCmdBuffer(), srcBuf, dstBuf, 1u,
                          &region );

         if( this->mDiscardBuffer )
diff --git a/RenderSystems/Vulkan/src/OgreVulkanQueue.cpp b/RenderSystems/Vulkan/src/OgreVulkanQueue.cpp
index a9c077f110f..bb15e5f927c 100644
--- a/RenderSystems/Vulkan/src/OgreVulkanQueue.cpp
+++ b/RenderSystems/Vulkan/src/OgreVulkanQueue.cpp
@@ -635,7 +635,7 @@ namespace Ogre

         // Wait until earlier render, compute and transfers are done so we can copy what
         // they wrote (unless we're only here for a texture transition)
-        vkCmdPipelineBarrier( mCurrentCmdBuffer, srcStage & mOwnerDevice->mSupportedStages,
+        vkCmdPipelineBarrier( getCurrentCmdBuffer(), srcStage & mOwnerDevice->mSupportedStages,
                               VK_PIPELINE_STAGE_TRANSFER_BIT, 0, numMemBarriers, &memBarrier, 0u, 0,
                               numImageMemBarriers, &imageMemBarrier );
     }
@@ -839,7 +839,7 @@ namespace Ogre

         // Wait until earlier render, compute and transfers are done so we can copy what
         // they wrote (unless we're only here for a texture transition)
-        vkCmdPipelineBarrier( mCurrentCmdBuffer, srcStage & mOwnerDevice->mSupportedStages,
+        vkCmdPipelineBarrier( getCurrentCmdBuffer(), srcStage & mOwnerDevice->mSupportedStages,
                               VK_PIPELINE_STAGE_TRANSFER_BIT, 0, numMemBarriers, &memBarrier, 0u, 0,
                               numImageMemBarriers, &imageMemBarrier );
     }
@@ -937,7 +937,7 @@ namespace Ogre
             numMemBarriers = 1u;

             // GPU must stop using this buffer before we can write into it
-            vkCmdPipelineBarrier( mCurrentCmdBuffer, VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
+            vkCmdPipelineBarrier( getCurrentCmdBuffer(), VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
                                   VK_PIPELINE_STAGE_TRANSFER_BIT, 0, numMemBarriers, &memBarrier, 0u, 0, 0u, 0 );
         }
@@ -997,7 +997,7 @@ namespace Ogre

         // Wait until earlier render, compute and transfers are done
         // Block render, compute and transfers until we're done
-        vkCmdPipelineBarrier( mCurrentCmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT,
+        vkCmdPipelineBarrier( getCurrentCmdBuffer(), VK_PIPELINE_STAGE_TRANSFER_BIT,
                               dstStage & mOwnerDevice->mSupportedStages, 0, numMemBarriers, &memBarrier,
                               0u, 0, static_cast( mImageMemBarriers.size() ), mImageMemBarriers.begin() );
diff --git a/RenderSystems/Vulkan/src/OgreVulkanRenderPassDescriptor.cpp b/RenderSystems/Vulkan/src/OgreVulkanRenderPassDescriptor.cpp
index 90df712f152..e1678014b0f 100644
--- a/RenderSystems/Vulkan/src/OgreVulkanRenderPassDescriptor.cpp
+++ b/RenderSystems/Vulkan/src/OgreVulkanRenderPassDescriptor.cpp
@@ -904,7 +904,7 @@ namespace Ogre
         if( mInformationOnly )
             return;

-        VkCommandBuffer cmdBuffer = mQueue->mCurrentCmdBuffer;
+        VkCommandBuffer cmdBuffer = mQueue->getCurrentCmdBuffer();

         const VulkanFrameBufferDescValue &fboDesc = mSharedFboItor->second;

@@ -967,7 +967,7 @@ namespace Ogre
         if( mQueue->getEncoderState() != VulkanQueue::EncoderGraphicsOpen )
             return;

-        vkCmdEndRenderPass( mQueue->mCurrentCmdBuffer );
+        vkCmdEndRenderPass( mQueue->getCurrentCmdBuffer() );

         if( isInterruptingRendering )
         {
diff --git a/RenderSystems/Vulkan/src/OgreVulkanRenderSystem.cpp b/RenderSystems/Vulkan/src/OgreVulkanRenderSystem.cpp
index 768bbe42cdd..9e0f038a1e8 100644
--- a/RenderSystems/Vulkan/src/OgreVulkanRenderSystem.cpp
+++ b/RenderSystems/Vulkan/src/OgreVulkanRenderSystem.cpp
@@ -2182,7 +2182,7 @@ namespace Ogre
         if( mPso )
             oldRootLayout = reinterpret_cast( mPso->rsData )->rootLayout;

-        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
+        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
         OGRE_ASSERT_LOW( pso->rsData );
         VulkanHlmsPso *vulkanPso = reinterpret_cast( pso->rsData );
         vkCmdBindPipeline( cmdBuffer, VK_PIPELINE_BIND_POINT_GRAPHICS, vulkanPso->pso );
@@ -2212,7 +2212,7 @@ namespace Ogre
         {
             OGRE_ASSERT_LOW( pso->rsData );
             vulkanPso = reinterpret_cast( pso->rsData );
-            VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
+            VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
             vkCmdBindPipeline( cmdBuffer, VK_PIPELINE_BIND_POINT_COMPUTE, vulkanPso->pso );

             if( vulkanPso->rootLayout != oldRootLayout )
@@ -2232,7 +2232,7 @@ namespace Ogre
     {
         flushRootLayoutCS();

-        vkCmdDispatch( mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer, pso.mNumThreadGroups[0],
+        vkCmdDispatch( mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer(), pso.mNumThreadGroups[0],
                        pso.mNumThreadGroups[1], pso.mNumThreadGroups[2] );
     }
     //-------------------------------------------------------------------------
@@ -2258,7 +2258,7 @@ namespace Ogre

         OGRE_ASSERT_LOW( numVertexBuffers < 15u );

-        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
+        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
         if( numVertexBuffers > 0u )
         {
             vkCmdBindVertexBuffers( cmdBuffer, 0, static_cast( numVertexBuffers ),
@@ -2304,7 +2304,7 @@ namespace Ogre
     {
         flushRootLayout();

-        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
+        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
         vkCmdDrawIndexedIndirect( cmdBuffer, mIndirectBuffer,
                                   reinterpret_cast( cmd->indirectBufferOffset ),
                                   cmd->numDraws, sizeof( CbDrawIndexed ) );
@@ -2314,7 +2314,7 @@ namespace Ogre
     {
         flushRootLayout();

-        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
+        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
         vkCmdDrawIndirect( cmdBuffer, mIndirectBuffer,
                            reinterpret_cast( cmd->indirectBufferOffset ),
                            cmd->numDraws, sizeof( CbDrawStrip ) );
@@ -2327,7 +2327,7 @@ namespace Ogre
         CbDrawIndexed *drawCmd =
             reinterpret_cast( mSwIndirectBufferPtr + (size_t)cmd->indirectBufferOffset );

-        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
+        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();

         for( uint32 i = cmd->numDraws; i--; )
         {
@@ -2345,7 +2345,7 @@ namespace Ogre
         CbDrawStrip *drawCmd =
             reinterpret_cast( mSwIndirectBufferPtr + (size_t)cmd->indirectBufferOffset );

-        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
+        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();

         for( uint32 i = cmd->numDraws; i--; )
         {
@@ -2359,7 +2359,7 @@ namespace Ogre
     {
         VulkanVaoManager *vaoManager = static_cast( mVaoManager );

-        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
+        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();

         VkBuffer vulkanVertexBuffers[16];
         VkDeviceSize offsets[16];
@@ -2420,7 +2420,7 @@ namespace Ogre
     {
         flushRootLayout();

-        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
+        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
         vkCmdDrawIndexed( cmdBuffer, cmd->primCount, cmd->instanceCount, cmd->firstVertexIndex,
                           (int32_t)mCurrentVertexBuffer->vertexStart, cmd->baseInstance );
     }
@@ -2429,7 +2429,7 @@ namespace Ogre
     {
         flushRootLayout();

-        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
+        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
         vkCmdDraw( cmdBuffer, cmd->primCount, cmd->instanceCount, cmd->firstVertexIndex,
                    cmd->baseInstance );
     }
@@ -2443,7 +2443,7 @@ namespace Ogre

         const size_t numberOfInstances = op.numberOfInstances;

-        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
+        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();

         // Render to screen!
         if( op.useIndexes )
@@ -2692,7 +2692,7 @@ namespace Ogre
 #if OGRE_DEBUG_MODE >= OGRE_DEBUG_MEDIUM
         if( !CmdBeginDebugUtilsLabelEXT )
             return;  // VK_EXT_debug_utils not available
-        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
+        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
         VkDebugUtilsLabelEXT markerInfo;
         makeVkStruct( markerInfo, VK_STRUCTURE_TYPE_DEBUG_UTILS_LABEL_EXT );
         markerInfo.pLabelName = event.c_str();
@@ -2705,7 +2705,7 @@ namespace Ogre
 #if OGRE_DEBUG_MODE >= OGRE_DEBUG_MEDIUM
         if( !CmdEndDebugUtilsLabelEXT )
             return;  // VK_EXT_debug_utils not available
-        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer;
+        VkCommandBuffer cmdBuffer = mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer();
         CmdEndDebugUtilsLabelEXT( cmdBuffer );
 #endif
     }
@@ -2905,11 +2905,11 @@ namespace Ogre
         mActiveDevice->mGraphicsQueue.getGraphicsEncoder();

         VulkanVaoManager *vaoManager = static_cast( mVaoManager );
-        vaoManager->bindDrawIdVertexBuffer( mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer );
+        vaoManager->bindDrawIdVertexBuffer( mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer() );

         if( mStencilEnabled )
         {
-            vkCmdSetStencilReference( mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer,
+            vkCmdSetStencilReference( mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer(),
                                       VK_STENCIL_FACE_FRONT_AND_BACK, mStencilRefValue );
         }
@@ -2943,7 +2943,8 @@ namespace Ogre
 #endif
         }

-        vkCmdSetViewport( mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer, 0u, numViewports, vkVp );
+        vkCmdSetViewport( mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer(), 0u, numViewports,
+                          vkVp );
     }

     if( mVpChanged || numViewports > 1u )
@@ -2966,7 +2967,7 @@ namespace Ogre
 #endif
         }

-        vkCmdSetScissor( mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer, 0u, numViewports,
+        vkCmdSetScissor( mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer(), 0u, numViewports,
                          scissorRect );
     }
@@ -3340,10 +3341,11 @@ namespace Ogre
         if( dstStage == 0 )
             dstStage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;

-        vkCmdPipelineBarrier(
-            mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer, srcStage & mActiveDevice->mSupportedStages,
-            dstStage & mActiveDevice->mSupportedStages, 0, numMemBarriers, &memBarrier, 0u, 0,
-            static_cast( mImageBarriers.size() ), mImageBarriers.begin() );
+        vkCmdPipelineBarrier( mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer(),
+                              srcStage & mActiveDevice->mSupportedStages,
+                              dstStage & mActiveDevice->mSupportedStages, 0, numMemBarriers, &memBarrier,
+                              0u, 0, static_cast( mImageBarriers.size() ),
+                              mImageBarriers.begin() );

         mImageBarriers.clear();
     }
     //-------------------------------------------------------------------------
@@ -3864,7 +3866,7 @@ namespace Ogre
         if( mActiveDevice->mGraphicsQueue.getEncoderState() == VulkanQueue::EncoderGraphicsOpen )
         {
-            vkCmdSetStencilReference( mActiveDevice->mGraphicsQueue.mCurrentCmdBuffer,
+            vkCmdSetStencilReference( mActiveDevice->mGraphicsQueue.getCurrentCmdBuffer(),
                                       VK_STENCIL_FACE_FRONT_AND_BACK, mStencilRefValue );
         }
     }
diff --git a/RenderSystems/Vulkan/src/OgreVulkanRootLayout.cpp b/RenderSystems/Vulkan/src/OgreVulkanRootLayout.cpp
index bb68f345d95..162eb433bc7 100644
--- a/RenderSystems/Vulkan/src/OgreVulkanRootLayout.cpp
+++ b/RenderSystems/Vulkan/src/OgreVulkanRootLayout.cpp
@@ -611,7 +611,7 @@ namespace Ogre
         if( firstDirtySet < mSets.size() )
         {
             vkCmdBindDescriptorSets(
-                device->mGraphicsQueue.mCurrentCmdBuffer,
+                device->mGraphicsQueue.getCurrentCmdBuffer(),
                 mCompute ? VK_PIPELINE_BIND_POINT_COMPUTE : VK_PIPELINE_BIND_POINT_GRAPHICS,
                 mRootLayout, firstDirtySet, static_cast( mSets.size() ) - firstDirtySet,
                 &descSets[firstDirtySet], 0u, 0 );
diff --git a/RenderSystems/Vulkan/src/OgreVulkanStagingTexture.cpp b/RenderSystems/Vulkan/src/OgreVulkanStagingTexture.cpp
index f3050b87630..134e7ed3219 100644
--- a/RenderSystems/Vulkan/src/OgreVulkanStagingTexture.cpp
+++ b/RenderSystems/Vulkan/src/OgreVulkanStagingTexture.cpp
@@ -183,7 +183,7 @@ namespace Ogre
         region.imageExtent.height = srcBox.height;
         region.imageExtent.depth = srcBox.depth;

-        vkCmdCopyBufferToImage( device->mGraphicsQueue.mCurrentCmdBuffer, mVboName,
+        vkCmdCopyBufferToImage( device->mGraphicsQueue.getCurrentCmdBuffer(), mVboName,
                                 dstTextureVulkan->getFinalTextureName(),
                                 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &region );
     }
diff --git a/RenderSystems/Vulkan/src/OgreVulkanTextureGpu.cpp b/RenderSystems/Vulkan/src/OgreVulkanTextureGpu.cpp
index 48976353cfd..cc4626c7bbb 100644
--- a/RenderSystems/Vulkan/src/OgreVulkanTextureGpu.cpp
+++ b/RenderSystems/Vulkan/src/OgreVulkanTextureGpu.cpp
@@ -185,7 +185,7 @@ namespace Ogre
             imageBarrier.newLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

             vkCmdPipelineBarrier(
-                device->mGraphicsQueue.mCurrentCmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+                device->mGraphicsQueue.getCurrentCmdBuffer(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                 VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0u, 0, 0u, 0, 1u, &imageBarrier );

             mCurrLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
@@ -223,7 +223,7 @@ namespace Ogre
             imageBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
             imageBarrier.newLayout = mCurrLayout;
             vkCmdPipelineBarrier(
-                device->mGraphicsQueue.mCurrentCmdBuffer, VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
+                device->mGraphicsQueue.getCurrentCmdBuffer(), VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                 VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0, 0u, 0, 0u, 0, 1u, &imageBarrier );
         }
     }
@@ -525,7 +525,7 @@ namespace Ogre
         if( dstTexture->isMultisample() && !dstTexture->hasMsaaExplicitResolves() )
             dstTextureName = dstTexture->mMsaaFramebufferName;

-        vkCmdCopyImage( device->mGraphicsQueue.mCurrentCmdBuffer, srcTextureName, mCurrLayout,
+        vkCmdCopyImage( device->mGraphicsQueue.getCurrentCmdBuffer(), srcTextureName, mCurrLayout,
                         dstTextureName, dstTexture->mCurrLayout, 1u, &region );

         if( dstTexture->isMultisample() && !dstTexture->hasMsaaExplicitResolves() &&
@@ -542,7 +542,7 @@ namespace Ogre
             resolve.extent.height = getInternalHeight();
             resolve.extent.depth = getDepth();

-            vkCmdResolveImage( device->mGraphicsQueue.mCurrentCmdBuffer,
+            vkCmdResolveImage( device->mGraphicsQueue.getCurrentCmdBuffer(),
                                dstTexture->mMsaaFramebufferName, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                dstTexture->mFinalTextureName, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u,
                                &resolve );
@@ -602,7 +602,7 @@ namespace Ogre
             imageBarrier[1].newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
             imageBarrier[1].srcAccessMask = 0;
             imageBarrier[1].dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
-            vkCmdPipelineBarrier( device->mGraphicsQueue.mCurrentCmdBuffer,
+            vkCmdPipelineBarrier( device->mGraphicsQueue.getCurrentCmdBuffer(),
                                   VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
                                   0u, 0, 0u, 0, 1u, &imageBarrier[1] );
         }
@@ -641,7 +641,7 @@ namespace Ogre
             region.dstOffsets[1].y = static_cast( std::max( internalHeight >> i, 1u ) );
             region.dstOffsets[1].z = static_cast( std::max( getDepth() >> i, 1u ) );

-            vkCmdBlitImage( device->mGraphicsQueue.mCurrentCmdBuffer, mFinalTextureName, mCurrLayout,
+            vkCmdBlitImage( device->mGraphicsQueue.getCurrentCmdBuffer(), mFinalTextureName, mCurrLayout,
                             mFinalTextureName, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u, &region,
                             VK_FILTER_LINEAR );
@@ -656,7 +656,7 @@ namespace Ogre
             // Wait for vkCmdBlitImage on mip i to finish before advancing to mip i+1
             // Also transition src mip 'i' to TRANSFER_SRC_OPTIMAL
             // Also transition src mip 'i+1' to TRANSFER_DST_OPTIMAL
-            vkCmdPipelineBarrier( device->mGraphicsQueue.mCurrentCmdBuffer,
+            vkCmdPipelineBarrier( device->mGraphicsQueue.getCurrentCmdBuffer(),
                                   VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
                                   0u, 0, 0u, 0, numBarriers, imageBarrier );
         }
@@ -1008,7 +1008,7 @@ namespace Ogre
             imageBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
             imageBarrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
             imageBarrier.image = mMsaaFramebufferName;
-            vkCmdPipelineBarrier( device->mGraphicsQueue.mCurrentCmdBuffer,
+            vkCmdPipelineBarrier( device->mGraphicsQueue.getCurrentCmdBuffer(),
                                   VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
                                   PixelFormatGpuUtils::isDepth( finalPixelFormat )
                                       ? ( VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
diff --git a/RenderSystems/Vulkan/src/OgreVulkanTextureGpuManager.cpp b/RenderSystems/Vulkan/src/OgreVulkanTextureGpuManager.cpp
index 8ca4f4acafd..58180a5b99d 100644
--- a/RenderSystems/Vulkan/src/OgreVulkanTextureGpuManager.cpp
+++ b/RenderSystems/Vulkan/src/OgreVulkanTextureGpuManager.cpp
@@ -190,7 +190,7 @@ namespace Ogre
             ++barrierCount;
         }

-        vkCmdPipelineBarrier( device->mGraphicsQueue.mCurrentCmdBuffer,
+        vkCmdPipelineBarrier( device->mGraphicsQueue.getCurrentCmdBuffer(),
                               VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT, 0,
                               0u, 0, 0u, 0, static_cast( barrierCount ), imageMemBarrier );
@@ -229,7 +229,7 @@ namespace Ogre
             else
                 region.imageExtent.height = 4u;

-            vkCmdCopyBufferToImage( device->mGraphicsQueue.mCurrentCmdBuffer, stagingBuffVboName,
+            vkCmdCopyBufferToImage( device->mGraphicsQueue.getCurrentCmdBuffer(), stagingBuffVboName,
                                     mBlankTexture[i].vkImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1u,
                                     &region );
         }
@@ -250,9 +250,9 @@ namespace Ogre
             ++barrierCount;
         }

-        vkCmdPipelineBarrier( device->mGraphicsQueue.mCurrentCmdBuffer, VK_PIPELINE_STAGE_TRANSFER_BIT,
-                              VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, 0, 0u, 0, 0u, 0,
-                              static_cast( barrierCount ), imageMemBarrier );
+        vkCmdPipelineBarrier( device->mGraphicsQueue.getCurrentCmdBuffer(),
+                              VK_PIPELINE_STAGE_TRANSFER_BIT, VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT, 0, 0u,
+                              0, 0u, 0, static_cast( barrierCount ), imageMemBarrier );

         mBlankTexture[TextureTypes::Unknown] = mBlankTexture[TextureTypes::Type2D];

         if( c_bSkipAliasable )
diff --git a/RenderSystems/Vulkan/src/Vao/OgreVulkanBufferInterface.cpp b/RenderSystems/Vulkan/src/Vao/OgreVulkanBufferInterface.cpp
index 21f0a82198c..a223a7725eb 100644
--- a/RenderSystems/Vulkan/src/Vao/OgreVulkanBufferInterface.cpp
+++ b/RenderSystems/Vulkan/src/Vao/OgreVulkanBufferInterface.cpp
@@ -203,7 +203,7 @@ namespace Ogre
         region.srcOffset = srcOffsetBytes;
         region.dstOffset = dstOffsetBytes;
         region.size = sizeBytes;
-        vkCmdCopyBuffer( device->mGraphicsQueue.mCurrentCmdBuffer, mVboName, dstBufferVk->getVboName(),
-                         1u, &region );
+        vkCmdCopyBuffer( device->mGraphicsQueue.getCurrentCmdBuffer(), mVboName,
+                         dstBufferVk->getVboName(), 1u, &region );
     }
 }  // namespace Ogre
diff --git a/RenderSystems/Vulkan/src/Vao/OgreVulkanStagingBuffer.cpp b/RenderSystems/Vulkan/src/Vao/OgreVulkanStagingBuffer.cpp
index a6b9d4ff2c3..84c058ec14e 100644
--- a/RenderSystems/Vulkan/src/Vao/OgreVulkanStagingBuffer.cpp
+++ b/RenderSystems/Vulkan/src/Vao/OgreVulkanStagingBuffer.cpp
@@ -221,7 +221,7 @@ namespace Ogre
         VulkanVaoManager *vaoManager = static_cast( mVaoManager );
         VulkanDevice *device = vaoManager->getDevice();

-        VkCommandBuffer cmdBuffer = device->mGraphicsQueue.mCurrentCmdBuffer;
+        VkCommandBuffer cmdBuffer = device->mGraphicsQueue.getCurrentCmdBuffer();

         OGRE_ASSERT_MEDIUM( mUnmapTicket != std::numeric_limits::max() &&
                             "VulkanStagingBuffer already unmapped!" );
@@ -313,7 +313,7 @@ namespace Ogre
         OGRE_ASSERT_HIGH( !Workarounds::mPowerVRAlignment ||
                           ( region.dstOffset % Workarounds::mPowerVRAlignment ) == 0u );
 #endif
-        vkCmdCopyBuffer( device->mGraphicsQueue.mCurrentCmdBuffer, bufferInterface->getVboName(),
+        vkCmdCopyBuffer( device->mGraphicsQueue.getCurrentCmdBuffer(), bufferInterface->getVboName(),
                          mVboName, 1u, &region );

         return freeRegionOffset;
@@ -354,7 +354,7 @@ namespace Ogre
         OGRE_ASSERT_HIGH( !Workarounds::mPowerVRAlignment ||
                           ( region.dstOffset % Workarounds::mPowerVRAlignment ) == 0u );
 #endif
-        vkCmdCopyBuffer( device->mGraphicsQueue.mCurrentCmdBuffer, mVboName, dstBuffer, 1u, &region );
+        vkCmdCopyBuffer( device->mGraphicsQueue.getCurrentCmdBuffer(), mVboName, dstBuffer, 1u, &region );

         if( mUploadOnly )
         {
@@ -417,7 +417,7 @@ namespace Ogre
         OGRE_ASSERT_HIGH( !Workarounds::mPowerVRAlignment ||
                           ( region.dstOffset % Workarounds::mPowerVRAlignment ) == 0u );
 #endif
-        vkCmdCopyBuffer( device->mGraphicsQueue.mCurrentCmdBuffer, srcBuffer, mVboName, 1u, &region );
+        vkCmdCopyBuffer( device->mGraphicsQueue.getCurrentCmdBuffer(), srcBuffer, mVboName, 1u, &region );

         return freeRegionOffset + extraOffset;
     }
diff --git a/RenderSystems/Vulkan/src/Vao/OgreVulkanVaoManager.cpp b/RenderSystems/Vulkan/src/Vao/OgreVulkanVaoManager.cpp
index 6758d819f64..d10472aa809 100644
--- a/RenderSystems/Vulkan/src/Vao/OgreVulkanVaoManager.cpp
+++ b/RenderSystems/Vulkan/src/Vao/OgreVulkanVaoManager.cpp
@@ -525,7 +525,7 @@ namespace Ogre
             VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT |
             VK_ACCESS_TRANSFER_WRITE_BIT /*| VK_ACCESS_HOST_READ_BIT | VK_ACCESS_HOST_WRITE_BIT*/;

-        vkCmdPipelineBarrier( mDevice->mGraphicsQueue.mCurrentCmdBuffer,
+        vkCmdPipelineBarrier( mDevice->mGraphicsQueue.getCurrentCmdBuffer(),
                               VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT, 0,
                               1u, &memBarrier, 0u, 0, 0u, 0 );
     }
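
For context, below is a small self-contained sketch (not part of the patch) of the accessor pattern this change introduces: the command-buffer handle becomes a protected member, and every external user fetches it through an accessor that asserts a command buffer is actually recording. All names other than getCurrentCmdBuffer()/mCurrentCmdBuffer are hypothetical, and plain assert() stands in for OGRE_ASSERT_LOW.

    // Standalone illustration of the accessor pattern; hypothetical Queue class,
    // assert() in place of OGRE_ASSERT_LOW.
    #include <cassert>

    typedef struct VkCommandBuffer_T *VkCommandBuffer;  // opaque handle, as declared in vulkan.h

    class Queue
    {
    protected:
        VkCommandBuffer mCurrentCmdBuffer = nullptr;  // no longer reachable from outside the class

    public:
        VkCommandBuffer getCurrentCmdBuffer()
        {
            // Catches "no command buffer is currently recording" bugs at the call site.
            assert( mCurrentCmdBuffer );
            return mCurrentCmdBuffer;
        }
    };

    // A call site then reads:
    //     vkCmdPipelineBarrier( queue.getCurrentCmdBuffer(), ... );
    // instead of touching queue.mCurrentCmdBuffer directly.

Centralising the check in the accessor means the "a command buffer exists" assertion travels with every access, rather than being repeated (or forgotten) at each of the many call sites touched by this diff.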