diff --git a/Gems/ROS2/Code/CMakeLists.txt b/Gems/ROS2/Code/CMakeLists.txt
index a0ee8699a..6704f1659 100644
--- a/Gems/ROS2/Code/CMakeLists.txt
+++ b/Gems/ROS2/Code/CMakeLists.txt
@@ -62,6 +62,7 @@ ly_add_target(
             AZ::AzCore
             AZ::AzFramework
             Gem::Atom_RPI.Public
+            Gem::Atom_RHI.Reflect
             Gem::Atom_Feature_Common.Static
             Gem::Atom_Component_DebugCamera.Static
             Gem::Atom_AtomBridge.Static
diff --git a/Gems/ROS2/Code/Source/Camera/CameraSensor.cpp b/Gems/ROS2/Code/Source/Camera/CameraSensor.cpp
index da6ea6710..cce870f0f 100644
--- a/Gems/ROS2/Code/Source/Camera/CameraSensor.cpp
+++ b/Gems/ROS2/Code/Source/Camera/CameraSensor.cpp
@@ -16,8 +16,10 @@
 #include
 #include
 #include
+#include <Atom/RPI.Public/Pass/PassSystemInterface.h>
 #include
 #include
+#include <Atom/RPI.Reflect/Asset/AssetUtils.h>
 #include
 #include
 #include
@@ -128,6 +130,39 @@ namespace ROS2
         m_view->SetViewToClipMatrix(m_cameraSensorDescription.m_viewToClipMatrix);
         m_scene = AZ::RPI::RPISystemInterface::Get()->GetSceneByName(AZ::Name("Main"));

+        // In console mode, since there is no default pipeline, a BRDF texture needs to be manually initialized.
+        AZ::ApplicationTypeQuery appType;
+        AZ::ComponentApplicationBus::Broadcast(&AZ::ComponentApplicationBus::Events::QueryApplicationType, appType);
+        if (appType.IsConsoleMode())
+        {
+            auto brdfpipelineName = AZStd::string::format(
+                "%sBRDF_Pipeline%s",
+                m_cameraSensorDescription.m_cameraName.c_str(),
+                m_entityId.ToString().c_str());
+            AZ::RPI::RenderPipelineDescriptor brdfPipelineDesc;
+            brdfPipelineDesc.m_mainViewTagName = "MainCamera";
+            brdfPipelineDesc.m_name = brdfpipelineName;
+            brdfPipelineDesc.m_rootPassTemplate = "BRDFTexturePipeline";
+            brdfPipelineDesc.m_executeOnce = true;
+
+            const AZStd::shared_ptr<const AZ::RPI::PassTemplate> brdfTextureTemplate =
+                AZ::RPI::PassSystemInterface::Get()->GetPassTemplate(AZ::Name("BRDFTextureTemplate"));
+            AZ::Data::Asset<AZ::RPI::AttachmentImageAsset> brdfImageAsset = AZ::RPI::AssetUtils::LoadAssetById<
+                AZ::RPI::AttachmentImageAsset>(
+                brdfTextureTemplate->m_imageAttachments[0].m_assetRef.m_assetId,
+                AZ::RPI::AssetUtils::TraceLevel::Error);
+            if (brdfImageAsset.IsReady())
+            {
+                m_brdfTexture = AZ::RPI::AttachmentImage::FindOrCreate(brdfImageAsset);
+            }
+
+            if (!m_scene->GetRenderPipeline(AZ::Name(brdfPipelineDesc.m_name)))
+            {
+                AZ::RPI::RenderPipelinePtr brdfTexturePipeline = AZ::RPI::RenderPipeline::CreateRenderPipeline(brdfPipelineDesc);
+                m_scene->AddRenderPipeline(brdfTexturePipeline);
+            }
+        }
+
         auto cameraPipelineTypeName = Internal::PipelineNameFromChannelType(GetChannelType());

         m_pipelineName = AZStd::string::format(
diff --git a/Gems/ROS2/Code/Source/Camera/CameraSensor.h b/Gems/ROS2/Code/Source/Camera/CameraSensor.h
index 43096f9d5..18805d615 100644
--- a/Gems/ROS2/Code/Source/Camera/CameraSensor.h
+++ b/Gems/ROS2/Code/Source/Camera/CameraSensor.h
@@ -15,6 +15,8 @@
 #include
 #include
 #include
+#include <Atom/RPI.Public/Image/AttachmentImage.h>
+#include <AtomCore/Instance/Instance.h>

 namespace ROS2
 {
@@ -55,6 +57,7 @@ namespace ROS2
         AZ::EntityId m_entityId;
         AZ::RPI::RenderPipelinePtr m_pipeline;
         AZStd::string m_pipelineName;
+        AZ::Data::Instance<AZ::RPI::AttachmentImage> m_brdfTexture;

         //! Request a frame from the rendering pipeline
         //! @param cameraPose - current camera pose from which the rendering should take place