3
0

AcesDisplayMapperFeatureProcessor.cpp 19 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464
  1. /*
  2. * Copyright (c) Contributors to the Open 3D Engine Project.
  3. * For complete copyright and license terms please see the LICENSE at the root of this distribution.
  4. *
  5. * SPDX-License-Identifier: Apache-2.0 OR MIT
  6. *
  7. */
#include <Atom/Feature/ACES/AcesDisplayMapperFeatureProcessor.h>

#include <ACES/Aces.h>
#include <Atom/Feature/LookupTable/LookupTableAsset.h>
#include <Atom/RHI/Factory.h>
#include <Atom/RHI/RHISystemInterface.h>
#include <Atom/RPI.Public/Image/ImageSystemInterface.h>
#include <Atom/RPI.Public/Image/StreamingImagePool.h>
#include <Atom/RPI.Reflect/Asset/AssetUtils.h>
#include <AzCore/Debug/Trace.h>

#include <cstring>
  17. namespace
  18. {
  19. static const AZ::RHI::Format LutFormat = AZ::RHI::Format::R16G16B16A16_FLOAT;
  20. uint16_t ConvertFloatToHalf(const float Value)
  21. {
  22. uint32_t result;
  23. uint32_t uiValue = ((uint32_t*)(&Value))[0];
  24. uint32_t sign = (uiValue & 0x80000000U) >> 16U; // Sign shifted two bytes right for combining with return
  25. uiValue = uiValue & 0x7FFFFFFFU; // Hack off the sign
  26. if (uiValue > 0x47FFEFFFU)
  27. {
  28. // The number is too large to be represented as a half. Saturate to infinity.
  29. result = 0x7FFFU;
  30. }
  31. else
  32. {
  33. if (uiValue < 0x38800000U)
  34. {
  35. // The number is too small to be represented as a normalized half.
  36. // Convert it to a denormalized value.
  37. uint32_t shift = 113U - (uiValue >> 23U);
  38. uiValue = (0x800000U | (uiValue & 0x7FFFFFU)) >> shift;
  39. }
  40. else
  41. {
  42. // Rebias the exponent to represent the value as a normalized half.
  43. uiValue += 0xC8000000U;
  44. }
  45. result = ((uiValue + 0x0FFFU + ((uiValue >> 13U) & 1U)) >> 13U) & 0x7FFFU;
  46. }
  47. // Add back sign and return
  48. return static_cast<uint16_t>(result | sign);
  49. }
  50. }
  51. namespace AZ::Render
  52. {
  53. void AcesDisplayMapperFeatureProcessor::Reflect(ReflectContext* context)
  54. {
  55. if (auto* serializeContext = azrtti_cast<SerializeContext*>(context))
  56. {
  57. serializeContext
  58. ->Class<AcesDisplayMapperFeatureProcessor, FeatureProcessor>()
  59. ->Version(0);
  60. }
  61. }
    //! Activates the feature processor by resetting the display mapper
    //! configuration to the defaults (ACES, LDR grading LUT disabled).
    void AcesDisplayMapperFeatureProcessor::Activate()
    {
        GetDefaultDisplayMapperConfiguration(m_displayMapperConfiguration);
    }
    //! Deactivates the feature processor, releasing every LUT image it owns.
    void AcesDisplayMapperFeatureProcessor::Deactivate()
    {
        m_ownedLuts.clear();
    }
    //! Per-frame simulation tick; display mapping needs no simulation work,
    //! so this only emits a profiling scope.
    void AcesDisplayMapperFeatureProcessor::Simulate(const FeatureProcessor::SimulatePacket& packet)
    {
        AZ_PROFILE_FUNCTION(AzRender);
        AZ_UNUSED(packet);
    }
    //! Per-frame render tick; intentionally empty — the display mapper render
    //! passes consume the LUTs and parameters, so no work is required here.
    void AcesDisplayMapperFeatureProcessor::Render([[maybe_unused]] const FeatureProcessor::RenderPacket& packet)
    {
    }
  78. void AcesDisplayMapperFeatureProcessor::ApplyLdrOdtParameters(DisplayMapperParameters* displayMapperParameters)
  79. {
  80. AZ_Assert(displayMapperParameters != nullptr, "The pOutParameters must not to be null pointer.");
  81. if (displayMapperParameters == nullptr)
  82. {
  83. return;
  84. }
  85. // These values in the ODT parameter are taken from the reference ACES transform.
  86. //
  87. // The original ACES references.
  88. // Common:
  89. // https://github.com/ampas/aces-dev/blob/master/transforms/ctl/lib/ACESlib.ODT_Common.ctl
  90. // For sRGB:
  91. // https://github.com/ampas/aces-dev/tree/master/transforms/ctl/odt/sRGB
  92. displayMapperParameters->m_cinemaLimits[0] = 0.02f;
  93. displayMapperParameters->m_cinemaLimits[1] = 48.0f;
  94. displayMapperParameters->m_acesSplineParams = GetAcesODTParameters(OutputDeviceTransformType_48Nits);
  95. displayMapperParameters->m_OutputDisplayTransformFlags = AlterSurround | ApplyDesaturation | ApplyCATD60toD65;
  96. displayMapperParameters->m_OutputDisplayTransformMode = Srgb;
  97. ColorConvertionMatrixType colorMatrixType = XYZ_To_Rec709;
  98. switch (displayMapperParameters->m_OutputDisplayTransformMode)
  99. {
  100. case Srgb:
  101. colorMatrixType = XYZ_To_Rec709;
  102. break;
  103. case PerceptualQuantizer:
  104. case Ldr:
  105. colorMatrixType = XYZ_To_Bt2020;
  106. break;
  107. default:
  108. break;
  109. }
  110. displayMapperParameters->m_XYZtoDisplayPrimaries = GetColorConvertionMatrix(colorMatrixType);
  111. displayMapperParameters->m_surroundGamma = 0.9811f;
  112. displayMapperParameters->m_gamma = 2.2f;
  113. }
  114. void AcesDisplayMapperFeatureProcessor::ApplyHdrOdtParameters(DisplayMapperParameters* displayMapperParameters, const OutputDeviceTransformType& odtType)
  115. {
  116. AZ_Assert(displayMapperParameters != nullptr, "The pOutParameters must not to be null pointer.");
  117. if (displayMapperParameters == nullptr)
  118. {
  119. return;
  120. }
  121. // Dynamic range limit values taken from NVIDIA HDR sample.
  122. // These values represent and low and high end of the dynamic range in terms of stops from middle grey (0.18)
  123. float lowerDynamicRangeInStops = -12.f;
  124. float higherDynamicRangeInStops = 10.f;
  125. const float MIDDLE_GREY = 0.18f;
  126. switch (odtType)
  127. {
  128. case OutputDeviceTransformType_1000Nits:
  129. higherDynamicRangeInStops = 10.f;
  130. break;
  131. case OutputDeviceTransformType_2000Nits:
  132. higherDynamicRangeInStops = 11.f;
  133. break;
  134. case OutputDeviceTransformType_4000Nits:
  135. higherDynamicRangeInStops = 12.f;
  136. break;
  137. default:
  138. AZ_Assert(false, "Invalid output device transform type.");
  139. break;
  140. }
  141. displayMapperParameters->m_cinemaLimits[0] = MIDDLE_GREY * exp2(lowerDynamicRangeInStops);
  142. displayMapperParameters->m_cinemaLimits[1] = MIDDLE_GREY * exp2(higherDynamicRangeInStops);
  143. displayMapperParameters->m_acesSplineParams = GetAcesODTParameters(odtType);
  144. displayMapperParameters->m_OutputDisplayTransformFlags = AlterSurround | ApplyDesaturation | ApplyCATD60toD65;
  145. displayMapperParameters->m_OutputDisplayTransformMode = PerceptualQuantizer;
  146. ColorConvertionMatrixType colorMatrixType = XYZ_To_Bt2020;
  147. displayMapperParameters->m_XYZtoDisplayPrimaries = GetColorConvertionMatrix(colorMatrixType);
  148. // Surround gamma value is from the dim surround gamma from the ACES reference transforms.
  149. // https://github.com/ampas/aces-dev/blob/master/transforms/ctl/lib/ACESlib.ODT_Common.ctl
  150. displayMapperParameters->m_surroundGamma = 0.9811f;
  151. displayMapperParameters->m_gamma = 1.0f; // gamma not used with perceptual quantizer, but just set to 1.0 anyways
  152. }
  153. OutputDeviceTransformType AcesDisplayMapperFeatureProcessor::GetOutputDeviceTransformType(RHI::Format bufferFormat)
  154. {
  155. OutputDeviceTransformType outputDeviceTransformType = OutputDeviceTransformType_48Nits;
  156. if (bufferFormat == RHI::Format::R8G8B8A8_UNORM ||
  157. bufferFormat == RHI::Format::B8G8R8A8_UNORM)
  158. {
  159. outputDeviceTransformType = OutputDeviceTransformType_48Nits;
  160. }
  161. else if (bufferFormat == RHI::Format::R10G10B10A2_UNORM)
  162. {
  163. outputDeviceTransformType = OutputDeviceTransformType_1000Nits;
  164. }
  165. else
  166. {
  167. AZ_Assert(false, "Not yet supported.");
  168. // To work normally on unsupported environment, initialize the display parameters by OutputDeviceTransformType_48Nits.
  169. outputDeviceTransformType = OutputDeviceTransformType_48Nits;
  170. }
  171. return outputDeviceTransformType;
  172. }
  173. void AcesDisplayMapperFeatureProcessor::GetAcesDisplayMapperParameters(DisplayMapperParameters* displayMapperParameters, OutputDeviceTransformType odtType)
  174. {
  175. switch (odtType)
  176. {
  177. case OutputDeviceTransformType_48Nits:
  178. ApplyLdrOdtParameters(displayMapperParameters);
  179. break;
  180. case OutputDeviceTransformType_1000Nits:
  181. case OutputDeviceTransformType_2000Nits:
  182. case OutputDeviceTransformType_4000Nits:
  183. ApplyHdrOdtParameters(displayMapperParameters, odtType);
  184. break;
  185. default:
  186. AZ_Assert(false, "This ODT type[%d] is not supported.", odtType);
  187. break;
  188. }
  189. }
  190. void AcesDisplayMapperFeatureProcessor::GetOwnedLut(DisplayMapperLut& displayMapperLut, const AZ::Name& lutName)
  191. {
  192. auto it = m_ownedLuts.find(lutName);
  193. if (it == m_ownedLuts.end())
  194. {
  195. InitializeLutImage(lutName);
  196. it = m_ownedLuts.find(lutName);
  197. AZ_Assert(it != m_ownedLuts.end(), "AcesDisplayMapperFeatureProcessor unable to create LUT %s", lutName.GetCStr());
  198. }
  199. displayMapperLut = it->second;
  200. }
  201. void AcesDisplayMapperFeatureProcessor::GetDisplayMapperLut(DisplayMapperLut& displayMapperLut)
  202. {
  203. const AZ::Name acesLutName("AcesLutImage");
  204. auto it = m_ownedLuts.find(acesLutName);
  205. if (it == m_ownedLuts.end())
  206. {
  207. InitializeLutImage(acesLutName);
  208. it = m_ownedLuts.find(acesLutName);
  209. AZ_Assert(it != m_ownedLuts.end(), "AcesDisplayMapperFeatureProcessor unable to create ACES LUT image");
  210. }
  211. displayMapperLut = it->second;
  212. }
    //! Loads a LUT from a product asset path by resolving the path to an asset id
    //! and delegating to GetLutFromAssetId. Resolution failures are reported at
    //! Error trace level by AssetUtils; displayMapperAssetLut is then left untouched.
    void AcesDisplayMapperFeatureProcessor::GetLutFromAssetLocation(DisplayMapperAssetLut& displayMapperAssetLut, const AZStd::string& assetPath)
    {
        Data::AssetId assetId = RPI::AssetUtils::GetAssetIdForProductPath(assetPath.c_str(), RPI::AssetUtils::TraceLevel::Error);
        GetLutFromAssetId(displayMapperAssetLut, assetId);
    }
  218. void AcesDisplayMapperFeatureProcessor::GetLutFromAssetId(DisplayMapperAssetLut& displayMapperAssetLut, const AZ::Data::AssetId assetId)
  219. {
  220. if (!assetId.IsValid())
  221. {
  222. return;
  223. }
  224. // Check first if this already exists
  225. auto it = m_assetLuts.find(assetId.ToString<AZStd::string>());
  226. if (it != m_assetLuts.end())
  227. {
  228. displayMapperAssetLut = it->second;
  229. return;
  230. }
  231. // Read the lut which is a .3dl file embedded within an azasset file.
  232. Data::Asset<RPI::AnyAsset> asset = RPI::AssetUtils::LoadAssetById<RPI::AnyAsset>(assetId, RPI::AssetUtils::TraceLevel::Error);
  233. const LookupTableAsset* lutAsset = RPI::GetDataFromAnyAsset<LookupTableAsset>(asset);
  234. if (lutAsset == nullptr)
  235. {
  236. AZ_Error("AcesDisplayMapperFeatureProcessor", false, "Unable to read LUT from asset.");
  237. asset.Release();
  238. return;
  239. }
  240. // The first row of numbers in a 3dl file is a number of vertices that partition the space from [0,..1023]
  241. // This assumes that the vertices are evenly spaced apart. Non-uniform spacing is supported by the format,
  242. // but haven't been encountered yet.
  243. const size_t lutSize = lutAsset->m_intervals.size();
  244. if (lutSize == 0)
  245. {
  246. AZ_Error("AcesDisplayMapperFeatureProcessor", false, "Lut asset has invalid size.");
  247. asset.Release();
  248. return;
  249. }
  250. // Create a buffer of half floats from the LUT and use it to initialize a 3d texture.
  251. const size_t kChannels = 4;
  252. const size_t kChannelBytes = 2;
  253. const size_t bytesPerRow = lutSize * kChannels * kChannelBytes;
  254. const size_t bytesPerSlice = bytesPerRow * lutSize;
  255. AZStd::vector<uint16_t> u16Buffer;
  256. const size_t bufferSize = lutSize * lutSize * lutSize * kChannels;
  257. u16Buffer.resize(bufferSize);
  258. for (size_t slice = 0; slice < lutSize; slice++)
  259. {
  260. for (size_t column = 0; column < lutSize; column++)
  261. {
  262. for (size_t row = 0; row < lutSize; row++)
  263. {
  264. // Index in the LUT texture data
  265. size_t idx = (column * kChannels) +
  266. ((bytesPerRow * row) / kChannelBytes) +
  267. ((bytesPerSlice * slice) / kChannelBytes);
  268. // Vertices the .3dl file are listed first by increasing slice, then row, and finally column coordinate
  269. // This corresponds to blue, green, and red channels, respectively.
  270. size_t assetIdx = slice + lutSize * row + (lutSize * lutSize * column);
  271. AZ::u64 red = lutAsset->m_values[assetIdx * 3 + 0];
  272. AZ::u64 green = lutAsset->m_values[assetIdx * 3 + 1];
  273. AZ::u64 blue = lutAsset->m_values[assetIdx * 3 + 2];
  274. // The vertices in the file are given as a positive integer value in [0,..4095] and need to be normalized
  275. constexpr float NormalizeValue = 4095.0f;
  276. u16Buffer[idx + 0] = ConvertFloatToHalf(static_cast<float>(red) / NormalizeValue);
  277. u16Buffer[idx + 1] = ConvertFloatToHalf(static_cast<float>(green) / NormalizeValue);
  278. u16Buffer[idx + 2] = ConvertFloatToHalf(static_cast<float>(blue) / NormalizeValue);
  279. u16Buffer[idx + 3] = 0x3b00; // 1.0 in half
  280. }
  281. }
  282. }
  283. asset.Release();
  284. Data::Instance<RPI::StreamingImagePool> streamingImagePool = RPI::ImageSystemInterface::Get()->GetSystemStreamingPool();
  285. RHI::Size imageSize;
  286. imageSize.m_width = static_cast<uint32_t>(lutSize);
  287. imageSize.m_height = static_cast<uint32_t>(lutSize);
  288. imageSize.m_depth = static_cast<uint32_t>(lutSize);
  289. size_t imageDataSize = bytesPerSlice * lutSize;
  290. Data::Instance<RPI::StreamingImage> lutStreamingImage = RPI::StreamingImage::CreateFromCpuData(
  291. *streamingImagePool, RHI::ImageDimension::Image3D, imageSize, LutFormat, u16Buffer.data(), imageDataSize);
  292. AZ_Error("AcesDisplayMapperFeatureProcessor", lutStreamingImage, "Failed to initialize the lut assetId %s.", assetId.ToString<AZStd::string>().c_str());
  293. DisplayMapperAssetLut assetLut;
  294. assetLut.m_lutStreamingImage = lutStreamingImage;
  295. // Add to the list of LUT asset resources
  296. m_assetLuts.insert(AZStd::pair<AZStd::string, DisplayMapperAssetLut>(assetId.ToString<AZStd::string>(), assetLut));
  297. displayMapperAssetLut = assetLut;
  298. }
  299. void AcesDisplayMapperFeatureProcessor::InitializeImagePool()
  300. {
  301. AZ::RHI::Factory& factory = RHI::Factory::Get();
  302. m_displayMapperImagePool = factory.CreateImagePool();
  303. m_displayMapperImagePool->SetName(Name("DisplayMapperImagePool"));
  304. RHI::ImagePoolDescriptor imagePoolDesc = {};
  305. imagePoolDesc.m_bindFlags = RHI::ImageBindFlags::ShaderReadWrite;
  306. imagePoolDesc.m_budgetInBytes = ImagePoolBudget;
  307. RHI::Device* device = RHI::RHISystemInterface::Get()->GetDevice();
  308. RHI::ResultCode resultCode = m_displayMapperImagePool->Init(*device, imagePoolDesc);
  309. if (resultCode != RHI::ResultCode::Success)
  310. {
  311. AZ_Error("AcesDisplayMapperFeatureProcessor", false, "Failed to initialize image pool.");
  312. return;
  313. }
  314. }
  315. void AcesDisplayMapperFeatureProcessor::InitializeLutImage(const AZ::Name& lutName)
  316. {
  317. if (!m_displayMapperImagePool)
  318. {
  319. InitializeImagePool();
  320. }
  321. DisplayMapperLut lutResource;
  322. lutResource.m_lutImage = RHI::Factory::Get().CreateImage();
  323. lutResource.m_lutImage->SetName(lutName);
  324. RHI::ImageInitRequest imageRequest;
  325. imageRequest.m_image = lutResource.m_lutImage.get();
  326. static const int LutSize = 32;
  327. imageRequest.m_descriptor = RHI::ImageDescriptor::Create3D(RHI::ImageBindFlags::ShaderReadWrite, LutSize, LutSize, LutSize, LutFormat);
  328. RHI::ResultCode resultCode = m_displayMapperImagePool->InitImage(imageRequest);
  329. if (resultCode != RHI::ResultCode::Success)
  330. {
  331. AZ_Error("AcesDisplayMapperFeatureProcessor", false, "Failed to initialize LUT image.");
  332. return;
  333. }
  334. lutResource.m_lutImageViewDescriptor = RHI::ImageViewDescriptor::Create(LutFormat, 0, 0);
  335. lutResource.m_lutImageView = lutResource.m_lutImage->GetImageView(lutResource.m_lutImageViewDescriptor);
  336. if (!lutResource.m_lutImageView.get())
  337. {
  338. AZ_Error("AcesDisplayMapperFeatureProcessor", false, "Failed to initialize LUT image view.");
  339. return;
  340. }
  341. // Add to the list of lut resources
  342. lutResource.m_lutImageView->SetName(lutName);
  343. m_ownedLuts[lutName] = lutResource;
  344. }
  345. ShaperParams AcesDisplayMapperFeatureProcessor::GetShaperParameters(ShaperPresetType shaperPreset, float customMinEv, float customMaxEv)
  346. {
  347. // Default is a linear shaper with bias 0.0 and scale 1.0. That is, fx = x*1.0 + 0.0
  348. ShaperParams shaperParams = { ShaperType::Linear, 0.0, 1.f };
  349. switch (shaperPreset)
  350. {
  351. case ShaperPresetType::None:
  352. break;
  353. case ShaperPresetType::Log2_48Nits:
  354. shaperParams = GetAcesShaperParameters(OutputDeviceTransformType::OutputDeviceTransformType_48Nits);
  355. break;
  356. case ShaperPresetType::Log2_1000Nits:
  357. shaperParams = GetAcesShaperParameters(OutputDeviceTransformType::OutputDeviceTransformType_1000Nits);
  358. break;
  359. case ShaperPresetType::Log2_2000Nits:
  360. shaperParams = GetAcesShaperParameters(OutputDeviceTransformType::OutputDeviceTransformType_2000Nits);
  361. break;
  362. case ShaperPresetType::Log2_4000Nits:
  363. shaperParams = GetAcesShaperParameters(OutputDeviceTransformType::OutputDeviceTransformType_4000Nits);
  364. break;
  365. case ShaperPresetType::LinearCustomRange:
  366. {
  367. // Map the range min exposure - max exposure to 0-1. Convert EV values to linear values here to avoid that work in the shader.
  368. // Shader equation becomes (x - bias) / scale;
  369. constexpr float MediumGray = 0.18f;
  370. const float minValue = MediumGray * powf(2, customMinEv);
  371. const float maxValue = MediumGray * powf(2, customMaxEv);
  372. shaperParams.m_type = ShaperType::Linear;
  373. shaperParams.m_scale = 1.0f / (maxValue - minValue);
  374. shaperParams.m_bias = -minValue * shaperParams.m_scale;
  375. break;
  376. }
  377. case ShaperPresetType::Log2CustomRange:
  378. shaperParams = GetLog2ShaperParameters(customMinEv, customMaxEv);
  379. break;
  380. case ShaperPresetType::PqSmpteSt2084:
  381. shaperParams.m_type = ShaperType::PqSmpteSt2084;
  382. break;
  383. default:
  384. AZ_Error("DisplayMapperPass", false, "Invalid shaper preset type.");
  385. break;
  386. }
  387. return shaperParams;
  388. }
    //! Resets the given descriptor to the defaults: ACES tone mapping with the
    //! LDR color grading LUT disabled and any previously held LUT asset released.
    void AcesDisplayMapperFeatureProcessor::GetDefaultDisplayMapperConfiguration(DisplayMapperConfigurationDescriptor& config)
    {
        // Default configuration is ACES with LDR color grading LUT disabled.
        config.m_operationType = DisplayMapperOperationType::Aces;
        config.m_ldrGradingLutEnabled = false;
        config.m_ldrColorGradingLut.Release();
    }
    //! Stores the given configuration as the active one, replacing the previous
    //! configuration (the descriptor is copied).
    void AcesDisplayMapperFeatureProcessor::RegisterDisplayMapperConfiguration(const DisplayMapperConfigurationDescriptor& config)
    {
        m_displayMapperConfiguration = config;
    }
    //! Returns a copy of the currently active display mapper configuration.
    DisplayMapperConfigurationDescriptor AcesDisplayMapperFeatureProcessor::GetDisplayMapperConfiguration()
    {
        return m_displayMapperConfiguration;
    }
  404. } // namespace AZ::Render