diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/DynamicGI/DynamicGISkyOcclusion.compute b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/DynamicGI/DynamicGISkyOcclusion.compute index b32ec4e4c61..c8baa72a193 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/DynamicGI/DynamicGISkyOcclusion.compute +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/DynamicGI/DynamicGISkyOcclusion.compute @@ -26,3 +26,43 @@ void MainRayGenShader( RayGenExecute(dispatchInfo); } + +#pragma kernel EncodeShadingDirection + +StructuredBuffer _SkyShadingPrecomputedDirection; +StructuredBuffer _SkyShadingDirections; +RWStructuredBuffer _SkyShadingIndices; + +uint _ProbeCount; + +uint LinearSearchClosestDirection(float3 direction) +{ + int indexMax = 255; + float bestDot = -10.0f; + int bestIndex = 0; + + for (int index=0; index< indexMax; index++) + { + float currentDot = dot(direction, _SkyShadingPrecomputedDirection[index]); + if (currentDot > bestDot) + { + bestDot = currentDot; + bestIndex = index; + } + } + return bestIndex; +} + +[numthreads(64, 1, 1)] +void EncodeShadingDirection(uint probeId : SV_DispatchThreadID) +{ + if (probeId >= _ProbeCount) + return; + + uint bestDirectionIndex = 255; + float norm = length(_SkyShadingDirections[probeId]); + if (norm > 0.0001f) + bestDirectionIndex = LinearSearchClosestDirection(_SkyShadingDirections[probeId] / norm); + + _SkyShadingIndices[probeId] = bestDirectionIndex; +} diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/DynamicGI/DynamicGISkyOcclusion.hlsl b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/DynamicGI/DynamicGISkyOcclusion.hlsl index 427ee08615f..ce296609ec7 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/DynamicGI/DynamicGISkyOcclusion.hlsl +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/DynamicGI/DynamicGISkyOcclusion.hlsl @@ -19,30 +19,8 @@ int _BackFaceCulling; int _BakeSkyShadingDirection; StructuredBuffer _ProbePositions; -StructuredBuffer _SkyShadingPrecomputedDirection; RWStructuredBuffer _SkyOcclusionOut; RWStructuredBuffer _SkyShadingOut; -RWStructuredBuffer _SkyShadingDirectionIndexOut; - - -uint LinearSearchClosestDirection(float3 direction) -{ - int indexMax = 255; - float bestDot = -10.0f; - int bestIndex = 0; - - for (int index=0; index< indexMax; index++) - { - float currentDot = dot(direction, _SkyShadingPrecomputedDirection[index]); - if (currentDot > bestDot) - { - bestDot = currentDot; - bestIndex = index; - } - } - return bestIndex; -} - void RayGenExecute(UnifiedRT::DispatchInfo dispatchInfo) { @@ -168,23 +146,5 @@ void RayGenExecute(UnifiedRT::DispatchInfo dispatchInfo) // The 1.125f exponent comes from experimental testing. It's the value that works the best when trying to match a bake and deringing done with the lightmapper, but it has no theoretical explanation. // In the future, we should replace these custom windowing and deringing operations with the ones used in the lightmapper to implement a more academical solution. 
_SkyOcclusionOut[probeId].yzw *= lerp(1.0f, radianceToIrradianceFactor, pow(windowL1, 1.125f)); - - - // Normalize computed direction - if (_BakeSkyShadingDirection > 0) - { - uint bestDirectionIndex = 255; - float norm = length(_SkyShadingOut[probeId]); - if (norm > 0.0001f) - { - _SkyShadingOut[probeId] /= norm; - bestDirectionIndex = LinearSearchClosestDirection(_SkyShadingOut[probeId]); - } - else - { - _SkyShadingOut[probeId] = float3(0.0f, 0.0f, 0.0f); - } - _SkyShadingDirectionIndexOut[probeId] = bestDirectionIndex; - } } } diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeAdjustmentVolumeEditor.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeAdjustmentVolumeEditor.cs index 982d0d897ab..0c1e7b5705c 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeAdjustmentVolumeEditor.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeAdjustmentVolumeEditor.cs @@ -261,7 +261,7 @@ static void DrawBakingHelpers(SerializedProbeAdjustmentVolume p, Editor owner) using (new EditorGUI.DisabledScope(bakingSet == null)) { if (GUILayout.Button(Styles.s_PreviewLighting)) - ProbeGIBaking.BakeAdjustmentVolume(bakingSet, ptv); + AdaptiveProbeVolumes.BakeAdjustmentVolume(bakingSet, ptv); ProbeVolumeLightingTab.BakeAPVButton(); } diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Dilate.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Dilate.cs index fcf468dca09..3735e9701b8 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Dilate.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Dilate.cs @@ -1,8 +1,11 @@ using System.Collections.Generic; +using UnityEditor; + +using Cell = UnityEngine.Rendering.ProbeReferenceVolume.Cell; namespace UnityEngine.Rendering { - partial class ProbeGIBaking + partial class AdaptiveProbeVolumes { static ComputeShader dilationShader; static int dilationKernel = -1; @@ -16,6 +19,13 @@ static void InitDilationShaders() } } + static void GetProbeAndChunkIndex(int globalProbeIndex, out int chunkIndex, out int chunkProbeIndex) + { + var chunkSizeInProbeCount = ProbeBrickPool.GetChunkSizeInProbeCount(); + chunkIndex = globalProbeIndex / chunkSizeInProbeCount; + chunkProbeIndex = globalProbeIndex - chunkIndex * chunkSizeInProbeCount; + } + [GenerateHLSL(needAccessors = false)] struct DilatedProbe { @@ -97,10 +107,7 @@ internal void ToSphericalHarmonicsShaderConstants(ProbeReferenceVolume.Cell cell if (cellChunkData.skyOcclusionDataL0L1.Length != 0) WriteToShaderSkyOcclusion(SO_L0L1, cellChunkData.skyOcclusionDataL0L1, index * 4); if (cellChunkData.skyShadingDirectionIndices.Length != 0) - { - var directions = DynamicSkyPrecomputedDirections.GetPrecomputedDirections(); - cellChunkData.skyShadingDirectionIndices[index] = (byte)LinearSearchClosestDirection(directions, SO_Direction); - } + cellChunkData.skyShadingDirectionIndices[index] = (byte)SkyOcclusionBaker.EncodeSkyShadingDirection(SO_Direction); } } @@ -167,6 +174,112 @@ public void Dispose() static readonly int _DilationParameters2 = Shader.PropertyToID("_DilationParameters2"); static readonly int _OutputProbes = Shader.PropertyToID("_OutputProbes"); + // Can definitively be optimized later on. + // Also note that all the bookkeeping of all the reference volumes will likely need to change when we move to + // proper UX. 
+ internal static void PerformDilation() + { + var prv = ProbeReferenceVolume.instance; + var perSceneDataList = prv.perSceneDataList; + if (perSceneDataList.Count == 0) return; + SetBakingContext(perSceneDataList); + + List tempLoadedCells = new List(); + + if (m_BakingSet.hasDilation) + { + var dilationSettings = m_BakingSet.settings.dilationSettings; + + // Make sure all assets are loaded. + prv.PerformPendingOperations(); + + // TODO: This loop is very naive, can be optimized, but let's first verify if we indeed want this or not. + for (int iterations = 0; iterations < dilationSettings.dilationIterations; ++iterations) + { + // Try to load all available cells to the GPU. Might not succeed depending on the memory budget. + prv.LoadAllCells(); + + // Dilate all cells + List dilatedCells = new List(prv.cells.Values.Count); + bool everythingLoaded = !prv.hasUnloadedCells; + + if (everythingLoaded) + { + foreach (var cell in prv.cells.Values) + { + if (m_CellsToDilate.ContainsKey(cell.desc.index)) + { + PerformDilation(cell, m_BakingSet); + dilatedCells.Add(cell); + } + } + } + else + { + // When everything does not fit in memory, we are going to dilate one cell at a time. + // To do so, we load the cell and all its neighbours and then dilate. + // This is an inefficient use of memory but for now most of the time is spent in reading back the result anyway so it does not introduce any performance regression. + + // Free All memory to make room for each cell and its neighbors for dilation. + prv.UnloadAllCells(); + + foreach (var cell in prv.cells.Values) + { + if (!m_CellsToDilate.ContainsKey(cell.desc.index)) + continue; + + var cellPos = cell.desc.position; + // Load the cell and all its neighbors before doing dilation. + for (int x = -1; x <= 1; ++x) + { + for (int y = -1; y <= 1; ++y) + { + for (int z = -1; z <= 1; ++z) + { + Vector3Int pos = cellPos + new Vector3Int(x, y, z); + if (m_CellPosToIndex.TryGetValue(pos, out var cellToLoadIndex)) + { + if (prv.cells.TryGetValue(cellToLoadIndex, out var cellToLoad)) + { + if (prv.LoadCell(cellToLoad)) + { + tempLoadedCells.Add(cellToLoad); + } + else + Debug.LogError($"Not enough memory to perform dilation for cell {cell.desc.index}"); + } + } + } + } + } + + PerformDilation(cell, m_BakingSet); + dilatedCells.Add(cell); + + // Free memory again. + foreach (var cellToUnload in tempLoadedCells) + prv.UnloadCell(cellToUnload); + tempLoadedCells.Clear(); + } + } + + // Now write back the assets. + WriteDilatedCells(dilatedCells); + + AssetDatabase.SaveAssets(); + AssetDatabase.Refresh(); + + // Reload data + foreach (var sceneData in perSceneDataList) + { + sceneData.QueueSceneRemoval(); + sceneData.QueueSceneLoading(); + } + prv.PerformPendingOperations(); + } + } + } + static void PerformDilation(ProbeReferenceVolume.Cell cell, ProbeVolumeBakingSet bakingSet) { InitDilationShaders(); @@ -235,5 +348,36 @@ static void PerformDilation(ProbeReferenceVolume.Cell cell, ProbeVolumeBakingSet data.ExtractDilatedProbes(); data.Dispose(); } + + // NOTE: This is somewhat hacky and is going to likely be slow (or at least slower than it could). + // It is only a first iteration of the concept that won't be as impactful on memory as other options. 
+ internal static void RevertDilation() + { + if (m_BakingSet == null) + { + if (ProbeReferenceVolume.instance.perSceneDataList.Count == 0) return; + SetBakingContext(ProbeReferenceVolume.instance.perSceneDataList); + } + + var dilationSettings = m_BakingSet.settings.dilationSettings; + var blackProbe = new SphericalHarmonicsL2(); + + int chunkSizeInProbes = ProbeBrickPool.GetChunkSizeInProbeCount(); + foreach (var cell in ProbeReferenceVolume.instance.cells.Values) + { + for (int i = 0; i < cell.data.validity.Length; ++i) + { + if (dilationSettings.enableDilation && dilationSettings.dilationDistance > 0.0f && cell.data.validity[i] > dilationSettings.dilationValidityThreshold) + { + GetProbeAndChunkIndex(i, out var chunkIndex, out var index); + + var cellChunkData = GetCellChunkData(cell.data, chunkIndex); + + WriteToShaderCoeffsL0L1(blackProbe, cellChunkData.shL0L1RxData, cellChunkData.shL1GL1RyData, cellChunkData.shL1BL1RzData, index * 4); + WriteToShaderCoeffsL2(blackProbe, cellChunkData.shL2Data_0, cellChunkData.shL2Data_1, cellChunkData.shL2Data_2, cellChunkData.shL2Data_3, index * 4); + } + } + } + } } } diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Invalidation.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Invalidation.cs index 7e0092455b2..220893b1a6a 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Invalidation.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Invalidation.cs @@ -1,13 +1,10 @@ +using System; using System.Collections.Generic; -using UnityEditor; using Unity.Collections; -using Unity.Collections.LowLevel.Unsafe; -using Unity.Jobs; -using System; namespace UnityEngine.Rendering { - partial class ProbeGIBaking + partial class AdaptiveProbeVolumes { // We use this scratch memory as a way of spoofing the texture. static DynamicArray s_Validity_locData = new DynamicArray(); diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.LightTransport.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.LightTransport.cs index cb9c8620719..989a71db301 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.LightTransport.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.LightTransport.cs @@ -1,425 +1,129 @@ using System; using System.Collections.Generic; using System.Runtime.CompilerServices; -using System.Threading; using Unity.Collections; using UnityEditor; -using UnityEngine.Assertions; using UnityEngine.LightTransport; using UnityEngine.LightTransport.PostProcessing; using UnityEngine.Rendering.Sampling; using UnityEngine.Rendering.UnifiedRayTracing; + using TouchupVolumeWithBoundsList = System.Collections.Generic.List<(UnityEngine.Rendering.ProbeReferenceVolume.Volume obb, UnityEngine.Bounds aabb, UnityEngine.Rendering.ProbeAdjustmentVolume volume)>; namespace UnityEngine.Rendering { - partial class ProbeGIBaking + partial class AdaptiveProbeVolumes { - enum BakingStep + /// + /// Lighting baker + /// + public abstract class LightingBaker : IDisposable { - VirtualOffset, - LaunchThread, - SkyOcclusion, - Integration, - FinalizeCells, - - Last = FinalizeCells + 1 + /// Indicates that the Step method can be safely called from a thread. + public virtual bool isThreadSafe => false; + /// Set to true when the main thread cancels baking. 
+ public static bool cancel { get; internal set; } + + /// The current baking step. + public abstract ulong currentStep { get; } + /// The total amount of step. + public abstract ulong stepCount { get; } + + /// Array storing the probe lighting as Spherical Harmonics. + public abstract NativeArray irradiance { get; } + /// Array storing the probe validity. A value of 1 means a probe is invalid. + public abstract NativeArray validity { get; } + + /// + /// This is called before the start of baking to allow allocating necessary resources. + /// + /// The probe positions. Also contains reflection probe positions used for normalization. + public abstract void Initialize(NativeArray probePositions); + + /// + /// Run a step of light baking. Baking is considered done when currentStep property equals stepCount. + /// If isThreadSafe is true, this method may be called from a different thread. + /// + /// Return false if bake failed and should be stopped. + public abstract bool Step(); + + /// + /// Performs necessary tasks to free allocated resources. + /// + public abstract void Dispose(); } - struct BakeData + class DefaultLightTransport : LightingBaker { - // Inputs - public BakeJob[] jobs; - public NativeList positions; - public InputExtraction.BakeInput input; - public List additionalRequests; - public NativeArray sortedPositions; + public override bool isThreadSafe => true; - // Workers - public Thread bakingThread; - public VirtualOffsetBaking virtualOffsetJob; - public SkyOcclusionBaking skyOcclusionJob; - public int cellIndex; + int bakedProbeCount; + NativeArray positions; + InputExtraction.BakeInput input; + + public BakeJob[] jobs; // Outputs public NativeArray irradianceResults; public NativeArray validityResults; - // Progress reporting - public BakingStep step; - public int bakedProbeCount; - public int totalProbeCount; - public ulong stepCount; + public override ulong currentStep => (ulong)bakedProbeCount; + public override ulong stepCount => (ulong)positions.Length; - // Cancellation - public bool cancel; - public bool failed; + public override NativeArray irradiance => irradianceResults; + public override NativeArray validity => validityResults; - public void Init(ProbeVolumeBakingSet bakingSet, BakeJob[] bakeJobs, NativeList probePositions, List requests) + public override void Initialize(NativeArray probePositions) { - var result = InputExtraction.ExtractFromScene(out input); - Assert.IsTrue(result, "InputExtraction.ExtractFromScene failed."); - - jobs = bakeJobs; - positions = probePositions; - additionalRequests = requests; - - virtualOffsetJob.Initialize(bakingSet, probePositions); - skyOcclusionJob.Initialize(bakingSet, jobs, positions.Length); - cellIndex = 0; - - bakedProbeCount = 0; - totalProbeCount = probePositions.Length + requests.Count; - stepCount = virtualOffsetJob.stepCount + (ulong)totalProbeCount + skyOcclusionJob.stepCount; - - irradianceResults = new NativeArray(totalProbeCount, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); - validityResults = new NativeArray(totalProbeCount, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); - - step = BakingStep.VirtualOffset; - } - - public void StartThread() - { - sortedPositions = new NativeArray(positions.Length + additionalRequests.Count, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); - SortPositions(s_BakeData.jobs, positions, virtualOffsetJob.offsets, additionalRequests, sortedPositions); - - bakingThread = new Thread(BakeThread); - bakingThread.Start(); - } - - public 
bool Done() - { - ulong currentStep = s_BakeData.virtualOffsetJob.currentStep + (ulong)s_BakeData.bakedProbeCount + s_BakeData.skyOcclusionJob.currentStep; - return currentStep >= s_BakeData.stepCount && s_BakeData.step == BakingStep.Last; - } - - public void Dispose() - { - if (failed) - Debug.LogError("Probe Volume Baking failed."); - - if (jobs == null) + if (!InputExtraction.ExtractFromScene(out input)) + { + Debug.LogError("InputExtraction.ExtractFromScene failed."); return; + } - foreach (var job in jobs) - job.Dispose(); - - positions.Dispose(); - if (sortedPositions.IsCreated) - sortedPositions.Dispose(); - - virtualOffsetJob.Dispose(); - skyOcclusionJob.Dispose(); - - irradianceResults.Dispose(); - validityResults.Dispose(); - - // clear references to managed arrays - this = default; - } - } - - static APVRTContext s_TracingContext; - static BakeData s_BakeData; - static TouchupVolumeWithBoundsList s_AdjustmentVolumes; - - internal class LightTransportBakingProfiling : BakingProfiling, IDisposable - { - //protected override string LogFile => "BakeGI"; - protected override bool ShowProgressBar => false; + bakedProbeCount = 0; + positions = probePositions; - public enum Stages - { - BakeGI, - IntegrateDirectRadiance, - IntegrateIndirectRadiance, - IntegrateValidity, - Postprocess, - ReadBack, - None + irradianceResults = new NativeArray(positions.Length, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); + validityResults = new NativeArray(positions.Length, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); } - static Stages currentStage = Stages.None; - public LightTransportBakingProfiling(Stages stage) : base(stage, ref currentStage) { } - public override Stages GetLastStep() => Stages.None; - public static void GetProgressRange(out float progress0, out float progress1) { float s = 1 / (float)Stages.None; progress0 = (float)currentStage * s; progress1 = progress0 + s; } - public void Dispose() { OnDispose(ref currentStage); } - } - - internal static bool PrepareBaking() - { - BakeJob[] jobs; - NativeList positions; - List requests; - int additionalRequests; - using (new BakingSetupProfiling(BakingSetupProfiling.Stages.OnBakeStarted)) + public override bool Step() { - if (!InitializeBake()) + if (input == null) return false; - s_AdjustmentVolumes = GetAdjustementVolumes(); - - requests = AdditionalGIBakeRequestsManager.GetProbeNormalizationRequests(); - additionalRequests = requests.Count; - - jobs = CreateBakingJobs(m_BakingSet, requests.Count != 0); - var regularJobs = jobs.AsSpan(0, requests.Count != 0 ? 
jobs.Length - 1 : jobs.Length); - - // Note: this could be executed in the baking delegate to be non blocking - using (new BakingSetupProfiling(BakingSetupProfiling.Stages.PlaceProbes)) - positions = RunPlacement(regularJobs); - - if (positions.Length == 0) - { - positions.Dispose(); - Clear(); - CleanBakeData(); + var context = BakeContext.New(input, positions); + if (!context.isCreated) return false; - } - } - - s_BakeData.Init(m_BakingSet, jobs, positions, requests); - return true; - } - - static void BakeDelegate(ref float progress, ref bool done) - { - if (s_BakeData.step == BakingStep.VirtualOffset) - { - if (s_BakeData.virtualOffsetJob.RunVirtualOffsetStep()) - s_BakeData.step++; - } - - if (s_BakeData.step == BakingStep.LaunchThread) - { - s_BakeData.StartThread(); - s_BakeData.skyOcclusionJob.StartBaking(s_BakeData.sortedPositions); - - s_BakeData.step++; - } - - if (s_BakeData.step == BakingStep.SkyOcclusion) - { - if (s_BakeData.skyOcclusionJob.RunSkyOcclusionStep()) - s_BakeData.step++; - } - - if (s_BakeData.step == BakingStep.Integration) - { - if (s_BakeData.bakedProbeCount >= s_BakeData.totalProbeCount) - s_BakeData.step++; - } - - if (s_BakeData.step == BakingStep.FinalizeCells) - { - FinalizeCell(s_BakeData.cellIndex++, s_BakeData.irradianceResults, s_BakeData.validityResults, - s_BakeData.virtualOffsetJob.offsets, - s_BakeData.skyOcclusionJob.occlusionResults, s_BakeData.skyOcclusionJob.directionResults); - - if (s_BakeData.cellIndex >= m_BakingBatch.cells.Count) - s_BakeData.step++; - } - - // Handle error case - if (s_BakeData.failed) - { - CleanBakeData(); - done = true; - return; - } - - // Use LightTransport progress to have async report on baking progress - ulong currentStep = s_BakeData.virtualOffsetJob.currentStep + s_BakeData.skyOcclusionJob.currentStep; - foreach (var job in s_BakeData.jobs) currentStep += job.currentStep; - progress = currentStep / (float)s_BakeData.stepCount; - - // Use our counter to determine when baking is done - if (s_BakeData.Done()) - { - FinalizeBake(); - done = true; - } - } - static void BakeThread() - { - var context = BakeContext.New(s_BakeData.input, s_BakeData.sortedPositions); - - try - { - for (int i = 0; i < s_BakeData.jobs.Length; i++) + try { - ref var job = ref s_BakeData.jobs[i]; - if (job.indices.Length != 0) + for (int i = 0; i < jobs.Length; i++) { - bool success = context.Bake(job, ref s_BakeData.irradianceResults, ref s_BakeData.validityResults, ref s_BakeData.cancel); - if (success) - s_BakeData.bakedProbeCount += job.indices.Length; - s_BakeData.failed = !success; - } - } - } - finally - { - context.Dispose(); - } - } - - static void UpdateLightStatus() - { - var lightingSettings = ProbeVolumeLightingTab.GetLightingSettings(); - - // The contribution from all Baked and Mixed lights in the scene should be disabled to avoid double contribution. 
- var lights = Object.FindObjectsByType(FindObjectsSortMode.None); - foreach (var light in lights) - { - if (light.lightmapBakeType != LightmapBakeType.Realtime) - { - var bakingOutput = light.bakingOutput; - bakingOutput.isBaked = true; - bakingOutput.lightmapBakeType = light.lightmapBakeType; - bakingOutput.mixedLightingMode = lightingSettings.mixedBakeMode; - light.bakingOutput = bakingOutput; - } - } - } - - static void FinalizeBake() - { - using (new BakingCompleteProfiling(BakingCompleteProfiling.Stages.FinalizingBake)) - { - int probeCount = s_BakeData.positions.Length; - int requestCount = s_BakeData.additionalRequests.Count; + ref var job = ref jobs[i]; + if (job.probeCount != 0) + { + if (!context.Bake(job, ref irradianceResults, ref validityResults)) + return false; - if (probeCount != 0) - { - try - { - ApplyPostBakeOperations(s_BakeData.irradianceResults, s_BakeData.validityResults, - s_BakeData.virtualOffsetJob.offsets, - s_BakeData.skyOcclusionJob.occlusionResults, s_BakeData.skyOcclusionJob.directionResults); - } - catch (Exception e) - { - Debug.LogError(e); + bakedProbeCount += job.probeCount; + } } } - - if (requestCount != 0) + finally { - var additionalIrradiance = s_BakeData.irradianceResults.GetSubArray(s_BakeData.irradianceResults.Length - requestCount, requestCount); - var additionalValidity = s_BakeData.validityResults.GetSubArray(s_BakeData.validityResults.Length - requestCount, requestCount); - AdditionalGIBakeRequestsManager.OnAdditionalProbesBakeCompleted(additionalIrradiance, additionalValidity); - } - } - - CleanBakeData(); - - // We need to reset that view - ProbeReferenceVolume.instance.ResetDebugViewToMaxSubdiv(); - } - - static void OnBakeCancelled() - { - if (s_BakeData.bakingThread != null) - { - s_BakeData.cancel = true; - s_BakeData.bakingThread.Join(); - } - - CleanBakeData(); - } - - static void CleanBakeData() - { - s_BakeData.Dispose(); - m_BakingBatch = null; - s_AdjustmentVolumes = null; - - // If lighting pannel is not created, we have to dispose ourselves - if (ProbeVolumeLightingTab.instance == null) - ProbeGIBaking.Dispose(); - - Lightmapping.ResetAdditionalBakeDelegate(); - - partialBakeSceneList = null; - ProbeReferenceVolume.instance.checksDuringBakeAction = null; - } - - static internal void Dispose() - { - s_TracingContext.Dispose(); - } - - static BakeJob[] CreateBakingJobs(ProbeVolumeBakingSet bakingSet, bool hasAdditionalRequests) - { - // Build the list of adjustment volumes affecting sample count - var touchupVolumesAndBounds = new TouchupVolumeWithBoundsList(); - { - // This is slow, but we should have very little amount of touchup volumes. - foreach (var adjustment in s_AdjustmentVolumes) - { - if (adjustment.volume.mode == ProbeAdjustmentVolume.Mode.OverrideSampleCount) - touchupVolumesAndBounds.Add(adjustment); - } - - // Sort by volume to give priority to smaller volumes - touchupVolumesAndBounds.Sort((a, b) => (a.aabb.size.x * a.aabb.size.y * a.aabb.size.z).CompareTo(b.aabb.size.x * b.aabb.size.y * b.aabb.size.z)); - } - - var lightingSettings = ProbeVolumeLightingTab.GetLightingSettings(); - bool skyOcclusion = bakingSet.skyOcclusion; - - int additionalJobs = hasAdditionalRequests ? 
2 : 1; - var jobs = new BakeJob[touchupVolumesAndBounds.Count + additionalJobs]; - - for (int i = 0; i < touchupVolumesAndBounds.Count; i++) - jobs[i].Create(lightingSettings, skyOcclusion, touchupVolumesAndBounds[i]); - - jobs[touchupVolumesAndBounds.Count + 0].Create(bakingSet, lightingSettings, skyOcclusion); - if (hasAdditionalRequests) - jobs[touchupVolumesAndBounds.Count + 1].Create(bakingSet, lightingSettings, false); - - return jobs; - } - - // Place positions contiguously for each bake job in a single array and apply virtual offsets - static void SortPositions(BakeJob[] jobs, NativeList positions, Vector3[] offsets, List requests, NativeArray sortedPositions) - { - int regularJobCount = requests.Count != 0 ? jobs.Length - 1 : jobs.Length; - - // Construct position arrays - int currentOffset = 0; - for (int i = 0; i < regularJobCount; i++) - { - ref var job = ref jobs[i]; - var indices = job.indices; - for (int j = 0; j < indices.Length; j++) - { - var pos = positions[indices[j]]; - if (offsets != null) pos += offsets[indices[j]]; - - sortedPositions[currentOffset + j] = pos; + context.Dispose(); } - job.startOffset = currentOffset; - currentOffset += indices.Length; + return true; } - Debug.Assert(currentOffset == positions.Length); - - if (requests.Count != 0) + public override void Dispose() { - ref var requestJob = ref jobs[jobs.Length - 1]; - requestJob.startOffset = currentOffset; - for (int i = 0; i < requests.Count; i++) - { - requestJob.indices.Add(currentOffset); - sortedPositions[currentOffset++] = requests[i]; - } - - Debug.Assert(currentOffset == sortedPositions.Length); + irradianceResults.Dispose(); + validityResults.Dispose(); } } @@ -430,7 +134,7 @@ struct BakeJob public ProbeAdjustmentVolume touchup; public int startOffset; - public NativeList indices; + public int probeCount; public int directSampleCount; public int indirectSampleCount; @@ -445,7 +149,7 @@ struct BakeJob public BakeProgressState progress; public ulong currentStep => (ulong)Mathf.Min(progress.Progress() * 0.01f / (float)(directSampleCount + indirectSampleCount + validitySampleCount), stepCount); // this is how the progress is computed in c++ - public ulong stepCount => (ulong)indices.Length; + public ulong stepCount => (ulong)probeCount; [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Create(ProbeVolumeBakingSet bakingSet, LightingSettings lightingSettings, bool ignoreEnvironement) @@ -475,7 +179,6 @@ internal void Create(LightingSettings lightingSettings, bool ignoreEnvironement, void Create(LightingSettings lightingSettings, bool ignoreEnvironement, int directSampleCount, int indirectSampleCount, int sampleCountMultiplier, int maxBounces) { // We could preallocate wrt touchup aabb volume, or total brick count for the global job - indices = new NativeList(Allocator.Persistent); progress = new BakeProgressState(); this.directSampleCount = directSampleCount * sampleCountMultiplier; @@ -496,13 +199,35 @@ public bool Contains(Vector3 point) [MethodImpl(MethodImplOptions.AggressiveInlining)] public void Dispose() { - indices.Dispose(); progress.Dispose(); } } struct BakeContext { + internal class LightTransportBakingProfiling : BakingProfiling, IDisposable + { + //protected override string LogFile => "BakeGI"; + protected override bool ShowProgressBar => false; + + public enum Stages + { + BakeGI, + IntegrateDirectRadiance, + IntegrateIndirectRadiance, + IntegrateValidity, + Postprocess, + ReadBack, + None + } + + static Stages currentStage = Stages.None; + public 
LightTransportBakingProfiling(Stages stage) : base(stage, ref currentStage) { } + public override Stages GetLastStep() => Stages.None; + public static void GetProgressRange(out float progress0, out float progress1) { float s = 1 / (float)Stages.None; progress0 = (float)currentStage * s; progress1 = progress0 + s; } + public void Dispose() { OnDispose(ref currentStage); } + } + public IDeviceContext ctx; public IProbeIntegrator integrator; public IWorld world; @@ -518,8 +243,8 @@ struct BakeContext public BufferID combinedSHBufferId; public BufferID irradianceBufferId; - NativeArray jobIrradianceResults; - NativeArray jobValidityResults; + public bool allocatedBuffers; + public bool isCreated => allocatedBuffers; const float k_PushOffset = 0.0001f; const int k_MaxProbeCountPerBatch = 128 * 1024; @@ -538,15 +263,24 @@ public static BakeContext New(InputExtraction.BakeInput input, NativeArray(batchSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); - jobValidityResults = new NativeArray(batchSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); + allocatedBuffers = true; } - public bool Bake(in BakeJob job, ref NativeArray irradianceResults, ref NativeArray validityResults, ref bool cancel) + public bool Bake(in BakeJob job, ref NativeArray irradianceResults, ref NativeArray validityResults) { // Divide the job into batches of 128k probes to reduce memory usage. - int batchCount = CoreUtils.DivRoundUp(job.indices.Length, k_MaxProbeCountPerBatch); + int batchCount = CoreUtils.DivRoundUp(job.probeCount, k_MaxProbeCountPerBatch); // Get slices for all buffers because the API require those // All jobs use overlapping slices as they are not run simultaneously @@ -599,7 +332,7 @@ public bool Bake(in BakeJob job, ref NativeArray irradianc for (int batchIndex = 0; batchIndex < batchCount; batchIndex++) { int batchOffset = batchIndex * k_MaxProbeCountPerBatch; - int probeCount = Mathf.Min(job.indices.Length - batchOffset, k_MaxProbeCountPerBatch); + int probeCount = Mathf.Min(job.probeCount - batchOffset, k_MaxProbeCountPerBatch); // Get the correct slice of position as all jobs share the same array. 
var positionsSlice = new BufferSlice(positionsBufferID, (ulong)(job.startOffset + batchOffset)); @@ -615,7 +348,7 @@ public bool Bake(in BakeJob job, ref NativeArray irradianc { var integrationResult = integrator.IntegrateDirectRadiance(ctx, 0, probeCount, job.directSampleCount, job.ignoreEnvironement, directRadianceSlice); if (integrationResult.type != IProbeIntegrator.ResultType.Success) return false; - if (cancel) return true; + if (LightingBaker.cancel) return true; } // Bake indirect radiance @@ -623,7 +356,7 @@ public bool Bake(in BakeJob job, ref NativeArray irradianc { var integrationResult = integrator.IntegrateIndirectRadiance(ctx, 0, probeCount, job.indirectSampleCount, job.ignoreEnvironement, indirectRadianceSlice); if (integrationResult.type != IProbeIntegrator.ResultType.Success) return false; - if (cancel) return true; + if (LightingBaker.cancel) return true; } // Bake validity @@ -631,7 +364,7 @@ public bool Bake(in BakeJob job, ref NativeArray irradianc { var validityResult = integrator.IntegrateValidity(ctx, 0, probeCount, job.validitySampleCount, validitySlice); if (validityResult.type != IProbeIntegrator.ResultType.Success) return false; - if (cancel) return true; + if (LightingBaker.cancel) return true; } /// Postprocess @@ -669,6 +402,9 @@ public bool Bake(in BakeJob job, ref NativeArray irradianc /// Read results + var jobIrradianceResults = irradianceResults.GetSubArray(job.startOffset + batchOffset, probeCount); + var jobValidityResults = validityResults.GetSubArray(job.startOffset + batchOffset, probeCount); + // Schedule read backs to get results back from GPU memory into CPU memory. var irradianceReadEvent = ctx.ReadBuffer(combinedSHSlice, jobIrradianceResults); var validityReadEvent = ctx.ReadBuffer(validitySlice, jobValidityResults); @@ -681,15 +417,7 @@ public bool Bake(in BakeJob job, ref NativeArray irradianc if (!waitResult) return false; } - // Write the batch results into the final buffer - for (int i = 0; i < probeCount; i++) - { - var dst = job.indices[i + batchOffset]; - irradianceResults[dst] = jobIrradianceResults[i]; - validityResults[dst] = jobValidityResults[i]; - } - - if (cancel) + if (LightingBaker.cancel) return true; } @@ -698,18 +426,18 @@ public bool Bake(in BakeJob job, ref NativeArray irradianc public void Dispose() { - jobIrradianceResults.Dispose(); - jobValidityResults.Dispose(); - - ctx.DestroyBuffer(positionsBufferID); - ctx.DestroyBuffer(directRadianceBufferId); - ctx.DestroyBuffer(indirectRadianceBufferId); - ctx.DestroyBuffer(validityBufferId); - - ctx.DestroyBuffer(windowedDirectSHBufferId); - ctx.DestroyBuffer(boostedIndirectSHBufferId); - ctx.DestroyBuffer(combinedSHBufferId); - ctx.DestroyBuffer(irradianceBufferId); + if (allocatedBuffers) + { + ctx.DestroyBuffer(positionsBufferID); + ctx.DestroyBuffer(directRadianceBufferId); + ctx.DestroyBuffer(indirectRadianceBufferId); + ctx.DestroyBuffer(validityBufferId); + + ctx.DestroyBuffer(windowedDirectSHBufferId); + ctx.DestroyBuffer(boostedIndirectSHBufferId); + ctx.DestroyBuffer(combinedSHBufferId); + ctx.DestroyBuffer(irradianceBufferId); + } postProcessor.Dispose(); world.Dispose(); @@ -718,6 +446,27 @@ public void Dispose() } } + static void UpdateLightStatus() + { + var lightingSettings = ProbeVolumeLightingTab.GetLightingSettings(); + + // The contribution from all Baked and Mixed lights in the scene should be disabled to avoid double contribution. 
+ var lights = Object.FindObjectsByType(FindObjectsSortMode.None); + foreach (var light in lights) + { + if (light.lightmapBakeType != LightmapBakeType.Realtime) + { + var bakingOutput = light.bakingOutput; + bakingOutput.isBaked = true; + bakingOutput.lightmapBakeType = light.lightmapBakeType; + bakingOutput.mixedLightingMode = lightingSettings.mixedBakeMode; + light.bakingOutput = bakingOutput; + } + } + } + + // Helper struct to manage tracing backend + struct APVRTContext { RayTracingContext m_Context; @@ -817,98 +566,38 @@ public void Dispose() // Helper functions to bake a subset of the probes - static int s_AsyncBakeTaskID = -1; - internal static bool HasAsyncBakeInProgress() => s_AsyncBakeTaskID != -1; - internal static bool CancelAsyncBake() => Progress.Cancel(s_AsyncBakeTaskID); - - internal static void AsyncBakeCallback() + internal static void BakeProbes(Vector3[] positionValues, SphericalHarmonicsL2[] shValues, float[] validityValues) { - float progress = 0.0f; - bool done = false; - BakeDelegate(ref progress, ref done); - Progress.Report(s_AsyncBakeTaskID, progress, s_BakeData.step.ToString()); - - if (done) - { - UpdateLightStatus(); - Progress.Remove(s_AsyncBakeTaskID); + int numProbes = positionValues.Length; - EditorApplication.update -= AsyncBakeCallback; - s_AsyncBakeTaskID = -1; - } - } + var positionsInput = new NativeArray(positionValues, Allocator.Temp); - internal static void BakeGI() - { - if (HasAsyncBakeInProgress() || !PrepareBaking()) - return; + var lightingJob = lightingOverride ?? new DefaultLightTransport(); + lightingJob.Initialize(positionsInput); - s_AsyncBakeTaskID = Progress.Start("Bake Adaptive Probe Volumes"); - Progress.RegisterCancelCallback(s_AsyncBakeTaskID, () => + var defaultJob = lightingJob as DefaultLightTransport; + if (defaultJob != null) { - if (s_BakeData.bakingThread != null) - { - s_BakeData.cancel = true; - s_BakeData.bakingThread.Join(); - } - - CleanBakeData(); + var job = new BakeJob(); + job.Create(null, ProbeVolumeLightingTab.GetLightingSettings(), false); + job.probeCount = numProbes; - EditorApplication.update -= AsyncBakeCallback; - s_AsyncBakeTaskID = -1; - return true; - }); - - EditorApplication.update += AsyncBakeCallback; - } + defaultJob.jobs = new BakeJob[] { job }; + } - internal static void BakeProbes(Vector3[] positionValues, SphericalHarmonicsL2[] shValues, float[] validityValues) - { - int numProbes = positionValues.Length; + while (lightingJob.currentStep < lightingJob.stepCount) + lightingJob.Step(); - var job = new BakeJob(); - job.Create(null, ProbeVolumeLightingTab.GetLightingSettings(), false); + lightingJob.irradiance.CopyTo(shValues); + lightingJob.validity.CopyTo(validityValues); - for (int probeIndex = 0; probeIndex < numProbes; probeIndex++) + if (defaultJob != null) { - job.indices.Add(probeIndex); + foreach (var job in defaultJob.jobs) + job.Dispose(); } - - var positionsInput = new NativeArray(numProbes, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); - positionsInput.CopyFrom(positionValues); - - var irradianceResults = new NativeArray(numProbes, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); - var validityResults = new NativeArray(numProbes, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); - - InputExtraction.BakeInput input; - InputExtraction.ExtractFromScene(out input); - - var context = BakeContext.New(input, positionsInput); - bool cancel = false; - context.Bake(job, ref irradianceResults, ref validityResults, ref cancel); - job.Dispose(); - 
context.Dispose(); - - irradianceResults.CopyTo(shValues); - validityResults.CopyTo(validityValues); - + lightingJob.Dispose(); positionsInput.Dispose(); - irradianceResults.Dispose(); - validityResults.Dispose(); - } - - internal static void BakeSingleProbe(Vector3 position, out SphericalHarmonicsL2 sh, out float validity) - { - Vector3[] positionValues = new Vector3[1]; - positionValues[0] = position; - - SphericalHarmonicsL2[] shValues = new SphericalHarmonicsL2[1]; - float[] validityValues = new float[1]; - - BakeProbes(positionValues, shValues, validityValues); - - sh = shValues[0]; - validity = validityValues[0]; } internal static void BakeAdjustmentVolume(ProbeVolumeBakingSet bakingSet, ProbeAdjustmentVolume touchup) @@ -1003,7 +692,7 @@ internal static void BakeAdjustmentVolume(ProbeVolumeBakingSet bakingSet, ProbeA index = uniquePositions.Length; positionToIndex[probeHash] = index; m_BakingBatch.uniqueBrickSubdiv[probeHash] = subdivLevel; - job.indices.Add(index); + job.probeCount++; uniquePositions.Add(pos); } else @@ -1017,74 +706,76 @@ internal static void BakeAdjustmentVolume(ProbeVolumeBakingSet bakingSet, ProbeA if (uniquePositions.Length != 0) { + bool failed = false; + var jobs = new BakeJob[] { job }; + // Apply virtual offset - var virtualOffsetJob = new VirtualOffsetBaking(); - virtualOffsetJob.Initialize(bakingSet, uniquePositions); - if (virtualOffsetJob.offsets != null) + var virtualOffsetJob = virtualOffsetOverride ?? new DefaultVirtualOffset(); + virtualOffsetJob.Initialize(bakingSet, uniquePositions.AsArray()); + while (!failed && virtualOffsetJob.currentStep < virtualOffsetJob.stepCount) + failed |= !virtualOffsetJob.Step(); + if (!failed && virtualOffsetJob.offsets.IsCreated) { - while (virtualOffsetJob.currentStep < virtualOffsetJob.stepCount) - virtualOffsetJob.RunVirtualOffsetStep(); for (int i = 0; i < uniquePositions.Length; i++) uniquePositions[i] += virtualOffsetJob.offsets[i]; } // Bake sky occlusion - var skyOcclusionJob = new SkyOcclusionBaking(); - skyOcclusionJob.Initialize(bakingSet, new BakeJob[] { job }, uniquePositions.Length); - if (skyOcclusionJob.stepCount != 0) - { - skyOcclusionJob.StartBaking(uniquePositions.AsArray()); - while (skyOcclusionJob.currentStep < skyOcclusionJob.stepCount) - skyOcclusionJob.RunSkyOcclusionStep(); - } + var skyOcclusionJob = skyOcclusionOverride ?? new DefaultSkyOcclusion(); + skyOcclusionJob.Initialize(bakingSet, uniquePositions.AsArray()); + if (skyOcclusionJob is DefaultSkyOcclusion defaultSOJob) + defaultSOJob.jobs = jobs; + while (!failed && skyOcclusionJob.currentStep < skyOcclusionJob.stepCount) + failed |= !skyOcclusionJob.Step(); + if (!failed && skyOcclusionJob.shadingDirections.IsCreated) + skyOcclusionJob.Encode(); // Bake probe SH - var irradianceResults = new NativeArray(uniquePositions.Length, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); - var validityResults = new NativeArray(uniquePositions.Length, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); - { - bool cancel = false; - InputExtraction.BakeInput input; - InputExtraction.ExtractFromScene(out input); - var context = BakeContext.New(input, uniquePositions.AsArray()); - context.Bake(job, ref irradianceResults, ref validityResults, ref cancel); - context.Dispose(); - } + var lightingJob = lightingOverride ?? 
new DefaultLightTransport(); + lightingJob.Initialize(uniquePositions.AsArray()); + if (lightingJob is DefaultLightTransport defaultLightingJob) + defaultLightingJob.jobs = jobs; + while (!failed && lightingJob.currentStep < lightingJob.stepCount) + failed |= !lightingJob.Step(); // Upload new data in cells foreach ((int uniqueProbeIndex, int cellIndex, int i) in bakedProbes) { ref var cell = ref bakingCells[cellIndex]; cell.SetBakedData(m_BakingSet, m_BakingBatch, cellVolumes[cellIndex], i, uniqueProbeIndex, - irradianceResults[uniqueProbeIndex], validityResults[uniqueProbeIndex], - virtualOffsetJob.offsets, skyOcclusionJob.occlusionResults, skyOcclusionJob.directionResults); + lightingJob.irradiance[uniqueProbeIndex], lightingJob.validity[uniqueProbeIndex], + virtualOffsetJob.offsets, skyOcclusionJob.occlusion, skyOcclusionJob.encodedDirections); } + skyOcclusionJob.encodedDirections.Dispose(); virtualOffsetJob.Dispose(); skyOcclusionJob.Dispose(); - irradianceResults.Dispose(); - validityResults.Dispose(); + lightingJob.Dispose(); - for (int c = 0; c < bakingCells.Length; c++) + if (!failed) { - ref var cell = ref bakingCells[c]; - ComputeValidityMasks(cell); - } + for (int c = 0; c < bakingCells.Length; c++) + { + ref var cell = ref bakingCells[c]; + ComputeValidityMasks(cell); + } - // Write result to disk - WriteBakingCells(bakingCells); + // Write result to disk + WriteBakingCells(bakingCells); - // Reload everything - AssetDatabase.SaveAssets(); - AssetDatabase.Refresh(); + // Reload everything + AssetDatabase.SaveAssets(); + AssetDatabase.Refresh(); - if (m_BakingSet.hasDilation) - { - // Force reloading of data - foreach (var data in prv.perSceneDataList) - data.Initialize(); + if (m_BakingSet.hasDilation) + { + // Force reloading of data + foreach (var data in prv.perSceneDataList) + data.Initialize(); - InitDilationShaders(); - PerformDilation(); + InitDilationShaders(); + PerformDilation(); + } } } @@ -1107,7 +798,7 @@ internal static void BakeAdjustmentVolume(ProbeVolumeBakingSet bakingSet, ProbeA } if (ProbeVolumeLightingTab.instance == null) - ProbeGIBaking.Dispose(); + AdaptiveProbeVolumes.Dispose(); } } } diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Placement.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Placement.cs new file mode 100644 index 00000000000..798cd76298d --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Placement.cs @@ -0,0 +1,375 @@ +using System.Linq; +using System.Collections.Generic; +using Unity.Collections; +using UnityEngine.SceneManagement; +using UnityEditor; + +using Brick = UnityEngine.Rendering.ProbeBrickIndex.Brick; + +namespace UnityEngine.Rendering +{ + class ProbeVolumeProfileInfo + { + public int simplificationLevels; + public float minDistanceBetweenProbes; + public Vector3 probeOffset; + + public int maxSubdivision => ProbeVolumeBakingSet.GetMaxSubdivision(simplificationLevels); + public float minBrickSize => ProbeVolumeBakingSet.GetMinBrickSize(minDistanceBetweenProbes); + public int cellSizeInBricks => ProbeVolumeBakingSet.GetCellSizeInBricks(simplificationLevels); + public float cellSizeInMeters => (float)cellSizeInBricks * minBrickSize; + + public Vector3Int PositionToCell(Vector3 position) => Vector3Int.FloorToInt((position - probeOffset) / cellSizeInMeters); + } + + public partial class AdaptiveProbeVolumes + { + static internal ProbeVolumeProfileInfo m_ProfileInfo = null; + + static void 
FindWorldBounds() + { + var prv = ProbeReferenceVolume.instance; + prv.clearAssetsOnVolumeClear = true; + + var activeScene = SceneManager.GetActiveScene(); + var activeSet = ProbeVolumeBakingSet.GetBakingSetForScene(activeScene); + + bool hasFoundBounds = false; + + foreach (var sceneGUID in activeSet.sceneGUIDs) + { + var bakeData = activeSet.GetSceneBakeData(sceneGUID); + if (bakeData.hasProbeVolume) + { + if (hasFoundBounds) + { + globalBounds.Encapsulate(bakeData.bounds); + } + else + { + globalBounds = bakeData.bounds; + hasFoundBounds = true; + } + } + } + + ProbeReferenceVolume.instance.globalBounds = globalBounds; + } + + static List GetPerSceneDataList() + { + var fullPerSceneDataList = ProbeReferenceVolume.instance.perSceneDataList; + if (!isBakingSceneSubset) + return fullPerSceneDataList; + + List usedPerSceneDataList = new (); + foreach (var sceneData in fullPerSceneDataList) + { + if (partialBakeSceneList.Contains(ProbeReferenceVolume.GetSceneGUID(sceneData.gameObject.scene))) + usedPerSceneDataList.Add(sceneData); + } + return usedPerSceneDataList; + } + + internal static List GetProbeVolumeList() + { + var fullPvList = GameObject.FindObjectsByType(FindObjectsSortMode.InstanceID); + List usedPVList; + + if (isBakingSceneSubset) + { + usedPVList = new List(); + foreach (var pv in fullPvList) + { + if (pv.isActiveAndEnabled && partialBakeSceneList.Contains(ProbeReferenceVolume.GetSceneGUID(pv.gameObject.scene))) + usedPVList.Add(pv); + } + } + else + { + usedPVList = new List(fullPvList); + } + + return usedPVList; + } + + static ProbeVolumeProfileInfo GetProfileInfoFromBakingSet(ProbeVolumeBakingSet set) + { + var result = new ProbeVolumeProfileInfo(); + result.minDistanceBetweenProbes = set.minDistanceBetweenProbes; + result.simplificationLevels = set.simplificationLevels; + result.probeOffset = set.probeOffset; + return result; + } + + static int PosToIndex(Vector3Int pos) + { + Vector3Int normalizedPos = pos - minCellPosition; + return normalizedPos.z * (cellCount.x * cellCount.y) + normalizedPos.y * cellCount.x + normalizedPos.x; + } + + static internal bool CanFreezePlacement() + { + if (!ProbeReferenceVolume.instance.supportLightingScenarios) + return false; + + // Check if all the scene datas in the scene have a baking set, if not then we cannot enable this option. + var sceneDataList = GetPerSceneDataList(); + if (sceneDataList.Count == 0) + return false; + + foreach (var sceneData in sceneDataList) + { + if (sceneData.bakingSet == null || sceneData.bakingSet.GetSceneCellIndexList(sceneData.sceneGUID) == null) + return false; + } + + return true; + } + + static NativeList RunPlacement() + { + // Overwrite loaded settings with data from profile. Note that the m_BakingSet.profile is already patched up if isFreezingPlacement + float prevBrickSize = ProbeReferenceVolume.instance.MinBrickSize(); + int prevMaxSubdiv = ProbeReferenceVolume.instance.GetMaxSubdivision(); + Vector3 prevOffset = ProbeReferenceVolume.instance.ProbeOffset(); + ProbeReferenceVolume.instance.SetSubdivisionDimensions(m_ProfileInfo.minBrickSize, m_ProfileInfo.maxSubdivision, m_ProfileInfo.probeOffset); + + // All probes need to be baked only once for the whole batch and not once per cell + // The reason is that the baker is not deterministic so the same probe position baked in two different cells may have different values causing seams artefacts. 
+ m_BakingBatch = new BakingBatch(cellCount); + + // Run subdivision + ProbeSubdivisionResult result; + using (new BakingSetupProfiling(BakingSetupProfiling.Stages.BakeBricks)) + result = GetWorldSubdivision(); + + // Compute probe positions + NativeList positions; + using (new BakingSetupProfiling(BakingSetupProfiling.Stages.ApplySubdivisionResults)) + positions = ApplySubdivisionResults(result); + + // Restore loaded asset settings + ProbeReferenceVolume.instance.SetSubdivisionDimensions(prevBrickSize, prevMaxSubdiv, prevOffset); + + return positions; + } + + static ProbeSubdivisionResult GetWorldSubdivision() + { + if (isFreezingPlacement) + return GetBricksFromLoaded(); + + var ctx = PrepareProbeSubdivisionContext(); + return BakeBricks(ctx, m_BakingBatch.contributors); + } + + static NativeList ApplySubdivisionResults(ProbeSubdivisionResult results) + { + int cellIdx = 0, freq = 10; // Don't refresh progress bar at every iteration because it's slow + BakingSetupProfiling.GetProgressRange(out float progress0, out float progress1); + + var positions = new NativeList(Allocator.Persistent); + Dictionary positionToIndex = new(); + foreach ((var position, var bounds, var bricks) in results.cells) + { + if (++cellIdx % freq == 0) + EditorUtility.DisplayProgressBar("Baking Probe Volumes", $"Subdividing cell {cellIdx} out of {results.cells.Count}", Mathf.Lerp(progress0, progress1, cellIdx / (float)results.cells.Count)); + + int positionStart = positions.Length; + + ConvertBricksToPositions(bricks, out var probePositions, out var brickSubdivLevels); + DeduplicateProbePositions(in probePositions, in brickSubdivLevels, positionToIndex, m_BakingBatch, positions, out var probeIndices); + + BakingCell cell = new BakingCell() + { + index = PosToIndex(position), + position = position, + bounds = bounds, + bricks = bricks, + probePositions = probePositions, + probeIndices = probeIndices, + }; + + m_BakingBatch.cells.Add(cell); + m_BakingBatch.cellIndex2SceneReferences[cell.index] = new HashSet(results.scenesPerCells[cell.position]); + } + + return positions; + } + + private static void DeduplicateProbePositions(in Vector3[] probePositions, in int[] brickSubdivLevel, Dictionary positionToIndex, BakingBatch batch, + NativeList uniquePositions, out int[] indices) + { + indices = new int[probePositions.Length]; + int uniqueIndex = positionToIndex.Count; + + for (int i = 0; i < probePositions.Length; i++) + { + var pos = probePositions[i]; + var brickSubdiv = brickSubdivLevel[i]; + int probeHash = batch.GetProbePositionHash(pos); + + if (positionToIndex.TryGetValue(probeHash, out var index)) + { + indices[i] = index; + int oldBrickLevel = batch.uniqueBrickSubdiv[probeHash]; + if (brickSubdiv < oldBrickLevel) + batch.uniqueBrickSubdiv[probeHash] = brickSubdiv; + } + else + { + positionToIndex[probeHash] = uniqueIndex; + indices[i] = uniqueIndex; + batch.uniqueBrickSubdiv[probeHash] = brickSubdiv; + uniquePositions.Add(pos); + uniqueIndex++; + } + } + } + + static ProbeSubdivisionResult GetBricksFromLoaded() + { + var dataList = GetPerSceneDataList(); + var result = new ProbeSubdivisionResult(); + + foreach (var data in dataList) + { + var cellSize = m_ProfileInfo.minDistanceBetweenProbes * 3.0f * m_ProfileInfo.cellSizeInBricks; + Vector3 cellDimensions = new Vector3(cellSize, cellSize, cellSize); + + // Loop through cells in asset, we need to be careful as there'll be duplicates. + // As we go through the cells we fill ProbeSubdivisionResult as we go. 
+ var cells = m_BakingSet.GetSceneCellIndexList(data.sceneGUID); + foreach (var cellIndex in cells) + { + var cellDesc = m_BakingSet.GetCellDesc(cellIndex); + var cellData = m_BakingSet.GetCellData(cellIndex); + var cellPos = cellDesc.position; + + if (!result.scenesPerCells.ContainsKey(cellPos)) + { + result.scenesPerCells[cellPos] = new HashSet(); + + var center = new Vector3((cellPos.x + 0.5f) * cellSize, (cellPos.y + 0.5f) * cellSize, (cellPos.z + 0.5f) * cellSize); + result.cells.Add((cellPos, new Bounds(center, cellDimensions), cellData.bricks.ToArray())); + } + result.scenesPerCells[cellPos].Add(data.sceneGUID); + } + } + + return result; + } + + static internal ProbeSubdivisionContext PrepareProbeSubdivisionContext(bool liveContext = false) + { + ProbeSubdivisionContext ctx = new ProbeSubdivisionContext(); + + // Prepare all the information in the scene for baking GI. + Vector3 refVolOrigin = Vector3.zero; // TODO: This will need to be center of the world bounds. + var perSceneDataList = GetPerSceneDataList(); + + if (m_BakingSet == null) + { + if (perSceneDataList.Count == 0) return ctx; + SetBakingContext(perSceneDataList); + } + + var profileInfo = m_ProfileInfo; + if (liveContext || m_ProfileInfo == null) + profileInfo = GetProfileInfoFromBakingSet(m_BakingSet); + + ctx.Initialize(m_BakingSet, profileInfo, refVolOrigin); + return ctx; + } + + static internal ProbeSubdivisionResult BakeBricks(ProbeSubdivisionContext ctx, in GIContributors contributors) + { + var result = new ProbeSubdivisionResult(); + + if (ctx.probeVolumes.Count == 0) + return result; + + using (var gpuResources = ProbePlacement.AllocateGPUResources(ctx.probeVolumes.Count, ctx.profile)) + { + // subdivide all the cells and generate brick positions + foreach (var cell in ctx.cells) + { + var scenesInCell = new HashSet(); + + // Calculate overlaping probe volumes to avoid unnecessary work + var overlappingProbeVolumes = new List<(ProbeVolume component, ProbeReferenceVolume.Volume volume, Bounds bounds)>(); + foreach (var probeVolume in ctx.probeVolumes) + { + if (ProbeVolumePositioning.OBBAABBIntersect(probeVolume.volume, cell.bounds, probeVolume.bounds)) + { + overlappingProbeVolumes.Add(probeVolume); + scenesInCell.Add(ProbeReferenceVolume.GetSceneGUID(probeVolume.component.gameObject.scene)); + } + } + + // Calculate valid renderers to avoid unnecessary work (a renderer needs to overlap a probe volume and match the layer) + var filteredContributors = contributors.Filter(ctx.bakingSet, cell.bounds, overlappingProbeVolumes); + + if (filteredContributors.Count == 0 && !overlappingProbeVolumes.Any(v => v.component.fillEmptySpaces)) + continue; + + var bricks = ProbePlacement.SubdivideCell(cell.bounds, ctx, gpuResources, filteredContributors, overlappingProbeVolumes); + if (bricks.Length == 0) + continue; + + foreach (var renderer in filteredContributors.renderers) + scenesInCell.Add(ProbeReferenceVolume.GetSceneGUID(renderer.component.gameObject.scene)); + foreach (var terrain in filteredContributors.terrains) + scenesInCell.Add(ProbeReferenceVolume.GetSceneGUID(terrain.component.gameObject.scene)); + + result.cells.Add((cell.position, cell.bounds, bricks)); + result.scenesPerCells[cell.position] = scenesInCell; + } + } + + return result; + } + + static void ModifyProfileFromLoadedData(ProbeVolumeBakingSet bakingSet) + { + m_ProfileInfo.simplificationLevels = bakingSet.bakedSimplificationLevels; + m_ProfileInfo.minDistanceBetweenProbes = bakingSet.bakedMinDistanceBetweenProbes; + m_ProfileInfo.probeOffset = 
bakingSet.bakedProbeOffset; + globalBounds = bakingSet.globalBounds; + } + + // Converts brick information into positional data at kBrickProbeCountPerDim * kBrickProbeCountPerDim * kBrickProbeCountPerDim resolution + internal static void ConvertBricksToPositions(Brick[] bricks, out Vector3[] outProbePositions, out int[] outBrickSubdiv) + { + int posIdx = 0; + float scale = ProbeReferenceVolume.instance.MinBrickSize() / ProbeBrickPool.kBrickCellCount; + Vector3 offset = ProbeReferenceVolume.instance.ProbeOffset(); + + outProbePositions = new Vector3[bricks.Length * ProbeBrickPool.kBrickProbeCountTotal]; + outBrickSubdiv = new int[bricks.Length * ProbeBrickPool.kBrickProbeCountTotal]; + + foreach (var b in bricks) + { + int brickSize = ProbeReferenceVolume.CellSize(b.subdivisionLevel); + Vector3Int brickOffset = b.position * ProbeBrickPool.kBrickCellCount; + + for (int z = 0; z < ProbeBrickPool.kBrickProbeCountPerDim; z++) + { + for (int y = 0; y < ProbeBrickPool.kBrickProbeCountPerDim; y++) + { + for (int x = 0; x < ProbeBrickPool.kBrickProbeCountPerDim; x++) + { + var probeOffset = brickOffset + new Vector3Int(x, y, z) * brickSize; + + outProbePositions[posIdx] = offset + (Vector3)probeOffset * scale; + outBrickSubdiv[posIdx] = b.subdivisionLevel; + + posIdx++; + } + } + } + } + } + } +} diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Placement.cs.meta b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Placement.cs.meta new file mode 100644 index 00000000000..4efaa438eda --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Placement.cs.meta @@ -0,0 +1,2 @@ +fileFormatVersion: 2 +guid: b7ab09a96da079e40ba7c2dd4cb4b8c3 \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs new file mode 100644 index 00000000000..b593c56ddad --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs @@ -0,0 +1,1091 @@ +using System; +using System.Collections.Generic; +using Unity.Collections; +using Unity.Collections.LowLevel.Unsafe; +using UnityEditor; + +using Brick = UnityEngine.Rendering.ProbeBrickIndex.Brick; +using Cell = UnityEngine.Rendering.ProbeReferenceVolume.Cell; +using CellDesc = UnityEngine.Rendering.ProbeReferenceVolume.CellDesc; +using CellData = UnityEngine.Rendering.ProbeReferenceVolume.CellData; +using IndirectionEntryInfo = UnityEngine.Rendering.ProbeReferenceVolume.IndirectionEntryInfo; +using StreamableCellDesc = UnityEngine.Rendering.ProbeVolumeStreamableAsset.StreamableCellDesc; + +namespace UnityEngine.Rendering +{ + public partial class AdaptiveProbeVolumes + { + struct CellCounts + { + public int bricksCount; + public int chunksCount; + + public void Add(CellCounts o) + { + bricksCount += o.bricksCount; + chunksCount += o.chunksCount; + } + } + + struct CellChunkData + { + public bool scenarioValid; + + public NativeArray shL0L1RxData; + public NativeArray shL1GL1RyData; + public NativeArray shL1BL1RzData; + + // Optional L2 Data + public NativeArray shL2Data_0; + public NativeArray shL2Data_1; + public NativeArray shL2Data_2; + public NativeArray shL2Data_3; + + public NativeArray validityNeighMaskData; + public NativeArray skyOcclusionDataL0L1; + public NativeArray skyShadingDirectionIndices; + } + + 
internal const string kAPVStreamingAssetsPath = "APVStreamingAssets"; + + static CellCounts m_TotalCellCounts; + + static CellChunkData GetCellChunkData(CellData cellData, int chunkIndex) + { + var result = new CellChunkData(); + + int chunkSizeInProbes = ProbeBrickPool.GetChunkSizeInProbeCount(); + int chunkOffset = chunkSizeInProbes * chunkIndex; + + if (m_BakingSet != null) + { + result.scenarioValid = cellData.scenarios.TryGetValue(m_BakingSet.lightingScenario, out var scenarioData); + + if (result.scenarioValid) + { + result.shL0L1RxData = scenarioData.shL0L1RxData.GetSubArray(chunkOffset * 4, chunkSizeInProbes * 4); + result.shL1GL1RyData = scenarioData.shL1GL1RyData.GetSubArray(chunkOffset * 4, chunkSizeInProbes * 4); + result.shL1BL1RzData = scenarioData.shL1BL1RzData.GetSubArray(chunkOffset * 4, chunkSizeInProbes * 4); + + if (scenarioData.shL2Data_0.Length > 0) // we might have no L2 if we are not during baking but during touchup interaction + { + result.shL2Data_0 = scenarioData.shL2Data_0.GetSubArray(chunkOffset * 4, chunkSizeInProbes * 4); + result.shL2Data_1 = scenarioData.shL2Data_1.GetSubArray(chunkOffset * 4, chunkSizeInProbes * 4); + result.shL2Data_2 = scenarioData.shL2Data_2.GetSubArray(chunkOffset * 4, chunkSizeInProbes * 4); + result.shL2Data_3 = scenarioData.shL2Data_3.GetSubArray(chunkOffset * 4, chunkSizeInProbes * 4); + } + } + } + + if (cellData.skyOcclusionDataL0L1.Length > 0) + { + result.skyOcclusionDataL0L1 = cellData.skyOcclusionDataL0L1.GetSubArray(chunkOffset * 4, chunkSizeInProbes * 4); + if (cellData.skyShadingDirectionIndices.Length > 0) + { + result.skyShadingDirectionIndices = cellData.skyShadingDirectionIndices.GetSubArray(chunkOffset, chunkSizeInProbes); + } + } + + result.validityNeighMaskData = cellData.validityNeighMaskData.GetSubArray(chunkOffset, chunkSizeInProbes); + + return result; + } + + static Dictionary RemapBakedCells(bool isBakingSubset) + { + // When baking a baking set. It is possible that cells layout has changed (min and max position of cells in the set). + // If this is the case then the cell index for a given position will change. + // Because of this, when doing partial bakes, we need to generate a remapping table of the old cells to the new layout in order to be able to update existing data. + Dictionary oldToNewCellRemapping = new Dictionary(); + + if (isBakingSubset) + { + // Layout has changed but is still compatible. Remap all cells that are not part of the bake. + if (minCellPosition != m_BakingSet.minCellPosition || maxCellPosition != m_BakingSet.maxCellPosition) + { + var alreadyBakedCells = m_BakingSet.cellDescs; + var newCells = new SerializedDictionary(); + + // Generate remapping for all cells baked the last time. + foreach (var cellKvP in alreadyBakedCells) + { + var cell = cellKvP.Value; + int oldIndex = cell.index; + int remappedIndex = PosToIndex(cell.position); + oldToNewCellRemapping.Add(oldIndex, remappedIndex); + + cell.index = remappedIndex; + newCells.Add(oldIndex, cell); + } + } + } + + return oldToNewCellRemapping; + } + + static void GenerateScenesCellLists(List bakedSceneDataList, Dictionary cellRemapTable) + { + bool needRemap = cellRemapTable.Count != 0; + + // Build lists of scene GUIDs and assign baking set to the PerSceneData. 
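+ // Each ProbeVolumePerSceneData is pointed at m_BakingSet (and marked dirty so the assignment is saved), + // ensuring every baked scene resolves its cells against the set currently being baked.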
+ var bakedSceneGUIDList = new List(); + foreach (var data in bakedSceneDataList) + { + Debug.Assert(ProbeVolumeBakingSet.SceneHasProbeVolumes(data.sceneGUID)); + bakedSceneGUIDList.Add(data.sceneGUID); + + if (m_BakingSet != data.bakingSet) + { + data.bakingSet = m_BakingSet; + EditorUtility.SetDirty(data); + } + } + + var currentPerSceneCellList = m_BakingSet.perSceneCellLists; // Cell lists from last baking. + m_BakingSet.perSceneCellLists = new SerializedDictionary>(); + + // Partial baking: Copy over scene cell lists for scenes not being baked. + // Layout change: Remap indices. + foreach (var scene in currentPerSceneCellList) + { + // Scene is not baked. Remap if needed or add it back to the baking set. + if (!bakedSceneGUIDList.Contains(scene.Key)) + { + if (needRemap) + { + var newCellList = new List(); + foreach (var cell in scene.Value) + newCellList.Add(cellRemapTable[cell]); + + m_BakingSet.perSceneCellLists.Add(scene.Key, newCellList); + } + else + { + m_BakingSet.perSceneCellLists.Add(scene.Key, scene.Value); + } + } + } + + // Allocate baked cells to the relevant scenes cell list. + foreach (var cell in m_BakedCells.Values) + { + foreach (var scene in m_BakingBatch.cellIndex2SceneReferences[cell.index]) + { + // This scene has a probe volume in it? + if (bakedSceneGUIDList.Contains(scene)) + { + List indexList; + if (!m_BakingSet.perSceneCellLists.TryGetValue(scene, out indexList)) + { + indexList = new List(); + m_BakingSet.perSceneCellLists.Add(scene, indexList); + } + + indexList.Add(cell.index); + } + } + } + + EditorUtility.SetDirty(m_BakingSet); + } + + static void PrepareCellsForWriting(bool isBakingSubset) + { + // Remap if needed existing Cell descriptors in the baking set. + var cellRemapTable = RemapBakedCells(isBakingSubset); + + // Generate list of cells for all cells being baked and remap untouched existing scenes if needed. + GenerateScenesCellLists(GetPerSceneDataList(), cellRemapTable); + + if (isBakingSubset) + { + // Resolve all unloaded scene cells in CPU memory. This will allow us to extract them into BakingCells in order to have the full list for writing. + // Other cells should already be in the baked cells list. + var loadedSceneDataList = ProbeReferenceVolume.instance.perSceneDataList; + foreach(var sceneGUID in m_BakingSet.sceneGUIDs) + { + // If a scene was baked + if (m_BakingSet.perSceneCellLists.TryGetValue(sceneGUID, out var cellList)) + { + // And the scene is not loaded + if (!loadedSceneDataList.Exists((x) => x.sceneGUID == sceneGUID) && cellList.Count != 0) + { + // Resolve its data in CPU memory. + bool resolved = m_BakingSet.ResolveCellData(cellList); + Debug.Assert(resolved, "Could not resolve unloaded scene data"); + } + } + } + + // Extract all cells that weren't baked into baking cells. + // Merge existing data of cells belonging both to the baking scene list and to scenes not being baked (prevents losing placement data for those). 
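+ // (Resolving first matters: WriteBakingCells serializes the full baking set in one pass, so cells that + // only live in unloaded scenes must be brought back to CPU memory or their data would be lost.)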
+ // This way we have a full cell list to provide to WriteBakingCells + ExtractBakingCells(); + } + } + + static void FinalizeCell(int c, NativeArray positionRemap, NativeArray sh, NativeArray validity, NativeArray virtualOffsets, NativeArray skyOcclusion, NativeArray skyDirection) + { + if (c == 0) + { + m_BakedCells.Clear(); + m_CellPosToIndex.Clear(); + m_CellsToDilate.Clear(); + } + + bool hasVirtualOffset = virtualOffsets.IsCreated; + bool hasSkyOcclusion = skyOcclusion.IsCreated; + bool hasSkyDirection = skyDirection.IsCreated; + + var cell = m_BakingBatch.cells[c]; + int numProbes = cell.probePositions.Length; + Debug.Assert(numProbes > 0); + + var probeRefVolume = ProbeReferenceVolume.instance; + var localTouchupVolumes = cell.SelectIntersectingAdjustmentVolumes(s_AdjustmentVolumes); + + cell.sh = new SphericalHarmonicsL2[numProbes]; + cell.validity = new float[numProbes]; + cell.validityNeighbourMask = new byte[numProbes]; + cell.skyOcclusionDataL0L1 = new Vector4[hasSkyOcclusion ? numProbes : 0]; + cell.skyShadingDirectionIndices = new byte[hasSkyDirection ? numProbes : 0]; + cell.offsetVectors = new Vector3[hasVirtualOffset ? numProbes : 0]; + cell.touchupVolumeInteraction = new float[numProbes]; + cell.minSubdiv = probeRefVolume.GetMaxSubdivision(); + cell.shChunkCount = ProbeBrickPool.GetChunkCount(cell.bricks.Length); + + for (int i = 0; i < numProbes; ++i) + { + int brickIdx = i / 64; + int subdivLevel = cell.bricks[brickIdx].subdivisionLevel; + cell.minSubdiv = Mathf.Min(cell.minSubdiv, subdivLevel); + + int uniqueProbeIndex = positionRemap[cell.probeIndices[i]]; + cell.SetBakedData(m_BakingSet, m_BakingBatch, localTouchupVolumes, i, uniqueProbeIndex, + sh[uniqueProbeIndex], validity[uniqueProbeIndex], virtualOffsets, skyOcclusion, skyDirection); + } + + ComputeValidityMasks(cell); + + m_BakedCells[cell.index] = cell; + m_CellsToDilate[cell.index] = cell; + m_CellPosToIndex.Add(cell.position, cell.index); + } + + static void AnalyzeBrickForIndirectionEntries(ref BakingCell cell) + { + var prv = ProbeReferenceVolume.instance; + int cellSizeInBricks = m_ProfileInfo.cellSizeInBricks; + int entrySubdivLevel = Mathf.Min(m_ProfileInfo.simplificationLevels, prv.GetGlobalIndirectionEntryMaxSubdiv()); + int indirectionEntrySizeInBricks = ProbeReferenceVolume.CellSize(entrySubdivLevel); + int numOfIndirectionEntriesPerCellDim = cellSizeInBricks / indirectionEntrySizeInBricks; + + int numOfEntries = numOfIndirectionEntriesPerCellDim * numOfIndirectionEntriesPerCellDim * numOfIndirectionEntriesPerCellDim; + cell.indirectionEntryInfo = new IndirectionEntryInfo[numOfEntries]; + + // This is fairly naive now, if we need optimization this is the place to be. 
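+ // (Naive meaning the nested loops below test every brick of the cell against every indirection entry, + // i.e. O(entries x bricks); sorting or spatially hashing the bricks would be one possible optimization.)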
+ + Vector3Int cellPosInEntries = cell.position * numOfIndirectionEntriesPerCellDim; + Vector3Int cellPosInBricks = cell.position * cellSizeInBricks; + + int totalIndexChunks = 0; + int i = 0; + for (int x = 0; x < numOfIndirectionEntriesPerCellDim; ++x) + { + for (int y = 0; y < numOfIndirectionEntriesPerCellDim; ++y) + { + for (int z = 0; z < numOfIndirectionEntriesPerCellDim; ++z) + { + Vector3Int entryPositionInBricks = cellPosInBricks + new Vector3Int(x, y, z) * indirectionEntrySizeInBricks; + Bounds entryBoundsInBricks = new Bounds(); + entryBoundsInBricks.min = entryPositionInBricks; + entryBoundsInBricks.max = entryPositionInBricks + new Vector3Int(indirectionEntrySizeInBricks, indirectionEntrySizeInBricks, indirectionEntrySizeInBricks); + + int minSubdiv = m_ProfileInfo.maxSubdivision; + bool touchedBrick = false; + foreach (Brick b in cell.bricks) + { + if (b.subdivisionLevel < minSubdiv) + { + if (b.IntersectArea(entryBoundsInBricks)) + { + touchedBrick = true; + minSubdiv = b.subdivisionLevel; + if (minSubdiv == 0) break; + } + } + } + + cell.indirectionEntryInfo[i].minSubdiv = minSubdiv; + cell.indirectionEntryInfo[i].positionInBricks = cellPosInBricks + new Vector3Int(x, y, z) * indirectionEntrySizeInBricks; + cell.indirectionEntryInfo[i].hasOnlyBiggerBricks = minSubdiv > entrySubdivLevel && touchedBrick; + + ProbeBrickIndex.IndirectionEntryUpdateInfo unused = new ProbeBrickIndex.IndirectionEntryUpdateInfo(); + int brickCount = ProbeReferenceVolume.instance.GetNumberOfBricksAtSubdiv(cell.indirectionEntryInfo[i], ref unused); + + totalIndexChunks += Mathf.CeilToInt((float)brickCount / ProbeBrickIndex.kIndexChunkSize); + + i++; + } + } + } + + // Chunk count. + cell.indexChunkCount = totalIndexChunks; + } + + // Mathf.HalfToFloat(Mathf.FloatToHalf(float.MaxValue)) returns +inf, so clamp manually to avoid that + static float s_MaxSHValue = 65504; // IEEE max half + + static ushort SHFloatToHalf(float value) + { + return Mathf.FloatToHalf(Mathf.Min(value, s_MaxSHValue)); + } + + static float SHHalfToFloat(ushort value) + { + return Mathf.HalfToFloat(value); + } + + static byte SHFloatToByte(float value) + { + return (byte)(Mathf.Clamp(value, 0.0f, 1.0f) * 255.0f); + } + + static float SHByteToFloat(byte value) + { + return value / 255.0f; + } + + static void WriteToShaderCoeffsL0L1(in SphericalHarmonicsL2 sh, NativeArray shaderCoeffsL0L1Rx, NativeArray shaderCoeffsL1GL1Ry, NativeArray shaderCoeffsL1BL1Rz, int offset) + { + shaderCoeffsL0L1Rx[offset + 0] = SHFloatToHalf(sh[0, 0]); shaderCoeffsL0L1Rx[offset + 1] = SHFloatToHalf(sh[1, 0]); shaderCoeffsL0L1Rx[offset + 2] = SHFloatToHalf(sh[2, 0]); shaderCoeffsL0L1Rx[offset + 3] = SHFloatToHalf(sh[0, 1]); + shaderCoeffsL1GL1Ry[offset + 0] = SHFloatToByte(sh[1, 1]); shaderCoeffsL1GL1Ry[offset + 1] = SHFloatToByte(sh[1, 2]); shaderCoeffsL1GL1Ry[offset + 2] = SHFloatToByte(sh[1, 3]); shaderCoeffsL1GL1Ry[offset + 3] = SHFloatToByte(sh[0, 2]); + shaderCoeffsL1BL1Rz[offset + 0] = SHFloatToByte(sh[2, 1]); shaderCoeffsL1BL1Rz[offset + 1] = SHFloatToByte(sh[2, 2]); shaderCoeffsL1BL1Rz[offset + 2] = SHFloatToByte(sh[2, 3]); shaderCoeffsL1BL1Rz[offset + 3] = SHFloatToByte(sh[0, 3]); + } + + static void WriteToShaderCoeffsL2(in SphericalHarmonicsL2 sh, NativeArray shaderCoeffsL2_0, NativeArray shaderCoeffsL2_1, NativeArray shaderCoeffsL2_2, NativeArray shaderCoeffsL2_3, int offset) + { + shaderCoeffsL2_0[offset + 0] = SHFloatToByte(sh[0, 4]); shaderCoeffsL2_0[offset + 1] = SHFloatToByte(sh[0, 5]); shaderCoeffsL2_0[offset + 2] = 
SHFloatToByte(sh[0, 6]); shaderCoeffsL2_0[offset + 3] = SHFloatToByte(sh[0, 7]); + shaderCoeffsL2_1[offset + 0] = SHFloatToByte(sh[1, 4]); shaderCoeffsL2_1[offset + 1] = SHFloatToByte(sh[1, 5]); shaderCoeffsL2_1[offset + 2] = SHFloatToByte(sh[1, 6]); shaderCoeffsL2_1[offset + 3] = SHFloatToByte(sh[1, 7]); + shaderCoeffsL2_2[offset + 0] = SHFloatToByte(sh[2, 4]); shaderCoeffsL2_2[offset + 1] = SHFloatToByte(sh[2, 5]); shaderCoeffsL2_2[offset + 2] = SHFloatToByte(sh[2, 6]); shaderCoeffsL2_2[offset + 3] = SHFloatToByte(sh[2, 7]); + shaderCoeffsL2_3[offset + 0] = SHFloatToByte(sh[0, 8]); shaderCoeffsL2_3[offset + 1] = SHFloatToByte(sh[1, 8]); shaderCoeffsL2_3[offset + 2] = SHFloatToByte(sh[2, 8]); + } + + static void ReadFromShaderCoeffsL0L1(ref SphericalHarmonicsL2 sh, NativeArray shaderCoeffsL0L1Rx, NativeArray shaderCoeffsL1GL1Ry, NativeArray shaderCoeffsL1BL1Rz, int offset) + { + sh[0, 0] = SHHalfToFloat(shaderCoeffsL0L1Rx[offset + 0]); sh[1, 0] = SHHalfToFloat(shaderCoeffsL0L1Rx[offset + 1]); sh[2, 0] = SHHalfToFloat(shaderCoeffsL0L1Rx[offset + 2]); sh[0, 1] = SHHalfToFloat(shaderCoeffsL0L1Rx[offset + 3]); + sh[1, 1] = SHByteToFloat(shaderCoeffsL1GL1Ry[offset + 0]); sh[1, 2] = SHByteToFloat(shaderCoeffsL1GL1Ry[offset + 1]); sh[1, 3] = SHByteToFloat(shaderCoeffsL1GL1Ry[offset + 2]); sh[0, 2] = SHByteToFloat(shaderCoeffsL1GL1Ry[offset + 3]); + sh[2, 1] = SHByteToFloat(shaderCoeffsL1BL1Rz[offset + 0]); sh[2, 2] = SHByteToFloat(shaderCoeffsL1BL1Rz[offset + 1]); sh[2, 3] = SHByteToFloat(shaderCoeffsL1BL1Rz[offset + 2]); sh[0, 3] = SHByteToFloat(shaderCoeffsL1BL1Rz[offset + 3]); + } + + static void ReadFromShaderCoeffsL2(ref SphericalHarmonicsL2 sh, NativeArray shaderCoeffsL2_0, NativeArray shaderCoeffsL2_1, NativeArray shaderCoeffsL2_2, NativeArray shaderCoeffsL2_3, int offset) + { + sh[0, 4] = SHByteToFloat(shaderCoeffsL2_0[offset + 0]); sh[0, 5] = SHByteToFloat(shaderCoeffsL2_0[offset + 1]); sh[0, 6] = SHByteToFloat(shaderCoeffsL2_0[offset + 2]); sh[0, 7] = SHByteToFloat(shaderCoeffsL2_0[offset + 3]); + sh[1, 4] = SHByteToFloat(shaderCoeffsL2_1[offset + 0]); sh[1, 5] = SHByteToFloat(shaderCoeffsL2_1[offset + 1]); sh[1, 6] = SHByteToFloat(shaderCoeffsL2_1[offset + 2]); sh[1, 7] = SHByteToFloat(shaderCoeffsL2_1[offset + 3]); + sh[2, 4] = SHByteToFloat(shaderCoeffsL2_2[offset + 0]); sh[2, 5] = SHByteToFloat(shaderCoeffsL2_2[offset + 1]); sh[2, 6] = SHByteToFloat(shaderCoeffsL2_2[offset + 2]); sh[2, 7] = SHByteToFloat(shaderCoeffsL2_2[offset + 3]); + sh[0, 8] = SHByteToFloat(shaderCoeffsL2_3[offset + 0]); sh[1, 8] = SHByteToFloat(shaderCoeffsL2_3[offset + 1]); sh[2, 8] = SHByteToFloat(shaderCoeffsL2_3[offset + 2]); + } + + static void ReadFullFromShaderCoeffsL0L1L2(ref SphericalHarmonicsL2 sh, + NativeArray shaderCoeffsL0L1Rx, NativeArray shaderCoeffsL1GL1Ry, NativeArray shaderCoeffsL1BL1Rz, + NativeArray shaderCoeffsL2_0, NativeArray shaderCoeffsL2_1, NativeArray shaderCoeffsL2_2, NativeArray shaderCoeffsL2_3, + int probeIdx) + { + ReadFromShaderCoeffsL0L1(ref sh, shaderCoeffsL0L1Rx, shaderCoeffsL1GL1Ry, shaderCoeffsL1BL1Rz, probeIdx * 4); + if (shaderCoeffsL2_0.Length > 0) + ReadFromShaderCoeffsL2(ref sh, shaderCoeffsL2_0, shaderCoeffsL2_1, shaderCoeffsL2_2, shaderCoeffsL2_3, probeIdx * 4); + + } + + static void WriteToShaderSkyOcclusion(in Vector4 occlusionL0L1, NativeArray shaderCoeffsSkyOcclusionL0L1, int offset) + { + shaderCoeffsSkyOcclusionL0L1[offset + 0] = SHFloatToHalf(occlusionL0L1.x); + shaderCoeffsSkyOcclusionL0L1[offset + 1] = SHFloatToHalf(occlusionL0L1.y); + 
shaderCoeffsSkyOcclusionL0L1[offset + 2] = SHFloatToHalf(occlusionL0L1.z); + shaderCoeffsSkyOcclusionL0L1[offset + 3] = SHFloatToHalf(occlusionL0L1.w); + } + + static void ReadFromShaderCoeffsSkyOcclusion(ref Vector4 skyOcclusionL0L1, NativeArray skyOcclusionDataL0L1, int probeIdx) + { + int offset = probeIdx * 4; + skyOcclusionL0L1.x = SHHalfToFloat(skyOcclusionDataL0L1[offset + 0]); + skyOcclusionL0L1.y = SHHalfToFloat(skyOcclusionDataL0L1[offset + 1]); + skyOcclusionL0L1.z = SHHalfToFloat(skyOcclusionDataL0L1[offset + 2]); + skyOcclusionL0L1.w = SHHalfToFloat(skyOcclusionDataL0L1[offset + 3]); + } + + // Returns index in the GPU layout of probe of coordinate (x, y, z) in the brick at brickIndex for a DataLocation of size locSize + static int GetProbeGPUIndex(int brickIndex, int x, int y, int z, Vector3Int locSize) + { + Vector3Int locSizeInBrick = locSize / ProbeBrickPool.kBrickProbeCountPerDim; + + int bx = brickIndex % locSizeInBrick.x; + int by = (brickIndex / locSizeInBrick.x) % locSizeInBrick.y; + int bz = ((brickIndex / locSizeInBrick.x) / locSizeInBrick.y) % locSizeInBrick.z; + + // In probes + int ix = bx * ProbeBrickPool.kBrickProbeCountPerDim + x; + int iy = by * ProbeBrickPool.kBrickProbeCountPerDim + y; + int iz = bz * ProbeBrickPool.kBrickProbeCountPerDim + z; + + return ix + locSize.x * (iy + locSize.y * iz); + } + + static BakingCell ConvertCellToBakingCell(CellDesc cellDesc, CellData cellData) + { + BakingCell bc = new BakingCell + { + position = cellDesc.position, + index = cellDesc.index, + bricks = cellData.bricks.ToArray(), + minSubdiv = cellDesc.minSubdiv, + indexChunkCount = cellDesc.indexChunkCount, + shChunkCount = cellDesc.shChunkCount, + probeIndices = null, // Not needed for this conversion. + indirectionEntryInfo = cellDesc.indirectionEntryInfo, + }; + + bool hasVirtualOffsets = cellData.offsetVectors.Length > 0; + bool hasSkyOcclusion = cellData.skyOcclusionDataL0L1.Length > 0; + bool hasSkyShadingDirection = cellData.skyShadingDirectionIndices.Length > 0; + + // Runtime Cell arrays may contain padding to match chunk size + // so we use the actual probe count for these arrays. + int probeCount = cellDesc.probeCount; + bc.probePositions = new Vector3[probeCount]; + bc.validity = new float[probeCount]; + bc.touchupVolumeInteraction = new float[probeCount]; + bc.validityNeighbourMask = new byte[probeCount]; + bc.skyOcclusionDataL0L1 = hasSkyOcclusion ? new Vector4[probeCount] : null; + bc.skyShadingDirectionIndices = hasSkyShadingDirection ? new byte[probeCount] : null; + bc.offsetVectors = hasVirtualOffsets ? new Vector3[probeCount] : null; + bc.sh = new SphericalHarmonicsL2[probeCount]; + + // Runtime data layout is for GPU consumption. + // We need to convert it back to a linear layout for the baking cell. 
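+ // GetProbeGPUIndex returns the swizzled position of a probe inside the chunk's 3D data location; + // walking bricks and their 4x4x4 probes in order while incrementing probeIndex rebuilds the linear layout.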
+ int probeIndex = 0; + int chunkOffsetInProbes = 0; + var chunksCount = cellDesc.shChunkCount; + var chunkSizeInProbes = ProbeBrickPool.GetChunkSizeInProbeCount(); + Vector3Int locSize = ProbeBrickPool.ProbeCountToDataLocSize(chunkSizeInProbes); + + var blackSH = GetBlackSH(); + + for (int chunkIndex = 0; chunkIndex < chunksCount; ++chunkIndex) + { + var cellChunkData = GetCellChunkData(cellData, chunkIndex); + + for (int brickIndex = 0; brickIndex < m_BakingSet.chunkSizeInBricks; ++brickIndex) + { + if (probeIndex >= probeCount) + break; + + for (int z = 0; z < ProbeBrickPool.kBrickProbeCountPerDim; z++) + { + for (int y = 0; y < ProbeBrickPool.kBrickProbeCountPerDim; y++) + { + for (int x = 0; x < ProbeBrickPool.kBrickProbeCountPerDim; x++) + { + var remappedIndex = GetProbeGPUIndex(brickIndex, x, y, z, locSize); + + // Scenario data can be invalid due to partially baking the set. + if (cellChunkData.scenarioValid) + ReadFullFromShaderCoeffsL0L1L2(ref bc.sh[probeIndex], cellChunkData.shL0L1RxData, cellChunkData.shL1GL1RyData, cellChunkData.shL1BL1RzData, + cellChunkData.shL2Data_0, cellChunkData.shL2Data_1, cellChunkData.shL2Data_2, cellChunkData.shL2Data_3, remappedIndex); + else + bc.sh[probeIndex] = blackSH; + + bc.validityNeighbourMask[probeIndex] = cellChunkData.validityNeighMaskData[remappedIndex]; + if (hasSkyOcclusion) + ReadFromShaderCoeffsSkyOcclusion(ref bc.skyOcclusionDataL0L1[probeIndex], cellChunkData.skyOcclusionDataL0L1, remappedIndex); + if (hasSkyShadingDirection) + { + bc.skyShadingDirectionIndices[probeIndex] = cellChunkData.skyShadingDirectionIndices[remappedIndex]; + } + + remappedIndex += chunkOffsetInProbes; + bc.probePositions[probeIndex] = cellData.probePositions[remappedIndex]; + bc.validity[probeIndex] = cellData.validity[remappedIndex]; + bc.touchupVolumeInteraction[probeIndex] = cellData.touchupVolumeInteraction[remappedIndex]; + if (hasVirtualOffsets) + bc.offsetVectors[probeIndex] = cellData.offsetVectors[remappedIndex]; + + probeIndex++; + } + } + } + } + + chunkOffsetInProbes += chunkSizeInProbes; + } + + return bc; + } + + // This is slow, but artists wanted this... This can be optimized later. + static BakingCell MergeCells(BakingCell dst, BakingCell srcCell) + { + int maxSubdiv = Math.Max(dst.bricks[0].subdivisionLevel, srcCell.bricks[0].subdivisionLevel); + bool hasVirtualOffsets = s_BakeData.virtualOffsetJob.offsets.IsCreated; + bool hasSkyOcclusion = s_BakeData.skyOcclusionJob.occlusion.IsCreated; + bool hasSkyShadingDirection = s_BakeData.skyOcclusionJob.shadingDirections.IsCreated; + + List<(Brick, int, int)> consolidatedBricks = new List<(Brick, int, int)>(); + HashSet<(Vector3Int, int)> addedBricks = new HashSet<(Vector3Int, int)>(); + + for (int b = 0; b < dst.bricks.Length; ++b) + { + var brick = dst.bricks[b]; + addedBricks.Add((brick.position, brick.subdivisionLevel)); + consolidatedBricks.Add((brick, b, 0)); + } + + // Now with lower priority we grab from src. + for (int b = 0; b < srcCell.bricks.Length; ++b) + { + var brick = srcCell.bricks[b]; + + if (!addedBricks.Contains((brick.position, brick.subdivisionLevel))) + { + consolidatedBricks.Add((brick, b, 1)); + } + } + + // And finally we sort. We don't need to check for anything but brick as we don't have duplicates. + consolidatedBricks.Sort(((Brick, int, int) lhs, (Brick, int, int) rhs) => + { + if (lhs.Item1.subdivisionLevel != rhs.Item1.subdivisionLevel) + return lhs.Item1.subdivisionLevel > rhs.Item1.subdivisionLevel ? 
-1 : 1; + if (lhs.Item1.position.z != rhs.Item1.position.z) + return lhs.Item1.position.z < rhs.Item1.position.z ? -1 : 1; + if (lhs.Item1.position.y != rhs.Item1.position.y) + return lhs.Item1.position.y < rhs.Item1.position.y ? -1 : 1; + if (lhs.Item1.position.x != rhs.Item1.position.x) + return lhs.Item1.position.x < rhs.Item1.position.x ? -1 : 1; + + return 0; + }); + + BakingCell outCell = new BakingCell(); + + int numberOfProbes = consolidatedBricks.Count * ProbeBrickPool.kBrickProbeCountTotal; + outCell.index = dst.index; + outCell.position = dst.position; + outCell.bricks = new Brick[consolidatedBricks.Count]; + outCell.probePositions = new Vector3[numberOfProbes]; + outCell.minSubdiv = Math.Min(dst.minSubdiv, srcCell.minSubdiv); + outCell.sh = new SphericalHarmonicsL2[numberOfProbes]; + outCell.validity = new float[numberOfProbes]; + outCell.validityNeighbourMask = new byte[numberOfProbes]; + outCell.skyOcclusionDataL0L1 = hasSkyOcclusion ? new Vector4[numberOfProbes] : null; + outCell.skyShadingDirectionIndices = hasSkyShadingDirection ? new byte[numberOfProbes] : null; + outCell.offsetVectors = hasVirtualOffsets ? new Vector3[numberOfProbes] : null; + outCell.touchupVolumeInteraction = new float[numberOfProbes]; + outCell.shChunkCount = ProbeBrickPool.GetChunkCount(outCell.bricks.Length); + // We don't need to analyse here, it will be done upon writing back. + outCell.indirectionEntryInfo = new IndirectionEntryInfo[srcCell.indirectionEntryInfo.Length]; + + BakingCell[] consideredCells = { dst, srcCell }; + + for (int i = 0; i < consolidatedBricks.Count; ++i) + { + var b = consolidatedBricks[i]; + int brickIndexInSource = b.Item2; + + outCell.bricks[i] = consideredCells[b.Item3].bricks[brickIndexInSource]; + + for (int p = 0; p < ProbeBrickPool.kBrickProbeCountTotal; ++p) + { + int outIdx = i * ProbeBrickPool.kBrickProbeCountTotal + p; + int srcIdx = brickIndexInSource * ProbeBrickPool.kBrickProbeCountTotal + p; + outCell.probePositions[outIdx] = consideredCells[b.Item3].probePositions[srcIdx]; + outCell.sh[outIdx] = consideredCells[b.Item3].sh[srcIdx]; + outCell.validity[outIdx] = consideredCells[b.Item3].validity[srcIdx]; + outCell.validityNeighbourMask[outIdx] = consideredCells[b.Item3].validityNeighbourMask[srcIdx]; + if (hasSkyOcclusion) + outCell.skyOcclusionDataL0L1[outIdx] = consideredCells[b.Item3].skyOcclusionDataL0L1[srcIdx]; + if (hasSkyShadingDirection) + outCell.skyShadingDirectionIndices[outIdx] = consideredCells[b.Item3].skyShadingDirectionIndices[srcIdx]; + if (hasVirtualOffsets) + outCell.offsetVectors[outIdx] = consideredCells[b.Item3].offsetVectors[srcIdx]; + outCell.touchupVolumeInteraction[outIdx] = consideredCells[b.Item3].touchupVolumeInteraction[srcIdx]; + } + } + return outCell; + } + + static void ExtractBakingCells() + { + // For cells that are being baked, this loop will merge existing baked data with newly baked data to not lose data. + var loadedSceneDataList = ProbeReferenceVolume.instance.perSceneDataList; + foreach (var data in loadedSceneDataList) + { + var cells = m_BakingSet.GetSceneCellIndexList(data.sceneGUID); + + var numberOfCells = cells.Count; + + for (int i = 0; i < numberOfCells; ++i) + { + if (m_BakedCells.ContainsKey(cells[i])) + { + var cell = m_BakingSet.GetCellDesc(cells[i]); + + // This can happen if doing a partial bake before ever doing a full bake. 
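+ // (GetCellDesc returns null when the baking set has no serialized descriptor for this cell yet, + // in which case there is nothing to merge and the freshly baked cell is kept as-is.)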
+ if (cell == null || !m_BakedCells.ContainsKey(cell.index)) + continue; + + var cellData = m_BakingSet.GetCellData(cells[i]); + + // When doing partial baking some cells might not have any already baked data. + if (cellData == null || !cellData.scenarios.ContainsKey(m_BakingSet.lightingScenario)) + continue; + + BakingCell bc = ConvertCellToBakingCell(cell, cellData); + bc = MergeCells(m_BakedCells[cell.index], bc); + m_BakedCells[cell.index] = bc; + } + } + } + + // Here we convert to baking cells all cells that were not already baked. + // This allows us to have the full set of cells ready for writing all at once. + foreach (var cell in m_BakingSet.cellDescs.Values) + { + if (!m_BakedCells.ContainsKey(cell.index)) + { + var cellData = m_BakingSet.GetCellData(cell.index); + if (cellData == null) + continue; + + m_BakedCells.Add(cell.index, ConvertCellToBakingCell(cell, cellData)); + } + } + } + + static long AlignRemainder16(long count) => count % 16L; + + static void WriteNativeArray(System.IO.FileStream fs, NativeArray array) where T : struct + { + unsafe + { + fs.Write(new ReadOnlySpan(array.GetUnsafeReadOnlyPtr(), array.Length * UnsafeUtility.SizeOf())); + fs.Write(new byte[AlignRemainder16(fs.Position)]); + } + } + + /// + /// This method converts a list of baking cells into 5 separate assets: + /// 2 assets per baking state: + /// CellData: a binary flat file containing L0L1 probes data + /// CellOptionalData: a binary flat file containing L2 probe data (when present) + /// 3 assets shared between states: + /// ProbeVolumeAsset: a Scriptable Object which currently contains book-keeping data, runtime cells, and references to flattened data + /// CellSharedData: a binary flat file containing bricks data + /// CellSupportData: a binary flat file containing debug data (stripped from player builds if building without debug shaders) + /// + unsafe static void WriteBakingCells(BakingCell[] bakingCells) + { + m_BakingSet.GetBlobFileNames(m_BakingSet.lightingScenario, out var cellDataFilename, out var cellBricksDataFilename, out var cellOptionalDataFilename, out var cellSharedDataFilename, out var cellSupportDataFilename); + + m_BakingSet.cellDescs = new SerializedDictionary(); + m_BakingSet.bakedMinDistanceBetweenProbes = m_ProfileInfo.minDistanceBetweenProbes; + m_BakingSet.bakedSimplificationLevels = m_ProfileInfo.simplificationLevels; + m_BakingSet.bakedProbeOffset = m_ProfileInfo.probeOffset; + m_BakingSet.bakedSkyOcclusion = m_BakingSet.skyOcclusion; + m_BakingSet.bakedSkyShadingDirection = m_BakingSet.bakedSkyOcclusion && m_BakingSet.skyOcclusionShadingDirection; + + var cellSharedDataDescs = new SerializedDictionary(); + var cellL0L1DataDescs = new SerializedDictionary(); + var cellL2DataDescs = new SerializedDictionary(); + var cellBricksDescs = new SerializedDictionary(); + var cellSupportDescs = new SerializedDictionary(); + + var voSettings = m_BakingSet.settings.virtualOffsetSettings; + bool hasVirtualOffsets = voSettings.useVirtualOffset; + bool handlesSkyOcclusion = m_BakingSet.bakedSkyOcclusion; + bool handlesSkyShading = m_BakingSet.bakedSkyShadingDirection && m_BakingSet.bakedSkyShadingDirection; + + for (var i = 0; i < bakingCells.Length; ++i) + { + AnalyzeBrickForIndirectionEntries(ref bakingCells[i]); + var bakingCell = bakingCells[i]; + + m_BakingSet.cellDescs.Add(bakingCell.index, new CellDesc + { + position = bakingCell.position, + index = bakingCell.index, + probeCount = bakingCell.probePositions.Length, + minSubdiv = bakingCell.minSubdiv, + indexChunkCount = 
bakingCell.indexChunkCount, + shChunkCount = bakingCell.shChunkCount, + indirectionEntryInfo = bakingCell.indirectionEntryInfo, + bricksCount = bakingCell.bricks.Length, + }); + + m_BakingSet.maxSHChunkCount = Mathf.Max(m_BakingSet.maxSHChunkCount, bakingCell.shChunkCount); + + m_TotalCellCounts.Add(new CellCounts + { + bricksCount = bakingCell.bricks.Length, + chunksCount = bakingCell.shChunkCount + }); + } + + // All per probe data is stored per chunk and contiguously for each cell. + // This is done so that we can stream from disk one cell at a time by group of chunks. + + var chunkSizeInProbes = ProbeBrickPool.GetChunkSizeInProbeCount(); + + // CellData + // L0 and L1 Data: 12 Coeffs stored in 3 textures. L0 (rgb) and R1x as ushort in one texture, the rest as byte in two 4 component textures. + var L0L1R1xChunkSize = sizeof(ushort) * 4 * chunkSizeInProbes; // 4 ushort components per probe + var L1ChunkSize = sizeof(byte) * 4 * chunkSizeInProbes; // 4 components per probe + var L0L1ChunkSize = L0L1R1xChunkSize + 2 * L1ChunkSize; + var L0L1TotalSize = m_TotalCellCounts.chunksCount * L0L1ChunkSize; + using var probesL0L1 = new NativeArray(L0L1TotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); + + m_BakingSet.L0ChunkSize = L0L1R1xChunkSize; + m_BakingSet.L1ChunkSize = L1ChunkSize; + + // CellOptionalData + // L2 Data: 15 Coeffs stored in 4 byte4 textures. + var L2TextureChunkSize = 4 * sizeof(byte) * chunkSizeInProbes; // 4 byte component per probe + var L2ChunkSize = L2TextureChunkSize * 4; // 4 Textures for all L2 data. + var L2TotalSize = m_TotalCellCounts.chunksCount * L2ChunkSize; // 4 textures + using var probesL2 = new NativeArray(L2TotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); + + m_BakingSet.L2TextureChunkSize = L2TextureChunkSize; + + + // CellSharedData + m_BakingSet.sharedValidityMaskChunkSize = sizeof(byte) * chunkSizeInProbes; + m_BakingSet.sharedSkyOcclusionL0L1ChunkSize = handlesSkyOcclusion ? sizeof(ushort) * 4 * chunkSizeInProbes : 0; + m_BakingSet.sharedSkyShadingDirectionIndicesChunkSize = handlesSkyShading ? sizeof(byte) * chunkSizeInProbes : 0; + m_BakingSet.sharedDataChunkSize = m_BakingSet.sharedValidityMaskChunkSize + m_BakingSet.sharedSkyOcclusionL0L1ChunkSize + m_BakingSet.sharedSkyShadingDirectionIndicesChunkSize; + + var sharedDataTotalSize = m_TotalCellCounts.chunksCount * m_BakingSet.sharedDataChunkSize; + using var sharedData = new NativeArray(sharedDataTotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); + + // Brick data + using var bricks = new NativeArray(m_TotalCellCounts.bricksCount, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); + + // CellSupportData + m_BakingSet.supportPositionChunkSize = sizeof(Vector3) * chunkSizeInProbes; + m_BakingSet.supportValidityChunkSize = sizeof(float) * chunkSizeInProbes; + m_BakingSet.supportOffsetsChunkSize = hasVirtualOffsets ? 
sizeof(Vector3) * chunkSizeInProbes : 0; + m_BakingSet.supportTouchupChunkSize = sizeof(float) * chunkSizeInProbes; + + m_BakingSet.supportDataChunkSize = m_BakingSet.supportPositionChunkSize + m_BakingSet.supportValidityChunkSize + m_BakingSet.supportOffsetsChunkSize + m_BakingSet.supportTouchupChunkSize; + var supportDataTotalSize = m_TotalCellCounts.chunksCount * m_BakingSet.supportDataChunkSize; + using var supportData = new NativeArray(supportDataTotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); + + var sceneStateHash = m_BakingSet.GetBakingHashCode(); + var startCounts = new CellCounts(); + + int sharedChunkOffset = 0; + + int shL0L1ChunkOffset = 0; + int shL2ChunkOffset = 0; + int supportChunkOffset = 0; + + var blackSH = GetBlackSH(); + + // Size of the DataLocation used to do the copy texture at runtime. Used to generate the right layout for the 3D texture. + Vector3Int locSize = ProbeBrickPool.ProbeCountToDataLocSize(ProbeBrickPool.GetChunkSizeInProbeCount()); + + for (var i = 0; i < bakingCells.Length; ++i) + { + var bakingCell = bakingCells[i]; + var cellDesc = m_BakingSet.cellDescs[bakingCell.index]; + var chunksCount = cellDesc.shChunkCount; + + cellSharedDataDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.chunksCount * m_BakingSet.sharedDataChunkSize, elementCount = chunksCount }); + cellL0L1DataDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.chunksCount * L0L1ChunkSize, elementCount = chunksCount }); + cellL2DataDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.chunksCount * L2ChunkSize, elementCount = chunksCount }); + cellBricksDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.bricksCount * sizeof(Brick), elementCount = cellDesc.bricksCount }); + cellSupportDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.chunksCount * m_BakingSet.supportDataChunkSize, elementCount = chunksCount }); + + sceneStateHash = sceneStateHash * 23 + bakingCell.GetBakingHashCode(); + + var inputProbesCount = bakingCell.probePositions.Length; + + int shidx = 0; + + // Cell base offsets for each data streams + int cellL0R1xOffset = shL0L1ChunkOffset; + int cellL1GL1RyOffset = cellL0R1xOffset + chunksCount * L0L1R1xChunkSize; + int cellL1BL1RzOffset = cellL1GL1RyOffset + chunksCount * L1ChunkSize; + + int validityMaskOffset = sharedChunkOffset; + int skyOcclusionL0L1Offset = validityMaskOffset + chunksCount * m_BakingSet.sharedValidityMaskChunkSize; + int skyShadingIndicesOffset = skyOcclusionL0L1Offset + chunksCount * m_BakingSet.sharedSkyOcclusionL0L1ChunkSize; + + int positionOffset = supportChunkOffset; + int validityOffset = positionOffset + chunksCount * m_BakingSet.supportPositionChunkSize; + int touchupOffset = validityOffset + chunksCount * m_BakingSet.supportValidityChunkSize; + int offsetsOffset = touchupOffset + chunksCount * m_BakingSet.supportTouchupChunkSize; // Keep last as it's optional. + + // Here we directly map each chunk to the layout of the 3D textures in order to be able to copy the data directly to the GPU. + // The granularity at runtime is one chunk at a time currently so the temporary data loc used is sized accordingly. 
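+ // Per cell, each stream is laid out chunk-major, e.g. for L0L1: all Rx chunks, then all L1G/L1Ry chunks, + // then all L1B/L1Rz chunks; hence the base offsets above each advance by chunksCount times the stream's chunk size.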
+ for (int chunkIndex = 0; chunkIndex < chunksCount; ++chunkIndex) + { + NativeArray probesTargetL0L1Rx = probesL0L1.GetSubArray(cellL0R1xOffset + chunkIndex * L0L1R1xChunkSize, L0L1R1xChunkSize).Reinterpret(1); + NativeArray probesTargetL1GL1Ry = probesL0L1.GetSubArray(cellL1GL1RyOffset + chunkIndex * L1ChunkSize, L1ChunkSize); + NativeArray probesTargetL1BL1Rz = probesL0L1.GetSubArray(cellL1BL1RzOffset + chunkIndex * L1ChunkSize, L1ChunkSize); + + NativeArray validityNeighboorMaskChunkTarget = sharedData.GetSubArray(validityMaskOffset + chunkIndex * m_BakingSet.sharedValidityMaskChunkSize, m_BakingSet.sharedValidityMaskChunkSize); + NativeArray skyOcclusionL0L1ChunkTarget = sharedData.GetSubArray(skyOcclusionL0L1Offset + chunkIndex * m_BakingSet.sharedSkyOcclusionL0L1ChunkSize, m_BakingSet.sharedSkyOcclusionL0L1ChunkSize).Reinterpret(1); + NativeArray skyShadingIndicesChunkTarget = sharedData.GetSubArray(skyShadingIndicesOffset + chunkIndex * m_BakingSet.sharedSkyShadingDirectionIndicesChunkSize, m_BakingSet.sharedSkyShadingDirectionIndicesChunkSize); + + + NativeArray positionsChunkTarget = supportData.GetSubArray(positionOffset + chunkIndex * m_BakingSet.supportPositionChunkSize, m_BakingSet.supportPositionChunkSize).Reinterpret(1); + NativeArray validityChunkTarget = supportData.GetSubArray(validityOffset + chunkIndex * m_BakingSet.supportValidityChunkSize, m_BakingSet.supportValidityChunkSize).Reinterpret(1); + NativeArray touchupVolumeInteractionChunkTarget = supportData.GetSubArray(touchupOffset + chunkIndex * m_BakingSet.supportTouchupChunkSize, m_BakingSet.supportTouchupChunkSize).Reinterpret(1); + NativeArray offsetChunkTarget = supportData.GetSubArray(offsetsOffset + chunkIndex * m_BakingSet.supportOffsetsChunkSize, m_BakingSet.supportOffsetsChunkSize).Reinterpret(1); + + NativeArray probesTargetL2_0 = probesL2.GetSubArray(shL2ChunkOffset + chunksCount * L2TextureChunkSize * 0 + chunkIndex * L2TextureChunkSize, L2TextureChunkSize); + NativeArray probesTargetL2_1 = probesL2.GetSubArray(shL2ChunkOffset + chunksCount * L2TextureChunkSize * 1 + chunkIndex * L2TextureChunkSize, L2TextureChunkSize); + NativeArray probesTargetL2_2 = probesL2.GetSubArray(shL2ChunkOffset + chunksCount * L2TextureChunkSize * 2 + chunkIndex * L2TextureChunkSize, L2TextureChunkSize); + NativeArray probesTargetL2_3 = probesL2.GetSubArray(shL2ChunkOffset + chunksCount * L2TextureChunkSize * 3 + chunkIndex * L2TextureChunkSize, L2TextureChunkSize); + + for (int brickIndex = 0; brickIndex < m_BakingSet.chunkSizeInBricks; brickIndex++) + { + for (int z = 0; z < ProbeBrickPool.kBrickProbeCountPerDim; z++) + { + for (int y = 0; y < ProbeBrickPool.kBrickProbeCountPerDim; y++) + { + for (int x = 0; x < ProbeBrickPool.kBrickProbeCountPerDim; x++) + { + int index = GetProbeGPUIndex(brickIndex, x, y, z, locSize); + + // We are processing chunks at a time. + // So in practice we can go over the number of SH we have in the input list. + // We fill with encoded black to avoid copying garbage in the final atlas. 
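+ // (The tail of a cell's last chunk is padding: chunk capacity is a whole number of bricks, so up to + // one chunk's worth of trailing probes past inputProbesCount receive the neutral values written below.)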
+ if (shidx >= inputProbesCount) + { + WriteToShaderCoeffsL0L1(blackSH, probesTargetL0L1Rx, probesTargetL1GL1Ry, probesTargetL1BL1Rz, index * 4); + WriteToShaderCoeffsL2(blackSH, probesTargetL2_0, probesTargetL2_1, probesTargetL2_2, probesTargetL2_3, index * 4); + if (m_BakingSet.bakedSkyOcclusion) + { + WriteToShaderSkyOcclusion(Vector4.zero, skyOcclusionL0L1ChunkTarget, index * 4); + if (m_BakingSet.bakedSkyShadingDirection) + { + skyShadingIndicesChunkTarget[index] = 255; + } + } + + validityNeighboorMaskChunkTarget[index] = 0; + validityChunkTarget[index] = 0.0f; + positionsChunkTarget[index] = Vector3.zero; + touchupVolumeInteractionChunkTarget[index] = 0.0f; + if (hasVirtualOffsets) + offsetChunkTarget[index] = Vector3.zero; + } + else + { + ref var sh = ref bakingCell.sh[shidx]; + + WriteToShaderCoeffsL0L1(sh, probesTargetL0L1Rx, probesTargetL1GL1Ry, probesTargetL1BL1Rz, index * 4); + WriteToShaderCoeffsL2(sh, probesTargetL2_0, probesTargetL2_1, probesTargetL2_2, probesTargetL2_3, index * 4); + if (m_BakingSet.bakedSkyOcclusion) + { + WriteToShaderSkyOcclusion(bakingCell.skyOcclusionDataL0L1[shidx], skyOcclusionL0L1ChunkTarget, index * 4); + if (m_BakingSet.bakedSkyShadingDirection) + { + skyShadingIndicesChunkTarget[index] = (byte)(bakingCell.skyShadingDirectionIndices[shidx]); + } + } + + validityChunkTarget[index] = bakingCell.validity[shidx]; + validityNeighboorMaskChunkTarget[index] = bakingCell.validityNeighbourMask[shidx]; + positionsChunkTarget[index] = bakingCell.probePositions[shidx]; + touchupVolumeInteractionChunkTarget[index] = bakingCell.touchupVolumeInteraction[shidx]; + if (hasVirtualOffsets) + offsetChunkTarget[index] = bakingCell.offsetVectors[shidx]; + } + shidx++; + } + } + } + } + } + + shL0L1ChunkOffset += (chunksCount * L0L1ChunkSize); + shL2ChunkOffset += (chunksCount * L2ChunkSize); + supportChunkOffset += (chunksCount * m_BakingSet.supportDataChunkSize); + sharedChunkOffset += (chunksCount * m_BakingSet.sharedDataChunkSize); + + bricks.GetSubArray(startCounts.bricksCount, cellDesc.bricksCount).CopyFrom(bakingCell.bricks); + + startCounts.Add(new CellCounts() + { + bricksCount = cellDesc.bricksCount, + chunksCount = cellDesc.shChunkCount + }); + } + + // Need to save here because the forced import below discards the changes. + EditorUtility.SetDirty(m_BakingSet); + AssetDatabase.SaveAssets(); + + // Explicitly make sure the binary output files are writable since we write them using the C# file API (i.e. check out Perforce files if applicable) + var outputPaths = new List(new[] { cellDataFilename, cellBricksDataFilename, cellSharedDataFilename, cellSupportDataFilename, cellOptionalDataFilename }); + + if (!AssetDatabase.MakeEditable(outputPaths.ToArray())) + Debug.LogWarning($"Failed to make one or more probe volume output file(s) writable. This could result in baked data not being properly written to disk. 
{string.Join(",", outputPaths)}"); + + unsafe + { + using (var fs = new System.IO.FileStream(cellDataFilename, System.IO.FileMode.Create, System.IO.FileAccess.Write)) + { + WriteNativeArray(fs, probesL0L1); + } + using (var fs = new System.IO.FileStream(cellOptionalDataFilename, System.IO.FileMode.Create, System.IO.FileAccess.Write)) + { + WriteNativeArray(fs, probesL2); + } + using (var fs = new System.IO.FileStream(cellSharedDataFilename, System.IO.FileMode.Create, System.IO.FileAccess.Write)) + { + WriteNativeArray(fs, sharedData); + } + using (var fs = new System.IO.FileStream(cellBricksDataFilename, System.IO.FileMode.Create, System.IO.FileAccess.Write)) + { + WriteNativeArray(fs, bricks); + } + using (var fs = new System.IO.FileStream(cellSupportDataFilename, System.IO.FileMode.Create, System.IO.FileAccess.Write)) + { + WriteNativeArray(fs, supportData); + } + } + + AssetDatabase.ImportAsset(cellDataFilename); + AssetDatabase.ImportAsset(cellOptionalDataFilename); + AssetDatabase.ImportAsset(cellBricksDataFilename); + AssetDatabase.ImportAsset(cellSharedDataFilename); + AssetDatabase.ImportAsset(cellSupportDataFilename); + + var bakingSetGUID = AssetDatabase.AssetPathToGUID(AssetDatabase.GetAssetPath(m_BakingSet)); + + m_BakingSet.scenarios[ProbeReferenceVolume.instance.lightingScenario] = new ProbeVolumeBakingSet.PerScenarioDataInfo + { + sceneHash = sceneStateHash, + cellDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellL0L1DataDescs, L0L1ChunkSize, bakingSetGUID, AssetDatabase.AssetPathToGUID(cellDataFilename)), + cellOptionalDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellL2DataDescs, L2ChunkSize, bakingSetGUID, AssetDatabase.AssetPathToGUID(cellOptionalDataFilename)), + }; + m_BakingSet.cellSharedDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellSharedDataDescs, m_BakingSet.sharedDataChunkSize, bakingSetGUID, AssetDatabase.AssetPathToGUID(cellSharedDataFilename)); + m_BakingSet.cellBricksDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellBricksDescs, sizeof(Brick), bakingSetGUID, AssetDatabase.AssetPathToGUID(cellBricksDataFilename)); + m_BakingSet.cellSupportDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellSupportDescs, m_BakingSet.supportDataChunkSize, bakingSetGUID, AssetDatabase.AssetPathToGUID(cellSupportDataFilename)); + + EditorUtility.SetDirty(m_BakingSet); + } + + unsafe static void WriteDilatedCells(List cells) + { + m_BakingSet.GetBlobFileNames(m_BakingSet.lightingScenario, out var cellDataFilename, out var _, out var cellOptionalDataFilename, out var cellSharedDataFilename, out var _); + + var chunkSizeInProbes = ProbeBrickPool.GetChunkSizeInProbeCount(); + + // CellData + // L0 and L1 Data: 12 Coeffs stored in 3 textures. L0 (rgb) and R1x as ushort in one texture, the rest as byte in two 4 component textures. + var L0L1R1xChunkSize = sizeof(ushort) * 4 * chunkSizeInProbes; // 4 ushort components per probe + var L1ChunkSize = sizeof(byte) * 4 * chunkSizeInProbes; // 4 components per probe + var L0L1ChunkSize = L0L1R1xChunkSize + 2 * L1ChunkSize; + var L0L1TotalSize = m_TotalCellCounts.chunksCount * L0L1ChunkSize; + using var probesL0L1 = new NativeArray(L0L1TotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); + + // CellOptionalData + // L2 Data: 15 Coeffs stored in 4 byte4 textures. 
+ var L2ChunkSize = 4 * sizeof(byte) * chunkSizeInProbes; // 4 byte component per probe + var L2TotalSize = m_TotalCellCounts.chunksCount * L2ChunkSize * 4; // 4 textures + using var probesL2 = new NativeArray(L2TotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); + + // CellSharedData + var sharedValidityMaskChunkSize = m_BakingSet.sharedValidityMaskChunkSize; + var sharedSkyOcclusionL0L1ChunkSize = m_BakingSet.sharedSkyOcclusionL0L1ChunkSize; + var sharedSkyShadingDirectionIndicesChunkSize = m_BakingSet.sharedSkyShadingDirectionIndicesChunkSize; + var sharedDataTotalSize = m_TotalCellCounts.chunksCount * m_BakingSet.sharedDataChunkSize; + using var sharedData = new NativeArray(sharedDataTotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); + + // We don't want to overwrite validity data + sharedData.CopyFrom(System.IO.File.ReadAllBytes(cellSharedDataFilename)); + + // When baking with partially loaded scenes, the list of cells being dilated might be smaller than the full list of cells in the bake. + // In this case, in order not to destroy the rest of the data, we need to load it back before writing. + if (cells.Count != m_BakingSet.cellDescs.Count) + { + probesL0L1.CopyFrom(System.IO.File.ReadAllBytes(cellDataFilename)); + probesL2.CopyFrom(System.IO.File.ReadAllBytes(cellOptionalDataFilename)); + } + + var lightingScenario = ProbeReferenceVolume.instance.lightingScenario; + Debug.Assert(m_BakingSet.scenarios.ContainsKey(lightingScenario)); + var scenarioDataInfo = m_BakingSet.scenarios[lightingScenario]; + + for (var i = 0; i < cells.Count; ++i) + { + var srcCell = cells[i]; + + var srcCellDesc = srcCell.desc; + var scenarioData = srcCell.data.scenarios[lightingScenario]; + + var L0L1chunkBaseOffset = scenarioDataInfo.cellDataAsset.streamableCellDescs[srcCellDesc.index].offset; + var L2chunkBaseOffset = scenarioDataInfo.cellOptionalDataAsset.streamableCellDescs[srcCellDesc.index].offset; + var sharedchunkBaseOffset = m_BakingSet.cellSharedDataAsset.streamableCellDescs[srcCellDesc.index].offset; + var shChunksCount = srcCellDesc.shChunkCount; + + NativeArray probesTargetL0L1Rx = probesL0L1.GetSubArray(L0L1chunkBaseOffset, L0L1R1xChunkSize * shChunksCount).Reinterpret(1); + NativeArray probesTargetL1GL1Ry = probesL0L1.GetSubArray(L0L1chunkBaseOffset + shChunksCount * L0L1R1xChunkSize, L1ChunkSize * shChunksCount); + NativeArray probesTargetL1BL1Rz = probesL0L1.GetSubArray(L0L1chunkBaseOffset + shChunksCount * (L0L1R1xChunkSize + L1ChunkSize), L1ChunkSize * shChunksCount); + + probesTargetL0L1Rx.CopyFrom(scenarioData.shL0L1RxData); + probesTargetL1GL1Ry.CopyFrom(scenarioData.shL1GL1RyData); + probesTargetL1BL1Rz.CopyFrom(scenarioData.shL1BL1RzData); + + NativeArray probesTargetL2_0 = probesL2.GetSubArray(L2chunkBaseOffset + shChunksCount * L2ChunkSize * 0, L2ChunkSize * shChunksCount); + NativeArray probesTargetL2_1 = probesL2.GetSubArray(L2chunkBaseOffset + shChunksCount * L2ChunkSize * 1, L2ChunkSize * shChunksCount); + NativeArray probesTargetL2_2 = probesL2.GetSubArray(L2chunkBaseOffset + shChunksCount * L2ChunkSize * 2, L2ChunkSize * shChunksCount); + NativeArray probesTargetL2_3 = probesL2.GetSubArray(L2chunkBaseOffset + shChunksCount * L2ChunkSize * 3, L2ChunkSize * shChunksCount); + + probesTargetL2_0.CopyFrom(scenarioData.shL2Data_0); + probesTargetL2_1.CopyFrom(scenarioData.shL2Data_1); + probesTargetL2_2.CopyFrom(scenarioData.shL2Data_2); + probesTargetL2_3.CopyFrom(scenarioData.shL2Data_3); + + if (sharedSkyOcclusionL0L1ChunkSize != 0) 
+ { + NativeArray<ushort> skyOcclusionL0L1ChunkTarget = sharedData.GetSubArray(sharedchunkBaseOffset + shChunksCount * sharedValidityMaskChunkSize, sharedSkyOcclusionL0L1ChunkSize * shChunksCount).Reinterpret<ushort>(1); + skyOcclusionL0L1ChunkTarget.CopyFrom(srcCell.data.skyOcclusionDataL0L1); + + if (sharedSkyShadingDirectionIndicesChunkSize != 0) + { + NativeArray<byte> skyShadingIndicesChunkTarget = sharedData.GetSubArray(sharedchunkBaseOffset + shChunksCount * (sharedValidityMaskChunkSize + sharedSkyOcclusionL0L1ChunkSize), sharedSkyShadingDirectionIndicesChunkSize * shChunksCount); + skyShadingIndicesChunkTarget.CopyFrom(srcCell.data.skyShadingDirectionIndices); + } + } + } + + // Explicitly make sure the binary output files are writable since we write them using the C# file API (i.e. check out Perforce files if applicable) + var outputPaths = new List<string>(new[] { cellDataFilename, cellSharedDataFilename, cellOptionalDataFilename }); + + if (!AssetDatabase.MakeEditable(outputPaths.ToArray())) + Debug.LogWarning($"Failed to make one or more probe volume output file(s) writable. This could result in baked data not being properly written to disk. {string.Join(",", outputPaths)}"); + + unsafe + { + using (var fs = new System.IO.FileStream(cellDataFilename, System.IO.FileMode.Create, System.IO.FileAccess.Write)) + { + WriteNativeArray(fs, probesL0L1); + } + using (var fs = new System.IO.FileStream(cellOptionalDataFilename, System.IO.FileMode.Create, System.IO.FileAccess.Write)) + { + WriteNativeArray(fs, probesL2); + } + using (var fs = new System.IO.FileStream(cellSharedDataFilename, System.IO.FileMode.Create, System.IO.FileAccess.Write)) + { + WriteNativeArray(fs, sharedData); + } + } + } +} diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs.meta b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs.meta new file mode 100644 index 00000000000..29407c8682a --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs.meta @@ -0,0 +1,2 @@ +fileFormatVersion: 2 +guid: 35308793dc5ef9843913c188f6efacb0 \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.SkyOcclusion.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.SkyOcclusion.cs index 5e1c2e76ae6..05df36ace52 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.SkyOcclusion.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.SkyOcclusion.cs @@ -1,3 +1,4 @@ +using System; using System.Runtime.InteropServices; using UnityEngine.Rendering.Sampling; using UnityEngine.Rendering.UnifiedRayTracing; @@ -5,9 +6,114 @@ namespace UnityEngine.Rendering { - partial class ProbeGIBaking + partial class AdaptiveProbeVolumes { - struct SkyOcclusionBaking + /// <summary> + /// Sky occlusion baker + /// </summary> + public abstract class SkyOcclusionBaker : IDisposable + { + /// <summary>The current baking step.</summary> + public abstract ulong currentStep { get; } + /// <summary>The total number of steps.</summary> + public abstract ulong stepCount { get; } + + /// <summary>Array storing the sky occlusion per probe. The layout is DC, x, y, z.</summary> + public abstract NativeArray<Vector4> occlusion { get; } + /// <summary>Array storing the sky shading direction per probe.</summary>
+ public abstract NativeArray<Vector3> shadingDirections { get; } + + /// <summary> + /// This is called before the start of baking to allow allocating necessary resources. + /// </summary> + /// <param name="bakingSet">The baking set that is currently baked.</param> + /// <param name="probePositions">The probe positions.</param> + public abstract void Initialize(ProbeVolumeBakingSet bakingSet, NativeArray<Vector3> probePositions); + + /// <summary> + /// Run a step of sky occlusion baking. Baking is considered done when the currentStep property equals stepCount. + /// </summary> + /// <returns>Returns false if the bake failed and should be stopped.</returns> + public abstract bool Step(); + + /// <summary> + /// Performs necessary tasks to free allocated resources. + /// </summary> + public abstract void Dispose(); + + internal NativeArray<uint> encodedDirections; + internal void Encode() { encodedDirections = EncodeShadingDirection(shadingDirections); } + + static int k_MaxProbeCountPerBatch = 65535; + static readonly int _SkyShadingPrecomputedDirection = Shader.PropertyToID("_SkyShadingPrecomputedDirection"); + static readonly int _SkyShadingDirections = Shader.PropertyToID("_SkyShadingDirections"); + static readonly int _SkyShadingIndices = Shader.PropertyToID("_SkyShadingIndices"); + static readonly int _ProbeCount = Shader.PropertyToID("_ProbeCount"); + + internal static NativeArray<uint> EncodeShadingDirection(NativeArray<Vector3> directions) + { + var cs = GraphicsSettings.GetRenderPipelineSettings().skyOcclusionCS; + int kernel = cs.FindKernel("EncodeShadingDirection"); + + DynamicSkyPrecomputedDirections.Initialize(); + var precomputedShadingDirections = ProbeReferenceVolume.instance.GetRuntimeResources().SkyPrecomputedDirections; + + int probeCount = directions.Length; + int batchSize = Mathf.Min(k_MaxProbeCountPerBatch, probeCount); + int batchCount = CoreUtils.DivRoundUp(probeCount, k_MaxProbeCountPerBatch); + + var directionBuffer = new GraphicsBuffer(GraphicsBuffer.Target.Structured, batchSize, Marshal.SizeOf<Vector3>()); + var encodedBuffer = new GraphicsBuffer(GraphicsBuffer.Target.Structured, batchSize, Marshal.SizeOf<uint>()); + + var directionResults = new NativeArray<uint>(probeCount, Allocator.Persistent, NativeArrayOptions.UninitializedMemory); + + for (int batchIndex = 0; batchIndex < batchCount; batchIndex++) + { + int batchOffset = batchIndex * k_MaxProbeCountPerBatch; + int probeInBatch = Mathf.Min(probeCount - batchOffset, k_MaxProbeCountPerBatch); + + directionBuffer.SetData(directions, batchOffset, 0, probeInBatch); + + cs.SetBuffer(kernel, _SkyShadingPrecomputedDirection, precomputedShadingDirections); + cs.SetBuffer(kernel, _SkyShadingDirections, directionBuffer); + cs.SetBuffer(kernel, _SkyShadingIndices, encodedBuffer); + + cs.SetInt(_ProbeCount, probeInBatch); + cs.Dispatch(kernel, CoreUtils.DivRoundUp(probeInBatch, 64), 1, 1); + + var batchResult = directionResults.GetSubArray(batchOffset, probeInBatch); + AsyncGPUReadback.RequestIntoNativeArray(ref batchResult, encodedBuffer, probeInBatch * sizeof(uint), 0).WaitForCompletion(); + } + + directionBuffer.Dispose(); + encodedBuffer.Dispose(); + + return directionResults; + } + + internal static uint EncodeSkyShadingDirection(Vector3 direction) + { + var precomputedDirections = DynamicSkyPrecomputedDirections.GetPrecomputedDirections(); + + uint indexMax = 255; + float bestDot = -10.0f; + uint bestIndex = 0; + + for (uint index = 0; index < indexMax; index++) + { + float currentDot = Vector3.Dot(direction, precomputedDirections[index]); + if (currentDot > bestDot) + { + bestDot = currentDot; + bestIndex = index; + } + } + + return bestIndex; + } + } + + class DefaultSkyOcclusion : SkyOcclusionBaker { const int
k_MaxProbeCountPerBatch = 65535 * 64; const float k_SkyOcclusionOffsetRay = 0.015f; @@ -19,61 +125,87 @@ struct SkyOcclusionBaking static readonly int _OffsetRay = Shader.PropertyToID("_OffsetRay"); static readonly int _ProbePositions = Shader.PropertyToID("_ProbePositions"); static readonly int _SkyOcclusionOut = Shader.PropertyToID("_SkyOcclusionOut"); - static readonly int _SkyShadingPrecomputedDirection = Shader.PropertyToID("_SkyShadingPrecomputedDirection"); static readonly int _SkyShadingOut = Shader.PropertyToID("_SkyShadingOut"); - static readonly int _SkyShadingDirectionIndexOut = Shader.PropertyToID("_SkyShadingDirectionIndexOut"); static readonly int _AverageAlbedo = Shader.PropertyToID("_AverageAlbedo"); static readonly int _BackFaceCulling = Shader.PropertyToID("_BackFaceCulling"); static readonly int _BakeSkyShadingDirection = Shader.PropertyToID("_BakeSkyShadingDirection"); static readonly int _SobolBuffer = Shader.PropertyToID("_SobolBuffer"); static readonly int _CPRBuffer = Shader.PropertyToID("_CPRBuffer"); - public bool skyOcclusion; - public bool skyDirection; - - private int skyOcclusionBackFaceCulling; - private float skyOcclusionAverageAlbedo; - private int probeCount; + int skyOcclusionBackFaceCulling; + float skyOcclusionAverageAlbedo; + int probeCount; + ulong step; // Input data NativeArray probePositions; - private BakeJob[] jobs; - private int currentJob; - public int sampleIndex; - public int batchIndex; + int currentJob; + int sampleIndex; + int batchIndex; + + public BakeJob[] jobs; // Output buffers - private GraphicsBuffer occlusionOutputBuffer; - private GraphicsBuffer skyShadingIndexBuffer; - public Vector4[] occlusionResults; - public uint[] directionResults; - - private IRayTracingAccelStruct m_AccelerationStructure; - private GraphicsBuffer scratchBuffer; - private GraphicsBuffer probePositionsBuffer; - private GraphicsBuffer skyShadingBuffer; - private ComputeBuffer precomputedShadingDirections; - private GraphicsBuffer sobolBuffer; - private GraphicsBuffer cprBuffer; // Cranley Patterson rotation - - public ulong currentStep; - public ulong stepCount => (ulong)probeCount; - - public void Initialize(ProbeVolumeBakingSet bakingSet, BakeJob[] bakeJobs, int probeCount) + GraphicsBuffer occlusionOutputBuffer; + GraphicsBuffer shadingDirectionBuffer; + NativeArray occlusionResults; + NativeArray directionResults; + + public override NativeArray occlusion => occlusionResults; + public override NativeArray shadingDirections => directionResults; + + IRayTracingAccelStruct m_AccelerationStructure; + GraphicsBuffer scratchBuffer; + GraphicsBuffer probePositionsBuffer; + GraphicsBuffer sobolBuffer; + GraphicsBuffer cprBuffer; // Cranley Patterson rotation + + public override ulong currentStep => step; + public override ulong stepCount => (ulong)probeCount; + + public override void Initialize(ProbeVolumeBakingSet bakingSet, NativeArray positions) { - // We have to copy the values from the baking set as they may get modified by the user while baking - skyOcclusion = bakingSet.skyOcclusion; - skyDirection = bakingSet.skyOcclusionShadingDirection && skyOcclusion; skyOcclusionAverageAlbedo = bakingSet.skyOcclusionAverageAlbedo; skyOcclusionBackFaceCulling = 0; // see PR #40707 - jobs = bakeJobs; currentJob = 0; sampleIndex = 0; batchIndex = 0; - currentStep = 0; - this.probeCount = skyOcclusion ? probeCount : 0; + step = 0; + probeCount = bakingSet.skyOcclusion ? 
+         probePositions = positions;
+
+         if (stepCount == 0)
+             return;
+
+         // Allocate arrays storing the results
+         occlusionResults = new NativeArray<Vector4>(probeCount, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
+         if (bakingSet.skyOcclusionShadingDirection)
+             directionResults = new NativeArray<Vector3>(probeCount, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
+
+         // Create acceleration structure
+         m_AccelerationStructure = BuildAccelerationStructure();
+         var skyOcclusionShader = s_TracingContext.shaderSO;
+         bool skyDirection = shadingDirections.IsCreated;
+
+         int batchSize = Mathf.Min(k_MaxProbeCountPerBatch, probeCount);
+         probePositionsBuffer = new GraphicsBuffer(GraphicsBuffer.Target.Structured, batchSize, Marshal.SizeOf<Vector3>());
+         occlusionOutputBuffer = new GraphicsBuffer(GraphicsBuffer.Target.Structured, batchSize, Marshal.SizeOf<Vector4>());
+         shadingDirectionBuffer = new GraphicsBuffer(GraphicsBuffer.Target.Structured, skyDirection ? batchSize : 1, Marshal.SizeOf<Vector3>());
+         scratchBuffer = RayTracingHelper.CreateScratchBufferForBuildAndDispatch(m_AccelerationStructure, skyOcclusionShader, (uint)batchSize, 1, 1);
+
+         var buildCmd = new CommandBuffer();
+         m_AccelerationStructure.Build(buildCmd, scratchBuffer);
+         Graphics.ExecuteCommandBuffer(buildCmd);
+         buildCmd.Dispose();
+
+         int sobolBufferSize = (int)(SobolData.SobolDims * SobolData.SobolSize);
+         sobolBuffer = new GraphicsBuffer(GraphicsBuffer.Target.Structured, sobolBufferSize, Marshal.SizeOf<uint>());
+         sobolBuffer.SetData(SobolData.SobolMatrices);
+
+         cprBuffer = new GraphicsBuffer(GraphicsBuffer.Target.Structured, SamplingResources.cranleyPattersonRotationBufferSize, Marshal.SizeOf<float>());
+         cprBuffer.SetData(SamplingResources.GetCranleyPattersonRotations());
     }
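Reviewer note: Initialize sizes every GPU buffer for a single batch rather than for the whole probe set, so peak GPU memory is bounded by k_MaxProbeCountPerBatch. A minimal sketch of the offset/size arithmetic the batched paths rely on (the ceiling division is what CoreUtils.DivRoundUp computes):

    using UnityEngine;

    static class BatchingSketch
    {
        // Returns (offset, size) pairs covering probeCount items in batches of at most maxPerBatch.
        public static (int offset, int size)[] SplitIntoBatches(int probeCount, int maxPerBatch)
        {
            int batchCount = (probeCount + maxPerBatch - 1) / maxPerBatch; // DivRoundUp
            var batches = new (int, int)[batchCount];
            for (int i = 0; i < batchCount; i++)
            {
                int offset = i * maxPerBatch;
                batches[i] = (offset, Mathf.Min(probeCount - offset, maxPerBatch));
            }
            return batches;
        }
    }

For example, SplitIntoBatches(10, 4) yields (0, 4), (4, 4), (8, 2): only the last batch is short.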
@@ -125,71 +257,26 @@ static IRayTracingAccelStruct BuildAccelerationStructure()
         return accelStruct;
     }

-     public void StartBaking(NativeArray<Vector3> positions)
-     {
-         if (!skyOcclusion)
-             return;
-
-         probePositions = positions;
-         occlusionResults = new Vector4[probeCount];
-         directionResults = skyDirection ? new uint[probeCount] : null;
-
-         // Create acceletation structure
-         m_AccelerationStructure = BuildAccelerationStructure();
-         var skyOcclusionShader = s_TracingContext.shaderSO;
-
-         int batchSize = Mathf.Min(k_MaxProbeCountPerBatch, probeCount);
-         probePositionsBuffer = new GraphicsBuffer(GraphicsBuffer.Target.Structured, batchSize, Marshal.SizeOf<Vector3>());
-         occlusionOutputBuffer = new GraphicsBuffer(GraphicsBuffer.Target.Structured, batchSize, Marshal.SizeOf<Vector4>());
-         skyShadingBuffer = new GraphicsBuffer(GraphicsBuffer.Target.Structured, skyDirection ? batchSize : 1, Marshal.SizeOf<Vector3>());
-         skyShadingIndexBuffer = new GraphicsBuffer(GraphicsBuffer.Target.Structured, skyDirection ? batchSize : 1, Marshal.SizeOf<uint>());
-         scratchBuffer = RayTracingHelper.CreateScratchBufferForBuildAndDispatch(m_AccelerationStructure, skyOcclusionShader,
-             (uint)batchSize, 1, 1);
-
-         var buildCmd = new CommandBuffer();
-         m_AccelerationStructure.Build(buildCmd, scratchBuffer);
-         Graphics.ExecuteCommandBuffer(buildCmd);
-         buildCmd.Dispose();
-
-         int sobolBufferSize = (int)(SobolData.SobolDims * SobolData.SobolSize);
-         sobolBuffer = new GraphicsBuffer(GraphicsBuffer.Target.Structured, sobolBufferSize, Marshal.SizeOf<uint>());
-         sobolBuffer.SetData(SobolData.SobolMatrices);
-
-         cprBuffer = new GraphicsBuffer(GraphicsBuffer.Target.Structured, SamplingResources.cranleyPattersonRotationBufferSize, Marshal.SizeOf<float>());
-         cprBuffer.SetData(SamplingResources.GetCranleyPattersonRotations());
-
-         if (skyDirection)
-         {
-             DynamicSkyPrecomputedDirections.Initialize();
-             precomputedShadingDirections = ProbeReferenceVolume.instance.GetRuntimeResources().SkyPrecomputedDirections;
-         }
-         else
-         {
-             precomputedShadingDirections = new ComputeBuffer(1, Marshal.SizeOf<Vector3>());
-         }
-     }
-
-     public bool RunSkyOcclusionStep()
+     public override bool Step()
     {
         if (currentStep >= stepCount)
             return true;

         ref var job = ref jobs[currentJob];
-         if (job.indices.Length == 0)
+         if (job.probeCount == 0)
         {
             currentJob++;
-             return false;
-
+             return true;
         }

         var cmd = new CommandBuffer();
         var skyOccShader = s_TracingContext.shaderSO;

         // Divide the job into batches of 128k probes to reduce memory usage.
-         int batchCount = CoreUtils.DivRoundUp(job.indices.Length, k_MaxProbeCountPerBatch);
+         int batchCount = CoreUtils.DivRoundUp(job.probeCount, k_MaxProbeCountPerBatch);
         int batchOffset = batchIndex * k_MaxProbeCountPerBatch;
-         int batchSize = Mathf.Min(job.indices.Length - batchOffset, k_MaxProbeCountPerBatch);
+         int batchSize = Mathf.Min(job.probeCount - batchOffset, k_MaxProbeCountPerBatch);

         if (sampleIndex == 0)
         {
@@ -199,16 +286,14 @@ public bool RunSkyOcclusionStep()
         s_TracingContext.BindSamplingTextures(cmd);
         skyOccShader.SetAccelerationStructure(cmd, "_AccelStruct", m_AccelerationStructure);
-         skyOccShader.SetIntParam(cmd, _BakeSkyShadingDirection, skyDirection ? 1 : 0);
+         skyOccShader.SetIntParam(cmd, _BakeSkyShadingDirection, shadingDirections.IsCreated ? 1 : 0);
         skyOccShader.SetIntParam(cmd, _BackFaceCulling, skyOcclusionBackFaceCulling);
         skyOccShader.SetFloatParam(cmd, _AverageAlbedo, skyOcclusionAverageAlbedo);
         skyOccShader.SetFloatParam(cmd, _OffsetRay, k_SkyOcclusionOffsetRay);
         skyOccShader.SetBufferParam(cmd, _ProbePositions, probePositionsBuffer);
         skyOccShader.SetBufferParam(cmd, _SkyOcclusionOut, occlusionOutputBuffer);
-         skyOccShader.SetBufferParam(cmd, _SkyShadingPrecomputedDirection, precomputedShadingDirections);
-         skyOccShader.SetBufferParam(cmd, _SkyShadingOut, skyShadingBuffer);
-         skyOccShader.SetBufferParam(cmd, _SkyShadingDirectionIndexOut, skyShadingIndexBuffer);
+         skyOccShader.SetBufferParam(cmd, _SkyShadingOut, shadingDirectionBuffer);
         skyOccShader.SetBufferParam(cmd, _SobolBuffer, sobolBuffer);
         skyOccShader.SetBufferParam(cmd, _CPRBuffer, cprBuffer);

@@ -240,70 +325,51 @@ public bool RunSkyOcclusionStep()
             }

             // Progress bar
-             currentStep += (ulong)batchSize;
+             step += (ulong)batchSize;
             break;
         }
     }

     cmd.Dispose();
-     return false;
+     return true;
 }

 void FetchResults(in BakeJob job, int batchOffset, int batchSize)
 {
-     var batchOcclusionResults = new Vector4[batchSize];
-     var batchDirectionResults = skyDirection ? new uint[batchSize] : null;
+     var batchOcclusionResults = occlusionResults.GetSubArray(job.startOffset + batchOffset, batchSize);
+     var req1 = AsyncGPUReadback.RequestIntoNativeArray(ref batchOcclusionResults, occlusionOutputBuffer, batchSize * 4 * sizeof(float), 0);

-     occlusionOutputBuffer.GetData(batchOcclusionResults);
-     if (skyDirection)
-         skyShadingIndexBuffer.GetData(batchDirectionResults);
-
-     for (int i = 0; i < batchSize; i++)
+     if (directionResults.IsCreated)
     {
-         var dst = job.indices[i + batchOffset];
-         occlusionResults[dst] = batchOcclusionResults[i];
-         if (skyDirection)
-             directionResults[dst] = batchDirectionResults[i];
+         var batchDirectionResults = directionResults.GetSubArray(job.startOffset + batchOffset, batchSize);
+         var req2 = AsyncGPUReadback.RequestIntoNativeArray(ref batchDirectionResults, shadingDirectionBuffer, batchSize * 3 * sizeof(float), 0);
+
+         req2.WaitForCompletion();
     }
+
+     // TODO: use double buffering to hide readback latency
+     req1.WaitForCompletion();
 }

-     public void Dispose()
+     public override void Dispose()
     {
         if (m_AccelerationStructure == null)
             return;

         occlusionOutputBuffer?.Dispose();
-         skyShadingBuffer?.Dispose();
+         shadingDirectionBuffer?.Dispose();
         scratchBuffer?.Dispose();
         probePositionsBuffer?.Dispose();
-         skyShadingIndexBuffer?.Dispose();
         sobolBuffer?.Dispose();
         cprBuffer?.Dispose();
-         if (!skyDirection)
-             precomputedShadingDirections?.Dispose();
+         occlusionResults.Dispose();
+         if (directionResults.IsCreated)
+             directionResults.Dispose();

         m_AccelerationStructure.Dispose();
     }
 }
-
- internal static uint LinearSearchClosestDirection(Vector3[] precomputedDirections, Vector3 direction)
- {
-     uint indexMax = 255;
-     float bestDot = -10.0f;
-     uint bestIndex = 0;
-
-     for (uint index = 0; index < indexMax; index++)
-     {
-         float currentDot = Vector3.Dot(direction, precomputedDirections[index]);
-         if (currentDot > bestDot)
-         {
-             bestDot = currentDot;
-             bestIndex = index;
-         }
-     }
-     return bestIndex;
- }
 }
}
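Reviewer note on the TODO above: one possible shape for the double buffering, alternating two output buffers so the readback of batch i overlaps the dispatch of batch i+1. Illustrative only — dispatchBatch stands in for the ray tracing dispatch and all names are hypothetical:

    using Unity.Collections;
    using UnityEngine;
    using UnityEngine.Rendering;

    class DoubleBufferedReadbackSketch
    {
        // Two identically sized output buffers, used alternately per batch.
        GraphicsBuffer[] m_Buffers;
        AsyncGPUReadbackRequest? m_Pending;

        public DoubleBufferedReadbackSketch(int batchSize, int stride)
        {
            m_Buffers = new[]
            {
                new GraphicsBuffer(GraphicsBuffer.Target.Structured, batchSize, stride),
                new GraphicsBuffer(GraphicsBuffer.Target.Structured, batchSize, stride)
            };
        }

        public void ReadAllBatches(NativeArray<Vector4> results, int batchSize, System.Action<GraphicsBuffer, int> dispatchBatch)
        {
            int batchCount = (results.Length + batchSize - 1) / batchSize;
            for (int i = 0; i < batchCount; i++)
            {
                var buffer = m_Buffers[i & 1];
                dispatchBatch(buffer, i);

                // Block only on the readback issued for the previous batch.
                m_Pending?.WaitForCompletion();

                int count = Mathf.Min(results.Length - i * batchSize, batchSize);
                var slice = results.GetSubArray(i * batchSize, count);
                m_Pending = AsyncGPUReadback.RequestIntoNativeArray(ref slice, buffer, count * 4 * sizeof(float), 0);
            }
            m_Pending?.WaitForCompletion();
        }
    }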
diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.VirtualOffset.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.VirtualOffset.cs
index 2dbeb7db1aa..9a4a678e840 100644
--- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.VirtualOffset.cs
+++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.VirtualOffset.cs
@@ -1,3 +1,4 @@
+using System;
 using System.Collections.Generic;
 using System.Runtime.InteropServices;
 using Unity.Collections;
@@ -6,9 +7,41 @@
 namespace UnityEngine.Rendering
 {
-     partial class ProbeGIBaking
+     partial class AdaptiveProbeVolumes
     {
-         struct VirtualOffsetBaking
+         /// <summary>
+         /// Virtual offset baker.
+         /// </summary>
+         public abstract class VirtualOffsetBaker : IDisposable
+         {
+             /// <summary>The current baking step.</summary>
+             public abstract ulong currentStep { get; }
+             /// <summary>The total number of steps.</summary>
+             public abstract ulong stepCount { get; }
+
+             /// <summary>Array storing the resulting virtual offsets to be applied to probe positions.</summary>
+             public abstract NativeArray<Vector3> offsets { get; }
+
+             /// <summary>
+             /// This is called before the start of baking to allow allocating necessary resources.
+             /// </summary>
+             /// <param name="bakingSet">The baking set that is currently baked.</param>
+             /// <param name="probePositions">The probe positions.</param>
+             public abstract void Initialize(ProbeVolumeBakingSet bakingSet, NativeArray<Vector3> probePositions);
+
+             /// <summary>
+             /// Run a step of virtual offset baking. Baking is considered done when the currentStep property equals stepCount.
+             /// </summary>
+             /// <returns>Return false if the bake failed and should be stopped.</returns>
+             public abstract bool Step();
+
+             /// <summary>
+             /// Performs necessary tasks to free allocated resources.
+             /// </summary>
+             public abstract void Dispose();
+         }
+
+         class DefaultVirtualOffset : VirtualOffsetBaker
         {
             static int k_MaxProbeCountPerBatch = 65535;
@@ -27,7 +60,8 @@ struct ProbeData
         };

         int batchPosIdx;
-         NativeList<Vector3> positions;
+         NativeArray<Vector3> positions;
+         NativeArray<Vector3> results;
         Dictionary<int, TouchupsPerCell> cellToVolumes;
         ProbeData[] probeData;
         Vector3[] batchResult;
@@ -38,17 +72,17 @@ struct ProbeData
         float validityThreshold;

         // Output buffer
-         public Vector3[] offsets;
+         public override NativeArray<Vector3> offsets => results;

         private IRayTracingAccelStruct m_AccelerationStructure;
         private GraphicsBuffer probeBuffer;
         private GraphicsBuffer offsetBuffer;
         private GraphicsBuffer scratchBuffer;

-         public ulong currentStep => (ulong)batchPosIdx;
-         public ulong stepCount => batchResult == null ? 0 : (ulong)positions.Length;
+         public override ulong currentStep => (ulong)batchPosIdx;
+         public override ulong stepCount => batchResult == null ? 0 : (ulong)positions.Length;

-         public void Initialize(ProbeVolumeBakingSet bakingSet, NativeList<Vector3> probePositions)
+         public override void Initialize(ProbeVolumeBakingSet bakingSet, NativeArray<Vector3> probePositions)
         {
             var voSettings = bakingSet.settings.virtualOffsetSettings;
             if (!voSettings.useVirtualOffset)
@@ -60,13 +94,13 @@ public void Initialize(ProbeVolumeBakingSet bakingSet, NativeList probe
             geometryBias = voSettings.outOfGeoOffset;
             validityThreshold = voSettings.validityThreshold;

-             offsets = new Vector3[probePositions.Length];
+             results = new NativeArray<Vector3>(probePositions.Length, Allocator.Persistent);
             cellToVolumes = GetTouchupsPerCell(out bool hasAppliers);

             if (scaleForSearchDist == 0.0f)
             {
                 if (hasAppliers)
-                     DoApplyVirtualOffsetsFromAdjustmentVolumes(probePositions, offsets, cellToVolumes);
+                     DoApplyVirtualOffsetsFromAdjustmentVolumes(probePositions, results, cellToVolumes);
                 return;
             }
@@ -134,7 +168,7 @@ static IRayTracingAccelStruct BuildAccelerationStructure(int mask)
             return accelStruct;
         }

-         public bool RunVirtualOffsetStep()
+         public override bool Step()
         {
             if (currentStep >= stepCount)
                 return true;
@@ -158,7 +192,7 @@ public bool RunVirtualOffsetStep()
             {
                 if (touchup.ContainsPoint(obb, center, positions[batchPosIdx]))
                 {
-                     offsets[batchPosIdx] = offset;
+                     results[batchPosIdx] = offset;
                     adjusted = true;
                     break;
                 }
@@ -209,15 +243,17 @@ public bool RunVirtualOffsetStep()
             offsetBuffer.GetData(batchResult);
             for (int i = 0; i < probeCountInBatch; i++)
-                 offsets[probeData[i].probeIndex] = batchResult[i];
+                 results[probeData[i].probeIndex] = batchResult[i];

             cmd.Dispose();
-
-             return false;
+             return true;
         }

-         public void Dispose()
+         public override void Dispose()
         {
+             if (results.IsCreated)
+                 results.Dispose();
+
             if (batchResult == null)
                 return;

@@ -225,8 +261,6 @@ public void Dispose()
             probeBuffer.Dispose();
             offsetBuffer.Dispose();
             scratchBuffer?.Dispose();
-
-             this = default;
         }
     }

@@ -290,11 +324,11 @@ static internal void RecomputeVOForDebugOnly()
         cell.debugProbes = null;
     }

-     VirtualOffsetBaking job = new();
-     job.Initialize(m_BakingSet, positionList);
+     VirtualOffsetBaker job = virtualOffsetOverride ?? new DefaultVirtualOffset();
+     job.Initialize(m_BakingSet, positionList.AsArray());

     while (job.currentStep < job.stepCount)
-         job.RunVirtualOffsetStep();
+         job.Step();

     foreach (var cell in m_BakingBatch.cells)
     {
@@ -335,7 +369,7 @@ static internal void RecomputeVOForDebugOnly()
     }
 }

- partial class ProbeGIBaking
+ partial class AdaptiveProbeVolumes
 {
     struct TouchupsPerCell
     {
@@ -358,8 +392,8 @@ static Dictionary<int, TouchupsPerCell> GetTouchupsPerCell(out bool hasAppliers)

         hasAppliers |= mode == ProbeAdjustmentVolume.Mode.ApplyVirtualOffset;

-         Vector3Int min = m_ProfileInfo.PositionToCell(adjustment.aabb.min);
-         Vector3Int max = m_ProfileInfo.PositionToCell(adjustment.aabb.max);
+         Vector3Int min = Vector3Int.Max(m_ProfileInfo.PositionToCell(adjustment.aabb.min), minCellPosition);
+         Vector3Int max = Vector3Int.Min(m_ProfileInfo.PositionToCell(adjustment.aabb.max), maxCellPosition);

         for (int x = min.x; x <= max.x; x++)
         {
@@ -384,11 +418,13 @@ static Dictionary<int, TouchupsPerCell> GetTouchupsPerCell(out bool hasAppliers)
         return cellToVolumes;
     }

-     static Vector3[] DoApplyVirtualOffsetsFromAdjustmentVolumes(NativeList<Vector3> positions, Vector3[] offsets, Dictionary<int, TouchupsPerCell> cellToVolumes)
+     static void DoApplyVirtualOffsetsFromAdjustmentVolumes(NativeArray<Vector3> positions, NativeArray<Vector3> offsets, Dictionary<int, TouchupsPerCell> cellToVolumes)
     {
         for (int i = 0; i < positions.Length; i++)
         {
-             int cellIndex = PosToIndex(m_ProfileInfo.PositionToCell(positions[i]));
+             var cellPos = m_ProfileInfo.PositionToCell(positions[i]);
+             cellPos.Clamp(minCellPosition, maxCellPosition);
+             int cellIndex = PosToIndex(cellPos);
             if (cellToVolumes.TryGetValue(cellIndex, out var volumes))
             {
                 foreach (var (touchup, obb, center, offset) in volumes.appliers)
@@ -401,7 +437,6 @@ static Vector3[] DoApplyVirtualOffsetsFromAdjustmentVolumes(NativeList<Vector3> positions, Vector3[] offsets, Dictionary<int, TouchupsPerCell> cellToVolumes)
             }
         }
     }
-     return offsets;
 }

 enum InstanceFlags
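Reviewer note: the Vector3Int.Max/Min clamps added to GetTouchupsPerCell, and the cellPos.Clamp above, guard against adjustment volumes whose AABB extends past the baked bounds; unclamped, PositionToCell could yield an index for a cell that was never baked. A tiny worked sketch with made-up bounds:

    using UnityEngine;

    static class CellClampSketch
    {
        static Vector3Int PositionToCell(Vector3 p, float cellSize) => Vector3Int.FloorToInt(p / cellSize);

        public static void Example()
        {
            Vector3Int minCell = new(-2, 0, -2), maxCell = new(1, 0, 1);
            var cell = PositionToCell(new Vector3(250f, 0f, 0f), 100f); // (2, 0, 0): outside the baked range
            cell.Clamp(minCell, maxCell);                               // -> (1, 0, 0): nearest valid cell
            Debug.Log(cell);
        }
    }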
diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.cs
index 615265acbfb..9febb64852e 100644
--- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.cs
+++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.cs
@@ -1,17 +1,13 @@
 using System;
 using System.Linq;
+using System.Threading;
 using System.Collections.Generic;
 using Unity.Collections;
-using Unity.Collections.LowLevel.Unsafe;
 using UnityEngine.SceneManagement;
 using UnityEditor;

 using Brick = UnityEngine.Rendering.ProbeBrickIndex.Brick;
-using Cell = UnityEngine.Rendering.ProbeReferenceVolume.Cell;
-using CellDesc = UnityEngine.Rendering.ProbeReferenceVolume.CellDesc;
-using CellData = UnityEngine.Rendering.ProbeReferenceVolume.CellData;
 using IndirectionEntryInfo = UnityEngine.Rendering.ProbeReferenceVolume.IndirectionEntryInfo;
-using StreamableCellDesc = UnityEngine.Rendering.ProbeVolumeStreamableAsset.StreamableCellDesc;

 using TouchupVolumeWithBoundsList = System.Collections.Generic.List<(UnityEngine.Rendering.ProbeReferenceVolume.Volume obb, UnityEngine.Bounds aabb, UnityEngine.Rendering.ProbeAdjustmentVolume volume)>;

@@ -27,7 +23,7 @@ struct BakingCell
     public SphericalHarmonicsL2[] sh;
     public byte[] validityNeighbourMask;
     public Vector4[] skyOcclusionDataL0L1;
-     public uint[] skyShadingDirectionIndices;
+     public byte[] skyShadingDirectionIndices;
     public float[] validity;
     public Vector3[] offsetVectors;
     public float[] touchupVolumeInteraction;
@@ -132,8 +128,6 @@ void ReadAdjustmentVolumes(ProbeVolumeBakingSet bakingSet, BakingBatch bakingBat
     intensityScale = 1.0f;
     skyShadingDirectionOverride = null;

-     var skyPrecomputedDirections = DynamicSkyPrecomputedDirections.GetPrecomputedDirections();
-
     foreach (var touchup in localTouchupVolumes)
     {
         var touchupBound = touchup.aabb;
@@ -164,7 +158,7 @@ void ReadAdjustmentVolumes(ProbeVolumeBakingSet bakingSet, BakingBatch bakingBat
             bakingBatch.customDilationThresh[(index, i)] = thresh;
         }
         else if (touchupVolume.mode == ProbeAdjustmentVolume.Mode.OverrideSkyDirection && bakingSet.skyOcclusion && bakingSet.skyOcclusionShadingDirection)
-             skyShadingDirectionOverride = ProbeGIBaking.LinearSearchClosestDirection(skyPrecomputedDirections, touchupVolume.skyDirection);
+             skyShadingDirectionOverride = AdaptiveProbeVolumes.SkyOcclusionBaker.EncodeSkyShadingDirection(touchupVolume.skyDirection);

         if (touchupVolume.mode == ProbeAdjustmentVolume.Mode.IntensityScale)
             intensityScale = touchupVolume.intensityScale;
@@ -183,20 +177,20 @@ void ReadAdjustmentVolumes(ProbeVolumeBakingSet bakingSet, BakingBatch bakingBat
 }

 internal void SetBakedData(ProbeVolumeBakingSet bakingSet, BakingBatch bakingBatch, TouchupVolumeWithBoundsList localTouchupVolumes, int i, int probeIndex,
-     in SphericalHarmonicsL2 sh, float validity, Vector3[] virtualOffsets, Vector4[] skyOcclusion, uint[] skyDirection)
+     in SphericalHarmonicsL2 sh, float validity, NativeArray<Vector3> virtualOffsets, NativeArray<Vector4> skyOcclusion, NativeArray<uint> skyDirection)
 {
     ReadAdjustmentVolumes(bakingSet, bakingBatch, localTouchupVolumes, i, validity, out var invalidatedProbe, out var intensityScale, out var skyShadingDirectionOverride);
     SetSHCoefficients(i, sh, intensityScale, validity, bakingSet.settings.dilationSettings);

-     if (virtualOffsets != null)
+     if (virtualOffsets.IsCreated)
         offsetVectors[i] = virtualOffsets[probeIndex];

-     if (skyOcclusion != null)
+     if (skyOcclusion.IsCreated)
     {
         skyOcclusionDataL0L1[i] = skyOcclusion[probeIndex];
-         if (skyDirection != null)
-             skyShadingDirectionIndices[i] = skyShadingDirectionOverride.HasValue ? skyShadingDirectionOverride.Value : skyDirection[probeIndex];
+         if (skyDirection.IsCreated)
+             skyShadingDirectionIndices[i] = (byte)(skyShadingDirectionOverride ?? skyDirection[probeIndex]);
     }

     float currValidity = invalidatedProbe ? 1.0f : validity;
@@ -268,25 +262,11 @@ public int GetProbePositionHash(Vector3 position)
     public int GetSubdivLevelAt(Vector3 position) => uniqueBrickSubdiv[GetProbePositionHash(position)];
 }

- class ProbeVolumeProfileInfo
- {
-     public int simplificationLevels;
-     public float minDistanceBetweenProbes;
-     public Vector3 probeOffset;
-
-     public int maxSubdivision => ProbeVolumeBakingSet.GetMaxSubdivision(simplificationLevels);
-     public float minBrickSize => ProbeVolumeBakingSet.GetMinBrickSize(minDistanceBetweenProbes);
-     public int cellSizeInBricks => ProbeVolumeBakingSet.GetCellSizeInBricks(simplificationLevels);
-     public float cellSizeInMeters => (float)cellSizeInBricks * minBrickSize;
-
-     public Vector3Int PositionToCell(Vector3 position) => Vector3Int.FloorToInt((position - probeOffset) / cellSizeInMeters);
- }
-
 /// <summary>
 /// Class responsible for baking of Probe Volumes
 /// </summary>
 [InitializeOnLoad]
- public partial class ProbeGIBaking
+ public partial class AdaptiveProbeVolumes
 {
     internal abstract class BakingProfiling<T> where T : Enum
     {
@@ -395,45 +375,209 @@ public BakingCompleteProfiling(Stages stage) : base(stage, ref currentStage) { }
         public void Dispose() { OnDispose(ref currentStage); }
     }

-     struct CellCounts
+     struct BakeData
     {
-         public int bricksCount;
-         public int chunksCount;
+         // Inputs
+         public BakeJob[] jobs;
+         public int probeCount;
+         public int reflectionProbeCount;
+
+         public NativeArray<int> positionRemap;
+         public NativeArray<Vector3> sortedPositions;
+
+         // Workers
+         public Thread bakingThread;
+         public VirtualOffsetBaker virtualOffsetJob;
+         public SkyOcclusionBaker skyOcclusionJob;
+         public LightingBaker lightingJob;
+         public int cellIndex;
+
+         // Progress reporting
+         public BakingStep step;
+         public ulong stepCount;
+
+         // Cancellation
+         public bool failed;
+
+         public void Init(ProbeVolumeBakingSet bakingSet, NativeList<Vector3> probePositions, List<Vector3> requests)
+         {
+             probeCount = probePositions.Length;
+             reflectionProbeCount = requests.Count;
+
+             jobs = CreateBakingJobs(bakingSet, requests.Count != 0);
+             SortPositions(probePositions, requests);
+
+             virtualOffsetJob = virtualOffsetOverride ?? new DefaultVirtualOffset();
+             virtualOffsetJob.Initialize(bakingSet, sortedPositions.GetSubArray(0, probeCount));
+
+             skyOcclusionJob = skyOcclusionOverride ?? new DefaultSkyOcclusion();
+             skyOcclusionJob.Initialize(bakingSet, sortedPositions.GetSubArray(0, probeCount));
+             if (skyOcclusionJob is DefaultSkyOcclusion defaultSOJob)
+                 defaultSOJob.jobs = jobs;
+
+             lightingJob = lightingOverride ?? new DefaultLightTransport();
+             lightingJob.Initialize(sortedPositions);
+             if (lightingJob is DefaultLightTransport defaultLightingJob)
+                 defaultLightingJob.jobs = jobs;

-         public void Add(CellCounts o)
+             cellIndex = 0;
+
+             LightingBaker.cancel = false;
+             step = BakingStep.VirtualOffset;
+             stepCount = virtualOffsetJob.stepCount + lightingJob.stepCount + skyOcclusionJob.stepCount;
+         }
+
+         public void ExecuteLightingAsync()
         {
-             bricksCount += o.bricksCount;
-             chunksCount += o.chunksCount;
+             bakingThread = new Thread(() => {
+                 var job = s_BakeData.lightingJob;
+                 while (job.currentStep < job.stepCount)
+                 {
+                     if (!job.Step())
+                     {
+                         s_BakeData.failed = true;
+                         return;
+                     }
+                     if (LightingBaker.cancel)
+                         break;
+                 }
+             });
+             bakingThread.Start();
         }
-     }

-     struct CellChunkData
-     {
-         public bool scenarioValid;
+         static BakeJob[] CreateBakingJobs(ProbeVolumeBakingSet bakingSet, bool hasAdditionalRequests)
+         {
+             // Build the list of adjustment volumes affecting sample count
+             var touchupVolumesAndBounds = new TouchupVolumeWithBoundsList();
+             {
+                 // This is slow, but we should have very few touchup volumes.
+                 foreach (var adjustment in s_AdjustmentVolumes)
+                 {
+                     if (adjustment.volume.mode == ProbeAdjustmentVolume.Mode.OverrideSampleCount)
+                         touchupVolumesAndBounds.Add(adjustment);
+                 }
+
+                 // Sort by volume to give priority to smaller volumes
+                 touchupVolumesAndBounds.Sort((a, b) => (a.aabb.size.x * a.aabb.size.y * a.aabb.size.z).CompareTo(b.aabb.size.x * b.aabb.size.y * b.aabb.size.z));
+             }
+
+             var lightingSettings = ProbeVolumeLightingTab.GetLightingSettings();
+             bool skyOcclusion = bakingSet.skyOcclusion;
+
+             int additionalJobs = hasAdditionalRequests ? 2 : 1;
+             var jobs = new BakeJob[touchupVolumesAndBounds.Count + additionalJobs];
+
+             for (int i = 0; i < touchupVolumesAndBounds.Count; i++)
+                 jobs[i].Create(lightingSettings, skyOcclusion, touchupVolumesAndBounds[i]);
+
+             jobs[touchupVolumesAndBounds.Count + 0].Create(bakingSet, lightingSettings, skyOcclusion);
+             if (hasAdditionalRequests)
+                 jobs[touchupVolumesAndBounds.Count + 1].Create(bakingSet, lightingSettings, false);
+
+             return jobs;
+         }
+
+         // Place positions contiguously for each bake job in a single array, with reflection probes at the end
+         public void SortPositions(NativeList<Vector3> probePositions, List<Vector3> additionalRequests)
+         {
+             positionRemap = new NativeArray<int>(probePositions.Length, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
+             sortedPositions = new NativeArray<Vector3>(probePositions.Length + additionalRequests.Count, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
+             int regularJobCount = additionalRequests.Count != 0 ? jobs.Length - 1 : jobs.Length;
+
+             // Place each probe in the correct job
+             int[] jobSize = new int[regularJobCount];
+             for (int i = 0; i < probePositions.Length; i++)
+             {
+                 // The last regular job (so before reflection probes if they exist) is the default one.
+                 // In case we don't match any touchup, the probe is placed in this one.
+                 int jobIndex = 0;
+                 for (; jobIndex < regularJobCount - 1; jobIndex++)
+                 {
+                     if (jobs[jobIndex].Contains(probePositions[i]))
+                         break;
+                 }
+
+                 positionRemap[i] = jobIndex;
+                 jobSize[jobIndex]++;
+             }
+
+             // Compute the size and offset of each job in the sorted array
+             int currentOffset = 0;
+             for (int i = 0; i < regularJobCount; i++)
+             {
+                 ref var job = ref jobs[i];
+                 job.startOffset = currentOffset;
+                 job.probeCount = jobSize[i];
+                 currentOffset += job.probeCount;
+                 jobSize[i] = 0;
+             }
+
+             Debug.Assert(currentOffset == probePositions.Length);
+
+             // Sort positions and store the remapping
+             for (int i = 0; i < probePositions.Length; i++)
+             {
+                 int jobIndex = positionRemap[i];
+                 int newPos = jobs[jobIndex].startOffset + jobSize[jobIndex]++;
+                 positionRemap[i] = newPos;
+                 sortedPositions[newPos] = probePositions[i];
+             }
+
+             // Place reflection probe positions at the end of the array
+             if (additionalRequests.Count != 0)
+             {
+                 ref var requestJob = ref jobs[jobs.Length - 1];
+                 requestJob.startOffset = currentOffset;
+                 requestJob.probeCount = additionalRequests.Count;
+                 for (int i = 0; i < additionalRequests.Count; i++)
+                     sortedPositions[currentOffset++] = additionalRequests[i];
+
+                 Debug.Assert(currentOffset == sortedPositions.Length);
+             }
+         }
+
+         public void ApplyVirtualOffset()
+         {
+             var offsets = virtualOffsetJob.offsets;
+             for (int i = 0; i < offsets.Length; i++)
+                 sortedPositions[i] += offsets[i];
+         }
+
+         public bool Done()
+         {
+             ulong currentStep = s_BakeData.virtualOffsetJob.currentStep + lightingJob.currentStep + s_BakeData.skyOcclusionJob.currentStep;
+             return currentStep >= s_BakeData.stepCount && s_BakeData.step == BakingStep.Last;
+         }

-         public NativeArray<ushort> shL0L1RxData;
-         public NativeArray<byte> shL1GL1RyData;
-         public NativeArray<byte> shL1BL1RzData;
+         public void Dispose()
+         {
+             if (failed)
+                 Debug.LogError("Probe Volume Baking failed.");
+
+             if (jobs == null)
+                 return;
+
+             foreach (var job in jobs)
+                 job.Dispose();

-         // Optional L2 Data
-         public NativeArray<byte> shL2Data_0;
-         public NativeArray<byte> shL2Data_1;
-         public NativeArray<byte> shL2Data_2;
-         public NativeArray<byte> shL2Data_3;
+             positionRemap.Dispose();
+             sortedPositions.Dispose();

-         public NativeArray<byte> validityNeighMaskData;
-         public NativeArray<ushort> skyOcclusionDataL0L1;
-         public NativeArray<byte> skyShadingDirectionIndices;
+             skyOcclusionJob.encodedDirections.Dispose();
+             virtualOffsetJob.Dispose();
+             skyOcclusionJob.Dispose();
+             lightingJob.Dispose();
+
+             // Clear references to managed data
+             this = default;
+         }
     }

-     internal const string kAPVStreamingAssetsPath = "APVStreamingAssets";
-
     static bool m_IsInit = false;
     static BakingBatch m_BakingBatch;
     static ProbeVolumeBakingSet m_BakingSet = null;
-     static CellCounts m_TotalCellCounts;
-
-     static internal ProbeVolumeProfileInfo m_ProfileInfo = null;
+     static TouchupVolumeWithBoundsList s_AdjustmentVolumes;

     static Bounds globalBounds = new Bounds();
     static Vector3Int minCellPosition = Vector3Int.one * int.MaxValue;
@@ -441,6 +585,8 @@ struct CellChunkData
     static Vector3Int cellCount = Vector3Int.zero;

     static int pvHashesAtBakeStart = -1;
+     static APVRTContext s_TracingContext;
+     static BakeData s_BakeData;

     static Dictionary<Vector3Int, int> m_CellPosToIndex = new Dictionary<Vector3Int, int>();
     static Dictionary<int, BakingCell> m_BakedCells = new Dictionary<int, BakingCell>();
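Reviewer note: SortPositions above is a two-pass counting sort keyed by job index — count bucket sizes, turn them into start offsets, then scatter while recording the old-to-new index remapping. The same idea in a self-contained form (int payloads stand in for probe positions):

    static class CountingSortSketch
    {
        // jobOfProbe[i] is the job index probe i maps to; returns probes reordered
        // so each job's probes are contiguous, plus the old->new remap table.
        public static int[] SortByJob(int[] jobOfProbe, int jobCount, out int[] remap)
        {
            int n = jobOfProbe.Length;
            var start = new int[jobCount];
            foreach (var j in jobOfProbe) start[j]++;            // pass 1: bucket sizes

            for (int j = 0, off = 0; j < jobCount; j++)          // exclusive prefix sum -> offsets
            { int c = start[j]; start[j] = off; off += c; }

            remap = new int[n];
            var sorted = new int[n];
            for (int i = 0; i < n; i++)                          // pass 2: scatter
            {
                int dst = start[jobOfProbe[i]]++;
                remap[i] = dst;
                sorted[dst] = i; // the real code stores the probe position here
            }
            return sorted;
        }
    }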
@@ -448,47 +594,10 @@ struct CellChunkData
     // This is because during partial bake we only want to dilate those cells.
     static Dictionary<int, BakingCell> m_CellsToDilate = new Dictionary<int, BakingCell>();

-     internal static List<string> partialBakeSceneList = null;
+     internal static HashSet<string> partialBakeSceneList = null;
     internal static bool isBakingSceneSubset => partialBakeSceneList != null;
     internal static bool isFreezingPlacement = false;

-     internal static List<ProbeVolumePerSceneData> GetPerSceneDataList()
-     {
-         var fullPerSceneDataList = ProbeReferenceVolume.instance.perSceneDataList;
-         if (!isBakingSceneSubset)
-             return fullPerSceneDataList;
-
-         List<ProbeVolumePerSceneData> usedPerSceneDataList = new();
-         foreach (var sceneData in fullPerSceneDataList)
-         {
-             if (partialBakeSceneList.Contains(ProbeReferenceVolume.GetSceneGUID(sceneData.gameObject.scene)))
-                 usedPerSceneDataList.Add(sceneData);
-         }
-         return usedPerSceneDataList;
-     }
-
-     internal static List<ProbeVolume> GetProbeVolumeList()
-     {
-         var fullPvList = GameObject.FindObjectsByType<ProbeVolume>(FindObjectsSortMode.InstanceID);
-         List<ProbeVolume> usedPVList;
-
-         if (isBakingSceneSubset)
-         {
-             usedPVList = new List<ProbeVolume>();
-             foreach (var pv in fullPvList)
-             {
-                 if (pv.isActiveAndEnabled && partialBakeSceneList.Contains(ProbeReferenceVolume.GetSceneGUID(pv.gameObject.scene)))
-                     usedPVList.Add(pv);
-             }
-         }
-         else
-         {
-             usedPVList = new List<ProbeVolume>(fullPvList);
-         }
-
-         return usedPVList;
-     }
-
     static SphericalHarmonicsL2 s_BlackSH;
     static bool s_BlackSHInitialized = false;

@@ -509,12 +618,12 @@ static SphericalHarmonicsL2 GetBlackSH()
         return s_BlackSH;
     }

-     static ProbeGIBaking()
+     static AdaptiveProbeVolumes()
     {
         Init();
     }

-     internal static void Init()
+     static internal void Init()
     {
         if (!m_IsInit)
         {
@@ -525,6 +634,21 @@ internal static void Init()
         }
     }

+     static internal void Dispose()
+     {
+         s_TracingContext.Dispose();
+     }
+
+     static void OnLightingDataCleared()
+     {
+         if (ProbeReferenceVolume.instance == null)
+             return;
+         if (!ProbeReferenceVolume.instance.isInitialized || !ProbeReferenceVolume.instance.enabledBySRP)
+             return;
+
+         Clear();
+     }
+
     static internal void Clear()
     {
         var activeSet = ProbeVolumeBakingSet.GetBakingSetForScene(SceneManager.GetActiveScene());
@@ -542,62 +666,6 @@ static internal void Clear()
         probeVolume.OnLightingDataAssetCleared();
     }

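Reviewer note: the List&lt;string&gt; to HashSet&lt;string&gt; change for partialBakeSceneList matters because the partial-bake filters call Contains once per scene object — O(n) per lookup on a list, O(1) on a hash set. Sketch of the access pattern it speeds up:

    using System.Collections.Generic;

    static class PartialBakeFilterSketch
    {
        public static List<T> Filter<T>(IEnumerable<(T item, string sceneGUID)> all, HashSet<string> bakedScenes)
        {
            var used = new List<T>();
            foreach (var (item, guid) in all)
                if (bakedScenes.Contains(guid)) // constant-time membership test
                    used.Add(item);
            return used;
        }
    }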
-     static internal bool CanFreezePlacement()
-     {
-         if (!ProbeReferenceVolume.instance.supportLightingScenarios)
-             return false;
-
-         // Check if all the scene datas in the scene have a baking set, if not then we cannot enable this option.
-         var sceneDataList = GetPerSceneDataList();
-         if (sceneDataList.Count == 0)
-             return false;
-
-         foreach (var sceneData in sceneDataList)
-         {
-             if (sceneData.bakingSet == null || sceneData.bakingSet.GetSceneCellIndexList(sceneData.sceneGUID) == null)
-                 return false;
-         }
-
-         return true;
-     }
-
-     static void GetProbeAndChunkIndex(int globalProbeIndex, out int chunkIndex, out int chunkProbeIndex)
-     {
-         var chunkSizeInProbeCount = ProbeBrickPool.GetChunkSizeInProbeCount();
-         chunkIndex = globalProbeIndex / chunkSizeInProbeCount;
-         chunkProbeIndex = globalProbeIndex - chunkIndex * chunkSizeInProbeCount;
-     }
-
-     static void FindWorldBounds()
-     {
-         var prv = ProbeReferenceVolume.instance;
-         prv.clearAssetsOnVolumeClear = true;
-
-         var activeScene = SceneManager.GetActiveScene();
-         var activeSet = ProbeVolumeBakingSet.GetBakingSetForScene(activeScene);
-
-         bool hasFoundBounds = false;
-
-         foreach (var sceneGUID in activeSet.sceneGUIDs)
-         {
-             var bakeData = activeSet.GetSceneBakeData(sceneGUID);
-             if (bakeData.hasProbeVolume)
-             {
-                 if (hasFoundBounds)
-                 {
-                     globalBounds.Encapsulate(bakeData.bounds);
-                 }
-                 else
-                 {
-                     globalBounds = bakeData.bounds;
-                     hasFoundBounds = true;
-                 }
-             }
-         }
-
-         ProbeReferenceVolume.instance.globalBounds = globalBounds;
-     }
-
     static bool SetBakingContext(List<ProbeVolumePerSceneData> perSceneData)
     {
         var prv = ProbeReferenceVolume.instance;
@@ -692,6 +760,7 @@ static void CachePVHashes(List<ProbeVolume> probeVolumes)
             pvHashesAtBakeStart += pvHashesAtBakeStart * 23 + pv.GetHashCode();
         }
     }
+
     static void CheckPVChanges()
     {
         // If we have baking in flight.
@@ -712,13 +781,97 @@ static void CheckPVChanges()
         }
     }

-     static ProbeVolumeProfileInfo GetProfileInfoFromBakingSet(ProbeVolumeBakingSet set)
+     static void CellCountInDirections(out Vector3Int minCellPositionXYZ, out Vector3Int maxCellPositionXYZ, float cellSizeInMeters, Vector3 worldOffset)
+     {
+         minCellPositionXYZ = Vector3Int.zero;
+         maxCellPositionXYZ = Vector3Int.zero;
+
+         var centeredMin = globalBounds.min - worldOffset;
+         var centeredMax = globalBounds.max - worldOffset;
+
+         minCellPositionXYZ.x = Mathf.FloorToInt(centeredMin.x / cellSizeInMeters);
+         minCellPositionXYZ.y = Mathf.FloorToInt(centeredMin.y / cellSizeInMeters);
+         minCellPositionXYZ.z = Mathf.FloorToInt(centeredMin.z / cellSizeInMeters);
+
+         maxCellPositionXYZ.x = Mathf.CeilToInt(centeredMax.x / cellSizeInMeters) - 1;
+         maxCellPositionXYZ.y = Mathf.CeilToInt(centeredMax.y / cellSizeInMeters) - 1;
+         maxCellPositionXYZ.z = Mathf.CeilToInt(centeredMax.z / cellSizeInMeters) - 1;
+     }
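Reviewer note: a quick sanity check of the floor/ceil-minus-one arithmetic in CellCountInDirections, with concrete numbers:

    // globalBounds = [-250, 250] on one axis, cellSizeInMeters = 100, worldOffset = 0:
    //   minCell = FloorToInt(-250 / 100)     = FloorToInt(-2.5) = -3
    //   maxCell = CeilToInt(  250 / 100) - 1 = 3 - 1            =  2
    // Cells -3..2 (inclusive) span [-300, 300), fully covering the bounds; the
    // CeilToInt result is decremented because the cell range is inclusive on both ends.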
+
+     static TouchupVolumeWithBoundsList GetAdjustementVolumes()
+     {
+         // This is slow, but we should have very few touchup volumes.
+         var touchupVolumes = Object.FindObjectsByType<ProbeAdjustmentVolume>(FindObjectsSortMode.InstanceID);
+
+         var touchupVolumesAndBounds = new TouchupVolumeWithBoundsList(touchupVolumes.Length);
+         foreach (var touchup in touchupVolumes)
+         {
+             if (touchup.isActiveAndEnabled)
+             {
+                 touchup.GetOBBandAABB(out var obb, out var aabb);
+                 touchupVolumesAndBounds.Add((obb, aabb, touchup));
+                 touchup.skyDirection.Normalize();
+             }
+         }
+
+         return touchupVolumesAndBounds;
+     }
+
+     // Actual baking process
+
+     enum BakingStep
+     {
+         VirtualOffset,
+         LaunchThread,
+         SkyOcclusion,
+         Integration,
+         FinalizeCells,
+
+         Last = FinalizeCells + 1
+     }
+
+     static void OnBakeStarted()
+     {
+         if (PrepareBaking())
+         {
+             ProbeReferenceVolume.instance.checksDuringBakeAction = CheckPVChanges;
+             Lightmapping.SetAdditionalBakeDelegate(BakeDelegate);
+         }
+     }
+
+     internal static bool PrepareBaking()
     {
-         var result = new ProbeVolumeProfileInfo();
-         result.minDistanceBetweenProbes = set.minDistanceBetweenProbes;
-         result.simplificationLevels = set.simplificationLevels;
-         result.probeOffset = set.probeOffset;
-         return result;
+         if (AdaptiveProbeVolumes.isRunning)
+             AdaptiveProbeVolumes.Cancel();
+
+         List<Vector3> requests;
+         NativeList<Vector3> positions;
+         using (new BakingSetupProfiling(BakingSetupProfiling.Stages.OnBakeStarted))
+         {
+             if (!InitializeBake())
+                 return false;
+
+             s_AdjustmentVolumes = GetAdjustementVolumes();
+
+             requests = AdditionalGIBakeRequestsManager.GetProbeNormalizationRequests();
+
+             // Note: this could be executed in the baking delegate to be non-blocking
+             using (new BakingSetupProfiling(BakingSetupProfiling.Stages.PlaceProbes))
+                 positions = RunPlacement();
+
+             if (positions.Length == 0)
+             {
+                 positions.Dispose();
+                 Clear();
+                 CleanBakeData();
+                 return false;
+             }
+         }
+
+         s_BakeData.Init(m_BakingSet, positions, requests);
+         positions.Dispose();
+         return true;
     }

     static bool InitializeBake()
@@ -782,428 +935,153 @@ static bool InitializeBake()
         return true;
     }

-     static void OnBakeStarted()
+     static void BakeDelegate(ref float progress, ref bool done)
     {
-         if (HasAsyncBakeInProgress())
-             CancelAsyncBake();
-
-         if (PrepareBaking())
+         if (s_BakeData.step == BakingStep.VirtualOffset)
         {
+             if (!s_BakeData.virtualOffsetJob.Step())
+                 s_BakeData.failed = true;
+             if (s_BakeData.virtualOffsetJob.currentStep >= s_BakeData.virtualOffsetJob.stepCount)
+             {
+                 if (s_BakeData.virtualOffsetJob.offsets.IsCreated)
+                     s_BakeData.ApplyVirtualOffset();
+                 s_BakeData.step++;
+             }
+         }

-             ProbeReferenceVolume.instance.checksDuringBakeAction = CheckPVChanges;
-             Lightmapping.SetAdditionalBakeDelegate(BakeDelegate);
+         if (s_BakeData.step == BakingStep.LaunchThread)
+         {
+             if (s_BakeData.lightingJob.isThreadSafe)
+                 s_BakeData.ExecuteLightingAsync();
+             s_BakeData.step++;
         }
-     }

-     static void CellCountInDirections(out Vector3Int minCellPositionXYZ, out Vector3Int maxCellPositionXYZ, float cellSizeInMeters, Vector3 worldOffset)
-     {
-         minCellPositionXYZ = Vector3Int.zero;
-         maxCellPositionXYZ = Vector3Int.zero;
+         if (s_BakeData.step == BakingStep.SkyOcclusion)
+         {
+             if (!s_BakeData.skyOcclusionJob.Step())
+                 s_BakeData.failed = true;
+             if (s_BakeData.skyOcclusionJob.currentStep >= s_BakeData.skyOcclusionJob.stepCount)
+             {
+                 if (!s_BakeData.failed && s_BakeData.skyOcclusionJob.shadingDirections.IsCreated)
+                     s_BakeData.skyOcclusionJob.Encode();
+                 s_BakeData.step++;
+             }
+         }

-         var centeredMin = globalBounds.min - worldOffset;
-         var centeredMax = globalBounds.max - worldOffset;
+         if (s_BakeData.step == BakingStep.Integration)
+         {
+             if (!s_BakeData.lightingJob.isThreadSafe)
+             {
+                 if (!s_BakeData.lightingJob.Step())
+                     s_BakeData.failed = true;
+             }
+             if (s_BakeData.lightingJob.currentStep >= s_BakeData.lightingJob.stepCount)
+             {
+                 if (s_BakeData.lightingJob.isThreadSafe)
+                     s_BakeData.bakingThread.Join();
+                 s_BakeData.step++;
+             }
+         }

-         minCellPositionXYZ.x = Mathf.FloorToInt(centeredMin.x / cellSizeInMeters);
-         minCellPositionXYZ.y = Mathf.FloorToInt(centeredMin.y / cellSizeInMeters);
-         minCellPositionXYZ.z = Mathf.FloorToInt(centeredMin.z / cellSizeInMeters);
+         if (s_BakeData.step == BakingStep.FinalizeCells)
+         {
+             FinalizeCell(s_BakeData.cellIndex++, s_BakeData.positionRemap,
+                 s_BakeData.lightingJob.irradiance, s_BakeData.lightingJob.validity,
+                 s_BakeData.virtualOffsetJob.offsets,
+                 s_BakeData.skyOcclusionJob.occlusion, s_BakeData.skyOcclusionJob.encodedDirections);

-         maxCellPositionXYZ.x = Mathf.CeilToInt(centeredMax.x / cellSizeInMeters) - 1;
-         maxCellPositionXYZ.y = Mathf.CeilToInt(centeredMax.y / cellSizeInMeters) - 1;
-         maxCellPositionXYZ.z = Mathf.CeilToInt(centeredMax.z / cellSizeInMeters) - 1;
-     }
+             if (s_BakeData.cellIndex >= m_BakingBatch.cells.Count)
+                 s_BakeData.step++;
+         }

-     static void BrickCountInDirections(out Vector3Int cellsInXYZ, float brickSizeInMeter)
-     {
-         cellsInXYZ = Vector3Int.zero;
+         // Handle error case
+         if (s_BakeData.failed)
+         {
+             CleanBakeData();
+             done = true;
+             return;
+         }

-         Vector3 center = Vector3.zero;
-         var centeredMin = globalBounds.min - center;
-         var centeredMax = globalBounds.max - center;
+         // When using the default backend, the live progress report is not accurate,
+         // so we can't rely on it to know when baking is done, but it's useful for showing progress
+         ulong currentStep = s_BakeData.virtualOffsetJob.currentStep + s_BakeData.skyOcclusionJob.currentStep;
+         if (s_BakeData.lightingJob is DefaultLightTransport defaultJob)
+         {
+             foreach (var job in defaultJob.jobs)
+                 currentStep += job.currentStep;
+         }
+         else
+             currentStep += s_BakeData.lightingJob.currentStep;
+         progress = currentStep / (float)s_BakeData.stepCount;

-         cellsInXYZ.x = Mathf.Max(Mathf.CeilToInt(Mathf.Abs(centeredMin.x / brickSizeInMeter)), Mathf.CeilToInt(Mathf.Abs(centeredMax.x / brickSizeInMeter))) * 2;
-         cellsInXYZ.y = Mathf.Max(Mathf.CeilToInt(Mathf.Abs(centeredMin.y / brickSizeInMeter)), Mathf.CeilToInt(Mathf.Abs(centeredMax.y / brickSizeInMeter))) * 2;
-         cellsInXYZ.z = Mathf.Max(Mathf.CeilToInt(Mathf.Abs(centeredMin.z / brickSizeInMeter)), Mathf.CeilToInt(Mathf.Abs(centeredMax.z / brickSizeInMeter))) * 2;
+         // Use our counter to determine when baking is done
+         if (s_BakeData.Done())
+         {
+             FinalizeBake();
+             done = true;
+         }
     }

-     static CellChunkData GetCellChunkData(CellData cellData, int chunkIndex)
+     static void FinalizeBake(bool cleanup = true)
     {
-         var result = new CellChunkData();
-
-         int chunkSizeInProbes = ProbeBrickPool.GetChunkSizeInProbeCount();
-         int chunkOffset = chunkSizeInProbes * chunkIndex;
-
-         if (m_BakingSet != null)
+         using (new BakingCompleteProfiling(BakingCompleteProfiling.Stages.FinalizingBake))
         {
-             result.scenarioValid = cellData.scenarios.TryGetValue(m_BakingSet.lightingScenario, out var scenarioData);
-
-             if (result.scenarioValid)
+             if (s_BakeData.probeCount != 0)
             {
-                 result.shL0L1RxData = scenarioData.shL0L1RxData.GetSubArray(chunkOffset * 4, chunkSizeInProbes * 4);
-                 result.shL1GL1RyData = scenarioData.shL1GL1RyData.GetSubArray(chunkOffset * 4, chunkSizeInProbes * 4);
-                 result.shL1BL1RzData = scenarioData.shL1BL1RzData.GetSubArray(chunkOffset * 4, chunkSizeInProbes * 4);
-
-                 if (scenarioData.shL2Data_0.Length > 0) // we might have no L2 if we are not during baking but during touchup interaction
+                 try
                 {
-                     result.shL2Data_0 = scenarioData.shL2Data_0.GetSubArray(chunkOffset * 4, chunkSizeInProbes * 4);
-                     result.shL2Data_1 = scenarioData.shL2Data_1.GetSubArray(chunkOffset * 4, chunkSizeInProbes * 4);
-                     result.shL2Data_2 = scenarioData.shL2Data_2.GetSubArray(chunkOffset * 4, chunkSizeInProbes * 4);
-                     result.shL2Data_3 = scenarioData.shL2Data_3.GetSubArray(chunkOffset * 4, chunkSizeInProbes * 4);
+                     ApplyPostBakeOperations();
+                 }
+                 catch (Exception e)
+                 {
+                     Debug.LogError(e);
                 }
             }
-         }

-         if (cellData.skyOcclusionDataL0L1.Length > 0)
-         {
-             result.skyOcclusionDataL0L1 = cellData.skyOcclusionDataL0L1.GetSubArray(chunkOffset * 4, chunkSizeInProbes * 4);
-             if (cellData.skyShadingDirectionIndices.Length > 0)
+             if (s_BakeData.reflectionProbeCount != 0)
             {
-                 result.skyShadingDirectionIndices = cellData.skyShadingDirectionIndices.GetSubArray(chunkOffset, chunkSizeInProbes);
+                 var additionalIrradiance = s_BakeData.lightingJob.irradiance.GetSubArray(s_BakeData.probeCount, s_BakeData.reflectionProbeCount);
+                 var additionalValidity = s_BakeData.lightingJob.validity.GetSubArray(s_BakeData.probeCount, s_BakeData.reflectionProbeCount);
+                 AdditionalGIBakeRequestsManager.OnAdditionalProbesBakeCompleted(additionalIrradiance, additionalValidity);
             }
         }

-         result.validityNeighMaskData = cellData.validityNeighMaskData.GetSubArray(chunkOffset, chunkSizeInProbes);
+         if (cleanup)
+             CleanBakeData();

-         return result;
+         // We need to reset that view
+         ProbeReferenceVolume.instance.ResetDebugViewToMaxSubdiv();
     }

-     // NOTE: This is somewhat hacky and is going to likely be slow (or at least slower than it could).
-     // It is only a first iteration of the concept that won't be as impactful on memory as other options.
-     internal static void RevertDilation()
+     static void OnBakeCancelled()
     {
-         if (m_BakingSet == null)
+         if (s_BakeData.bakingThread != null)
         {
-             if (ProbeReferenceVolume.instance.perSceneDataList.Count == 0) return;
-             SetBakingContext(ProbeReferenceVolume.instance.perSceneDataList);
+             LightingBaker.cancel = true;
+             s_BakeData.bakingThread.Join();
+             LightingBaker.cancel = false;
         }

-         var dilationSettings = m_BakingSet.settings.dilationSettings;
-         var blackProbe = new SphericalHarmonicsL2();
+         CleanBakeData();
+     }

-         int chunkSizeInProbes = ProbeBrickPool.GetChunkSizeInProbeCount();
-         foreach (var cell in ProbeReferenceVolume.instance.cells.Values)
-         {
-             for (int i = 0; i < cell.data.validity.Length; ++i)
-             {
-                 if (dilationSettings.enableDilation && dilationSettings.dilationDistance > 0.0f && cell.data.validity[i] > dilationSettings.dilationValidityThreshold)
-                 {
-                     GetProbeAndChunkIndex(i, out var chunkIndex, out var index);
-
-                     var cellChunkData = GetCellChunkData(cell.data, chunkIndex);
-
-                     WriteToShaderCoeffsL0L1(blackProbe, cellChunkData.shL0L1RxData, cellChunkData.shL1GL1RyData, cellChunkData.shL1BL1RzData, index * 4);
-                     WriteToShaderCoeffsL2(blackProbe, cellChunkData.shL2Data_0, cellChunkData.shL2Data_1, cellChunkData.shL2Data_2, cellChunkData.shL2Data_3, index * 4);
-                 }
-             }
-         }
-     }
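Reviewer note: the cancellation pattern used by OnBakeCancelled/ExecuteLightingAsync, in a self-contained form — a flag checked between steps, then Join so no Step() call is in flight when shared buffers are disposed. Names here are illustrative, not the package API:

    using System.Threading;

    class CooperativeWorkerSketch
    {
        static volatile bool s_Cancel;
        Thread m_Thread;

        public void Start(System.Func<bool> step, int stepCount)
        {
            m_Thread = new Thread(() =>
            {
                // Check the flag between steps; never interrupt a step mid-flight.
                for (int i = 0; i < stepCount && !s_Cancel; i++)
                    if (!step()) return; // failure: bail out, caller inspects a 'failed' flag
            });
            m_Thread.Start();
        }

        public void Cancel()
        {
            s_Cancel = true;
            m_Thread.Join();   // only after this returns is it safe to free shared resources
            s_Cancel = false;
        }
    }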
-
-     // Can definitively be optimized later on.
-     // Also note that all the bookkeeping of all the reference volumes will likely need to change when we move to
-     // proper UX.
-     internal static void PerformDilation()
+     static void CleanBakeData()
     {
-         var prv = ProbeReferenceVolume.instance;
-         var perSceneDataList = prv.perSceneDataList;
-         if (perSceneDataList.Count == 0) return;
-         SetBakingContext(perSceneDataList);
-
-         List<Cell> tempLoadedCells = new List<Cell>();
-
-         if (m_BakingSet.hasDilation)
-         {
-             var dilationSettings = m_BakingSet.settings.dilationSettings;
-
-             // Make sure all assets are loaded.
-             prv.PerformPendingOperations();
-
-             // TODO: This loop is very naive, can be optimized, but let's first verify if we indeed want this or not.
-             for (int iterations = 0; iterations < dilationSettings.dilationIterations; ++iterations)
-             {
-                 // Try to load all available cells to the GPU. Might not succeed depending on the memory budget.
-                 prv.LoadAllCells();
-
-                 // Dilate all cells
-                 List<Cell> dilatedCells = new List<Cell>(prv.cells.Values.Count);
-                 bool everythingLoaded = !prv.hasUnloadedCells;
-
-                 if (everythingLoaded)
-                 {
-                     foreach (var cell in prv.cells.Values)
-                     {
-                         if (m_CellsToDilate.ContainsKey(cell.desc.index))
-                         {
-                             PerformDilation(cell, m_BakingSet);
-                             dilatedCells.Add(cell);
-                         }
-                     }
-                 }
-                 else
-                 {
-                     // When everything does not fit in memory, we are going to dilate one cell at a time.
-                     // To do so, we load the cell and all its neighbours and then dilate.
-                     // This is an inefficient use of memory but for now most of the time is spent in reading back the result anyway so it does not introduce any performance regression.
-
-                     // Free all memory to make room for each cell and its neighbors for dilation.
-                     prv.UnloadAllCells();
-
-                     foreach (var cell in prv.cells.Values)
-                     {
-                         if (!m_CellsToDilate.ContainsKey(cell.desc.index))
-                             continue;
-
-                         var cellPos = cell.desc.position;
-                         // Load the cell and all its neighbors before doing dilation.
-                         for (int x = -1; x <= 1; ++x)
-                         {
-                             for (int y = -1; y <= 1; ++y)
-                             {
-                                 for (int z = -1; z <= 1; ++z)
-                                 {
-                                     Vector3Int pos = cellPos + new Vector3Int(x, y, z);
-                                     if (m_CellPosToIndex.TryGetValue(pos, out var cellToLoadIndex))
-                                     {
-                                         if (prv.cells.TryGetValue(cellToLoadIndex, out var cellToLoad))
-                                         {
-                                             if (prv.LoadCell(cellToLoad))
-                                             {
-                                                 tempLoadedCells.Add(cellToLoad);
-                                             }
-                                             else
-                                                 Debug.LogError($"Not enough memory to perform dilation for cell {cell.desc.index}");
-                                         }
-                                     }
-                                 }
-                             }
-                         }
-
-                         PerformDilation(cell, m_BakingSet);
-                         dilatedCells.Add(cell);
-
-                         // Free memory again.
-                         foreach (var cellToUnload in tempLoadedCells)
-                             prv.UnloadCell(cellToUnload);
-                         tempLoadedCells.Clear();
-                     }
-                 }
-
-                 // Now write back the assets.
-                 WriteDilatedCells(dilatedCells);
-
-                 AssetDatabase.SaveAssets();
-                 AssetDatabase.Refresh();
-
-                 // Reload data
-                 foreach (var sceneData in perSceneDataList)
-                 {
-                     sceneData.QueueSceneRemoval();
-                     sceneData.QueueSceneLoading();
-                 }
-                 prv.PerformPendingOperations();
-             }
-         }
-     }
-
-     static Dictionary<int, int> RemapBakedCells(bool isBakingSubset)
-     {
-         // When baking a baking set, it is possible that the cell layout has changed (min and max position of cells in the set).
-         // If this is the case then the cell index for a given position will change.
-         // Because of this, when doing partial bakes, we need to generate a remapping table of the old cells to the new layout in order to be able to update existing data.
-         Dictionary<int, int> oldToNewCellRemapping = new Dictionary<int, int>();
-
-         if (isBakingSubset)
-         {
-             // Layout has changed but is still compatible. Remap all cells that are not part of the bake.
-             if (minCellPosition != m_BakingSet.minCellPosition || maxCellPosition != m_BakingSet.maxCellPosition)
-             {
-                 var alreadyBakedCells = m_BakingSet.cellDescs;
-                 var newCells = new SerializedDictionary<int, CellDesc>();
-
-                 // Generate remapping for all cells baked the last time.
-                 foreach (var cellKvP in alreadyBakedCells)
-                 {
-                     var cell = cellKvP.Value;
-                     int oldIndex = cell.index;
-                     int remappedIndex = PosToIndex(cell.position);
-                     oldToNewCellRemapping.Add(oldIndex, remappedIndex);
-
-                     cell.index = remappedIndex;
-                     newCells.Add(oldIndex, cell);
-                 }
-             }
-         }
-
-         return oldToNewCellRemapping;
-     }
-
-     static void GenerateScenesCellLists(List<ProbeVolumePerSceneData> bakedSceneDataList, Dictionary<int, int> cellRemapTable)
-     {
-         bool needRemap = cellRemapTable.Count != 0;
-
-         // Build lists of scene GUIDs and assign baking set to the PerSceneData.
-         var bakedSceneGUIDList = new List<string>();
-         foreach (var data in bakedSceneDataList)
-         {
-             Debug.Assert(ProbeVolumeBakingSet.SceneHasProbeVolumes(data.sceneGUID));
-             bakedSceneGUIDList.Add(data.sceneGUID);
-
-             if (m_BakingSet != data.bakingSet)
-             {
-                 data.bakingSet = m_BakingSet;
-                 EditorUtility.SetDirty(data);
-             }
-         }
-
-         var currentPerSceneCellList = m_BakingSet.perSceneCellLists; // Cell lists from last baking.
-         m_BakingSet.perSceneCellLists = new SerializedDictionary<string, List<int>>();
-
-         // Partial baking: Copy over scene cell lists for scenes not being baked.
-         // Layout change: Remap indices.
-         foreach (var scene in currentPerSceneCellList)
-         {
-             // Scene is not baked. Remap if needed or add it back to the baking set.
-             if (!bakedSceneGUIDList.Contains(scene.Key))
-             {
-                 if (needRemap)
-                 {
-                     var newCellList = new List<int>();
-                     foreach (var cell in scene.Value)
-                         newCellList.Add(cellRemapTable[cell]);
-
-                     m_BakingSet.perSceneCellLists.Add(scene.Key, newCellList);
-                 }
-                 else
-                 {
-                     m_BakingSet.perSceneCellLists.Add(scene.Key, scene.Value);
-                 }
-             }
-         }
-
-         // Allocate baked cells to the relevant scenes cell list.
-         foreach (var cell in m_BakedCells.Values)
-         {
-             foreach (var scene in m_BakingBatch.cellIndex2SceneReferences[cell.index])
-             {
-                 // This scene has a probe volume in it?
-                 if (bakedSceneGUIDList.Contains(scene))
-                 {
-                     List<int> indexList;
-                     if (!m_BakingSet.perSceneCellLists.TryGetValue(scene, out indexList))
-                     {
-                         indexList = new List<int>();
-                         m_BakingSet.perSceneCellLists.Add(scene, indexList);
-                     }
-
-                     indexList.Add(cell.index);
-                 }
-             }
-         }
-
-         EditorUtility.SetDirty(m_BakingSet);
-     }
-
-     static void PrepareCellsForWriting(bool isBakingSubset)
-     {
-         // Remap if needed existing Cell descriptors in the baking set.
-         var cellRemapTable = RemapBakedCells(isBakingSubset);
-
-         // Generate list of cells for all cells being baked and remap untouched existing scenes if needed.
-         GenerateScenesCellLists(GetPerSceneDataList(), cellRemapTable);
-
-         if (isBakingSubset)
-         {
-             // Resolve all unloaded scene cells in CPU memory. This will allow us to extract them into BakingCells in order to have the full list for writing.
-             // Other cells should already be in the baked cells list.
-             var loadedSceneDataList = ProbeReferenceVolume.instance.perSceneDataList;
-             foreach (var sceneGUID in m_BakingSet.sceneGUIDs)
-             {
-                 // If a scene was baked
-                 if (m_BakingSet.perSceneCellLists.TryGetValue(sceneGUID, out var cellList))
-                 {
-                     // And the scene is not loaded
-                     if (!loadedSceneDataList.Exists((x) => x.sceneGUID == sceneGUID) && cellList.Count != 0)
-                     {
-                         // Resolve its data in CPU memory.
-                         bool resolved = m_BakingSet.ResolveCellData(cellList);
-                         Debug.Assert(resolved, "Could not resolve unloaded scene data");
-                     }
-                 }
-             }
-
-             // Extract all cells that weren't baked into baking cells.
-             // Merge existing data of cells belonging both to the baking scene list and to scenes not being baked (prevents losing placement data for those).
-             // This way we have a full cell list to provide to WriteBakingCells
-             ExtractBakingCells();
-         }
-     }
-
-     static TouchupVolumeWithBoundsList GetAdjustementVolumes()
-     {
-         // This is slow, but we should have very little amount of touchup volumes.
-         var touchupVolumes = Object.FindObjectsByType<ProbeAdjustmentVolume>(FindObjectsSortMode.InstanceID);
-
-         var touchupVolumesAndBounds = new TouchupVolumeWithBoundsList(touchupVolumes.Length);
-         foreach (var touchup in touchupVolumes)
-         {
-             if (touchup.isActiveAndEnabled)
-             {
-                 touchup.GetOBBandAABB(out var obb, out var aabb);
-                 touchupVolumesAndBounds.Add((obb, aabb, touchup));
-                 touchup.skyDirection.Normalize();
-             }
-         }
-
-         return touchupVolumesAndBounds;
-     }
-
-     static void FinalizeCell(int c, NativeArray<SphericalHarmonicsL2> sh, NativeArray<float> validity, Vector3[] virtualOffsets, Vector4[] skyOcclusion, uint[] skyDirection)
-     {
-         if (c == 0)
-         {
-             m_BakedCells.Clear();
-             m_CellPosToIndex.Clear();
-             m_CellsToDilate.Clear();
-         }
-
-         bool hasVirtualOffset = virtualOffsets != null;
-         bool hasSkyOcclusion = skyOcclusion != null;
-         bool hasSkyDirection = skyDirection != null;
-
-         var cell = m_BakingBatch.cells[c];
-         int numProbes = cell.probePositions.Length;
-         Debug.Assert(numProbes > 0);
-
-         var probeRefVolume = ProbeReferenceVolume.instance;
-         var localTouchupVolumes = cell.SelectIntersectingAdjustmentVolumes(s_AdjustmentVolumes);
-
-         cell.sh = new SphericalHarmonicsL2[numProbes];
-         cell.validity = new float[numProbes];
-         cell.validityNeighbourMask = new byte[numProbes];
-         cell.skyOcclusionDataL0L1 = new Vector4[hasSkyOcclusion ? numProbes : 0];
-         cell.skyShadingDirectionIndices = new uint[hasSkyDirection ? numProbes : 0];
-         cell.offsetVectors = new Vector3[hasVirtualOffset ? numProbes : 0];
-         cell.touchupVolumeInteraction = new float[numProbes];
-         cell.minSubdiv = probeRefVolume.GetMaxSubdivision();
-         cell.shChunkCount = ProbeBrickPool.GetChunkCount(cell.bricks.Length);
-
-         for (int i = 0; i < numProbes; ++i)
-         {
-             int brickIdx = i / 64;
-             int subdivLevel = cell.bricks[brickIdx].subdivisionLevel;
-             cell.minSubdiv = Mathf.Min(cell.minSubdiv, subdivLevel);
+         s_BakeData.Dispose();
+         m_BakingBatch = null;
+         s_AdjustmentVolumes = null;

-             int uniqueProbeIndex = cell.probeIndices[i];
-             cell.SetBakedData(m_BakingSet, m_BakingBatch, localTouchupVolumes, i, uniqueProbeIndex,
-                 sh[uniqueProbeIndex], validity[uniqueProbeIndex], virtualOffsets, skyOcclusion, skyDirection);
-         }
+         // If the lighting panel is not created, we have to dispose ourselves
+         if (ProbeVolumeLightingTab.instance == null)
+             AdaptiveProbeVolumes.Dispose();

-         ComputeValidityMasks(cell);
+         Lightmapping.ResetAdditionalBakeDelegate();

-         m_BakedCells[cell.index] = cell;
-         m_CellsToDilate[cell.index] = cell;
-         m_CellPosToIndex.Add(cell.position, cell.index);
+         partialBakeSceneList = null;
+         ProbeReferenceVolume.instance.checksDuringBakeAction = null;
     }

-     static void ApplyPostBakeOperations(NativeArray<SphericalHarmonicsL2> sh, NativeArray<float> validity, Vector3[] virtualOffsets, Vector4[] skyOcclusion, uint[] skyDirection)
+     static void ApplyPostBakeOperations()
     {
         var probeRefVolume = ProbeReferenceVolume.instance;
@@ -1278,1098 +1156,57 @@ static void ApplyPostBakeOperations(NativeArray<SphericalHarmonicsL2> sh, NativeArray<float> validity, Vector3[] virtualOffsets, Vector4[] skyOcclusion, uint[] skyDirection)
         adjustment.volume.cachedHashCode = adjustment.volume.GetHashCode();
     }

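Reviewer note: context for the i / 64 in the removed FinalizeCell loop — bricks hold 4×4×4 probes, so consecutive runs of 64 probes map to one brick:

    // ProbeBrickPool.kBrickProbeCountPerDim == 4, hence 4 * 4 * 4 = 64 probes per brick.
    static int BrickOfProbe(int probeIndex) => probeIndex / 64;
    // cell.minSubdiv is then the minimum subdivision level over the bricks reached this way.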
-     static void AnalyzeBrickForIndirectionEntries(ref BakingCell cell)
-     {
-         var prv = ProbeReferenceVolume.instance;
-         int cellSizeInBricks = m_ProfileInfo.cellSizeInBricks;
-         int entrySubdivLevel = Mathf.Min(m_ProfileInfo.simplificationLevels, prv.GetGlobalIndirectionEntryMaxSubdiv());
-         int indirectionEntrySizeInBricks = ProbeReferenceVolume.CellSize(entrySubdivLevel);
-         int numOfIndirectionEntriesPerCellDim = cellSizeInBricks / indirectionEntrySizeInBricks;
-
-         int numOfEntries = numOfIndirectionEntriesPerCellDim * numOfIndirectionEntriesPerCellDim * numOfIndirectionEntriesPerCellDim;
-         cell.indirectionEntryInfo = new IndirectionEntryInfo[numOfEntries];
-
-         // This is fairly naive now; if we need optimization, this is the place to be.
-
-         Vector3Int cellPosInEntries = cell.position * numOfIndirectionEntriesPerCellDim;
-         Vector3Int cellPosInBricks = cell.position * cellSizeInBricks;
-
-         int totalIndexChunks = 0;
-         int i = 0;
-         for (int x = 0; x < numOfIndirectionEntriesPerCellDim; ++x)
-         {
-             for (int y = 0; y < numOfIndirectionEntriesPerCellDim; ++y)
-             {
-                 for (int z = 0; z < numOfIndirectionEntriesPerCellDim; ++z)
-                 {
-                     Vector3Int entryPositionInBricks = cellPosInBricks + new Vector3Int(x, y, z) * indirectionEntrySizeInBricks;
-                     Bounds entryBoundsInBricks = new Bounds();
-                     entryBoundsInBricks.min = entryPositionInBricks;
-                     entryBoundsInBricks.max = entryPositionInBricks + new Vector3Int(indirectionEntrySizeInBricks, indirectionEntrySizeInBricks, indirectionEntrySizeInBricks);
-
-                     int minSubdiv = m_ProfileInfo.maxSubdivision;
-                     bool touchedBrick = false;
-                     foreach (Brick b in cell.bricks)
-                     {
-                         if (b.subdivisionLevel < minSubdiv)
-                         {
-                             if (b.IntersectArea(entryBoundsInBricks))
-                             {
-                                 touchedBrick = true;
-                                 minSubdiv = b.subdivisionLevel;
-                                 if (minSubdiv == 0) break;
-                             }
-                         }
-                     }
-
-                     cell.indirectionEntryInfo[i].minSubdiv = minSubdiv;
-                     cell.indirectionEntryInfo[i].positionInBricks = cellPosInBricks + new Vector3Int(x, y, z) * indirectionEntrySizeInBricks;
-                     cell.indirectionEntryInfo[i].hasOnlyBiggerBricks = minSubdiv > entrySubdivLevel && touchedBrick;
-
-                     ProbeBrickIndex.IndirectionEntryUpdateInfo unused = new ProbeBrickIndex.IndirectionEntryUpdateInfo();
-                     int brickCount = ProbeReferenceVolume.instance.GetNumberOfBricksAtSubdiv(cell.indirectionEntryInfo[i], ref unused);
-
-                     totalIndexChunks += Mathf.CeilToInt((float)brickCount / ProbeBrickIndex.kIndexChunkSize);
-
-                     i++;
-                 }
-             }
-         }
-
-         // Chunk count.
-         cell.indexChunkCount = totalIndexChunks;
-     }
-
-     static void OnLightingDataCleared()
-     {
-         if (ProbeReferenceVolume.instance == null)
-             return;
-         if (!ProbeReferenceVolume.instance.isInitialized || !ProbeReferenceVolume.instance.enabledBySRP)
-             return;
-
-         Clear();
-     }
-
-     // Mathf.HalfToFloat(Mathf.FloatToHalf(float.MaxValue)) returns +inf, so clamp manually to avoid that
-     static float s_MaxSHValue = 65504; // IEEE max half
-
-     static ushort SHFloatToHalf(float value)
-     {
-         return Mathf.FloatToHalf(Mathf.Min(value, s_MaxSHValue));
-     }
-
-     static float SHHalfToFloat(ushort value)
-     {
-         return Mathf.HalfToFloat(value);
-     }
-
-     static byte SHFloatToByte(float value)
-     {
-         return (byte)(Mathf.Clamp(value, 0.0f, 1.0f) * 255.0f);
-     }
-
-     static float SHByteToFloat(byte value)
-     {
-         return value / 255.0f;
-     }
-
-     static void WriteToShaderCoeffsL0L1(in SphericalHarmonicsL2 sh, NativeArray<ushort> shaderCoeffsL0L1Rx, NativeArray<byte> shaderCoeffsL1GL1Ry, NativeArray<byte> shaderCoeffsL1BL1Rz, int offset)
-     {
-         shaderCoeffsL0L1Rx[offset + 0] = SHFloatToHalf(sh[0, 0]); shaderCoeffsL0L1Rx[offset + 1] = SHFloatToHalf(sh[1, 0]); shaderCoeffsL0L1Rx[offset + 2] = SHFloatToHalf(sh[2, 0]); shaderCoeffsL0L1Rx[offset + 3] = SHFloatToHalf(sh[0, 1]);
-         shaderCoeffsL1GL1Ry[offset + 0] = SHFloatToByte(sh[1, 1]); shaderCoeffsL1GL1Ry[offset + 1] = SHFloatToByte(sh[1, 2]); shaderCoeffsL1GL1Ry[offset + 2] = SHFloatToByte(sh[1, 3]); shaderCoeffsL1GL1Ry[offset + 3] = SHFloatToByte(sh[0, 2]);
-         shaderCoeffsL1BL1Rz[offset + 0] = SHFloatToByte(sh[2, 1]); shaderCoeffsL1BL1Rz[offset + 1] = SHFloatToByte(sh[2, 2]); shaderCoeffsL1BL1Rz[offset + 2] = SHFloatToByte(sh[2, 3]); shaderCoeffsL1BL1Rz[offset + 3] = SHFloatToByte(sh[0, 3]);
-     }
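Reviewer note: the removed SHFloatToByte/SHByteToFloat pair quantizes a [0,1] coefficient to 8 bits by truncation, so the worst-case round-trip error is just under one quantization step (1/255). Self-contained sketch:

    using UnityEngine;

    static class ShByteQuantizationSketch
    {
        static byte FloatToByte(float v) => (byte)(Mathf.Clamp(v, 0.0f, 1.0f) * 255.0f);
        static float ByteToFloat(byte v) => v / 255.0f;

        // Example: 0.5f -> (byte)127 -> 0.49803f, an error of ~0.002 (< 1/255).
        public static float RoundTripError(float v) => Mathf.Abs(v - ByteToFloat(FloatToByte(v)));
    }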
shaderCoeffsL2_2, NativeArray shaderCoeffsL2_3, int offset) - { - shaderCoeffsL2_0[offset + 0] = SHFloatToByte(sh[0, 4]); shaderCoeffsL2_0[offset + 1] = SHFloatToByte(sh[0, 5]); shaderCoeffsL2_0[offset + 2] = SHFloatToByte(sh[0, 6]); shaderCoeffsL2_0[offset + 3] = SHFloatToByte(sh[0, 7]); - shaderCoeffsL2_1[offset + 0] = SHFloatToByte(sh[1, 4]); shaderCoeffsL2_1[offset + 1] = SHFloatToByte(sh[1, 5]); shaderCoeffsL2_1[offset + 2] = SHFloatToByte(sh[1, 6]); shaderCoeffsL2_1[offset + 3] = SHFloatToByte(sh[1, 7]); - shaderCoeffsL2_2[offset + 0] = SHFloatToByte(sh[2, 4]); shaderCoeffsL2_2[offset + 1] = SHFloatToByte(sh[2, 5]); shaderCoeffsL2_2[offset + 2] = SHFloatToByte(sh[2, 6]); shaderCoeffsL2_2[offset + 3] = SHFloatToByte(sh[2, 7]); - shaderCoeffsL2_3[offset + 0] = SHFloatToByte(sh[0, 8]); shaderCoeffsL2_3[offset + 1] = SHFloatToByte(sh[1, 8]); shaderCoeffsL2_3[offset + 2] = SHFloatToByte(sh[2, 8]); - } - - static void ReadFromShaderCoeffsL0L1(ref SphericalHarmonicsL2 sh, NativeArray shaderCoeffsL0L1Rx, NativeArray shaderCoeffsL1GL1Ry, NativeArray shaderCoeffsL1BL1Rz, int offset) - { - sh[0, 0] = SHHalfToFloat(shaderCoeffsL0L1Rx[offset + 0]); sh[1, 0] = SHHalfToFloat(shaderCoeffsL0L1Rx[offset + 1]); sh[2, 0] = SHHalfToFloat(shaderCoeffsL0L1Rx[offset + 2]); sh[0, 1] = SHHalfToFloat(shaderCoeffsL0L1Rx[offset + 3]); - sh[1, 1] = SHByteToFloat(shaderCoeffsL1GL1Ry[offset + 0]); sh[1, 2] = SHByteToFloat(shaderCoeffsL1GL1Ry[offset + 1]); sh[1, 3] = SHByteToFloat(shaderCoeffsL1GL1Ry[offset + 2]); sh[0, 2] = SHByteToFloat(shaderCoeffsL1GL1Ry[offset + 3]); - sh[2, 1] = SHByteToFloat(shaderCoeffsL1BL1Rz[offset + 0]); sh[2, 2] = SHByteToFloat(shaderCoeffsL1BL1Rz[offset + 1]); sh[2, 3] = SHByteToFloat(shaderCoeffsL1BL1Rz[offset + 2]); sh[0, 3] = SHByteToFloat(shaderCoeffsL1BL1Rz[offset + 3]); - } - - static void ReadFromShaderCoeffsL2(ref SphericalHarmonicsL2 sh, NativeArray shaderCoeffsL2_0, NativeArray shaderCoeffsL2_1, NativeArray shaderCoeffsL2_2, NativeArray shaderCoeffsL2_3, int offset) - { - sh[0, 4] = SHByteToFloat(shaderCoeffsL2_0[offset + 0]); sh[0, 5] = SHByteToFloat(shaderCoeffsL2_0[offset + 1]); sh[0, 6] = SHByteToFloat(shaderCoeffsL2_0[offset + 2]); sh[0, 7] = SHByteToFloat(shaderCoeffsL2_0[offset + 3]); - sh[1, 4] = SHByteToFloat(shaderCoeffsL2_1[offset + 0]); sh[1, 5] = SHByteToFloat(shaderCoeffsL2_1[offset + 1]); sh[1, 6] = SHByteToFloat(shaderCoeffsL2_1[offset + 2]); sh[1, 7] = SHByteToFloat(shaderCoeffsL2_1[offset + 3]); - sh[2, 4] = SHByteToFloat(shaderCoeffsL2_2[offset + 0]); sh[2, 5] = SHByteToFloat(shaderCoeffsL2_2[offset + 1]); sh[2, 6] = SHByteToFloat(shaderCoeffsL2_2[offset + 2]); sh[2, 7] = SHByteToFloat(shaderCoeffsL2_2[offset + 3]); - sh[0, 8] = SHByteToFloat(shaderCoeffsL2_3[offset + 0]); sh[1, 8] = SHByteToFloat(shaderCoeffsL2_3[offset + 1]); sh[2, 8] = SHByteToFloat(shaderCoeffsL2_3[offset + 2]); - } - - static void ReadFullFromShaderCoeffsL0L1L2(ref SphericalHarmonicsL2 sh, - NativeArray shaderCoeffsL0L1Rx, NativeArray shaderCoeffsL1GL1Ry, NativeArray shaderCoeffsL1BL1Rz, - NativeArray shaderCoeffsL2_0, NativeArray shaderCoeffsL2_1, NativeArray shaderCoeffsL2_2, NativeArray shaderCoeffsL2_3, - int probeIdx) - { - ReadFromShaderCoeffsL0L1(ref sh, shaderCoeffsL0L1Rx, shaderCoeffsL1GL1Ry, shaderCoeffsL1BL1Rz, probeIdx * 4); - if (shaderCoeffsL2_0.Length > 0) - ReadFromShaderCoeffsL2(ref sh, shaderCoeffsL2_0, shaderCoeffsL2_1, shaderCoeffsL2_2, shaderCoeffsL2_3, probeIdx * 4); - - } - - static void WriteToShaderSkyOcclusion(in Vector4 occlusionL0L1, NativeArray 
shaderCoeffsSkyOcclusionL0L1, int offset) - { - shaderCoeffsSkyOcclusionL0L1[offset + 0] = SHFloatToHalf(occlusionL0L1.x); - shaderCoeffsSkyOcclusionL0L1[offset + 1] = SHFloatToHalf(occlusionL0L1.y); - shaderCoeffsSkyOcclusionL0L1[offset + 2] = SHFloatToHalf(occlusionL0L1.z); - shaderCoeffsSkyOcclusionL0L1[offset + 3] = SHFloatToHalf(occlusionL0L1.w); - } - - static void ReadFromShaderCoeffsSkyOcclusion(ref Vector4 skyOcclusionL0L1, NativeArray skyOcclusionDataL0L1, int probeIdx) - { - int offset = probeIdx * 4; - skyOcclusionL0L1.x = SHHalfToFloat(skyOcclusionDataL0L1[offset + 0]); - skyOcclusionL0L1.y = SHHalfToFloat(skyOcclusionDataL0L1[offset + 1]); - skyOcclusionL0L1.z = SHHalfToFloat(skyOcclusionDataL0L1[offset + 2]); - skyOcclusionL0L1.w = SHHalfToFloat(skyOcclusionDataL0L1[offset + 3]); - } - // Returns index in the GPU layout of probe of coordinate (x, y, z) in the brick at brickIndex for a DataLocation of size locSize - static int GetProbeGPUIndex(int brickIndex, int x, int y, int z, Vector3Int locSize) + static int s_AsyncBakeTaskID = -1; + internal static void AsyncBakeCallback() { - Vector3Int locSizeInBrick = locSize / ProbeBrickPool.kBrickProbeCountPerDim; - - int bx = brickIndex % locSizeInBrick.x; - int by = (brickIndex / locSizeInBrick.x) % locSizeInBrick.y; - int bz = ((brickIndex / locSizeInBrick.x) / locSizeInBrick.y) % locSizeInBrick.z; + float progress = 0.0f; + bool done = false; + BakeDelegate(ref progress, ref done); + Progress.Report(s_AsyncBakeTaskID, progress, s_BakeData.step.ToString()); - // In probes - int ix = bx * ProbeBrickPool.kBrickProbeCountPerDim + x; - int iy = by * ProbeBrickPool.kBrickProbeCountPerDim + y; - int iz = bz * ProbeBrickPool.kBrickProbeCountPerDim + z; - - return ix + locSize.x * (iy + locSize.y * iz); - } - - static BakingCell ConvertCellToBakingCell(CellDesc cellDesc, CellData cellData) - { - BakingCell bc = new BakingCell + if (done) { - position = cellDesc.position, - index = cellDesc.index, - bricks = cellData.bricks.ToArray(), - minSubdiv = cellDesc.minSubdiv, - indexChunkCount = cellDesc.indexChunkCount, - shChunkCount = cellDesc.shChunkCount, - probeIndices = null, // Not needed for this conversion. - indirectionEntryInfo = cellDesc.indirectionEntryInfo, - }; - - bool hasVirtualOffsets = cellData.offsetVectors.Length > 0; - bool hasSkyOcclusion = cellData.skyOcclusionDataL0L1.Length > 0; - bool hasSkyShadingDirection = cellData.skyShadingDirectionIndices.Length > 0; - - // Runtime Cell arrays may contain padding to match chunk size - // so we use the actual probe count for these arrays. - int probeCount = cellDesc.probeCount; - bc.probePositions = new Vector3[probeCount]; - bc.validity = new float[probeCount]; - bc.touchupVolumeInteraction = new float[probeCount]; - bc.validityNeighbourMask = new byte[probeCount]; - bc.skyOcclusionDataL0L1 = hasSkyOcclusion ? new Vector4[probeCount] : null; - bc.skyShadingDirectionIndices = hasSkyShadingDirection ? new uint[probeCount] : null; - bc.offsetVectors = hasVirtualOffsets ? new Vector3[probeCount] : null; - bc.sh = new SphericalHarmonicsL2[probeCount]; - - // Runtime data layout is for GPU consumption. - // We need to convert it back to a linear layout for the baking cell. 
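For intuition, the remap performed by the loop below is the inverse of GetProbeGPUIndex. A minimal sketch of that inverse (the helper name is illustrative and not part of this change; 4 is ProbeBrickPool.kBrickProbeCountPerDim, giving the 4x4x4 = 64 probes per brick seen elsewhere in this file):

    // Recover (brickIndex, probe-in-brick coordinate) from a flattened GPU index.
    static void GetBrickLocalCoords(int gpuIndex, Vector3Int locSize, out int brickIndex, out Vector3Int probeInBrick)
    {
        const int probesPerDim = 4; // assumption: ProbeBrickPool.kBrickProbeCountPerDim
        // Un-flatten the GPU index into a 3D texel coordinate of the data location.
        int ix = gpuIndex % locSize.x;
        int iy = (gpuIndex / locSize.x) % locSize.y;
        int iz = gpuIndex / (locSize.x * locSize.y);
        // Split the texel coordinate into a brick coordinate and a coordinate within the brick.
        Vector3Int locSizeInBricks = locSize / probesPerDim;
        probeInBrick = new Vector3Int(ix % probesPerDim, iy % probesPerDim, iz % probesPerDim);
        brickIndex = (ix / probesPerDim)
            + locSizeInBricks.x * ((iy / probesPerDim) + locSizeInBricks.y * (iz / probesPerDim));
    }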
-            int probeIndex = 0;
-            int chunkOffsetInProbes = 0;
-            var chunksCount = cellDesc.shChunkCount;
-            var chunkSizeInProbes = ProbeBrickPool.GetChunkSizeInProbeCount();
-            Vector3Int locSize = ProbeBrickPool.ProbeCountToDataLocSize(chunkSizeInProbes);
-
-            var blackSH = GetBlackSH();
-
-            for (int chunkIndex = 0; chunkIndex < chunksCount; ++chunkIndex)
-            {
-                var cellChunkData = GetCellChunkData(cellData, chunkIndex);
-
-                for (int brickIndex = 0; brickIndex < m_BakingSet.chunkSizeInBricks; ++brickIndex)
-                {
-                    if (probeIndex >= probeCount)
-                        break;
-
-                    for (int z = 0; z < ProbeBrickPool.kBrickProbeCountPerDim; z++)
-                    {
-                        for (int y = 0; y < ProbeBrickPool.kBrickProbeCountPerDim; y++)
-                        {
-                            for (int x = 0; x < ProbeBrickPool.kBrickProbeCountPerDim; x++)
-                            {
-                                var remappedIndex = GetProbeGPUIndex(brickIndex, x, y, z, locSize);
-
-                                // Scenario data can be invalid due to partially baking the set.
-                                if (cellChunkData.scenarioValid)
-                                    ReadFullFromShaderCoeffsL0L1L2(ref bc.sh[probeIndex], cellChunkData.shL0L1RxData, cellChunkData.shL1GL1RyData, cellChunkData.shL1BL1RzData,
-                                        cellChunkData.shL2Data_0, cellChunkData.shL2Data_1, cellChunkData.shL2Data_2, cellChunkData.shL2Data_3, remappedIndex);
-                                else
-                                    bc.sh[probeIndex] = blackSH;
-
-                                bc.validityNeighbourMask[probeIndex] = cellChunkData.validityNeighMaskData[remappedIndex];
-                                if (hasSkyOcclusion)
-                                    ReadFromShaderCoeffsSkyOcclusion(ref bc.skyOcclusionDataL0L1[probeIndex], cellChunkData.skyOcclusionDataL0L1, remappedIndex);
-                                if (hasSkyShadingDirection)
-                                {
-                                    bc.skyShadingDirectionIndices[probeIndex] = cellChunkData.skyShadingDirectionIndices[remappedIndex];
-                                }
-
-                                remappedIndex += chunkOffsetInProbes;
-                                bc.probePositions[probeIndex] = cellData.probePositions[remappedIndex];
-                                bc.validity[probeIndex] = cellData.validity[remappedIndex];
-                                bc.touchupVolumeInteraction[probeIndex] = cellData.touchupVolumeInteraction[remappedIndex];
-                                if (hasVirtualOffsets)
-                                    bc.offsetVectors[probeIndex] = cellData.offsetVectors[remappedIndex];
-
-                                probeIndex++;
-                            }
-                        }
-                    }
-                }
-
-                chunkOffsetInProbes += chunkSizeInProbes;
-            }
-
-            return bc;
        }

-        // This is slow, but artists wanted this... This can be optimized later.
-        static BakingCell MergeCells(BakingCell dst, BakingCell srcCell)
+        /// <summary>
+        /// Starts an asynchronous bake job for Adaptive Probe Volumes.
+        /// </summary>
+        /// <returns>Returns true if the bake was successfully started.</returns>
+        internal static bool BakeAsync()
         {
-            int maxSubdiv = Math.Max(dst.bricks[0].subdivisionLevel, srcCell.bricks[0].subdivisionLevel);
-            bool hasVirtualOffsets = s_BakeData.virtualOffsetJob.offsets != null;
-            bool hasSkyOcclusion = s_BakeData.skyOcclusionJob.skyOcclusion;
-            bool hasSkyShadingDirection = s_BakeData.skyOcclusionJob.skyDirection;
-
-            List<(Brick, int, int)> consolidatedBricks = new List<(Brick, int, int)>();
-            HashSet<(Vector3Int, int)> addedBricks = new HashSet<(Vector3Int, int)>();
-
-            for (int b = 0; b < dst.bricks.Length; ++b)
-            {
-                var brick = dst.bricks[b];
-                addedBricks.Add((brick.position, brick.subdivisionLevel));
-                consolidatedBricks.Add((brick, b, 0));
-            }
-
-            // Now with lower priority we grab from src.
-            for (int b = 0; b < srcCell.bricks.Length; ++b)
-            {
-                var brick = srcCell.bricks[b];
-
-                if (!addedBricks.Contains((brick.position, brick.subdivisionLevel)))
-                {
-                    consolidatedBricks.Add((brick, b, 1));
-                }
-            }
+            if (Lightmapping.isRunning || AdaptiveProbeVolumes.isRunning || !PrepareBaking())
+                return false;

-            // And finally we sort. We don't need to check for anything but brick as we don't have duplicates.
-            consolidatedBricks.Sort(((Brick, int, int) lhs, (Brick, int, int) rhs) =>
+            s_AsyncBakeTaskID = Progress.Start("Bake Adaptive Probe Volumes");
+            Progress.RegisterCancelCallback(s_AsyncBakeTaskID, () =>
            {
-                if (lhs.Item1.subdivisionLevel != rhs.Item1.subdivisionLevel)
-                    return lhs.Item1.subdivisionLevel > rhs.Item1.subdivisionLevel ? -1 : 1;
-                if (lhs.Item1.position.z != rhs.Item1.position.z)
-                    return lhs.Item1.position.z < rhs.Item1.position.z ? -1 : 1;
-                if (lhs.Item1.position.y != rhs.Item1.position.y)
-                    return lhs.Item1.position.y < rhs.Item1.position.y ? -1 : 1;
-                if (lhs.Item1.position.x != rhs.Item1.position.x)
-                    return lhs.Item1.position.x < rhs.Item1.position.x ? -1 : 1;
-
-                return 0;
+                OnBakeCancelled();
+                EditorApplication.update -= AsyncBakeCallback;
+                s_AsyncBakeTaskID = -1;
+                return true;
            });

-            BakingCell outCell = new BakingCell();
-
-            int numberOfProbes = consolidatedBricks.Count * ProbeBrickPool.kBrickProbeCountTotal;
-            outCell.index = dst.index;
-            outCell.position = dst.position;
-            outCell.bricks = new Brick[consolidatedBricks.Count];
-            outCell.probePositions = new Vector3[numberOfProbes];
-            outCell.minSubdiv = Math.Min(dst.minSubdiv, srcCell.minSubdiv);
-            outCell.sh = new SphericalHarmonicsL2[numberOfProbes];
-            outCell.validity = new float[numberOfProbes];
-            outCell.validityNeighbourMask = new byte[numberOfProbes];
-            outCell.skyOcclusionDataL0L1 = hasSkyOcclusion ? new Vector4[numberOfProbes] : null;
-            outCell.skyShadingDirectionIndices = hasSkyShadingDirection ? new uint[numberOfProbes] : null;
-            outCell.offsetVectors = hasVirtualOffsets ? new Vector3[numberOfProbes] : null;
-            outCell.touchupVolumeInteraction = new float[numberOfProbes];
-            outCell.shChunkCount = ProbeBrickPool.GetChunkCount(outCell.bricks.Length);
-            // We don't need to analyse here, it will be done upon writing back.
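Interleaved with the removed merge code, this hunk adds the asynchronous bake entry points (BakeAsync and AsyncBakeCallback). They follow the standard UnityEditor.Progress pattern: start a task, tick it from EditorApplication.update, and tear down on completion or cancellation. Stripped of the baking specifics, the skeleton looks roughly like this (all names below are illustrative, not part of the change):

    using UnityEditor;
    using UnityEngine;

    static class AsyncEditorTaskSketch
    {
        static int s_TaskId = -1;
        static float s_Progress;

        internal static bool Start()
        {
            if (s_TaskId != -1)
                return false; // already running

            s_Progress = 0.0f;
            s_TaskId = Progress.Start("Example task");
            // Returning true from the cancel callback accepts the cancellation.
            Progress.RegisterCancelCallback(s_TaskId, () => { Teardown(); return true; });
            EditorApplication.update += Tick;
            return true;
        }

        static void Tick()
        {
            s_Progress = Mathf.Min(1.0f, s_Progress + 0.01f); // stand-in for real incremental work
            Progress.Report(s_TaskId, s_Progress, "working");
            if (s_Progress >= 1.0f)
            {
                Progress.Remove(s_TaskId);
                Teardown();
            }
        }

        static void Teardown()
        {
            EditorApplication.update -= Tick;
            s_TaskId = -1;
        }
    }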
-            outCell.indirectionEntryInfo = new IndirectionEntryInfo[srcCell.indirectionEntryInfo.Length];
-
-            BakingCell[] consideredCells = { dst, srcCell };
-
-            for (int i = 0; i < consolidatedBricks.Count; ++i)
-            {
-                var b = consolidatedBricks[i];
-                int brickIndexInSource = b.Item2;
-
-                outCell.bricks[i] = consideredCells[b.Item3].bricks[brickIndexInSource];
-
-                for (int p = 0; p < ProbeBrickPool.kBrickProbeCountTotal; ++p)
-                {
-                    int outIdx = i * ProbeBrickPool.kBrickProbeCountTotal + p;
-                    int srcIdx = brickIndexInSource * ProbeBrickPool.kBrickProbeCountTotal + p;
-                    outCell.probePositions[outIdx] = consideredCells[b.Item3].probePositions[srcIdx];
-                    outCell.sh[outIdx] = consideredCells[b.Item3].sh[srcIdx];
-                    outCell.validity[outIdx] = consideredCells[b.Item3].validity[srcIdx];
-                    outCell.validityNeighbourMask[outIdx] = consideredCells[b.Item3].validityNeighbourMask[srcIdx];
-                    if (hasSkyOcclusion)
-                        outCell.skyOcclusionDataL0L1[outIdx] = consideredCells[b.Item3].skyOcclusionDataL0L1[srcIdx];
-                    if (hasSkyShadingDirection)
-                        outCell.skyShadingDirectionIndices[outIdx] = consideredCells[b.Item3].skyShadingDirectionIndices[srcIdx];
-                    if (hasVirtualOffsets)
-                        outCell.offsetVectors[outIdx] = consideredCells[b.Item3].offsetVectors[srcIdx];
-                    outCell.touchupVolumeInteraction[outIdx] = consideredCells[b.Item3].touchupVolumeInteraction[srcIdx];
-                }
-            }
-            return outCell;
-        }
-
-        static void ExtractBakingCells()
-        {
-            // For cells that are being baked, this loop will merge existing baked data with newly baked data to not lose data.
-            var loadedSceneDataList = ProbeReferenceVolume.instance.perSceneDataList;
-            foreach (var data in loadedSceneDataList)
-            {
-                var cells = m_BakingSet.GetSceneCellIndexList(data.sceneGUID);
-
-                var numberOfCells = cells.Count;
-
-                for (int i = 0; i < numberOfCells; ++i)
-                {
-                    if (m_BakedCells.ContainsKey(cells[i]))
-                    {
-                        var cell = m_BakingSet.GetCellDesc(cells[i]);
-
-                        // This can happen if doing a partial bake before ever doing a full bake.
-                        if (cell == null || !m_BakedCells.ContainsKey(cell.index))
-                            continue;
-
-                        var cellData = m_BakingSet.GetCellData(cells[i]);
-
-                        // When doing partial baking some cells might not have any already baked data.
-                        if (cellData == null || !cellData.scenarios.ContainsKey(m_BakingSet.lightingScenario))
-                            continue;
-
-                        BakingCell bc = ConvertCellToBakingCell(cell, cellData);
-                        bc = MergeCells(m_BakedCells[cell.index], bc);
-                        m_BakedCells[cell.index] = bc;
-                    }
-                }
-            }
-
-            // Here we convert to baking cells all cells that were not already baked.
-            // This allows us to have the full set of cells ready for writing all at once.
-            foreach (var cell in m_BakingSet.cellDescs.Values)
-            {
-                if (!m_BakedCells.ContainsKey(cell.index))
-                {
-                    var cellData = m_BakingSet.GetCellData(cell.index);
-                    if (cellData == null)
-                        continue;
-
-                    m_BakedCells.Add(cell.index, ConvertCellToBakingCell(cell, cellData));
-                }
-            }
-        }
-
-        static long AlignRemainder16(long count) => count % 16L;
-
-        static void WriteNativeArray<T>(System.IO.FileStream fs, NativeArray<T> array) where T : struct
-        {
-            unsafe
-            {
-                fs.Write(new ReadOnlySpan<byte>(array.GetUnsafeReadOnlyPtr(), array.Length * UnsafeUtility.SizeOf<T>()));
-                fs.Write(new byte[AlignRemainder16(fs.Position)]);
-            }
+            EditorApplication.update += AsyncBakeCallback;
+            return true;
        }

        /// <summary>
-        /// This method converts a list of baking cells into 5 separate assets:
-        ///  2 assets per baking state:
-        ///   CellData: a binary flat file containing L0L1 probes data
-        ///   CellOptionalData: a binary flat file containing L2 probe data (when present)
-        ///  3 assets shared between states:
-        ///   ProbeVolumeAsset: a Scriptable Object which currently contains book-keeping data, runtime cells, and references to flattened data
-        ///   CellSharedData: a binary flat file containing bricks data
-        ///   CellSupportData: a binary flat file containing debug data (stripped from player builds if building without debug shaders)
+        /// Returns true when the bake job is running, false otherwise (Read Only).
        /// </summary>
-        unsafe static void WriteBakingCells(BakingCell[] bakingCells)
-        {
-            m_BakingSet.GetBlobFileNames(m_BakingSet.lightingScenario, out var cellDataFilename, out var cellBricksDataFilename, out var cellOptionalDataFilename, out var cellSharedDataFilename, out var cellSupportDataFilename);
-
-            m_BakingSet.cellDescs = new SerializedDictionary<int, CellDesc>();
-            m_BakingSet.bakedMinDistanceBetweenProbes = m_ProfileInfo.minDistanceBetweenProbes;
-            m_BakingSet.bakedSimplificationLevels = m_ProfileInfo.simplificationLevels;
-            m_BakingSet.bakedProbeOffset = m_ProfileInfo.probeOffset;
-            m_BakingSet.bakedSkyOcclusion = m_BakingSet.skyOcclusion;
-            m_BakingSet.bakedSkyShadingDirection = m_BakingSet.bakedSkyOcclusion && m_BakingSet.skyOcclusionShadingDirection;
-
-            var cellSharedDataDescs = new SerializedDictionary<int, StreamableCellDesc>();
-            var cellL0L1DataDescs = new SerializedDictionary<int, StreamableCellDesc>();
-            var cellL2DataDescs = new SerializedDictionary<int, StreamableCellDesc>();
-            var cellBricksDescs = new SerializedDictionary<int, StreamableCellDesc>();
-            var cellSupportDescs = new SerializedDictionary<int, StreamableCellDesc>();
-
-            var voSettings = m_BakingSet.settings.virtualOffsetSettings;
-            bool hasVirtualOffsets = voSettings.useVirtualOffset;
-            bool handlesSkyOcclusion = m_BakingSet.bakedSkyOcclusion;
-            bool handlesSkyShading = m_BakingSet.bakedSkyShadingDirection && m_BakingSet.bakedSkyShadingDirection;
-
-            for (var i = 0; i < bakingCells.Length; ++i)
-            {
-                AnalyzeBrickForIndirectionEntries(ref bakingCells[i]);
-                var bakingCell = bakingCells[i];
-
-                m_BakingSet.cellDescs.Add(bakingCell.index, new CellDesc
-                {
-                    position = bakingCell.position,
-                    index = bakingCell.index,
-                    probeCount = bakingCell.probePositions.Length,
-                    minSubdiv = bakingCell.minSubdiv,
-                    indexChunkCount = bakingCell.indexChunkCount,
-                    shChunkCount = bakingCell.shChunkCount,
-                    indirectionEntryInfo = bakingCell.indirectionEntryInfo,
-                    bricksCount = bakingCell.bricks.Length,
-                });
-
-                m_BakingSet.maxSHChunkCount = Mathf.Max(m_BakingSet.maxSHChunkCount, bakingCell.shChunkCount);
-
-                m_TotalCellCounts.Add(new CellCounts
-                {
-                    bricksCount = bakingCell.bricks.Length,
-                    chunksCount = bakingCell.shChunkCount
-                });
-            }
-
-            // All per probe data is stored per chunk and contiguously for each cell.
-            // This is done so that we can stream from disk one cell at a time by group of chunks.
-
-            var chunkSizeInProbes = ProbeBrickPool.GetChunkSizeInProbeCount();
-
-            // CellData
-            // L0 and L1 Data: 12 Coeffs stored in 3 textures. L0 (rgb) and R1x as ushort in one texture, the rest as byte in two 4 component textures.
-            var L0L1R1xChunkSize = sizeof(ushort) * 4 * chunkSizeInProbes; // 4 ushort components per probe
-            var L1ChunkSize = sizeof(byte) * 4 * chunkSizeInProbes; // 4 components per probe
-            var L0L1ChunkSize = L0L1R1xChunkSize + 2 * L1ChunkSize;
-            var L0L1TotalSize = m_TotalCellCounts.chunksCount * L0L1ChunkSize;
-            using var probesL0L1 = new NativeArray<byte>(L0L1TotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
-
-            m_BakingSet.L0ChunkSize = L0L1R1xChunkSize;
-            m_BakingSet.L1ChunkSize = L1ChunkSize;
-
-            // CellOptionalData
-            // L2 Data: 15 Coeffs stored in 4 byte4 textures.
-            var L2TextureChunkSize = 4 * sizeof(byte) * chunkSizeInProbes; // 4 byte component per probe
-            var L2ChunkSize = L2TextureChunkSize * 4; // 4 Textures for all L2 data.
-            var L2TotalSize = m_TotalCellCounts.chunksCount * L2ChunkSize; // 4 textures
-            using var probesL2 = new NativeArray<byte>(L2TotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
-
-            m_BakingSet.L2TextureChunkSize = L2TextureChunkSize;
-
-
-            // CellSharedData
-            m_BakingSet.sharedValidityMaskChunkSize = sizeof(byte) * chunkSizeInProbes;
-            m_BakingSet.sharedSkyOcclusionL0L1ChunkSize = handlesSkyOcclusion ? sizeof(ushort) * 4 * chunkSizeInProbes : 0;
-            m_BakingSet.sharedSkyShadingDirectionIndicesChunkSize = handlesSkyShading ? sizeof(byte) * chunkSizeInProbes : 0;
-            m_BakingSet.sharedDataChunkSize = m_BakingSet.sharedValidityMaskChunkSize + m_BakingSet.sharedSkyOcclusionL0L1ChunkSize + m_BakingSet.sharedSkyShadingDirectionIndicesChunkSize;
-
-            var sharedDataTotalSize = m_TotalCellCounts.chunksCount * m_BakingSet.sharedDataChunkSize;
-            using var sharedData = new NativeArray<byte>(sharedDataTotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
-
-            // Brick data
-            using var bricks = new NativeArray<Brick>(m_TotalCellCounts.bricksCount, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
-
-            // CellSupportData
-            m_BakingSet.supportPositionChunkSize = sizeof(Vector3) * chunkSizeInProbes;
-            m_BakingSet.supportValidityChunkSize = sizeof(float) * chunkSizeInProbes;
-            m_BakingSet.supportOffsetsChunkSize = hasVirtualOffsets ? sizeof(Vector3) * chunkSizeInProbes : 0;
-            m_BakingSet.supportTouchupChunkSize = sizeof(float) * chunkSizeInProbes;
-
-            m_BakingSet.supportDataChunkSize = m_BakingSet.supportPositionChunkSize + m_BakingSet.supportValidityChunkSize + m_BakingSet.supportOffsetsChunkSize + m_BakingSet.supportTouchupChunkSize;
-            var supportDataTotalSize = m_TotalCellCounts.chunksCount * m_BakingSet.supportDataChunkSize;
-            using var supportData = new NativeArray<byte>(supportDataTotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
-
-            var sceneStateHash = m_BakingSet.GetBakingHashCode();
-            var startCounts = new CellCounts();
-
-            int sharedChunkOffset = 0;
-
-            int shL0L1ChunkOffset = 0;
-            int shL2ChunkOffset = 0;
-            int supportChunkOffset = 0;
-
-            var blackSH = GetBlackSH();
-
-            // Size of the DataLocation used to do the copy texture at runtime. Used to generate the right layout for the 3D texture.
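For concreteness, the per-chunk byte budget computed above works out as follows for a chunk of N probes (a sketch; N is whatever ProbeBrickPool.GetChunkSizeInProbeCount() returns on the current configuration):

    int N = ProbeBrickPool.GetChunkSizeInProbeCount();
    int l0l1r1xBytes = sizeof(ushort) * 4 * N;     // 8 N: L0.rgb + L1r.x in one RGBA16 texture
    int l1Bytes      = sizeof(byte) * 4 * N;       // 4 N: one RGBA8 texture
    int l0l1Bytes    = l0l1r1xBytes + 2 * l1Bytes; // 16 N bytes per chunk for all L0/L1 data
    int l2Bytes      = 4 * (sizeof(byte) * 4 * N); // 16 N bytes per chunk for the four L2 RGBA8 textures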
-            Vector3Int locSize = ProbeBrickPool.ProbeCountToDataLocSize(ProbeBrickPool.GetChunkSizeInProbeCount());
-
-            for (var i = 0; i < bakingCells.Length; ++i)
-            {
-                var bakingCell = bakingCells[i];
-                var cellDesc = m_BakingSet.cellDescs[bakingCell.index];
-                var chunksCount = cellDesc.shChunkCount;
-
-                cellSharedDataDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.chunksCount * m_BakingSet.sharedDataChunkSize, elementCount = chunksCount });
-                cellL0L1DataDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.chunksCount * L0L1ChunkSize, elementCount = chunksCount });
-                cellL2DataDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.chunksCount * L2ChunkSize, elementCount = chunksCount });
-                cellBricksDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.bricksCount * sizeof(Brick), elementCount = cellDesc.bricksCount });
-                cellSupportDescs.Add(bakingCell.index, new StreamableCellDesc() { offset = startCounts.chunksCount * m_BakingSet.supportDataChunkSize, elementCount = chunksCount });
-
-                sceneStateHash = sceneStateHash * 23 + bakingCell.GetBakingHashCode();
-
-                var inputProbesCount = bakingCell.probePositions.Length;
-
-                int shidx = 0;
-
-                // Cell base offsets for each data stream
-                int cellL0R1xOffset = shL0L1ChunkOffset;
-                int cellL1GL1RyOffset = cellL0R1xOffset + chunksCount * L0L1R1xChunkSize;
-                int cellL1BL1RzOffset = cellL1GL1RyOffset + chunksCount * L1ChunkSize;
-
-                int validityMaskOffset = sharedChunkOffset;
-                int skyOcclusionL0L1Offset = validityMaskOffset + chunksCount * m_BakingSet.sharedValidityMaskChunkSize;
-                int skyShadingIndicesOffset = skyOcclusionL0L1Offset + chunksCount * m_BakingSet.sharedSkyOcclusionL0L1ChunkSize;
-
-                int positionOffset = supportChunkOffset;
-                int validityOffset = positionOffset + chunksCount * m_BakingSet.supportPositionChunkSize;
-                int touchupOffset = validityOffset + chunksCount * m_BakingSet.supportValidityChunkSize;
-                int offsetsOffset = touchupOffset + chunksCount * m_BakingSet.supportTouchupChunkSize; // Keep last as it's optional.
-
-                // Here we directly map each chunk to the layout of the 3D textures in order to be able to copy the data directly to the GPU.
-                // The granularity at runtime is one chunk at a time currently so the temporary data loc used is sized accordingly.
-                for (int chunkIndex = 0; chunkIndex < chunksCount; ++chunkIndex)
-                {
-                    NativeArray<ushort> probesTargetL0L1Rx = probesL0L1.GetSubArray(cellL0R1xOffset + chunkIndex * L0L1R1xChunkSize, L0L1R1xChunkSize).Reinterpret<ushort>(1);
-                    NativeArray<byte> probesTargetL1GL1Ry = probesL0L1.GetSubArray(cellL1GL1RyOffset + chunkIndex * L1ChunkSize, L1ChunkSize);
-                    NativeArray<byte> probesTargetL1BL1Rz = probesL0L1.GetSubArray(cellL1BL1RzOffset + chunkIndex * L1ChunkSize, L1ChunkSize);
-
-                    NativeArray<byte> validityNeighboorMaskChunkTarget = sharedData.GetSubArray(validityMaskOffset + chunkIndex * m_BakingSet.sharedValidityMaskChunkSize, m_BakingSet.sharedValidityMaskChunkSize);
-                    NativeArray<ushort> skyOcclusionL0L1ChunkTarget = sharedData.GetSubArray(skyOcclusionL0L1Offset + chunkIndex * m_BakingSet.sharedSkyOcclusionL0L1ChunkSize, m_BakingSet.sharedSkyOcclusionL0L1ChunkSize).Reinterpret<ushort>(1);
-                    NativeArray<byte> skyShadingIndicesChunkTarget = sharedData.GetSubArray(skyShadingIndicesOffset + chunkIndex * m_BakingSet.sharedSkyShadingDirectionIndicesChunkSize, m_BakingSet.sharedSkyShadingDirectionIndicesChunkSize);
-
-
-                    NativeArray<Vector3> positionsChunkTarget = supportData.GetSubArray(positionOffset + chunkIndex * m_BakingSet.supportPositionChunkSize, m_BakingSet.supportPositionChunkSize).Reinterpret<Vector3>(1);
-                    NativeArray<float> validityChunkTarget = supportData.GetSubArray(validityOffset + chunkIndex * m_BakingSet.supportValidityChunkSize, m_BakingSet.supportValidityChunkSize).Reinterpret<float>(1);
-                    NativeArray<float> touchupVolumeInteractionChunkTarget = supportData.GetSubArray(touchupOffset + chunkIndex * m_BakingSet.supportTouchupChunkSize, m_BakingSet.supportTouchupChunkSize).Reinterpret<float>(1);
-                    NativeArray<Vector3> offsetChunkTarget = supportData.GetSubArray(offsetsOffset + chunkIndex * m_BakingSet.supportOffsetsChunkSize, m_BakingSet.supportOffsetsChunkSize).Reinterpret<Vector3>(1);
-
-                    NativeArray<byte> probesTargetL2_0 = probesL2.GetSubArray(shL2ChunkOffset + chunksCount * L2TextureChunkSize * 0 + chunkIndex * L2TextureChunkSize, L2TextureChunkSize);
-                    NativeArray<byte> probesTargetL2_1 = probesL2.GetSubArray(shL2ChunkOffset + chunksCount * L2TextureChunkSize * 1 + chunkIndex * L2TextureChunkSize, L2TextureChunkSize);
-                    NativeArray<byte> probesTargetL2_2 = probesL2.GetSubArray(shL2ChunkOffset + chunksCount * L2TextureChunkSize * 2 + chunkIndex * L2TextureChunkSize, L2TextureChunkSize);
-                    NativeArray<byte> probesTargetL2_3 = probesL2.GetSubArray(shL2ChunkOffset + chunksCount * L2TextureChunkSize * 3 + chunkIndex * L2TextureChunkSize, L2TextureChunkSize);
-
-                    for (int brickIndex = 0; brickIndex < m_BakingSet.chunkSizeInBricks; brickIndex++)
-                    {
-                        for (int z = 0; z < ProbeBrickPool.kBrickProbeCountPerDim; z++)
-                        {
-                            for (int y = 0; y < ProbeBrickPool.kBrickProbeCountPerDim; y++)
-                            {
-                                for (int x = 0; x < ProbeBrickPool.kBrickProbeCountPerDim; x++)
-                                {
-                                    int index = GetProbeGPUIndex(brickIndex, x, y, z, locSize);
-
-                                    // We are processing chunks at a time.
-                                    // So in practice we can go over the number of SH we have in the input list.
-                                    // We fill with encoded black to avoid copying garbage in the final atlas.
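The byte-encoded coefficients quantize the [0, 1] range uniformly, so "encoded black" is simply the quantized form of a zeroed SH set. A round-trip sketch of the SHFloatToByte/SHByteToFloat pair quoted earlier shows the cost of that quantization (at most about 1/255 per coefficient):

    static byte Encode(float v) => (byte)(Mathf.Clamp(v, 0.0f, 1.0f) * 255.0f); // mirrors SHFloatToByte
    static float Decode(byte b) => b / 255.0f;                                  // mirrors SHByteToFloat
    // Encode(0.5f) == 127 (127.5 truncates down), Decode(127) ~= 0.498:
    // an error of ~0.002 here, and never more than ~1/255 in general.

The branch below then writes either the real coefficients or these padding values.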
-                                    if (shidx >= inputProbesCount)
-                                    {
-                                        WriteToShaderCoeffsL0L1(blackSH, probesTargetL0L1Rx, probesTargetL1GL1Ry, probesTargetL1BL1Rz, index * 4);
-                                        WriteToShaderCoeffsL2(blackSH, probesTargetL2_0, probesTargetL2_1, probesTargetL2_2, probesTargetL2_3, index * 4);
-                                        if (m_BakingSet.bakedSkyOcclusion)
-                                        {
-                                            WriteToShaderSkyOcclusion(Vector4.zero, skyOcclusionL0L1ChunkTarget, index * 4);
-                                            if (m_BakingSet.bakedSkyShadingDirection)
-                                            {
-                                                skyShadingIndicesChunkTarget[index] = 255;
-                                            }
-                                        }
-
-                                        validityNeighboorMaskChunkTarget[index] = 0;
-                                        validityChunkTarget[index] = 0.0f;
-                                        positionsChunkTarget[index] = Vector3.zero;
-                                        touchupVolumeInteractionChunkTarget[index] = 0.0f;
-                                        if (hasVirtualOffsets)
-                                            offsetChunkTarget[index] = Vector3.zero;
-                                    }
-                                    else
-                                    {
-                                        ref var sh = ref bakingCell.sh[shidx];
-
-                                        WriteToShaderCoeffsL0L1(sh, probesTargetL0L1Rx, probesTargetL1GL1Ry, probesTargetL1BL1Rz, index * 4);
-                                        WriteToShaderCoeffsL2(sh, probesTargetL2_0, probesTargetL2_1, probesTargetL2_2, probesTargetL2_3, index * 4);
-                                        if (m_BakingSet.bakedSkyOcclusion)
-                                        {
-                                            WriteToShaderSkyOcclusion(bakingCell.skyOcclusionDataL0L1[shidx], skyOcclusionL0L1ChunkTarget, index * 4);
-                                            if (m_BakingSet.bakedSkyShadingDirection)
-                                            {
-                                                skyShadingIndicesChunkTarget[index] = (byte)(bakingCell.skyShadingDirectionIndices[shidx]);
-                                            }
-                                        }
-
-                                        validityChunkTarget[index] = bakingCell.validity[shidx];
-                                        validityNeighboorMaskChunkTarget[index] = bakingCell.validityNeighbourMask[shidx];
-                                        positionsChunkTarget[index] = bakingCell.probePositions[shidx];
-                                        touchupVolumeInteractionChunkTarget[index] = bakingCell.touchupVolumeInteraction[shidx];
-                                        if (hasVirtualOffsets)
-                                            offsetChunkTarget[index] = bakingCell.offsetVectors[shidx];
-                                    }
-                                    shidx++;
-                                }
-                            }
-                        }
-                    }
-                }
-
-                shL0L1ChunkOffset += (chunksCount * L0L1ChunkSize);
-                shL2ChunkOffset += (chunksCount * L2ChunkSize);
-                supportChunkOffset += (chunksCount * m_BakingSet.supportDataChunkSize);
-                sharedChunkOffset += (chunksCount * m_BakingSet.sharedDataChunkSize);
-
-                bricks.GetSubArray(startCounts.bricksCount, cellDesc.bricksCount).CopyFrom(bakingCell.bricks);
+        internal static bool isRunning => s_AsyncBakeTaskID != -1;
-                startCounts.Add(new CellCounts()
-                {
-                    bricksCount = cellDesc.bricksCount,
-                    chunksCount = cellDesc.shChunkCount
-                });
-            }
-
-            // Need to save here because the forced import below discards the changes.
-            EditorUtility.SetDirty(m_BakingSet);
-            AssetDatabase.SaveAssets();
-
-            // Explicitly make sure the binary output files are writable since we write them using the C# file API (i.e. check out Perforce files if applicable)
-            var outputPaths = new List<string>(new[] { cellDataFilename, cellBricksDataFilename, cellSharedDataFilename, cellSupportDataFilename, cellOptionalDataFilename });
-
-            if (!AssetDatabase.MakeEditable(outputPaths.ToArray()))
-                Debug.LogWarning($"Failed to make one or more probe volume output file(s) writable. This could result in baked data not being properly written to disk. {string.Join(",", outputPaths)}");
-
-            unsafe
-            {
-                using (var fs = new System.IO.FileStream(cellDataFilename, System.IO.FileMode.Create, System.IO.FileAccess.Write))
-                {
-                    WriteNativeArray(fs, probesL0L1);
-                }
-                using (var fs = new System.IO.FileStream(cellOptionalDataFilename, System.IO.FileMode.Create, System.IO.FileAccess.Write))
-                {
-                    WriteNativeArray(fs, probesL2);
-                }
-                using (var fs = new System.IO.FileStream(cellSharedDataFilename, System.IO.FileMode.Create, System.IO.FileAccess.Write))
-                {
-                    WriteNativeArray(fs, sharedData);
-                }
-                using (var fs = new System.IO.FileStream(cellBricksDataFilename, System.IO.FileMode.Create, System.IO.FileAccess.Write))
-                {
-                    WriteNativeArray(fs, bricks);
-                }
-                using (var fs = new System.IO.FileStream(cellSupportDataFilename, System.IO.FileMode.Create, System.IO.FileAccess.Write))
-                {
-                    WriteNativeArray(fs, supportData);
-                }
-            }
-
-            AssetDatabase.ImportAsset(cellDataFilename);
-            AssetDatabase.ImportAsset(cellOptionalDataFilename);
-            AssetDatabase.ImportAsset(cellBricksDataFilename);
-            AssetDatabase.ImportAsset(cellSharedDataFilename);
-            AssetDatabase.ImportAsset(cellSupportDataFilename);
-
-            var bakingSetGUID = AssetDatabase.AssetPathToGUID(AssetDatabase.GetAssetPath(m_BakingSet));
-
-            m_BakingSet.scenarios[ProbeReferenceVolume.instance.lightingScenario] = new ProbeVolumeBakingSet.PerScenarioDataInfo
-            {
-                sceneHash = sceneStateHash,
-                cellDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellL0L1DataDescs, L0L1ChunkSize, bakingSetGUID, AssetDatabase.AssetPathToGUID(cellDataFilename)),
-                cellOptionalDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellL2DataDescs, L2ChunkSize, bakingSetGUID, AssetDatabase.AssetPathToGUID(cellOptionalDataFilename)),
-            };
-            m_BakingSet.cellSharedDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellSharedDataDescs, m_BakingSet.sharedDataChunkSize, bakingSetGUID, AssetDatabase.AssetPathToGUID(cellSharedDataFilename));
-            m_BakingSet.cellBricksDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellBricksDescs, sizeof(Brick), bakingSetGUID, AssetDatabase.AssetPathToGUID(cellBricksDataFilename));
-            m_BakingSet.cellSupportDataAsset = new ProbeVolumeStreamableAsset(kAPVStreamingAssetsPath, cellSupportDescs, m_BakingSet.supportDataChunkSize, bakingSetGUID, AssetDatabase.AssetPathToGUID(cellSupportDataFilename));
-
-            EditorUtility.SetDirty(m_BakingSet);
-        }
-
-        unsafe static void WriteDilatedCells(List<Cell> cells)
-        {
-            m_BakingSet.GetBlobFileNames(m_BakingSet.lightingScenario, out var cellDataFilename, out var _, out var cellOptionalDataFilename, out var cellSharedDataFilename, out var _);
-
-            var chunkSizeInProbes = ProbeBrickPool.GetChunkSizeInProbeCount();
-
-            // CellData
-            // L0 and L1 Data: 12 Coeffs stored in 3 textures. L0 (rgb) and R1x as ushort in one texture, the rest as byte in two 4 component textures.
-            var L0L1R1xChunkSize = sizeof(ushort) * 4 * chunkSizeInProbes; // 4 ushort components per probe
-            var L1ChunkSize = sizeof(byte) * 4 * chunkSizeInProbes; // 4 components per probe
-            var L0L1ChunkSize = L0L1R1xChunkSize + 2 * L1ChunkSize;
-            var L0L1TotalSize = m_TotalCellCounts.chunksCount * L0L1ChunkSize;
-            using var probesL0L1 = new NativeArray<byte>(L0L1TotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
-
-            // CellOptionalData
-            // L2 Data: 15 Coeffs stored in 4 byte4 textures.
-            var L2ChunkSize = 4 * sizeof(byte) * chunkSizeInProbes; // 4 byte component per probe
-            var L2TotalSize = m_TotalCellCounts.chunksCount * L2ChunkSize * 4; // 4 textures
-            using var probesL2 = new NativeArray<byte>(L2TotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
-
-            // CellSharedData
-            var sharedValidityMaskChunkSize = m_BakingSet.sharedValidityMaskChunkSize;
-            var sharedSkyOcclusionL0L1ChunkSize = m_BakingSet.sharedSkyOcclusionL0L1ChunkSize;
-            var sharedSkyShadingDirectionIndicesChunkSize = m_BakingSet.sharedSkyShadingDirectionIndicesChunkSize;
-            var sharedDataTotalSize = m_TotalCellCounts.chunksCount * m_BakingSet.sharedDataChunkSize;
-            using var sharedData = new NativeArray<byte>(sharedDataTotalSize, Allocator.Persistent, NativeArrayOptions.UninitializedMemory);
-
-            // We don't want to overwrite validity data
-            sharedData.CopyFrom(System.IO.File.ReadAllBytes(cellSharedDataFilename));
-
-            // When baking with partially loaded scenes, the list of cells being dilated might be smaller than the full list of cells in the bake.
-            // In this case, in order not to destroy the rest of the data, we need to load it back before writing.
-            if (cells.Count != m_BakingSet.cellDescs.Count)
-            {
-                probesL0L1.CopyFrom(System.IO.File.ReadAllBytes(cellDataFilename));
-                probesL2.CopyFrom(System.IO.File.ReadAllBytes(cellOptionalDataFilename));
-            }
-
-            var lightingScenario = ProbeReferenceVolume.instance.lightingScenario;
-            Debug.Assert(m_BakingSet.scenarios.ContainsKey(lightingScenario));
-            var scenarioDataInfo = m_BakingSet.scenarios[lightingScenario];
-
-            for (var i = 0; i < cells.Count; ++i)
-            {
-                var srcCell = cells[i];
-
-                var srcCellDesc = srcCell.desc;
-                var scenarioData = srcCell.data.scenarios[lightingScenario];
-
-                var L0L1chunkBaseOffset = scenarioDataInfo.cellDataAsset.streamableCellDescs[srcCellDesc.index].offset;
-                var L2chunkBaseOffset = scenarioDataInfo.cellOptionalDataAsset.streamableCellDescs[srcCellDesc.index].offset;
-                var sharedchunkBaseOffset = m_BakingSet.cellSharedDataAsset.streamableCellDescs[srcCellDesc.index].offset;
-                var shChunksCount = srcCellDesc.shChunkCount;
-
-                NativeArray<ushort> probesTargetL0L1Rx = probesL0L1.GetSubArray(L0L1chunkBaseOffset, L0L1R1xChunkSize * shChunksCount).Reinterpret<ushort>(1);
-                NativeArray<byte> probesTargetL1GL1Ry = probesL0L1.GetSubArray(L0L1chunkBaseOffset + shChunksCount * L0L1R1xChunkSize, L1ChunkSize * shChunksCount);
-                NativeArray<byte> probesTargetL1BL1Rz = probesL0L1.GetSubArray(L0L1chunkBaseOffset + shChunksCount * (L0L1R1xChunkSize + L1ChunkSize), L1ChunkSize * shChunksCount);
-
-                probesTargetL0L1Rx.CopyFrom(scenarioData.shL0L1RxData);
-                probesTargetL1GL1Ry.CopyFrom(scenarioData.shL1GL1RyData);
-                probesTargetL1BL1Rz.CopyFrom(scenarioData.shL1BL1RzData);
-
-                NativeArray<byte> probesTargetL2_0 = probesL2.GetSubArray(L2chunkBaseOffset + shChunksCount * L2ChunkSize * 0, L2ChunkSize * shChunksCount);
-                NativeArray<byte> probesTargetL2_1 = probesL2.GetSubArray(L2chunkBaseOffset + shChunksCount * L2ChunkSize * 1, L2ChunkSize * shChunksCount);
-                NativeArray<byte> probesTargetL2_2 = probesL2.GetSubArray(L2chunkBaseOffset + shChunksCount * L2ChunkSize * 2, L2ChunkSize * shChunksCount);
-                NativeArray<byte> probesTargetL2_3 = probesL2.GetSubArray(L2chunkBaseOffset + shChunksCount * L2ChunkSize * 3, L2ChunkSize * shChunksCount);
-
-                probesTargetL2_0.CopyFrom(scenarioData.shL2Data_0);
-                probesTargetL2_1.CopyFrom(scenarioData.shL2Data_1);
-                probesTargetL2_2.CopyFrom(scenarioData.shL2Data_2);
-                probesTargetL2_3.CopyFrom(scenarioData.shL2Data_3);
-
-                if (sharedSkyOcclusionL0L1ChunkSize != 0)
-                {
-                    NativeArray<ushort> skyOcclusionL0L1ChunkTarget = sharedData.GetSubArray(sharedchunkBaseOffset + shChunksCount * sharedValidityMaskChunkSize, sharedSkyOcclusionL0L1ChunkSize * shChunksCount).Reinterpret<ushort>(1);
-                    skyOcclusionL0L1ChunkTarget.CopyFrom(srcCell.data.skyOcclusionDataL0L1);
-
-                    if (sharedSkyShadingDirectionIndicesChunkSize != 0)
-                    {
-                        NativeArray<byte> skyShadingIndicesChunkTarget = sharedData.GetSubArray(sharedchunkBaseOffset + shChunksCount * (sharedValidityMaskChunkSize + sharedSkyOcclusionL0L1ChunkSize), sharedSkyShadingDirectionIndicesChunkSize * shChunksCount);
-                        skyShadingIndicesChunkTarget.CopyFrom(srcCell.data.skyShadingDirectionIndices);
-                    }
-                }
-            }
-
-            // Explicitly make sure the binary output files are writable since we write them using the C# file API (i.e. check out Perforce files if applicable)
-            var outputPaths = new List<string>(new[] { cellDataFilename, cellSharedDataFilename, cellOptionalDataFilename });
-
-            if (!AssetDatabase.MakeEditable(outputPaths.ToArray()))
-                Debug.LogWarning($"Failed to make one or more probe volume output file(s) writable. This could result in baked data not being properly written to disk. {string.Join(",", outputPaths)}");
-
-            unsafe
-            {
-                using (var fs = new System.IO.FileStream(cellDataFilename, System.IO.FileMode.Create, System.IO.FileAccess.Write))
-                {
-                    WriteNativeArray(fs, probesL0L1);
-                }
-                using (var fs = new System.IO.FileStream(cellOptionalDataFilename, System.IO.FileMode.Create, System.IO.FileAccess.Write))
-                {
-                    WriteNativeArray(fs, probesL2);
-                }
-                using (var fs = new System.IO.FileStream(cellSharedDataFilename, System.IO.FileMode.Create, System.IO.FileAccess.Write))
-                {
-                    WriteNativeArray(fs, sharedData);
-                }
-            }
-        }
-
-        private static void DeduplicateProbePositions(in Vector3[] probePositions, in int[] brickSubdivLevel, Dictionary<int, int> positionToIndex, BakingBatch batch,
-            NativeList<Vector3> uniquePositions, out int[] indices)
-        {
-            indices = new int[probePositions.Length];
-            int uniqueIndex = positionToIndex.Count;
-
-            for (int i = 0; i < probePositions.Length; i++)
-            {
-                var pos = probePositions[i];
-                var brickSubdiv = brickSubdivLevel[i];
-                int probeHash = batch.GetProbePositionHash(pos);
-
-                if (positionToIndex.TryGetValue(probeHash, out var index))
-                {
-                    indices[i] = index;
-                    int oldBrickLevel = batch.uniqueBrickSubdiv[probeHash];
-                    if (brickSubdiv < oldBrickLevel)
-                        batch.uniqueBrickSubdiv[probeHash] = brickSubdiv;
-                }
-                else
-                {
-                    positionToIndex[probeHash] = uniqueIndex;
-                    indices[i] = uniqueIndex;
-                    batch.uniqueBrickSubdiv[probeHash] = brickSubdiv;
-                    uniquePositions.Add(pos);
-                    uniqueIndex++;
-                }
-            }
-        }
-
-        static NativeList<Vector3> RunPlacement(Span<BakeJob> jobs)
-        {
-            // Overwrite loaded settings with data from profile. Note that the m_BakingSet.profile is already patched up if isFreezingPlacement
-            float prevBrickSize = ProbeReferenceVolume.instance.MinBrickSize();
-            int prevMaxSubdiv = ProbeReferenceVolume.instance.GetMaxSubdivision();
-            Vector3 prevOffset = ProbeReferenceVolume.instance.ProbeOffset();
-            ProbeReferenceVolume.instance.SetSubdivisionDimensions(m_ProfileInfo.minBrickSize, m_ProfileInfo.maxSubdivision, m_ProfileInfo.probeOffset);
-
-            // All probes need to be baked only once for the whole batch and not once per cell
-            // The reason is that the baker is not deterministic so the same probe position baked in two different cells may have different values causing seams artefacts.
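This is also why DeduplicateProbePositions (above) keys probes by GetProbePositionHash: coincident probes from neighbouring cells must collapse to one entry before baking. As a sketch of the idea, with an illustrative quantize-and-hash scheme that is not necessarily the one BakingBatch uses:

    // Quantize to a grid so positions that should coincide hash identically,
    // then mix the integer coordinates (classic spatial-hash constants).
    static int HashPosition(Vector3 p, float cellsPerMeter)
    {
        int x = Mathf.RoundToInt(p.x * cellsPerMeter);
        int y = Mathf.RoundToInt(p.y * cellsPerMeter);
        int z = Mathf.RoundToInt(p.z * cellsPerMeter);
        unchecked { return (x * 73856093) ^ (y * 19349663) ^ (z * 83492791); }
    }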
-            m_BakingBatch = new BakingBatch(cellCount);
-
-            // Run subdivision
-            ProbeSubdivisionResult result;
-            using (new BakingSetupProfiling(BakingSetupProfiling.Stages.BakeBricks))
-                result = GetWorldSubdivision();
-
-            // Compute probe positions
-            NativeList<Vector3> positions;
-            using (new BakingSetupProfiling(BakingSetupProfiling.Stages.ApplySubdivisionResults))
-                positions = ApplySubdivisionResults(result, jobs);
-
-            // Restore loaded asset settings
-            ProbeReferenceVolume.instance.SetSubdivisionDimensions(prevBrickSize, prevMaxSubdiv, prevOffset);
-
-            return positions;
-        }
-
-        static internal ProbeSubdivisionContext PrepareProbeSubdivisionContext(bool liveContext = false)
-        {
-            ProbeSubdivisionContext ctx = new ProbeSubdivisionContext();
-
-            // Prepare all the information in the scene for baking GI.
-            Vector3 refVolOrigin = Vector3.zero; // TODO: This will need to be center of the world bounds.
-            var perSceneDataList = GetPerSceneDataList();
-
-            if (m_BakingSet == null)
-            {
-                if (perSceneDataList.Count == 0) return ctx;
-                SetBakingContext(perSceneDataList);
-            }
-
-            var profileInfo = m_ProfileInfo;
-            if (liveContext || m_ProfileInfo == null)
-                profileInfo = GetProfileInfoFromBakingSet(m_BakingSet);
-
-            ctx.Initialize(m_BakingSet, profileInfo, refVolOrigin);
-            return ctx;
-        }
-
-        static ProbeSubdivisionResult GetWorldSubdivision()
-        {
-            if (isFreezingPlacement)
-                return GetBricksFromLoaded();
-
-            var ctx = PrepareProbeSubdivisionContext();
-            return BakeBricks(ctx, m_BakingBatch.contributors);
-        }
-
-        static internal ProbeSubdivisionResult BakeBricks(ProbeSubdivisionContext ctx, in GIContributors contributors)
-        {
-            var result = new ProbeSubdivisionResult();
-
-            if (ctx.probeVolumes.Count == 0)
-                return result;
-
-            using (var gpuResources = ProbePlacement.AllocateGPUResources(ctx.probeVolumes.Count, ctx.profile))
-            {
-                // subdivide all the cells and generate brick positions
-                foreach (var cell in ctx.cells)
-                {
-                    var scenesInCell = new HashSet<string>();
-
-                    // Calculate overlapping probe volumes to avoid unnecessary work
-                    var overlappingProbeVolumes = new List<(ProbeVolume component, ProbeReferenceVolume.Volume volume, Bounds bounds)>();
-                    foreach (var probeVolume in ctx.probeVolumes)
-                    {
-                        if (ProbeVolumePositioning.OBBAABBIntersect(probeVolume.volume, cell.bounds, probeVolume.bounds))
-                        {
-                            overlappingProbeVolumes.Add(probeVolume);
-                            scenesInCell.Add(ProbeReferenceVolume.GetSceneGUID(probeVolume.component.gameObject.scene));
-                        }
-                    }
-
-                    // Calculate valid renderers to avoid unnecessary work (a renderer needs to overlap a probe volume and match the layer)
-                    var filteredContributors = contributors.Filter(ctx.bakingSet, cell.bounds, overlappingProbeVolumes);
-
-                    if (filteredContributors.Count == 0 && !overlappingProbeVolumes.Any(v => v.component.fillEmptySpaces))
-                        continue;
-
-                    var bricks = ProbePlacement.SubdivideCell(cell.bounds, ctx, gpuResources, filteredContributors, overlappingProbeVolumes);
-                    if (bricks.Length == 0)
-                        continue;
-
-                    foreach (var renderer in filteredContributors.renderers)
-                        scenesInCell.Add(ProbeReferenceVolume.GetSceneGUID(renderer.component.gameObject.scene));
-                    foreach (var terrain in filteredContributors.terrains)
-                        scenesInCell.Add(ProbeReferenceVolume.GetSceneGUID(terrain.component.gameObject.scene));
-
-                    result.cells.Add((cell.position, cell.bounds, bricks));
-                    result.scenesPerCells[cell.position] = scenesInCell;
-                }
-            }
-
-            return result;
-        }
-
-        static ProbeSubdivisionResult GetBricksFromLoaded()
-        {
-            var dataList = GetPerSceneDataList();
-            var result = new ProbeSubdivisionResult();
-
-            foreach (var data in dataList)
-            {
-                var cellSize = m_ProfileInfo.minDistanceBetweenProbes * 3.0f * m_ProfileInfo.cellSizeInBricks;
-                Vector3 cellDimensions = new Vector3(cellSize, cellSize, cellSize);
-
-                // Loop through cells in asset, we need to be careful as there'll be duplicates.
-                // As we go through the cells we fill ProbeSubdivisionResult as we go.
-                var cells = m_BakingSet.GetSceneCellIndexList(data.sceneGUID);
-                foreach (var cellIndex in cells)
-                {
-                    var cellDesc = m_BakingSet.GetCellDesc(cellIndex);
-                    var cellData = m_BakingSet.GetCellData(cellIndex);
-                    var cellPos = cellDesc.position;
-
-                    if (!result.scenesPerCells.ContainsKey(cellPos))
-                    {
-                        result.scenesPerCells[cellPos] = new HashSet<string>();
-
-                        var center = new Vector3((cellPos.x + 0.5f) * cellSize, (cellPos.y + 0.5f) * cellSize, (cellPos.z + 0.5f) * cellSize);
-                        result.cells.Add((cellPos, new Bounds(center, cellDimensions), cellData.bricks.ToArray()));
-                    }
-                    result.scenesPerCells[cellPos].Add(data.sceneGUID);
-                }
-            }
-
-            return result;
-        }
-
-        static void ModifyProfileFromLoadedData(ProbeVolumeBakingSet bakingSet)
-        {
-            m_ProfileInfo.simplificationLevels = bakingSet.bakedSimplificationLevels;
-            m_ProfileInfo.minDistanceBetweenProbes = bakingSet.bakedMinDistanceBetweenProbes;
-            m_ProfileInfo.probeOffset = bakingSet.bakedProbeOffset;
-            globalBounds = bakingSet.globalBounds;
-        }
-
-        // Converts brick information into positional data at kBrickProbeCountPerDim * kBrickProbeCountPerDim * kBrickProbeCountPerDim resolution
-        internal static void ConvertBricksToPositions(Brick[] bricks, out Vector3[] outProbePositions, out int[] outBrickSubdiv)
-        {
-            int posIdx = 0;
-            float scale = ProbeReferenceVolume.instance.MinBrickSize() / ProbeBrickPool.kBrickCellCount;
-            Vector3 offset = ProbeReferenceVolume.instance.ProbeOffset();
-
-            outProbePositions = new Vector3[bricks.Length * ProbeBrickPool.kBrickProbeCountTotal];
-            outBrickSubdiv = new int[bricks.Length * ProbeBrickPool.kBrickProbeCountTotal];
-
-            foreach (var b in bricks)
-            {
-                int brickSize = ProbeReferenceVolume.CellSize(b.subdivisionLevel);
-                Vector3Int brickOffset = b.position * ProbeBrickPool.kBrickCellCount;
-
-                for (int z = 0; z < ProbeBrickPool.kBrickProbeCountPerDim; z++)
-                {
-                    for (int y = 0; y < ProbeBrickPool.kBrickProbeCountPerDim; y++)
-                    {
-                        for (int x = 0; x < ProbeBrickPool.kBrickProbeCountPerDim; x++)
-                        {
-                            var probeOffset = brickOffset + new Vector3Int(x, y, z) * brickSize;
-
-                            outProbePositions[posIdx] = offset + (Vector3)probeOffset * scale;
-                            outBrickSubdiv[posIdx] = b.subdivisionLevel;
-
-                            posIdx++;
-                        }
-                    }
-                }
-            }
-        }
-
-        static int PosToIndex(Vector3Int pos)
-        {
-            Vector3Int normalizedPos = pos - minCellPosition;
-            return normalizedPos.z * (cellCount.x * cellCount.y) + normalizedPos.y * cellCount.x + normalizedPos.x;
-        }
-
-        static NativeList<Vector3> ApplySubdivisionResults(ProbeSubdivisionResult results, Span<BakeJob> jobs)
-        {
-            int cellIdx = 0, freq = 10; // Don't refresh progress bar at every iteration because it's slow
-            LightTransportBakingProfiling.GetProgressRange(out float progress0, out float progress1);
-
-            var positions = new NativeList<Vector3>(Allocator.Persistent);
-            Dictionary<int, int> positionToIndex = new();
-            foreach ((var position, var bounds, var bricks) in results.cells)
-            {
-                if (++cellIdx % freq == 0)
-                    EditorUtility.DisplayProgressBar("Baking Probe Volumes", $"Subdividing cell {cellIdx} out of {results.cells.Count}", Mathf.Lerp(progress0, progress1, cellIdx / (float)results.cells.Count));
-
-                int positionStart = positions.Length;
-
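-                // Worked example of ConvertBricksToPositions (defined above), as a sketch:
-                // assume minBrickSize = 3m, so scale = 3m / kBrickCellCount = 1m. A level-0 brick
-                // at grid position (2, 0, 0) has brickSize = CellSize(0) = 1 and
-                // brickOffset = (2, 0, 0) * 3 = (6, 0, 0) cells, so its 64 probes land at
-                // offset + ((6..9), (0..3), (0..3)) * 1m: a 4x4x4 lattice with 1m spacing that
-                // spans the brick, sharing its far face with the neighbouring brick's first.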
-                ConvertBricksToPositions(bricks, out var probePositions, out var brickSubdivLevels);
-                DeduplicateProbePositions(in probePositions, in brickSubdivLevels, positionToIndex, m_BakingBatch, positions, out var probeIndices);
-
-                if (jobs != null)
-                {
-                    // Place each newly created probe in the correct job
-                    for (int i = positionStart; i < positions.Length; i++)
-                    {
-                        int jobIndex = 0;
-                        for (; jobIndex < jobs.Length - 1; jobIndex++)
-                        {
-                            if (jobs[jobIndex].Contains(positions[i]))
-                                break;
-                        }
-
-                        jobs[jobIndex].indices.Add(i);
-                    }
-                }
-
-                BakingCell cell = new BakingCell()
-                {
-                    index = PosToIndex(position),
-                    position = position,
-                    bounds = bounds,
-                    bricks = bricks,
-                    probePositions = probePositions,
-                    probeIndices = probeIndices,
-                };
-
-                m_BakingBatch.cells.Add(cell);
-                m_BakingBatch.cellIndex2SceneReferences[cell.index] = new HashSet<string>(results.scenesPerCells[cell.position]);
-            }
-
-            return positions;
-        }
+        /// <summary>
+        /// Cancels the currently running asynchronous bake job.
+        /// </summary>
+        /// <returns></returns>
+        internal static bool Cancel() => Progress.Cancel(s_AsyncBakeTaskID);

        /// <summary>
        /// Request additional bake request manager to recompute baked data for an array of requests
@@ -2415,5 +1252,47 @@ public static void BakeAdditionalRequest(int probeInstanceID)
            BakeAdditionalRequests(probeInstanceIDs);
        }
+
+        static VirtualOffsetBaker virtualOffsetOverride = null;
+        static LightingBaker lightingOverride = null;
+        static SkyOcclusionBaker skyOcclusionOverride = null;
+
+        /// <summary>Used to override the virtual offset baking system.</summary>
+        /// <param name="baker">The baker override or null to use the default system.</param>
+        public static void SetVirtualOffsetBakerOverride(VirtualOffsetBaker baker)
+        {
+            virtualOffsetOverride = baker;
+        }
+        /// <summary>Used to override the lighting baking system.</summary>
+        /// <param name="baker">The baker override or null to use the default system.</param>
+        public static void SetLightingBakerOverride(LightingBaker baker)
+        {
+            lightingOverride = baker;
+        }
+        /// <summary>Used to override the sky occlusion baking system.</summary>
+        /// <param name="baker">The baker override or null to use the default system.</param>
+        public static void SetSkyOcclusionBakerOverride(SkyOcclusionBaker baker)
+        {
+            skyOcclusionOverride = baker;
+        }
+
+        /// <summary>Get the current virtual offset baker override.</summary>
+        /// <returns>The baker override or null if none is set.</returns>
+        public static VirtualOffsetBaker GetVirtualOffsetBakerOverride()
+        {
+            return virtualOffsetOverride;
+        }
+        /// <summary>Get the current lighting baker override.</summary>
+        /// <returns>The baker override or null if none is set.</returns>
+        public static LightingBaker GetLightingBakerOverride()
+        {
+            return lightingOverride;
+        }
+        /// <summary>Get the current sky occlusion baker override.</summary>
+        /// <returns>The baker override or null if none is set.</returns>
+        public static SkyOcclusionBaker GetSkyOcclusionBakerOverride()
+        {
+            return skyOcclusionOverride;
+        }
    }
}
diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbePlacement.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbePlacement.cs
index efea973c440..2cc834fa5e6 100644
--- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbePlacement.cs
+++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbePlacement.cs
@@ -329,7 +329,7 @@ static void SubdivideSubCell(Bounds cellAABB, ProbeSubdivisionContext subdivisio
                return;
            }
-
+
            float minBrickSize = subdivisionCtx.profile.minBrickSize;
            var cellOffset = subdivisionCtx.profile.probeOffset;
diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeSubdivisionContext.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeSubdivisionContext.cs
index 11f4100b30c..4b8d67589fa 100644
--- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeSubdivisionContext.cs
+++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeSubdivisionContext.cs
@@ -73,7 +73,7 @@ static void UpdateRealtimeSubdivisionDebug()
        IEnumerator Subdivide()
        {
-            var ctx = ProbeGIBaking.PrepareProbeSubdivisionContext(true);
+            var ctx = AdaptiveProbeVolumes.PrepareProbeSubdivisionContext(true);
            var contributors = GIContributors.Find(GIContributors.ContributorFilter.All);

            // Cull all the cells that are not visible (we don't need them for realtime debug)
@@ -114,7 +114,7 @@ IEnumerator Subdivide()
                ctx.cells.Clear();
                ctx.cells.Add(cell);
-                var result = ProbeGIBaking.BakeBricks(ctx, contributors);
+                var result = AdaptiveProbeVolumes.BakeBricks(ctx, contributors);

                if (result.cells.Count != 0)
                    ProbeReferenceVolume.instance.realtimeSubdivisionInfo[cell.bounds] = result.cells[0].bricks;
@@ -144,7 +144,7 @@ public void Initialize(ProbeVolumeBakingSet bakingSet, ProbeVolumeProfileInfo pr
            float cellSize = profileInfo.cellSizeInMeters;
            Vector3 cellDimensions = new Vector3(cellSize, cellSize, cellSize);

-            var pvList = ProbeGIBaking.GetProbeVolumeList();
+            var pvList = AdaptiveProbeVolumes.GetProbeVolumeList();
            foreach (var pv in pvList)
            {
                if (!pv.isActiveAndEnabled)
diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBakingProcessSettingsDrawer.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBakingProcessSettingsDrawer.cs
index c07bb1615f9..66c84064c4a 100644
--- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBakingProcessSettingsDrawer.cs
+++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBakingProcessSettingsDrawer.cs
@@ -78,8 +78,8 @@ void DrawDilationSettings(SerializedProperty dilationSettings)
            {
                if (IndentedButton(EditorGUIUtility.TrTextContent("Refresh Dilation")))
                {
-                    ProbeGIBaking.RevertDilation();
-                    ProbeGIBaking.PerformDilation();
+                    AdaptiveProbeVolumes.RevertDilation();
+                    AdaptiveProbeVolumes.PerformDilation();
                }
            }
        }
@@ -108,7 +108,7 @@ void DrawVirtualOffsetSettings(SerializedProperty virtualOffsetSettings)

                if (IndentedButton(EditorGUIUtility.TrTextContent("Refresh Virtual Offset Debug", "Re-run the virtual offset simulation; it will be applied only for debug visualization sake and not affect baked data.")))
                {
-                    ProbeGIBaking.RecomputeVOForDebugOnly();
+                    AdaptiveProbeVolumes.RecomputeVOForDebugOnly();
                }
            }
        }
diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBakingSetEditor.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBakingSetEditor.cs
index 4aea99b3ebe..7d393805e2e 100644
--- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBakingSetEditor.cs
+++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBakingSetEditor.cs
@@ -79,7 +79,6 @@ static class Styles
            public static readonly GUIContent skyOcclusionAverageAlbedo = new GUIContent("Albedo Override", "Sky Occlusion does not consider the albedo of materials in the Scene when calculating bounced light from the sky. Albedo Override determines the value used instead. Lower values darken and higher values will brighten the Scene.");
            public static readonly GUIContent skyOcclusionShadingDirection = new GUIContent("Sky Direction", "For each probe, additionally bake the most suitable direction to use for sampling the Scene’s Ambient Probe. When disabled, surface normals are used instead. Sky Direction improves visual quality at the expense of memory.");
            public static readonly GUIContent cpuLightmapperNotSupportedWarning = new GUIContent("Sky Occlusion is not supported by the current lightmapper. Ensure that Progressive GPU is selected in Lightmapper Settings.");
-
            // Probe Settings section
@@ -148,7 +147,7 @@ void ProbePlacementGUI()
                return;

            EditorGUI.indentLevel++;
-            bool canFreezePlacement = ProbeGIBaking.CanFreezePlacement();
+            bool canFreezePlacement = AdaptiveProbeVolumes.CanFreezePlacement();
            if (ProbeReferenceVolume.instance.supportLightingScenarios)
            {
                using (new EditorGUI.DisabledGroupScope(!canFreezePlacement))
@@ -160,9 +159,9 @@ void ProbePlacementGUI()
                        m_FreezePlacement.boolValue = freeze;
                }

-                ProbeGIBaking.isFreezingPlacement = canFreezePlacement && m_FreezePlacement.boolValue;
+                AdaptiveProbeVolumes.isFreezingPlacement = canFreezePlacement && m_FreezePlacement.boolValue;

-                if (canFreezePlacement && !ProbeGIBaking.isFreezingPlacement && m_LightingScenarios.arraySize > 1)
+                if (canFreezePlacement && !AdaptiveProbeVolumes.isFreezingPlacement && m_LightingScenarios.arraySize > 1)
                {
                    foreach (var guid in bakingSet.sceneGUIDs)
                    {
@@ -179,7 +178,7 @@ void ProbePlacementGUI()
                }
            }

-            using (new EditorGUI.DisabledScope(Lightmapping.isRunning || (canFreezePlacement && ProbeGIBaking.isFreezingPlacement)))
+            using (new EditorGUI.DisabledScope(Lightmapping.isRunning || (canFreezePlacement && AdaptiveProbeVolumes.isFreezingPlacement)))
            {
                // Display vector3 ourselves otherwise display is messed up
                {
diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBuildProcessor.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBuildProcessor.cs
index 2f54202078b..0f570debecf 100644
--- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBuildProcessor.cs
+++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBuildProcessor.cs
@@ -118,7 +118,7 @@ public override void PrepareForBuild(BuildPlayerContext buildPlayerContext)
                }
            }

-            buildPlayerContext.AddAdditionalPathToStreamingAssets(tempStreamingAssetsPath, ProbeGIBaking.kAPVStreamingAssetsPath);
+            buildPlayerContext.AddAdditionalPathToStreamingAssets(tempStreamingAssetsPath, AdaptiveProbeVolumes.kAPVStreamingAssetsPath);
        }
    }
}
diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeLightingTab.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeLightingTab.cs
b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeLightingTab.cs index 08d8aac897a..501384cf87c 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeLightingTab.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeLightingTab.cs @@ -205,7 +205,7 @@ public override void OnDisable() EditorSceneManager.sceneOpened -= OnSceneOpened; - ProbeGIBaking.Dispose(); + AdaptiveProbeVolumes.Dispose(); } #region On GUI @@ -305,17 +305,17 @@ void BakeButtonCallback(object data) int option = (int)data; switch (option) { - case 0: ProbeGIBaking.BakeGI(); break; + case 0: AdaptiveProbeVolumes.BakeAsync(); break; case 1: BakeAllReflectionProbes(); break; case 2: ClearBakedData(); break; default: Debug.Log("invalid option in BakeButtonCallback"); break; } } - if (ProbeGIBaking.HasAsyncBakeInProgress()) + if (AdaptiveProbeVolumes.isRunning) { if (GUILayout.Button(Styles.cancelBake, Styles.buttonStyle)) - ProbeGIBaking.CancelAsyncBake(); + AdaptiveProbeVolumes.Cancel(); return; } @@ -851,15 +851,15 @@ internal static ProbeVolumeBakingSet GetSingleSceneSet(Scene scene) #region Async Bake internal static void BakeAPVButton() { - if (ProbeGIBaking.HasAsyncBakeInProgress()) + if (AdaptiveProbeVolumes.isRunning) { if (GUILayout.Button(Styles.cancelBake)) - ProbeGIBaking.CancelAsyncBake(); + AdaptiveProbeVolumes.Cancel(); } else { if (GUILayout.Button(Styles.generateAPV)) - ProbeGIBaking.BakeGI(); + AdaptiveProbeVolumes.BakeAsync(); } } #endregion @@ -982,7 +982,7 @@ internal bool PrepareAPVBake() return false; // Exclude scenes unchecked from the UI and scenes from other baking sets - ProbeGIBaking.partialBakeSceneList = new(); + AdaptiveProbeVolumes.partialBakeSceneList = new(); for (int i = 0; i < SceneManager.sceneCount; i++) { var scene = SceneManager.GetSceneAt(i); @@ -992,13 +992,13 @@ internal bool PrepareAPVBake() var sceneBakeData = sceneBakingSet.GetSceneBakeData(guid); if (sceneBakeData.hasProbeVolume && !sceneBakeData.bakeScene) continue; - ProbeGIBaking.partialBakeSceneList.Add(guid); + AdaptiveProbeVolumes.partialBakeSceneList.Add(guid); } - if (ProbeGIBaking.partialBakeSceneList.Count == activeSet.sceneGUIDs.Count) - ProbeGIBaking.partialBakeSceneList = null; + if (AdaptiveProbeVolumes.partialBakeSceneList.Count == activeSet.sceneGUIDs.Count) + AdaptiveProbeVolumes.partialBakeSceneList = null; - if (ProbeGIBaking.partialBakeSceneList != null) + if (AdaptiveProbeVolumes.partialBakeSceneList != null) { // Layout has changed and is incompatible. 
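// A hedged usage sketch, not part of this changeset: the entry points renamed in this file
// (ProbeGIBaking.BakeGI -> AdaptiveProbeVolumes.BakeAsync, HasAsyncBakeInProgress -> isRunning,
// CancelAsyncBake -> Cancel) compose roughly as follows; ToggleAPVBake is a hypothetical helper.
//
//     static void ToggleAPVBake()
//     {
//         if (AdaptiveProbeVolumes.isRunning)
//             AdaptiveProbeVolumes.Cancel();    // abort the in-flight asynchronous bake
//         else
//             AdaptiveProbeVolumes.BakeAsync(); // start a new asynchronous APV bake
//     }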
if (!activeSet.freezePlacement && diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeUI.Drawer.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeUI.Drawer.cs index 8788eeb30e4..c22a45460f3 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeUI.Drawer.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeUI.Drawer.cs @@ -113,7 +113,7 @@ static void Drawer_VolumeContent(SerializedProbeVolume serialized, Editor owner) EditorGUILayout.Space(); EditorGUILayout.LabelField("Subdivision Override", EditorStyles.boldLabel); - bool isFreezingPlacement = bakingSet != null && bakingSet.freezePlacement && ProbeGIBaking.CanFreezePlacement(); + bool isFreezingPlacement = bakingSet != null && bakingSet.freezePlacement && AdaptiveProbeVolumes.CanFreezePlacement(); using (new EditorGUI.DisabledScope(isFreezingPlacement)) { // Get settings from scene profile if available diff --git a/Packages/com.unity.render-pipelines.core/Editor/Material/MaterialHeaderScopeList.cs b/Packages/com.unity.render-pipelines.core/Editor/Material/MaterialHeaderScopeList.cs index b364f1c97c2..aa3ec4cd5fb 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Material/MaterialHeaderScopeList.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Material/MaterialHeaderScopeList.cs @@ -69,6 +69,11 @@ public void DrawHeaders(MaterialEditor materialEditor, Material material) EditorGUILayout.Space(); } + + // Reset label width back to the default of 0 (fix UUM-66215) + // NOTE: Because of how EditorGUIUtility.labelWidth works, when the internal value is 0, + // we cannot read that value back from the property getter. So we just set it to 0 here. + EditorGUIUtility.labelWidth = 0; } } } diff --git a/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.cs b/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.cs index 0ceb1e5b0fd..7c2fa6060d3 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.cs @@ -127,6 +127,7 @@ static partial class Classes enum EmptyStateReason { None = 0, + NoGraphRegistered, NoExecutionRegistered, NoDataAvailable, WaitingForCameraRender, @@ -137,7 +138,8 @@ enum EmptyStateReason static readonly string[] kEmptyStateMessages = { "", - L10n.Tr("The selected camera is not active. Activate the selected camera to display data in the Render Graph viewer."), + L10n.Tr("No Render Graph has been registered. The Render Graph Viewer is only functional when Render Graph API is in use."), + L10n.Tr("The selected camera has not rendered anything yet. Interact with the selected camera to display data in the Render Graph Viewer."), L10n.Tr("No data to display. Click refresh to capture data."), L10n.Tr("Waiting for the selected camera to render. Depending on the camera, you may need to trigger rendering by selecting the Scene or Game view."), L10n.Tr("No passes to display. 
Select a different Pass Filter to display contents."),
@@ -1399,6 +1401,12 @@ void RebuildGraphViewerUI()
             ClearGraphViewerUI();
             ClearEmptyStateMessage();
 
+            if (m_RegisteredGraphs.Count == 0)
+            {
+                SetEmptyStateMessage(EmptyStateReason.NoGraphRegistered);
+                return;
+            }
+
             if (!CaptureEnabled())
             {
                 SetEmptyStateMessage(EmptyStateReason.NoExecutionRegistered);
diff --git a/Packages/com.unity.render-pipelines.core/Runtime-PrivateShared/AssemblyInfo.cs b/Packages/com.unity.render-pipelines.core/Runtime-PrivateShared/AssemblyInfo.cs
index d39566cfc38..7b5baea47ee 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime-PrivateShared/AssemblyInfo.cs
+++ b/Packages/com.unity.render-pipelines.core/Runtime-PrivateShared/AssemblyInfo.cs
@@ -10,6 +10,15 @@
 // Remember to only use this shared API to cherry pick the code part that you want to
 // share but not go directly in user codebase project.
 // Every new entry here should be discussed. It is always better to have good public API.
-// Don't add logic in this assemblie. It is only to share private methods. Only redirection allowed.
+// Don't add logic in this assembly. It is only to share private methods. Only redirection allowed.
 
-//EXAMPLE: See com.unity.render-pipelines.core/Editor-PrivateShared/AssemblyInfo.cs
\ No newline at end of file
+//EXAMPLE: See com.unity.render-pipelines.core/Editor-PrivateShared/AssemblyInfo.cs
+
+namespace Unity.RenderPipelines.Core.Runtime.Shared
+{
+    internal static class CameraCaptureBridge
+    {
+        public static System.Collections.Generic.IEnumerator<System.Action<UnityEngine.Rendering.RenderTargetIdentifier, UnityEngine.Rendering.CommandBuffer>>
+            GetCachedCaptureActionsEnumerator(UnityEngine.Camera camera) => UnityEngine.Rendering.CameraCaptureBridge.GetCachedCaptureActionsEnumerator(camera);
+    }
+}
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Debugging/Prefabs/Widgets/DebugUIButton.prefab b/Packages/com.unity.render-pipelines.core/Runtime/Debugging/Prefabs/Widgets/DebugUIButton.prefab
index b9308fdb7d6..480dfbf43de 100644
--- a/Packages/com.unity.render-pipelines.core/Runtime/Debugging/Prefabs/Widgets/DebugUIButton.prefab
+++ b/Packages/com.unity.render-pipelines.core/Runtime/Debugging/Prefabs/Widgets/DebugUIButton.prefab
@@ -1,22 +1,12 @@
 %YAML 1.1
 %TAG !u!
tag:unity3d.com,2011: ---- !u!1001 &100100000 -Prefab: - m_ObjectHideFlags: 1 - serializedVersion: 2 - m_Modification: - m_TransformParent: {fileID: 0} - m_Modifications: [] - m_RemovedComponents: [] - m_ParentPrefab: {fileID: 0} - m_RootGameObject: {fileID: 1349998662384948} - m_IsPrefabParent: 1 --- !u!1 &1346781532117404 GameObject: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 m_Component: - component: {fileID: 224019920140556580} - component: {fileID: 222080438315005238} @@ -28,44 +18,53 @@ GameObject: m_NavMeshLayer: 0 m_StaticEditorFlags: 0 m_IsActive: 1 ---- !u!1 &1349998662384948 -GameObject: +--- !u!224 &224019920140556580 +RectTransform: m_ObjectHideFlags: 0 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - serializedVersion: 5 - m_Component: - - component: {fileID: 224438017010656346} - - component: {fileID: 222869912906783786} - - component: {fileID: 114163390439191134} - - component: {fileID: 114467080906542876} - - component: {fileID: 114307598231942114} - m_Layer: 5 - m_Name: DebugUI Button - m_TagString: Untagged - m_Icon: {fileID: 0} - m_NavMeshLayer: 0 - m_StaticEditorFlags: 0 - m_IsActive: 1 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1346781532117404} + m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_ConstrainProportionsScale: 0 + m_Children: [] + m_Father: {fileID: 224438017010656346} + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} + m_AnchorMin: {x: 0, y: 0} + m_AnchorMax: {x: 1, y: 1} + m_AnchoredPosition: {x: 0, y: 0} + m_SizeDelta: {x: 0, y: 0} + m_Pivot: {x: 0.5, y: 0.5} +--- !u!222 &222080438315005238 +CanvasRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1346781532117404} + m_CullTransparentMesh: 1 --- !u!114 &114152708984687776 MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1346781532117404} m_Enabled: 1 m_EditorHideFlags: 0 - m_Script: {fileID: 708705254, guid: f70555f144d8491a825f0804e09c671c, type: 3} + m_Script: {fileID: 11500000, guid: 5f7201a12d95ffc409449d95f23cf332, type: 3} m_Name: m_EditorClassIdentifier: m_Material: {fileID: 0} m_Color: {r: 0.8, g: 0.8, b: 0.8, a: 1} m_RaycastTarget: 1 + m_RaycastPadding: {x: 0, y: 0, z: 0, w: 0} + m_Maskable: 1 m_OnCullStateChanged: m_PersistentCalls: m_Calls: [] - m_TypeName: UnityEngine.UI.MaskableGraphic+CullStateChangedEvent, UnityEngine.UI, - Version=1.0.0.0, Culture=neutral, PublicKeyToken=null m_FontData: m_Font: {fileID: 12800000, guid: 74a5091d8707f334b9a5c31ef71a64ba, type: 3} m_FontSize: 16 @@ -80,25 +79,74 @@ MonoBehaviour: m_VerticalOverflow: 0 m_LineSpacing: 1 m_Text: Button +--- !u!1 &1349998662384948 +GameObject: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + serializedVersion: 6 + m_Component: + - component: {fileID: 224438017010656346} + - component: {fileID: 222869912906783786} + - component: {fileID: 
114163390439191134} + - component: {fileID: 114467080906542876} + - component: {fileID: 114307598231942114} + m_Layer: 5 + m_Name: DebugUIButton + m_TagString: Untagged + m_Icon: {fileID: 0} + m_NavMeshLayer: 0 + m_StaticEditorFlags: 0 + m_IsActive: 1 +--- !u!224 &224438017010656346 +RectTransform: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1349998662384948} + m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} + m_LocalPosition: {x: 0, y: 0, z: 0} + m_LocalScale: {x: 1, y: 1, z: 1} + m_ConstrainProportionsScale: 0 + m_Children: + - {fileID: 224019920140556580} + m_Father: {fileID: 0} + m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} + m_AnchorMin: {x: 0, y: 0} + m_AnchorMax: {x: 0, y: 0} + m_AnchoredPosition: {x: 0, y: 0} + m_SizeDelta: {x: 0, y: 26} + m_Pivot: {x: 0.5, y: 0.5} +--- !u!222 &222869912906783786 +CanvasRenderer: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} + m_GameObject: {fileID: 1349998662384948} + m_CullTransparentMesh: 1 --- !u!114 &114163390439191134 MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1349998662384948} m_Enabled: 1 m_EditorHideFlags: 0 - m_Script: {fileID: -765806418, guid: f70555f144d8491a825f0804e09c671c, type: 3} + m_Script: {fileID: 11500000, guid: fe87c0e1cc204ed48ad3b37840f39efc, type: 3} m_Name: m_EditorClassIdentifier: m_Material: {fileID: 0} m_Color: {r: 1, g: 1, b: 1, a: 1} m_RaycastTarget: 1 + m_RaycastPadding: {x: 0, y: 0, z: 0, w: 0} + m_Maskable: 1 m_OnCullStateChanged: m_PersistentCalls: m_Calls: [] - m_TypeName: UnityEngine.UI.MaskableGraphic+CullStateChangedEvent, UnityEngine.UI, - Version=1.0.0.0, Culture=neutral, PublicKeyToken=null m_Sprite: {fileID: 21300000, guid: d49e78756811bfa4aafb8b6535417991, type: 3} m_Type: 1 m_PreserveAspect: 0 @@ -107,33 +155,23 @@ MonoBehaviour: m_FillAmount: 1 m_FillClockwise: 1 m_FillOrigin: 0 ---- !u!114 &114307598231942114 -MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1349998662384948} - m_Enabled: 1 - m_EditorHideFlags: 0 - m_Script: {fileID: 11500000, guid: 8bff080b4e3bae64c80b54402ced6cc6, type: 3} - m_Name: - m_EditorClassIdentifier: - colorDefault: {r: 0.8, g: 0.8, b: 0.8, a: 1} - colorSelected: {r: 0.25, g: 0.65, b: 0.8, a: 1} - nameLabel: {fileID: 114152708984687776} + m_UseSpriteMesh: 0 + m_PixelsPerUnitMultiplier: 1 --- !u!114 &114467080906542876 MonoBehaviour: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1349998662384948} m_Enabled: 1 m_EditorHideFlags: 0 - m_Script: {fileID: 1392445389, guid: f70555f144d8491a825f0804e09c671c, type: 3} + m_Script: {fileID: 11500000, guid: 4e29b1a8efbd4b44bb3f3716e73f07ff, type: 3} m_Name: m_EditorClassIdentifier: m_Navigation: m_Mode: 0 + m_WrapAround: 0 m_SelectOnUp: {fileID: 0} m_SelectOnDown: {fileID: 0} m_SelectOnLeft: {fileID: 0} @@ -143,71 +181,51 @@ MonoBehaviour: m_NormalColor: {r: 0, g: 0, b: 0, a: 0.60784316} m_HighlightedColor: {r: 0, g: 0, b: 0, a: 0.8666667} 
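# Annotation (editorial assumption, not Unity-serialized data): this prefab is re-saved in the
# modern serialization format; GameObjects move from serializedVersion 5 to 6, the legacy
# m_PrefabParentObject/m_PrefabInternal pair is replaced by m_CorrespondingSourceObject,
# m_PrefabInstance and m_PrefabAsset, and the uGUI Selectable gains the newer m_SelectedColor,
# m_SelectedSprite and m_SelectedTrigger fields seen below.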
m_PressedColor: {r: 0.28235295, g: 0.28235295, b: 0.28235295, a: 1} + m_SelectedColor: {r: 0, g: 0, b: 0, a: 0.8666667} m_DisabledColor: {r: 0, g: 0, b: 0, a: 0.28627452} m_ColorMultiplier: 1 m_FadeDuration: 0.1 m_SpriteState: m_HighlightedSprite: {fileID: 0} m_PressedSprite: {fileID: 0} + m_SelectedSprite: {fileID: 0} m_DisabledSprite: {fileID: 0} m_AnimationTriggers: m_NormalTrigger: Normal m_HighlightedTrigger: Highlighted m_PressedTrigger: Pressed + m_SelectedTrigger: Highlighted m_DisabledTrigger: Disabled m_Interactable: 1 m_TargetGraphic: {fileID: 114163390439191134} m_OnClick: m_PersistentCalls: - m_Calls: [] - m_TypeName: UnityEngine.UI.Button+ButtonClickedEvent, UnityEngine.UI, Version=1.0.0.0, - Culture=neutral, PublicKeyToken=null ---- !u!222 &222080438315005238 -CanvasRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1346781532117404} ---- !u!222 &222869912906783786 -CanvasRenderer: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1349998662384948} ---- !u!224 &224019920140556580 -RectTransform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} - m_GameObject: {fileID: 1346781532117404} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: [] - m_Father: {fileID: 224438017010656346} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} - m_AnchorMin: {x: 0, y: 0} - m_AnchorMax: {x: 1, y: 1} - m_AnchoredPosition: {x: 0, y: 0} - m_SizeDelta: {x: 0, y: 0} - m_Pivot: {x: 0.5, y: 0.5} ---- !u!224 &224438017010656346 -RectTransform: - m_ObjectHideFlags: 1 - m_PrefabParentObject: {fileID: 0} - m_PrefabInternal: {fileID: 100100000} + m_Calls: + - m_Target: {fileID: 114307598231942114} + m_TargetAssemblyTypeName: UnityEngine.Rendering.UI.DebugUIHandlerButton, + Unity.RenderPipelines.Core.Runtime + m_MethodName: OnAction + m_Mode: 1 + m_Arguments: + m_ObjectArgument: {fileID: 0} + m_ObjectArgumentAssemblyTypeName: UnityEngine.Object, UnityEngine + m_IntArgument: 0 + m_FloatArgument: 0 + m_StringArgument: + m_BoolArgument: 0 + m_CallState: 1 +--- !u!114 &114307598231942114 +MonoBehaviour: + m_ObjectHideFlags: 0 + m_CorrespondingSourceObject: {fileID: 0} + m_PrefabInstance: {fileID: 0} + m_PrefabAsset: {fileID: 0} m_GameObject: {fileID: 1349998662384948} - m_LocalRotation: {x: 0, y: 0, z: 0, w: 1} - m_LocalPosition: {x: 0, y: 0, z: 0} - m_LocalScale: {x: 1, y: 1, z: 1} - m_Children: - - {fileID: 224019920140556580} - m_Father: {fileID: 0} - m_RootOrder: 0 - m_LocalEulerAnglesHint: {x: 0, y: 0, z: 0} - m_AnchorMin: {x: 0, y: 0} - m_AnchorMax: {x: 0, y: 0} - m_AnchoredPosition: {x: 0, y: 0} - m_SizeDelta: {x: 0, y: 26} - m_Pivot: {x: 0.5, y: 0.5} + m_Enabled: 1 + m_EditorHideFlags: 0 + m_Script: {fileID: 11500000, guid: 8bff080b4e3bae64c80b54402ced6cc6, type: 3} + m_Name: + m_EditorClassIdentifier: + colorDefault: {r: 0.8, g: 0.8, b: 0.8, a: 1} + colorSelected: {r: 0.25, g: 0.65, b: 0.8, a: 1} + nameLabel: {fileID: 114152708984687776} diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/Debug/DebugDisplayGPUResidentDrawer.cs b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/Debug/DebugDisplayGPUResidentDrawer.cs index 1ab7265f50e..0165e84ea2c 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/Debug/DebugDisplayGPUResidentDrawer.cs +++ 
b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/Debug/DebugDisplayGPUResidentDrawer.cs @@ -32,18 +32,20 @@ private bool displayBatcherStats } /// Returns the view instances id for the selected occluder debug view index, or 0 if not valid. - internal int GetOccluderViewInstanceID() + internal bool GetOccluderViewInstanceID(out int viewInstanceID) { DebugRendererBatcherStats debugStats = GPUResidentDrawer.GetDebugStats(); - if (debugStats == null) - return 0; - - if (occluderDebugViewIndex >= 0 && occluderDebugViewIndex < debugStats.occluderStats.Length) + if (debugStats != null) { - return debugStats.occluderStats[occluderDebugViewIndex].viewInstanceID; + if (occluderDebugViewIndex >= 0 && occluderDebugViewIndex < debugStats.occluderStats.Length) + { + viewInstanceID = debugStats.occluderStats[occluderDebugViewIndex].viewInstanceID; + return true; + } } - return 0; + viewInstanceID = 0; + return false; } /// Returns if the occlusion test heatmap debug overlay is enabled. @@ -192,6 +194,7 @@ private static DebugUI.Table.Row AddInstanceOcclusionPassDataRow(int eventIndex) new DebugUI.Value { displayName = "View Instance ID", refreshRate = k_RefreshRate, formatString = k_FormatString, getter = () => GetInstanceOcclusionEventStats(eventIndex).viewInstanceID }, new DebugUI.Value { displayName = "Event Type", refreshRate = k_RefreshRate, formatString = k_FormatString, getter = () => $"{GetInstanceOcclusionEventStats(eventIndex).eventType}" }, new DebugUI.Value { displayName = "Occluder Version", refreshRate = k_RefreshRate, formatString = k_FormatString, getter = () => OccluderVersionString(GetInstanceOcclusionEventStats(eventIndex)) }, + new DebugUI.Value { displayName = "Subview Mask", refreshRate = k_RefreshRate, formatString = k_FormatString, getter = () => $"0x{GetInstanceOcclusionEventStats(eventIndex).subviewMask:X}" }, new DebugUI.Value { displayName = "Occlusion Test", refreshRate = k_RefreshRate, formatString = k_FormatString, getter = () => $"{OcclusionTestString(GetInstanceOcclusionEventStats(eventIndex))}" }, new DebugUI.Value { displayName = "Visible Instances", refreshRate = k_RefreshRate, formatString = k_FormatString, getter = () => VisibleInstancesString(GetInstanceOcclusionEventStats(eventIndex)) }, new DebugUI.Value { displayName = "Culled Instances", refreshRate = k_RefreshRate, formatString = k_FormatString, getter = () => CulledInstancesString(GetInstanceOcclusionEventStats(eventIndex)) }, @@ -209,11 +212,12 @@ private static DebugUI.Table.Row AddOcclusionContextDataRow(int index) children = { new DebugUI.Value { displayName = "View Instance ID", refreshRate = k_RefreshRate, formatString = k_FormatString, getter = () => GetOccluderStats(index).viewInstanceID }, - new DebugUI.Value { displayName = "Texture Size", refreshRate = k_RefreshRate, formatString = k_FormatString, getter = + new DebugUI.Value { displayName = "Subview Count", refreshRate = k_RefreshRate, formatString = k_FormatString, getter = () => GetOccluderStats(index).subviewCount }, + new DebugUI.Value { displayName = "Size Per Subview", refreshRate = k_RefreshRate, formatString = k_FormatString, getter = () => { - Vector2Int size = GetOccluderStats(index).occluderTextureSize; - return $"{size.x}x{size.x}"; + Vector2Int size = GetOccluderStats(index).occluderMipLayoutSize; + return $"{size.x}x{size.y}"; }}, } }; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentBatcher.cs b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentBatcher.cs index 
40898c6c872..beff8c6b687 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentBatcher.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentBatcher.cs @@ -84,20 +84,20 @@ public void DestroyMeshes(NativeArray destroyedMeshes) m_InstanceCullingBatcher.DestroyMeshes(destroyedMeshes); } - public void InstanceOcclusionTest(RenderGraph renderGraph, in OcclusionCullingSettings settings) + public void InstanceOcclusionTest(RenderGraph renderGraph, in OcclusionCullingSettings settings, ReadOnlySpan subviewOcclusionTests) { if (!m_BatchersContext.hasBoundingSpheres) return; - m_InstanceCullingBatcher.culler.InstanceOcclusionTest(renderGraph, settings, m_BatchersContext); + m_InstanceCullingBatcher.culler.InstanceOcclusionTest(renderGraph, settings, subviewOcclusionTests, m_BatchersContext); } - public void UpdateInstanceOccluders(RenderGraph renderGraph, in OccluderParameters occluderParams) + public void UpdateInstanceOccluders(RenderGraph renderGraph, in OccluderParameters occluderParams, ReadOnlySpan occluderSubviewUpdates) { if (!m_BatchersContext.hasBoundingSpheres) return; - m_BatchersContext.occlusionCullingCommon.UpdateInstanceOccluders(renderGraph, occluderParams); + m_BatchersContext.occlusionCullingCommon.UpdateInstanceOccluders(renderGraph, occluderParams, occluderSubviewUpdates); } public void UpdateRenderers(NativeArray renderersID) diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawer.cs b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawer.cs index 0f240e49dc3..db16171c4bb 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawer.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawer.cs @@ -74,9 +74,10 @@ public static void PostCullBeginCameraRendering(RenderRequestBatcherContext cont /// /// Render graph that will have a compute pass added. /// The view to update and occlusion test to use. - public static void InstanceOcclusionTest(RenderGraph renderGraph, in OcclusionCullingSettings settings) + /// Specifies the occluder subviews to use with each culling split index. + public static void InstanceOcclusionTest(RenderGraph renderGraph, in OcclusionCullingSettings settings, ReadOnlySpan subviewOcclusionTests) { - s_Instance?.batcher.InstanceOcclusionTest(renderGraph, settings); + s_Instance?.batcher.InstanceOcclusionTest(renderGraph, settings, subviewOcclusionTests); } /// @@ -88,9 +89,10 @@ public static void InstanceOcclusionTest(RenderGraph renderGraph, in OcclusionCu /// /// Render graph that will have a compute pass added. /// Parameter to specify the view and depth buffer to read. - public static void UpdateInstanceOccluders(RenderGraph renderGraph, in OccluderParameters occluderParameters) + /// Specifies which occluder subviews to update from slices of the input depth buffer. 
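// A hedged calling sketch, not part of this changeset: with the subview-aware API, updating a
// single-subview camera occluder and then occlusion-testing culling split 0 against it could
// look roughly like this; renderGraph, occluderParams, viewInstanceID and the view/proj
// matrices are assumed to be set up elsewhere.
//
//     var update = new OccluderSubviewUpdate(0) { viewMatrix = view, invViewMatrix = view.inverse, gpuProjMatrix = proj };
//     GPUResidentDrawer.UpdateInstanceOccluders(renderGraph, occluderParams, new[] { update });
//
//     var subviewTest = new SubviewOcclusionTest { cullingSplitIndex = 0, occluderSubviewIndex = 0 };
//     var settings = new OcclusionCullingSettings(viewInstanceID, OcclusionTest.TestAll);
//     GPUResidentDrawer.InstanceOcclusionTest(renderGraph, settings, new[] { subviewTest });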
+ public static void UpdateInstanceOccluders(RenderGraph renderGraph, in OccluderParameters occluderParameters, ReadOnlySpan occluderSubviewUpdates) { - s_Instance?.batcher.UpdateInstanceOccluders(renderGraph, occluderParameters); + s_Instance?.batcher.UpdateInstanceOccluders(renderGraph, occluderParameters, occluderSubviewUpdates); } /// diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawerDebug.cs b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawerDebug.cs index a36a88fa9e5..15d05fb04cf 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawerDebug.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawerDebug.cs @@ -25,6 +25,7 @@ internal struct InstanceOcclusionEventStats public int viewInstanceID; public InstanceOcclusionEventType eventType; public int occluderVersion; + public int subviewMask; public OcclusionTest occlusionTest; public int visibleInstances; public int culledInstances; @@ -33,7 +34,8 @@ internal struct InstanceOcclusionEventStats internal struct DebugOccluderStats { public int viewInstanceID; - public Vector2Int occluderTextureSize; + public int subviewCount; + public Vector2Int occluderMipLayoutSize; } internal class DebugRendererBatcherStats : IDisposable @@ -66,8 +68,8 @@ public void Dispose() internal struct OcclusionCullingDebugOutput { - public RTHandle occluderTexture; - public GraphicsBuffer debugPyramid; + public RTHandle occluderDepthPyramid; + public GraphicsBuffer occlusionDebugOverlay; public OcclusionCullingDebugShaderVariables cb; } } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawerTypes.cs b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawerTypes.cs index 6d47cc8e198..1e58536d34e 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawerTypes.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GPUResidentDrawerTypes.cs @@ -53,6 +53,15 @@ public static uint GetBatchLayerMask(this OcclusionTest occlusionTest) } } + /// Parameter structure for passing to GPUResidentDrawer.InstanceOcclusionTest. + public struct SubviewOcclusionTest + { + /// The split index to read from the CPU culling output. + public int cullingSplitIndex; + /// The occluder subview to occlusion test against. + public int occluderSubviewIndex; + } + /// Parameter structure for passing to GPUResidentDrawer.InstanceOcclusionTest. public struct OcclusionCullingSettings { @@ -60,6 +69,8 @@ public struct OcclusionCullingSettings public int viewInstanceID; /// The occlusion test to use. public OcclusionTest occlusionTest; + /// An instance multiplier to use for the generated indirect draw calls. + public int instanceMultiplier; /// Creates a new structure using the given parameters. /// The instance ID of the camera to find culling output and occluders for. @@ -68,14 +79,22 @@ public OcclusionCullingSettings(int viewInstanceID, OcclusionTest occlusionTest) { this.viewInstanceID = viewInstanceID; this.occlusionTest = occlusionTest; + this.instanceMultiplier = 1; } } /// Parameters structure for passing to GPUResidentDrawer.UpdateInstanceOccluders. - public struct OccluderParameters + public struct OccluderSubviewUpdate { - /// The instance ID of the camera, used to identify these occluders for the occlusion test. - public int viewInstanceID; + /// + /// The subview index within this camera or light, used to identify these occluders for the occlusion test. 
+ /// + public int subviewIndex; + + /// The slice index of the depth data to read. + public int depthSliceIndex; + /// The offset in pixels to the start of the depth data to read. + public Vector2Int depthOffset; /// The transform from world space to view space when rendering the depth buffer. public Matrix4x4 viewMatrix; @@ -86,28 +105,47 @@ public struct OccluderParameters /// An additional world space offset to apply when moving between world space and view space. public Vector3 viewOffsetWorldSpace; + /// Creates a new structure using the given parameters. + /// The index of the subview within this occluder. + public OccluderSubviewUpdate(int subviewIndex) + { + this.subviewIndex = subviewIndex; + + this.depthSliceIndex = 0; + this.depthOffset = Vector2Int.zero; + + this.viewMatrix = Matrix4x4.identity; + this.invViewMatrix = Matrix4x4.identity; + this.gpuProjMatrix = Matrix4x4.identity; + this.viewOffsetWorldSpace = Vector3.zero; + } + } + + /// Parameters structure for passing to GPUResidentDrawer.UpdateInstanceOccluders. + public struct OccluderParameters + { + /// The instance ID of the camera, used to identify these occluders for the occlusion test. + public int viewInstanceID; + /// The total number of subviews for this occluder. + public int subviewCount; + /// The depth texture to read. public TextureHandle depthTexture; - /// The offset in pixels to the start of the depth data to read. - public Vector2Int depthOffset; /// The size in pixels of the area of the depth data to read. public Vector2Int depthSize; - /// The number of slices, expected to be 0 or 1 for 2D and 2DArray textures respectively. - public int depthSliceCount; + /// True if the depth texture is a texture array, false otherwise. + public bool depthIsArray; /// Creates a new structure using the given parameters. /// The instance ID of the camera to associate with these occluders. 
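// A hedged construction sketch, not part of this changeset: OccluderParameters now carries only
// the per-view depth source and subview count, while per-subview matrices live in the
// OccluderSubviewUpdate struct above; camera, depthTextureHandle, w and h are placeholders.
//
//     var occluderParams = new OccluderParameters(camera.GetInstanceID())
//     {
//         subviewCount = 1,                  // a plain camera has a single subview
//         depthTexture = depthTextureHandle, // render graph TextureHandle to read depth from
//         depthSize = new Vector2Int(w, h),  // pixel area of the depth data to read
//         depthIsArray = false,              // 2D depth texture, not a texture array
//     };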
public OccluderParameters(int viewInstanceID) { this.viewInstanceID = viewInstanceID; - this.viewMatrix = Matrix4x4.identity; - this.invViewMatrix = Matrix4x4.identity; - this.gpuProjMatrix = Matrix4x4.identity; - this.viewOffsetWorldSpace = Vector3.zero; + this.subviewCount = 1; + this.depthTexture = TextureHandle.nullHandle; - this.depthOffset = Vector2Int.zero; this.depthSize = Vector2Int.zero; - this.depthSliceCount = 0; + this.depthIsArray = false; } } } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceCuller.cs b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceCuller.cs index 2e2f1024db8..ebd6c13541b 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceCuller.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceCuller.cs @@ -1311,6 +1311,7 @@ internal struct Info public int viewInstanceID; public InstanceOcclusionEventType eventType; public int occluderVersion; + public int subviewMask; public OcclusionTest occlusionTest; public bool HasVersion() @@ -1361,7 +1362,7 @@ public void Dispose() m_CounterBuffer.Dispose(); } - public int TryAdd(int viewInstanceID, InstanceOcclusionEventType eventType, int occluderVersion, OcclusionTest occlusionTest) + public int TryAdd(int viewInstanceID, InstanceOcclusionEventType eventType, int occluderVersion, int subviewMask, OcclusionTest occlusionTest) { int passIndex = m_PendingInfo.Length; if (passIndex + 1 > MaxPassCount) @@ -1372,6 +1373,7 @@ public int TryAdd(int viewInstanceID, InstanceOcclusionEventType eventType, int viewInstanceID = viewInstanceID, eventType = eventType, occluderVersion = occluderVersion, + subviewMask = subviewMask, occlusionTest = occlusionTest, }); return passIndex; @@ -1445,6 +1447,7 @@ public void MoveToDebugStatsAndClear(DebugRendererBatcherStats debugStats) viewInstanceID = info.viewInstanceID, eventType = info.eventType, occluderVersion = occluderVersion, + subviewMask = info.subviewMask, occlusionTest = info.occlusionTest, visibleInstances = notOccludedCounter, culledInstances = occludedCounter, @@ -1773,8 +1776,8 @@ public unsafe JobHandle CreateCullJobTree( cullingJobHandle = drawCommandOutputHandle; } - rendererVisibilityMasks.Dispose(cullingJobHandle); - rendererCrossFadeValues.Dispose(cullingJobHandle); + cullingJobHandle = rendererVisibilityMasks.Dispose(cullingJobHandle); + cullingJobHandle = rendererCrossFadeValues.Dispose(cullingJobHandle); return cullingJobHandle; } @@ -1928,7 +1931,7 @@ private JobHandle CreatePickingCullingOutputJob_EditorOnly(in BatchCullingContex #endif - public void InstanceOccludersUpdated(int viewInstanceID, RenderersBatchersContext batchersContext) + public void InstanceOccludersUpdated(int viewInstanceID, int subviewMask, RenderersBatchersContext batchersContext) { if (m_DebugStats?.enabled ?? 
false) { @@ -1940,6 +1943,7 @@ public void InstanceOccludersUpdated(int viewInstanceID, RenderersBatchersContex viewInstanceID, InstanceOcclusionEventType.OccluderUpdate, occluderCtx.version, + subviewMask, OcclusionTest.None); } } @@ -1973,11 +1977,12 @@ public ParallelBitArray GetCompactedVisibilityMasks(bool syncCullingJobs) private class InstanceOcclusionTestPassData { public OcclusionCullingSettings settings; + public InstanceOcclusionTestSubviewSettings subviewSettings; public OccluderHandles occluderHandles; public IndirectBufferContextHandles bufferHandles; } - public void InstanceOcclusionTest(RenderGraph renderGraph, in OcclusionCullingSettings settings, RenderersBatchersContext batchersContext) + public void InstanceOcclusionTest(RenderGraph renderGraph, in OcclusionCullingSettings settings, ReadOnlySpan subviewOcclusionTests, RenderersBatchersContext batchersContext) { if (!batchersContext.occlusionCullingCommon.GetOccluderContext(settings.viewInstanceID, out OccluderContext occluderCtx)) return; @@ -1991,6 +1996,7 @@ public void InstanceOcclusionTest(RenderGraph renderGraph, in OcclusionCullingSe builder.AllowGlobalStateModification(true); passData.settings = settings; + passData.subviewSettings = InstanceOcclusionTestSubviewSettings.FromSpan(subviewOcclusionTests); passData.bufferHandles = m_IndirectStorage.ImportBuffers(renderGraph); passData.occluderHandles = occluderHandles; @@ -2003,6 +2009,7 @@ public void InstanceOcclusionTest(RenderGraph renderGraph, in OcclusionCullingSe batcher.instanceCullingBatcher.culler.AddOcclusionCullingDispatch( context.cmd, data.settings, + data.subviewSettings, data.bufferHandles, data.occluderHandles, batcher.batchersContext); @@ -2039,6 +2046,7 @@ internal void EnsureValidOcclusionTestResults(int viewInstanceID) _InstanceInfoCount = (uint)allocInfo.instanceCount, _BoundingSphereInstanceDataAddress = 0, _DebugCounterIndex = -1, + _InstanceMultiplierShift = 0, }; cmd.SetBufferData(m_ConstantBuffer, m_ShaderVariables); cmd.SetComputeConstantBufferParam(cs, ShaderIDs.InstanceOcclusionCullerShaderVariables, m_ConstantBuffer, 0, m_ConstantBuffer.stride); @@ -2060,11 +2068,11 @@ internal void EnsureValidOcclusionTestResults(int viewInstanceID) private void AddOcclusionCullingDispatch( ComputeCommandBuffer cmd, in OcclusionCullingSettings settings, + in InstanceOcclusionTestSubviewSettings subviewSettings, in IndirectBufferContextHandles bufferHandles, in OccluderHandles occluderHandles, RenderersBatchersContext batchersContext) { - int settingsCullingSplitIndex = 0; // TODO: rework to split mask for shadow caster culling var occlusionCullingCommon = batchersContext.occlusionCullingCommon; int indirectContextIndex = m_IndirectStorage.TryGetContextIndex(settings.viewInstanceID); if (indirectContextIndex >= 0) @@ -2074,8 +2082,12 @@ private void AddOcclusionCullingDispatch( // check what compute we need to do (if any) bool hasOccluders = occlusionCullingCommon.GetOccluderContext(settings.viewInstanceID, out OccluderContext occluderCtx); + // check we have occluders for all the required subviews, disable the occlusion test if not + hasOccluders = hasOccluders && ((subviewSettings.occluderSubviewMask & occluderCtx.subviewValidMask) == subviewSettings.occluderSubviewMask); + IndirectBufferContext.BufferState newBufferState = IndirectBufferContext.BufferState.Zeroed; - OccluderState newOccluderState = new OccluderState(); + int newOccluderVersion = 0; + int newSubviewMask = 0; switch (settings.occlusionTest) { case OcclusionTest.None: @@ -2085,11 
+2097,8 @@ private void AddOcclusionCullingDispatch( if (hasOccluders) { newBufferState = IndirectBufferContext.BufferState.AllInstancesOcclusionTested; - newOccluderState = new OccluderState - { - version = occluderCtx.version, - cullingSplitIndex = settingsCullingSplitIndex, - }; + newOccluderVersion = occluderCtx.version; + newSubviewMask = subviewSettings.occluderSubviewMask; } else { @@ -2105,9 +2114,9 @@ private void AddOcclusionCullingDispatch( case IndirectBufferContext.BufferState.AllInstancesOcclusionTested: case IndirectBufferContext.BufferState.OccludedInstancesReTested: // valid or already done - if (bufferCtx.occluderState.cullingSplitIndex != settingsCullingSplitIndex) + if (bufferCtx.subviewMask != subviewSettings.occluderSubviewMask) { - Debug.Log("Expected the previous occlusion test to be from the same split index"); + Debug.Log("Expected an occlusion test of TestCulled to use the same subview mask as the previous occlusion test"); hasMatchingCullingOutput = false; } break; @@ -2127,18 +2136,15 @@ private void AddOcclusionCullingDispatch( if (hasMatchingCullingOutput) { newBufferState = IndirectBufferContext.BufferState.OccludedInstancesReTested; - newOccluderState = new OccluderState - { - version = occluderCtx.version, - cullingSplitIndex = settingsCullingSplitIndex, - }; + newOccluderVersion = occluderCtx.version; + newSubviewMask = subviewSettings.occluderSubviewMask; } } break; } // issue the work (if any) - if ((bufferCtx.bufferState != newBufferState || !bufferCtx.occluderState.Matches(newOccluderState))) + if (!bufferCtx.Matches(newBufferState, newOccluderVersion, newSubviewMask)) { bool isFirstPass = (newBufferState == IndirectBufferContext.BufferState.AllInstancesOcclusionTested); bool isSecondPass = (newBufferState == IndirectBufferContext.BufferState.OccludedInstancesReTested); @@ -2155,7 +2161,8 @@ private void AddOcclusionCullingDispatch( IndirectBufferAllocInfo allocInfo = m_IndirectStorage.GetAllocInfo(indirectContextIndex); bufferCtx.bufferState = newBufferState; - bufferCtx.occluderState = newOccluderState; + bufferCtx.occluderVersion = newOccluderVersion; + bufferCtx.subviewMask = newSubviewMask; if (!allocInfo.IsEmpty()) { @@ -2165,17 +2172,16 @@ private void AddOcclusionCullingDispatch( debugCounterIndex = m_OcclusionEventDebugArray.TryAdd( settings.viewInstanceID, InstanceOcclusionEventType.OcclusionTest, - newOccluderState.version, + newOccluderVersion, + newSubviewMask, isFirstPass ? OcclusionTest.TestAll : isSecondPass ? OcclusionTest.TestCulled : OcclusionTest.None); } // set up keywords - bool useArray = false; bool occlusionDebug = false; if (isFirstPass || isSecondPass) { - useArray = OcclusionCullingCommon.UseArray(in occluderCtx); - occlusionDebug = OcclusionCullingCommon.UseOcclusionDebug(in occluderCtx) && occluderHandles.debugPyramid.IsValid(); + occlusionDebug = OcclusionCullingCommon.UseOcclusionDebug(in occluderCtx) && occluderHandles.occlusionDebugOverlay.IsValid(); } var cs = m_OcclusionTestShader.cs; var firstPassKeyword = new LocalKeyword(cs, "OCCLUSION_FIRST_PASS"); @@ -2191,11 +2197,12 @@ private void AddOcclusionCullingDispatch( _InstanceInfoCount = (uint)allocInfo.instanceCount, _BoundingSphereInstanceDataAddress = batchersContext.renderersParameters.boundingSphere.gpuAddress, _DebugCounterIndex = debugCounterIndex, + _InstanceMultiplierShift = (settings.instanceMultiplier == 2) ? 
1 : 0, }; cmd.SetBufferData(m_ConstantBuffer, m_ShaderVariables); cmd.SetComputeConstantBufferParam(cs, ShaderIDs.InstanceOcclusionCullerShaderVariables, m_ConstantBuffer, 0, m_ConstantBuffer.stride); - occlusionCullingCommon.PrepareCulling(cmd, in occluderCtx, settings.viewInstanceID, settingsCullingSplitIndex, m_OcclusionTestShader, useArray, occlusionDebug); + occlusionCullingCommon.PrepareCulling(cmd, in occluderCtx, settings, subviewSettings, m_OcclusionTestShader, occlusionDebug); if (doCopyInstances) { diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceCullingBatcher.cs b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceCullingBatcher.cs index 3188f222e3a..c89565999a8 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceCullingBatcher.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceCullingBatcher.cs @@ -1049,9 +1049,10 @@ public void BuildBatch( m_DrawInstanceData.NeedsRebuild(); UpdateInstanceDataBufferLayoutVersion(); } - public void InstanceOccludersUpdated(int viewInstanceID) + + public void InstanceOccludersUpdated(int viewInstanceID, int subviewMask) { - m_Culler.InstanceOccludersUpdated(viewInstanceID, m_BatchersContext); + m_Culler.InstanceOccludersUpdated(viewInstanceID, subviewMask, m_BatchersContext); } public void UpdateFrame() diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceOcclusionCuller.cs b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceOcclusionCuller.cs index 40c0f788aa2..5ee36dd6ecc 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceOcclusionCuller.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceOcclusionCuller.cs @@ -26,14 +26,14 @@ internal struct OccluderDerivedData /// public Vector4 facingDirWorldSpace; - public static OccluderDerivedData FromParameters(in OccluderParameters occluderParams) + public static OccluderDerivedData FromParameters(in OccluderSubviewUpdate occluderSubviewUpdate) { - var origin = occluderParams.viewOffsetWorldSpace + (Vector3)occluderParams.invViewMatrix.GetColumn(3); // view origin in world space - var xViewVec = (Vector3)occluderParams.invViewMatrix.GetColumn(0); // positive x axis in world space - var yViewVec = (Vector3)occluderParams.invViewMatrix.GetColumn(1); // positive y axis in world space - var towardsVec = (Vector3)occluderParams.invViewMatrix.GetColumn(2); // positive z axis in world space + var origin = occluderSubviewUpdate.viewOffsetWorldSpace + (Vector3)occluderSubviewUpdate.invViewMatrix.GetColumn(3); // view origin in world space + var xViewVec = (Vector3)occluderSubviewUpdate.invViewMatrix.GetColumn(0); // positive x axis in world space + var yViewVec = (Vector3)occluderSubviewUpdate.invViewMatrix.GetColumn(1); // positive y axis in world space + var towardsVec = (Vector3)occluderSubviewUpdate.invViewMatrix.GetColumn(2); // positive z axis in world space - var viewMatrixNoTranslation = occluderParams.viewMatrix; + var viewMatrixNoTranslation = occluderSubviewUpdate.viewMatrix; viewMatrixNoTranslation.SetColumn(3, new Vector4(0.0f, 0.0f, 0.0f, 1.0f)); return new OccluderDerivedData @@ -41,7 +41,7 @@ public static OccluderDerivedData FromParameters(in OccluderParameters occluderP viewOriginWorldSpace = origin, facingDirWorldSpace = towardsVec.normalized, radialDirWorldSpace = (xViewVec + yViewVec).normalized, - viewProjMatrix = occluderParams.gpuProjMatrix * viewMatrixNoTranslation, + viewProjMatrix = 
occluderSubviewUpdate.gpuProjMatrix * viewMatrixNoTranslation, }; } } @@ -49,7 +49,7 @@ public static OccluderDerivedData FromParameters(in OccluderParameters occluderP internal struct OccluderHandles { public TextureHandle occluderDepthPyramid; - public BufferHandle debugPyramid; + public BufferHandle occlusionDebugOverlay; public bool IsValid() { @@ -59,15 +59,15 @@ public bool IsValid() public void UseForOcclusionTest(IBaseRenderGraphBuilder builder) { builder.UseTexture(occluderDepthPyramid, AccessFlags.Read); - if (debugPyramid.IsValid()) - builder.UseBuffer(debugPyramid, AccessFlags.ReadWrite); + if (occlusionDebugOverlay.IsValid()) + builder.UseBuffer(occlusionDebugOverlay, AccessFlags.ReadWrite); } public void UseForOccluderUpdate(IBaseRenderGraphBuilder builder) { builder.UseTexture(occluderDepthPyramid, AccessFlags.ReadWrite); - if (debugPyramid.IsValid()) - builder.UseBuffer(debugPyramid, AccessFlags.ReadWrite); + if (occlusionDebugOverlay.IsValid()) + builder.UseBuffer(occlusionDebugOverlay, AccessFlags.ReadWrite); } } @@ -120,17 +120,6 @@ public int GetExtraDrawInfoSlotIndex() } } - internal struct OccluderState - { - public int version; - public int cullingSplitIndex; - - public bool Matches(OccluderState other) - { - return cullingSplitIndex == other.cullingSplitIndex && version == other.version; - } - } - internal struct IndirectBufferContext { public JobHandle cullingJobHandle; @@ -145,13 +134,22 @@ public enum BufferState } public BufferState bufferState; - public OccluderState occluderState; + public int occluderVersion; + public int subviewMask; public IndirectBufferContext(JobHandle cullingJobHandle) { this.cullingJobHandle = cullingJobHandle; this.bufferState = BufferState.Pending; - this.occluderState = new OccluderState(); + this.occluderVersion = 0; + this.subviewMask = 0; + } + + public bool Matches(BufferState bufferState, int occluderVersion, int subviewMask) + { + return this.bufferState == bufferState + && this.occluderVersion == occluderVersion + && this.subviewMask == subviewMask; } } @@ -173,17 +171,26 @@ private static class ShaderIDs public const int k_FirstDepthMipIndex = 3; // 8x8 tiles public const int k_MaxOccluderMips = (int)OcclusionCullingCommonConfig.MaxOccluderMips; public const int k_MaxSilhouettePlanes = (int)OcclusionCullingCommonConfig.MaxOccluderSilhouettePlanes; + public const int k_MaxSubviewsPerView = (int)OcclusionCullingCommonConfig.MaxSubviewsPerView; public int version; - public OccluderDerivedData cameraData; public Vector2Int depthBufferSize; - public int depthSliceCount; + + public NativeArray subviewData; + public int subviewCount { get { return subviewData.Length; } } + public int subviewValidMask; + + public bool IsSubviewValid(int subviewIndex) + { + return subviewIndex < subviewCount && (subviewValidMask & (1 << subviewIndex)) != 0; + } public NativeArray occluderMipBounds; - public Vector2Int occluderTextureSize; - public Vector2Int debugTextureSize; + public Vector2Int occluderMipLayoutSize; // total size of 2D layout specified by occluderMipBounds + public Vector2Int occluderDepthPyramidSize; // at least the size of N mip layouts tiled vertically (one per subview) public RTHandle occluderDepthPyramid; - public GraphicsBuffer debugPyramid; + public int occlusionDebugOverlaySize; + public GraphicsBuffer occlusionDebugOverlay; public bool debugNeedsClear; public ComputeBuffer constantBuffer; public NativeArray constantBufferData; @@ -200,6 +207,9 @@ public Vector2 depthBufferSizeInOccluderPixels { public void Dispose() { + if 
(subviewData.IsCreated) + subviewData.Dispose(); + if (occluderMipBounds.IsCreated) occluderMipBounds.Dispose(); @@ -208,10 +218,10 @@ public void Dispose() occluderDepthPyramid.Release(); occluderDepthPyramid = null; } - if (debugPyramid != null) + if (occlusionDebugOverlay != null) { - debugPyramid.Release(); - debugPyramid = null; + occlusionDebugOverlay.Release(); + occlusionDebugOverlay = null; } if (constantBuffer != null) { @@ -223,7 +233,7 @@ public void Dispose() constantBufferData.Dispose(); } - private Vector2Int UpdateMipBounds() + private void UpdateMipBounds() { int occluderPixelSize = 1 << k_FirstDepthMipIndex; Vector2Int topMipSize = (depthBufferSize + (occluderPixelSize - 1) * Vector2Int.one) / occluderPixelSize; @@ -254,51 +264,51 @@ private Vector2Int UpdateMipBounds() mipSize.x = (mipSize.x + 1) / 2; mipSize.y = (mipSize.y + 1) / 2; } - return totalSize; + + occluderMipLayoutSize = totalSize; } - private void AllocateTexturesIfNecessary(Vector2Int occluderTextureSize, Vector2Int debugTextureSize) + private void AllocateTexturesIfNecessary(bool debugOverlayEnabled) { - if (occluderTextureSize.x > this.occluderTextureSize.x || occluderTextureSize.y > this.occluderTextureSize.y) + Vector2Int minDepthPyramidSize = new Vector2Int(occluderMipLayoutSize.x, occluderMipLayoutSize.y * subviewCount); + if (occluderDepthPyramidSize.x < minDepthPyramidSize.x || occluderDepthPyramidSize.y < minDepthPyramidSize.y) { if (occluderDepthPyramid != null) occluderDepthPyramid.Release(); + occluderDepthPyramidSize = minDepthPyramidSize; occluderDepthPyramid = RTHandles.Alloc( - occluderTextureSize.x, occluderTextureSize.y, - slices: Mathf.Max(depthSliceCount, 1), - dimension: (depthSliceCount == 0 ? TextureDimension.Tex2D : TextureDimension.Tex2DArray), + occluderDepthPyramidSize.x, occluderDepthPyramidSize.y, + dimension: TextureDimension.Tex2D, colorFormat: GraphicsFormat.R32_SFloat, filterMode: FilterMode.Point, wrapMode: TextureWrapMode.Clamp, enableRandomWrite: true, name: "Occluder Depths"); - - this.occluderTextureSize = occluderTextureSize; } - if (debugTextureSize.x > this.debugTextureSize.x || debugTextureSize.y > this.debugTextureSize.y) + int newDebugOverlaySize = debugOverlayEnabled ? 
(minDepthPyramidSize.x * minDepthPyramidSize.y) : 0;
+            if (occlusionDebugOverlaySize < newDebugOverlaySize)
             {
-                if (debugPyramid != null)
-                    debugPyramid.Release();
+                if (occlusionDebugOverlay != null)
+                    occlusionDebugOverlay.Release();
 
-                // We use buffer instead of texture, because some platforms don't support atmoic operations for Texture2D
-                debugPyramid = new GraphicsBuffer(GraphicsBuffer.Target.Structured, GraphicsBuffer.UsageFlags.None,
-                    debugTextureSize.x * debugTextureSize.y * Mathf.Max(depthSliceCount, 1) + (int)OcclusionCullingCommonConfig.DebugPyramidOffset, sizeof(uint));
+                occlusionDebugOverlaySize = newDebugOverlaySize;
+                debugNeedsClear = true;
 
-                this.debugTextureSize = debugTextureSize;
-                this.debugNeedsClear = true;
+                // We use buffer instead of texture, because some platforms don't support atomic operations for Texture2D
+                occlusionDebugOverlay = new GraphicsBuffer(GraphicsBuffer.Target.Structured, GraphicsBuffer.UsageFlags.None,
+                    occlusionDebugOverlaySize + (int)OcclusionCullingCommonConfig.DebugPyramidOffset, sizeof(uint));
             }
-
-            if (debugTextureSize.x == 0)
+            if (newDebugOverlaySize == 0)
             {
-                if (debugPyramid != null)
+                if (occlusionDebugOverlay != null)
                 {
-                    debugPyramid.Release();
-                    debugPyramid = null;
+                    occlusionDebugOverlay.Release();
+                    occlusionDebugOverlay = null;
                 }
-                this.debugTextureSize = debugTextureSize;
+                occlusionDebugOverlaySize = newDebugOverlaySize;
             }
 
             if (constantBuffer == null)
@@ -316,59 +326,93 @@ internal static void SetKeyword(ComputeCommandBuffer cmd, ComputeShader cs, in L
                 cmd.DisableKeyword(cs, keyword);
         }
 
-        public void CreateFarDepthPyramid(ComputeCommandBuffer cmd, in OccluderParameters occluderParams, in OccluderHandles occluderHandles, NativeArray<Plane> silhouettePlanes, ComputeShader occluderDepthPyramidCS, int occluderDepthDownscaleKernel)
+        private OccluderDepthPyramidConstants SetupFarDepthPyramidConstants(ReadOnlySpan<OccluderSubviewUpdate> occluderSubviewUpdates, NativeArray<Plane> silhouettePlanes)
+        {
+            OccluderDepthPyramidConstants cb = new OccluderDepthPyramidConstants();
+
+            // write globals
+            cb._OccluderMipLayoutSizeX = (uint)occluderMipLayoutSize.x;
+            cb._OccluderMipLayoutSizeY = (uint)occluderMipLayoutSize.y;
+
+            // write per-subview data
+            int updateCount = occluderSubviewUpdates.Length;
+            for (int updateIndex = 0; updateIndex < updateCount; ++updateIndex)
+            {
+                ref readonly OccluderSubviewUpdate update = ref occluderSubviewUpdates[updateIndex];
+
+                int subviewIndex = update.subviewIndex;
+                subviewData[subviewIndex] = OccluderDerivedData.FromParameters(update);
+                subviewValidMask |= 1 << update.subviewIndex;
+
+                Matrix4x4 viewProjMatrix
+                    = update.gpuProjMatrix
+                    * update.viewMatrix
+                    * Matrix4x4.Translate(-update.viewOffsetWorldSpace);
+                Matrix4x4 invViewProjMatrix = viewProjMatrix.inverse;
+
+                unsafe
+                {
+                    for (int j = 0; j < 16; ++j)
+                        cb._InvViewProjMatrix[16 * updateIndex + j] = invViewProjMatrix[j];
+
+                    cb._SrcOffset[4 * updateIndex + 0] = (uint)update.depthOffset.x;
+                    cb._SrcOffset[4 * updateIndex + 1] = (uint)update.depthOffset.y;
+                    cb._SrcOffset[4 * updateIndex + 2] = 0;
+                    cb._SrcOffset[4 * updateIndex + 3] = 0;
+                }
+
+                cb._SrcSliceIndices |= (((uint)update.depthSliceIndex & 0xf) << (4 * updateIndex));
+                cb._DstSubviewIndices |= ((uint)subviewIndex << (4 * updateIndex));
+            }
+
+            // TODO: transform these planes from world space into NDC space planes
+            for (int i = 0; i < k_MaxSilhouettePlanes; ++i)
+            {
+                Plane plane = new Plane(Vector3.zero, 0.0f);
+                if (i < silhouettePlanes.Length)
+                    plane = silhouettePlanes[i];
+                unsafe
+                {
+                    cb._SilhouettePlanes[4 *
i + 0] = plane.normal.x; + cb._SilhouettePlanes[4 * i + 1] = plane.normal.y; + cb._SilhouettePlanes[4 * i + 2] = plane.normal.z; + cb._SilhouettePlanes[4 * i + 3] = plane.distance; + } + } + cb._SilhouettePlaneCount = (uint)silhouettePlanes.Length; + + return cb; + } + + public void CreateFarDepthPyramid(ComputeCommandBuffer cmd, in OccluderParameters occluderParams, ReadOnlySpan occluderSubviewUpdates, in OccluderHandles occluderHandles, NativeArray silhouettePlanes, ComputeShader occluderDepthPyramidCS, int occluderDepthDownscaleKernel) { - Matrix4x4 viewProjMatrix - = occluderParams.gpuProjMatrix - * occluderParams.viewMatrix - * Matrix4x4.Translate(-occluderParams.viewOffsetWorldSpace); - Matrix4x4 invViewProjMatrix = viewProjMatrix.inverse; + OccluderDepthPyramidConstants cb = SetupFarDepthPyramidConstants(occluderSubviewUpdates, silhouettePlanes); var cs = occluderDepthPyramidCS; int kernel = occluderDepthDownscaleKernel; var srcKeyword = new LocalKeyword(cs, "USE_SRC"); - var arrayKeyword = new LocalKeyword(cs, "USE_ARRAY"); - var msaaSrcKeyword = new LocalKeyword(cs, "USE_MSAA_SRC"); + var srcIsArrayKeyword = new LocalKeyword(cs, "SRC_IS_ARRAY"); + var srcIsMsaaKeyword = new LocalKeyword(cs, "SRC_IS_MSAA"); - bool useArray = (occluderParams.depthSliceCount != 0); + bool srcIsArray = occluderParams.depthIsArray; RTHandle depthTexture = (RTHandle)occluderParams.depthTexture; - bool useMSAA = depthTexture?.isMSAAEnabled ?? false; - - SetKeyword(cmd, cs, arrayKeyword, useArray); + bool srcIsMsaa = depthTexture?.isMSAAEnabled ?? false; int mipCount = k_FirstDepthMipIndex + k_MaxOccluderMips; for (int mipIndexBase = 0; mipIndexBase < mipCount - 1; mipIndexBase += 4) { - OccluderDepthPyramidConstants cb = new OccluderDepthPyramidConstants(); - cmd.SetComputeTextureParam(cs, kernel, ShaderIDs._DstDepth, occluderHandles.occluderDepthPyramid); bool useSrc = (mipIndexBase == 0); SetKeyword(cmd, cs, srcKeyword, useSrc); - SetKeyword(cmd, cs, msaaSrcKeyword, useSrc && useMSAA); + SetKeyword(cmd, cs, srcIsArrayKeyword, useSrc && srcIsArray); + SetKeyword(cmd, cs, srcIsMsaaKeyword, useSrc && srcIsMsaa); if (useSrc) cmd.SetComputeTextureParam(cs, kernel, ShaderIDs._SrcDepth, occluderParams.depthTexture); cb._MipCount = (uint)Math.Min(mipCount - 1 - mipIndexBase, 4); - cb._SilhouettePlaneCount = (uint)silhouettePlanes.Length; - cb._InvViewProjMatrix = invViewProjMatrix; - - // TODO: transform these planes from world space into NDC space planes - for (int i = 0; i < k_MaxSilhouettePlanes; ++i) - { - Plane plane = new Plane(Vector3.zero, 0.0f); - if (i < silhouettePlanes.Length) - plane = silhouettePlanes[i]; - unsafe - { - cb._SilhouettePlanes[4 * i + 0] = plane.normal.x; - cb._SilhouettePlanes[4 * i + 1] = plane.normal.y; - cb._SilhouettePlanes[4 * i + 2] = plane.normal.z; - cb._SilhouettePlanes[4 * i + 3] = plane.distance; - } - } Vector2Int srcSize = Vector2Int.zero; for (int i = 0; i < 5; ++i) @@ -378,7 +422,6 @@ Matrix4x4 viewProjMatrix int mipIndex = mipIndexBase + i; if (mipIndex == 0) { - offset = occluderParams.depthOffset; size = occluderParams.depthSize; } else @@ -405,7 +448,7 @@ Matrix4x4 viewProjMatrix cmd.SetBufferData(constantBuffer, constantBufferData); cmd.SetComputeConstantBufferParam(cs, ShaderIDs.OccluderDepthPyramidConstants, constantBuffer, 0, constantBuffer.stride); - cmd.DispatchCompute(cs, kernel, (srcSize.x + 15) / 16, (srcSize.y + 15) / 16, 1); + cmd.DispatchCompute(cs, kernel, (srcSize.x + 15) / 16, (srcSize.y + 15) / 16, occluderSubviewUpdates.Length); } } @@ -413,9 
+456,9 @@ public OccluderHandles Import(RenderGraph renderGraph) { RenderTargetInfo rtInfo = new RenderTargetInfo { - width = occluderTextureSize.x, - height = occluderTextureSize.y, - volumeDepth = Mathf.Max(depthSliceCount, 1), + width = occluderDepthPyramidSize.x, + height = occluderDepthPyramidSize.y, + volumeDepth = 1, msaaSamples = 1, format = GraphicsFormat.R32_SFloat, bindMS = false, @@ -424,34 +467,40 @@ public OccluderHandles Import(RenderGraph renderGraph) { occluderDepthPyramid = renderGraph.ImportTexture(occluderDepthPyramid, rtInfo) }; - if (debugPyramid != null) - occluderHandles.debugPyramid = renderGraph.ImportBuffer(debugPyramid); + if (occlusionDebugOverlay != null) + occluderHandles.occlusionDebugOverlay = renderGraph.ImportBuffer(occlusionDebugOverlay); return occluderHandles; } public void PrepareOccluders(in OccluderParameters occluderParams) { - cameraData = OccluderDerivedData.FromParameters(occluderParams); + if (subviewCount != occluderParams.subviewCount) + { + if (subviewData.IsCreated) + subviewData.Dispose(); + + subviewData = new NativeArray(occluderParams.subviewCount, Allocator.Persistent); + subviewValidMask = 0; + } depthBufferSize = occluderParams.depthSize; - depthSliceCount = occluderParams.depthSliceCount; // enable debug counters for cameras when the overlay is enabled bool debugOverlayEnabled = GPUResidentDrawer.GetDebugStats()?.occlusionOverlayEnabled ?? false; - - Vector2Int newTextureSize = UpdateMipBounds(); - AllocateTexturesIfNecessary(newTextureSize, debugOverlayEnabled ? newTextureSize : Vector2Int.zero); + UpdateMipBounds(); + AllocateTexturesIfNecessary(debugOverlayEnabled); } internal OcclusionCullingDebugOutput GetDebugOutput() { var debugOutput = new OcclusionCullingDebugOutput { - occluderTexture = occluderDepthPyramid, - debugPyramid = debugPyramid, + occluderDepthPyramid = occluderDepthPyramid, + occlusionDebugOverlay = occlusionDebugOverlay, }; debugOutput.cb._DepthSizeInOccluderPixels = depthBufferSizeInOccluderPixels; - debugOutput.cb._DebugPyramidSize = new Vector4(debugTextureSize.x, debugTextureSize.y, 0.0f, 0.0f); + debugOutput.cb._OccluderMipLayoutSizeX = (uint)occluderMipLayoutSize.x; + debugOutput.cb._OccluderMipLayoutSizeY = (uint)occluderMipLayoutSize.y; for (int i = 0; i < occluderMipBounds.Length; ++i) { var mipBounds = occluderMipBounds[i]; @@ -481,6 +530,30 @@ internal struct IndirectBufferLimits public int maxDrawCount; } + internal struct InstanceOcclusionTestSubviewSettings + { + public int testCount; + public int occluderSubviewIndices; + public int occluderSubviewMask; + public int cullingSplitIndices; + public int cullingSplitMask; + + public static InstanceOcclusionTestSubviewSettings FromSpan(ReadOnlySpan subviewOcclusionTests) + { + InstanceOcclusionTestSubviewSettings settings = new InstanceOcclusionTestSubviewSettings(); + for (int testIndex = 0; testIndex < subviewOcclusionTests.Length; ++testIndex) + { + SubviewOcclusionTest subviewTest = subviewOcclusionTests[testIndex]; + settings.occluderSubviewIndices |= subviewTest.occluderSubviewIndex << (4 * testIndex); + settings.occluderSubviewMask |= 1 << subviewTest.occluderSubviewIndex; + settings.cullingSplitIndices |= subviewTest.cullingSplitIndex << (4 * testIndex); + settings.cullingSplitMask |= 1 << subviewTest.cullingSplitIndex; + } + settings.testCount = subviewOcclusionTests.Length; + return settings; + } + } + internal struct IndirectBufferContextHandles { public BufferHandle instanceBuffer; diff --git 
a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceOcclusionCullerShaderVariables.cs b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceOcclusionCullerShaderVariables.cs index 819a1d33538..55b8c4a1b05 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceOcclusionCullerShaderVariables.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceOcclusionCullerShaderVariables.cs @@ -11,7 +11,7 @@ internal unsafe struct InstanceOcclusionCullerShaderVariables public uint _InstanceInfoCount; public int _BoundingSphereInstanceDataAddress; public int _DebugCounterIndex; + public int _InstanceMultiplierShift; public int _InstanceOcclusionCullerPad0; - public int _InstanceOcclusionCullerPad1; } } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceOcclusionCullerShaderVariables.cs.hlsl b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceOcclusionCullerShaderVariables.cs.hlsl index 4aa3c56e261..55ed9398004 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceOcclusionCullerShaderVariables.cs.hlsl +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceOcclusionCullerShaderVariables.cs.hlsl @@ -13,8 +13,8 @@ CBUFFER_START(InstanceOcclusionCullerShaderVariables) uint _InstanceInfoCount; int _BoundingSphereInstanceDataAddress; int _DebugCounterIndex; + int _InstanceMultiplierShift; int _InstanceOcclusionCullerPad0; - int _InstanceOcclusionCullerPad1; CBUFFER_END diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OccluderDepthPyramidConstants.cs b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OccluderDepthPyramidConstants.cs index 699489419f5..36a05eeb817 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OccluderDepthPyramidConstants.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OccluderDepthPyramidConstants.cs @@ -3,17 +3,26 @@ namespace UnityEngine.Rendering [GenerateHLSL(needAccessors = false, generateCBuffer = true)] internal unsafe struct OccluderDepthPyramidConstants { - public Matrix4x4 _InvViewProjMatrix; + [HLSLArray(OccluderContext.k_MaxSubviewsPerView, typeof(Matrix4x4))] + public fixed float _InvViewProjMatrix[OccluderContext.k_MaxSubviewsPerView * 16]; - [HLSLArray((int)OcclusionCullingCommonConfig.MaxOccluderSilhouettePlanes, typeof(Vector4))] - public fixed float _SilhouettePlanes[(int)OcclusionCullingCommonConfig.MaxOccluderSilhouettePlanes * 4]; + [HLSLArray(OccluderContext.k_MaxSilhouettePlanes, typeof(Vector4))] + public fixed float _SilhouettePlanes[OccluderContext.k_MaxSilhouettePlanes * 4]; + + [HLSLArray(OccluderContext.k_MaxSubviewsPerView, typeof(ShaderGenUInt4))] + public fixed uint _SrcOffset[OccluderContext.k_MaxSubviewsPerView * 4]; [HLSLArray(5, typeof(ShaderGenUInt4))] public fixed uint _MipOffsetAndSize[5 * 4]; - public uint _MipCount; - public uint _SilhouettePlaneCount; + public uint _OccluderMipLayoutSizeX; + public uint _OccluderMipLayoutSizeY; public uint _OccluderDepthPyramidPad0; public uint _OccluderDepthPyramidPad1; + + public uint _SrcSliceIndices; // packed 4 bits each + public uint _DstSubviewIndices; // packed 4 bits each + public uint _MipCount; + public uint _SilhouettePlaneCount; } } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OccluderDepthPyramidConstants.cs.hlsl b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OccluderDepthPyramidConstants.cs.hlsl index 
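A note on the constant-buffer change above: a C# struct used as a cbuffer cannot hold Matrix4x4[] fields, so the per-subview matrices are flattened into a fixed float blob while [HLSLArray] keeps the generated header declaring float4x4 _InvViewProjMatrix[6]. The flattening convention, sketched with a plain array (element j of subview i lands at 16 * i + j, matching the constructor later in this patch):

    const int maxSubviews = 6;                   // OccluderContext.k_MaxSubviewsPerView
    float[] invViewProj = new float[maxSubviews * 16];

    void StoreMatrix(int subview, float[] m16)   // m16: one column-major matrix, 16 floats
    {
        for (int j = 0; j < 16; ++j)
            invViewProj[16 * subview + j] = m16[j];
    }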
30627df126c..a52233460e2 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OccluderDepthPyramidConstants.cs.hlsl +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OccluderDepthPyramidConstants.cs.hlsl @@ -7,13 +7,18 @@ // Generated from UnityEngine.Rendering.OccluderDepthPyramidConstants // PackingRules = Exact CBUFFER_START(OccluderDepthPyramidConstants) - float4x4 _InvViewProjMatrix; + float4x4 _InvViewProjMatrix[6]; float4 _SilhouettePlanes[6]; + uint4 _SrcOffset[6]; uint4 _MipOffsetAndSize[5]; - uint _MipCount; - uint _SilhouettePlaneCount; + uint _OccluderMipLayoutSizeX; + uint _OccluderMipLayoutSizeY; uint _OccluderDepthPyramidPad0; uint _OccluderDepthPyramidPad1; + uint _SrcSliceIndices; + uint _DstSubviewIndices; + uint _MipCount; + uint _SilhouettePlaneCount; CBUFFER_END diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommon.cs b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommon.cs index d34ea42e2d4..97a6a836071 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommon.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommon.cs @@ -2,6 +2,7 @@ using System.Collections.Generic; using Unity.Collections; using Unity.Collections.LowLevel.Unsafe; +using Unity.Mathematics; using UnityEngine.Rendering.RenderGraphModule; namespace UnityEngine.Rendering @@ -11,19 +12,25 @@ internal enum OcclusionCullingCommonConfig { MaxOccluderMips = 8, MaxOccluderSilhouettePlanes = 6, - DebugPyramidOffset = 4, + MaxSubviewsPerView = 6, + DebugPyramidOffset = 4, // TODO: rename + } + + [GenerateHLSL(needAccessors = false)] + internal enum OcclusionTestDebugFlag + { + AlwaysPass = (1 << 0), + CountVisible = (1 << 1), } internal struct OcclusionTestComputeShader { public ComputeShader cs; - public LocalKeyword useArrayKeyword; public LocalKeyword occlusionDebugKeyword; public void Init(ComputeShader cs) { this.cs = cs; - this.useArrayKeyword = new LocalKeyword(cs, "USE_ARRAY"); this.occlusionDebugKeyword = new LocalKeyword(cs, "OCCLUSION_DEBUG"); } } @@ -160,7 +167,7 @@ private struct OccluderContextSlot private SilhouettePlaneCache m_SilhouettePlaneCache; - private NativeParallelHashMap m_SubviewIDToIndexMap; + private NativeParallelHashMap m_ViewIDToIndexMap; private List m_OccluderContextData; private NativeList m_OccluderContextSlots; private NativeList m_FreeOccluderContexts; @@ -187,7 +194,7 @@ internal void Init(GPUResidentDrawerResources resources) m_SilhouettePlaneCache.Init(); - m_SubviewIDToIndexMap = new NativeParallelHashMap(64, Allocator.Persistent); + m_ViewIDToIndexMap = new NativeParallelHashMap(64, Allocator.Persistent); m_OccluderContextData = new List(); m_OccluderContextSlots = new NativeList(64, Allocator.Persistent); m_FreeOccluderContexts = new NativeList(64, Allocator.Persistent); @@ -206,39 +213,32 @@ private static class ShaderIDs { public static readonly int OcclusionCullingCommonShaderVariables = Shader.PropertyToID("OcclusionCullingCommonShaderVariables"); public static readonly int _OccluderDepthPyramid = Shader.PropertyToID("_OccluderDepthPyramid"); - public static readonly int _OcclusionDebugPyramid = Shader.PropertyToID("_OcclusionDebugPyramid"); + public static readonly int _OcclusionDebugOverlay = Shader.PropertyToID("_OcclusionDebugOverlay"); - public static readonly int _OcclusionDebugPyramidOverlay = Shader.PropertyToID("_OcclusionDebugPyramidOverlay"); public static readonly int 
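The new OcclusionTestDebugFlag enum folds what used to be two separate ints (_DebugAlwaysPassOcclusionTest and _DebugOverlayCountOccluded) into one bitfield. A small sketch of how the flags compose and get tested, with constants mirroring the generated OCCLUSIONTESTDEBUGFLAG_* defines:

    const uint AlwaysPass = 1u << 0, CountVisible = 1u << 1;

    bool overrideToAlwaysPass = true, overlayCountsVisible = false;   // example toggles
    uint flags = (overrideToAlwaysPass ? AlwaysPass : 0u)
               | (overlayCountsVisible ? CountVisible : 0u);

    bool skipOcclusionTest = (flags & AlwaysPass) != 0;               // the shader-side check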
OcclusionCullingDebugShaderVariables = Shader.PropertyToID("OcclusionCullingDebugShaderVariables"); } - internal static bool UseArray(in OccluderContext occluderCtx) - { - return occluderCtx.depthSliceCount != 0; - } - internal static bool UseOcclusionDebug(in OccluderContext occluderCtx) { - return occluderCtx.debugTextureSize.x != 0; + return occluderCtx.occlusionDebugOverlaySize != 0; } - internal void PrepareCulling(ComputeCommandBuffer cmd, in OccluderContext occluderCtx, int viewInstanceID, int cullingSplitIndex, in OcclusionTestComputeShader shader, bool useArray, bool useOcclusionDebug) + internal void PrepareCulling(ComputeCommandBuffer cmd, in OccluderContext occluderCtx, in OcclusionCullingSettings settings, in InstanceOcclusionTestSubviewSettings subviewSettings, in OcclusionTestComputeShader shader, bool useOcclusionDebug) { - OccluderContext.SetKeyword(cmd, shader.cs, shader.useArrayKeyword, useArray); OccluderContext.SetKeyword(cmd, shader.cs, shader.occlusionDebugKeyword, useOcclusionDebug); var debugStats = GPUResidentDrawer.GetDebugStats(); m_CommonShaderVariables[0] = new OcclusionCullingCommonShaderVariables( in occluderCtx, - cullingSplitIndex, + subviewSettings, debugStats?.occlusionOverlayCountVisible ?? false, debugStats?.overrideOcclusionTestToAlwaysPass ?? false); cmd.SetBufferData(m_CommonConstantBuffer, m_CommonShaderVariables); cmd.SetComputeConstantBufferParam(shader.cs, ShaderIDs.OcclusionCullingCommonShaderVariables, m_CommonConstantBuffer, 0, m_CommonConstantBuffer.stride); - DispatchDebugClear(cmd, viewInstanceID); + DispatchDebugClear(cmd, settings.viewInstanceID); } internal static void SetDepthPyramid(ComputeCommandBuffer cmd, in OcclusionTestComputeShader shader, int kernel, in OccluderHandles occluderHandles) @@ -248,7 +248,7 @@ internal static void SetDepthPyramid(ComputeCommandBuffer cmd, in OcclusionTestC internal static void SetDebugPyramid(ComputeCommandBuffer cmd, in OcclusionTestComputeShader shader, int kernel, in OccluderHandles occluderHandles) { - cmd.SetComputeBufferParam(shader.cs, kernel, ShaderIDs._OcclusionDebugPyramid, occluderHandles.debugPyramid); + cmd.SetComputeBufferParam(shader.cs, kernel, ShaderIDs._OcclusionDebugOverlay, occluderHandles.occlusionDebugOverlay); } private class OcclusionTestOverlaySetupPassData @@ -269,7 +269,7 @@ public void RenderDebugOcclusionTestOverlay(RenderGraph renderGraph, DebugDispla return; OcclusionCullingDebugOutput debugOutput = GetOcclusionTestDebugOutput(viewInstanceID); - if (debugOutput.debugPyramid == null) + if (debugOutput.occlusionDebugOverlay == null) return; using (var builder = renderGraph.AddComputePass("OcclusionTestOverlay", out var passData, m_ProfilingSamplerOcclusionTestOverlay)) @@ -298,7 +298,7 @@ public void RenderDebugOcclusionTestOverlay(RenderGraph renderGraph, DebugDispla { builder.AllowGlobalStateModification(true); - passData.debugPyramid = renderGraph.ImportBuffer(debugOutput.debugPyramid); + passData.debugPyramid = renderGraph.ImportBuffer(debugOutput.occlusionDebugOverlay); builder.SetRenderAttachment(colorBuffer, 0); builder.UseBuffer(passData.debugPyramid); @@ -306,7 +306,7 @@ public void RenderDebugOcclusionTestOverlay(RenderGraph renderGraph, DebugDispla builder.SetRenderFunc( (OcclusionTestOverlayPassData data, RasterGraphContext ctx) => { - ctx.cmd.SetGlobalBuffer(ShaderIDs._OcclusionDebugPyramidOverlay, data.debugPyramid); + ctx.cmd.SetGlobalBuffer(ShaderIDs._OcclusionDebugOverlay, data.debugPyramid); CoreUtils.DrawFullScreen(ctx.cmd, 
m_DebugOcclusionTestMaterial); }); } @@ -335,17 +335,15 @@ public void RenderDebugOccluderOverlay(RenderGraph renderGraph, DebugDisplayGPUR if (!debugSettings.occluderDebugViewEnable) return; - int viewInstanceID = debugSettings.GetOccluderViewInstanceID(); - if (viewInstanceID == 0) + if (!debugSettings.GetOccluderViewInstanceID(out var viewInstanceID)) return; - var occluderTexture = GetOcclusionTestDebugOutput(viewInstanceID).occluderTexture; + var occluderTexture = GetOcclusionTestDebugOutput(viewInstanceID).occluderDepthPyramid; if (occluderTexture == null) return; Material debugMaterial = m_OccluderDebugViewMaterial; - bool isArrayTexture = occluderTexture.rt.dimension == TextureDimension.Tex2DArray; - int passIndex = isArrayTexture ? debugMaterial.FindPass("DebugOccluder_Array") : debugMaterial.FindPass("DebugOccluder"); + int passIndex = debugMaterial.FindPass("DebugOccluder"); Vector2 outputSize = occluderTexture.referenceSize; float scaleFactor = maxHeight / outputSize.y; @@ -380,7 +378,7 @@ public void RenderDebugOccluderOverlay(RenderGraph renderGraph, DebugDisplayGPUR private void DispatchDebugClear(ComputeCommandBuffer cmd, int viewInstanceID) { - if (!m_SubviewIDToIndexMap.TryGetValue(viewInstanceID, out var contextIndex)) + if (!m_ViewIDToIndexMap.TryGetValue(viewInstanceID, out var contextIndex)) return; OccluderContext occluderCtx = m_OccluderContextData[contextIndex]; @@ -392,10 +390,10 @@ private void DispatchDebugClear(ComputeCommandBuffer cmd, int viewInstanceID) cmd.SetComputeConstantBufferParam(cs, ShaderIDs.OcclusionCullingCommonShaderVariables, m_CommonConstantBuffer, 0, m_CommonConstantBuffer.stride); - cmd.SetComputeBufferParam(cs, kernel, ShaderIDs._OcclusionDebugPyramid, occluderCtx.debugPyramid); + cmd.SetComputeBufferParam(cs, kernel, ShaderIDs._OcclusionDebugOverlay, occluderCtx.occlusionDebugOverlay); Vector2Int mip0Size = occluderCtx.occluderMipBounds[0].size; - cmd.DispatchCompute(cs, kernel, (mip0Size.x + 7) / 8, (mip0Size.y + 7) / 8, Mathf.Max(occluderCtx.depthSliceCount, 1)); + cmd.DispatchCompute(cs, kernel, (mip0Size.x + 7) / 8, (mip0Size.y + 7) / 8, occluderCtx.subviewCount); // mark as cleared in the dictionary occluderCtx.debugNeedsClear = false; @@ -408,7 +406,7 @@ private OccluderHandles PrepareOccluders(RenderGraph renderGraph, in OccluderPar OccluderHandles occluderHandles = new OccluderHandles(); if (occluderParams.depthTexture.IsValid()) { - if (!m_SubviewIDToIndexMap.TryGetValue(occluderParams.viewInstanceID, out var contextIndex)) + if (!m_ViewIDToIndexMap.TryGetValue(occluderParams.viewInstanceID, out var contextIndex)) contextIndex = NewContext(occluderParams.viewInstanceID); OccluderContext ctx = m_OccluderContextData[contextIndex]; @@ -423,15 +421,15 @@ private OccluderHandles PrepareOccluders(RenderGraph renderGraph, in OccluderPar return occluderHandles; } - private void CreateFarDepthPyramid(ComputeCommandBuffer cmd, in OccluderParameters occluderParams, in OccluderHandles occluderHandles) + private void CreateFarDepthPyramid(ComputeCommandBuffer cmd, in OccluderParameters occluderParams, ReadOnlySpan occluderSubviewUpdates, in OccluderHandles occluderHandles) { - if (!m_SubviewIDToIndexMap.TryGetValue(occluderParams.viewInstanceID, out var contextIndex)) + if (!m_ViewIDToIndexMap.TryGetValue(occluderParams.viewInstanceID, out var contextIndex)) return; var silhouettePlanes = m_SilhouettePlaneCache.GetSubArray(occluderParams.viewInstanceID); OccluderContext ctx = m_OccluderContextData[contextIndex]; - ctx.CreateFarDepthPyramid(cmd, 
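Both dispatches in this file size their thread groups with the same integer ceiling division, so partially covered tiles still get a group; the Z dimension is now plainly subviewCount because the overlay stores one stacked region per subview instead of texture-array slices. The idiom, spelled out as a sketch:

    static int DivRoundUp(int n, int tile) => (n + tile - 1) / tile;

    // (mip0Size.x + 7) / 8 above is DivRoundUp(mip0Size.x, 8); the pyramid pass's
    // (srcSize.x + 15) / 16 is DivRoundUp(srcSize.x, 16). For example, a
    // 100-pixel-wide mip still clears its final partial 8x8 tile:
    // DivRoundUp(100, 8) == 13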
occluderParams, occluderHandles, silhouettePlanes, m_OccluderDepthPyramidCS, m_OccluderDepthDownscaleKernel); + ctx.CreateFarDepthPyramid(cmd, occluderParams, occluderSubviewUpdates, occluderHandles, silhouettePlanes, m_OccluderDepthPyramidCS, m_OccluderDepthDownscaleKernel); ctx.version++; m_OccluderContextData[contextIndex] = ctx; @@ -443,10 +441,11 @@ private void CreateFarDepthPyramid(ComputeCommandBuffer cmd, in OccluderParamete private class UpdateOccludersPassData { public OccluderParameters occluderParams; + public List occluderSubviewUpdates; public OccluderHandles occluderHandles; } - public bool UpdateInstanceOccluders(RenderGraph renderGraph, in OccluderParameters occluderParams) + public bool UpdateInstanceOccluders(RenderGraph renderGraph, in OccluderParameters occluderParams, ReadOnlySpan occluderSubviewUpdates) { var occluderHandles = PrepareOccluders(renderGraph, occluderParams); if (!occluderHandles.occluderDepthPyramid.IsValid()) @@ -457,6 +456,12 @@ public bool UpdateInstanceOccluders(RenderGraph renderGraph, in OccluderParamete builder.AllowGlobalStateModification(true); passData.occluderParams = occluderParams; + if (passData.occluderSubviewUpdates is null) + passData.occluderSubviewUpdates = new List(); + else + passData.occluderSubviewUpdates.Clear(); + for (int i = 0; i < occluderSubviewUpdates.Length; ++i) + passData.occluderSubviewUpdates.Add(occluderSubviewUpdates[i]); passData.occluderHandles = occluderHandles; builder.UseTexture(passData.occluderParams.depthTexture); @@ -465,9 +470,17 @@ public bool UpdateInstanceOccluders(RenderGraph renderGraph, in OccluderParamete builder.SetRenderFunc( (UpdateOccludersPassData data, ComputeGraphContext context) => { + Span occluderSubviewUpdates = stackalloc OccluderSubviewUpdate[data.occluderSubviewUpdates.Count]; + int subviewMask = 0; + for (int i = 0; i < data.occluderSubviewUpdates.Count; ++i) + { + occluderSubviewUpdates[i] = data.occluderSubviewUpdates[i]; + subviewMask |= 1 << data.occluderSubviewUpdates[i].subviewIndex; + } + var batcher = GPUResidentDrawer.instance.batcher; - batcher.occlusionCullingCommon.CreateFarDepthPyramid(context.cmd, in data.occluderParams, in data.occluderHandles); - batcher.instanceCullingBatcher.InstanceOccludersUpdated(data.occluderParams.viewInstanceID); + batcher.occlusionCullingCommon.CreateFarDepthPyramid(context.cmd, in data.occluderParams, occluderSubviewUpdates, in data.occluderHandles); + batcher.instanceCullingBatcher.InstanceOccludersUpdated(data.occluderParams.viewInstanceID, subviewMask); }); } @@ -481,7 +494,7 @@ internal void UpdateSilhouettePlanes(int viewInstanceID, NativeArray plan internal OcclusionCullingDebugOutput GetOcclusionTestDebugOutput(int viewInstanceID) { - if (m_SubviewIDToIndexMap.TryGetValue(viewInstanceID, out var contextIndex) && m_OccluderContextSlots[contextIndex].valid) + if (m_ViewIDToIndexMap.TryGetValue(viewInstanceID, out var contextIndex) && m_OccluderContextSlots[contextIndex].valid) return m_OccluderContextData[contextIndex].GetDebugOutput(); return new OcclusionCullingDebugOutput(); } @@ -489,14 +502,15 @@ internal OcclusionCullingDebugOutput GetOcclusionTestDebugOutput(int viewInstanc public void UpdateOccluderStats(DebugRendererBatcherStats debugStats) { debugStats.occluderStats.Clear(); - foreach (var pair in m_SubviewIDToIndexMap) + foreach (var pair in m_ViewIDToIndexMap) { if (pair.Value < m_OccluderContextSlots.Length && m_OccluderContextSlots[pair.Value].valid) { debugStats.occluderStats.Add(new DebugOccluderStats { viewInstanceID = 
pair.Key, - occluderTextureSize = m_OccluderContextData[pair.Value].occluderTextureSize, + subviewCount = m_OccluderContextData[pair.Value].subviewCount, + occluderMipLayoutSize = m_OccluderContextData[pair.Value].occluderMipLayoutSize, }); } } @@ -504,12 +518,12 @@ public void UpdateOccluderStats(DebugRendererBatcherStats debugStats) internal bool HasOccluderContext(int viewInstanceID) { - return m_SubviewIDToIndexMap.ContainsKey(viewInstanceID); + return m_ViewIDToIndexMap.ContainsKey(viewInstanceID); } internal bool GetOccluderContext(int viewInstanceID, out OccluderContext occluderContext) { - if (m_SubviewIDToIndexMap.TryGetValue(viewInstanceID, out var contextIndex) && m_OccluderContextSlots[contextIndex].valid) + if (m_ViewIDToIndexMap.TryGetValue(viewInstanceID, out var contextIndex) && m_OccluderContextSlots[contextIndex].valid) { occluderContext = m_OccluderContextData[contextIndex]; return true; @@ -561,19 +575,19 @@ private int NewContext(int viewInstanceID) m_OccluderContextSlots.Add(newCtxSlot); } - m_SubviewIDToIndexMap.Add(viewInstanceID, newSlot); + m_ViewIDToIndexMap.Add(viewInstanceID, newSlot); return newSlot; } private void DeleteContext(int viewInstanceID) { - if (!m_SubviewIDToIndexMap.TryGetValue(viewInstanceID, out var contextIndex) || !m_OccluderContextSlots[contextIndex].valid) + if (!m_ViewIDToIndexMap.TryGetValue(viewInstanceID, out var contextIndex) || !m_OccluderContextSlots[contextIndex].valid) return; m_OccluderContextData[contextIndex].Dispose(); m_OccluderContextSlots[contextIndex] = new OccluderContextSlot { valid = false }; m_FreeOccluderContexts.Add(contextIndex); - m_SubviewIDToIndexMap.Remove(viewInstanceID); + m_ViewIDToIndexMap.Remove(viewInstanceID); } public void Dispose() @@ -589,7 +603,7 @@ public void Dispose() m_SilhouettePlaneCache.Dispose(); - m_SubviewIDToIndexMap.Dispose(); + m_ViewIDToIndexMap.Dispose(); m_FreeOccluderContexts.Dispose(); m_OccluderContextData.Clear(); m_OccluderContextSlots.Dispose(); diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommon.cs.hlsl b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommon.cs.hlsl index 4805e3f49d0..84c4a7b33f5 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommon.cs.hlsl +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommon.cs.hlsl @@ -9,7 +9,14 @@ // #define OCCLUSIONCULLINGCOMMONCONFIG_MAX_OCCLUDER_MIPS (8) #define OCCLUSIONCULLINGCOMMONCONFIG_MAX_OCCLUDER_SILHOUETTE_PLANES (6) +#define OCCLUSIONCULLINGCOMMONCONFIG_MAX_SUBVIEWS_PER_VIEW (6) #define OCCLUSIONCULLINGCOMMONCONFIG_DEBUG_PYRAMID_OFFSET (4) +// +// UnityEngine.Rendering.OcclusionTestDebugFlag: static fields +// +#define OCCLUSIONTESTDEBUGFLAG_ALWAYS_PASS (1) +#define OCCLUSIONTESTDEBUGFLAG_COUNT_VISIBLE (2) + #endif diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommon.hlsl b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommon.hlsl index 64a664e3abb..ce94d17c4cf 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommon.hlsl +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommon.hlsl @@ -2,35 +2,33 @@ #define _OCCLUSION_CULLING_COMMON_H // If using this the shader should add -// #pragma multi_compile _ USE_ARRAY // #pragma multi_compile _ OCCLUSION_DEBUG // before including this file #include 
"Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommon.cs.hlsl" #include "Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommonShaderVariables.cs.hlsl" -#include "Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingDefine.hlsl" #include "Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionTestCommon.hlsl" #include "Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/GeometryUtilities.hlsl" #define OCCLUSION_ENABLE_GATHER_TRIM 1 -TEXTURE2D_A(_OccluderDepthPyramid); +TEXTURE2D(_OccluderDepthPyramid); SAMPLER(s_linear_clamp_sampler); #ifdef OCCLUSION_DEBUG -RWStructuredBuffer _OcclusionDebugPyramid; +RWStructuredBuffer _OcclusionDebugOverlay; -uint toDebugPyramidCoord(uint2 coord) +uint OcclusionDebugOverlayOffset(uint2 coord) { - return OCCLUSIONCULLINGCOMMONCONFIG_DEBUG_PYRAMID_OFFSET + coord.x + _DebugPyramidSize.x * (coord.y + g_slice_index * _DebugPyramidSize.y); + return OCCLUSIONCULLINGCOMMONCONFIG_DEBUG_PYRAMID_OFFSET + coord.x + _OccluderMipLayoutSizeX * coord.y; } #endif -bool IsOcclusionVisible(float3 frontCenterPosRWS, float2 centerPosNDC, float2 radialPosNDC) +bool IsOcclusionVisible(float3 frontCenterPosRWS, float2 centerPosNDC, float2 radialPosNDC, int subviewIndex) { bool isVisible = true; - float queryClosestDepth = ComputeNormalizedDeviceCoordinatesWithZ(frontCenterPosRWS, _ViewProjMatrix).z; - bool isBehindCamera = dot(frontCenterPosRWS, _FacingDirWorldSpace.xyz) >= 0.f; + float queryClosestDepth = ComputeNormalizedDeviceCoordinatesWithZ(frontCenterPosRWS, _ViewProjMatrix[subviewIndex]).z; + bool isBehindCamera = dot(frontCenterPosRWS, _FacingDirWorldSpace[subviewIndex].xyz) >= 0.f; float2 centerCoordInTopMip = centerPosNDC * _DepthSizeInOccluderPixels.xy; float radiusInPixels = length((radialPosNDC - centerPosNDC) * _DepthSizeInOccluderPixels.xy); @@ -44,30 +42,33 @@ bool IsOcclusionVisible(float3 frontCenterPosRWS, float2 centerPosNDC, float2 ra // scale our coordinate to this mip float2 centerCoordInChosenMip = ldexp(centerCoordInTopMip, -mipLevel); int4 mipBounds = _OccluderMipBounds[mipLevel]; + mipBounds.y += subviewIndex * _OccluderMipLayoutSizeY; - if (!_DebugAlwaysPassOcclusionTest) + if ((_OcclusionTestDebugFlags & OCCLUSIONTESTDEBUGFLAG_ALWAYS_PASS) == 0) { // gather4 occluder depths to cover this radius - float2 gatherUv = (float2(mipBounds.xy) + clamp(centerCoordInChosenMip, .5f, float2(mipBounds.zw) - .5f)) * _OccluderTextureSize.zw; - float4 gatherDepths = GATHER_TEXTURE2D_A(_OccluderDepthPyramid, s_linear_clamp_sampler, gatherUv); + float2 gatherUv = (float2(mipBounds.xy) + clamp(centerCoordInChosenMip, .5f, float2(mipBounds.zw) - .5f)) * _OccluderDepthPyramidSize.zw; + float4 gatherDepths = GATHER_TEXTURE2D(_OccluderDepthPyramid, s_linear_clamp_sampler, gatherUv); float occluderDepth = FarthestDepth(gatherDepths); isVisible = IsVisibleAfterOcclusion(occluderDepth, queryClosestDepth); } - + #ifdef OCCLUSION_DEBUG // show footprint of gather4 in debug output - int countForOverlay = _DebugOverlayCountOccluded ^ (isVisible ? 
1 : 0); - if (countForOverlay != 0) + bool countForOverlay = ((_OcclusionTestDebugFlags & OCCLUSIONTESTDEBUGFLAG_COUNT_VISIBLE) != 0); + if (!isVisible) + countForOverlay = !countForOverlay; + if (countForOverlay) { uint2 debugCoord = mipBounds.xy + uint2(clamp(int2(centerCoordInChosenMip - .5f), 0, mipBounds.zw - 2)); - InterlockedAdd(_OcclusionDebugPyramid[toDebugPyramidCoord(debugCoord + uint2(0, 0))], 1); - InterlockedAdd(_OcclusionDebugPyramid[toDebugPyramidCoord(debugCoord + uint2(1, 0))], 1); - InterlockedAdd(_OcclusionDebugPyramid[toDebugPyramidCoord(debugCoord + uint2(0, 1))], 1); - InterlockedAdd(_OcclusionDebugPyramid[toDebugPyramidCoord(debugCoord + uint2(1, 1))], 1); + InterlockedAdd(_OcclusionDebugOverlay[OcclusionDebugOverlayOffset(debugCoord + uint2(0, 0))], 1); + InterlockedAdd(_OcclusionDebugOverlay[OcclusionDebugOverlayOffset(debugCoord + uint2(1, 0))], 1); + InterlockedAdd(_OcclusionDebugOverlay[OcclusionDebugOverlayOffset(debugCoord + uint2(0, 1))], 1); + InterlockedAdd(_OcclusionDebugOverlay[OcclusionDebugOverlayOffset(debugCoord + uint2(1, 1))], 1); // accumulate the total in the first slot - InterlockedAdd(_OcclusionDebugPyramid[0], 1); + InterlockedAdd(_OcclusionDebugOverlay[0], 1); } #endif @@ -76,20 +77,30 @@ bool IsOcclusionVisible(float3 frontCenterPosRWS, float2 centerPosNDC, float2 ra return isVisible; } -bool IsOcclusionVisible(BoundingObjectData data) +bool IsOcclusionVisible(BoundingObjectData data, int subviewIndex) { - return IsOcclusionVisible(data.frontCenterPosRWS, data.centerPosNDC, data.radialPosNDC); + return IsOcclusionVisible(data.frontCenterPosRWS, data.centerPosNDC, data.radialPosNDC, subviewIndex); } -bool IsOcclusionVisible(SphereBound boundingSphere) +bool IsOcclusionVisible(SphereBound boundingSphere, int subviewIndex) { - return IsOcclusionVisible(CalculateBoundingObjectData(boundingSphere, - _ViewProjMatrix, _ViewOriginWorldSpace, _RadialDirWorldSpace, _FacingDirWorldSpace)); + BoundingObjectData data = CalculateBoundingObjectData( + boundingSphere, + _ViewProjMatrix[subviewIndex], + _ViewOriginWorldSpace[subviewIndex], + _RadialDirWorldSpace[subviewIndex], + _FacingDirWorldSpace[subviewIndex]); + return IsOcclusionVisible(data, subviewIndex); } -bool IsOcclusionVisible(CylinderBound cylinderBound) +bool IsOcclusionVisible(CylinderBound cylinderBound, int subviewIndex) { - return IsOcclusionVisible(CalculateBoundingObjectData(cylinderBound, - _ViewProjMatrix, _ViewOriginWorldSpace, _RadialDirWorldSpace, _FacingDirWorldSpace)); + BoundingObjectData data = CalculateBoundingObjectData( + cylinderBound, + _ViewProjMatrix[subviewIndex], + _ViewOriginWorldSpace[subviewIndex], + _RadialDirWorldSpace[subviewIndex], + _FacingDirWorldSpace[subviewIndex]); + return IsOcclusionVisible(data, subviewIndex); } #endif diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommonShaderVariables.cs b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommonShaderVariables.cs index 8d12fa55d27..863f3abc8dc 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommonShaderVariables.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommonShaderVariables.cs @@ -7,38 +7,72 @@ internal unsafe struct OcclusionCullingCommonShaderVariables [HLSLArray(OccluderContext.k_MaxOccluderMips, typeof(ShaderGenUInt4))] public fixed uint _OccluderMipBounds[OccluderContext.k_MaxOccluderMips * 4]; - public Matrix4x4 _ViewProjMatrix; // from view-centered 
world space + [HLSLArray(OccluderContext.k_MaxSubviewsPerView, typeof(Matrix4x4))] + public fixed float _ViewProjMatrix[OccluderContext.k_MaxSubviewsPerView * 16]; // from view-centered world space + + [HLSLArray(OccluderContext.k_MaxSubviewsPerView, typeof(Vector4))] + public fixed float _ViewOriginWorldSpace[OccluderContext.k_MaxSubviewsPerView * 4]; + + [HLSLArray(OccluderContext.k_MaxSubviewsPerView, typeof(Vector4))] + public fixed float _FacingDirWorldSpace[OccluderContext.k_MaxSubviewsPerView * 4]; + + [HLSLArray(OccluderContext.k_MaxSubviewsPerView, typeof(Vector4))] + public fixed float _RadialDirWorldSpace[OccluderContext.k_MaxSubviewsPerView * 4]; - public Vector4 _ViewOriginWorldSpace; - public Vector4 _FacingDirWorldSpace; - public Vector4 _RadialDirWorldSpace; public Vector4 _DepthSizeInOccluderPixels; - public Vector4 _OccluderTextureSize; - public Vector4 _DebugPyramidSize; + public Vector4 _OccluderDepthPyramidSize; - public int _RendererListSplitMask; - public int _DebugAlwaysPassOcclusionTest; - public int _DebugOverlayCountOccluded; - public int _Padding0; + public uint _OccluderMipLayoutSizeX; + public uint _OccluderMipLayoutSizeY; + public uint _OcclusionTestDebugFlags; + public uint _OcclusionCullingCommonPad0; + + public int _OcclusionTestCount; + public int _OccluderSubviewIndices; // packed 4 bits each + public int _CullingSplitIndices; // packed 4 bits each + public int _CullingSplitMask; // only used for early out internal OcclusionCullingCommonShaderVariables( in OccluderContext occluderCtx, - int cullingSplitIndex, + in InstanceOcclusionTestSubviewSettings subviewSettings, bool occlusionOverlayCountVisible, bool overrideOcclusionTestToAlwaysPass) { - _ViewProjMatrix = occluderCtx.cameraData.viewProjMatrix; - _ViewOriginWorldSpace = occluderCtx.cameraData.viewOriginWorldSpace; - _DebugAlwaysPassOcclusionTest = overrideOcclusionTestToAlwaysPass ? 1 : 0; - _FacingDirWorldSpace = occluderCtx.cameraData.facingDirWorldSpace; - _RadialDirWorldSpace = occluderCtx.cameraData.radialDirWorldSpace; - _DebugOverlayCountOccluded = occlusionOverlayCountVisible ? 0 : 1; - _Padding0 = 0; - _DebugPyramidSize = new Vector4(occluderCtx.debugTextureSize.x, occluderCtx.debugTextureSize.y, 0.0f, 0.0f); - _RendererListSplitMask = 1 << cullingSplitIndex; + for (int i = 0; i < occluderCtx.subviewCount; ++i) + { + if (occluderCtx.IsSubviewValid(i)) + { + unsafe + { + for (int j = 0; j < 16; ++j) + _ViewProjMatrix[16 * i + j] = occluderCtx.subviewData[i].viewProjMatrix[j]; + + for (int j = 0; j < 4; ++j) + { + _ViewOriginWorldSpace[4 * i + j] = occluderCtx.subviewData[i].viewOriginWorldSpace[j]; + _FacingDirWorldSpace[4 * i + j] = occluderCtx.subviewData[i].facingDirWorldSpace[j]; + _RadialDirWorldSpace[4 * i + j] = occluderCtx.subviewData[i].radialDirWorldSpace[j]; + } + } + } + } + _OccluderMipLayoutSizeX = (uint)occluderCtx.occluderMipLayoutSize.x; + _OccluderMipLayoutSizeY = (uint)occluderCtx.occluderMipLayoutSize.y; + _OcclusionTestDebugFlags + = (overrideOcclusionTestToAlwaysPass ? (uint)OcclusionTestDebugFlag.AlwaysPass : 0) + | (occlusionOverlayCountVisible ? 
(uint)OcclusionTestDebugFlag.CountVisible : 0); + _OcclusionCullingCommonPad0 = 0; + + _OcclusionTestCount = subviewSettings.testCount; + _OccluderSubviewIndices = subviewSettings.occluderSubviewIndices; + _CullingSplitIndices = subviewSettings.cullingSplitIndices; + _CullingSplitMask = subviewSettings.cullingSplitMask; + _DepthSizeInOccluderPixels = occluderCtx.depthBufferSizeInOccluderPixels; - Vector2Int textureSize = occluderCtx.occluderTextureSize; - _OccluderTextureSize = new Vector4(textureSize.x, textureSize.y, 1.0f / textureSize.x, 1.0f / textureSize.y); + + Vector2Int textureSize = occluderCtx.occluderDepthPyramidSize; + _OccluderDepthPyramidSize = new Vector4(textureSize.x, textureSize.y, 1.0f / textureSize.x, 1.0f / textureSize.y); + for (int i = 0; i < occluderCtx.occluderMipBounds.Length; ++i) { var mipBounds = occluderCtx.occluderMipBounds[i]; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommonShaderVariables.cs.hlsl b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommonShaderVariables.cs.hlsl index d2c6dc649f1..2762f78467d 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommonShaderVariables.cs.hlsl +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommonShaderVariables.cs.hlsl @@ -8,17 +8,20 @@ // PackingRules = Exact CBUFFER_START(OcclusionCullingCommonShaderVariables) uint4 _OccluderMipBounds[8]; - float4x4 _ViewProjMatrix; - float4 _ViewOriginWorldSpace; - float4 _FacingDirWorldSpace; - float4 _RadialDirWorldSpace; + float4x4 _ViewProjMatrix[6]; + float4 _ViewOriginWorldSpace[6]; + float4 _FacingDirWorldSpace[6]; + float4 _RadialDirWorldSpace[6]; float4 _DepthSizeInOccluderPixels; - float4 _OccluderTextureSize; - float4 _DebugPyramidSize; - int _RendererListSplitMask; - int _DebugAlwaysPassOcclusionTest; - int _DebugOverlayCountOccluded; - int _Padding0; + float4 _OccluderDepthPyramidSize; + uint _OccluderMipLayoutSizeX; + uint _OccluderMipLayoutSizeY; + uint _OcclusionTestDebugFlags; + uint _OcclusionCullingCommonPad0; + int _OcclusionTestCount; + int _OccluderSubviewIndices; + int _CullingSplitIndices; + int _CullingSplitMask; CBUFFER_END diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingDebugShaderVariables.cs b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingDebugShaderVariables.cs index cec86797c27..c20688c4ad7 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingDebugShaderVariables.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingDebugShaderVariables.cs @@ -6,9 +6,13 @@ namespace UnityEngine.Rendering internal unsafe struct OcclusionCullingDebugShaderVariables { public Vector4 _DepthSizeInOccluderPixels; - public Vector4 _DebugPyramidSize; [HLSLArray(OccluderContext.k_MaxOccluderMips, typeof(ShaderGenUInt4))] public fixed uint _OccluderMipBounds[OccluderContext.k_MaxOccluderMips * 4]; + + public uint _OccluderMipLayoutSizeX; + public uint _OccluderMipLayoutSizeY; + public uint _OcclusionCullingDebugPad0; + public uint _OcclusionCullingDebugPad1; } } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingDebugShaderVariables.cs.hlsl b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingDebugShaderVariables.cs.hlsl index e345ac8629b..088c2a96f8c 100644 --- 
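The explicit pad fields in these structs are not cosmetic: HLSL packs cbuffer members into 16-byte registers and the generated headers use PackingRules = Exact, so each run of 32-bit fields must total a multiple of four for the C# and HLSL layouts to stay byte-identical. A cheap guard one could add on the C# side (hypothetical, not part of this patch):

    // any cbuffer-mirrored struct should span a whole number of 16-byte registers
    static void AssertCbufferLayout<T>() where T : unmanaged
    {
        int size = System.Runtime.InteropServices.Marshal.SizeOf<T>();
        System.Diagnostics.Debug.Assert(size % 16 == 0, $"{typeof(T).Name}: {size} bytes");
    }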
a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingDebugShaderVariables.cs.hlsl +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingDebugShaderVariables.cs.hlsl @@ -8,8 +8,11 @@ // PackingRules = Exact CBUFFER_START(OcclusionCullingDebugShaderVariables) float4 _DepthSizeInOccluderPixels; - float4 _DebugPyramidSize; uint4 _OccluderMipBounds[8]; + uint _OccluderMipLayoutSizeX; + uint _OccluderMipLayoutSizeY; + uint _OcclusionCullingDebugPad0; + uint _OcclusionCullingDebugPad1; CBUFFER_END diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingDefine.hlsl b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingDefine.hlsl deleted file mode 100644 index 73624a03f43..00000000000 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingDefine.hlsl +++ /dev/null @@ -1,24 +0,0 @@ -#ifndef _OCCLUSION_CULLING_DEFINE_H -#define _OCCLUSION_CULLING_DEFINE_H - -// If using this the shader should add -// #pragma multi_compile _ USE_ARRAY - -static int g_slice_index = 0; -#ifdef USE_ARRAY -#define TEXTURE2D_A TEXTURE2D_ARRAY -#define RW_TEXTURE2D_A RW_TEXTURE2D_ARRAY -#define SET_SLICE_INDEX(N) g_slice_index = N -#define ARRAY_COORD(C) int3((C), g_slice_index) -#define GATHER_TEXTURE2D_A(textureName, samplerName, coord2) GATHER_TEXTURE2D_ARRAY(textureName, samplerName, coord2, g_slice_index) -#define LOAD_TEXTURE2D_A(textureName, coord2) LOAD_TEXTURE2D_ARRAY(textureName, coord2, g_slice_index) -#else -#define TEXTURE2D_A TEXTURE2D -#define RW_TEXTURE2D_A RW_TEXTURE2D -#define SET_SLICE_INDEX(N) -#define ARRAY_COORD(C) C -#define GATHER_TEXTURE2D_A GATHER_TEXTURE2D -#define LOAD_TEXTURE2D_A LOAD_TEXTURE2D -#endif - -#endif diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/DynamicGI/DynamicSkyPrecomputedDirections.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/DynamicGI/DynamicSkyPrecomputedDirections.cs index a5b0f0af0d7..c592995e240 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/DynamicGI/DynamicSkyPrecomputedDirections.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/DynamicGI/DynamicSkyPrecomputedDirections.cs @@ -1,3 +1,4 @@ +using System.Runtime.CompilerServices; using RuntimeResources = UnityEngine.Rendering.ProbeReferenceVolume.RuntimeResources; namespace UnityEngine.Rendering @@ -9,11 +10,13 @@ internal static class DynamicSkyPrecomputedDirections static ComputeBuffer m_DirectionsBuffer = null; static Vector3[] m_Directions = null; + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static void GetRuntimeResources(ref RuntimeResources rr) { rr.SkyPrecomputedDirections = m_DirectionsBuffer; } + [MethodImpl(MethodImplOptions.AggressiveInlining)] internal static Vector3[] GetPrecomputedDirections() { return m_Directions; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Binding.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Binding.cs index a8eb6859b79..bd69e46a2e9 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Binding.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Binding.cs @@ -57,8 +57,8 @@ public void BindAPVRuntimeResources(CommandBuffer cmdBuffer, bool isProbeVolumeE cmdBuffer.SetGlobalTexture(ShaderIDs._APVResL1B_L1Rz, 
rr.L1_B_rz); cmdBuffer.SetGlobalTexture(ShaderIDs._APVResValidity, rr.Validity); - cmdBuffer.SetGlobalTexture(ShaderIDs._SkyOcclusionTexL0L1, rr.SkyOcclusionL0L1 ?? (RenderTargetIdentifier)TextureXR.GetBlackTexture3D()); - cmdBuffer.SetGlobalTexture(ShaderIDs._SkyShadingDirectionIndicesTex, rr.SkyShadingDirectionIndices ?? (RenderTargetIdentifier)TextureXR.GetBlackTexture3D()); + cmdBuffer.SetGlobalTexture(ShaderIDs._SkyOcclusionTexL0L1, rr.SkyOcclusionL0L1 ?? (RenderTargetIdentifier)CoreUtils.blackVolumeTexture); + cmdBuffer.SetGlobalTexture(ShaderIDs._SkyShadingDirectionIndicesTex, rr.SkyShadingDirectionIndices ?? (RenderTargetIdentifier)CoreUtils.blackVolumeTexture); cmdBuffer.SetGlobalBuffer(ShaderIDs._SkyPrecomputedDirections, rr.SkyPrecomputedDirections); if (refVolume.shBands == ProbeVolumeSHBands.SphericalHarmonicsL2) @@ -88,22 +88,22 @@ public void BindAPVRuntimeResources(CommandBuffer cmdBuffer, bool isProbeVolumeE cmdBuffer.SetGlobalBuffer(ShaderIDs._APVResIndex, m_EmptyIndexBuffer); cmdBuffer.SetGlobalBuffer(ShaderIDs._APVResCellIndices, m_EmptyIndexBuffer); - cmdBuffer.SetGlobalTexture(ShaderIDs._APVResL0_L1Rx, TextureXR.GetBlackTexture3D()); + cmdBuffer.SetGlobalTexture(ShaderIDs._APVResL0_L1Rx, CoreUtils.blackVolumeTexture); - cmdBuffer.SetGlobalTexture(ShaderIDs._APVResL1G_L1Ry, TextureXR.GetBlackTexture3D()); - cmdBuffer.SetGlobalTexture(ShaderIDs._APVResL1B_L1Rz, TextureXR.GetBlackTexture3D()); - cmdBuffer.SetGlobalTexture(ShaderIDs._APVResValidity, TextureXR.GetBlackTexture3D()); + cmdBuffer.SetGlobalTexture(ShaderIDs._APVResL1G_L1Ry, CoreUtils.blackVolumeTexture); + cmdBuffer.SetGlobalTexture(ShaderIDs._APVResL1B_L1Rz, CoreUtils.blackVolumeTexture); + cmdBuffer.SetGlobalTexture(ShaderIDs._APVResValidity, CoreUtils.blackVolumeTexture); - cmdBuffer.SetGlobalTexture(ShaderIDs._SkyOcclusionTexL0L1, TextureXR.GetBlackTexture3D()); - cmdBuffer.SetGlobalTexture(ShaderIDs._SkyShadingDirectionIndicesTex, TextureXR.GetBlackTexture3D()); + cmdBuffer.SetGlobalTexture(ShaderIDs._SkyOcclusionTexL0L1, CoreUtils.blackVolumeTexture); + cmdBuffer.SetGlobalTexture(ShaderIDs._SkyShadingDirectionIndicesTex, CoreUtils.blackVolumeTexture); cmdBuffer.SetGlobalBuffer(ShaderIDs._SkyPrecomputedDirections, m_EmptyDirectionsBuffer); if (refVolume.shBands == ProbeVolumeSHBands.SphericalHarmonicsL2) { - cmdBuffer.SetGlobalTexture(ShaderIDs._APVResL2_0, TextureXR.GetBlackTexture3D()); - cmdBuffer.SetGlobalTexture(ShaderIDs._APVResL2_1, TextureXR.GetBlackTexture3D()); - cmdBuffer.SetGlobalTexture(ShaderIDs._APVResL2_2, TextureXR.GetBlackTexture3D()); - cmdBuffer.SetGlobalTexture(ShaderIDs._APVResL2_3, TextureXR.GetBlackTexture3D()); + cmdBuffer.SetGlobalTexture(ShaderIDs._APVResL2_0, CoreUtils.blackVolumeTexture); + cmdBuffer.SetGlobalTexture(ShaderIDs._APVResL2_1, CoreUtils.blackVolumeTexture); + cmdBuffer.SetGlobalTexture(ShaderIDs._APVResL2_2, CoreUtils.blackVolumeTexture); + cmdBuffer.SetGlobalTexture(ShaderIDs._APVResL2_3, CoreUtils.blackVolumeTexture); } } } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.cs index 51484819246..e95b5e55ca6 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.cs @@ -857,12 +857,15 @@ public void SetActiveBakingSet(ProbeVolumeBakingSet bakingSet) foreach (var data in 
perSceneDataList) data.QueueSceneRemoval(); - + UnloadBakingSet(); SetBakingSetAsCurrent(bakingSet); - - foreach (var data in perSceneDataList) - data.QueueSceneLoading(); + + if (m_CurrentBakingSet != null) + { + foreach (var data in perSceneDataList) + data.QueueSceneLoading(); + } } void SetBakingSetAsCurrent(ProbeVolumeBakingSet bakingSet) @@ -2069,7 +2072,7 @@ void DeinitProbeReferenceVolume() m_TemporaryDataLocation.Cleanup(); m_ProbeReferenceVolumeInit = false; - + if (m_CurrentBakingSet != null) m_CurrentBakingSet.Cleanup(); m_CurrentBakingSet = null; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeBakingSet.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeBakingSet.cs index c880e94ed4f..30ff93708b0 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeBakingSet.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeBakingSet.cs @@ -777,7 +777,7 @@ void ResolveSharedCellData(List cellIndices, NativeArray(sourceBricks, Allocator.Persistent) : sourceBricks; cellData.validityNeighMaskData = m_UseStreamingAsset ? new NativeArray(sourceValidityNeightMaskData, Allocator.Persistent) : sourceValidityNeightMaskData; - // TODO save sky occlusion in a separate asset (see ProbeGIBaking WriteBakingCells) + // TODO save sky occlusion in a separate asset (see AdaptiveProbeVolumes WriteBakingCells) // And load it depending on ProbeReferenceVolume.instance.skyOcclusion if (bakedSkyOcclusion) { diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeGlobalSettings.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeGlobalSettings.cs index bc22447d3e1..70925e18c03 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeGlobalSettings.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeGlobalSettings.cs @@ -11,7 +11,7 @@ class ProbeVolumeRuntimeResources : IRenderPipelineResources int m_Version = 1; public int version { get => m_Version; } - + [Header("Runtime")] [ResourcePath("Runtime/Lighting/ProbeVolume/ProbeVolumeBlendStates.compute")] public ComputeShader probeVolumeBlendStatesCS; @@ -30,7 +30,7 @@ class ProbeVolumeDebugResources : IRenderPipelineResources int m_Version = 1; public int version { get => m_Version; } - + [Header("Debug")] [ResourcePath("Runtime/Debug/ProbeVolumeDebug.shader")] public Shader probeVolumeDebugShader; @@ -55,7 +55,7 @@ class ProbeVolumeBakingResources : IRenderPipelineResources int m_Version = 1; public int version { get => m_Version; } - + [Header("Baking")] [ResourcePath("Editor/Lighting/ProbeVolume/ProbeVolumeCellDilation.compute")] public ComputeShader dilationShader; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumePerSceneData.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumePerSceneData.cs index 9c4a5d52828..65ca8a4d039 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumePerSceneData.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumePerSceneData.cs @@ -15,6 +15,10 @@ namespace UnityEngine.Rendering [AddComponentMenu("")] // Hide. public class ProbeVolumePerSceneData : MonoBehaviour { + /// <summary>The baking set this scene is part of.</summary> + /// <returns>The baking set this scene is part of.</returns>
+ ProbeVolumeBakingSet GetBakingSet() => bakingSet; + [SerializeField] internal ProbeVolumeBakingSet bakingSet; [SerializeField] internal string sceneGUID = ""; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/PostProcessing/Shaders/FSRCommon.hlsl b/Packages/com.unity.render-pipelines.core/Runtime/PostProcessing/Shaders/FSRCommon.hlsl index 88659e00417..81ad28c19b8 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/PostProcessing/Shaders/FSRCommon.hlsl +++ b/Packages/com.unity.render-pipelines.core/Runtime/PostProcessing/Shaders/FSRCommon.hlsl @@ -71,14 +71,26 @@ float4 _FsrEasuConstants3; #if FSR_EASU_H AH4 FsrEasuRH(AF2 p) { + #ifdef FSR_CLAMP_COORD + p = FSR_CLAMP_COORD(p); + #endif + return (AH4)GATHER_RED_TEXTURE2D_X(FSR_INPUT_TEXTURE, FSR_INPUT_SAMPLER, p); } AH4 FsrEasuGH(AF2 p) { + #ifdef FSR_CLAMP_COORD + p = FSR_CLAMP_COORD(p); + #endif + return (AH4)GATHER_GREEN_TEXTURE2D_X(FSR_INPUT_TEXTURE, FSR_INPUT_SAMPLER, p); } AH4 FsrEasuBH(AF2 p) { + #ifdef FSR_CLAMP_COORD + p = FSR_CLAMP_COORD(p); + #endif + return (AH4)GATHER_BLUE_TEXTURE2D_X(FSR_INPUT_TEXTURE, FSR_INPUT_SAMPLER, p); } @@ -99,14 +111,26 @@ void FsrEasuProcessInput(inout AH4 r, inout AH4 g, inout AH4 b) #else AF4 FsrEasuRF(AF2 p) { + #ifdef FSR_CLAMP_COORD + p = FSR_CLAMP_COORD(p); + #endif + return GATHER_RED_TEXTURE2D_X(FSR_INPUT_TEXTURE, FSR_INPUT_SAMPLER, p); } AF4 FsrEasuGF(AF2 p) { + #ifdef FSR_CLAMP_COORD + p = FSR_CLAMP_COORD(p); + #endif + return GATHER_GREEN_TEXTURE2D_X(FSR_INPUT_TEXTURE, FSR_INPUT_SAMPLER, p); } AF4 FsrEasuBF(AF2 p) { + #ifdef FSR_CLAMP_COORD + p = FSR_CLAMP_COORD(p); + #endif + return GATHER_BLUE_TEXTURE2D_X(FSR_INPUT_TEXTURE, FSR_INPUT_SAMPLER, p); } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Debug/RenderGraphDebugParams.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Debug/RenderGraphDebugParams.cs index 9e1bb4670e5..f0c8c2351a4 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Debug/RenderGraphDebugParams.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Debug/RenderGraphDebugParams.cs @@ -31,7 +31,7 @@ private static class Strings { public static readonly NameAndTooltip ClearRenderTargetsAtCreation = new() { name = "Clear Render Targets At Creation", tooltip = "Enable to clear all render textures before any rendergraph passes to check if some clears are missing." }; public static readonly NameAndTooltip ClearRenderTargetsAtFree = new() { name = "Clear Render Targets When Freed", tooltip = "Enable to clear all render textures when textures are freed by the graph to detect use after free of textures." }; - public static readonly NameAndTooltip DisablePassCulling = new() { name = "Disable Pass Culling", tooltip = "Enable to temporarily disable culling to asses if a pass is culled." }; + public static readonly NameAndTooltip DisablePassCulling = new() { name = "Disable Pass Culling", tooltip = "Enable to temporarily disable culling to assess if a pass is culled." }; public static readonly NameAndTooltip ImmediateMode = new() { name = "Immediate Mode", tooltip = "Enable to force render graph to execute all passes in the order you registered them." }; public static readonly NameAndTooltip EnableLogging = new() { name = "Enable Logging", tooltip = "Enable to allow HDRP to capture information in the log." }; public static readonly NameAndTooltip LogFrameInformation = new() { name = "Log Frame Information", tooltip = "Enable to log information output from each frame." 
}; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.DebugData.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.DebugData.cs index af8430101c2..6d540219558 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.DebugData.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.DebugData.cs @@ -194,6 +194,8 @@ public class PassScriptInfo } } + readonly string[] k_PassNameDebugIgnoreList = new string[] { k_BeginProfilingSamplerPassName, k_EndProfilingSamplerPassName }; + [Conditional("UNITY_EDITOR")] void AddPassDebugMetadata(string passName, string file, int line) { @@ -201,6 +203,10 @@ void AddPassDebugMetadata(string passName, string file, int line) if (m_CaptureDebugDataForExecution == null) return; + for (int i = 0; i < k_PassNameDebugIgnoreList.Length; ++i) + if (passName == k_PassNameDebugIgnoreList[i]) + return; + if (!DebugData.s_PassScriptMetadata.TryAdd(passName, new DebugData.PassScriptInfo { filePath = file, line = line })) { var existingFile = DebugData.s_PassScriptMetadata[passName].filePath; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.cs index 1f91dc0ac0e..4440a1bc6f3 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.cs @@ -1247,6 +1247,9 @@ class ProfilingScopePassData public ProfilingSampler sampler; } + const string k_BeginProfilingSamplerPassName = "BeginProfile"; + const string k_EndProfilingSamplerPassName = "EndProfile"; + /// /// Begin a profiling scope. /// @@ -1260,7 +1263,7 @@ public void BeginProfilingSampler(ProfilingSampler sampler, if (sampler == null) return; - using (var builder = AddRenderPass("BeginProfile", out var passData, (ProfilingSampler)null, file, line)) + using (var builder = AddRenderPass(k_BeginProfilingSamplerPassName, out var passData, (ProfilingSampler)null, file, line)) { passData.sampler = sampler; builder.AllowPassCulling(false); @@ -1285,7 +1288,7 @@ public void EndProfilingSampler(ProfilingSampler sampler, if (sampler == null) return; - using (var builder = AddRenderPass("EndProfile", out var passData, (ProfilingSampler)null, file, line)) + using (var builder = AddRenderPass(k_EndProfilingSamplerPassName, out var passData, (ProfilingSampler)null, file, line)) { passData.sampler = sampler; builder.AllowPassCulling(false); diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/DebugOccluder.shader b/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/DebugOccluder.shader index 07fb60ccdfe..1b9cfb36af0 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/DebugOccluder.shader +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/DebugOccluder.shader @@ -62,29 +62,6 @@ Shader "Hidden/Core/DebugOccluder" ENDHLSL } - - Pass - { - Name "DebugOccluder_Array" - ZTest Off - Blend One Zero - Cull Off - ZWrite On - - HLSLPROGRAM - - TEXTURE2D_ARRAY(_OccluderTexture); - - #pragma vertex Vert - #pragma fragment Fragment - - float4 Fragment(Varyings input) : SV_Target - { - return GetOutputColor(SAMPLE_TEXTURE2D_ARRAY(_OccluderTexture, ltc_linear_clamp_sampler, input.texcoord, 0).x); - } - - ENDHLSL - } } Fallback Off } diff --git 
a/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/DebugOcclusionTest.shader b/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/DebugOcclusionTest.shader index f7ccaeaf51f..59662a3ba5e 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/DebugOcclusionTest.shader +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/DebugOcclusionTest.shader @@ -20,13 +20,13 @@ Shader "Hidden/Core/DebugOcclusionTest" #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl" #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Debug.hlsl" + #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/TextureXR.hlsl" #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/UnityInstancing.hlsl" #include "Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceOcclusionCuller.cs.hlsl" #include "Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingDebugShaderVariables.cs.hlsl" - #include "Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingDefine.hlsl" #include "Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionCullingCommon.cs.hlsl" - StructuredBuffer _OcclusionDebugPyramidOverlay; + StructuredBuffer _OcclusionDebugOverlay; struct Attributes { @@ -65,27 +65,35 @@ Shader "Hidden/Core/DebugOcclusionTest" return color; } - uint toDebugPyramidCoord(uint2 coord) + uint OcclusionDebugOverlayOffset(uint2 coord) { - return OCCLUSIONCULLINGCOMMONCONFIG_DEBUG_PYRAMID_OFFSET + coord.x + _DebugPyramidSize.x * (coord.y + g_slice_index * _DebugPyramidSize.y); + return OCCLUSIONCULLINGCOMMONCONFIG_DEBUG_PYRAMID_OFFSET + coord.x + _OccluderMipLayoutSizeX * coord.y; } float4 Frag(Varyings input) : SV_Target { UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(input); - SET_SLICE_INDEX(0); uint2 coord = uint2(input.uv); + uint subviewIndex = unity_StereoEyeIndex; + uint total = 0; for (int i = 0; i < OCCLUSIONCULLINGCOMMONCONFIG_MAX_OCCLUDER_MIPS; ++i) - total += _OcclusionDebugPyramidOverlay[toDebugPyramidCoord(_OccluderMipBounds[i].xy + (coord >> i))]; + { + int4 mipBounds = _OccluderMipBounds[i]; + mipBounds.y += subviewIndex * _OccluderMipLayoutSizeY; + + uint2 debugCoord = mipBounds.xy + uint2(min(int2(coord >> i), mipBounds.zw - 1)); + + total += _OcclusionDebugOverlay[OcclusionDebugOverlayOffset(debugCoord)]; + } if(total == 0) return float4(0, 0, 0, 0); float cost = log2((float)total); - uint screenTotal = _OcclusionDebugPyramidOverlay[0]; // This should be always >= 1, because total >= 1 at this point. + uint screenTotal = _OcclusionDebugOverlay[0]; // This should be always >= 1, because total >= 1 at this point. 
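To make the heat mapping here concrete: each overlay slot holds a hit count, the fragment sums the counts across all mips under this pixel, and the heat value is log2 of that sum normalized by log2 of the screen-wide total stored in slot 0. For example, a pixel whose gather footprint was touched 256 times in a frame with 65536 tests overall lands at 8/16 = 0.5 of the heat range:

    using System;

    uint total = 256, screenTotal = 65536;            // example counts
    double cost = Math.Log2(total);                   // 8
    double costScreenTotal = Math.Log2(screenTotal);  // 16
    double heat = cost / costScreenTotal;             // 0.5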
float costScreenTotal = log2((float)screenTotal); return OverlayHeapMapColor(cost, costScreenTotal, 0.4); diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/InstanceOcclusionCullingKernels.compute b/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/InstanceOcclusionCullingKernels.compute index b2591ee110b..116811044d1 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/InstanceOcclusionCullingKernels.compute +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/InstanceOcclusionCullingKernels.compute @@ -102,11 +102,11 @@ void CopyInstances(uint dispatchIdx : SV_DispatchThreadID) IndirectDrawInfo drawInfo = LoadDrawInfo(dispatchIdx); uint argsBase = DRAW_ARGS_INDEX(dispatchIdx); - _DrawArgs[argsBase + 0] = drawInfo.indexCount; // IndirectDrawIndexedArgs.indexCountPerInstance - _DrawArgs[argsBase + 1] = drawInfo.maxInstanceCount; // IndirectDrawIndexedArgs.instanceCount - _DrawArgs[argsBase + 2] = drawInfo.firstIndex; // IndirectDrawIndexedArgs.startIndex - _DrawArgs[argsBase + 3] = drawInfo.baseVertex; // IndirectDrawIndexedArgs.baseVertexIndex - _DrawArgs[argsBase + 4] = 0; // IndirectDrawIndexedArgs.startInstance + _DrawArgs[argsBase + 0] = drawInfo.indexCount; // IndirectDrawIndexedArgs.indexCountPerInstance + _DrawArgs[argsBase + 1] = drawInfo.maxInstanceCount << _InstanceMultiplierShift; // IndirectDrawIndexedArgs.instanceCount + _DrawArgs[argsBase + 2] = drawInfo.firstIndex; // IndirectDrawIndexedArgs.startIndex + _DrawArgs[argsBase + 3] = drawInfo.baseVertex; // IndirectDrawIndexedArgs.baseVertexIndex + _DrawArgs[argsBase + 4] = 0; // IndirectDrawIndexedArgs.startInstance } if (dispatchIdx < _InstanceInfoCount) @@ -119,10 +119,8 @@ void CopyInstances(uint dispatchIdx : SV_DispatchThreadID) } [numthreads(64,1,1)] -void CullInstances(uint instanceInfoOffset : SV_DispatchThreadID, uint2 groupId : SV_GroupID) +void CullInstances(uint instanceInfoOffset : SV_DispatchThreadID) { - SET_SLICE_INDEX(groupId.y); - uint instanceInfoCount = GetInstanceInfoCount(); if (instanceInfoOffset < instanceInfoCount) { @@ -130,9 +128,9 @@ void CullInstances(uint instanceInfoOffset : SV_DispatchThreadID, uint2 groupId uint drawOffset = instanceInfo.drawOffsetAndSplitMask >> 8; uint splitMask = instanceInfo.drawOffsetAndSplitMask & 0xff; - // check renderer list filtering (only run occlusion culling for this cascade) + // early out if none of these culling splits are visible // TODO: plumb through other state per draw command to filter here? 
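The _InstanceMultiplierShift threaded through both kernels above is worth a worked example. With a shift of 1 (presumably to double instances per draw, e.g. for stereo instancing; the consumer is outside this hunk), every visible instance advances instanceCount by 2, and the pre-add value returned by the atomic is shifted back down to recover the compact per-draw slot:

    int shift = 1;                          // _InstanceMultiplierShift (assumed value)
    int instanceCount = 0;                  // _DrawArgs[argsBase + 1]

    // per visible instance: InterlockedAdd(_DrawArgs[argsBase + 1], 1 << shift, prev)
    int prev = instanceCount;
    instanceCount += 1 << shift;            // the GPU will draw 2 instances per object
    int offsetWithinDraw = prev >> shift;   // 0, 1, 2, ... compact write index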
- if ((splitMask & _RendererListSplitMask) == 0) + if ((splitMask & _CullingSplitMask) == 0) return; bool isVisible = true; @@ -141,7 +139,22 @@ void CullInstances(uint instanceInfoOffset : SV_DispatchThreadID, uint2 groupId int instanceID = instanceInfo.instanceIndexAndCrossFade & 0xffffff; SphereBound boundingSphere = LoadInstanceBoundingSphere(instanceID); - isVisible = IsOcclusionVisible(boundingSphere); + bool isOccludedInAll = true; + for (int testIndex = 0; testIndex < _OcclusionTestCount; ++testIndex) + { + // unpack the culling split index and subview index for this test + int splitIndex = (_CullingSplitIndices >> (4 * testIndex)) & 0xf; + int subviewIndex = (_OccluderSubviewIndices >> (4 * testIndex)) & 0xf; + + // skip if this draw call is not present in this split index + if (((1 << splitIndex) & splitMask) == 0) + continue; + + // occlusion test against the corresponding subview + if (IsOcclusionVisible(boundingSphere, subviewIndex)) + isOccludedInAll = false; + } + isVisible = !isOccludedInAll; #ifdef OCCLUSION_FIRST_PASS // if we failed the occlusion check, then add to the list for the second pass @@ -159,15 +172,15 @@ void CullInstances(uint instanceInfoOffset : SV_DispatchThreadID, uint2 groupId { // TODO: sum each within wave, first thread in wave issues atomic add to memory int counterIndex = isVisible ? INSTANCEOCCLUSIONTESTDEBUGCOUNTER_NOT_OCCLUDED : INSTANCEOCCLUSIONTESTDEBUGCOUNTER_OCCLUDED; - int resultUnused = 0; - InterlockedAdd(_OcclusionDebugCounters[_DebugCounterIndex*INSTANCEOCCLUSIONTESTDEBUGCOUNTER_COUNT + counterIndex], 1, resultUnused); + InterlockedAdd(_OcclusionDebugCounters[_DebugCounterIndex*INSTANCEOCCLUSIONTESTDEBUGCOUNTER_COUNT + counterIndex], 1); } if (isVisible) { uint argsBase = DRAW_ARGS_INDEX(drawOffset); uint offsetWithinDraw = 0; - InterlockedAdd(_DrawArgs[argsBase + 1], 1, offsetWithinDraw); // IndirectDrawIndexedArgs.instanceCount + InterlockedAdd(_DrawArgs[argsBase + 1], 1 << _InstanceMultiplierShift, offsetWithinDraw); // IndirectDrawIndexedArgs.instanceCount + offsetWithinDraw = offsetWithinDraw >> _InstanceMultiplierShift; IndirectDrawInfo drawInfo = LoadDrawInfo(drawOffset); uint writeIndex = drawInfo.firstInstanceGlobalIndex + offsetWithinDraw; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/OccluderDepthPyramidKernels.compute b/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/OccluderDepthPyramidKernels.compute index 1c44e43763f..37da4b2442c 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/OccluderDepthPyramidKernels.compute +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/OccluderDepthPyramidKernels.compute @@ -7,45 +7,47 @@ #include "Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/OcclusionTestCommon.hlsl" #pragma multi_compile _ USE_SRC -#pragma multi_compile _ USE_ARRAY -#pragma multi_compile _ USE_MSAA_SRC +#pragma multi_compile _ SRC_IS_ARRAY +#pragma multi_compile _ SRC_IS_MSAA -#ifdef USE_ARRAY -#if defined(USE_SRC) && defined(USE_MSAA_SRC) -#define TEXTURE2D_A(T) Texture2DMSArray T -#else -#define TEXTURE2D_A TEXTURE2D_ARRAY -#endif -#define RW_TEXTURE2D_A RW_TEXTURE2D_ARRAY -static int g_slice_index = 0; -#define SET_SLICE_INDEX(N) g_slice_index = N -#define ARRAY_COORD(C) int3((C), g_slice_index) -#define LOAD_MSAA(coord, sampleIndex) LOAD_TEXTURE2D_ARRAY_MSAA(_SrcDepth, coord.xy, g_slice_index, sampleIndex) -#else -#if defined(USE_SRC) && 
defined(USE_MSAA_SRC) -#define TEXTURE2D_A(T) Texture2DMS T -#else -#define TEXTURE2D_A TEXTURE2D -#endif -#define RW_TEXTURE2D_A RW_TEXTURE2D -#define SET_SLICE_INDEX(N) -#define ARRAY_COORD(C) C -#define LOAD_MSAA(coord, sampleIndex) LOAD_TEXTURE2D_MSAA(_SrcDepth, coord, sampleIndex) -#endif +RW_TEXTURE2D(float, _DstDepth); -#if defined(USE_SRC) && defined(USE_MSAA_SRC) -#define LOAD_TEXTURE(C) LOAD_MSAA(C, 0) // Always read sample 0, alternatively the depth could be resolved similar to CopyDepthPass.hlsl -#else -#define LOAD_TEXTURE(C) _SrcDepth[C] +#ifdef USE_SRC + #ifdef SRC_IS_MSAA + #ifdef SRC_IS_ARRAY + Texture2DMSArray _SrcDepth; + #else + Texture2DMS _SrcDepth; + #endif + #else + #ifdef SRC_IS_ARRAY + TEXTURE2D_ARRAY(_SrcDepth); + #else + TEXTURE2D(_SrcDepth); + #endif + #endif #endif -RW_TEXTURE2D_A(float, _DstDepth); - +float LoadDepth(int2 coord, int srcSliceIndex) +{ #ifdef USE_SRC -TEXTURE2D_A(_SrcDepth); + #ifdef SRC_IS_MSAA + #ifdef SRC_IS_ARRAY + return LOAD_TEXTURE2D_ARRAY_MSAA(_SrcDepth, coord, srcSliceIndex, 0).x; + #else + return LOAD_TEXTURE2D_MSAA(_SrcDepth, coord, 0).x; + #endif + #else + #ifdef SRC_IS_ARRAY + return _SrcDepth[int3(coord, srcSliceIndex)].x; + #else + return _SrcDepth[coord].x; + #endif + #endif #else -#define _SrcDepth _DstDepth + return _DstDepth[coord].x; #endif +} uint2 CoordInTileByIndex(uint i) { @@ -71,16 +73,25 @@ void SubgroupMergeDepths(uint threadID : SV_GroupThreadID, uint bitIndex, inout GroupMemoryBarrierWithGroupSync(); } -#define _MipOffset(N) int2(_MipOffsetAndSize[N].xy) -#define _MipSize(N) int2(_MipOffsetAndSize[N].zw) +int2 DestMipOffset(int mipIndex, int dstSubviewIndex) +{ + uint2 mipOffset = _MipOffsetAndSize[mipIndex].xy; + mipOffset.y += dstSubviewIndex * _OccluderMipLayoutSizeY; + return int2(mipOffset); +} + +int2 DestMipSize(int mipIndex) +{ + return int2(_MipOffsetAndSize[mipIndex].zw); +} -bool IsSilhouetteCulled(int2 coord) +bool IsSilhouetteCulled(int2 coord, int updateIndex) { - int2 srcSize = _MipSize(0); + int2 srcSize = DestMipSize(0); float2 posNDC = float2( (coord.x + 0.5f) / (float)srcSize.x, (coord.y + 0.5f) / (float)srcSize.y); - float3 posWS = ComputeWorldSpacePosition(posNDC, 0.2f, _InvViewProjMatrix); + float3 posWS = ComputeWorldSpacePosition(posNDC, 0.2f, _InvViewProjMatrix[updateIndex]); for (uint i = 0; i < _SilhouettePlaneCount; ++i) { @@ -95,35 +106,42 @@ bool IsSilhouetteCulled(int2 coord) [numthreads(64, 1, 1)] void OccluderDepthDownscale(uint threadID : SV_GroupThreadID, uint3 groupID : SV_GroupID) { - SET_SLICE_INDEX(groupID.z); - // assign threads to pixels in a swizzle-like pattern int2 dstCoord1 = (groupID.xy << 3) | CoordInTileByIndex(threadID); + int updateIndex = groupID.z; + + int srcSliceIndex = (_SrcSliceIndices >> (4*updateIndex)) & 0xf; + int dstSubviewIndex = (_DstSubviewIndices >> (4*updateIndex)) & 0xf; +#if USE_SRC + int2 loadOffset = int2(_SrcOffset[updateIndex].xy); +#else + int2 loadOffset = DestMipOffset(0, dstSubviewIndex); +#endif int2 srcCoord = dstCoord1 << 1; - int2 srcLimit = _MipSize(0) - 1; + int2 srcLimit = DestMipSize(0) - 1; - float p00 = LOAD_TEXTURE(ARRAY_COORD(_MipOffset(0) + min(srcCoord + int2(0, 0), srcLimit))).x; - float p10 = LOAD_TEXTURE(ARRAY_COORD(_MipOffset(0) + min(srcCoord + int2(1, 0), srcLimit))).x; - float p01 = LOAD_TEXTURE(ARRAY_COORD(_MipOffset(0) + min(srcCoord + int2(0, 1), srcLimit))).x; - float p11 = LOAD_TEXTURE(ARRAY_COORD(_MipOffset(0) + min(srcCoord + int2(1, 1), srcLimit))).x; + float p00 = LoadDepth(loadOffset + min(srcCoord + int2(0, 
0), srcLimit), srcSliceIndex); + float p10 = LoadDepth(loadOffset + min(srcCoord + int2(1, 0), srcLimit), srcSliceIndex); + float p01 = LoadDepth(loadOffset + min(srcCoord + int2(0, 1), srcLimit), srcSliceIndex); + float p11 = LoadDepth(loadOffset + min(srcCoord + int2(1, 1), srcLimit), srcSliceIndex); #ifdef USE_SRC - if (IsSilhouetteCulled(srcCoord + int2(0, 0))) + if (IsSilhouetteCulled(srcCoord + int2(0, 0), updateIndex)) p00 = 1.f - UNITY_RAW_FAR_CLIP_VALUE; - if (IsSilhouetteCulled(srcCoord + int2(1, 0))) + if (IsSilhouetteCulled(srcCoord + int2(1, 0), updateIndex)) p10 = 1.f - UNITY_RAW_FAR_CLIP_VALUE; - if (IsSilhouetteCulled(srcCoord + int2(0, 1))) + if (IsSilhouetteCulled(srcCoord + int2(0, 1), updateIndex)) p01 = 1.f - UNITY_RAW_FAR_CLIP_VALUE; - if (IsSilhouetteCulled(srcCoord + int2(1, 1))) + if (IsSilhouetteCulled(srcCoord + int2(1, 1), updateIndex)) p11 = 1.f - UNITY_RAW_FAR_CLIP_VALUE; #endif float farDepth = FarthestDepth(float4(p00, p10, p01, p11)); // write dst0 - if (all(dstCoord1 < _MipSize(1))) - _DstDepth[ARRAY_COORD(_MipOffset(1) + dstCoord1)] = farDepth; + if (all(dstCoord1 < DestMipSize(1))) + _DstDepth[DestMipOffset(1, dstSubviewIndex) + dstCoord1] = farDepth; // merge towards thread 0 in subgroup size 4 if (2 <= _MipCount) @@ -133,8 +151,8 @@ void OccluderDepthDownscale(uint threadID : SV_GroupThreadID, uint3 groupID : SV if ((threadID & 0x3) == 0) { int2 dstCoord2 = dstCoord1 >> 1; - if (all(dstCoord2 < _MipSize(2))) - _DstDepth[ARRAY_COORD(_MipOffset(2) + dstCoord2)] = farDepth; + if (all(dstCoord2 < DestMipSize(2))) + _DstDepth[DestMipOffset(2, dstSubviewIndex) + dstCoord2] = farDepth; } } @@ -146,8 +164,8 @@ void OccluderDepthDownscale(uint threadID : SV_GroupThreadID, uint3 groupID : SV if ((threadID & 0xf) == 0) { int2 dstCoord3 = dstCoord1 >> 2; - if (all(dstCoord3 < _MipSize(3))) - _DstDepth[ARRAY_COORD(_MipOffset(3) + dstCoord3)] = farDepth; + if (all(dstCoord3 < DestMipSize(3))) + _DstDepth[DestMipOffset(3, dstSubviewIndex) + dstCoord3] = farDepth; } } @@ -159,8 +177,8 @@ void OccluderDepthDownscale(uint threadID : SV_GroupThreadID, uint3 groupID : SV if ((threadID & 0x3f) == 0) { int2 dstCoord4 = dstCoord1 >> 3; - if (all(dstCoord4 < _MipSize(4))) - _DstDepth[ARRAY_COORD(_MipOffset(4) + dstCoord4)] = farDepth; + if (all(dstCoord4 < DestMipSize(4))) + _DstDepth[DestMipOffset(4, dstSubviewIndex) + dstCoord4] = farDepth; } } } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/OcclusionCullingDebug.compute b/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/OcclusionCullingDebug.compute index bff07b53d0c..c201535fc54 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/OcclusionCullingDebug.compute +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderPipelineResources/GPUDriven/OcclusionCullingDebug.compute @@ -10,16 +10,17 @@ void ClearOcclusionDebug(uint2 dispatchIndex : SV_DispatchThreadID, uint3 groupId : SV_GroupID) { #ifdef OCCLUSION_DEBUG - SET_SLICE_INDEX(groupId.z); + int clearSliceIndex = groupId.z; for (int i = 0; i < OCCLUSIONCULLINGCOMMONCONFIG_MAX_OCCLUDER_MIPS; ++i) { uint4 bounds = _OccluderMipBounds[i]; + bounds.y += clearSliceIndex * _OccluderMipLayoutSizeY; if (all(dispatchIndex < bounds.zw)) - _OcclusionDebugPyramid[toDebugPyramidCoord(bounds.xy + dispatchIndex)] = 0; + _OcclusionDebugOverlay[OcclusionDebugOverlayOffset(bounds.xy + dispatchIndex)] = 0; } - if (all(dispatchIndex == 0)) - 
_OcclusionDebugPyramid[0] = 0; + if (all(dispatchIndex == 0) && clearSliceIndex == 0) + _OcclusionDebugOverlay[0] = 0; #endif } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Utilities/CameraCaptureBridge.cs b/Packages/com.unity.render-pipelines.core/Runtime/Utilities/CameraCaptureBridge.cs index 45145156d6d..52c4cae23ce 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Utilities/CameraCaptureBridge.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Utilities/CameraCaptureBridge.cs @@ -8,8 +8,13 @@ namespace UnityEngine.Rendering /// public static class CameraCaptureBridge { - private static Dictionary>> actionDict = - new Dictionary>>(); + private class CameraEntry + { + internal HashSet> actions; + internal IEnumerator> cachedEnumerator; + } + + private static Dictionary actionDict = new(); private static bool _enabled; @@ -35,10 +40,20 @@ public static bool enabled /// Enumeration of actions public static IEnumerator> GetCaptureActions(Camera camera) { - if (!actionDict.TryGetValue(camera, out var actions) || actions.Count == 0) + if (!actionDict.TryGetValue(camera, out var entry) || entry.actions.Count == 0) + return null; + + return entry.actions.GetEnumerator(); + } + + internal static IEnumerator> GetCachedCaptureActionsEnumerator(Camera camera) + { + if (!actionDict.TryGetValue(camera, out var entry) || entry.actions.Count == 0) return null; - return actions.GetEnumerator(); + // internal use only + entry.cachedEnumerator.Reset(); + return entry.cachedEnumerator; } /// @@ -48,14 +63,15 @@ public static IEnumerator> GetCapt /// The action to add public static void AddCaptureAction(Camera camera, Action action) { - actionDict.TryGetValue(camera, out var actions); - if (actions == null) + actionDict.TryGetValue(camera, out var entry); + if (entry == null) { - actions = new HashSet>(); - actionDict.Add(camera, actions); + entry = new CameraEntry {actions = new HashSet>()}; + actionDict.Add(camera, entry); } - actions.Add(action); + entry.actions.Add(action); + entry.cachedEnumerator = entry.actions.GetEnumerator(); } /// @@ -68,8 +84,11 @@ public static void RemoveCaptureAction(Camera camera, Action public float m_Turbo = 10.0f; -#if ENABLE_INPUT_SYSTEM +#if USE_INPUT_SYSTEM InputAction lookAction; InputAction moveAction; InputAction speedAction; @@ -60,7 +64,7 @@ void OnEnable() void RegisterInputs() { -#if ENABLE_INPUT_SYSTEM +#if USE_INPUT_SYSTEM var map = new InputActionMap("Free Camera"); lookAction = map.AddAction("look", binding: "/delta"); @@ -122,7 +126,7 @@ void UpdateInputs() leftShiftBoost = false; fire1 = false; -#if ENABLE_INPUT_SYSTEM +#if USE_INPUT_SYSTEM var lookDelta = lookAction.ReadValue(); inputRotateAxisX = lookDelta.x * m_LookSpeedMouse * k_MouseSensitivityMultiplier; inputRotateAxisY = lookDelta.y * m_LookSpeedMouse * k_MouseSensitivityMultiplier; diff --git a/Packages/com.unity.render-pipelines.core/Samples~/Common/Scripts/SamplesShowcase.cs b/Packages/com.unity.render-pipelines.core/Samples~/Common/Scripts/SamplesShowcase.cs index f4cc88b4326..747f532b40c 100644 --- a/Packages/com.unity.render-pipelines.core/Samples~/Common/Scripts/SamplesShowcase.cs +++ b/Packages/com.unity.render-pipelines.core/Samples~/Common/Scripts/SamplesShowcase.cs @@ -1,3 +1,7 @@ +#if (ENABLE_INPUT_SYSTEM && INPUT_SYSTEM_INSTALLED) +#define USE_INPUT_SYSTEM +#endif + using System.Collections.Generic; #if UNITY_EDITOR using UnityEditor; @@ -9,7 +13,7 @@ using System.Collections; using UnityEngine.Rendering; using System.Text.RegularExpressions; -#if 
ENABLE_INPUT_SYSTEM +#if USE_INPUT_SYSTEM using UnityEngine.InputSystem; #endif @@ -124,8 +128,8 @@ void Update() } #endif -#if ENABLE_INPUT_SYSTEM - if(Keyboard.current.rightArrowKey.wasPressedThisFrame || Keyboard.current.upArrowKey.wasPressedThisFrame) +#if USE_INPUT_SYSTEM + if (Keyboard.current.rightArrowKey.wasPressedThisFrame || Keyboard.current.upArrowKey.wasPressedThisFrame) { SwitchEffect(currentIndex+1); } diff --git a/Packages/com.unity.render-pipelines.core/ShaderLibrary/API/FoveatedRendering_Metal.hlsl b/Packages/com.unity.render-pipelines.core/ShaderLibrary/API/FoveatedRendering_Metal.hlsl index 22e8f0fc04e..b5724d37a74 100644 --- a/Packages/com.unity.render-pipelines.core/ShaderLibrary/API/FoveatedRendering_Metal.hlsl +++ b/Packages/com.unity.render-pipelines.core/ShaderLibrary/API/FoveatedRendering_Metal.hlsl @@ -3,33 +3,21 @@ #if !defined(UNITY_COMPILER_DXC) && (defined(UNITY_PLATFORM_OSX) || defined(UNITY_PLATFORM_IOS)) - // These are tokens that hlslcc is looking for in order - // to inject variable rasterization rate MSL code. - // DO NOT RENAME unless you also change logic in translation - float3 _UV_HlslccVRRDistort0 = float3(0.0, 0.0, 0.0); - float3 _UV_HlslccVRRDistort1 = float3(0.0, 0.0, 0.0); - float3 _UV_HlslccVRRResolve0 = float3(0.0, 0.0, 0.0); - float3 _UV_HlslccVRRResolve1 = float3(0.0, 0.0, 0.0); + // These are tokens that hlslcc is looking for in order to inject Variable Rasterization Rate MSL code. + // DO NOT RENAME unless you also change logic in translation. + // They should be used in conjunction with a 'mad' instruction where the order of parameters must be: + // Param 1 - uv to be remapped + // Param 2 - token + // Param 3 - stereo eye index + float2 _UV_HlslccVRRDistort; + float2 _UV_HlslccVRRResolve; float2 RemapFoveatedRenderingLinearToNonUniform(float2 uv, bool yFlip = false) { if (yFlip) uv.y = 1.0 - uv.y; - // TODO: This is not ideal looking code, but our hlsl to msl translation - // layer can rearrange instructions while doing optimizations. - // That can easily break things because we expect certain tokens and swizzles. - // When changing this make sure to check the compiled msl code for foveation. 
- if (unity_StereoEyeIndex == 1) - { - uv += _UV_HlslccVRRResolve0.yz; - uv = uv * _UV_HlslccVRRResolve1.xy; - } - else - { - uv += _UV_HlslccVRRResolve1.yz; - uv = uv * _UV_HlslccVRRResolve0.xy; - } + uv = mad(uv, _UV_HlslccVRRResolve, unity_StereoEyeIndex); if (yFlip) uv.y = 1.0 - uv.y; @@ -60,17 +48,7 @@ if (yFlip) uv.y = 1.0 - uv.y; - // NOTE: Check comment for similar code in RemapFoveatedRenderingLinearToNonUniform - if (unity_StereoEyeIndex == 1) - { - uv += _UV_HlslccVRRDistort0.yz; - uv = uv * _UV_HlslccVRRDistort1.xy; - } - else - { - uv += _UV_HlslccVRRDistort1.yz; - uv = uv * _UV_HlslccVRRDistort0.xy; - } + uv = mad(uv, _UV_HlslccVRRDistort, unity_StereoEyeIndex); if (yFlip) uv.y = 1.0 - uv.y; @@ -91,17 +69,7 @@ if (yFlip) uv.y = 1.0 - uv.y; - // NOTE: Check comment for similar code in RemapFoveatedRenderingLinearToNonUniform - if (unity_StereoEyeIndex == 1) - { - uv += _UV_HlslccVRRDistort0.yz; - uv = uv * _UV_HlslccVRRDistort1.xy; - } - else - { - uv += _UV_HlslccVRRDistort1.yz; - uv = uv * _UV_HlslccVRRDistort0.xy; - } + uv = mad(uv, _UV_HlslccVRRDistort, unity_StereoEyeIndex); if (yFlip) uv.y = 1.0 - uv.y; diff --git a/Packages/com.unity.render-pipelines.core/ShaderLibrary/Packing.hlsl b/Packages/com.unity.render-pipelines.core/ShaderLibrary/Packing.hlsl index f6ba15f5566..b9c2186ab6f 100644 --- a/Packages/com.unity.render-pipelines.core/ShaderLibrary/Packing.hlsl +++ b/Packages/com.unity.render-pipelines.core/ShaderLibrary/Packing.hlsl @@ -560,9 +560,8 @@ float2 Unpack888ToFloat2(float3 x) // Pack 2 float values from the [0, 1] range, to an 8 bits float from the [0, 1] range float PackFloat2To8(float2 f) { - float x_expanded = f.x * 15.0; // f.x encoded over 4 bits, can have 2^4 = 16 distinct values mapped to [0, 1, ..., 15] - float y_expanded = f.y * 15.0; // f.y encoded over 4 bits, can have 2^4 = 16 distinct values mapped to [0, 1, ..., 15] - float x_y_expanded = x_expanded * 16.0 + y_expanded; // f.x encoded over higher bits, f.y encoded over the lower bits - x_y values in range [0, 1, ..., 255] + float2 i = floor(f * 15.0); // f.x & f.y encoded over 4 bits, can have 2^4 = 16 distinct values mapped to [0, 1, ..., 15] + float x_y_expanded = i.x * 16.0 + i.y; // f.x encoded over higher bits, f.y encoded over the lower bits - x_y values in range [0, 1, ..., 255] return x_y_expanded / 255.0; // above 4 lines equivalent to: diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDRP-Asset.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDRP-Asset.md index 3b97736fa71..13f455552e6 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDRP-Asset.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDRP-Asset.md @@ -91,16 +91,19 @@ These settings control the draw distance and resolution of the decals atlas that | **Rendering Layer Mask Buffer** | Enable the checkbox to make HDRP write the Rendering Layer Mask of GameObjects in a fullscreen buffer target. This comes with a performance and memory cost.
The [HD Sample Buffer node](https://docs.unity3d.com/Packages/com.unity.shadergraphlatest?subfolder=/manual/HD-Sample-Buffer-Node.html) in ShaderGraph can sample this target. | ### Light Probe Lighting -Use these settings in the **Quality** > **HDRP** menu to configure [Probe Volumes](probevolumes.md). - -| **Property** | **Description** | -|---------------------------------|-----------------------------------------------------------------| -| **Light Probe System** | • **Light Probe Groups (Legacy)**: Use the same [Light Probe Group system](https://docs.unity3d.com/Manual/class-LightProbeGroup.html) as the Built-In Render Pipeline.
• **Probe Volumes**: Use [Probe Volumes](probevolumes.md). | -| **Memory Budget** | Limits the width and height of the textures that store baked Global Illumination data, which determines the amount of memory Unity sets aside to store baked Probe Volume data. These textures have a fixed depth.
Options:
• **Memory Budget Low**
• **Memory Budget Medium**
• **Memory Budget High** | -| **Scenario Blending Memory Budget** | Limits the width and height of the textures that Unity uses to blend between Lighting Scenarios. This determines the amount of memory Unity sets aside to store Lighting Scenario blending data, and store data while doing the blending operation. These textures have a fixed depth.
Options:
• **Memory Budget Low**
• **Memory Budget Medium**
• **Memory Budget High** | | -| **SH Bands** | Determines the [spherical harmonics (SH) bands](https://docs.unity3d.com/Manual/LightProbes-TechnicalInformation.html) Unity uses to store probe data. L2 provides more precise results, but uses more system resources.
Options:
• **Spherical Harmonics L1** (default)
• **Spherical Harmonics L2** | -| **Enable Streaming** | Enable to use [streaming](probevolumes-streaming.md). | -| **Estimated GPU Memory Cost** | Indicates the amount of Global Illumination and scenario blending texture data associated with Probe Volumes in your project.| +Use these settings in the **Quality** > **HDRP** menu to configure [Adaptive Probe Volumes](probevolumes.md). + +| **Property** | **Sub-property** | **Description** | +|-|-|-| +| **Light Probe System** ||
  • **Light Probe Groups (Legacy)**: Use the same [Light Probe Group system](https://docs.unity3d.com/Manual/class-LightProbeGroup.html) as the Built-In Render Pipeline.
  • **Adaptive Probe Volumes**: Use [Adaptive Probe Volumes](probevolumes.md).
| +|| **Memory Budget** | Limits the width and height of the textures that store baked Global Illumination data, which determines the amount of memory Unity sets aside to store baked Adaptive Probe Volume data. These textures have a fixed depth.
Options:
  • **Memory Budget Low**
  • **Memory Budget Medium**
  • **Memory Budget High**
| +|| **SH Bands** | Determines the [spherical harmonics (SH) bands](https://docs.unity3d.com/Manual/LightProbes-TechnicalInformation.html) Unity uses to store probe data. L2 provides more precise results, but uses more system resources.
Options:
  • **Spherical Harmonics L1**
  • **Spherical Harmonics L2**
| +| **Lighting Scenarios** || Enable to use Lighting Scenarios. Refer to [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) for more information. | +|| **Scenario Blending** | Enable blending between different Lighting Scenarios. This uses more memory and makes rendering slower. | +|| **Scenario Blending Memory Budget** | Limits the width and height of the textures that Unity uses to blend between Lighting Scenarios. This determines the amount of memory Unity sets aside to store Lighting Scenario blending data, and to store data while doing the blending operation. These textures have a fixed depth.
Options:
• **Memory Budget Low**
• **Memory Budget Medium**
• **Memory Budget High** | +| **Enable GPU Streaming** || Enable to stream Adaptive Probe Volume data from CPU memory to GPU memory at runtime. Refer to [Streaming Adaptive Probe Volumes](probevolumes-streaming.md) for more information. | +| **Enable Disk Streaming** || Enable to stream Adaptive Probe Volume data from disk to CPU memory at runtime. Refer to [Streaming Adaptive Probe Volumes](probevolumes-streaming.md) for more information. | +| **Estimated GPU Memory Cost** || Indicates the amount of texture data used by Adaptive Probe Volumes in your project. This includes textures used both for Global Illumination and Lighting Scenario blending. | ### Cookies diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobebricks1.PNG b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobebricks1.PNG index e9baab29489..b6560446831 100644 Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobebricks1.PNG and b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobebricks1.PNG differ diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobebricks2.PNG b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobebricks2.PNG deleted file mode 100644 index 0f4a3baaddb..00000000000 Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobebricks2.PNG and /dev/null differ diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobebricks3.PNG b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobebricks3.PNG deleted file mode 100644 index 3f435a0efff..00000000000 Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobebricks3.PNG and /dev/null differ diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobecells.PNG b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobecells.PNG index 33dbddfeb5b..ce154809bba 100644 Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobecells.PNG and b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobecells.PNG differ diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobes.PNG b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobes.PNG index c47e326ae70..d34e63fa637 100644 Binary files a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobes.PNG and b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/probevolumes-debug-displayprobes.PNG differ diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/TableOfContents.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/TableOfContents.md index 38b8fa617d4..034c37ba64a 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/TableOfContents.md +++
b/Packages/com.unity.render-pipelines.high-definition/Documentation~/TableOfContents.md @@ -76,14 +76,20 @@ * [Additional shadow detail](shadows-additional-detail.md) * [Use contact shadows](Override-Contact-Shadows.md) * [Use micro shadows](Override-Micro-Shadows.md) - * [Probe Volumes](probevolumes.md) - * [Understand Probe Volumes](probevolumes-concept.md) - * [Use Probe Volumes](probevolumes-use.md) - * [Display and adjust Probe Volumes](probevolumes-showandadjust.md) - * [Fix issues with Probe Volumes](probevolumes-fixissues.md) + * [Adaptive Probe Volumes](probevolumes.md) + * [Understanding Adaptive Probe Volumes](probevolumes-concept.md) + * [Use Adaptive Probe Volumes](probevolumes-use.md) + * [Display Adaptive Probe Volumes](probevolumes-showandadjust.md) + * [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md) + * [Bake multiple scenes together with Baking Sets](probevolumes-usebakingsets.md) + * [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) * [Streaming](probevolumes-streaming.md) + * [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) * [Sky Occlusion](probevolumes-skyocclusion.md) - * [Probe Volume settings and properties](probevolumes-settings.md) + * [Adaptive Probe Volume Inspector window reference](probevolumes-inspector-reference.md) + * [Adaptive Probe Volumes panel reference](probevolumes-lighting-panel-reference.md) + * [Probe Volumes Options Override reference](probevolumes-options-override-reference.md) + * [Probe Adjustment Volume component reference](probevolumes-adjustment-volume-component-reference.md) * [Ray tracing](ray-tracing.md) * [Set up ray tracing](Ray-Tracing-Getting-Started.md) * [Implement ray tracing with shader graph](SGNode-Raytracing-Quality.md) @@ -246,7 +252,13 @@ * [Depth of Field](Post-Processing-Depth-of-Field.md) * [Film Grain](Post-Processing-Film-Grain.md) * [Lens Distortion](Post-Processing-Lens-Distortion.md) - * [Lens Flare (SRP)](shared/lens-flare/lens-flare-component.md) + * [Lens flares](shared/lens-flare/lens-flare.md) + * [Choose a lens flare type](shared/lens-flare/choose-a-lens-flare-type.md) + * [Add lens flares](shared/lens-flare/lens-flare-component.md) + * [Add screen space lens flares](shared/lens-flare/Override-Screen-Space-Lens-Flare.md) + * [Lens Flare (SRP) reference](shared/lens-flare/lens-flare-reference.md) + * [Lens Flare (SRP) Data Asset reference](shared/lens-flare/lens-flare-asset.md) + * [Screen Space Lens Flare override reference](shared/lens-flare/reference-screen-space-lens-flare.md) * [Panini Projection](Post-Processing-Panini-Projection.md) * [Vignette](Post-Processing-Vignette.md) * [Motion effects](motion-effects.md) @@ -298,6 +310,10 @@ * [Lighting environment reference](reference-lighting-environment.md) * [Ambient Occlusion reference](reference-ambient-occlusion.md) * [IES Importer reference](IES-Importer.md) + * [Lens flares reference](shared/lens-flare/lens-flare-lens-flares-reference.md) + * [Lens Flare (SRP) reference](shared/lens-flare/lens-flare-reference.md) + * [Lens Flare (SRP) Data Asset reference](shared/lens-flare/lens-flare-asset.md) + * [Screen Space Lens Flare override reference](shared/lens-flare/reference-screen-space-lens-flare.md) * [Volumetric lighting reference](reference-volumetric-lighting.md) * [Screen Space Global Illumination](reference-screen-space-global-illumination.md) * [Screen Space Lens Flare](shared/lens-flare/reference-screen-space-lens-flare.md) @@ -388,6 +404,5
@@ * [Rendering and post-processing reference](reference-rendering-post-processing.md) * [Dynamic Resolution component reference](reference-dynamic-resolution.md) * [High Dynamic Range (HDR) tonemapping reference](reference-hdr-tonemapping.md) - * [Lens Flare (SRP) Data Asset reference](shared/lens-flare/lens-flare-asset.md) * [Custom Pass reference](custom-pass-reference.md) * [HDRP Glossary](Glossary.md) diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/frame-settings-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/frame-settings-reference.md index e970971fe07..a5c4e867357 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/frame-settings-reference.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/frame-settings-reference.md @@ -279,13 +279,13 @@ These settings control lighting features for your rendering components. Here you Probe Volume - Use Probe Volumes. + Use Adaptive Probe Volumes. Normalize Reflection Probes - Use the data in Probe Volumes to adjust lighting from Reflection Probes to match the local environment, which reduces the number of Reflection Probes you need. + Use the data in Adaptive Probe Volumes to adjust lighting from Reflection Probes to match the local environment, which reduces the number of Reflection Probes you need. Screen Space Shadows diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/how-hdrp-calculates-color-for-reflection-and-refraction.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/how-hdrp-calculates-color-for-reflection-and-refraction.md index 62dda628a33..434ba9b8cd5 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/how-hdrp-calculates-color-for-reflection-and-refraction.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/how-hdrp-calculates-color-for-reflection-and-refraction.md @@ -25,7 +25,7 @@ HDRP uses only the [specular color hierarchy](#specular) if the following happen HDRP works down the following hierarchy to find the diffuse color. -1. If you have a Probe Volume in your Scene, HDRP uses the lighting data from the Probe Volume. +1. If you have an Adaptive Probe Volume in your Scene, HDRP uses the lighting data from the Adaptive Probe Volume. 2. If there's a baked lightmap for this position, HDRP uses the lightmap texture. 3. If you have a baked [Light Probe](https://docs.unity3d.com/Manual/LightProbes.html) and the object is in the probe's Influence Volume, HDRP uses the lighting data from the Light Probe. 4. If you set a static sky texture in **Lighting** > **Environment** > **Static Lighting Sky**, HDRP uses the cubemap texture of the sky in the ambient probe. 
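The diffuse color hierarchy above is a strict first-match fallback. The following C# sketch restates it schematically; every type, field, and helper here is a hypothetical placeholder for illustration, not HDRP source.

```
using UnityEngine;

// Schematic only: a stand-in illustration of the diffuse fallback order,
// mirroring steps 1-4 above. All types and fields here are hypothetical.
struct SurfacePoint
{
    public bool hasAdaptiveProbeVolume;
    public bool hasBakedLightmap;
    public bool insideLightProbeInfluence;
    public Color apvSample, lightmapSample, lightProbeSample, staticSkySample;
}

static class DiffuseFallback
{
    public static Color Resolve(SurfacePoint p)
    {
        if (p.hasAdaptiveProbeVolume) return p.apvSample;         // 1. Adaptive Probe Volume data
        if (p.hasBakedLightmap) return p.lightmapSample;          // 2. baked lightmap texture
        if (p.insideLightProbeInfluence) return p.lightProbeSample; // 3. baked Light Probe
        return p.staticSkySample;                                 // 4. static sky ambient probe
    }
}
```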
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/lighting.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/lighting.md index 9f6351a3889..7326d6f4e94 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/lighting.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/lighting.md @@ -10,7 +10,7 @@ Learn how to create and use lights in the High Definition Render Pipeline (HDRP) |[Control exposure](Override-Exposure.md)|Calculate the amount of light to use in different lighting situations.| |[Shadows](shadows.md)|Create and control shadows to add depth and realism to a Scene.| |[Reflection](Reflection-in-HDRP.md)|Create reflective materials and control how reflection behaves in a scene.| -|[Probe Volumes](probevolumes.md)|Automatically place [Light Probes](https://docs.unity3d.com/Manual/LightProbes.html) in a scene to create high-quality lighting.| +|[Adaptive Probe Volumes (APV)](probevolumes.md)|Automatically place [Light Probes](https://docs.unity3d.com/Manual/LightProbes.html) in a scene to create high-quality lighting.| |[Ray tracing](ray-tracing.md)|Use ray tracing to create realistic lighting and access other data in a scene that doesn't appear on screen.| |[Global Illumination](global-illumination.md)|Create realistic diffuse lighting.| |[Volumetric lighting](lighting-volumetric.md)|Use the HDRP volume system to apply screen space lighting effects.| diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/path-tracing-limitations.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/path-tracing-limitations.md index d2c296b7128..fa57b017aed 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/path-tracing-limitations.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/path-tracing-limitations.md @@ -15,7 +15,6 @@ HDRP path tracing in Unity currently has the following limitations: - Shader Graphs that use [Custom Interpolators](https://docs.unity3d.com/Packages/com.unity.shadergraph@latest/index.html?subfolder=/manual/Custom-Interpolators.html). - Local Volumetric Fog. - Tessellation. - - Tube and Disc-shaped Area Lights. - Translucent Opaque Materials. - Several of HDRP's Materials. This includes Eye, Hair, and Decal. - Per-pixel displacement (parallax occlusion mapping, height map, depth offset). @@ -34,4 +33,4 @@ When building your custom shaders using shader graph, some nodes are incompatibl ### Unsupported features of ray tracing -For information about unsupported features of ray tracing in general, see [Ray tracing limitations](Ray-Tracing-Getting-Started.md#limitations). \ No newline at end of file +For information about unsupported features of ray tracing in general, see [Ray tracing limitations](Ray-Tracing-Getting-Started.md#limitations). 
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-adjustment-volume-component-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-adjustment-volume-component-reference.md new file mode 100644 index 00000000000..61edbcebc29 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-adjustment-volume-component-reference.md @@ -0,0 +1,84 @@ +# Probe Adjustment Volume component reference + +Select a [Probe Adjustment Volume Component](probevolumes-fixissues.md#add-a-probe-adjustment-volume-component) and open the Inspector to view its properties. + +Refer to the following for more information about using the Probe Adjustment Volume component: + +- [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) +- [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md) + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+| **Property** | **Description** |
+|-|-|
+| **Influence Volume** | |
+| **Shape** | Set the shape of the Adjustment Volume to either **Box** or **Sphere**. |
+| **Size** | Set the size of the Adjustment Volume. This property only appears if you set **Shape** to **Box**. |
+| **Radius** | Set the radius of the Adjustment Volume. This property only appears if you set **Shape** to **Sphere**. |
+| **Mode** | Select how to override probes inside the Adjustment Volume.<br/>• **Invalidate Probes**: Mark selected probes as invalid. Refer to How light probe validity works for more information.<br/>• **Override Validity Threshold**: Override the threshold HDRP uses to determine whether Light Probes are marked as invalid. Refer to Adjust Dilation for more information.<br/>• **Apply Virtual Offset**: Change the position Light Probes use when sampling the lighting in the scene during baking. Refer to Adjust Virtual Offset for more information.<br/>• **Override Virtual Offset Settings**: Override the biases HDRP uses during baking to determine when Light Probes use Virtual Offset, and calculate sampling positions. Refer to Adjust Virtual Offset for more information.<br/>• **Intensity Scale**: Override the intensity of probes to brighten or darken affected areas.<br/>• **Override Sky Direction**: Override the direction used for sampling the ambient probe when using Sky Occlusion.<br/>• **Override Sample Count**: Override the sample count used to compute Lighting and Sky Occlusion. |
+| **Dilation Validity Threshold** | Override the ratio of backfaces a probe samples before HDRP considers it invalid. This option only appears if you set **Mode** to **Override Validity Threshold**, and you enable Additional Properties. |
+| **Virtual Offset Rotation** | Set the rotation angle for the Virtual Offset vector on all probes in the Adjustment Volume. This option only appears if you set **Mode** to **Apply Virtual Offset**. |
+| **Virtual Offset Distance** | Set how far HDRP pushes probes along the Virtual Offset Rotation vector. This option only appears if you set **Mode** to **Apply Virtual Offset**. |
+| **Geometry Bias** | Set how far HDRP pushes a probe's capture point out of geometry after one of its sampling rays hits geometry. This option only appears if you set **Mode** to **Override Virtual Offset Settings**. |
+| **Ray Origin Bias** | Override the distance between a probe's center and the point HDRP uses to determine the origin of that probe's sampling ray. This can be used to push rays beyond nearby geometry if the geometry causes issues. This option only appears if you set **Mode** to **Override Virtual Offset Settings**. |
+| **Intensity Scale** | Change the brightness of all probes covered by the Probe Volumes Adjustment Volume component. Use this sparingly, because changing the intensity of probe data can lead to inconsistencies in the lighting. This option only appears if you set **Mode** to **Intensity Scale**. |
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-bakedifferentlightingsetups.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-bakedifferentlightingsetups.md new file mode 100644 index 00000000000..37121fea5cc --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-bakedifferentlightingsetups.md @@ -0,0 +1,91 @@ +# Bake different lighting setups with Lighting Scenarios + +A Lighting Scenario contains the baked lighting data for a scene or Baking Set. You can bake different lighting setups into different Lighting Scenario assets, and change which one the High Definition Render Pipeline (HDRP) uses at runtime. + +For example, you can create one Lighting Scenario with the lights on, and another Lighting Scenario with the lights off. At runtime, you can enable the second Lighting Scenario when the player turns the lights off. + +## Enable Lighting Scenarios + +To use Lighting Scenarios, go to the active [HDRP Asset](HDRP-Asset.md) and enable **Lighting** > **Light Probe Lighting** > **Lighting Scenarios**. + +## Add a Lighting Scenario + +To create a new Lighting Scenario so you can store baking results inside, do the following: + +1. Open the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md) in the Lighting window. +2. In the **Lighting Scenarios** section, select the **Add** (**+**) button to add a Lighting Scenario. + +## Bake into a Lighting Scenario + +To bake into a Lighting Scenario, follow these steps: + +1. In the **Lighting Scenarios** section, select a Lighting Scenario to make it active. +2. Select **Generate Lighting**. HDRP stores the baking results in the active Lighting Scenario. + +You can set which Lighting Scenario HDRP uses at runtime using the [ProbeReferenceVolume API](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@17.0/api/UnityEngine.Rendering.ProbeReferenceVolume.html). + +If you change the active Lighting Scenarios at runtime, HDRP changes only the indirect lighting data in the Light Probes. You might still need to use scripts to move geometry, modify lights or change direct lighting. + +## Blend between Lighting Scenarios + +To enable blending between Lighting Scenarios, go to the active [HDRP Asset](HDRP-Asset.md) and enable **Light Probe Lighting** > **Scenario Blending**. + +You can blend between Lighting Scenarios at runtime using the [BlendLightingScenario API](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@17.0/api/UnityEngine.Rendering.ProbeReferenceVolume.html#UnityEngine_Rendering_ProbeReferenceVolume_BlendLightingScenario_System_String_System_Single_). + +For example, the following script does the following: + +1. Sets `scenario01` as the active Lighting Scenario. +2. Sets up the number of cells to blend per frame, which can be useful for optimization purposes. +3. Updates the Adaptive Probe Volume blending factor every frame to blend between `scenario01` and `scenario02`. 
+ +``` +using System.Collections; +using System.Collections.Generic; +using UnityEngine; + +public class BlendLightingScenarios : MonoBehaviour +{ + UnityEngine.Rendering.ProbeReferenceVolume probeRefVolume; + public string scenario01 = "Scenario01Name"; + public string scenario02 = "Scenario02Name"; + [Range(0, 1)] public float blendingFactor = 0.5f; + [Min(1)] public int numberOfCellsBlendedPerFrame = 10; + + void Start() + { + probeRefVolume = UnityEngine.Rendering.ProbeReferenceVolume.instance; + probeRefVolume.lightingScenario = scenario01; + probeRefVolume.numberOfCellsBlendedPerFrame = numberOfCellsBlendedPerFrame; + } + + void Update() + { + probeRefVolume.BlendLightingScenario(scenario02, blendingFactor); + } +} +``` + +### Preview blending between Lighting Scenarios + +You can use the [Rendering Debugger](rendering-debugger-window-reference.md#probe-volume-panel) to preview transitions between Lighting Scenarios. Follow these steps: + +1. Go to **Window** > **Analysis** > **Rendering Debugger** to open the Rendering Debugger. +2. Set **Scenario Blend Target** to a Lighting Scenario. +3. Use **Scenario Blending Factor** to check the effect of blending between the Lighting Scenarios in the Scene view. + +### Keep Light Probes the same in different Lighting Scenarios + +If you move static geometry between bakes, Light Probe positions might be different. This means you can't blend between Lighting Scenarios, because the number of Light Probes and their positions must be the same in each Lighting Scenario you blend between. + +To avoid this, you can prevent HDRP recomputing probe positions when you bake. Follow these steps: + +1. Bake one Lighting Scenario. +2. Set another Lighting Scenario as the active Lighting Scenario. +3. Change your scene lighting or geometry. +4. In the **Probe Placement** section, set **Probe Positions** to **Don't Recalculate**. +5. Select **Generate Lighting** to recompute only the indirect lighting, and skip the probe placement computations. + +## Additional resources + +- [ProbeReferenceVolume API](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@17.0/api/UnityEngine.Rendering.ProbeReferenceVolume.html) +- [Bake multiple scenes together with Baking Sets](probevolumes-usebakingsets.md) diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-changedensity.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-changedensity.md new file mode 100644 index 00000000000..dbab931799c --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-changedensity.md @@ -0,0 +1,39 @@ +# Configure the size and density of Adaptive Probe Volumes + +Refer to [Understanding Adaptive Probe Volumes](probevolumes-concept.md) for more information about how Adaptive Probe Volumes work. + +## Change the size + +To ensure HDRP considers static geometry from all loaded scenes when it places Light Probes, set **Mode** to **Global** in the Adaptive Probe Volume Inspector window so the Adaptive Probe Volume covers the entire scene. + +You can also do one of the following in the Inspector of an Adaptive Probe Volume, to set the size of an Adaptive Probe Volume: + +- Set **Mode** to **Local** and set the size manually. +- Set **Mode** to **Local** and select **Fit to all Scenes**, **Fit to Scene**, or **Fit to Selection**. Refer to [Adaptive Probe Volume Inspector reference](probevolumes-inspector-reference.md) for more information. 
+- To exclude certain GameObjects when HDRP calculates Light Probe positions, enable **Override Renderer Filters**. For more information about Layers, refer to [Layers and Layer Masks](https://docs.unity3d.com/Manual/layers-and-layermasks.html). + +You can use multiple Adaptive Probe Volumes in a single scene, and they can overlap. However in a Baking Set, HDRP creates only a single Light Probe structure. + +## Adjust Light Probe density + +You might need to do the following in your project: + +- Increase Light Probe density in highly detailed scenes or areas such as interiors, to get a good lighting result. +- Decrease Light Probe density in empty areas, to avoid those areas using disk space and increasing bake time unnecessarily. + +In the [Inspector for an Adaptive Probe Volume](probevolumes-inspector-reference.md), enable and adjust **Override Probe Spacing** to set a minimum and maximum density for the Light Probes in the Adaptive Probe Volume. + +The values can't exceed the **Min Probe Spacing** or **Max Probe Spacing** values in the **Probe Placement** section of the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md), so you might need to adjust these values first. + +You can also add local Adaptive Probe Volumes in different areas with different **Override Probe Spacing** values, to control Light Probe density more granularly. For example, in empty areas, add a local Adaptive Probe Volume with a higher **Override Probe Spacing** minimum value, to make sure Light Probes have a lower density in those areas. + +If you increase Light Probe density, you might increase bake time and how much disk space your Adaptive Probe Volume uses. + +### Decrease Light Probe density for terrain + +Because terrain is detailed but less important than your main scenery or characters, you can do the following: + +1. Put terrain on its own [Layer](https://docs.unity3d.com/Manual/layers-and-layermasks.html). +2. Surround the terrain with an Adaptive Probe Volume. +3. In the Inspector for the Adaptive Probe Volume, enable **Override Renderer Filters**, then in **Layer Mask** select only your terrain Layer. +4. To adjust Light Probe density to capture more or less lighting detail, enable **Override Probe Spacing** and adjust the values. diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-concept.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-concept.md index 281fc7118c0..d25c7974f0f 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-concept.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-concept.md @@ -1,86 +1,88 @@ -# Understand Probe Volumes +# Understanding Adaptive Probe Volumes -A Probe Volume is a group of [Light Probes](https://docs.unity3d.com/Manual/LightProbes.html) Unity places automatically based on the geometry density in your Scene, to create baked indirect lighting. Using Probe Volumes means you don't need to spend time manually placing and configuring Light Probes. +An Adaptive Probe Volume is a group of [Light Probes](https://docs.unity3d.com/Manual/LightProbes.html) that Unity places automatically based on the geometry density in your Scene, to create baked indirect lighting. You can use Adaptive Probe Volumes instead of manually placing and configuring Light Probes. 
## Advantages and limitations -| **Feature** | **Original Light Probes** | **Probe Volumes** | +| **Feature** | **Light Probe Groups** | **Adaptive Probe Volumes** | |---|---|---| -| Selection of surrounding probes | Per object | Per pixel | +| Selection of surrounding probes | Per GameObject | Per pixel | | Optimize memory use with streaming | No | Yes | | Place probes automatically | No | Yes | | Blend between different bakes | No | Yes | | Place probes manually | Yes | No | -Probe Volumes have the following advantages: +Adaptive Probe Volumes have the following advantages: -- Unity selects surrounding probes per-pixel rather than per-object, which means HDRP lights objects more accurately. +- Unity samples surrounding probes per-pixel rather than per GameObject. This sampling approach results in better lighting consistency, and fewer seams between adjacent GameObjects. - If you use [volumetric fog](create-a-local-fog-effect.md), the per-pixel probe selection provides more accurate lighting for the variations in a fog mass. -- You can adjust Light Probe layouts across a Scene, for example using a denser set of Light Probes in an interior area with more detailed lighting or geometry. See [Display and adjust Probe Volumes](probevolumes-showandadjust.md) for more information. -- Probe Volumes work well with multiple scenes simultaneously. See [Baking Sets](probevolumes-concept.md#baking-sets) -- Because Probe Volumes can cover a whole scene, screen space effects can fall back to Light Probes to get lighting data from objects that are off-screen or occluded. See [Screen Space Global Illumination](Override-Screen-Space-GI.md) for more information. -- Unity can use the data in Probe Volumes to adjust lighting from Reflection Probes to match the local environment, which reduces the number of Reflection Probes you need. See [Frame Settings properties](frame-settings-reference.md). -- Probe Volumes include [streaming](probevolumes-streaming.md) functionality to support large open worlds. -- Probe Volumes support [Sky Occlusion](probevolumes-skyocclusion.md) for dynamic sky relighting at runtime. +- You can adjust Light Probe layouts across a scene, for example using a denser set of Light Probes in an interior area with more detailed lighting or geometry. Refer to [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md) for more information. +- Adaptive Probe Volumes work well if you [work with multiple scenes](https://docs.unity3d.com/Manual/MultiSceneEditing.html). Refer to [Baking Sets](probevolumes-concept.md#baking-sets) for more information. +- Because Adaptive Probe Volumes can cover a whole scene, screen space effects can fall back to Light Probes to get lighting data from GameObjects that are off-screen or occluded. Refer to [Screen Space Global Illumination](Override-Screen-Space-GI.md) for more information. +- Unity can use the data in Adaptive Probe Volumes to adjust lighting from Reflection Probes so it more closely matches the local environment, which reduces the number of Reflection Probes you need. Refer to [Frame Settings properties](frame-settings-reference.md). +- Adaptive Probe Volumes include [streaming](probevolumes-streaming.md) functionality to support large open worlds. +- Adaptive Probe Volumes support [Sky Occlusion](probevolumes-skyocclusion.md) for dynamic sky relighting at runtime. ![](Images/probevolumes-per-pixel.png)
-The left scene uses original Light Probes, where the per-object lighting means each part of the car is uniformly lit and the lighting does not transition well from light to dark. The right scene uses Probe Volumes. This image uses the ArchVizPRO Photostudio HDRP asset from the Unity Asset Store. +The car model is made up of separate GameObjects. The left scene uses Light Probe Groups, which use per-object lighting, so each part of the car samples a single blended probe value. The right scene uses Adaptive Probe Volumes, which use per-pixel lighting, so each part of the car samples its nearest probes. This image uses the ArchVizPRO Photostudio HDRP asset from the Unity Asset Store. ![](Images/probevolumes-reflection-probe-normalization.png)
In the left scene, Reflection Probe Normalization is disabled. In the right scene, Reflection Probe Normalization is enabled, and there's less specular light leaking on the kitchen cabinet. This image uses the ArchVizPRO Interior Vol.5 HDRP asset from the Unity Asset Store. -Probe Volumes have the following limitations: +Adaptive Probe Volumes have the following limitations: -- You can't adjust the locations of Light Probes inside a Probe Volume. You can use settings and overrides to try to fix visible artifacts, but it might not be possible to make sure Light Probes follow walls or are at the boundary between different lighting areas. See [Fix issues with Probe Volumes](probevolumes-fixissues.md) for more information. -- You can't convert [original Light Probes](https://docs.unity3d.com/Manual/LightProbes.html) into a Probe Volume. +- You can't adjust the locations of Light Probes inside an Adaptive Probe Volume. You can use settings and overrides to try to fix visible artifacts, but it might not be possible to make sure Light Probes follow walls or are at the exact boundary between different lighting areas. Refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) for more information. +- You can't convert [Light Probe Groups](https://docs.unity3d.com/Manual/LightProbes.html) into an Adaptive Probe Volume. -## How Probe Volumes work +## How Adaptive Probe Volumes work -When you add a Probe Volume to a Scene, HDRP automatically distributes rectangular groups ('bricks') of Light Probes inside the volume. +HDRP automatically fills an Adaptive Probe Volume with a 3D structure of 'bricks'. Each brick contains 64 Light Probes, arranged in a 4 × 4 × 4 grid. - -### Brick size and Light Probe density +HDRP uses bricks with different sizes to match the amount of geometry in different areas of your scene. For example, in areas with more geometry, HDRP uses small bricks with a short distance between Light Probes. The Light Probes capture lighting at a higher resolution, so lighting is more accurate. -HDRP structures a brick in the following way: +The default Light Probe spacing is 1, 3, 9, or 27 m. -- A brick contains 64 Light Probes, arranged in a 4 × 4 × 4 grid. -- By default, the distance between the Light Probes is 1, 3, 9 or 27 meters. +![](Images/probevolumes-debug-displayprobebricks1.PNG)
+In this screenshot from the Rendering Debugger, the small purple bricks contain Light Probes spaced 1 meter apart, to capture data from high-geometry areas. The large blue bricks contain Light Probes spaced 3 meters apart, to capture data from areas with less geometry. -Because the number of Light Probes in a brick is always the same, the larger the distance between the probes, the larger the brick. +Each pixel of a GameObject samples lighting data from the eight closest Light Probes around it. -### How HDRP distributes bricks +You can do the following: -In areas of your Scene with more geometry, HDRP uses bricks with a short distance between Light Probes. The short distance means Light probes are closer together, and lighting data is higher resolution. +- Use the Rendering Debugger to visualize the layout of bricks and Light Probes. Refer to [Display Adaptive Probe Volumes](probevolumes-showandadjust.md). +- [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md). +- [Add a Volume to your scene](probevolumes-fixissues.md#volume) to adjust which Light Probes GameObjects sample. -In areas with less geometry, HDRP uses bricks with a large distance between Light Probes. The large distance means Light Probes are farther apart, and lighting data is lower resolution. + +## Baking Sets -You can use the Rendering Debugger to visualize and configure the layout of bricks and Light Probes. See [Display and adjust Probe Volumes](probevolumes-showandadjust.md). +To store lighting from a scene in an Adaptive Probe Volume, the scene must be part of a Baking Set. -![](Images/probevolumes-debug-displayprobebricks2.PNG)
-In this screenshot from the Rendering Debugger, the small red bricks contain Light Probes spaced 0.3 meters apart, to capture data from high-geometry areas. The large blue bricks contain Light Probes spaced 3 meters apart, to capture data from areas with less geometry. This image uses the ArchVizPRO Interior Vol.8 HDRP asset from the Unity Asset Store.
+A Baking Set contains the following:
-Each pixel of an object samples lighting data from the eight closest Light Probes around it. See [Add a Volume to your Scene](probevolumes-fixissues.md#volume) for more information on adjusting which Light Probes objects sample.
+- One or more scenes, which optionally include Adaptive Probe Volumes.
+- A single collection of settings.
-
-## Baking sets
+By default, HDRP uses **Single Scene** mode, and places each scene in its own Baking Set automatically. However, only one Baking Set can be active at any time, so if you [work with multiple scenes](https://docs.unity3d.com/Manual/MultiSceneEditing.html), you must add these scenes to a single Baking Set if you want to bake them together. Refer to [Bake multiple scenes together with Baking Sets](probevolumes-usebakingsets.md) for more information.
-Each Scene that uses Probe Volumes must be part of a Baking Set.
-A Baking Set contains the following:
+
+### Lighting Scenarios
-- One or more Scenes, which optionally include Probe Volumes.
-- A single set of settings.
+A Lighting Scenario asset contains the baked lighting data for a scene or Baking Set. You can bake different lighting setups into different Lighting Scenario assets, and change which one HDRP uses at runtime, or blend between them.
-By default, HDRP will use **Single Scene** mode, and place each scene in its own Baking Set automatically. However, only one Baking Set can be active at any time, so if you [work with multiple scenes](https://docs.unity3d.com/Manual/MultiSceneEditing.html), you must add any scenes you'll load to the same Baking Set and bake them all altogether.
+Refer to [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) for more information.
-You can only add each scene to a single Baking Set.
+#### How Lighting Scenarios store data
-HDRP will use the settings from the Baking Set when baking the lighting, and serialize the result on disk for runtime usage. Additionally, HDRP supports Lighting Scenarios, which means you can bake several variations of the lighting data for a Baking Set and switch between them at runtime.
+Adaptive Probe Volumes split Lighting Scenario data into two parts, to avoid duplicating data:
+- The shared data, which contains mainly the scene subdivision information and probe placement.
+- The per-scenario data, which contains the probe lighting information.
-See [Use Baking Sets](probevolumes-use.md#use-baking-sets).
+HDRP can't share the data or blend between Lighting Scenarios if you move geometry between bakes, because the Light Probe positions might change. Refer to [Keep Light Probes the same in different Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md#keep-light-probes-the-same-in-different-lighting-scenarios) for more information.
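Because switching the active Lighting Scenario happens through the `ProbeReferenceVolume` scripting API listed in [Use Adaptive Probe Volumes](probevolumes-use.md), a minimal sketch may help; the scenario name below is a placeholder for one you baked in the Baking Set:

```csharp
using UnityEngine;
using UnityEngine.Rendering;

public class SwitchLightingScenario : MonoBehaviour
{
    // Placeholder: the name of a Lighting Scenario baked in the active Baking Set.
    public string scenarioName = "Night";

    void Start()
    {
        // Selects which baked Lighting Scenario HDRP samples at runtime.
        ProbeReferenceVolume.instance.lightingScenario = scenarioName;
    }
}
```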
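As noted earlier on this page, each pixel samples the eight closest Light Probes, and a brick's probes sit on a regular grid: a 4 × 4 × 4 brick spans three probe spacings per axis, so a brick with 3 m spacing is 9 m across. The following C# fragment is a worked illustration of that eight-probe (trilinear) blend, not HDRP's shader code; the function name and `spacing` parameter are assumptions for the example.

```csharp
using UnityEngine;

static class ProbeSamplingSketch
{
    // Illustrative only: computes the trilinear weights a shaded point would
    // use to blend the 8 Light Probes at the corners of its grid cell.
    // 'spacing' is the local probe spacing (1, 3, 9 or 27 m by default).
    public static float[] TrilinearWeights(Vector3 position, float spacing)
    {
        // Fractional position inside the cell, in [0,1) per axis.
        Vector3 t = new Vector3(
            Mathf.Repeat(position.x / spacing, 1f),
            Mathf.Repeat(position.y / spacing, 1f),
            Mathf.Repeat(position.z / spacing, 1f));

        var weights = new float[8];
        for (int corner = 0; corner < 8; corner++)
        {
            float wx = (corner & 1) != 0 ? t.x : 1f - t.x;
            float wy = (corner & 2) != 0 ? t.y : 1f - t.y;
            float wz = (corner & 4) != 0 ? t.z : 1f - t.z;
            weights[corner] = wx * wy * wz; // the 8 weights always sum to 1
        }
        return weights;
    }
}
```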
## Additional resources * [Light Probes](https://docs.unity3d.com/Manual/LightProbes.html) -* [Local Volumetric Fog](create-a-local-fog-effect.md) -* [Work with multiple Scenes in Unity](https://docs.unity3d.com/Documentation/Manual/MultiSceneEditing.html) +* [Create a local fog effect](create-a-local-fog-effect.md) +* [Work with multiple scenes in Unity](https://docs.unity3d.com/Documentation/Manual/MultiSceneEditing.html) diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-fixissues.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-fixissues.md index 2808be8d2a2..e37fe2625f9 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-fixissues.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-fixissues.md @@ -1,17 +1,17 @@ -# Fix issues with Probe Volumes +# Fix issues with Adaptive Probe Volumes -You can adjust settings or use Volume overrides to fix artefacts from Probe Volumes. +Adjust settings or use Volume overrides to fix artefacts from Adaptive Probe Volumes. -## Fix dark blotches or streaks +## How Light Probe validity works -Dark blotches and streaks are caused by Light Probes inside geometry ('invalid' probes). HDRP marks a Light Probe as invalid when the probe fires sampling rays to capture surrounding light data, but the rays hits the unlit backfaces inside geometry. +Light Probes inside geometry are called invalid probes. The High Definition Render Pipeline (HDRP) marks a Light Probe as invalid when the probe fires sampling rays to capture surrounding light data, but the rays hit the unlit backfaces inside geometry. -You can see which Light Probes are invalid using the [Rendering Debugger](rendering-debugger-window-reference.md#ProbeVolume). +HDRP uses the following techniques to minimise incorrect lighting data from Light Probes: -To minimise dark blotches and streaks, HDRP uses the following techniques: +- [Virtual Offset](#virtualoffset) tries to make invalid Light Probes valid, by moving their capture points so they're outside any [colliders](https://docs.unity3d.com/Documentation/Manual/CollidersOverview.html). +- [Dilation](#dilation) detects Light Probes that remain invalid after Virtual Offset, and gives them data from valid Light Probes nearby. -- [Virtual Offset](#virtualoffset), to try to move the capture point of an invalid Light Probe so it's outside any [colliders](https://docs.unity3d.com/Documentation/Manual/CollidersOverview.html). -- [Dilation](#dilation), to detect Light Probes that remain invalid after Virtual Offset, and give them data from valid Light Probes nearby. +You can check which Light Probes are invalid using the [Rendering Debugger](rendering-debugger-window-reference.md#ProbeVolume). ![](Images/probevolumes-virtualoffsetvsnot.png)
In the Scene on the left, Virtual Offset isn't active and dark bands are visible. In the Scene on the right, Virtual Offset is active.
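To make the Virtual Offset idea concrete, here is a simplified, collider-based sketch. This is an illustration under stated assumptions, not the baker's implementation: during baking HDRP samples mesh backfaces, while this sketch uses runtime physics queries, and all names are invented for the example.

```csharp
using UnityEngine;

static class VirtualOffsetSketch
{
    // Illustrative only: push a probe's capture point along a search ray
    // until it clears the geometry the probe is stuck inside, plus a bias.
    public static Vector3 OffsetCapturePoint(Vector3 probePos, Vector3 searchDir,
                                             float searchDistance, float geometryBias)
    {
        // Let rays that start inside geometry register a hit on the backface.
        bool oldSetting = Physics.queriesHitBackfaces;
        Physics.queriesHitBackfaces = true;

        Vector3 capturePoint = probePos;
        if (Physics.Raycast(probePos, searchDir, out RaycastHit hit, searchDistance))
        {
            // Move the capture point just past the surface the probe was inside.
            capturePoint = probePos + searchDir.normalized * (hit.distance + geometryBias);
        }

        Physics.queriesHitBackfaces = oldSetting;
        return capturePoint;
    }
}
```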
@@ -19,10 +19,14 @@ In the Scene on the left, Virtual Offset isn't active and dark bands are visible ![](Images/probevolumes-dilationvsnot.png)
In the Scene on the left, Dilation isn't active and some areas are too dark. In the Scene on the right, Dilation is active.
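Conceptually, a probe becomes invalid when too many of its sampling rays hit backfaces, and Dilation then rebuilds its data from nearby valid probes, weighting closer probes more heavily. The sketch below illustrates the validity test and both weighting modes (linear and squared distance, matching the **Squared Distance Weighting** option in the Baking Set settings); it's an illustration with invented names, not HDRP's compute shader:

```csharp
using UnityEngine;

static class DilationSketch
{
    // Illustrative only: a probe whose backface-hit ratio exceeds the
    // user-set Validity Threshold is treated as invalid.
    public static bool IsProbeInvalid(int backfaceHits, int totalRays, float validityThreshold)
    {
        float backfaceRatio = totalRays > 0 ? (float)backfaceHits / totalRays : 0f;
        return backfaceRatio > validityThreshold;
    }

    // Illustrative only: blend valid neighbour values into an invalid probe,
    // weighting each neighbour by inverse distance, or inverse squared
    // distance, which favours closer probes more strongly.
    public static Vector3 Dilate(Vector3 invalidProbePos, Vector3[] neighbourPositions,
                                 Vector3[] neighbourValues, bool squaredDistanceWeighting)
    {
        Vector3 sum = Vector3.zero;
        float weightSum = 0f;
        for (int i = 0; i < neighbourPositions.Length; i++)
        {
            float d = Mathf.Max(Vector3.Distance(invalidProbePos, neighbourPositions[i]), 1e-4f);
            float w = 1f / (squaredDistanceWeighting ? d * d : d);
            sum += neighbourValues[i] * w;
            weightSum += w;
        }
        return weightSum > 0f ? sum / weightSum : Vector3.zero;
    }
}
```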
+## Fix dark blotches or streaks
+
### Adjust Virtual Offset
-You can configure Virtual Offset in the [Baking Set properties](probevolumes-settings.md#pv-tab). You can adjust the following:
+You can configure **Virtual Offset Settings** in the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md) in the Lighting window. This changes how HDRP calculates the validity of Light Probes.
+
+You can adjust the following:
- The length of the sampling ray Unity uses to find a valid capture point.
- How far Unity moves a Light Probe's capture position to avoid geometry.
@@ -34,14 +38,16 @@ You can also disable Virtual Offset for a Baking Set. Virtual Offset only affect
### Adjust Dilation
-You can configure Dilation in the [Baking Set properties](probevolumes-settings.md#pv-tab). You can adjust the following:
+You can configure **Probe Dilation Settings** in the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md) in the Lighting window. This changes how HDRP calculates the validity of Light Probes, and how invalid Light Probes use lighting data from nearby valid Light Probes.
+
+You can adjust the following:
- The percentage of backfaces a Light Probe can sample before HDRP considers that probe invalid.
- How far away from the invalid probe Unity searches for valid probes to contribute lighting data.
- How many iterations of Dilation HDRP does during the bake.
- How to weight the data from valid probes based on their spatial relationship with the invalid probe.
-[How you adjust Light Probe density](probevolumes-showandadjust.md#adjust-light-probe-density) affects the final results, because HDRP uses the settings as a multiplier to calculate the distance between probes.
+[How you adjust Light Probe density](probevolumes-changedensity.md) affects the final results, because HDRP uses the settings as a multiplier to calculate the distance between probes.
You can also disable Dilation for a Baking Set. Dilation only affects baking time, so disabling Dilation doesn't affect runtime performance.
@@ -53,66 +59,70 @@ Light leaks are areas that are too light or dark, often in the corners of a wall
A light leak.
-Light leaks are often caused when HDRP gets lighting data from a Light Probe that should be occluded, for example because it's on the other side of the wall. Probe Volumes use regular grids of Light Probes, so Light Probes may not follow walls or be at the boundary between different lighting areas.
+Light leaks often occur when geometry receives light from a Light Probe that isn't visible to the geometry, for example because the Light Probe is on the other side of a wall. Adaptive Probe Volumes use regular grids of Light Probes, so Light Probes might not follow walls or be at the boundary between different lighting areas.
To fix light leaks, you can do the following:
- [Create thicker walls](#thickerwalls).
-- [Add a Volume to your Scene](#volume).
+- [Add a Volume to your scene](#volume).
- [Adjust Baking Set properties](#probevolumesettings).
- [Use a Probe Adjustment Volume](#probevolumeadjustment).
### Create thicker walls
-Adjust walls so their width is closer to the distance between probes in the local [brick](probevolumes-concept.md#brick-size-and-light-probe-density).
+Adjust walls so their width is closer to the distance between probes in the local [brick](probevolumes-concept.md#how-probe-volumes-work).
-### Add a Volume to your Scene
+### Add a Volume to your scene
-You can add a [Volume](understand-volumes.md) with a **Probe Volume Options** override to adjust which Light Probes object pixels sample at runtime.
+You can add a [Volume](scene-setup.md), then add a **Probe Volumes Options** override to the Volume. This adjusts the position that GameObjects use to sample the Light Probes.
-1. Add a [Volume](understand-volumes.md) to your Scene and make sure its area overlaps the camera position.
-2. Select **Add Override**, then select **Lighting** > **Probe Volume Options**.
-3. Enable **Normal Bias**, then adjust the value to move the position that object pixels use to sample the Light Probes, along the pixel's surface normal.
-4. Enable **View Bias**, then adjust the value to move the position that object pixels use to sample the Light Probes, towards the camera.
-4. Disable and enable **Leak Reduction Mode** to see if it improves light leaks.
+1. Add a [Volume](scene-setup.md) to your scene and make sure its area overlaps the camera position.
+2. Select **Add Override**, then select **Lighting** > **Probe Volumes Options**.
+3. Enable **Normal Bias**, then adjust the value to move the position that GameObject pixels use to sample the Light Probes, along the pixel's surface normal.
+4. Enable **View Bias**, then adjust the value to move the position that GameObject pixels use to sample the Light Probes, towards the camera.
+5. Disable and enable **Leak Reduction Mode** to check if it improves light leaks.
-Because HDRP calculates the effects of a Volume based on Camera position, the effect may change as the Camera moves.
+Volumes only affect the scene if the camera is near or inside the volume. Refer to [Understand volumes](understand-volumes.md) for more information.
-See [Probe Volume settings and properties](probevolumes-settings.md#probe-volumes-options-override) for more information on **Probe Volume Options** settings.
+Refer to [Probe Volumes Options Override reference](probevolumes-options-override-reference.md) for more information on **Probe Volumes Options** settings.
### Adjust Baking Set properties
-If adding a Volume doesn't work, use the [Baking Set properties](probevolumes-settings.md#pv-tab) to adjust Virtual Offset and Dilation settings.
+If adding a Volume doesn't work, use the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md) in the Lighting window to adjust Virtual Offset and Dilation settings.
-1. In **Dilation Settings**, reduce **Dilation Distance**.
+1. In **Probe Dilation Settings**, reduce **Search Radius**. This can help in situations where invalid Light Probes are receiving lighting data from more distant Light Probes. However, a lower **Search Radius** might cause light leaks.
2. In **Virtual Offset Settings**, reduce **Search Distance Multiplier** and **Ray Origin Bias**.
-3. If there are light leaks in multiple locations, adjust **Min Distance Between Probes** and **Max Distance Between Probes** to increase the density of Light Probes.
-4. Click **Generate Lighting** to rebake the scene using the new settings.
+3. If there are light leaks in multiple locations, adjust **Min Probe Spacing** and **Max Probe Spacing** to increase the density of Light Probes.
+4. Select **Generate Lighting** to rebake the scene using the new settings.
Note: Don't use very low values for the settings, or Dilation and Virtual Offset might not work.
-### Add a Probe Adjustment Volume Component
+### Add a Probe Adjustment Volume component
-Use a Probe Adjustment Volume Component to adjust various baking settings for probes covered by the volume.
-They can be used to adjust the sample counts, or fix a light leak trough Virtual Offset or by forcing dilation.
+Use a Probe Adjustment Volume component to make Light Probes invalid in a small area. This triggers Dilation during baking, and improves the results of **Leak Reduction Mode** at runtime.
-1. In the Probe Volume Inspector, select **Add Component**, then select **Light** > **Probe Adjustment Volume**.
-2. Set the **Size** so the **Probe Adjustment Volume** area overlaps the Light Probes you want to adjust.
+1. In the Adaptive Probe Volume Inspector, select **Add Component**, then select **Light** > **Probe Adjustment Volume**.
+2. Set the **Size** so the **Probe Adjustment Volume** area overlaps the Light Probes causing light leaks.
+3. Set **Probe Volume Overrides** > **Mode** to **Invalidate Probes**, to invalidate the Light Probes in the Volume.
+4. If you have a [Volume with a Probe Volumes Options override](#volume), enable **Leak Reduction Mode**.
+5. In **Probe Volume Settings**, select **Generate Lighting** to rebake the scene using the new settings.
Clicking the 'Update Probes' button inside the **Probe Adjustment Volume** editor will regenerate the lighting data for probes covered by the volume. This is useful when iterating on a region of the world as it avoids baking the whole scene to see the result. Note that this button will only run the lighting and validity computations, so changing the space between probes, or toggling Virtual Offset or Sky Occlusion will not have any effect until doing a full rebake of the Baking Set.
-Adding a Probe Adjustment Volume solves some light leak issues but usually not all.
+Using a Probe Adjustment Volume component solves most light leak issues, but often not all.
+
+If you use many Probe Adjustment Volumes in a scene, your bake will be slower, and your scene might be harder to understand and maintain.
-See [Settings and properties related to Probe Adjustment Volumes](probevolumes-settings.md#pv-adjustment) for a list of all the settings that can be adjusted.
+Refer to [Probe Adjustment Volume component reference](probevolumes-adjustment-volume-component-reference.md) for more information. ## Fix seams -Seams are artefacts that appear when one lighting condition transitions immediately into another. Seams are caused when two adjacent bricks have different Light Probe densities. See [bricks](probevolumes-concept.md#brick-size-and-light-probe-density). +Seams are artefacts that appear when one lighting condition transitions immediately into another. Seams are caused when two adjacent bricks have different Light Probe densities. Refer to [bricks](probevolumes-concept.md#how-probe-volumes-work) for more information. ![](Images/probevolumes-seams.JPG)
Two seams.
@@ -120,11 +130,13 @@ Two seams.
To fix seams, do the following:
-1. Add a [Volume](understand-volumes.md) to your Scene and make sure its area overlaps the position of the camera.
-2. Select **Add Override**, then select **Lighting** > **Probe Volume Options**.
-3. Enable **Sampling Bias**, then try adjusting the value to add noise and make the transition more diffuse.
+1. Add a [Volume](scene-setup.md) to your scene and make sure its area overlaps the position of the camera.
+2. Select **Add Override**, then select **Lighting** > **Probe Volumes Options**.
+3. Enable **Sampling Noise**, then try adjusting the value to add noise and make the transition more diffuse. Noise can help break up noticeable edges in indirect lighting at brick boundaries.
## Additional resources
-* [Display and adjust Probe Volumes](probevolumes-showandadjust.md)
-* [Settings and properties related to Probe Volumes](probevolumes-settings.md)
+* [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md)
+* [Adaptive Probe Volumes panel reference](probevolumes-lighting-panel-reference.md)
+* [Probe Volumes Options Override reference](probevolumes-options-override-reference.md)
+* [Probe Adjustment Volume component reference](probevolumes-adjustment-volume-component-reference.md)
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-inspector-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-inspector-reference.md
new file mode 100644
index 00000000000..00471dd0d56
--- /dev/null
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-inspector-reference.md
@@ -0,0 +1,75 @@
+# Adaptive Probe Volume Inspector reference
+
+Select an Adaptive Probe Volume and open the Inspector to view its properties.
+
+| **Property** || **Description** |
+|-|-|-|
+| **Mode** || |
+|| **Global** | HDRP sizes this Adaptive Probe Volume to include all renderers in the scene or Baking Set that have **Contribute Global Illumination** enabled in their Mesh Renderer component. HDRP recalculates the volume size every time you save or generate lighting. |
+|| **Scene** | HDRP sizes this Adaptive Probe Volume to include all renderers in the same scene as this Adaptive Probe Volume. HDRP recalculates the volume size every time you save or generate lighting. |
+|| **Local** | Set the size of this Adaptive Probe Volume manually. |
+| **Size** || Set the size of this Adaptive Probe Volume. This setting only appears when you set **Mode** to **Local**. |
+| **Subdivision Override** || |
+|| **Override Probe Spacing** | Override the Probe Spacing set in the Baking Set for this Adaptive Probe Volume. This value can't exceed the **Min Probe Spacing** and **Max Probe Spacing** values in the **Adaptive Probe Volumes** panel in the Lighting window. |
+| **Geometry Settings** || |
+|| **Override Renderer Filters** | Enable filtering by Layer which GameObjects HDRP considers when it generates probe positions. Use this to exclude certain GameObjects from contributing to Adaptive Probe Volume lighting. |
+|| **Layer Mask** | Filter by Layer which GameObjects HDRP considers when it generates probe positions. |
+|| **Min Renderer Size** | The smallest Renderer size HDRP considers when it generates probe positions. |
+|| **Fill Empty Spaces** | Enable HDRP filling the empty space between and around Renderers with bricks. Bricks in empty spaces always use the **Max Probe Spacing** value. |
+ +## Size gizmo + +To resize the Adaptive Probe Volume, use one of the handles of the box gizmo in the Scene view. You can't resize an Adaptive Probe Volume by changing the Transform component of the GameObject, or using the scale gizmo. + +In this screenshot, a red box indicates the box gizmo handles. + +![](Images/ProbeVolume-Size-gizmo.png)
+The resize handles for Adaptive Probe Volumes.
+
+## Probe Volume limitations with Asset Bundles and Addressables
+
+Internally, the Probe Volume system uses the Streaming Assets feature to store baked data, which allows both efficient loading and streaming of data. As a consequence, Probe Volume baked data is incompatible with Asset Bundles and Addressables, because the data is explicitly moved inside the Streaming Assets folder when you build a Player.
+To use Asset Bundles and Addressables when necessary, enable the *Disable Streaming Assets* toggle in the Probe Volume Graphics settings. When you enable this option, the system no longer uses Streaming Assets internally, but regular Assets that you can manage manually.
+Enabling this option also disables Disk Streaming and increases memory consumption in multi-scene setups.
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-lighting-panel-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-lighting-panel-reference.md
new file mode 100644
index 00000000000..6cbdb0dd60b
--- /dev/null
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-lighting-panel-reference.md
@@ -0,0 +1,165 @@
+# Adaptive Probe Volumes panel properties
+
+This page explains the properties in the **Adaptive Probe Volumes** panel in Lighting settings. To open the panel, from the main menu select **Window** > **Rendering** > **Lighting** > **Adaptive Probe Volumes**.
+
+## Baking
+
+To open Baking Set properties, either select the Baking Set asset in the Project window, or from the main menu select **Window** > **Rendering** > **Lighting**, then select the **Adaptive Probe Volumes** tab.
+
+### Baking
+
+| **Property** || **Description** |
+|-|-|-|
+| **Baking Mode** || |
+|| **Single Scene** | Use only the active scene to calculate the lighting data in Adaptive Probe Volumes. |
+|| **Baking Set** | Use the scenes in this Baking Set to calculate the lighting data in Adaptive Probe Volumes. |
+| **Current Baking Set** || The current Baking Set asset. |
+| **Scenes in Baking Set** || Lists the scenes in the current Baking Set.<br/>**Status**: Indicates whether the scene is loaded.<br/>**Bake**: When enabled, HDRP generates lighting for this scene.<br/>Use **+** and **-** to add or remove a scene from the active Baking Set.<br/>Use the two-line icon to the left of each scene to drag the scene up or down in the list. |
+
+### Probe Placement
+
+| **Property** || **Description** |
+|-|-|-|
+| **Probe Positions** || |
+|| **Recalculate** | Recalculate probe positions during baking, to accommodate changes in scene geometry. Refer to [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) for more information. |
+|| **Don't Recalculate** | Don't recalculate probe positions during baking. This keeps the probe positions the same as the last successful bake, which means HDRP can blend probes in different Lighting Scenarios. Refer to [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) for more information. |
+| **Min Probe Spacing** || The minimum distance between probes, in meters. Refer to [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md) for more information. |
+| **Max Probe Spacing** || The maximum distance between probes, in meters. Refer to [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md) for more information. |
+| **Renderer Filter Settings** || |
+|| **Layer Mask** | Specify the Layers HDRP considers when it generates probe positions. Select a Layer to enable or disable it. |
+|| **Min Renderer Size** | The smallest Renderer size HDRP considers when it places probes. |
+
+### Lighting Scenarios
+
+This section appears only if you enable **Lighting Scenarios** under **Light Probe Lighting** in the [HDRP Asset](HDRP-Asset.md).
+
+| **Property** ||| **Description** |
+|-|-|-|-|
+| **Scenarios** ||| Lists the Lighting Scenarios in the Baking Set. To rename a Lighting Scenario, double-click its name. |
+|| **Active** || Set the currently loaded Lighting Scenario, which HDRP writes to when you select **Generate Lighting**. |
+|| **Status** || Indicates the status of the active Lighting Scenario. |
+||| **Invalid Scenario** | A warning icon appears if the active Lighting Scenario is baked but HDRP can't load it anymore, for example if another Lighting Scenario has been baked that caused changes in the probe subdivision. |
+||| **Not Baked** | An information icon appears if you haven't baked any lighting data for the active Lighting Scenario. |
+||| **Not Loaded** | An information icon appears if scenes in the Baking Set aren't currently loaded in the Hierarchy window, so HDRP can't determine the Lighting Scenario status. |
+
+## Probe Invalidity Settings
+
+| **Property** || **Description** |
+|-|-|-|
+| **Probe Dilation Settings** || |
+|| **Enable Dilation** | When enabled, HDRP replaces data in invalid probes with data from nearby valid probes. Enabled by default. Refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md). |
+|| **Search Radius** | Determine how far from an invalid probe HDRP searches for valid neighbors. Higher values include more distant probes that might be in different lighting conditions than the invalid probe, resulting in unwanted behaviors such as light leaks. |
+|| **Validity Threshold** | Set the ratio of backfaces a probe samples before HDRP considers it invalid. Higher values mean HDRP is more likely to mark a probe invalid. |
+|| **Dilation Iterations** | Set the number of times Unity repeats the dilation calculation. This increases the spread of the dilation effect, but increases the time HDRP needs to calculate probe lighting. |
+|| **Squared Distance Weighting** | Enable weighing the contribution of neighbouring probes by squared distance, rather than linear distance. Probes that are closer to invalid probes will contribute more to the lighting data. |
+| **Virtual Offset Settings** || |
+|| **Enable Virtual Offset** | Enable HDRP moving the capture point of invalid probes into a valid area. Refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md). |
+|| **Search Distance Multiplier** | Set the length of the sampling ray HDRP uses to search for valid probe positions. High values might cause unwanted results, such as probe capture points pushing through neighboring geometry. |
+|| **Geometry Bias** | Set how far HDRP pushes a probe's capture point out of geometry after one of its sampling rays hits geometry. |
+|| **Ray Origin Bias** | Set the distance between a probe's center and the point HDRP uses as the origin of each sampling ray. High values might cause unwanted results, such as rays missing nearby occluding geometry. |
+|| **Layer Mask** | Specify which layers HDRP includes in collision calculations for [Virtual Offset](probevolumes-fixissues.md). |
+|| **Refresh Virtual Offset Debug** | Re-run the virtual offset simulation to preview updated results, without affecting baked data. |
+ +### Adaptive Probe Volume Disk Usage + +| **Property** | **Description** | +|-|-| +| **Scenario Size** | Indicates how much space on disk is used by the currently selected Lighting Scenario. | +| **Baking Set Size** | Indicates how much space on disk is used by all the baked Light Probe data for the currently selected Baking Set. This includes the data for all Lighting Scenarios, and the data shared by all Lighting Scenarios. diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-options-override-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-options-override-reference.md new file mode 100644 index 00000000000..9fc8679a2da --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-options-override-reference.md @@ -0,0 +1,19 @@ +# Probe Volumes Options Override reference + +To add a Probe Volumes Options Override, do the following: + +1. Add a [Volume](understand-volumes.md) to your Scene and make sure its area overlaps the position of the camera. +2. Select **Add Override**, then select **Lighting** > **Probe Volumes Options**. + +Refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) for more information about using the Probe Volumes Options Override. + +| **Property** | **Description** | +|------------------------------------|-------------| +| **Normal Bias** | Enable to move the position used by shaded pixels when sampling Light Probes. The value is in meters. This affects how sampling is moved along the pixel's surface normal. | +| **View Bias** | Enable to move the sampling position towards the camera when sampling Light Probes. The results of **View Bias** vary depending on the camera position. The value is in meters. | +| **Scale Bias with Min Probe Distance** | Scale the **Normal Bias** or **View Bias** so it's proportional to the spacing between Light Probes in a [brick](probevolumes-concept.md#how-probe-volumes-work). | +| **Sampling Noise** | Enable to increase or decrease the amount of noise HDRP adds to the position used by shaded pixels when sampling Light Probes. This can help [fix seams](probevolumes-fixissues.md#fix-seams) between bricks. | +| **Animate Sampling Noise** | Enable to animate sampling noise when Temporal Anti-Aliasing (TAA) is enabled. This can make noise patterns less visible. | +| **Leak Reduction Mode** | Enable to choose the method Unity uses to reduce leaks. Refer to [Fix light leaks](probevolumes-fixissues.md#fix-light-leaks).
Options:
• **Validity and Normal Based**: Enable to make HDRP prevent invalid Light Probes contributing to the lighting result, and give Light Probes more weight than others based on the GameObject pixel's sampling position.
• **None**: No leak reduction. +| **Min Valid Dot Product Value** | Enable to make HDRP reduce a Light Probe's influence on a GameObject if the direction towards the Light Probe is too different to the GameObject's surface normal direction. The value is the minimum [dot product](https://docs.unity3d.com/ScriptReference/Vector3.Dot.html) between the two directions where HDRP will reduce the Light Probe's influence. | +| **Occlusion Only Reflection Normalization** | Enable to limit Reflection Probe Normalization so it only decreases the intensity of reflections. Keep this enabled to reduce light leaks. Refer to [Frame Settings](frame-settings-reference.md#lighting). | diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-settings.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-settings.md deleted file mode 100644 index d2d2758882d..00000000000 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-settings.md +++ /dev/null @@ -1,342 +0,0 @@ -# Probe Volume settings and properties - -This page explains the settings you can use to configure Probe Volumes. - - -## Baking Set properties - -To open Baking Set properties, either select the Baking Set asset in the Project window, or go to **Window > Rendering > Lighting** and select the **Probe Volumes** tab. - -### Baking - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| **Property** || **Description** |
-|-|-|-|
-| **Baking Mode** || |
-|| **Single Scene** | Bake only the active scene. |
-|| **Baking Sets (Advanced)** | Bake all scenes that are part of the Baking Set. |
-| **Baking Set** || Indicates the active Baking Set. |
-| **Scenes** || Lists the Scenes in the active Baking Set.<br/>**Scene**: Indicates whether the scene is loaded.<br/>**Bake**: When enabled, HDRP generates lighting for this scene.<br/>Use **+** and **-** to add or remove a Scene from the active Baking Set.<br/>Use the three-line icon to the left of each Scene to drag the Scene up or down in the list. |
- -### Lighting Scenarios - -This section appears only if you enable **Lighting Scenarios** under **Light Probe Lighting** in the [HDRP Asset](HDRP-Asset.md). - -| **Property** ||| **Description** | -|-|-|-|-| -| **Scenarios** ||| Lists the Lighting Scenarios in the Baking Set. To rename a Lighting Scenario, double-click its name. | -|| **Active** || Set the currently loaded Lighting Scenario, which HDRP writes to when you select **Generate Lighting**. | -|| **Status** || Indicates the status of the active Lighting Scenario. | -||| **Invalid Scenario** | A warning icon appears if the active Lighting Scenario is baked but HDRP can't load it anymore, for example if another Lighting Scenario has been baked that caused changes in the probe subdivision. | -||| **Not Baked** | An information icon appears if you haven't baked any lighting data for the active Lighting Scenario.| -||| **Not Loaded** | An information icon appears if scenes in the Baking Set aren't currently loaded in the Hierarchy window, so HDRP can't determine the Lighting Scenario status. | - -### Probe Placement - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| **Property** || **Description** |
-|-|-|-|
-| **Probe Positions** || |
-|| **Recalculate** | Recalculate probe positions during baking, so you can bake multiple Lighting Scenarios that would produce different brick layouts due to differences in scene geometry. |
-|| **Don't Recalculate** | Don't recalculate probe positions during baking. |
-| **Min Probe Spacing** || The minimum distance between probes, in meters. See Display and adjust Probe Volumes for additional information. |
-| **Max Probe Spacing** || The maximum distance between probes, in meters. See Display and adjust Probe Volumes for additional information. |
-| **Renderer Filter Settings** || |
-|| **Layer Mask** | Specify the Layers HDRP considers when it generates probe positions. Select a Layer to enable or disable it. |
-|| **Min Renderer Size** | The smallest Renderer size HDRP considers when it places probes. |
- -### Probe Invalidity Settings - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| **Property** || **Description** |
-|-|-|-|
-| **Probe Dilation Settings** || |
-|| **Enable Dilation** | Enable HDRP replacing data in invalid probes with data from valid probes nearby. Enabled by default. See Fix issues with Probe Volumes. |
-|| **Search Radius** | Determine how far from an invalid probe HDRP searches for valid neighbors. Higher values include more distant probes that may be in different lighting conditions than the invalid probe, resulting in unwanted behaviors. |
-|| **Validity Threshold** | Set the ratio of backfaces a probe samples before HDRP considers it invalid. Higher values mean HDRP is more likely to mark a probe invalid. |
-|| **Dilation Iterations** | Set the number of times Unity repeats the dilation calculation. This increases spread of dilation effect, but requires additional processing power. |
-|| **Squared Distance Weighting** | Enable weighing the contribution of neighbouring probes by squared distance, rather than linear distance. |
-| **Virtual Offset Settings** || |
-|| **Enable Virtual Offset** | Enable HDRP moving the capture point of invalid probes so they're valid again. See Fix issues with Probe Volumes. |
-|| **Search Distance Multiplier** | Set the length of the sampling ray HDRP uses to search for valid probe positions. High values may cause unwanted results, such as probe capture points pushing through neighboring geometry. |
-|| **Geometry Bias** | Set how far HDRP pushes a probe's capture point out of geometry after one of its sampling rays hits geometry. |
-|| **Ray Origin bias** | Set the distance between a probe's center and the point HDRP uses to determine the origin of that probe's sampling ray. High values may cause unwanted results, such as sampling from an area of the scene with dissimilar lighting. |
-|| **Layer Mask** | Specify which layers HDRP includes in collision calculations for [Virtual Offset](probevolumes-fixissues.md). |
-|| **Refresh Virtual Offset Debug** | Re-run the virtual offset simulation; it will be applied only for debug visualization sake and not affect baked data. |
- -### Probe Volume Disk Usage - -| **Property** | **Description** | -|-|-| -| **Scenario Size** | Indicates how much space on disk is used by the currently selected Lighting Scenario. | -| **Baking Set Size** | Indicates how much space on disk is used by all the baked data for the currently selected Baking Set. This includes the data for all Lighting Scenarios, and the data shared by all Lighting Scenarios. - -## Probe Volume Properties - - -Select a Probe Volume and open the Inspector to view its properties. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| **Property** || **Description** |
-|-|-|-|
-| **Mode** || |
-|| **Global** | HDRP sizes this Probe Volume to include all Renderers in your project that contribute Global Illumination. HDRP recalculates the volume size every time you save or generate lighting. |
-|| **Scene** | HDRP sizes this Probe Volume to include all Renderers in the same scene as this Probe Volume. HDRP recalculates the volume size every time you save or generate lighting. |
-|| **Local** | Set the size of this Probe Volume manually. |
-| **Size** || Set the size of this Probe Volume. This setting only appears when Mode is set to Local. |
-| **Subdivision Override** || |
-|| **Override Probe Spacing** | Override the Probe Spacing set in the Baking Set for this Probe Volume. |
-| **Geometry Settings** || |
-|| **Override Renderer Filters** | Enable filtering by Layer which GameObjects HDRP considers when it generates probe positions. |
-|| **Layer Mask** | Filter by Layer which GameObjects HDRP considers when it generates probe positions. |
-|| **Min Renderer Size** | The smallest Renderer size HDRP considers when it generates probe positions. |
-|| **Fill Empty Spaces** | Enable HDRP filling the empty space between Renderers with bricks that have the largest distance between probes. |
- -## Probe Adjustment Volume - - -Select a [Probe Adjustment Volume Component](probevolumes-fixissues.md#add-a-probe-adjustment-volume-component) and open the Inspector to view its properties. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
-| **Property** || **Description** |
-|-|-|-|
-| **Influence Volume** || |
-|| **Shape** | Set the shape of the Adjustment Volume to either **Box** or **Sphere**. |
-|| **Size** | Set the size of the Adjustment Volume. This property only appears if you set **Shape** to **Box**. |
-|| **Radius** | Set the radius of the Adjustment Volume. This property only appears if you set **Shape** to **Sphere**. |
-| **Mode** || Select how to override probes inside the Adjustment Volume.<br/>• **Invalidate Probes**: Mark selected probes as invalid.<br/>• **Override Validity Threshold**: Override **Dilation Validity Threshold**.<br/>• **Apply Virtual Offset**: Manually apply a Virtual Offset on selected probes.<br/>• **Override Virtual Offset Settings**: Override Virtual Offset biases.<br/>• **Override Sky Direction**: Override the direction used for sampling the ambient probe when using Sky Occlusion.<br/>• **Override Sample Count**: Override the sample count used to compute Lighting and Sky Occlusion. |
-| **Intensity Scale** || Set the scale HDRP applies to all probes in the Adjustment Volume. Use this sparingly, because changing the intensity of probe data can lead to inconsistencies in the lighting. This option only appears if you set **Mode** to **Invalidate Probes**, and you enable Additional Properties. |
-| **Dilation Validity Threshold** || Override the ratio of backfaces a probe samples before HDRP considers it invalid. This option only appears if you set **Mode** to **Override Validity Threshold**, and you enable Additional Properties. |
-| **Virtual Offset Rotation** || Set the rotation angle for the Virtual Offset vector on all probes in the Adjustment Volume. This option only appears if you set **Mode** to **Apply Virtual Offset**. |
-| **Virtual Offset Distance** || Set how far HDRP pushes probes along the Virtual Offset Rotation vector. This option only appears if you set **Mode** to **Apply Virtual Offset**. |
-| **Geometry Bias** || Sets how far HDRP pushes a probe's capture point out of geometry after one of its sampling rays hits geometry. This option only appears if you set **Mode** to **Override Virtual Offset Settings**. |
-| **Ray Origin Bias** || Override the distance between a probe's center and the point HDRP uses to determine the origin of that probe's sampling ray. This option only appears if you set **Mode** to **Override Virtual Offset Settings**. |
- -## Probe Volumes Options Override - - -To add a Probe Volume Options Override, do the following: - -1. Add a [Volume](understand-volumes.md) to your Scene and make sure its area overlaps the position of the camera. -2. Select **Add Override**, then select **Lighting** > **Probe Volume Options**. - -| **Property** | **Description** | -|------------------------------------|-------------| -| **Normal Bias** | Enable to move the position that object pixels use to sample the Light Probes, along the pixel's surface normal. The value is in meters. | -| **View Bias** | Enable to move the position that object pixels use to sample the Light Probes, towards the camera. The results of **View Bias** vary depending on the camera position. The value is in meters. | -| **Scale Bias with Min Probe Distance** | Scale the **Normal Bias** or **View Bias** so it's proportional to the spacing between Light Probes in a [brick](probevolumes-concept.md#brick-size-and-light-probe-density). | -| **Sampling Noise** | Enable to increase or decrease HDRP adding noise to lighting, to help [fix seams](probevolumes-fixissues.md#fix-seams). | -| **Animate Sampling Noise** | Enable to animate sampling noise when Temporal Anti-Aliasing (TAA) is enabled. This can make noise patterns less visible. | -| **Leak Reduction Mode** | Enable to choose the method Unity uses to reduce leaks. See [Fix light leaks](probevolumes-fixissues.md#fix-light-leaks).
Options:
• **Validity and Normal Based**: Enable to make HDRP prevent invalid Light Probes contributing to the lighting result, and give Light Probes more weight than others based on the object pixel's sampling position.
• **None**: No leak reduction. -| **Min Valid Dot Product Value** | Enable to make HDRP reduce a Light Probe's influence on an object if the direction towards the Light Probe is too different to the object's surface normal direction. The value is the minimum [dot product](https://docs.unity3d.com/ScriptReference/Vector3.Dot.html) between the two directions where HDRP will reduce the Light Probe's influence. | -| **Occlusion Only Reflection Normalization** | Enable to limit Reflection Probe Normalization so it only decreases the intensity of reflections. Keep this enabled to reduce light leaks. See [Frame Settings](frame-settings-reference.md#lighting). | - -## Size gizmo - -To resize the Probe Volume, use one of the handles of the box gizmo in the Scene View. You can't resize a Probe Volume by rescaling the GameObject or using the scale gizmo. - -In this screenshot, a red box indicates the box gizmo handles. - -![](Images/ProbeVolume-Size-gizmo.png)
-The resize handles for Probe Volumes. - -## Probe Volume limitations with Asset Bundles and Addressables - -Internally, the Probe Volume system uses the Streaming Asset feature to store baked data. This is necessary to allow both efficient loading and streaming of data. The consequence is that Probe Volume baked data is incompatible with Asset Bundles and Addressables as it is explicitly moved inside the Streaming Asset folder upon Player build. -In order to allow the use of Asset Bundles and Addressables when necessary, a toggle is provided in the Probe Volume Graphics settings: *Disable Streaming Assets*. When enabling this option, the system will no longer use Streaming Assets internally but regular Assets that can be managed manually by the user. -Enabling this option will also disable the use of Disk Streaming and increase memory consumption in multi-scene setups. diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-showandadjust.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-showandadjust.md index 919280a6a26..eed706afcce 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-showandadjust.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-showandadjust.md @@ -1,19 +1,19 @@ -# Display and adjust Probe Volumes +# Display Adaptive Probe Volumes -You can use the Rendering Debugger to see how HDRP places Light Probes in a Probe Volume, then use Probe Volume settings to configure the layout. +You can use the Rendering Debugger to check how HDRP places Light Probes in an Adaptive Probe Volume, then use Adaptive Probe Volume settings to configure the layout. -## Display Probe Volumes +## Display Adaptive Probe Volumes -To display Probe Volumes, open the [Rendering Debugger](rendering-debugger-window-reference.md#ProbeVolume). +To display Adaptive Probe Volumes, open the [Rendering Debugger](rendering-debugger-window-reference.md#ProbeVolume) and select the **Probe Volume** tab. -You can display the following: +You can do the following: -- Enable **Display Probes** to display the locations of Light Probes. -- Enable **Display Bricks** to display the outlines of groups of Light Probes ('bricks'). See [Understand Probe Volumes](probevolumes-concept.md#brick-size-and-light-probe-density) for more information on bricks. -- Enable **Display Cells** to display the outlines of cells, which are the units that [streaming](probevolumes-streaming.md) uses. -- Enable **Debug Probe Sampling** to inspect details about Probe Volume sampling at a given world position. +- Enable **Probe Visualization** > **Display Probes** to display the locations of Light Probes and the lighting they store. +- Enable **Subdivision Visualization** > **Display Bricks** to display the outlines of groups of Light Probes ('bricks'). Refer to [Understanding Adaptive Probe Volumes](probevolumes-concept.md#how-probe-volumes-work) for more information on bricks. +- Enable **Subdivision Visualization** > **Display Cells** to display the outlines of cells, which are groups of bricks used for [streaming](probevolumes-streaming.md). +- Enable **Subdivision Visualization** > **Debug Probe Sampling** to display how neighboring Light Probes influence a chosen position. Select a surface to display the weights HDRP uses to sample nearby Light Probes. -To update the location of Light Probes, bricks, and cells automatically when you change settings, enable **Realtime Update**. 
+If the Rendering Debugger displays invalid probes when you select **Display Probes**, refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md). ![](Images/probevolumes-debug-displayprobes.PNG)
The Rendering Debugger with **Display Probes** enabled. @@ -27,51 +27,11 @@ The Rendering Debugger with **Display Cells** enabled. ![](Images/APVsamplingDebug.png)
The Rendering Debugger with **Debug Probe Sampling** enabled -## Adjust - -### Adjust Probe Volume size - -To achieve the highest quality lighting, you should enable **Global** in the Probe Volume Inspector, so the Probe Volume covers the entire Scene. - -You can also do the following in a Probe Volume Inspector to set the size of a Probe Volume: - -- Disable **Global** and set the size manually. -- Disable **Global** and select **Fit to all Scenes**, **Fit to Scene** or **Fit to Selection**. See [Probe Volume Inspector properties](probevolumes-settings.md#probe-volume-properties) for more information. -- Select **Override Renderer Filter**, then select which layers HDRP considers when it generates Light Probe positions. For more information about Layers, see [Layers and Layer Masks](https://docs.unity3d.com/Manual/layers-and-layermasks.html). - -You can overlap multiple Probe Volumes in one Scene or Baking Set. - -### Adjust Light Probe density - -If your Scene includes areas of detailed geometry, you might need to increase Light Probe density in these areas to achieve a good lighting result. - -You can use the following to adjust Light Probe density across a whole Probe Volume: - -- In the [Baking Set properties](probevolumes-settings.md#pv-tab), set the **Min Probe Spacing** and **Max Probe Spacing** - which affects all the Scenes and Probe Volumes in the Set. -- In a [Probe Volume's Inspector](probevolumes-settings.md#probe-volume-properties), adjust the **Override Probe Spacing** slider - which affects only the Probe Volume, and overrides the settings set in the **Baking Set**. - -Note: In the Inspector for a Probe Volume, the values in **Override Probe Spacing** can't exceed the **Min Probe Spacing** or the **Max Probe Spacing** in the **Baking Set Settings**. - -If you increase Light Probe density, you might increase bake time and how much disk space your Probe Volumes use. - -### Use multiple Probe Volumes - -You can use multiple Probe Volumes to control Light Probe density in more detail across a Scene or Baking Set. For example: - -1. In the [Baking Set properties](probevolumes-settings.md#pv-tab), set the probe spacing to between 1 meters and 27 meters. -2. To cover empty areas, add another Probe Volume, enable **Global**, and override the probe spacing to between 9 meters and 27 meters. -3. To cover a smaller high-detail area, add another Probe Volume, disable **Global**, set a smaller size, and override the probe spacing to between 1 meters and 9 meters. - -### Terrain - -Because terrain is detailed but less important to you than your main scenery or characters, you can do the following: - -1. Put terrain on its own [Layer](https://docs.unity3d.com/Manual/layers-and-layermasks.html). -2. Surround the terrain with a Probe Volume. -3. In the Inspector for the Probe Volume, enable **Override Renderer Filters**, then in **Layer Mask** select only your terrain Layer. -4. To adjust Light Probe density to capture more or less lighting detail, enable **Override Probe Spacing** and adjust the slider values. +Refer to [Rendering Debugger window reference](rendering-debugger-window-reference.md#ProbeVolume) for more information. 
## Additional resources
-* [Rendering Debugger](rendering-debugger-window-reference.md#probe-volume)
-* [Probe Volumes settings and properties](probevolumes-settings.md)
+* [Configure the size and density of an Adaptive Probe Volume](probevolumes-changedensity.md)
+* [Adaptive Probe Volumes panel reference](probevolumes-lighting-panel-reference.md)
+* [Probe Volumes Options Override reference](probevolumes-options-override-reference.md)
+* [Probe Adjustment Volume component reference](probevolumes-adjustment-volume-component-reference.md)
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-skyocclusion.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-skyocclusion.md
index 5a9afa3069a..295afd97f45 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-skyocclusion.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-skyocclusion.md
@@ -52,7 +52,7 @@ Enabling Sky Direction can improve visual results, especially in cave-like scena
## Debugging Sky Occlusion
-You can inspect the Sky Occlusion value using the **Display Probes** option in the [Rendering Debugger](Render-Pipeline-Debug-Window.md#ProbeVolume). Two views are provided in the **Probe Shading Mode** dropdown:
+You can inspect the Sky Occlusion value using the **Display Probes** option in the [Rendering Debugger](rendering-debugger-window-reference.md#probe-volume-panel). Two views are provided in the **Probe Shading Mode** dropdown:
1. **Sky Occlusion SH**: Display the gray value (scalar) used to attenuate Sky lighting.
2. **Sky Direction**: Displays a green dot corresponding to the direction used to sample the Ambient Probe. If **Sky Direction** was not enabled or could not be computed, this displays a red probe.
@@ -66,5 +66,5 @@ You can inspect the Sky Occlusion value using the **Display Probes** option in t
-* [Understand Probe Volumes](probevolumes-concept.md)
+* [Understand Adaptive Probe Volumes](probevolumes-concept.md)
* [Visual Environment Volume override](Override-Visual-Environment.md)
\ No newline at end of file
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-streaming.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-streaming.md
index d190e3ecdfc..7d0dc9206e1 100644
--- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-streaming.md
+++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-streaming.md
@@ -1,31 +1,38 @@
-# Streaming
+# Streaming Adaptive Probe Volumes
-You can enable Probe Volume streaming to provide high quality lighting for games set in large open worlds.
+You can enable Adaptive Probe Volume streaming to use Adaptive Probe Volume lighting in very large worlds. Using streaming means you can bake Adaptive Probe Volume data larger than available CPU or GPU memory, and load it at runtime when it's needed. At runtime, as your camera moves, the High Definition Render Pipeline (HDRP) loads only Adaptive Probe Volume data from cells within the camera's view frustum.
-At runtime, as your Camera moves, HDRP loads and uses only the sections of a Probe Volume that overlap visible geometry in your Scene.
-
-The smallest section HDRP loads and uses is a 'cell', which is the same size as the largest [brick](probevolumes-concept.md) in a Probe Volume.
You can influence the size of cells in a Probe Volume by [adjusting the density of Light Probes](probevolumes-showandadjust.md#adjust-light-probe-density). - -To view the cells in a Probe Volume, use the **Display Cells** setting in [Rendering Debugger](rendering-debugger-window-reference.md#ProbeVolume). - -![](Images/probevolumes-debug-displayprobecells.PNG)
-The Rendering Debugger with **Display Cells** enabled.
+You can enable and disable streaming for different [HDRP quality levels](quality-settings.md).
## Enable streaming
To enable streaming, do the following:
-1. Open the **Edit** menu and select **Project Settings** > **Quality** > **HDRP**.
-2. Expand **Lighting** > **Light Probe Lighting**.
-3. Enable **Enable Streaming**.
+1. From the main menu, select **Edit** > **Project Settings** > **Quality** > **HDRP**.
+2. Select a Quality Level.
+3. Expand **Lighting** > **Light Probe Lighting**.
+
+You can now enable two types of streaming:
+
+- Enable **Enable Disk Streaming** to stream from disk to CPU memory.
+- Enable **Enable GPU Streaming** to stream from CPU memory to GPU memory. You must enable **Enable Disk Streaming** first.
-You can configure streaming settings in the same window. See [HDRP Asset](HDRP-Asset.md#Lighting) for more information.
+You can configure streaming settings in the same window. Refer to [HDRP Asset](HDRP-Asset.md#Lighting) for more information.
## Compatibility with Asset Bundles
-The underlying system used to support streaming causes limitation with regards to Asset Bundles and Addressables. Please see [this section for more information](probevolumes-settings.md#probe-volume-limitations-with-asset-bundles-and-addressables)
+The underlying system used to support streaming causes limitations with Asset Bundles and Addressables. Refer to [this section](probevolumes-inspector-reference.md#probe-volume-limitations-with-asset-bundles-and-addressables) for more information.
+
+## Debug streaming
+
+The smallest section HDRP loads and uses is a cell, which is the same size as the largest [brick](probevolumes-concept.md) in an Adaptive Probe Volume. You can influence the size of cells in an Adaptive Probe Volume by [adjusting the density of Light Probes](probevolumes-changedensity.md).
+
+To view the cells in an Adaptive Probe Volume, or debug streaming, use the [Rendering Debugger](rendering-debugger-window-reference.md#probe-volume-panel).
+
+![](Images/probevolumes-debug-displayprobecells.PNG)
+The Rendering Debugger with **Display Cells** enabled. # Additional resources -* [Understand Probe Volumes](probevolumes-concept.md) +* [Understanding Adaptive Probe Volumes](probevolumes-concept.md) * [Frame Settings](frame-settings-reference.md) diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-use.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-use.md index 04c35739cee..cbe021e9a1c 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-use.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-use.md @@ -1,161 +1,61 @@ -# Use Probe Volumes +# Use Adaptive Probe Volumes -This page provides the basic workflow you need to use Probe Volumes in your project. +This page provides the basic workflow you need to use Adaptive Probe Volumes in your project. -## Add and bake a Probe Volume +## Add and bake an Adaptive Probe Volume -### Enable Probe Volumes +### Enable Adaptive Probe Volumes -1. Open the **Edit** menu and select **Project Settings** > **Quality** > **HDRP**. +1. From the main menu, select **Edit** > **Project Settings** > **Quality** > **HDRP**. 2. Expand **Lighting** > **Light Probe Lighting**. -3. Set **Light Probe System** to **Probe Volumes**. -4. Select **Graphics** > **Pipeline Specific Settings** > **HDRP**. +3. Set **Light Probe System** to **Adaptive Probe Volumes**. +4. Select the **Graphics** > **Pipeline Specific Settings** > **HDRP** tab. 5. Go to **Frame Settings**. -6. Expand **Camera** > **Lighting** and enable **Probe Volumes**. +6. Expand **Camera** > **Lighting** and enable **Adaptive Probe Volumes**. -To make sure Reflection Probes also capture lighting data from Probe Volumes, you should also do the following: +To make sure Reflection Probes also capture lighting data from Adaptive Probe Volumes, you should also do the following: -1. Expand **Realtime Reflection** > **Lighting** and enable **Probe Volumes**. -2. Expand **Baked or Custom Reflection** > **Lighting** and enable **Probe Volumes**. +1. Expand **Realtime Reflection** > **Lighting** and enable **Adaptive Probe Volumes**. +2. Expand **Baked or Custom Reflection** > **Lighting** and enable **Adaptive Probe Volumes**. -### Add a Probe Volume to the Scene +### Add an Adaptive Probe Volume to the Scene -1. Open the **GameObject** menu and select **Light** > **Probe Volumes** > **Probe Volume**. -2. In the Inspector for the Probe Volume, set **Mode** to **Global** to make this Probe Volume cover your entire Scene. +1. From the main menu, select **GameObject** > **Light** > **Adaptive Probe Volumes** > **Adaptive Probe Volume**. +2. In the Inspector for the Adaptive Probe Volume, set **Mode** to **Global** to make this Adaptive Probe Volume cover your entire Scene. ### Adjust your Light and Mesh Renderer settings -1. To include a Light in a Probe Volume's baked lighting data, open the Inspector for the Light then set the **Light Mode** to **Mixed** or **Baked**. -2. To include an object in a Probe Volume's baked lighting data, open the Inspector for the object and enable **Contribute Global Illumination**. -3. To make an object receive baked lighting, open the Inspector for the object and set **Receive Global Illumination** to **Light Probes**. +1. To include a Light in an Adaptive Probe Volume's baked lighting data, open the Inspector for the Light then set the **Light Mode** to **Mixed** or **Baked**. +2. 
To include a GameObject in an Adaptive Probe Volume's baked lighting data, open the Inspector for the GameObject and enable **Contribute Global Illumination**. +3. To make a GameObject receive baked lighting, open the Inspector for the GameObject and set **Receive Global Illumination** to **Light Probes**. ### Bake your lighting -1. Open the **Window** menu and select **Rendering** > **Lighting**. -2. Select the **Probe Volumes** tab. +1. From the main menu, select **Window** > **Rendering** > **Lighting**. +2. Select the **Adaptive Probe Volumes** panel. 3. Set **Baking Mode** to **Single Scene**. 4. Select **Generate Lighting**. -If no scene in the Baking Set contains a Probe Volume, Unity asks if you want to create a Probe Volume automatically. +If no scene in the Baking Set contains an Adaptive Probe Volume, Unity asks if you want to create an Adaptive Probe Volume automatically. You can change baking settings in the Lighting window's [Lightmapping Settings](https://docs.unity3d.com/Documentation/Manual/class-LightingSettings.html#LightmappingSettings). -## Configure a Probe Volume +Refer to [Bake multiple scenes together with Baking Sets](probevolumes-usebakingsets.md) for more information about Baking Sets. -You can use the following to configure a Probe Volume: +If there are visual artifacts in baked lighting, such as dark blotches or light leaks, refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md). -- Use [Baking Set properties](probevolumes-settings.md#pv-tab) to change the probe spacing and behaviour in all the Probe Volumes in a Baking Set. -- Use the settings in the [Probe Volume Inspector](probevolumes-settings.md#probe-volume-properties) to change the Probe Volume size and probe density. -- Add a [Probe Adjustment Volume](probevolumes-settings.md#probe-adjustment-volume) to the scene, to make probes invalid in a small area or fix other lighting issues. -- Add a [Volume](understand-volumes.md) to your Scene with a [Probe Volume Options](probevolumes-settings.md#probe-volumes-options-override) override, to change the way HDRP samples Probe Volume data when the Camera is inside the Volume. This doesn't affect baking. +## Configure an Adaptive Probe Volume -For more information, see the following: +You can use the following to configure an Adaptive Probe Volume:
- -For faster iteration times, disable **Bake** next to a scene to stop Unity baking the scene. This results in incomplete data, but can help reduce baking time when you're iterating on a part of the world. - -### Move a Scene between Baking Sets - -1. Select the Baking Set you want to move a Scene to. -2. In the **Scenes** section, use the **+** to select the Scene you want to move. -3. In the popup message, select **Yes** to move the Scene. - -### Load a Scene - -Unity doesn't automatically load the Scenes in a Baking Set when you select the Scene in the **Scenes** list. To load a Scene, select **Load All Scenes In Set**. - -When you load multiple Scenes together, the lighting might be too bright because HDRP combines light from all the Scenes. See [Set up multiple Scenes](https://docs.unity3d.com/Manual/setupmultiplescenes.html) for more information on loading and unloading Scenes. - -You can load multiple Scenes together only if they belong to the same Baking Set. - - -### Add and blend Lighting Scenarios - -A Lighting Scenario represents the probe volume data for a single bake. You can use multiple Lighting Scenarios to store baking results for different Scene setups, and switch or blend between them at runtime. For example, you can use one Lighting Scenario for when a lamp is off, and one for when it's on. - -To create a new Lighting Scenario and store baking results inside, do the following: - -1. Select a Baking Set. -2. In the **Lighting Scenarios** section, select **+** to add a Lighting Scenario. The Lighting Scenario displays **Active Scenario**. -3. Select **Generate Lighting**. HDRP stores the baking results in the Lighting Scenario. - -To store baking results in a different Lighting Scenario, select the Lighting Scenario so it displays **Active Scenario**. - -Probe Volumes split the baked data into multiple parts: - -- The shared data, which contains mainly the scene subdivision information and probe placement. -- The per scenario data, which contains the probe lighting information. - -As a result, HDRP doesn't need to duplicate baked data on disk when you use multiple **Lighting Scenarios**, but this requires that all Lighting Scenarios use the same probe placement, and therefore that the geometry doesn't change between bakes of all Lighting Scenarios. - -To switch or blend lighting scenarios at runtime, use the following [C# API](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest/index.html?subfolder=/api/UnityEngine.Rendering.ProbeReferenceVolume.html) : -* [lightingScenario](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest/index.html?subfolder=/api/UnityEngine.Rendering.ProbeReferenceVolume.lightingScenario.html#UnityEngine_Rendering_ProbeReferenceVolume_lightingScenario) -* [BlendLightingScenario(string, float)](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest/index.html?subfolder=/api/UnityEngine.Rendering.ProbeReferenceVolume.BlendLightingScenario.html#UnityEngine_Rendering_ProbeReferenceVolume_BlendLightingScenario_System_String_System_Single_) - -For example, the following script begins by setting 'scenario01' as active. Next, it sets up the number of cells that should be blended per frame, which can be useful for optimization purposes. 
Finally, it updates the probe volume blending factor every frame to blend between scenario01 and scenario02: -```C# -using System.Collections; -using System.Collections.Generic; -using UnityEngine; - -public class BlendLightingScenarios : MonoBehaviour -{ - UnityEngine.Rendering.ProbeReferenceVolume probeRefVolume; - public string scenario01 = "Scenario01Name"; - public string scenario02 = "Scenario02Name"; - [Range(0, 1)] public float blendingFactor = 0.5f; - [Min(1)] public int numberOfCellsBlendedPerFrame = 10; - - void Start() - { - probeRefVolume = UnityEngine.Rendering.ProbeReferenceVolume.instance; - probeRefVolume.lightingScenario = scenario01; - probeRefVolume.numberOfCellsBlendedPerFrame = numberOfCellsBlendedPerFrame; - } - - void Update() - { - probeRefVolume.BlendLightingScenario(scenario02, blendingFactor); - } -} - -``` -Keep in mind that this API only manages the probe volume data. You need to handle other aspects yourself, such as modifying direct lighting in your scene to match the baked lighting scenario. - -### Keep probe positions the same in different Lighting Scenarios - -If you need to make changes to the static geometry for your Lighting Scenarios, for example one Lighting Scenario where a door is open and one where the door is closed, you can do the following to stop HDRP recomputing probe positions when baking. - -1. Bake one Lighting Scenario. -2. Switch to another Lighting Scenario. -3. Change your scene lighting or geometry. -4. Set **Probe Positions** to **Don't Recalculate**. -5. Select **Generate Lighting** to recompute only the indirect lighting, and skip the probe placement computations. - -If you switch between Lighting Scenarios at runtime, HDRP changes only the Probe Volume's baked indirect lighting. You might still need to use scripts to move geometry or change direct lighting. - -You can use the use the [Rendering Debugger](rendering-debugger-window-reference.md#ProbeVolume) to preview transitions between Lighting Scenarios. +- Use the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md) in the Lighting window to change the probe spacing and behaviour in all the Adaptive Probe Volumes in a Baking Set. +- Use the settings in the [Adaptive Probe Volume Inspector window](probevolumes-inspector-reference.md) to change the Adaptive Probe Volume size and probe density. +- Add a [Probe Adjustment Volume component](probevolumes-adjustment-volume-component-reference.md) to the Adaptive Probe Volume, to make probes invalid in a small area or fix other lighting issues. +- Add a [Volume](understand-volumes.md) to your scene with a [Probe Volumes Options Override](probevolumes-options-override-reference.md), to change the way HDRP samples Adaptive Probe Volume data when the camera is inside the volume. This doesn't affect baking. 
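+
+For example, the following is a minimal sketch of changing how HDRP samples Adaptive Probe Volume data from a script. It assumes the Probe Volumes Options override is exposed to scripts as the `ProbeVolumesOptions` volume component with `normalBias` and `viewBias` parameters; the values shown are placeholders, not recommended settings:
+
+```C#
+using UnityEngine;
+using UnityEngine.Rendering;
+
+public class ApplyProbeVolumesOptions : MonoBehaviour
+{
+    void Start()
+    {
+        // Create a global Volume so the override affects every camera.
+        var volume = gameObject.AddComponent<Volume>();
+        volume.isGlobal = true;
+        volume.priority = 1f;
+
+        // Add the Probe Volumes Options override to this Volume's
+        // instanced profile and mark its parameters as overridden.
+        var options = volume.profile.Add<ProbeVolumesOptions>(true);
+
+        // Placeholder values: bias the sampling position along the surface
+        // normal and towards the camera. This changes sampling only; it
+        // doesn't affect baked data.
+        options.normalBias.value = 0.33f;
+        options.viewBias.value = 0.1f;
+    }
+}
+```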
## Additional resources -- [Display and adjust Probe Volumes](probevolumes-showandadjust.md) -- [Fix issues with Probe Volumes](probevolumes-fixissues.md) +- [Bake multiple scenes together with Baking Sets](probevolumes-usebakingsets.md) +- [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) +- [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) - [Work with multiple Scenes in Unity](https://docs.unity3d.com/Documentation/Manual/MultiSceneEditing.html) -- [Probe Volumes settings and properties](probevolumes-settings.md) diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-usebakingsets.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-usebakingsets.md new file mode 100644 index 00000000000..c102084e384 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-usebakingsets.md @@ -0,0 +1,38 @@ +# Bake multiple scenes together with Baking Sets + +If you [load multiple scenes simultaneously](https://docs.unity3d.com/Documentation/Manual/MultiSceneEditing.html) in your project, for example if you load multiple scenes at the same time in an open world game, you can add the scenes to a single Baking Set so you can bake the lighting for all the scenes together. + +Refer to [Understanding Adaptive Probe Volumes](probevolumes-concept.md#baking-sets) for more information about Baking Sets. + +## Create a Baking Set + +To place multiple scenes in a single Baking Set and bake them together, follow these steps: + +1. From the main menu, select **Window** > **Rendering** > **Lighting**. +2. Set **Baking Mode** to **Baking Set**. +3. In **Current Baking Set**, select an existing Baking Set asset, or select **New** to create a new Baking Set. +4. Use the **Add** (**+**) button to add scenes. + +You can only add each scene to a single Baking Set. + +To remove a scene from a Baking Set, select the scene in the **Scenes in Baking Set** list, then select the **Remove** (**-**) button. + +## Bake a Baking Set + +Select **Generate Lighting** to bake the lighting in all the scenes in a Baking Set. + +The High Definition Render Pipeline (HDRP) uses the settings from the Baking Set, and serializes the results in the `Assets` folder, in a subfolder with the same name as the active scene. You can move or rename the folder. + +For faster iteration times, disable **Bake** next to a scene name. This stops Unity from baking lighting data for this scene. This might result in incomplete data, but it can help reduce baking time when you're iterating on parts of a large world. + +### Load a scene + +Unity doesn't automatically load the scenes in a Baking Set when you select the scene in the **Scenes** list. To load a scene, select **Load Baking Set**. + +When you load multiple scenes together, the lighting might be too bright because HDRP combines light from all the scenes. Refer to [Set up multiple Scenes](https://docs.unity3d.com/Manual/setupmultiplescenes.html) for more information on loading and unloading Scenes. + +You can load multiple scenes together only if they belong to the same Baking Set.
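+
+At runtime, you can load scenes from the same Baking Set additively with the standard `SceneManager` API, so that HDRP can use the lighting data the set shares between them. The following is a minimal sketch; the scene names are placeholders for scenes that belong to one Baking Set in your project:
+
+```C#
+using UnityEngine;
+using UnityEngine.SceneManagement;
+
+public class LoadBakingSetScenes : MonoBehaviour
+{
+    // Placeholder scene names; replace them with scenes from your Baking Set.
+    [SerializeField] string[] sceneNames = { "Forest", "Village" };
+
+    void Start()
+    {
+        // Load each scene additively so they're all open at the same time,
+        // matching how they were baked together in the Baking Set.
+        foreach (var sceneName in sceneNames)
+            SceneManager.LoadScene(sceneName, LoadSceneMode.Additive);
+    }
+}
+```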
+ +## Additional resources + +- [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes.md index a9d49f6b758..3f11a61b5fa 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes.md @@ -1,19 +1,26 @@ -# Probe Volumes +# Adaptive Probe Volumes (APV) -Probe Volumes make [Light Probes](https://docs.unity3d.com/Manual/LightProbes.html) easier to use by automating placement. They also provide higher quality, more accurate lighting, because they light per-pixel not per-object. +Adaptive Probe Volumes (APV) make [Light Probes](https://docs.unity3d.com/Manual/LightProbes.html) easier to use by automating placement. They also provide higher quality, more accurate lighting, because they light per-pixel not per-object. -| Topic | Description | +| Topic | Description | |--------------------------|-------------------------------------------------------------| -| [Understand Probe Volumes](probevolumes-concept.md) | The purpose of Probe Volumes and what you can do with them. | -| [Use Probe Volumes](probevolumes-use.md) | How to add Probe Volumes to your project and configure them. | -| [Display and adjust Probe Volumes](probevolumes-showandadjust.md) | How to visualize and adjust the structure of Probe Volumes. | -| [Fix issues with Probe Volumes](probevolumes-fixissues.md) | How to reduce light leaks and seams in your lighting result. | -| [Streaming](probevolumes-streaming.md) | How Probe Volumes stream lighting data to provide lighting for large open worlds. | -| [Sky Occlusion](probevolumes-skyocclusion.md) | How to use Sky Occlusion with Probe Volumes for dynamic sky lighting. | -| [Settings and properties related to Probe Volumes](probevolumes-settings.md) | Learn about Probe Volume settings. | +| [Understanding Adaptive Probe Volumes](probevolumes-concept.md) | The purpose of Adaptive Probe Volumes and what you can do with them. | +| [Use Adaptive Probe Volumes](probevolumes-use.md) | Add Adaptive Probe Volumes to your project and configure them. | +| [Display Adaptive Probe Volumes](probevolumes-showandadjust.md) | Visualize the structure of Adaptive Probe Volumes. | +| [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md) | Change the size of an Adaptive Probe Volume, or increase the density of Light Probes. | +| [Bake multiple scenes together with Baking Sets](probevolumes-usebakingsets.md) | Add scenes to a Baking Set so you can bake the lighting for all the scenes together. | +| [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) | Use multiple Lighting Scenarios to store baking results for different scene setups, and switch between them at runtime. | +| [Streaming Adaptive Probe Volumes](probevolumes-streaming.md) | How Adaptive Probe Volumes stream lighting data to provide lighting for large open worlds. | +| [Sky Occlusion](probevolumes-skyocclusion.md) | How to use Sky Occlusion with Adaptive Probe Volumes for dynamic sky lighting. | +| [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) | Reduce light leaks and seams in your lighting result. 
| +| [Adaptive Probe Volume Inspector window reference](probevolumes-inspector-reference.md) | Reference for the Adaptive Probe Volume Inspector window. | +| [Adaptive Probe Volumes panel reference](probevolumes-lighting-panel-reference.md) | Reference for the Adaptive Probe Volumes panel in the Lighting settings. | +| [Probe Volumes Options Override reference](probevolumes-options-override-reference.md) | Reference for the Adaptive Probe Volumes Options Override. | +| [Probe Adjustment Volume component reference](probevolumes-adjustment-volume-component-reference.md) | Reference for the Probe Adjustment Volume component. | ## Additional resources * [Light Probes](https://docs.unity3d.com/Manual/LightProbes.html) * [Light Probes for moving objects](https://docs.unity3d.com/Manual/LightProbes-MovingObjects.html) * [Light Probe Group](https://docs.unity3d.com/Manual/class-LightProbeGroup.html) +* [Rendering Debugger](rendering-debugger-window-reference.md) diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/rendering-debugger-window-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/rendering-debugger-window-reference.md index cc4e364fcd2..4a2fab735f0 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/rendering-debugger-window-reference.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/rendering-debugger-window-reference.md @@ -21,10 +21,10 @@ Refer to [Use the Rendering debugger](use-the-rendering-debugger.md) for more in The **Decals** panel has tools that you can use to debug [decals](decal-material-inspector-reference.md) affecting transparent objects in your project. -| **Debug Option** | **Description** | +| **Debug Option** | **Description** | | ----------------- | ------------------------------------------------------------ | | **Display Atlas** | Enable the checkbox to display the decal atlas for a Camera in the top left of that Camera's view. | -| **Mip Level** | Use the slider to select the mip level for the decal atlas. The higher the mip level, the blurrier the decal atlas. | +| **Mip Level** | Use the slider to select the mip level for the decal atlas. The higher the mip level, the blurrier the decal atlas. | @@ -40,27 +40,27 @@ Use the [runtime shortcuts](#Navigation at runtime) to open the Display stats wi The Frame Stats section displays the average, minimum, and maximum value of each property. HDRP calculates each Frame Stat value over the 30 most recent frames. -| **Property** | | **Description** | +| **Property** | | **Description** | | ---------------------------- | ----------------------- | ------------------------------------------------------------ | -| **Frame Rate** | | The frame rate (in frames per second) for the current camera view. | -| **Frame Time** | | The total frame time for the current camera view. | -| **CPU Main Thread Frame** | | The total time (in milliseconds) between the start of the frame and the time when the Main Thread finished the job. | -| **CPU Render Thread Frame** | | The time (in milliseconds) between the start of the work on the Render Thread and the time Unity waits to render the present frame ([Gfx.PresentFrame](https://docs.unity3d.com/2022.1/Documentation/Manual/profiler-markers.html)). | -| **CPU Present Wait** | | The time (in milliseconds) that the CPU spent waiting for Unity to render the present frame ([Gfx.PresentFrame](https://docs.unity3d.com/2022.1/Documentation/Manual/profiler-markers.html)) during the last frame. 
| -| **GPU Frame** | | The amount of time (in milliseconds) the GPU takes to render a given frame. | -| **RT Mode** | | When you [enable ray tracing](Ray-Tracing-Getting-Started.md), this property shows the ray tracing quality mode that HDRP uses during rendering. HDRP updates this value once every frame based on the previous frame. | -| **Count Rays** | | Count the number of traced rays for each effect (in MRays / frame). This property only appears when you enable ray tracing. | -| | **Ambient Occlusion** | The number of rays that HDRP traced for [Ambient Occlusion (AO)](Ambient-Occlusion.md) computations, when you enable realtime ambient occlusion (RT AO). | -| | **Shadows Directional** | The number of rays that HDRP traced for [directional lights](Light-Component.md) when you enable ray-traced shadows. | -| | **Shadows Area** | The number of rays that HDRP traced towards area lights when you enable [ray-traced shadows](Ray-Traced-Shadows.md). | -| | **Shadows Point/Spot** | The number of rays that HDRP traced towards point and spot lights when you enable ray-traced shadows. | -| | **Reflection Forward** | The number of rays that HDRP traced for reflection computations that use [forward shading](Forward-And-Deferred-Rendering.md). | -| | **Reflection Deferred** | The number of rays that HDRP traced for reflection computations that use [deferred shading](Forward-And-Deferred-Rendering.md). | -| | **Diffuse GI Forward** | The number of rays that HDRP traced for diffuse [Global Illumination (GI)](Ray-Traced-Global-Illumination.md) computations that use forward shading. | -| | **Diffuse GI Deferred** | The number of rays that HDRP traced for diffuse Global Illumination (GI) computations that use deferred shading. | -| | **Recursive** | The number of rays that HDRP traced for diffuse Global Illumination (GI) computations when you enable recursive ray tracing. | -| | **Total** | The total number of rays that HDRP traced. | -| **Debug XR Layout** | | Display debug information for XR passes.
This mode is only available in editor and development builds. | +| **Frame Rate** | | The frame rate (in frames per second) for the current camera view. | +| **Frame Time** | | The total frame time for the current camera view. | +| **CPU Main Thread Frame** | | The total time (in milliseconds) between the start of the frame and the time when the Main Thread finished the job. | +| **CPU Render Thread Frame** | | The time (in milliseconds) between the start of the work on the Render Thread and the time Unity waits to render the present frame ([Gfx.PresentFrame](https://docs.unity3d.com/2022.1/Documentation/Manual/profiler-markers.html)). | +| **CPU Present Wait** | | The time (in milliseconds) that the CPU spent waiting for Unity to render the present frame ([Gfx.PresentFrame](https://docs.unity3d.com/2022.1/Documentation/Manual/profiler-markers.html)) during the last frame. | +| **GPU Frame** | | The amount of time (in milliseconds) the GPU takes to render a given frame. | +| **RT Mode** | | When you [enable ray tracing](Ray-Tracing-Getting-Started.md), this property shows the ray tracing quality mode that HDRP uses during rendering. HDRP updates this value once every frame based on the previous frame. | +| **Count Rays** | | Count the number of traced rays for each effect (in MRays / frame). This property only appears when you enable ray tracing. | +| | **Ambient Occlusion** | The number of rays that HDRP traced for [Ambient Occlusion (AO)](Ambient-Occlusion.md) computations, when you enable realtime ambient occlusion (RT AO). | +| | **Shadows Directional** | The number of rays that HDRP traced for [directional lights](Light-Component.md) when you enable ray-traced shadows. | +| | **Shadows Area** | The number of rays that HDRP traced towards area lights when you enable [ray-traced shadows](Ray-Traced-Shadows.md). | +| | **Shadows Point/Spot** | The number of rays that HDRP traced towards point and spot lights when you enable ray-traced shadows. | +| | **Reflection Forward** | The number of rays that HDRP traced for reflection computations that use [forward shading](Forward-And-Deferred-Rendering.md). | +| | **Reflection Deferred** | The number of rays that HDRP traced for reflection computations that use [deferred shading](Forward-And-Deferred-Rendering.md). | +| | **Diffuse GI Forward** | The number of rays that HDRP traced for diffuse [Global Illumination (GI)](Ray-Traced-Global-Illumination.md) computations that use forward shading. | +| | **Diffuse GI Deferred** | The number of rays that HDRP traced for diffuse Global Illumination (GI) computations that use deferred shading. | +| | **Recursive** | The number of rays that HDRP traced for diffuse Global Illumination (GI) computations when you enable recursive ray tracing. | +| | **Total** | The total number of rays that HDRP traced. | +| **Debug XR Layout** | | Display debug information for XR passes.
This mode is only available in editor and development builds. | @@ -74,12 +74,12 @@ The **Bottlenecks** section describes the distribution of the last 60 frames acr #### Bottleneck categories -| **Category** | **Description** | +| **Category** | **Description** | | ------------------- | ------------------------------------------------------------ | -| **CPU** | The percentage of the last 60 frames in which the CPU limited the frame time. | -| **GPU** | The percentage of the last 60 frames in which the GPU limited the frame time. | +| **CPU** | The percentage of the last 60 frames in which the CPU limited the frame time. | +| **GPU** | The percentage of the last 60 frames in which the GPU limited the frame time. | | **Present limited** | The percentage of the last 60 frames in which the frame time was limited by the following presentation constraints:
• Vertical Sync (Vsync): Vsync synchronizes rendering to the refresh rate of your display.
•[Target framerate]([Application.targetFrameRate](https://docs.unity3d.com/ScriptReference/Application-targetFrameRate.html)): A function that you can use to manually limit the frame rate of an application. If a frame is ready before the time you specify in targetFrameRate, Unity waits before presenting the frame. | -| **Balanced** | This percentage of the last 60 frames in which the frame time was not limited by any of the above categories. A frame that is 100% balanced indicates the processing time for both CPU and GPU is approximately equal. | +| **Balanced** | This percentage of the last 60 frames in which the frame time was not limited by any of the above categories. A frame that is 100% balanced indicates the processing time for both CPU and GPU is approximately equal. | #### Bottleneck example @@ -98,12 +98,12 @@ In this example, the bottleneck is the GPU. The Detailed Stats section displays the amount of time in milliseconds that each rendering step takes on the CPU and GPU. HDRP updates these values once every frame based on the previous frame. -| **Property** | **Description** | +| **Property** | **Description** | | -------------------------------- | ------------------------------------------------------------ | | Update every second with average | Calculate average values over one second and update every second. | -| Hide empty scopes | Hide profiling scopes that use 0.00ms of processing time on the CPU and GPU. | -| Count Rays | Count the number of traced rays for each effect (in MRays / frame). This mode only appears when you enable ray tracing. | -| Debug XR Layout | Enable to display debug information for [XR](https://docs.unity3d.com/Manual/XR.html) passes. This mode only appears in the editor and development builds. | +| Hide empty scopes | Hide profiling scopes that use 0.00ms of processing time on the CPU and GPU. | +| Count Rays | Count the number of traced rays for each effect (in MRays / frame). This mode only appears when you enable ray tracing. | +| Debug XR Layout | Enable to display debug information for [XR](https://docs.unity3d.com/Manual/XR.html) passes. This mode only appears in the editor and development builds. | @@ -112,93 +112,93 @@ The Detailed Stats section displays the amount of time in milliseconds that each The **Material** panel has tools that you can use to visualize different Material properties. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Debug OptionDescription
Common Material PropertyUse the drop-down to select a Material property to visualize on every GameObject on screen. All HDRP Materials share the properties available.
MaterialUse the drop-down to select a Material property to visualize on every GameObject on screen using a specific Shader. The properties available depend on the HDRP Material type you select in the drop-down.
Rendering Layer MaskThese parameters only appear when you set the Material Debug Option to Rendering Layers.
Filter with Light Layers from Selected LightEnable the checkbox to visualize GameObjects that the selected light affects.
Use Light's Shadow Layer MaskEnable the checkbox to visualize GameObjects that cast shadows for the selected light.
Filter LayersUse the drop-down to filter layers that you want to display. GameObjects that have a matching layer appear in a specific color. Use **Layers Color** to define this color.
Layers ColorUse the color pickers to select the display color of each rendering layer.
EngineUse the drop-down to select a Material property to visualize on every GameObject on a screen that uses a specific Shader. The properties available are the same as Material but are in the form that the lighting engine uses them (for example, Smoothness is Perceptual Roughness).
AttributesUse the drop-down to select a 3D GameObject attribute, like Texture Coordinates or Vertex Color, to visualize on screen.
PropertiesUse the drop-down to select a property that the debugger uses to highlight GameObjects on screen. The debugger highlights GameObjects that use a Material with the property that you select.
GBufferUse the drop-down to select a property to visualize from the GBuffer for deferred Materials.
Material ValidatorUse the drop-down to select properties to display validation colors for:
  • Diffuse Color: Select this option to check if the diffuse colors in your Scene adheres to an acceptable PBR range. If the Material color is out of this range, the debugger displays it in the Too High Color color if it's above the range, or in the Too Low Color if it's below the range.
  • -
  • Metal or SpecularColor: Select this option to check if a pixel contains a metallic or specular color that adheres to an acceptable PBR range. If it doesn't, the debugger highlights it in the Not A Pure Metal Color. For information about the acceptable PBR ranges in Unity, see the Material Charts documentation.
Too High ColorUse the color picker to select the color that the debugger displays when a Material's diffuse color is above the acceptable PBR range.
This property only appears when you select Diffuse Color or Metal or SpecularColor from the Material Validator drop-down.
Too Low ColorUse the color picker to select the color that the debugger displays when a Material's diffuse color is below the acceptable PBR range.
This property only appears when you select Diffuse Color or Metal or SpecularColor from the Material Validator drop-down.
Not A Pure Metal ColorUse the color picker to select the color that the debugger displays if a pixel defined as metallic has a non-zero albedo value. The debugger only highlights these pixels if you enable the True Metals checkbox.
This property only appears when you select Diffuse Color or Metal or SpecularColor from the Material Validator drop-down.
Pure MetalsEnable the checkbox to make the debugger highlight any pixels which Unity defines as metallic, but which have a non-zero albedo value. The debugger uses the Not A Pure Metal Color to highlight these pixels.
This property only appears when you select Diffuse Color or Metal or SpecularColor from the Material Validator drop-down.
Override Global Material Texture Mip BiasEnable the checkbox to override the mipmap level bias of texture samplers in material shaders. Use the Debug Global Material Texture Mip Bias Value to control the mipmap level bias override.
When using this feature, be aware of the following: -
    -
  • It only affects gbuffer, forward opaque, transparency and decal passes.
  • -
  • It doesn't affect virtual texturing sampling.
  • -
  • It doesn't affect custom passes.
  • -
Debug Global Material Texture Mip Bias ValueUse the slider to control the amount of mip bias of texture samplers in material shaders.
Debug OptionDescription
Common Material PropertyUse the drop-down to select a Material property to visualize on every GameObject on screen. All HDRP Materials share the properties available.
MaterialUse the drop-down to select a Material property to visualize on every GameObject on screen using a specific Shader. The properties available depend on the HDRP Material type you select in the drop-down.
Rendering Layer MaskThese parameters only appear when you set the Material Debug Option to Rendering Layers.
Filter with Light Layers from Selected LightEnable the checkbox to visualize GameObjects that the selected light affects.
Use Light's Shadow Layer MaskEnable the checkbox to visualize GameObjects that cast shadows for the selected light.
Filter LayersUse the drop-down to filter layers that you want to display. GameObjects that have a matching layer appear in a specific color. Use **Layers Color** to define this color.
Layers ColorUse the color pickers to select the display color of each rendering layer.
EngineUse the drop-down to select a Material property to visualize on every GameObject on screen that uses a specific Shader. The properties available are the same as Material, but in the form that the lighting engine uses (for example, Smoothness is Perceptual Roughness).
AttributesUse the drop-down to select a 3D GameObject attribute, like Texture Coordinates or Vertex Color, to visualize on screen.
PropertiesUse the drop-down to select a property that the debugger uses to highlight GameObjects on screen. The debugger highlights GameObjects that use a Material with the property that you select.
GBufferUse the drop-down to select a property to visualize from the GBuffer for deferred Materials.
Material ValidatorUse the drop-down to select properties to display validation colors for:
  • Diffuse Color: Select this option to check if the diffuse colors in your Scene adhere to an acceptable PBR range. If the Material color is out of this range, the debugger displays it in the Too High Color if it's above the range, or in the Too Low Color if it's below the range.
  • +
  • Metal or SpecularColor: Select this option to check if a pixel contains a metallic or specular color that adheres to an acceptable PBR range. If it doesn't, the debugger highlights it in the Not A Pure Metal Color. For information about the acceptable PBR ranges in Unity, see the Material Charts documentation.
Too High ColorUse the color picker to select the color that the debugger displays when a Material's diffuse color is above the acceptable PBR range.
This property only appears when you select Diffuse Color or Metal or SpecularColor from the Material Validator drop-down.
Too Low ColorUse the color picker to select the color that the debugger displays when a Material's diffuse color is below the acceptable PBR range.
This property only appears when you select Diffuse Color or Metal or SpecularColor from the Material Validator drop-down.
Not A Pure Metal ColorUse the color picker to select the color that the debugger displays if a pixel defined as metallic has a non-zero albedo value. The debugger only highlights these pixels if you enable the Pure Metals checkbox.
This property only appears when you select Diffuse Color or Metal or SpecularColor from the Material Validator drop-down.
Pure MetalsEnable the checkbox to make the debugger highlight any pixels which Unity defines as metallic, but which have a non-zero albedo value. The debugger uses the Not A Pure Metal Color to highlight these pixels.
This property only appears when you select Diffuse Color or Metal or SpecularColor from the Material Validator drop-down.
Override Global Material Texture Mip BiasEnable the checkbox to override the mipmap level bias of texture samplers in material shaders. Use the Debug Global Material Texture Mip Bias Value to control the mipmap level bias override.
When using this feature, be aware of the following: +
    +
  • It only affects gbuffer, forward opaque, transparency and decal passes.
  • +
  • It doesn't affect virtual texturing sampling.
  • +
  • It doesn't affect custom passes.
  • +
Debug Global Material Texture Mip Bias ValueUse the slider to control the amount of mip bias of texture samplers in material shaders.
If the geometry or the shading normal is denormalized, the view renders the target pixel red. @@ -211,54 +211,54 @@ The **Lighting** panel has tools that you can use to visualize various component - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
Shadow Debug OptionDescription
Debug ModeUse the drop-down to select which shadow debug information to overlay on the screen:
-
    -
  • None: Select this mode to remove the shadow debug information from the screen.
  • -
  • VisualizePunctualLightAtlas: Select this mode to overlay the shadow atlas for Punctual Lights in your Scene.
  • -
  • VisualizeDirectionalLightAtlas: Select this mode to overlay the shadow atlas for Directional Lights in your Scene.
  • -
  • VisualizeAreaLightAtlas: Select this mode to overlay the shadow atlas for area Lights in your Scene.
  • -
  • VisualizeShadowMap: Select this mode to overlay a single shadow map for a Light in your Scene.
  • -
  • SingleShadow: Select this mode to replace the Scene's lighting with a single Light. To select which Light to isolate, see Use Selection or Shadow Map Index.
  • -
Use SelectionEnable the checkbox to display the shadow map for the Light you select in the Scene.
This property only appears when you select VisualizeShadowMap or SingleShadow from the Shadow Debug Mode drop-down.
Shadow Map IndexUse the slider to select the index of the shadow map to view. To use this property correctly, you must have at least one Light in your Scene that uses shadow maps.
Global Scale FactorUse the slider to set the global scale that HDRP applies to the shadow rendering resolution.
Clear Shadow AtlasEnable the checkbox to clear the shadow atlas every frame.
Range Minimum ValueSet the minimum shadow value to display in the various shadow debug overlays.
Range Maximum ValueSet the maximum shadow value to display in the various shadow debug overlays.
Log Cached Shadow Atlas StatusSet the maximum shadow value to display in the various shadow debug overlays.
Shadow Debug OptionDescription
Debug ModeUse the drop-down to select which shadow debug information to overlay on the screen:
+
    +
  • None: Select this mode to remove the shadow debug information from the screen.
  • +
  • VisualizePunctualLightAtlas: Select this mode to overlay the shadow atlas for Punctual Lights in your Scene.
  • +
  • VisualizeDirectionalLightAtlas: Select this mode to overlay the shadow atlas for Directional Lights in your Scene.
  • +
  • VisualizeAreaLightAtlas: Select this mode to overlay the shadow atlas for area Lights in your Scene.
  • +
  • VisualizeShadowMap: Select this mode to overlay a single shadow map for a Light in your Scene.
  • +
  • SingleShadow: Select this mode to replace the Scene's lighting with a single Light. To select which Light to isolate, see Use Selection or Shadow Map Index.
  • +
Use SelectionEnable the checkbox to display the shadow map for the Light you select in the Scene.
This property only appears when you select VisualizeShadowMap or SingleShadow from the Shadow Debug Mode drop-down.
Shadow Map IndexUse the slider to select the index of the shadow map to view. To use this property correctly, you must have at least one Light in your Scene that uses shadow maps.
Global Scale FactorUse the slider to set the global scale that HDRP applies to the shadow rendering resolution.
Clear Shadow AtlasEnable the checkbox to clear the shadow atlas every frame.
Range Minimum ValueSet the minimum shadow value to display in the various shadow debug overlays.
Range Maximum ValueSet the maximum shadow value to display in the various shadow debug overlays.
Log Cached Shadow Atlas StatusPrint the status of the cached shadow atlas to the Console.
@@ -476,88 +476,71 @@ The **Lighting** panel has tools that you can use to visualize various component The **Volume** panel has tools that you can use to visualize the Volume Components affecting a camera. -| **Debug Option** | **Description** | +| **Debug Option** | **Description** | | ---------------------- | ---------------------------------------------------- | -| **Component** | Use the drop-down to select which volume component to visualize. | -| **Camera** | Use the drop-down to select which camera to use as volume anchor. | -| **Parameter** | List of parameters for the selected component. | +| **Component** | Use the drop-down to select which volume component to visualize. | +| **Camera** | Use the drop-down to select which camera to use as volume anchor. | +| **Parameter** | List of parameters for the selected component. | | **Interpolated Value** | Current value affecting the chosen camera for each parameter. | -| **Other columns** | Each one of the remaining columns display the parameter values of a volume affecting the selected **Camera**. They're sorted from left to right by decreasing influence. | +| **Other columns** | Each one of the remaining columns displays the parameter values of a volume affecting the selected **Camera**. They're sorted from left to right by decreasing influence. | ## Probe Volume panel -These settings make it possible for you to visualize [Probe Volumes](probevolumes.md) in your Scene, and configure the visualization. - -
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
Property - Description -
Subdivision Visualization
Display CellsDisplay cells, which are the units used for streaming
Display BricksDisplay Bricks.
Realtime Update Preview Probe Volume data in the Scene without baking.
Culling DistanceDetermines how far from the Scene Camera Unity draws debug visuals for cells and bricks, in meters.
Probe Visualization
Display ProbesDisplay probe positions.
Debug Probe SamplingDisplay how probes are sampled at a selected pixel.
Virtual OffsetDisplay the offsets Unity applies to Light Probe capture positions.
Culling DistanceDetermines how far from the Scene Camera Unity draws Light Probe position indicators, in meters.
Scenario Blending
Number of Cells Blended Per FrameDetermines the maximum number of cells Unity blends per frame.
Turnover RateDelay in seconds between updates to cell, brick, and Light Probe positions during Lighting Scenario blending.
Scenario To Blend WithSelect another Lighting Scenario to blend with the active lighting scenario.
Scenario Blending FactorAdjust to blend between Light Scenarios.
+These settings make it possible for you to visualize [Adaptive Probe Volumes](probevolumes.md) in your Scene, and configure the visualization. + +### Subdivision Visualization + +| **Property** | **Sub-property** | **Description** | +|-|-|-| +| **Display Cells** || Display cells. Refer to [Understanding Adaptive Probe Volumes](probevolumes-concept.md) for more information. | +| **Display Bricks** || Display bricks. Refer to [Understanding Adaptive Probe Volumes](probevolumes-concept.md) for more information. | +| **Live Subdivision Preview** || Enable a preview of Adaptive Probe Volume data in the scene without baking. This might make the Editor slower. This setting appears only if you select **Display Cells** or **Display Bricks**. | +|| **Cell Updates Per Frame** | Set the number of cells, bricks, and probe positions to update per frame. Higher values might make the Editor slower. The default value is 4. This property appears only if you enable **Live Subdivision Preview**. | +|| **Update Frequency** | Set how frequently Unity updates cells, bricks, and probe positions, in seconds. The default value is 1. This property appears only if you enable **Live Subdivision Preview**. | +| **Debug Draw Distance** || Set how far from the scene camera Unity draws debug visuals for cells and bricks, in meters. The default value is 500. | + +### Probe Visualization + +| **Property** | **Sub-property** | **Description** | +|-|-|-| +| **Display Probes** || Display probes. | +|| **Probe Shading Mode** | Set what the Rendering Debugger displays. The options are:
  • SH: Display the [spherical harmonics (SH) lighting data](https://docs.unity3d.com/Manual/LightProbes-TechnicalInformation.html) for the final color calculation. The number of bands depends on the **SH Bands** setting in the active [HDRP Asset](HDRP-Asset.md).
  • SHL0: Display the spherical harmonics (SH) lighting data with only the first band.
  • SHL0L1: Display the spherical harmonics (SH) lighting data with the first two bands.
  • Validity: Display whether probes are valid, based on the number of backfaces the probe samples. Refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) for more information about probe validity.
  • Probe Validity Over Dilation Threshold: Display red if a probe samples too many backfaces, based on the **Validity Threshold** set in the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md). This means the probe can't be baked or sampled.
  • Invalidated By Touchup Volumes: Display probes that a [Probe Adjustment Volume component](probevolumes-adjustment-volume-component-reference.md) has made invalid.
  • Size: Display a different color for each size of [brick](probevolumes-concept.md).
| +|| **Debug Size** | Set the size of the displayed probes. The default is 0.3. | +|| **Exposure Compensation** | Set the brightness of the displayed probes. Decrease the value to increase brightness. The default is 0. This property appears only if you set **Probe Shading Mode** to **SH**, **SHL0**, or **SHL0L1**. | +|| **Max Subdivisions Displayed** | Set the lowest probe density to display. For example, set this to 0 to display only the highest probe density. | +|| **Min Subdivisions Displayed** | Set the highest probe density to display. | +| **Debug Probe Sampling** || Display how probes are sampled for a pixel. In the Scene view, in the **Adaptive Probe Volumes** overlay, select **Select Pixel** to change the pixel. | +|| **Debug Size** | Set the size of the **Debug Probe Sampling** display. | +|| **Debug With Sampling Noise** | Enable sampling noise for this debug view. Enabling this gives more accurate information, but makes the information more difficult to read. | +| **Virtual Offset Debug** || Display the offsets Unity applies to Light Probe capture positions. | +|| **Debug Size** | Set the size of the arrows that represent Virtual Offset values. | +| **Debug Draw Distance** || Set how far from the scene camera Unity draws debug visuals for cells and bricks, in meters. The default is 200. | + +### Streaming + +Use the following properties to control how HDRP streams Adaptive Probe Volumes. Refer to [Streaming Adaptive Probe Volumes](probevolumes-streaming.md) for more information. + +| **Property** | **Description** | +| ------------ | --------------- | +| **Freeze Streaming** | Stop Unity from streaming probe data. | +| **Display Streaming Score** | If you enable **Display Cells**, this setting darkens cells that have a lower priority for streaming. Cells closer to the camera usually have the highest priority. | +| **Maximum cell streaming** | Stream as many cells as possible every frame. | +| **Display Index Fragmentation** | Open an overlay that displays how fragmented the streaming memory is. A green square is an area of used memory. The more spaces between the green squares, the more fragmented the memory. | +| **Index Fragmentation Rate** | Displays the amount of fragmentation as a numerical value, where 0 is no fragmentation. | +| **Verbose Log** | Log information about streaming. | + +### Scenario Blending + +Use the following properties to control how HDRP blends Lighting Scenarios. Refer to [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) for more information. + +| **Property** | **Description** | +| - | - | +| **Number of Cells Blended Per Frame** | Determines the maximum number of cells Unity blends per frame. The default is 10,000. | +| **Turnover Rate** | Set the blending priority of cells close to the camera. The range is 0 to 1, where 0 sets the cells close to the camera with high priority, and 1 sets all cells with equal priority. Increase **Turnover Rate** to avoid cells close to the camera blending too frequently. | +| **Scenario To Blend With** | Select a Lighting Scenario to blend with the active Lighting Scenario. | +| **Scenario Blending Factor** | Set how far to blend from the active Lighting Scenario to the **Scenario To Blend With**. The range is 0 to 1, where 0 is fully the active Lighting Scenario, and 1 is fully the **Scenario To Blend With**. 
| @@ -669,10 +652,10 @@ For more information on how to debug compute thickness, refer to - ### Type | **Property** | **Description** | | ------------ | ------------------------------------------------------------ | | Type | Select the type of Lens Flare Element this asset creates:
• [Image](#Image)
• [Circle](#Circle)
• [Polygon](#Polygon) | - - #### Image -![](../../images/shared/lens-flare/LensFlareShapeImage.png) - | **Property** | **Description** | | --------------------- | ------------------------------------------------------------ | | Flare Texture | The Texture this lens flare element uses. | @@ -47,8 +41,6 @@ The Lens Flare Element asset has the following properties: #### Circle -![](../../images/shared/lens-flare/LensFlareShapeCircle.png) - | **Property** | **Description** | | ------------ | ------------------------------------------------------------ | | Gradient | Controls the offset of the circular flare's gradient. This value ranges from 0 to 1. | @@ -59,8 +51,6 @@ The Lens Flare Element asset has the following properties: #### Polygon -![](../../images/shared/lens-flare/LensFlareShapePolygon.png) - | **Property** | **Description** | | ------------ | ------------------------------------------------------------ | | Gradient | Controls the offset of the polygon flare's gradient. This value ranges from 0 to 1. | @@ -73,8 +63,6 @@ The Lens Flare Element asset has the following properties: #### Ring -![](images/LensFlareShapeRing.png) - | **Property** | **Description** | | --------------- | -------------------------------------------------------------- | | Gradient | Controls the offset of the circular flare's gradient. This value ranges from 0 to 1. | @@ -89,8 +77,6 @@ The Lens Flare Element asset has the following properties: #### Lens Flare Data Driven SRP -![](images/LensFlareShapeLensFlareDataSRP.png) - | **Property** | **Description** | | --------------- | ------------------------------------------------------------ | | Asset | Lens Flare Data SRP asset as an element. | @@ -98,16 +84,15 @@ The Lens Flare Element asset has the following properties: Unity supports recursive Lens Flare Data SRP assets, but with a hard cutoff after 16 recursive calls. For instance, asset A contains asset B, which contains asset A (infinite recursion). This triggers a warning and executes only 16 recursions: -~~~~~~ + +``` "LensFlareSRPAsset contains too deep recursive asset (> 16). Be careful to not have recursive aggregation, A contains B, B contains A, ... which will produce an infinite loop." -~~~~~~ +``` ## Color -![](../../images/shared/lens-flare/LensFlareColor.png) - | **Property** | **Description** | | ----------------------- | ------------------------------------------------------------ | | Color Type | Select the color type of Lens Flare Element this asset creates:
• [Constant](#ColorConstant)
• [Radial](#ColorRadial)
• [Angular](#ColorAngular) | @@ -120,8 +105,6 @@ That will trigger a warning and execution 16 recursions: ### Constant Color -![](../../images/shared/lens-flare/LensFlareColorConstant.png) - | **Property** | **Description** | | ----------------------- | ------------------------------------------------------------ | | Tint | Changes the tint of the lens flare. If this asset is attached to the light, this property is based on the light tint. | @@ -130,8 +113,6 @@ That will trigger a warning and execution 16 recursions: ### Radial Color -![](../../images/shared/lens-flare/LensFlareColorRadialGradient.png) - | **Property** | **Description** | | ----------------------- | ------------------------------------------------------------ | | Tint Radial | Specifies the radial gradient tint of the element. If the element type is set to Image, the Flare Texture is multiplied by this color. | @@ -140,8 +121,6 @@ That will trigger a warning and execution 16 recursions: ### Angular Color -![](../../images/shared/lens-flare/LensFlareColorAngularGradient.png) - | **Property** | **Description** | | ----------------------- | ------------------------------------------------------------ | | Tint Angular | Specifies the angular gradient tint of the element. If the element type is set to Image, the Flare Texture is multiplied by this color. | @@ -154,16 +133,10 @@ That will trigger a warning and execution 16 recursions: ### Cutoff -![](../../images/shared/lens-flare/LensFlareCutoff.png) - -| **Property** | **Description** | -| ----------------------- | ------------------------------------------------------------ | -| Cutoff Speed | Sets the speed at which the radius occludes the element.

A value of zero (with a large radius) does not occlude anything. The higher this value, the faster the element is occluded on the side of the screen.

The effect of this value is more noticeable with multiple elements. | +| Cutoff Radius | Sets the normalized radius of the lens shape used to occlude the lens flare element. A radius of one is equivalent to the scale of the element. | @@ -171,8 +144,6 @@ A radius of one is equivalent to the scale of the element. | ### Transform -![](../../images/shared/lens-flare/LensFlareTransform.png) - | **Property** | **Description** | | ----------------------- | ------------------------------------------------------------ | | Position Offset | Defines the offset of the lens flare's position in screen space, relative to its source. | @@ -186,8 +157,6 @@ A radius of one is equivalent to the scale of the element. | ### Axis Transform -![](../../images/shared/lens-flare/LensFlareAxisTransform.png) - | **Property** | **Description** | | ----------------- | ------------------------------------------------------------ | | Starting Position | Defines the starting position of the lens flare relative to its source. This value operates in screen space. | @@ -198,8 +167,6 @@ A radius of one is equivalent to the scale of the element. | ### Distortion -![](../../images/shared/lens-flare/LensFlareRadialDistortion.png) - | **Property** | **Description** | | --------------- | ------------------------------------------------------------ | | Enable | Set this property to True to enable distortion. | @@ -220,7 +187,6 @@ A radius of one is equivalent to the scale of the element. | | Relative To Center | If true the distortion is relative to center of the screen otherwise relative to lensFlare source screen position. | #### Uniform -![](../../images/shared/lens-flare/LensFlareMultileElementUniform.png) | **Property** | **Description** | | --------------- | ------------------------------------------------------------ | @@ -231,8 +197,6 @@ A radius of one is equivalent to the scale of the element. | #### Curve -![](../../images/shared/lens-flare/LensFlareMultileElementCurve.png) - | **Property** | **Description** | | ---------------- | ------------------------------------------------------------ | | Colors | The range of colors that this asset applies to the lens flares. You can use the **Position Spacing** curve to determine how this range affects each lens flare. | @@ -244,8 +208,6 @@ A radius of one is equivalent to the scale of the element. | #### Random -![](../../images/shared/lens-flare/LensFlareMultileElementRandom.png) - | **Property** | **Description** | | ------------------- | ------------------------------------------------------------ | | Seed | The base value that this asset uses to generate randomness. | diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/lens-flare-component.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/lens-flare-component.md index dfb83ec3c65..b78e3fa3881 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/lens-flare-component.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/lens-flare-component.md @@ -1,4 +1,4 @@ -# Lens Flare (SRP) +# Add lens flares ![](../../images/shared/lens-flare/lens-flare-header.png) @@ -10,7 +10,7 @@ Use the Lens Flare (SRP) component to create lens flares for lights that have sp ## Create a lens flare in SRP -The Lens Flare (SRP) component controls where the lens flare is as well as properties such as attenuation and whether the lens flare considers occlusion. 
For properties that define how the lens flare looks, SRP uses the [Lens Flare (SRP) Data](shared/lens-flare/lens-flare-asset.md) asset. Each Lens Flare (SRP) component must reference a Lens Flare (SRP) data asset to display a lens flare on-screen. +The Lens Flare (SRP) component controls where the lens flare is as well as properties such as attenuation and whether the lens flare considers occlusion. For properties that define how the lens flare looks, SRP uses the [Lens Flare (SRP) Data](lens-flare-asset.md) asset. Each Lens Flare (SRP) component must reference a Lens Flare (SRP) data asset to display a lens flare on-screen. To create a lens flare in a scene: @@ -21,33 +21,7 @@ To create a lens flare in a scene: 5. In the Lens Flare (SRP) component Inspector, assign the new Lens Flare (SRP) Data asset to the **Lens Flare Data** property. 6. Select the Lens Flare (SRP) Data asset and, in the Inspector, add a new element to the **Elements** list. A default white lens flare now renders at the position of the Lens Flare (SRP) component. For information on how to customize how the lens flare looks, see [Lens Flare (SRP) Data](lens-flare-asset.md). -## Properties - -### General - -| **Property** | **Description** | -| --------------- | ------------------------------------------------------------ | -| Lens Flare Data | Select the [Lens Flare (SRP) Data](lens-flare-asset.md) asset this component controls. | -| Intensity | Multiplies the intensity of the lens flare. | -| Scale | Multiplies the scale of the lens flare. | -| Light Override | Specifies the light component where the color and shape values are fetched from when using "Modulate By Light Color" or "Attenuation By Light Shape" properties on a Lens Flare Element. If nothing is specified, the light component from this gameobject is used. | -| Attenuation by Light Shape | Enable this property to automatically change the appearance of the lens flare based on the type of light you attached this component to.
For example, if this component is attached to a spot light and the camera is looking at this light from behind, the lens flare will not be visible.
This property is only available when this component is attached to a light. | -| Attenuation Distance |The distance between the start and the end of the Attenuation Distance Curve.
This value operates between 0 and 1 in world space. | -| Attenuation Distance Curve | Fades out the appearance of the lens flare over the distance between the GameObject this asset is attached to, and the Camera. | -| Scale Distance | The distance between the start and the end of the **Scale Distance Curve**.
This value operates between 0 and 1 in world space. | -| Scale Distance Curve | Changes the size of the lens flare over the distance between the GameObject this asset is attached to, and the Camera. | -| Screen Attenuation Curve | Reduces the effect of the lens flare based on its distance from the edge of the screen. You can use this to display a lens flare at the edge of your screen | - -### Occlusion - -| **Property** | **Description** | -| --------------- | ------------------------------------------------------------ | -| Enable | Enable this property to partially obscure the lens flare based on the depth buffer | -| Background Clouds | When enabled, the occlusion is attenuated by the Background Clouds used on the Visual Environnement (Cloud layer). | -| Volumetric Clouds | When enabled, HDRP uses the volumetric clouds texture to occlude the lens flare. HDRP always considers the lens flare to be behind the volumetric clouds because it calculates occlusion in screen space. | -| Water | When enabled, HDRP uses the Water Rendering (in screen space) for the occlusion. | -| Occlusion Radius | Defines how far from the light source Unity occludes the lens flare. This value is in world space. | -| Sample Count | The number of random samples the CPU uses to generate the **Occlusion Radius.** | -| Occlusion Offset | Offsets the plane that the occlusion operates on. A higher value moves this plane closer to Camera. This value is in world space.
For example, if a lens flare is inside the light bulb, you can use this to sample occlusion outside the light bulb. | -| Occlusion Remap Curve | Specifies the curve used to remap the occlusion of the flare. By default, the occlusion is linear, between 0 and 1. This can be specifically useful to occlude flare more drastically when behind clouds. | -| Allow Off Screen | Enable this property to allow lens flares outside the Camera's view to affect the current field of view. | +Refer to the following for more information: + +- [Lens Flare (SRP) reference](lens-flare-reference.md) +- [Lens Flare (SRP) Data Asset reference](lens-flare-asset.md) diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/lens-flare-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/lens-flare-reference.md new file mode 100644 index 00000000000..56e01ce6a4b --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/lens-flare-reference.md @@ -0,0 +1,9 @@ +# Lens flares references + +Property descriptions for lens flares. + +| Page| +|-| +| [Lens Flare (SRP) reference](lens-flare-srp-reference.md) | +| [Lens Flare (SRP) Data Asset reference](lens-flare-asset.md) | +| [Screen Space Lens Flare override reference](reference-screen-space-lens-flare.md) | \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/lens-flare-srp-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/lens-flare-srp-reference.md new file mode 100644 index 00000000000..53bd57d2b46 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/lens-flare-srp-reference.md @@ -0,0 +1,31 @@ +# Lens Flare (SRP) component reference + +Refer to [Add lens flares](lens-flare-component.md) for information on how to use the Lens Flare (SRP) component. + +## General + +| **Property** | **Description** | +| --------------- | ------------------------------------------------------------ | +| Lens Flare Data | Select the [Lens Flare (SRP) Data](lens-flare-asset.md) asset this component controls. | +| Intensity | Multiplies the intensity of the lens flare. | +| Scale | Multiplies the scale of the lens flare. | +| Light Override | Specifies the light component Unity gets the color and shape values from, if you enable **Modulate By Light Color** or **Attenuation By Light Shape**. If you don't specify a light component, Unity uses the Light component from this GameObject. | +| Attenuation by Light Shape | Enable this property to automatically change the appearance of the lens flare based on the type of light you attached this component to.
For example, if this component is attached to a spot light and the camera is looking at this light from behind, the lens flare is not visible.
This property is only available when this component is attached to a light. | +| Attenuation Distance | The distance between the start and the end of the **Attenuation Distance Curve**.
This value operates between 0 and 1 in world space. | +| Attenuation Distance Curve | Fades out the appearance of the lens flare over the distance between the GameObject this asset is attached to, and the Camera. | +| Scale Distance | The distance between the start and the end of the **Scale Distance Curve**.
This value operates between 0 and 1 in world space. | +| Scale Distance Curve | Changes the size of the lens flare over the distance between the GameObject this asset is attached to, and the Camera. | +| Screen Attenuation Curve | Reduces the effect of the lens flare based on its distance from the edge of the screen. You can use this to display a lens flare at the edge of your screen. | + +## Occlusion + +| **Property** | **Description** | +| --------------- | ------------------------------------------------------------ | +| Enable | Enable this property to partially obscure the lens flare based on the depth buffer. | +| Environment Occlusion | When enabled, the occlusion is attenuated by background clouds, volumetric clouds, fog, and water. | +| Occlusion Radius | Defines how far from the light source Unity occludes the lens flare. This value is in world space. | +| Sample Count | The number of random samples the CPU uses to generate the **Occlusion Radius**. | +| Occlusion Offset | Offsets the plane that the occlusion operates on. A higher value moves this plane closer to the Camera. This value is in world space.
For example, if a lens flare is inside the light bulb, you can use this to sample occlusion outside the light bulb. | +| Occlusion Remap Curve | Specifies the curve used to remap the occlusion of the flare. By default, the occlusion is linear, between 0 and 1. This can be useful to occlude the flare more drastically when it is behind clouds. | +| Allow Off Screen | Enable this property to allow lens flares outside the Camera's view to affect the current field of view. | + diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/lens-flare.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/lens-flare.md new file mode 100644 index 00000000000..d5325ae9f64 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/shared/lens-flare/lens-flare.md @@ -0,0 +1,12 @@ +# Lens flares + +Lens flares simulate the effect of lights refracting inside a camera lens. Use lens flares to represent bright lights, or to add atmosphere to a scene. + +|Page|Description| +|-|-| +| [Choose a lens flare type](choose-a-lens-flare-type.md) | Understand the differences between lens flares and screen space lens flares. | +| [Add lens flares](lens-flare-component.md) | Use the Lens Flare (SRP) component to create lens flares for lights that have specific locations in your scene, for example bright bulbs. | +| [Add screen space lens flares](Override-Screen-Space-Lens-Flare.md) | Use the Screen Space Lens Flare override to create lens flares for emissive surfaces, bright spots in your scene that appear depending on the camera view, and all onscreen lights. | +| [Lens Flare (SRP) reference](lens-flare-srp-reference.md) | Reference for the Lens Flare (SRP) component. | +| [Lens Flare (SRP) Data Asset reference](lens-flare-asset.md) | Reference for the Lens Flare (SRP) Data Asset. | +| [Screen Space Lens Flare override reference](reference-screen-space-lens-flare.md) | Reference for the Screen Space Lens Flare override.
| diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/Reflection/HDBakedReflectionSystem.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/Reflection/HDBakedReflectionSystem.cs index 6067dc3ce7e..ebf9ee76495 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/Reflection/HDBakedReflectionSystem.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/Reflection/HDBakedReflectionSystem.cs @@ -432,7 +432,7 @@ public static bool BakeProbes(IEnumerable bakedProbes) // APV Normalization (Execute baking) { - ProbeGIBaking.BakeAdditionalRequests(probeInstanceIDs.ToArray()); + AdaptiveProbeVolumes.BakeAdditionalRequests(probeInstanceIDs.ToArray()); } // Render and write the result to disk diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/ShaderGraph/HDSubTarget.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/ShaderGraph/HDSubTarget.cs index da7a7f7d2d2..49ef88a43ee 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/ShaderGraph/HDSubTarget.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/ShaderGraph/HDSubTarget.cs @@ -66,6 +66,7 @@ public SystemData systemData $"{HDUtils.GetHDRenderPipelinePath()}Editor/Material/ShaderGraph/Templates/", $"{HDUtils.GetVFXPath()}/Editor/ShaderGraph/Templates" }; + protected virtual bool supportGlobalMipBias => true; public virtual string identifier => GetType().Name; @@ -208,6 +209,13 @@ protected SubShaderDescriptor PostProcessSubShader(SubShaderDescriptor subShader }; } + if (supportGlobalMipBias) + { + if (passDescriptor.defines == null) + passDescriptor.defines = new(); + passDescriptor.defines.Add(CoreDefines.SupportGlobalMipBias); + } + CollectPassKeywords(ref passDescriptor); finalPasses.Add(passDescriptor, passes[i].fieldConditions); @@ -275,6 +283,13 @@ protected KernelDescriptor PostProcessKernel(KernelDescriptor kernel) // Overwrite the pass pragmas with just the kernel pragma for now. 
passDescriptor.pragmas = new PragmaCollection { Pragma.Kernel(kernel.name) }; + if (supportGlobalMipBias) + { + if (passDescriptor.defines == null) + passDescriptor.defines = new(); + passDescriptor.defines.Add(CoreDefines.SupportGlobalMipBias); + } + CollectPassKeywords(ref passDescriptor); kernel.passDescriptorReference = passDescriptor; diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/ShaderGraph/HDTarget.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/ShaderGraph/HDTarget.cs index 316590bb338..144cd03e272 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/ShaderGraph/HDTarget.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/ShaderGraph/HDTarget.cs @@ -1075,6 +1075,11 @@ static class CoreKeywords #region Defines static class CoreDefines { + public static DefineCollection SupportGlobalMipBias = new DefineCollection + { + { CoreKeywordDescriptors.SupportGlobalMipBias, 1 }, + }; + public static DefineCollection Tessellation = new DefineCollection { { CoreKeywordDescriptors.Tessellation, 1 }, @@ -1625,6 +1630,15 @@ static class CoreKeywordDescriptors scope = KeywordScope.Local, }; + public static KeywordDescriptor SupportGlobalMipBias = new KeywordDescriptor() + { + displayName = "Support Global Mip Bias", + referenceName = "SUPPORT_GLOBAL_MIP_BIAS", + type = KeywordType.Boolean, + definition = KeywordDefinition.ShaderFeature, + scope = KeywordScope.Local, + }; + public static KeywordDescriptor TessellationModification = new KeywordDescriptor() { displayName = "Tessellation Modification", diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/UIBlocks/MaterialUIBlockList.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/UIBlocks/MaterialUIBlockList.cs index 5fcc0e3245c..bf4ae45fae5 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/UIBlocks/MaterialUIBlockList.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/UIBlocks/MaterialUIBlockList.cs @@ -71,6 +71,11 @@ public void OnGUI(MaterialEditor materialEditor, MaterialProperty[] properties) Debug.LogException(e); } } + + // Reset label width back to the default of 0 (fix UUM-66215) + // NOTE: Because of how EditorGUIUtility.labelWidth works, when the internal value is 0, + // we cannot read that value back from the property getter. So we just set it to 0 here. 
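+            // Illustrative aside (not from the original fix): a cache-and-restore pattern such as
+            //     var previous = EditorGUIUtility.labelWidth; // getter returns a computed default, never 0
+            //     EditorGUIUtility.labelWidth = 200f;
+            //     /* ...draw fields... */
+            //     EditorGUIUtility.labelWidth = previous;
+            // silently pins the width to that computed default, which is why assigning 0 below is the
+            // only reliable way to return to the default behavior.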
+ EditorGUIUtility.labelWidth = 0; } /// diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/PostProcessing/ColorCurvesEditor.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/PostProcessing/ColorCurvesEditor.cs index c332c5d95c4..ace12b21347 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/PostProcessing/ColorCurvesEditor.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/PostProcessing/ColorCurvesEditor.cs @@ -40,16 +40,16 @@ sealed class ColorCurvesEditor : VolumeComponentEditor static GUIStyle s_PreLabel; - static GUIContent[] s_Curves = + static string[] s_CurveNames = { - new GUIContent("Master"), - new GUIContent("Red"), - new GUIContent("Green"), - new GUIContent("Blue"), - new GUIContent("Hue Vs Hue"), - new GUIContent("Hue Vs Sat"), - new GUIContent("Sat Vs Sat"), - new GUIContent("Lum Vs Sat") + "Master", + "Red", + "Green", + "Blue", + "Hue Vs Hue", + "Hue Vs Sat", + "Sat Vs Sat", + "Lum Vs Sat" }; public override void OnEnable() @@ -128,9 +128,28 @@ void CurveOverrideToggle(SerializedProperty overrideProp) overrideProp.boolValue = GUILayout.Toggle(overrideProp.boolValue, EditorGUIUtility.TrTextContent("Override"), EditorStyles.toolbarButton); } + string MakeCurveSelectionPopupLabel(int id) + { + string label = s_CurveNames[id]; + const string overrideSuffix = " (Overriding)"; + switch (id) + { + case 0: if (m_Master.overrideState.boolValue) label += overrideSuffix; break; + case 1: if (m_Red.overrideState.boolValue) label += overrideSuffix; break; + case 2: if (m_Green.overrideState.boolValue) label += overrideSuffix; break; + case 3: if (m_Blue.overrideState.boolValue) label += overrideSuffix; break; + case 4: if (m_HueVsHue.overrideState.boolValue) label += overrideSuffix; break; + case 5: if (m_HueVsSat.overrideState.boolValue) label += overrideSuffix; break; + case 6: if (m_SatVsSat.overrideState.boolValue) label += overrideSuffix; break; + case 7: if (m_LumVsSat.overrideState.boolValue) label += overrideSuffix; break; + } + return label; + } + int DoCurveSelectionPopup(int id) { - GUILayout.Label(s_Curves[id], EditorStyles.toolbarPopup, GUILayout.MaxWidth(150f)); + var label = MakeCurveSelectionPopupLabel(id); + GUILayout.Label(label, EditorStyles.toolbarPopup, GUILayout.MaxWidth(150f)); var lastRect = GUILayoutUtility.GetLastRect(); var e = Event.current; @@ -139,13 +158,15 @@ int DoCurveSelectionPopup(int id) { var menu = new GenericMenu(); - for (int i = 0; i < s_Curves.Length; i++) + for (int i = 0; i < s_CurveNames.Length; i++) { if (i == 4) menu.AddSeparator(""); int current = i; // Capture local for closure - menu.AddItem(s_Curves[i], current == id, () => + + var menuLabel = MakeCurveSelectionPopupLabel(i); + menu.AddItem(new GUIContent(menuLabel), current == id, () => { m_SelectedCurve.intValue = current; serializedObject.ApplyModifiedProperties(); diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/CustomPass/CustomPassFullScreenShader.shadergraph b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/CustomPass/CustomPassFullScreenShader.shadergraph new file mode 100644 index 00000000000..225dd0f9451 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/CustomPass/CustomPassFullScreenShader.shadergraph @@ -0,0 +1,377 @@ +{ + "m_SGVersion": 3, + "m_Type": "UnityEditor.ShaderGraph.GraphData", + "m_ObjectId": "96110b76ffe84d2cb7eb17ba99ad2fd7", + "m_Properties": [], + "m_Keywords": [], + 
"m_Dropdowns": [], + "m_CategoryData": [ + { + "m_Id": "38dcc741398442d6b94afcb5fdb66766" + } + ], + "m_Nodes": [ + { + "m_Id": "bc3f1b16fd264a509456bf48e04b9aa6" + }, + { + "m_Id": "676b49446f2f46efaebf16af8cee3a3e" + }, + { + "m_Id": "89fdc4a3afd4434e8c91055d956ac9b1" + } + ], + "m_GroupDatas": [], + "m_StickyNoteDatas": [], + "m_Edges": [ + { + "m_OutputSlot": { + "m_Node": { + "m_Id": "89fdc4a3afd4434e8c91055d956ac9b1" + }, + "m_SlotId": 2 + }, + "m_InputSlot": { + "m_Node": { + "m_Id": "bc3f1b16fd264a509456bf48e04b9aa6" + }, + "m_SlotId": 0 + } + } + ], + "m_VertexContext": { + "m_Position": { + "x": 0.0, + "y": 0.0 + }, + "m_Blocks": [] + }, + "m_FragmentContext": { + "m_Position": { + "x": 0.0, + "y": 200.0 + }, + "m_Blocks": [ + { + "m_Id": "bc3f1b16fd264a509456bf48e04b9aa6" + }, + { + "m_Id": "676b49446f2f46efaebf16af8cee3a3e" + } + ] + }, + "m_PreviewData": { + "serializedMesh": { + "m_SerializedMesh": "{\"mesh\":{\"instanceID\":0}}", + "m_Guid": "" + }, + "preventRotation": false + }, + "m_Path": "Hidden/HDRP", + "m_GraphPrecision": 1, + "m_PreviewMode": 2, + "m_OutputNode": { + "m_Id": "" + }, + "m_SubDatas": [], + "m_ActiveTargets": [ + { + "m_Id": "85cbe4f16c8f42a29ee6045b172247ae" + } + ] +} + +{ + "m_SGVersion": 0, + "m_Type": "UnityEditor.ShaderGraph.Vector1MaterialSlot", + "m_ObjectId": "0ccae4cf52854acda50df49b7a2de4fd", + "m_Id": 1, + "m_DisplayName": "Lod", + "m_SlotType": 0, + "m_Hidden": false, + "m_ShaderOutputName": "Lod", + "m_StageCapability": 2, + "m_Value": 0.0, + "m_DefaultValue": 0.0, + "m_Labels": [] +} + +{ + "m_SGVersion": 0, + "m_Type": "UnityEditor.ShaderGraph.CategoryData", + "m_ObjectId": "38dcc741398442d6b94afcb5fdb66766", + "m_Name": "", + "m_ChildObjectList": [] +} + +{ + "m_SGVersion": 0, + "m_Type": "UnityEditor.ShaderGraph.ColorRGBMaterialSlot", + "m_ObjectId": "407d4602997049f0a44b727ba5e999dc", + "m_Id": 0, + "m_DisplayName": "Base Color", + "m_SlotType": 0, + "m_Hidden": false, + "m_ShaderOutputName": "BaseColor", + "m_StageCapability": 2, + "m_Value": { + "x": 0.5, + "y": 0.5, + "z": 0.5 + }, + "m_DefaultValue": { + "x": 0.0, + "y": 0.0, + "z": 0.0 + }, + "m_Labels": [], + "m_ColorMode": 0, + "m_DefaultColor": { + "r": 0.5, + "g": 0.5, + "b": 0.5, + "a": 1.0 + } +} + +{ + "m_SGVersion": 0, + "m_Type": "UnityEditor.ShaderGraph.Vector1MaterialSlot", + "m_ObjectId": "499322b7a1524b59903589297fd0c085", + "m_Id": 0, + "m_DisplayName": "Alpha", + "m_SlotType": 0, + "m_Hidden": false, + "m_ShaderOutputName": "Alpha", + "m_StageCapability": 2, + "m_Value": 1.0, + "m_DefaultValue": 1.0, + "m_Labels": [] +} + +{ + "m_SGVersion": 0, + "m_Type": "UnityEditor.ShaderGraph.BlockNode", + "m_ObjectId": "676b49446f2f46efaebf16af8cee3a3e", + "m_Group": { + "m_Id": "" + }, + "m_Name": "SurfaceDescription.Alpha", + "m_DrawState": { + "m_Expanded": true, + "m_Position": { + "serializedVersion": "2", + "x": 0.0, + "y": 0.0, + "width": 0.0, + "height": 0.0 + } + }, + "m_Slots": [ + { + "m_Id": "499322b7a1524b59903589297fd0c085" + } + ], + "synonyms": [], + "m_Precision": 0, + "m_PreviewExpanded": true, + "m_DismissedVersion": 0, + "m_PreviewMode": 0, + "m_CustomColors": { + "m_SerializableColors": [] + }, + "m_SerializedDescriptor": "SurfaceDescription.Alpha" +} + +{ + "m_SGVersion": 0, + "m_Type": "UnityEditor.Rendering.HighDefinition.ShaderGraph.HDFullscreenSubTarget", + "m_ObjectId": "68b4ee763a504778b9b51585673070eb" +} + +{ + "m_SGVersion": 0, + "m_Type": "UnityEditor.Rendering.Fullscreen.ShaderGraph.FullscreenData", + "m_ObjectId": 
"6c5785c067cb41f38b7a2dd7e52fb488", + "m_Version": 0, + "m_fullscreenMode": 0, + "m_BlendMode": 0, + "m_SrcColorBlendMode": 0, + "m_DstColorBlendMode": 1, + "m_ColorBlendOperation": 0, + "m_SrcAlphaBlendMode": 0, + "m_DstAlphaBlendMode": 1, + "m_AlphaBlendOperation": 0, + "m_EnableStencil": false, + "m_StencilReference": 0, + "m_StencilReadMask": 255, + "m_StencilWriteMask": 255, + "m_StencilCompareFunction": 8, + "m_StencilPassOperation": 0, + "m_StencilFailOperation": 0, + "m_StencilDepthFailOperation": 0, + "m_DepthWrite": false, + "m_depthWriteMode": 0, + "m_AllowMaterialOverride": false, + "m_DepthTestMode": 0 +} + +{ + "m_SGVersion": 0, + "m_Type": "UnityEditor.Rendering.HighDefinition.ShaderGraph.HDTarget", + "m_ObjectId": "85cbe4f16c8f42a29ee6045b172247ae", + "m_ActiveSubTarget": { + "m_Id": "68b4ee763a504778b9b51585673070eb" + }, + "m_Datas": [ + { + "m_Id": "6c5785c067cb41f38b7a2dd7e52fb488" + }, + { + "m_Id": "9fdf6528559a4d10904a9d7762bc5321" + } + ], + "m_CustomEditorGUI": "", + "m_SupportVFX": false, + "m_SupportLineRendering": false +} + +{ + "m_SGVersion": 0, + "m_Type": "UnityEditor.Rendering.HighDefinition.HDSceneColorNode", + "m_ObjectId": "89fdc4a3afd4434e8c91055d956ac9b1", + "m_Group": { + "m_Id": "" + }, + "m_Name": "HD Scene Color", + "m_DrawState": { + "m_Expanded": true, + "m_Position": { + "serializedVersion": "2", + "x": -297.0, + "y": 200.0, + "width": 160.0, + "height": 136.0 + } + }, + "m_Slots": [ + { + "m_Id": "d377937c6e6d4a0fa47407cfb4969f22" + }, + { + "m_Id": "0ccae4cf52854acda50df49b7a2de4fd" + }, + { + "m_Id": "b4f911e9370149ffaa8b6d39fa2653e5" + } + ], + "synonyms": [ + "screen buffer" + ], + "m_Precision": 0, + "m_PreviewExpanded": true, + "m_DismissedVersion": 0, + "m_PreviewMode": 0, + "m_CustomColors": { + "m_SerializableColors": [] + }, + "m_Exposure": true +} + +{ + "m_SGVersion": 0, + "m_Type": "UnityEditor.Rendering.HighDefinition.ShaderGraph.HDFullscreenData", + "m_ObjectId": "9fdf6528559a4d10904a9d7762bc5321", + "m_ShowOnlyHDStencilBits": true +} + +{ + "m_SGVersion": 0, + "m_Type": "UnityEditor.ShaderGraph.ColorRGBMaterialSlot", + "m_ObjectId": "b4f911e9370149ffaa8b6d39fa2653e5", + "m_Id": 2, + "m_DisplayName": "Output", + "m_SlotType": 1, + "m_Hidden": false, + "m_ShaderOutputName": "Output", + "m_StageCapability": 3, + "m_Value": { + "x": 0.0, + "y": 0.0, + "z": 0.0 + }, + "m_DefaultValue": { + "x": 0.0, + "y": 0.0, + "z": 0.0 + }, + "m_Labels": [], + "m_ColorMode": 1, + "m_DefaultColor": { + "r": 0.0, + "g": 0.0, + "b": 0.0, + "a": 1.0 + } +} + +{ + "m_SGVersion": 0, + "m_Type": "UnityEditor.ShaderGraph.BlockNode", + "m_ObjectId": "bc3f1b16fd264a509456bf48e04b9aa6", + "m_Group": { + "m_Id": "" + }, + "m_Name": "SurfaceDescription.BaseColor", + "m_DrawState": { + "m_Expanded": true, + "m_Position": { + "serializedVersion": "2", + "x": 0.0, + "y": 0.0, + "width": 0.0, + "height": 0.0 + } + }, + "m_Slots": [ + { + "m_Id": "407d4602997049f0a44b727ba5e999dc" + } + ], + "synonyms": [], + "m_Precision": 0, + "m_PreviewExpanded": true, + "m_DismissedVersion": 0, + "m_PreviewMode": 0, + "m_CustomColors": { + "m_SerializableColors": [] + }, + "m_SerializedDescriptor": "SurfaceDescription.BaseColor" +} + +{ + "m_SGVersion": 0, + "m_Type": "UnityEditor.ShaderGraph.ScreenPositionMaterialSlot", + "m_ObjectId": "d377937c6e6d4a0fa47407cfb4969f22", + "m_Id": 0, + "m_DisplayName": "UV", + "m_SlotType": 0, + "m_Hidden": false, + "m_ShaderOutputName": "UV", + "m_StageCapability": 3, + "m_Value": { + "x": 0.0, + "y": 0.0, + "z": 0.0, + "w": 0.0 + }, + 
"m_DefaultValue": { + "x": 0.0, + "y": 0.0, + "z": 0.0, + "w": 0.0 + }, + "m_Labels": [], + "m_ScreenSpaceType": 0 +} + diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/CustomPass/CustomPassFullScreenShader.shadergraph.meta b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/CustomPass/CustomPassFullScreenShader.shadergraph.meta new file mode 100644 index 00000000000..096513879e9 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/CustomPass/CustomPassFullScreenShader.shadergraph.meta @@ -0,0 +1,10 @@ +fileFormatVersion: 2 +guid: cf68df6de41ec1f43b0d25a39170d17f +ScriptedImporter: + internalIDToNameTable: [] + externalObjects: {} + serializedVersion: 2 + userData: + assetBundleName: + assetBundleVariant: + script: {fileID: 11500000, guid: 625f186215c104763be7675aa2d941aa, type: 3} diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/CustomPass/FullScreenCustomPassDrawer.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/CustomPass/FullScreenCustomPassDrawer.cs index 6f3aeba439e..d73c6c1c89f 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/CustomPass/FullScreenCustomPassDrawer.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/CustomPass/FullScreenCustomPassDrawer.cs @@ -6,6 +6,9 @@ using UnityEngine.Rendering.HighDefinition; using System.Linq; using System; +using System.IO; +using System.Text.RegularExpressions; +using UnityEditor.ShaderGraph; namespace UnityEditor.Rendering.HighDefinition { @@ -24,12 +27,52 @@ private class Styles public static GUIContent fullScreenPassMaterial = new GUIContent("FullScreen Material", "FullScreen Material used for the full screen DrawProcedural."); public static GUIContent materialPassName = new GUIContent("Pass Name", "The shader pass to use for your fullscreen pass."); public static GUIContent fetchColorBuffer = new GUIContent("Fetch Color Buffer", "Tick this if your effect sample/fetch the camera color buffer"); + public static GUIContent newMaterialButton = new GUIContent("New", "Creates a new Shader and Material asset using the fullscreen templates."); public readonly static string writeAndFetchColorBufferWarning = "Fetching and Writing to the camera color buffer at the same time is not supported on most platforms."; public readonly static string stencilWriteOverReservedBits = "The Stencil Write Mask of your material overwrites the bits reserved by HDRP. To avoid rendering errors, set the Write Mask to " + (int)(UserStencilUsage.AllUserBits); public readonly static string stencilHelpInfo = $"Stencil is enabled on the material. 
To help you configure the stencil operations, use these values for the bits available in HDRP: User Bit 0: {(int)UserStencilUsage.UserBit0} User Bit 1: {(int)UserStencilUsage.UserBit1}"; } + class CreateFullscreenMaterialAction : ProjectWindowCallback.EndNameEditAction + { + public bool createShaderGraphShader; // TODO + public FullScreenCustomPass customPass; + + static Regex s_ShaderNameRegex = new(@"(Material$|Mat$)", RegexOptions.IgnoreCase); + + public override void Action(int instanceId, string pathName, string resourceFile) + { + string fileName = Path.GetFileNameWithoutExtension(pathName); + string directoryName = Path.GetDirectoryName(pathName); + // Clean up name to create shader file: + var shaderName = s_ShaderNameRegex.Replace(fileName, "") + "Shader"; + shaderName += createShaderGraphShader ? "." + ShaderGraphImporter.Extension : ".shader"; + string shaderPath = Path.Combine(directoryName, shaderName); + shaderPath = AssetDatabase.GenerateUniqueAssetPath(shaderPath); + pathName = AssetDatabase.GenerateUniqueAssetPath(pathName); + + string templateFolder = $"{HDUtils.GetHDRenderPipelinePath()}/Editor/RenderPipeline/CustomPass"; + string templatePath = createShaderGraphShader ? $"{templateFolder}/CustomPassFullScreenShader.shadergraph" : $"{templateFolder}/CustomPassFullScreenShader.template"; + + // Load template code and replace shader name with current file name + string templateCode = File.ReadAllText(templatePath); + templateCode = templateCode.Replace("#SCRIPTNAME#", fileName); + File.WriteAllText(shaderPath, templateCode); + + AssetDatabase.Refresh(); + AssetDatabase.ImportAsset(shaderPath); + var shader = AssetDatabase.LoadAssetAtPath<Shader>(shaderPath); + shader.name = Path.GetFileName(pathName); + var material = new Material(shader); + + customPass.fullscreenPassMaterial = material; + + AssetDatabase.CreateAsset(material, pathName); + ProjectWindowUtil.ShowCreatedAsset(material); + } + } + // Fullscreen pass SerializedProperty m_FullScreenPassMaterial; SerializedProperty m_MaterialPassName; @@ -40,6 +83,9 @@ private class Styles bool m_ShowStencilWriteWarning = false; bool m_ShowStencilInfoBox = false; + static readonly float k_NewMaterialButtonWidth = 60; + static readonly string k_DefaultMaterialName = "New FullScreen Material.mat"; + CustomPass.TargetBuffer targetColorBuffer => (CustomPass.TargetBuffer)m_TargetColorBuffer.intValue; CustomPass.TargetBuffer targetDepthBuffer => (CustomPass.TargetBuffer)m_TargetDepthBuffer.intValue; @@ -66,7 +112,12 @@ protected override void DoPassGUI(SerializedProperty customPass, Rect rect) rect.y += Styles.helpBoxHeight; } - EditorGUI.PropertyField(rect, m_FullScreenPassMaterial, Styles.fullScreenPassMaterial); + Rect materialField = rect; + Rect newMaterialField = rect; + if (m_FullScreenPassMaterial.objectReferenceValue == null) + materialField.xMax -= k_NewMaterialButtonWidth; + newMaterialField.xMin += materialField.width; + EditorGUI.PropertyField(materialField, m_FullScreenPassMaterial, Styles.fullScreenPassMaterial); rect.y += Styles.defaultLineSpace; if (m_FullScreenPassMaterial.objectReferenceValue is Material mat) { @@ -126,6 +177,33 @@ protected override void DoPassGUI(SerializedProperty customPass, Rect rect) } } } + else if (m_FullScreenPassMaterial.objectReferenceValue == null) + { + // null material, show the button to create a new material & associated shaders + ShowNewMaterialButton(newMaterialField); + } + } + + void ShowNewMaterialButton(Rect buttonRect) + { + // Small padding to separate both fields: + buttonRect.xMin
+= 2; + if (!EditorGUI.DropdownButton(buttonRect, Styles.newMaterialButton, FocusType.Keyboard)) + return; + + void CreateMaterial(bool shaderGraph) + { + var materialIcon = AssetPreview.GetMiniTypeThumbnail(typeof(Material)); + var action = ScriptableObject.CreateInstance<CreateFullscreenMaterialAction>(); + action.createShaderGraphShader = shaderGraph; + action.customPass = target as FullScreenCustomPass; + ProjectWindowUtil.StartNameEditingIfProjectWindowExists(0, action, k_DefaultMaterialName, materialIcon, null); + } + + GenericMenu menu = new GenericMenu(); + menu.AddItem(new GUIContent("ShaderGraph"), false, () => CreateMaterial(true)); + menu.AddItem(new GUIContent("Handwritten Shader"), false, () => CreateMaterial(false)); + menu.DropDown(buttonRect); } bool DoesWriteMaskContainsReservedBits(int writeMask) diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDecalHDRPOutput.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDecalHDRPOutput.cs index 4bdd3fc96a5..ace3973d0bc 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDecalHDRPOutput.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDecalHDRPOutput.cs @@ -360,31 +360,31 @@ public override void Sanitize(int version) } } - internal override void GenerateErrors(VFXInvalidateErrorReporter manager) + internal override void GenerateErrors(VFXErrorReporter report) { - base.GenerateErrors(manager); + base.GenerateErrors(report); GetDecalSupport(out var supportDecals, out var enableDecalLayers, out var metalAndAODecals); if (!supportDecals) { - manager.RegisterError("DecalsDisabled", VFXErrorType.Warning, - $"Decals will not be rendered because the 'Decals' is disabled in your HDRP Asset. Enable 'Decals' in your HDRP Asset to make this output work."); + report.RegisterError("DecalsDisabled", VFXErrorType.Warning, + $"Decals will not be rendered because 'Decals' is disabled in your HDRP Asset. Enable 'Decals' in your HDRP Asset to make this output work.", this); } if (!enableDecalLayers) { - manager.RegisterError("DecalLayersDisabled", VFXErrorType.Warning, + report.RegisterError("DecalLayersDisabled", VFXErrorType.Warning, $"The Angle Fade parameter won't have any effect, because the 'Decal Layers' setting is disabled." + $" Enable 'Decal Layers' in your HDRP Asset if you want to control the Angle Fade." + - $" There is a performance cost of enabling this option."); + $" There is a performance cost of enabling this option.", this); } if (!metalAndAODecals) { - manager.RegisterError("DecalMetalAODisabled", VFXErrorType.Warning, + report.RegisterError("DecalMetalAODisabled", VFXErrorType.Warning, $"The Metallic and Ambient Occlusion parameters won't have any effect, because the 'Metal and AO properties' setting is disabled." + - $" Enable 'Metal and AO properties' in your HDRP Asset if you want to control the Metal and AO properties of decals. There is a performance cost of enabling this option."); + $" Enable 'Metal and AO properties' in your HDRP Asset if you want to control the Metal and AO properties of decals.
There is a performance cost of enabling this option.", this); } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDistortionQuadStripOutput.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDistortionQuadStripOutput.cs index 416bdd137dd..15cd11119fd 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDistortionQuadStripOutput.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDistortionQuadStripOutput.cs @@ -73,11 +73,11 @@ public override IEnumerable<string> additionalDefines } } - internal sealed override void GenerateErrors(VFXInvalidateErrorReporter manager) + internal sealed override void GenerateErrors(VFXErrorReporter report) { if (GetAttributesInfos().Any(x => x.mode.HasFlag(VFXAttributeMode.Write) && x.attrib.Equals(VFXAttribute.Position))) { - manager.RegisterError("WritePositionInStrip", VFXErrorType.Warning, VFXQuadStripOutput.WriteToPositionMessage); + report.RegisterError("WritePositionInStrip", VFXErrorType.Warning, VFXQuadStripOutput.WriteToPositionMessage, this); } } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitMeshOutput.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitMeshOutput.cs index d679802612b..93b141525cf 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitMeshOutput.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitMeshOutput.cs @@ -114,14 +114,14 @@ public override VFXExpressionMapper GetExpressionMapper(VFXDeviceTarget target) return mapper; } - internal override void GenerateErrors(VFXInvalidateErrorReporter manager) + internal override void GenerateErrors(VFXErrorReporter report) { - base.GenerateErrors(manager); + base.GenerateErrors(report); var dataParticle = GetData() as VFXDataParticle; if (dataParticle != null && dataParticle.boundsMode != BoundsSettingMode.Manual) - manager.RegisterError("WarningBoundsComputation", VFXErrorType.Warning, $"Bounds computation have no sense of what the scale of the output mesh is," + + report.RegisterError("WarningBoundsComputation", VFXErrorType.Warning, $"Bounds computation has no knowledge of what the scale of the output mesh is," + $" so the resulting computed bounds can be too small or too big." + - $" Please use padding to mitigate this discrepancy."); + $" Please use padding to mitigate this discrepancy.", this); } } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitQuadStripOutput.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitQuadStripOutput.cs index dd5986e10b9..77f88db3ba4 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitQuadStripOutput.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitQuadStripOutput.cs @@ -120,11 +120,11 @@ public override void Sanitize(int version) base.Sanitize(version); } - internal sealed override void GenerateErrors(VFXInvalidateErrorReporter manager) + internal sealed override void GenerateErrors(VFXErrorReporter report) { if (GetAttributesInfos().Any(x => x.mode.HasFlag(VFXAttributeMode.Write) && x.attrib.Equals(VFXAttribute.Position))) { - manager.RegisterError("WritePositionInStrip", VFXErrorType.Warning, VFXQuadStripOutput.WriteToPositionMessage); + report.RegisterError("WritePositionInStrip",
VFXErrorType.Warning, VFXQuadStripOutput.WriteToPositionMessage, this); } } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXVolumetricFogOutput.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXVolumetricFogOutput.cs index 4c2acae8686..b0a92a8396e 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXVolumetricFogOutput.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXVolumetricFogOutput.cs @@ -281,12 +281,12 @@ protected override VFXShaderWriter renderState } } - internal override void GenerateErrors(VFXInvalidateErrorReporter manager) + internal override void GenerateErrors(VFXErrorReporter report) { if (!HDRenderPipeline.currentAsset?.currentPlatformRenderPipelineSettings.supportVolumetrics ?? false) { - manager.RegisterError("VolumetricFogDisabled", VFXErrorType.Warning, - $"The current HDRP Asset does not support volumetric fog. To fix this error, go to the Lighting section of your HDRP asset and enable 'Volumetric Fog'."); + report.RegisterError("VolumetricFogDisabled", VFXErrorType.Warning, + $"The current HDRP Asset does not support volumetric fog. To fix this error, go to the Lighting section of your HDRP asset and enable 'Volumetric Fog'.", this); } var data = GetData(); @@ -294,13 +294,13 @@ internal override void GenerateErrors(VFXInvalidateErrorReporter manager) { if (!data.IsCurrentAttributeWritten(VFXAttribute.Size) && !data.IsCurrentAttributeWritten(VFXAttribute.ScaleX)) { - manager.RegisterError("SizeTooSmall", VFXErrorType.Warning, - $"The size of the fog particle is not modified. This can make the volumetric fog effect invisible because the default size is too small. To fix this, add a size block in your system and increase it's value."); + report.RegisterError("SizeTooSmall", VFXErrorType.Warning, + $"The size of the fog particle is not modified. This can make the volumetric fog effect invisible because the default size is too small. To fix this, add a size block in your system and increase its value.", this); } if (data.IsCurrentAttributeWritten(VFXAttribute.ScaleY) || data.IsCurrentAttributeWritten(VFXAttribute.ScaleZ)) { - manager.RegisterError("ScaleYZIgnored", VFXErrorType.Warning, - $"The scale on Y and Z axis are ignored by the volumetric fog. Configure your scale component to X only to remove this message."); + report.RegisterError("ScaleYZIgnored", VFXErrorType.Warning, + $"The scale on the Y and Z axes is ignored by the volumetric fog.
Configure your scale component to X only to remove this message.", this); } } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightLoop/lightlistbuild.compute b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightLoop/lightlistbuild.compute index 7229b7fbd40..c11ba80ebd1 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightLoop/lightlistbuild.compute +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightLoop/lightlistbuild.compute @@ -150,6 +150,7 @@ void TileLightListGen(uint3 dispatchThreadId : SV_DispatchThreadID, uint threadI uint t=threadID; int i; + UNITY_UNROLLX(LIGHT_LIST_MAX_COARSE_ENTRIES) for(i=t; i> recorderCaptureActions; public Vector2 viewportScale; public Material blitMaterial; + public Rect viewportSize; } internal void ExecuteCaptureActions(RenderGraph renderGraph, TextureHandle input) @@ -1722,14 +1727,16 @@ internal void ExecuteCaptureActions(RenderGraph renderGraph, TextureHandle input using (var builder = renderGraph.AddRenderPass("Execute Capture Actions", out var passData)) { var inputDesc = renderGraph.GetTextureDesc(input); - var rtHandleScale = RTHandles.rtHandleProperties.rtHandleScale; - passData.viewportScale = new Vector2(rtHandleScale.x, rtHandleScale.y); + var targetSize = RTHandles.rtHandleProperties.currentRenderTargetSize; + passData.viewportScale = new Vector2(targetSize.x / finalViewport.width, targetSize.y / finalViewport.height); + passData.blitMaterial = HDUtils.GetBlitMaterial(inputDesc.dimension); passData.recorderCaptureActions = m_RecorderCaptureActions; passData.input = builder.ReadTexture(input); + passData.viewportSize = finalViewport; // We need to blit to an intermediate texture because input resolution can be bigger than the camera resolution // Since recorder does not know about this, we need to send a texture of the right size. 
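            // Illustrative numbers (not from the patch): with an RTHandle reference size of 2560x1440
            // and a 1920x1080 final viewport, viewportScale above evaluates to (2560/1920, 1440/1080),
            // and the transient texture below is allocated at exactly 1920x1080, so the Recorder
            // receives a texture that matches the camera resolution.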
- passData.tempTexture = builder.CreateTransientTexture(new TextureDesc(actualWidth, actualHeight) + passData.tempTexture = builder.CreateTransientTexture(new TextureDesc((int)finalViewport.width, (int)finalViewport.height) { colorFormat = inputDesc.colorFormat, name = "TempCaptureActions" }); builder.SetRenderFunc( @@ -1740,6 +1747,7 @@ internal void ExecuteCaptureActions(RenderGraph renderGraph, TextureHandle input mpb.SetVector(HDShaderIDs._BlitScaleBias, data.viewportScale); mpb.SetFloat(HDShaderIDs._BlitMipLevel, 0); ctx.cmd.SetRenderTarget(data.tempTexture); + ctx.cmd.SetViewport(data.viewportSize); ctx.cmd.DrawProcedural(Matrix4x4.identity, data.blitMaterial, 0, MeshTopology.Triangles, 3, 1, mpb); for (data.recorderCaptureActions.Reset(); data.recorderCaptureActions.MoveNext();) diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.PostProcess.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.PostProcess.cs index 2f8fcde0678..d4b05582f26 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.PostProcess.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.PostProcess.cs @@ -3090,7 +3090,7 @@ static void DoPhysicallyBasedDepthOfField(in DepthOfFieldParameters dofParameter float mipLevel = 1 + Mathf.Ceil(Mathf.Log(maxCoc, 2)); cmd.SetComputeVectorParam(cs, HDShaderIDs._Params, new Vector4(sampleCount, maxCoc, anamorphism, 0.0f)); - cmd.SetComputeVectorParam(cs, HDShaderIDs._Params2, new Vector4(dofParameters.adaptiveSamplingWeights.x, dofParameters.adaptiveSamplingWeights.y, 0.0f, 0.0f)); + cmd.SetComputeVectorParam(cs, HDShaderIDs._Params2, new Vector4(dofParameters.adaptiveSamplingWeights.x, dofParameters.adaptiveSamplingWeights.y, (float)dofParameters.resolution, 1.0f/(float)dofParameters.resolution)); cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._InputTexture, source); cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._InputCoCTexture, fullresCoC); cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._InputNearTexture, scaledDof); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.Prepass.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.Prepass.cs index d4a9335f465..67854ef972f 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.Prepass.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.Prepass.cs @@ -1,3 +1,4 @@ +using System; using System.Collections.Generic; using UnityEngine.Experimental.Rendering; using UnityEngine.Rendering.RenderGraphModule; @@ -218,7 +219,6 @@ enum OccluderPass OccluderPass GetOccluderPass(HDCamera hdCamera) { bool useGPUOcclusionCulling = GPUResidentDrawer.IsInstanceOcclusionCullingEnabled() - && !XRSRPSettings.enabled && hdCamera.camera.cameraType is CameraType.Game or CameraType.SceneView or CameraType.Preview; if (!useGPUOcclusionCulling) return OccluderPass.None; @@ -233,25 +233,47 @@ OccluderPass GetOccluderPass(HDCamera hdCamera) void UpdateInstanceOccluders(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthTexture) { - var occluderParameters = new OccluderParameters(hdCamera.camera.GetInstanceID()) + bool isSinglePassXR = hdCamera.xr.enabled && hdCamera.xr.singlePassEnabled; + var occluderParams = new 
OccluderParameters(hdCamera.camera.GetInstanceID()) { - viewMatrix = hdCamera.mainViewConstants.viewMatrix, - invViewMatrix = hdCamera.mainViewConstants.invViewMatrix, - gpuProjMatrix = hdCamera.mainViewConstants.projMatrix, - viewOffsetWorldSpace = hdCamera.mainViewConstants.worldSpaceCameraPos, - + subviewCount = isSinglePassXR ? 2 : 1, depthTexture = depthTexture, - depthOffset = new Vector2Int(0, 0), depthSize = new Vector2Int(hdCamera.actualWidth, hdCamera.actualHeight), - depthSliceCount = TextureXR.useTexArray ? 1 : 0, + depthIsArray = TextureXR.useTexArray, }; - GPUResidentDrawer.UpdateInstanceOccluders(renderGraph, occluderParameters); + Span<OccluderSubviewUpdate> occluderSubviewUpdates = stackalloc OccluderSubviewUpdate[occluderParams.subviewCount]; + for (int subviewIndex = 0; subviewIndex < occluderParams.subviewCount; ++subviewIndex) + { + occluderSubviewUpdates[subviewIndex] = new OccluderSubviewUpdate(subviewIndex) + { + depthSliceIndex = subviewIndex, + viewMatrix = hdCamera.m_XRViewConstants[subviewIndex].viewMatrix, + invViewMatrix = hdCamera.m_XRViewConstants[subviewIndex].invViewMatrix, + gpuProjMatrix = hdCamera.m_XRViewConstants[subviewIndex].projMatrix, + viewOffsetWorldSpace = hdCamera.m_XRViewConstants[subviewIndex].worldSpaceCameraPos, + }; + } + GPUResidentDrawer.UpdateInstanceOccluders(renderGraph, occluderParams, occluderSubviewUpdates); } void InstanceOcclusionTest(RenderGraph renderGraph, HDCamera hdCamera, OcclusionTest occlusionTest) { - var occlusionSettings = new OcclusionCullingSettings(hdCamera.camera.GetInstanceID(), occlusionTest); - GPUResidentDrawer.InstanceOcclusionTest(renderGraph, occlusionSettings); + bool isSinglePassXR = hdCamera.xr.enabled && hdCamera.xr.singlePassEnabled; + int subviewCount = isSinglePassXR ? 2 : 1; + var settings = new OcclusionCullingSettings(hdCamera.camera.GetInstanceID(), occlusionTest) + { + instanceMultiplier = (isSinglePassXR && !SystemInfo.supportsMultiview) ? 2 : 1, + }; + Span<SubviewOcclusionTest> subviewOcclusionTests = stackalloc SubviewOcclusionTest[subviewCount]; + for (int subviewIndex = 0; subviewIndex < subviewCount; ++subviewIndex) + { + subviewOcclusionTests[subviewIndex] = new SubviewOcclusionTest() + { + cullingSplitIndex = 0, + occluderSubviewIndex = subviewIndex, + }; + } + GPUResidentDrawer.InstanceOcclusionTest(renderGraph, settings, subviewOcclusionTests); } PrepassOutput RenderPrepass(RenderGraph renderGraph, @@ -290,8 +312,6 @@ PrepassOutput RenderPrepass(RenderGraph renderGraph, RenderRayTracingDepthPrepass(renderGraph, cullingResults, hdCamera, result.depthBuffer); - ApplyCameraMipBias(hdCamera); - OccluderPass occluderPass = GetOccluderPass(hdCamera); bool shouldRenderMotionVectorAfterGBuffer = false; @@ -326,8 +346,6 @@ PrepassOutput RenderPrepass(RenderGraph renderGraph, } } - ResetCameraMipBias(hdCamera); - // If we have MSAA, we need to complete the motion vector buffer before buffer resolves, hence we need to run camera mv first. // This is always fine since shouldRenderMotionVectorAfterGBuffer is always false for forward.
bool needCameraMVBeforeResolve = msaa; @@ -343,8 +361,6 @@ PrepassOutput RenderPrepass(RenderGraph renderGraph, // At this point in forward all objects have been rendered to the prepass (depth/normal/motion vectors) so we can resolve them ResolvePrepassBuffers(renderGraph, hdCamera, ref result); - ApplyCameraMipBias(hdCamera); - if (IsComputeThicknessNeeded(hdCamera)) // Compute thickness for AllOpaque before the GBuffer without reading DepthBuffer RenderThickness(renderGraph, cullingResults, thicknessTexture, TextureHandle.nullHandle, hdCamera, HDRenderQueue.k_RenderQueue_AllOpaque, false); @@ -405,8 +421,6 @@ PrepassOutput RenderPrepass(RenderGraph renderGraph, mip1FromDownsampleForLowResTrans = mip1FromDownsampleForLowResTrans && hdCamera.frameSettings.IsEnabled(FrameSettingsField.LowResTransparent) && hdCamera.isLowResScaleHalf; - ResetCameraMipBias(hdCamera); - DownsampleDepthForLowResTransparency(renderGraph, hdCamera, mip1FromDownsampleForLowResTrans, ref result); // In both forward and deferred, everything opaque should have been rendered at this point so we can safely copy the depth buffer for later processing. @@ -1157,7 +1171,7 @@ void CopyDepthBufferIfNeeded(RenderGraph renderGraph, HDCamera hdCamera, ref Pre // In vulkan, dx12 and consoles the first read of a texture always triggers a depth decompression // (in vulkan is seen as a vk event, in dx12 as a barrier, and in gnm as a straight up depth decompress compute job). // Unfortunately, the current render graph implementation only sees the current texture as a read since the abstraction doesn't go too low. - // The GfxDevice has no context of passes so it can't put the barrier in the right spot... so for now hacking this by *assuming* this is the first read. :( + // The GfxDevice has no context of passes so it can't put the barrier in the right spot... so for now hacking this by *assuming* this is the first read. :( passData.inputDepth = builder.ReadWriteTexture(output.resolvedDepthBuffer); //passData.inputDepth = builder.ReadTexture(output.resolvedDepthBuffer); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs index 24cc4b8770a..f91b74b8784 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs @@ -203,16 +203,12 @@ void RecordRenderGraph(RenderRequest renderRequest, var deferredLightingOutput = RenderDeferredLighting(m_RenderGraph, hdCamera, colorBuffer, prepassOutput.depthBuffer, prepassOutput.depthPyramidTexture, lightingBuffers, prepassOutput.gbuffer, shadowResult, gpuLightListOutput); - ApplyCameraMipBias(hdCamera); - RenderForwardOpaque(m_RenderGraph, hdCamera, colorBuffer, lightingBuffers, gpuLightListOutput, prepassOutput, vtFeedbackBuffer, shadowResult, cullingResults); if (IsComputeThicknessNeeded(hdCamera)) // Compute the thickness for All Transparent which can be occluded by opaque written on the DepthBuffer (which includes the Forward Opaques).
RenderThickness(m_RenderGraph, cullingResults, thicknessTexture, prepassOutput.depthPyramidTexture, hdCamera, HDRenderQueue.k_RenderQueue_AllTransparent, true); - ResetCameraMipBias(hdCamera); - if (aovRequest.isValid) aovRequest.PushCameraTexture(m_RenderGraph, AOVBuffers.Normals, hdCamera, prepassOutput.resolvedNormalBuffer, aovBuffers); @@ -353,7 +349,7 @@ void RecordRenderGraph(RenderRequest renderRequest, aovRequest.PushCameraTexture(m_RenderGraph, AOVBuffers.VolumetricFog, hdCamera, colorBuffer, aovBuffers); } - ResetCameraSizeForAfterPostProcess(m_RenderGraph, hdCamera, commandBuffer); + ResetCameraDataAfterPostProcess(m_RenderGraph, hdCamera, commandBuffer); RenderCustomPass(m_RenderGraph, hdCamera, postProcessDest, prepassOutput, customPassCullingResults, cullingResults, CustomPassInjectionPoint.AfterPostProcess, aovRequest, aovCustomPassBuffers); @@ -601,50 +597,15 @@ void UpdateParentExposure(RenderGraph renderGraph, HDCamera hdCamera) } } - void ApplyCameraMipBias(HDCamera hdCamera) + float GetGlobalMipBias(HDCamera hdCamera) { - float globalMaterialMipBias = 0.0f; + float globalMaterialMipBias; if (m_CurrentDebugDisplaySettings != null && m_CurrentDebugDisplaySettings.data.UseDebugGlobalMipBiasOverride()) - { globalMaterialMipBias = m_CurrentDebugDisplaySettings.data.GetDebugGlobalMipBiasOverride(); - } else - { globalMaterialMipBias = hdCamera.globalMipBias; - } - PushCameraGlobalMipBiasPass(m_RenderGraph, hdCamera, globalMaterialMipBias); - } - void ResetCameraMipBias(HDCamera hdCamera) => PushCameraGlobalMipBiasPass(m_RenderGraph, hdCamera, 0.0f); - - class PushCameraGlobalMipBiasData - { - public HDCamera hdCamera; - public float mipBias; - public ShaderVariablesGlobal globalCB; - public ShaderVariablesXR xrCB; - } - - void PushCameraGlobalMipBiasPass(RenderGraph renderGraph, HDCamera hdCamera, float mipBias) - { - if (!ShaderConfig.s_GlobalMipBias) - return; - - using (var builder = renderGraph.AddRenderPass("Push Global Camera Mip Bias", out var passData)) - { - passData.hdCamera = hdCamera; - passData.mipBias = mipBias; - passData.globalCB = m_ShaderVariablesGlobalCB; - passData.xrCB = m_ShaderVariablesXRCB; - - builder.SetRenderFunc( - (PushCameraGlobalMipBiasData data, RenderGraphContext context) => - { - data.hdCamera.globalMipBias = data.mipBias; - data.hdCamera.UpdateGlobalMipBiasCB(ref data.globalCB); - ConstantBuffer.PushGlobal(context.cmd, data.globalCB, HDShaderIDs._ShaderVariablesGlobal); - }); - } + return globalMaterialMipBias; } class SetFinalTargetPassData @@ -1696,9 +1657,6 @@ TextureHandle RenderTransparency(RenderGraph renderGraph, RenderCustomPass(m_RenderGraph, hdCamera, colorBuffer, prepassOutput, customPassCullingResults, cullingResults, CustomPassInjectionPoint.BeforePreRefraction, aovRequest, aovCustomPassBuffers); SetGlobalColorForCustomPass(renderGraph, currentColorPyramid); - // Render pre-refraction objects - ApplyCameraMipBias(hdCamera); - // Combine volumetric clouds with prerefraction transparents CombineVolumetricClouds(renderGraph, hdCamera, colorBuffer, prepassOutput.resolvedDepthBuffer, transparentPrepass, ref opticalFogTransmittance); @@ -1707,8 +1665,6 @@ TextureHandle RenderTransparency(RenderGraph renderGraph, RenderForwardTransparent(renderGraph, hdCamera, colorBuffer, normalBuffer, prepassOutput, transparentPrepass, vtFeedbackBuffer, volumetricLighting, ssrLightingBuffer, null, lightLists, shadowResult, cullingResults, true, preRefractionList); - ResetCameraMipBias(hdCamera); - // Render the deferred water lighting 
RenderWaterLighting(renderGraph, hdCamera, colorBuffer, prepassOutput.depthBuffer, prepassOutput.depthPyramidTexture, volumetricLighting, ssrLightingBuffer, transparentPrepass, lightLists, ref opticalFogTransmittance); @@ -1734,9 +1690,7 @@ TextureHandle RenderTransparency(RenderGraph renderGraph, RenderCustomPass(renderGraph, hdCamera, colorBuffer, prepassOutput, customPassCullingResults, cullingResults, CustomPassInjectionPoint.BeforeTransparent, aovRequest, aovCustomPassBuffers); // Render all type of transparent forward (unlit, lit, complex (hair...)) to keep the sorting between transparent objects. - ApplyCameraMipBias(hdCamera); RenderForwardTransparent(renderGraph, hdCamera, colorBuffer, normalBuffer, prepassOutput, transparentPrepass, vtFeedbackBuffer, volumetricLighting, ssrLightingBuffer, currentColorPyramid, lightLists, shadowResult, cullingResults, false, refractionList); - ResetCameraMipBias(hdCamera); colorBuffer = ResolveMSAAColor(renderGraph, hdCamera, colorBuffer, m_NonMSAAColorBuffer); @@ -1748,9 +1702,7 @@ TextureHandle RenderTransparency(RenderGraph renderGraph, var passNames = m_Asset.currentPlatformRenderPipelineSettings.supportTransparentBackface ? m_AllTransparentPassNames : m_TransparentNoBackfaceNames; var lowResTranspRendererList = renderGraph.CreateRendererList( CreateTransparentRendererListDesc(cullingResults, hdCamera.camera, passNames, m_CurrentRendererConfigurationBakedLighting, HDRenderQueue.k_RenderQueue_LowTransparent)); - ApplyCameraMipBias(hdCamera); var lowResTransparentBuffer = RenderLowResTransparent(renderGraph, hdCamera, prepassOutput.downsampledDepthBuffer, cullingResults, lowResTranspRendererList); - ResetCameraMipBias(hdCamera); CombineAndUpsampleTransparent(renderGraph, hdCamera, colorBuffer, lowResTransparentBuffer, prepassOutput.downsampledDepthBuffer, transparentPrepass, preRefractionList, lowResTranspRendererList); } @@ -2364,7 +2316,7 @@ class ResetCameraSizeForAfterPostProcessPassData public ShaderVariablesGlobal shaderVariablesGlobal; } - void ResetCameraSizeForAfterPostProcess(RenderGraph renderGraph, HDCamera hdCamera, CommandBuffer commandBuffer) + void ResetCameraDataAfterPostProcess(RenderGraph renderGraph, HDCamera hdCamera, CommandBuffer commandBuffer) { if (DynamicResolutionHandler.instance.DynamicResolutionEnabled()) { @@ -2379,6 +2331,7 @@ void ResetCameraSizeForAfterPostProcess(RenderGraph renderGraph, HDCamera hdCame { data.shaderVariablesGlobal._ScreenSize = new Vector4(data.hdCamera.finalViewport.width, data.hdCamera.finalViewport.height, 1.0f / data.hdCamera.finalViewport.width, 1.0f / data.hdCamera.finalViewport.height); data.shaderVariablesGlobal._RTHandleScale = RTHandles.rtHandleProperties.rtHandleScale; + data.hdCamera.UpdateGlobalMipBiasCB(ref data.shaderVariablesGlobal, 0); ConstantBuffer.PushGlobal(ctx.cmd, data.shaderVariablesGlobal, HDShaderIDs._ShaderVariablesGlobal); RTHandles.SetReferenceSize((int)data.hdCamera.finalViewport.width, (int)data.hdCamera.finalViewport.height); }); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs index e08612e7a5f..74e7d2eb416 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs @@ -1103,6 +1103,7 @@ void UpdateGlobalConstantBuffers(HDCamera hdCamera, CommandBuffer cmd) void 
UpdateShaderVariablesGlobalCB(HDCamera hdCamera, CommandBuffer cmd) { hdCamera.UpdateShaderVariablesGlobalCB(ref m_ShaderVariablesGlobalCB); + hdCamera.UpdateGlobalMipBiasCB(ref m_ShaderVariablesGlobalCB, GetGlobalMipBias(hdCamera)); Fog.UpdateShaderVariablesGlobalCB(ref m_ShaderVariablesGlobalCB, hdCamera); UpdateShaderVariablesGlobalSubsurface(ref m_ShaderVariablesGlobalCB, hdCamera); UpdateShaderVariablesGlobalDecal(ref m_ShaderVariablesGlobalCB, hdCamera); @@ -1553,6 +1554,7 @@ bool PrepareAndCullCamera(Camera camera, XRPass xrPass, bool cameraRequestedDyna skipRequest = true; // First prepare the global constant buffer for users (Only camera properties) hdCamera.UpdateShaderVariablesGlobalCB(ref m_ShaderVariablesGlobalCB); + hdCamera.UpdateGlobalMipBiasCB(ref m_ShaderVariablesGlobalCB, GetGlobalMipBias(hdCamera)); ConstantBuffer.PushGlobal(m_ShaderVariablesGlobalCB, HDShaderIDs._ShaderVariablesGlobal); // Execute custom render BeginCameraRendering(renderContext, camera); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipelineAsset.DefaultResources.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipelineAsset.DefaultResources.cs index 15ea6c8e43f..cfa06df888f 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipelineAsset.DefaultResources.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipelineAsset.DefaultResources.cs @@ -53,7 +53,10 @@ public partial class HDRenderPipelineAsset #region SpeedTree /// HDRP default speed tree v8 shader - public override Shader defaultSpeedTree8Shader => defaultShaders.defaultSpeedTree8Shader; + public override Shader defaultSpeedTree8Shader => defaultShaders?.defaultSpeedTree8Shader; + + /// HDRP default speed tree v9 shader + public override Shader defaultSpeedTree9Shader => defaultShaders?.defaultSpeedTree9Shader; #endregion diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipelineResources/HDRenderPipelineEditorShaders.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipelineResources/HDRenderPipelineEditorShaders.cs index 66e245ec606..2fc7344fd86 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipelineResources/HDRenderPipelineEditorShaders.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipelineResources/HDRenderPipelineEditorShaders.cs @@ -67,6 +67,16 @@ public Shader defaultSpeedTree8Shader get => m_DefaultSpeedTree8Shader; set => this.SetValueAndNotify(ref m_DefaultSpeedTree8Shader, value, nameof(m_DefaultSpeedTree8Shader)); } + + [SerializeField] + [ResourcePath("Runtime/Material/Nature/SpeedTree9_HDRP.shadergraph")] + private Shader m_DefaultSpeedTree9Shader; + + public Shader defaultSpeedTree9Shader + { + get => m_DefaultSpeedTree9Shader; + set => this.SetValueAndNotify(ref m_DefaultSpeedTree9Shader, value, nameof(m_DefaultSpeedTree9Shader)); + } #endregion } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/ShaderVariables.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/ShaderVariables.hlsl index 23167148e6b..f08b918744c 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/ShaderVariables.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/ShaderVariables.hlsl @@ -175,7 +175,7 @@ TEXTURE2D(unity_MipmapStreaming_DebugTex); 
// In HDRP, all material samplers have the possibility of having a mip bias. // This mip bias is necessary for temporal upsamplers, since they render to a lower // resolution into a higher resolution target. -#if defined(SHADEROPTIONS_GLOBAL_MIP_BIAS) && SHADEROPTIONS_GLOBAL_MIP_BIAS != 0 +#if defined(SHADEROPTIONS_GLOBAL_MIP_BIAS) && SHADEROPTIONS_GLOBAL_MIP_BIAS != 0 && defined(SUPPORT_GLOBAL_MIP_BIAS) //simple 2d textures bias manipulation #ifdef PLATFORM_SAMPLE_TEXTURE2D_BIAS diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/VFXGraph/Shaders/VFXDefines.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/VFXGraph/Shaders/VFXDefines.hlsl index d4918335feb..60a5b859ad0 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/VFXGraph/Shaders/VFXDefines.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/VFXGraph/Shaders/VFXDefines.hlsl @@ -59,3 +59,7 @@ #else #define CULL_VERTEX(o) { o.VFX_VARYING_POSCS.x = VFX_NAN; return o; } #endif + +// Enable the support of global mip bias in the shader. +// Only has effect if the global mip bias is enabled in shader config and DRS is enabled. +#define SUPPORT_GLOBAL_MIP_BIAS diff --git a/Packages/com.unity.render-pipelines.high-definition/Samples~/FullscreenSamples/Scripts/FullscreenSamplesEffectSelection.cs b/Packages/com.unity.render-pipelines.high-definition/Samples~/FullscreenSamples/Scripts/FullscreenSamplesEffectSelection.cs index 08d52dfa420..79dd4720bc6 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Samples~/FullscreenSamples/Scripts/FullscreenSamplesEffectSelection.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Samples~/FullscreenSamples/Scripts/FullscreenSamplesEffectSelection.cs @@ -1,3 +1,7 @@ +#if (ENABLE_INPUT_SYSTEM && INPUT_SYSTEM_INSTALLED) +#define USE_INPUT_SYSTEM +#endif + using System.Collections; using System.Collections.Generic; using UnityEngine; @@ -5,7 +9,7 @@ using UnityEngine.Rendering.HighDefinition; using UnityEngine.UI; -#if ENABLE_INPUT_SYSTEM +#if USE_INPUT_SYSTEM using UnityEngine.InputSystem; #endif @@ -141,7 +145,7 @@ void Update() if (Application.isFocused) { -#if ENABLE_INPUT_SYSTEM +#if USE_INPUT_SYSTEM if (Keyboard.current.rightArrowKey.wasPressedThisFrame || Keyboard.current.dKey.wasPressedThisFrame) { diff --git a/Packages/com.unity.render-pipelines.high-definition/Samples~/LensFlareSamples/Scripts/LensFlareSamplesInputAndControl.cs b/Packages/com.unity.render-pipelines.high-definition/Samples~/LensFlareSamples/Scripts/LensFlareSamplesInputAndControl.cs index 2f458a76551..bd1c7f583be 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Samples~/LensFlareSamples/Scripts/LensFlareSamplesInputAndControl.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Samples~/LensFlareSamples/Scripts/LensFlareSamplesInputAndControl.cs @@ -1,8 +1,12 @@ +#if (ENABLE_INPUT_SYSTEM && INPUT_SYSTEM_INSTALLED) +#define USE_INPUT_SYSTEM +#endif + using UnityEngine; using UnityEngine.Rendering; using UnityEngine.UI; -#if ENABLE_INPUT_SYSTEM +#if USE_INPUT_SYSTEM using UnityEngine.InputSystem; #endif @@ -58,7 +62,7 @@ void Update() private void SetSkyFromInput() { -#if ENABLE_INPUT_SYSTEM +#if USE_INPUT_SYSTEM if (Keyboard.current.digit1Key.wasPressedThisFrame) { SetSky(0); @@ -102,7 +106,7 @@ void SetSky(int inputNumber) private void MoveLightWithMouse() { -#if ENABLE_INPUT_SYSTEM +#if USE_INPUT_SYSTEM if (Mouse.current.leftButton.IsPressed()) { var mousePosition = Mouse.current.position.ReadValue(); @@ 
-121,7 +125,7 @@ private void CameraMovementWithMouse() { LockCursorWhileMouseButtonDown(); -#if ENABLE_INPUT_SYSTEM +#if USE_INPUT_SYSTEM if (Mouse.current.rightButton.isPressed) { var mouseMovement = Mouse.current.delta.ReadValue() * cameraRotationSpeed / 30f; @@ -154,7 +158,7 @@ private void CameraMovementWithMouse() private void LockCursorWhileMouseButtonDown() { -#if ENABLE_INPUT_SYSTEM +#if USE_INPUT_SYSTEM if (Mouse.current.rightButton.wasPressedThisFrame) { Cursor.lockState = CursorLockMode.Locked; diff --git a/Packages/com.unity.render-pipelines.high-definition/Samples~/WaterSamples/Scripts/LookWithMouse.cs b/Packages/com.unity.render-pipelines.high-definition/Samples~/WaterSamples/Scripts/LookWithMouse.cs index 8ea40539fe8..b1b650ce77d 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Samples~/WaterSamples/Scripts/LookWithMouse.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Samples~/WaterSamples/Scripts/LookWithMouse.cs @@ -1,4 +1,8 @@ -#if ENABLE_INPUT_SYSTEM +#if (ENABLE_INPUT_SYSTEM && INPUT_SYSTEM_INSTALLED) +#define USE_INPUT_SYSTEM +#endif + +#if USE_INPUT_SYSTEM using UnityEngine.InputSystem; #endif @@ -28,7 +32,7 @@ void Update() { bool unlockPressed = false, lockPressed = false; -#if ENABLE_INPUT_SYSTEM +#if USE_INPUT_SYSTEM float mouseX = 0, mouseY = 0; if (Mouse.current != null) diff --git a/Packages/com.unity.render-pipelines.high-definition/Samples~/WaterSamples/Scripts/PlayerMovement.cs b/Packages/com.unity.render-pipelines.high-definition/Samples~/WaterSamples/Scripts/PlayerMovement.cs index 7d04242feb8..17b41aacb97 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Samples~/WaterSamples/Scripts/PlayerMovement.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Samples~/WaterSamples/Scripts/PlayerMovement.cs @@ -1,4 +1,8 @@ -#if ENABLE_INPUT_SYSTEM +#if (ENABLE_INPUT_SYSTEM && INPUT_SYSTEM_INSTALLED) +#define USE_INPUT_SYSTEM +#endif + +#if USE_INPUT_SYSTEM using UnityEngine.InputSystem; #endif @@ -27,7 +31,7 @@ public class PlayerMovement : MonoBehaviour Vector3 velocity; bool isGrounded; -#if ENABLE_INPUT_SYSTEM +#if USE_INPUT_SYSTEM InputAction movement; InputAction jump; @@ -60,7 +64,7 @@ void Update() float z; bool jumpPressed = false; -#if ENABLE_INPUT_SYSTEM +#if USE_INPUT_SYSTEM var delta = movement.ReadValue(); x = -delta.x; z = -delta.y; diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/APVsamplingDebug.png b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/APVsamplingDebug.png new file mode 100644 index 00000000000..135046fdb1e Binary files /dev/null and b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/APVsamplingDebug.png differ diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/ProbeVolume-Size-gizmo.png b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/ProbeVolume-Size-gizmo.png new file mode 100644 index 00000000000..ded3446d6d7 Binary files /dev/null and b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/ProbeVolume-Size-gizmo.png differ diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/ProbeVolumesGI.png b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/ProbeVolumesGI.png new file mode 100644 index 00000000000..99cec441336 Binary files /dev/null and 
b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/ProbeVolumesGI.png differ diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-debug-displayprobebricks1.png b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-debug-displayprobebricks1.png new file mode 100644 index 00000000000..b6560446831 Binary files /dev/null and b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-debug-displayprobebricks1.png differ diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-debug-displayprobecells.png b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-debug-displayprobecells.png new file mode 100644 index 00000000000..ce154809bba Binary files /dev/null and b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-debug-displayprobecells.png differ diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-debug-displayprobes.png b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-debug-displayprobes.png new file mode 100644 index 00000000000..d34e63fa637 Binary files /dev/null and b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-debug-displayprobes.png differ diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-dilationvsnot.png b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-dilationvsnot.png new file mode 100644 index 00000000000..a7dc6007641 Binary files /dev/null and b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-dilationvsnot.png differ diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-lightleak.jpg b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-lightleak.jpg new file mode 100644 index 00000000000..7533725637c Binary files /dev/null and b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-lightleak.jpg differ diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-reflection-probe-normalization.png b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-reflection-probe-normalization.png new file mode 100644 index 00000000000..39073f07517 Binary files /dev/null and b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-reflection-probe-normalization.png differ diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-seams.jpg b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-seams.jpg new file mode 100644 index 00000000000..69bec2e0049 Binary files /dev/null and b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-seams.jpg differ diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-virtualoffsetvsnot.png b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-virtualoffsetvsnot.png 
new file mode 100644 index 00000000000..8f6432b8a7d Binary files /dev/null and b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/probe-volumes/probevolumes-virtualoffsetvsnot.png differ diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/Images/render-graph-viewer-icons.png b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/render-graph-viewer-icons.png new file mode 100644 index 00000000000..cfcdde98307 Binary files /dev/null and b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/render-graph-viewer-icons.png differ diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/Images/render-graph-viewer.png b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/render-graph-viewer.png new file mode 100644 index 00000000000..75b1aac63a4 Binary files /dev/null and b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/render-graph-viewer.png differ diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/TableOfContents.md b/Packages/com.unity.render-pipelines.universal/Documentation~/TableOfContents.md index 320ae8dc9da..0d0642de3d0 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/TableOfContents.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/TableOfContents.md @@ -6,7 +6,7 @@ * [Feature list](urp-feature-list.md) * [Feature Comparison with the Built-in Render Pipeline](universalrp-builtin-feature-comparison.md) * [Getting started](InstallingAndConfiguringURP.md) - * [Create a Project with 3D Sample Scenes](creating-a-new-project-with-urp.md) + * [Create a project with URP](creating-a-new-project-with-urp.md) * [Install URP into an existing Project](InstallURPIntoAProject.md) * [Package samples](package-samples.md) * [URP Package Samples](package-sample-urp-package-samples.md) @@ -60,8 +60,26 @@ * [View and control a light from its perspective](lights-placement-tool.md) * [The Universal Additional Light Data component](universal-additional-light-data.md) * [Shadows in the Universal Render Pipeline](Shadows-in-URP.md) + * [Adaptive Probe Volumes (APV)](probevolumes.md) + * [Understanding Adaptive Probe Volumes](probevolumes-concept.md) + * [Use Adaptive Probe Volumes](probevolumes-use.md) + * [Display Adaptive Probe Volumes](probevolumes-showandadjust.md) + * [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md) + * [Bake multiple scenes together with Baking Sets](probevolumes-usebakingsets.md) + * [Streaming](probevolumes-streaming.md) + * [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) + * [Adaptive Probe Volume Inspector window reference](probevolumes-inspector-reference.md) + * [Adaptive Probe Volumes panel reference](probevolumes-lighting-panel-reference.md) + * [Probe Volumes Options Override reference](probevolumes-options-override-reference.md) + * [Probe Adjustment Volume component reference](probevolumes-adjustment-volume-component-reference.md) * [Reflection probes](lighting/reflection-probes.md) - * [Lens Flare asset](shared/lens-flare/lens-flare-asset.md) + * [Lens flares](shared/lens-flare/lens-flare.md) + * [Choose a lens flare type](shared/lens-flare/choose-a-lens-flare-type.md) + * [Add lens flares](shared/lens-flare/lens-flare-component.md) + * [Add screen space lens flares](shared/lens-flare/post-processing-screen-space-lens-flare.md) + * [Lens Flare (SRP) reference](shared/lens-flare/lens-flare-srp-reference.md) + * [Lens Flare 
(SRP) Data Asset reference](shared/lens-flare/lens-flare-asset.md) + * [Screen Space Lens Flare override reference](shared/lens-flare/reference-screen-space-lens-flare.md) * [Cameras](cameras.md) * [Cameras in URP](cameras/camera-differences-in-urp.md) * [Understand camera render order](cameras-advanced.md) @@ -138,22 +156,37 @@ * [Reconstruct the world space positions](writing-shaders-urp-reconstruct-world-position.md) * [URP ShaderLab Pass tags](urp-shaders/urp-shaderlab-pass-tags.md) * [Custom rendering and post-processing](customizing-urp.md) - * [Custom render passes](renderer-features/custom-rendering-passes.md) - * [Custom render pass workflow in URP](renderer-features/custom-rendering-pass-workflow-in-urp.md) - * [Scriptable Render Passes](renderer-features/scriptable-render-passes.md) - * [Scriptable Render Passes](renderer-features/intro-to-scriptable-render-passes.md) - * [Write a Scriptable Render Pass](renderer-features/write-a-scriptable-render-pass.md) - * [Inject a pass via scripting](customize/inject-render-pass-via-script.md) + * [Custom render pass workflow in URP](renderer-features/custom-rendering-pass-workflow-in-urp.md) + * [Scriptable Render Passes](renderer-features/scriptable-render-passes.md) + * [Introduction to Scriptable Render Passes](renderer-features/intro-to-scriptable-render-passes.md) + * [Render graph system](render-graph.md) + * [Introduction to the render graph system](render-graph-introduction.md) + * [Write a render pass using the render graph system](render-graph-write-render-pass.md) + * [Use textures](working-with-textures.md) + * [Create a render graph system texture](render-graph-create-a-texture.md) + * [Import a texture into the render graph system](render-graph-import-a-texture.md) + * [Access a texture in a custom render pass](render-graph-read-write-texture.md) + * [Transfer a texture between render passes](render-graph-pass-textures-between-passes.md) + * [URP blit best practices](customize/blit-overview.md) + * [Perform a full screen blit in URP](renderer-features/how-to-fullscreen-blit.md) + * [Draw objects in a render pass](render-graph-draw-objects-in-a-pass.md) + * [Use frame data](accessing-frame-data.md) + * [Analyze a render graph](render-graph-view.md) + * [Use Compatibility Mode APIs in render graph render passes](render-graph-unsafe-pass.md) + * [Render Graph Viewer window reference](render-graph-viewer-reference.md) + * [Adding a Scriptable Render Pass to the frame rendering loop](inject-a-render-pass.md) * [Scriptable Renderer Features](renderer-features/scriptable-renderer-features/scriptable-renderer-features-landing.md) * [Introduction to Scriptable Renderer Features](renderer-features/scriptable-renderer-features/intro-to-scriptable-renderer-features.md) * [Inject a custom render pass using a Scriptable Renderer Feature](renderer-features/scriptable-renderer-features/inject-a-pass-using-a-scriptable-renderer-feature.md) * [Apply a Scriptable Renderer Feature to a specific camera type](renderer-features/scriptable-renderer-features/apply-scriptable-feature-to-specific-camera.md) - * [Example of a complete Scriptable Renderer Feature](renderer-features/create-custom-renderer-feature.md) - * [Using textures](working-with-textures.md) - * [URP blit best practices](customize/blit-overview.md) - * [Perform a full screen blit in URP](renderer-features/how-to-fullscreen-blit.md) - * [Injection points reference](customize/custom-pass-injection-points.md) - * [Scriptable Renderer Feature and Scriptable Render Pass API 
reference](renderer-features/scriptable-renderer-features/scriptable-renderer-feature-reference.md)
+ * [Example of a complete Scriptable Renderer Feature](renderer-features/create-custom-renderer-feature.md)
+ * [Scriptable Renderer Feature API reference](renderer-features/scriptable-renderer-features/scriptable-renderer-feature-reference.md)
+ * [Inject a render pass via scripting](customize/inject-render-pass-via-script.md)
+ * [Injection points reference](customize/custom-pass-injection-points.md)
+ * [Compatibility mode](compatibility-mode.md)
+ * [Write a Scriptable Render Pass in Compatibility Mode](renderer-features/write-a-scriptable-render-pass.md)
+ * [Example of a complete Scriptable Renderer Feature in Compatibility Mode](renderer-features/create-custom-renderer-feature-compatibility-mode.md)
+ * [Scriptable Render Pass Compatibility Mode API reference](renderer-features/scriptable-renderer-features/scriptable-render-pass-reference.md)
* [Optimization](urp-optimization.md)
* [Rendering Debugger](features/rendering-debugger.md)
* [Add controls to the Rendering Debugger](features/rendering-debugger-add-controls.md)
diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/accessing-frame-data.md b/Packages/com.unity.render-pipelines.universal/Documentation~/accessing-frame-data.md
new file mode 100644
index 00000000000..543853b8e8d
--- /dev/null
+++ b/Packages/com.unity.render-pipelines.universal/Documentation~/accessing-frame-data.md
@@ -0,0 +1,83 @@
+# Use frame data
+
+You can fetch the textures the Universal Render Pipeline (URP) creates for the current frame, for example the active color buffer or a G-buffer texture, and use them in your render passes.
+
+These textures are called frame data, resource data, or frame resources.
+
+Some textures might not exist in the frame data, depending on which injection point you use to insert your custom render pass into the URP frame rendering loop. Refer to the following for information about which textures exist when:
+
+- [Injection points reference](customize/custom-pass-injection-points.md)
+
+## Get frame data
+
+The frame data is in the `ContextContainer` object that URP provides when you override the `RecordRenderGraph` method.
+
+Follow these steps to get a handle to a texture in the frame data:
+
+1. Get all the frame data as a `UniversalResourceData` object, using the `Get` method of the `ContextContainer` object.
+
+    For example:
+
+    ```csharp
+    public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameContext)
+    {
+        using (var builder = renderGraph.AddRasterRenderPass<PassData>("Get frame data", out var passData))
+        {
+            UniversalResourceData frameData = frameContext.Get<UniversalResourceData>();
+        }
+    }
+    ```
+
+2. Get the handle to a texture in the frame data.
+
+    For example, the following gets a handle to the active color texture:
+
+    ```csharp
+    TextureHandle activeColorTexture = frameData.activeColorTexture;
+    ```
+
+You can then read from and write to the texture. Refer to [Use a texture in a render pass](render-graph-read-write-texture.md) for more information.
+
+You can use the [ConfigureInput](xref:UnityEngine.Rendering.Universal.ScriptableRenderPass.ConfigureInput(UnityEngine.Rendering.Universal.ScriptableRenderPassInput)) API to make sure URP generates the texture you need in the frame data.
+
+## Textures in the frame data
+
+You can fetch the following textures from the frame data.
+
+| **Property** | **Texture** | **URP shader pass that writes to the texture** |
+|-|-|-|
+| `additionalShadowsTexture` | The additional shadow map. | `ShadowCaster` |
+| `activeColorTexture` | The color texture the camera currently targets. | Any pass, depending on your settings |
+| `activeDepthTexture` | The depth texture the camera currently targets. | Any pass, depending on your settings |
+| `afterPostProcessColor` | The main color texture after URP's post-processing passes. | `UberPost` |
+| `backBufferColor` | The color texture of the screen back buffer. If you use [post-processing](integration-with-post-processing.md), URP writes to this texture at the end of rendering, unless you enable [HDR Debug Views](post-processing/hdr-output.md#hdr-debug-views). Refer to `debugScreenTexture` for more information. | Any pass, depending on your settings |
+| `backBufferDepth` | The depth texture of the screen back buffer. | Any pass, depending on your settings |
+| `cameraColor` | The main color texture for the camera. You can store multiple samples in this texture if you enable [Multisample Anti-aliasing (MSAA)](anti-aliasing.md#msaa). | Any pass, depending on your settings |
+| `cameraDepth` | The main depth texture for the camera. You can store multiple samples in this texture if you enable [Multisample Anti-aliasing (MSAA)](anti-aliasing.md#msaa). | Any pass, depending on your settings |
+| `cameraDepthTexture` | A copy of the depth texture, if you enable **Depth Priming Mode** in the [renderer](urp-universal-renderer.md) or **Depth Texture** in the active [URP Asset](universalrp-asset.md). | `CopyDepth` or `DepthPrepass` |
+| `cameraNormalsTexture` | The scene normals texture. Contains the scene normals for objects with shaders that have a `DepthNormals` pass. | `DepthNormals` prepass |
+| `cameraOpaqueTexture` | A texture with the opaque objects in the scene, if you enable **Opaque Texture** in the [URP Asset](universalrp-asset.md). | `CopyColor` |
+| `dBuffer` | The Decals texture. Refer to [DBuffer](renderer-feature-decal.md#dbuffer) for more information. | `Decals` |
+| `dBufferDepth` | The Decals depth texture. Refer to [DBuffer](renderer-feature-decal.md#dbuffer) for more information. | `Decals` |
+| `debugScreenTexture` | If you enable [HDR Debug Views](post-processing/hdr-output.md#hdr-debug-views), URP writes the output of [post-processing](integration-with-post-processing.md) to this texture instead of `backBufferColor`. | `uberPost` and `finalPost` |
+| `gBuffer` | The G-buffer textures. Refer to [G-buffer](rendering/deferred-rendering-path.md#g-buffer-layout) for more information. | `GBuffer` |
+| `internalColorLut` | The internal color look-up texture (LUT). | `InternalLut` |
+| `mainShadowsTexture` | The main shadow map. | `ShadowCaster` |
+| `motionVectorColor` | The motion vectors color texture. Refer to [motion vectors](features/motion-vectors.md) for more information. | `Camera Motion Vectors` and `MotionVectors` |
+| `motionVectorDepth` | The motion vectors depth texture. Refer to [motion vectors](features/motion-vectors.md) for more information. | `Camera Motion Vectors` and `MotionVectors` |
+| `overlayUITexture` | The overlay UI texture. | `DrawScreenSpaceUI` |
+| `renderingLayersTexture` | The Rendering Layers texture. Refer to [Rendering layers](features/rendering-layers.md) for more information. | `DrawOpaques` or the `DepthNormals` prepass, depending on your settings. |
+| `ssaoTexture` | The Screen Space Ambient Occlusion (SSAO) texture. Refer to [Ambient occlusion](post-processing-ssao.md) for more information. | `SSAO` |
+
+## Example
+
+Refer to the following for examples of custom render passes that use the frame data:
+
+- The render graph system samples in the [URP package samples](package-sample-urp-package-samples.md)
+
+## Additional resources
+
+- [Rendering](rendering-in-universalrp.md)
+- [Render pipeline concepts](urp-concepts.md)
+- [Deferred rendering path in URP](rendering/deferred-rendering-path.md)
+
diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/cameras-advanced.md b/Packages/com.unity.render-pipelines.universal/Documentation~/cameras-advanced.md
index d07c0813b97..82e5e0bb3eb 100644
--- a/Packages/com.unity.render-pipelines.universal/Documentation~/cameras-advanced.md
+++ b/Packages/com.unity.render-pipelines.universal/Documentation~/cameras-advanced.md
@@ -2,10 +2,10 @@
This page describes when a Universal Render Pipeline (URP) camera performs the following operations:
-* [Clearing the color and depth buffers](#clearing)
+* [Clearing the color and depth buffers](#clearing-the-color-and-depth-buffers)
* [Base Camera](#base-camera)
* [Overlay Camera](#overlay-camera)
-* [Culling and rendering](#camera-culling-and-rendering-order)
+* [Culling and rendering](#culling-and-rendering)
* [Rendering order optimizations.](#rendering-order-optimizations)
* [Render requests](#render-requests)
diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/compatibility-mode.md b/Packages/com.unity.render-pipelines.universal/Documentation~/compatibility-mode.md
new file mode 100644
index 00000000000..60d142cb6db
--- /dev/null
+++ b/Packages/com.unity.render-pipelines.universal/Documentation~/compatibility-mode.md
@@ -0,0 +1,11 @@
+# Compatibility Mode
+
+If you enable **Compatibility Mode (Render Graph Disabled)** in [URP graphics settings](urp-global-settings.md), you can write a Scriptable Render Pass without using the [render graph API](render-graph.md).
+
+> **Note**: Unity no longer develops or improves the rendering path that doesn't use the render graph API. Use the render graph API instead when developing new graphics features.
+
+|Page|Description|
+|-|-|
+|[Write a Scriptable Render Pass in Compatibility Mode](renderer-features/write-a-scriptable-render-pass.md)|An example of creating a Scriptable Render Pass in Compatibility Mode.|
+|[Example of a complete Scriptable Renderer Feature in Compatibility Mode](renderer-features/create-custom-renderer-feature-compatibility-mode.md)|An example of a complete Scriptable Renderer Feature in Compatibility Mode.|
+|[Scriptable Render Pass API reference](renderer-features/scriptable-renderer-features/scriptable-render-pass-reference.md)|Reference for the Scriptable Render Pass API.|
diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/creating-a-new-project-with-urp.md b/Packages/com.unity.render-pipelines.universal/Documentation~/creating-a-new-project-with-urp.md
index fed7b741692..e333bec0976 100644
--- a/Packages/com.unity.render-pipelines.universal/Documentation~/creating-a-new-project-with-urp.md
+++ b/Packages/com.unity.render-pipelines.universal/Documentation~/creating-a-new-project-with-urp.md
@@ -1,13 +1,21 @@
-# Create a project with 3D Sample Scenes
+# Create a project with URP
![URP 3D Sample](Images/AssetShots/Beauty/Template.png)
-To explore various features of the Universal Render Pipeline (URP), create a new Project with 3D Sample Scenes.
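Returning to the `accessing-frame-data.md` page added above: its two snippets can be combined into one complete pass. The following is a minimal sketch of a Scriptable Render Pass that fetches `UniversalResourceData` and declares a read of the active color texture. The class name, the `PassData` type, and the choice of `AddRasterRenderPass` are illustrative assumptions rather than part of this changeset, and the render graph namespace differs between Unity versions.

```csharp
using UnityEngine.Rendering;
using UnityEngine.Rendering.RenderGraphModule; // Experimental namespace in older Unity versions
using UnityEngine.Rendering.Universal;

// Hypothetical example pass: fetches the frame data and declares that it
// reads the camera's active color texture.
class ReadActiveColorPass : ScriptableRenderPass
{
    class PassData
    {
        internal TextureHandle activeColor; // handle fetched from the frame data
    }

    public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameContext)
    {
        using (var builder = renderGraph.AddRasterRenderPass<PassData>("Read active color", out var passData))
        {
            // Get all the frame data, then the handle to the active color texture.
            UniversalResourceData frameData = frameContext.Get<UniversalResourceData>();
            passData.activeColor = frameData.activeColorTexture;

            // Declare the dependency so the render graph orders this pass correctly
            // and doesn't cull it.
            builder.UseTexture(passData.activeColor, AccessFlags.Read);
            builder.AllowPassCulling(false);

            builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
            {
                // A real pass would sample data.activeColor here, and would also
                // declare a render attachment with builder.SetRenderAttachment.
            });
        }
    }
}
```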
+The Unity Hub contains the following templates that let you create a pre-configured Universal Render Pipeline (URP) project.
+
+| **Template** | **Description** |
+|---|---|
+| 2D URP | This is an empty project for 2D applications. URP is pre-configured with the 2D Renderer. |
+| 3D (URP) | This is an empty project for 3D applications. URP is pre-configured with the 3D renderer. |
+| 3D Sample Scenes (URP) | This sample contains four environments that showcase the versatility, scalability, and customizability of URP. The project demonstrates different art styles, rendering paths, and scene complexities. Each scene shows you how to tailor a project to different platforms, from mobile and untethered devices to high-end PCs and consoles. |
+
+To create a new project using a URP template:
1. Open the Unity Hub.
-2. On the Home page, click **New project** to start a new Project.
+2. Select the **Projects** tab, then select **New project**.
-3. In the **Sample** tab, select **3D Sample Scenes (URP)**.
+3. Select one of the URP templates.
-4. Click **Create project**. Unity creates a new project with URP installed and configured, and includes example content that demonstrates URP features.
+4. Fill in the **Project settings** fields and select **Create project**. Unity creates a new pre-configured URP project.
diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/customizing-urp.md b/Packages/com.unity.render-pipelines.universal/Documentation~/customizing-urp.md
index ddaa8fe937f..c4bb9cc65e7 100644
--- a/Packages/com.unity.render-pipelines.universal/Documentation~/customizing-urp.md
+++ b/Packages/com.unity.render-pipelines.universal/Documentation~/customizing-urp.md
@@ -1,14 +1,17 @@
# Custom rendering and post-processing
-Customize and extend the rendering process in the Universal Render Pipeline (URP). URP uses Renderer Features to implement certain effects. URP includes a selection of pre-built Renderer Features and the ability to create customized Renderer Features known as Scriptable Renderer Features.
+Customize and extend the rendering process in the Universal Render Pipeline (URP). Create a custom render pass in a C# script and inject it into the URP frame rendering loop.
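As a companion to the workflow described in the paragraph above, here is a minimal sketch of a Scriptable Renderer Feature that injects a custom pass into the frame rendering loop. The feature name, the injection point, and the reuse of the `ReadActiveColorPass` sketch from the earlier example are assumptions for illustration.

```csharp
using UnityEngine.Rendering.Universal;

// Hypothetical example feature: creates one instance of a custom pass
// and enqueues it for rendering every frame.
public class ReadActiveColorFeature : ScriptableRendererFeature
{
    ReadActiveColorPass m_Pass;

    public override void Create()
    {
        m_Pass = new ReadActiveColorPass
        {
            // Pick where the pass runs in the frame rendering loop.
            renderPassEvent = RenderPassEvent.AfterRenderingTransparents
        };
    }

    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
        renderer.EnqueuePass(m_Pass);
    }
}
```

Once a class like this exists, you add it to the active Renderer asset in the Inspector to enable it.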
-| Page | Description | +|Page|Description| |-|-| -|[Custom render passes](renderer-features/custom-rendering-passes.md)|Create a custom render pass in a C# script and inject it into the URP frame rendering loop.| -|[Injection points reference](customize/custom-pass-injection-points.md)|The injection points you can use to inject render passes into the frame rendering loop.| -|[Scriptable Renderer Feature and Scriptable Render Pass API reference](renderer-features/scriptable-renderer-features/scriptable-renderer-feature-reference.md)|Common methods you can use to write Scriptable Renderer Passes and Scriptable Renderer Features.| +|[Custom render pass workflow in URP](renderer-features/custom-rendering-pass-workflow-in-urp.md) |Add and inject a custom render pass to change how URP renders a scene or the objects within a scene.| +|[Scriptable Render Passes](renderer-features/scriptable-render-passes.md)|Use the Scriptable Render Pass API and the render graph system to create a custom render pass.| +|[Scriptable Renderer Features](renderer-features/scriptable-renderer-features/scriptable-renderer-features-landing.md)|Use the `ScriptableRendererFeature` API to inject a custom render pass into the URP frame rendering loop.| +|[Compatibility Mode](compatibility-mode.md)|Write a Scriptable Render Pass if you enable **Compatibility Mode (Render Graph Disabled)** in URP graphics settings. Unity no longer develops or improves this rendering path.| ## Additional resources +- [Rendering](rendering-in-universalrp.md) +- [Render pipeline concepts](urp-concepts.md) - [Pre-built effects (Renderer Features)](urp-renderer-feature.md) - [How to create a custom post-processing effect](post-processing/post-processing-custom-effect-low-code.md) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/features/rendering-debugger.md b/Packages/com.unity.render-pipelines.universal/Documentation~/features/rendering-debugger.md index a5c2f9c13fb..8a10350892b 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/features/rendering-debugger.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/features/rendering-debugger.md @@ -54,11 +54,15 @@ The **Rendering Debugger** window contains the following sections: * [Frequently Used](#frequently-used) +* [Rendering](#rendering) + * [Material](#material) * [Lighting](#lighting) -* [Rendering](#rendering) +* [Render Graph](#render-graph) + +* [Probe Volume](#probe-volume-panel) The following illustration shows the Rendering Debugger window in the Scene view. @@ -72,7 +76,7 @@ Use the [runtime shortcuts](#Navigation at runtime) to open the Display stats wi ### Frame Stats -The Frame Stats section displays the average, minimum, and maximum value of each property. HDRP calculates each Frame Stat value over the 30 most recent frames. +The Frame Stats section displays the average, minimum, and maximum value of each property. URP calculates each Frame Stat value over the 30 most recent frames. | **Property** | **Description** | | ---------------------------- | ------------------------------------------------------------ | @@ -107,8 +111,8 @@ The **Bottlenecks** section describes the distribution of the last 60 frames acr If Vsync limited 20 of the 60 most recent frames, the Bottleneck section might appear as follows: -- **CPU** 0.0%: This indicates that HDRP did not render any of the last 60 frames on the CPU. -- **GPU** 66.6%: This indicates that the GPU limited 66.6% of the 60 most recent frames rendered by HDRP. 
+- **CPU** 0.0%: This indicates that URP did not render any of the last 60 frames on the CPU. +- **GPU** 66.6%: This indicates that the GPU limited 66.6% of the 60 most recent frames rendered by URP. - **Present Limited** 33.3%: This indicates that presentation constraints (Vsync or the [target framerate](https://docs.unity3d.com/ScriptReference/Application-targetFrameRate.html)) limited 33.3% of the last 60 frames. - **Balanced** 0.0%: This indicates that in the last 60 frames, there were 0 frames where the CPU processing time and GPU processing time were the same. @@ -118,7 +122,7 @@ In this example, the bottleneck is the GPU. ### Detailed Stats -The Detailed Stats section displays the amount of time in milliseconds that each rendering step takes on the CPU and GPU. HDRP updates these values once every frame based on the previous frame. +The Detailed Stats section displays the amount of time in milliseconds that each rendering step takes on the CPU and GPU. URP updates these values once every frame based on the previous frame. | **Property** | **Description** | | -------------------------------- | ------------------------------------------------------------ | @@ -130,6 +134,36 @@ The Detailed Stats section displays the amount of time in milliseconds that each This section contains a selection of properties that users use often. The properties are from the other sections in the Rendering Debugger window. For information about the properties, refer to the sections [Material](#material), [Lighting](#lighting), and [Rendering](#rendering). + +### Rendering + +The properties in this section let you visualize different rendering features. + +#### Rendering Debug + +![](../Images/rendering-debugger/rendering-debug.png)
*The Rendering Debug subsection.* + +| **Property** | **Description** | +| ------------------------------ | ------------------------------------------------------------ | +| **Map Overlays** | Specifies which render pipeline texture to overlay on the screen. The options are:
  • **None**: Renders the scene normally without a texture overlay.
  • **Depth**: Overlays the camera's depth texture on the screen.
  • **Additional Lights Shadow Map**: Overlays the [shadow map](https://docs.unity3d.com/Manual/shadow-mapping.html) that contains shadows cast by lights other than the main directional light.
  • **Main Light Shadow Map**: Overlays the shadow map that contains shadows cast by the main directional light.
| +| **  Map Size** | The width and height of the overlay texture as a percentage of the view window URP displays it in. For example, a value of **50** fills up a quarter of the screen (50% of the width and 50% of the height). | +| **HDR** | Indicates whether to use [high dynamic range (HDR)](https://docs.unity3d.com/Manual/HDR.html) to render the scene. Enabling this property only has an effect if you enable **HDR** in your URP Asset. | +| **MSAA** | Indicates whether to use [Multisample Anti-aliasing (MSAA)](./../anti-aliasing.md#msaa) to render the scene. Enabling this property only has an effect if:
  • You set **Anti Aliasing (MSAA)** to a value other than **Disabled** in your URP Asset.
  • You use the Game View. MSAA has no effect in the Scene View.
| +| **Post-processing** | Specifies how URP applies post-processing. The options are:
  • **Disabled**: Disables post-processing.
  • **Auto**: Unity enables or disables post-processing depending on the currently active debug modes. If color changes from post-processing would change the meaning of a debug mode's pixel, Unity disables post-processing. If no debug modes are active, or if color changes from post-processing don't change the meaning of the active debug modes' pixels, Unity enables post-processing.
  • **Enabled**: Applies post-processing to the image that the camera captures.
| +| **Additional Wireframe Modes** | Specifies whether and how to render wireframes for meshes in your scene. The options are:
  • **None**: Doesn't render wireframes.
  • **Wireframe**: Exclusively renders edges for meshes in your scene. In this mode, you can see the wireframe for meshes through the wireframe for closer meshes.
  • **Solid Wireframe**: Exclusively renders edges and faces for meshes in your scene. In this mode, the faces of each wireframe mesh hide edges behind them.
  • **Shaded Wireframe**: Renders edges for meshes as an overlay. In this mode, Unity renders the scene in color and overlays the wireframe over the top.
| +| **Overdraw** | Indicates whether to render the overdraw debug view. This is useful to check where Unity draws pixels over one another. | + +#### Pixel Validation + +![](../Images/rendering-debugger/pixel-validation.png)
*The Pixel Validation subsection.* + +| **Property** | **Description** | +| -------------------------------- | ------------------------------------------------------------ | +| **Pixel Validation Mode** | Specifies which mode Unity uses to validate pixel color values. The options are:
  • **None**: Renders the scene normally and doesn't validate any pixels.
  • **Highlight NaN, Inf and Negative Values**: Highlights pixels that have color values that are NaN, Inf, or negative.
  • **Highlight Values Outside Range**: Highlights pixels that have color values outside a particular range. Use **Value Range Min** and **Value Range Max**.
| +| **  Channels** | Specifies which value to use for the pixel value range validation. The options are:
  • **RGB**: Validates the pixel using the luminance value calculated from the red, green, and blue color channels.
  • **R**: Validates the pixel using the value from the red color channel.
  • **G**: Validates the pixel using the value from the green color channel.
  • **B**: Validates the pixel using the value from the blue color channel.
  • **A**: Validates the pixel using the value from the alpha channel.
This property only appears if you set **Pixel Validation Mode** to **Highlight Values Outside Range**. | +| **   Value Range Min** | The minimum valid color value. Unity highlights color values that are less than this value.

This property only appears if you set **Pixel Validation Mode** to **Highlight Values Outside Range**. | +| **   Value Range Max** | The maximum valid color value. Unity highlights color values that are greater than this value.

This property only appears if you set **Pixel Validation Mode** to **Highlight Values Outside Range**. | + ### Material The properties in this section let you visualize different Material properties. @@ -164,34 +198,66 @@ The properties in this section let you visualize different settings and elements | **Lighting Debug Mode** | Specifies which lighting and shadow information to overlay on-screen to debug. The options are:
  • **None**: Renders the scene normally without a debug overlay.
  • **Shadow Cascades**: Overlays shadow cascade information so you can determine which shadow cascade each pixel uses. Use this to debug shadow cascade distances. For information on which color represents which shadow cascade, refer to the [Shadows section of the URP Asset](../universalrp-asset.md#shadows).
  • **Lighting Without Normal Maps**: Renders the scene to visualize lighting. This mode uses neutral materials and disables normal maps. This and the **Lighting With Normal Maps** mode are useful for debugging lighting issues caused by normal maps.
  • **Lighting With Normal Maps**: Renders the scene to visualize lighting. This mode uses neutral materials and allows normal maps.
  • **Reflections**: Renders the scene to visualize reflections. This mode applies perfectly smooth, reflective materials to every Mesh Renderer.
  • **Reflections With Smoothness**: Renders the scene to visualize reflections. This mode applies reflective materials without an overridden smoothness to every GameObject.
| | **Lighting Features** | Specifies flags for which lighting features contribute to the final lighting result. Use this to view and debug specific lighting features in your scene. The options are:
  • **Nothing**: Shortcut to disable all flags.
  • **Everything**: Shortcut to enable all flags.
  • **Global Illumination**: Indicates whether to render [global illumination](https://docs.unity3d.com/Manual/realtime-gi-using-enlighten.html).
  • **Main Light**: Indicates whether the main directional [Light](../light-component.md) contributes to lighting.
  • **Additional Lights**: Indicates whether lights other than the main directional light contribute to lighting.
  • **Vertex Lighting**: Indicates whether additional lights that use per-vertex lighting contribute to lighting.
  • **Emission**: Indicates whether [emissive](https://docs.unity3d.com/Manual/StandardShaderMaterialParameterEmission.html) materials contribute to lighting.
  • **Ambient Occlusion**: Indicates whether [ambient occlusion](../post-processing-ssao.md) contributes to lighting.
| -### Rendering - -The properties in this section let you visualize different rendering features. - -#### Rendering Debug +### Render Graph -![](../Images/rendering-debugger/rendering-debug.png)
*The Rendering Debug subsection.* +The properties in this section let you change how the [render graph system](../render-graph.md) works. -| **Property** | **Description** | -| ------------------------------ | ------------------------------------------------------------ | -| **Map Overlays** | Specifies which render pipeline texture to overlay on the screen. The options are:
  • **None**: Renders the scene normally without a texture overlay.
  • **Depth**: Overlays the camera's depth texture on the screen.
  • **Additional Lights Shadow Map**: Overlays the [shadow map](https://docs.unity3d.com/Manual/shadow-mapping.html) that contains shadows cast by lights other than the main directional light.
  • **Main Light Shadow Map**: Overlays the shadow map that contains shadows cast by the main directional light.
| -| **  Map Size** | The width and height of the overlay texture as a percentage of the view window URP displays it in. For example, a value of **50** fills up a quarter of the screen (50% of the width and 50% of the height). | -| **HDR** | Indicates whether to use [high dynamic range (HDR)](https://docs.unity3d.com/Manual/HDR.html) to render the scene. Enabling this property only has an effect if you enable **HDR** in your URP Asset. | -| **MSAA** | Indicates whether to use [Multisample Anti-aliasing (MSAA)](./../anti-aliasing.md#msaa) to render the scene. Enabling this property only has an effect if:
  • You set **Anti Aliasing (MSAA)** to a value other than **Disabled** in your URP Asset.
  • You use the Game View. MSAA has no effect in the Scene View.
| -| **Post-processing** | Specifies how URP applies post-processing. The options are:
  • **Disabled**: Disables post-processing.
  • **Auto**: Unity enables or disables post-processing depending on the currently active debug modes. If color changes from post-processing would change the meaning of a debug mode's pixel, Unity disables post-processing. If no debug modes are active, or if color changes from post-processing don't change the meaning of the active debug modes' pixels, Unity enables post-processing.
  • **Enabled**: Applies post-processing to the image that the camera captures.
| -| **Additional Wireframe Modes** | Specifies whether and how to render wireframes for meshes in your scene. The options are:
  • **None**: Doesn't render wireframes.
  • **Wireframe**: Exclusively renders edges for meshes in your scene. In this mode, you can see the wireframe for meshes through the wireframe for closer meshes.
  • **Solid Wireframe**: Exclusively renders edges and faces for meshes in your scene. In this mode, the faces of each wireframe mesh hide edges behind them.
  • **Shaded Wireframe**: Renders edges for meshes as an overlay. In this mode, Unity renders the scene in color and overlays the wireframe over the top.
| -| **Overdraw** | Indicates whether to render the overdraw debug view. This is useful to check where Unity draws pixels over one other. | - -#### Pixel Validation - -![](../Images/rendering-debugger/pixel-validation.png)
*The Pixel Validation subsection.*
+| **Property** | **Description** |
+| --- | --- |
+| **Clear Render Targets At Creation** | Clear render textures the first time the render graph system uses them. |
+| **Clear Render Targets When Freed** | Clear render textures when they're no longer used by the render graph. |
+| **Disable Pass Culling** | Stop URP from culling render passes that have no impact on the final render. |
+| **Immediate Mode** | Force URP to execute passes in the order you set them up. |
+| **Enable Logging** | Enable logging to the **Console** window. |
+| **Log Frame Information** | Log how URP uses the resources during the frame, in the **Console** window. |
+| **Log Resources** | Log the resources URP uses during the frame, in the **Console** window. |
+
+
+
+## Probe Volume panel
+
+These settings let you visualize [Adaptive Probe Volumes](../probevolumes.md) in your Scene, and configure the visualization.
+
+### Subdivision Visualization
+
+| **Property** | **Sub-property** | **Description** |
+|-|-|-|
+| **Display Cells** || Display cells. Refer to [Understanding Adaptive Probe Volumes](../probevolumes-concept.md) for more information. |
+| **Display Bricks** || Display bricks. Refer to [Understanding Adaptive Probe Volumes](../probevolumes-concept.md) for more information. |
+| **Live Subdivision Preview** || Enable a preview of Adaptive Probe Volume data in the scene without baking. This might make the Editor slower. This setting appears only if you select **Display Cells** or **Display Bricks**. |
+|| **Cell Updates Per Frame** | Set the number of cells, bricks, and probe positions to update per frame. Higher values might make the Editor slower. The default value is 4. This property appears only if you enable **Live Subdivision Preview**. |
+|| **Update Frequency** | Set how frequently Unity updates cells, bricks, and probe positions, in seconds. The default value is 1. This property appears only if you enable **Live Subdivision Preview**. |
+| **Debug Draw Distance** || Set how far from the scene camera Unity draws debug visuals for cells and bricks, in meters. The default value is 500. |
+
+### Probe Visualization
+
+| **Property** | **Sub-property** | **Description** |
+|-|-|-|
+| **Display Probes** || Display probes. |
+|| **Probe Shading Mode** | Set what the Rendering Debugger displays. The options are:
  • SH: Display the [spherical harmonics (SH) lighting data](https://docs.unity3d.com/Manual/LightProbes-TechnicalInformation.html) for the final color calculation. The number of bands depends on the **SH Bands** setting in the active [URP Asset](../universalrp-asset.md).
  • SHL0: Display the spherical harmonics (SH) lighting data with only the first band.
  • SHL0L1: Display the spherical harmonics (SH) lighting data with the first two bands.
  • Validity: Display whether probes are valid, based on the number of backfaces the probe samples. Refer to [Fix issues with Adaptive Probe Volumes](../probevolumes-fixissues.md) for more information about probe validity.
  • Probe Validity Over Dilation Threshold: Display red if a probe samples too many backfaces, based on the **Validity Threshold** set in the [Adaptive Probe Volumes panel](../probevolumes-lighting-panel-reference.md). This means the probe can't be baked or sampled.
  • Invalidated By Touchup Volumes: Display probes that a [Probe Adjustment Volume component](../probevolumes-adjustment-volume-component-reference.md) has made invalid.
  • Size: Display a different color for each size of [brick](../probevolumes-concept.md).
| +|| **Debug Size** | Set the size of the displayed probes. The default is 0.3. | +|| **Exposure Compensation** | Set the brightness of the displayed probes. Decrease the value to increase brightness. The default is 0. This property appears only if you set **Probe Shading Mode** to **SH**, **SHL0**, or **SHL0L1**. | +|| **Max Subdivisions Displayed** | Set the lowest probe density to display. For example, set this to 0 to display only the highest probe density. | +|| **Min Subdivisions Displayed** | Set the highest probe density to display. | +| **Debug Probe Sampling** || Display how probes are sampled for a pixel. In the Scene view, in the **Adaptive Probe Volumes** overlay, select **Select Pixel** to change the pixel. | +|| **Debug Size** | Set the size of the **Debug Probe Sampling** display. | +|| **Debug With Sampling Noise** | Enable sampling noise for this debug view. Enabling this gives more accurate information, but makes the information more difficult to read. | +| **Virtual Offset Debug** || Display the offsets Unity applies to Light Probe capture positions. | +|| **Debug Size** | Set the size of the arrows that represent Virtual Offset values. | +| **Debug Draw Distance** || Set how far from the scene camera Unity draws debug visuals for cells and bricks, in meters. The default is 200. | + +### Streaming + +Use the following properties to control how URP streams Adaptive Probe Volumes. Refer to [Streaming Adaptive Probe Volumes](../probevolumes-streaming.md) for more information. -| **Property** | **Description** | -| -------------------------------- | ------------------------------------------------------------ | -| **Pixel Validation Mode** | Specifies which mode Unity uses to validate pixel color values. The options are:
  • **None**: Renders the scene normally and doesn't validate any pixels.
  • **Highlight NaN, Inf and Negative Values**: Highlights pixels that have color values that are NaN, Inf, or negative.
  • **Highlight Values Outside Range**: Highlights pixels that have color values outside a particular range. Use **Value Range Min** and **Value Range Max**.
| -| **  Channels** | Specifies which value to use for the pixel value range validation. The options are:
  • **RGB**: Validates the pixel using the luminance value calculated from the red, green, and blue color channels.
  • **R**: Validates the pixel using the value from the red color channel.
  • **G**: Validates the pixel using the value from the green color channel.
  • **B**: Validates the pixel using the value from the blue color channel.
  • **A**: Validates the pixel using the value from the alpha channel.
This property only appears if you set **Pixel Validation Mode** to **Highlight Values Outside Range**. | -| **   Value Range Min** | The minimum valid color value. Unity highlights color values that are less than this value.

This property only appears if you set **Pixel Validation Mode** to **Highlight Values Outside Range**. | -| **   Value Range Max** | The maximum valid color value. Unity highlights color values that are greater than this value.

This property only appears if you set **Pixel Validation Mode** to **Highlight Values Outside Range**. |
+| **Property** | **Description** |
+| ------------ | --------------- |
+| **Freeze Streaming** | Stop Unity from streaming probe data. |
+| **Display Streaming Score** | If you enable **Display Cells**, this setting darkens cells that have a lower priority for streaming. Cells closer to the camera usually have the highest priority. |
+| **Maximum cell streaming** | Stream as many cells as possible every frame. |
+| **Display Index Fragmentation** | Open an overlay that displays how fragmented the streaming memory is. A green square is an area of used memory. The more spaces between the green squares, the more fragmented the memory. |
+| **Index Fragmentation Rate** | Displays the amount of fragmentation as a numerical value, where 0 is no fragmentation. |
+| **Verbose Log** | Log information about streaming. |

## Navigation at runtime
diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/inject-a-render-pass.md b/Packages/com.unity.render-pipelines.universal/Documentation~/inject-a-render-pass.md
new file mode 100644
index 00000000000..eb34d25f8cb
--- /dev/null
+++ b/Packages/com.unity.render-pipelines.universal/Documentation~/inject-a-render-pass.md
@@ -0,0 +1,11 @@
+# Adding a Scriptable Render Pass to the frame rendering loop
+
+Add the custom render pass to the Universal Render Pipeline (URP) frame rendering loop by creating a Scriptable Renderer Feature, or using the `RenderPipelineManager` API.
+
+|Page|Description|
+|-|-|
+| [Scriptable Renderer Features](renderer-features/scriptable-renderer-features/scriptable-renderer-features-landing.md) | Write a class that inherits `ScriptableRendererFeature`, and use it to create an instance of your custom render pass and insert it into the rendering pipeline. |
+| [Inject a render pass via scripting](customize/inject-render-pass-via-script.md) | Use the `RenderPipelineManager` API to insert a custom render pass into the rendering pipeline. |
+| [Injection points reference](customize/custom-pass-injection-points.md) | URP contains multiple injection points that let you inject render passes at different points in the frame rendering loop. |
+
+
diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/lighting.md b/Packages/com.unity.render-pipelines.universal/Documentation~/lighting.md
index 97a30420719..5bda4738b9e 100644
--- a/Packages/com.unity.render-pipelines.universal/Documentation~/lighting.md
+++ b/Packages/com.unity.render-pipelines.universal/Documentation~/lighting.md
@@ -13,3 +13,15 @@ Areas where the Universal Render Pipeline (URP) differs from Unity's common ligh
For a full comparison of lighting features between Unity's Built-in Render Pipeline and URP, and an up to date list of lighting features that are currently under research, check the [feature comparison chart](universalrp-builtin-feature-comparison.md).
For a general introduction to lighting in Unity and examples of common lighting workflows, refer to the [Lighting section of the Unity Manual](https://docs.unity3d.com/Manual/LightingOverview.html).
+
+## Configure lighting for better performance
+
+Refer to [Configure for better performance](configure-for-better-performance.md) for more information about how to adjust lighting settings for better performance.
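For the `RenderPipelineManager` route that `inject-a-render-pass.md` above lists, the hook looks roughly like the following sketch; the component name and the reuse of the earlier example pass are illustrative assumptions.

```csharp
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;

// Hypothetical example: enqueue a custom pass for each camera without using
// a Scriptable Renderer Feature, by hooking the per-camera callback.
public class InjectPassViaScript : MonoBehaviour
{
    ReadActiveColorPass m_Pass;

    void OnEnable()
    {
        m_Pass = new ReadActiveColorPass();
        RenderPipelineManager.beginCameraRendering += OnBeginCameraRendering;
    }

    void OnDisable()
    {
        RenderPipelineManager.beginCameraRendering -= OnBeginCameraRendering;
    }

    void OnBeginCameraRendering(ScriptableRenderContext context, Camera camera)
    {
        // Queue the pass on the renderer URP uses for this camera.
        camera.GetUniversalAdditionalCameraData().scriptableRenderer.EnqueuePass(m_Pass);
    }
}
```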
## Additional resources

* [Universal Render Pipeline for advanced Unity creators](https://unity.com/resources/introduction-universal-render-pipeline-for-advanced-unity-creators-2022lts)
* [Shedding some light on the Universal Render Pipeline](https://blog.unity.com/engine-platform/shedding-light-on-universal-render-pipeline-for-unity-2021-lts)
* [Optimize your Unity project with URP](https://www.youtube.com/watch?v=NFBr21V0zvU&ab_channel=Unity)
* [Creating Believable Visuals](https://learn.unity.com/tutorial/creating-believable-visuals)
* [Creative Core: Lighting](https://learn.unity.com/project/creative-core-lighting)

diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-adjustment-volume-component-reference.md b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-adjustment-volume-component-reference.md new file mode 100644 index 00000000000..92d36d5f422 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-adjustment-volume-component-reference.md @@ -0,0 +1,82 @@

# Probe Adjustment Volume component reference

Select a [Probe Adjustment Volume Component](probevolumes-fixissues.md#add-a-probe-adjustment-volume-component) and open the Inspector to view its properties.

Refer to the following for more information about using the Probe Adjustment Volume component:

- [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md)
- [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md)

| **Property** | **Description** |
|-|-|
| **Influence Volume** | |
| **Shape** | Set the shape of the Adjustment Volume to either **Box** or **Sphere**. |
| **Size** | Set the size of the Adjustment Volume. This property only appears if you set **Shape** to **Box**. |
| **Radius** | Set the radius of the Adjustment Volume. This property only appears if you set **Shape** to **Sphere**. |
| **Mode** | Select how to override probes inside the Adjustment Volume.<br/>&#8226; **Invalidate Probes**: Mark selected probes as invalid. Refer to [How Light Probe validity works](probevolumes-fixissues.md#how-light-probe-validity-works) for more information.<br/>&#8226; **Override Validity Threshold**: Override the threshold URP uses to determine whether Light Probes are marked as invalid. Refer to [Adjust Dilation](probevolumes-fixissues.md#adjust-dilation) for more information.<br/>&#8226; **Apply Virtual Offset**: Change the position Light Probes use when sampling the lighting in the scene during baking. Refer to [Adjust Virtual Offset](probevolumes-fixissues.md#adjust-virtual-offset) for more information.<br/>&#8226; **Override Virtual Offset Settings**: Override the biases URP uses during baking to determine when Light Probes use Virtual Offset, and to calculate sampling positions. Refer to [Adjust Virtual Offset](probevolumes-fixissues.md#adjust-virtual-offset) for more information.<br/>&#8226; **Intensity Scale**: Override the intensity of probes to brighten or darken affected areas. |
| **Dilation Validity Threshold** | Override the ratio of backfaces a probe samples before URP considers it invalid. This option only appears if you set **Mode** to **Override Validity Threshold**, and you enable Additional Properties. |
| **Virtual Offset Rotation** | Set the rotation angle for the Virtual Offset vector on all probes in the Adjustment Volume. This option only appears if you set **Mode** to **Apply Virtual Offset**. |
| **Virtual Offset Distance** | Set how far URP pushes probes along the Virtual Offset Rotation vector. This option only appears if you set **Mode** to **Apply Virtual Offset**. |
| **Geometry Bias** | Set how far URP pushes a probe's capture point out of geometry after one of its sampling rays hits geometry. This option only appears if you set **Mode** to **Override Virtual Offset Settings**. |
| **Ray Origin Bias** | Override the distance between a probe's center and the point URP uses to determine the origin of that probe's sampling ray. This can push rays beyond nearby geometry if the geometry causes issues. This option only appears if you set **Mode** to **Override Virtual Offset Settings**. |
| **Intensity Scale** | Change the brightness of all probes covered by the Probe Adjustment Volume component. Use this sparingly, because changing the intensity of probe data can lead to inconsistencies in the lighting. This option only appears if you set **Mode** to **Intensity Scale**. |
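You can also add and configure the component from an Editor script. The following is a minimal sketch rather than a confirmed API reference: it assumes the runtime component class is named `ProbeAdjustmentVolume` and that its `mode` and `intensityScale` properties mirror the Inspector properties above, so verify the exact names in your Unity version.

```csharp
using UnityEngine;
using UnityEngine.Rendering;

// Minimal sketch: configure a Probe Adjustment Volume from a script.
// Assumes the runtime component is ProbeAdjustmentVolume, and that the
// 'mode' and 'intensityScale' properties mirror the Inspector properties
// in the table above. Verify the names in your Unity version.
public static class ProbeAdjustmentVolumeSetup
{
    public static void AddIntensityScale(GameObject target, float scale)
    {
        var adjustment = target.AddComponent<ProbeAdjustmentVolume>();

        // Equivalent to setting Mode to Intensity Scale in the Inspector.
        adjustment.mode = ProbeAdjustmentVolume.Mode.IntensityScale;

        // Equivalent to the Intensity Scale property. Use this sparingly,
        // because scaling probe intensity can make lighting inconsistent.
        adjustment.intensityScale = scale;
    }
}
```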
diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-changedensity.md b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-changedensity.md new file mode 100644 index 00000000000..72da58e95e1 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-changedensity.md @@ -0,0 +1,39 @@

# Configure the size and density of Adaptive Probe Volumes

Refer to [Understanding Adaptive Probe Volumes](probevolumes-concept.md) for more information about how Adaptive Probe Volumes work.

## Change the size

To ensure the Universal Render Pipeline (URP) considers static geometry from all loaded scenes when it places Light Probes, set **Mode** to **Global** in the Adaptive Probe Volume Inspector window so the Adaptive Probe Volume covers the entire scene.

You can also do one of the following in the Inspector of an Adaptive Probe Volume to set its size:

- Set **Mode** to **Local** and set the size manually.
- Set **Mode** to **Local** and select **Fit to all Scenes**, **Fit to Scene**, or **Fit to Selection**. Refer to [Adaptive Probe Volume Inspector reference](probevolumes-inspector-reference.md) for more information.
- To exclude certain GameObjects when URP calculates Light Probe positions, enable **Override Renderer Filters**. For more information about Layers, refer to [Layers and Layer Masks](https://docs.unity3d.com/Manual/layers-and-layermasks.html).

You can use multiple Adaptive Probe Volumes in a single scene, and they can overlap. However, in a Baking Set, URP creates only a single Light Probe structure.

## Adjust Light Probe density

You might need to do the following in your project:

- Increase Light Probe density in highly detailed scenes or areas such as interiors, to get a good lighting result.
- Decrease Light Probe density in empty areas, to avoid those areas using disk space and increasing bake time unnecessarily.

In the [Inspector for an Adaptive Probe Volume](probevolumes-inspector-reference.md), enable and adjust **Override Probe Spacing** to set a minimum and maximum density for the Light Probes in the Adaptive Probe Volume.

The values can't exceed the **Min Probe Spacing** or **Max Probe Spacing** values in the **Probe Placement** section of the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md), so you might need to adjust these values first.

You can also add local Adaptive Probe Volumes in different areas with different **Override Probe Spacing** values, to control Light Probe density more granularly. For example, in empty areas, add a local Adaptive Probe Volume with a higher **Override Probe Spacing** minimum value, to make sure Light Probes have a lower density in those areas.

If you increase Light Probe density, you might increase bake time and how much disk space your Adaptive Probe Volume uses.

### Decrease Light Probe density for terrain

Because terrain is detailed but less important than your main scenery or characters, you can do the following:

1. Put terrain on its own [Layer](https://docs.unity3d.com/Manual/layers-and-layermasks.html).
2. Surround the terrain with an Adaptive Probe Volume.
3. In the Inspector for the Adaptive Probe Volume, enable **Override Renderer Filters**, then in **Layer Mask** select only your terrain Layer.
4. To adjust Light Probe density to capture more or less lighting detail, enable **Override Probe Spacing** and adjust the values.
diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-concept.md b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-concept.md new file mode 100644 index 00000000000..cbf04583610 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-concept.md @@ -0,0 +1,60 @@

# Understanding Adaptive Probe Volumes

An Adaptive Probe Volume is a group of [Light Probes](https://docs.unity3d.com/Manual/LightProbes.html) that Unity places automatically based on the geometry density in your Scene, to create baked indirect lighting. You can use Adaptive Probe Volumes instead of manually placing and configuring Light Probes.

## Advantages and limitations

| **Feature** | **Light Probe Groups** | **Adaptive Probe Volumes** |
|---|---|---|
| Selection of surrounding probes | Per GameObject | Per pixel |
| Optimize memory use with streaming | No | Yes |
| Place probes automatically | No | Yes |
| Place probes manually | Yes | No |

Adaptive Probe Volumes have the following advantages:

- Unity samples surrounding probes per-pixel rather than per GameObject. This sampling approach results in better lighting consistency, and fewer seams between adjacent GameObjects.
- You can adjust Light Probe layouts across a scene, for example using a denser set of Light Probes in an interior area with more detailed lighting or geometry. Refer to [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md) for more information.
- Adaptive Probe Volumes work well if you [work with multiple scenes](https://docs.unity3d.com/Manual/MultiSceneEditing.html). Refer to [Baking Sets](probevolumes-concept.md#baking-sets) for more information.
- Adaptive Probe Volumes include [streaming](probevolumes-streaming.md) functionality to support large open worlds.

Adaptive Probe Volumes have the following limitations:

- You can't adjust the locations of Light Probes inside an Adaptive Probe Volume. You can use settings and overrides to try to fix visible artifacts, but it might not be possible to make sure Light Probes follow walls or are at the exact boundary between different lighting areas. Refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) for more information.
- You can't convert [Light Probe Groups](https://docs.unity3d.com/Manual/LightProbes.html) into an Adaptive Probe Volume.

## How Adaptive Probe Volumes work

URP automatically fills an Adaptive Probe Volume with a 3D structure of 'bricks'. Each brick contains 64 Light Probes, arranged in a 4 × 4 × 4 grid.

URP uses bricks with different sizes to match the amount of geometry in different areas of your scene. For example, in areas with more geometry, URP uses small bricks with a short distance between Light Probes. The Light Probes capture lighting at a higher resolution, so lighting is more accurate.

The default Light Probe spacing is 1, 3, 9, or 27 m.

![](Images/probe-volumes/probevolumes-debug-displayprobebricks1.PNG)
In this screenshot from the Rendering Debugger, the small purple bricks contain Light Probes spaced 1 meter apart, to capture data from high-geometry areas. The large blue bricks contain Light Probes spaced 3 meters apart, to capture data from areas with less geometry.

Each pixel of a GameObject samples lighting data from the eight closest Light Probes around it.

You can do the following:

- Use the Rendering Debugger to visualize the layout of bricks and Light Probes. Refer to [Display Adaptive Probe Volumes](probevolumes-showandadjust.md).
- [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md).
- [Add a Volume to your scene](probevolumes-fixissues.md#volume) to adjust which Light Probes GameObjects sample.

## Baking Sets

To store lighting from a scene in an Adaptive Probe Volume, the scene must be part of a Baking Set.

A Baking Set contains the following:

- One or more scenes, which optionally include Adaptive Probe Volumes.
- A single collection of settings.

By default, URP uses **Single Scene** mode, and places each scene in its own Baking Set automatically. However, only one Baking Set can be active at any time, so if you [work with multiple scenes](https://docs.unity3d.com/Manual/MultiSceneEditing.html), you must add these scenes to a single Baking Set if you want to bake them together. Refer to [Bake multiple scenes together with Baking Sets](probevolumes-usebakingsets.md) for more information.

## Additional resources

* [Light Probes](https://docs.unity3d.com/Manual/LightProbes.html)
* [Work with multiple scenes in Unity](https://docs.unity3d.com/Documentation/Manual/MultiSceneEditing.html)

diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-fixissues.md b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-fixissues.md new file mode 100644 index 00000000000..d062868a5e4 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-fixissues.md @@ -0,0 +1,139 @@

# Fix issues with Adaptive Probe Volumes

Adjust settings or use Volume overrides to fix artifacts from Adaptive Probe Volumes.

## How Light Probe validity works

Light Probes inside geometry are called invalid probes. The Universal Render Pipeline (URP) marks a Light Probe as invalid when the probe fires sampling rays to capture surrounding light data, but the rays hit the unlit backfaces inside geometry.

URP uses the following techniques to minimize incorrect lighting data from Light Probes:

- [Virtual Offset](#virtualoffset) tries to make invalid Light Probes valid, by moving their capture points so they're outside any [colliders](https://docs.unity3d.com/Documentation/Manual/CollidersOverview.html).
- [Dilation](#dilation) detects Light Probes that remain invalid after Virtual Offset, and gives them data from valid Light Probes nearby.

You can check which Light Probes are invalid using the [Rendering Debugger](features/rendering-debugger.md).

![](Images/probe-volumes/probevolumes-virtualoffsetvsnot.png)
+In the Scene on the left, Virtual Offset isn't active and dark bands are visible. In the Scene on the right, Virtual Offset is active.
+ +![](Images/probe-volumes/probevolumes-dilationvsnot.png)
+In the Scene on the left, Dilation isn't active and some areas are too dark. In the Scene on the right, Dilation is active.
## Fix dark blotches or streaks

<a name="virtualoffset"></a>
### Adjust Virtual Offset

You can configure **Virtual Offset Settings** in the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md) in the Lighting window. This changes how URP calculates the validity of Light Probes.

You can adjust the following:

- The length of the sampling ray Unity uses to find a valid capture point.
- How far Unity moves a Light Probe's capture position to avoid geometry.
- How far Unity moves the start point of rays.
- How many times a probe's sampling ray hits colliders before Unity considers the probe invalid.

You can also disable Virtual Offset for a Baking Set. Virtual Offset only affects baking time, so disabling Virtual Offset doesn't affect runtime performance.

<a name="dilation"></a>
### Adjust Dilation

You can configure **Probe Dilation Settings** in the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md) in the Lighting window. This changes how URP calculates the validity of Light Probes, and how invalid Light Probes use lighting data from nearby valid Light Probes.

You can adjust the following:

- The percentage of backfaces a Light Probe can sample before URP considers that probe invalid.
- How far away from the invalid probe Unity searches for valid probes to contribute lighting data.
- How many iterations of Dilation URP does during the bake.
- How to weight the data from valid probes based on their spatial relationship with the invalid probe.

[How you adjust Light Probe density](probevolumes-changedensity.md) affects the final results, because URP uses the settings as a multiplier to calculate the distance between probes.

You can also disable Dilation for a Baking Set. Dilation only affects baking time, so disabling Dilation doesn't affect runtime performance.

## Fix light leaks

Light leaks are areas that are too light or dark, often in the corners of a wall or ceiling.

![](Images/probe-volumes/probevolumes-lightleak.JPG)
+A light leak. +
Light leaks often occur when geometry receives light from a Light Probe that isn't visible to the geometry, for example because the Light Probe is on the other side of a wall. Adaptive Probe Volumes use regular grids of Light Probes, so Light Probes might not follow walls or be at the boundary between different lighting areas.

To fix light leaks, you can do the following:

- [Create thicker walls](#thickerwalls).
- [Add a Volume to your scene](#volume).
- [Adjust Baking Set properties](#probevolumesettings).
- [Use a Probe Adjustment Volume](#probevolumeadjustment).

<a name="thickerwalls"></a>
### Create thicker walls

Adjust walls so their width is closer to the distance between probes in the local [brick](probevolumes-concept.md#how-adaptive-probe-volumes-work).

<a name="volume"></a>
### Add a Volume to your scene

You can add a [Volume](volumes-landing-page.md), then add a **Probe Volumes Options** override to the Volume. This adjusts the position that GameObjects use to sample the Light Probes.

1. Add a [Volume](set-up-a-volume.md) to your scene and make sure its area overlaps the camera position.
2. Select **Add Override**, then select **Lighting** > **Probe Volumes Options**.
3. Enable **Normal Bias**, then adjust the value to move the position that GameObject pixels use to sample the Light Probes, along the pixel's surface normal.
4. Enable **View Bias**, then adjust the value to move the position that GameObject pixels use to sample the Light Probes, towards the camera.
5. Disable and enable **Leak Reduction Mode** to check if it improves light leaks.

Volumes only affect the scene if the camera is near or inside the volume. Refer to [Understand volumes](volumes.md) for more information.

Refer to [Probe Volumes Options Override reference](probevolumes-options-override-reference.md) for more information on **Probe Volumes Options** settings.

<a name="probevolumesettings"></a>
### Adjust Baking Set properties

If adding a Volume doesn't work, use the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md) in the Lighting window to adjust Virtual Offset and Dilation settings.

1. In **Probe Dilation Settings**, reduce **Search Radius**. This can help in situations where invalid Light Probes are receiving lighting data from more distant Light Probes. However, a lower **Search Radius** might cause light leaks.
2. In **Virtual Offset Settings**, reduce **Search Distance Multiplier** and **Ray Origin Bias**.
3. If there are light leaks in multiple locations, adjust **Min Probe Spacing** and **Max Probe Spacing** to increase the density of Light Probes.
4. Select **Generate Lighting** to rebake the scene using the new settings.

Note: Don't use very low values for the settings, or Dilation and Virtual Offset might not work.

<a name="probevolumeadjustment"></a>
### Add a Probe Adjustment Volume component

Use a Probe Adjustment Volume component to make Light Probes invalid in a small area. This triggers Dilation during baking, and improves the results of **Leak Reduction Mode** at runtime.

1. In the Adaptive Probe Volume Inspector, select **Add Component**, then select **Light** > **Probe Adjustment Volume**.
2. Set the **Size** so the **Probe Adjustment Volume** area overlaps the Light Probes causing light leaks.
3. Set **Probe Volume Overrides** > **Mode** to **Invalidate Probes**, to invalidate the Light Probes in the Volume.
4. If you have a [Volume with a Probe Volumes Options override](#volume), enable **Leak Reduction Mode**.
5. In **Probe Volume Settings**, select **Generate Lighting** to rebake the scene using the new settings.

Using a Probe Adjustment Volume component solves most light leak issues, but often not all.

If you use many Probe Adjustment Volumes in a scene, your bake will be slower, and your scene might be harder to understand and maintain.

Refer to [Probe Adjustment Volume component reference](probevolumes-adjustment-volume-component-reference.md) for more information.

## Fix seams

Seams are artifacts that appear when one lighting condition transitions immediately into another. Seams occur when two adjacent bricks have different Light Probe densities. Refer to [bricks](probevolumes-concept.md#how-adaptive-probe-volumes-work) for more information.

![](Images/probe-volumes/probevolumes-seams.JPG)
+Two seams. +
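The steps below add the override in the Inspector. As a script-based alternative, the following is a minimal sketch; it assumes the override component class is `ProbeVolumesOptions` with a `samplingNoise` parameter, so check the names against your Unity version.

```csharp
using UnityEngine;
using UnityEngine.Rendering;

// Minimal sketch: create a global Volume and enable Sampling Noise from a
// script, instead of using Add Override in the Inspector. Assumes the
// override component is ProbeVolumesOptions with a 'samplingNoise'
// parameter. Verify the names in your Unity version.
public static class SeamFixVolumeSetup
{
    public static void CreateSamplingNoiseVolume(float noise)
    {
        var go = new GameObject("Probe Volumes Options Volume");
        var volume = go.AddComponent<Volume>();

        // A global Volume always overlaps the camera position.
        volume.isGlobal = true;

        // Accessing 'profile' creates an instance profile if none is assigned.
        var options = volume.profile.Add<ProbeVolumesOptions>();

        // Equivalent to enabling Sampling Noise and setting its value.
        options.samplingNoise.Override(noise);
    }
}
```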
To fix seams, do the following:

1. Add a [Volume](set-up-a-volume.md) to your scene and make sure its area overlaps the position of the camera.
2. Select **Add Override**, then select **Lighting** > **Probe Volumes Options**.
3. Enable **Sampling Noise**, then try adjusting the value to add noise and make the transition more diffuse. Noise can help break up noticeable edges in indirect lighting at brick boundaries.

## Additional resources

* [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md)
* [Adaptive Probe Volumes panel reference](probevolumes-lighting-panel-reference.md)
* [Probe Volumes Options Override reference](probevolumes-options-override-reference.md)
* [Probe Adjustment Volume component reference](probevolumes-adjustment-volume-component-reference.md)

diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-inspector-reference.md b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-inspector-reference.md new file mode 100644 index 00000000000..5e825b992cd --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-inspector-reference.md @@ -0,0 +1,69 @@

# Adaptive Probe Volume Inspector reference

Select an Adaptive Probe Volume and open the Inspector to view its properties.

| **Property** | **Description** |
|-|-|
| **Mode** | |
| &nbsp;&nbsp;**Global** | URP sizes this Adaptive Probe Volume to include all renderers in the scene or Baking Set that have **Contribute Global Illumination** enabled in their Mesh Renderer component. URP recalculates the volume size every time you save or generate lighting. |
| &nbsp;&nbsp;**Scene** | URP sizes this Adaptive Probe Volume to include all renderers in the same scene as this Adaptive Probe Volume. URP recalculates the volume size every time you save or generate lighting. |
| &nbsp;&nbsp;**Local** | Set the size of this Adaptive Probe Volume manually. |
| **Size** | Set the size of this Adaptive Probe Volume. This setting only appears when you set **Mode** to **Local**. |
| **Subdivision Override** | |
| **Override Probe Spacing** | Override the probe spacing set in the Baking Set for this Adaptive Probe Volume. The values can't exceed the **Min Probe Spacing** and **Max Probe Spacing** values in the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md) in the Lighting window. |
| **Geometry Settings** | |
| **Override Renderer Filters** | Enable filtering by Layer to determine which GameObjects URP considers when it generates probe positions. Use this to exclude certain GameObjects from contributing to Adaptive Probe Volume lighting. |
| **Layer Mask** | Filter by Layer to determine which GameObjects URP considers when it generates probe positions. |
| **Min Renderer Size** | The smallest Renderer size URP considers when it generates probe positions. |
| **Fill Empty Spaces** | Enable to make URP fill the empty space between and around Renderers with bricks. Bricks in empty spaces always use the **Max Probe Spacing** value. |
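You can also create and size an Adaptive Probe Volume from a script. The following is a minimal sketch; it assumes the runtime component is `ProbeVolume` with `mode` and `size` properties that mirror the Inspector properties above, so verify the names in your Unity version.

```csharp
using UnityEngine;
using UnityEngine.Rendering;

// Minimal sketch: create a local Adaptive Probe Volume from a script.
// Assumes the runtime component is ProbeVolume, with 'mode' and 'size'
// properties that mirror the Inspector properties in the table above.
// Verify the names in your Unity version.
public static class AdaptiveProbeVolumeSetup
{
    public static ProbeVolume CreateLocalVolume(Vector3 position, Vector3 size)
    {
        var go = new GameObject("Adaptive Probe Volume");
        go.transform.position = position;

        var volume = go.AddComponent<ProbeVolume>();
        volume.mode = ProbeVolume.Mode.Local; // Size the volume manually, as with Mode > Local.
        volume.size = size;                   // Corresponds to the Size property, not the Transform.
        return volume;
    }
}
```

Note that the sketch sets the assumed `size` property rather than scaling the Transform, which matches the Inspector behavior described in the next section.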
+ +## Size gizmo + +To resize the Adaptive Probe Volume, use one of the handles of the box gizmo in the Scene view. You can't resize an Adaptive Probe Volume by changing the Transform component of the GameObject, or using the scale gizmo. + +In this screenshot, a red box indicates the box gizmo handles. + +![](Images/probe-volumes/ProbeVolume-Size-gizmo.png)
The resize handles for Adaptive Probe Volumes.

diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-lighting-panel-reference.md b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-lighting-panel-reference.md new file mode 100644 index 00000000000..2451376091a --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-lighting-panel-reference.md @@ -0,0 +1,144 @@

# Adaptive Probe Volumes panel properties

This page explains the properties in the **Adaptive Probe Volumes** panel in Lighting settings. To open the panel, from the main menu select **Window** > **Rendering** > **Lighting** > **Adaptive Probe Volumes**.

## Baking

To open Baking Set properties, either select the Baking Set asset in the Project window, or from the main menu select **Window** > **Rendering** > **Lighting** > **Adaptive Probe Volumes** tab.

### Baking

| **Property** | **Description** |
|-|-|
| **Baking Mode** | |
| &nbsp;&nbsp;**Single Scene** | Use only the active scene to calculate the lighting data in Adaptive Probe Volumes. |
| &nbsp;&nbsp;**Baking Set** | Use the scenes in this Baking Set to calculate the lighting data in Adaptive Probe Volumes. |
| **Current Baking Set** | The current Baking Set asset. |
| **Scenes in Baking Set** | Lists the scenes in the current Baking Set.<br/>**Status**: Indicates whether the scene is loaded.<br/>**Bake**: When enabled, URP generates lighting for this scene.<br/>Use **+** and **-** to add or remove a scene from the active Baking Set.<br/>Use the two-line icon to the left of each scene to drag the scene up or down in the list. |
### Probe Placement

| **Property** | **Description** |
|-|-|
| **Probe Positions** | |
| **Min Probe Spacing** | The minimum distance between probes, in meters. Refer to [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md) for more information. |
| **Max Probe Spacing** | The maximum distance between probes, in meters. Refer to [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md) for more information. |
| **Renderer Filter Settings** | |
| **Layer Mask** | Specify the Layers URP considers when it generates probe positions. Select a Layer to enable or disable it. |
| **Min Renderer Size** | The smallest Renderer size URP considers when it places probes. |
## Probe Invalidity Settings

| **Property** | **Description** |
|-|-|
| **Probe Dilation Settings** | |
| **Enable Dilation** | When enabled, URP replaces data in invalid probes with data from nearby valid probes. Enabled by default. Refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md). |
| **Search Radius** | Determine how far from an invalid probe URP searches for valid neighbors. Higher values include more distant probes that might be in different lighting conditions than the invalid probe, resulting in unwanted behaviors such as light leaks. |
| **Validity Threshold** | Set the ratio of backfaces a probe samples before URP considers it invalid. Higher values mean URP is more likely to mark a probe invalid. |
| **Dilation Iterations** | Set the number of times Unity repeats the dilation calculation. This increases the spread of the dilation effect, but increases the time URP needs to calculate probe lighting. |
| **Squared Distance Weighting** | Enable weighting the contribution of neighboring probes by squared distance, rather than linear distance. Probes that are closer to invalid probes contribute more to the lighting data. |
| **Virtual Offset Settings** | |
| **Enable Virtual Offset** | When enabled, URP moves the capture point of invalid probes into a valid area. Refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md). |
| **Search Distance Multiplier** | Set the length of the sampling ray URP uses to search for valid probe positions. High values might cause unwanted results, such as probe capture points pushing through neighboring geometry. |
| **Geometry Bias** | Set how far URP pushes a probe's capture point out of geometry after one of its sampling rays hits geometry. |
| **Ray Origin Bias** | Set the distance between a probe's center and the point URP uses as the origin of each sampling ray. High values might cause unwanted results, such as rays missing nearby occluding geometry. |
| **Layer Mask** | Specify which Layers URP includes in collision calculations for [Virtual Offset](probevolumes-fixissues.md). |
| **Refresh Virtual Offset Debug** | Re-run the Virtual Offset simulation to preview updated results, without affecting baked data. |
### Adaptive Probe Volume Disk Usage

| **Property** | **Description** |
|-|-|
| **Scenario Size** | Indicates how much space on disk is used by the baked Light Probe data. |
| **Baking Set Size** | Indicates how much space on disk is used by all the baked Light Probe data for the currently selected Baking Set. |

diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-options-override-reference.md b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-options-override-reference.md new file mode 100644 index 00000000000..02808340c43 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-options-override-reference.md @@ -0,0 +1,18 @@

# Probe Volumes Options Override reference

To add a Probe Volumes Options Override, do the following:

1. Add a [Volume](set-up-a-volume.md) to your Scene and make sure its area overlaps the position of the camera.
2. Select **Add Override**, then select **Lighting** > **Probe Volumes Options**.

Refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) for more information about using the Probe Volumes Options Override.

| **Property** | **Description** |
|-|-|
| **Normal Bias** | Enable to move the position used by shaded pixels when sampling Light Probes. The value is in meters. This affects how sampling is moved along the pixel's surface normal. |
| **View Bias** | Enable to move the sampling position towards the camera when sampling Light Probes. The results of **View Bias** vary depending on the camera position. The value is in meters. |
| **Scale Bias with Min Probe Distance** | Scale the **Normal Bias** or **View Bias** so it's proportional to the spacing between Light Probes in a [brick](probevolumes-concept.md#how-adaptive-probe-volumes-work). |
| **Sampling Noise** | Enable to increase or decrease the amount of noise URP adds to the position used by shaded pixels when sampling Light Probes. This can help [fix seams](probevolumes-fixissues.md#fix-seams) between bricks. |
| **Animate Sampling Noise** | Enable to animate sampling noise when Temporal Anti-Aliasing (TAA) is enabled. This can make noise patterns less visible. |
| **Leak Reduction Mode** | Enable to choose the method Unity uses to reduce leaks. Refer to [Fix light leaks](probevolumes-fixissues.md#fix-light-leaks).<br/>Options:<br/>&#8226; **Validity and Normal Based**: Prevent invalid Light Probes from contributing to the lighting result, and give some Light Probes more weight than others based on the GameObject pixel's sampling position.<br/>&#8226; **None**: No leak reduction. |
| **Min Valid Dot Product Value** | Enable to make URP reduce a Light Probe's influence on a GameObject if the direction towards the Light Probe is too different from the GameObject's surface normal direction. The value is the minimum [dot product](https://docs.unity3d.com/ScriptReference/Vector3.Dot.html) between the two directions at which URP reduces the Light Probe's influence. |

diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-showandadjust.md b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-showandadjust.md new file mode 100644 index 00000000000..9e6235f0394 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-showandadjust.md @@ -0,0 +1,37 @@

# Display Adaptive Probe Volumes

You can use the Rendering Debugger to check how URP places Light Probes in an Adaptive Probe Volume, then use Adaptive Probe Volume settings to configure the layout.

## Display Adaptive Probe Volumes

To display Adaptive Probe Volumes, open the [Rendering Debugger](features/rendering-debugger.md) and select the **Probe Volume** tab.

You can do the following:

- Enable **Probe Visualization** > **Display Probes** to display the locations of Light Probes and the lighting they store.
- Enable **Subdivision Visualization** > **Display Bricks** to display the outlines of groups of Light Probes ('bricks'). Refer to [Understanding Adaptive Probe Volumes](probevolumes-concept.md#how-adaptive-probe-volumes-work) for more information on bricks.
- Enable **Subdivision Visualization** > **Display Cells** to display the outlines of cells, which are groups of bricks used for [streaming](probevolumes-streaming.md).
- Enable **Subdivision Visualization** > **Debug Probe Sampling** to display how neighboring Light Probes influence a chosen position. Select a surface to display the weights URP uses to sample nearby Light Probes.

If the Rendering Debugger displays invalid probes when you select **Display Probes**, refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md).

![](Images/probe-volumes/probevolumes-debug-displayprobes.PNG)
+The Rendering Debugger with **Display Probes** enabled. + +![](Images/probe-volumes/probevolumes-debug-displayprobebricks1.PNG)
+The Rendering Debugger with **Display Bricks** enabled. + +![](Images/probe-volumes/probevolumes-debug-displayprobecells.PNG)
+The Rendering Debugger with **Display Cells** enabled. + +![](Images/probe-volumes/APVsamplingDebug.png)
The Rendering Debugger with **Debug Probe Sampling** enabled.

Refer to [Rendering Debugger](features/rendering-debugger.md) for more information.

## Additional resources

* [Configure the size and density of an Adaptive Probe Volume](probevolumes-changedensity.md)
* [Adaptive Probe Volumes panel reference](probevolumes-lighting-panel-reference.md)
* [Probe Volumes Options Override reference](probevolumes-options-override-reference.md)
* [Probe Adjustment Volume component reference](probevolumes-adjustment-volume-component-reference.md)

diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-streaming.md b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-streaming.md new file mode 100644 index 00000000000..9b65f2be42e --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-streaming.md @@ -0,0 +1,30 @@

# Streaming Adaptive Probe Volumes

You can enable Adaptive Probe Volume streaming to provide Adaptive Probe Volume lighting in very large worlds. Using streaming means you can bake Adaptive Probe Volume data larger than available CPU or GPU memory, and load it at runtime when it's needed. At runtime, as your camera moves, the Universal Render Pipeline (URP) loads only Adaptive Probe Volume data from cells within the camera's view frustum.

You can enable and disable streaming for different [URP quality levels](birp-onboarding/quality-settings-location.md).

## Enable streaming

To enable streaming, do the following:

1. From the main menu, select **Edit** > **Project Settings** > **Quality**.
2. Select a Quality Level.
3. Double-click the **Render Pipeline Asset** to open it in the Inspector.
4. Expand **Lighting**.
5. Enable **Enable Streaming** to stream from CPU memory to GPU memory.

You can configure streaming settings in the same window. Refer to [URP Asset](universalrp-asset.md) for more information.

## Debug streaming

The smallest section URP loads and uses is a cell, which is the same size as the largest [brick](probevolumes-concept.md) in an Adaptive Probe Volume. You can influence the size of cells in an Adaptive Probe Volume by [adjusting the density of Light Probes](probevolumes-changedensity.md).

To view the cells in an Adaptive Probe Volume, or debug streaming, use the [Rendering Debugger](features/rendering-debugger.md).

![](Images/probe-volumes/probevolumes-debug-displayprobecells.PNG)
The Rendering Debugger with **Display Cells** enabled.

## Additional resources

* [Understanding Adaptive Probe Volumes](probevolumes-concept.md)

diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-use.md b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-use.md new file mode 100644 index 00000000000..c2317bf3204 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-use.md @@ -0,0 +1,52 @@

# Use Adaptive Probe Volumes

This page provides the basic workflow you need to use Adaptive Probe Volumes in your project.

## Add and bake an Adaptive Probe Volume

### Enable Adaptive Probe Volumes

1. From the main menu, select **Edit** > **Project Settings** > **Quality**.
2. In the **Rendering** section, double-click the active **Render Pipeline Asset** to open it in the Inspector window.
3. In the **Lighting** section, set **Light Probe System** to **Adaptive Probe Volumes**.

### Add an Adaptive Probe Volume to the Scene

1. From the main menu, select **GameObject** > **Light** > **Adaptive Probe Volumes** > **Adaptive Probe Volume**.
2. In the Inspector for the Adaptive Probe Volume, set **Mode** to **Global** to make this Adaptive Probe Volume cover your entire Scene.

### Adjust your Light and Mesh Renderer settings

1. To include a Light in an Adaptive Probe Volume's baked lighting data, open the Inspector for the Light, then set the **Light Mode** to **Mixed** or **Baked**.
2. To include a GameObject in an Adaptive Probe Volume's baked lighting data, open the Inspector for the GameObject and enable **Contribute Global Illumination**.
3. To make a GameObject receive baked lighting, open the Inspector for the GameObject and set **Receive Global Illumination** to **Light Probes**.

### Bake your lighting

1. From the main menu, select **Window** > **Rendering** > **Lighting**.
2. Select the **Adaptive Probe Volumes** panel.
3. Set **Baking Mode** to **Single Scene**.
4. Select **Generate Lighting**.

If no scene in the Baking Set contains an Adaptive Probe Volume, Unity asks if you want to create an Adaptive Probe Volume automatically.

You can change baking settings in the Lighting window's [Lightmapping Settings](https://docs.unity3d.com/Documentation/Manual/class-LightingSettings.html#LightmappingSettings).

Refer to [Bake multiple scenes together with Baking Sets](probevolumes-usebakingsets.md) for more information about Baking Sets.

If there are visual artifacts in baked lighting, such as dark blotches or light leaks, refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md).

## Configure an Adaptive Probe Volume

You can use the following to configure an Adaptive Probe Volume:

- Use the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md) in the Lighting window to change the probe spacing and behavior in all the Adaptive Probe Volumes in a Baking Set.
- Use the settings in the [Adaptive Probe Volume Inspector window](probevolumes-inspector-reference.md) to change the Adaptive Probe Volume size and probe density.
- Add a [Probe Adjustment Volume component](probevolumes-adjustment-volume-component-reference.md) to the Adaptive Probe Volume, to make probes invalid in a small area or fix other lighting issues.
- Add a [Volume](set-up-a-volume.md) to your scene with a [Probe Volumes Options Override](probevolumes-options-override-reference.md), to change the way URP samples Adaptive Probe Volume data when the camera is inside the volume. This doesn't affect baking.

## Additional resources

- [Bake multiple scenes together with Baking Sets](probevolumes-usebakingsets.md)
- [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md)
- [Work with multiple Scenes in Unity](https://docs.unity3d.com/Documentation/Manual/MultiSceneEditing.html)

diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-usebakingsets.md b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-usebakingsets.md new file mode 100644 index 00000000000..6e9d2eaeab7 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-usebakingsets.md @@ -0,0 +1,34 @@

# Bake multiple scenes together with Baking Sets

If you [load multiple scenes simultaneously](https://docs.unity3d.com/Documentation/Manual/MultiSceneEditing.html) in your project, for example if you load multiple scenes at the same time in an open world game, you can add the scenes to a single Baking Set so you can bake the lighting for all the scenes together.

Refer to [Understanding Adaptive Probe Volumes](probevolumes-concept.md#baking-sets) for more information about Baking Sets.

## Create a Baking Set

To place multiple scenes in a single Baking Set and bake them together, follow these steps:

1. From the main menu, select **Window** > **Rendering** > **Lighting**.
2. Set **Baking Mode** to **Baking Set**.
3. In **Current Baking Set**, select an existing Baking Set asset, or select **New** to create a new Baking Set.
4. Use the **Add** (**+**) button to add scenes.

You can only add each scene to a single Baking Set.

To remove a scene from a Baking Set, select the scene in the **Scenes in Baking Set** list, then select the **Remove** (**-**) button.

## Bake a Baking Set

Select **Generate Lighting** to bake the lighting in all the scenes in a Baking Set.

The Universal Render Pipeline (URP) uses the settings from the Baking Set, and serializes the results in the `Assets` folder, in a subfolder with the same name as the active scene. You can move or rename the folder.

For faster iteration times, disable **Bake** next to a scene name. This stops Unity from baking lighting data for this scene. This might result in incomplete data, but it can help reduce baking time when you're iterating on parts of a large world.

### Load a scene

Unity doesn't automatically load the scenes in a Baking Set when you select the scene in the **Scenes** list. To load a scene, select **Load Baking Set**.

When you load multiple scenes together, the lighting might be too bright because URP combines light from all the scenes. Refer to [Set up multiple Scenes](https://docs.unity3d.com/Manual/setupmultiplescenes.html) for more information on loading and unloading Scenes.

You can load multiple scenes together only if they belong to the same Baking Set.
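You can also trigger the bake from an Editor script, for example as part of a build or CI pipeline. The following is a minimal sketch; it assumes that a standard lighting bake started through `UnityEditor.Lightmapping` also generates the Adaptive Probe Volume data for the active Baking Set, as selecting **Generate Lighting** does.

```csharp
using UnityEditor;
using UnityEngine;

// Minimal sketch: start a lighting bake from an Editor script, as an
// alternative to selecting Generate Lighting in the Lighting window.
// Assumes Adaptive Probe Volume data is generated as part of the
// standard lighting bake for the active Baking Set.
public static class BakeBakingSet
{
    [MenuItem("Tools/Bake Lighting For Active Baking Set")]
    static void Bake()
    {
        // BakeAsync starts an asynchronous bake and returns false if a
        // bake could not be started (for example, one is already running).
        if (!Lightmapping.BakeAsync())
            Debug.LogWarning("Could not start the lighting bake.");
    }
}
```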
diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes.md b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes.md new file mode 100644 index 00000000000..e8c0dded249 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes.md @@ -0,0 +1,24 @@

# Adaptive Probe Volumes (APV)

Adaptive Probe Volumes make [Light Probes](https://docs.unity3d.com/Manual/LightProbes.html) easier to use by automating placement. They also provide higher quality, more accurate lighting, because they light per pixel rather than per object.

| Topic | Description |
|--------------------------|-------------------------------------------------------------|
| [Understanding Adaptive Probe Volumes](probevolumes-concept.md) | The purpose of Adaptive Probe Volumes and what you can do with them. |
| [Use Adaptive Probe Volumes](probevolumes-use.md) | Add Adaptive Probe Volumes to your project and configure them. |
| [Display Adaptive Probe Volumes](probevolumes-showandadjust.md) | Visualize the structure of Adaptive Probe Volumes. |
| [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md) | Change the size of an Adaptive Probe Volume, or increase the density of Light Probes. |
| [Bake multiple scenes together with Baking Sets](probevolumes-usebakingsets.md) | Add scenes to a Baking Set so you can bake the lighting for all the scenes together. |
| [Streaming Adaptive Probe Volumes](probevolumes-streaming.md) | How Adaptive Probe Volumes stream lighting data to provide lighting for large open worlds. |
| [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) | Reduce light leaks and seams in your lighting result. |
| [Adaptive Probe Volume Inspector window reference](probevolumes-inspector-reference.md) | Reference for the Adaptive Probe Volume Inspector window. |
| [Adaptive Probe Volumes panel reference](probevolumes-lighting-panel-reference.md) | Reference for the Adaptive Probe Volumes panel in the Lighting settings. |
| [Probe Volumes Options Override reference](probevolumes-options-override-reference.md) | Reference for the Probe Volumes Options Override. |
| [Probe Adjustment Volume component reference](probevolumes-adjustment-volume-component-reference.md) | Reference for the Probe Adjustment Volume component. |

## Additional resources

* [Light Probes](https://docs.unity3d.com/Manual/LightProbes.html)
* [Light Probes for moving objects](https://docs.unity3d.com/Manual/LightProbes-MovingObjects.html)
* [Light Probe Group](https://docs.unity3d.com/Manual/class-LightProbeGroup.html)
* [Rendering Debugger](features/rendering-debugger.md)

diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-create-a-texture.md b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-create-a-texture.md new file mode 100644 index 00000000000..736843d71a3 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-create-a-texture.md @@ -0,0 +1,96 @@

# Create a render graph system texture

You can create a render graph texture in a custom render pass. You can then [read from or write to the texture](render-graph-read-write-texture.md).

To reduce the memory and bandwidth that render passes use, URP might not create a texture when it optimizes the render graph, if the final frame doesn't use the texture.
Refer to [Introduction to the render graph system](render-graph-introduction.md) for more information. + +If you need to use a texture in multiple frames or on multiple cameras, for example a texture asset you imported in your project, refer to [Import a texture into the render graph system](render-graph-import-a-texture.md). + +## Create a texture + +To create a texture, in the `RecordRenderGraph` method of your `ScriptableRenderPass` class, follow these steps: + +1. Create a [`RenderTextureDescriptor`](https://docs.unity3d.com/ScriptReference/RenderTextureDescriptor.html) object with the texture properties you need. +2. Use the [`UniversalRenderer.CreateRenderGraphTexture`](xref:UnityEngine.Rendering.Universal.UniversalRenderer.CreateRenderGraphTexture(UnityEngine.Experimental.Rendering.RenderGraphModule.RenderGraph,UnityEngine.RenderTextureDescriptor,System.String,System.Boolean,UnityEngine.FilterMode,UnityEngine.TextureWrapMode)) method to create a texture and return a texture handle. + +For example, the following creates a texture the same size as the screen. + +```csharp +RenderTextureDescriptor textureProperties = new RenderTextureDescriptor(Screen.width, Screen.height, RenderTextureFormat.Default, 0); +TextureHandle textureHandle = UniversalRenderer.CreateRenderGraphTexture(renderGraph, textureProperties, "My texture", false); +``` + +You can then [use the texture](render-graph-read-write-texture.md) in the same custom render pass. + +Only the current camera can access the texture. To access the texture somewhere else, for example from another camera or in custom rendering code, [import a texture](render-graph-import-a-texture.md) instead. + +The render graph system manages the lifetime of textures you create with `CreateRenderGraphTexture`, so you don't need to manually release the memory they use when you're finished with them. + +### Example + +The following Scriptable Renderer Feature contains an example render pass that creates a texture and clears it to yellow. Refer to [Inject a pass using a Scriptable Renderer Feature](renderer-features/scriptable-renderer-features/inject-a-pass-using-a-scriptable-renderer-feature.md#add-renderer-feature-to-asset) for instructions on how to add the render pass to a project. + +Use the [Frame Debugger](https://docs.unity3d.com/2023.3/Documentation/Manual/frame-debugger-window.html) to check the texture the render pass adds. 
```csharp
using UnityEngine;
using UnityEngine.Rendering.Universal;
using UnityEngine.Rendering.RenderGraphModule;
using UnityEngine.Rendering;

public class CreateYellowTextureFeature : ScriptableRendererFeature
{
    CreateYellowTexture customPass;

    public override void Create()
    {
        customPass = new CreateYellowTexture();
        customPass.renderPassEvent = RenderPassEvent.AfterRenderingPostProcessing;
    }

    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
        renderer.EnqueuePass(customPass);
    }

    class CreateYellowTexture : ScriptableRenderPass
    {
        class PassData
        {
            internal TextureHandle cameraColorTexture;
        }

        public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameContext)
        {
            using (var builder = renderGraph.AddRasterRenderPass<PassData>("Create yellow texture", out var passData))
            {
                // Create texture properties that match the screen size
                RenderTextureDescriptor textureProperties = new RenderTextureDescriptor(Screen.width, Screen.height, RenderTextureFormat.Default, 0);

                // Create a temporary texture
                TextureHandle texture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, textureProperties, "My texture", false);

                // Set the texture as the render target
                builder.SetRenderAttachment(texture, 0, AccessFlags.Write);

                builder.AllowPassCulling(false);

                builder.SetRenderFunc((PassData data, RasterGraphContext context) => ExecutePass(data, context));
            }
        }

        static void ExecutePass(PassData data, RasterGraphContext context)
        {
            // Clear the render target to yellow
            context.cmd.ClearRenderTarget(true, true, Color.yellow);
        }
    }

}
```

## Additional resources

* [Import a texture into the render graph system](render-graph-import-a-texture.md)
* [Use frame data](accessing-frame-data.md)
* [Textures](https://docs.unity3d.com/Manual/Textures.html)

diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-draw-objects-in-a-pass.md b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-draw-objects-in-a-pass.md new file mode 100644 index 00000000000..99893708826 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-draw-objects-in-a-pass.md @@ -0,0 +1,176 @@

# Draw objects in a render pass

To draw objects in a custom render pass that uses the render graph system, use the `RendererListHandle` API to create a list of objects to draw.

## Create a list of objects to draw

Follow these steps:

1. In your `ScriptableRenderPass` class, in the class you use for pass data, create a [`RendererListHandle`](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@17.0/api/UnityEngine.Rendering.RenderGraphModule.RendererListHandle.html) field.

   For example:

   ```csharp
   private class PassData
   {
       public RendererListHandle objectsToDraw;
   }
   ```

2. Create a [RendererListParams](https://docs.unity3d.com/ScriptReference/Rendering.RendererListParams.html) object that contains the objects to draw, drawing settings, and culling data. Refer to [Creating a simple render loop in a custom render pipeline](https://docs.unity3d.com/Manual/srp-creating-simple-render-loop.html) for more information.

   Refer to [Example](#example) below for a detailed example.
3. In the `RecordRenderGraph` method, use the [`CreateRendererList` API](https://docs.unity3d.com/ScriptReference/Rendering.ScriptableRenderContext.CreateRendererList.html) to convert the `RendererListParams` object to a handle that the render graph system can use.

   For example:

   ```csharp
   RendererListHandle rendererListHandle = renderGraph.CreateRendererList(rendererListParameters);
   ```

4. Set the `RendererListHandle` field in the pass data.

   For example:

   ```csharp
   passData.objectsToDraw = rendererListHandle;
   ```

## Draw the objects

After you set a `RendererListHandle` in the pass data, you can draw the objects in the list.

Follow these steps:

1. In the `RecordRenderGraph` method, tell the render graph system to use the list of objects, using the `UseRendererList` API.

   For example:

   ```csharp
   builder.UseRendererList(passData.objectsToDraw);
   ```

2. Set the texture to draw the objects onto. Set both the color texture and the depth texture so URP renders the objects correctly.

   For example, the following tells URP to draw to the color texture and depth texture of the active camera texture.

   ```csharp
   UniversalResourceData frameData = frameContext.Get<UniversalResourceData>();
   builder.SetRenderAttachment(frameData.activeColorTexture, 0);
   builder.SetRenderAttachmentDepth(frameData.activeDepthTexture, AccessFlags.Write);
   ```

3. In your `SetRenderFunc` method, draw the renderers using the [`DrawRendererList`](https://docs.unity3d.com/ScriptReference/Rendering.CommandBuffer.DrawRendererList.html) API.

   For example:

   ```csharp
   context.cmd.DrawRendererList(passData.objectsToDraw);
   ```

## Example

The following Scriptable Renderer Feature redraws the objects in the scene that have their `LightMode` tag set to `UniversalForward`, using an override material.

After you [add this Scriptable Renderer Feature to the renderer](renderer-features/scriptable-renderer-features/inject-a-pass-using-a-scriptable-renderer-feature.md), set the **Material To Use** parameter to any material.
```csharp
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
using UnityEngine.Rendering.RenderGraphModule;

public class DrawObjectsWithOverrideMaterial : ScriptableRendererFeature
{

    DrawObjectsPass drawObjectsPass;
    public Material overrideMaterial;

    public override void Create()
    {
        // Create the render pass that draws the objects, and pass in the override material
        drawObjectsPass = new DrawObjectsPass(overrideMaterial);

        // Insert render passes after URP's post-processing render pass
        drawObjectsPass.renderPassEvent = RenderPassEvent.AfterRenderingPostProcessing;
    }

    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
        // Add the render pass to the URP rendering loop
        renderer.EnqueuePass(drawObjectsPass);
    }

    class DrawObjectsPass : ScriptableRenderPass
    {
        private Material materialToUse;

        public DrawObjectsPass(Material overrideMaterial)
        {
            // Set the pass's local copy of the override material
            materialToUse = overrideMaterial;
        }

        private class PassData
        {
            // Create a field to store the list of objects to draw
            public RendererListHandle rendererListHandle;
        }

        public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameContext)
        {
            using (var builder = renderGraph.AddRasterRenderPass<PassData>("Redraw objects", out var passData))
            {
                // Get the data needed to create the list of objects to draw
                UniversalRenderingData renderingData = frameContext.Get<UniversalRenderingData>();
                UniversalCameraData cameraData = frameContext.Get<UniversalCameraData>();
                UniversalLightData lightData = frameContext.Get<UniversalLightData>();
                SortingCriteria sortFlags = cameraData.defaultOpaqueSortFlags;
                RenderQueueRange renderQueueRange = RenderQueueRange.opaque;
                FilteringSettings filterSettings = new FilteringSettings(renderQueueRange, ~0);

                // Redraw only objects that have their LightMode tag set to UniversalForward
                ShaderTagId shadersToOverride = new ShaderTagId("UniversalForward");

                // Create drawing settings
                DrawingSettings drawSettings = RenderingUtils.CreateDrawingSettings(shadersToOverride, renderingData, cameraData, lightData, sortFlags);

                // Add the override material to the drawing settings
                drawSettings.overrideMaterial = materialToUse;

                // Create the list of objects to draw
                var rendererListParameters = new RendererListParams(renderingData.cullResults, drawSettings, filterSettings);

                // Convert the list to a list handle that the render graph system can use
                passData.rendererListHandle = renderGraph.CreateRendererList(rendererListParameters);

                // Set the render target as the color and depth textures of the active camera texture
                UniversalResourceData resourceData = frameContext.Get<UniversalResourceData>();
                builder.UseRendererList(passData.rendererListHandle);
                builder.SetRenderAttachment(resourceData.activeColorTexture, 0);
                builder.SetRenderAttachmentDepth(resourceData.activeDepthTexture, AccessFlags.Write);

                builder.SetRenderFunc((PassData data, RasterGraphContext context) => ExecutePass(data, context));
            }
        }

        static void ExecutePass(PassData data, RasterGraphContext context)
        {
            // Clear the render target to black
            context.cmd.ClearRenderTarget(true, true, Color.black);

            // Draw the objects in the list
            context.cmd.DrawRendererList(data.rendererListHandle);
        }

    }

}
```

## Additional resources

- [Use textures](working-with-textures.md)
- [Using frame data](accessing-frame-data.md)
- [Scriptable Renderer Features](renderer-features/scriptable-renderer-features/scriptable-renderer-features-landing.md)
Features](renderer-features/scriptable-renderer-features/scriptable-renderer-features-landing.md) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-import-a-texture.md b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-import-a-texture.md new file mode 100644 index 00000000000..208fd1d11ef --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-import-a-texture.md @@ -0,0 +1,176 @@ +# Import a texture into the render graph system + +When you [create a render graph system texture](render-graph-create-a-texture.md) in a render pass, the render graph system handles the creation and disposal of the texture. This process means the texture might not exist in the next frame, and other cameras might not be able to use it. + +To make sure a texture is available across frames and cameras, you can import it into the render graph system using the `ImportTexture` API. + +You might also need to import a texture if you use a texture created outside the render graph system. For example, you can create a render texture that points to a texture in your project, such as a [texture asset](https://docs.unity3d.com/Manual/ImportingTextures.html), and use it as the input to a render pass. + +The render graph system doesn't manage the lifetime of imported textures. As a result, the following applies: + +- You must [dispose of the imported render texture](#dispose-of-the-render-texture) to free up the memory it uses when you're finished with it. +- URP can't cull render passes that use imported textures. As a result, rendering might be slower. + +Refer to [Using the RTHandle system](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@17.0/manual/rthandle-system-using.html) in the SRP Core manual for more information about the `RTHandle` API. + +## Import a texture + +To import a texture, in the `RecordRenderGraph` method of your `ScriptableRenderPass` class, follow these steps: + +1. Create a render texture handle using the [RTHandle](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@17.0/api/UnityEngine.Rendering.RTHandle.html) API. + + For example: + + ```csharp + private RTHandle renderTextureHandle; + ``` + +2. Create a [RenderTextureDescriptor](https://docs.unity3d.com/ScriptReference/RenderTextureDescriptor.html) object with the texture properties you need. + + For example: + + ```csharp + RenderTextureDescriptor textureProperties = new RenderTextureDescriptor(Screen.width, Screen.height, RenderTextureFormat.Default, 0); + ``` + +3. Use the [ReAllocateIfNeeded](xref:UnityEngine.Rendering.Universal.RenderingUtils.ReAllocateIfNeeded(UnityEngine.Rendering.RTHandle@,UnityEngine.RenderTextureDescriptor@,UnityEngine.FilterMode,UnityEngine.TextureWrapMode,System.Boolean,System.Int32,System.Single,System.String)) method to create a render texture and attach it to the render texture handle. This method creates a render texture only if the render texture handle is null, or the render texture has different properties to the render texture descriptor. + + For example: + + ```csharp + RenderingUtils.ReAllocateIfNeeded(ref renderTextureHandle, textureProperties, FilterMode.Bilinear, TextureWrapMode.Clamp, name: "My render texture" ); + ``` + +4. Import the texture, to convert the `RTHandle` object to a `TextureHandle` object that the render graph system can use. 
+ + For example: + + ```csharp + TextureHandle texture = renderGraph.ImportTexture(renderTextureHandle); + ``` + +You can then use the `TextureHandle` object to [read from or write to the render texture](render-graph-read-write-texture.md). + +## Import a texture from your project + +To import a texture from your project, such as an imported texture attached to a material, follow these steps: + +1. Use the [`RTHandles.Alloc`](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@17.0/api/UnityEngine.Rendering.RTHandles.html#UnityEngine_Rendering_RTHandles_Alloc_UnityEngine_RenderTexture_) API to create a render texture handle from the external texture. + + For example: + + ```csharp + RTHandle renderTexture = RTHandles.Alloc(texture); + ``` + +2. Import the texture, to convert the `RTHandle` object to a `TextureHandle` object that the render graph system can use. + + For example: + + ```csharp + TextureHandle textureHandle = renderGraph.ImportTexture(renderTexture); + ``` + +You can then use the `TextureHandle` object to [read from or write to the render texture](render-graph-read-write-texture.md). + +## Dispose of the render texture + +You must free the memory a render texture uses at the end of a render pass, using the `Dispose` method. + +```csharp +public void Dispose() +{ + renderTexture.Release(); +} +``` + +## Example + +The following Scriptable Renderer Feature contains an example render pass that copies a texture asset to a temporary texture. To use this example, follow these steps: + +1. Refer to [Inject a pass using a Scriptable Renderer Feature](renderer-features/scriptable-renderer-features/inject-a-pass-using-a-scriptable-renderer-feature.md#add-renderer-feature-to-asset) for instructions on how to add this render pass to a URP Asset. +2. In the Inspector window of the URP Asset, add a texture to the **Texture To Use** property. +3. Use the [Frame Debugger](https://docs.unity3d.com/2023.3/Documentation/Manual/frame-debugger-window.html) to check the texture the render pass adds. 
+ +```csharp +using UnityEngine; +using UnityEngine.Rendering.Universal; +using UnityEngine.Rendering.RenderGraphModule; +using UnityEngine.Rendering; + +public class BlitFromExternalTexture : ScriptableRendererFeature +{ + // The texture to use as input + public Texture2D textureToUse; + + BlitFromTexture customPass; + + public override void Create() + { + // Create an instance of the render pass, and pass in the input texture + customPass = new BlitFromTexture(textureToUse); + + customPass.renderPassEvent = RenderPassEvent.AfterRenderingPostProcessing; + } + + public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData) + { + renderer.EnqueuePass(customPass); + } + + class BlitFromTexture : ScriptableRenderPass + { + class PassData + { + internal TextureHandle textureToRead; + } + + private Texture2D texturePassedIn; + + public BlitFromTexture(Texture2D textureIn) + { + // In the render pass's constructor, set the input texture + texturePassedIn = textureIn; + } + + public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameContext) + { + using (var builder = renderGraph.AddRasterRenderPass<PassData>("Copy texture", out var passData)) + { + // Create a temporary texture and set it as the render target + RenderTextureDescriptor textureProperties = new RenderTextureDescriptor(Screen.width, Screen.height, RenderTextureFormat.Default, 0); + TextureHandle texture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, textureProperties, "My texture", false); + builder.SetRenderAttachment(texture, 0, AccessFlags.Write); + + // Create a render texture from the input texture + RTHandle rtHandle = RTHandles.Alloc(texturePassedIn); + + // Create a texture handle that the render graph system can use + TextureHandle textureToRead = renderGraph.ImportTexture(rtHandle); + + // Add the texture to the pass data + passData.textureToRead = textureToRead; + + // Set the texture as readable + builder.UseTexture(passData.textureToRead, AccessFlags.Read); + + builder.AllowPassCulling(false); + + builder.SetRenderFunc((PassData data, RasterGraphContext context) => ExecutePass(data, context)); + } + } + + static void ExecutePass(PassData data, RasterGraphContext context) + { + // Copy the imported texture to the render target + Blitter.BlitTexture(context.cmd, data.textureToRead, new Vector4(0.8f,0.6f,0,0), 0, false); + } + } +} +``` + +## Additional resources + +* [Textures](https://docs.unity3d.com/Manual/Textures.html) +* [Render Texture assets](https://docs.unity3d.com/Manual/class-RenderTexture.html) +* [Custom Render Texture assets](https://docs.unity3d.com/Manual/class-CustomRenderTexture.html) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-introduction.md b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-introduction.md new file mode 100644 index 00000000000..49b1ba4f881 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-introduction.md @@ -0,0 +1,33 @@ +# Introduction to the render graph system + +The render graph system is a set of APIs you use to write a [Scriptable Render Pass](renderer-features/intro-to-scriptable-render-passes.md) in the Universal Render Pipeline (URP). + +When you use the render graph API to create a Scriptable Render Pass, you tell URP the following: + +1. The textures or render textures to use. This stage is the recording stage. +2. 
The graphics commands to execute, using the textures or render textures from the recording stage. This stage is the execution stage. + +You can then [add your Scriptable Render Pass to the URP renderer](renderer-features/custom-rendering-pass-workflow-in-urp.md). Your Scriptable Render Pass becomes part of URP's internal render graph, which is the sequence of render passes URP steps through each frame. URP automatically optimizes your render pass and the render graph to minimize the number of render passes, and the memory and bandwidth the render passes use. + +## How URP optimizes rendering + +URP does the following to optimize rendering in the render graph: + +- Merges multiple render passes into a single render pass. +- Avoids allocating resources the frame doesn't use. +- Avoids executing render passes if the final frame doesn't use their output. +- Avoids duplicating resources, for example by replacing two textures that have the same properties with a single texture. +- Automatically synchronizes the compute and graphics GPU command queues. + +On mobile platforms that use tile-based deferred rendering (TBDR), URP can also merge multiple render passes into a single native render pass. A native render pass keeps textures in tile memory, rather than copying textures from the GPU to the CPU. As a result, URP uses less memory bandwidth and rendering time. + +To check how URP optimizes rendering in your custom render passes, refer to [Analyze a render graph](render-graph-view.md). + +## Additional resources + +- [Use frame data](accessing-frame-data.md) +- [Transfer a texture between render passes](render-graph-pass-textures-between-passes.md) +- [Inject a render pass via scripting](customize/inject-render-pass-via-script.md) +- [Inject a render pass using a Scriptable Renderer Feature](renderer-features/scriptable-renderer-features/inject-a-pass-using-a-scriptable-renderer-feature.md) +- [The render graph system](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@17.0/manual/render-graph-system.html) in the Scriptable Render Pipeline (SRP) Core manual. +- [CommandBuffer.BeginRenderPass](https://docs.unity3d.com/2023.3/Documentation/ScriptReference/Rendering.CommandBuffer.BeginRenderPass.html) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-pass-textures-between-passes.md b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-pass-textures-between-passes.md new file mode 100644 index 00000000000..d8865b330d2 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-pass-textures-between-passes.md @@ -0,0 +1,227 @@ +# Transfer a texture between render passes + +You can transfer a texture between render passes, for example if you need to create a texture in one render pass and read it in a later render pass. + +Use the following methods to transfer textures between render passes: + +- [Add a texture to the frame data](#add-a-texture-to-the-frame-data) +- [Set a texture as a global texture](#set-a-texture-as-a-global-texture) + +You can also store the texture outside the render passes, for example as a `TextureHandle` in a [Scriptable Renderer Feature](renderer-features/scriptable-renderer-features/scriptable-renderer-features-landing.md), as shown in the sketch below. + +If you need to make sure a texture is available across multiple frames, or that multiple cameras can access it, refer to [Import a texture into the render graph system](render-graph-import-a-texture.md) instead.
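For the renderer feature approach, the following is a minimal sketch of how two passes can share a handle through their parent feature. The class and member names here (`SharedTextureFeature`, `sharedTexture`, the producer and consumer passes, and the delegates that publish and fetch the handle) are hypothetical, and the stored `TextureHandle` is only valid during the frame that creates it.

```csharp
using System;
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
using UnityEngine.Rendering.RenderGraphModule;

public class SharedTextureFeature : ScriptableRendererFeature
{
    // Holds the handle for the current frame only. TextureHandle values are
    // transient, so don't reuse this field across frames.
    TextureHandle sharedTexture;

    ProducerPass producerPass;
    ConsumerPass consumerPass;

    public override void Create()
    {
        // The producer stores the handle in the field; the consumer reads it back
        producerPass = new ProducerPass(handle => sharedTexture = handle);
        consumerPass = new ConsumerPass(() => sharedTexture);
        producerPass.renderPassEvent = RenderPassEvent.AfterRenderingOpaques;
        consumerPass.renderPassEvent = RenderPassEvent.AfterRenderingOpaques;
    }

    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
    {
        // URP records the passes in enqueue order, so the producer runs first
        renderer.EnqueuePass(producerPass);
        renderer.EnqueuePass(consumerPass);
    }

    class ProducerPass : ScriptableRenderPass
    {
        readonly Action<TextureHandle> publishTexture;
        public ProducerPass(Action<TextureHandle> publish) => publishTexture = publish;

        class PassData { }

        public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameContext)
        {
            using (var builder = renderGraph.AddRasterRenderPass<PassData>("Produce shared texture", out var passData))
            {
                // Create a texture and store its handle on the renderer feature
                var textureProperties = new RenderTextureDescriptor(Screen.width, Screen.height, RenderTextureFormat.Default, 0);
                TextureHandle texture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, textureProperties, "Shared texture", false);
                publishTexture(texture);

                builder.SetRenderAttachment(texture, 0, AccessFlags.Write);
                builder.AllowPassCulling(false);
                builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
                    context.cmd.ClearRenderTarget(true, true, Color.green));
            }
        }
    }

    class ConsumerPass : ScriptableRenderPass
    {
        readonly Func<TextureHandle> fetchTexture;
        public ConsumerPass(Func<TextureHandle> fetch) => fetchTexture = fetch;

        class PassData
        {
            internal TextureHandle source;
        }

        public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameContext)
        {
            using (var builder = renderGraph.AddRasterRenderPass<PassData>("Consume shared texture", out var passData))
            {
                // Fetch the handle the producer pass stored earlier in the same frame
                passData.source = fetchTexture();
                builder.UseTexture(passData.source, AccessFlags.Read);

                UniversalResourceData resourceData = frameContext.Get<UniversalResourceData>();
                builder.SetRenderAttachment(resourceData.activeColorTexture, 0, AccessFlags.Write);
                builder.AllowPassCulling(false);
                builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
                    Blitter.BlitTexture(context.cmd, data.source, new Vector4(1, 1, 0, 0), 0, false));
            }
        }
    }
}
```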
+ +## Add a texture to the frame data + +You can add a texture to the [frame data](accessing-frame-data.md) so you can fetch the texture in a later render pass. + +Follow these steps: + +1. Create a class that inherits [`ContextItem`](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@17.0/api/UnityEngine.Rendering.ContextItem.html) and contains a texture handle field. + + For example: + + ```csharp + public class MyCustomData : ContextItem { + public TextureHandle textureToTransfer; + } + ``` + +2. Implement the `Reset()` method in your class, to reset the texture when the frame resets. + + For example: + + ```csharp + public class MyCustomData : ContextItem { + public TextureHandle textureToTransfer; + + public override void Reset() + { + textureToTransfer = TextureHandle.nullHandle; + } + } + ``` + +3. In your `RecordRenderGraph` method, add an instance of your class to the frame data. + + For example: + + ```csharp + public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameContext) + { + using (var builder = renderGraph.AddRasterRenderPass<PassData>("Get frame data", out var passData)) + { + UniversalResourceData resourceData = frameContext.Get<UniversalResourceData>(); + MyCustomData customData = frameContext.Create<MyCustomData>(); + } + } + ``` + +4. Set the texture handle to your texture. + + For example: + + ```csharp + // Create texture properties that match the screen + RenderTextureDescriptor textureProperties = new RenderTextureDescriptor(Screen.width, Screen.height, RenderTextureFormat.Default, 0); + + // Create the texture + TextureHandle texture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, textureProperties, "My texture", false); + + // Set the texture in the custom data instance + customData.textureToTransfer = texture; + ``` + +In a later render pass, in your `RecordRenderGraph` method, you can get your custom data and fetch your texture. For example: + +```csharp +// Get the custom data +MyCustomData fetchedData = frameContext.Get<MyCustomData>(); + +// Get the texture +TextureHandle customTexture = fetchedData.textureToTransfer; +``` + +Refer to [Use frame data](accessing-frame-data.md) for more information about frame data. + +### Example + +The following example adds a `CustomData` class that contains a texture. The first render pass clears the texture to yellow, and the second render pass fetches the yellow texture and draws a triangle onto it. 
+ +```csharp +using UnityEngine; +using UnityEngine.Rendering.Universal; +using UnityEngine.Rendering.RenderGraphModule; +using UnityEngine.Rendering; + +public class AddOwnTextureToFrameData : ScriptableRendererFeature +{ + AddOwnTexturePass customPass1; + DrawTrianglePass customPass2; + + public override void Create() + { + customPass1 = new AddOwnTexturePass(); + customPass2 = new DrawTrianglePass(); + + customPass1.renderPassEvent = RenderPassEvent.AfterRenderingOpaques; + customPass2.renderPassEvent = RenderPassEvent.AfterRenderingOpaques; + } + + public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData) + { + renderer.EnqueuePass(customPass1); + renderer.EnqueuePass(customPass2); + } + + // Create the first render pass, which creates a texture and adds it to the frame data + class AddOwnTexturePass : ScriptableRenderPass + { + + class PassData + { + internal TextureHandle copySourceTexture; + } + + // Create the custom data class that contains the new texture + public class CustomData : ContextItem { + public TextureHandle newTextureForFrameData; + + public override void Reset() + { + newTextureForFrameData = TextureHandle.nullHandle; + } + } + + public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameContext) + { + using (var builder = renderGraph.AddRasterRenderPass<PassData>("Create new texture", out var passData)) + { + // Create a texture and set it as the render target + RenderTextureDescriptor textureProperties = new RenderTextureDescriptor(Screen.width, Screen.height, RenderTextureFormat.Default, 0); + TextureHandle texture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, textureProperties, "My texture", false); + CustomData customData = frameContext.Create<CustomData>(); + customData.newTextureForFrameData = texture; + builder.SetRenderAttachment(texture, 0, AccessFlags.Write); + + builder.AllowPassCulling(false); + + builder.SetRenderFunc((PassData data, RasterGraphContext context) => ExecutePass(data, context)); + } + } + + static void ExecutePass(PassData data, RasterGraphContext context) + { + // Clear the render target (the texture) to yellow + context.cmd.ClearRenderTarget(true, true, Color.yellow); + } + + } + + // Create the second render pass, which fetches the texture and writes to it + class DrawTrianglePass : ScriptableRenderPass + { + + class PassData + { + // No local pass data needed + } + + public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameContext) + { + using (var builder = renderGraph.AddRasterRenderPass<PassData>("Fetch texture and draw triangle", out var passData)) + { + // Fetch the yellow texture from the frame data and set it as the render target + var customData = frameContext.Get<AddOwnTexturePass.CustomData>(); + var customTexture = customData.newTextureForFrameData; + builder.SetRenderAttachment(customTexture, 0, AccessFlags.Write); + + builder.AllowPassCulling(false); + + builder.SetRenderFunc((PassData data, RasterGraphContext context) => ExecutePass(data, context)); + } + } + + static void ExecutePass(PassData data, RasterGraphContext context) + { + // Generate a triangle mesh + Mesh mesh = new Mesh(); + mesh.vertices = new Vector3[] { new Vector3(0, 0, 0), new Vector3(1, 0, 0), new Vector3(0, 1, 0) }; + mesh.triangles = new int[] { 0, 1, 2 }; + + // Draw a triangle to the render target (the yellow texture) + context.cmd.DrawMesh(mesh, Matrix4x4.identity, new Material(Shader.Find("Universal Render Pipeline/Unlit"))); + } + } +} +``` + +## Set a texture as a global texture + +If you need to 
use a texture as the input for the shader on a GameObject, you can set a texture as a global texture. A global texture is available to all shaders and render passes. + +Setting a texture as a global texture can make rendering slower. Refer to [SetGlobalTexture](https://docs.unity3d.com/ScriptReference/Shader.SetGlobalTexture.html) for more information. + +Don't use an [unsafe render pass](render-graph-unsafe-pass.md) and `CommandBuffer.SetGlobalTexture` to set a texture as a global texture, because it might cause errors. + +To set a global texture, in the `RecordRenderGraph` method, use the [`SetGlobalTextureAfterPass`](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@17.0/api/UnityEngine.Rendering.RenderGraphModule.IBaseRenderGraphBuilder.html#UnityEngine_Rendering_RenderGraphModule_IBaseRenderGraphBuilder_SetGlobalTextureAfterPass_UnityEngine_Rendering_RenderGraphModule_TextureHandle__System_Int32_) method. + +For example: + +```csharp +// Get the ID of a global shader texture property called _GlobalTexture +private int globalTextureID = Shader.PropertyToID("_GlobalTexture"); + +using (var builder = renderGraph.AddRasterRenderPass<PassData>("MyPass", out var passData)){ + + // Set the texture as a global texture after the render pass runs + builder.SetGlobalTextureAfterPass(texture, globalTextureID); +} +``` + +You can now: + +- Access the texture in a different render pass, using the `UseGlobalTexture()` or `UseAllGlobalTextures()` API. +- Use the texture on any material in your scene. URP automatically uses the `UseAllGlobalTextures()` API to enable this. diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-read-write-texture.md b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-read-write-texture.md new file mode 100644 index 00000000000..84f32ff8908 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-read-write-texture.md @@ -0,0 +1,88 @@ +# Use a texture in a render pass + +You can use the render graph system API to set a texture as an input or output for a custom render pass, so you can read from or write to it. + +You can't both read from and write to the same texture in a render pass. Refer to [Change the render target during a render pass](#change-the-render-target-during-a-render-pass) for more information. + +## Set a texture as an input + +To set a texture as an input for a custom render pass, follow these steps: + +1. In the `RecordRenderGraph` method, add a texture handle field to the data your pass uses. + + For example: + + ```csharp + // Create the data your pass uses + public class MyPassData + { + // Add a texture handle + public TextureHandle textureToUse; + } + ``` + +2. Set the texture handle to the texture you want to use. + + For example: + + ```csharp + // Add the texture handle to the data + RenderTextureDescriptor textureProperties = new RenderTextureDescriptor(Screen.width, Screen.height, RenderTextureFormat.Default, 0); + TextureHandle textureHandle = UniversalRenderer.CreateRenderGraphTexture(renderGraph, textureProperties, "My texture", false); + passData.textureToUse = textureHandle; + ``` + +3. Call the [`UseTexture`](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@17.0/api/UnityEngine.Rendering.RenderGraphModule.IBaseRenderGraphBuilder.html#UnityEngine_Rendering_RenderGraphModule_IBaseRenderGraphBuilder_UseTexture_UnityEngine_Rendering_RenderGraphModule_TextureHandle__UnityEngine_Rendering_RenderGraphModule_AccessFlags_) method to set the texture as an input. 
+ + For example: + + ```csharp + builder.UseTexture(passData.textureToUse, AccessFlags.Read); + ``` + +In your `SetRenderFunc` method, you can now use the `TextureHandle` object in the pass data as an input for APIs such as `Blitter.BlitTexture`. + +## Set a texture as the render target + +To set a texture as the output for commands such as `Blit`, in the `RecordRenderGraph` method, use the [`SetRenderAttachment`](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@17.0/api/UnityEngine.Rendering.RenderGraphModule.IRasterRenderGraphBuilder.html#UnityEngine_Rendering_RenderGraphModule_IRasterRenderGraphBuilder_SetRenderAttachment_UnityEngine_Rendering_RenderGraphModule_TextureHandle_System_Int32_UnityEngine_Rendering_RenderGraphModule_AccessFlags_) method. The `SetRenderAttachment` method sets the texture as write-only by default. + +For example, the following command creates a temporary texture and sets it as the render target for the render pass: + +```csharp +// Create texture properties +RenderTextureDescriptor textureProperties = new RenderTextureDescriptor(Screen.width, Screen.height, RenderTextureFormat.Default, 0); + +// Create the texture +TextureHandle targetTexture = UniversalRenderer.CreateRenderGraphTexture(renderGraph, textureProperties, "My texture", false); + +// Set the texture as the render target +// The second parameter is the index the shader uses to access the texture +builder.SetRenderAttachment(targetTexture, 0); +``` + +In your `SetRenderFunc` method, you can now write to the texture using APIs such as `Blitter.BlitTexture`. + +You don't need to add the texture to your pass data. The render graph system sets up the texture for you automatically before it executes the render pass. + +If you need to draw objects to the render target, refer to [Draw objects in a render pass](render-graph-draw-objects-in-a-pass.md) for additional information. + + +## Change the render target during a render pass + +You can't change which texture URP writes to during a render graph system render pass. + +You can do either of the following instead: + +- Create a second custom render pass, and use `builder.SetRenderAttachment` during the second render pass to change the render target. +- Use the `AddUnsafePass` API so you can use the `SetRenderTarget` API in the `SetRenderFunc` method. Refer to [Use Compatibility Mode APIs in render graph render passes](render-graph-unsafe-pass.md) for more information and an example. + +You can use these methods to read from and write to the same texture, by first copying from the texture to a temporary texture you create, then copying back, as in the sketch at the end of this page. + +If you blit between several textures with different properties, rendering might be slow because URP can't merge the blits into a single native render pass. Use the `AddUnsafePass` API and the `SetRenderTarget()` method instead. + +### Examples + +Refer to the following: + +- [Write a render pass](render-graph-write-render-pass.md) for an example that creates a texture then blits to it. +- [Import a texture into the render graph system](render-graph-import-a-texture.md) for an example that sets a temporary texture as the render target.
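The following is a minimal sketch of the copy-then-copy-back pattern described above, using two raster render passes recorded in one `RecordRenderGraph` method. The `PingPongPass` class and the pass names are hypothetical, and a real effect would typically blit with a material in one of the two passes instead of making plain copies.

```csharp
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;
using UnityEngine.Rendering.RenderGraphModule;

// Hypothetical pass that reads and writes the camera color texture by copying
// through a temporary texture.
public class PingPongPass : ScriptableRenderPass
{
    class PassData
    {
        internal TextureHandle source;
    }

    static void ExecutePass(PassData data, RasterGraphContext context)
    {
        // Copy the source texture to the current render target
        Blitter.BlitTexture(context.cmd, data.source, new Vector4(1, 1, 0, 0), 0, false);
    }

    public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameContext)
    {
        UniversalResourceData resourceData = frameContext.Get<UniversalResourceData>();
        UniversalCameraData cameraData = frameContext.Get<UniversalCameraData>();

        // Create a temporary texture that matches the camera color target
        RenderTextureDescriptor desc = cameraData.cameraTargetDescriptor;
        desc.msaaSamples = 1;
        desc.depthBufferBits = 0;
        TextureHandle temporary = UniversalRenderer.CreateRenderGraphTexture(renderGraph, desc, "Ping-pong texture", false);

        // First pass: read the camera color texture and write the copy to the temporary texture
        using (var builder = renderGraph.AddRasterRenderPass<PassData>("Copy to temporary", out var passData))
        {
            passData.source = resourceData.activeColorTexture;
            builder.UseTexture(passData.source, AccessFlags.Read);
            builder.SetRenderAttachment(temporary, 0, AccessFlags.Write);
            builder.SetRenderFunc((PassData data, RasterGraphContext context) => ExecutePass(data, context));
        }

        // Second pass: read the temporary copy and write back to the camera color texture
        using (var builder = renderGraph.AddRasterRenderPass<PassData>("Copy back", out var passData))
        {
            passData.source = temporary;
            builder.UseTexture(passData.source, AccessFlags.Read);
            builder.SetRenderAttachment(resourceData.activeColorTexture, 0, AccessFlags.Write);
            builder.SetRenderFunc((PassData data, RasterGraphContext context) => ExecutePass(data, context));
        }
    }
}
```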
diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-unsafe-pass.md b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-unsafe-pass.md new file mode 100644 index 00000000000..418b255b85f --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-unsafe-pass.md @@ -0,0 +1,126 @@ +# Use Compatibility Mode APIs in render graph render passes + +You can use the render graph `AddUnsafePass` API to use Compatibility Mode APIs such as `SetRenderTarget` in render graph system render passes. + +If you use the `AddUnsafePass` API, the following applies: + +- You can't use the `SetRenderAttachment` method in the `RecordRenderGraph` method. Use `SetRenderTarget` in the `SetRenderFunc` method instead. +- Rendering might be slower because URP can't optimize the render pass. For example, if your render pass writes to the active color buffer, URP can't detect if a later render pass writes to the same buffer. As a result, URP can't merge the two render passes, and the GPU transfers the buffer in and out of memory unnecessarily. + +## Create an unsafe render pass + +To create an unsafe render pass, follow these steps: + +1. In your `RecordRenderGraph` method, use the `AddUnsafePass` method instead of the `AddRasterRenderPass` method. + + For example: + + ```csharp + using (var builder = renderGraph.AddUnsafePass<PassData>("My unsafe render pass", out var passData)) + ``` + +2. When you call the `SetRenderFunc` method, use the `UnsafeGraphContext` type instead of `RasterGraphContext`. + + For example: + + ```csharp + builder.SetRenderFunc( + (PassData passData, UnsafeGraphContext context) => ExecutePass(passData, context) + ); + ``` + +3. If your render pass writes to a texture, you must add the texture as a field in your pass data class. + + For example: + + ```csharp + private class PassData + { + internal TextureHandle textureToWriteTo; + } + ``` + +4. If your render pass writes to a texture, you must also set the texture as writeable using the `UseTexture` method. + + For example: + + ```csharp + builder.UseTexture(passData.textureToWriteTo, AccessFlags.Write); + ``` + +You can now use Compatibility Mode APIs in your `SetRenderFunc` method. + +## Example + +The following example uses the Compatibility Mode `SetRenderTarget` API to set the render target to the active color buffer during the render pass, then draws objects using their surface normals as colors. 
+ +```csharp +using UnityEngine; +using UnityEngine.Rendering.RenderGraphModule; +using UnityEngine.Rendering; +using UnityEngine.Rendering.Universal; + +public class DrawNormalsToActiveColorTexture : ScriptableRendererFeature +{ + + DrawNormalsPass unsafePass; + + public override void Create() + { + unsafePass = new DrawNormalsPass(); + unsafePass.renderPassEvent = RenderPassEvent.AfterRenderingPostProcessing; + } + + public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData) + { + renderer.EnqueuePass(unsafePass); + } + + class DrawNormalsPass : ScriptableRenderPass + { + private class PassData + { + internal TextureHandle activeColorBuffer; + internal TextureHandle cameraNormalsTexture; + } + + public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameContext) + { + using (var builder = renderGraph.AddUnsafePass<PassData>("Draw normals", out var passData)) + { + // Make sure URP generates the normals texture + ConfigureInput(ScriptableRenderPassInput.Normal); + + // Get the frame data + UniversalResourceData resourceData = frameContext.Get<UniversalResourceData>(); + + // Add the active color buffer to our pass data, and set it as writeable + passData.activeColorBuffer = resourceData.activeColorTexture; + builder.UseTexture(passData.activeColorBuffer, AccessFlags.Write); + + // Add the camera normals texture to our pass data + passData.cameraNormalsTexture = resourceData.cameraNormalsTexture; + builder.UseTexture(passData.cameraNormalsTexture); + + builder.AllowPassCulling(false); + + builder.SetRenderFunc((PassData data, UnsafeGraphContext context) => ExecutePass(data, context)); + } + } + + static void ExecutePass(PassData passData, UnsafeGraphContext context) + { + // Create a command buffer for a list of rendering methods + CommandBuffer unsafeCommandBuffer = CommandBufferHelpers.GetNativeCommandBuffer(context.cmd); + + // Add a command to set the render target to the active color buffer so URP draws to it + context.cmd.SetRenderTarget(passData.activeColorBuffer); + + // Add a command to copy the camera normals texture to the render target + Blitter.BlitTexture(unsafeCommandBuffer, passData.cameraNormalsTexture, new Vector4(1, 1, 0, 0), 0, false); + } + + } + +} +``` diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-view.md b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-view.md new file mode 100644 index 00000000000..18b47083c5e --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-view.md @@ -0,0 +1,85 @@ +# Analyze a render graph + +There are several ways to analyze a render graph: + +- [Use the Render Graph Viewer](#use-the-render-graph-viewer) +- [Use the Rendering Debugger](#use-the-rendering-debugger) +- [Use the Frame Debugger](#use-the-frame-debugger) + +## Use the Render Graph Viewer + +To open the **Render Graph Viewer** window, go to **Window > Analysis > Render Graph Viewer**. + +The Render Graph Viewer window displays a render graph, which is the optimized sequence of render passes the Universal Render Pipeline (URP) steps through each frame. The Render Graph Viewer displays both built-in render passes and any [custom render passes](renderer-features/scriptable-render-passes.md) you create. + +Refer to [Render Graph Viewer window reference](render-graph-viewer-reference.md) for more information on the Render Graph Viewer. 
+ +### View a render graph + +The **Render Graph Viewer** window displays the render graph for the current scene by default. To select another render graph, use the dropdown in the toolbar. + +#### Example: check how URP uses a resource + +You can use the resource access blocks next to a resource name to check how the render passes use the resource. + +![Render Graph Viewer example](Images/render-graph-viewer.png) + +In the previous example, the `_MainLightShadowmapTexture` texture goes through the following stages: + +1. During the first five render passes between **InitFrame** and **SetupCameraProperties**, the texture doesn't exist. + +2. The **Main Light Shadowmap** render pass creates the texture as a global texture, and has write-only access to it. Refer to [Transfer textures between passes](render-graph-pass-textures-between-passes.md) for more information about global textures. + + The blue merge bar below **Main Light Shadowmap** means URP merged **Main Light Shadowmap**, **Additional Lights Shadowmap** and **SetupCameraProperties** into a single render pass. + +3. The next five render passes don't have access to the texture. + +4. The first **Draw Objects** render pass has read-only access to the texture. + +5. The next two render passes don't have access to the texture. + +6. The second **Draw Objects** render pass has read-only access to the texture. + +### Check how URP optimized a render pass + +To check the details of a render pass, for example to find out why it's not a native render pass or a merged pass, do either of the following: + +- Select the render pass name to display the details in the Pass List. +- Below the render pass name, hover your cursor over the gray, blue, or flashing blue resource access overview block. + +Refer to [Render Graph Viewer window reference](render-graph-viewer-reference.md) for more information. + +## Use the Rendering Debugger + +You can use the Rendering Debugger to log the resources URP uses and how it uses them, in the **Console** window. + +To enable logging, follow these steps: + +1. Select **Window > Analysis > Rendering Debugger** to open the **Rendering Debugger** window. +2. In the left pane, select the **Render Graph** tab. +3. Enable **Enable Logging**. +4. Select either **Log Frame Information** to log how URP uses resources, or **Log Resources** to log details about the resources. +5. Select the new item in the **Console** window to display the full log. + +Refer to [Rendering Debugger](features/rendering-debugger.md) for more information. + +## Use the Frame Debugger + +Use the [Frame Debugger](https://docs.unity3d.com/2023.3/Documentation/Manual/frame-debugger-window.html) to check the render passes and draw calls in the rendering loop. + +The Frame Debugger displays the following in the [Event Hierarchy panel](https://docs.unity3d.com/Manual/frame-debugger-window-event-hierarchy.html) when the render graph system is active: + +- A parent rendering event called **ExecuteRenderGraph**. +- Child rendering events called **(RP <render-pass>:<subpass>)**, where `<render-pass>` is the render pass number and `<subpass>` is the subpass number. + +The Frame Debugger shows only render passes that contain a draw call. + +Refer to [Frame Debugger](https://docs.unity3d.com/2023.3/Documentation/Manual/frame-debugger-window.html) for more information. 
+ +## Additional resources + +- [Render graph system](render-graph.md) +- [Rendering in the Universal Render Pipeline](rendering-in-universalrp.md) +- [Frame Debugger](https://docs.unity3d.com/2023.3/Documentation/Manual/frame-debugger-window.html) +- [Rendering Debugger](features/rendering-debugger.md) +- [Understand performance](understand-performance.md) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-viewer-reference.md b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-viewer-reference.md new file mode 100644 index 00000000000..5977151dfec --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-viewer-reference.md @@ -0,0 +1,130 @@ + +# Render Graph Viewer window reference + +The **Render Graph Viewer** window displays the [render graph](render-graph.md) for the current scene in the Universal Render Pipeline (URP). + +Refer to [Analyze a render graph](render-graph-view.md) for more information. + +## Toolbar + +|**Control**|**Child control**|**Description**| +|-|-|-| +|**Capture**||Display the render graph for the current frame.| +|**Render graph**||Select the render graph from your project to display.| +|**Camera**||Select the camera to display the rendering loop for.| +|**Pass Filter**||Select which render passes to display.| +|| **Nothing** | Display no render passes. | +|| **Everything** | Display all render passes. | +|| **Culled** | Display render passes that URP hasn't included in the render graph because they have no effect on the final image. | +|| **Raster** | Display only raster render passes created using `renderGraph.AddRasterRenderPass`. | +|| **Unsafe** | Display only render passes that use Compatibility Mode APIs. Refer to [Use Compatibility Mode APIs in render graph render passes](render-graph-unsafe-pass.md) for more information. | +|| **Compute** | Display only compute render passes created using `renderGraph.AddComputePass`. | +|**Resource Filter**|| Select which resources to display.| +|| **Nothing** | Display no resources. | +|| **Everything** | Display all resources. | +|| **Imported** | Display only resources imported into the render graph using `ImportTexture`. | +|| **Textures** | Display only textures. | +|| **Buffers** | Display only buffers. | +|| **Acceleration Structures** | Display only acceleration structures used in compute render passes. | + +## Main window + +The main window is a timeline graph that displays the render passes in the render graph. It displays the following: + +- On the left, the list of resources the render passes use, in the order URP creates them. +- At the top, the list of render passes, in the order URP executes them. + +At the point where a render pass and a texture meet on the graph, a resource access block displays how the render pass uses the resource. The access block uses the following icons and colors: + +|**Access block icon or color**|**Description**| +|-|-| +|Dotted lines|The resource hasn't been created yet.| +|Green|The render pass has read-only access to the resource. The render pass can read the resource.| +|Red|The render pass has write-only access to the resource. The render pass can write to the resource.| +|Green and red|The render pass has read-write access to the resource. The render pass can read from or write to the resource.| +|Grey|The render pass can't access the resource.| +|Globe icon|The render pass sets the texture as a global resource. 
If the globe icon has a gray background, the resource was imported into the render graph as a `TextureHandle` object, and the pass uses the `SetGlobalTextureAfterPass` API. Refer to [Create a render graph texture](render-graph-create-a-texture.md) and [Transfer a texture between render passes](render-graph-pass-textures-between-passes.md) for more information.| +|Blank|The resource has been deallocated in memory, so it no longer exists.| + +Select an access block to display the resource in the Resource List and the render pass in the Pass List. + +### Render passes + +|**Control**|**Description**| +|-|-| +|Render pass name|The name of the render pass. This name is set in the `AddRasterRenderPass` or `AddComputePass` method.| +|Merge bar|If URP merged this pass with other passes, the Render Graph Viewer displays a blue bar below the merged passes.| +|Resource access overview bar|When you select a render pass name, the resource access overview bar displays information about the pass you selected and related passes. Hover your cursor over an overview block for more information. Select an overview block to open the C# file for the render pass.<br><br>Access blocks use the following colors:<br>• White: The selected pass.<br>• Grey: The pass isn't related to the selected pass.<br>• Blue: The pass reads from or writes to a resource the selected pass uses.<br>• Flashing blue: The pass reads from or writes to a resource the selected pass uses, and can be merged with other flashing blue passes.| + +### Resources + +|**Property**|**Description**| +|-|-| +|Resource type|The type of the resource. Refer to the following screenshot.| +|Resource name|The resource name.| +|Imported resource|Displays a left-facing arrow if the resource is imported. Refer to [Import a texture into the render graph system](render-graph-import-a-texture.md) for more information.| + +![Render Graph Viewer icons](Images/render-graph-viewer-icons.png) + +The icons indicate the resource type:
+A: A texture.
+B: An acceleration structure.
+C: A buffer. + +## Resource List + +Select a resource in the Resource List to expand or collapse information about the resource. + +You can also use the Search bar to find a resource by name. + +|**Property**|**Description**| +|-|-| +|Resource name|The resource name.| +|Imported resource|Displays a left-facing arrow if the resource is imported.| +|**Size**|The resource size in pixels.| +|**Format**|The texture format. Refer to [GraphicsFormat](https://docs.unity3d.com/2023.3/Documentation/ScriptReference/Experimental.Rendering.GraphicsFormat.html) for more information.| +|**Clear**|Displays **True** if URP clears the texture.| +|**BindMS**|Whether the texture is bound as a multisampled texture. Refer to [RenderTextureDescriptor.BindMS](https://docs.unity3d.com/ScriptReference/RenderTextureDescriptor-bindMS.html) for more information.| +|**Samples**|How many times Multisample Anti-aliasing (MSAA) samples the texture. Refer to [Anti-aliasing](anti-aliasing.md#multisample-anti-aliasing-msaa) for more information.| +|**Memoryless**|Displays **True** if the resource is stored in tile memory on mobile platforms that use tile-based deferred rendering. See [Render graph system introduction](render-graph-introduction.md) for more information.| + +## Pass List + +Select a render pass in the main window to display information about the render pass in the Pass List. + +You can also use the Search bar to find a render pass by name. + +|**Property**|**Description**| +|-|-| +|Pass name|The render pass name. If URP merged multiple passes, this property displays the names of all the merged passes.| +|**Native Render Pass Info**|Displays information about whether URP created a native render pass for this render pass by merging multiple render passes. Refer to [Introduction to the render graph system](render-graph-introduction.md) for more information.| +|**Pass break reasoning**|Displays the reasons why URP could not merge this render pass with the next render pass. | + +### Render Graph Pass Info + +The **Render Graph Pass Info** section displays information about the render pass, and each of the resources it uses. + +If URP merged multiple passes into this pass, the section displays information for each merged pass. + +|**Property**|**Description**| +|-|-| +|**Name**|The render pass name.| +|**Attachment dimensions**|The size of a resource the render pass uses, in pixels. Displays **0x0x0** if the render pass doesn't use a resource.| +|**Has depth attachment**|Whether the resource has a depth texture.| +|**MSAA samples**|How many times Multisample Anti-aliasing (MSAA) samples the texture. Refer to [Anti-aliasing](anti-aliasing.md#multisample-anti-aliasing-msaa) for more information. | +|**Async compute**|Whether the render pass accesses the resource using a compute shader.| + +### Attachments Load/Store Actions + +The **Attachments Load/Store Actions** section displays the resources the render pass uses. The section displays **No attachments** if the render pass doesn't use any resources. + +|**Property**|**Description**| +|-|-| +|**Name**|The resource name.| +|**Load Action**|The load action for the resource. Refer to [`RenderBufferLoadAction`](https://docs.unity3d.com/ScriptReference/Rendering.RenderBufferLoadAction.html) for more information.| +|**Store Action**|The store action for the resource, and how URP uses the resource later in another render pass or outside the graph. 
Refer to [`RenderBufferStoreAction`](https://docs.unity3d.com/ScriptReference/Rendering.RenderBufferStoreAction.html) for more information.| + +## Additional resources + +- [Frame Debugger](https://docs.unity3d.com/2023.3/Documentation/Manual/frame-debugger-window.html) +- [Understand performance](understand-performance.md) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-write-render-pass.md b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-write-render-pass.md new file mode 100644 index 00000000000..74039436a49 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-write-render-pass.md @@ -0,0 +1,164 @@ +# Write a render pass using the render graph system + +This page describes how to write a render pass using the render graph system. + +To illustrate the description, this page uses an example render pass that copies the camera's active color texture to a destination texture. To simplify the code, this example does not use the destination texture elsewhere in the frame. You can use the frame debugger to inspect its contents. + +## Declare a render pass + +Declare a render pass as a class that inherits from the [ScriptableRenderPass](xref:UnityEngine.Rendering.Universal.ScriptableRenderPass) class. + +## Declare resources that a render pass uses + +Inside the render pass, declare a class that contains the resources that the render pass uses. + +The resources can be regular C# variables and render graph resource references. The render graph system can access this data structure during the rendering code execution. Ensure that you declare only the variables that the render pass uses. Adding unnecessary variables can reduce performance. + +```C# +class PassData +{ + internal TextureHandle copySourceTexture; +} +``` + +The [RecordRenderGraph](xref:UnityEngine.Rendering.Universal.RenderObjectsPass.RecordRenderGraph*) method populates the data and the render graph passes it as a parameter to the rendering function. + +## Declare a rendering function that generates the rendering commands for the render pass + +Declare a rendering function that generates the rendering commands for the render pass. Later in this example, the [RecordRenderGraph](xref:UnityEngine.Rendering.Universal.RenderObjectsPass.RecordRenderGraph*) method instructs the render graph to use the function using the `SetRenderFunc` method. + +```C# +static void ExecutePass(PassData data, RasterGraphContext context) +{ + // Records a rendering command to copy, or blit, the contents of the source texture + // to the color render target of the render pass. + // The RecordRenderGraph method sets the destination texture as the render target + // with the SetRenderAttachment method. + Blitter.BlitTexture(context.cmd, data.copySourceTexture, + new Vector4(1, 1, 0, 0), 0, false); +} +``` + +## Implement the RecordRenderGraph method + +Use the [RecordRenderGraph](xref:UnityEngine.Rendering.Universal.RenderObjectsPass.RecordRenderGraph*) method to add and configure one or more render passes in the render graph system. + +Unity calls this method during the render graph configuration step and lets you register relevant passes and resources for the render graph execution. Use this method to implement custom rendering. + +In the [RecordRenderGraph](xref:UnityEngine.Rendering.Universal.RenderObjectsPass.RecordRenderGraph*) method you declare render pass inputs and outputs, but do not add commands to command buffers. 
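Before the detailed walkthrough, the following skeleton sketches the overall shape of the method. This is only a sketch: `PassData` and `ExecutePass` are the declarations from the previous sections, the pass name is a placeholder, and the numbered comments mark the steps the rest of this page fills in.

```C#
public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameData)
{
    // Add a raster render pass to the render graph and get a builder to configure it.
    using (var builder = renderGraph.AddRasterRenderPass<PassData>("Example pass", out var passData))
    {
        // 1. Populate passData and declare the resources the pass reads and writes,
        // for example with builder.UseTexture and builder.SetRenderAttachment.

        // 2. Set the rendering function that the render graph calls when it executes the pass.
        builder.SetRenderFunc((PassData data, RasterGraphContext context) => ExecutePass(data, context));
    }
}
```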
+ +The following section describes the main elements of the [RecordRenderGraph](xref:UnityEngine.Rendering.Universal.RenderObjectsPass.RecordRenderGraph*) method and provides an example implementation. + +## The render graph builder variable and frame resources + +The `builder` variable is an instance of the `IRasterRenderGraphBuilder` interface. This variable is the entry point for configuring the information related to the render pass. + +The [UniversalResourceData](xref:UnityEngine.Rendering.Universal.UniversalResourceData) class contains all the texture resources used by URP, including the active color and depth textures of the camera. + +The [UniversalCameraData](xref:UnityEngine.Rendering.Universal.UniversalCameraData) class contains the data related to the currently active camera. + +For demonstrative purposes, this sample creates a temporary destination texture. `UniversalRenderer.CreateRenderGraphTexture` is a helper method that calls the `RenderGraph.CreateTexture` method. + +```C# +TextureHandle destination = + UniversalRenderer.CreateRenderGraphTexture(renderGraph, desc, "CopyTexture", false); +``` + +The `builder.UseTexture` method declares that this render pass uses the source texture as a read-only input: + +```C# +builder.UseTexture(passData.copySourceTexture); +``` + +In this example, the `builder.SetRenderAttachment` method declares that this render pass uses the temporary destination texture as its color render target. This declaration is similar to the `cmd.SetRenderTarget` API which you can use in the Compatibility mode (without the render graph API). + +The `SetRenderFunc` method sets the `ExecutePass` method as the rendering function that render graph calls when executing the render pass. This sample uses a lambda expression to avoid memory allocations. + +```C# +builder.SetRenderFunc((PassData data, RasterGraphContext context) => ExecutePass(data, context)); +``` + +The complete example of the [RecordRenderGraph](xref:UnityEngine.Rendering.Universal.RenderObjectsPass.RecordRenderGraph*) method: + +```C# +// This method adds and configures one or more render passes in the render graph. +// This process includes declaring their inputs and outputs, +// but does not include adding commands to command buffers. +public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameData) +{ + string passName = "Copy To Debug Texture"; + + // Add a raster render pass to the render graph. The PassData type parameter determines + // the type of the passData output variable. + using (var builder = renderGraph.AddRasterRenderPass<PassData>(passName, + out var passData)) + { + // UniversalResourceData contains all the texture references used by URP, + // including the active color and depth textures of the camera. + UniversalResourceData resourceData = frameData.Get<UniversalResourceData>(); + + // Populate passData with the data needed by the rendering function + // of the render pass. + // Use the camera's active color texture + // as the source texture for the copy operation. + passData.copySourceTexture = resourceData.activeColorTexture; + + // Create a destination texture for the copy operation based on the settings, + // such as dimensions, of the textures that the camera uses. + // Set msaaSamples to 1 to get a non-multisampled destination texture. + // Set depthBufferBits to 0 to ensure that the CreateRenderGraphTexture method + // creates a color texture and not a depth texture. 
+ UniversalCameraData cameraData = frameData.Get<UniversalCameraData>(); + RenderTextureDescriptor desc = cameraData.cameraTargetDescriptor; + desc.msaaSamples = 1; + desc.depthBufferBits = 0; + + // For demonstrative purposes, this sample creates a temporary destination texture. + // UniversalRenderer.CreateRenderGraphTexture is a helper method + // that calls the RenderGraph.CreateTexture method. + // Using a RenderTextureDescriptor instance instead of a TextureDesc instance + // simplifies your code. + TextureHandle destination = + UniversalRenderer.CreateRenderGraphTexture(renderGraph, desc, + "CopyTexture", false); + + // Declare that this render pass uses the source texture as a read-only input. + builder.UseTexture(passData.copySourceTexture); + + // Declare that this render pass uses the temporary destination texture + // as its color render target. + // This is similar to cmd.SetRenderTarget prior to the RenderGraph API. + builder.SetRenderAttachment(destination, 0); + + // RenderGraph automatically determines that it can remove this render pass + // because its results, which are stored in the temporary destination texture, + // are not used by other passes. + // For demonstrative purposes, this sample turns off this behavior to make sure + // that render graph executes the render pass. + builder.AllowPassCulling(false); + + // Set the ExecutePass method as the rendering function that render graph calls + // for the render pass. + // This sample uses a lambda expression to avoid memory allocations. + builder.SetRenderFunc((PassData data, RasterGraphContext context) + => ExecutePass(data, context)); + } +} +``` + +## Inject the scriptable render pass instance into the renderer + +To inject the scriptable render pass instance into the renderer, use the `AddRenderPasses` method from a Renderer Feature implementation. URP calls the `AddRenderPasses` method every frame, once for each Camera. + +```C# +public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData) +{ + renderer.EnqueuePass(m_CopyRenderPass); +} +``` + +## Additional resources + +* [Example of a complete Scriptable Renderer Feature](renderer-features/create-custom-renderer-feature.md) + +* [Write a Scriptable Render Pass in Compatibility Mode](renderer-features/write-a-scriptable-render-pass.md) \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph.md b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph.md new file mode 100644 index 00000000000..fb5568f3db2 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph.md @@ -0,0 +1,19 @@ +# Render graph system + +The render graph system is a set of APIs you use to create a [Scriptable Render Pass](renderer-features/scriptable-render-passes.md). 
+ +|Page|Description| +|-|-| +|[Introduction to the render graph system](render-graph-introduction.md)|What the render graph system is, and how it optimizes rendering.| +|[Write a render pass using the render graph system](render-graph-write-render-pass.md)|Write a Scriptable Render Pass using the render graph APIs.| +|[Use textures](working-with-textures.md)|Access and use textures in your render passes, and how to blit.| +|[Use frame data](accessing-frame-data.md) |Get the textures URP creates for the current frame and use them in your render passes.| +|[Draw objects in a render pass](render-graph-draw-objects-in-a-pass.md)|Draw objects in the render graph system using the `RendererList` API.| +|[Analyze a render graph](render-graph-view.md)|Check a render graph using the Render Graph Viewer, Rendering Debugger, or Frame Debugger.| +|[Use Compatibility Mode APIs in the render graph system](render-graph-unsafe-pass.md)|Use the render graph `AddUnsafePass` API to use Compatibility Mode APIs in the render graph system, such as `SetRenderTarget`.| +|[Render Graph Viewer window reference](render-graph-viewer-reference.md)|Reference for the **Render Graph Viewer** window.| + +## Additional resources + +- [Frame Debugger](https://docs.unity3d.com/2023.3/Documentation/Manual/frame-debugger-window.html) +- [Example of a complete Scriptable Renderer Feature](renderer-features/create-custom-renderer-feature.md) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/create-custom-renderer-feature-compatibility-mode.md b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/create-custom-renderer-feature-compatibility-mode.md new file mode 100644 index 00000000000..bee21bf3e57 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/create-custom-renderer-feature-compatibility-mode.md @@ -0,0 +1,711 @@ +# Example of a complete Scriptable Renderer Feature in Compatibility Mode + +This section describes how to create a complete [Scriptable Renderer Feature](./scriptable-renderer-features/intro-to-scriptable-renderer-features.md) for a URP Renderer, if you enable **Compatibility Mode (Render Graph Disabled)** in [URP graphics settings](../urp-global-settings.md). + +> **Note**: Unity no longer develops or improves the rendering path that doesn't use the render graph API. Use the render graph API instead when developing new graphics features. 
+ +This walkthrough contains the following sections: + +* [Overview of this example implementation](#example-implementation-overview) +* [Create example Scene and GameObjects](#example-scene) +* [Create a scriptable Renderer Feature and add it to the Universal Renderer](#scriptable-renderer-feature) + * [Add the Renderer Feature to the Universal Renderer asset](#add-renderer-feature-to-asset) +* [Create the scriptable Render Pass](#scriptable-render-pass) +* [Implement the settings for the custom render pass](#implement-the-settings-for-the-custom-render-pass) +* [Enqueue the render pass in the custom renderer feature](#enqueue-the-render-pass-in-the-custom-renderer-feature) +* [Implement the volume component](#volume-component) +* [All complete code for the scripts in this example](#all-complete-code-for-the-scripts-in-this-example) + * [Custom Renderer Feature code](#code-renderer-feature) + * [Custom render pass code](#code-render-pass) + * [Volume Component code](#code-volume-component) +* [The custom shader for the blur effect](#example-shader) + +## Overview of this example implementation + +The example workflow on this page implements a custom Renderer Feature that uses [custom Render Passes](./intro-to-scriptable-render-passes.md) to add a blur effect to the camera output. + +The implementation consists of the following parts: + +* A `ScriptableRendererFeature` instance that enqueues a `ScriptableRenderPass` instance every frame. + +* A `ScriptableRenderPass` instance that performs the following steps: + + * Creates a temporary render texture using the `RenderTextureDescriptor` API. + + * Applies two passes of the [custom shader](#example-shader) to the camera output using the `RTHandle` and the `Blit` API. + +## Create example Scene and GameObjects + +To set your project up for this example workflow: + +1. Create a new Scene. + +2. Create two GameObjects: a Cube GameObject called `Cube`, and a Sphere GameObject called `Sphere`. + +3. Create two Materials with a shader that lets you specify the base color (for example, the `Universal Render Pipeline/Lit` shader). Call the Materials `Blue` and `Red`, and set the base colors of the Materials to blue and red respectively. + +4. Assign the `Red` Material to the cube and the `Blue` Material to the sphere. + +5. Position the camera so that it has the cube and the sphere in its view. + +The sample scene should look like the following image: + +![Sample scene](../Images/customizing-urp/custom-renderer-feature/sample-scene.png) + +## Create a scriptable Renderer Feature and add it to the Universal Renderer + +1. Create a new C# script and name it `BlurRendererFeature.cs`. + +2. In the script, remove the code that Unity inserted in the `BlurRendererFeature` class. + +3. Add the following `using` directive: + + ```C# + using UnityEngine.Rendering.Universal; + ``` + +4. Create the `BlurRendererFeature` class that inherits from the **ScriptableRendererFeature** class. + + ```C# + public class BlurRendererFeature : ScriptableRendererFeature + ``` + +5. In the `BlurRendererFeature` class, implement the following methods: + + * `Create`: Unity calls this method on the following events: + + * When the Renderer Feature loads the first time. + + * When you enable or disable the Renderer Feature. + + * When you change a property in the inspector of the Renderer Feature. + + * `AddRenderPasses`: Unity calls this method every frame, once for each camera. This method lets you inject `ScriptableRenderPass` instances into the scriptable Renderer. 
+
+Now you have the custom `BlurRendererFeature` Renderer Feature with its main methods.
+
+Below is the complete code for this step.
+
+```C#
+using System.Collections;
+using System.Collections.Generic;
+using UnityEngine;
+using UnityEngine.Rendering.Universal;
+
+public class BlurRendererFeature : ScriptableRendererFeature
+{
+    public override void Create()
+    {
+
+    }
+
+    public override void AddRenderPasses(ScriptableRenderer renderer,
+        ref RenderingData renderingData)
+    {
+
+    }
+}
+```
+
+### Add the Renderer Feature to the Universal Renderer asset
+
+Add the Renderer Feature you created to the Universal Renderer asset. For information on how to do this, refer to the page [How to add a Renderer Feature to a Renderer](../urp-renderer-feature-how-to-add.md).
+
+## Create the scriptable Render Pass
+
+This section demonstrates how to create a scriptable Render Pass and enqueue its instance into the scriptable Renderer.
+
+1. Create a new C# script and name it `BlurRenderPass.cs`.
+
+2. In the script, remove the code that Unity inserted in the `BlurRenderPass` class. Add the following `using` directives:
+
+    ```C#
+    using UnityEngine.Rendering;
+    using UnityEngine.Rendering.Universal;
+    ```
+
+3. Create the `BlurRenderPass` class that inherits from the **ScriptableRenderPass** class.
+
+    ```C#
+    public class BlurRenderPass : ScriptableRenderPass
+    ```
+
+4. Add the `Execute` method to the class. Unity calls this method every frame, once for each camera. This method lets you implement the rendering logic of the scriptable Render Pass.
+
+    ```C#
+    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
+    { }
+    ```
+
+Below is the complete code for the `BlurRenderPass.cs` file from this section.
+
+```C#
+using UnityEngine.Rendering;
+using UnityEngine.Rendering.Universal;
+
+public class BlurRenderPass : ScriptableRenderPass
+{
+    public override void Execute(ScriptableRenderContext context,
+        ref RenderingData renderingData)
+    {
+
+    }
+}
+```
+
+## Implement the settings for the custom render pass
+
+This section demonstrates how to implement the settings for the custom blur render pass.
+
+1. The Renderer Feature in this example uses the [shader](#example-shader) that performs the blur horizontally in one pass, and vertically in another pass. To let users control the blur value for each pass, add the following `BlurSettings` class to the `BlurRendererFeature.cs` script.
+
+    ```C#
+    [Serializable]
+    public class BlurSettings
+    {
+        [Range(0,0.4f)] public float horizontalBlur;
+        [Range(0,0.4f)] public float verticalBlur;
+    }
+    ```
+
+2. In the `BlurRendererFeature` class, declare the following fields:
+
+    ```C#
+    [SerializeField] private BlurSettings settings;
+    [SerializeField] private Shader shader;
+    private Material material;
+    private BlurRenderPass blurRenderPass;
+    ```
+
+3. In the `BlurRenderPass` class, add the fields for the settings, the Material, and the constructor that uses those fields.
+
+    ```C#
+    private BlurSettings defaultSettings;
+    private Material material;
+
+    public BlurRenderPass(Material material, BlurSettings defaultSettings)
+    {
+        this.material = material;
+        this.defaultSettings = defaultSettings;
+    }
+    ```
+
+4. 
In the `BlurRenderPass` class, add the `RenderTextureDescriptor` field and initialize it in the constructor: + + ```C# + using UnityEngine; + + private RenderTextureDescriptor blurTextureDescriptor; + + public BlurRenderPass(Material material, BlurSettings defaultSettings) + { + this.material = material; + this.defaultSettings = defaultSettings; + + blurTextureDescriptor = new RenderTextureDescriptor(Screen.width, + Screen.height, RenderTextureFormat.Default, 0); + } + ``` + +5. In the `BlurRenderPass` class, declare the `RTHandle` field to store the reference to the temporary blur texture. + + ```C# + private RTHandle blurTextureHandle; + ``` + +6. In the `BlurRenderPass` class, implement the `Configure` method. Unity calls this method before executing the render pass. + + ```C# + public override void Configure(CommandBuffer cmd, + RenderTextureDescriptor cameraTextureDescriptor) + { + //Set the blur texture size to be the same as the camera target size. + blurTextureDescriptor.width = cameraTextureDescriptor.width; + blurTextureDescriptor.height = cameraTextureDescriptor.height; + + //Check if the descriptor has changed, and reallocate the RTHandle if necessary. + RenderingUtils.ReAllocateHandleIfNeeded(ref blurTextureHandle, blurTextureDescriptor); + } + ``` + +7. In the `BlurRenderPass` class, implement the `UpdateBlurSettings` method that updates the shader values. + + Use the `Blit` method to apply the two passes from the custom shader to the camera output. + + ```C# + private static readonly int horizontalBlurId = + Shader.PropertyToID("_HorizontalBlur"); + private static readonly int verticalBlurId = + Shader.PropertyToID("_VerticalBlur"); + + ... + + private void UpdateBlurSettings() + { + if (material == null) return; + + material.SetFloat(horizontalBlurId, defaultSettings.horizontalBlur); + material.SetFloat(verticalBlurId, defaultSettings.verticalBlur); + } + ``` + +8. Call the `UpdateBlurSettings` method in the `Execute` method. + + ```C# + public override void Execute(ScriptableRenderContext context, + ref RenderingData renderingData) + { + //Get a CommandBuffer from pool. + CommandBuffer cmd = CommandBufferPool.Get(); + + RTHandle cameraTargetHandle = + renderingData.cameraData.renderer.cameraColorTargetHandle; + + UpdateBlurSettings(); + + // Blit from the camera target to the temporary render texture, + // using the first shader pass. + Blit(cmd, cameraTargetHandle, blurTextureHandle, material, 0); + // Blit from the temporary render texture to the camera target, + // using the second shader pass. + Blit(cmd, blurTextureHandle, cameraTargetHandle, material, 1); + + //Execute the command buffer and release it back to the pool. + context.ExecuteCommandBuffer(cmd); + CommandBufferPool.Release(cmd); + } + ``` + +9. Implement the `Dispose` method that destroys the Material and the temporary render texture after the render pass execution. + + ```C# + public void Dispose() + { + #if UNITY_EDITOR + if (EditorApplication.isPlaying) + { + Object.Destroy(material); + } + else + { + Object.DestroyImmediate(material); + } + #else + Object.Destroy(material); + #endif + + if (blurTextureHandle != null) blurTextureHandle.Release(); + } + ``` + +The complete code for this part is in section [Custom render pass code](#code-render-pass). + +## Enqueue the render pass in the custom renderer feature + +In this section, you instantiate the render pass in the `Create` method of the `BlurRendererFeature` class, and enqueue it in the `AddRenderPasses` method. + +1. 
In the `Create` method of the `BlurRendererFeature` class, instantiate the `BlurRenderPass` class.
+
+    In the method, use the `renderPassEvent` field of the render pass to specify when to execute the render pass.
+
+    ```C#
+    public override void Create()
+    {
+        if (shader == null)
+        {
+            return;
+        }
+        material = new Material(shader);
+        blurRenderPass = new BlurRenderPass(material, settings);
+
+        blurRenderPass.renderPassEvent = RenderPassEvent.AfterRenderingSkybox;
+    }
+    ```
+
+2. In the `AddRenderPasses` method of the `BlurRendererFeature` class, enqueue the render pass with the `EnqueuePass` method.
+
+    ```C#
+    public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData)
+    {
+        if (renderingData.cameraData.cameraType == CameraType.Game)
+        {
+            renderer.EnqueuePass(blurRenderPass);
+        }
+    }
+    ```
+
+3. Implement the `Dispose` method that destroys the Material instance that the Renderer Feature creates. The method also calls the `Dispose` method from the render pass class.
+
+    ```C#
+    protected override void Dispose(bool disposing)
+    {
+        blurRenderPass.Dispose();
+        #if UNITY_EDITOR
+        if (EditorApplication.isPlaying)
+        {
+            Destroy(material);
+        }
+        else
+        {
+            DestroyImmediate(material);
+        }
+        #else
+        Destroy(material);
+        #endif
+    }
+    ```
+
+For the complete Renderer Feature code, refer to section [Custom Renderer Feature code](#code-renderer-feature).
+
+The Scriptable Renderer Feature is now complete. The following image shows the effect of the feature in the Game view and the example settings.
+
+![The effect of the Scriptable Renderer Feature in the Game view](../Images/customizing-urp/custom-renderer-feature/final-effect.png)
+*The effect of the Scriptable Renderer Feature in the Game view.*
+
+## Implement the volume component
+
+This section shows how to implement a volume component that lets you control the input values for the custom renderer feature.
+
+1. Create a new C# script and name it `CustomVolumeComponent.cs`.
+
+2. Inherit the `CustomVolumeComponent` class from the `VolumeComponent` class, and add the `[Serializable]` attribute to the class. Add the `using UnityEngine.Rendering;` directive.
+
+    ```C#
+    using System;
+    using UnityEngine.Rendering;
+
+    [Serializable]
+    public class CustomVolumeComponent : VolumeComponent
+    {
+
+    }
+    ```
+
+3. Add the `BoolParameter` field to the `CustomVolumeComponent` class. This field lets you enable or disable the custom renderer feature.
+
+    ```C#
+    public class CustomVolumeComponent : VolumeComponent
+    {
+        public BoolParameter isActive = new BoolParameter(true);
+    }
+    ```
+
+4. Add the fields to control the blur settings defined in the custom renderer feature.
+
+    ```C#
+    [Serializable]
+    public class CustomVolumeComponent : VolumeComponent
+    {
+        public BoolParameter isActive = new BoolParameter(true);
+        public ClampedFloatParameter horizontalBlur =
+            new ClampedFloatParameter(0.05f, 0, 0.5f);
+        public ClampedFloatParameter verticalBlur =
+            new ClampedFloatParameter(0.05f, 0, 0.5f);
+    }
+    ```
+
+5. In the `BlurRenderPass` script, change the `UpdateBlurSettings` method so that it uses the settings defined in a Volume, or the default settings if no Volume is set.
+
+    ```C#
+    private void UpdateBlurSettings()
+    {
+        if (material == null) return;
+
+        // Use the Volume settings or the default settings if no Volume is set.
+        var volumeComponent =
+            VolumeManager.instance.stack.GetComponent<CustomVolumeComponent>();
+        float horizontalBlur = volumeComponent.horizontalBlur.overrideState ?
+            volumeComponent.horizontalBlur.value : defaultSettings.horizontalBlur;
+        float verticalBlur = volumeComponent.verticalBlur.overrideState ?
+            volumeComponent.verticalBlur.value : defaultSettings.verticalBlur;
+        material.SetFloat(horizontalBlurId, horizontalBlur);
+        material.SetFloat(verticalBlurId, verticalBlur);
+    }
+    ```
+
+6. In the Unity scene, create a [local Box Volume](../Volumes.md). If a [Volume Profile](../Volume-Profile.md) is missing, create a new one by clicking **New** next to the **Profile** property. Add the `Custom Volume Component` [override](../VolumeOverrides.md) to the Volume.
+
+    ![Box Volume properties](../Images/customizing-urp/custom-renderer-feature/local-volume.png)
+
+7. Enable the settings in the `Custom Volume Component` override and set the values for this Volume. Move the Volume so that the camera is inside it. The settings from the Volume override the default settings from the custom renderer feature.
+
+## All complete code for the scripts in this example
+
+This section contains the complete code for all the scripts in this example.
+
+### Custom Renderer Feature code
+
+Below is the complete code for the custom Renderer Feature script.
+
+```C#
+using System;
+using UnityEditor;
+using UnityEngine;
+using UnityEngine.Rendering.Universal;
+
+public class BlurRendererFeature : ScriptableRendererFeature
+{
+    [SerializeField] private BlurSettings settings;
+    [SerializeField] private Shader shader;
+    private Material material;
+    private BlurRenderPass blurRenderPass;
+
+    public override void Create()
+    {
+        if (shader == null)
+        {
+            return;
+        }
+        material = new Material(shader);
+        blurRenderPass = new BlurRenderPass(material, settings);
+
+        blurRenderPass.renderPassEvent = RenderPassEvent.AfterRenderingSkybox;
+    }
+
+    public override void AddRenderPasses(ScriptableRenderer renderer,
+        ref RenderingData renderingData)
+    {
+        if (renderingData.cameraData.cameraType == CameraType.Game)
+        {
+            renderer.EnqueuePass(blurRenderPass);
+        }
+    }
+
+    protected override void Dispose(bool disposing)
+    {
+        blurRenderPass.Dispose();
+        #if UNITY_EDITOR
+        if (EditorApplication.isPlaying)
+        {
+            Destroy(material);
+        }
+        else
+        {
+            DestroyImmediate(material);
+        }
+        #else
+        Destroy(material);
+        #endif
+    }
+}
+
+[Serializable]
+public class BlurSettings
+{
+    [Range(0, 0.4f)] public float horizontalBlur;
+    [Range(0, 0.4f)] public float verticalBlur;
+}
+```
+
+### Custom render pass code
+
+Below is the complete code for the custom Render Pass script.
+
+```C#
+using UnityEditor;
+using UnityEngine;
+using UnityEngine.Rendering;
+using UnityEngine.Rendering.Universal;
+
+public class BlurRenderPass : ScriptableRenderPass
+{
+    private static readonly int horizontalBlurId =
+        Shader.PropertyToID("_HorizontalBlur");
+    private static readonly int verticalBlurId =
+        Shader.PropertyToID("_VerticalBlur");
+
+    private BlurSettings defaultSettings;
+    private Material material;
+
+    private RenderTextureDescriptor blurTextureDescriptor;
+    private RTHandle blurTextureHandle;
+
+    public BlurRenderPass(Material material, BlurSettings defaultSettings)
+    {
+        this.material = material;
+        this.defaultSettings = defaultSettings;
+
+        blurTextureDescriptor = new RenderTextureDescriptor(Screen.width,
+            Screen.height, RenderTextureFormat.Default, 0);
+    }
+
+    public override void Configure(CommandBuffer cmd,
+        RenderTextureDescriptor cameraTextureDescriptor)
+    {
+        // Set the blur texture size to be the same as the camera target size.
+        blurTextureDescriptor.width = cameraTextureDescriptor.width;
+        blurTextureDescriptor.height = cameraTextureDescriptor.height;
+
+        // Check if the descriptor has changed, and reallocate the RTHandle if necessary
+        RenderingUtils.ReAllocateHandleIfNeeded(ref blurTextureHandle, blurTextureDescriptor);
+    }
+
+    private void UpdateBlurSettings()
+    {
+        if (material == null) return;
+
+        // Use the Volume settings or the default settings if no Volume is set.
+        var volumeComponent =
+            VolumeManager.instance.stack.GetComponent<CustomVolumeComponent>();
+        float horizontalBlur = volumeComponent.horizontalBlur.overrideState ?
+            volumeComponent.horizontalBlur.value : defaultSettings.horizontalBlur;
+        float verticalBlur = volumeComponent.verticalBlur.overrideState ?
+            volumeComponent.verticalBlur.value : defaultSettings.verticalBlur;
+        material.SetFloat(horizontalBlurId, horizontalBlur);
+        material.SetFloat(verticalBlurId, verticalBlur);
+    }
+
+    public override void Execute(ScriptableRenderContext context,
+        ref RenderingData renderingData)
+    {
+        //Get a CommandBuffer from pool.
+ CommandBuffer cmd = CommandBufferPool.Get(); + + RTHandle cameraTargetHandle = + renderingData.cameraData.renderer.cameraColorTargetHandle; + + UpdateBlurSettings(); + + // Blit from the camera target to the temporary render texture, + // using the first shader pass. + Blit(cmd, cameraTargetHandle, blurTextureHandle, material, 0); + // Blit from the temporary render texture to the camera target, + // using the second shader pass. + Blit(cmd, blurTextureHandle, cameraTargetHandle, material, 1); + + //Execute the command buffer and release it back to the pool. + context.ExecuteCommandBuffer(cmd); + CommandBufferPool.Release(cmd); + } + + public void Dispose() + { + #if UNITY_EDITOR + if (EditorApplication.isPlaying) + { + Object.Destroy(material); + } + else + { + Object.DestroyImmediate(material); + } + #else + Object.Destroy(material); + #endif + + if (blurTextureHandle != null) blurTextureHandle.Release(); + } +} +``` + +### Volume Component code + +Below is the complete code for the Volume Component script. + +```C# +using System; +using UnityEngine.Rendering; + +[Serializable] +public class CustomVolumeComponent : VolumeComponent +{ + public BoolParameter isActive = new BoolParameter(true); + public ClampedFloatParameter horizontalBlur = + new ClampedFloatParameter(0.05f, 0, 0.5f); + public ClampedFloatParameter verticalBlur = + new ClampedFloatParameter(0.05f, 0, 0.5f); +} +``` + +## The custom shader for the blur effect + +This section contains the code for the custom shader that implements the blur effect. + +```c++ +Shader "CustomEffects/Blur" +{ + HLSLINCLUDE + + #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl" + // The Blit.hlsl file provides the vertex shader (Vert), + // the input structure (Attributes), and the output structure (Varyings) + #include "Packages/com.unity.render-pipelines.core/Runtime/Utilities/Blit.hlsl" + + float _VerticalBlur; + float _HorizontalBlur; + + float4 _BlitTexture_TexelSize; + + float4 BlurVertical (Varyings input) : SV_Target + { + const float BLUR_SAMPLES = 64; + const float BLUR_SAMPLES_RANGE = BLUR_SAMPLES / 2; + + float3 color = 0; + float blurPixels = _VerticalBlur * _ScreenParams.y; + + for(float i = -BLUR_SAMPLES_RANGE; i <= BLUR_SAMPLES_RANGE; i++) + { + float2 sampleOffset = + float2 (0, (blurPixels / _BlitTexture_TexelSize.w) * + (i / BLUR_SAMPLES_RANGE)); + color += + SAMPLE_TEXTURE2D(_BlitTexture, sampler_LinearClamp, + input.texcoord + sampleOffset).rgb; + } + + return float4(color.rgb / (BLUR_SAMPLES + 1), 1); + } + + float4 BlurHorizontal (Varyings input) : SV_Target + { + const float BLUR_SAMPLES = 64; + const float BLUR_SAMPLES_RANGE = BLUR_SAMPLES / 2; + + UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(input); + float3 color = 0; + float blurPixels = _HorizontalBlur * _ScreenParams.x; + for(float i = -BLUR_SAMPLES_RANGE; i <= BLUR_SAMPLES_RANGE; i++) + { + float2 sampleOffset = + float2 ((blurPixels / _BlitTexture_TexelSize.z) * + (i / BLUR_SAMPLES_RANGE), 0); + color += + SAMPLE_TEXTURE2D(_BlitTexture, sampler_LinearClamp, + input.texcoord + sampleOffset).rgb; + } + return float4(color / (BLUR_SAMPLES + 1), 1); + } + + ENDHLSL + + SubShader + { + Tags { "RenderType"="Opaque" "RenderPipeline" = "UniversalPipeline"} + LOD 100 + ZWrite Off Cull Off + Pass + { + Name "BlurPassVertical" + + HLSLPROGRAM + + #pragma vertex Vert + #pragma fragment BlurVertical + + ENDHLSL + } + + Pass + { + Name "BlurPassHorizontal" + + HLSLPROGRAM + + #pragma vertex Vert + #pragma fragment BlurHorizontal + + ENDHLSL + } + } +} 
+``` \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/create-custom-renderer-feature.md b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/create-custom-renderer-feature.md index 853db32c7f2..efe58f6f98a 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/create-custom-renderer-feature.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/create-custom-renderer-feature.md @@ -10,6 +10,7 @@ This walkthrough contains the following sections: * [Add the Renderer Feature to the the Universal Renderer asset](#add-renderer-feature-to-asset) * [Create the scriptable Render Pass](#scriptable-render-pass) * [Implement the settings for the custom render pass](#implement-the-settings-for-the-custom-render-pass) +* [Implement the render passes](#implement-the-render-passes) * [Enqueue the render pass in the custom renderer feature](#enqueue-the-render-pass-in-the-custom-renderer-feature) * [Implement the volume component](#volume-component) * [All complete code for the scripts in this example](#all-complete-code-for-the-scripts-in-this-example) @@ -30,7 +31,7 @@ The implementation consists of the following parts: * Creates a temporary render texture using the `RenderTextureDescriptor` API. - * Applies two passes of the [custom shader](#example-shader) to the camera output using the `RTHandle` and the `Blit` API. + * Applies two passes of the [custom shader](#example-shader) to the camera output using the `TextureHandle` and the `Blitter` API. ## Create example Scene and GameObjects @@ -46,6 +47,8 @@ To set your project up for this example workflow: 3. Position the camera so that it has the cube and the sphere in its view. +4. In the URP Asset, set the property **Quality** > **Anti Aliasing (MSAA)** to **Disabled**. The purpose of this step is to simplify the example implementation. + The sample scene should look like the following image: ![Sample scene](../Images/customizing-urp/custom-renderer-feature/sample-scene.png) @@ -119,6 +122,7 @@ This section demonstrates how to create a scriptable Render Pass and enqueue its ```C# using UnityEngine.Rendering; + using UnityEngine.Rendering.RenderGraphModule; using UnityEngine.Rendering.Universal; ``` @@ -128,23 +132,24 @@ This section demonstrates how to create a scriptable Render Pass and enqueue its public class BlurRenderPass : ScriptableRenderPass ``` -4. Add the `Execute` method to the class. Unity calls this method every frame, once for each camera. This method lets you implement the rendering logic of the scriptable Render Pass. +4. Add the `RecordRenderGraph` method to the class. This method adds and configures render passes in the render graph. This process includes declaring render pass inputs and outputs, but does not include adding commands to command buffers. Unity calls this method every frame, once for each camera. ```C# - public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData) + public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameData) { } ``` -Below is the complete code for the BlurRenderPass.cs file from this section. +Below is the complete code for the `BlurRenderPass.cs` file from this section. 
```C# using UnityEngine.Rendering; +using UnityEngine.Rendering.RenderGraphModule; using UnityEngine.Rendering.Universal; public class BlurRenderPass : ScriptableRenderPass { - public override void Execute(ScriptableRenderContext context, - ref RenderingData renderingData) + public override void RecordRenderGraph(RenderGraph renderGraph, + ContextContainer frameData) { } @@ -184,11 +189,11 @@ This section demonstrates how to implement the settings for the custom blur rend public BlurRenderPass(Material material, BlurSettings defaultSettings) { this.material = material; - this.defaultSettings = defaultSettings; + this.defaultSettings = defaultSettings; } ``` -4. In the `BlurRenderPass` class, add the `RenderTextureDescriptor` field and initialize it in the constructor: +4. In the `BlurRenderPass` class, add the `RenderTextureDescriptor` field and initialize it in the constructor. The `RenderTextureDescriptor` class lets you specify the properties of a render texture, such as the width, height, and format. ```C# using UnityEngine; @@ -200,99 +205,149 @@ This section demonstrates how to implement the settings for the custom blur rend this.material = material; this.defaultSettings = defaultSettings; - blurTextureDescriptor = new RenderTextureDescriptor(Screen.width, - Screen.height, RenderTextureFormat.Default, 0); + blurTextureDescriptor = new RenderTextureDescriptor(Screen.width, Screen.height, + RenderTextureFormat.Default, 0); } ``` -5. In the `BlurRenderPass` class, declare the `RTHandle` field to store the reference to the temporary blur texture. +5. In the `BlurRenderPass` class, declare the `PassData` class for storing the render pass input data. The `RecordRenderGraph` method populates the data and the render graph passes it as a parameter to the rendering function. The `TextureHandle` field stores the reference to the temporary input texture. ```C# - private RTHandle blurTextureHandle; + private class PassData + { + internal TextureHandle src; + internal Material material; + } ``` -6. In the `BlurRenderPass` class, implement the `Configure` method. Unity calls this method before executing the render pass. +5. In the `RecordRenderGraph` method, create the variable for storing the `UniversalResourceData` instance from the `frameData` parameter. `UniversalResourceData` contains all the texture references used by URP, including the active color and depth textures of the camera. ```C# - public override void Configure(CommandBuffer cmd, - RenderTextureDescriptor cameraTextureDescriptor) - { - //Set the blur texture size to be the same as the camera target size. - blurTextureDescriptor.width = cameraTextureDescriptor.width; - blurTextureDescriptor.height = cameraTextureDescriptor.height; - - //Check if the descriptor has changed, and reallocate the RTHandle if necessary. - RenderingUtils.ReAllocateHandleIfNeeded(ref blurTextureHandle, blurTextureDescriptor); - } + UniversalResourceData resourceData = frameData.Get(); ``` -7. In the `BlurRenderPass` class, implement the `UpdateBlurSettings` method that updates the shader values. +6. Declare the variables for interacting with the shader properties. - Use the `Blit` method to apply the two passes from the custom shader to the camera output. 
+ ```C# + private static readonly int horizontalBlurId = Shader.PropertyToID("_HorizontalBlur"); + private static readonly int verticalBlurId = Shader.PropertyToID("_VerticalBlur"); + private const string k_BlurTextureName = "_BlurTexture"; + private const string k_VerticalPassName = "VerticalBlurRenderPass"; + private const string k_HorizontalPassName = "HorizontalBlurRenderPass"; + ``` + +6. In the `RecordRenderGraph` method, declare the `TextureHandle` fields to store the references to the input and the output textures. `CreateRenderGraphTexture` is a helper method that calls the `RenderGraph.CreateTexture` method. ```C# - private static readonly int horizontalBlurId = - Shader.PropertyToID("_HorizontalBlur"); - private static readonly int verticalBlurId = - Shader.PropertyToID("_VerticalBlur"); + TextureHandle srcCamColor = resourceData.activeColorTexture; + TextureHandle dst = UniversalRenderer.CreateRenderGraphTexture(renderGraph, blurTextureDescriptor, k_BlurTextureName, false); + ``` - ... +7. In the `BlurRenderPass` class, implement the `UpdateBlurSettings` method that updates the shader values. + ```C# private void UpdateBlurSettings() { if (material == null) return; - - material.SetFloat(horizontalBlurId, defaultSettings.horizontalBlur); - material.SetFloat(verticalBlurId, defaultSettings.verticalBlur); + + // Use the Volume settings or the default settings if no Volume is set. + var volumeComponent = + VolumeManager.instance.stack.GetComponent(); + float horizontalBlur = volumeComponent.horizontalBlur.overrideState ? + volumeComponent.horizontalBlur.value : defaultSettings.horizontalBlur; + float verticalBlur = volumeComponent.verticalBlur.overrideState ? + volumeComponent.verticalBlur.value : defaultSettings.verticalBlur; + material.SetFloat(horizontalBlurId, horizontalBlur); + material.SetFloat(verticalBlurId, verticalBlur); } ``` -8. Call the `UpdateBlurSettings` method in the `Execute` method. +8. In the `RecordRenderGraph` method, add the variable for storing the `UniversalCameraData` data, and set the `RenderTextureDescriptor` values using that data. ```C# - public override void Execute(ScriptableRenderContext context, - ref RenderingData renderingData) - { - //Get a CommandBuffer from pool. - CommandBuffer cmd = CommandBufferPool.Get(); + UniversalCameraData cameraData = frameData.Get(); - RTHandle cameraTargetHandle = - renderingData.cameraData.renderer.cameraColorTargetHandle; + // The following line ensures that the render pass doesn't blit + // from the back buffer. + if (resourceData.isActiveTargetBackBuffer) + return; - UpdateBlurSettings(); + // Set the blur texture size to be the same as the camera target size. + blurTextureDescriptor.width = cameraData.cameraTargetDescriptor.width; + blurTextureDescriptor.height = cameraData.cameraTargetDescriptor.height; + blurTextureDescriptor.depthBufferBits = 0; + ``` + +8. In the `RecordRenderGraph` method, add the function to continuously update the blur settings in the material. + + ```C# + // Update the blur settings in the material + UpdateBlurSettings(); + + // This check is to avoid an error from the material preview in the scene + if (!srcCamColor.IsValid() || !dst.IsValid()) + return; + ``` + +## Implement the render passes + +1. In the `RecordRenderGraph` method, using the `builder` variable, add the render pass for the vertical blur. The `SetRenderFunc` method sets the rendering function for the render pass. 
In this example, the function blits the camera color to the render graph texture, using the first shader pass. + + ```C# + // Vertical blur pass + using (var builder = renderGraph.AddRasterRenderPass(k_VerticalPassName, + out var passData)) + { + // Configure pass data + passData.src = srcCamColor; + passData.material = material; - // Blit from the camera target to the temporary render texture, + // Configure render graph input and output + builder.UseTexture(passData.src); + builder.SetRenderAttachment(dst, 0); + + // Blit from the camera color to the render graph texture, // using the first shader pass. - Blit(cmd, cameraTargetHandle, blurTextureHandle, material, 0); - // Blit from the temporary render texture to the camera target, - // using the second shader pass. - Blit(cmd, blurTextureHandle, cameraTargetHandle, material, 1); - - //Execute the command buffer and release it back to the pool. - context.ExecuteCommandBuffer(cmd); - CommandBufferPool.Release(cmd); + builder.SetRenderFunc((PassData data, RasterGraphContext context) => + { + Blitter.BlitTexture(context.cmd, data.src, m_ScaleBias, data.material, 0); + }); } ``` -9. Implement the `Dispose` method that destroys the Material and the temporary render texture after the render pass execution. + The `BlitTexture` method uses the `m_ScaleBias` argument. add it in the `BlurRenderPass` class. ```C# - public void Dispose() + private Vector4 m_ScaleBias = new Vector4(1f, 1f, 0f, 0f); + ``` + +2. In the `RecordRenderGraph` method, using the `builder` variable, add the render pass for the horizontal blur. This pass uses the output of the previous pass as its input, it does that using the `FrameBufferFetch` method. In this example, using this method lets URP merge two blur passes into a single render pass. Refer to the complete shader code for the implementation details. + + ```C# + // Horizontal blur pass + using (var builder = renderGraph.AddRasterRenderPass(k_HorizontalPassName, + out var passData)) { - #if UNITY_EDITOR - if (EditorApplication.isPlaying) - { - Object.Destroy(material); - } - else - { - Object.DestroyImmediate(material); - } - #else - Object.Destroy(material); - #endif - - if (blurTextureHandle != null) blurTextureHandle.Release(); + // Reset unused passData fields + passData.src = TextureHandle.nullHandle; + + // Use the same material as the previous pass + passData.material = material; + + // Use the output of the previous pass as the input, + // and bind it as FrameBufferFetch input + builder.SetInputAttachment(dst, 0); + + // Use the input texture of the previous pass as the output + builder.SetRenderAttachment(srcCamColor, 0); + + // Blit from the render graph texture to the camera color, + // using the second shader pass, + // which reads the input texture using the FrameBufferFetch method. + builder.SetRenderFunc((PassData data, RasterGraphContext context) => + { + Blitter.BlitTexture(context.cmd, m_ScaleBias, data.material, 1); + }); } ``` @@ -316,7 +371,7 @@ In this section, you instantiate the render pass in the `Create` method of the ` material = new Material(shader); blurRenderPass = new BlurRenderPass(material, settings); - renderPassEvent = RenderPassEvent.AfterRenderingSkybox; + blurRenderPass.renderPassEvent = RenderPassEvent.AfterRenderingSkybox; } ``` @@ -332,12 +387,11 @@ In this section, you instantiate the render pass in the `Create` method of the ` } ``` -3. Implement the `Dispose` method that destroys the material instance that the Renderer Feature creates. 
The method also calls the `Dispose` method from the render pass class. +3. Implement the `Dispose` method that destroys the material instance that the Renderer Feature creates. ```C# protected override void Dispose(bool disposing) { - blurRenderPass.Dispose(); #if UNITY_EDITOR if (EditorApplication.isPlaying) { @@ -470,7 +524,6 @@ public class BlurRendererFeature : ScriptableRendererFeature protected override void Dispose(bool disposing) { - blurRenderPass.Dispose(); #if UNITY_EDITOR if (EditorApplication.isPlaying) { @@ -502,39 +555,31 @@ Below is the complete code for the custom Render Pass script. using UnityEditor; using UnityEngine; using UnityEngine.Rendering; +using UnityEngine.Rendering.RenderGraphModule; using UnityEngine.Rendering.Universal; public class BlurRenderPass : ScriptableRenderPass { - private static readonly int horizontalBlurId = - Shader.PropertyToID("_HorizontalBlur"); - private static readonly int verticalBlurId = - Shader.PropertyToID("_VerticalBlur"); + private static readonly int horizontalBlurId = Shader.PropertyToID("_HorizontalBlur"); + private static readonly int verticalBlurId = Shader.PropertyToID("_VerticalBlur"); + private const string k_BlurTextureName = "_BlurTexture"; + private const string k_VerticalPassName = "VerticalBlurRenderPass"; + private const string k_HorizontalPassName = "HorizontalBlurRenderPass"; + + private Vector4 m_ScaleBias = new Vector4(1f, 1f, 0f, 0f); private BlurSettings defaultSettings; private Material material; private RenderTextureDescriptor blurTextureDescriptor; - private RTHandle blurTextureHandle; public BlurRenderPass(Material material, BlurSettings defaultSettings) { this.material = material; this.defaultSettings = defaultSettings; - blurTextureDescriptor = new RenderTextureDescriptor(Screen.width, - Screen.height, RenderTextureFormat.Default, 0); - } - - public override void Configure(CommandBuffer cmd, - RenderTextureDescriptor cameraTextureDescriptor) - { - // Set the blur texture size to be the same as the camera target size. - blurTextureDescriptor.width = cameraTextureDescriptor.width; - blurTextureDescriptor.height = cameraTextureDescriptor.height; - - // Check if the descriptor has changed, and reallocate the RTHandle if necessary - RenderingUtils.ReAllocateHandleIfNeeded(ref blurTextureHandle, blurTextureDescriptor); + blurTextureDescriptor = new RenderTextureDescriptor(Screen.width, Screen.height, + RenderTextureFormat.Default, 0); } private void UpdateBlurSettings() @@ -552,45 +597,85 @@ public class BlurRenderPass : ScriptableRenderPass material.SetFloat(verticalBlurId, verticalBlur); } - public override void Execute(ScriptableRenderContext context, - ref RenderingData renderingData) + private class PassData + { + internal TextureHandle src; + internal Material material; + } + + public override void RecordRenderGraph(RenderGraph renderGraph, + ContextContainer frameData) { - //Get a CommandBuffer from pool. - CommandBuffer cmd = CommandBufferPool.Get(); + UniversalResourceData resourceData = frameData.Get(); - RTHandle cameraTargetHandle = - renderingData.cameraData.renderer.cameraColorTargetHandle; + UniversalCameraData cameraData = frameData.Get(); + // The following line ensures that the render pass doesn't blit + // from the back buffer. + if (resourceData.isActiveTargetBackBuffer) + return; + + // Set the blur texture size to be the same as the camera target size. 
+ blurTextureDescriptor.width = cameraData.cameraTargetDescriptor.width; + blurTextureDescriptor.height = cameraData.cameraTargetDescriptor.height; + blurTextureDescriptor.depthBufferBits = 0; + + TextureHandle srcCamColor = resourceData.activeColorTexture; + TextureHandle dst = UniversalRenderer.CreateRenderGraphTexture(renderGraph, + blurTextureDescriptor, k_BlurTextureName, false); + + // Update the blur settings in the material UpdateBlurSettings(); - // Blit from the camera target to the temporary render texture, - // using the first shader pass. - Blit(cmd, cameraTargetHandle, blurTextureHandle, material, 0); - // Blit from the temporary render texture to the camera target, - // using the second shader pass. - Blit(cmd, blurTextureHandle, cameraTargetHandle, material, 1); - - //Execute the command buffer and release it back to the pool. - context.ExecuteCommandBuffer(cmd); - CommandBufferPool.Release(cmd); - } + // This check is to avoid an error from the material preview in the scene + if (!srcCamColor.IsValid() || !dst.IsValid()) + return; - public void Dispose() - { - #if UNITY_EDITOR - if (EditorApplication.isPlaying) + // Vertical blur pass + using (var builder = renderGraph.AddRasterRenderPass(k_VerticalPassName, + out var passData)) { - Object.Destroy(material); + // Configure pass data + passData.src = srcCamColor; + passData.material = material; + + // Configure render graph input and output + builder.UseTexture(passData.src); + builder.SetRenderAttachment(dst, 0); + + // Blit from the camera color to the render graph texture, + // using the first shader pass. + builder.SetRenderFunc((PassData data, RasterGraphContext context) => + { + Blitter.BlitTexture(context.cmd, data.src, m_ScaleBias, data.material, 0); + }); } - else + + // Horizontal blur pass + using (var builder = renderGraph.AddRasterRenderPass(k_HorizontalPassName, + out var passData)) { - Object.DestroyImmediate(material); - } - #else - Object.Destroy(material); - #endif + // Reset unused passData fields + passData.src = TextureHandle.nullHandle; + + // Use the same material as the previous pass + passData.material = material; + + // Use the output of the previous pass as the input, + // and bind it as FrameBufferFetch input + builder.SetInputAttachment(dst, 0); - if (blurTextureHandle != null) blurTextureHandle.Release(); + // Use the input texture of the previous pass as the output + builder.SetRenderAttachment(srcCamColor, 0); + + // Blit from the render graph texture to the camera color, + // using the second shader pass, + // which reads the input texture using the FrameBufferFetch method. 
+ builder.SetRenderFunc((PassData data, RasterGraphContext context) => + { + Blitter.BlitTexture(context.cmd, m_ScaleBias, data.material, 1); + }); + } } } ``` @@ -631,8 +716,6 @@ Shader "CustomEffects/Blur" float _VerticalBlur; float _HorizontalBlur; - float4 _BlitTexture_TexelSize; - float4 BlurVertical (Varyings input) : SV_Target { const float BLUR_SAMPLES = 64; @@ -643,36 +726,12 @@ Shader "CustomEffects/Blur" for(float i = -BLUR_SAMPLES_RANGE; i <= BLUR_SAMPLES_RANGE; i++) { - float2 sampleOffset = - float2 (0, (blurPixels / _BlitTexture_TexelSize.w) * - (i / BLUR_SAMPLES_RANGE)); - color += - SAMPLE_TEXTURE2D(_BlitTexture, sampler_LinearClamp, - input.texcoord + sampleOffset).rgb; + float2 sampleOffset = float2 (0, (blurPixels / _BlitTexture_TexelSize.w) * (i / BLUR_SAMPLES_RANGE)); + color += SAMPLE_TEXTURE2D(_BlitTexture, sampler_LinearClamp, input.texcoord + sampleOffset).rgb; } return float4(color.rgb / (BLUR_SAMPLES + 1), 1); } - - float4 BlurHorizontal (Varyings input) : SV_Target - { - const float BLUR_SAMPLES = 64; - const float BLUR_SAMPLES_RANGE = BLUR_SAMPLES / 2; - - UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(input); - float3 color = 0; - float blurPixels = _HorizontalBlur * _ScreenParams.x; - for(float i = -BLUR_SAMPLES_RANGE; i <= BLUR_SAMPLES_RANGE; i++) - { - float2 sampleOffset = - float2 ((blurPixels / _BlitTexture_TexelSize.z) * - (i / BLUR_SAMPLES_RANGE), 0); - color += - SAMPLE_TEXTURE2D(_BlitTexture, sampler_LinearClamp, - input.texcoord + sampleOffset).rgb; - } - return float4(color / (BLUR_SAMPLES + 1), 1); - } ENDHLSL @@ -695,12 +754,30 @@ Shader "CustomEffects/Blur" Pass { - Name "BlurPassHorizontal" + Name "BlurPassHorizontal_FrameBufferFetch" HLSLPROGRAM #pragma vertex Vert - #pragma fragment BlurHorizontal + #pragma fragment Frag + + FRAMEBUFFER_INPUT_X_HALF(0); + + float4 Frag(Varyings input) : SV_Target + { + const float BLUR_SAMPLES = 64; + const float BLUR_SAMPLES_RANGE = BLUR_SAMPLES / 2; + + UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(input); + float3 color = 0; + float blurPixels = _HorizontalBlur * _ScreenParams.x; + for(float i = -BLUR_SAMPLES_RANGE; i <= BLUR_SAMPLES_RANGE; i++) + { + float2 sampleOffset = float2 ((blurPixels / 1) * (i / BLUR_SAMPLES_RANGE), 0); + color += LOAD_FRAMEBUFFER_X_INPUT(0, input.positionCS.xy + sampleOffset).rgb; + } + return float4(color / (BLUR_SAMPLES + 1), 1); + } ENDHLSL } diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/custom-rendering-pass-workflow-in-urp.md b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/custom-rendering-pass-workflow-in-urp.md index 8544648fd69..fd3a422e240 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/custom-rendering-pass-workflow-in-urp.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/custom-rendering-pass-workflow-in-urp.md @@ -1,32 +1,37 @@ # Custom render pass workflow in URP -A custom render pass is a way to change how the Universal Render Pipeline (URP) renders a scene or the objects within a scene. A custom render pass contains your own rendering code, which you add to the rendering pipeline at an injection point. +A custom render pass is a way to change how the Universal Render Pipeline (URP) renders a scene or the objects within a scene. A custom render pass contains your own rendering code, which you insert into the rendering pipeline at an injection point. 
To add a custom render pass, complete the following tasks: - [Create the code](#create-code) for a custom render pass using the Scriptable Render Pass API. -- [Inject the custom render pass](#inject-pass) using the `RenderPipelineManager` API, or by [creating a Scriptable Renderer Feature](#create-srf) that you add to the URP Renderer. +- Add the custom render pass to URP's frame rendering loop by [creating a Scriptable Renderer Feature](#create-srf), or [using the `RenderPipelineManager` API](#inject-pass). ## Create the code for a custom render pass -Use the `ScriptableRenderPass` to create the code for a custom render pass. +To create the code for a custom render pass, write a class that inherits `ScriptableRenderPass`. In the class, use the [render graph API](../render-graph-introduction.md) to tell Unity what textures and render targets to use, and what operations to do on them. -Refer to [Write a Scriptable Render Pass](write-a-scriptable-render-pass.md) for more information. +Refer to [Scriptable Render Passes](scriptable-render-passes.md) for more information. -## Inject the custom render pass using the RenderPipelineManager API +## Create a Scriptable Renderer Feature -Unity raises a [beginCameraRendering](https://docs.unity3d.com/ScriptReference/Rendering.RenderPipelineManager-beginCameraRendering.html) event before it renders each active Camera in every frame. You can subscribe a method to this event, to execute your custom render pass before Unity renders the Camera. +To add your custom render pass to URP's frame rendering loop, write a class that inherits `ScriptableRendererFeature`. -Refer to [Inject a render pass via scripting](../customize/inject-render-pass-via-script.md) for more information. +The Scriptable Renderer Feature does the following: -## Create a Scriptable Renderer Feature +1. Creates an instance of the custom render pass you created. +2. Inserts the custom render pass into the rendering pipeline. -Scriptable Renderer Features control when and how the Scriptable Render Passes apply to a particular renderer or camera, and can also manage multiple Scriptable Render Passes at once. +Refer to [Inject a pass using a Scriptable Renderer Feature](scriptable-renderer-features/inject-a-pass-using-a-scriptable-renderer-feature.md) for more information. -To create a Scriptable Renderer Feature, you do the following: +## Use the RenderPipelineManager API -* Create a Scriptable Renderer Feature using the API. -* Add the Scriptable Renderer Feature to the Universal Renderer asset, so it's included in the rendering pipeline. -* Enqueue your custom render pass in the Scriptable Renderer Feature. +To add your custom render pass to URP's frame rendering loop, you can also subscribe a method to one of the events in the [RenderPipelineManager](https://docs.unity3d.com/ScriptReference/Rendering.RenderPipelineManager.html) class. + +Refer to [Inject a render pass via scripting](../customize/inject-render-pass-via-script.md) for more information. + +## Additional resources + +- [Render graph system](../render-graph-introduction.md) +- [Example of a complete Scriptable Renderer Feature](../renderer-features/create-custom-renderer-feature.md) -Refer to [Inject a pass using a Scriptable Renderer Feature](scriptable-renderer-features/inject-a-pass-using-a-scriptable-renderer-feature.md) for more information. 
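For reference, here is a minimal sketch of the render pass class the workflow above describes: it inherits `ScriptableRenderPass` and records its work in `RecordRenderGraph` using the render graph API. The class name `ExampleRenderPass` and the clear color are hypothetical illustrations, not part of the documentation changes in this PR.

```C#
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.RenderGraphModule;
using UnityEngine.Rendering.Universal;

// A minimal Scriptable Render Pass that clears the camera color target to red.
public class ExampleRenderPass : ScriptableRenderPass
{
    // Holds the data the render function needs. Empty here, because the pass
    // only clears its render target.
    class PassData { }

    public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameData)
    {
        // Get the texture references URP uses for the current frame.
        UniversalResourceData resourceData = frameData.Get<UniversalResourceData>();

        using (var builder = renderGraph.AddRasterRenderPass<PassData>("Example pass", out var passData))
        {
            // Declare the pass output: render into the camera's active color texture.
            builder.SetRenderAttachment(resourceData.activeColorTexture, 0);

            // The render function runs later, when the render graph executes the pass.
            builder.SetRenderFunc((PassData data, RasterGraphContext context) =>
            {
                context.cmd.ClearRenderTarget(false, true, Color.red);
            });
        }
    }
}
```

A Scriptable Renderer Feature can then inject the pass, for example by calling `renderer.EnqueuePass` with an instance of this class in its `AddRenderPasses` method, as described in [Inject a pass using a Scriptable Renderer Feature](scriptable-renderer-features/inject-a-pass-using-a-scriptable-renderer-feature.md).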
diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/custom-rendering-passes.md b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/custom-rendering-passes.md deleted file mode 100644 index 4834aaa9d63..00000000000 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/custom-rendering-passes.md +++ /dev/null @@ -1,11 +0,0 @@ -# Custom render passes - -Create a custom render pass in a C# script and inject it into the Universal Render Pipeline (URP) frame rendering loop. - -|Page|Description| -|-|-| -|[Custom render pass workflow in URP](custom-rendering-pass-workflow-in-urp.md)|Add and inject a custom render pass to change how URP renders a scene or the objects within a scene.| -|[Scriptable Render Passes](scriptable-render-passes.md)|Use the Scriptable Render Pass API to create a custom render pass.| -|[Scriptable Renderer Features](scriptable-renderer-features/scriptable-renderer-features-landing.md)|Use the Scriptable Renderer Feature API to inject a custom render pass into a URP renderer.| -|[Working with textures](../working-with-textures.md)|How to access and use textures in a custom render pass, including how to blit.| - diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-render-passes.md b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-render-passes.md index cc38e8984da..0957500a653 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-render-passes.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-render-passes.md @@ -1,13 +1,12 @@ # Scriptable Render Passes -Use the `ScriptableRenderPass` API to write a custom render pass. You can then inject the pass into the Universal Render Pipeline (URP) frame rendering loop using the `RenderPipelineManager` API or a Scriptable Renderer Feature. +Use the `ScriptableRenderPass` API and the render graph system to write a custom render pass. You can then inject the render pass into the Universal Render Pipeline (URP) frame rendering loop using the `RenderPipelineManager` API or a Scriptable Renderer Feature. 
|Page|Description|
|-|-|
|[Introduction to Scriptable Render Passes](intro-to-scriptable-render-passes.md)|What a Scriptable Render Pass is, and how you can inject it into a scene.|
-|[Write a Scriptable Render Pass](write-a-scriptable-render-pass.md)|An example of a `ScriptableRenderPass` instance that uses `Blit` to create a red tint effect.|
-|[Inject a pass via scripting](../customize/inject-render-pass-via-script.md)|Use the `RenderPipelineManager` API to inject a render pass, without using a Scriptable Renderer Feature.|
+|[Render graph system](../render-graph.md)|The render graph system is a set of APIs that you can use to write Scriptable Render Passes.|

## Additional resources

-- [Inject a pass using a Scriptable Renderer Feature](scriptable-renderer-features/inject-a-pass-using-a-scriptable-renderer-feature.md)
\ No newline at end of file
+- [Adding a Scriptable Render Pass to the frame rendering loop](../inject-a-render-pass.md)
diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/inject-a-pass-using-a-scriptable-renderer-feature.md b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/inject-a-pass-using-a-scriptable-renderer-feature.md
index 61aa233ef75..1f965c33f4e 100644
--- a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/inject-a-pass-using-a-scriptable-renderer-feature.md
+++ b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/inject-a-pass-using-a-scriptable-renderer-feature.md
@@ -1,161 +1,98 @@
-# Inject a pass using a Scriptable Renderer Feature
+# Inject a render pass using a Scriptable Renderer Feature

-This section describes how to create a [Scriptable Renderer Feature](intro-to-scriptable-renderer-features.md) for a URP Renderer. A Scriptable Renderer Feature enqueues a `ScriptableRenderPass` instance every frame.
+Use the `ScriptableRendererFeature` API to insert a [Scriptable Render Pass](../../renderer-features/scriptable-render-passes.md) into the Universal Render Pipeline (URP) frame rendering loop.

-You need to [write a Scriptable Render Pass](../write-a-scriptable-render-pass.md) first.
+Follow these steps:

-This walkthrough contains the following sections:
+1. Create a new C# script.

-* [Create a scriptable Renderer Feature](#scriptable-renderer-feature)
-* [Add the Renderer Feature to the the Universal Renderer asset](#add-renderer-feature-to-asset)
-* [Enqueue the render pass in the custom renderer feature](#enqueue-the-render-pass-in-the-custom-renderer-feature)
-* [Complete code for the scripts in this example](#code-renderer-feature)
-
-## Create a scriptable Renderer Feature
-
-1. Create a new C# script and name it `MyRendererFeature.cs`.
-
-2. In the script, remove the code that Unity inserted in the `MyRendererFeature` class.
-
-3. Add the following `using` directive:
+2. Replace the code with a class that inherits from the `ScriptableRendererFeature` class.

     ```C#
-    using UnityEngine.Rendering;
+    using UnityEngine;
     using UnityEngine.Rendering.Universal;
+
+    public class MyRendererFeature : ScriptableRendererFeature
+    {
+    }
     ```

-3. Create the `MyRendererFeature` class that inherits from the **ScriptableRendererFeature** class.
+3. In the class, override the `Create` method. For example:

     ```C#
-    public class MyRendererFeature : ScriptableRendererFeature
+    public override void Create()
+    {
+    }
     ```

-4. 
In the `MyRendererFeature` class, implement the following methods: - - * `Create`: Unity calls this method on the following events: - - * When the Renderer Feature loads the first time. + URP calls the `Create` methods on the following events: - * When you enable or disable the Renderer Feature. + - When the Scriptable Renderer Feature loads the first time. + - When you enable or disable the Scriptable Renderer Feature. + - When you change a property in the **Inspector** window of the Renderer Feature. - * When you change a property in the inspector of the Renderer Feature. - * `AddRenderPasses`: Unity calls this method every frame, once for each camera. This method lets you inject `ScriptableRenderPass` instances into the scriptable Renderer. +4. In the `Create` method, create an instance of your Scriptable Render Pass, and inject it into the renderer. -Now you have the custom `MyRendererFeature` Renderer Feature with its main methods. + For example, if you have a Scriptable Render Pass called `RedTintRenderPass`: -Below is the complete code for this step. - -```C# -using System.Collections; -using System.Collections.Generic; -using UnityEngine; -using UnityEngine.Rendering.Universal; + ```c# + // Define an instance of the Scriptable Render Pass + private RedTintRenderPass redTintRenderPass; -public class MyRendererFeature : ScriptableRendererFeature -{ public override void Create() { + // Create an instance of the Scriptable Render Pass + redTintRenderPass = new RedTintRenderPass(); + // Inject the render pass after rendering the skybox + redTintRenderPass.renderPassEvent = RenderPassEvent.AfterRenderingSkybox; } - - public override void AddRenderPasses(ScriptableRenderer renderer, - ref RenderingData renderingData) - { - - } -} -``` - -### Add the Renderer Feature to the Universal Renderer asset - -Add the Renderer Feature you created to the the Universal Renderer asset. For information on how to do this, refer to the page [How to add a Renderer Feature to a Renderer](../../urp-renderer-feature-how-to-add.md). - -## Enqueue a render pass in the custom renderer feature - -In this section, you instantiate a render pass in the `Create` method of the `MyRendererFeature` class, and enqueue it in the `AddRenderPasses` method. - -This section uses the example `RedTintRenderPass` Scriptable Render Pass from the [Write a Scriptable Render Pass](../write-a-scriptable-render-pass.md) page. - -1. Declare the following fields: - - ```C# - [SerializeField] private Shader shader; - private Material material; - private RedTintRenderPass redTintRenderPass; ``` -1. In the `Create` method, instantiate the `RedTintRenderPass` class. - - In the method, use the `renderPassEvent` field to specify when to execute the render pass. +5. Override the `AddRenderPasses` method. ```C# - public override void Create() + public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData) { - if (shader == null) - { - return; - } - material = CoreUtils.CreateEngineMaterial(shader); - redTintRenderPass = new RedTintRenderPass(material); - - renderPassEvent = RenderPassEvent.AfterRenderingSkybox; } ``` -2. In the `AddRenderPasses` method, enqueue the render pass with the `EnqueuePass` method. + URP calls the `AddRenderPasses` method every frame, once for each camera. - ```C# +6. Use the `EnqueuePass` API to inject the Scriptable Render Pass into the frame rendering loop. 
+ + ```c# public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData) { - if (renderingData.cameraData.cameraType == CameraType.Game) - { - renderer.EnqueuePass(redTintRenderPass); - } + renderer.EnqueuePass(redTintRenderPass); } ``` -## Custom Renderer Feature code +You can now add the Scriptable Renderer Feature to the active URP asset. Refer to [How to add a Renderer Feature to a Renderer](../../urp-renderer-feature-how-to-add.md) for more information. -Below is the complete code for the custom Renderer Feature script. +## Example + +The following is the complete example code of a Scriptable Renderer Feature, using a Scriptable Render Pass called `RedTintRenderPass`. ```C# -using System; -using UnityEditor; using UnityEngine; using UnityEngine.Rendering; using UnityEngine.Rendering.Universal; public class MyRendererFeature : ScriptableRendererFeature { - [SerializeField] private Shader shader; - private Material material; private RedTintRenderPass redTintRenderPass; public override void Create() { - if (shader == null) - { - return; - } - material = CoreUtils.CreateEngineMaterial(shader); - redTintRenderPass = new RedTintRenderPass(material); - + redTintRenderPass = new RedTintRenderPass(); redTintRenderPass.renderPassEvent = RenderPassEvent.AfterRenderingSkybox; } - public override void AddRenderPasses(ScriptableRenderer renderer, - ref RenderingData renderingData) - { - if (renderingData.cameraData.cameraType == CameraType.Game) - { - renderer.EnqueuePass(redTintRenderPass); - } - } - public override void Dispose(bool disposing) + public override void AddRenderPasses(ScriptableRenderer renderer, ref RenderingData renderingData) { - CoreUtils.Destroy(material); + renderer.EnqueuePass(redTintRenderPass); } } - ``` diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/scriptable-render-pass-reference.md b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/scriptable-render-pass-reference.md new file mode 100644 index 00000000000..6b91d5fd36f --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/scriptable-render-pass-reference.md @@ -0,0 +1,16 @@ +## Scriptable Render Pass Compatibility Mode API reference + +You can use the following methods within a Scriptable Render Pass to handle its core functions, if you enable **Compatibility Mode (Render Graph Disabled)** in [URP graphics settings](../../urp-global-settings.md). + +> **Note**: Unity no longer develops or improves the rendering path that doesn't use the render graph API. Use the render graph API instead when developing new graphics features. + +| **Method** | **Description** | +| ---------- | --------------- | +| `Execute` | Use this method to implement the rendering logic for the Scriptable Renderer Feature.

**Note**: You do not need to call `ScriptableRenderContext.Submit`. URP handles this and calls it at specific points in the pipeline. | +| `OnCameraCleanup` | Use this method to clean up any resources that were allocated during the render pass. | +| `OnCameraSetup` | Use this method to configure render targets and their clear state. You can also use it to create temporary render target textures.

**Note**: When this method is empty, the render pass renders to the active camera render target. | + +## Additional resources + +* [Scriptable Render Passes](../intro-to-scriptable-render-passes.md) +* [How to create a Custom Renderer Feature](../create-custom-renderer-feature.md) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/scriptable-renderer-feature-reference.md b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/scriptable-renderer-feature-reference.md index 6adb4aeb6d4..dc71f6fa089 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/scriptable-renderer-feature-reference.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/scriptable-renderer-feature-reference.md @@ -1,13 +1,4 @@ -# Scriptable Renderer Feature Reference - -When working with Scriptable Renderer Features and Scriptable Render Passes there are predefined methods that you need to implement for URP to call at specific points in the pipeline. - -The following sections summarize the common methods used to write Scriptable Renderer Features and Scriptable Render Passes: - -* [Scriptable Renderer Feature Methods](#scriptable-renderer-feature-methods) -* [Scriptable Render Pass Methods](#scriptable-render-pass-methods) - -## Scriptable Renderer Feature Methods +# Scriptable Renderer Feature API reference You can use the following methods within a Scriptable Renderer Feature to handle its core functions. For more information on Scriptable Renderer Feature scripting and further details on the methods listed below, refer to [ScriptableRendererFeature](xref:UnityEngine.Rendering.Universal.ScriptableRendererFeature). @@ -18,16 +9,6 @@ You can use the following methods within a Scriptable Renderer Feature to handle | `Dispose` | Use this method to clean up the resources allocated to the Scriptable Renderer Feature such as Materials. | | `SetupRenderPasses` | Use this method to run any setup the Scriptable Render Passes require. For example, you can set the initial values of properties, or run custom setup methods from your Scriptable Render Passes.

If your Scriptable Renderer Feature accesses camera targets to set up its Scriptable Render Passes, do it in this method instead of in the `AddRenderPasses` method. | -## Scriptable Render Pass Methods - -You can use the following methods within a Scriptable Renderer Pass to handle its core functions. For further information on Scriptable Render Pass scripting and further details on the methods listed below, refer to [ScriptableRenderPass](xref:UnityEngine.Rendering.Universal.ScriptableRenderPass). - -| **Method** | **Description** | -| ---------- | --------------- | -| `Execute` | Use this method to implement the rendering logic for the Scriptable Renderer Feature.

**Note**: You do not need to call `ScriptableRenderContext.submit`, URP handles this and calls it at specific points in the pipeline. | -| `OnCameraCleanup` | Use this method to clean up any resources that were allocated during the render pass. | -| `OnCameraSetup` | Use this method to configure render targets and their clear state. You can also use it to create temporary render target textures.

**Note**: When this method is empty, the render pass will render to the active camera render target. | - ## Additional resources * [Introduction to Scriptable Renderer Features](./intro-to-scriptable-renderer-features.md) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/scriptable-renderer-features-landing.md b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/scriptable-renderer-features-landing.md index d4dfc6ac915..c4e996fa7e5 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/scriptable-renderer-features-landing.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/scriptable-renderer-features-landing.md @@ -8,3 +8,4 @@ Scriptable Renderer Features are components you can add to a renderer to alter h |[Inject a custom pass using a Scriptable Renderer Feature](inject-a-pass-using-a-scriptable-renderer-feature.md)|Create a Scriptable Renderer Feature, add it to the Universal Renderer, and enqueue a render pass.| |[Apply a Scriptable Renderer Feature to a specific camera type](apply-scriptable-feature-to-specific-camera.md)|Control which cameras the effect of a Scriptable Renderer Feature applies to.| |[Example of a complete Scriptable Renderer Feature](../create-custom-renderer-feature.md)|An example of a complete Scriptable Renderer Feature with a Scriptable Render Pass that creates a blur effect.| +|[Scriptable Renderer Feature API reference](scriptable-renderer-feature-reference.md)|API reference for the Scriptable Renderer Feature class.| diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/write-a-scriptable-render-pass.md b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/write-a-scriptable-render-pass.md index a4c96defdbf..5eba4ac4f5d 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/write-a-scriptable-render-pass.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/write-a-scriptable-render-pass.md @@ -1,11 +1,15 @@ -# Write a Scriptable Render Pass +# Write a Scriptable Render Pass in Compatibility Mode + +If you enable **Compatibility Mode (Render Graph Disabled)** in [URP graphics settings](../urp-global-settings.md), you can write a Scriptable Render Pass without using the [render graph API](../render-graph.md). + +> **Note**: Unity no longer develops or improves the rendering path that doesn't use the render graph API. Use the render graph API instead when developing new graphics features. The following example is a `ScriptableRenderPass` instance that performs the following steps: 1. Creates a temporary render texture using the `RenderTextureDescriptor` API. 2. Applies two passes of the [custom shader](#example-shader) to the camera output using the `RTHandle` and the `Blit` API. 
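Before the step-by-step breakdown, the following is a minimal sketch of how a Compatibility Mode pass of this shape fits together. It is an illustration only: the `ExampleBlitPass` class name, the `_ExampleTempColor` texture name, and the assumption that the material contains two passes are not part of this page's example.

```C#
using UnityEngine;
using UnityEngine.Rendering;
using UnityEngine.Rendering.Universal;

// Minimal Compatibility Mode pass sketch: allocate a temporary color target in
// OnCameraSetup, blit through it in Execute, and release it when the owning
// Scriptable Renderer Feature is disposed.
public class ExampleBlitPass : ScriptableRenderPass
{
    Material m_Material;   // supplied by the Scriptable Renderer Feature
    RTHandle m_TempColor;  // temporary render texture, reallocated when the camera resizes

    public ExampleBlitPass(Material material)
    {
        m_Material = material;
        renderPassEvent = RenderPassEvent.AfterRenderingSkybox;
    }

    public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderingData)
    {
        // Build the temporary texture from the camera's descriptor, without a depth buffer.
        RenderTextureDescriptor descriptor = renderingData.cameraData.cameraTargetDescriptor;
        descriptor.depthBufferBits = 0;
        RenderingUtils.ReAllocateIfNeeded(ref m_TempColor, descriptor, name: "_ExampleTempColor");
    }

    public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData)
    {
        CommandBuffer cmd = CommandBufferPool.Get("ExampleBlitPass");
        RTHandle cameraTarget = renderingData.cameraData.renderer.cameraColorTargetHandle;

        // Apply the material's two passes: camera -> temporary, then temporary -> camera.
        Blit(cmd, cameraTarget, m_TempColor, m_Material, 0);
        Blit(cmd, m_TempColor, cameraTarget, m_Material, 1);

        context.ExecuteCommandBuffer(cmd);
        CommandBufferPool.Release(cmd);
    }

    // Call this from the owning feature's Dispose method to free the RTHandle.
    public void Dispose()
    {
        m_TempColor?.Release();
    }
}
```

The sketch follows the same structure this page builds out as `RedTintRenderPass`: allocate in `OnCameraSetup`, blit in `Execute`, and free the `RTHandle` from the owning Renderer Feature's `Dispose`.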
-After you write a Scriptable Render Pass, you can inject the pass using one of the following methods: +After you write a Scriptable Render Pass, you can inject the render pass using one of the following methods: - [Use the `RenderPipelineManager` API](../customize/inject-render-pass-via-script.md) - [Use a Scriptable Renderer Feature](scriptable-renderer-features/inject-a-pass-using-a-scriptable-renderer-feature.md) @@ -102,7 +106,7 @@ public class RedTintRenderPass : ScriptableRenderPass } ``` -5. Use the Blit method to apply the two passes from the custom shader to the camera output. +5. Use the Blit method to apply the two render passes from the custom shader to the camera output. ```C# public override void Execute(ScriptableRenderContext context, diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/choose-a-lens-flare-type.md b/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/choose-a-lens-flare-type.md new file mode 100644 index 00000000000..0768cee9a82 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/choose-a-lens-flare-type.md @@ -0,0 +1,35 @@ +# Choose a lens flare type + +You can add the following types of lens flares: + +- [Lens flares](lens-flare-component.md) - use a **Lens Flare (SRP)** component to create lens flares for lights that have specific locations in your scene, for example the sun or bright bulbs. +- [Screen space lens flares](post-processing-screen-space-lens-flare.md) - use a **Screen Space Lens Flare** override to create lens flares for emissive surfaces, bright spots, and onscreen lights. + +You can use both types in the same scene. + +Use the following table to help you choose a lens flare type: + +| Feature | Lens Flare (SRP) component | Screen Space Lens Flare override | +|-|-|-| +| Typical uses | Lens flares from the sun and specific lights, custom flare shapes, and cinematics | Lens flares on vehicles and water, first-person games, and science-fiction environments | +| Supported platforms | All platforms | All platforms | +| CPU and GPU use | CPU and GPU | GPU | +| Types of light | All Light objects, except Area Lights | All bright spots and visible lights | +| Placement | Attach to individual lights. Place lens flares manually | Generate inside a volume. Place all lens flares automatically with a single setting | +| Lens flares from offscreen lights | Yes | No | +| Light streaks | No, unless you create them manually | Yes | +| Configure flares | Configure per lens flare or per element | Configure for all lens flares together | +| Configure flare elements | Configure many settings for each element, per lens flare | Configure several settings for elements, for all lens flares together | +| Configure attenuation | Yes | No | +| Affected by the environment | Yes | Yes | +| Preserve aspect ratio | Yes | No | +| Chromatic aberration | No | Yes | +| Blend modes | Additive, Lerp, Premultiplied and Screen | Additive only | +| Occlusion | Screen space occlusion, and geometric occlusion for offscreen lights. Configurable. Occlusion might not always work at the edge of the screen. | Screen space occlusion, generated from the color buffer. 
Not configurable. | +| Examples in [package samples](../../package-samples.md) | Yes | No | + +## Additional resources + +- [Lens Flare (SRP) reference](lens-flare-srp-reference.md) +- [Lens Flare (SRP) Data Asset reference](lens-flare-asset.md) +- [Screen Space Lens Flare override reference](reference-screen-space-lens-flare.md) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/lens-flare-asset.md b/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/lens-flare-asset.md index d952f48fd56..a2093bfb33d 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/lens-flare-asset.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/lens-flare-asset.md @@ -4,7 +4,7 @@ Unity’s [Scriptable Render Pipeline (SRP)](https://docs.unity3d.com/Manual/Scr For examples of how to use Lens Flares, refer to the [Lens Flare samples in URP Package Samples](../../package-sample-urp-package-samples.md#lens-flares). -To create a Lens Flare Data asset, select **Assets > Create > Lens Flare (SRP)**. To use this asset, assign it to the **Lens Flare Data** property of a [Lens Flare (SRP) component](lens-flare-component.md). +To create a Lens Flare Data asset, select **Assets** > **Create** > **Lens Flare (SRP)**. To use this asset, assign it to the **Lens Flare Data** property of a [Lens Flare (SRP) component](lens-flare-component.md). ## Properties @@ -38,8 +38,6 @@ The Lens Flare Element asset has the following properties: #### Image -![](../../images/shared/lens-flare/LensFlareShapeImage.png) - | **Property** | **Description** | | --------------------- | ------------------------------------------------------------ | | Flare Texture | The Texture this lens flare element uses. | @@ -49,8 +47,6 @@ The Lens Flare Element asset has the following properties: #### Circle -![](../../images/shared/lens-flare/LensFlareShapeCircle.png) - | **Property** | **Description** | | ------------ | ------------------------------------------------------------ | | Gradient | Controls the offset of the circular flare's gradient. This value ranges from 0 to 1. | @@ -61,8 +57,6 @@ The Lens Flare Element asset has the following properties: #### Polygon -![](../../images/shared/lens-flare/LensFlareShapePolygon.png) - | **Property** | **Description** | | ------------ | ------------------------------------------------------------ | | Gradient | Controls the offset of the polygon flare's gradient. This value ranges from 0 to 1. | @@ -75,8 +69,6 @@ The Lens Flare Element asset has the following properties: #### Ring -![](images/LensFlareShapeRing.png) - | **Property** | **Description** | | --------------- | -------------------------------------------------------------- | | Gradient | Controls the offset of the circular flare's gradient. This value ranges from 0 to 1. | @@ -91,8 +83,6 @@ The Lens Flare Element asset has the following properties: #### Lens Flare Data Driven SRP -![](images/LensFlareShapeLensFlareDataSRP.png) - | **Property** | **Description** | | --------------- | ------------------------------------------------------------ | | Asset | Lens Flare Data SRP asset as an element. 
| @@ -108,8 +98,6 @@ That will trigger a warning and execution 16 recursions: ## Color -![](../../images/shared/lens-flare/LensFlareColor.png) - | **Property** | **Description** | | ----------------------- | ------------------------------------------------------------ | | Color Type | Select the color type of Lens Flare Element this asset creates:
• [Constant](#ColorConstant)
• [Radial](#ColorRadial)
• [Angular](#ColorAngular) | @@ -122,8 +110,6 @@ That will trigger a warning and execution 16 recursions: ### Constant Color -![](../../images/shared/lens-flare/LensFlareColorConstant.png) - | **Property** | **Description** | | ----------------------- | ------------------------------------------------------------ | | Tint | Changes the tint of the lens flare. If this asset is attached to the light, this property is based on the light tint. | @@ -132,8 +118,6 @@ That will trigger a warning and execution 16 recursions: ### Constant Color -![](../../images/shared/lens-flare/LensFlareColorRadialGradient.png) - | **Property** | **Description** | | ----------------------- | ------------------------------------------------------------ | | Tint Radial | Specifies the radial gradient tint of the element. If the element type is set to Image, the Flare Texture is multiplied by this color. | @@ -142,8 +126,6 @@ That will trigger a warning and execution 16 recursions: ### Constant Color -![](../../images/shared/lens-flare/LensFlareColorAngularGradient.png) - | **Property** | **Description** | | ----------------------- | ------------------------------------------------------------ | | Tint Angular | Specifies the angular gradient tint of the element. If the element type is set to Image, the Flare Texture is multiplied by this color. | @@ -156,8 +138,6 @@ That will trigger a warning and execution 16 recursions: ### Cutoff -![](../../images/shared/lens-flare/LensFlareCutoff.png) - | **Property** | **Description** | | --- | --- | | Cutoff Speed | Sets the speed at which the radius occludes the element.
A value of zero (with a large radius) does not occlude anything. The higher this value, the faster the element is occluded on the side of the screen.
The effect of this value is more noticeable with multiple elements. | @@ -169,8 +149,6 @@ That will trigger a warning and execution 16 recursions: ### Transform -![](../../images/shared/lens-flare/LensFlareTransform.png) - | **Property** | **Description** | | ----------------------- | ------------------------------------------------------------ | | Position Offset | Defines the offset of the lens flare's position in screen space, relative to its source. | @@ -184,8 +162,6 @@ That will trigger a warning and execution 16 recursions: ### Axis Transform -![](../../images/shared/lens-flare/LensFlareAxisTransform.png) - | **Property** | **Description** | | ----------------- | ------------------------------------------------------------ | | Starting Position | Defines the starting position of the lens flare relative to its source. This value operates in screen space. | @@ -196,8 +172,6 @@ That will trigger a warning and execution 16 recursions: ### Distortion -![](../../images/shared/lens-flare/LensFlareRadialDistortion.png) - | **Property** | **Description** | | --------------- | ------------------------------------------------------------ | | Enable | Set this property to True to enable distortion. | @@ -218,8 +192,6 @@ That will trigger a warning and execution 16 recursions: | Relative To Center | If true the distortion is relative to center of the screen otherwise relative to lensFlare source screen position. | #### Uniform -![](../../images/shared/lens-flare/LensFlareMultileElementUniform.png) - | **Property** | **Description** | | --------------- | ------------------------------------------------------------ | | Colors | The range of colors that this asset applies to the lens flares. | @@ -229,8 +201,6 @@ That will trigger a warning and execution 16 recursions: #### Curve -![](../../images/shared/lens-flare/LensFlareMultileElementCurve.png) - | **Property** | **Description** | | ---------------- | ------------------------------------------------------------ | | Colors | The range of colors that this asset applies to the lens flares. You can use the **Position Spacing** curve to determine how this range affects each lens flare. | @@ -242,8 +212,6 @@ That will trigger a warning and execution 16 recursions: #### Random -![](../../images/shared/lens-flare/LensFlareMultileElementRandom.png) - | **Property** | **Description** | | ------------------- | ------------------------------------------------------------ | | Seed | The base value that this asset uses to generate randomness. | diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/lens-flare-component.md b/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/lens-flare-component.md index 8e0009e8b72..4c9eac85351 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/lens-flare-component.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/lens-flare-component.md @@ -1,4 +1,4 @@ -# Lens Flare (SRP) component +# Add lens flares ![](../../images/shared/lens-flare/lens-flare-header.png) @@ -19,30 +19,7 @@ To create a lens flare in a scene: 5. In the Lens Flare (SRP) component Inspector, assign the new Lens Flare (SRP) Data asset to the **Lens Flare Data** property. 6. Select the Lens Flare (SRP) Data asset and, in the Inspector, add a new element to the **Elements** list. A default white lens flare now renders at the position of the Lens Flare (SRP) component. 
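You can also perform the same setup from a script. The following is a minimal sketch, assuming you assign a Lens Flare (SRP) Data asset in the Inspector; the `AddLensFlareExample` class and the `flareData` field are illustrative names, not part of the Unity API:

```C#
using UnityEngine;
using UnityEngine.Rendering;

// Minimal sketch: add a Lens Flare (SRP) component to a light from a script.
// `flareData` is a Lens Flare (SRP) Data asset you assign in the Inspector.
public class AddLensFlareExample : MonoBehaviour
{
    [SerializeField] LensFlareDataSRP flareData;

    void Start()
    {
        // Attach the component to this GameObject, for example a Directional Light.
        var flare = gameObject.AddComponent<LensFlareComponentSRP>();
        flare.lensFlareData = flareData; // the asset created with Assets > Create > Lens Flare (SRP)
        flare.intensity = 1.0f;          // multiplies the intensity of the lens flare
        flare.scale = 1.0f;              // multiplies the scale of the lens flare
    }
}
```

As with the manual steps above, the flare only renders once the assigned Lens Flare (SRP) Data asset has at least one element in its **Elements** list.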
For information on how to customize how the lens flare looks, refer to [Lens Flare (SRP) Data](lens-flare-asset.md). -## Properties - -### General - -| **Property** | **Description** | -| --------------- | ------------------------------------------------------------ | -| Lens Flare Data | Select the [Lens Flare (SRP) Data](lens-flare-asset.md) asset this component controls. | -| Intensity | Multiplies the intensity of the lens flare. | -| Scale | Multiplies the scale of the lens flare. | -| Light Override | Specifies the light component where the color and shape values are fetched from when using "Modulate By Light Color" or "Attenuation By Light Shape" properties on a Lens Flare Element. If nothing is specified, the light component from this gameobject is used. | -| Attenuation by Light Shape | Enable this property to automatically change the appearance of the lens flare based on the type of light you attached this component to.
For example, if this component is attached to a spot light and the camera is looking at this light from behind, the lens flare will not be visible.
This property is only available when this component is attached to a light. | -| Attenuation Distance |The distance between the start and the end of the Attenuation Distance Curve.
This value operates between 0 and 1 in world space. | -| Attenuation Distance Curve | Fades out the appearance of the lens flare over the distance between the GameObject this asset is attached to, and the Camera. | -| Scale Distance | The distance between the start and the end of the **Scale Distance Curve**.
This value operates between 0 and 1 in world space. | -| Scale Distance Curve | Changes the size of the lens flare over the distance between the GameObject this asset is attached to, and the Camera. | -| Screen Attenuation Curve | Reduces the effect of the lens flare based on its distance from the edge of the screen. You can use this to display a lens flare at the edge of your screen | - -### Occlusion - -| **Property** | **Description** | -| --------------- | ------------------------------------------------------------ | -| Enable | Enable this property to partially obscure the lens flare based on the depth buffer | -| Occlusion Radius | Defines how far from the light source Unity occludes the lens flare. This value is in world space. | -| Sample Count | The number of random samples the CPU uses to generate the **Occlusion Radius.** | -| Occlusion Offset | Offsets the plane that the occlusion operates on. A higher value moves this plane closer to Camera. This value is in world space.
For example, if a lens flare is inside the light bulb, you can use this to sample occlusion outside the light bulb. | -| Occlusion Remap Curve | Allow the occlusion [from 0 to 1] to be remap with any desired shape. | -| Allow Off Screen | Enable this property to allow lens flares outside the Camera's view to affect the current field of view. | +Refer to the following for more information: + +- [Lens Flare (SRP) reference](lens-flare-srp-reference.md) +- [Lens Flare (SRP) Data Asset reference](lens-flare-asset.md) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/lens-flare-srp-reference.md b/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/lens-flare-srp-reference.md new file mode 100644 index 00000000000..9727462a8b2 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/lens-flare-srp-reference.md @@ -0,0 +1,29 @@ +# Lens Flare (SRP) component reference + +Refer to [Add lens flares](lens-flare-component.md) for information on how to use the Lens Flare (SRP) component. + +## General + +| **Property** | **Description** | +| --------------- | ------------------------------------------------------------ | +| Lens Flare Data | Select the [Lens Flare (SRP) Data](lens-flare-asset.md) asset this component controls. | +| Intensity | Multiplies the intensity of the lens flare. | +| Scale | Multiplies the scale of the lens flare. | +| Light Override | Specifies the light component Unity gets the color and shape values from, if you enable **Modulate By Light Color** or **Attenuation By Light Shape**. If you don't specify a light component, Unity uses the Light component from this GameObject. | +| Attenuation by Light Shape | Enable this property to automatically change the appearance of the lens flare based on the type of light you attached this component to.
For example, if this component is attached to a spot light and the camera is looking at this light from behind, the lens flare is not visible.
This property is only available when this component is attached to a light. | +| Attenuation Distance | The distance between the start and the end of the **Attenuation Distance Curve**.
This value operates between 0 and 1 in world space. | +| Attenuation Distance Curve | Fades out the appearance of the lens flare over the distance between the GameObject this asset is attached to, and the Camera. | +| Scale Distance | The distance between the start and the end of the **Scale Distance Curve**.
This value operates between 0 and 1 in world space. | +| Scale Distance Curve | Changes the size of the lens flare over the distance between the GameObject this asset is attached to, and the Camera. | +| Screen Attenuation Curve | Reduces the effect of the lens flare based on its distance from the edge of the screen. You can use this to display a lens flare at the edge of your screen. | + +## Occlusion + +| **Property** | **Description** | +| --------------- | ------------------------------------------------------------ | +| Enable | Enable this property to partially obscure the lens flare based on the depth buffer. | +| Occlusion Radius | Defines how far from the light source Unity occludes the lens flare. This value is in world space. | +| Sample Count | The number of random samples the CPU uses to generate the **Occlusion Radius**. | +| Occlusion Offset | Offsets the plane that the occlusion operates on. A higher value moves this plane closer to the Camera. This value is in world space.
For example, if a lens flare is inside the light bulb, you can use this to sample occlusion outside the light bulb. | +| Occlusion Remap Curve | Allows the occlusion value (from 0 to 1) to be remapped with any desired shape. | +| Allow Off Screen | Enable this property to allow lens flares outside the Camera's view to affect the current field of view. | diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/lens-flare.md b/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/lens-flare.md new file mode 100644 index 00000000000..7fb4f6878f3 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/lens-flare.md @@ -0,0 +1,12 @@ +# Lens flares + +Lens flares simulate the effect of lights refracting inside a camera lens. Use lens flares to represent bright lights, or to add atmosphere to a scene. + +|Page|Description| +|-|-| +| [Choose a lens flare type](choose-a-lens-flare-type.md) | Understand the differences between lens flares and screen space lens flares. | +| [Add lens flares](lens-flare-component.md) | Use the Lens Flare (SRP) component to create lens flares for lights that have specific locations in your scene, for example bright bulbs. | +| [Add screen space lens flares](post-processing-screen-space-lens-flare.md) | Use the Screen Space Lens Flare override to create lens flares for emissive surfaces, bright spots in your scene that appear depending on the camera view, and all onscreen lights. | +| [Lens Flare (SRP) reference](lens-flare-srp-reference.md) | Reference for the Lens Flare (SRP) component. | +| [Lens Flare (SRP) Data Asset reference](lens-flare-asset.md) | Reference for the Lens Flare (SRP) Data Asset. | +| [Screen Space Lens Flare override reference](reference-screen-space-lens-flare.md) | Reference for the Screen Space Lens Flare override. | diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/post-processing-screen-space-lens-flare.md b/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/post-processing-screen-space-lens-flare.md index 63f214f205c..9beb5db0f3b 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/post-processing-screen-space-lens-flare.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/post-processing-screen-space-lens-flare.md @@ -1,4 +1,4 @@ -# Screen Space Lens Flare +# Add screen space lens flares ![](../../Images/shared/lens-flare/screenspacelensflaresurp.png) @@ -47,51 +47,3 @@ To add **Screen Space Lens Flare** to a Volume: Some lens flares only appear, or only appear at full intensity, if you enable High Dynamic Range (HDR) rendering on your camera. To enable HDR, refer to [the **Output** section of the Camera component reference](../../camera-component-reference.md#output). -## Properties - -| **Property** | **Description** | -| - | - | -| **Intensity** | Set the strength of all the types of lens flares. If the value is 0, URP doesn't calculate or render any lens flares. The default is 0. | -| **Tint Color** | Set the color URP uses to tint all the types of lens flares. The default is white. | -| **Bloom Mip Bias** | Set the mipmap level URP uses to sample the Bloom pyramid and create the lens flares. The higher the mipmap level, the smaller and more pixelated the sample source, and the blurrier the result. The range is 0 through 5. 0 is the full-resolution mipmap level. The default is 1. 
Refer to [Mipmaps introduction](https://docs.unity3d.com/2023.1/Documentation/Manual/texture-mipmaps-introduction.html) for more information. This property only appears if you open the **More** (⋮) menu and select **Show Additional Properties**. | - -### Flares - -Use the **Flares** settings to control regular flares, reversed flares and warped flares. - -| **Property** || **Description** | -|-|-|-| -| **Regular Multiplier** || Set the strength of regular flares. If the value is 0, URP doesn't calculate or render regular flares. The default is 1. | -| **Reversed Multiplier** || Set the strength of reversed flares. If the value is 0, URP doesn't calculate or render reversed flares. The default is 1. | -| **Warped Multipler** || Set the strength of warped flares. If the value is 0, URP doesn't calculate or render warped flares. The default is 1. | -|| **Scale** | Scale the width (**x**) and height (**y**) of warped flares. The defaults are 1. This property only appears if you open the **More** (⋮) menu and select **Show Additional Properties**. | -| **Samples** || Set the number of times URP repeats the regular, reversed and warped flares. The range is 1 through 3. The default is 1. Increasing **Samples** has a big impact on performance. | -|| **Sample Dimmer** | Set the strength of the lens flares URP adds if you set **Samples** to 2 or 3. The higher the value, the less intense the flares. This property only appears if you open the **More** (⋮) menu and select **Show Additional Properties**. | -| **Vignette Effect** || Set the strength of the regular, reversed and warped flares in a circular area in the center of the screen. Use **Vignette Effect** to avoid lens flare obscuring the scene too much. The default value is 1, which means URP doesn't render flares at the center of the screen. | -| **Starting Position** || Control how far the position of the regular, reversed and warped flares differ from the bright area they're sampled from, in metres. If the value is 0, URP places the lens flares at the same position as the bright areas they're sampled from. The range is 1 through 3. The default is 1.25. | -| **Scale** || Set the size of regular, reversed and warped lens flares. The range is 1 through 4. The default is 1.5. | - -### Streaks - -Use the **Streaks** settings to control flares stretched in one direction. - -| **Property** || **Description** | -|-|-|-| -| **Multiplier** || Set the strength of streaks. If the value is 0, URP doesn't calculate or render streaks. The default is 1. | -|| **Length** | Set the length of streaks. The range is 0 through 1. 1 is the approximate width of the screen. The default value is 0.5. | -|| **Orientation** | Set the angle of streaks, in degrees. The default value is 0, which creates horizontal streaks. | -|| **Threshold** | Control how localized the streak effect is. The higher the **Threshold**, the more localized the effect. The range is 0 through 1. The default value is 0.25. | -|| **Resolution** | Control the resolution detail of streaks. URP renders lower-resolution streaks faster. The options are **Half**, **Quarter** and **Eighth** full resolution. This property only appears if you open the **More** (⋮) menu and select **Show Additional Properties**. | - -![](../../Images/shared/lens-flare/screenspacelensflares-threshold.gif)
-The effect of changing **Threshold** from 0 (a larger flare effect) to 1 (a smaller flare effect). - -### Chromatic Aberration - -Use the **Chromatic Aberration** settings to control chromatic aberration on all the lens flare types. Chromatic aberration splits light into its color components, which mimics the effect that a real-world camera produces when its lens fails to join all colors to the same point. - -The chromatic aberration effect is strongest at the edges of the screen, and decreases in strength towards the center of the screen. - -| **Property** | **Description** | -|-|-| -| **Intensity** | Set the strength of the chromatic aberration effect. If the value is 0, URP doesn't split the colors. | diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/reference-screen-space-lens-flare.md b/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/reference-screen-space-lens-flare.md new file mode 100644 index 00000000000..5d386ea61ff --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/shared/lens-flare/reference-screen-space-lens-flare.md @@ -0,0 +1,52 @@ +# Screen Space Lens Flare override reference + +Refer to [Add screen space lens flares](post-processing-screen-space-lens-flare.md) for more information. + +## Properties + +| **Property** | **Description** | +| - | - | +| **Intensity** | Set the strength of all the types of lens flares. If the value is 0, URP doesn't calculate or render any lens flares. The default is 0. | +| **Tint Color** | Set the color URP uses to tint all the types of lens flares. The default is white. | +| **Bloom Mip Bias** | Set the mipmap level URP uses to sample the Bloom pyramid and create the lens flares. The higher the mipmap level, the smaller and more pixelated the sample source, and the blurrier the result. The range is 0 through 5. 0 is the full-resolution mipmap level. The default is 1. Refer to [Mipmaps introduction](https://docs.unity3d.com/2023.1/Documentation/Manual/texture-mipmaps-introduction.html) for more information. This property only appears if you open the **More** (⋮) menu and select **Show Additional Properties**. | + +### Flares + +Use the **Flares** settings to control regular flares, reversed flares and warped flares. + +| **Property** || **Description** | +|-|-|-| +| **Regular Multiplier** || Set the strength of regular flares. If the value is 0, URP doesn't calculate or render regular flares. The default is 1. | +| **Reversed Multiplier** || Set the strength of reversed flares. If the value is 0, URP doesn't calculate or render reversed flares. The default is 1. | +| **Warped Multiplier** || Set the strength of warped flares. If the value is 0, URP doesn't calculate or render warped flares. The default is 1. | +|| **Scale** | Scale the width (**x**) and height (**y**) of warped flares. The defaults are 1. This property only appears if you open the **More** (⋮) menu and select **Show Additional Properties**. | +| **Samples** || Set the number of times URP repeats the regular, reversed and warped flares. The range is 1 through 3. The default is 1. Increasing **Samples** has a big impact on performance. | +|| **Sample Dimmer** | Set the strength of the lens flares URP adds if you set **Samples** to 2 or 3. The higher the value, the less intense the flares. This property only appears if you open the **More** (⋮) menu and select **Show Additional Properties**. 
| +| **Vignette Effect** || Set the strength of the regular, reversed and warped flares in a circular area in the center of the screen. Use **Vignette Effect** to avoid lens flare obscuring the scene too much. The default value is 1, which means URP doesn't render flares at the center of the screen. | +| **Starting Position** || Control how far the position of the regular, reversed and warped flares differs from the bright area they're sampled from, in metres. If the value is 0, URP places the lens flares at the same position as the bright areas they're sampled from. The range is 1 through 3. The default is 1.25. | +| **Scale** || Set the size of regular, reversed and warped lens flares. The range is 1 through 4. The default is 1.5. | + +### Streaks + +Use the **Streaks** settings to control flares stretched in one direction. + +| **Property** || **Description** | +|-|-|-| +| **Multiplier** || Set the strength of streaks. If the value is 0, URP doesn't calculate or render streaks. The default is 1. | +|| **Length** | Set the length of streaks. The range is 0 through 1. 1 is the approximate width of the screen. The default value is 0.5. | +|| **Orientation** | Set the angle of streaks, in degrees. The default value is 0, which creates horizontal streaks. | +|| **Threshold** | Control how localized the streak effect is. The higher the **Threshold**, the more localized the effect. The range is 0 through 1. The default value is 0.25. | +|| **Resolution** | Control the resolution detail of streaks. URP renders lower-resolution streaks faster. The options are **Half**, **Quarter** and **Eighth** full resolution. This property only appears if you open the **More** (⋮) menu and select **Show Additional Properties**. | + +![](../../Images/shared/lens-flare/screenspacelensflares-threshold.gif)
+The effect of changing **Threshold** from 0 (a larger flare effect) to 1 (a smaller flare effect). + +### Chromatic Aberration + +Use the **Chromatic Aberration** settings to control chromatic aberration on all the lens flare types. Chromatic aberration splits light into its color components, which mimics the effect that a real-world camera produces when its lens fails to join all colors to the same point. + +The chromatic aberration effect is strongest at the edges of the screen, and decreases in strength towards the center of the screen. + +| **Property** | **Description** | +|-|-| +| **Intensity** | Set the strength of the chromatic aberration effect. If the value is 0, URP doesn't split the colors. | diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/universalrp-asset.md b/Packages/com.unity.render-pipelines.universal/Documentation~/universalrp-asset.md index d50b244b9bd..3f3b0f9c130 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/universalrp-asset.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/universalrp-asset.md @@ -83,18 +83,22 @@ These settings affect the lights in your scene. If you disable some of these settings, the relevant [keywords](https://docs.unity3d.com/Manual/shader-keywords) are [stripped from the Shader variables](shader-stripping.md). If there are settings that you know for certain you won’t use in your game or app, you can disable them to improve performance and reduce build time. -| Property | Description | -| --------------------- | ------------------------------------------------------------ | -| **Main Light** | These settings affect the main [Directional Light](https://docs.unity3d.com/Manual/Lighting.html) in your scene. You can select this by assigning it as a [Sun Source](https://docs.unity3d.com/Manual/GlobalIllumination.html) in the Lighting Inspector. If you don’t assign a sun source, the URP treats the brightest directional light in the scene as the main light. You can choose between [Pixel Lighting](https://docs.unity3d.com/Manual/LightPerformance.html) and _None_. If you choose None, URP doesn’t render a main light, even if you’ve set a sun source. | -| **Cast Shadows** | Check this box to make the main light cast shadows in your scene. | -| **Shadow Resolution** | This controls how large the shadow map texture for the main light is. High resolutions give sharper, more detailed shadows. If memory or rendering time is an issue, try a lower resolution. | -| **Mixed Lighting** | When [Mixed Lighting](https://docs.unity3d.com/Manual/LightMode-Mixed.html) is enabled, Unity includes mixed lighting shader variants in the build.| -| **Use Rendering Layers** | With this option selected, you can configure certain Lights to affect only specific GameObjects. For more information on Rendering Layers and how to use them, refer to the documentation on [Rendering Layers](features/rendering-layers.md) -| **Additional Lights** | Here, you can choose to have additional lights to supplement your main light. Choose between [Per Vertex](https://docs.unity3d.com/Manual/LightPerformance.html), [Per Pixel](https://docs.unity3d.com/Manual/LightPerformance.html), or **Disabled**. | -| **Per Object Limit** | This slider sets the limit for how many additional lights can affect each GameObject. | -| **Cast Shadows** | Check this box to make the additional lights cast shadows in your scene. 
| -| **Shadow Resolution** | This controls the size of the textures that cast directional shadows for the additional lights. This is a sprite atlas that packs up to 16 shadow maps. High resolutions give sharper, more detailed shadows. If memory or rendering time is an issue, try a lower resolution. | -| **Mixed Lighting** | Enable [Mixed Lighting](https://docs.unity3d.com/Manual/LightMode-Mixed.html) to configure the pipeline to include mixed lighting shader variants in the build. | +| **Property** | **Sub-property** | **Description** | +|-|-|-| +| **Main Light** || These settings affect the main [Directional Light](https://docs.unity3d.com/Manual/Lighting.html) in your scene. You can select this by assigning it as a [Sun Source](https://docs.unity3d.com/Manual/GlobalIllumination.html) in the Lighting Inspector. If you don’t assign a sun source, the URP treats the brightest directional light in the scene as the main light. You can choose between [Pixel Lighting](https://docs.unity3d.com/Manual/LightPerformance.html) and _None_. If you choose None, URP doesn’t render a main light, even if you’ve set a sun source. | +| **Cast Shadows** || Check this box to make the main light cast shadows in your scene. | +| **Shadow Resolution** || This controls how large the shadow map texture for the main light is. High resolutions give sharper, more detailed shadows. If memory or rendering time is an issue, try a lower resolution. | +| **Light Probe System** ||
  • **Light Probe Groups (Legacy)**: Use the same [Light Probe Group system](https://docs.unity3d.com/Manual/class-LightProbeGroup.html) as the Built-In Render Pipeline.
  • **Adaptive Probe Volumes**: Use [Adaptive Probe Volumes](probevolumes.md).
| +|| **Memory Budget** | Limits the width and height of the textures that store baked Global Illumination data, which determines the amount of memory Unity sets aside to store baked Adaptive Probe Volume data. These textures have a fixed depth.
Options:
  • **Memory Budget Low**
  • **Memory Budget Medium**
  • **Memory Budget High**
| +|| **SH Bands** | Determines the [spherical harmonics (SH) bands](https://docs.unity3d.com/Manual/LightProbes-TechnicalInformation.html) Unity uses to store probe data. L2 provides more precise results, but uses more system resources.
Options:
  • **Spherical Harmonics L1**
  • **Spherical Harmonics L2**
| +|| **Enable Streaming** | Enable to stream Adaptive Probe Volume data from CPU memory to GPU memory at runtime. Refer to [Streaming Adaptive Probe Volumes](probevolumes-streaming.md) for more information. | +|| **Estimated GPU Memory Cost** | Indicates the amount of texture data used by Adaptive Probe Volumes in your project. | +| **Additional Lights** || Here, you can choose to have additional lights to supplement your main light. Choose between [Per Vertex](https://docs.unity3d.com/Manual/LightPerformance.html), [Per Pixel](https://docs.unity3d.com/Manual/LightPerformance.html), or **Disabled**. | +|| **Per Object Limit** | This slider sets the limit for how many additional lights can affect each GameObject. | +|| **Cast Shadows** | Check this box to make the additional lights cast shadows in your scene. | +|| **Shadow Resolution** | This controls the size of the textures that cast directional shadows for the additional lights. This is a sprite atlas that packs up to 16 shadow maps. High resolutions give sharper, more detailed shadows. If memory or rendering time is an issue, try a lower resolution. | +| **Use Rendering Layers** || With this option selected, you can configure certain Lights to affect only specific GameObjects. For more information on Rendering Layers and how to use them, refer to the documentation on [Rendering Layers](features/rendering-layers.md). | +| **Mixed Lighting** || Enable [Mixed Lighting](https://docs.unity3d.com/Manual/LightMode-Mixed.html) to configure the pipeline to include mixed lighting shader variants in the build. | ### Shadows @@ -135,7 +139,7 @@ This section allows you to fine-tune global post-processing settings. | Property | Description | | ---------------- | ------------------------------------------------------------ | -| **Volume Update Mode** | Select how Unity updates Volumes at run time.
• **Every Frame**: Unity updates volumes every frame.
• **Via Scripting**: Unity updates volumes when triggered via scripting.
In the Editor, Unity updates Volumes every frame when not in Play mode. | +| **Volume Update Mode** | Select how Unity updates Volumes at run time.
  • **Every Frame**: Unity updates volumes every frame.
  • **Via Scripting**: Unity updates volumes when triggered via scripting.
In the Editor, Unity updates Volumes every frame when not in Play mode. | | **Volume Profile** | Set the [Volume Profile](Volume-Profile.md) that a scene uses by default. Refer to [Understand volumes](Volumes.md) for more information. | The list of Volume Overrides that the Volume Profile contains appears below **Volume Profile**. You can add, remove, disable, and enable Volume Overrides, and edit their properties. Refer to [Volume Overrides](VolumeOverrides.md) for more information. diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/working-with-textures.md b/Packages/com.unity.render-pipelines.universal/Documentation~/working-with-textures.md index f9a89b9b111..14c289999bf 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/working-with-textures.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/working-with-textures.md @@ -4,5 +4,13 @@ How to access and use textures in a custom render pass in the Universal Render P |Page|Description| |-|-| +|[Create a render graph system texture](render-graph-create-a-texture.md)|Create a texture in a render graph system render pass.| +|[Import a texture into the render graph system](render-graph-import-a-texture.md)|Use the `RTHandle` API to create or use a render texture in a render graph system render pass.| +|[Use a texture in a render pass](render-graph-read-write-texture.md)|Use the render graph system API to set a texture as an input or output, so the render pass can read from or write to it.| +|[Transfer a texture between passes](render-graph-pass-textures-between-passes.md)|Set a texture as a global texture, or add the texture to the frame data.| |[URP blit best practices](customize/blit-overview.md)|Understand the different ways to perform a blit operation in URP and best practices to follow when writing custom render passes.| |[Perform a full screen blit in URP](renderer-features/how-to-fullscreen-blit.md)|An example of creating a custom render pass and a custom Scriptable Renderer Feature that performs a full screen blit.| + +## Additional resources + +- [Use frame data](accessing-frame-data.md) diff --git a/Packages/com.unity.render-pipelines.universal/Editor/Lighting/UniversalRenderPipelineLightUI.Drawers.cs b/Packages/com.unity.render-pipelines.universal/Editor/Lighting/UniversalRenderPipelineLightUI.Drawers.cs index d0b40419170..8118607e134 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/Lighting/UniversalRenderPipelineLightUI.Drawers.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/Lighting/UniversalRenderPipelineLightUI.Drawers.cs @@ -286,31 +286,10 @@ static void DrawRenderingContent(UniversalRenderPipelineSerializedLight serializ } } - var rendererList = UniversalRenderPipeline.asset.rendererDataList; - bool hasNonForwardPlusRenderer = false; - foreach (var r in rendererList) - { - if (r is UniversalRendererData ur) - { - if (ur.renderingMode != RenderingMode.ForwardPlus) - { - hasNonForwardPlusRenderer = true; - break; - } - } - else - { - hasNonForwardPlusRenderer = true; - break; - } - } - - GUI.enabled = hasNonForwardPlusRenderer; - EditorGUILayout.PropertyField(serializedLight.settings.cullingMask, hasNonForwardPlusRenderer ? 
Styles.CullingMask : Styles.CullingMaskDisabled); - GUI.enabled = true; + EditorGUILayout.PropertyField(serializedLight.settings.cullingMask, Styles.CullingMask); if (serializedLight.settings.cullingMask.intValue != -1) { - EditorGUILayout.HelpBox(Styles.CullingMaskWarning.text, MessageType.Warning); + EditorGUILayout.HelpBox(Styles.CullingMaskWarning.text, MessageType.Info); } } diff --git a/Packages/com.unity.render-pipelines.universal/Editor/Lighting/UniversalRenderPipelineLightUI.Skin.cs b/Packages/com.unity.render-pipelines.universal/Editor/Lighting/UniversalRenderPipelineLightUI.Skin.cs index f83b50baac9..8ec562c50a1 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/Lighting/UniversalRenderPipelineLightUI.Skin.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/Lighting/UniversalRenderPipelineLightUI.Skin.cs @@ -21,9 +21,8 @@ private static class Styles public static readonly GUIContent BakingWarning = EditorGUIUtility.TrTextContent("Light mode is currently overridden to Realtime mode. Enable Baked Global Illumination to use Mixed or Baked light modes."); public static readonly GUIContent DisabledLightWarning = EditorGUIUtility.TrTextContent("Lighting has been disabled in at least one Scene view. Any changes applied to lights in the Scene will not be updated in these views until Lighting has been enabled again."); public static readonly GUIContent SunSourceWarning = EditorGUIUtility.TrTextContent("This light is set as the current Sun Source, which requires a directional light. Go to the Lighting Window's Environment settings to edit the Sun Source."); - public static readonly GUIContent CullingMask = EditorGUIUtility.TrTextContent("Culling Mask", "Specifies which layers will be affected or excluded from the light's effect on objects in the scene. This only applies to objects rendered using the Forward rendering path, and transparent objects rendered using the Deferred rendering path.\n\nUse Rendering Layers instead, which is supported across all rendering paths."); - public static readonly GUIContent CullingMaskDisabled = EditorGUIUtility.TrTextContent("Culling Mask", "Culling Mask is disabled. This is because all active renderers use the Forward+ rendering path, which doesn't support Culling Mask. Use Rendering Layers instead, which is supported across all rendering paths."); - public static readonly GUIContent CullingMaskWarning = EditorGUIUtility.TrTextContent("Culling Mask only works with Forward rendering. Instead, use Rendering Layers on the Light, and Rendering Layer Mask on the Mesh Renderer, which will work across Deferred, Forward, and Forward+ rendering."); + public static readonly GUIContent CullingMask = EditorGUIUtility.TrTextContent("Culling Mask", "Specifies which lights are culled per camera. To exclude certain lights from affecting certain objects, use Rendering Layers instead, which is supported across all rendering paths."); + public static readonly GUIContent CullingMaskWarning = EditorGUIUtility.TrTextContent("Culling Mask should be used to control which lights are culled per camera. 
If you want to exclude certain lights from affecting certain objects, use Rendering Layers on the Light, and Rendering Layer Mask on the Mesh Renderer."); public static readonly GUIContent ShadowRealtimeSettings = EditorGUIUtility.TrTextContent("Realtime Shadows", "Settings for realtime direct shadows."); public static readonly GUIContent ShadowStrength = EditorGUIUtility.TrTextContent("Strength", "Controls how dark the shadows cast by the light will be."); diff --git a/Packages/com.unity.render-pipelines.universal/Editor/Overrides/ColorCurvesEditor.cs b/Packages/com.unity.render-pipelines.universal/Editor/Overrides/ColorCurvesEditor.cs index ae9b65a93cd..637ae422d86 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/Overrides/ColorCurvesEditor.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/Overrides/ColorCurvesEditor.cs @@ -37,16 +37,16 @@ sealed class ColorCurvesEditor : VolumeComponentEditor static GUIStyle s_PreLabel; - static GUIContent[] s_Curves = + static string[] s_CurveNames = { - new GUIContent("Master"), - new GUIContent("Red"), - new GUIContent("Green"), - new GUIContent("Blue"), - new GUIContent("Hue Vs Hue"), - new GUIContent("Hue Vs Sat"), - new GUIContent("Sat Vs Sat"), - new GUIContent("Lum Vs Sat") + "Master", + "Red", + "Green", + "Blue", + "Hue Vs Hue", + "Hue Vs Sat", + "Sat Vs Sat", + "Lum Vs Sat" }; SavedInt m_SelectedCurve; @@ -127,9 +127,28 @@ void CurveOverrideToggle(SerializedProperty overrideProp) overrideProp.boolValue = GUILayout.Toggle(overrideProp.boolValue, EditorGUIUtility.TrTextContent("Override"), EditorStyles.toolbarButton); } + string MakeCurveSelectionPopupLabel(int id) + { + string label = s_CurveNames[id]; + const string overrideSuffix = " (Overriding)"; + switch (id) + { + case 0: if (m_Master.overrideState.boolValue) label += overrideSuffix; break; + case 1: if (m_Red.overrideState.boolValue) label += overrideSuffix; break; + case 2: if (m_Green.overrideState.boolValue) label += overrideSuffix; break; + case 3: if (m_Blue.overrideState.boolValue) label += overrideSuffix; break; + case 4: if (m_HueVsHue.overrideState.boolValue) label += overrideSuffix; break; + case 5: if (m_HueVsSat.overrideState.boolValue) label += overrideSuffix; break; + case 6: if (m_SatVsSat.overrideState.boolValue) label += overrideSuffix; break; + case 7: if (m_LumVsSat.overrideState.boolValue) label += overrideSuffix; break; + } + return label; + } + int DoCurveSelectionPopup(int id) { - GUILayout.Label(s_Curves[id], EditorStyles.toolbarPopup, GUILayout.MaxWidth(150f)); + var label = MakeCurveSelectionPopupLabel(id); + GUILayout.Label(label, EditorStyles.toolbarPopup, GUILayout.MaxWidth(150f)); var lastRect = GUILayoutUtility.GetLastRect(); var e = Event.current; @@ -138,13 +157,15 @@ int DoCurveSelectionPopup(int id) { var menu = new GenericMenu(); - for (int i = 0; i < s_Curves.Length; i++) + for (int i = 0; i < s_CurveNames.Length; i++) { if (i == 4) menu.AddSeparator(""); int current = i; // Capture local for closure - menu.AddItem(s_Curves[i], current == id, () => + + var menuLabel = MakeCurveSelectionPopupLabel(i); + menu.AddItem(new GUIContent(menuLabel), current == id, () => { m_SelectedCurve.value = current; serializedObject.ApplyModifiedProperties(); diff --git a/Packages/com.unity.render-pipelines.universal/Editor/Settings/PropertyDrawers/URPRenderGraphPropertyDrawer.cs b/Packages/com.unity.render-pipelines.universal/Editor/Settings/PropertyDrawers/URPRenderGraphPropertyDrawer.cs index 8481c3313ba..5cff3cc142d 100644 --- 
a/Packages/com.unity.render-pipelines.universal/Editor/Settings/PropertyDrawers/URPRenderGraphPropertyDrawer.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/Settings/PropertyDrawers/URPRenderGraphPropertyDrawer.cs @@ -14,6 +14,8 @@ class RenderGraphPropertyDrawer : PropertyDrawer private const string k_EnableRenderCompatibilityModeLabel = "Compatibility Mode (Render Graph Disabled)"; private const string k_EnableRenderCompatibilityModeHelpBoxLabel = "Unity no longer develops or improves the rendering path that does not use Render Graph API. Use the Render Graph API when developing new graphics features."; + bool m_EnableCompatibilityModeValue; + /// public override VisualElement CreatePropertyGUI(SerializedProperty property) { @@ -21,20 +23,20 @@ public override VisualElement CreatePropertyGUI(SerializedProperty property) var enableCompatilityModeProp = property.FindPropertyRelative(k_EnableRenderCompatibilityPropertyName); var enableCompatibilityMode = new PropertyField(enableCompatilityModeProp, k_EnableRenderCompatibilityModeLabel); - // UITK raises ValueChangeCallback at bind time, so we need to ignore the first event - bool firstTime = true; + // UITK raises ValueChangeCallback at various times, so we need to track the actual value + m_EnableCompatibilityModeValue = enableCompatilityModeProp.boolValue; m_Root.Add(enableCompatibilityMode); enableCompatibilityMode.RegisterValueChangeCallback((onchanged) => { m_Root.Q("HelpBoxWarning").style.display = (onchanged.changedProperty.boolValue) ? DisplayStyle.Flex : DisplayStyle.None; - if (firstTime) + + bool newValue = onchanged.changedProperty.boolValue; + if (m_EnableCompatibilityModeValue != newValue) { - firstTime = false; - return; + m_EnableCompatibilityModeValue = newValue; + GraphicsSettings.GetRenderPipelineSettings()?.NotifyValueChanged(onchanged.changedProperty.name); } - - GraphicsSettings.GetRenderPipelineSettings()?.NotifyValueChanged(onchanged.changedProperty.name); }); m_Root.Add(new HelpBox(k_EnableRenderCompatibilityModeHelpBoxLabel, HelpBoxMessageType.Warning) diff --git a/Packages/com.unity.render-pipelines.universal/Editor/ShaderScriptableStripper.cs b/Packages/com.unity.render-pipelines.universal/Editor/ShaderScriptableStripper.cs index 91631520589..2fc260a45d4 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/ShaderScriptableStripper.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/ShaderScriptableStripper.cs @@ -37,6 +37,7 @@ internal interface IShaderScriptableStrippingData public PassType passType { get; set; } public PassIdentifier passIdentifier { get; set; } + public bool IsHDRDisplaySupportEnabled { get; set; } public bool IsHDRShaderVariantValid { get; set; } public bool IsShaderFeatureEnabled(ShaderFeatures feature); @@ -70,6 +71,7 @@ internal struct StrippingData : IShaderScriptableStrippingData public string passName { get => passData.passName; set {} } public PassType passType { get => passData.passType; set {} } public PassIdentifier passIdentifier { get => passData.pass; set {} } + public bool IsHDRDisplaySupportEnabled { get; set; } public bool IsHDRShaderVariantValid { get => HDROutputUtils.IsShaderVariantValid(variantData.shaderKeywordSet, PlayerSettings.allowHDRDisplaySupport); set { } } public bool IsKeywordEnabled(LocalKeyword keyword) @@ -836,7 +838,7 @@ Invalid Variants internal bool StripInvalidVariants_HDR(ref IShaderScriptableStrippingData strippingData) { // We do not need to strip out HDR output variants if HDR display is enabled. 
- if (PlayerSettings.allowHDRDisplaySupport) + if (strippingData.IsHDRDisplaySupportEnabled) return false; // Shared keywords between URP and HDRP. @@ -1042,6 +1044,7 @@ public bool CanRemoveVariant([DisallowNull] Shader shader, ShaderSnippetData pas stripScreenCoordOverrideVariants = ShaderBuildPreprocessor.s_StripScreenCoordOverrideVariants, stripUnusedVariants = ShaderBuildPreprocessor.s_StripUnusedVariants, stripUnusedPostProcessingVariants = ShaderBuildPreprocessor.s_StripUnusedPostProcessingVariants, + IsHDRDisplaySupportEnabled = PlayerSettings.allowHDRDisplaySupport, shader = shader, passData = passData, variantData = variantData diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/2D/LightUtility.cs b/Packages/com.unity.render-pipelines.universal/Runtime/2D/LightUtility.cs index 6d21ce98d48..a92b6956af0 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/2D/LightUtility.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/2D/LightUtility.cs @@ -259,6 +259,8 @@ static void TransferToMesh(NativeArray vertices, int vertexCoun public static Bounds GenerateShapeMesh(Light2D light, Vector3[] shapePath, float falloffDistance, float batchColor) { const float kClipperScale = 10000.0f; + + var restoreState = Random.state; Random.InitState(123456); // for deterministic output // todo Revisit this while we do Batching. @@ -414,6 +416,7 @@ public static Bounds GenerateShapeMesh(Light2D light, Vector3[] shapePath, float TransferToMesh(outVertices, vcount, outIndices, icount, light); } + Random.state = restoreState; return light.lightMesh.GetSubMesh(0).bounds; } diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Passes/Utility/RendererLighting.cs b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Passes/Utility/RendererLighting.cs index 51a988a0a10..976bbbd62ec 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Passes/Utility/RendererLighting.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Passes/Utility/RendererLighting.cs @@ -578,7 +578,7 @@ internal static void RenderNormals(this IRenderPass2D pass, ScriptableRenderCont var msaaEnabled = renderingData.cameraData.cameraTargetDescriptor.msaaSamples > 1; var storeAction = msaaEnabled ? RenderBufferStoreAction.Resolve : RenderBufferStoreAction.Store; var clearFlag = pass.rendererData.useDepthStencilBuffer ? ClearFlag.All : ClearFlag.Color; - clearFlag = bFirstClear ? clearFlag : ClearFlag.None; + clearFlag = bFirstClear ? clearFlag : ClearFlag.Color; bFirstClear = false; if (depthTarget != null) diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Data/UniversalRenderPipelineAsset.DefaultResources.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Data/UniversalRenderPipelineAsset.DefaultResources.cs index 9687b8a5ca6..cf8a3e06ddf 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Data/UniversalRenderPipelineAsset.DefaultResources.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Data/UniversalRenderPipelineAsset.DefaultResources.cs @@ -203,6 +203,12 @@ public override Shader defaultShader /// Returns the default SpeedTree8 shader that this asset uses. public override Shader defaultSpeedTree8Shader => defaultShaders?.defaultSpeedTree8Shader; + /// + /// Returns the default SpeedTree9 shader that this asset uses. + /// + /// Returns the default SpeedTree9 shader that this asset uses. 
+ public override Shader defaultSpeedTree9Shader => defaultShaders?.defaultSpeedTree9Shader; + #endregion #endif diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Data/UniversalRenderPipelineAsset.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Data/UniversalRenderPipelineAsset.cs index 875143779d0..00c66badcdb 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Data/UniversalRenderPipelineAsset.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Data/UniversalRenderPipelineAsset.cs @@ -890,6 +890,12 @@ public ScriptableRenderer scriptableRenderer { DestroyRenderer(ref m_Renderers[m_DefaultRendererIndex]); m_Renderers[m_DefaultRendererIndex] = scriptableRendererData.InternalCreateRenderer(); + + // GPU Resident Drawer may need to be reinitialized if renderer data has become incompatible/compatible + if (gpuResidentDrawerMode != GPUResidentDrawerMode.Disabled) + { + IGPUResidentRenderPipeline.ReinitializeGPUResidentDrawer(); + } } return m_Renderers[m_DefaultRendererIndex]; @@ -927,6 +933,12 @@ public ScriptableRenderer GetRenderer(int index) { DestroyRenderer(ref m_Renderers[index]); m_Renderers[index] = m_RendererDataList[index].InternalCreateRenderer(); + + // GPU Resident Drawer may need to be reinitialized if renderer data has become incompatible/compatible + if (gpuResidentDrawerMode != GPUResidentDrawerMode.Disabled) + { + IGPUResidentRenderPipeline.ReinitializeGPUResidentDrawer(); + } } return m_Renderers[index]; diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/AdditionalLightsShadowCasterPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/AdditionalLightsShadowCasterPass.cs index 0f529c3c0a4..2cc1d27391f 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/AdditionalLightsShadowCasterPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/AdditionalLightsShadowCasterPass.cs @@ -711,6 +711,11 @@ void RenderAdditionalShadowmapAtlas(RasterCommandBuffer cmd, ref PassData data, using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.AdditionalLightsShadow))) { + // For non-RG, we need to set the worldToCamera Matrix as it is not set for passes executed before normal rendering, + // otherwise shadows will behave incorrectly when Scene and Game windows are open at the same time (UUM-63267). 
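The fix referenced by UUM-63267 boils down to binding the camera globals yourself for any pass that runs before Unity's normal per-camera setup. A minimal sketch of that pattern, assuming a hypothetical EarlyPassCameraGlobals helper (the shader property names are the real Unity globals):

using UnityEngine;
using UnityEngine.Rendering;

// Sketch: bind the camera globals that shadow shaders sample, because Unity
// has not set them yet for passes executed before normal rendering.
static class EarlyPassCameraGlobals
{
    static readonly int k_WorldToCamera = Shader.PropertyToID("unity_WorldToCamera");
    static readonly int k_WorldSpaceCameraPos = Shader.PropertyToID("_WorldSpaceCameraPos");

    public static void Push(CommandBuffer cmd, Camera camera)
    {
        // unity_WorldToCamera uses the opposite handedness of the view matrix
        // (see ShadowUtils.SetWorldToCameraMatrix later in this diff), so flip Z.
        var worldToCamera = Matrix4x4.Scale(new Vector3(1f, 1f, -1f)) * camera.worldToCameraMatrix;
        cmd.SetGlobalMatrix(k_WorldToCamera, worldToCamera);
        cmd.SetGlobalVector(k_WorldSpaceCameraPos, camera.transform.position);
    }
}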
+ if (!useRenderGraph) + ShadowUtils.SetWorldToCameraMatrix(cmd, data.viewMatrix); + bool anyShadowSliceRenderer = false; int shadowSlicesCount = m_ShadowSliceToAdditionalLightIndex.Count; if (shadowSlicesCount > 0) @@ -826,6 +831,7 @@ private class PassData { internal UniversalLightData lightData; internal UniversalShadowData shadowData; + internal Matrix4x4 viewMatrix; internal bool stripShadowsOffVariants; internal AdditionalLightsShadowCasterPass pass; @@ -846,6 +852,7 @@ private void InitPassData(ref PassData passData, UniversalCameraData cameraData, passData.lightData = lightData; passData.shadowData = shadowData; + passData.viewMatrix = cameraData.GetViewMatrix(); passData.stripShadowsOffVariants = cameraData.renderer.stripShadowsOffVariants; passData.emptyShadowmap = m_CreateEmptyShadowmap; diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/CapturePass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/CapturePass.cs index b26931a6ec4..92e3fcbf84a 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/CapturePass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/CapturePass.cs @@ -1,4 +1,6 @@ using System; +using System.Collections.Generic; +using UnityEngine.Rendering.RenderGraphModule; namespace UnityEngine.Rendering.Universal { @@ -35,5 +37,41 @@ public override void Execute(ScriptableRenderContext context, ref RenderingData captureActions.Current(colorAttachmentIdentifier, renderingData.commandBuffer); } } + + private class UnsafePassData + { + internal TextureHandle source; + public IEnumerator> captureActions; + } + + const string k_UnsafePassName = "CapturePass (Render Graph Unsafe Pass)"; + + // This function needs to add an unsafe render pass to Render Graph because a raster render pass, which is typically + // used for rendering with Render Graph, cannot perform the texture readback operations performed with the command + // buffer in CameraTextureProvider. Unsafe passes can do certain operations that raster render passes cannot do and + // have access to the full command buffer API. 
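For context on who supplies the cameraData.captureActions consumed below: capture consumers (the Unity Recorder, for example) register a per-camera callback through CameraCaptureBridge, and the pass hands each callback the final color target plus a command buffer it can encode copies or readbacks into. A sketch of the producer side; treat the exact bridge entry points as assumptions of this sketch rather than a verified API surface:

using UnityEngine;
using UnityEngine.Rendering;

// Sketch of registering a capture action for a camera. CapturePass later
// invokes the callback with the camera color target; because the callback
// receives a full CommandBuffer, the pass must run as an unsafe (non-raster) pass.
[RequireComponent(typeof(Camera))]
public class CaptureRegistrationSketch : MonoBehaviour
{
    RenderTexture m_Destination;

    void OnEnable()
    {
        m_Destination = new RenderTexture(Screen.width, Screen.height, 0);
        CameraCaptureBridge.AddCaptureAction(GetComponent<Camera>(), Capture);
    }

    void OnDisable()
    {
        CameraCaptureBridge.RemoveCaptureAction(GetComponent<Camera>(), Capture);
        m_Destination.Release();
    }

    void Capture(RenderTargetIdentifier source, CommandBuffer cmd)
    {
        // Copy the camera output into a texture this system owns; operations
        // like this are exactly what a raster render pass cannot record.
        cmd.Blit(source, m_Destination);
    }
}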
+ public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer frameData) + { + UniversalResourceData resourceData = frameData.Get(); + UniversalCameraData cameraData = frameData.Get(); + + using (var builder = renderGraph.AddUnsafePass(k_UnsafePassName, out var passData, profilingSampler)) + { + // Set up the pass data with cameraColor, which has the correct orientation and position in a built player + passData.source = resourceData.cameraColor; + passData.captureActions = cameraData.captureActions; + + // Set up the builder + builder.AllowPassCulling(false); + builder.UseTexture(resourceData.cameraColor); + builder.SetRenderFunc((UnsafePassData data, UnsafeGraphContext unsafeContext) => + { + var nativeCommandBuffer = CommandBufferHelpers.GetNativeCommandBuffer(unsafeContext.cmd); + var captureActions = data.captureActions; + for (data.captureActions.Reset(); data.captureActions.MoveNext();) + captureActions.Current(data.source, nativeCommandBuffer); + }); + } + } } } diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/ColorGradingLutPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/ColorGradingLutPass.cs index f643fbd1c93..b651c946163 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/ColorGradingLutPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/ColorGradingLutPass.cs @@ -37,7 +37,7 @@ Material Load(Shader shader) { if (shader == null) { - Debug.LogError($"Missing shader. {GetType().DeclaringType.Name} render pass will not execute. Check for missing reference in the renderer resources."); + Debug.LogError($"Missing shader. ColorGradingLutPass render pass will not execute. Check for missing reference in the renderer resources."); return null; } diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/MainLightShadowCasterPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/MainLightShadowCasterPass.cs index bc7735f9bc7..9aa391ce7ad 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/MainLightShadowCasterPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/MainLightShadowCasterPass.cs @@ -268,8 +268,13 @@ void RenderMainLightCascadeShadowmap(RasterCommandBuffer cmd, ref PassData data, using (new ProfilingScope(cmd, ProfilingSampler.Get(URPProfileId.MainLightShadow))) { - // Need to start by setting the Camera position as that is not set for passes executed before normal rendering - cmd.SetGlobalVector(ShaderPropertyId.worldSpaceCameraPos, data.cameraData.worldSpaceCameraPos); + // Need to start by setting the Camera position and worldToCamera Matrix as those are not set for passes executed before normal rendering + ShadowUtils.SetCameraPosition(cmd, data.cameraData.worldSpaceCameraPos); + + // For non-RG, we need to set the worldToCamera Matrix as it is not set for passes executed before normal rendering, + // otherwise shadows will behave incorrectly when Scene and Game windows are open at the same time (UUM-63267). 
+ if (!isRenderGraph) + ShadowUtils.SetWorldToCameraMatrix(cmd, data.cameraData.GetViewMatrix()); for (int cascadeIndex = 0; cascadeIndex < m_ShadowCasterCascadesCount; ++cascadeIndex) { diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/MotionVectorRenderPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/MotionVectorRenderPass.cs index 39bfad6bbf7..0c9e3ae54fc 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/MotionVectorRenderPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/MotionVectorRenderPass.cs @@ -26,9 +26,9 @@ sealed class MotionVectorRenderPass : ScriptableRenderPass #endregion #region Constructors - internal MotionVectorRenderPass(Material cameraMaterial) + internal MotionVectorRenderPass(RenderPassEvent evt, Material cameraMaterial) { - renderPassEvent = RenderPassEvent.BeforeRenderingPostProcessing; + renderPassEvent = evt; m_CameraMaterial = cameraMaterial; m_PassData = new PassData(); base.profilingSampler = ProfilingSampler.Get(URPProfileId.MotionVectors); diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPass.cs index cdf27f5390c..dae1398c85d 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPass.cs @@ -1853,7 +1853,7 @@ Material Load(Shader shader) { if (shader == null) { - Debug.LogErrorFormat($"Missing shader. {GetType().DeclaringType.Name} render pass will not execute. Check for missing reference in the renderer resources."); + Debug.LogErrorFormat($"Missing shader. PostProcessing render passes will not execute. 
Check for missing reference in the renderer resources."); return null; } else if (!shader.isSupported) diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPassRenderGraph.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPassRenderGraph.cs index d5ca100d685..80a766c58b6 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPassRenderGraph.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPassRenderGraph.cs @@ -1279,7 +1279,10 @@ public void RenderLensFlareDataDriven(RenderGraph renderGraph, UniversalResource passData.material = m_Materials.lensFlareDataDriven; passData.width = (float)m_Descriptor.width; passData.height = (float)m_Descriptor.height; - passData.viewport = cameraData.pixelRect; + passData.viewport.x = 0.0f; + passData.viewport.y = 0.0f; + passData.viewport.width = (float)m_Descriptor.width; + passData.viewport.height = (float)m_Descriptor.height; if (m_PaniniProjection.IsActive()) { passData.usePanini = true; diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/RenderPipelineResources/UniversalRenderPipelineEditorShaders.cs b/Packages/com.unity.render-pipelines.universal/Runtime/RenderPipelineResources/UniversalRenderPipelineEditorShaders.cs index b9fdf51aa7e..70f80224ae9 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/RenderPipelineResources/UniversalRenderPipelineEditorShaders.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/RenderPipelineResources/UniversalRenderPipelineEditorShaders.cs @@ -97,6 +97,16 @@ public Shader defaultSpeedTree8Shader get => m_DefaultSpeedTree8Shader; set => this.SetValueAndNotify(ref m_DefaultSpeedTree8Shader, value); } + + [SerializeField] + [ResourcePath("Shaders/Nature/SpeedTree9_URP.shadergraph")] + private Shader m_DefaultSpeedTree9Shader; + + public Shader defaultSpeedTree9Shader + { + get => m_DefaultSpeedTree9Shader; + set => this.SetValueAndNotify(ref m_DefaultSpeedTree9Shader, value); + } #endregion } } diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/ScriptableRendererData.cs b/Packages/com.unity.render-pipelines.universal/Runtime/ScriptableRendererData.cs index 91de5dcd8ee..a62e0470509 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/ScriptableRendererData.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/ScriptableRendererData.cs @@ -59,7 +59,9 @@ protected virtual void OnValidate() { SetDirty(); #if UNITY_EDITOR - if (m_RendererFeatures.Contains(null)) + // Only validate ScriptableRendererFeatures when all scripts have finished compiling (to avoid false-negatives + // when ScriptableRendererFeatures haven't been compiled before this check). 
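The guard below generalizes: any OnValidate check that inspects serialized managed references can observe them as null while a domain reload is in flight, so it skips the check while EditorApplication.isCompiling is true and relies on the OnValidate call that follows recompilation. A minimal sketch of the same pattern, under those assumptions and with hypothetical names:

#if UNITY_EDITOR
using UnityEditor;
#endif
using UnityEngine;

// Sketch: defer reference validation until scripts have finished compiling,
// mirroring the ScriptableRendererData.OnValidate change below.
public class ValidatedAssetSketch : ScriptableObject
{
    [SerializeField] ScriptableObject[] m_Features;

    void OnValidate()
    {
#if UNITY_EDITOR
        // While compiling, managed references can read back as null even when
        // the underlying assets are fine; checking now yields false negatives.
        if (EditorApplication.isCompiling || m_Features == null)
            return;

        foreach (var feature in m_Features)
        {
            if (feature == null)
                Debug.LogWarning($"{name} contains a missing feature reference.", this);
        }
#endif
    }
}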
+ if (!EditorApplication.isCompiling && m_RendererFeatures.Contains(null)) ValidateRendererFeatures(); #endif } diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/ShadowUtils.cs b/Packages/com.unity.render-pipelines.universal/Runtime/ShadowUtils.cs index 6bd09f8707c..c60a56df5ee 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/ShadowUtils.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/ShadowUtils.cs @@ -489,10 +489,6 @@ public static void SetupShadowCasterConstantBuffer(CommandBuffer cmd, ref Visibl SetupShadowCasterConstantBuffer(CommandBufferHelpers.GetRasterCommandBuffer(cmd), ref shadowLight, shadowBias); } - private static int _ShadowBias = Shader.PropertyToID("_ShadowBias"); - private static int _LightDirection = Shader.PropertyToID("_LightDirection"); - private static int _LightPosition = Shader.PropertyToID("_LightPosition"); - internal static void SetupShadowCasterConstantBuffer(RasterCommandBuffer cmd, ref VisibleLight shadowLight, Vector4 shadowBias) { SetShadowBias(cmd, shadowBias); @@ -508,17 +504,31 @@ internal static void SetupShadowCasterConstantBuffer(RasterCommandBuffer cmd, re internal static void SetShadowBias(RasterCommandBuffer cmd, Vector4 shadowBias) { - cmd.SetGlobalVector(_ShadowBias, shadowBias); + cmd.SetGlobalVector(ShaderPropertyId.shadowBias, shadowBias); } internal static void SetLightDirection(RasterCommandBuffer cmd, Vector3 lightDirection) { - cmd.SetGlobalVector(_LightDirection, new Vector4(lightDirection.x, lightDirection.y, lightDirection.z, 0.0f)); + cmd.SetGlobalVector(ShaderPropertyId.lightDirection, new Vector4(lightDirection.x, lightDirection.y, lightDirection.z, 0.0f)); } internal static void SetLightPosition(RasterCommandBuffer cmd, Vector3 lightPosition) { - cmd.SetGlobalVector(_LightPosition, new Vector4(lightPosition.x, lightPosition.y, lightPosition.z, 1.0f)); + cmd.SetGlobalVector(ShaderPropertyId.lightPosition, new Vector4(lightPosition.x, lightPosition.y, lightPosition.z, 1.0f)); + } + + internal static void SetCameraPosition(RasterCommandBuffer cmd, Vector3 worldSpaceCameraPos) + { + cmd.SetGlobalVector(ShaderPropertyId.worldSpaceCameraPos, worldSpaceCameraPos); + } + + internal static void SetWorldToCameraMatrix(RasterCommandBuffer cmd, Matrix4x4 viewMatrix) + { + // There's an inconsistency in handedness between unity_matrixV and unity_WorldToCamera + // Unity changes the handedness of unity_WorldToCamera (see Camera::CalculateMatrixShaderProps) + // we will also change it here to avoid breaking existing shaders. (case 1257518) + Matrix4x4 worldToCameraMatrix = Matrix4x4.Scale(new Vector3(1.0f, 1.0f, -1.0f)) * viewMatrix; + cmd.SetGlobalMatrix(ShaderPropertyId.worldToCameraMatrix, worldToCameraMatrix); } private static RenderTextureDescriptor GetTemporaryShadowTextureDescriptor(int width, int height, int bits) diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipeline.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipeline.cs index fffcf81a19b..d3a66063672 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipeline.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipeline.cs @@ -24,30 +24,44 @@ public sealed partial class UniversalRenderPipeline : RenderPipeline ///
public const string k_ShaderTagName = "UniversalPipeline"; - internal static class Profiling + // Cache camera data to avoid per-frame allocations. + internal static class CameraMetadataCache { - private static Dictionary s_HashSamplerCache = new Dictionary(); - public static readonly ProfilingSampler unknownSampler = new ProfilingSampler("Unknown"); + public class CameraMetadataCacheEntry + { + public string name; + public ProfilingSampler sampler; + } + + static Dictionary s_MetadataCache = new(); + + static readonly CameraMetadataCacheEntry k_NoAllocEntry = new() { name = "Unknown", sampler = new ProfilingSampler("Unknown") }; - // Specialization for camera loop to avoid allocations. - public static ProfilingSampler TryGetOrAddCameraSampler(Camera camera) + public static CameraMetadataCacheEntry GetCached(Camera camera) { #if UNIVERSAL_PROFILING_NO_ALLOC - return unknownSampler; + return k_NoAllocEntry; #else - ProfilingSampler ps = null; int cameraId = camera.GetHashCode(); - bool exists = s_HashSamplerCache.TryGetValue(cameraId, out ps); - if (!exists) + if (!s_MetadataCache.TryGetValue(cameraId, out CameraMetadataCacheEntry result)) { - // NOTE: camera.name allocates! - ps = new ProfilingSampler($"{nameof(UniversalRenderPipeline)}.{nameof(RenderSingleCameraInternal)}: {camera.name}"); - s_HashSamplerCache.Add(cameraId, ps); + string cameraName = camera.name; // Warning: camera.name allocates + result = new CameraMetadataCacheEntry + { + name = cameraName, + sampler = new ProfilingSampler( + $"{nameof(UniversalRenderPipeline)}.{nameof(RenderSingleCameraInternal)}: {cameraName}") + }; + s_MetadataCache.Add(cameraId, result); } - return ps; + + return result; #endif } + } + internal static class Profiling + { public static class Pipeline { // TODO: Would be better to add Profiling name hooks into RenderPipeline.cs, requires changes outside of Universal. @@ -698,8 +712,8 @@ static void RenderSingleCamera(ScriptableRenderContext context, UniversalCameraD // Until then, we can't use nested profiling scopes with XR multipass CommandBuffer cmdScope = cameraData.xr.enabled ? null : cmd; - ProfilingSampler sampler = Profiling.TryGetOrAddCameraSampler(camera); - using (new ProfilingScope(cmdScope, sampler)) // Enqueues a "BeginSample" command into the CommandBuffer cmd + var cameraMetadata = CameraMetadataCache.GetCached(camera); + using (new ProfilingScope(cmdScope, cameraMetadata.sampler)) // Enqueues a "BeginSample" command into the CommandBuffer cmd { renderer.Clear(cameraData.renderType); @@ -798,7 +812,7 @@ static void RenderSingleCamera(ScriptableRenderContext context, UniversalCameraD if (useRenderGraph) { - RecordAndExecuteRenderGraph(s_RenderGraph, context, renderer, cmd, cameraData.camera); + RecordAndExecuteRenderGraph(s_RenderGraph, context, renderer, cmd, cameraData.camera, cameraMetadata.name); renderer.FinishRenderGraphRendering(cmd); } else @@ -1393,7 +1407,7 @@ static void InitializeStackedCameraData(Camera baseCamera, UniversalAdditionalCa bool canSkipFrontToBackSorting = (baseCamera.opaqueSortMode == OpaqueSortMode.Default && hasHSRGPU) || baseCamera.opaqueSortMode == OpaqueSortMode.NoDistanceSort; cameraData.defaultOpaqueSortFlags = canSkipFrontToBackSorting ? 
noFrontToBackOpaqueFlags : commonOpaqueFlags; - cameraData.captureActions = CameraCaptureBridge.GetCaptureActions(baseCamera); + cameraData.captureActions = Unity.RenderPipelines.Core.Runtime.Shared.CameraCaptureBridge.GetCachedCaptureActionsEnumerator(baseCamera); } /// @@ -1458,7 +1472,6 @@ static void InitializeAdditionalCameraData(Camera camera, UniversalAdditionalCam // enable GPU occlusion culling in game and scene views only cameraData.useGPUOcclusionCulling = GPUResidentDrawer.IsInstanceOcclusionCullingEnabled() && renderer.supportsGPUOcclusion - && !XRSRPSettings.enabled && camera.cameraType is CameraType.SceneView or CameraType.Game or CameraType.Preview; cameraData.requiresDepthTexture |= cameraData.useGPUOcclusionCulling; diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineCore.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineCore.cs index f959df4e758..ece57649f43 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineCore.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineCore.cs @@ -849,6 +849,10 @@ internal static class ShaderPropertyId public static readonly int worldToCameraMatrix = Shader.PropertyToID("unity_WorldToCamera"); public static readonly int cameraToWorldMatrix = Shader.PropertyToID("unity_CameraToWorld"); + public static readonly int shadowBias = Shader.PropertyToID("_ShadowBias"); + public static readonly int lightDirection = Shader.PropertyToID("_LightDirection"); + public static readonly int lightPosition = Shader.PropertyToID("_LightPosition"); + public static readonly int cameraWorldClipPlanes = Shader.PropertyToID("unity_CameraWorldClipPlanes"); public static readonly int billboardNormal = Shader.PropertyToID("unity_BillboardNormal"); diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineGlobalSettings.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineGlobalSettings.cs index d45f211cf81..3c7534cd40d 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineGlobalSettings.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineGlobalSettings.cs @@ -1,4 +1,5 @@ using System; +using System.IO; using System.ComponentModel; using System.Collections.Generic; using UnityEditor; @@ -25,7 +26,7 @@ partial class UniversalRenderPipelineGlobalSettings : RenderPipelineGlobalSettin internal bool IsAtLastVersion() => k_LastVersion == m_AssetVersion; - internal const int k_LastVersion = 7; + internal const int k_LastVersion = 8; #pragma warning disable CS0414 [SerializeField][FormerlySerializedAs("k_AssetVersion")] @@ -132,6 +133,25 @@ public static void UpgradeAsset(int assetInstanceID) asset.m_AssetVersion = 7; } + // Reload PSDImporter and AsepriteImporter assets for 2D. 
Importers are triggered before graphics settings are loaded + // This ensures affected assets dependent on default materials from graphics settings are loaded correctly + if (asset.m_AssetVersion < 8) + { + var distinctGuids = AssetDatabase.FindAssets("", new[] { "Assets" }); + + for (int i = 0; i < distinctGuids.Length; i++) + { + var path = AssetDatabase.GUIDToAssetPath(distinctGuids[i]); + var assetExt = Path.GetExtension(path); + + if (assetExt == ".psb" || assetExt == ".psd" || + assetExt == ".ase" || assetExt == ".aseprite") + AssetDatabase.ImportAsset(path); + } + + asset.m_AssetVersion = 8; + } + // If the asset version has changed, means that a migration step has been executed if (assetVersionBeforeUpgrade != asset.m_AssetVersion) EditorUtility.SetDirty(asset); diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineRenderGraph.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineRenderGraph.cs index de489c1e449..5bc402ab8b3 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineRenderGraph.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineRenderGraph.cs @@ -1,3 +1,4 @@ +using System.Collections.Generic; using UnityEngine.Rendering.RenderGraphModule; namespace UnityEngine.Rendering.Universal @@ -9,12 +10,11 @@ static void RecordRenderGraph(RenderGraph renderGraph, ScriptableRenderContext c renderer.RecordRenderGraph(renderGraph, context); } - static void RecordAndExecuteRenderGraph(RenderGraph renderGraph, ScriptableRenderContext context, ScriptableRenderer renderer, CommandBuffer cmd, Camera camera) + static void RecordAndExecuteRenderGraph(RenderGraph renderGraph, ScriptableRenderContext context, ScriptableRenderer renderer, CommandBuffer cmd, Camera camera, string cameraName) { - RenderGraphParameters rgParams = new RenderGraphParameters() + RenderGraphParameters rgParams = new RenderGraphParameters { - // TODO Rendergraph - we are reusing the sampler name, as camera.name does an alloc. 
we could probably cache this as the current string we get is a bit too informative - executionName = Profiling.TryGetOrAddCameraSampler(camera).name, + executionName = cameraName, commandBuffer = cmd, scriptableRenderContext = context, currentFrameIndex = Time.frameCount, diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderer.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderer.cs index 022d18b9269..ab54e090e4b 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderer.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderer.cs @@ -270,7 +270,6 @@ public UniversalRenderer(UniversalRendererData data) : base(data) #endif m_DepthPrepass = new DepthOnlyPass(RenderPassEvent.BeforeRenderingPrePasses, RenderQueueRange.opaque, data.opaqueLayerMask); m_DepthNormalPrepass = new DepthNormalOnlyPass(RenderPassEvent.BeforeRenderingPrePasses, RenderQueueRange.opaque, data.opaqueLayerMask); - m_MotionVectorPass = new MotionVectorRenderPass(m_CameraMotionVecMaterial); if (renderingModeRequested == RenderingMode.Forward || renderingModeRequested == RenderingMode.ForwardPlus) { @@ -312,13 +311,17 @@ public UniversalRenderer(UniversalRendererData data) : base(data) m_RenderOpaqueForwardWithRenderingLayersPass = new DrawObjectsWithRenderingLayersPass(URPProfileId.DrawOpaqueObjects, true, RenderPassEvent.BeforeRenderingOpaques, RenderQueueRange.opaque, data.opaqueLayerMask, m_DefaultStencilState, stencilData.stencilReference); bool copyDepthAfterTransparents = m_CopyDepthMode == CopyDepthMode.AfterTransparents; + RenderPassEvent copyDepthEvent = copyDepthAfterTransparents ? RenderPassEvent.AfterRenderingTransparents : RenderPassEvent.AfterRenderingSkybox; m_CopyDepthPass = new CopyDepthPass( - copyDepthAfterTransparents ? RenderPassEvent.AfterRenderingTransparents : RenderPassEvent.AfterRenderingSkybox, + copyDepthEvent, copyDephPS, shouldClear: true, copyResolvedDepth: RenderingUtils.MultisampleDepthResolveSupported() && copyDepthAfterTransparents); + // Motion vectors depend on the (copy) depth texture. Depth is reprojected to calculate motion vectors. 
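To make the dependency concrete: camera motion vectors are produced by taking a pixel's depth, reconstructing its world-space position, and reprojecting that position with the previous frame's view-projection matrix; the screen-space delta is the motion vector. A CPU-side sketch of that math (a hypothetical helper; the real work happens in the camera motion-vector shader):

using UnityEngine;

static class MotionVectorMathSketch
{
    // uv in [0,1], rawDepth in [0,1] (D3D convention). Returns the UV-space
    // motion vector of a static world position under camera motion.
    public static Vector2 CameraMotionVector(Vector2 uv, float rawDepth,
        Matrix4x4 invCurrViewProj, Matrix4x4 prevViewProj)
    {
        // Current clip-space position of the pixel.
        var currClip = new Vector4(uv.x * 2f - 1f, uv.y * 2f - 1f, rawDepth, 1f);

        // Reconstruct the world position from depth.
        Vector4 world = invCurrViewProj * currClip;
        world /= world.w;

        // Reproject with last frame's view-projection.
        Vector4 prevClip = prevViewProj * world;
        var prevNdc = new Vector2(prevClip.x / prevClip.w, prevClip.y / prevClip.w);
        var currNdc = new Vector2(currClip.x, currClip.y);

        // NDC spans [-1,1] while UV spans [0,1], hence the 0.5 factor.
        return (currNdc - prevNdc) * 0.5f;
    }
}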
+ m_MotionVectorPass = new MotionVectorRenderPass(copyDepthEvent + 1, m_CameraMotionVecMaterial); + m_DrawSkyboxPass = new DrawSkyboxPass(RenderPassEvent.BeforeRenderingSkybox); m_CopyColorPass = new CopyColorPass(RenderPassEvent.AfterRenderingSkybox, m_SamplingMaterial, m_BlitMaterial); #if ADAPTIVE_PERFORMANCE_2_1_0_OR_NEWER @@ -545,8 +548,10 @@ bool IsDepthPrimingEnabled(UniversalCameraData cameraData) bool isFirstCameraToWriteDepth = cameraData.renderType == CameraRenderType.Base || cameraData.clearDepth; // Enabled Depth priming when baking Reflection Probes causes artefacts (UUM-12397) bool isNotReflectionCamera = cameraData.cameraType != CameraType.Reflection; + // Depth is not rendered in a depth-only camera setup with depth priming (UUM-38158) + bool isNotOffscreenDepthTexture = !IsOffscreenDepthTexture(cameraData); - return depthPrimingRequested && isForwardRenderingMode && isFirstCameraToWriteDepth && isNotReflectionCamera && isNotWebGL; + return depthPrimingRequested && isForwardRenderingMode && isFirstCameraToWriteDepth && isNotReflectionCamera && isNotOffscreenDepthTexture && isNotWebGL; } bool IsWebGL() @@ -640,6 +645,9 @@ public override void Setup(ScriptableRenderContext context, ref RenderingData re if (cameraData.cameraType != CameraType.Game) useRenderPassEnabled = false; + // Because of the shortcutting done by depth only offscreen cameras, useDepthPriming must be computed early + useDepthPriming = IsDepthPrimingEnabled(cameraData); + // Special path for depth only offscreen cameras. Only write opaques + transparents. if (IsOffscreenDepthTexture(cameraData)) { @@ -741,7 +749,6 @@ public override void Setup(ScriptableRenderContext context, ref RenderingData re // TODO: We could cache and generate the LUT before rendering the stack bool generateColorGradingLUT = cameraData.postProcessEnabled && m_PostProcessPasses.isCreated; bool isSceneViewOrPreviewCamera = cameraData.isSceneViewCamera || cameraData.isPreviewCamera; - useDepthPriming = IsDepthPrimingEnabled(cameraData); // This indicates whether the renderer will output a depth texture. 
bool requiresDepthTexture = cameraData.requiresDepthTexture || renderPassInputs.requiresDepthTexture || useDepthPriming; @@ -1696,7 +1703,11 @@ private RenderPassInputSummary GetRenderPassInputs(bool isTemporalAAEnabled, boo // Motion vectors imply depth if (inputSummary.requiresMotionVectors) + { inputSummary.requiresDepthTexture = true; + inputSummary.requiresDepthTextureEarliestEvent = (RenderPassEvent)Mathf.Min((int)m_MotionVectorPass.renderPassEvent, (int)inputSummary.requiresDepthTextureEarliestEvent); + } + return inputSummary; } diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererDebug.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererDebug.cs index 5c3e1cfd7ec..9fde7409fb7 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererDebug.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererDebug.cs @@ -137,9 +137,10 @@ private void SetupRenderGraphFinalPassDebug(RenderGraph renderGraph, ContextCont GPUResidentDrawer.RenderDebugOcclusionTestOverlay(renderGraph, debugSettings, cameraData.camera.GetInstanceID(), resourceData.activeColorTexture); + float screenWidth = (int)(cameraData.pixelWidth * cameraData.renderScale); float screenHeight = (int)(cameraData.pixelHeight * cameraData.renderScale); float maxHeight = screenHeight * textureHeightPercent / 100.0f; - GPUResidentDrawer.RenderDebugOccluderOverlay(renderGraph, debugSettings, new Vector2(0.0f, screenHeight - maxHeight), maxHeight, resourceData.activeColorTexture); + GPUResidentDrawer.RenderDebugOccluderOverlay(renderGraph, debugSettings, new Vector2(0.25f * screenWidth, screenHeight - 1.5f * maxHeight), maxHeight, resourceData.activeColorTexture); } } } diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererRenderGraph.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererRenderGraph.cs index 6038962e84f..0a586d7affd 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererRenderGraph.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererRenderGraph.cs @@ -1,3 +1,4 @@ +using System; using System.Runtime.CompilerServices; using UnityEngine.Experimental.Rendering; using UnityEngine.Rendering.RenderGraphModule; @@ -886,26 +887,51 @@ private void OnBeforeRendering(RenderGraph renderGraph) private void UpdateInstanceOccluders(RenderGraph renderGraph, UniversalCameraData cameraData, TextureHandle depthTexture) { - var viewMatrix = cameraData.GetViewMatrix(); - var projMatrix = cameraData.GetProjectionMatrix(); int scaledWidth = (int)(cameraData.pixelWidth * cameraData.renderScale); int scaledHeight = (int)(cameraData.pixelHeight * cameraData.renderScale); + bool isSinglePassXR = cameraData.xr.enabled && cameraData.xr.singlePassEnabled; var occluderParams = new OccluderParameters(cameraData.camera.GetInstanceID()) { - viewMatrix = viewMatrix, - invViewMatrix = viewMatrix.inverse, - gpuProjMatrix = GL.GetGPUProjectionMatrix(projMatrix, true), - viewOffsetWorldSpace = Vector3.zero, + subviewCount = isSinglePassXR ? 
2 : 1, depthTexture = depthTexture, depthSize = new Vector2Int(scaledWidth, scaledHeight), + depthIsArray = isSinglePassXR, }; - GPUResidentDrawer.UpdateInstanceOccluders(renderGraph, occluderParams); + Span occluderSubviewUpdates = stackalloc OccluderSubviewUpdate[occluderParams.subviewCount]; + for (int subviewIndex = 0; subviewIndex < occluderParams.subviewCount; ++subviewIndex) + { + var viewMatrix = cameraData.GetViewMatrix(subviewIndex); + var projMatrix = cameraData.GetProjectionMatrix(subviewIndex); + occluderSubviewUpdates[subviewIndex] = new OccluderSubviewUpdate(subviewIndex) + { + depthSliceIndex = subviewIndex, + viewMatrix = viewMatrix, + invViewMatrix = viewMatrix.inverse, + gpuProjMatrix = GL.GetGPUProjectionMatrix(projMatrix, true), + viewOffsetWorldSpace = Vector3.zero, + }; + } + GPUResidentDrawer.UpdateInstanceOccluders(renderGraph, occluderParams, occluderSubviewUpdates); } private void InstanceOcclusionTest(RenderGraph renderGraph, UniversalCameraData cameraData, OcclusionTest occlusionTest) { - var settings = new OcclusionCullingSettings(cameraData.camera.GetInstanceID(), occlusionTest); - GPUResidentDrawer.InstanceOcclusionTest(renderGraph, settings); + bool isSinglePassXR = cameraData.xr.enabled && cameraData.xr.singlePassEnabled; + int subviewCount = isSinglePassXR ? 2 : 1; + var settings = new OcclusionCullingSettings(cameraData.camera.GetInstanceID(), occlusionTest) + { + instanceMultiplier = (isSinglePassXR && !SystemInfo.supportsMultiview) ? 2 : 1, + }; + Span subviewOcclusionTests = stackalloc SubviewOcclusionTest[subviewCount]; + for (int subviewIndex = 0; subviewIndex < subviewCount; ++subviewIndex) + { + subviewOcclusionTests[subviewIndex] = new SubviewOcclusionTest() + { + cullingSplitIndex = 0, + occluderSubviewIndex = subviewIndex, + }; + } + GPUResidentDrawer.InstanceOcclusionTest(renderGraph, settings, subviewOcclusionTests); } private void OnMainRendering(RenderGraph renderGraph, ScriptableRenderContext context) @@ -1122,6 +1148,10 @@ private void OnMainRendering(RenderGraph renderGraph, ScriptableRenderContext co m_CopyDepthPass.Render(renderGraph, frameData, cameraDepthTexture, resourceData.activeDepthTexture, true); } + // Depends on the camera (copy) depth texture. Depth is reprojected to calculate motion vectors. + if (renderPassInputs.requiresMotionVectors && m_CopyDepthMode != CopyDepthMode.AfterTransparents) + m_MotionVectorPass.Render(renderGraph, frameData, resourceData.cameraDepthTexture, resourceData.motionVectorColor, resourceData.motionVectorDepth); + RecordCustomRenderGraphPasses(renderGraph, RenderPassEvent.BeforeRenderingSkybox); if (cameraData.camera.clearFlags == CameraClearFlags.Skybox && cameraData.renderType != CameraRenderType.Overlay) @@ -1174,8 +1204,8 @@ private void OnMainRendering(RenderGraph renderGraph, ScriptableRenderContext co // TODO: Postprocess pass should be able configure its render pass inputs per camera per frame (settings) BEFORE building any of the graph // TODO: Alternatively we could always build the graph (a potential graph) and cull away unused passes if "record + cull" is fast enough. 
// TODO: Currently we just override "requiresMotionVectors" for TAA in GetRenderPassInputs() - // Depends on camera depth - if (renderPassInputs.requiresMotionVectors) + // Depends on camera (copy) depth texture + if (renderPassInputs.requiresMotionVectors && m_CopyDepthMode == CopyDepthMode.AfterTransparents) m_MotionVectorPass.Render(renderGraph, frameData, resourceData.cameraDepthTexture, resourceData.motionVectorColor, resourceData.motionVectorDepth); if (context.HasInvokeOnRenderObjectCallbacks()) @@ -1338,6 +1368,11 @@ private void OnAfterRendering(RenderGraph renderGraph) resourceData.activeDepthID = UniversalResourceData.ActiveID.BackBuffer; } + if (cameraData.captureActions != null) + { + m_CapturePass.RecordRenderGraph(renderGraph, frameData); + } + cameraTargetResolved = // final PP always blit to camera target applyFinalPostProcessing || diff --git a/Packages/com.unity.render-pipelines.universal/Tests/Editor/NoLeaksOnEnterLeavePlaymode.cs b/Packages/com.unity.render-pipelines.universal/Tests/Editor/NoLeaksOnEnterLeavePlaymode.cs index 677df76fbd2..82a53e5ba95 100644 --- a/Packages/com.unity.render-pipelines.universal/Tests/Editor/NoLeaksOnEnterLeavePlaymode.cs +++ b/Packages/com.unity.render-pipelines.universal/Tests/Editor/NoLeaksOnEnterLeavePlaymode.cs @@ -69,6 +69,11 @@ void CompareResourceLists(Dictionary oldList, Dictionary instancingSplitValues; public ReadOnlyDictionary graphicsBufferUsage; public VFXMapping[] parameters; public (VFXSlot slot, VFXData data)[] linkedEventOut; @@ -864,7 +865,7 @@ private void GenerateShaders(List outGeneratedCodeData, VFXEx errors.ForEach(x => { errorMessage.AppendLine($"\t{x}"); - m_Graph.RegisterCompileError(context, x); + m_Graph.RegisterCompileError("CompileError", x, context); }); } } @@ -1167,31 +1168,34 @@ public void Compile(VFXCompilationMode compilationMode, bool forceShaderValidati var valueDescs = new List(); FillExpressionDescs(m_ExpressionGraph, expressionDescs, expressionPerSpawnEventAttributesDescs, valueDescs); - var compiledData = new VFXCompiledData { contextToCompiledData = new(), taskToCompiledData = new() }; - // Initialize context tasks - foreach (var context in compilableContexts) - compiledData.contextToCompiledData[context] = context.PrepareCompiledData(); - - foreach (var contextCompiledData in compiledData.contextToCompiledData.Values) - foreach (var task in contextCompiledData.tasks) - compiledData.taskToCompiledData.Add(task, new VFXTaskCompiledData() { indexInShaderSource = -1 }); EditorUtility.DisplayProgressBar(progressBarTitle, "Generating mappings", 5 / nbSteps); + + var compiledData = new VFXCompiledData { contextToCompiledData = new(), taskToCompiledData = new() }; + + // Initialize contexts and tasks foreach (var context in compilableContexts) { + var contextCompiledData = context.PrepareCompiledData(); var cpuMapper = m_ExpressionGraph.BuildCPUMapper(context); + var instancingSplitValues = context.CreateInstancingSplitValues(m_ExpressionGraph); - foreach (var task in compiledData.contextToCompiledData[context].tasks) + foreach (var task in contextCompiledData.tasks) { - var contextData = compiledData.taskToCompiledData[task]; + var contextData = new VFXTaskCompiledData() { indexInShaderSource = -1 }; contextData.hlslCodeHolders = m_ExpressionGraph.customHLSLExpressions; contextData.cpuMapper = cpuMapper; contextData.parameters = context.additionalMappings.ToArray(); contextData.linkedEventOut = ComputeEventListFromSlot(context.allLinkedOutputSlot).ToArray(); + contextData.instancingSplitValues = 
instancingSplitValues; + compiledData.taskToCompiledData[task] = contextData; } + + compiledData.contextToCompiledData[context] = contextCompiledData; } + var exposedParameterDescs = new List<(VFXMapping mapping, VFXSpace space, SpaceableType spaceType)>(); FillExposedDescs(exposedParameterDescs, m_ExpressionGraph, m_Graph.children.OfType()); SubgraphInfos subgraphInfos; @@ -1272,7 +1276,7 @@ public void Compile(VFXCompilationMode compilationMode, bool forceShaderValidati dataToSystemIndex.Add(data, (uint)systemDescs.Count); } - data.FillDescs(m_Graph.compileReporter, + data.FillDescs(m_Graph.errorManager.compileReporter, compilationMode, bufferDescs, temporaryBufferDescs, diff --git a/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXBitField.cs b/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXBitField.cs index 6a8ebe91607..d3c3ab39526 100644 --- a/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXBitField.cs +++ b/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXBitField.cs @@ -1,8 +1,5 @@ -using UnityEngine; using UnityEngine.UIElements; -using UnityEditor.UIElements; -using System.Collections.Generic; using System.Runtime.InteropServices; namespace UnityEditor.VFX { @@ -15,50 +12,32 @@ namespace UnityEditor.VFX.UI { abstract class VFXBitField : VFXControl { + private Label m_Label; + private bool m_Indeterminate; + protected VisualElement[] m_Buttons; - protected VisualElement m_Background; - protected Texture2D m_BitImage; - protected Texture2D m_BitBkgndImage; - protected Label m_Label; - public VFXBitField() + protected VFXBitField() { m_Buttons = new VisualElement[Marshal.SizeOf(typeof(T)) * 8]; - m_Background = new VisualElement() { name = "background" }; - - m_Label = new Label() { name = "tip" }; - Add(m_Label); - - Add(m_Background); - var buttonContainer = new VisualElement() { name = "button-container", pickingMode = PickingMode.Ignore }; - Add(buttonContainer); for (int i = 0; i < m_Buttons.Length; ++i) { - var button = new VisualElement(); - button.style.flexGrow = button.style.flexShrink = 1; - button.style.marginRight = 1; + var button = new VisualElement { name = "bit-button"}; SetupListener(button, i); - buttonContainer.Add(button); + Add(button); m_Buttons[i] = button; } + m_Buttons[0].AddToClassList("first"); + m_Buttons[^1].AddToClassList("last"); - VisualElement backgroundItem = null; - for (int i = 0; i < m_Buttons.Length; ++i) - { - backgroundItem = new VisualElement(); - backgroundItem.style.flexGrow = backgroundItem.style.flexShrink = 1; - if (i != m_Buttons.Length - 1) - backgroundItem.style.paddingLeft = 1; - SetupBkgnd(backgroundItem, i); - m_Background.Add(backgroundItem); - } + m_Label = new Label { name = "tip" }; + Add(m_Label); - m_Buttons[m_Buttons.Length - 1].style.marginRight = 0; - RegisterCallback(OnCustomStyleResolved); + RegisterCallback(e => m_Label.text = ""); this.AddManipulator(new ContextualMenuManipulator(BuildContextualMenu)); } - public void BuildContextualMenu(ContextualMenuPopulateEvent evt) + void BuildContextualMenu(ContextualMenuPopulateEvent evt) { evt.menu.AppendAction("Check All", CheckAll, DropdownMenuAction.AlwaysEnabled); evt.menu.AppendAction("Check None", CheckNone, DropdownMenuAction.AlwaysEnabled); @@ -70,47 +49,21 @@ public void BuildContextualMenu(ContextualMenuPopulateEvent evt) void SetupListener(VisualElement button, int index) { - button.AddManipulator(new Clickable(() => ValueToggled(index))); - button.RegisterCallback(e => m_Label.text = index.ToString()); - button.RegisterCallback(e => 
m_Label.text = ""); - } - - void SetupBkgnd(VisualElement button, int index) - { + button.AddManipulator(new Clickable(() => this.ValueToggled(index))); button.RegisterCallback(e => m_Label.text = index.ToString()); - button.RegisterCallback(e => m_Label.text = ""); } protected abstract void ValueToggled(int i); - static readonly CustomStyleProperty s_BitImage = new CustomStyleProperty("--bit-image"); - static readonly CustomStyleProperty s_BitBkgndImage = new CustomStyleProperty("--bit-bkgnd-image"); - private void OnCustomStyleResolved(CustomStyleResolvedEvent e) - { - var customStyle = e.customStyle; - customStyle.TryGetValue(s_BitImage, out m_BitImage); - customStyle.TryGetValue(s_BitBkgndImage, out m_BitBkgndImage); - - for (int i = 0; i < m_Background.childCount - 1; ++i) - m_Background.ElementAt(i).style.backgroundImage = m_BitBkgndImage; - - ValueToGUI(true); - } - - bool m_Indeterminate; - public override bool indeterminate { - get - { - return m_Indeterminate; - } + get => m_Indeterminate; set { m_Indeterminate = value; foreach (var button in m_Buttons) { - button.visible = !m_Indeterminate; + button.SetEnabled(!m_Indeterminate); } } } @@ -120,10 +73,17 @@ class VFX32BitField : VFXBitField { protected override void ValueToGUI(bool force) { - uint value = (uint)this.value; + uint bitMask = (uint)this.value; for (int i = 0; i < m_Buttons.Length; ++i) { - m_Buttons[i].style.backgroundImage = (value & 1u << i) != 0 ? m_BitImage : null; + if ((bitMask & 1u << i) != 0) + { + m_Buttons[i].AddToClassList("bit-set"); + } + else + { + m_Buttons[i].RemoveFromClassList("bit-set"); + } } } diff --git a/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXControl.cs b/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXControl.cs index f15fdd77207..7397df7cef0 100644 --- a/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXControl.cs +++ b/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXControl.cs @@ -1,6 +1,6 @@ +using System; using UnityEngine; using UnityEngine.UIElements; -using UnityEditor.UIElements; using System.Collections.Generic; @@ -12,17 +12,27 @@ static class VFXControlConstants public static readonly Color indeterminateTextColor = new Color(0.82f, 0.82f, 0.82f); } - abstract class VFXControl : VisualElement, INotifyValueChanged + interface IVFXControl + { + bool indeterminate { get; set; } + event Action onValueDragFinished; + event Action onValueDragStarted; + void ForceUpdate(); + void SetEnabled(bool isEnabled); + } + + abstract class VFXControl : VisualElement, INotifyValueChanged, IVFXControl { T m_Value; public T value { - get { return m_Value; } - set - { - SetValueAndNotify(value); - } + get => m_Value; + set => SetValueAndNotify(value); } + + public event Action onValueDragFinished; + public event Action onValueDragStarted; + public void SetValueAndNotify(T newValue) { if (!EqualityComparer.Default.Equals(value, newValue)) @@ -64,5 +74,9 @@ public void RemoveOnValueChanged(EventCallback> callback) { UnregisterCallback(callback); } + + protected void ValueDragFinished(PointerCaptureOutEvent evt) => onValueDragFinished?.Invoke(); + + protected void ValueDragStarted(PointerCaptureEvent evt) => onValueDragStarted?.Invoke(); } } diff --git a/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXEnumField.cs b/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXEnumField.cs index 0d9097cdc40..f8b54eb79ee 100644 --- a/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXEnumField.cs +++ 
b/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXEnumField.cs @@ -70,11 +70,6 @@ public VFXEnumField(string label, System.Type enumType) : base(label) style.flexDirection = FlexDirection.Row; Add(m_DropDownButton); - - var icon = new VisualElement() { name = "icon" }; - icon.AddToClassList("unity-enum-field__arrow"); - - m_DropDownButton.Add(icon); } public VFXEnumField(Label existingLabel, System.Type enumType) : base(existingLabel) diff --git a/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXEnumValuePopup.cs b/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXEnumValuePopup.cs index ac3ca100945..153835ba588 100644 --- a/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXEnumValuePopup.cs +++ b/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXEnumValuePopup.cs @@ -1,63 +1,35 @@ -using UnityEngine; -using UnityEngine.UIElements; -using UnityEditor.UIElements; +using System; using System.Collections.Generic; +using UnityEngine.UIElements; + namespace UnityEditor.VFX.UI { class VFXEnumValuePopup : VisualElement, INotifyValueChanged { - protected Label m_DropDownButton; - TextElement m_ValueText; + DropdownField m_DropDownButton; + long m_Value; - public string[] enumValues { get; set; } + public IEnumerable choices => m_DropDownButton.choices; - public VFXEnumValuePopup() + public VFXEnumValuePopup(string label, List values) { - AddToClassList("unity-enum-field"); - AddToClassList("VFXEnumValuePopup"); - m_DropDownButton = new Label(); - m_DropDownButton.AddToClassList("unity-enum-field__input"); - m_DropDownButton.AddManipulator(new DownClickable(OnClick)); + m_DropDownButton = new DropdownField(label); + m_DropDownButton.choices = values; + m_DropDownButton.value = values[0]; + m_DropDownButton.RegisterCallback>(OnValueChanged); Add(m_DropDownButton); - m_ValueText = new TextElement(); - m_ValueText.AddToClassList("unity-enum-field__text"); - - var icon = new VisualElement() { name = "icon" }; - icon.AddToClassList("unity-enum-field__arrow"); - m_DropDownButton.Add(m_ValueText); - m_DropDownButton.Add(icon); - } - - private void OnClick() - { - GenericMenu menu = new GenericMenu(); - - for (long i = 0; i < enumValues.Length; ++i) - { - menu.AddItem(new GUIContent(enumValues[i]), i == m_Value, ChangeValue, i); - } - menu.DropDown(m_DropDownButton.worldBound); } - void ChangeValue(object value) + private void OnValueChanged(ChangeEvent evt) { - SetValueAndNotify((long)value); + SetValueAndNotify(m_DropDownButton.choices.IndexOf(evt.newValue)); } - public long m_Value; - public long value { - get - { - return m_Value; - } - - set - { - SetValueAndNotify(value); - } + get => m_Value; + set => SetValueAndNotify(value); } public void SetValueAndNotify(long newValue) @@ -75,19 +47,12 @@ public void SetValueAndNotify(long newValue) public void SetValueWithoutNotify(long newValue) { - m_Value = newValue; - bool found = false; - for (uint i = 0; i < enumValues.Length; ++i) + if (newValue >= 0 && newValue < m_DropDownButton.choices.Count) { - if (newValue == i) - { - found = true; - m_ValueText.text = enumValues[i]; - break; - } + m_Value = newValue; } - if (!found) - m_ValueText.text = enumValues[enumValues.Length - 1]; + + m_Value = Math.Clamp(newValue, 0, m_DropDownButton.choices.Count - 1); } } } diff --git a/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXFlipBookField.cs b/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXFlipBookField.cs index ecb22cb93a1..540fba74b36 100644 --- 
a/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXFlipBookField.cs +++ b/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXFlipBookField.cs @@ -1,23 +1,19 @@ -using UnityEngine; using UnityEngine.UIElements; -using UnityEditor.UIElements; - -using System.Collections.Generic; namespace UnityEditor.VFX.UI { class VFXFlipBookField : VFXControl { - VFXLabeledField m_X; - VFXLabeledField m_Y; + IntegerField m_X; + IntegerField m_Y; void CreateTextField() { - m_X = new VFXLabeledField("X"); - m_Y = new VFXLabeledField("Y"); + m_X = new IntegerField("X"); + m_Y = new IntegerField("Y"); - m_X.control.AddToClassList("fieldContainer"); - m_Y.control.AddToClassList("fieldContainer"); + m_X.AddToClassList("fieldContainer"); + m_Y.AddToClassList("fieldContainer"); m_X.AddToClassList("fieldContainer"); m_Y.AddToClassList("fieldContainer"); @@ -41,19 +37,19 @@ void OnYValueChanged(ChangeEvent e) public override bool indeterminate { - get - { - return m_X.indeterminate; - } + get => m_X.showMixedValue; set { - m_X.indeterminate = value; - m_Y.indeterminate = value; + m_X.showMixedValue = value; + m_Y.showMixedValue = value; } } - public VFXFlipBookField() + public VFXFlipBookField(string label) { + var labelElement = new Label(label); + labelElement.AddToClassList("label"); + Add(labelElement); CreateTextField(); style.flexDirection = FlexDirection.Row; @@ -63,10 +59,10 @@ public VFXFlipBookField() protected override void ValueToGUI(bool force) { - if (!m_X.control.HasFocus() || force) + if (!m_X.HasFocus() || force) m_X.value = value.x; - if (!m_Y.control.HasFocus() || force) + if (!m_Y.HasFocus() || force) m_Y.value = value.y; } } diff --git a/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXLabeledField.cs b/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXLabeledField.cs index 54d9051c787..85260b4886f 100644 --- a/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXLabeledField.cs +++ b/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXLabeledField.cs @@ -1,259 +1,19 @@ -using UnityEngine; using UnityEngine.UIElements; -using System; namespace UnityEditor.VFX.UI { - //Copied from mousefield dragger but add notifications needed for delayed fields - class VFXFieldMouseDragger - { - Action m_OnDragFinished; - Action m_OnDragStarted; - public VFXFieldMouseDragger(IValueField drivenField, Action onDragFinished = null, Action onDragStarted = null) - { - m_DrivenField = drivenField; - m_DragElement = null; - m_DragHotZone = new Rect(0, 0, -1, -1); - m_OnDragFinished = onDragFinished; - m_OnDragStarted = onDragStarted; - dragging = false; - } - - IValueField m_DrivenField; - VisualElement m_DragElement; - Rect m_DragHotZone; - - public bool dragging; - public T startValue; - - public void SetDragZone(VisualElement dragElement) - { - SetDragZone(dragElement, new Rect(0, 0, -1, -1)); - } - - public void SetDragZone(VisualElement dragElement, Rect hotZone) - { - if (m_DragElement != null) - { - m_DragElement.UnregisterCallback(UpdateValueOnMouseDown); - m_DragElement.UnregisterCallback(UpdateValueOnMouseMove); - m_DragElement.UnregisterCallback(UpdateValueOnMouseUp); - m_DragElement.UnregisterCallback(UpdateValueOnKeyDown); - } - - m_DragElement = dragElement; - m_DragHotZone = hotZone; - - if (m_DragElement != null) - { - dragging = false; - m_DragElement.RegisterCallback(UpdateValueOnMouseDown); - m_DragElement.RegisterCallback(UpdateValueOnMouseMove); - m_DragElement.RegisterCallback(UpdateValueOnMouseUp); - m_DragElement.RegisterCallback(UpdateValueOnKeyDown); - } - } - 
- void UpdateValueOnMouseDown(MouseDownEvent evt) - { - if (evt.button == 0 && (m_DragHotZone.width < 0 || m_DragHotZone.height < 0 || m_DragHotZone.Contains(m_DragElement.WorldToLocal(evt.mousePosition)))) - { - m_DragElement.CaptureMouse(); - - // Make sure no other elements can capture the mouse! - evt.StopPropagation(); - - dragging = true; - startValue = m_DrivenField.value; - if (m_OnDragStarted != null) - m_OnDragStarted(); - - EditorGUIUtility.SetWantsMouseJumping(1); - } - } - - void UpdateValueOnMouseMove(MouseMoveEvent evt) - { - if (dragging) - { - DeltaSpeed s = evt.shiftKey ? DeltaSpeed.Fast : (evt.altKey ? DeltaSpeed.Slow : DeltaSpeed.Normal); - m_DrivenField.ApplyInputDeviceDelta(evt.mouseDelta, s, startValue); - } - } - - void UpdateValueOnMouseUp(MouseUpEvent evt) - { - if (dragging) - { - dragging = false; - MouseCaptureController.ReleaseMouse(); - EditorGUIUtility.SetWantsMouseJumping(0); - if (m_OnDragFinished != null) - m_OnDragFinished(); - } - } - - void UpdateValueOnKeyDown(KeyDownEvent evt) - { - if (dragging && evt.keyCode == KeyCode.Escape) - { - dragging = false; - m_DrivenField.value = startValue; - MouseCaptureController.ReleaseMouse(); - EditorGUIUtility.SetWantsMouseJumping(0); - } - } - } - - interface IVFXDraggedElement - { - void SetOnValueDragStarted(Action callback); - void SetOnValueDragFinished(Action callback); - } - - class VFXLabeledField : VisualElement, INotifyValueChanged, IVFXDraggedElement where T : VisualElement, INotifyValueChanged, new() + abstract class ValueControl : VisualElement { - private Action m_OnValueDragFinished; - private Action m_OnValueDragStarted; - - private bool m_Indeterminate; - protected Label m_Label; - protected T m_Control; - public VisualElement m_IndeterminateLabel; - - public VFXLabeledField(Label existingLabel, string controlName = null) + protected ValueControl(Label existingLabel) { m_Label = existingLabel; - - CreateControl(controlName); - SetupLabel(); - } - - - public bool indeterminate - { - get { return m_Control.parent == null; } - - set + if (m_Label != null) { - if (m_Indeterminate != value) - { - m_Indeterminate = value; - if (value) - { - m_Control.RemoveFromHierarchy(); - Add(m_IndeterminateLabel); - } - else - { - m_IndeterminateLabel.RemoveFromHierarchy(); - Add(m_Control); - } - } - } - } - - public VFXLabeledField(string label, string controlName = null) - { - if (!string.IsNullOrEmpty(label)) - { - m_Label = new Label() { text = label }; m_Label.AddToClassList("label"); - Add(m_Label); } - style.flexDirection = FlexDirection.Row; - - CreateControl(controlName); - SetupLabel(); - } - - public void SetOnValueDragStarted(Action callback) => m_OnValueDragStarted = callback; - public void SetOnValueDragFinished(Action callback) => m_OnValueDragFinished = callback; - - void SetupLabel() - { - if (typeof(IValueField).IsAssignableFrom(typeof(T))) - { - m_Label.styleSheets.Add(VFXView.LoadStyleSheet("VFXLabeledField")); - - var dragger = new VFXFieldMouseDragger((IValueField)m_Control, DragValueFinished, DragValueStarted); - dragger.SetDragZone(m_Label); - m_Label.AddToClassList("cursor-slide-arrow"); - } - - m_IndeterminateLabel = new Label() - { - name = "indeterminate", - text = VFXControlConstants.indeterminateText - }; - m_IndeterminateLabel.SetEnabled(false); - } - - void DragValueFinished() - { - m_OnValueDragFinished?.Invoke(this); - } - - void DragValueStarted() - { - m_OnValueDragStarted?.Invoke(this); - } - - void CreateControl(string controlName) - { - m_Control = new T { name = controlName 
}; - Add(m_Control); - - m_Control.RegisterValueChangedCallback(OnControlChange); - } - - void OnControlChange(ChangeEvent e) - { - e.StopPropagation(); - using (ChangeEvent evt = ChangeEvent.GetPooled(e.previousValue, e.newValue)) - { - evt.target = this; - SendEvent(evt); - } - } - - public T control - { - get { return m_Control; } - } - - public Label label - { - get { return m_Label; } - } - - public void SetValueAndNotify(U newValue) - { - m_Control.value = newValue; - } - - public void SetValueWithoutNotify(U newValue) - { - m_Control.SetValueWithoutNotify(newValue); - } - - public U value - { - get { return m_Control.value; } - set { m_Control.value = value; } - } - } - - abstract class ValueControl : VisualElement - { - protected Label m_Label; - - protected ValueControl(Label existingLabel) - { - m_Label = existingLabel; } protected ValueControl(string label) diff --git a/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXMatrix4x4Field.cs b/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXMatrix4x4Field.cs index de83f96d7b3..d8a71d6c7ee 100644 --- a/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXMatrix4x4Field.cs +++ b/Packages/com.unity.visualeffectgraph/Editor/Controls/VFXMatrix4x4Field.cs @@ -1,11 +1,6 @@ using UnityEngine; using UnityEngine.UIElements; - -using Action = System.Action; - -using FloatField = UnityEditor.VFX.UI.VFXLabeledField; - namespace UnityEditor.VFX.UI { class VFXMatrix4x4Field : VFXControl @@ -20,14 +15,14 @@ void CreateTextField() { for (int j = 0; j < m_FloatFields.GetLength(1); ++j) { - var newField = new FloatField(string.Format("{0}{1}", i, j)); + var newField = new FloatField($"{i}{j}"); m_FloatFields[i, j] = newField; newField.AddToClassList("fieldContainer"); - newField.control.AddToClassList("fieldContainer"); + newField.AddToClassList("fieldContainer"); newField.RegisterCallback>(OnFloatValueChanged); - - newField.SetOnValueDragFinished(t => ValueDragFinished()); - newField.SetOnValueDragStarted(t => ValueDragStarted()); + var label = newField.Q