diff --git a/Packages/com.unity.render-pipelines.core/Documentation~/Images/Preferences/AdvancedProperties_Settings.png b/Packages/com.unity.render-pipelines.core/Documentation~/Images/Preferences/AdvancedProperties_Settings.png new file mode 100644 index 00000000000..0a8d359e731 Binary files /dev/null and b/Packages/com.unity.render-pipelines.core/Documentation~/Images/Preferences/AdvancedProperties_Settings.png differ diff --git a/Packages/com.unity.render-pipelines.core/Documentation~/Images/Preferences/HDRP_WaterSurface_General.png b/Packages/com.unity.render-pipelines.core/Documentation~/Images/Preferences/HDRP_WaterSurface_General.png new file mode 100644 index 00000000000..a191b62c4f3 Binary files /dev/null and b/Packages/com.unity.render-pipelines.core/Documentation~/Images/Preferences/HDRP_WaterSurface_General.png differ diff --git a/Packages/com.unity.render-pipelines.core/Documentation~/Images/Preferences/HDRP_WaterSurface_General_Visible.png b/Packages/com.unity.render-pipelines.core/Documentation~/Images/Preferences/HDRP_WaterSurface_General_Visible.png new file mode 100644 index 00000000000..2bf56d0a318 Binary files /dev/null and b/Packages/com.unity.render-pipelines.core/Documentation~/Images/Preferences/HDRP_WaterSurface_General_Visible.png differ diff --git a/Packages/com.unity.render-pipelines.core/Documentation~/Images/Preferences/PopUpAdvanced.png b/Packages/com.unity.render-pipelines.core/Documentation~/Images/Preferences/PopUpAdvanced.png new file mode 100644 index 00000000000..b26b2626580 Binary files /dev/null and b/Packages/com.unity.render-pipelines.core/Documentation~/Images/Preferences/PopUpAdvanced.png differ diff --git a/Packages/com.unity.render-pipelines.core/Documentation~/User-Render-Requests.md b/Packages/com.unity.render-pipelines.core/Documentation~/User-Render-Requests.md index e08eb877552..734afdb0abc 100644 --- a/Packages/com.unity.render-pipelines.core/Documentation~/User-Render-Requests.md +++ 
b/Packages/com.unity.render-pipelines.core/Documentation~/User-Render-Requests.md @@ -89,3 +89,7 @@ public class StandardRenderRequest : MonoBehaviour } ``` +## Other useful information + +- On [Universal Render Pipeline (URP)](https://docs.unity3d.com/Packages/com.unity.render-pipelines.universal@latest/User-Render-Requests.html). + diff --git a/Packages/com.unity.render-pipelines.core/Documentation~/advanced-properties.md b/Packages/com.unity.render-pipelines.core/Documentation~/advanced-properties.md new file mode 100644 index 00000000000..0534d9daf74 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Documentation~/advanced-properties.md @@ -0,0 +1,36 @@ +# Advanced Properties + +Unity Render Pipelines components expose standard properties by default that are suitable for most use-cases. +However, there are components and Volume Overrides that include **advanced properties** which you can use to fine-tune the behavior of the component. + +There is a global state per user that stores if Unity displays **advanced properties** or not. + +## Exposing advanced properties within the inspector + +Not every component or Volume Override includes advanced properties. +If one does, it has a contextual menu to the right of each property section header that includes additional properties. To expose advanced properties for that section, open the contextual menu and click **Advanced Properties**. + +For an example, see the **Water Surface** component in [High Definition Render Pipeline (HDRP)](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@latest). + +By default only standard properties are shown. 
+ +![](Images/Preferences/HDRP_WaterSurface_General.png) + +When you select **Advanced Properties**: + +![](Images/Preferences/PopUpAdvanced.png) + +**Advanced Properties** become visible: + +![](Images/Preferences/HDRP_WaterSurface_General_Visible.png) + +For Volume Overrides, the already existing contextual menu has a **Advanced Properties** toggle as well. + +## Exposing advanced properties on preferences + +You can also access to this global preference by: + +1. Open the **Graphics** tab in the **Preferences** window (menu: **Edit > Preferences > Graphics**). +2. Under **Properties**. Set **Advanced Properties** to **All Visible**. + +![](Images/Preferences/AdvancedProperties_Settings.png) diff --git a/Packages/com.unity.render-pipelines.core/Editor/AdditionalPropertiesPreferences.cs b/Packages/com.unity.render-pipelines.core/Editor/AdditionalPropertiesPreferences.cs deleted file mode 100644 index 94bf4e924ef..00000000000 --- a/Packages/com.unity.render-pipelines.core/Editor/AdditionalPropertiesPreferences.cs +++ /dev/null @@ -1,104 +0,0 @@ -using System; -using System.Linq; -using System.Collections.Generic; -using UnityEngine; -using UnityEditorInternal; - -namespace UnityEditor.Rendering -{ - /// - /// Callback method that will be called when the Global Preferences for Additional Properties is changed - /// - [AttributeUsage(AttributeTargets.Method, AllowMultiple = false, Inherited = false)] - public sealed class SetAdditionalPropertiesVisibilityAttribute : Attribute - { - } - - class AdditionalPropertiesPreferences : ICoreRenderPipelinePreferencesProvider - { - class Styles - { - public static readonly GUIContent additionalPropertiesLabel = EditorGUIUtility.TrTextContent("Visibility", "Toggle all additional properties to either visible or hidden."); - public static readonly GUIContent[] additionalPropertiesNames = { EditorGUIUtility.TrTextContent("All Visible"), EditorGUIUtility.TrTextContent("All Hidden") }; - public static readonly int[] 
additionalPropertiesValues = { 1, 0 }; - } - - static List s_VolumeComponentEditorTypes; - static TypeCache.MethodCollection s_AdditionalPropertiesVisibilityMethods; - static bool s_ShowAllAdditionalProperties = false; - - static AdditionalPropertiesPreferences() - { - s_ShowAllAdditionalProperties = EditorPrefs.GetBool(Keys.showAllAdditionalProperties); - } - - static void InitializeIfNeeded() - { - if (s_VolumeComponentEditorTypes == null) - { - s_AdditionalPropertiesVisibilityMethods = TypeCache.GetMethodsWithAttribute(); - - s_VolumeComponentEditorTypes = TypeCache.GetTypesDerivedFrom() - .Where( - t => !t.IsAbstract - ).ToList(); - } - } - - static bool showAllAdditionalProperties - { - get => s_ShowAllAdditionalProperties; - set - { - s_ShowAllAdditionalProperties = value; - EditorPrefs.SetBool(Keys.showAllAdditionalProperties, s_ShowAllAdditionalProperties); - - ShowAllAdditionalProperties(showAllAdditionalProperties); - } - } - static List s_SearchKeywords = new() { "Additional", "Properties" }; - public List keywords => s_SearchKeywords; - - public GUIContent header { get; } = EditorGUIUtility.TrTextContent("Additional Properties"); - - static class Keys - { - internal const string showAllAdditionalProperties = "General.ShowAllAdditionalProperties"; - } - - public void PreferenceGUI() - { - EditorGUI.BeginChangeCheck(); - int newValue = EditorGUILayout.IntPopup(Styles.additionalPropertiesLabel, showAllAdditionalProperties ? 1 : 0, Styles.additionalPropertiesNames, Styles.additionalPropertiesValues); - if (EditorGUI.EndChangeCheck()) - { - showAllAdditionalProperties = newValue == 1; - } - } - - static void ShowAllAdditionalProperties(bool value) - { - // The way we do this here is to gather all types of either VolumeComponentEditor or IAdditionalPropertiesBoolFlagsHandler (for regular components) - // then we instantiate those classes in order to be able to call the relevant function to update the "ShowAdditionalProperties" flags. 
- // The instance on which we call is not important because in the end it will only change a global editor preference. - InitializeIfNeeded(); - - // Volume components - foreach (var editorType in s_VolumeComponentEditorTypes) - { - var key = VolumeComponentEditor.GetAdditionalPropertiesPreferenceKey(editorType); - var showAdditionalProperties = new EditorPrefBool(key); - showAdditionalProperties.value = value; - } - - // Regular components - foreach (var method in s_AdditionalPropertiesVisibilityMethods) - { - method.Invoke(null, new object[1] { value }); - } - - // Force repaint in case some editors are already open. - InternalEditorUtility.RepaintAllViews(); - } - } -} diff --git a/Packages/com.unity.render-pipelines.core/Editor/CoreEditorDrawers.cs b/Packages/com.unity.render-pipelines.core/Editor/CoreEditorDrawers.cs index 0a958cd70f4..32535d23643 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/CoreEditorDrawers.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/CoreEditorDrawers.cs @@ -180,20 +180,12 @@ void IDrawer.Draw(TData data, Editor owner) if (m_Enabler != null && !m_Enabler(data, owner)) return; - if (m_Anim != null) - CoreEditorUtils.BeginAdditionalPropertiesHighlight(m_Anim); - - for (var i = 0; i < m_ActionDrawers.Length; i++) - m_ActionDrawers[i](data, owner); - - if (m_Anim != null) + if (AdvancedProperties.BeginGroup(m_Anim)) { - CoreEditorUtils.EndAdditionalPropertiesHighlight(); - - // While the highlight is being changed, force the Repaint of the editor - if (m_Anim.value > 0.0f) - owner?.Repaint(); + for (var i = 0; i < m_ActionDrawers.Length; i++) + m_ActionDrawers[i](data, owner); } + AdvancedProperties.EndGroup(); } bool IDrawer.Expand(int mask) => DefaultExpand(m_ActionDrawers, mask); @@ -888,6 +880,7 @@ public static IDrawer AdditionalPropertiesFoldoutGroup(GUIConten where TEnum : struct, IConvertible where TAPEnum : struct, IConvertible { + additionalContent ??= Group((s, o) => { }); return 
AdditionalPropertiesFoldoutGroup(foldoutTitle, foldoutMask, foldoutState, additionalPropertiesMask, additionalPropertiesState, normalContent.Draw, additionalContent.Draw, options, customMenuContextAction, otherDocumentation); } @@ -972,7 +965,10 @@ void SwitchEnabler(TData data, Editor owner) return FoldoutGroup(foldoutTitle, foldoutMask, foldoutState, options, customMenuContextAction, Enabler, SwitchEnabler, otherDocumentation, normalContent, - ConditionalWithAdditionalProperties((serialized, owner) => additionalPropertiesState[additionalPropertiesMask] && foldoutState[foldoutMask], additionalPropertiesState.GetAnimation(additionalPropertiesMask), additionalContent).Draw + ConditionalWithAdditionalProperties( + (serialized, owner) => additionalPropertiesState[additionalPropertiesMask] && foldoutState[foldoutMask], + AdvancedProperties.s_AnimFloat, + additionalContent).Draw ); } } diff --git a/Packages/com.unity.render-pipelines.core/Editor/CoreEditorUtils.cs b/Packages/com.unity.render-pipelines.core/Editor/CoreEditorUtils.cs index 2844c054146..0f9077c7d07 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/CoreEditorUtils.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/CoreEditorUtils.cs @@ -822,14 +822,14 @@ static Action CreateMenuContextAction(Action contextAction, Fu { if (contextAction == null && (hasMoreOptions != null || customMenuContextAction != null)) { - // If no contextual menu add one for the additional properties. + // If no contextual menu add one for the advanced properties. 
contextAction = pos => { var menu = new GenericMenu(); if (customMenuContextAction != null) customMenuContextAction(menu); if (hasMoreOptions != null) - AddAdditionalPropertiesContext(menu, hasMoreOptions, toggleMoreOptions); + menu.AddAdvancedPropertiesBoolMenuItem(hasMoreOptions, toggleMoreOptions); menu.DropDown(new Rect(pos, Vector2.zero)); }; } @@ -877,12 +877,6 @@ static void ShowHelpButton(Rect contextMenuRect, string documentationURL, GUICon Help.BrowseURL(documentationURL); } - static void AddAdditionalPropertiesContext(GenericMenu menu, Func hasMoreOptions, Action toggleMoreOptions) - { - menu.AddItem(EditorGUIUtility.TrTextContent("Show Additional Properties"), hasMoreOptions.Invoke(), () => toggleMoreOptions.Invoke()); - menu.AddItem(EditorGUIUtility.TrTextContent("Show All Additional Properties..."), false, () => CoreRenderPipelinePreferences.Open()); - } - /// /// Draw a Color Field but convert the color to gamma space before displaying it in the shader. /// Using SetColor on a material does the conversion, but setting the color as vector3 in a constant buffer doesn't @@ -1387,19 +1381,6 @@ internal static void TryToFixFilterMode(float pixelsPerPoint, Texture2D icon) #endregion - internal static void BeginAdditionalPropertiesHighlight(AnimFloat animation) - { - var oldColor = GUI.color; - GUI.color = Color.Lerp(CoreEditorStyles.backgroundColor * oldColor, CoreEditorStyles.backgroundHighlightColor, animation.value); - EditorGUILayout.BeginVertical(CoreEditorStyles.additionalPropertiesHighlightStyle); - GUI.color = oldColor; - } - - internal static void EndAdditionalPropertiesHighlight() - { - EditorGUILayout.EndVertical(); - } - internal static T CreateAssetAt(Scene scene, string targetName) where T : ScriptableObject { string path; diff --git a/Packages/com.unity.render-pipelines.core/Editor/CoreRenderPipelinePreferences.cs b/Packages/com.unity.render-pipelines.core/Editor/CoreRenderPipelinePreferences.cs index 832524c64b1..2178d1fd0af 100644 --- 
a/Packages/com.unity.render-pipelines.core/Editor/CoreRenderPipelinePreferences.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/CoreRenderPipelinePreferences.cs @@ -1,6 +1,7 @@ using System; using System.Collections.Generic; using System.Reflection; +using UnityEngine.Rendering; namespace UnityEditor.Rendering { @@ -12,7 +13,7 @@ public static class CoreRenderPipelinePreferences /// /// Path to the Render Pipeline Preferences /// - public static readonly string corePreferencePath = "Preferences/Core Render Pipeline"; + public static readonly string corePreferencePath = "Preferences/Graphics"; private static readonly List s_Providers = new(); @@ -25,6 +26,14 @@ static void InitPreferenceProviders() continue; s_Providers.Add(Activator.CreateInstance(provider) as ICoreRenderPipelinePreferencesProvider); } + + s_Providers.Sort((x, y) => GetDisplayInfoOrder(x.GetType()).CompareTo(GetDisplayInfoOrder(y.GetType()))); + } + + static int GetDisplayInfoOrder(Type type) + { + var attribute = type.GetCustomAttribute(); + return attribute?.order ?? 
int.MaxValue; } [SettingsProvider] @@ -38,8 +47,11 @@ static SettingsProvider PreferenceGUI() { foreach (var providers in s_Providers) { - EditorGUILayout.LabelField(providers.header, EditorStyles.boldLabel); - providers.PreferenceGUI(); + if (providers.header != null) + { + EditorGUILayout.LabelField(providers.header, EditorStyles.boldLabel); + providers.PreferenceGUI(); + } } } } diff --git a/Packages/com.unity.render-pipelines.core/Editor/Debugging/DebugWindow.cs b/Packages/com.unity.render-pipelines.core/Editor/Debugging/DebugWindow.cs index d2b5373c36e..f55dd0ec0e2 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Debugging/DebugWindow.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Debugging/DebugWindow.cs @@ -25,7 +25,19 @@ sealed class DebugWindowSettings : ScriptableObject // Keep these settings in a separate scriptable object so we can handle undo/redo on them // without the rest of the debug window interfering public int currentStateHash; - public int selectedPanel; + + public int selectedPanel + { + get => Mathf.Max(0, DebugManager.instance.PanelIndex(selectedPanelDisplayName)); + set + { + var displayName = DebugManager.instance.PanelDiplayName(value); + if (!string.IsNullOrEmpty(displayName)) + selectedPanelDisplayName = displayName; + } + } + + public string selectedPanelDisplayName; void OnEnable() { diff --git a/Packages/com.unity.render-pipelines.core/Editor/Deprecated.cs b/Packages/com.unity.render-pipelines.core/Editor/Deprecated.cs index 38402155432..e1bdc6b6989 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Deprecated.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Deprecated.cs @@ -5,6 +5,15 @@ namespace UnityEditor.Rendering { + /// + /// Callback method that will be called when the Global Preferences for Additional Properties is changed + /// + [AttributeUsage(AttributeTargets.Method, AllowMultiple = false, Inherited = false)] + [Obsolete("This attribute is not handled anymore. 
Use Advanced Properties. #from(6000.0)", false)] + public sealed class SetAdditionalPropertiesVisibilityAttribute : Attribute + { + } + /// /// This attributes tells a class which type of /// it's an editor for. diff --git a/Packages/com.unity.render-pipelines.core/Editor/EditorPrefBoolFlags.cs b/Packages/com.unity.render-pipelines.core/Editor/EditorPrefBoolFlags.cs index b3d4133ad36..6ba5064e016 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/EditorPrefBoolFlags.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/EditorPrefBoolFlags.cs @@ -15,11 +15,17 @@ public T value /// The raw value public uint rawValue - { get => (uint)EditorPrefs.GetInt(m_Key); set => EditorPrefs.SetInt(m_Key, (int)value); } + { + get => (uint)EditorPrefs.GetInt(m_Key); + set => EditorPrefs.SetInt(m_Key, (int)value); + } /// Constructor /// Name of the Key in EditorPrefs to save the value - public EditorPrefBoolFlags(string key) => m_Key = key; + public EditorPrefBoolFlags(string key) + { + m_Key = key; + } /// Test if saved value is equal to the one given /// Given value diff --git a/Packages/com.unity.render-pipelines.core/Editor/ICoreRenderPipelinePreferencesProvider.cs b/Packages/com.unity.render-pipelines.core/Editor/ICoreRenderPipelinePreferencesProvider.cs index bdb8ce5f628..177609d7532 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/ICoreRenderPipelinePreferencesProvider.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/ICoreRenderPipelinePreferencesProvider.cs @@ -1,5 +1,7 @@ using System.Collections.Generic; +using System.Reflection; using UnityEngine; +using UnityEngine.Rendering; namespace UnityEditor.Rendering { @@ -16,7 +18,15 @@ public interface ICoreRenderPipelinePreferencesProvider /// /// The header of the panel /// - GUIContent header { get; } + GUIContent header + { + get + { + var type = GetType(); + var displayTypeInfoAttribute = type.GetCustomAttribute(); + return EditorGUIUtility.TrTextContent(displayTypeInfoAttribute != null ? 
displayTypeInfoAttribute.name : type.Name); + } + } /// /// Renders the Preferences UI for this provider diff --git a/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/Global@2x.png b/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/Global@2x.png index 02c06ba242a..77b66fc5640 100644 Binary files a/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/Global@2x.png and b/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/Global@2x.png differ diff --git a/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/MultipleUsage.png b/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/MultipleUsage.png new file mode 100644 index 00000000000..5024cc74a3a Binary files /dev/null and b/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/MultipleUsage.png differ diff --git a/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/MultipleUsage.png.meta b/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/MultipleUsage.png.meta new file mode 100644 index 00000000000..5caa38131fd --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/MultipleUsage.png.meta @@ -0,0 +1,336 @@ +fileFormatVersion: 2 +guid: cd47f6d5edd2c7541a0baf3774a54fdd +TextureImporter: + internalIDToNameTable: + - first: + 213: -2210434942944035985 + second: MultipleUsage_0 + externalObjects: {} + serializedVersion: 13 + mipmaps: + mipMapMode: 0 + enableMipMap: 0 + sRGBTexture: 1 + linearTexture: 0 + fadeOut: 0 + borderMipMap: 0 + mipMapsPreserveCoverage: 0 + alphaTestReferenceValue: 0.5 + mipMapFadeDistanceStart: 1 + mipMapFadeDistanceEnd: 3 + bumpmap: + convertToNormalMap: 0 + externalNormalMap: 0 + heightScale: 0.25 + normalMapFilter: 0 + flipGreenChannel: 0 + isReadable: 0 + streamingMipmaps: 0 + streamingMipmapsPriority: 0 + vTOnly: 0 + ignoreMipmapLimit: 0 + grayScaleToAlpha: 0 + 
generateCubemap: 6 + cubemapConvolution: 0 + seamlessCubemap: 0 + textureFormat: 1 + maxTextureSize: 2048 + textureSettings: + serializedVersion: 2 + filterMode: 1 + aniso: 1 + mipBias: 0 + wrapU: 1 + wrapV: 1 + wrapW: 1 + nPOTScale: 0 + lightmap: 0 + compressionQuality: 50 + spriteMode: 2 + spriteExtrude: 1 + spriteMeshType: 1 + alignment: 0 + spritePivot: {x: 0.5, y: 0.5} + spritePixelsToUnits: 100 + spriteBorder: {x: 0, y: 0, z: 0, w: 0} + spriteGenerateFallbackPhysicsShape: 1 + alphaUsage: 1 + alphaIsTransparency: 1 + spriteTessellationDetail: -1 + textureType: 8 + textureShape: 1 + singleChannelComponent: 0 + flipbookRows: 1 + flipbookColumns: 1 + maxTextureSizeSet: 0 + compressionQualitySet: 0 + textureFormatSet: 0 + ignorePngGamma: 0 + applyGammaDecoding: 0 + swizzle: 50462976 + cookieLightType: 0 + platformSettings: + - serializedVersion: 4 + buildTarget: DefaultTexturePlatform + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: PS4 + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: CloudRendering + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: Nintendo Switch + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + 
compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: WebGL + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: Win64 + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: QNX + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: Android + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: GameCoreXboxOne + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: Standalone + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 
+ compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: EmbeddedLinux + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: WindowsStoreApps + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: iOS + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: PS5 + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: VisionOS + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: GameCoreScarlett + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + 
textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: tvOS + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + spriteSheet: + serializedVersion: 2 + sprites: + - serializedVersion: 2 + name: MultipleUsage_0 + rect: + serializedVersion: 2 + x: 0 + y: 0 + width: 16 + height: 16 + alignment: 0 + pivot: {x: 0, y: 0} + border: {x: 0, y: 0, z: 0, w: 0} + customData: + outline: [] + physicsShape: [] + tessellationDetail: -1 + bones: [] + spriteID: f6bb5ba7125f251e0800000000000000 + internalID: -2210434942944035985 + vertices: [] + indices: + edges: [] + weights: [] + outline: [] + customData: + physicsShape: [] + bones: [] + spriteID: + internalID: 0 + vertices: [] + indices: + edges: [] + weights: [] + secondaryTextures: [] + nameFileIdTable: + MultipleUsage_0: -2210434942944035985 + mipmapLimitGroupName: + pSDRemoveMatte: 0 + userData: + assetBundleName: + assetBundleVariant: diff --git a/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/d_Global@2x.png b/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/d_Global@2x.png index b4aa84a2cac..d34ae5d3056 100644 Binary files a/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/d_Global@2x.png and b/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/d_Global@2x.png differ diff --git a/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/d_MultipleUsage.png b/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/d_MultipleUsage.png new file mode 100644 index 
00000000000..686321ddca6 Binary files /dev/null and b/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/d_MultipleUsage.png differ diff --git a/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/d_MultipleUsage.png.meta b/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/d_MultipleUsage.png.meta new file mode 100644 index 00000000000..42e96863854 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Editor/Icons/RenderGraphViewer/d_MultipleUsage.png.meta @@ -0,0 +1,336 @@ +fileFormatVersion: 2 +guid: 60e80b0fa23ea904b92263803d64319c +TextureImporter: + internalIDToNameTable: + - first: + 213: -4520544496107484141 + second: d_MultipleUsage_0 + externalObjects: {} + serializedVersion: 13 + mipmaps: + mipMapMode: 0 + enableMipMap: 0 + sRGBTexture: 1 + linearTexture: 0 + fadeOut: 0 + borderMipMap: 0 + mipMapsPreserveCoverage: 0 + alphaTestReferenceValue: 0.5 + mipMapFadeDistanceStart: 1 + mipMapFadeDistanceEnd: 3 + bumpmap: + convertToNormalMap: 0 + externalNormalMap: 0 + heightScale: 0.25 + normalMapFilter: 0 + flipGreenChannel: 0 + isReadable: 0 + streamingMipmaps: 0 + streamingMipmapsPriority: 0 + vTOnly: 0 + ignoreMipmapLimit: 0 + grayScaleToAlpha: 0 + generateCubemap: 6 + cubemapConvolution: 0 + seamlessCubemap: 0 + textureFormat: 1 + maxTextureSize: 2048 + textureSettings: + serializedVersion: 2 + filterMode: 1 + aniso: 1 + mipBias: 0 + wrapU: 1 + wrapV: 1 + wrapW: 1 + nPOTScale: 0 + lightmap: 0 + compressionQuality: 50 + spriteMode: 2 + spriteExtrude: 1 + spriteMeshType: 1 + alignment: 0 + spritePivot: {x: 0.5, y: 0.5} + spritePixelsToUnits: 100 + spriteBorder: {x: 0, y: 0, z: 0, w: 0} + spriteGenerateFallbackPhysicsShape: 1 + alphaUsage: 1 + alphaIsTransparency: 1 + spriteTessellationDetail: -1 + textureType: 8 + textureShape: 1 + singleChannelComponent: 0 + flipbookRows: 1 + flipbookColumns: 1 + maxTextureSizeSet: 0 + compressionQualitySet: 0 + textureFormatSet: 0 + ignorePngGamma: 0 + 
applyGammaDecoding: 0 + swizzle: 50462976 + cookieLightType: 0 + platformSettings: + - serializedVersion: 4 + buildTarget: DefaultTexturePlatform + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: PS4 + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: CloudRendering + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: Nintendo Switch + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: WebGL + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: Win64 + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + 
ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: QNX + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: Android + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: GameCoreXboxOne + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: Standalone + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: EmbeddedLinux + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: WindowsStoreApps + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 
0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: iOS + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: PS5 + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: VisionOS + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: GameCoreScarlett + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + - serializedVersion: 4 + buildTarget: tvOS + maxTextureSize: 2048 + resizeAlgorithm: 0 + textureFormat: -1 + textureCompression: 1 + compressionQuality: 50 + crunchedCompression: 0 + allowsAlphaSplitting: 0 + overridden: 0 + ignorePlatformSupport: 0 + androidETC2FallbackOverride: 0 + forceMaximumCompressionQuality_BC6H_BC7: 0 + spriteSheet: + serializedVersion: 2 + sprites: + - serializedVersion: 2 + name: d_MultipleUsage_0 + rect: + serializedVersion: 2 + x: 0 + y: 0 + width: 16 + height: 16 + alignment: 0 + pivot: {x: 0, y: 0} + 
border: {x: 0, y: 0, z: 0, w: 0} + customData: + outline: [] + physicsShape: [] + tessellationDetail: -1 + bones: [] + spriteID: 31c8a2e4bbcc341c0800000000000000 + internalID: -4520544496107484141 + vertices: [] + indices: + edges: [] + weights: [] + outline: [] + customData: + physicsShape: [] + bones: [] + spriteID: + internalID: 0 + vertices: [] + indices: + edges: [] + weights: [] + secondaryTextures: [] + nameFileIdTable: + d_MultipleUsage_0: -4520544496107484141 + mipmapLimitGroupName: + pSDRemoveMatte: 0 + userData: + assetBundleName: + assetBundleVariant: diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/.buginfo b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/.buginfo deleted file mode 100644 index e13772f5bdb..00000000000 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/.buginfo +++ /dev/null @@ -1 +0,0 @@ -area: HD RP diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeAdjustmentVolumeEditor.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeAdjustmentVolumeEditor.cs index 70682ec6b13..a75f6ff5925 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeAdjustmentVolumeEditor.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeAdjustmentVolumeEditor.cs @@ -80,25 +80,6 @@ enum Expandable readonly static ExpandedState k_ExpandedState = new ExpandedState(Expandable.Volume | Expandable.Adjustments); readonly static AdditionalPropertiesState k_AdditionalPropertiesState = new AdditionalPropertiesState(0); - public static void RegisterEditor(ProbeAdjustmentVolumeEditor editor) - { - k_AdditionalPropertiesState.RegisterEditor(editor); - } - - public static void UnregisterEditor(ProbeAdjustmentVolumeEditor editor) - { - k_AdditionalPropertiesState.UnregisterEditor(editor); - } - - [SetAdditionalPropertiesVisibility] - public static void 
SetAdditionalPropertiesVisibility(bool value) - { - if (value) - k_AdditionalPropertiesState.ShowAll(); - else - k_AdditionalPropertiesState.HideAll(); - } - public static void DrawVolumeContent(SerializedProbeAdjustmentVolume serialized, Editor owner) { EditorGUILayout.PropertyField(serialized.shape); diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Dilate.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Dilate.cs index d1196f6c419..d8f7dfef242 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Dilate.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Dilate.cs @@ -87,7 +87,7 @@ internal void FromSphericalHarmonicsShaderConstants(ProbeReferenceVolume.Cell ce if (cellChunkData.skyShadingDirectionIndices.Length != 0) { int id = cellChunkData.skyShadingDirectionIndices[index]; - var directions = DynamicSkyPrecomputedDirections.GetPrecomputedDirections(); + var directions = ProbeVolumeConstantRuntimeResources.GetSkySamplingDirections(); SO_Direction = id == 255 ? Vector3.zero : directions[id]; } } @@ -171,7 +171,6 @@ public void Dispose() static readonly int _ProbePositionsBuffer = Shader.PropertyToID("_ProbePositionsBuffer"); static readonly int _NeedDilating = Shader.PropertyToID("_NeedDilating"); static readonly int _DilationParameters = Shader.PropertyToID("_DilationParameters"); - static readonly int _DilationParameters2 = Shader.PropertyToID("_DilationParameters2"); static readonly int _OutputProbes = Shader.PropertyToID("_OutputProbes"); // Can definitively be optimized later on. 
@@ -296,8 +295,7 @@ static void PerformDilation(ProbeReferenceVolume.Cell cell, ProbeVolumeBakingSet // There's an upper limit on the number of bricks supported inside a single cell int probeCount = Mathf.Min(cell.data.probePositions.Length, ushort.MaxValue * ProbeBrickPool.kBrickProbeCountTotal); - cmd.SetComputeVectorParam(dilationShader, _DilationParameters, new Vector4(probeCount, settings.dilationValidityThreshold, settings.dilationDistance, ProbeReferenceVolume.instance.MinBrickSize())); - cmd.SetComputeVectorParam(dilationShader, _DilationParameters2, new Vector4(settings.squaredDistWeighting ? 1 : 0, bakingSet.skyOcclusion ? 1 : 0, bakingSet.skyOcclusionShadingDirection ? 1 : 0, 0)); + cmd.SetComputeVectorParam(dilationShader, _DilationParameters, new Vector4(probeCount, settings.dilationValidityThreshold, settings.dilationDistance, settings.squaredDistWeighting ? 1 : 0)); var refVolume = ProbeReferenceVolume.instance; ProbeReferenceVolume.RuntimeResources rr = refVolume.GetRuntimeResources(); @@ -330,12 +328,11 @@ static void PerformDilation(ProbeReferenceVolume.Cell cell, ProbeVolumeBakingSet parameters.samplingNoise = 0; parameters.weight = 1f; parameters.leakReductionMode = APVLeakReductionMode.None; - parameters.minValidNormalWeight = 0.0f; parameters.frameIndexForNoise = 0; parameters.reflNormalizationLowerClamp = 0.1f; parameters.reflNormalizationUpperClamp = 1.0f; - parameters.skyOcclusionIntensity = 0.0f; - parameters.skyOcclusionShadingDirection = false; + parameters.skyOcclusionIntensity = bakingSet.skyOcclusion ? 1 : 0; + parameters.skyOcclusionShadingDirection = bakingSet.skyOcclusionShadingDirection ? 
true : false; parameters.regionCount = 1; parameters.regionLayerMasks = 1; ProbeReferenceVolume.instance.UpdateConstantBuffer(cmd, parameters); diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Invalidation.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Invalidation.cs index 842bd04247e..842f2a857b4 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Invalidation.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Invalidation.cs @@ -15,7 +15,7 @@ internal static Vector3Int GetSampleOffset(int i) return new Vector3Int(i & 1, (i >> 1) & 1, (i >> 2) & 1); } - const float k_MinValidityForLeaking = 0.05f; + const float k_MinValidityForLeaking = APVDefinitions.probeValidityThreshold; internal static uint PackValidity(float[] validity) { diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Placement.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Placement.cs index fd14e0aa31b..36b15a79f96 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Placement.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Placement.cs @@ -314,7 +314,7 @@ static internal ProbeSubdivisionResult BakeBricks(ProbeSubdivisionContext ctx, i if (filteredContributors.Count == 0 && !overlappingProbeVolumes.Any(v => v.component.fillEmptySpaces)) continue; - var bricks = ProbePlacement.SubdivideCell(cell.bounds, ctx, gpuResources, filteredContributors, overlappingProbeVolumes); + var bricks = ProbePlacement.SubdivideCell(cell.position, cell.bounds, ctx, gpuResources, filteredContributors, overlappingProbeVolumes); if (bricks.Length == 0) continue; diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs 
b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs index 714f91b14fa..21328bc8965 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.Serialization.cs @@ -326,8 +326,8 @@ static void AnalyzeBrickForIndirectionEntries(ref BakingCell cell) cell.indirectionEntryInfo[i].positionInBricks = cellPosInBricks + new Vector3Int(x, y, z) * indirectionEntrySizeInBricks; cell.indirectionEntryInfo[i].hasOnlyBiggerBricks = minSubdiv > entrySubdivLevel && touchedBrick; - ProbeBrickIndex.IndirectionEntryUpdateInfo unused = new ProbeBrickIndex.IndirectionEntryUpdateInfo(); - int brickCount = ProbeReferenceVolume.instance.GetNumberOfBricksAtSubdiv(cell.indirectionEntryInfo[i], ref unused); + prv.ComputeEntryMinMax(ref cell.indirectionEntryInfo[i], cell.bricks); + int brickCount = ProbeReferenceVolume.GetNumberOfBricksAtSubdiv(cell.indirectionEntryInfo[i]); totalIndexChunks += Mathf.CeilToInt((float)brickCount / ProbeBrickIndex.kIndexChunkSize); diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.SkyOcclusion.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.SkyOcclusion.cs index f9cf7755f77..0be43b60f44 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.SkyOcclusion.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.SkyOcclusion.cs @@ -55,7 +55,7 @@ internal static NativeArray EncodeShadingDirection(NativeArray di var cs = GraphicsSettings.GetRenderPipelineSettings().skyOcclusionCS; int kernel = cs.FindKernel("EncodeShadingDirection"); - DynamicSkyPrecomputedDirections.Initialize(); + ProbeVolumeConstantRuntimeResources.Initialize(); var precomputedShadingDirections = 
ProbeReferenceVolume.instance.GetRuntimeResources().SkyPrecomputedDirections; int probeCount = directions.Length; @@ -93,7 +93,7 @@ internal static NativeArray EncodeShadingDirection(NativeArray di internal static uint EncodeSkyShadingDirection(Vector3 direction) { - var precomputedDirections = DynamicSkyPrecomputedDirections.GetPrecomputedDirections(); + var precomputedDirections = ProbeVolumeConstantRuntimeResources.GetSkySamplingDirections(); uint indexMax = 255; float bestDot = -10.0f; diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbePlacement.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbePlacement.cs index 91648519177..931edf3dc6c 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbePlacement.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbePlacement.cs @@ -204,7 +204,7 @@ static Material voxelizeMaterial } } - public static Brick[] SubdivideCell(Bounds cellBounds, ProbeSubdivisionContext subdivisionCtx, GPUSubdivisionContext ctx, GIContributors contributors, List<(ProbeVolume component, ProbeReferenceVolume.Volume volume, Bounds bounds)> probeVolumes) + public static Brick[] SubdivideCell(Vector3Int cellPosition, Bounds cellBounds, ProbeSubdivisionContext subdivisionCtx, GPUSubdivisionContext ctx, GIContributors contributors, List<(ProbeVolume component, ProbeReferenceVolume.Volume volume, Bounds bounds)> probeVolumes) { Brick[] finalBricks; HashSet brickSet = new HashSet(); @@ -279,6 +279,7 @@ public static Brick[] SubdivideCell(Bounds cellBounds, ProbeSubdivisionContext s // In case there is at least one brick in the sub-cell, we need to spawn the parent brick. 
if (hasMaxSizedBricks) { + int cellSizeInBricks = ProbeReferenceVolume.CellSize(ctx.maxSubdivisionLevel); float minBrickSize = subdivisionCtx.profile.minBrickSize; Vector3 cellID = cellBounds.min / minBrickSize; float parentSubdivLevel = 3.0f; @@ -288,7 +289,7 @@ public static Brick[] SubdivideCell(Bounds cellBounds, ProbeSubdivisionContext s // Add the sub-cell offset: int brickSize = (int)Mathf.Pow(3, i + 1); Vector3Int subCellPosInt = new Vector3Int(Mathf.FloorToInt(subCellPos.x), Mathf.FloorToInt(subCellPos.y), Mathf.FloorToInt(subCellPos.z)) * brickSize; - Vector3Int parentSubCellPos = new Vector3Int(Mathf.RoundToInt(cellID.x), Mathf.RoundToInt(cellID.y), Mathf.RoundToInt(cellID.z)) + subCellPosInt; + Vector3Int parentSubCellPos = cellPosition * cellSizeInBricks + subCellPosInt; // Find the corner in bricks of the parent volume: brickSet.Add(new Brick(parentSubCellPos, i + 1)); diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBakingSetEditor.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBakingSetEditor.cs index fe0452b415c..c3c01cabf89 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBakingSetEditor.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBakingSetEditor.cs @@ -88,7 +88,7 @@ static class Styles // Probe Invalidity section public static readonly GUIContent resetDilation = new GUIContent("Reset Dilation Settings"); public static readonly GUIContent resetVirtualOffset = new GUIContent("Reset Virtual Offset Settings"); - public static readonly GUIContent renderingLayerMasks = new GUIContent("Rendering Layer Masks", "When enabled, geometry in a Rendering Layer will only receive lighting from probes which see Rendering Layers in the same Rendering Layer Mask. 
This can be used to prevent leaking across boundaries.\nGeometry not belonging to a Rendering Layer Mask will continue to sample all probes."); + public static readonly GUIContent renderingLayerMasks = new GUIContent("Rendering Layer Masks", "When enabled, geometry in a Rendering Layer will only receive lighting from probes which see Rendering Layers in the same Rendering Layer Mask. This can be used to prevent leaking across boundaries.\nGeometry not belonging to a Rendering Layer Mask will continue to sample all probes. Requires Leak Reduction Mode to be enabled."); public static readonly string maskTooltip = "The Rendering Layers for this mask."; } diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeCellDilation.compute b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeCellDilation.compute index 4f1604287c7..65902f1b279 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeCellDilation.compute +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeCellDilation.compute @@ -18,11 +18,7 @@ CBUFFER_END #define _ProbeCount (uint)_DilationParameters.x #define _ValidityThreshold _DilationParameters.y #define _SearchRadius _DilationParameters.z -#define _MinBrickSizeForDilation _DilationParameters.w - -#define _SquaredDistanceWeight (_DilationParameters2.x > 0) -#define _EnableSkyOcclusion (_DilationParameters2.y > 0) -#define _EnableSkyDirection (_DilationParameters2.z > 0) +#define _SquaredDistanceWeight (_DilationParameters.w > 0) StructuredBuffer _NeedDilating; StructuredBuffer _ProbePositionsBuffer; @@ -60,7 +56,7 @@ void AddProbeSample(APVResources apvRes, float3 uvw, inout DilatedProbe probe, f } // Sky occlusion data - if (_EnableSkyOcclusion) + if (_APVSkyOcclusionWeight > 0) { float4 SO_L0L1 = SAMPLE_TEXTURE3D_LOD(apvRes.SkyOcclusionL0L1, s_linear_clamp_sampler, uvw, 0).rgba * weight; @@ -68,9 +64,9 @@ void 
AddProbeSample(APVResources apvRes, float3 uvw, inout DilatedProbe probe, f { probe.SO_L0L1 += SO_L0L1 * weight; - if (_EnableSkyDirection) + if (_APVSkyDirectionWeight > 0) { - int3 texCoord = uvw * _PoolDim - 0.5f; // No interpolation for sky shading indices + int3 texCoord = uvw * _APVPoolDim - 0.5f; // No interpolation for sky shading indices uint index = LOAD_TEXTURE3D(apvRes.SkyShadingDirectionIndices, texCoord).x * 255.0; probe.SO_Direction = index == 255 ? float3(0, 0, 0) : apvRes.SkyPrecomputedDirections[index].rgb * weight; @@ -103,18 +99,18 @@ void DilateCell(uint3 id : SV_DispatchThreadID) if (_NeedDilating[probeIdx] > 0) { - float3 centralPosition = _ProbePositionsBuffer[probeIdx] - _WorldOffset; + float3 centralPosition = _ProbePositionsBuffer[probeIdx] - _APVWorldOffset; DilatedProbe probe = (DilatedProbe)0; float3 uvw; uint subdiv; float shWeight = 0, soWeight = 0; - float3 uvwDelta = rcp(_PoolDim); + float3 uvwDelta = rcp(_APVPoolDim); float3 biasedPosWS; if (TryToGetPoolUVWAndSubdiv(FillAPVResources(), centralPosition, 0, 0, uvw, subdiv, biasedPosWS)) { - float stepSize = _MinBrickSizeForDilation / 3.0f; + float stepSize = _APVMinBrickSize / 3.0f; // Inflate search radius a bit. 
float radius = 1.5f * _SearchRadius; diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumesOptionsEditor.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumesOptionsEditor.cs index f557e3d6741..983d0e6388c 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumesOptionsEditor.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumesOptionsEditor.cs @@ -10,7 +10,6 @@ sealed class ProbeVolumesOptionsEditor : VolumeComponentEditor SerializedDataParameter m_ScaleBiasMinProbeDistance; SerializedDataParameter m_SamplingNoise; SerializedDataParameter m_LeakReductionMode; - SerializedDataParameter m_MinValidDotProdValue; SerializedDataParameter m_AnimateNoise; SerializedDataParameter m_OcclusionOnlyNormalization; @@ -26,7 +25,6 @@ public override void OnEnable() m_ScaleBiasMinProbeDistance = Unpack(o.Find(x => x.scaleBiasWithMinProbeDistance)); m_SamplingNoise = Unpack(o.Find(x => x.samplingNoise)); m_LeakReductionMode = Unpack(o.Find(x => x.leakReductionMode)); - m_MinValidDotProdValue = Unpack(o.Find(x => x.minValidDotProductValue)); m_AnimateNoise = Unpack(o.Find(x => x.animateSamplingNoise)); m_OcclusionOnlyNormalization = Unpack(o.Find(x => x.occlusionOnlyReflectionNormalization)); @@ -44,15 +42,6 @@ public override void OnInspectorGUI() PropertyField(m_SamplingNoise); PropertyField(m_AnimateNoise); PropertyField(m_LeakReductionMode); - if (m_LeakReductionMode.value.intValue == (int)APVLeakReductionMode.ValidityBased) - { - } - else if (m_LeakReductionMode.value.intValue == (int)APVLeakReductionMode.ValidityAndNormalBased) - { - using (new IndentLevelScope()) - PropertyField(m_MinValidDotProdValue); - } - PropertyField(m_OcclusionOnlyNormalization); PropertyField(m_IntensityMultiplier); diff --git a/Packages/com.unity.render-pipelines.core/Editor/Material/DecalPreferences.cs 
b/Packages/com.unity.render-pipelines.core/Editor/Material/DecalPreferences.cs new file mode 100644 index 00000000000..0bc404e8930 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Editor/Material/DecalPreferences.cs @@ -0,0 +1,47 @@ +using System; +using System.Collections.Generic; +using UnityEngine; +using UnityEngine.Rendering; +using RuntimeSRPPreferences = UnityEngine.Rendering.CoreRenderPipelinePreferences; + +namespace UnityEditor.Rendering +{ + /// + /// Preferences for Decals + /// + public class DecalPreferences : ICoreRenderPipelinePreferencesProvider + { + static readonly Color k_DecalGizmoColorBase = new Color(1, 1, 1, 8f / 255); + static Func GetColorPrefDecalGizmoColor; + + /// + /// Obtains the color of the decal gizmo + /// + public static Color decalGizmoColor => GetColorPrefDecalGizmoColor(); + + static DecalPreferences() + { + GetColorPrefDecalGizmoColor = RuntimeSRPPreferences.RegisterPreferenceColor("Scene/Decal", k_DecalGizmoColorBase); + } + + static List s_SearchKeywords = new() { "Decals" }; + + /// + /// The list of keywords for user search + /// + public List keywords => s_SearchKeywords; + + /// + /// The header of the panel + /// + public GUIContent header => null; // For now this is only a data preference without UI + + /// + /// Renders the Preferences UI for this provider + /// + public void PreferenceGUI() + { + // For now this is only a data preference without UI + } + } +} diff --git a/Packages/com.unity.render-pipelines.core/Editor/Material/DecalPreferences.cs.meta b/Packages/com.unity.render-pipelines.core/Editor/Material/DecalPreferences.cs.meta new file mode 100644 index 00000000000..4f7b832675d --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Editor/Material/DecalPreferences.cs.meta @@ -0,0 +1,2 @@ +fileFormatVersion: 2 +guid: f6b878d2c4b3d974db270bab74cd8f18 \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/WizardResources.meta 
b/Packages/com.unity.render-pipelines.core/Editor/Properties.meta similarity index 77% rename from Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/WizardResources.meta rename to Packages/com.unity.render-pipelines.core/Editor/Properties.meta index f07e2cbb099..ceef5af30fe 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/WizardResources.meta +++ b/Packages/com.unity.render-pipelines.core/Editor/Properties.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 783b6a6383182294b8e9b465e2d7d684 +guid: 80eb240eb8f49794ea98245f43a29de6 folderAsset: yes DefaultImporter: externalObjects: {} diff --git a/Packages/com.unity.render-pipelines.core/Editor/AdditionalPropertiesState.cs b/Packages/com.unity.render-pipelines.core/Editor/Properties/AdditionalPropertiesState.cs similarity index 83% rename from Packages/com.unity.render-pipelines.core/Editor/AdditionalPropertiesState.cs rename to Packages/com.unity.render-pipelines.core/Editor/Properties/AdditionalPropertiesState.cs index 8e3bdfb7094..ffe60292840 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/AdditionalPropertiesState.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Properties/AdditionalPropertiesState.cs @@ -9,16 +9,7 @@ namespace UnityEditor.Rendering public abstract class AdditionalPropertiesStateBase where TState : struct, IConvertible { - HashSet m_Editors = new HashSet(); - Dictionary m_AnimFloats = new Dictionary(); - - void RepaintAll() - { - foreach (var editor in m_Editors) - { - editor.Repaint(); - } - } + HashSet m_Editors = new (); /// Get or set the state given the mask. /// The filtering mask @@ -28,6 +19,7 @@ public bool this[TState mask] get => GetAdditionalPropertiesState(mask); set => SetAdditionalPropertiesState(mask, value); } + /// Accessor to the expended state of this specific mask. 
/// The filtering mask /// True: All flagged area are expended @@ -39,9 +31,6 @@ public bool this[TState mask] public void SetAdditionalPropertiesState(TState mask, bool value) { SetAdditionalPropertiesStateValue(mask, value); - - if (value) - ResetAnimation(mask); } /// Setter to the expended state without resetting animation. @@ -55,30 +44,13 @@ public void SetAdditionalPropertiesState(TState mask, bool value) /// Utility to set all states to false public abstract void HideAll(); - internal AnimFloat GetAnimation(TState mask) - { - AnimFloat anim = null; - if (!m_AnimFloats.TryGetValue(mask, out anim)) - { - anim = new AnimFloat(0, RepaintAll); - anim.speed = CoreEditorConstants.additionalPropertiesHightLightSpeed; - m_AnimFloats.Add(mask, anim); - } - return anim; - } - /// /// Resets the animation associated with the given mask to a default state with the animated value set to 1.0 and the target value set to 0.0. /// /// The state mask used to retrieve the associated animation. protected internal void ResetAnimation(TState mask) { - AnimFloat anim = GetAnimation(mask); - - anim.value = 1.0f; - anim.target = 0.0f; } - /// /// Register an editor for this set of additional properties. 
/// @@ -118,36 +90,31 @@ public AdditionalPropertiesState(TState defaultValue, string prefix = "CoreRP", { string key = $"{prefix}:{typeof(TTarget).Name}:{typeof(TState).Name}:{stateId}"; m_State = new EditorPrefBoolFlags(key); - - //register key if not already there - if (!EditorPrefs.HasKey(key)) - { - EditorPrefs.SetInt(key, (int)(object)defaultValue); - } + AdvancedProperties.UpdateShowAdvancedProperties(key, m_State.rawValue != 0u); } /// - public override bool GetAdditionalPropertiesState(TState mask) + public override bool GetAdditionalPropertiesState(TState _) { - return m_State.HasFlag(mask); + return AdvancedProperties.enabled; } /// - protected override void SetAdditionalPropertiesStateValue(TState mask, bool value) + protected override void SetAdditionalPropertiesStateValue(TState _, bool value) { - m_State.SetFlag(mask, value); + AdvancedProperties.enabled = value; } /// public override void ShowAll() { - m_State.rawValue = uint.MaxValue; + AdvancedProperties.enabled = true; } /// public override void HideAll() { - m_State.rawValue = 0u; + AdvancedProperties.enabled = false; } } diff --git a/Packages/com.unity.render-pipelines.core/Editor/AdditionalPropertiesState.cs.meta b/Packages/com.unity.render-pipelines.core/Editor/Properties/AdditionalPropertiesState.cs.meta similarity index 100% rename from Packages/com.unity.render-pipelines.core/Editor/AdditionalPropertiesState.cs.meta rename to Packages/com.unity.render-pipelines.core/Editor/Properties/AdditionalPropertiesState.cs.meta diff --git a/Packages/com.unity.render-pipelines.core/Editor/Properties/AdvancedProperties.cs b/Packages/com.unity.render-pipelines.core/Editor/Properties/AdvancedProperties.cs new file mode 100644 index 00000000000..c35644cf7ac --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Editor/Properties/AdvancedProperties.cs @@ -0,0 +1,138 @@ +using System; +using UnityEditor.AnimatedValues; +using UnityEngine; + +namespace UnityEditor.Rendering +{ + /// + /// Global 
accessor to advanced properties + /// + public static class AdvancedProperties + { + static class Keys + { + // TODO Deprecate this key in U7: Advanced properties were formerly called additional properties + internal const string showAllAdditionalProperties = "General.ShowAllAdditionalProperties"; + internal const string advancedPropertiesMigrated = "General.LocalAdditionalPropertiesMigratedToGlobal"; + //END TODO + + internal const string showAdvancedProperties = "General.ShowAdvancedProperties"; + } + + // TODO Deprecate this in U7: Advanced properties were formerly called additional properties + static AdvancedProperties() + { + // Migrate from the previous global state + UpdateShowAdvancedProperties(Keys.showAllAdditionalProperties, + EditorPrefs.HasKey(Keys.showAllAdditionalProperties) && + EditorPrefs.GetBool(Keys.showAllAdditionalProperties)); + } + + internal static void UpdateShowAdvancedProperties(string key, bool previousState) + { + if (previousState) + { + if (!EditorPrefs.HasKey(Keys.advancedPropertiesMigrated) || !EditorPrefs.GetBool(Keys.advancedPropertiesMigrated)) + { + // Before we were storing a global state and a per editor state. + // So if the user had at least 1 editor with show additional, we need to show advanced properties everywhere. + enabled = true; + EditorPrefs.SetBool(Keys.advancedPropertiesMigrated, true); + } + } + + if (EditorPrefs.HasKey(key)) + EditorPrefs.DeleteKey(key); + } + // END TODO + + /// + /// Global event when the advanced preferences have changed + /// + public static event Action advancedPreferenceChanged; + + private static bool? 
s_ShowAdvanced; + + /// + /// If the show advanced properties is enabled + /// + public static bool enabled + { + get + { + s_ShowAdvanced ??= EditorPrefs.GetBool(Keys.showAdvancedProperties, false); + return s_ShowAdvanced.Value; + } + set + { + if (s_ShowAdvanced != value) + { + s_ShowAdvanced = value; + EditorPrefs.SetBool(Keys.showAdvancedProperties, value); + advancedPreferenceChanged?.Invoke(value); + } + } + } + + /// + /// Adds an entry to toggle Advanced Properties + /// + /// The menu where to add the Advanced Properties entry. + /// If the option is checked + /// The toggle action + public static void AddAdvancedPropertiesBoolMenuItem(this GenericMenu menu, Func hasMoreOptions, Action toggleMoreOptions) + { + menu.AddItem(EditorGUIUtility.TrTextContent("Advanced Properties"), hasMoreOptions.Invoke(), () => toggleMoreOptions.Invoke()); + } + + /// + /// Adds an entry to toggle Advanced Properties + /// + /// The menu where to add the Advanced Properties entry. + public static void AddAdvancedPropertiesBoolMenuItem(this GenericMenu menu) + { + AddAdvancedPropertiesBoolMenuItem(menu, + () => AdvancedProperties.enabled, + () => AdvancedProperties.enabled = !AdvancedProperties.enabled); + } + + internal static AnimFloat s_AnimFloat = new(0) + { + speed = 0.2f + }; + + internal static void ResetHighlight() + { + s_AnimFloat.value = 1.0f; + s_AnimFloat.target = 0.0f; + } + + internal static bool IsHighlightActive() => s_AnimFloat.isAnimating; + + /// + /// Starts the Advanced Properties highlight + /// + /// The animation of the highlight. If null, the global animation value is used. 
+ /// True, if advanced properties are enabled + public static bool BeginGroup(AnimFloat animation = null) + { + var oldColor = GUI.color; + + animation ??= s_AnimFloat; + + GUI.color = Color.Lerp(CoreEditorStyles.backgroundColor * oldColor, CoreEditorStyles.backgroundHighlightColor, animation.value); + EditorGUILayout.BeginVertical(CoreEditorStyles.additionalPropertiesHighlightStyle); + GUI.color = oldColor; + + return AdvancedProperties.enabled; + } + + /// + /// Ends the scope of highlight of advanced properties + /// + public static void EndGroup() + { + EditorGUILayout.EndVertical(); + } + } +} diff --git a/Packages/com.unity.render-pipelines.core/Editor/Properties/AdvancedProperties.cs.meta b/Packages/com.unity.render-pipelines.core/Editor/Properties/AdvancedProperties.cs.meta new file mode 100644 index 00000000000..d245729603e --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Editor/Properties/AdvancedProperties.cs.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: 7cb91275e17b45ab83910c63bb9a3a99 +timeCreated: 1711378153 \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.core/Editor/Properties/AdvancedPropertiesObserver.cs b/Packages/com.unity.render-pipelines.core/Editor/Properties/AdvancedPropertiesObserver.cs new file mode 100644 index 00000000000..78955e5bda4 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Editor/Properties/AdvancedPropertiesObserver.cs @@ -0,0 +1,30 @@ +using UnityEditorInternal; + +namespace UnityEditor.Rendering +{ + class AdvancedPropertiesObserver + { + [InitializeOnLoadMethod] + static void SubscribeToAdvancedPropertiesChanges() + { + AdvancedProperties.advancedPreferenceChanged += OnShowAdvancedPropertiesChanged; + } + + static void OnShowAdvancedPropertiesChanged(bool newValue) + { + if (newValue) + { + AdvancedProperties.ResetHighlight(); + EditorApplication.update += RepaintUntilAnimFinish; + } + } + + static void RepaintUntilAnimFinish() + { + if 
(AdvancedProperties.IsHighlightActive()) + InternalEditorUtility.RepaintAllViews(); + else + EditorApplication.update -= RepaintUntilAnimFinish; + } + } +} diff --git a/Packages/com.unity.render-pipelines.core/Editor/Properties/AdvancedPropertiesObserver.cs.meta b/Packages/com.unity.render-pipelines.core/Editor/Properties/AdvancedPropertiesObserver.cs.meta new file mode 100644 index 00000000000..890b273ac96 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Editor/Properties/AdvancedPropertiesObserver.cs.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: ee336999a409439e86492ecebf683718 +timeCreated: 1711378086 \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.core/Editor/Properties/PropertiesPreferencesProvider.cs b/Packages/com.unity.render-pipelines.core/Editor/Properties/PropertiesPreferencesProvider.cs new file mode 100644 index 00000000000..8190d23d055 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Editor/Properties/PropertiesPreferencesProvider.cs @@ -0,0 +1,27 @@ +using System.Collections.Generic; +using UnityEngine; +using UnityEngine.Rendering; + +namespace UnityEditor.Rendering +{ + [DisplayInfo(name = "Properties", order = 100)] + class PropertiesPreferencesProvider : ICoreRenderPipelinePreferencesProvider + { + class Styles + { + public static readonly GUIContent additionalPropertiesLabel = EditorGUIUtility.TrTextContent("Advanced Properties", "Tells Unity to show or hide Advanced Properties."); + public static readonly GUIContent[] additionalPropertiesNames = { EditorGUIUtility.TrTextContent("All Visible"), EditorGUIUtility.TrTextContent("All Hidden") }; + public static readonly int[] additionalPropertiesValues = { 1, 0 }; + } + + static List s_SearchKeywords = new() { "Additional", "Advanced", "Properties" }; + public List keywords => s_SearchKeywords; + + public void PreferenceGUI() + { + AdvancedProperties.enabled = EditorGUILayout.IntPopup(Styles.additionalPropertiesLabel, + 
AdvancedProperties.enabled ? 1 : 0, Styles.additionalPropertiesNames, + Styles.additionalPropertiesValues) == 1; + } + } +} diff --git a/Packages/com.unity.render-pipelines.core/Editor/AdditionalPropertiesPreferences.cs.meta b/Packages/com.unity.render-pipelines.core/Editor/Properties/PropertiesPreferencesProvider.cs.meta similarity index 100% rename from Packages/com.unity.render-pipelines.core/Editor/AdditionalPropertiesPreferences.cs.meta rename to Packages/com.unity.render-pipelines.core/Editor/Properties/PropertiesPreferencesProvider.cs.meta diff --git a/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.SidePanel.cs b/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.SidePanel.cs index 430bd9510d8..5a98999d1b3 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.SidePanel.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.SidePanel.cs @@ -34,7 +34,8 @@ static partial class Classes public const string kPanelResourceListItem = "panel-resource-list__item"; public const string kPanelPassListItem = "panel-pass-list__item"; public const string kSubHeaderText = "sub-header-text"; - public const string kAttachmentInfoItem = "attachment-info__item"; + public const string kInfoFoldout = "info-foldout"; + public const string kInfoFoldoutSecondaryText = "info-foldout__secondary-text"; public const string kCustomFoldoutArrow = "custom-foldout-arrow"; } @@ -47,6 +48,7 @@ static partial class Classes bool m_PassListExpanded = true; float m_SidePanelVerticalAspectRatio = 0.5f; float m_SidePanelFixedPaneHeight = 0; + float m_ContentSplitViewFixedPaneWidth = 280; Dictionary> m_ResourceDescendantCache = new (); Dictionary> m_PassDescendantCache = new (); @@ -60,8 +62,19 @@ void InitializeSidePanel() UpdatePanelHeights(); }); + var contentSplitView = rootVisualElement.Q(Names.kContentContainer); + contentSplitView.fixedPaneInitialDimension = 
m_ContentSplitViewFixedPaneWidth; + contentSplitView.fixedPaneIndex = 1; + contentSplitView.fixedPane?.RegisterCallback(_ => + { + float? w = contentSplitView.fixedPane?.resolvedStyle?.width; + if (w.HasValue) + m_ContentSplitViewFixedPaneWidth = w.Value; + }); + // Callbacks for dynamic height allocation between resource & pass lists HeaderFoldout resourceListFoldout = rootVisualElement.Q(Names.kResourceListFoldout); + resourceListFoldout.value = m_ResourceListExpanded; resourceListFoldout.RegisterValueChangedCallback(evt => { if (m_ResourceListExpanded) @@ -74,6 +87,7 @@ void InitializeSidePanel() resourceListFoldout.contextMenuGenerator = () => CreateContextMenu(resourceListFoldout.Q()); HeaderFoldout passListFoldout = rootVisualElement.Q(Names.kPassListFoldout); + passListFoldout.value = m_PassListExpanded; passListFoldout.RegisterValueChangedCallback(evt => { if (m_PassListExpanded) @@ -185,13 +199,14 @@ void PopulateResourceList() var foldoutCheckmark = resourceItem.Q("unity-checkmark"); // Add resource type icon before the label foldoutCheckmark.parent.Insert(1, CreateResourceTypeIcon(visibleResourceElement.type)); + foldoutCheckmark.parent.Add(iconContainer); foldoutCheckmark.BringToFront(); // Move foldout checkmark to the right // Add imported icon to the right of the foldout checkmark var toggleContainer = resourceItem.Q(); toggleContainer.tooltip = resourceData.name; - toggleContainer.Add(iconContainer); - RenderGraphResourceType type = (RenderGraphResourceType)visibleResourceElement.type; + + RenderGraphResourceType type = visibleResourceElement.type; if (type == RenderGraphResourceType.Texture && resourceData.textureData != null) { var lineBreak = new VisualElement(); @@ -302,11 +317,27 @@ void CreateTextElement(VisualElement parent, string text, string className = nul { var pass = m_CurrentDebugData.passList[passId]; Debug.Assert(pass.nrpInfo != null); // This overlay currently assumes NRP compiler + var passFoldout = new Foldout(); - 
passFoldout.text = $"{pass.name} ({k_PassTypeNames[(int) pass.type]})"; - passFoldout.AddToClassList(Classes.kAttachmentInfoItem); + passFoldout.text = $"{pass.name} ({k_PassTypeNames[(int) pass.type]})"; + + var foldoutTextElement = passFoldout.Q(className: Foldout.textUssClassName); + foldoutTextElement.displayTooltipWhenElided = false; // no tooltip override when ellipsis is active + + bool hasSubpassIndex = pass.nativeSubPassIndex != -1; + if (hasSubpassIndex) + { + // Abuse Foldout to allow two-line header: add line break
at the end of the actual foldout text to increase height, + // then inject a second label into the hierarchy starting with a line break to offset it to the second line. + passFoldout.text += "
"; + Label subpassIndexLabel = new Label($"
Subpass #{pass.nativeSubPassIndex}"); + subpassIndexLabel.AddToClassList(Classes.kInfoFoldoutSecondaryText); + foldoutTextElement.Add(subpassIndexLabel); + } + + passFoldout.AddToClassList(Classes.kInfoFoldout); passFoldout.AddToClassList(Classes.kCustomFoldoutArrow); - passFoldout.Q().tooltip = passFoldout.text; + passFoldout.Q().tooltip = $"The {k_PassTypeNames[(int) pass.type]} {pass.name} belongs to native subpass {pass.nativeSubPassIndex}."; var foldoutCheckmark = passFoldout.Q("unity-checkmark"); foldoutCheckmark.BringToFront(); // Move foldout checkmark to the right @@ -330,10 +361,19 @@ void CreateTextElement(VisualElement parent, string text, string className = nul foreach (var attachmentInfo in nativePassInfo.attachmentInfos) { var attachmentFoldout = new Foldout(); - attachmentFoldout.text = attachmentInfo.resourceName; - attachmentFoldout.AddToClassList(Classes.kAttachmentInfoItem); + + // Abuse Foldout to allow two-line header (same as above) + attachmentFoldout.text = $"{attachmentInfo.resourceName}
"; + Label attachmentIndexLabel = new Label($"
Attachment #{attachmentInfo.attachmentIndex}"); + attachmentIndexLabel.AddToClassList(Classes.kInfoFoldoutSecondaryText); + + var foldoutTextElement = attachmentFoldout.Q(className: Foldout.textUssClassName); + foldoutTextElement.displayTooltipWhenElided = false; // no tooltip override when ellipsis is active + foldoutTextElement.Add(attachmentIndexLabel); + + attachmentFoldout.AddToClassList(Classes.kInfoFoldout); attachmentFoldout.AddToClassList(Classes.kCustomFoldoutArrow); - attachmentFoldout.Q().tooltip = attachmentFoldout.text; + attachmentFoldout.Q().tooltip = $"Texture {attachmentInfo.resourceName} is bound at attachment index {attachmentInfo.attachmentIndex}."; var foldoutCheckmark = attachmentFoldout.Q("unity-checkmark"); foldoutCheckmark.BringToFront(); // Move foldout checkmark to the right @@ -385,9 +425,16 @@ void UpdatePanelHeights() { bool passListExpanded = m_PassListExpanded && (m_CurrentDebugData != null && m_CurrentDebugData.isNRPCompiler); const int kFoldoutHeaderHeightPx = 18; + const int kFoldoutHeaderExpandedMinHeightPx = 50; const int kWindowExtraMarginPx = 6; - float panelHeightPx = focusedWindow.position.height - kHeaderContainerHeightPx - kWindowExtraMarginPx; + var resourceList = rootVisualElement.Q(Names.kResourceListFoldout); + var passList = rootVisualElement.Q(Names.kPassListFoldout); + + resourceList.style.minHeight = kFoldoutHeaderHeightPx; + passList.style.minHeight = kFoldoutHeaderHeightPx; + + float panelHeightPx = position.height - kHeaderContainerHeightPx - kWindowExtraMarginPx; if (!m_ResourceListExpanded) { m_SidePanelSplitView.fixedPaneInitialDimension = kFoldoutHeaderHeightPx; @@ -404,8 +451,14 @@ void UpdatePanelHeights() m_SidePanelVerticalAspectRatio = m_SidePanelFixedPaneHeight / panelHeightPx; } m_SidePanelSplitView.fixedPaneInitialDimension = panelHeightPx * m_SidePanelVerticalAspectRatio; + + resourceList.style.minHeight = kFoldoutHeaderExpandedMinHeightPx; + passList.style.minHeight = 
kFoldoutHeaderExpandedMinHeightPx; } + // Ensure fixed pane initial dimension gets applied in case it has already been set + m_SidePanelSplitView.fixedPane.style.height = m_SidePanelSplitView.fixedPaneInitialDimension; + // Disable drag line when one of the foldouts is collapsed var dragLine = m_SidePanelSplitView.Q("unity-dragline"); var dragLineAnchor = m_SidePanelSplitView.Q("unity-dragline-anchor"); diff --git a/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.cs b/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.cs index a39f587dce1..c86ab3582ab 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/RenderGraph/RenderGraphViewer.cs @@ -56,6 +56,7 @@ static partial class Classes public const string kResourceIconContainer = "resource-icon-container"; public const string kResourceIcon = "resource-icon"; public const string kResourceIconImported = "resource-icon--imported"; + public const string kResourceIconMultipleUsage = "resource-icon--multiple-usage"; public const string kResourceIconGlobalDark = "resource-icon--global-dark"; public const string kResourceIconGlobalLight = "resource-icon--global-light"; public const string kResourceIconFbfetch = "resource-icon--fbfetch"; @@ -117,6 +118,7 @@ static partial class Classes const string kPassFilterLegacyEditorPrefsKey = "RenderGraphViewer.PassFilterLegacy"; const string kPassFilterEditorPrefsKey = "RenderGraphViewer.PassFilter"; const string kResourceFilterEditorPrefsKey = "RenderGraphViewer.ResourceFilter"; + const string kSelectedExecutionEditorPrefsKey = "RenderGraphViewer.SelectedExecution"; PassFilter m_PassFilter = PassFilter.CulledPasses | PassFilter.RasterPasses | PassFilter.UnsafePasses | PassFilter.ComputePasses; PassFilterLegacy m_PassFilterLegacy = PassFilterLegacy.CulledPasses; @@ -193,13 +195,26 @@ class ResourceElementInfo class ResourceRWBlock { + 
[Flags] + public enum UsageFlags + { + None = 0, + UpdatesGlobalResource = 1 << 0, + FramebufferFetch = 1 << 1, + } + public VisualElement element; public string tooltip; public int visibleResourceIndex; public bool read; public bool write; - public bool frameBufferFetch; - public bool setGlobalResource; + public UsageFlags usage; + + public bool HasMultipleUsageFlags() + { + // Check if usage is a power of 2, meaning only one bit is set + return usage != 0 && (usage & (usage - 1)) != 0; + } } class PassElementInfo @@ -664,7 +679,7 @@ void HoverResourceByIndex(int visibleResourceIndex, int visiblePassIndex) foreach (var res in passInfo.resourceBlocks) { if (res.visibleResourceIndex == visibleResourceIndex && - res.setGlobalResource) + res.usage.HasFlag(ResourceRWBlock.UsageFlags.UpdatesGlobalResource)) { disablePanning = true; } @@ -810,6 +825,9 @@ void SelectedRenderGraphChanged(string newRenderGraphName) void SelectedExecutionChanged(string newExecutionName) { + if (newExecutionName == selectedExecutionName) + return; + selectedExecutionName = newExecutionName; if (m_CurrentDebugData != null) @@ -869,8 +887,19 @@ void RebuildExecutionPopup() executionDropdownField.choices = choices; executionDropdownField.RegisterValueChangedCallback(evt => selectedExecutionName = evt.newValue); - executionDropdownField.value = choices[0]; - SelectedExecutionChanged(choices[0]); + + int selectedIndex = 0; + if (EditorPrefs.HasKey(kSelectedExecutionEditorPrefsKey)) + { + string previousSelectedExecution = EditorPrefs.GetString(kSelectedExecutionEditorPrefsKey); + int previousSelectedIndex = choices.IndexOf(previousSelectedExecution); + if (previousSelectedIndex != -1) + selectedIndex = previousSelectedIndex; + } + + // Set value without triggering serialization of the editorpref + executionDropdownField.SetValueWithoutNotify(choices[selectedIndex]); + SelectedExecutionChanged(choices[selectedIndex]); } void OnPassFilterChanged(ChangeEvent evt) @@ -1263,30 +1292,39 @@ void 
CreateRWResourceBlockElement(int offsetPx, ResourceRWBlock block) } } - if (block.frameBufferFetch) - { - var fbFetchIcon = new VisualElement(); - fbFetchIcon.AddToClassList(Classes.kResourceIcon); - fbFetchIcon.AddToClassList(Classes.kResourceIconFbfetch); - block.element.Add(fbFetchIcon); - } - else if (block.setGlobalResource) + string tooltip = string.Empty; + if (!string.IsNullOrEmpty(accessType)) + tooltip += $"{accessType} access to this resource."; + + if (block.usage != ResourceRWBlock.UsageFlags.None) { - var globalIcon = new VisualElement(); - globalIcon.AddToClassList(Classes.kResourceIcon); - string globalIconAsset = block.read || block.write ? Classes.kResourceIconGlobalLight : Classes.kResourceIconGlobalDark; - globalIcon.AddToClassList(globalIconAsset); - block.element.Add(globalIcon); + string resourceIconClassName = string.Empty; + + if (block.HasMultipleUsageFlags()) + resourceIconClassName = Classes.kResourceIconMultipleUsage; + else if (block.usage.HasFlag(ResourceRWBlock.UsageFlags.FramebufferFetch)) + resourceIconClassName = Classes.kResourceIconFbfetch; + else if (block.usage.HasFlag(ResourceRWBlock.UsageFlags.UpdatesGlobalResource)) + resourceIconClassName = block.read || block.write ? Classes.kResourceIconGlobalLight : Classes.kResourceIconGlobalDark; + + if (!string.IsNullOrEmpty(resourceIconClassName)) + { + var usageIcon = new VisualElement(); + usageIcon.AddToClassList(Classes.kResourceIcon); + usageIcon.AddToClassList(resourceIconClassName); + block.element.Add(usageIcon); + } + + if (tooltip.Length > 0) + tooltip += "

"; + tooltip += "Usage details:"; + if (block.usage.HasFlag(ResourceRWBlock.UsageFlags.FramebufferFetch)) + tooltip += "
- Read is using framebuffer fetch."; + if (block.usage.HasFlag(ResourceRWBlock.UsageFlags.UpdatesGlobalResource)) + tooltip += "
- Updates a global resource slot."; } - List tooltipMessages = new List(3); - if (accessType != null) - tooltipMessages.Add($"{accessType} access to this resource."); - if (block.frameBufferFetch) - tooltipMessages.Add("Read is using framebuffer fetch."); - if (block.setGlobalResource) - tooltipMessages.Add("Updates global resource."); - block.tooltip = string.Join($"{Environment.NewLine}{Environment.NewLine}", tooltipMessages); + block.tooltip = tooltip; block.element.style.left = offsetPx; block.element.AddToClassList(Classes.kResourceDependencyBlock); } @@ -1356,12 +1394,12 @@ VisualElement CreateResourceGridRow( if (resourceType == RenderGraphResourceType.Texture && pass.nrpInfo != null) { if (pass.nrpInfo.textureFBFetchList.Contains(resourceIndex)) - block.frameBufferFetch = true; + block.usage |= ResourceRWBlock.UsageFlags.FramebufferFetch; if (pass.nrpInfo.setGlobals.Contains(resourceIndex)) - block.setGlobalResource = true; + block.usage |= ResourceRWBlock.UsageFlags.UpdatesGlobalResource; } - if (!block.read && !block.write && !block.frameBufferFetch && !block.setGlobalResource) + if (!block.read && !block.write && block.usage == ResourceRWBlock.UsageFlags.None) continue; // No need to create a visual element int offsetPx = visiblePassIndex * kPassWidthPx; @@ -1438,6 +1476,7 @@ void RebuildGraphViewerUI() int numVisiblePasses = visiblePassIndex; if (numVisiblePasses == 0) { + SaveSplitViewFixedPaneHeight(); ClearGraphViewerUI(); SetEmptyStateMessage(EmptyStateReason.EmptyPassFilterResult); return; @@ -1481,6 +1520,7 @@ void RebuildGraphViewerUI() int numVisibleResources = visibleResourceIndex; if (numVisibleResources == 0) { + SaveSplitViewFixedPaneHeight(); ClearGraphViewerUI(); SetEmptyStateMessage(EmptyStateReason.EmptyResourceFilterResult); return; @@ -1546,7 +1586,18 @@ void InitializePersistentElements() renderGraphDropdownField.RegisterValueChangedCallback(evt => SelectedRenderGraphChanged(evt.newValue)); var executionDropdownField = 
rootVisualElement.Q(Names.kCurrentExecutionDropdown); - executionDropdownField.RegisterValueChangedCallback(evt => SelectedExecutionChanged(evt.newValue)); + executionDropdownField.RegisterValueChangedCallback(evt => + { + EditorPrefs.SetString(kSelectedExecutionEditorPrefsKey, evt.newValue); + SelectedExecutionChanged(evt.newValue); + }); + + // After delay, serialize currently selected execution. This avoids an issue where activating a new camera + // causes RG Viewer to change the execution just because it was serialized some time in the past. + executionDropdownField.schedule.Execute(() => + { + EditorPrefs.SetString(kSelectedExecutionEditorPrefsKey, selectedExecutionName); + }).ExecuteLater(500); var passFilter = rootVisualElement.Q(Names.kPassFilterField); passFilter.style.display = DisplayStyle.None; // Hidden until the compiler is known diff --git a/Packages/com.unity.render-pipelines.core/Editor/Settings/DefaultVolumeProfileEditor.cs b/Packages/com.unity.render-pipelines.core/Editor/Settings/DefaultVolumeProfileEditor.cs index f744a596cf5..0ac19fc3367 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Settings/DefaultVolumeProfileEditor.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Settings/DefaultVolumeProfileEditor.cs @@ -233,11 +233,7 @@ void OnVolumeComponentContextClick(Vector2 position, VolumeComponentEditor targe menu.AddSeparator(string.Empty); if (targetEditor.hasAdditionalProperties) - menu.AddItem(VolumeProfileUtils.Styles.showAdditionalProperties, targetEditor.showAdditionalProperties, () => targetEditor.showAdditionalProperties ^= true); - else - menu.AddDisabledItem(VolumeProfileUtils.Styles.showAdditionalProperties); - - menu.AddItem(VolumeProfileUtils.Styles.showAllAdditionalProperties, false, () => CoreRenderPipelinePreferences.Open()); + menu.AddAdvancedPropertiesBoolMenuItem(() => targetEditor.showAdditionalProperties, () => targetEditor.showAdditionalProperties ^= true); menu.AddSeparator(string.Empty); 
menu.AddItem(VolumeProfileUtils.Styles.openInRenderingDebugger, false, DebugDisplaySettingsVolume.OpenInRenderingDebugger); diff --git a/Packages/com.unity.render-pipelines.core/Editor/ShaderGenerator/ShaderTypeGeneration.cs b/Packages/com.unity.render-pipelines.core/Editor/ShaderGenerator/ShaderTypeGeneration.cs index 8a27910ee99..4898ad33afb 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/ShaderGenerator/ShaderTypeGeneration.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/ShaderGenerator/ShaderTypeGeneration.cs @@ -1129,7 +1129,13 @@ public bool Generate() name = name.Substring(2); } string defineName = name.ToUpper(); - m_Statics[defineName] = field.GetValue(null).ToString(); + string value; + if (fieldType == typeof(float)) + value = ((float)field.GetValue(null)).ToString(System.Globalization.CultureInfo.InvariantCulture); + else + value = field.GetValue(null).ToString(); + + m_Statics[defineName] = value; } continue; } diff --git a/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/RenderGraphViewer.uss b/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/RenderGraphViewer.uss index 972e89d847f..c55af2fcf28 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/RenderGraphViewer.uss +++ b/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/RenderGraphViewer.uss @@ -2,7 +2,7 @@ --header-container-height: 24px; --resource-column-width: 220px; --resource-list-icon-size: 16px; - --resource-grid-icon-size: 12px; + --resource-grid-icon-size: 16px; --pass-width: 26px; --pass-list-height: 180px; --pass-list-tilted-label-length: 200px; @@ -278,7 +278,7 @@ #resource-grid .resource-icon { width: var(--resource-grid-icon-size); height: var(--resource-grid-icon-size); - margin-top: 8px; + margin-top: 5px; align-self: center; } @@ -530,6 +530,18 @@ ScrollView TextElement { margin-top: 1px; margin-left: 2px; margin-right: 6px; + flex-grow: 0; + flex-shrink: 0; +} + +.panel-resource-list__item > Toggle > 
VisualElement { + max-width: 100% +} + +.panel-resource-list__item > Toggle > VisualElement > Label { + overflow: hidden; + flex-shrink: 1; + text-overflow: ellipsis; } /* Pass List panel only */ @@ -553,7 +565,7 @@ ScrollView TextElement { -unity-font-style: bold; } -.attachment-info__item { +.info-foldout { border-radius: 4px; border-width: 1px; border-color: var(--side-panel-item-border-color); @@ -562,13 +574,13 @@ ScrollView TextElement { margin-right: 4px; padding-top: 4px; padding-bottom: 6px; - -unity-font-style: bold; } -.attachment-info__item > Toggle > VisualElement { +.info-foldout > Toggle > VisualElement { max-width: 100%; } -.attachment-info__item > Toggle > VisualElement > Label { + +.info-foldout > Toggle > VisualElement > Label { margin-left: 6px; flex-shrink: 1; flex-grow: 1; @@ -577,13 +589,20 @@ ScrollView TextElement { text-overflow: ellipsis; } -.attachment-info__item > TextElement { +.info-foldout > TextElement { -unity-font-style: normal; margin-right: 4px; margin-left: -6px; color: var(--unity-colors-default-text); } +.info-foldout__secondary-text { + margin-left: 0px; + overflow: hidden; + text-overflow: ellipsis; + color: var(--side-panel-secondary-text-color); +} + .panel-pass-list__item > #unity-content { margin-bottom: 12px; } diff --git a/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/RenderGraphViewerDark.uss b/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/RenderGraphViewerDark.uss index da48058d3c2..182870c48c4 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/RenderGraphViewerDark.uss +++ b/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/RenderGraphViewerDark.uss @@ -18,6 +18,7 @@ --main-background-color: #313131; --side-panel-background-color: #383838; --side-panel-item-border-color: #666666; + --side-panel-secondary-text-color: #808080; } #capture-button { @@ -60,6 +61,10 @@ background-image: url("../Icons/RenderGraphViewer/FramebufferFetch@2x.png"); } 
+.resource-icon--multiple-usage { + background-image: url("../Icons/RenderGraphViewer/d_MultipleUsage.png"); +} + .pass-block.pass-block-script-link { background-image: url("../Icons/RenderGraphViewer/d_ScriptLink@2x.png"); background-color: #C4C4C4; diff --git a/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/RenderGraphViewerLight.uss b/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/RenderGraphViewerLight.uss index f350db8905f..17e663c347c 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/RenderGraphViewerLight.uss +++ b/Packages/com.unity.render-pipelines.core/Editor/StyleSheets/RenderGraphViewerLight.uss @@ -18,6 +18,7 @@ --main-background-color: #c8c8c8; --side-panel-background-color: #cbcbcb; --side-panel-item-border-color: #666666; + --side-panel-secondary-text-color: #707070; } #capture-button { @@ -60,6 +61,10 @@ background-image: url("../Icons/RenderGraphViewer/d_FramebufferFetch@2x.png"); } +.resource-icon--multiple-usage { + background-image: url("../Icons/RenderGraphViewer/MultipleUsage.png"); +} + .resource-helper-line--highlight { background-size: 8px 20px; /* light theme needs a wider dashed line to be properly visible */ } diff --git a/Packages/com.unity.render-pipelines.core/Editor/UXML/RenderGraphViewer.uxml b/Packages/com.unity.render-pipelines.core/Editor/UXML/RenderGraphViewer.uxml index 4175cf61a88..bc58e87832c 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/UXML/RenderGraphViewer.uxml +++ b/Packages/com.unity.render-pipelines.core/Editor/UXML/RenderGraphViewer.uxml @@ -8,7 +8,7 @@ - + diff --git a/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumeComponentEditor.cs b/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumeComponentEditor.cs index b82ecf2023c..bb64826d8aa 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumeComponentEditor.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumeComponentEditor.cs @@ -117,8 +117,6 @@ 
internal Vector2 overrideToggleSize #region Additional Properties - AnimFloat m_AdditionalPropertiesAnimation; - EditorPrefBool m_ShowAdditionalProperties; List m_VolumeNotAdditionalParameters = new List(); /// @@ -131,17 +129,8 @@ internal Vector2 overrideToggleSize /// public bool showAdditionalProperties { - get => m_ShowAdditionalProperties.value; - set - { - if (value && !m_ShowAdditionalProperties.value) - { - m_AdditionalPropertiesAnimation.value = 1.0f; - m_AdditionalPropertiesAnimation.target = 0.0f; - } - - SetAdditionalPropertiesPreference(value); - } + get => AdvancedProperties.enabled; + set => AdvancedProperties.enabled = value; } /// @@ -151,15 +140,11 @@ public bool showAdditionalProperties /// True if the additional content should be drawn. protected bool BeginAdditionalPropertiesScope() { - if (hasAdditionalProperties && showAdditionalProperties) - { - CoreEditorUtils.BeginAdditionalPropertiesHighlight(m_AdditionalPropertiesAnimation); - return true; - } - else - { + if (!showAdditionalProperties || !hasAdditionalProperties) return false; - } + + AdvancedProperties.BeginGroup(); + return true; } /// @@ -168,9 +153,7 @@ protected bool BeginAdditionalPropertiesScope() protected void EndAdditionalPropertiesScope() { if (hasAdditionalProperties && showAdditionalProperties) - { - CoreEditorUtils.EndAdditionalPropertiesHighlight(); - } + AdvancedProperties.EndGroup(); } #endregion @@ -267,12 +250,7 @@ internal static string GetAdditionalPropertiesPreferenceKey(Type type) internal void InitAdditionalPropertiesPreference() { string key = GetAdditionalPropertiesPreferenceKey(GetType()); - m_ShowAdditionalProperties = new EditorPrefBool(key); - } - - internal void SetAdditionalPropertiesPreference(bool value) - { - m_ShowAdditionalProperties.value = value; + AdvancedProperties.UpdateShowAdvancedProperties(key, EditorPrefs.HasKey(key) && EditorPrefs.GetBool(key)); } internal void Init() @@ -291,11 +269,6 @@ internal void Init() 
InitAdditionalPropertiesPreference(); - m_AdditionalPropertiesAnimation = new AnimFloat(0, Repaint) - { - speed = CoreEditorConstants.additionalPropertiesHightLightSpeed - }; - InitParameters(); OnEnable(); diff --git a/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumeComponentListEditor.cs b/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumeComponentListEditor.cs index 3dc8dd0875c..47734b62533 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumeComponentListEditor.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumeComponentListEditor.cs @@ -448,12 +448,8 @@ void OnContextClick(Vector2 position, VolumeComponentEditor targetEditor, int id menu.AddSeparator(string.Empty); if (targetEditor.hasAdditionalProperties) - menu.AddItem(EditorGUIUtility.TrTextContent("Show Additional Properties"), - targetEditor.showAdditionalProperties, () => targetEditor.showAdditionalProperties ^= true); - else - menu.AddDisabledItem(EditorGUIUtility.TrTextContent("Show Additional Properties")); - menu.AddItem(EditorGUIUtility.TrTextContent("Show All Additional Properties..."), false, - () => CoreRenderPipelinePreferences.Open()); + menu.AddAdvancedPropertiesBoolMenuItem(() => targetEditor.showAdditionalProperties, + () => targetEditor.showAdditionalProperties ^= true); menu.AddSeparator(string.Empty); targetEditor.AddDefaultProfileContextMenuEntries(menu, VolumeManager.instance.globalDefaultProfile, diff --git a/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumeProfileEditor.cs b/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumeProfileEditor.cs index b1196bfde61..ff76aa7d16f 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumeProfileEditor.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumeProfileEditor.cs @@ -43,10 +43,13 @@ void OnDisable() /// public override void OnInspectorGUI() { - if (componentList == null) + if (componentList == null || 
componentList.asset == null) { if (!VolumeManager.instance.isInitialized) + { + EditorGUILayout.HelpBox("Volume Profiles require an active Scriptable Render Pipeline, but nothing has been rendered. Make sure Scene or Game View is in focus and no debug modes are active.", MessageType.Warning); return; // Defer initialization until VolumeManager is initialized + } Init(); } diff --git a/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumeProfileUtils.cs b/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumeProfileUtils.cs index 1cb1b8b4361..80eadedc7cb 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumeProfileUtils.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumeProfileUtils.cs @@ -19,8 +19,6 @@ internal static class Styles public static readonly GUIContent expandAll = EditorGUIUtility.TrTextContent("Expand All"); public static readonly GUIContent reset = EditorGUIUtility.TrTextContent("Reset"); public static readonly GUIContent resetAll = EditorGUIUtility.TrTextContent("Reset All"); - public static readonly GUIContent showAdditionalProperties = EditorGUIUtility.TrTextContent("Show Additional Properties"); - public static readonly GUIContent showAllAdditionalProperties = EditorGUIUtility.TrTextContent("Show All Additional Properties..."); public static readonly GUIContent openInRenderingDebugger = EditorGUIUtility.TrTextContent("Open In Rendering Debugger"); public static readonly GUIContent copySettings = EditorGUIUtility.TrTextContent("Copy Settings"); public static readonly GUIContent copyAllSettings = EditorGUIUtility.TrTextContent("Copy All Settings"); @@ -313,8 +311,7 @@ public static void AddVolumeProfileContextMenuItems( menu.AddSeparator(string.Empty); - menu.AddItem(Styles.showAllAdditionalProperties, false, - CoreRenderPipelinePreferences.Open); + menu.AddAdvancedPropertiesBoolMenuItem(); menu.AddSeparator(string.Empty); @@ -400,8 +397,7 @@ public static void OnVolumeProfileContextClick( 
menu.AddSeparator(string.Empty); - menu.AddItem(Styles.showAllAdditionalProperties, false, - CoreRenderPipelinePreferences.Open); + menu.AddAdvancedPropertiesBoolMenuItem(); menu.AddSeparator(string.Empty); diff --git a/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumesPreferences.cs b/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumesPreferences.cs index 9168883ecb3..38aa8717383 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumesPreferences.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Volume/VolumesPreferences.cs @@ -1,6 +1,7 @@ using System; using System.Collections.Generic; using UnityEngine; +using UnityEngine.Rendering; using RuntimeSRPPreferences = UnityEngine.Rendering.CoreRenderPipelinePreferences; namespace UnityEditor.Rendering @@ -8,6 +9,7 @@ namespace UnityEditor.Rendering /// /// Preferences for Volumes /// + [DisplayInfo(name = "Volumes", order = 50)] public class VolumesPreferences : ICoreRenderPipelinePreferencesProvider { static class Keys @@ -80,11 +82,6 @@ public static VolumeGizmoVisibility volumeGizmosVisibilityOption /// public List keywords => s_SearchKeywords; - /// - /// The header of the panel - /// - public GUIContent header { get; } = EditorGUIUtility.TrTextContent("Volumes"); - /// /// Renders the Preferences UI for this provider /// diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Common/Observable.cs b/Packages/com.unity.render-pipelines.core/Runtime/Common/Observable.cs new file mode 100644 index 00000000000..67c9b36d8d4 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Runtime/Common/Observable.cs @@ -0,0 +1,48 @@ +using System; +using System.Collections.Generic; + +namespace UnityEngine.Rendering +{ + /// + /// Represents an observable value of type T. Subscribers can be notified when the value changes. + /// + /// The type of the value. + public struct Observable + { + /// + /// Event that is triggered when the value changes. 
+ /// + public event Action onValueChanged; + + private T m_Value; + + /// + /// The current value. + /// + public T value + { + get => m_Value; + set + { + // Only invoke the event if the new value is different from the current value + if (!EqualityComparer.Default.Equals(value, m_Value)) + { + m_Value = value; + + // Notify subscribers when the value changes + onValueChanged?.Invoke(value); + } + } + } + + /// + /// Constructor with value + /// + /// The new value to be assigned. + public Observable(T newValue) + { + m_Value = newValue; + onValueChanged = null; + } + } +} diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Common/Observable.cs.meta b/Packages/com.unity.render-pipelines.core/Runtime/Common/Observable.cs.meta new file mode 100644 index 00000000000..5596972c6a2 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Runtime/Common/Observable.cs.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: c809a62d5133434283a678284bde6334 +timeCreated: 1712237945 \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Debug/ProbeVolumeDebugBase.hlsl b/Packages/com.unity.render-pipelines.core/Runtime/Debug/ProbeVolumeDebugBase.hlsl index e13fe4a0d5d..9dbd205033e 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Debug/ProbeVolumeDebugBase.hlsl +++ b/Packages/com.unity.render-pipelines.core/Runtime/Debug/ProbeVolumeDebugBase.hlsl @@ -103,7 +103,7 @@ void DoCull(inout v2f o) // snappedProbePosition_WS : worldspace position of main probe (a corner of the 8 probes cube) // samplingPosition_WS : worldspace sampling position after applying 'NormalBias' and 'ViewBias' and 'ValidityAndNormalBased Leak Reduction' // normalizedOffset : normalized offset between sampling position and snappedProbePosition -void FindSamplingData(float3 posWS, float3 normalWS, uint renderingLayer, out float3 snappedProbePosition_WS, out float3 samplingPosition_WS, out float3 samplingPositionNoAntiLeak_WS, out float probeDistance, out 
float3 normalizedOffset, out float validityWeights[8]) +void FindSamplingData(float3 posWS, float3 normalWS, uint renderingLayer, out float3 snappedProbePosition_WS, out float3 samplingPositionNoAntiLeak_WS, out float probeDistance, out float3 normalizedOffset, out float validityWeights[8]) { float3 cameraPosition_WS = _WorldSpaceCameraPos; float3 viewDir_WS = normalize(cameraPosition_WS - posWS); @@ -115,33 +115,43 @@ void FindSamplingData(float3 posWS, float3 normalWS, uint renderingLayer, out fl posWS = AddNoiseToSamplingPosition(posWS, posSS, viewDir_WS); } - posWS -= _WorldOffset; + posWS -= _APVWorldOffset; + // uvw APVResources apvRes = FillAPVResources(); float3 uvw; uint subdiv; float3 biasedPosWS; bool valid = TryToGetPoolUVWAndSubdiv(apvRes, posWS, normalWS, viewDir_WS, uvw, subdiv, biasedPosWS); - probeDistance = ProbeDistance(subdiv); - snappedProbePosition_WS = GetSnappedProbePosition(biasedPosWS, subdiv); - - WarpUVWLeakReduction(apvRes, posWS, normalWS, renderingLayer, subdiv, biasedPosWS, uvw, normalizedOffset, validityWeights); - - biasedPosWS += _WorldOffset; - snappedProbePosition_WS += _WorldOffset; - samplingPositionNoAntiLeak_WS = biasedPosWS; - - if (_LeakReductionMode != 0) + // Validity mask + float3 texCoord = uvw * _APVPoolDim - .5f; + float3 texFrac = frac(texCoord); + uint validityMask = LoadValidityMask(apvRes, renderingLayer, texCoord); + for (uint i = 0; i < 8; i++) { - samplingPosition_WS = snappedProbePosition_WS + (normalizedOffset*probeDistance); + int3 probeCoord = GetSampleOffset(i); + float validityWeight = ((probeCoord.x == 1) ? texFrac.x : 1.0f - texFrac.x) * + ((probeCoord.y == 1) ? texFrac.y : 1.0f - texFrac.y) * + ((probeCoord.z == 1) ? 
texFrac.z : 1.0f - texFrac.z); + validityWeights[i] = validityWeight * GetValidityWeight(i, validityMask); } - else + + // Sample position + normalizedOffset = texFrac; + if (_APVLeakReductionMode == APVLEAKREDUCTIONMODE_PERFORMANCE) { - normalizedOffset = (biasedPosWS - snappedProbePosition_WS) / probeDistance; - samplingPosition_WS = biasedPosWS; + float3 warped = uvw; + WarpUVWLeakReduction(apvRes, renderingLayer, warped); + normalizedOffset += (warped - uvw) * _APVPoolDim; } + // stuff + biasedPosWS += _APVWorldOffset; + samplingPositionNoAntiLeak_WS = biasedPosWS; + + probeDistance = ProbeDistance(subdiv); + snappedProbePosition_WS = GetSnappedProbePosition(biasedPosWS, subdiv); } // Return probe sampling weight @@ -293,7 +303,7 @@ float3 CalculateDiffuseLighting(v2f i) float3 skyShadingDirection = normal; if (_ShadingMode == DEBUGPROBESHADINGMODE_SKY_DIRECTION) { - if (_EnableSkyOcclusionShadingDirection > 0) + if (_APVSkyDirectionWeight > 0) { float value = 1.0f / GetCurrentExposureMultiplier(); @@ -314,8 +324,8 @@ float3 CalculateDiffuseLighting(v2f i) } else { - float skyOcclusion = 0.0f; - if (_SkyOcclusionIntensity > 0) + float3 skyOcclusion = _DebugEmptyProbeData.xyz; + if (_APVSkyOcclusionWeight > 0) { // L0 L1 float4 temp = float4(kSHBasis0, kSHBasis1 * normal.x, kSHBasis1 * normal.y, kSHBasis1 * normal.z); @@ -324,10 +334,7 @@ float3 CalculateDiffuseLighting(v2f i) if (_ShadingMode == DEBUGPROBESHADINGMODE_SKY_OCCLUSION_SH) { - if(_SkyOcclusionIntensity > 0) - return skyOcclusion / GetCurrentExposureMultiplier(); - else - return _DebugEmptyProbeData.xyz / GetCurrentExposureMultiplier(); + return skyOcclusion / GetCurrentExposureMultiplier(); } else { @@ -355,7 +362,7 @@ float3 CalculateDiffuseLighting(v2f i) bakeDiffuseLighting += EvalL2(L0, L2_R, L2_G, L2_B, L2_C, normal); #endif - if (_SkyOcclusionIntensity > 0) + if (_APVSkyOcclusionWeight > 0) bakeDiffuseLighting += skyOcclusion * EvaluateAmbientProbe(skyShadingDirection); return 
bakeDiffuseLighting; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Debug/ProbeVolumeDebugFunctions.hlsl b/Packages/com.unity.render-pipelines.core/Runtime/Debug/ProbeVolumeDebugFunctions.hlsl index cbb9831a05f..e50e8e66b28 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Debug/ProbeVolumeDebugFunctions.hlsl +++ b/Packages/com.unity.render-pipelines.core/Runtime/Debug/ProbeVolumeDebugFunctions.hlsl @@ -5,13 +5,11 @@ v2f vert(appdata v) { v2f o; + ZERO_INITIALIZE(v2f, o); UNITY_SETUP_INSTANCE_ID(v); UNITY_TRANSFER_INSTANCE_ID(v, o); - o.vertex = 0; - o.normal = 0; - if (!ShouldCull(o)) { float3 probePosition_WS = mul(UNITY_MATRIX_M, float4(0.0f, 0.0f, 0.0f, 1.0f)).xyz; @@ -26,12 +24,11 @@ float3 snappedProbePosition_WS; // worldspace position of main probe (a corner of the 8 probes cube) float3 samplingPositionNoAntiLeak_WS; // // worldspace sampling position after applying 'NormalBias', 'ViewBias' - float3 samplingPosition_WS; // worldspace sampling position after applying 'NormalBias', 'ViewBias' and 'ValidityAndNormalBased Leak Reduction' float probeDistance; float3 normalizedOffset; // normalized offset between sampling position and snappedProbePosition float validityWeight[8]; - FindSamplingData(debugPosition.xyz, debugNormal.xyz, _RenderingLayerMask, snappedProbePosition_WS, samplingPosition_WS, samplingPositionNoAntiLeak_WS, probeDistance, normalizedOffset, validityWeight); + FindSamplingData(debugPosition.xyz, debugNormal.xyz, _RenderingLayerMask, snappedProbePosition_WS, samplingPositionNoAntiLeak_WS, probeDistance, normalizedOffset, validityWeight); float samplingFactor = ComputeSamplingFactor(probePosition_WS, snappedProbePosition_WS, normalizedOffset, probeDistance); @@ -62,7 +59,7 @@ if (_ShadingMode == DEBUGPROBESHADINGMODE_RENDERING_LAYER_MASKS) { o.centerCoordSS = _ScreenSize.xy * ComputeNormalizedDeviceCoordinatesWithZ(probePosition_WS, UNITY_MATRIX_VP).xy; - if (_ProbeLayerCount != 1 & 
(asuint(UNITY_ACCESS_INSTANCED_PROP(Props, _RenderingLayer)) & _RenderingLayerMask) == 0) + if (_APVLayerCount != 1 & (asuint(UNITY_ACCESS_INSTANCED_PROP(Props, _RenderingLayer)) & _RenderingLayerMask) == 0) DoCull(o); } } @@ -93,7 +90,8 @@ else if (_ShadingMode == DEBUGPROBESHADINGMODE_VALIDITY) { float validity = UNITY_ACCESS_INSTANCED_PROP(Props, _Validity); - return lerp(float4(0, 1, 0, 1), float4(1, 0, 0, 1), validity); + float threshold = PROBE_VALIDITY_THRESHOLD; + return lerp(float4(0, 1, 0, 1), float4(1, 0, 0, 1), validity > threshold); } else if (_ShadingMode == DEBUGPROBESHADINGMODE_VALIDITY_OVER_DILATION_THRESHOLD) { @@ -117,12 +115,12 @@ float3(204, 121, 167) / 255.0f, }; - if (_ProbeLayerCount == 1) return _DebugEmptyProbeData; // Rendering layers are not baked + if (_APVLayerCount == 1) return _DebugEmptyProbeData; // Rendering layers are not baked uint renderingLayer = asuint(UNITY_ACCESS_INSTANCED_PROP(Props, _RenderingLayer)) & _RenderingLayerMask; uint stripeSize = 8; float3 result = float3(0, 0, 0); - uint2 positionSS = i.vertex.xy; + int2 positionSS = i.vertex.xy; uint layerId = 0, layerCount = countbits(renderingLayer); int colorIndex = 0; @@ -133,7 +131,7 @@ if (layerCount >= 4 && colorIndex == 0 && positionSS.x < i.centerCoordSS.x) colorIndex = 3; - for (uint l = 0; (l < _ProbeLayerCount) && (layerId < layerCount); l++) + for (uint l = 0; (l < _APVLayerCount) && (layerId < layerCount); l++) { [branch] if (renderingLayer & (1U << l)) @@ -240,6 +238,7 @@ v2f vert(appdata v) { v2f o; + ZERO_INITIALIZE(v2f, o); float4 debugPosition = _positionNormalBuffer[0]; float4 debugNormal = _positionNormalBuffer[1]; @@ -249,13 +248,12 @@ float3 snappedProbePosition_WS; // worldspace position of main probe (a corner of the 8 probes cube) float3 samplingPositionNoAntiLeak_WS; // worldspace sampling position after applying 'NormalBias', 'ViewBias' - float3 samplingPosition_WS; // worldspace sampling position after applying 'NormalBias', 'ViewBias' and 
'ValidityAndNormalBased Leak Reduction' float probeDistance; float3 normalizedOffset; // normalized offset between sampling position and snappedProbePosition float validityWeights[8]; float validityWeight = 1.0f; - FindSamplingData(debugPosition.xyz, debugNormal.xyz, _RenderingLayerMask, snappedProbePosition_WS, samplingPosition_WS, samplingPositionNoAntiLeak_WS, probeDistance, normalizedOffset, validityWeights); + FindSamplingData(debugPosition.xyz, debugNormal.xyz, _RenderingLayerMask, snappedProbePosition_WS, samplingPositionNoAntiLeak_WS, probeDistance, normalizedOffset, validityWeights); // QUADS to write the sampling factor of each probe // each QUAD has an individual ID in vertex color blue channel @@ -314,7 +312,10 @@ validityWeight = validityWeights[5]; } - samplingFactor = ComputeSamplingFactor(quadPosition, snappedProbePosition_WS, normalizedOffset, probeDistance); + if (_APVLeakReductionMode == APVLEAKREDUCTIONMODE_QUALITY) + samplingFactor = validityWeight; // this is not 100% accurate in some cases (cause we do max 3 samples) + else + samplingFactor = ComputeSamplingFactor(quadPosition, snappedProbePosition_WS, normalizedOffset, probeDistance); float4 cameraUp = mul(UNITY_MATRIX_I_V, float4(0.0f, 1.0f, 0.0f, 0.0f)); float4 cameraRight = -mul(UNITY_MATRIX_I_V, float4(1.0f, 0.0f, 0.0f, 0.0f)); diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Debug/ProbeVolumeSamplingDebug.shader b/Packages/com.unity.render-pipelines.core/Runtime/Debug/ProbeVolumeSamplingDebug.shader index 5969c6a67cb..e03f043b9dd 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Debug/ProbeVolumeSamplingDebug.shader +++ b/Packages/com.unity.render-pipelines.core/Runtime/Debug/ProbeVolumeSamplingDebug.shader @@ -11,6 +11,7 @@ Shader "Hidden/Core/ProbeVolumeSamplingDebug" #pragma target 4.5 #pragma only_renderers d3d11 playstation xboxone xboxseries vulkan metal switch #pragma multi_compile_fragment _ PROBE_VOLUMES_L1 PROBE_VOLUMES_L2 + //#pragma 
enable_d3d11_debug_symbols #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/EntityLighting.hlsl" #include "Packages/com.unity.render-pipelines.core/Runtime/Debug/ProbeVolumeDebugBase.hlsl" diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Debugging/DebugManager.cs b/Packages/com.unity.render-pipelines.core/Runtime/Debugging/DebugManager.cs index c40e089f39e..846edd43722 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Debugging/DebugManager.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Debugging/DebugManager.cs @@ -247,6 +247,19 @@ public int PanelIndex([DisallowNull] string displayName) return -1; } + /// + /// Returns the panel display name + /// + /// The panelIndex for the panel to get the name + /// The display name of the panel, or empty string otherwise + public string PanelDiplayName([DisallowNull] int panelIndex) + { + if (panelIndex < 0 || panelIndex > m_Panels.Count - 1) + return string.Empty; + + return m_Panels[panelIndex].displayName; + } + /// /// Request DebugWindow to open the specified panel. 
/// diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/DynamicGI/DynamicSkyPrecomputedDirections.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/DynamicGI/DynamicSkyPrecomputedDirections.cs deleted file mode 100644 index c592995e240..00000000000 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/DynamicGI/DynamicSkyPrecomputedDirections.cs +++ /dev/null @@ -1,70 +0,0 @@ -using System.Runtime.CompilerServices; -using RuntimeResources = UnityEngine.Rendering.ProbeReferenceVolume.RuntimeResources; - -namespace UnityEngine.Rendering -{ - internal static class DynamicSkyPrecomputedDirections - { - const int NB_SKY_PRECOMPUTED_DIRECTIONS = 255; - - static ComputeBuffer m_DirectionsBuffer = null; - static Vector3[] m_Directions = null; - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - internal static void GetRuntimeResources(ref RuntimeResources rr) - { - rr.SkyPrecomputedDirections = m_DirectionsBuffer; - } - - [MethodImpl(MethodImplOptions.AggressiveInlining)] - internal static Vector3[] GetPrecomputedDirections() - { - return m_Directions; - } - - internal static void Initialize() - { - if (m_DirectionsBuffer == null) - { - m_Directions = new Vector3[NB_SKY_PRECOMPUTED_DIRECTIONS]; - m_DirectionsBuffer = new ComputeBuffer(m_Directions.Length, 3 * sizeof(float)); - - float sqrtNBpoints = Mathf.Sqrt((float)(NB_SKY_PRECOMPUTED_DIRECTIONS)); - float phi = 0.0f; - float phiMax = 0.0f; - float thetaMax = 0.0f; - - // Spiral based sampling on sphere - // See http://web.archive.org/web/20120331125729/http://www.math.niu.edu/~rusin/known-math/97/spherefaq - // http://www.math.vanderbilt.edu/saffeb/texts/161.pdf - for (int i=0; i < NB_SKY_PRECOMPUTED_DIRECTIONS; i++) - { - // theta from 0 to PI - // phi from 0 to 2PI - float h = -1.0f + (2.0f * i) / (NB_SKY_PRECOMPUTED_DIRECTIONS - 1.0f); - float theta = Mathf.Acos(h); - if (i == NB_SKY_PRECOMPUTED_DIRECTIONS - 1 || i==0) - phi = 0.0f; - 
else - phi = phi + 3.6f / sqrtNBpoints * 1.0f / (Mathf.Sqrt(1.0f-h*h)); - - Vector3 pointOnSphere = new Vector3(Mathf.Sin(theta) * Mathf.Cos(phi), Mathf.Sin(theta) * Mathf.Sin(phi), Mathf.Cos(theta)); - - pointOnSphere.Normalize(); - m_Directions[i] = pointOnSphere; - - phiMax = Mathf.Max(phiMax, phi); - thetaMax = Mathf.Max(thetaMax, theta); - } - m_DirectionsBuffer.SetData(m_Directions); - } - } - - internal static void Cleanup() - { - CoreUtils.SafeRelease(m_DirectionsBuffer); - m_DirectionsBuffer = null; - m_Directions = null; - } - } -} diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeBrickIndex.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeBrickIndex.cs index 65ac48a8a78..07c0360ec2a 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeBrickIndex.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeBrickIndex.cs @@ -216,11 +216,10 @@ internal void ComputeFragmentationRate() public struct IndirectionEntryUpdateInfo { - public int brickCount; public int firstChunkIndex; public int numberOfChunks; public int minSubdivInCell; - // IMPORTANT, These values should be at max resolution. This means that + // IMPORTANT, These values should be at max resolution, independent of minSubdivInCell. This means that // The map to the lower possible resolution is done after. However they are still in local space. 
public Vector3Int minValidBrickIndexForCellAtMaxRes; public Vector3Int maxValidBrickIndexForCellAtMaxResPlusOne; @@ -344,7 +343,7 @@ internal bool ReserveChunks(IndirectionEntryUpdateInfo[] entriesInfo, bool ignor return true; } - static bool BrickOverlapEntry(Vector3Int brickMin, Vector3Int brickMax, Vector3Int entryMin, Vector3Int entryMax) + static internal bool BrickOverlapEntry(Vector3Int brickMin, Vector3Int brickMax, Vector3Int entryMin, Vector3Int entryMax) { return brickMax.x > entryMin.x && entryMax.x > brickMin.x && brickMax.y > entryMin.y && entryMax.y > brickMin.y && diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeIndexOfIndices.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeIndexOfIndices.cs index b288a07c46e..9f2a439ba97 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeIndexOfIndices.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeIndexOfIndices.cs @@ -27,8 +27,10 @@ internal void Pack(out uint[] vals) vals[i] = 0; } - // Note this packing is really really generous, I really think we can get rid of 1 uint at least if we assume we don't go extreme. - // but this is encompassing all scenarios. + // TODO: Note this packing is too generous, we can get rid of 1 uint + // minLocalIndex is in cell space so it has an upper bound + // first chunk index is also on 16bits max when using max memory budget + // see comment below about size of valid // // UINT 0: // FirstChunkIndex 29 bit @@ -38,9 +40,12 @@ internal void Pack(out uint[] vals) // minLocalIdx.y 10 bit // minLocalIdx.z 10 bit // UINT 2: - // maxLocalIdxPlusOne.x 10 bit - // maxLocalIdxPlusOne.y 10 bit - // maxLocalIdxPlusOne.z 10 bit + // sizeOfValid.x 10 bit + // sizeOfValid.y 10 bit + // sizeOfValid.z 10 bit + + // This is always less than CellSize(kEntryMaxSubdivLevel)+1 == 28. 
See GetEntrySubdivLevel() + var sizeOfValid = maxLocalIdxPlusOne - minLocalIdx; vals[0] = (uint)firstChunkIndex & 0x1FFFFFFF; vals[0] |= ((uint)minSubdiv & 0x7) << 29; @@ -49,9 +54,9 @@ internal void Pack(out uint[] vals) vals[1] |= ((uint)minLocalIdx.y & 0x3FF) << 10; vals[1] |= ((uint)minLocalIdx.z & 0x3FF) << 20; - vals[2] = (uint)maxLocalIdxPlusOne.x & 0x3FF; - vals[2] |= ((uint)maxLocalIdxPlusOne.y & 0x3FF) << 10; - vals[2] |= ((uint)maxLocalIdxPlusOne.z & 0x3FF) << 20; + vals[2] = (uint)sizeOfValid.x & 0x3FF; + vals[2] |= ((uint)sizeOfValid.y & 0x3FF) << 10; + vals[2] |= ((uint)sizeOfValid.z & 0x3FF) << 20; } } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Binding.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Binding.cs index 1bbd49d48ab..b9350d710f4 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Binding.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Binding.cs @@ -23,11 +23,10 @@ internal static class ShaderIDs public static readonly int _SkyOcclusionTexL0L1 = Shader.PropertyToID("_SkyOcclusionTexL0L1"); public static readonly int _SkyShadingDirectionIndicesTex = Shader.PropertyToID("_SkyShadingDirectionIndicesTex"); public static readonly int _SkyPrecomputedDirections = Shader.PropertyToID("_SkyPrecomputedDirections"); + public static readonly int _AntiLeakData = Shader.PropertyToID("_AntiLeakData"); } - ComputeBuffer m_EmptyIndexBuffer = null; - ComputeBuffer m_EmptyDirectionsBuffer = null; /// /// Bind the global APV resources @@ -60,6 +59,7 @@ public void BindAPVRuntimeResources(CommandBuffer cmdBuffer, bool isProbeVolumeE cmdBuffer.SetGlobalTexture(ShaderIDs._SkyOcclusionTexL0L1, rr.SkyOcclusionL0L1 ?? 
(RenderTargetIdentifier)CoreUtils.blackVolumeTexture); cmdBuffer.SetGlobalTexture(ShaderIDs._SkyShadingDirectionIndicesTex, rr.SkyShadingDirectionIndices ?? (RenderTargetIdentifier)CoreUtils.blackVolumeTexture); cmdBuffer.SetGlobalBuffer(ShaderIDs._SkyPrecomputedDirections, rr.SkyPrecomputedDirections); + cmdBuffer.SetGlobalBuffer(ShaderIDs._AntiLeakData, rr.QualityLeakReductionData); if (refVolume.shBands == ProbeVolumeSHBands.SphericalHarmonicsL2) { @@ -80,11 +80,6 @@ public void BindAPVRuntimeResources(CommandBuffer cmdBuffer, bool isProbeVolumeE if (m_EmptyIndexBuffer == null) m_EmptyIndexBuffer = new ComputeBuffer(1, sizeof(uint) * 3, ComputeBufferType.Structured); - if(m_EmptyDirectionsBuffer == null) - { - m_EmptyDirectionsBuffer = new ComputeBuffer(1, 3 * sizeof(float), ComputeBufferType.Structured); - } - cmdBuffer.SetGlobalBuffer(ShaderIDs._APVResIndex, m_EmptyIndexBuffer); cmdBuffer.SetGlobalBuffer(ShaderIDs._APVResCellIndices, m_EmptyIndexBuffer); @@ -96,7 +91,8 @@ public void BindAPVRuntimeResources(CommandBuffer cmdBuffer, bool isProbeVolumeE cmdBuffer.SetGlobalTexture(ShaderIDs._SkyOcclusionTexL0L1, CoreUtils.blackVolumeTexture); cmdBuffer.SetGlobalTexture(ShaderIDs._SkyShadingDirectionIndicesTex, CoreUtils.blackVolumeTexture); - cmdBuffer.SetGlobalBuffer(ShaderIDs._SkyPrecomputedDirections, m_EmptyDirectionsBuffer); + cmdBuffer.SetGlobalBuffer(ShaderIDs._SkyPrecomputedDirections, m_EmptyIndexBuffer); + cmdBuffer.SetGlobalBuffer(ShaderIDs._AntiLeakData, m_EmptyIndexBuffer); if (refVolume.shBands == ProbeVolumeSHBands.SphericalHarmonicsL2) { @@ -133,7 +129,6 @@ public bool UpdateShaderVariablesProbeVolumes(CommandBuffer cmd, ProbeVolumesOpt parameters.reflNormalizationLowerClamp = 0.005f; parameters.reflNormalizationUpperClamp = probeVolumeOptions.occlusionOnlyReflectionNormalization.value ? 1.0f : 7.0f; - parameters.minValidNormalWeight = probeVolumeOptions.minValidDotProductValue.value; parameters.skyOcclusionIntensity = skyOcclusion ? 
probeVolumeOptions.skyOcclusionIntensityMultiplier.value : 0.0f; parameters.skyOcclusionShadingDirection = skyOcclusion && skyOcclusionShadingDirection; parameters.regionCount = m_CurrentBakingSet.bakedMaskCount; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Debug.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Debug.cs index b4f0ece71fb..dc943f332fe 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Debug.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Debug.cs @@ -587,7 +587,7 @@ void RefreshDebug(DebugUI.Field field, T value) { probeVolumeDebug.drawVirtualOffsetPush = value; - if (probeVolumeDebug.drawVirtualOffsetPush && probeVolumeDebug.drawProbes) + if (probeVolumeDebug.drawVirtualOffsetPush && probeVolumeDebug.drawProbes && m_CurrentBakingSet != null) { // If probes are being drawn when enabling offset, automatically scale them down to a reasonable size so the arrows aren't obscured by the probes. 
var searchDistance = CellSize(0) * MinBrickSize() / ProbeBrickPool.kBrickCellCount * m_CurrentBakingSet.settings.virtualOffsetSettings.searchMultiplier + m_CurrentBakingSet.settings.virtualOffsetSettings.outOfGeoOffset; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.cs index 1212eab7e9a..e88844498de 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.cs @@ -113,7 +113,6 @@ internal struct ProbeVolumeShadingParameters public float samplingNoise; public float weight; public APVLeakReductionMode leakReductionMode; - public float minValidNormalWeight; public int frameIndexForNoise; public float reflNormalizationLowerClamp; public float reflNormalizationUpperClamp; @@ -173,6 +172,9 @@ internal struct IndirectionEntryInfo { public Vector3Int positionInBricks; public int minSubdiv; + public Vector3Int minBrickPos; + public Vector3Int maxBrickPosPlusOne; + public bool hasMinMax; // should be removed, only kept for migration public bool hasOnlyBiggerBricks; // True if it has only bricks that are bigger than the entry itself } @@ -631,6 +633,10 @@ public struct RuntimeResources /// Precomputed table of shading directions for sky occlusion shading. /// public ComputeBuffer SkyPrecomputedDirections; + /// + /// Precomputed table of sampling mask for quality leak reduction. + /// + public ComputeBuffer QualityLeakReductionData; } bool m_IsInitialized = false; @@ -974,7 +980,7 @@ public void Initialize(in ProbeVolumeSystemParameters parameters) // So we need to split the conditions to plan for that. 
m_DiskStreamingUseCompute = SystemInfo.supportsComputeShaders && streamingUploadCS != null && streamingUploadL2CS != null; InitializeDebug(); - DynamicSkyPrecomputedDirections.Initialize(); + ProbeVolumeConstantRuntimeResources.Initialize(); ProbeBrickPool.Initialize(); ProbeBrickBlendingPool.Initialize(); InitStreaming(); @@ -1040,9 +1046,7 @@ public void Cleanup() { CoreUtils.SafeRelease(m_EmptyIndexBuffer); m_EmptyIndexBuffer = null; - CoreUtils.SafeRelease(m_EmptyDirectionsBuffer); - m_EmptyDirectionsBuffer = null; - DynamicSkyPrecomputedDirections.Cleanup(); + ProbeVolumeConstantRuntimeResources.Cleanup(); #if UNITY_EDITOR UnityEditor.SceneManagement.EditorSceneManager.sceneSaving -= ProbeVolumeBakingSet.OnSceneSaving; @@ -1216,7 +1220,21 @@ internal bool LoadCell(Cell cell, bool ignoreErrorLog = false) for (int entry = 0; entry < indirectionBufferEntries; ++entry) { - int brickCountAtResForEntry = GetNumberOfBricksAtSubdiv(cell.indexInfo.indirectionEntryInfo[entry], ref indexInfo.updateInfo.entriesInfo[entry]); + // TODO: remove, this is for migration + if (!cell.indexInfo.indirectionEntryInfo[entry].hasMinMax) + { + if (cell.data.bricks.IsCreated) + ComputeEntryMinMax(ref cell.indexInfo.indirectionEntryInfo[entry], cell.data.bricks); + else + { + int entrySize = CellSize(GetEntrySubdivLevel()); + cell.indexInfo.indirectionEntryInfo[entry].minBrickPos = Vector3Int.zero; + cell.indexInfo.indirectionEntryInfo[entry].maxBrickPosPlusOne = new Vector3Int(entrySize + 1, entrySize + 1, entrySize + 1); + cell.indexInfo.indirectionEntryInfo[entry].hasMinMax = true; + } + } + + int brickCountAtResForEntry = GetNumberOfBricksAtSubdiv(cell.indexInfo.indirectionEntryInfo[entry]); indexInfo.updateInfo.entriesInfo[entry].numberOfChunks = m_Index.GetNumberOfChunks(brickCountAtResForEntry); } @@ -1230,6 +1248,8 @@ internal bool LoadCell(Cell cell, bool ignoreErrorLog = false) for (int entry = 0; entry < indirectionBufferEntries; ++entry) { + 
indexInfo.updateInfo.entriesInfo[entry].minValidBrickIndexForCellAtMaxRes = indexInfo.indirectionEntryInfo[entry].minBrickPos; + indexInfo.updateInfo.entriesInfo[entry].maxValidBrickIndexForCellAtMaxResPlusOne = indexInfo.indirectionEntryInfo[entry].maxBrickPosPlusOne; indexInfo.updateInfo.entriesInfo[entry].entryPositionInBricksAtMaxRes = indexInfo.indirectionEntryInfo[entry].positionInBricks; indexInfo.updateInfo.entriesInfo[entry].minSubdivInCell = indexInfo.indirectionEntryInfo[entry].minSubdiv; indexInfo.updateInfo.entriesInfo[entry].hasOnlyBiggerBricks = indexInfo.indirectionEntryInfo[entry].hasOnlyBiggerBricks; @@ -1447,35 +1467,58 @@ void PerformPendingDeletion() m_PendingScenesToBeUnloaded.Clear(); } - internal int GetNumberOfBricksAtSubdiv(IndirectionEntryInfo entryInfo, ref ProbeBrickIndex.IndirectionEntryUpdateInfo indirectionEntryUpdateInfo) + internal void ComputeEntryMinMax(ref IndirectionEntryInfo entryInfo, ReadOnlySpan bricks) { - // This is a special case that can be handled manually easily. 
+ int entrySize = CellSize(GetEntrySubdivLevel()); + Vector3Int entry_min = entryInfo.positionInBricks; + Vector3Int entry_max = entryInfo.positionInBricks + new Vector3Int(entrySize, entrySize, entrySize); + if (entryInfo.hasOnlyBiggerBricks) { - indirectionEntryUpdateInfo.minValidBrickIndexForCellAtMaxRes = Vector3Int.zero; - indirectionEntryUpdateInfo.maxValidBrickIndexForCellAtMaxResPlusOne = Vector3Int.one * CellSize(GetEntrySubdivLevel()) + Vector3Int.one; - indirectionEntryUpdateInfo.brickCount = 1; - return indirectionEntryUpdateInfo.brickCount; + entryInfo.minBrickPos = entry_min; + entryInfo.maxBrickPosPlusOne = entry_max; } + else + { + entryInfo.minBrickPos = entryInfo.maxBrickPosPlusOne = Vector3Int.zero; - Vector3 globalBoundsMin = (m_CurrGlobalBounds.min - ProbeOffset()) / MinBrickSize(); - Vector3 globalBoundsMax = (m_CurrGlobalBounds.max - ProbeOffset()) / MinBrickSize(); + bool initialized = false; + for (int i = 0; i < bricks.Length; i++) + { + int brickSize = ProbeReferenceVolume.CellSize(bricks[i].subdivisionLevel); + var brickMin = bricks[i].position; + var brickMax = bricks[i].position + new Vector3Int(brickSize, brickSize, brickSize); + if (!ProbeBrickIndex.BrickOverlapEntry(brickMin, brickMax, entry_min, entry_max)) + continue; - int entrySize = CellSize(GetEntrySubdivLevel()); - Vector3Int entry_min = new Vector3Int(entryInfo.positionInBricks.x , entryInfo.positionInBricks.y, entryInfo.positionInBricks.z); - Vector3Int entry_max = entry_min + new Vector3Int(entrySize, entrySize, entrySize); + if (initialized) + { + entryInfo.minBrickPos = Vector3Int.Min(brickMin, entryInfo.minBrickPos); + entryInfo.maxBrickPosPlusOne = Vector3Int.Max(brickMax, entryInfo.maxBrickPosPlusOne); + } + else + { + entryInfo.minBrickPos = brickMin; + entryInfo.maxBrickPosPlusOne = brickMax; + initialized = true; + } + } + } - Vector3Int intersectBound_min = Vector3Int.Max(entry_min, new Vector3Int(Mathf.CeilToInt(globalBoundsMin.x), 
Mathf.CeilToInt(globalBoundsMin.y), Mathf.CeilToInt(globalBoundsMin.z))); - Vector3Int intersectBound_max = Vector3Int.Min(entry_max, new Vector3Int(Mathf.CeilToInt(globalBoundsMax.x), Mathf.CeilToInt(globalBoundsMax.y), Mathf.CeilToInt(globalBoundsMax.z))); + entryInfo.minBrickPos = entryInfo.minBrickPos - entry_min; + entryInfo.maxBrickPosPlusOne = Vector3Int.one + entryInfo.maxBrickPosPlusOne - entry_min; + entryInfo.hasMinMax = true; + } - indirectionEntryUpdateInfo.minValidBrickIndexForCellAtMaxRes = intersectBound_min - entry_min; - indirectionEntryUpdateInfo.maxValidBrickIndexForCellAtMaxResPlusOne = intersectBound_max - entry_min + Vector3Int.one; + static internal int GetNumberOfBricksAtSubdiv(IndirectionEntryInfo entryInfo) + { + // This is a special case that can be handled manually easily. + if (entryInfo.hasOnlyBiggerBricks) + return 1; - Vector3Int sizeOfValidIndicesAtMaxRes = indirectionEntryUpdateInfo.maxValidBrickIndexForCellAtMaxResPlusOne - indirectionEntryUpdateInfo.minValidBrickIndexForCellAtMaxRes; + Vector3Int sizeOfValidIndicesAtMaxRes = entryInfo.maxBrickPosPlusOne - entryInfo.minBrickPos; Vector3Int bricksForEntry = sizeOfValidIndicesAtMaxRes / CellSize(entryInfo.minSubdiv); - indirectionEntryUpdateInfo.brickCount = bricksForEntry.x * bricksForEntry.y * bricksForEntry.z; - - return indirectionEntryUpdateInfo.brickCount; + return bricksForEntry.x * bricksForEntry.y * bricksForEntry.z; } /// @@ -1596,7 +1639,7 @@ public RuntimeResources GetRuntimeResources() m_Index.GetRuntimeResources(ref rr); m_CellIndices.GetRuntimeResources(ref rr); m_Pool.GetRuntimeResources(ref rr); - DynamicSkyPrecomputedDirections.GetRuntimeResources(ref rr); + ProbeVolumeConstantRuntimeResources.GetRuntimeResources(ref rr); return rr; } @@ -1760,7 +1803,7 @@ void UpdatePool(CommandBuffer cmd, List chunkList, CellStreamingScratchBu void UpdateSharedData(List chunkList, NativeArray validityNeighMaskData, NativeArray skyOcclusionData, NativeArray 
skyShadingDirectionIndices, int chunkIndex) { var chunkSizeInProbes = ProbeBrickPool.GetChunkSizeInBrickCount() * ProbeBrickPool.kBrickProbeCountTotal; - + if (m_CurrentBakingSet.bakedMaskCount == 1) UpdateDataLocationTexture(m_TemporaryDataLocation.TexValidity, validityNeighMaskData.GetSubArray(chunkIndex * chunkSizeInProbes, chunkSizeInProbes)); else @@ -2010,27 +2053,24 @@ internal void UpdateConstantBuffer(CommandBuffer cmd, ProbeVolumeShadingParamete viewBias *= MinDistanceBetweenProbes(); } - if (parameters.regionCount != 1 && leakReductionMode == APVLeakReductionMode.None) - leakReductionMode = APVLeakReductionMode.ValidityBased; - var indexDim = m_CellIndices.GetGlobalIndirectionDimension(); var poolDim = m_Pool.GetPoolDimensions(); m_CellIndices.GetMinMaxEntry(out Vector3Int minEntry, out Vector3Int _); var entriesPerCell = m_CellIndices.entriesPerCellDimension; - var enableSkyOccShadingDir = parameters.skyOcclusionShadingDirection ? 1.0f : 0.0f; + var skyDirectionWeight = parameters.skyOcclusionShadingDirection ? 
1.0f : 0.0f; var probeOffset = ProbeOffset(); ShaderVariablesProbeVolumes shaderVars; - shaderVars._Offset_IndirectionEntryDim = new Vector4(probeOffset.x, probeOffset.y, probeOffset.z, GetEntrySize()); - shaderVars._Weight_MinLoadedCellInEntries = new Vector4(parameters.weight, minLoadedCellPos.x * entriesPerCell, minLoadedCellPos.y * entriesPerCell, minLoadedCellPos.z * entriesPerCell); + shaderVars._Offset_LayerCount = new Vector4(probeOffset.x, probeOffset.y, probeOffset.z, parameters.regionCount); + shaderVars._MinLoadedCellInEntries_IndirectionEntryDim = new Vector4(minLoadedCellPos.x * entriesPerCell, minLoadedCellPos.y * entriesPerCell, minLoadedCellPos.z * entriesPerCell, GetEntrySize()); + shaderVars._MaxLoadedCellInEntries_RcpIndirectionEntryDim = new Vector4((maxLoadedCellPos.x + 1) * entriesPerCell - 1, (maxLoadedCellPos.y + 1) * entriesPerCell - 1, (maxLoadedCellPos.z + 1) * entriesPerCell - 1, 1.0f / GetEntrySize()); shaderVars._PoolDim_MinBrickSize = new Vector4(poolDim.x, poolDim.y, poolDim.z, MinBrickSize()); shaderVars._RcpPoolDim_XY = new Vector4(1.0f / poolDim.x, 1.0f / poolDim.y, 1.0f / poolDim.z, 1.0f / (poolDim.x * poolDim.y)); shaderVars._MinEntryPos_Noise = new Vector4(minEntry.x, minEntry.y, minEntry.z, parameters.samplingNoise); - shaderVars._IndicesDim_FrameIndex = new Vector4(indexDim.x, indexDim.y, indexDim.z, parameters.frameIndexForNoise); + shaderVars._EntryCount_X_XY_LeakReduction = new uint4((uint)indexDim.x, (uint)indexDim.x * (uint)indexDim.y, (uint)leakReductionMode, 0); // One slot available here shaderVars._Biases_NormalizationClamp = new Vector4(normalBias, viewBias, parameters.reflNormalizationLowerClamp, parameters.reflNormalizationUpperClamp); - shaderVars._LeakReduction_SkyOcclusion = new Vector4((int)leakReductionMode, parameters.minValidNormalWeight, parameters.skyOcclusionIntensity, enableSkyOccShadingDir); - shaderVars._MaxLoadedCellInEntries_LayerCount = new Vector4((maxLoadedCellPos.x + 1) * entriesPerCell - 1, 
(maxLoadedCellPos.y + 1) * entriesPerCell - 1, (maxLoadedCellPos.z + 1) * entriesPerCell - 1, parameters.regionCount); - shaderVars._ProbeVolumeLayerMask = parameters.regionLayerMasks; + shaderVars._FrameIndex_Weights = new Vector4(parameters.frameIndexForNoise, parameters.weight, parameters.skyOcclusionIntensity, skyDirectionWeight); + shaderVars._ProbeVolumeLayerMask = parameters.regionLayerMasks; ConstantBuffer.PushGlobal(cmd, shaderVars, m_CBShaderID); } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolume.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolume.cs index 704388ad8fa..3bf0e00562f 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolume.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolume.cs @@ -73,9 +73,9 @@ public enum Mode [SerializeField] internal Matrix4x4 cachedTransform; [SerializeField] internal int cachedHashCode; - /// Whether spaces with no renderers need to be filled with bricks at lowest subdivision level. + /// Whether spaces with no renderers need to be filled with bricks at highest subdivision level. 
[HideInInspector] - [Tooltip("Whether Unity should fill empty space between renderers with bricks at the lowest subdivision level.")] + [Tooltip("Whether Unity should fill empty space between renderers with bricks at the highest subdivision level.")] public bool fillEmptySpaces = false; #if UNITY_EDITOR diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolume.hlsl b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolume.hlsl index 0c378c4d3ed..3c5cdf810e1 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolume.hlsl +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolume.hlsl @@ -10,28 +10,28 @@ #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Color.hlsl" // Unpack variables -#define _WorldOffset _Offset_IndirectionEntryDim.xyz -#define _GlobalIndirectionEntryDim _Offset_IndirectionEntryDim.w -#define _MinBrickSize _PoolDim_MinBrickSize.w -#define _PoolDim _PoolDim_MinBrickSize.xyz -#define _RcpPoolDim _RcpPoolDim_XY.xyz -#define _RcpPoolDimXY _RcpPoolDim_XY.w -#define _MinEntryPosition _MinEntryPos_Noise.xyz -#define _PVSamplingNoise _MinEntryPos_Noise.w -#define _GlobalIndirectionDimension _IndicesDim_FrameIndex.xyz -#define _NoiseFrameIndex _IndicesDim_FrameIndex.w -#define _NormalBias _Biases_NormalizationClamp.x -#define _ViewBias _Biases_NormalizationClamp.y -#define _Weight _Weight_MinLoadedCellInEntries.x -#define _MinLoadedCellInEntries _Weight_MinLoadedCellInEntries.yzw -#define _MaxLoadedCellInEntries _MaxLoadedCellInEntries_LayerCount.xyz -#define _ProbeLayerCount (int)(_MaxLoadedCellInEntries_LayerCount.w) -#define _MinReflProbeNormalizationFactor _Biases_NormalizationClamp.z -#define _MaxReflProbeNormalizationFactor _Biases_NormalizationClamp.w -#define _LeakReductionMode _LeakReduction_SkyOcclusion.x -#define _MinValidNormalWeight _LeakReduction_SkyOcclusion.y -#define _SkyOcclusionIntensity 
_LeakReduction_SkyOcclusion.z -#define _EnableSkyOcclusionShadingDirection _LeakReduction_SkyOcclusion.w +#define _APVWorldOffset _Offset_LayerCount.xyz +#define _APVIndirectionEntryDim _MinLoadedCellInEntries_IndirectionEntryDim.w +#define _APVRcpIndirectionEntryDim _MaxLoadedCellInEntries_RcpIndirectionEntryDim.w +#define _APVMinBrickSize _PoolDim_MinBrickSize.w +#define _APVPoolDim _PoolDim_MinBrickSize.xyz +#define _APVRcpPoolDim _RcpPoolDim_XY.xyz +#define _APVRcpPoolDimXY _RcpPoolDim_XY.w +#define _APVMinEntryPosition _MinEntryPos_Noise.xyz +#define _APVSamplingNoise _MinEntryPos_Noise.w +#define _APVEntryCount _EntryCount_X_XY_LeakReduction.xy +#define _APVLeakReductionMode _EntryCount_X_XY_LeakReduction.z +#define _APVNormalBias _Biases_NormalizationClamp.x +#define _APVViewBias _Biases_NormalizationClamp.y +#define _APVMinLoadedCellInEntries _MinLoadedCellInEntries_IndirectionEntryDim.xyz +#define _APVMaxLoadedCellInEntries _MaxLoadedCellInEntries_RcpIndirectionEntryDim.xyz +#define _APVLayerCount (uint)(_Offset_LayerCount.w) +#define _APVMinReflProbeNormalizationFactor _Biases_NormalizationClamp.z +#define _APVMaxReflProbeNormalizationFactor _Biases_NormalizationClamp.w +#define _APVFrameIndex _FrameIndex_Weights.x +#define _APVWeight _FrameIndex_Weights.y +#define _APVSkyOcclusionWeight _FrameIndex_Weights.z +#define _APVSkyDirectionWeight _FrameIndex_Weights.w #ifndef DECODE_SH #include "Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/DecodeSH.hlsl" @@ -49,9 +49,6 @@ SAMPLER(s_linear_clamp_sampler); SAMPLER(s_point_clamp_sampler); #endif -// TODO: Remove define when we are sure about what to do with this. 
-#define MANUAL_FILTERING 0 - #ifdef USE_APV_TEXTURE_HALF #define TEXTURE3D_APV TEXTURE3D_HALF #else @@ -164,6 +161,7 @@ struct APVSample StructuredBuffer _APVResIndex; StructuredBuffer _APVResCellIndices; StructuredBuffer _SkyPrecomputedDirections; +StructuredBuffer _AntiLeakData; TEXTURE3D_APV(_APVResL0_L1Rx); @@ -188,11 +186,11 @@ float3 AddNoiseToSamplingPosition(float3 posWS, float2 positionSS, float3 direct #ifdef UNITY_SPACE_TRANSFORMS_INCLUDED float3 right = mul((float3x3)GetViewToWorldMatrix(), float3(1.0, 0.0, 0.0)); float3 top = mul((float3x3)GetViewToWorldMatrix(), float3(0.0, 1.0, 0.0)); - float noise01 = InterleavedGradientNoise(positionSS, _NoiseFrameIndex); + float noise01 = InterleavedGradientNoise(positionSS, _APVFrameIndex); float noise02 = frac(noise01 * 100.0); float noise03 = frac(noise01 * 1000.0); direction += top * (noise02 - 0.5) + right * (noise03 - 0.5); - return _PVSamplingNoise > 0 ? posWS + noise01 * _PVSamplingNoise * direction : posWS; + return _APVSamplingNoise > 0 ? posWS + noise01 * _APVSamplingNoise * direction : posWS; #else return posWS; #endif @@ -213,12 +211,12 @@ half GetValidityWeight(uint offset, uint validityMask) float ProbeDistance(uint subdiv) { - return pow(3, subdiv) * _MinBrickSize / 3.0f; + return pow(3, subdiv) * _APVMinBrickSize / 3.0f; } half ProbeDistanceHalf(uint subdiv) { - return pow(half(3), half(subdiv)) * half(_MinBrickSize) / 3.0; + return pow(half(3), half(subdiv)) * half(_APVMinBrickSize) / 3.0; } float3 GetSnappedProbePosition(float3 posWS, uint subdiv) @@ -228,105 +226,65 @@ float3 GetSnappedProbePosition(float3 posWS, uint subdiv) return (dividedPos - frac(dividedPos)) * distBetweenProbes; } -float GetNormalWeight(uint3 offset, float3 posWS, float3 sample0Pos, float3 normalWS, uint subdiv) -{ - // TODO: This can be optimized. 
- float3 samplePos = (sample0Pos - posWS) + (float3)offset * ProbeDistance(subdiv); - float3 vecToProbe = normalize(samplePos); - float weight = saturate(dot(vecToProbe, normalWS) - _MinValidNormalWeight); - return weight; -} - -half GetNormalWeightHalf(uint3 offset, float3 posWS, float3 sample0Pos, float3 normalWS, uint subdiv) -{ - // TODO: This can be optimized. - half3 samplePos = (half3)(sample0Pos - posWS) + (half3)offset * ProbeDistanceHalf(subdiv); - half3 vecToProbe = normalize(samplePos); - half weight = saturate(dot(vecToProbe, (half3)normalWS) - (half)_MinValidNormalWeight); - return weight; -} - // ------------------------------------------------------------- // Indexing functions // ------------------------------------------------------------- -bool LoadCellIndexMetaData(int cellFlatIdx, out int chunkIndex, out int stepSize, out int3 minRelativeIdx, out int3 maxRelativeIdxPlusOne) +bool LoadCellIndexMetaData(uint cellFlatIdx, out uint chunkIndex, out int stepSize, out int3 minRelativeIdx, out uint3 sizeOfValid) { - bool cellIsLoaded = false; uint3 metaData = _APVResCellIndices[cellFlatIdx]; - if (metaData.x != 0xFFFFFFFF) - { - chunkIndex = metaData.x & 0x1FFFFFFF; - stepSize = round(pow(3, (metaData.x >> 29) & 0x7)); + // See ProbeIndexOfIndices.cs for packing + chunkIndex = metaData.x & 0x1FFFFFFF; + stepSize = round(pow(3, (metaData.x >> 29) & 0x7)); - minRelativeIdx.x = metaData.y & 0x3FF; - minRelativeIdx.y = (metaData.y >> 10) & 0x3FF; - minRelativeIdx.z = (metaData.y >> 20) & 0x3FF; + minRelativeIdx.x = metaData.y & 0x3FF; + minRelativeIdx.y = (metaData.y >> 10) & 0x3FF; + minRelativeIdx.z = (metaData.y >> 20) & 0x3FF; - maxRelativeIdxPlusOne.x = metaData.z & 0x3FF; - maxRelativeIdxPlusOne.y = (metaData.z >> 10) & 0x3FF; - maxRelativeIdxPlusOne.z = (metaData.z >> 20) & 0x3FF; - cellIsLoaded = true; - } - else - { - chunkIndex = -1; - stepSize = -1; - minRelativeIdx = -1; - maxRelativeIdxPlusOne = -1; - } + sizeOfValid.x = metaData.z & 0x3FF; + 
sizeOfValid.y = (metaData.z >> 10) & 0x3FF; + sizeOfValid.z = (metaData.z >> 20) & 0x3FF; - return cellIsLoaded; + return metaData.x != 0xFFFFFFFF; } uint GetIndexData(APVResources apvRes, float3 posWS) { - float3 entryPos = floor(posWS / _GlobalIndirectionEntryDim); - float3 topLeftEntryWS = entryPos * _GlobalIndirectionEntryDim; + float3 entryPos = floor(posWS * _APVRcpIndirectionEntryDim); + float3 topLeftEntryWS = entryPos * _APVIndirectionEntryDim; - bool isALoadedCell = all(entryPos >= _MinLoadedCellInEntries) && all(entryPos <= _MaxLoadedCellInEntries); + bool isALoadedCell = all(entryPos >= _APVMinLoadedCellInEntries) && all(entryPos <= _APVMaxLoadedCellInEntries); // Make sure we start from 0 - int3 entryPosInt = (int3)(entryPos - _MinEntryPosition); - - int flatIdx = dot(entryPosInt, int3(1, (int)_GlobalIndirectionDimension.x, ((int)_GlobalIndirectionDimension.x * (int)_GlobalIndirectionDimension.y))); - - int stepSize = 0; - int3 minRelativeIdx, maxRelativeIdxPlusOne; - int chunkIdx = -1; - bool isValidBrick = false; - int locationInPhysicalBuffer = 0; + uint3 entryPosInt = (uint3)(entryPos - _APVMinEntryPosition); + uint flatIdx = dot(entryPosInt, uint3(1, _APVEntryCount.x, _APVEntryCount.y)); // Dynamic branch must be enforced to avoid out-of-bounds memory access in LoadCellIndexMetaData + uint result = 0xffffffff; UNITY_BRANCH if (isALoadedCell) { - if (LoadCellIndexMetaData(flatIdx, chunkIdx, stepSize, minRelativeIdx, maxRelativeIdxPlusOne)) + int stepSize; + int3 minRelativeIdx; + uint3 sizeOfValid; + uint chunkIdx; + if (LoadCellIndexMetaData(flatIdx, chunkIdx, stepSize, minRelativeIdx, sizeOfValid)) { float3 residualPosWS = posWS - topLeftEntryWS; - int3 localBrickIndex = floor(residualPosWS / (_MinBrickSize * stepSize)); - localBrickIndex = min(localBrickIndex, (int3)(3 * 3 * 3 - 1)); // due to floating point issue, we may query an invalid brick - - // Out of bounds. 
- isValidBrick = all(localBrickIndex >= minRelativeIdx) && all(localBrickIndex < maxRelativeIdxPlusOne); - - int3 sizeOfValid = maxRelativeIdxPlusOne - minRelativeIdx; - // Relative to valid region - int3 localRelativeIndexLoc = (localBrickIndex - minRelativeIdx); - int flattenedLocationInCell = dot(localRelativeIndexLoc, int3(sizeOfValid.y, 1, sizeOfValid.x * sizeOfValid.y)); - - locationInPhysicalBuffer = chunkIdx * (int)PROBE_INDEX_CHUNK_SIZE + flattenedLocationInCell; + uint3 localBrickIndex = floor(residualPosWS / (_APVMinBrickSize * stepSize)); + localBrickIndex = min(localBrickIndex, (uint3)(3 * 3 * 3 - 1)); // due to floating point issue, we may query an invalid brick + localBrickIndex -= minRelativeIdx; // Relative to valid region + + UNITY_BRANCH + if (all(localBrickIndex < sizeOfValid)) + { + uint flattenedLocationInCell = dot(localBrickIndex, uint3(sizeOfValid.y, 1, sizeOfValid.x * sizeOfValid.y)); + uint locationInPhysicalBuffer = chunkIdx * (uint)PROBE_INDEX_CHUNK_SIZE + flattenedLocationInCell; + result = apvRes.index[locationInPhysicalBuffer]; + } } } - uint result = 0xffffffff; - - // Dynamic branch must be enforced to avoid out-of-bounds memory access in the physical APV buffer - UNITY_BRANCH if (isValidBrick) - { - result = apvRes.index[locationInPhysicalBuffer]; - } - return result; } @@ -368,17 +326,17 @@ bool TryToGetPoolUVWAndSubdiv(APVResources apvRes, float3 posWSForSample, out fl float flattened_pool_idx = packed_pool_idx & ((1 << 28) - 1); float3 pool_idx; - pool_idx.z = floor(flattened_pool_idx * _RcpPoolDimXY); - flattened_pool_idx -= (pool_idx.z * (_PoolDim.x * _PoolDim.y)); - pool_idx.y = floor(flattened_pool_idx * _RcpPoolDim.x); - pool_idx.x = floor(flattened_pool_idx - (pool_idx.y * _PoolDim.x)); + pool_idx.z = floor(flattened_pool_idx * _APVRcpPoolDimXY); + flattened_pool_idx -= (pool_idx.z * (_APVPoolDim.x * _APVPoolDim.y)); + pool_idx.y = floor(flattened_pool_idx * _APVRcpPoolDim.x); + pool_idx.x = floor(flattened_pool_idx - 
(pool_idx.y * _APVPoolDim.x)); // calculate uv offset and scale - float brickSizeWS = pow(3.0, subdiv) * _MinBrickSize; + float brickSizeWS = pow(3.0, subdiv) * _APVMinBrickSize; float3 offset = frac(posWSForSample.xyz / brickSizeWS); // [0;1] in brick space //offset = clamp( offset, 0.25, 0.75 ); // [0.25;0.75] in brick space (is this actually necessary?) - uvw = (pool_idx + 0.5 + (3.0 * offset)) * _RcpPoolDim; // add offset with brick footprint converted to text footprint in pool texel space + uvw = (pool_idx + 0.5 + (3.0 * offset)) * _APVRcpPoolDim; // add offset with brick footprint converted to text footprint in pool texel space // no valid brick loaded for this index, fallback to ambient probe // Note: we could instead early return when we know we'll have invalid UVs, but some bade code gen on Vulkan generates shader warnings if we do. @@ -387,7 +345,7 @@ bool TryToGetPoolUVWAndSubdiv(APVResources apvRes, float3 posWSForSample, out fl bool TryToGetPoolUVWAndSubdiv(APVResources apvRes, float3 posWS, float3 normalWS, float3 viewDirWS, out float3 uvw, out uint subdiv, out float3 biasedPosWS) { - biasedPosWS = (posWS + normalWS * _NormalBias) + viewDirWS * _ViewBias; + biasedPosWS = (posWS + normalWS * _APVNormalBias) + viewDirWS * _APVViewBias; return TryToGetPoolUVWAndSubdiv(apvRes, biasedPosWS, uvw, subdiv); } @@ -418,17 +376,16 @@ APVSample SampleAPV(APVResources apvRes, float3 uvw) apvSample.L2_C = half3(SAMPLE_TEXTURE3D_LOD(apvRes.L2_3, s_linear_clamp_sampler, uvw, 0).rgb); #endif // PROBE_VOLUMES_L2 - if (_SkyOcclusionIntensity > 0) + if (_APVSkyOcclusionWeight > 0) apvSample.skyOcclusionL0L1 = SAMPLE_TEXTURE3D_LOD(apvRes.SkyOcclusionL0L1, s_linear_clamp_sampler, uvw, 0).rgba; else apvSample.skyOcclusionL0L1 = float4(0, 0, 0, 0); - if (_EnableSkyOcclusionShadingDirection > 0) + if (_APVSkyDirectionWeight > 0) { // No interpolation for sky shading indices - float3 texCoordFloat = uvw * _PoolDim - 0.5f; - int3 texCoordInt = texCoordFloat; - uint index = 
LOAD_TEXTURE3D(apvRes.SkyShadingDirectionIndices, texCoordInt).x * 255.0; + int3 texCoord = uvw * _APVPoolDim - 0.5f; + uint index = LOAD_TEXTURE3D(apvRes.SkyShadingDirectionIndices, texCoord).x * 255.0; if (index == 255) apvSample.skyShadingDirection = float3(0, 0, 0); @@ -482,6 +439,8 @@ void WeightSample(inout APVSample apvSample, half weight) apvSample.L2_B *= weight; apvSample.L2_C *= weight; #endif // PROBE_VOLUMES_L2 + + apvSample.skyOcclusionL0L1 *= weight; } void AccumulateSamples(inout APVSample dst, APVSample other, half weight) @@ -498,6 +457,8 @@ void AccumulateSamples(inout APVSample dst, APVSample other, half weight) dst.L2_B += other.L2_B; dst.L2_C += other.L2_C; #endif // PROBE_VOLUMES_L2 + + dst.skyOcclusionL0L1 += other.skyOcclusionL0L1; } uint LoadValidityMask(APVResources apvRes, uint renderingLayer, int3 coord) @@ -505,7 +466,7 @@ uint LoadValidityMask(APVResources apvRes, uint renderingLayer, int3 coord) float rawValidity = LOAD_TEXTURE3D(apvRes.Validity, coord).x; uint validityMask; - if (_ProbeLayerCount == 1) + if (_APVLayerCount == 1) { validityMask = rawValidity * 255.0; } @@ -530,143 +491,133 @@ uint LoadValidityMask(APVResources apvRes, uint renderingLayer, int3 coord) return validityMask; } -APVSample ManuallyFilteredSample(APVResources apvRes, float3 posWS, float3 normalWS, uint renderingLayer, int subdiv, float3 biasedPosWS, float3 uvw) +float UnpackSamplingWeight(uint mask, float3 texFrac) { - float3 texCoordFloat = uvw * _PoolDim - .5f; - int3 texCoordInt = texCoordFloat; - float3 texFrac = frac(texCoordFloat); - float3 oneMinTexFrac = 1.0f - texFrac; + int3 dir = 0; + dir.x = (int)(mask >> 1) & 3; + dir.y = (int)(mask >> 4) & 3; + dir.z = (int)(mask >> 7) & 3; - bool sampled = false; - float totalW = 0.0f; + float3 weights; + weights.x = saturate(mask & 1) + (dir.x - 1) * texFrac.x; + weights.y = saturate(mask & 8) + (dir.y - 1) * texFrac.y; + weights.z = saturate(mask & 64) + (dir.z - 1) * texFrac.z; - APVSample baseSample; + 
return weights.x * weights.y * weights.z; +} - float3 positionCentralProbe = GetSnappedProbePosition(biasedPosWS, subdiv); +void UnpackSamplingOffset(uint mask, float3 texFrac, out float3 uvwOffset) +{ + uvwOffset.x = saturate(mask & 4) + (saturate(mask & 2) * texFrac.x - texFrac.x); + uvwOffset.y = saturate(mask & 32) + (saturate(mask & 16) * texFrac.y - texFrac.y); + uvwOffset.z = saturate(mask & 256) + (saturate(mask & 128) * texFrac.z - texFrac.z); +} - ZERO_INITIALIZE(APVSample, baseSample); +APVSample QualityLeakReduction(APVResources apvRes, uint renderingLayer, inout float3 uvw) +{ + float3 texCoord = uvw * _APVPoolDim - .5f; + float3 texFrac = frac(texCoord); - uint validityMask = LoadValidityMask(apvRes, renderingLayer, texCoordInt); - for (uint i = 0; i < 8; ++i) + half3 offset; + + uint validityMask = LoadValidityMask(apvRes, renderingLayer, texCoord); + uint antileak = _AntiLeakData[validityMask]; + + UnpackSamplingOffset(antileak >> 0, texFrac, offset); + APVSample apvSample = SampleAPV(apvRes, uvw + offset * _APVRcpPoolDim); + + // Optional additional samples for quality leak reduction + if (validityMask != 0xFF) { - uint3 offset = GetSampleOffset(i); - float trilinearW = - ((offset.x == 1) ? texFrac.x : oneMinTexFrac.x) * - ((offset.y == 1) ? texFrac.y : oneMinTexFrac.y) * - ((offset.z == 1) ? 
texFrac.z : oneMinTexFrac.z); + float3 weights; + weights.x = UnpackSamplingWeight(antileak >> 0, texFrac); + weights.y = UnpackSamplingWeight(antileak >> 9, texFrac); + weights.z = UnpackSamplingWeight(antileak >> 18, texFrac); - half validityWeight = GetValidityWeight(i, validityMask); + weights *= rcp(max(0.0001, weights.x+weights.y+weights.z)); + WeightSample(apvSample, weights.x); - if (validityWeight > 0) + UNITY_BRANCH if (weights.y != 0) { - APVSample apvSample = LoadAndDecodeAPV(apvRes, texCoordInt + offset); - half geoW = GetNormalWeightHalf(offset, posWS, positionCentralProbe, normalWS, subdiv); + UnpackSamplingOffset(antileak >> 9, texFrac, offset); + APVSample partialSample = SampleAPV(apvRes, uvw + offset * _APVRcpPoolDim); + AccumulateSamples(apvSample, partialSample, weights.y); + } - half finalW = half(geoW * trilinearW); - AccumulateSamples(baseSample, apvSample, finalW); - totalW += finalW; + UNITY_BRANCH if (weights.z != 0) + { + UnpackSamplingOffset(antileak >> 18, texFrac, offset); + APVSample partialSample = SampleAPV(apvRes, uvw + offset * _APVRcpPoolDim); + AccumulateSamples(apvSample, partialSample, weights.z); } } - WeightSample(baseSample, half(rcp(totalW))); - - return baseSample; + return apvSample; } -void WarpUVWLeakReduction(APVResources apvRes, float3 posWS, float3 normalWS, uint renderingLayer, uint subdiv, float3 biasedPosWS, inout float3 uvw, out float3 normalizedOffset, out float validityWeights[8]) +void WarpUVWLeakReduction(APVResources apvRes, uint renderingLayer, inout float3 uvw) { - float3 texCoordFloat = uvw * _PoolDim - 0.5f; - int3 texCoordInt = texCoordFloat; - half3 texFrac = half3(frac(texCoordFloat)); - uint validityMask = LoadValidityMask(apvRes, renderingLayer, texCoordInt); + float3 texCoord = uvw * _APVPoolDim - 0.5f; + half3 texFrac = half3(frac(texCoord)); - if (_LeakReductionMode == APVLEAKREDUCTIONMODE_VALIDITY_AND_NORMAL_BASED || validityMask != 0xFF) + uint validityMask = LoadValidityMask(apvRes, 
renderingLayer, texCoord); + if (validityMask != 0xFF) { - half4 weights[2]; + half weights[8]; half totalW = 0.0; - - float3 positionCentralProbe = GetSnappedProbePosition(biasedPosWS, subdiv); - half3 oneMinTexFrac = 1.0 - texFrac; - uint i = 0; + half3 offset = 0; + uint i; UNITY_UNROLL for (i = 0; i < 8; ++i) { - uint3 offset = GetSampleOffset(i); + uint3 probeOffset = GetSampleOffset(i); half validityWeight = - ((offset.x == 1) ? texFrac.x : oneMinTexFrac.x) * - ((offset.y == 1) ? texFrac.y : oneMinTexFrac.y) * - ((offset.z == 1) ? texFrac.z : oneMinTexFrac.z); + ((probeOffset.x == 1) ? texFrac.x : 1.0f - texFrac.x) * + ((probeOffset.y == 1) ? texFrac.y : 1.0f - texFrac.y) * + ((probeOffset.z == 1) ? texFrac.z : 1.0f - texFrac.z); validityWeight *= GetValidityWeight(i, validityMask); - - if (_LeakReductionMode == APVLEAKREDUCTIONMODE_VALIDITY_AND_NORMAL_BASED) - validityWeight *= GetNormalWeightHalf(offset, posWS, positionCentralProbe, normalWS, subdiv); - half weight = saturate(validityWeight); - - weights[i/4][i%4] = weight; + weights[i] = weight; totalW += weight; } + offset = -texFrac; half rcpTotalW = rcp(max(0.0001, totalW)); - weights[0] *= rcpTotalW; - weights[1] *= rcpTotalW; - - half3 fracOffset = -texFrac; UNITY_UNROLL for (i = 0; i < 8; ++i) { - uint3 offset = GetSampleOffset(i); - fracOffset += (half3)offset * weights[i/4][i%4]; + uint3 probeOffset = GetSampleOffset(i); + offset += (half3)probeOffset * (weights[i] * rcpTotalW); } - uvw = uvw + (float3)fracOffset * _RcpPoolDim; - } - - // Output values used for debug only - UNITY_UNROLL - for (uint i = 0; i < 8; i++) - { - int3 probeCoord = GetSampleOffset(i); - half validityWeight = GetValidityWeight(i, validityMask); - validityWeights[i] = validityWeight; + uvw += (float3)offset * _APVRcpPoolDim; } - - normalizedOffset = (float3)(uvw * _PoolDim - (texCoordInt + 0.5)); -} - -void WarpUVWLeakReduction(APVResources apvRes, float3 posWS, float3 normalWS, uint renderingLayer, uint subdiv, float3 
biasedPosWS, inout float3 uvw) -{ - float3 normalizedOffset; - float validityWeights[8]; - WarpUVWLeakReduction(apvRes, posWS, normalWS, renderingLayer, subdiv, biasedPosWS, uvw, normalizedOffset, validityWeights); } APVSample SampleAPV(APVResources apvRes, float3 posWS, float3 biasNormalWS, uint renderingLayer, float3 viewDir) { APVSample outSample; - posWS -= _WorldOffset; + posWS -= _APVWorldOffset; - float3 pool_uvw; uint subdiv; + float3 pool_uvw; float3 biasedPosWS; if (TryToGetPoolUVWAndSubdiv(apvRes, posWS, biasNormalWS, viewDir, pool_uvw, subdiv, biasedPosWS)) { -#if MANUAL_FILTERING == 1 - if (_LeakReductionMode != 0) - outSample = ManuallyFilteredSample(apvRes, posWS, biasNormalWS, renderingLayer, subdiv, biasedPosWS, pool_uvw); + UNITY_BRANCH if (_APVLeakReductionMode == APVLEAKREDUCTIONMODE_QUALITY) + { + outSample = QualityLeakReduction(apvRes, renderingLayer, pool_uvw); + } else - outSample = SampleAPV(apvRes, pool_uvw); -#else - if (_LeakReductionMode != 0) { - WarpUVWLeakReduction(apvRes, posWS, biasNormalWS, renderingLayer, subdiv, biasedPosWS, pool_uvw); + if (_APVLeakReductionMode == APVLEAKREDUCTIONMODE_PERFORMANCE) + WarpUVWLeakReduction(apvRes, renderingLayer, pool_uvw); + outSample = SampleAPV(apvRes, pool_uvw); } - outSample = SampleAPV(apvRes, pool_uvw); -#endif } else { @@ -694,7 +645,7 @@ float EvalSHSkyOcclusion(float3 dir, APVSample apvSample) { // L0 L1 float4 temp = float4(kSHBasis0, kSHBasis1 * dir.x, kSHBasis1 * dir.y, kSHBasis1 * dir.z); - return _SkyOcclusionIntensity * dot(temp, apvSample.skyOcclusionL0L1); + return _APVSkyOcclusionWeight * dot(temp, apvSample.skyOcclusionL0L1); } float3 EvaluateOccludedSky(APVSample apvSample, float3 N) @@ -702,7 +653,7 @@ float3 EvaluateOccludedSky(APVSample apvSample, float3 N) float occValue = EvalSHSkyOcclusion(N, apvSample); float3 shadingNormal = N; - if (_EnableSkyOcclusionShadingDirection > 0) + if (_APVSkyDirectionWeight > 0) { shadingNormal = apvSample.skyShadingDirection; float 
normSquared = dot(shadingNormal, shadingNormal); @@ -754,12 +705,12 @@ void EvaluateAdaptiveProbeVolume(APVSample apvSample, float3 normalWS, out float #endif bakeDiffuseLighting += apvSample.L0; - if (_SkyOcclusionIntensity > 0) + if (_APVSkyOcclusionWeight > 0) bakeDiffuseLighting += EvaluateOccludedSky(apvSample, normalWS); - //if (_Weight < 1.f) + //if (_APVWeight < 1.f) { - bakeDiffuseLighting = bakeDiffuseLighting * _Weight; + bakeDiffuseLighting = bakeDiffuseLighting * _APVWeight; } } else @@ -785,16 +736,16 @@ void EvaluateAdaptiveProbeVolume(APVSample apvSample, float3 normalWS, float3 ba bakeDiffuseLighting += apvSample.L0; backBakeDiffuseLighting += apvSample.L0; - if (_SkyOcclusionIntensity > 0) + if (_APVSkyOcclusionWeight > 0) { bakeDiffuseLighting += EvaluateOccludedSky(apvSample, normalWS); backBakeDiffuseLighting += EvaluateOccludedSky(apvSample, backNormalWS); } - //if (_Weight < 1.f) + //if (_APVWeight < 1.f) { - bakeDiffuseLighting = bakeDiffuseLighting * _Weight; - backBakeDiffuseLighting = backBakeDiffuseLighting * _Weight; + bakeDiffuseLighting = bakeDiffuseLighting * _APVWeight; + backBakeDiffuseLighting = backBakeDiffuseLighting * _APVWeight; } } else @@ -816,9 +767,7 @@ void EvaluateAdaptiveProbeVolume(in float3 posWS, in float3 normalWS, in float3 if (apvSample.status != APV_SAMPLE_STATUS_INVALID) { -#if MANUAL_FILTERING == 0 apvSample.Decode(); -#endif #ifdef PROBE_VOLUMES_L1 EvaluateAPVL1(apvSample, normalWS, bakeDiffuseLighting); @@ -833,17 +782,17 @@ void EvaluateAdaptiveProbeVolume(in float3 posWS, in float3 normalWS, in float3 bakeDiffuseLighting += apvSample.L0; backBakeDiffuseLighting += apvSample.L0; lightingInReflDir += apvSample.L0; - if (_SkyOcclusionIntensity > 0) + if (_APVSkyOcclusionWeight > 0) { bakeDiffuseLighting += EvaluateOccludedSky(apvSample, normalWS); backBakeDiffuseLighting += EvaluateOccludedSky(apvSample, backNormalWS); lightingInReflDir += EvaluateOccludedSky(apvSample, reflDir); } - //if (_Weight < 1.f) + 
//if (_APVWeight < 1.f) { - bakeDiffuseLighting = bakeDiffuseLighting * _Weight; - backBakeDiffuseLighting = backBakeDiffuseLighting * _Weight; + bakeDiffuseLighting = bakeDiffuseLighting * _APVWeight; + backBakeDiffuseLighting = backBakeDiffuseLighting * _APVWeight; } } else @@ -882,7 +831,7 @@ void EvaluateAdaptiveProbeVolume(in float3 posWS, in float2 positionSS, out floa APVResources apvRes = FillAPVResources(); posWS = AddNoiseToSamplingPosition(posWS, positionSS, 1); - posWS -= _WorldOffset; + posWS -= _APVWorldOffset; float3 uvw; if (TryToGetPoolUVW(apvRes, posWS, 0, 0, uvw)) @@ -938,7 +887,7 @@ float GetReflectionProbeNormalizationFactor(float3 lightingInReflDir, float3 sam float refProbeNormalization = EvaluateReflectionProbeSH(sampleDir, reflProbeSHL0L1, reflProbeSHL2_1, reflProbeSHL2_2); float localNormalization = Luminance(real3(lightingInReflDir)); - return lerp(1.f, clamp(SafeDiv(localNormalization, refProbeNormalization), _MinReflProbeNormalizationFactor, _MaxReflProbeNormalizationFactor), _Weight); + return lerp(1.f, clamp(SafeDiv(localNormalization, refProbeNormalization), _APVMinReflProbeNormalizationFactor, _APVMaxReflProbeNormalizationFactor), _APVWeight); } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeBakingProcessSettings.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeBakingProcessSettings.cs index c45014d3439..7bb2397b40a 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeBakingProcessSettings.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeBakingProcessSettings.cs @@ -11,7 +11,7 @@ internal struct ProbeDilationSettings internal void SetDefaults() { - enableDilation = true; + enableDilation = false; dilationDistance = 1; dilationValidityThreshold = 0.25f; dilationIterations = 1; diff --git 
a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeBakingSet.Editor.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeBakingSet.Editor.cs index 31a699aa6b0..26fdca19124 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeBakingSet.Editor.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeBakingSet.Editor.cs @@ -422,9 +422,9 @@ static internal int MaxSubdivLevelInProbeVolume(Vector3 volumeSize, int maxSubdi { float maxSizedDim = Mathf.Max(volumeSize.x, Mathf.Max(volumeSize.y, volumeSize.z)); float maxSideInBricks = maxSizedDim / ProbeReferenceVolume.instance.MinDistanceBetweenProbes(); - int subdiv = Mathf.FloorToInt(Mathf.Log(maxSideInBricks, 3)) - 1; + int subdiv = Mathf.FloorToInt(Mathf.Log(maxSideInBricks, 3)); - return Mathf.Min(subdiv, maxSubdiv); + return Mathf.Max(subdiv, maxSubdiv) - 1; } static void InflateBound(ref Bounds bounds, ProbeVolume pv) diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeConstantRuntimeResources.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeConstantRuntimeResources.cs new file mode 100644 index 00000000000..04e8c8106a2 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeConstantRuntimeResources.cs @@ -0,0 +1,299 @@ +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using Unity.Mathematics; + +using RuntimeResources = UnityEngine.Rendering.ProbeReferenceVolume.RuntimeResources; + +namespace UnityEngine.Rendering +{ + static class ProbeVolumeConstantRuntimeResources + { + static ComputeBuffer m_SkySamplingDirectionsBuffer = null; + static ComputeBuffer m_AntiLeakDataBuffer = null; + + [MethodImpl(MethodImplOptions.AggressiveInlining)] + internal static void GetRuntimeResources(ref RuntimeResources rr) + { + 
rr.SkyPrecomputedDirections = m_SkySamplingDirectionsBuffer; + rr.QualityLeakReductionData = m_AntiLeakDataBuffer; + } + + internal static void Initialize() + { + if (m_SkySamplingDirectionsBuffer == null) + { + k_SkyDirections = GenerateSkyDirections(); + m_SkySamplingDirectionsBuffer = new ComputeBuffer(k_SkyDirections.Length, 3 * sizeof(float)); + m_SkySamplingDirectionsBuffer.SetData(k_SkyDirections); + } + + if (m_AntiLeakDataBuffer == null) + { + m_AntiLeakDataBuffer = new ComputeBuffer(k_AntiLeakData.Length, sizeof(uint)); + m_AntiLeakDataBuffer.SetData(k_AntiLeakData); + } + } + + public static Vector3[] GetSkySamplingDirections() + { + return k_SkyDirections; + } + + internal static void Cleanup() + { + CoreUtils.SafeRelease(m_SkySamplingDirectionsBuffer); + m_SkySamplingDirectionsBuffer = null; + + CoreUtils.SafeRelease(m_AntiLeakDataBuffer); + m_AntiLeakDataBuffer = null; + } + + #region Sky Directions Buffer generator + const int NB_SKY_PRECOMPUTED_DIRECTIONS = 255; + static Vector3[] k_SkyDirections = new Vector3[NB_SKY_PRECOMPUTED_DIRECTIONS]; + + static Vector3[] GenerateSkyDirections() + { + var skyDirections = new Vector3[NB_SKY_PRECOMPUTED_DIRECTIONS]; + + float sqrtNBpoints = Mathf.Sqrt((float)(NB_SKY_PRECOMPUTED_DIRECTIONS)); + float phi = 0.0f; + float phiMax = 0.0f; + float thetaMax = 0.0f; + + // Spiral based sampling on sphere + // See http://web.archive.org/web/20120331125729/http://www.math.niu.edu/~rusin/known-math/97/spherefaq + // http://www.math.vanderbilt.edu/saffeb/texts/161.pdf + for (int i=0; i < NB_SKY_PRECOMPUTED_DIRECTIONS; i++) + { + // theta from 0 to PI + // phi from 0 to 2PI + float h = -1.0f + (2.0f * i) / (NB_SKY_PRECOMPUTED_DIRECTIONS - 1.0f); + float theta = Mathf.Acos(h); + if (i == NB_SKY_PRECOMPUTED_DIRECTIONS - 1 || i==0) + phi = 0.0f; + else + phi = phi + 3.6f / sqrtNBpoints * 1.0f / (Mathf.Sqrt(1.0f-h*h)); + + Vector3 pointOnSphere = new Vector3(Mathf.Sin(theta) * Mathf.Cos(phi), Mathf.Sin(theta) * Mathf.Sin(phi), 
Mathf.Cos(theta)); + + pointOnSphere.Normalize(); + skyDirections[i] = pointOnSphere; + + phiMax = Mathf.Max(phiMax, phi); + thetaMax = Mathf.Max(thetaMax, theta); + } + + return skyDirections; + } + #endregion + + #region AntiLeak Buffer generator +#if UNITY_EDITOR + static uint3 GetSampleOffset(uint i) + { + return new uint3(i, i >> 1, i >> 2) & 1; + } + static int GetProbeIndex(int x, int y, int z) + { + return x + y * 2 + z * 4; + } + + static uint BuildFace(int axis, int idx) + { + uint mask = 0; + int[] coords = new int[3]; + coords[axis] = idx; + for (int i = 0; i < 2; i++) + { + coords[(axis + 1) % 3] = i; + for (int j = 0; j < 2; j++) + { + coords[(axis + 2) % 3] = j; + mask = mask | (uint)(1 << GetProbeIndex(coords[0], coords[1], coords[2])); + } + } + return mask; + } + + static bool TryGetEdge(uint validityMask, uint samplingMask, out uint edge, out uint3 offset) + { + for (int i = 0; i < 8; i++) + { + if ((validityMask & (1 << i)) == 0) + continue; + + uint3 p = GetSampleOffset((uint)i); + if (p.x == 0) + { + edge = (1u << i) | (1u << GetProbeIndex(1, (int)p.y, (int)p.z)); + if ((validityMask & edge) == edge && (samplingMask & edge) == 0) + { + offset = 2 * p; + offset.x = 1; + return true; + } + } + if (p.y == 0) + { + edge = (1u << i) | (1u << GetProbeIndex((int)p.x, 1, (int)p.z)); + if ((validityMask & edge) == edge && (samplingMask & edge) == 0) + { + offset = 2 * p; + offset.y = 1; + return true; + } + } + if (p.z == 0) + { + edge = (1u << i) | (1u << GetProbeIndex((int)p.x, (int)p.y, 1)); + if ((validityMask & edge) == edge && (samplingMask & edge) == 0) + { + offset = 2 * p; + offset.z = 1; + return true; + } + } + } + + edge = 0; + offset = 0; + return false; + } + + static List ComputeMask(uint validityMask) + { + List samples = new(); + + // Cube sample + if (validityMask == 0 || validityMask == 255) + { + samples.Add(1); + return samples; + } + + // track which probes are sampled + uint samplingMask = 0; + + // Find face sample + for (int i 
= 0; i < 6; i++) + { + int axis = i / 2; + uint face = BuildFace(axis, i % 2); + if ((validityMask & face) == face) // all face is valid, sample it + { + uint3 offset = 0; + offset[axis] = (i % 2) == 0 ? 0u : 2u; + offset[(axis + 1) % 3] = 1; + offset[(axis + 2) % 3] = 1; + + samples.Add(offset); + samplingMask = face; + break; + } + } + + // Find edge samples + while (true) + { + if (!TryGetEdge(validityMask, samplingMask, out uint edge, out uint3 offset)) + break; + + samples.Add(offset); + samplingMask |= edge; + } + + // Find single probe samples + for (int i = 0; i < 8; i++) + { + if (((1 << i) & (validityMask & ~samplingMask)) == 0) + continue; + samples.Add(2 * GetSampleOffset((uint)i)); + samplingMask |= (uint)(1 << i); + } + + return samples; + } + + static uint PackSamplingDir(uint val) + { + // On a single axis there is up to 2 probes. A face or edge sample needs to sample in between the probes + // We encode 0 as sample first probe, 1 as sample between probe, 2 as sample second probe (2 bits) + // For faster decoding, we use a third bit that reduces ALU in shader + return /* 2 bits */ (val << 1) | /* 1 bit */ ((~val & 2) >> 1); + } + + static uint InvalidSampleMask() + { + // This is a special code that results in no sampling in shader without any additional ALU + return 2 | (2 << 3) | (2 << 6); + } + + static uint ComputeAntiLeakData(uint validityMask) + { + // This may generate more than 3 samples, but we limit to 3 + var samples = ComputeMask(validityMask); + uint mask = 0; + + for (int i = 0; i < 3; i++) + { + uint sampleMask; + if (i < samples.Count) + sampleMask = PackSamplingDir(samples[i].x) | (PackSamplingDir(samples[i].y) << 3) | (PackSamplingDir(samples[i].z) << 6); + else + sampleMask = InvalidSampleMask(); + + // 32bits - 9bits per samples (up to 3 samples) + // Each sample encodes sampling on each axis (3axis * 3bits) + // See PackSamplingDir for axis encoding + mask |= sampleMask << (9 * i); + } + + return mask; + } + + 
[UnityEditor.MenuItem("Edit/Rendering/Global Illumination/Generate AntiLeak Buffer")] + static uint[] BuildAntiLeakDataArray() + { + uint[] antileak = new uint[256]; + for (uint validityMask = 0; validityMask < 256; validityMask++) + antileak[validityMask] = ComputeAntiLeakData(validityMask); + + string str = "static uint[] k_AntiLeakData = new uint[256] {\n"; + for (int i = 0; i < 16; i++) + { + str += " "; + for (int j = 0; j < 16; j++) + { + str += antileak[i * 16 + j] + (j == 15 ? ",\n" : ", "); + } + } + str += " };"; + Debug.Log(str); + + return antileak; + } + #endif + + // This is autogenerated using the MenuItem above -- do not edit by hand + static uint[] k_AntiLeakData = new uint[256] { + 38347995, 38347849, 38347852, 38347851, 38347873, 38347865, 38322764, 38322763, 38347876, 38324297, 38347868, 38324299, 38347875, 38324313, 38322780, 38347867, + 38348041, 38347977, 38408780, 38408779, 38408801, 38408793, 69517900, 69517899, 38408804, 38324425, 38408796, 69519435, 38408803, 69519449, 69517916, 38408795, + 38348044, 38410313, 38347980, 38410315, 38410337, 38410329, 38322892, 70304331, 38410340, 70305865, 38410332, 70305867, 38410339, 70305881, 70304348, 38410331, + 38348043, 38410441, 38408908, 38347979, 38322955, 38409817, 69518028, 38322891, 38324491, 70305993, 38409820, 38324427, 38409827, 26351193, 25564764, 38323915, + 38348065, 38421065, 38421068, 38421067, 38348001, 38421081, 38312161, 38388299, 38421092, 75810889, 38421084, 75810891, 38421091, 75810905, 38388316, 38421083, + 38348057, 38421193, 38312217, 38416971, 38408929, 38347993, 69507297, 38312153, 38324505, 75811017, 38416988, 26358347, 38416995, 38324441, 69583452, 38320345, + 38421260, 75896905, 38421196, 75896907, 38410465, 75896921, 38388428, 70369867, 75896932, 70305865, 75896924, 70305867, 75896931, 70305881, 70369884, 75896923, + 38421259, 75897033, 38417100, 38421195, 38409953, 38410457, 69583564, 38377689, 75811083, 70305993, 75896412, 75811019, 75896419, 70306009, 70107740, 
70301913, + 38348068, 38422601, 38422604, 38422603, 38422625, 38422617, 76595788, 76595787, 38348004, 38310628, 38422620, 38389835, 38422627, 38389849, 76595804, 38422619, + 38422793, 38422729, 76681804, 76681803, 76681825, 76681817, 69517900, 69517899, 38408932, 38389961, 76681820, 69584971, 76681827, 69584985, 69517916, 76681819, + 38348060, 38310684, 38422732, 38418507, 38322972, 38418521, 76595916, 25573451, 38410468, 70292196, 38347996, 38310620, 38418531, 70371417, 38322908, 38318812, + 38422795, 38418633, 76681932, 38422731, 76595979, 76682841, 69518028, 76595915, 38409956, 70371529, 38408924, 38376156, 76682851, 70109273, 69518044, 69513948, + 38348067, 38310691, 38312227, 38422091, 38422753, 38422105, 76585185, 76661323, 38421220, 75797220, 38422108, 75876427, 38348003, 38310627, 38312163, 38311651, + 38422809, 38422217, 76585241, 76689995, 76681953, 38422745, 69507297, 76585177, 38417124, 75876553, 76690012, 73779275, 38408931, 38389977, 69507299, 76593369, + 38421276, 75797276, 38422220, 75905099, 38418657, 75905113, 76661452, 74564171, 75897060, 70292196, 38421212, 75797212, 38410467, 70292195, 38388444, 75805404, + 38348059, 38310683, 38312219, 38422219, 38322971, 38418649, 25467163, 76650713, 38324507, 26252059, 38417116, 75862748, 38409955, 70371545, 69583580, 38347995, + }; + #endregion + } +} diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/DynamicGI/DynamicSkyPrecomputedDirections.cs.meta b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeConstantRuntimeResources.cs.meta similarity index 100% rename from Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/DynamicGI/DynamicSkyPrecomputedDirections.cs.meta rename to Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeConstantRuntimeResources.cs.meta diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumesOptions.cs 
b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumesOptions.cs index 4b2967e01d5..3015c5abcd8 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumesOptions.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumesOptions.cs @@ -33,13 +33,13 @@ public sealed class ProbeVolumesOptions : VolumeComponent /// The overridden normal bias to be applied to the world position when sampling the Adaptive Probe Volumes data structure. Unit is meters. /// [Tooltip("The overridden normal bias to be applied to the world position when sampling the Adaptive Probe Volumes data structure. Unit is meters.")] - public ClampedFloatParameter normalBias = new ClampedFloatParameter(0.33f, 0.0f, 2.0f); + public ClampedFloatParameter normalBias = new ClampedFloatParameter(0.05f, 0.0f, 2.0f); /// /// A bias alongside the view vector to be applied to the world position when sampling the Adaptive Probe Volumes data structure. Unit is meters. /// [Tooltip("A bias alongside the view vector to be applied to the world position when sampling the Adaptive Probe Volumes data structure. Unit is meters.")] - public ClampedFloatParameter viewBias = new ClampedFloatParameter(0.0f, 0.0f, 2.0f); + public ClampedFloatParameter viewBias = new ClampedFloatParameter(0.1f, 0.0f, 2.0f); /// /// Whether to scale the bias for Adaptive Probe Volumes by the minimum distance between probes. @@ -64,12 +64,12 @@ public sealed class ProbeVolumesOptions : VolumeComponent /// Method used to reduce leaks. /// [Tooltip("Method used to reduce leaks. 
Currently available modes are crude, but cheap methods.")] - public APVLeakReductionModeParameter leakReductionMode = new APVLeakReductionModeParameter(APVLeakReductionMode.ValidityAndNormalBased); + public APVLeakReductionModeParameter leakReductionMode = new APVLeakReductionModeParameter(APVLeakReductionMode.Quality); /// - /// The minimum value that the dot product between the sample position normal and the vector to contributing probe need to have to have the probe considered. + /// This parameter isn't used anymore. /// - [Tooltip("The minimum value that the dot product between the sample position normal and the vector to contributing probe need to have to have the probe considered.")] + [Obsolete("This parameter isn't used anymore.")] public ClampedFloatParameter minValidDotProductValue = new ClampedFloatParameter(0.1f, -1.0f, 0.33f); /// diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ShaderVariablesProbeVolumes.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ShaderVariablesProbeVolumes.cs index e3cbec01191..752f1cab935 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ShaderVariablesProbeVolumes.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ShaderVariablesProbeVolumes.cs @@ -1,3 +1,4 @@ +using System; using Unity.Mathematics; namespace UnityEngine.Rendering @@ -6,8 +7,9 @@ namespace UnityEngine.Rendering class APVDefinitions { public static int probeIndexChunkSize = ProbeBrickIndex.kIndexChunkSize; - public static int probeMaxRegionCount = 4; + public const float probeValidityThreshold = 0.05f; + public static int probeMaxRegionCount = 4; public static Color32[] layerMaskColors = new Color32[] { new Color32(230, 159, 0, 255), new Color32(0, 158, 115, 255), @@ -41,29 +43,39 @@ public enum APVLeakReductionMode None = 0, /// /// The uvw used to sample APV data are warped to try to have invalid probe not contributing to lighting. 
- /// This only modifies the uvw used, but still sample a single time. It is effective when using rendering layers or in some situations (especially when occluding object contain probes inside) but ineffective in many other. + /// This samples APV a single time so it's a cheap option but will only work in the simplest cases. /// - ValidityBased = 1, + Performance = 1, /// - /// The uvw used to sample APV data are warped to try to have invalid probe not contributing to lighting. Also, a geometric weight based on normal at sampling position and vector to probes is used. - /// This only modifies the uvw used, but still sample a single time. It is effective in some situations (especially when occluding object contain probes inside) but ineffective in many other. + /// This option samples APV between 1 and 3 times to provide the smoothest result without introducing artifacts. + /// This is as expensive as Performance mode in the simplest cases, and is better and more expensive in the most complex cases. /// - ValidityAndNormalBased = 2, + Quality = 2, + /// + /// Obsolete, kept for migration. + /// + [Obsolete("Performance")] + ValidityBased = Performance, + /// + /// Obsolete, kept for migration. 
+ /// + [Obsolete("Quality")] + ValidityAndNormalBased = Quality, } [GenerateHLSL(needAccessors = false, generateCBuffer = true, constantRegister = (int)APVConstantBufferRegister.GlobalRegister)] internal unsafe struct ShaderVariablesProbeVolumes { - public Vector4 _Offset_IndirectionEntryDim; - public Vector4 _Weight_MinLoadedCellInEntries; + public Vector4 _Offset_LayerCount; + public Vector4 _MinLoadedCellInEntries_IndirectionEntryDim; + public Vector4 _MaxLoadedCellInEntries_RcpIndirectionEntryDim; public Vector4 _PoolDim_MinBrickSize; public Vector4 _RcpPoolDim_XY; public Vector4 _MinEntryPos_Noise; - public Vector4 _IndicesDim_FrameIndex; + public uint4 _EntryCount_X_XY_LeakReduction; public Vector4 _Biases_NormalizationClamp; - public Vector4 _LeakReduction_SkyOcclusion; - public Vector4 _MaxLoadedCellInEntries_LayerCount; + public Vector4 _FrameIndex_Weights; public uint4 _ProbeVolumeLayerMask; } } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ShaderVariablesProbeVolumes.cs.hlsl b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ShaderVariablesProbeVolumes.cs.hlsl index 48b10197867..db0dc9b94e5 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ShaderVariablesProbeVolumes.cs.hlsl +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ShaderVariablesProbeVolumes.cs.hlsl @@ -8,6 +8,8 @@ // UnityEngine.Rendering.APVLeakReductionMode: static fields // #define APVLEAKREDUCTIONMODE_NONE (0) +#define APVLEAKREDUCTIONMODE_PERFORMANCE (1) +#define APVLEAKREDUCTIONMODE_QUALITY (2) #define APVLEAKREDUCTIONMODE_VALIDITY_BASED (1) #define APVLEAKREDUCTIONMODE_VALIDITY_AND_NORMAL_BASED (2) @@ -15,20 +17,21 @@ // UnityEngine.Rendering.APVDefinitions: static fields // #define PROBE_INDEX_CHUNK_SIZE (243) +#define PROBE_VALIDITY_THRESHOLD (0.05) #define PROBE_MAX_REGION_COUNT (4) // Generated from UnityEngine.Rendering.ShaderVariablesProbeVolumes // PackingRules = 
Exact GLOBAL_CBUFFER_START(ShaderVariablesProbeVolumes, b6) - float4 _Offset_IndirectionEntryDim; - float4 _Weight_MinLoadedCellInEntries; + float4 _Offset_LayerCount; + float4 _MinLoadedCellInEntries_IndirectionEntryDim; + float4 _MaxLoadedCellInEntries_RcpIndirectionEntryDim; float4 _PoolDim_MinBrickSize; float4 _RcpPoolDim_XY; float4 _MinEntryPos_Noise; - float4 _IndicesDim_FrameIndex; + uint4 _EntryCount_X_XY_LeakReduction; float4 _Biases_NormalizationClamp; - float4 _LeakReduction_SkyOcclusion; - float4 _MaxLoadedCellInEntries_LayerCount; + float4 _FrameIndex_Weights; uint4 _ProbeVolumeLayerMask; CBUFFER_END diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/CompilerContextData.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/CompilerContextData.cs index 7a8a4d549c4..112ff0040a2 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/CompilerContextData.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/CompilerContextData.cs @@ -44,7 +44,7 @@ internal static int LastIndex(this ref NativeList list) where T : unmanage // Datastructure that contains passes and dependencies and allow you to iterate and reason on them more like a graph internal class CompilerContextData : IDisposable, RenderGraph.ICompiledGraph { - public CompilerContextData(int estimatedNumPasses, int estimatedNumResourcesPerType) + public CompilerContextData(int estimatedNumPasses) { passData = new NativeList(estimatedNumPasses, AllocatorManager.Persistent); fences = new Dictionary(); @@ -53,7 +53,7 @@ public CompilerContextData(int estimatedNumPasses, int estimatedNumResourcesPerT outputData = new NativeList(estimatedNumPasses * 2, AllocatorManager.Persistent); fragmentData = new NativeList(estimatedNumPasses * 4, AllocatorManager.Persistent); randomAccessResourceData = new NativeList(4, AllocatorManager.Persistent); // We assume not a lot of passes use random write - resources = new 
ResourcesData(estimatedNumResourcesPerType); + resources = new ResourcesData(); nativePassData = new NativeList(estimatedNumPasses, AllocatorManager.Persistent);// assume nothing gets merged nativeSubPassData = new NativeList(estimatedNumPasses, AllocatorManager.Persistent);// there should "never" be more subpasses than graph passes createData = new NativeList(estimatedNumPasses * 2, AllocatorManager.Persistent); // assume every pass creates two resources @@ -183,7 +183,7 @@ public bool AddToFragmentList(TextureAccess access, int listFirstIndex, int numI [MethodImpl(MethodImplOptions.AggressiveInlining)] public string GetResourceVersionedName(ResourceHandle h) => GetResourceName(h) + " V" + h.version; - + // resources can be added as fragment both as input and output so make sure not to add them twice (return true upon new addition) public bool AddToRandomAccessResourceList(ResourceHandle h, int randomWriteSlotIndex, bool preserveCounterValue, int listFirstIndex, int numItems) { diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/NativePassCompiler.Debug.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/NativePassCompiler.Debug.cs index 3d02a1c6664..abc9230810c 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/NativePassCompiler.Debug.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/NativePassCompiler.Debug.cs @@ -34,6 +34,7 @@ static RenderGraph.DebugData.PassData.NRPInfo.NativeRenderPassInfo.AttachmentInf return new RenderGraph.DebugData.PassData.NRPInfo.NativeRenderPassInfo.AttachmentInfo { resourceName = pointTo.GetName(ctx, attachment.handle), + attachmentIndex = attachmentIndex, loadAction = attachment.loadAction.ToString(), loadReason = loadReason, storeAction = attachment.storeAction.ToString(), @@ -78,7 +79,7 @@ internal static string MakePassMergeMessage(CompilerContextData ctx, in PassData message += "The next pass reads one of the 
outputs as a regular texture, the pass needs to break."; break; case PassBreakReason.NonRasterPass: - message += $"{prevPassName} is type {pass.type}. Only Raster passes can be merged."; + message += $"{prevPassName} is type {prevPass.type}. Only Raster passes can be merged."; break; case PassBreakReason.DifferentDepthTextures: message += $"{prevPassName} uses a different depth buffer than {passName}."; @@ -210,6 +211,7 @@ internal void GenerateNativeCompilerDebugData(ref RenderGraph.DebugData debugDat debugPass.type = passData.type; debugPass.culled = passData.culled; debugPass.async = passData.asyncCompute; + debugPass.nativeSubPassIndex = passData.nativeSubPassIndex; debugPass.generateDebugData = graphPass.generateDebugData; debugPass.resourceReadLists = new List[(int)RenderGraphResourceType.Count]; debugPass.resourceWriteLists = new List[(int)RenderGraphResourceType.Count]; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/NativePassCompiler.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/NativePassCompiler.cs index f6d3aaa549e..f9e39250805 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/NativePassCompiler.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/NativePassCompiler.cs @@ -26,7 +26,6 @@ internal struct RenderGraphInputInfo RenderGraphCompilationCache m_CompilationCache; internal const int k_EstimatedPassCount = 100; - internal const int k_EstimatedResourceCountPerType = 50; internal const int k_MaxSubpass = 8; // Needs to match with RenderPassSetup.h NativeList m_BeginRenderPassAttachments; @@ -34,7 +33,7 @@ internal struct RenderGraphInputInfo public NativePassCompiler(RenderGraphCompilationCache cache) { m_CompilationCache = cache; - defaultContextData = new CompilerContextData(k_EstimatedPassCount, k_EstimatedResourceCountPerType); + defaultContextData = new CompilerContextData(k_EstimatedPassCount); toVisitPassIds = new 
Stack(k_EstimatedPassCount); m_BeginRenderPassAttachments = new NativeList(FixedAttachmentArray.MaxAttachments, Allocator.Persistent); } @@ -1013,9 +1012,20 @@ void PrepareNativeRenderPass(ref NativePassData nativePass) // Partial writes will register themselves as readers so this should be adequate foreach (ref readonly var reader in contextData.Readers(fragment.resource)) { - bool isFragment = contextData.passData.ElementAt(reader.passId).IsUsedAsFragment(fragment.resource, contextData); + ref var readerPass = ref contextData.passData.ElementAt(reader.passId); + bool isFragment = readerPass.IsUsedAsFragment(fragment.resource, contextData); + + // Unsafe pass - we cannot know how it is used, so we need to both store and resolve + if (readerPass.type == RenderGraphPassType.Unsafe) + { + needsMSAASamples = true; + needsResolvedData = true; + msaaUserPassID = reader.passId; + userPassID = reader.passId; + break; + } // A fragment attachment use we need the msaa samples - if (isFragment) + else if (isFragment) { needsMSAASamples = true; msaaUserPassID = reader.passId; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/ResourcesData.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/ResourcesData.cs index 5ae11571d2c..a6e6e1f1245 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/ResourcesData.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/ResourcesData.cs @@ -198,7 +198,7 @@ internal class ResourcesData public DynamicArray[] resourceNames; - public ResourcesData(int estimatedNumResourcesPerType) + public ResourcesData() { unversionedData = new NativeList[(int)RenderGraphResourceType.Count]; versionedData = new NativeList[(int)RenderGraphResourceType.Count]; @@ -207,10 +207,12 @@ public ResourcesData(int estimatedNumResourcesPerType) for (int t = 0; t < (int)RenderGraphResourceType.Count; t++) { - versionedData[t] = new NativeList(MaxVersions * 
estimatedNumResourcesPerType, AllocatorManager.Persistent); - unversionedData[t] = new NativeList(estimatedNumResourcesPerType, AllocatorManager.Persistent); - readerData[t] = new NativeList(MaxVersions * estimatedNumResourcesPerType * MaxReaders, AllocatorManager.Persistent); - resourceNames[t] = new DynamicArray(estimatedNumResourcesPerType); // T in NativeList cannot contain managed types, so the names are stored separately + // Note: All these lists are allocated with zero capacity, they will be resized in Initialize when + // the amount of resources is known. + versionedData[t] = new NativeList(0, AllocatorManager.Persistent); + unversionedData[t] = new NativeList(0, AllocatorManager.Persistent); + readerData[t] = new NativeList(0, AllocatorManager.Persistent); + resourceNames[t] = new DynamicArray(0); // T in NativeList cannot contain managed types, so the names are stored separately } } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.DebugData.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.DebugData.cs index 387bec53594..3dd5b98652a 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.DebugData.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.DebugData.cs @@ -48,6 +48,9 @@ public struct PassData // Whether the pass is an async compute pass. public bool async; + // Native subpass index. + public int nativeSubPassIndex; + // Index of the pass that needs to be waited for. 
public int syncToPassIndex; @@ -65,6 +68,7 @@ public class NativeRenderPassInfo public class AttachmentInfo { public string resourceName; + public int attachmentIndex; public string loadAction; public string loadReason; public string storeAction; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.cs index a6e162180a7..2af5663e39a 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.cs @@ -1224,6 +1224,11 @@ internal void Execute() m_Resources.BeginExecute(m_CurrentFrameIndex); +#if UNITY_EDITOR + // Feeding Render Graph Viewer before resource deallocation at pass execution + GenerateDebugData(); +#endif + if (nativeRenderPassesEnabled) ExecuteNativeRenderGraph(); else @@ -1249,8 +1254,6 @@ internal void Execute() } finally { - GenerateDebugData(); - if (m_DebugParameters.immediateMode) ReleaseImmediateModeResources(); @@ -2547,7 +2550,7 @@ void CleanupDebugData() m_DebugData.Clear(); } -#endregion + #endregion Dictionary registeredGlobals = new Dictionary(); diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphCompilationCache.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphCompilationCache.cs index 459f180400c..d32a0baeea5 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphCompilationCache.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphCompilationCache.cs @@ -38,7 +38,7 @@ public RenderGraphCompilationCache() for (int i = 0; i < k_CachedGraphCount; ++i) { m_CompiledGraphPool.Push(new RenderGraph.CompiledGraph()); - m_NativeCompiledGraphPool.Push(new CompilerContextData(NativePassCompiler.k_EstimatedPassCount, NativePassCompiler.k_EstimatedResourceCountPerType)); + m_NativeCompiledGraphPool.Push(new 
CompilerContextData(NativePassCompiler.k_EstimatedPassCount)); } } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphResourceRegistry.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphResourceRegistry.cs index 8dae1d4ccdf..a7260ba8ac9 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphResourceRegistry.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphResourceRegistry.cs @@ -778,7 +778,7 @@ internal void GetRenderTargetInfo(in ResourceHandle res, out RenderTargetInfo ou else { // Managed by rendergraph, it might not be created yet so we look at the desc to find out - var desc = GetTextureResourceDesc(res, true); // TODO: remove true, we should throw on invalid desc here + var desc = GetTextureResourceDesc(res); var dim = desc.CalculateFinalDimensions(); outInfo = new RenderTargetInfo(); outInfo.width = dim.x; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Utilities/CoreUtils.cs b/Packages/com.unity.render-pipelines.core/Runtime/Utilities/CoreUtils.cs index c85ae85e671..a33785728b2 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Utilities/CoreUtils.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Utilities/CoreUtils.cs @@ -1148,6 +1148,22 @@ public static void SetKeyword(CommandBuffer cmd, string keyword, bool state) cmd.DisableShaderKeyword(keyword); } + /// + /// Set a local keyword on a ComputeShader using a CommandBuffer + /// + /// CommandBuffer on which to set the global keyword. + /// Compute Shader on which to set the keyword. + /// Keyword to be set. + /// Value of the keyword to be set. 
+ public static void SetKeyword(CommandBuffer cmd, ComputeShader cs, string keyword, bool state) + { + var kw = new LocalKeyword(cs, keyword); + if (state) + cmd.EnableKeyword(cs, kw); + else + cmd.DisableKeyword(cs, kw); + } + /// /// Set a global keyword using a RasterCommandBuffer /// @@ -1162,7 +1178,7 @@ public static void SetKeyword(BaseCommandBuffer cmd, string keyword, bool state) cmd.m_WrappedCommandBuffer.DisableShaderKeyword(keyword); } - // Caution: such a call should not be use interlaced with command buffer command, as it is immediate + // Caution: such a call should not be use interleaved with command buffer command, as it is immediate /// /// Set a keyword immediately on a Material. /// @@ -1177,7 +1193,7 @@ public static void SetKeyword(Material material, string keyword, bool state) material.DisableKeyword(keyword); } - // Caution: such a call should not be use interlaced with command buffer command, as it is immediate + // Caution: such a call should not be use interleaved with command buffer command, as it is immediate /// /// Set a keyword immediately on a Material. /// @@ -1192,8 +1208,9 @@ public static void SetKeyword(Material material, LocalKeyword keyword, bool stat material.DisableKeyword(keyword); } + // Caution: such a call should not be use interleaved with command buffer command, as it is immediate /// - /// Set a keyword to a compute shader + /// Set a keyword immediately on a compute shader /// /// Compute Shader on which to set the keyword. /// Keyword to be set. 
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRLayout.cs b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRLayout.cs index e3a155d0bbd..ce4fc8a2087 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRLayout.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRLayout.cs @@ -1,6 +1,5 @@ using System.Collections.Generic; using System.Text; -using UnityEngine.Rendering; namespace UnityEngine.Experimental.Rendering { @@ -28,7 +27,7 @@ public void AddCamera(Camera camera, bool enableXR) if (XRSystem.displayActive && xrSupported) { XRSystem.SetDisplayZRange(camera.nearClipPlane, camera.farClipPlane); - XRSystem.CreateDefaultLayout(camera); + XRSystem.CreateDefaultLayout(camera, this); } else { diff --git a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRLayoutStack.cs b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRLayoutStack.cs new file mode 100644 index 00000000000..6e07b1f6b47 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRLayoutStack.cs @@ -0,0 +1,35 @@ +using System; +using System.Collections.Generic; +using UnityEngine.Pool; + +namespace UnityEngine.Experimental.Rendering +{ + internal class XRLayoutStack : IDisposable + { + readonly Stack m_Stack = new (); + + public XRLayout New() + { + GenericPool.Get(out var layout); + m_Stack.Push(layout); + return layout; + } + + public XRLayout top => m_Stack.Peek(); + + public void Release() + { + if (!m_Stack.TryPop(out var value)) + throw new InvalidOperationException($"Calling {nameof(Release)} without calling {nameof(New)} first."); + + value.Clear(); + GenericPool.Release(value); + } + + public void Dispose() + { + if (m_Stack.Count != 0) + throw new Exception($"Stack is not empty. 
Did you skip a call to {nameof(Release)}?"); + } + } +} diff --git a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRLayoutStack.cs.meta b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRLayoutStack.cs.meta new file mode 100644 index 00000000000..75940c16a6d --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRLayoutStack.cs.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: d3562354edd44e76aa9d77b720395854 +timeCreated: 1714053686 \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRPass.cs b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRPass.cs index 4be97ed6a86..2766826a7d7 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRPass.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRPass.cs @@ -1,5 +1,6 @@ using System; using System.Collections.Generic; +using UnityEditor; using UnityEngine.Rendering; namespace UnityEngine.Experimental.Rendering @@ -94,6 +95,16 @@ public bool supportsFoveatedRendering /// public bool copyDepth { get; private set; } + /// + /// If true, is the first pass of a xr camera + /// + public bool isFirstCameraPass => multipassId == 0; + + /// + /// If true, is the last pass of a xr camera + /// + public bool isLastCameraPass => (multipassId == 0 && viewCount <= 1) || (multipassId == 1 && viewCount > 1); + /// /// Index of the pass inside the frame. 
/// diff --git a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRSystem.cs b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRSystem.cs index 255df0403d3..bc9730e5ddc 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/XR/XRSystem.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/XR/XRSystem.cs @@ -25,7 +25,7 @@ public static class SinglepassKeywords public static class XRSystem { // Keep track of only one XR layout - static XRLayout s_Layout = new XRLayout(); + static XRLayoutStack s_Layout = new (); // Delegate allocations of XRPass to the render pipeline static Func s_PassAllocator = null; @@ -226,6 +226,22 @@ public static void SetRenderScale(float renderScale) #endif } + + /// + /// Used by the render pipeline to retrieve the renderViewportScale value from the XR display. + /// One use case for retriving this value is that render pipeline can properly sync some SRP owned textures to scale accordingly + /// + /// Returns current scaleOfAllViewports value from the XRDisplaySubsystem. + public static float GetRenderViewportScale() + { +#if ENABLE_VR && ENABLE_XR_MODULE + + return s_Display.scaleOfAllViewports; +#else + return 1.0f; +#endif + } + /// /// Used by the render pipeline to initiate a new rendering frame through a XR layout. /// @@ -233,14 +249,7 @@ public static void SetRenderScale(float renderScale) public static XRLayout NewLayout() { RefreshDeviceInfo(); - - if (s_Layout.GetActivePasses().Count > 0) - { - Debug.LogWarning("Render Pipeline error : the XR layout still contains active passes. 
Executing XRSystem.EndLayout() right now."); - EndLayout(); - } - - return s_Layout; + return s_Layout.New(); } /// @@ -249,9 +258,9 @@ public static XRLayout NewLayout() public static void EndLayout() { if (dumpDebugInfo) - s_Layout.LogDebugInfo(); + s_Layout.top.LogDebugInfo(); - s_Layout.Clear(); + s_Layout.Release(); } /// @@ -340,45 +349,49 @@ static void RefreshDeviceInfo() } // Setup the layout to use multi-pass or single-pass based on the runtime caps - internal static void CreateDefaultLayout(Camera camera) + internal static void CreateDefaultLayout(Camera camera, XRLayout layout) { #if ENABLE_VR && ENABLE_XR_MODULE if (s_Display == null) throw new NullReferenceException(nameof(s_Display)); + void AddViewToPass(XRPass xrPass, XRDisplaySubsystem.XRRenderPass renderPass, int renderParamIndex) + { + renderPass.GetRenderParameter(camera, renderParamIndex, out var renderParam); + xrPass.AddView(BuildView(renderPass, renderParam)); + } + for (int renderPassIndex = 0; renderPassIndex < s_Display.GetRenderPassCount(); ++renderPassIndex) { s_Display.GetRenderPass(renderPassIndex, out var renderPass); s_Display.GetCullingParameters(camera, renderPass.cullingPassIndex, out var cullingParams); + int renderParameterCount = renderPass.GetRenderParameterCount(); if (CanUseSinglePass(camera, renderPass)) { - var xrPass = s_PassAllocator(BuildPass(renderPass, cullingParams)); + var createInfo = BuildPass(renderPass, cullingParams, layout); + var xrPass = s_PassAllocator(createInfo); - for (int renderParamIndex = 0; renderParamIndex < renderPass.GetRenderParameterCount(); ++renderParamIndex) + for (int renderParamIndex = 0; renderParamIndex < renderParameterCount; ++renderParamIndex) { - renderPass.GetRenderParameter(camera, renderParamIndex, out var renderParam); - xrPass.AddView(BuildView(renderPass, renderParam)); + AddViewToPass(xrPass, renderPass, renderParamIndex); } - s_Layout.AddPass(camera, xrPass); + layout.AddPass(camera, xrPass); } else { - for (int 
renderParamIndex = 0; renderParamIndex < renderPass.GetRenderParameterCount(); ++renderParamIndex) + for (int renderParamIndex = 0; renderParamIndex < renderParameterCount; ++renderParamIndex) { - renderPass.GetRenderParameter(camera, renderParamIndex, out var renderParam); - - var xrPass = s_PassAllocator(BuildPass(renderPass, cullingParams)); - xrPass.AddView(BuildView(renderPass, renderParam)); - - s_Layout.AddPass(camera, xrPass); + var createInfo = BuildPass(renderPass, cullingParams, layout); + var xrPass = s_PassAllocator(createInfo); + AddViewToPass(xrPass, renderPass, renderParamIndex); + layout.AddPass(camera, xrPass); } } } - if (s_LayoutOverride != null) - s_LayoutOverride.Invoke(s_Layout, camera); + s_LayoutOverride?.Invoke(layout, camera); #endif } @@ -400,8 +413,7 @@ internal static void ReconfigurePass(XRPass xrPass, Camera camera) xrPass.AssignView(renderParamIndex, BuildView(renderPass, renderParam)); } - if (s_LayoutOverride != null) - s_LayoutOverride.Invoke(s_Layout, camera); + s_LayoutOverride?.Invoke(s_Layout.top, camera); } #endif } @@ -445,7 +457,7 @@ static XRView BuildView(XRDisplaySubsystem.XRRenderPass renderPass, XRDisplaySub return new XRView(renderParameter.projection, renderParameter.view, viewport, occlusionMesh, renderParameter.textureArraySlice); } - static XRPassCreateInfo BuildPass(XRDisplaySubsystem.XRRenderPass xrRenderPass, ScriptableCullingParameters cullingParameters) + static XRPassCreateInfo BuildPass(XRDisplaySubsystem.XRRenderPass xrRenderPass, ScriptableCullingParameters cullingParameters, XRLayout layout) { // We can't use descriptor directly because y-flip is forced // XRTODO : fix root problem @@ -455,7 +467,7 @@ static XRPassCreateInfo BuildPass(XRDisplaySubsystem.XRRenderPass xrRenderPass, rtDesc.volumeDepth = xrRenderPass.renderTargetDesc.volumeDepth; rtDesc.vrUsage = xrRenderPass.renderTargetDesc.vrUsage; rtDesc.sRGB = xrRenderPass.renderTargetDesc.sRGB; - + XRPassCreateInfo passInfo = new XRPassCreateInfo { 
renderTarget = xrRenderPass.renderTarget, @@ -464,7 +476,7 @@ static XRPassCreateInfo BuildPass(XRDisplaySubsystem.XRRenderPass xrRenderPass, occlusionMeshMaterial = s_OcclusionMeshMaterial, occlusionMeshScale = GetOcclusionMeshScale(), foveatedRenderingInfo = xrRenderPass.foveatedRenderingInfo, - multipassId = s_Layout.GetActivePasses().Count, + multipassId = layout.GetActivePasses().Count, cullingPassId = xrRenderPass.cullingPassIndex, copyDepth = xrRenderPass.shouldFillOutDepth, xrSdkRenderPass = xrRenderPass diff --git a/Packages/com.unity.render-pipelines.core/ShaderLibrary/Packing.hlsl b/Packages/com.unity.render-pipelines.core/ShaderLibrary/Packing.hlsl index 6f0aa84c853..456d92b1523 100644 --- a/Packages/com.unity.render-pipelines.core/ShaderLibrary/Packing.hlsl +++ b/Packages/com.unity.render-pipelines.core/ShaderLibrary/Packing.hlsl @@ -54,7 +54,7 @@ real3 UnpackNormalOctRectEncode(real2 f) // Ref: http://jcgt.org/published/0003/02/01/paper.pdf "A Survey of Efficient Representations for Independent Unit Vectors" // Encode with Oct, this function work with any size of output // return float between [-1, 1] -real2 PackNormalOctQuadEncode(float3 n) +float2 PackNormalOctQuadEncode(float3 n) { //float l1norm = dot(abs(n), 1.0); //float2 res0 = n.xy * (1.0 / l1norm); @@ -64,20 +64,21 @@ real2 PackNormalOctQuadEncode(float3 n) // Optimized version of above code: n *= rcp(max(dot(abs(n), 1.0), 1e-6)); - real t = saturate(-n.z); - return n.xy + real2(n.x >= 0.0 ? t : -t, n.y >= 0.0 ? t : -t); + float t = saturate(-n.z); + return n.xy + float2(n.x >= 0.0 ? t : -t, n.y >= 0.0 ? t : -t); } -real3 UnpackNormalOctQuadEncode(real2 f) +float3 UnpackNormalOctQuadEncode(float2 f) { - real3 n = real3(f.x, f.y, 1.0 - abs(f.x) - abs(f.y)); + // NOTE: Do NOT use abs() in this line. It causes miscompilations. (UUM-62216, UUM-70600) + float3 n = float3(f.x, f.y, 1.0 - (f.x < 0 ? -f.x : f.x) - (f.y < 0 ? 
-f.y : f.y)); //float2 val = 1.0 - abs(n.yx); //n.xy = (n.zz < float2(0.0, 0.0) ? (n.xy >= 0.0 ? val : -val) : n.xy); // Optimized version of above code: - real t = max(-n.z, 0.0); - n.xy += real2(n.x >= 0.0 ? -t : t, n.y >= 0.0 ? -t : t); + float t = max(-n.z, 0.0); + n.xy += float2(n.x >= 0.0 ? -t : t, n.y >= 0.0 ? -t : t); return normalize(n); } diff --git a/Packages/com.unity.render-pipelines.core/Tests/Editor/GPUDriven/GPUDrivenRenderingTests.cs b/Packages/com.unity.render-pipelines.core/Tests/Editor/GPUDriven/GPUDrivenRenderingTests.cs index 2bdd743a31d..c041289ac11 100644 --- a/Packages/com.unity.render-pipelines.core/Tests/Editor/GPUDriven/GPUDrivenRenderingTests.cs +++ b/Packages/com.unity.render-pipelines.core/Tests/Editor/GPUDriven/GPUDrivenRenderingTests.cs @@ -230,6 +230,7 @@ public void TestInstanceCullingTier0() } [Test, ConditionalIgnore("IgnoreGfxAPI", "Graphics API Not Supported.")] + [Ignore("Disabled for Instability https://jira.unity3d.com/browse/UUM-71039")] public void TestSceneViewHiddenRenderersCullingTier0() { var go = GameObject.CreatePrimitive(PrimitiveType.Cube); diff --git a/Packages/com.unity.render-pipelines.core/Tests/Editor/XR.meta b/Packages/com.unity.render-pipelines.core/Tests/Editor/XR.meta new file mode 100644 index 00000000000..0d1f82cb57d --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Tests/Editor/XR.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: 6c39937ba9ca4a3d9bfd4613712ac88e +timeCreated: 1714054759 \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRLayoutStackTests.cs b/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRLayoutStackTests.cs new file mode 100644 index 00000000000..23ab0237d99 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRLayoutStackTests.cs @@ -0,0 +1,99 @@ +using System; +using System.Collections.Generic; +using System.Runtime.InteropServices; +using NUnit.Framework; +using 
UnityEngine.Experimental.Rendering; + +namespace UnityEngine.Rendering.Experimental.Tests.XR +{ + [TestFixture] + class XRLayoutStackTests + { + private XRLayoutStack m_StackTest = new (); + + [TearDown] + public void TearDown() + { + m_StackTest.Dispose(); + } + + [Test] + public void New_ReturnsNonNullObject() + { + var layout = m_StackTest.New(); + Assert.NotNull(layout); + m_StackTest.Release(); + } + + [Test] + public void Top_AfterNew_ReturnsCorrectObject() + { + var layout = m_StackTest.New(); + Assert.AreEqual(layout, m_StackTest.top); + m_StackTest.Release(); + } + + [Test] + public void NewNTimes_ReturnsTheTopToTheLatestElement() + { + var layouts = new List(); + + const int k_Iterations = 5; + + // Creating instances and adding them to the list + for (int i = 0; i < k_Iterations; i++) + { + layouts.Add(m_StackTest.New()); + } + + // Releasing instances and validating + for (int i = k_Iterations - 1; i >= 0; i--) + { + Assert.AreEqual(layouts[i], m_StackTest.top); + m_StackTest.Release(); + } + + Top_WithoutNew_ThrowsException(); + } + + [Test] + public void Top_WithoutNew_ThrowsException() + { + Assert.Throws(() => + { + var top = m_StackTest.top; + }); + } + + [Test] + public void Release_WithoutNew_ThrowsException() + { + Assert.Throws(m_StackTest.Release); + } + + [Test] + public void Dispose_WithoutRelease_ThrowsException() + { + m_StackTest.New(); + Assert.Throws(m_StackTest.Dispose); + m_StackTest.Release(); + } + + [Test] + public void CheckStackBetweenFramesReturnsTheSameXRLayout() + { + var stack = m_StackTest.New(); + m_StackTest.Release(); + + const int k_Iterations = 5; + + // Creating instances and adding them to the list + for (int i = 0; i < k_Iterations; i++) + { + m_StackTest.New(); + Assert.AreEqual(stack, m_StackTest.top); + m_StackTest.Release(); + } + } + } +} diff --git a/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRLayoutStackTests.cs.meta 
b/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRLayoutStackTests.cs.meta new file mode 100644 index 00000000000..aa06a43b2b8 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRLayoutStackTests.cs.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: eb4f16cc2f3149d9b6039f08ec44aefd +timeCreated: 1714054770 \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRLayoutTests.cs b/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRLayoutTests.cs new file mode 100644 index 00000000000..a0100e30be1 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRLayoutTests.cs @@ -0,0 +1,112 @@ +using NUnit.Framework; +using System.Collections.Generic; +using UnityEngine.Experimental.Rendering; +using UnityEngine.XR; + +namespace UnityEngine.Rendering.Experimental.Tests.XR +{ + [TestFixture] + class XRLayoutTests + { + private XRDisplaySubsystem m_CurrentSubsystem; + private Camera m_Camera; + private XRLayout m_LayoutTest = new (); + + [SetUp] + public void Setup() + { + var go = new GameObject(nameof(XRLayoutTests)); + m_Camera = go.AddComponent(); + } + + [TearDown] + public void TearDown() + { + m_LayoutTest.Clear(); + + Object.DestroyImmediate(m_Camera.gameObject); + + Assert.IsEmpty(m_LayoutTest.GetActivePasses()); + } + + [Test] + public void EmptyPassAreAdded() + { + const int k_Iterations = 5; + for (int i = 0; i < k_Iterations; ++i) + { + m_LayoutTest.AddCamera(m_Camera, false); + } + + Assert.AreEqual(k_Iterations, m_LayoutTest.GetActivePasses().Count); + + foreach (var pass in m_LayoutTest.GetActivePasses()) + { + Assert.AreEqual(m_Camera, pass.Item1); + Assert.AreEqual(XRSystem.emptyPass, pass.Item2); + } + } + + public static IEnumerable s_TestCasesMultiPass + { + get + { + yield return new TestCaseData(0, 0, 0); + yield return new TestCaseData(1, 0, 0); + yield return new TestCaseData(1, 1, 1); + yield return new TestCaseData(2, 1, 2); + yield return new 
TestCaseData(3, 2, 6); + yield return new TestCaseData(20, 2, 40); + } + } + + [Test] + [TestCaseSource(nameof(s_TestCasesMultiPass))] + public void CreateDefaultLayoutMockMultipass(int renderPassCount, int renderParameterCount, int expectedActivePassesCount) + { + Assert.AreEqual(expectedActivePassesCount, renderPassCount * renderParameterCount); + + for (int renderPassIndex = 0; renderPassIndex < renderPassCount; ++renderPassIndex) + { + for (int renderParamIndex = 0; renderParamIndex < renderParameterCount; ++renderParamIndex) + { + m_LayoutTest.AddPass(m_Camera, new XRPass()); + } + } + + Assert.AreEqual(expectedActivePassesCount, m_LayoutTest.GetActivePasses().Count); + } + + public static IEnumerable s_TestCasesSinglePass + { + get + { + yield return new TestCaseData(0, 0, 0); + yield return new TestCaseData(1, 0, 1); + yield return new TestCaseData(1, 1, 1); + yield return new TestCaseData(2, 1, 2); + yield return new TestCaseData(3, 1, 3); + yield return new TestCaseData(20, 1, 20); + } + } + + [Test] + [TestCaseSource(nameof(s_TestCasesSinglePass))] + public void CreateDefaultLayoutMockSinglepass(int renderPassCount, int renderParameterCount, int expectedActivePassesCount) + { + for (int renderPassIndex = 0; renderPassIndex < renderPassCount; ++renderPassIndex) + { + var xrPass = new XRPass(); + + for (int renderParamIndex = 0; renderParamIndex < renderParameterCount; ++renderParamIndex) + { + xrPass.AddView(new XRView()); + } + + m_LayoutTest.AddPass(m_Camera, xrPass); + } + + Assert.AreEqual(expectedActivePassesCount, m_LayoutTest.GetActivePasses().Count); + } + } +} diff --git a/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRLayoutTests.cs.meta b/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRLayoutTests.cs.meta new file mode 100644 index 00000000000..ce229f04ceb --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRLayoutTests.cs.meta @@ -0,0 +1,2 @@ +fileFormatVersion: 2 +guid: 
0962482855590ee4cbfb66b12d3cf1ad \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRPassTests.cs b/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRPassTests.cs new file mode 100644 index 00000000000..e0f73077289 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRPassTests.cs @@ -0,0 +1,18 @@ +using NUnit.Framework; +using System.Collections.Generic; +using UnityEngine.Experimental.Rendering; +using UnityEngine.XR; + +namespace UnityEngine.Rendering.Experimental.Tests.XR +{ + [TestFixture] + class XRPassTests + { + [Test] + public void EmptyPass_IsFirstAndLastPass() + { + Assert.IsTrue(XRSystem.emptyPass.isFirstCameraPass); + Assert.IsTrue(XRSystem.emptyPass.isLastCameraPass); + } + } +} diff --git a/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRPassTests.cs.meta b/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRPassTests.cs.meta new file mode 100644 index 00000000000..a229cf29789 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Tests/Editor/XR/XRPassTests.cs.meta @@ -0,0 +1,2 @@ +fileFormatVersion: 2 +guid: 8164fee9fb17dfb41ab04fb4f5837794 \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.core/Tests/Runtime/Threading/.buginfo b/Packages/com.unity.render-pipelines.core/Tests/Runtime/Threading/.buginfo deleted file mode 100644 index d4fba8df287..00000000000 --- a/Packages/com.unity.render-pipelines.core/Tests/Runtime/Threading/.buginfo +++ /dev/null @@ -1 +0,0 @@ -area: HD RP \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.core/package.json b/Packages/com.unity.render-pipelines.core/package.json index 1b4512afa3c..4a0754b5288 100644 --- a/Packages/com.unity.render-pipelines.core/package.json +++ b/Packages/com.unity.render-pipelines.core/package.json @@ -5,12 +5,13 @@ "unity": "6000.0", "displayName": "Core RP Library", "dependencies": { - "com.unity.mathematics": "1.2.6", + 
"com.unity.burst": "1.8.14", + "com.unity.mathematics": "1.3.2", "com.unity.ugui": "2.0.0", - "com.unity.collections": "2.2.0", + "com.unity.collections": "2.4.1", "com.unity.modules.physics": "1.0.0", "com.unity.modules.terrain": "1.0.0", "com.unity.modules.jsonserialize": "1.0.0", "com.unity.rendering.light-transport": "1.0.0" } -} \ No newline at end of file +} diff --git a/Packages/com.unity.render-pipelines.high-definition-config/.buginfo b/Packages/com.unity.render-pipelines.high-definition-config/.buginfo new file mode 100644 index 00000000000..aa7f21cd9cd --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition-config/.buginfo @@ -0,0 +1 @@ +area: SRP Architecture & API diff --git a/Packages/com.unity.render-pipelines.high-definition/.buginfo b/Packages/com.unity.render-pipelines.high-definition/.buginfo index 9b282103beb..6b6b1d34448 100644 --- a/Packages/com.unity.render-pipelines.high-definition/.buginfo +++ b/Packages/com.unity.render-pipelines.high-definition/.buginfo @@ -1,5 +1,5 @@ old: - area: HD RP + area: SRP Architecture & API Workflow: when: diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Authoring-LUTs.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Authoring-LUTs.md index 6b5580cd230..3b15f49935b 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Authoring-LUTs.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Authoring-LUTs.md @@ -1,6 +1,7 @@ # Lookup textures You can use lookup textures (LUTs) in the High Definition Render Pipeline (HDRP) . To create a LUT, use external software. 
+ |Page|Description| |-|-| |[Understand LUTs](rendering-luts-understand.md)|Learn about LUTs in HDRP.| diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDRP-Asset.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDRP-Asset.md index 13f455552e6..f4b4934a2eb 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDRP-Asset.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDRP-Asset.md @@ -6,25 +6,28 @@ Unity only allocates memory and builds shader variants for features you enable i ## Rendering -| **Property** | **Description** | -|------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| **Color Buffer Format** | The format of the color buffer that HDRP will use for rendering, using R16G16B16A16 instead of R11G11B10 will double the memory usage but help you to avoid banding. R16G16B16A16 is also required for [Alpha-Output](Alpha-Output.md). | -| **Lit Shader Mode** | Use the drop-down to choose which mode HDRP uses for the [Lit Shader](lit-material.md).
• **Forward Only**: forces HDRP to only use forward rendering for Lit Shaders.
• **Deferred Only**: forces HDRP to use deferred rendering for Lit Shaders (HDRP still renders advanced Materials using forward rendering).
• **Both**: allows the Camera to use deferred and forward rendering.

Select **Both** to allow you to switch between forward and deferred rendering for Lit Shaders at runtime per Camera. Selecting a specific mode reduces build time and Shader memory because HDRP requires less Shader variants, but it is not possible to switch from one mode to the other at runtime. | -| **- Multisample Anti-aliasing Quality** | Use the drop-down to set the number of samples HDRP uses for multisample anti-aliasing (MSAA). The larger the sample count, the better the quality. Select **None** to disable MSAA.
This property is only visible when **Lit Shader Mode** is set to **Forward Only** or **Both**. | -| **Motion Vectors** | Enable the checkbox to make HDRP support motion vectors. HDRP uses motion vectors for effects like screen space reflection (SSR) and motion blur. When disabled, motion blur has no effect and HDRP calculates SSR with lower quality. | -| **Runtime Debug Display** | Enable the checkbox to make HDRP able to use debug modes from the [Rendering Debugger](use-the-rendering-debugger.md) at runtime. Disable the checkbox to reduce build time and shader memory. This disables the following debug modes: All material property debug modes except GBuffer debug, the various property override options, and all the lighting debug modes. | -| **Runtime AOV API** | Enable the checkbox to make HDRP able to use the AOV API (rendering of material properties and lighting modes) at runtime. Disable this checkbox to reduce build time and shader memory. This disables the following AOV modes: All material properties and lighting modes. | | -| **Terrain Hole** | Enable the checkbox to make HDRP support [Terrain Holes](https://docs.unity3d.com/2019.3/Documentation/Manual/terrain-PaintHoles.html). If you do not enable this, Terrain Holes are not visible in your Scene. | -| **Transparent Backface** | Enable the checkbox to make HDRP support transparent back-face render passes. If your Unity Project does not need to make a transparent back-face pass, disable this checkbox to reduce build time. | -| **Transparent Depth Prepass** | Enable the checkbox to make HDRP support transparent depth render prepasses. If your Unity Project does not need to make a transparent depth prepass, disable this checkbox to reduce build time . | -| **Transparent Depth Postpass** | Enable the checkbox to make HDRP support transparent depth render postpasses. If your Unity Project does not make use of a transparent depth postpass. Uncheck this checkbox to reduce build time . 
| -| **Custom Pass** | Enable the checkbox to make HDRP support custom passes. If your Unity Project does not make use [Custom Passes](Custom-Pass.md), Uncheck this checkbox to save memory . | -| - **Custom Buffer Format** | Specify the texture format for the custom buffer. If you experience banding issues due to your custom passes, you can change it to either `R11G11B10` if you don't need alpha or `R16G16B16A16`. | -| **Realtime Raytracing (Preview)** | Enable the checkbox to enable HDRP realtime ray tracing (Preview). It requires to have ray tracing compatible hardware. For more information, please refer to the [Ray Tracing Getting Started](Ray-Tracing-Getting-Started.md) page. | -| **Visual Effects Ray Tracing (Preview)** | Enable the checkbox to make HDRP support ray tracing with Visual Effects. **Realtime Raytracing (Preview)** must be enabled. | -| **Supported Ray Tracing Mode (Preview)** | Select the supported modes for ray tracing effects (Performance, Quality or Both). For more information, see the [Ray Tracing Getting Started](Ray-Tracing-Getting-Started.md) page. | -| - **LOD Bias** | Set the value that Cameras use to calculate their LOD bias. The Camera uses this value differently depending on the **LOD Bias Mode** you select. | -| - **Maximum LOD Level** | Set the value that Cameras use to calculate their maximum level of detail. The Camera uses this value differently depending on the **Maximum LOD Level Mode** you select. | +| **Property** | **Sub-property** | **Description** | +|-|-|-| +| **Color Buffer Format** || The format of the color buffer that HDRP uses for rendering. Using R16G16B16A16 instead of R11G11B10 doubles the memory usage, but helps avoid banding. R16G16B16A16 is also required for [Alpha-Output](Alpha-Output.md). | +| **Lit Shader Mode** || Use the drop-down to choose which mode HDRP uses for the [Lit Shader](lit-material.md).
• **Forward Only**: forces HDRP to only use forward rendering for Lit Shaders.
• **Deferred Only**: forces HDRP to use deferred rendering for Lit Shaders (HDRP still renders advanced Materials using forward rendering).
• **Both**: allows the Camera to use deferred and forward rendering.

Select **Both** to allow you to switch between forward and deferred rendering for Lit Shaders at runtime per Camera. Selecting a specific mode reduces build time and Shader memory because HDRP requires less Shader variants, but it is not possible to switch from one mode to the other at runtime. | +| **- Multisample Anti-aliasing Quality** || Use the drop-down to set the number of samples HDRP uses for multisample anti-aliasing (MSAA). The larger the sample count, the better the quality. Select **None** to disable MSAA.
This property is only visible when **Lit Shader Mode** is set to **Forward Only** or **Both**. | +| **Motion Vectors** || Enable the checkbox to enable motion vector support in HDRP. HDRP uses motion vectors for effects like screen space reflection (SSR) and motion blur. When disabled, motion blur has no effect and HDRP calculates SSR with lower quality. | +| **Runtime Debug Display** || Enable the checkbox to enable HDRP to use debug modes from the [Rendering Debugger](use-the-rendering-debugger.md) at runtime. Disable the checkbox to reduce build time and shader memory. This disables all property override options, all lighting debug modes, and all material property debug modes except GBuffer debug. | +| **Runtime AOV API** || Enable the checkbox to enable HDRP to use the AOV API (rendering of material properties and lighting modes) at runtime. Disable this checkbox to reduce build time and shader memory. This disables all material properties and lighting modes. | +| **Terrain Hole** || Enable the checkbox to enable support for [Terrain Holes](https://docs.unity3d.com/2019.3/Documentation/Manual/terrain-PaintHoles.html) in HDRP. If you do not enable this, Terrain Holes are not visible in your Scene. | +| **Transparent Backface** || Enable the checkbox to enable support for transparent back-face render passes in HDRP. If your Unity Project does not need to make a transparent back-face pass, disable this checkbox to reduce build time. | +| **Transparent Depth Prepass** || Enable the checkbox to enable support for transparent depth render prepasses in HDRP. If your Unity Project does not need to make a transparent depth prepass, disable this checkbox to reduce build time. | +| **Transparent Depth Postpass** || Enable the checkbox to enable support for transparent depth render postpasses in HDRP. If your Unity Project does not make use of a transparent depth postpass, disable this checkbox to reduce build time. 
| +| **Custom Pass** || Enable the checkbox to enable support for [custom passes](Custom-Pass.md) in HDRP. If your Unity Project does not use custom passes, disable this checkbox to save memory. | +| - **Custom Buffer Format** || Specify the texture format for the custom buffer. If you experience banding issues due to your custom passes, you can change it to either `R11G11B10` if you don't need alpha, or `R16G16B16A16` if you do need alpha. | +| **Realtime Raytracing (Preview)** || Enable the checkbox to enable HDRP realtime ray tracing (Preview). It requires ray tracing-compatible hardware. For more information, refer to [Ray Tracing Getting Started](Ray-Tracing-Getting-Started.md). | +| **Visual Effects Ray Tracing (Preview)** || Enable the checkbox to enable support for ray tracing with Visual Effects in HDRP. **Realtime Raytracing (Preview)** must be enabled. | +| **Supported Ray Tracing Mode (Preview)** || Select the supported modes for ray tracing effects (**Performance**, **Quality**, or **Both**). For more information, refer to [Ray Tracing Getting Started](Ray-Tracing-Getting-Started.md). | +| - **LOD Bias** || Set the value that Cameras use to calculate their LOD bias. The Camera uses this value differently depending on the **LOD Bias Mode** you select. | +| - **Maximum LOD Level** || Set the value that Cameras use to calculate their maximum level of detail. The Camera uses this value differently depending on the **Maximum LOD Level Mode** you select. | +| **GPU Resident Drawer**||The GPU Resident Drawer automatically uses the [`BatchRendererGroup`](https://docs.unity3d.com/Manual/batch-renderer-group.html) API to draw GameObjects with GPU instancing. Refer to [Use the GPU Resident Drawer](gpu-resident-drawer.md) for more information.

  • **Disabled**: Unity doesn't automatically draw GameObjects with GPU instancing.
  • **Instanced Drawing**: Unity automatically draws GameObjects with GPU instancing.
| +|| **Small-Mesh Screen-Percentage** | Set the screen percentage Unity uses to cull small GameObjects, to speed up rendering. Unity culls GameObjects that fill less of the screen than this value. This setting might not work if you use your own [Level of Detail (LOD) meshes](https://docs.unity3d.com/Manual/LevelOfDetail.html). Set the value to 0 to stop Unity culling small GameObjects.

To prevent Unity culling an individual GameObject that covers less screen space than this value, go to the **Inspector** window for the GameObject and add a **Disallow Small Mesh Culling** component. | +|| **GPU Occlusion Culling** | Enable Unity using the GPU instead of the CPU to exclude GameObjects from rendering when they're hidden behind other GameObjects. Refer to [Use GPU occlusion culling](gpu-culling.md) for more information. | diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDRP-Features.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDRP-Features.md index 9f0f28ce110..be255b8d892 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDRP-Features.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDRP-Features.md @@ -351,7 +351,7 @@ To apply a tint to the shadow or the penumbra of the shadow: 1. Open the Light's Inspector window. 2. Go to the Shadows section. 3. Open the **More** (⋮) menu. -4. Select **Show Additional Properties**. +4. Select **Advanced Properties**. 
### Ray tracing diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/MatCap_Settings.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/MatCap_Settings.png new file mode 100644 index 00000000000..39ccecec279 Binary files /dev/null and b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/MatCap_Settings.png differ diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/renderingdebugger-gpuculling-heatmap.jpg b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/renderingdebugger-gpuculling-heatmap.jpg new file mode 100644 index 00000000000..024d6b6f534 Binary files /dev/null and b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/renderingdebugger-gpuculling-heatmap.jpg differ diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/renderingdebugger-gpuculling-overlay.jpg b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/renderingdebugger-gpuculling-overlay.jpg new file mode 100644 index 00000000000..5898fbc4083 Binary files /dev/null and b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/renderingdebugger-gpuculling-overlay.jpg differ diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Lighting-Mode-Shadowmask.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Lighting-Mode-Shadowmask.md index 4efc1f4f37d..b992df5d2c5 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Lighting-Mode-Shadowmask.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Lighting-Mode-Shadowmask.md @@ -55,7 +55,7 @@ For information on the behavior of each Shadowmask Mode, see the following table | Shadowmask Mode | Description | | ------------------- | ------------------------------------------------------------ | -| **Distance Shadowmask** | Makes 
the Light cast real-time shadows for all GameObjects when the distance between the Camera and the Light is less than the Fade Distance. If you can not see this property, enable [additional properties](expose-all-additional-properties.md) for the Shadows section. When the distance between the Light and the Camera is greater than the Fade Distance, HDRP stops calculating real-time shadows for the Light. Instead, it uses shadowmasks for static GameObjects, and non-static GameObjects don't cast shadows. Directional Lights don't use Fade Distance, instead they use the current [Max Shadow Distance](Override-Shadows.md). | +| **Distance Shadowmask** | Makes the Light cast real-time shadows for all GameObjects when the distance between the Camera and the Light is less than the Fade Distance. If you can not see this property, enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for the Shadows section. When the distance between the Light and the Camera is greater than the Fade Distance, HDRP stops calculating real-time shadows for the Light. Instead, it uses shadowmasks for static GameObjects, and non-static GameObjects don't cast shadows. Directional Lights don't use Fade Distance, instead they use the current [Max Shadow Distance](Override-Shadows.md). | | **Shadowmask** | Makes the Light cast real-time shadows for non-static GameObjects only. It then combines these shadows with shadowmasks for static GameObjects when the distance between the Camera and the Light is less than the Fade Distance. When the distance between the Light and the Camera is greater than the Fade Distance, HDRP stops calculating real-time shadows for the Light. It uses shadowmasks for static GameObjects and non-static GameObjects don't cast shadows. 
| diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Optimization.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Optimization.md new file mode 100644 index 00000000000..d1a4073d9a8 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Optimization.md @@ -0,0 +1,7 @@ +# Optimization + +Optimize the High Definition Render Pipeline (HDRP) to improve the performance of your project. + +|Page|Description| +|-|-| +|[Reduce rendering work on the CPU](reduce-rendering-work-on-cpu.md)|Use the GPU Resident Drawer or GPU occlusion culling to speed up rendering.| diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Planar-Reflection-Probe.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Planar-Reflection-Probe.md index 2321d367d1a..2e644aa0907 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Planar-Reflection-Probe.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Planar-Reflection-Probe.md @@ -59,8 +59,8 @@ The following properties control the method that the Planar Reflection Probe use | **Custom Frame Settings** | Allows you to define custom [Frame Settings](Frame-Settings.md) for this Probe. Disable this property to use the **Default Frame Settings** in your Unity Project’s [HDRP Asset](HDRP-Asset.md). | | **Resolution** | Set the resolution of this Planar Reflection Probe. Use the drop-down to select which quality mode to derive the resolution from. If you select Custom, set the resolution, measured in pixels, in the input field. A higher resolution increases the fidelity of planar reflection at the cost of GPU performance and memory usage, so if you experience any performance issues, try using a lower value. The resolution can be set to 0 to prevent the probe from being rendered for certain quality levels. 
| | **Rough Reflections** | Disable the checkbox to tell HDRP to use this Planar Reflection Probe as a mirror. If you do this, the receiving surface must be perfectly smooth or the reflection result is not accurate. If you want perfect reflection, disabling this option can be useful because it means HDRP does not need to process rough refraction and thus decreases the resource intensity of the effect.| -| **Mirror Position** | Offsets the position of the mirror from the Transform Position.
This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. | -| **Range Compression Factor** | The factor which HDRP divides the result of the probe's rendering by. This is useful to deal with very bright or dark objects in the reflections that would otherwise be saturated.
This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. | +| **Mirror Position** | Offsets the position of the mirror from the Transform Position.
This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. | +| **Range Compression Factor** | The factor which HDRP divides the result of the probe's rendering by. This is useful to deal with very bright or dark objects in the reflections that would otherwise be saturated.
This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. | ### Render Settings @@ -90,4 +90,4 @@ You can use Scene view gizmos to visually customize specific properties. ## Best practices If you use a Planar Reflection Probe as a mirror (i.e its influence volume overlap a GameObject with a Material that has its smoothness and metallic properties set to 1) it is best practice to disable the **Rough Refraction** property to decrease the resource intensity. -If a receiving surface isn't a perfect mirror and the **Rough Reflection** option is disabled, the surface still renders smooth, but the result is physically incorrect. \ No newline at end of file +If a receiving surface isn't a perfect mirror and the **Rough Reflection** option is disabled, the surface still renders smooth, but the result is physically incorrect. diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Bloom.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Bloom.md index 7f63b598071..883fbf51554 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Bloom.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Bloom.md @@ -13,7 +13,7 @@ The Bloom effect also has a **Lens Dirt** feature, which you can use to apply a 1. In the Scene or Hierarchy view, select a GameObject that contains a Volume component to view it in the Inspector. 2. In the Inspector, go to **Add Override** > **Post-processing** and select **Bloom**. HDRP now applies **Bloom** to any Camera this Volume affects. -Bloom includes [additional properties](expose-all-additional-properties.md) that you must manually expose. 
+Bloom includes [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) that you must manually expose. [!include[](snippets/volume-override-api.md)] @@ -39,10 +39,10 @@ Bloom includes [additional properties](expose-all-additional-properties.md) that | **Property** | **Description** | | -------------------------- | ------------------------------------------------------------ | -| **Resolution** | Use the drop-down to set the resolution at which HDRP processes the Bloom effect. If you target consoles that use a very high resolution (for example, 4k), select **Quarter,** because it's less resource-intensive.
• **Quarter**: Uses quarter the screen resolution.
• **Half**: Uses half the screen resolution.
This property only appears when you enable [additional properties](expose-all-additional-properties.md). | +| **Resolution** | Use the drop-down to set the resolution at which HDRP processes the Bloom effect. If you target consoles that use a very high resolution (for example, 4k), select **Quarter,** because it's less resource-intensive.
• **Quarter**: Uses quarter the screen resolution.
• **Half**: Uses half the screen resolution.
This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html). | | **High Quality Prefiltering** | Enable the checkbox to make HDRP use 13 samples instead of 4 during the prefiltering pass. This increases the resource intensity of the Bloom effect, but results in less flickering by small and bright objects like the sun.
This property only appears when you enable [additional properties](expose-all-additional-properties.md). | -| **High Quality Filtering** | Enable the checkbox to make HDRP use bicubic filtering instead of bilinear filtering. This increases the resource intensity of the Bloom effect, but results in smoother visuals.
This property only appears when you enable [additional properties](expose-all-additional-properties.md). | -| **Anamorphic** | Enable the checkbox to make the bloom effect take the **Anamorphism** property of the Camera into account. This stretches the bloom horizontally or vertically like it would on anamorphic sensors.
This property only appears when you enable [additional properties](expose-all-additional-properties.md). | +| **High Quality Filtering** | Enable the checkbox to make HDRP use bicubic filtering instead of bilinear filtering. This increases the resource intensity of the Bloom effect, but results in smoother visuals.
This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html). | +| **Anamorphic** | Enable the checkbox to make the bloom effect take the **Anamorphism** property of the Camera into account. This stretches the bloom horizontally or vertically like it would on anamorphic sensors.
This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html). | ## Details diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Depth-of-Field.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Depth-of-Field.md index 05a24ae6127..de6ba424ce8 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Depth-of-Field.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Depth-of-Field.md @@ -9,7 +9,7 @@ The Depth Of Field component applies a depth of field effect, which simulates th 1. In the Scene or Hierarchy view, select a GameObject that contains a Volume component to view it in the Inspector. 2. In the Inspector, go to **Add Override** > **Post-processing** and select **Depth Of Field**. HDRP now applies **Depth Of Field** to any Camera this Volume affects. -Depth Of Field includes [additional properties](expose-all-additional-properties.md) that you must manually expose. +Depth Of Field includes [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) that you must manually expose. [!include[](snippets/volume-override-api.md)] diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Motion-Blur.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Motion-Blur.md index fc488e5b296..104e421c2ed 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Motion-Blur.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Post-Processing-Motion-Blur.md @@ -11,7 +11,7 @@ The Motion Blur effect uses velocities from HDRP's velocity buffer. This means t 1. 
In the Scene or Hierarchy view, select a GameObject that contains a Volume component to view it in the Inspector. 2. In the Inspector, go to **Add Override** > **Post-processing** and select **Motion Blur**. HDRP now applies **Motion Blur** to any Camera this Volume affects. -Motion Blur includes [additional properties](expose-all-additional-properties.md) that you can manually expose. +Motion Blur includes [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) that you can manually expose. [!include[](snippets/volume-override-api.md)] diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Reflection-Probe.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Reflection-Probe.md index 54d5d8a1dee..b5bda4e0cb7 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Reflection-Probe.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Reflection-Probe.md @@ -66,7 +66,7 @@ The following properties control the method that the Reflection Probe uses to ca | **Probe Layer Mask** | Acts as a culling mask for environment lights (light from Planar Reflection Probes and Reflection Probes). This Reflection Probe ignores all Reflection Probes that are on Layers not included in this Layer mask, so use this property to ignore certain Reflection Probes when rendering this one. | | **Custom Frame Settings** | Allows you to define custom [Frame Settings](Frame-Settings.md) for this Probe. Disable this property to use the **Default Frame Settings** in your Unity Project’s [HDRP Asset](HDRP-Asset.md). | | **Resolution** | Select a quality mode to determine the resolution of this Reflection Probe. If you select Custom, you must specify a resolution in the dropdown menu. Higher resolutions increase the fidelity of cube reflections but can reduce GPU performance and increase memory consumption. 
The resolution can be set to 0 to prevent the probe from being rendered for certain quality levels. | -| **Range Compression Factor** | The factor which HDRP divides the result of the probe's rendering by. This is useful to deal with very bright or dark objects in the reflections that would otherwise be saturated.
This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. | +| **Range Compression Factor** | The factor which HDRP divides the result of the probe's rendering by. This is useful to deal with very bright or dark objects in the reflections that would otherwise be saturated.
This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. | ### Render Settings diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/TableOfContents.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/TableOfContents.md index 3270a53b6e9..cabdf8390f9 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/TableOfContents.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/TableOfContents.md @@ -82,10 +82,12 @@ * [Display Adaptive Probe Volumes](probevolumes-showandadjust.md) * [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md) * [Bake multiple scenes together with Baking Sets](probevolumes-usebakingsets.md) - * [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups) + * [Changing lighting at runtime](change-lighting-at-runtime.md) + * [Choose how to change lighting at runtime](probevolumes-understand-changing-lighting-at-runtime.md) + * [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) + * [Update light from the sky at runtime with sky occlusion](probevolumes-skyocclusion.md) * [Streaming](probevolumes-streaming.md) * [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) - * [Sky Occlusion](probevolumes-skyocclusion.md) * [Adaptive Probe Volume Inspector window reference](probevolumes-inspector-reference.md) * [Adaptive Probe Volumes panel reference](probevolumes-lighting-panel-reference.md) * [Adaptive Probe Volumes Options Override reference](probevolumes-options-override-reference.md) @@ -297,7 +299,11 @@ * [Understand and fix Not a Number (NAN) and Infinite (Inf) values](Post-Processing-Propagating-NaNs.md) * [Known issues](Known-Issues.md) * [Stencil Buffer Usage](Stencil-Usage.md) 
- +* [Optimization](Optimization.md) + * [Reduce rendering work on the CPU](reduce-rendering-work-on-cpu.md) + * [Use the GPU Resident Drawer](gpu-resident-drawer.md) + * [Make a GameObject compatible with the GPU Resident Drawer](make-object-compatible-gpu-rendering.md) + * [Use GPU occlusion culling](gpu-culling.md) * [Reference](Reference.md) * [Menu Items](Menu-Items.md) diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/change-lighting-at-runtime.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/change-lighting-at-runtime.md new file mode 100644 index 00000000000..0d8609cf209 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/change-lighting-at-runtime.md @@ -0,0 +1,9 @@ +# Changing lighting at runtime + +You can change how objects use the baked data in Adaptive Probe Volumes, to create lighting that changes at runtime. For example, you can turn the lights on and off in a scene, or change the time of day. + +| Page | Description | +|-|-| +| [Choose how to change lighting at runtime](probevolumes-understand-changing-lighting-at-runtime.md) | Choose whether to use Lighting Scenarios or sky occlusion. | +| [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) | Use multiple Lighting Scenarios to store baking results for different scene setups, and switch or blend between them at runtime. | +| [Update light from the sky at runtime with sky occlusion](probevolumes-skyocclusion.md) | Sky occlusion means that when a GameObject samples a color from the sky, Unity dims the color if the light can't reach the GameObject. 
| diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-realistic-clouds-volumetric-clouds.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-realistic-clouds-volumetric-clouds.md index 344e3d0c30e..25666f40ccb 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-realistic-clouds-volumetric-clouds.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/create-realistic-clouds-volumetric-clouds.md @@ -33,3 +33,33 @@ When **Rendering Space** is set to **Camera**, the clouds are always located abo [!include[](snippets/volume-override-api.md)] +By default, animation data for clouds gets incremented automatically depending on the wind parameters. +In some cases, it can be useful to manually set the animation time, which can be done by using the following script on a Camera: + +```cs +using UnityEngine; +using UnityEngine.Rendering; +using UnityEngine.Rendering.HighDefinition; + +public class CloudSync : MonoBehaviour +{ + VolumetricClouds.AnimationData data; + + void Update() + { + // Save animation data + if (Input.GetKeyDown(KeyCode.A)) + data = VolumetricClouds.animationData; + + // Set animation data + if (Input.GetKeyDown(KeyCode.B)) + { + var camera = this.GetComponent<Camera>(); + var hdCamera = HDCamera.GetOrCreate(camera); + VolumetricClouds.animationData = data; + // We reset the camera to discard the history buffer manually + hdCamera.Reset(); + } + } +} +``` diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/debug-materials-and-shaders-matcap.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/debug-materials-and-shaders-matcap.md index c00478a9c14..aa3f60b3968 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/debug-materials-and-shaders-matcap.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/debug-materials-and-shaders-matcap.md @@ -14,10 +14,15 @@ 
MatCap mode preserves the normal maps and you can use the original Material albe ## Properties -| **Property** | **Description** | -| ----------------------------- | ------------------------------------------------------------ | -| **Mix Albedo in MatCap Mode** | Enable to make HDRP mix the albedo of the Material with its material capture. | -| **MatCap Intensity Scale** | Set the intensity of the material capture. This increases the brightness of the Scene. This is useful if the albedo darkens the Scene considerably. | +1. Open the **Graphics** tab in the **Preferences** window (menu: **Edit > Preferences > Graphics**). +2. Under **High Definition Render Pipeline** and **MatCap Mode**: + +![](Images/MatCap_Settings.png) + +| **Property** | **Description** | +| ------------------------- | ------------------------------------------------------------ | +| **Mix Albedo** | Enable to make HDRP mix the albedo of the Material with its material capture. | +| **Intensity Scale** | Set the intensity of the material capture. This increases the brightness of the Scene. This is useful if the albedo darkens the Scene considerably. | ## Default material captures diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/expose-all-additional-properties.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/expose-all-additional-properties.md deleted file mode 100644 index cba36fb4dca..00000000000 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/expose-all-additional-properties.md +++ /dev/null @@ -1,29 +0,0 @@ -# Expose all additional properties - -The High Definition Render Pipeline (HDRP) components expose standard properties by default that are suitable for most use-cases. However, some HDRP components and [Volume Overrides](volume-component.md) include **additional properties** which you can use to fine-tune the behavior of the component. 
- -## Exposing additional properties - -Not every component or Volume Override includes additional properties. If one does, it has a contextual menu to the right of each property section header that includes additional properties. To expose additional properties for that section, open the contextual menu and click **Show Additional Properties**. For example, the [Light component’s](Light-Component.md) **General** section includes additional properties: - -![](Images/MoreOptions1.png) - -When you select **Show Additional Properties**, Unity exposes additional properties for the **General** section. In this example, the **Light Layer** property appears: - -![](Images/MoreOptions2.png) - -For Volume Overrides, the already existing contextual menu has a **Show Additional Properties** toggle as well. - -Note that you can also open the contextual menu by right-clicking on the property section header. - -## Exposing all additional properties - -If you want to toggle additional properties for all components and Volume Overrides, you can do so through the **Preference** window under **Core Render Pipeline**. To do this: - -1. Open the **Core Render Pipeline** tab in the **Preferences** window (menu: **Edit > Preferences > Core Render Pipeline**). -2. Set **Additional Properties** to **All Visible**. - -![](Images/MoreOptions3.png) - -When toggling additional properties through this menu, the state of all components changes once. After that, you can still choose to show or hide additional properties for each component individually. -A shortcut to this preference menu is also available from the component and Volume Override's contextual menu with **Show All Additional Properties...**. 
diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/gpu-culling.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/gpu-culling.md new file mode 100644 index 00000000000..a4409169fba --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/gpu-culling.md @@ -0,0 +1,35 @@ +# Use GPU occlusion culling + +GPU occlusion culling means Unity uses the GPU instead of the CPU to exclude objects from rendering when they're occluded behind other objects. Unity uses this information to speed up rendering in scenes that have a lot of occlusion. + +The GPU Resident Drawer works only with [Graphics APIs](https://docs.unity3d.com/6000.0/Documentation/Manual/GraphicsAPIs.html) and platforms that support compute shaders. + +## How GPU occlusion culling works + +Unity generates depth textures from the perspective of cameras and lights in the scene. + +The GPU then uses the depth textures from the current frame and the previous frame to cull objects. Unity renders only objects that are unoccluded in either frame. Unity culls the remaining objects, which are occluded in both frames. + +Whether GPU occlusion culling speeds up rendering depends on your scene. GPU occlusion culling is most effective in the following setups: + +- Multiple objects use the same mesh, so Unity can group them into a single draw call. +- The scene has a lot of occlusion, especially if the occluded objects have a high number of vertices. + +If occlusion culling doesn't have a big effect on your scene, rendering time might increase because of the extra work the GPU does to set up GPU occlusion culling. + +## Enable GPU occlusion culling + +1. [Enable the GPU Resident Drawer](gpu-resident-drawer.md#enable-the-gpu-resident-drawer). +2. In the active [HDRP Asset](HDRP-Asset.md), in the **Rendering** section, enable **GPU Occlusion**. 
+ +## Analyze GPU occlusion culling + +You can use the following to analyze GPU occlusion culling: + +- [Rendering Statistics](https://docs.unity3d.com/Manual/RenderingStatistics.html) overlay to check rendering speed increases. +- [Rendering Debugger](rendering-debugger-window-reference.md) to troubleshoot issues. + +## Additional resources + +- [Reduce rendering work on the CPU](reduce-rendering-work-on-cpu.md) +- [Occlusion culling](https://docs.unity3d.com/Manual/OcclusionCulling.html) diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/gpu-resident-drawer.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/gpu-resident-drawer.md new file mode 100644 index 00000000000..036c132a6e3 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/gpu-resident-drawer.md @@ -0,0 +1,55 @@ + +# Use the GPU Resident Drawer + +The GPU Resident Drawer automatically uses the [`BatchRendererGroup`](https://docs.unity3d.com/Manual/batch-renderer-group.html) API to draw GameObjects with GPU instancing, which reduces the number of draw calls and frees CPU processing time. For more information, refer to [How BatchRendererGroup works](https://docs.unity3d.com/Manual/batch-renderer-group-how.html). + +The GPU Resident Drawer works only with the following: + +- [Graphics APIs](https://docs.unity3d.com/6000.0/Documentation/Manual/GraphicsAPIs.html) and platforms that support compute shaders. +- GameObjects that have a [**Mesh Renderer** component](https://docs.unity3d.com/Manual/class-MeshRenderer.html). + +Otherwise, Unity falls back to drawing the GameObject without GPU instancing. + +If you enable the GPU Resident Drawer, the following applies: + +- Build times are longer because Unity compiles all the `BatchRendererGroup` shader variants into your build. + +## Enable the GPU Resident Drawer + +To enable the GPU Resident Drawer, follow these steps: + +1. 
Go to **Project Settings** > **Graphics**, then in the **Shader Stripping** section set **BatchRendererGroup Variants** to **Keep All**. +2. Go to the active [HDRP Asset](HDRP-Asset.md), then in the **Rendering** section set **GPU Resident Drawer** to **Instanced Drawing**. + +If you change or create GameObjects each frame, the GPU Resident Drawer updates with the changes. + +To include or exclude GameObjects from the GPU Resident Drawer, refer to [Make a GameObject compatible with the GPU Resident Drawer](make-object-compatible-gpu-rendering.md). + +## Analyze the GPU Resident Drawer + +To analyze the results of the GPU Resident Drawer, you can use the following: + +- [Frame Debugger](https://docs.unity3d.com/Manual/FrameDebugger.html). If the GPU Resident Drawer groups GameObjects, the Frame Debugger displays draw calls called **Hybrid Batch Group**. +- [Rendering Debugger](rendering-debugger-window-reference.md) +- [Rendering Statistics](https://docs.unity3d.com/Manual/RenderingStatistics.html) to check if the number of frames per second has increased, and the CPU processing time and SetPass calls have decreased. +- [Unity Profiler](https://docs.unity3d.com/Manual/Profiler.html) + +## Optimize the GPU Resident Drawer + +How much the GPU Resident Drawer speeds up rendering depends on your scene. The GPU Resident Drawer is most effective in the following setups: + +- The scene is large. +- Multiple GameObjects use the same mesh, so Unity can group them into a single draw call. + +Rendering usually speeds up less in the Scene view and the Game view, compared to Play mode or a final built project. + +The following might speed up the GPU Resident Drawer: + +- Go to **Project Settings** > **Player**, then in the **Other Settings** section, disable **Static Batching**. +- Go to **Window** > **Panels** > **Lighting**, then in the **Lightmapping Settings** section enable **Fixed Lightmap Size** and disable **Use Mipmap Limits**. 
+ +## Additional resources + +- [Reduce rendering work on the CPU](reduce-rendering-work-on-cpu.md) +- [Graphics performance fundamentals](https://docs.unity3d.com/Manual/OptimizingGraphicsPerformance.html) +- [GPU occlusion culling](gpu-culling.md) \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/hdri-sky-volume-override-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/hdri-sky-volume-override-reference.md index 794cd659720..0f20a632f1f 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/hdri-sky-volume-override-reference.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/hdri-sky-volume-override-reference.md @@ -100,7 +100,7 @@ Refer to [Create an HDRI sky](create-an-HDRI-sky.md) for more information. ![](Images/Override-HDRISky2.png) -These properties only appear if you enable [more options](expose-all-additional-properties.md) and then enable **Backplate**. +These properties only appear if you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) and then enable **Backplate**. diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/make-object-compatible-gpu-rendering.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/make-object-compatible-gpu-rendering.md new file mode 100644 index 00000000000..4e90a838362 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/make-object-compatible-gpu-rendering.md @@ -0,0 +1,25 @@ +# Make a GameObject compatible with the GPU Resident Drawer + +To make a GameObject compatible with the [GPU Resident Drawer](gpu-resident-drawer.md), check it has the following properties: + +- Has a [Mesh Renderer component](https://docs.unity3d.com/Manual/class-MeshRenderer.html). 
+- In the Mesh Renderer component, **Light Probes** isn't set to **Use Proxy Volume**. +- Uses only static global illumination, not real time global illumination. +- Uses a shader that supports DOTS instancing. Refer to [Supporting DOTS Instancing](https://docs.unity3d.com/Manual/dots-instancing-shaders.html) for more information. +- Doesn't move position after one camera finishes rendering and before another camera starts rendering. +- Doesn't use the `MaterialPropertyBlock` API. +- Doesn't have a script that uses a per-instance callback, for example `OnRenderObject`. + +## Exclude a GameObject from the GPU Resident Drawer + +To exclude a GameObject from the GPU Resident Drawer, add a **Disallow GPU Driven Rendering** component to the GameObject. + +1. Select the GameObject. +2. In the **Inspector** window, select **Add Component**. +3. Select **Disallow GPU Driven Rendering**. + +Select **Apply to Children Recursively** to exclude both the GameObject and its children. + +## Additional resources + +- [Mesh Renderer component](https://docs.unity3d.com/Manual/class-MeshRenderer.html) diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-adjustment-volume-component-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-adjustment-volume-component-reference.md index 8ab43f5fe5d..c9c660983f7 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-adjustment-volume-component-reference.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-adjustment-volume-component-reference.md @@ -39,8 +39,8 @@ Refer to the following for more information about using the Probe Adjustment Vol
  • Apply Virtual Offset: Change the position Light Probes use when sampling the lighting in the scene during baking. Refer to Adjust Virtual Offset for more information.
  • Override Virtual Offset Settings: Override the biases HDRP uses during baking to determine when Light Probes use Virtual Offset, and calculate sampling positions. Refer to Adjust Virtual Offset for more information
  • Intensity Scale: Override the intensity of probes to brighten or darken affected areas.
  • -
  • Override Sky Direction: Override the direction used for sampling the ambient probe when using Sky Occlusion.
  • -
  • Override Sample Count: Override the sample count used to compute Lighting and Sky Occlusion.
  • +
  • Override Sky Direction: Override the directions Unity uses to sample the ambient probe, if you enable sky occlusion.
  • +
  • Override Sample Count: Override the number of samples Unity uses for Adaptive Probe Volumes.
  • @@ -79,5 +79,39 @@ Refer to the following for more information about using the Probe Adjustment Vol

    Change the brightness of all probes covered by the Probe Adjustment Volume component. Use this sparingly, because changing the intensity of probe data can lead to inconsistencies in the lighting. This option only appears if you set Mode to Intensity Scale.

    + + + + + + + +
    Preview Probe Adjustments +

    Preview the effect of the adjustments in the Scene view and the Rendering Debugger.

    +
    Bake Probe Volumes +

    Bake the Adaptive Probe Volumes with the adjustments.

    +
    + +## Override Sample Count properties + +These properties are visible only when you set **Mode** to **Override Sample Count**. + +### Probes + +| Property | Description | +|-|-| +| **Direct Sample Count** | Set the number of samples Unity uses to calculate direct lighting. | +| **Indirect Sample Count** | Set the number of samples Unity uses to calculate indirect lighting. | +| **Sample Count Multiplier** | Set a value to multiply **Direct Sample Count** and **Indirect Sample Count** by. | +| **Max Bounces** | Set the maximum number of times Unity bounces light off objects when it calculates indirect lighting. | + +### Sky Occlusion + +These properties only have an effect if you enable sky occlusion. Refer to [Update light from the sky at runtime with sky occlusion](probevolumes-skyocclusion.md) for more information. + +| Property | Description | +|-|-| +| **Sample Count** | Set the number of samples Unity uses to calculate a sky occlusion value for each probe. | +| **Max Bounces** | Set the maximum number of times Unity bounces light off objects when it calculates a sky occlusion value. | diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-concept.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-concept.md index d25c7974f0f..8591c72a8fd 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-concept.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-concept.md @@ -21,7 +21,7 @@ Adaptive Probe Volumes have the following advantages: - Because Adaptive Probe Volumes can cover a whole scene, screen space effects can fall back to Light Probes to get lighting data from GameObjects that are off-screen or occluded. Refer to [Screen Space Global Illumination](Override-Screen-Space-GI.md) for more information. 
- Unity can use the data in Adaptive Probe Volumes to adjust lighting from Reflection Probes so it more closely matches the local environment, which reduces the number of Reflection Probes you need. Refer to [Frame Settings properties](frame-settings-reference.md). - Adaptive Probe Volumes include [streaming](probevolumes-streaming.md) functionality to support large open worlds. -- Adaptive Probe Volumes support [Sky Occlusion](probevolumes-skyocclusion.md) for dynamic sky relighting at runtime. +- You can use Adaptive Probe Volumes to [update light from the sky at runtime with sky occlusion](probevolumes-skyocclusion.md). ![](Images/probevolumes-per-pixel.png)
    The car model is made up of separate GameObjects. The left scene uses Light Probe Groups, which use per-object lighting, so each part of the car samples a single blended probe value. The right scene uses Adaptive Probe Volumes, which use per-pixel lighting, so each part of the car samples its nearest probes. This image uses the ArchVizPRO Photostudio HDRP asset from the Unity Asset Store. diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-lighting-panel-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-lighting-panel-reference.md index 6cbdb0dd60b..ddc24e4b37a 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-lighting-panel-reference.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-lighting-panel-reference.md @@ -94,6 +94,16 @@ This section appears only if you enable **Lighting Scenarios** under **Light Pro ||| **Not Baked** | An information icon appears if you haven't baked any lighting data for the active Lighting Scenario.| ||| **Not Loaded** | An information icon appears if scenes in the Baking Set aren't currently loaded in the Hierarchy window, so HDRP can't determine the Lighting Scenario status. | +## Sky Occlusion Settings + +| **Property** | **Description** | +|-|-| +| **Sky Occlusion** | Enable [sky occlusion](probevolumes-skyocclusion.md). | +| **Samples** | Set the number of samples Unity uses to calculate the light each probe receives from the sky. Higher values increase the accuracy of the sky occlusion data, but increasing baking time. The default value is 2048. | +| **Bounces** | Set the number of times Unity bounces light from the sky off objects when calculating the sky occlusion data. Higher values increase the accuracy of the sky occlusion data, but increase baking time. Use higher values if objects block the direct view from probes to the sky. The default value is 2. 
| +| **Albedo Override** | Set the brightness of the single color Unity uses to represent objects the sky light bounces off, instead of the actual color of the objects. Higher values brighten the baked sky occlusion lighting. The default value is 0.6. | +| **Sky Direction** | Enable Unity storing and using more accurate data about the directions from probes towards the sky. Refer to [Enable more accurate sky direction data](probevolumes-skyocclusion.md#enable-more-accurate-sky-direction-data) for more information. | + ## Probe Invalidity Settings diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-options-override-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-options-override-reference.md index 929e72a0fb9..bc6811b7fd0 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-options-override-reference.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-options-override-reference.md @@ -14,6 +14,7 @@ Refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) for | **Scale Bias with Min Probe Distance** | Scale the **Normal Bias** or **View Bias** so it's proportional to the spacing between Light Probes in a [brick](probevolumes-concept.md#how-probe-volumes-work). | | **Sampling Noise** | Enable to increase or decrease the amount of noise HDRP adds to the position used by shaded pixels when sampling Light Probes. This can help [fix seams](probevolumes-fixissues.md#fix-seams) between bricks. | | **Animate Sampling Noise** | Enable to animate sampling noise when Temporal Anti-Aliasing (TAA) is enabled. This can make noise patterns less visible. | -| **Leak Reduction Mode** | Enable to choose the method Unity uses to reduce leaks. Refer to [Fix light leaks](probevolumes-fixissues.md#fix-light-leaks).
    Options:
    • **None**: No leak reduction.
    • **Validity Based**: Prevent invalid Light Probes from contributing to the lighting result.
    • **Validity and Normal Based**: Prevent invalid Light Probes from contributing to the lighting result, and give Light Probes more weight than others based on the GameObject pixel's sampling position. -| **Min Valid Dot Product Value** | Enable to make HDRP reduce a Light Probe's influence on a GameObject if the direction towards the Light Probe is too different to the GameObject's surface normal direction. The value is the minimum [dot product](https://docs.unity3d.com/ScriptReference/Vector3.Dot.html) between the two directions where HDRP will reduce the Light Probe's influence. | +| **Leak Reduction Mode** | Enable to choose the method Unity uses to reduce leaks. Refer to [Fix light leaks](probevolumes-fixissues.md#fix-light-leaks).
    Options:
    • **None**: No leak reduction.
    • **Performance**: The uvw used to sample APV data are warped to try to have invalid probe not contributing to lighting. This samples APV a single time so it's a cheap option but will only work in the simplest cases.
    • **Validity and Normal Based**: This option samples APV between 1 and 3 times to provide the smoothest result without introducing artifacts. This is as expensive as Performance mode in the simplest cases, and is better and more expensive in the most complex cases. | | **Occlusion Only Reflection Normalization** | Enable to limit Reflection Probe Normalization so it only decreases the intensity of reflections. Keep this enabled to reduce light leaks. Refer to [Frame Settings](frame-settings-reference.md#lighting). | +| **Intensity Multiplier** | Set the strength of the light contribution from Adaptive Probe Volumes. A value of 0 means Unity doesn't use the Adaptive Probe Volume data. | +| **Sky Occlusion Intensity Multiplier** | Set the strength of the light contribution from sky occlusion data in Adaptive Probe Volumes, if you enable [sky occlusion](probevolumes-skyocclusion.md). | diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-skyocclusion.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-skyocclusion.md index 295afd97f45..0124c58c3b5 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-skyocclusion.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-skyocclusion.md @@ -1,70 +1,61 @@ -# Sky Occlusion +# Update light from the sky at runtime with sky occlusion -Sky Occlusion stores the amount of lighting from the Sky affecting probes in an Adaptive Probe Volume. During run-time, this data can be combined with lighting from the Scene’s Ambient Probe to dynamically relight the Scene based on changes to the Sky. See [Visual Environment Volume override](Override-Visual-Environment.md). +You can enable sky occlusion when you use Adaptive Probe Volumes. Sky occlusion means that when a GameObject samples a color from the sky, Unity dims the color if the light can't reach the GameObject. 
-When Sky Occlusion is enabled for Adaptive Probe Volumes, an additional directional visibility factor is calculated for each probe during bake time. This gray value - stored as a spherical harmonic - is used during shading to attenuate the lighting contribution from the Sky. As multiple bounces can be used, the Sky’s effect upon probes with indirect paths to the Sky can also be calculated. +Sky occlusion in Unity uses the sky color from the [ambient probe](https://docs.unity3d.com/2023.3/Documentation/ScriptReference/RenderSettings-ambientProbe.html), which updates at runtime. This means you can dynamically light GameObjects as the sky color changes. For example, you can change the sky color from light to dark, to simulate the effect of a day-night cycle. -Static and dynamic objects can both receive lighting with Sky Occlusion. However, only static objects can affect the baked result. Enabling Sky Occlusion can lengthen the time required to bake lighting and uses additional memory at run-time. +If you enable sky occlusion, Adaptive Probe Volumes might take longer to bake, and Unity might use more memory at runtime. -## Enable Sky Occlusion +## How sky occlusion works -Sky Occlusion is enabled from the **Sky Occlusion** section of the **Adaptive Probe Volumes** tab within the **Lighting Window**. +When you enable sky occlusion, Unity bakes an additional static sky occlusion value into each probe in an Adaptive Probe Volume. The sky occlusion value is the amount of indirect light the probe receives from the sky, including light that bounced off static GameObjects. -Note that lighting data must be recalculated if Sky Occlusion is enabled for the first time, or is disabled following a bake. +At runtime, when a static or dynamic GameObject samples an Adaptive Probe Volume probe, Unity approximates the light from the sky using two values: -## Modifying Sky Occlusion properties +- A sky color from the ambient probe, which updates when the sky color changes. 
+- The sky occlusion value, which is static. -It is possible to affect the visual quality and appearance of Sky Occlusion using these properties: +## Enable sky occlusion -
    - - - - - - - - - - - - - - - - - - - - - - - - -
    PropertyDescription
    SamplesDetermines the number of samples used when calculating the sky contribution for each probe. Increasing this value improves the accuracy of lighting data at the cost of the time required to bake Adaptive Probe Volumes.
    BouncesThe number of bounces used when calculating the sky’s contribution on probes. Increasing the number of bounces can be useful in Scenes where probes may have very indirect routes to the Sky. This will also affect the time required to bake Adaptive Probe Volumes.
    Albedo OverrideSky Occlusion does not consider the albedo (color) of Materials used throughout the Scene when calculating bounced lighting. Instead a single color is a used throughout the Scene. Albedo Override allows this color to be modified. Lower values darken and higher values will brighten the intensity of this value.
    Sky DirectionWhether probes should store the dominant direction of incoming light from the Sky. Sky Direction increases memory usage but produces more accurate lighting. Without Sky Direction, the surface normals of objects are used instead and in some Scenes this can produce visual inaccuracies.
    +First, enable the GPU lightmapper. Unity doesn't support sky occlusion if you use **Progressive CPU** instead. -## Sky Direction +1. Go to **Window** > **Rendering** > **Lighting**. +2. Go to the **Scene** panel. +3. Set **Lightmapper** to **Progressive GPU**. -By default, Sky Direction is disabled and the surface normals of objects lit by probes are used to sample the Ambient Probe generated from the Sky. -When Sky Direction is enabled, Unity calculates - for each probe - the most appropriate incoming sky lighting direction. Where desirable, this can be locally overridden in specific areas of the Scene using a [Probe Adjustment Volume](probevolumes-concept.md#volume). +To enable sky occlusion, follow these steps: -Enabling Sky Direction can improve visual results, especially in cave-like scenarios where the sky lighting needs to bounce several times on surfaces before reaching a surface. However the additional data required increases the time needed to bakelighting data. It also increases memory usage during run-time. +1. Go to the **Adaptive Probe Volumes** panel. +2. Enable **Sky Occlusion**. -## Debugging Sky Occlusion +To update the lighting data, you must also [bake the Adaptive Probe Volume](probevolumes-use.md#add-and-bake-an-adaptive-probe-volume) after you enable or disable sky occlusion. -You can inspect the Sky Occlusion value using the **Display Probes** option in the [Rendering Debugger](rendering-debugger-window-reference.md#probe-volume-panel). Two views are provided in the **Probe Shading Mode** dropdown: -1. **Sky Occlusion SH**: Display the gray value (scalar) used to attenuate Sky lighting. -2. **Sky Direction**: Displays a green dot corresponding to the direction used to sample the Ambient Probe. If **Sky Direction** was not enabled or could not be computed this displays a red probe. 
+## Update light at runtime -## Limitations +To update the light from the sky at runtime, follow these steps to make sure the ambient probe updates when the sky updates. -1. Currently Sky Occlusion does not work if the **Progressive CPU Lightmapper** is selected. -2. If Sky Occlusion is enabled or disabled, the Scene must be rebaked to update lighting data. -3. Sky Direction is not interpolated between probes. This may result in harsh lighting transitions where neighboring probes are storing very different results. +1. In the **Hierarchy** window, select the volume that affects the current camera. +2. In the **Inspector** window, double-click the Volume Profile Asset to open the asset. +3. In the **Visual Environment** > **Sky** section, set **Ambient Mode** to **Dynamic**. -# Additional resources +Refer to [Environment lighting](environment-lighting.md) for more information. +## Enable more accurate sky direction data +When an object samples the ambient probe, by default Unity uses the surface normal of the object as the direction to the sky. This direction might not match the direction the light comes from, for example if the object is inside and the sky light bounces off other objects to reach it. -* [Understand Adaptive Probe Volumes](probevolumes-concept.md) -* [Visual Environment Volume override](Override-Visual-Environment.md) \ No newline at end of file +Unity can instead calculate, store, and use an accurate direction from each Adaptive Probe Volume probe, and take bounce lighting into account. This makes sky occlusion more accurate, especially in areas like caves where probes don't have a direct line of sight to the sky, or when the sky has contrasting colors and the light comes from a specific direction such as through a window. + +To enable this feature, in the **Adaptive Probe Volumes** of the Lighting window, enable **Sky Direction**. 
+ +If you enable **Sky Direction**, the following applies: + +- Baking takes longer and Unity uses more memory at runtime. +- There might be visible seams, because Unity doesn't interpolate sky direction data between probes. + +To override the directions Unity uses, use a [Probe Adjustment Volume component](probevolumes-adjustment-volume-component-reference.md). + +## Additional resources + +- [Adaptive Probe Volumes panel properties](probevolumes-lighting-panel-reference.md#sky-occlusion-settings) for more information about sky occlusion settings +- [Rendering Debugger](rendering-debugger-window-reference.md#probe-volume-panel) for information about displaying baked sky occlusion data diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-understand-changing-lighting-at-runtime.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-understand-changing-lighting-at-runtime.md new file mode 100644 index 00000000000..fd365d611e4 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-understand-changing-lighting-at-runtime.md @@ -0,0 +1,19 @@ +# Choose how to change lighting at runtime + +You can change how objects use the baked data in Adaptive Probe Volumes, to create lighting that changes at runtime. For example, you can turn the lights on and off in a scene, or change the time of day. + +You can use one of the following processes: + +- [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md), for example you can bake a Lighting Scenario for each stage in a day-night cycle. +- [Update light from the sky at runtime with sky occlusion](probevolumes-skyocclusion.md). + +Lighting Scenarios have the following advantages: + +- Lighting Scenarios are more accurate. Lighting Scenarios don't approximate the light from the sky, or the color of objects that light bounces off. 
+- Lighting Scenarios store all the lighting in a scene, so you can update light from both the sky and scene lights. + +Sky occlusion has the following advantages: + +- Easier to set up. For example, you only need to bake once to set up the data you need for a day-night cycle. +- Better performance. +- Faster and smoother transitions, because sky occlusion doesn't have to blend between different sets of data. diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-use.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-use.md index cbe021e9a1c..32d2ad96e3b 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-use.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes-use.md @@ -56,6 +56,6 @@ You can use the following to configure an Adaptive Probe Volume: ## Additional resources - [Bake multiple scenes together with Baking Sets](probevolumes-usebakingsets.md) -- [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) +- [Change lighting at runtime](change-lighting-at-runtime.md) - [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) - [Work with multiple Scenes in Unity](https://docs.unity3d.com/Documentation/Manual/MultiSceneEditing.html) diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes.md index 4519a066e06..6b2b9d8c9e7 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/probevolumes.md @@ -9,9 +9,8 @@ Adaptive Probe Volumes (APV) make [Light Probes](https://docs.unity3d.com/Manual | [Display Adaptive Probe Volumes](probevolumes-showandadjust.md) | Visualize the structure of Adaptive Probe Volumes. 
| | [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md) | Change the size of an Adaptive Probe Volume, or increase the density of Light Probes. | | [Bake multiple scenes together with Baking Sets](probevolumes-usebakingsets.md) | Add scenes to a Baking Set so you can bake the lighting for all the scenes together. | -| [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) | Use multiple Lighting Scenarios to store baking results for different scene setups, and switch between them at runtime. | +| [Change lighting at runtime](change-lighting-at-runtime.md) | Use Lighting Scenarios or sky occlusion to change how objects use the data in Adaptive Probe Volumes at runtime. | | [Streaming Adaptive Probe Volumes](probevolumes-streaming.md) | How Adaptive Probe Volumes stream lighting data to provide lighting for large open worlds. | -| [Sky Occlusion](probevolumes-skyocclusion.md) | How to use Sky Occlusion with Adaptive Probe Volumes for dynamic sky lighting. | | [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) | Reduce light leaks and seams in your lighting result. | | [Adaptive Probe Volume Inspector window reference](probevolumes-inspector-reference.md) | Reference for the Adaptive Probe Volume Inspector window. | | [Adaptive Probe Volumes panel reference](probevolumes-lighting-panel-reference.md) | Reference for the Adaptive Probe Volumes panel in the Lighting settings. 
| diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/reduce-rendering-work-on-cpu.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/reduce-rendering-work-on-cpu.md new file mode 100644 index 00000000000..31271cd9021 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/reduce-rendering-work-on-cpu.md @@ -0,0 +1,13 @@ +# Reduce rendering work on the CPU + +You can use the GPU Resident Drawer or GPU occlusion culling to speed up rendering. When you enable these features, Unity optimizes the rendering pipeline so the CPU has less work to do each frame, and the GPU draws GameObjects more efficiently. + +|Page|Description| +|-|-| +|[Use the GPU Resident Drawer](gpu-resident-drawer.md)|Automatically use the `BatchRendererGroup` API to use instancing and reduce the number of draw calls.| +|[Make a GameObject compatible with the GPU Resident Drawer](make-object-compatible-gpu-rendering.md)|Include or exclude a GameObject from the GPU Resident Drawer.| +|[Use GPU occlusion culling](gpu-culling.md)|Use the GPU instead of the CPU to exclude GameObjects from rendering when they're occluded behind other GameObjects.| + +## Additional resources + +- [Graphics performance fundamentals](https://docs.unity3d.com/Manual/OptimizingGraphicsPerformance.html) diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/reference-light-component.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/reference-light-component.md index 10a11d0d08b..eb941d397ac 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/reference-light-component.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/reference-light-component.md @@ -1,6 +1,6 @@ ## Light component reference -The properties available for Lights are in separate sections. 
Each section contains some properties that all Lights share, and also properties that customize the behavior of the specific type of Light. These sections also contain [additional properties](expose-all-additional-properties.md) that you can expose if you want to fine-tune your light's behavior. The sections are: +The properties available for Lights are in separate sections. Each section contains some properties that all Lights share, and also properties that customize the behavior of the specific type of Light. These sections also contain [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) that you can expose if you want to fine-tune your light's behavior. The sections are: - [General](#General) - [Shape](#Shape) @@ -23,7 +23,7 @@ To make the Light work with the **Animation window**, when you click on the **Ad | ------------------------ | ------------------------------------------------------------ | | **Type** | Defines the Light’s type. Lights of different Types behave differently, so when you change the **Type**, the properties change in the Inspector. Possible types are:
    • Directional
    • Point
    • Spot
    • Area | | **Mode** | Specify the [Light Mode](https://docs.unity3d.com/Manual/LightModes.html) that HDRP uses to determine how to bake a Light, if at all. Possible modes are:
    • [Realtime](https://docs.unity3d.com/Manual/LightMode-Realtime.html): Unity performs the lighting calculations for Realtime Lights at runtime, once per frame.
    • [Mixed](https://docs.unity3d.com/Manual/LightMode-Mixed.html): Mixed Lights combine elements of both realtime and baked lighting.
    • [Baked](https://docs.unity3d.com/Manual/LightMode-Baked.html): Unity performs lighting calculations for Baked Lights in the Unity Editor, and saves the results to disk as lighting data. Note that soft falloff/range attenuation isn't supported for Baked Area Lights. | -| **Rendering Layer Mask** | Defines which Rendering Layers this Light affects. The affected Light only lights up Mesh Renderers or Terrain with a matching **Rendering Layer Mask**. To use this property:
    • Set up [light layers](Rendering-Layers.md) in your project.
    • Enable [additional properties](expose-all-additional-properties.md) for this section. | +| **Rendering Layer Mask** | Defines which Rendering Layers this Light affects. The affected Light only lights up Mesh Renderers or Terrain with a matching **Rendering Layer Mask**. To use this property:
    • Set up [light layers](Rendering-Layers.md) in your project.
    • Enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. | #### Light Types guide @@ -106,7 +106,7 @@ These settings define the behavior of the light when you use it as a celestial b ### Emission -These settings define the emissive behavior of your Light. You can set the Light’s color, strength, and maximum range. If you don't see these properties in the Light Inspector, make sure you enable [additional properties](expose-all-additional-properties.md). Most Lights share **Emission** properties. Below are the list of properties that more than one Light **Type** share, followed by unique properties only available for a single Light **Type**. +These settings define the emissive behavior of your Light. You can set the Light’s color, strength, and maximum range. If you don't see these properties in the Light Inspector, make sure you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html). Most Lights share **Emission** properties. Below are the list of properties that more than one Light **Type** share, followed by unique properties only available for a single Light **Type**. #### Shared Properties @@ -122,13 +122,13 @@ These settings define the emissive behavior of your Light. You can set the Light | **Cookie** | An RGB Texture that the Light projects. For example, to create silhouettes or patterned illumination for the Light. Texture shapes should be 2D for Spot and Directional Lights and Cube for Point Lights. Always import **Cookie** textures as the default texture type. This property is available for **Spot**, **Area** (Rectangular only), **Directional**, and **Point** Lights.
    Pyramid and Box lights will use an implicit 4x4 white cookie if none is specified. | | **IES Profile** | An IES File that describes the light profile. HDRP uses a linear average of a cookie and an IES profile in your scene. If you use an IES profile and a cookie at the same time during light baking, the Light in your scene only uses the cookie. You can't assign an IES file with code. Instead, use the **Cookie** property with the Textures that IES generates. | | **IES cutoff angle (%)** | Cut off of the IES Profile, as a percentage of the Outer angle. During a baking of a lightmap this parameter isn't used. | -| **Affect Diffuse** | Enable the checkbox to apply [diffuse]() lighting to this Light.
    This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. It's only available in Realtime or Mixed light **Mode**. | -| **Affect Specular** | Enable the checkbox to apply [specular](https://docs.unity3d.com/Manual/shader-NormalSpecular.html) lighting to this Light.
    This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. It's only available in Realtime or Mixed light **Mode**. | -| **Range Attenuation** | Enable the checkbox to make this Light shine uniformly across its range. This stops light from fading around the edges. This setting is useful when the range limit isn't visible on screen, and you don't want the edges of your light to fade out. This property is available for all **Light Types** except **Directional**.
    This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. It's only available in Realtime or Mixed light **Mode** for **Type** Area. | +| **Affect Diffuse** | Enable the checkbox to apply [diffuse]() lighting to this Light.
    This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. It's only available in Realtime or Mixed light **Mode**. | +| **Affect Specular** | Enable the checkbox to apply [specular](https://docs.unity3d.com/Manual/shader-NormalSpecular.html) lighting to this Light.
    This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. It's only available in Realtime or Mixed light **Mode**. | +| **Range Attenuation** | Enable the checkbox to make this Light shine uniformly across its range. This stops light from fading around the edges. This setting is useful when the range limit isn't visible on screen, and you don't want the edges of your light to fade out. This property is available for all **Light Types** except **Directional**.
    This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. It's only available in Realtime or Mixed light **Mode** for **Type** Area. | | **Fade Distance** | The distance between the Light source and the Camera at which the Light begins to fade out. Measured in meters. This property is available for all **Light Types** except **Directional**.
    This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. It's only available in Realtime or Mixed light **Mode**. | -| **Intensity Multiplier** | A multiplier that gets applied to the intensity of the Light. Doesn't affect the intensity value, but only gets applied during the evaluation of the lighting. You can also modify this property via [Timeline](https://docs.unity3d.com/Manual/TimelineSection.html), Scripting or [animation](https://docs.unity3d.com/Manual/animeditor-AnimatingAGameObject.html). The parameter lets you fade the Light in and out without having to store its original intensity.
    This property does not affect the [Physically Based Sky](physically-based-sky-volume-override-reference.html) rendering for the main directionnal light.
    This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. It's only available in Realtime or Mixed light **Mode**. | -| **Display Emissive Mesh** | Enable the checkbox to make Unity automatically generate a Mesh with an emissive Material using the size, color, and intensity of this Light. Unity automatically adds the Mesh and Material to the GameObject the Light component is attached to. This property is available for **Rectangle** and **Tube** Lights.
    This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. (In case of an IES profile and a cookie used at the same time, only the cookie will be displayed). | -| **Include For Ray Tracing** | Enable the checkbox to make this Light active when you enable the **Ray Tracing** [Frame Setting](Frame-Settings.md) on the Camera. This applies to rasterization and [ray tracing](Ray-Tracing-Getting-Started.md) passes.
    This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. It's only available in Realtime or Mixed light **Mode**. | +| **Intensity Multiplier** | A multiplier that gets applied to the intensity of the Light. Doesn't affect the intensity value, but only gets applied during the evaluation of the lighting. You can also modify this property via [Timeline](https://docs.unity3d.com/Manual/TimelineSection.html), Scripting or [animation](https://docs.unity3d.com/Manual/animeditor-AnimatingAGameObject.html). The parameter lets you fade the Light in and out without having to store its original intensity.
    This property does not affect the [Physically Based Sky](physically-based-sky-volume-override-reference.html) rendering for the main directional light.
    This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. It's only available in Realtime or Mixed light **Mode**. | +| **Display Emissive Mesh** | Enable the checkbox to make Unity automatically generate a Mesh with an emissive Material using the size, color, and intensity of this Light. Unity automatically adds the Mesh and Material to the GameObject the Light component is attached to. This property is available for **Rectangle** and **Tube** Lights.
    This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. (In case of an IES profile and a cookie used at the same time, only the cookie will be displayed). | +| **Include For Ray Tracing** | Enable the checkbox to make this Light active when you enable the **Ray Tracing** [Frame Setting](Frame-Settings.md) on the Camera. This applies to rasterization and [ray tracing](Ray-Tracing-Getting-Started.md) passes.
    This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. It's only available in Realtime or Mixed light **Mode**. | | **Include For Path Tracing** | Enable the checkbox to make this Light active when [Path Tracing](Ray-Tracing-Path-Tracing.md) is enabled. | #### Spot Light @@ -183,21 +183,21 @@ This section is only available in Realtime or Mixed light **Mode**. | **Resolution** | Set the resolution of this Light’s shadow maps. Use the drop-down to select which quality mode to derive the resolution from. If you don't enable **Use Quality Settings**, or you select **Custom**, set the resolution, measured in pixels, in the input field.
    A higher resolution increases the fidelity of shadows at the cost of GPU performance and memory usage, so if you experience any performance issues, try using a lower value. Shadows can be turned off by setting the resolution to 0. | | **Near Plane** | The distance, in meters, from the Light that GameObjects begin to cast shadows. | | **Shadowmask Mode** | Defines how the shadowmask behaves for this Light. For detailed information on each **Shadowmask Mode**, see the documentation on [Shadowmasks](Lighting-Mode-Shadowmask.md). This property is only visible if you tet the **Mode**, under [General](#general), to **Mixed**. | -| **Slope-Scale Depth Bias** | Use the slider to set the bias that HDRP adds to the distance in this Light's shadow map to avoid self intersection. This bias is proportional to the slope of the polygons represented in the shadow map.
    This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. | -| **Normal Bias** | Controls the amount of normal [bias](https://docs.unity3d.com/Manual/ShadowOverview.html#LightBias) this Light applies along the [normal](https://docs.unity3d.com/Manual/AnatomyofaMesh.html) of the illuminated surface.
    This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. | -| **Custom Spot Angle** | Enable the checkbox to use a custom angle to render shadow maps with.
    This property only appears if you select **Spot** from the **Type** drop-down and enable [additional properties](expose-all-additional-properties.md) for this section. | -| **Shadow Angle** | Use the slider to set a custom angle to use for shadow map rendering.
    This property only appears if you enable **Custom Spot Angle** and enable [additional properties](expose-all-additional-properties.md) for this section. | +| **Slope-Scale Depth Bias** | Use the slider to set the bias that HDRP adds to the distance in this Light's shadow map to avoid self intersection. This bias is proportional to the slope of the polygons represented in the shadow map.
    This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. | +| **Normal Bias** | Controls the amount of normal [bias](https://docs.unity3d.com/Manual/ShadowOverview.html#LightBias) this Light applies along the [normal](https://docs.unity3d.com/Manual/AnatomyofaMesh.html) of the illuminated surface.
    This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. | +| **Custom Spot Angle** | Enable the checkbox to use a custom angle to render shadow maps with.
    This property only appears if you select **Spot** from the **Type** drop-down and enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. | +| **Shadow Angle** | Use the slider to set a custom angle to use for shadow map rendering.
    This property only appears if you enable **Custom Spot Angle** and enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. | | **Shadow Cone** | Use the slider to set the aperture of the shadow cone this area Light uses for shadowing. This property only appears if you select **Rectangle** from the **Type** drop-down. | -| **EVSM Exponent** | Use the slider to set the exponent this area Light uses for depth warping. [EVSM](Glossary.md#ExponentialVarianceShadowMap) modifies its shadow distribution representation by this exponent. Increase this value to reduce light leaking and change the appearance of the shadow. This property only appears if you select **Rectangle** from the **Type** drop-down and enable [additional properties](expose-all-additional-properties.md) for this section. | +| **EVSM Exponent** | Use the slider to set the exponent this area Light uses for depth warping. [EVSM](Glossary.md#ExponentialVarianceShadowMap) modifies its shadow distribution representation by this exponent. Increase this value to reduce light leaking and change the appearance of the shadow. This property only appears if you select **Rectangle** from the **Type** drop-down and enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. | | **Light Leak Bias** | Use this slider to set the bias that HDRP uses to prevent light leaking through Scene geometry. Increasing this value prevents light leaks, but removes some of the shadow softness. This property only appears if you select **Rectangle** from the **Type** drop-down and enable [additional properties](expose-all-additional-properties.md) for this section. | -| **Variance Bias** | Use the slider to fix numerical accuracy issues in the [EVSM](Glossary.md#ExponentialVarianceShadowMap). 
This property only appears if you select **Rectangle** from the **Type** drop-down and enable [additional properties](expose-all-additional-properties.md) for this section. | -| **Blur Passes** | Use the slider to set the number of blur passes HDRP performs on this shadow map. Increasing this value softens shadows, but impacts performance. This property only appears if you select **Rectangle** from the **Type** drop-down and enable [additional properties](expose-all-additional-properties.md) for this section. | -| **Dimmer** | Dims the shadows this Light casts so they become more faded and transparent.
    This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. | -| **Tint** | Specifies whether HDRP should tint the shadows this Light casts. This option affects dynamic shadows, [Contact Shadows](Override-Contact-Shadows.md), and [ShadowMask](Lighting-Mode-Shadowmask.md). It doesn't affect baked shadows. You can use this behavior to change the color and transparency of shadows.
    This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. | -| **Penumbra Tint** | Specifies whether the tint should only affect the shadow's penumbra. If you enable this property, HDRP only applies the color tint to the shadow's penumbra. If you disable this property, HDRP applies the color tint to the entire shadow including the penumbra. To change the color HDRP tints the shadow to, see the above **Tint** property.
    This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. | +| **Variance Bias** | Use the slider to fix numerical accuracy issues in the [EVSM](Glossary.md#ExponentialVarianceShadowMap). This property only appears if you select **Rectangle** from the **Type** drop-down and enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. | +| **Blur Passes** | Use the slider to set the number of blur passes HDRP performs on this shadow map. Increasing this value softens shadows, but impacts performance. This property only appears if you select **Rectangle** from the **Type** drop-down and enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. | +| **Dimmer** | Dims the shadows this Light casts so they become more faded and transparent.
    This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. | +| **Tint** | Specifies whether HDRP should tint the shadows this Light casts. This option affects dynamic shadows, [Contact Shadows](Override-Contact-Shadows.md), and [ShadowMask](Lighting-Mode-Shadowmask.md). It doesn't affect baked shadows. You can use this behavior to change the color and transparency of shadows.
    This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. | +| **Penumbra Tint** | Specifies whether the tint should only affect the shadow's penumbra. If you enable this property, HDRP only applies the color tint to the shadow's penumbra. If you disable this property, HDRP applies the color tint to the entire shadow including the penumbra. To change the color HDRP tints the shadow to, see the above **Tint** property.
    This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. | | **Fade Distance** | The distance, in meters, between the Camera and the Light at which shadows fade out. This property is available for **Spot** and **Point** Lights.
    This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. | -| **Custom Shadow Layers** | Enable the checkbox to use a different [Rendering Layer Mask](Rendering-Layers.md) for shadows than the one used for lighting. If you enable this feature, then HDRP uses the **Shadow Layers** drop-down in this section for shadowing. If you disable it, then HDRP uses the **Rendering Layer Mask** drop-down in the **General** section for shadowing.
    This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. To access this property, enable **Light Layers** in your [HDRP Asset](HDRP-Asset.md). | -| **Shadow Layers** | Use the drop-down to set the [Rendering Layer Mask](Rendering-Layers.md) HDRP uses for shadowing. This Light therefore only casts shadows for GameObjects that use a matching Rendering Layer. For more information about using Rendering Layers for shadowing, see [Shadow Light Layers](Rendering-Layers.md#ShadowLightLayers).
    This property only appears when you enable [additional properties](expose-all-additional-properties.md) for this section. To access this property, enable the **Custom Shadow Layers** checkbox. | +| **Custom Shadow Layers** | Enable the checkbox to use a different [Rendering Layer Mask](Rendering-Layers.md) for shadows than the one used for lighting. If you enable this feature, then HDRP uses the **Shadow Layers** drop-down in this section for shadowing. If you disable it, then HDRP uses the **Rendering Layer Mask** drop-down in the **General** section for shadowing.
    This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. To access this property, enable **Light Layers** in your [HDRP Asset](HDRP-Asset.md). | +| **Shadow Layers** | Use the drop-down to set the [Rendering Layer Mask](Rendering-Layers.md) HDRP uses for shadowing. This Light therefore only casts shadows for GameObjects that use a matching Rendering Layer. For more information about using Rendering Layers for shadowing, see [Shadow Light Layers](Rendering-Layers.md#ShadowLightLayers).
    This property only appears when you enable [advanced properties](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest?subfolder=/manual/advanced-properties.html) for this section. To access this property, enable the **Custom Shadow Layers** checkbox. | ##### Contact Shadows diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/rendering-debugger-window-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/rendering-debugger-window-reference.md index 4a2fab735f0..cd7f7bb5eba 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/rendering-debugger-window-reference.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/rendering-debugger-window-reference.md @@ -12,6 +12,7 @@ The Rendering Debugger separates debug items into the following sections: * [Probe Volume](#ProbeVolume) * [Camera](#CameraPanel) * [Virtual Texturing](#VirtualTexturingPanel) +* [GPU Resident Drawer](#GPUResidentDrawer) Refer to [Use the Rendering debugger](use-the-rendering-debugger.md) for more information. @@ -494,8 +495,8 @@ These settings make it possible for you to visualize [Adaptive Probe Volumes](pr | **Property** | **Sub-property** | **Description** | |-|-|-| -| **Display Cells** || Display cells. Refer to [Understanding Adaptive Probe Volumes](../probevolumes-concept.md) for more information. | -| **Display Bricks** || Display bricks. Refer to [Understanding Adaptive Probe Volumes](../probevolumes-concept.md) for more information. | +| **Display Cells** || Display cells. Refer to [Understanding Adaptive Probe Volumes](probevolumes-concept.md) for more information. | +| **Display Bricks** || Display bricks. Refer to [Understanding Adaptive Probe Volumes](probevolumes-concept.md) for more information. | | **Live Subdivision Preview** || Enable a preview of Adaptive Probe Volume data in the scene without baking. This might make the Editor slower. 
This setting appears only if you select **Display Cells** or **Display Bricks**. | || **Cell Updates Per Frame** | Set the number of cells, bricks, and probe positions to update per frame. Higher values might make the Editor slower. The default value is 4. This property appears only if you enable **Live Subdivision Preview**. | || **Update Frequency** | Set how frequently Unity updates cell, bricks, and probe positions, in seconds. The default value is 1. This property appears only if you enable **Live Subdivision Preview**. | @@ -506,7 +507,7 @@ These settings make it possible for you to visualize [Adaptive Probe Volumes](pr | **Property** | **Sub-property** | **Description** | |-|-|-| | **Display Probes** || Display probes. | -|| **Probe Shading Mode** | Set what the Rendering Debugger displays. The options are:
    • SH: Display the [spherical harmonics (SH) lighting data](https://docs.unity3d.com/Manual/LightProbes-TechnicalInformation.html) for the final color calculation. The number of bands depends on the **SH Bands** setting in the active [HDRP Asset](HDRP-Asset.md).
    • SHL0: Display the spherical harmonics (SH) lighting data with only the first band.
    • SHL0L1: Display the spherical Harmonics (SH) lighting data with the first two bands.
    • Validity: Display whether probes are valid, based on the number of backfaces the probe samples. Refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) for more information about probe validity.
    • Probe Validity Over Dilation Threshold: Display red if a probe samples too many backfaces, based on the **Validity Threshold** set in the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md). This means the probe can't be baked or sampled.
    • Invalidated By Touchup Volumes: Display probes that a [Probe Adjustment Volume component](probevolumes-adjustment-volume-component-reference.md) has made invalid.
    • Size: Display a different color for each size of [brick](probevolumes-concept.md).
    | +|| **Probe Shading Mode** | Set what the Rendering Debugger displays. The options are:
    • SH: Display the [spherical harmonics (SH) lighting data](https://docs.unity3d.com/Manual/LightProbes-TechnicalInformation.html) for the final color calculation. The number of bands depends on the **SH Bands** setting in the active [HDRP Asset](HDRP-Asset.md).
    • SHL0: Display the spherical harmonics (SH) lighting data with only the first band.
    • SHL0L1: Display the spherical Harmonics (SH) lighting data with the first two bands.
    • Validity: Display whether probes are valid, based on the number of backfaces the probe samples. Refer to [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) for more information about probe validity.
    • Probe Validity Over Dilation Threshold: Display red if a probe samples too many backfaces, based on the **Validity Threshold** set in the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md). This means the probe can't be baked or sampled.
    • Invalidated By Touchup Volumes: Display probes that a [Probe Adjustment Volume component](probevolumes-adjustment-volume-component-reference.md) has made invalid.
    • Size: Display a different color for each size of [brick](probevolumes-concept.md).
    • Sky Occlusion SH: If you enable [sky occlusion](probevolumes-skyocclusion.md), this setting displays the amount of indirect light the probe receives from the sky that bounced off static GameObjects. The value is a scalar, so it displays as a shade of gray.
    • Sky Direction: Display a green circle that represents the direction from the probe to the sky. This setting displays a red circle if Unity can't calculate the direction, or **Sky Direction** in the [Adaptive Probe Volumes panel](probevolumes-lighting-panel-reference.md) is disabled.
    | || **Debug Size** | Set the size of the displayed probes. The default is 0.3. | || **Exposure Compensation** | Set the brightness of the displayed probes. Decrease the value to increase brightness. The default is 0. This property appears only if you set **Probe Shading Mode** to **SH**, **SHL0**, or **SHL0L1**. | || **Max Subdivisions Displayed** | Set the lowest probe density to display. For example, set this to 0 to display only the highest probe density. | @@ -517,6 +518,8 @@ These settings make it possible for you to visualize [Adaptive Probe Volumes](pr | **Virtual Offset Debug** || Display the offsets Unity applies to Light Probe capture positions. | || **Debug Size** | Set the size of the arrows that represent Virtual Offset values. | | **Debug Draw Distance** || Set how far from the scene camera Unity draws debug visuals for cells and bricks, in meters. The default is 200. | +| **Auto Display Probes** || Display probes in the Scene view, if you select a volume with a Probe Adjustment Volume component in the Hierarchy window. | +| **Isolate Affected** || Display only probes affected by a volume with a Probe Adjustment Volume component, if you select the volume in the Hierarchy window. | ### Streaming @@ -780,3 +783,62 @@ You can use the **Virtual Texturing** panel to visualize [Streaming Virtual Text | ------------------------------------ | ------------------------------------------------------------- | | **Debug disable Feedback Streaming** | Deactivate Streaming Virtual Texturing to quickly assess its cost in performance and memory at runtime. | | **Textures with Preloaded Mips** | Display the total number of virtual textures Unity has loaded into the scene. Unity tries to preload the least detailed mipmap level (least being 128x128) into GPU memory. This number increases every time a material is loaded. 
| + + + +## GPU Resident Drawer + +The properties in this section let you visualize settings that [reduce rendering work on the CPU](reduce-rendering-work-on-cpu.md). + +### Occlusion Culling + +|**Property**|**Sub-property**|**Description**| +|-|-|-| +| **Occlusion Test Overlay** || Display a heatmap of culled instances. The heatmap displays blue if there are few culled instances, through to red if there are many culled instances. If you enable this setting, culling might be slower. | +| **Occlusion Test Overlay Count Visible** || Display a heatmap of instances that Unity doesn't cull. The heatmap displays blue if there are many culled instances, through to red if there are few culled instances. This setting only has an effect if you enable **Occlusion Test Overlay**. | +| **Override Occlusion Test To Always Pass** || Set occluded objects as unoccluded. This setting affects both the Rendering Debugger and the scene. | +| **Occluder Context Stats** || Display the [**Occlusion Context Stats**](#occlusion-context-stats) section. | +| **Occluder Debug View** || Display an overlay with the occlusion textures and mipmaps Unity generates. | +|| **Occluder Debug View Index** | Set the occlusion texture to display. | +|| **Occluder Debug View Range Min** | Set the brightness of the minimum depth value. Increase this value to brighten objects that are far away from the view. | +|| **Occluder Debug View Range Max** | Set the brightness of the maximum depth value. Decrease this value to darken objects that are close to the view. | + +![](Images/renderingdebugger-gpuculling-heatmap.jpg)
    +The Rendering Debugger with **Occlusion Test Overlay** enabled. The red areas are where Unity culls many objects. The blue area is where Unity culls few objects. + +![](Images/renderingdebugger-gpuculling-overlay.jpg)
    +The Rendering Debugger with **Occluder Debug View** enabled. The overlay displays each mipmap level of the occlusion texture. + +### Occlusion Context Stats + +The **Occlusion Context Stats** section lists the occlusion textures Unity generates. + +|**Property**|**Description**| +|-|-| +| **Active Occlusion Contexts** | The number of occlusion textures. | +| **View Instance ID** | The instance ID of the camera Unity renders the view from, to create the occlusion texture. | +| **Subview Count** | The number of subviews. The value might be 2 or more if you use XR. | +| **Size Per Subview** | The size of the subview texture in bytes. | + +### GPU Resident Drawer Settings + +|**Section**|**Property**|**Sub-property**|**Description**| +|-|-|-|-| +|**Display Culling Stats**|||Display information about the cameras Unity uses to create occlusion textures.| +|**Instance Culler Stats**|||| +||**View Count**|| The number of views Unity uses for GPU culling. Unity uses one view per shadow cascade or shadow map. For example, Unity uses three views for a Directional Light that generates three shadow cascades. | +||**Per View Stats**||| +|||**View Type**| The object or shadow split Unity renders the view from. | +|||**View Instance ID**| The instance ID of the camera or light Unity renders the view from. | +|||**Split Index**| The shadow split index value. This value is 0 if the object doesn't have shadow splits. | +|||**Visible Instances**| How many objects are visible in this split. | +|||**Draw Commands**| How many draw commands Unity uses for this split. | +|**Occlusion Culling Events**|||| +||**View Instance ID**|| The instance ID of the camera Unity renders the view from. | +||**Event type**|| The type of render pass.
    • **OccluderUpdate**
    • The GPU samples the depth buffer and creates a new occlusion texture and its mipmap.
    • **OcclusionTest**
    • The GPU tests all the instances against the occlusion texture.
    | +||**Occluder Version**|| How many times Unity updates the occlusion texture in this frame. | +||**Subview Mask**|| A bitmask that represents which subviews are affected in this frame. | +||**Occlusion Test**|| Which test the GPU runs against the occlusion texture.
    • **TestNone**
    • Unity found no occluders, so all instances are visible.
    • **TestAll**: Unity tests all instances against the occlusion texture.
    • **TestCulled**: Unity tests only instances that the previous **TestAll** test culled.
    | +||**Visible Instances**|| The number of visible instances after occlusion culling. | +||**Culled Instances**|| The number of culled instances after occlusion culling. | + diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-scripting-in-the-water-system.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-scripting-in-the-water-system.md index d12bdf47b92..a9cc1502aa8 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-scripting-in-the-water-system.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/water-scripting-in-the-water-system.md @@ -207,15 +207,21 @@ public class FitToWaterSurface_Burst : MonoBehaviour ## Synchronizing Water Surfaces -When making a multiplayer game, it can be useful to ensure all clients have a water simulation that is running in sync. -You can achieve this by specifying the absolute time at which the simulation started by using the following API: +When working with multiple water surfaces, it can be useful to synchronize the water simulation of each of the surfaces. +In a multiplayer game, this can ensure all clients have a water simulation that is running in sync. 
+You can achieve this by using one of the two following APIs: ```cs -water.simulationStart = new DateTime(2008, 5, 1, 8, 30, 52); // HDRP will compute the water simulation as if the program started at that time +water.simulationStart = DateTime.Now; // HDRP will compute the water simulation as if the game just started +water.simulationTime = 0; // Set the exact simulation time in seconds ``` -Alternatively, if you have a reference water surface, you can make sure other existing surfaces are synchronized with this one by copying the start value: +Alternatively, if you have a reference water surface, you can make sure other existing surfaces are synchronized with this one by copying the simulation time value: ```cs water.simulationStart = referenceSurface.simulationStart; +water.simulationTime = referenceSurface.simulationTime; ``` + +The `simulationStart` API works with absolute time data, which simplifies synchronization when sending the value over the network, as you don't have to account for the latency. +Using the `simulationTime` API gives you direct access to the time value used to compute the result of the water simulation and is useful when synchronizing surfaces locally. 
diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Core/HDRenderPipelinePreferencesProvider.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Core/HDRenderPipelinePreferencesProvider.cs new file mode 100644 index 00000000000..965bbf77980 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Core/HDRenderPipelinePreferencesProvider.cs @@ -0,0 +1,42 @@ +using System.Collections.Generic; +using UnityEngine; +using UnityEngine.Rendering; +using UnityEngine.Rendering.HighDefinition; + +namespace UnityEditor.Rendering.HighDefinition.Core +{ + /// + /// Editor Preferences for HDRP + /// + [DisplayInfo(name = "High Definition Render Pipeline", order = 200)] + public class HDRenderPipelinePreferencesProvider : ICoreRenderPipelinePreferencesProvider + { + class Styles + { + public static readonly GUIContent matcapLabel = EditorGUIUtility.TrTextContent("MatCap Mode Default Values"); + public static readonly GUIContent matcapViewMixAlbedoLabel = EditorGUIUtility.TrTextContent("Mix Albedo", "Enable to make HDRP mix the albedo of the Material with its material capture."); + public static readonly GUIContent matcapViewScaleLabel = EditorGUIUtility.TrTextContent("Intensity Scale", "Set the intensity of the material capture. This increases the brightness of the Scene. This is useful if the albedo darkens the Scene considerably."); + } + + static List s_SearchKeywords = new() { "MatCap Mode", "Intensity scale", "Mix Albedo" }; + + /// + /// Keyworks for the preferences + /// + public List keywords => s_SearchKeywords; + + /// + /// UI for the preferences. 
+ /// + public void PreferenceGUI() + { + EditorGUILayout.LabelField(Styles.matcapLabel, EditorStyles.boldLabel); + EditorGUI.indentLevel++; + var matCapMode = HDRenderPipelinePreferences.matCapMode; + matCapMode.mixAlbedo.value = EditorGUILayout.Toggle(Styles.matcapViewMixAlbedoLabel, matCapMode.mixAlbedo.value); + if (matCapMode.mixAlbedo.value) + matCapMode.viewScale.value = EditorGUILayout.FloatField(Styles.matcapViewScaleLabel, matCapMode.viewScale.value); + EditorGUI.indentLevel--; + } + } +} diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Core/HDRenderPipelinePreferencesProvider.cs.meta b/Packages/com.unity.render-pipelines.high-definition/Editor/Core/HDRenderPipelinePreferencesProvider.cs.meta new file mode 100644 index 00000000000..12557ca5682 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Core/HDRenderPipelinePreferencesProvider.cs.meta @@ -0,0 +1,3 @@ +fileFormatVersion: 2 +guid: e2f32393e04740bab03f54007a3e4368 +timeCreated: 1711446846 \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/.buginfo b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/.buginfo new file mode 100644 index 00000000000..484b54b848d --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/.buginfo @@ -0,0 +1 @@ +area: Lighting diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/HDLightUI.ContextualMenu.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/HDLightUI.ContextualMenu.cs index 729f5deae4d..ad2db7cb421 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/HDLightUI.ContextualMenu.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/HDLightUI.ContextualMenu.cs @@ -32,7 +32,7 @@ static void ResetLight(MenuCommand menuCommand) light.cookie = null; } - [MenuItem("CONTEXT/Light/Show All Additional Properties...", 
false, 100)] + [MenuItem("CONTEXT/Light/Open Preferences > Graphics...", false, 100)] static void ShowAllAdditionalProperties(MenuCommand menuCommand) { CoreRenderPipelinePreferences.Open(); diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/HDLightUI.Skin.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/HDLightUI.Skin.cs index 2a3616fb0ca..28d22d66173 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/HDLightUI.Skin.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/HDLightUI.Skin.cs @@ -60,7 +60,7 @@ sealed class Styles public readonly GUIContent shapeRadiusDisc = EditorGUIUtility.TrTextContent("Radius", "Sets the radius of the Disc Light."); public readonly GUIContent barnDoorAngle = EditorGUIUtility.TrTextContent("Barn Door Angle", "Sets the angle of the Rectangle Light so that is behaves like a barn door."); public readonly GUIContent barnDoorLength = EditorGUIUtility.TrTextContent("Barn Door Length", "Sets the length for the barn door."); - public readonly GUIContent aspectRatioPyramid = EditorGUIUtility.TrTextContent("Aspect ratio", "Controls the aspect ration of the Pyramid Light's projection. A value of 1 results in a square."); + public readonly GUIContent aspectRatioPyramid = EditorGUIUtility.TrTextContent("Aspect Ratio", "Controls the aspect ratio of the Pyramid Light's projection. A value of 1 results in a square."); public readonly GUIContent shapeWidthBox = EditorGUIUtility.TrTextContent("Size X", "Sets the width of the Box Light."); public readonly GUIContent shapeHeightBox = EditorGUIUtility.TrTextContent("Size Y", "Sets the height of the Box Light."); public readonly GUIContent applyRangeAttenuation = EditorGUIUtility.TrTextContent("Range Attenuation", "Allows you to enable or disable range attenuation. 
Range attenuation is useful for indoor environments because you can avoid having to set up a large range for a Light to get correct inverse square attenuation that may leak out of the indoor environment."); @@ -139,7 +139,7 @@ sealed class Styles public readonly GUIContent evsmExponent = EditorGUIUtility.TrTextContent("EVSM Exponent", "Exponent used for depth warping. Increasing this could reduce light leak and result in a change in appearance of the shadow."); public readonly GUIContent evsmLightLeakBias = EditorGUIUtility.TrTextContent("Light Leak Bias", "Increasing this value light leaking, but it eats up a bit of the softness of the shadow."); public readonly GUIContent evsmVarianceBias = EditorGUIUtility.TrTextContent("Variance Bias", "Variance Bias for EVSM. This is to contrast numerical accuracy issues. "); - public readonly GUIContent evsmAdditionalBlurPasses = EditorGUIUtility.TrTextContent("Blur passes", "Increasing this will increase the softness of the shadow, but it will severely impact performance."); + public readonly GUIContent evsmAdditionalBlurPasses = EditorGUIUtility.TrTextContent("Blur Passes", "Increasing this will increase the softness of the shadow, but it will severely impact performance."); public readonly GUIContent dirLightPCSSMaxPenumbraSize = EditorGUIUtility.TrTextContent("Max Penumbra Size", "Maximum size (in world space) of PCSS shadow penumbra limiting blur filter kernel size, larger kernels may require more samples to avoid quality degradation."); public readonly GUIContent dirLightPCSSMaxSamplingDistance = EditorGUIUtility.TrTextContent("Max Sampling Distance", "Maximum distance (in world space) from the receiver PCSS shadow sampling occurs, lower to avoid light bleeding but may cause self-shadowing"); public readonly GUIContent dirLightPCSSMinFilterSizeTexels = EditorGUIUtility.TrTextContent("Min Filter", "Minimum filter size (in shadowmap texels) to avoid aliasing close to caster"); diff --git 
a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/HDLightUI.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/HDLightUI.cs index f8b6c76f32e..3dbe7df0822 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/HDLightUI.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/HDLightUI.cs @@ -130,15 +130,6 @@ internal static void UnregisterEditor(HDLightEditor editor) k_AdditionalPropertiesState.UnregisterEditor(editor); } - [SetAdditionalPropertiesVisibility] - internal static void SetAdditionalPropertiesVisibility(bool value) - { - if (value) - k_AdditionalPropertiesState.ShowAll(); - else - k_AdditionalPropertiesState.HideAll(); - } - static Func GetLightingSettingsOrDefaultsFallback; static HDLightUI() diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/Reflection/HDBakedReflectionSystem.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/Reflection/HDBakedReflectionSystem.cs index ebf9ee76495..a8016dff51a 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/Reflection/HDBakedReflectionSystem.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/Reflection/HDBakedReflectionSystem.cs @@ -5,6 +5,7 @@ using System.Reflection; using Unity.Collections.LowLevel.Unsafe; using UnityEditor.VersionControl; +using UnityEditor.SceneManagement; using UnityEngine; using UnityEngine.Rendering; using UnityEngine.Assertions; @@ -23,8 +24,8 @@ public struct ProbeBakingHash : CoreUnsafeUtils.IKeyGetter(bakedTexturePath); + Assert.IsNotNull(bakedTexture, "The baked texture was imported before, " + + "so it must exists in AssetDatabase"); + + probe.SetTexture(ProbeSettings.Mode.Baked, bakedTexture); + EditorUtility.SetDirty(probe); } AssetDatabase.StopAssetEditing(); } - // Import assets - AssetDatabase.StartAssetEditing(); - for (int i = 0; i < toBakeIndicesList.Count; ++i) - { - var index = 
toBakeIndicesList.GetUnchecked(i); - var instanceId = states[index].instanceID; - var probe = (HDProbe)EditorUtility.InstanceIDToObject(instanceId); - if (string.IsNullOrEmpty(probe.gameObject.scene.path)) - continue; - - var bakedTexturePath = HDBakingUtilities.GetBakedTextureFilePath(probe); - var bakedTexture = AssetDatabase.LoadAssetAtPath(bakedTexturePath); - Assert.IsNotNull(bakedTexture, "The baked texture was imported before, " + - "so it must exists in AssetDatabase"); - - probe.SetTexture(ProbeSettings.Mode.Baked, bakedTexture); - EditorUtility.SetDirty(probe); - } - AssetDatabase.StopAssetEditing(); // == 5. == @@ -680,7 +691,7 @@ internal static void RenderAndWriteToFile(HDProbe probe, string targetFile, Rend { Debug.Assert(probeRT.dimension == TextureDimension.Cube); - var positionSettings = ProbeCapturePositionSettings.ComputeFrom(probe, null); + var positionSettings = ProbeCapturePositionSettings.ComputeFrom(probe, null); HDRenderUtilities.Render(probe.settings, positionSettings, probeRT, out cameraSettings, out cameraPositionSettings, forceFlipY: true, @@ -798,27 +809,21 @@ static void ComputeProbeInstanceID(IEnumerable probes, HDProbeBakingSta } } - static void ComputeProbeSettingsHashes(IEnumerable probes, HDProbeBakingState* states) + static void ComputeProbeBakingHashes(IEnumerable probes, Hash128 allProbeDependencyHash, Hash128 reflectionBouncesHash, HDProbeBakingState* states) { var i = 0; foreach (var probe in probes) { var positionSettings = ProbeCapturePositionSettings.ComputeFrom(probe, null); var positionSettingsHash = positionSettings.ComputeHash(); - // TODO: make ProbeSettings and unmanaged type so its hash can be the hash of its memory var probeSettingsHash = probe.settings.ComputeHash(); - HashUtilities.AppendHash(ref positionSettingsHash, ref probeSettingsHash); - states[i].probeSettingsHash = probeSettingsHash; - ++i; - } - } - static void ComputeProbeBakingHashes(int count, Hash128 allProbeDependencyHash, HDProbeBakingState* 
states) - { - for (int i = 0; i < count; ++i) - { - states[i].probeBakingHash = states[i].probeSettingsHash; - HashUtilities.ComputeHash128(ref allProbeDependencyHash, ref states[i].probeBakingHash); + HashUtilities.AppendHash(ref positionSettingsHash, ref states[i].probeBakingHashNoBounce); + HashUtilities.AppendHash(ref allProbeDependencyHash, ref states[i].probeBakingHashNoBounce); + + states[i].probeBakingHash = states[i].probeBakingHashNoBounce; + HashUtilities.AppendHash(ref reflectionBouncesHash, ref states[i].probeBakingHash); + ++i; } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/Reflection/HDProbeUI.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/Reflection/HDProbeUI.cs index cc3959a6c77..9cc022bb7c6 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/Reflection/HDProbeUI.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/Reflection/HDProbeUI.cs @@ -33,14 +33,5 @@ internal static void UnregisterEditor(HDProbeEditor Graphics...", false, 100)] + [MenuItem("CONTEXT/PlanarReflectionProbe/Open Preferences > Graphics...", false, 700)] static void ShowAllAdditionalProperties(MenuCommand menuCommand) { CoreRenderPipelinePreferences.Open(); } - [MenuItem("CONTEXT/PlanarReflectionProbe/Show All Additional Properties...", false, 700)] - static void ShowAllAdditionalPropertiesPlanar(MenuCommand menuCommand) - { - CoreRenderPipelinePreferences.Open(); - } - #endregion protected override void OnEnable() diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/ScreenSpaceAmbientOcclusionEditor.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/ScreenSpaceAmbientOcclusionEditor.cs index 2d87e1110fc..9125ac10355 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/ScreenSpaceAmbientOcclusionEditor.cs +++ 
b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/ScreenSpaceAmbientOcclusionEditor.cs @@ -93,9 +93,9 @@ private static class Styles public static readonly GUIContent stepCount = EditorGUIUtility.TrTextContent("Step Count", "Number of steps to take along one signed direction during horizon search (this is the number of steps in positive and negative direction)."); public static readonly GUIContent tempAccum = EditorGUIUtility.TrTextContent("Temporal Accumulation", "Whether the results are accumulated over time or not. This can get better results cheaper, but it can lead to temporal artifacts. Requires Motion Vectors to be enabled."); public static readonly GUIContent directionCount = EditorGUIUtility.TrTextContent("Direction Count", "Number of directions searched for occlusion at each each pixel."); - public static readonly GUIContent blurSharpness = EditorGUIUtility.TrTextContent("Blur sharpness", "Modify the non-temporal blur to change how sharp features are preserved. Lower values blurrier/softer, higher values sharper but with risk of noise."); + public static readonly GUIContent blurSharpness = EditorGUIUtility.TrTextContent("Blur Sharpness", "Modify the non-temporal blur to change how sharp features are preserved. Lower values blurrier/softer, higher values sharper but with risk of noise."); public static readonly GUIContent bilateralAggressiveness = EditorGUIUtility.TrTextContent("Bilateral Aggressiveness", "Higher this value, the less lenient with depth differences the spatial filter is. 
Increase if for example noticing white halos where AO should be."); - public static readonly GUIContent ghostingReduction = EditorGUIUtility.TrTextContent("Ghosting reduction", "Moving this factor closer to 0 will increase the amount of accepted samples during temporal accumulation, increasing the ghosting, but reducing the temporal noise."); + public static readonly GUIContent ghostingReduction = EditorGUIUtility.TrTextContent("Ghosting Reduction", "Moving this factor closer to 0 will increase the amount of accepted samples during temporal accumulation, increasing the ghosting, but reducing the temporal noise."); public static readonly GUIContent bilateralUpsample = EditorGUIUtility.TrTextContent("Bilateral Upsample", "This upsample method preserves sharp edges better, however can result in visible aliasing and it is slightly more expensive."); } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/VolumetricClouds/VolumetricCloudsEditor.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/VolumetricClouds/VolumetricCloudsEditor.cs index 2d14886906e..3f39304ae27 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/VolumetricClouds/VolumetricCloudsEditor.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/VolumetricClouds/VolumetricCloudsEditor.cs @@ -177,7 +177,7 @@ public override void OnEnable() static public readonly GUIContent k_CloudMapTilingText = EditorGUIUtility.TrTextContent("Cloud Map Tiling", "Tiling (x,y) of the cloud map."); static public readonly GUIContent k_CloudMapOffsetText = EditorGUIUtility.TrTextContent("Cloud Map Offset", "Offset (x,y) of the cloud map."); static public readonly GUIContent k_GlobalHorizontalWindSpeedText = EditorGUIUtility.TrTextContent("Global Horizontal Wind Speed", "Sets the global horizontal wind speed in kilometers per hour.\nThis value can be relative to the Global Wind Speed defined in the Visual Environment."); - 
static public readonly GUIContent k_PerceptualBlending = EditorGUIUtility.TrTextContent("Perceptual Blending", "When enabled, the clouds will blend in a perceptual way with the environment. This may cause artifacts when the sky is over-exposed."); + static public readonly GUIContent k_PerceptualBlending = EditorGUIUtility.TrTextContent("Perceptual Blending", "When enabled, the clouds will blend in a perceptual way with the environment. This may cause artifacts when the sky is over-exposed.\nThis only works when MSAA is off."); void MicroDetailsSection() { diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/Decal/DecalProjectorEditor.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/Decal/DecalProjectorEditor.cs index 7855533c2f4..3ee2c8f6090 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/Decal/DecalProjectorEditor.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/Decal/DecalProjectorEditor.cs @@ -29,7 +29,7 @@ static Color fullColor static Color s_LastColor; static void UpdateColorsInHandlesIfRequired() { - Color c = HDRenderPipelinePreferences.decalGizmoColor; + Color c = DecalPreferences.decalGizmoColor; if (c != s_LastColor) { if (s_BoxHandle != null && !s_BoxHandle.Equals(null)) diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/RayTracing/.buginfo b/Packages/com.unity.render-pipelines.high-definition/Editor/RayTracing/.buginfo new file mode 100644 index 00000000000..484b54b848d --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/RayTracing/.buginfo @@ -0,0 +1 @@ +area: Lighting diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Camera/HDCameraUI.Rendering.Drawers.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Camera/HDCameraUI.Rendering.Drawers.cs index 0e5f483aebd..f81d4b7dcd0 100644 --- 
a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Camera/HDCameraUI.Rendering.Drawers.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Camera/HDCameraUI.Rendering.Drawers.cs @@ -92,15 +92,6 @@ internal static void UnregisterEditor(HDCameraEditor editor) k_AdditionalPropertiesState.UnregisterEditor(editor); } - [SetAdditionalPropertiesVisibility] - internal static void SetAdditionalPropertiesVisibility(bool value) - { - if (value) - k_AdditionalPropertiesState.ShowAll(); - else - k_AdditionalPropertiesState.HideAll(); - } - static void Draw_Rendering_Advanced(SerializedHDCamera p, Editor owner) { } @@ -207,7 +198,7 @@ static void Drawer_Draw_FSR2_Section(SerializedHDCamera p, Editor owner) p.fidelityFX2SuperResolutionQuality.intValue = (int)(object)v; } EditorGUI.indentLevel--; - } + } EditorGUILayout.PropertyField(p.fidelityFX2SuperResolutionUseCustomAttributes, Styles.FSR2UseCustomAttributes); if (p.fidelityFX2SuperResolutionUseCustomAttributes.boolValue) diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Camera/HDCameraUI.Rendering.Skin.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Camera/HDCameraUI.Rendering.Skin.cs index a1a013df3f1..f685fb3f625 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Camera/HDCameraUI.Rendering.Skin.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Camera/HDCameraUI.Rendering.Skin.cs @@ -23,7 +23,7 @@ class Styles public static readonly GUIContent TAASharpeningMode = EditorGUIUtility.TrTextContent("Sharpening Mode", "Low quality is fast, but is prone to artifact and sub-optimal results, PostSharpen is more expensive, but leads to higher quality sharpening. 
Finally CAS will also be of higher quality than Low Quality option, offering strong sharpening but limited control."); public static readonly GUIContent TAAAntiRinging = EditorGUIUtility.TrTextContent("Anti-ringing", "When enabled, ringing artifacts (dark or strangely saturated edges) caused by history sharpening will be improved. This comes at a potential loss of sharpness upon motion."); // Advanced TAA - public static readonly GUIContent TAABaseBlendFactor = EditorGUIUtility.TrTextContent("Base blend factor", "Determines how much the history buffer is blended together with current frame result. Higher values means more history contribution, which leads to better anti aliasing, but also more prone to ghosting."); + public static readonly GUIContent TAABaseBlendFactor = EditorGUIUtility.TrTextContent("Base Blend Factor", "Determines how much the history buffer is blended together with current frame result. Higher values means more history contribution, which leads to better anti aliasing, but also more prone to ghosting."); public static readonly GUIContent TAAJitterScale = EditorGUIUtility.TrTextContent("Jitter Scale", "Determines the scale to the jitter applied when TAA is enabled. 
Lowering this value will lead to less visible flickering and jittering, but also will produce more aliased images."); public static readonly GUIContent renderingPath = EditorGUIUtility.TrTextContent("Custom Frame Settings", "Define custom values for Frame Settings for this Camera to use."); diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.Skin.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.Skin.cs index ce13c4a378d..5fc0c53d751 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.Skin.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.Skin.cs @@ -332,7 +332,7 @@ public class Styles public static readonly GUIContent rayTracingHalfResThreshold = EditorGUIUtility.TrTextContent("Ray Tracing Half Res Threshold", "The minimum percentage threshold allowed to render ray tracing effects at half resolution. 
When the resolution percentage falls below this threshold, HDRP will render ray tracing effects at full resolution."); public static readonly GUIContent lowResTransparentEnabled = EditorGUIUtility.TrTextContent("Enable", "When enabled, materials tagged as Low Res Transparent, will be rendered in a quarter res offscreen buffer and then composited to full res."); - public static readonly GUIContent checkerboardDepthBuffer = EditorGUIUtility.TrTextContent("Checkerboarded depth buffer downsample", "When enabled, the depth buffer used for low res transparency is generated in a min/max checkerboard pattern from original full res buffer."); + public static readonly GUIContent checkerboardDepthBuffer = EditorGUIUtility.TrTextContent("Checkerboarded Depth Buffer Downsample", "When enabled, the depth buffer used for low res transparency is generated in a min/max checkerboard pattern from original full res buffer."); public static readonly GUIContent lowResTranspUpsample = EditorGUIUtility.TrTextContent("Upsample type", "The type of upsampling filter used to composite the low resolution transparency."); public static readonly GUIContent XRSinglePass = EditorGUIUtility.TrTextContent("Single Pass", "When enabled, XR views are rendered simultaneously and the render loop is processed only once. 
This setting will improve CPU and GPU performance but will use more GPU memory."); diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Sky/HDLightingWindowEnvironmentSection.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Sky/HDLightingWindowEnvironmentSection.cs index 3e8ac5b2c3f..3a38e9109e0 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Sky/HDLightingWindowEnvironmentSection.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Sky/HDLightingWindowEnvironmentSection.cs @@ -34,6 +34,8 @@ class SerializedStaticLightingSky public SerializedProperty skyUniqueID; public SerializedProperty cloudUniqueID; public SerializedProperty volumetricCloudsToggle; + public SerializedProperty numberOfBounces; + public VolumeProfile volumeProfile { get => (serializedObject.targetObject as StaticLightingSky).profile; @@ -46,6 +48,7 @@ public SerializedStaticLightingSky(StaticLightingSky staticLightingSky) skyUniqueID = serializedObject.FindProperty("m_StaticLightingSkyUniqueID"); cloudUniqueID = serializedObject.FindProperty("m_StaticLightingCloudsUniqueID"); volumetricCloudsToggle = serializedObject.FindProperty("m_StaticLightingVolumetricClouds"); + numberOfBounces = serializedObject.FindProperty("bounces"); } public void Apply() => serializedObject.ApplyModifiedProperties(); @@ -94,7 +97,7 @@ public override void OnDisable() void OnActiveSceneChange(Scene current, Scene next) => m_SerializedActiveSceneLightingSky = new SerializedStaticLightingSky(GetStaticLightingSkyForScene(next)); - StaticLightingSky GetStaticLightingSkyForScene(Scene scene) + static internal StaticLightingSky GetStaticLightingSkyForScene(Scene scene) { StaticLightingSky result = null; foreach (var go in scene.GetRootGameObjects()) @@ -205,6 +208,12 @@ void DrawGUI() EditorGUILayout.PropertyField(m_SerializedActiveSceneLightingSky.volumetricCloudsToggle, EditorGUIUtility.TrTextContent("Static Lighting Volumetric Clouds", "Specify if 
volumetric clouds should be used for static ambient in the referenced profile for active scene.")); } + EditorGUILayout.Space(); + + EditorGUILayout.LabelField("Reflection Probes"); + using (new EditorGUI.IndentLevelScope()) + EditorGUILayout.PropertyField(m_SerializedActiveSceneLightingSky.numberOfBounces); + --EditorGUI.indentLevel; } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Sky/VisualEnvironmentEditor.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Sky/VisualEnvironmentEditor.cs index 1120cc94e8b..2c052cc518b 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Sky/VisualEnvironmentEditor.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Sky/VisualEnvironmentEditor.cs @@ -130,13 +130,13 @@ public override void OnInspectorGUI() // Sky UpdateSkyAndFogIntPopupData(); - using (var scope = new OverridablePropertyScope(m_SkyType, EditorGUIUtility.TrTextContent("Sky type", "Specifies the type of sky this Volume uses."), this)) + using (var scope = new OverridablePropertyScope(m_SkyType, EditorGUIUtility.TrTextContent("Sky Type", "Specifies the type of sky this Volume uses."), this)) { if (scope.displayed) EditorGUILayout.IntPopup(m_SkyType.value, m_SkyClassNames.ToArray(), m_SkyUniqueIDs.ToArray(), scope.label); } - using (var scope = new OverridablePropertyScope(m_CloudType, EditorGUIUtility.TrTextContent("Background clouds", "Specifies the type of background cloud this Volume uses."), this)) + using (var scope = new OverridablePropertyScope(m_CloudType, EditorGUIUtility.TrTextContent("Background Clouds", "Specifies the type of background cloud this Volume uses."), this)) { if (scope.displayed) EditorGUILayout.IntPopup(m_CloudType.value, m_CloudClassNames.ToArray(), m_CloudUniqueIDs.ToArray(), scope.label); diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDecalHDRPOutput.cs 
b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDecalHDRPOutput.cs index ace3973d0bc..da95e5c8d98 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDecalHDRPOutput.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDecalHDRPOutput.cs @@ -8,13 +8,10 @@ namespace UnityEditor.VFX.HDRP { - [VFXInfo(name = "Output Particle HDRP Lit Decal", category = "Output")] + [VFXInfo(name = "Output Particle|HDRP Lit|Decal", category = "#4Output Advanced")] class VFXDecalHDRPOutput : VFXAbstractParticleHDRPOutput { - public override string name - { - get { return "Output Particle HDRP Lit Decal"; } - } + public override string name => "Output Particle".AppendLabel("HDRP Lit", false) + "\nDecal"; public override string codeGeneratorTemplate { diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDistortionMeshOutput.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDistortionMeshOutput.cs index a8858162afb..84a73d6c668 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDistortionMeshOutput.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDistortionMeshOutput.cs @@ -5,10 +5,10 @@ namespace UnityEditor.VFX.HDRP { - [VFXInfo(name = "Output Particle HDRP Distortion Mesh", category = "Output")] + [VFXInfo(name = "Output Particle|HDRP Distortion|Mesh", category = "#4Output Advanced")] class VFXDistortionMeshOutput : VFXAbstractDistortionOutput { - public override string name { get { return "Output Particle HDRP Distortion Mesh"; } } + public override string name => "Output Particle".AppendLabel("HDRP Distortion", false) + "\nMesh"; public override string codeGeneratorTemplate { get { return RenderPipeTemplate("VFXParticleDistortionMesh"); } } public override VFXTaskType taskType { get { return VFXTaskType.ParticleMeshOutput; } } 
public override bool supportsUV { get { return true; } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDistortionPlanarPrimitiveOutput.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDistortionPlanarPrimitiveOutput.cs index cbb761960ed..8b86139f8da 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDistortionPlanarPrimitiveOutput.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDistortionPlanarPrimitiveOutput.cs @@ -1,20 +1,30 @@ using System; using System.Collections.Generic; using System.Linq; -using UnityEditor.VFX.Block; + using UnityEngine; namespace UnityEditor.VFX.HDRP { - internal class VFXDistortionPlanarPrimitiveOutputProvider : VariantProvider + internal class VFXDistortionPlanarPrimitiveOutputSubvariantProvider : VariantProvider { + private readonly VFXPrimitiveType mainPrimitiveType; + + public VFXDistortionPlanarPrimitiveOutputSubvariantProvider(VFXPrimitiveType type) + { + mainPrimitiveType = type; + } + public override IEnumerable GetVariants() { foreach (var primitive in Enum.GetValues(typeof(VFXPrimitiveType)).Cast()) { + if (primitive == mainPrimitiveType) + continue; + yield return new Variant( - $"Output Particle HDRP Distortion {primitive}", - "Output", + "Output Particle|HDRP Distortion".AppendLabel(primitive.ToString()), + VFXLibraryStringHelper.Separator("Output Advanced", 4), typeof(VFXDistortionPlanarPrimitiveOutput), new[] {new KeyValuePair("primitiveType", primitive)} ); @@ -22,6 +32,19 @@ public override IEnumerable GetVariants() } } + internal class VFXDistortionPlanarPrimitiveOutputProvider : VariantProvider + { + public override IEnumerable GetVariants() + { + yield return new Variant( + "Output Particle|HDRP Distortion".AppendLabel(VFXPrimitiveType.Quad.ToString()), + VFXLibraryStringHelper.Separator("Output Advanced", 4), + typeof(VFXDistortionPlanarPrimitiveOutput), + 
new[] {new KeyValuePair("primitiveType", VFXPrimitiveType.Quad)}, + () => new VFXDistortionPlanarPrimitiveOutputSubvariantProvider(VFXPrimitiveType.Quad)); + } + } + [VFXInfo(variantProvider = typeof(VFXDistortionPlanarPrimitiveOutputProvider))] class VFXDistortionPlanarPrimitiveOutput : VFXAbstractDistortionOutput { @@ -31,7 +54,7 @@ class VFXDistortionPlanarPrimitiveOutput : VFXAbstractDistortionOutput //[VFXSetting] // tmp dont expose as settings atm public bool useGeometryShader = false; - public override string name { get { return "Output Particle HDRP Distortion " + primitiveType.ToString(); } } + public override string name => "Output Particle".AppendLabel("HDRP Distortion", false) + $"\n{primitiveType.ToString()}"; public override string codeGeneratorTemplate { get { return RenderPipeTemplate("VFXParticleDistortionPlanarPrimitive"); } } public override VFXTaskType taskType { diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDistortionQuadStripOutput.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDistortionQuadStripOutput.cs index 15cd11119fd..d672d827d8f 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDistortionQuadStripOutput.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXDistortionQuadStripOutput.cs @@ -5,7 +5,7 @@ namespace UnityEditor.VFX.HDRP { - [VFXInfo(name = "Output Strip HDRP Distortion Quad", category = "Output")] + [VFXInfo(name = "Output ParticleStrip|HDRP Distortion|Quad", category = "#3Output Strip", synonyms = new []{ "Trail", "Ribbon" })] class VFXDistortionQuadStripOutput : VFXAbstractDistortionOutput { [VFXSetting, SerializeField, Tooltip("Specifies the way the UVs are interpolated along the strip. 
They can either be stretched or repeated per segment.")] @@ -19,7 +19,7 @@ class VFXDistortionQuadStripOutput : VFXAbstractDistortionOutput VFXDistortionQuadStripOutput() : base(true) { } - public override string name { get { return "Output Strip HDRP Distortion Quad"; } } + public override string name => "Output ParticleStrip".AppendLabel("HDRP Distortion", false) + "\nQuad"; public override string codeGeneratorTemplate { get { return RenderPipeTemplate("VFXParticleDistortionPlanarPrimitive"); } } public override VFXTaskType taskType => VFXTaskType.ParticleQuadOutput; public override bool supportsUV { get { return true; } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitCubeOutput.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitCubeOutput.cs index 1588d52a3ad..7a6b85f87e3 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitCubeOutput.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitCubeOutput.cs @@ -3,10 +3,10 @@ namespace UnityEditor.VFX.HDRP { - [VFXInfo(name = "Output Particle HDRP Lit Cube", category = "Output", experimental = true)] + [VFXInfo(name = "Output Particle|HDRP Lit|Cube", category = "#5Output Debug", experimental = true, synonyms = new []{ "Box" })] class VFXLitCubeOutput : VFXAbstractParticleHDRPLitOutput { - public override string name => "Output Particle HDRP Lit Cube"; + public override string name => "Output Particle".AppendLabel("HDRP Lit", false) + "\nCube"; public override string codeGeneratorTemplate => RenderPipeTemplate("VFXParticleLitCube"); public override VFXTaskType taskType => VFXTaskType.ParticleHexahedronOutput; public override bool implementsMotionVector => true; diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitMeshOutput.cs 
b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitMeshOutput.cs index 93b141525cf..d627715c6a8 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitMeshOutput.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitMeshOutput.cs @@ -4,16 +4,10 @@ namespace UnityEditor.VFX.HDRP { - [VFXInfo(name = "Output Particle HDRP Lit Mesh", category = "Output")] + [VFXInfo(name = "Output Particle|HDRP Lit|Mesh", category = "#2Output Basic")] class VFXLitMeshOutput : VFXAbstractParticleHDRPLitOutput, IVFXMultiMeshOutput { - public override string name - { - get - { - return "Output Particle HDRP Lit Mesh"; - } - } + public override string name => "Output Particle".AppendLabel("HDRP Lit") + "\nMesh"; public override string codeGeneratorTemplate { get { return RenderPipeTemplate("VFXParticleLitMesh"); } } public override VFXTaskType taskType { get { return VFXTaskType.ParticleMeshOutput; } } public override bool supportsUV { get { return GetOrRefreshShaderGraphObject() == null; } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitPlanarPrimitiveOutput.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitPlanarPrimitiveOutput.cs index 8db31fc67f3..d4e886f7b6c 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitPlanarPrimitiveOutput.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitPlanarPrimitiveOutput.cs @@ -1,36 +1,54 @@ using System; using System.Collections.Generic; using System.Linq; -using UnityEditor.VFX.Block; + using UnityEngine; namespace UnityEditor.VFX.HDRP { - class VFXLitPlanarPrimitiveOutputProvider : VariantProvider + class VFXLitPlanarPrimitiveOutputSubVariantProvider : VariantProvider { + private readonly VFXPrimitiveType mainVariantType; + + public 
VFXLitPlanarPrimitiveOutputSubVariantProvider(VFXPrimitiveType type) + { + this.mainVariantType = type; + } + public override IEnumerable GetVariants() { - foreach (var primitive in Enum.GetValues(typeof(VFXPrimitiveType))) + foreach (var primitive in Enum.GetValues(typeof(VFXPrimitiveType)).Cast()) { + if (primitive == this.mainVariantType) + continue; + yield return new Variant( - $"Lit Output Particle {primitive}", - "Output", + "Output Particle".AppendLabel("HDRP Lit", false).AppendLabel(primitive.ToString()), + null, typeof(VFXLitPlanarPrimitiveOutput), new[] {new KeyValuePair("primitiveType", primitive)}); } } } - [VFXInfo(variantProvider = typeof(VFXLitPlanarPrimitiveOutputProvider))] - class VFXLitPlanarPrimitiveOutput : VFXAbstractParticleHDRPLitOutput + class VFXLitPlanarPrimitiveOutputProvider : VariantProvider { - public override string name + public override IEnumerable GetVariants() { - get - { - return "Output Particle HDRP Lit " + primitiveType.ToString(); - } + yield return new Variant( + "Output Particle".AppendLabel("HDRP Lit", false).AppendLabel("Quad", false), + VFXLibraryStringHelper.Separator("Output Basic", 2), + typeof(VFXLitPlanarPrimitiveOutput), + new[] {new KeyValuePair("primitiveType", VFXPrimitiveType.Quad)}, + () => new VFXLitPlanarPrimitiveOutputSubVariantProvider(VFXPrimitiveType.Quad)); + } + } + + [VFXInfo(variantProvider = typeof(VFXLitPlanarPrimitiveOutputProvider))] + class VFXLitPlanarPrimitiveOutput : VFXAbstractParticleHDRPLitOutput + { + public override string name => "Output Particle".AppendLabel("HDRP Lit", false) + $"\n{ObjectNames.NicifyVariableName(primitiveType.ToString())}"; public override string codeGeneratorTemplate { get { return RenderPipeTemplate("VFXParticleLitPlanarPrimitive"); } } public override VFXTaskType taskType { get { return VFXPlanarPrimitiveHelper.GetTaskType(primitiveType); } } public override bool supportsUV { get { return GetOrRefreshShaderGraphObject() == null; } } diff --git 
a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitQuadStripOutput.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitQuadStripOutput.cs index 77f88db3ba4..8288588d45f 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitQuadStripOutput.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitQuadStripOutput.cs @@ -5,12 +5,12 @@ namespace UnityEditor.VFX.HDRP { - [VFXInfo(name = "Output ParticleStrip HDRP Lit Quad", category = "Output", experimental = true)] + [VFXInfo(name = "Output ParticleStrip|HDRP Lit|Quad", category = "#3Output Strip", experimental = true, synonyms = new []{ "Trail", "Ribbon" })] class VFXLitQuadStripOutput : VFXAbstractParticleHDRPLitOutput { protected VFXLitQuadStripOutput() : base(true) { } // strips - public override string name { get { return "Output ParticleStrip HDRP Lit Quad"; } } + public override string name => "Output ParticleStrip".AppendLabel("HDRP Lit", false) + "\nQuad"; public override string codeGeneratorTemplate { get { return RenderPipeTemplate("VFXParticleLitPlanarPrimitive"); } } public override VFXTaskType taskType { get { return VFXTaskType.ParticleQuadOutput; } } public override bool supportsUV { get { return true; } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitSphereOutput.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitSphereOutput.cs index 1a87c9b0ef4..f1cbf74a66a 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitSphereOutput.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXLitSphereOutput.cs @@ -4,10 +4,10 @@ namespace UnityEditor.VFX.HDRP { - [VFXInfo(name = "Output Particle HDRP Lit Sphere", category = "Output", experimental = true)] + [VFXInfo(name = "Output Particle|HDRP Lit|Sphere", category = 
"#5Output Debug", experimental = true)] class VFXLitSphereOutput : VFXAbstractParticleHDRPLitOutput { - public override string name => "Output Particle HDRP Lit Sphere"; + public override string name => "Output Particle".AppendLabel("HDRP Lit", false) + "\nSphere"; public override string codeGeneratorTemplate => RenderPipeTemplate("VFXParticleSphere"); public override VFXTaskType taskType => VFXTaskType.ParticleQuadOutput; diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXVolumetricFogOutput.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXVolumetricFogOutput.cs index b0a92a8396e..ff58e4f0527 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXVolumetricFogOutput.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Outputs/VFXVolumetricFogOutput.cs @@ -7,10 +7,10 @@ namespace UnityEditor.VFX.HDRP { - [VFXInfo(name = "Output Particle HDRP Volumetric Fog", category = "Output", experimental = true)] + [VFXInfo(name = "Output Particle|HDRP Volumetric Fog", category = "#4Output Advanced", experimental = true)] class VFXVolumetricFogOutput : VFXAbstractParticleOutput { - public override string name => "Output Particle HDRP Volumetric Fog"; + public override string name => "Output Particle".AppendLabel("HDRP Volumetric Fog", false); public override string codeGeneratorTemplate => RenderPipeTemplate("VFXVolumetricFogOutput"); public override VFXTaskType taskType => VFXTaskType.ParticleQuadOutput; diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Utility/VFXHDRPSettingsUtility.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Utility/VFXHDRPSettingsUtility.cs index b9fd4331b60..7378c0a226a 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Utility/VFXHDRPSettingsUtility.cs +++ 
b/Packages/com.unity.render-pipelines.high-definition/Editor/VFXGraph/Utility/VFXHDRPSettingsUtility.cs @@ -9,7 +9,7 @@ public static void RefreshVfxErrorsIfNeeded() { foreach (var vfxWindow in VFXViewWindow.GetAllWindows()) { - if (vfxWindow != null) + if (vfxWindow != null && vfxWindow.graphView != null ) { var vfxGraph = vfxWindow.graphView.controller.graph; foreach (var output in vfxGraph.children.OfType()) diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.Appearance.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.Appearance.cs index ff20450592a..03e3c39bbda 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.Appearance.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.Appearance.cs @@ -118,7 +118,7 @@ internal static string GetWaterResourcesPath(MonoBehaviour component) CoreUtils.EnsureFolderTreeInAssetFilePath(folderName); return folderName; } - + internal static Material CreateNewWaterMaterialAndShader(MonoBehaviour component) { string directory = GetWaterResourcesPath(component); @@ -277,8 +277,8 @@ static internal void WaterSurfaceAppearanceSection(WaterSurfaceEditor serialized EditorGUILayout.PropertyField(serialized.m_CausticsVirtualPlaneDistance, k_CausticsVirtualPlaneDistance); EditorGUILayout.PropertyField(serialized.m_CausticsTilingFactor, k_CausticsTilingFactor); - - if (WaterSurfaceUI.ShowAdditionalProperties()) + + if (AdvancedProperties.BeginGroup()) { EditorGUILayout.PropertyField(serialized.m_CausticsIntensity, k_CausticsInstensity); EditorGUILayout.PropertyField(serialized.m_CausticsPlaneBlendDistance); @@ -290,6 +290,7 @@ static internal void WaterSurfaceAppearanceSection(WaterSurfaceEditor serialized EditorGUILayout.PropertyField(serialized.m_CausticsDirectionalShadowDimmer, k_CausticsDirectionalShadowDimmer); } } + 
AdvancedProperties.EndGroup(); // Display an info box if the wind speed is null for the target band if (!WaterBandHasAgitation(serialized, owner, HDRenderPipeline.SanitizeCausticsBand(serialized.m_CausticsBand.intValue, bandCount))) diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.Simulation.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.Simulation.cs index 5aa7ce11d0f..8b5452a09a0 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.Simulation.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.Simulation.cs @@ -223,7 +223,7 @@ static internal void WaterSurfaceSimulationSection_Ocean(WaterSurfaceEditor seri } // The fade parameters are only to be displayed when the additional parameters are - if (WaterSurfaceUI.ShowAdditionalProperties()) + if (AdvancedProperties.BeginGroup()) { // Fade of the ripples using (new BoldLabelScope()) @@ -239,6 +239,7 @@ static internal void WaterSurfaceSimulationSection_Ocean(WaterSurfaceEditor seri } } } + AdvancedProperties.EndGroup(); } // Second band foldout @@ -256,7 +257,7 @@ static internal void WaterSurfaceSimulationSection_Ocean(WaterSurfaceEditor seri } // The fade parameters are only to be displayed when the additional parameters are - if (WaterSurfaceUI.ShowAdditionalProperties()) + if (AdvancedProperties.BeginGroup()) { // Fade of the ripples using (new BoldLabelScope()) @@ -271,6 +272,7 @@ static internal void WaterSurfaceSimulationSection_Ocean(WaterSurfaceEditor seri } } } + AdvancedProperties.EndGroup(); } using (new DisabledScope(true)) @@ -292,7 +294,7 @@ static internal void WaterSurfaceSimulationSection_Ocean(WaterSurfaceEditor seri // Current & Orientation WaterSurfaceRipplesOrientationCurrentInherit(serialized, owner, WaterPropertyParameterDrawer.swellModeNames); - if 
(WaterSurfaceUI.ShowAdditionalProperties()) + if (AdvancedProperties.BeginGroup()) { // Fade of the ripples using (new BoldLabelScope()) @@ -307,6 +309,7 @@ static internal void WaterSurfaceSimulationSection_Ocean(WaterSurfaceEditor seri } } } + AdvancedProperties.EndGroup(); } } } @@ -342,7 +345,7 @@ static internal void WaterSurfaceSimulationSection_River(WaterSurfaceEditor seri } // The fade parameters are only to be displayed when the additional parameters are - if (WaterSurfaceUI.ShowAdditionalProperties()) + if (AdvancedProperties.BeginGroup()) { // Fade of the agitation using (new BoldLabelScope()) @@ -357,6 +360,7 @@ static internal void WaterSurfaceSimulationSection_River(WaterSurfaceEditor seri } } } + AdvancedProperties.EndGroup(); } // Ripples Section @@ -372,7 +376,7 @@ static internal void WaterSurfaceSimulationSection_River(WaterSurfaceEditor seri // Orientation & Current WaterSurfaceRipplesOrientationCurrentInherit(serialized, owner, WaterPropertyParameterDrawer.agitationModeNames); - if (WaterSurfaceUI.ShowAdditionalProperties()) + if (AdvancedProperties.BeginGroup()) { // Fade of the ripples using (new BoldLabelScope()) @@ -387,6 +391,7 @@ static internal void WaterSurfaceSimulationSection_River(WaterSurfaceEditor seri } } } + AdvancedProperties.EndGroup(); } } } @@ -406,7 +411,7 @@ static internal void WaterSurfaceSimulationSection_Pool(WaterSurfaceEditor seria // Current WaterSurfaceRipplesOrientationCurrent(serialized, owner); - if (WaterSurfaceUI.ShowAdditionalProperties()) + if (AdvancedProperties.BeginGroup()) { // Fade of the ripples using (new BoldLabelScope()) @@ -421,6 +426,7 @@ static internal void WaterSurfaceSimulationSection_Pool(WaterSurfaceEditor seria } } } + AdvancedProperties.EndGroup(); } } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.cs index 
d5409dcb0b6..22efe23f1d8 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Water/WaterSurface/WaterSurfaceEditor.cs @@ -208,11 +208,12 @@ static internal void WaterSurfaceGeneralSection(WaterSurfaceEditor serialized, E using (new EditorGUI.IndentLevelScope()) { EditorGUILayout.PropertyField(serialized.m_MaxTessellationFactor); - if (WaterSurfaceUI.ShowAdditionalProperties()) + if (AdvancedProperties.BeginGroup()) { EditorGUILayout.PropertyField(serialized.m_TessellationFactorFadeStart); EditorGUILayout.PropertyField(serialized.m_TessellationFactorFadeRange); } + AdvancedProperties.EndGroup(); } } } @@ -325,12 +326,12 @@ class WaterSurfaceUI { public static readonly CED.IDrawer Inspector; - public static readonly string generalHeader = "General"; - public static readonly string simulationHeader = "Simulation"; - public static readonly string deformationHeader = "Deformation"; - public static readonly string appearanceHeader = "Appearance"; - public static readonly string foamHeader = "Foam"; - public static readonly string miscellaneousHeader = "Miscellaneous"; + public static readonly GUIContent generalHeader = EditorGUIUtility.TrTextContent("General"); + public static readonly GUIContent simulationHeader = EditorGUIUtility.TrTextContent("Simulation"); + public static readonly GUIContent deformationHeader = EditorGUIUtility.TrTextContent("Deformation"); + public static readonly GUIContent appearanceHeader = EditorGUIUtility.TrTextContent("Appearance"); + public static readonly GUIContent foamHeader = EditorGUIUtility.TrTextContent("Foam"); + public static readonly GUIContent miscellaneousHeader = EditorGUIUtility.TrTextContent("Miscellaneous"); enum Expandable { @@ -344,37 +345,15 @@ enum Expandable internal enum AdditionalProperties { - Global = 1 << 0, + General = 1 << 0, + Simulation = 1 << 1, + Appearance = 1 << 3, } readonly static 
ExpandedState k_ExpandedState = new ExpandedState(0, "HDRP"); readonly internal static AdditionalPropertiesState k_AdditionalPropertiesState = new AdditionalPropertiesState(0, "HDRP"); - internal static void RegisterEditor(HDLightEditor editor) - { - k_AdditionalPropertiesState.RegisterEditor(editor); - } - - internal static void UnregisterEditor(HDLightEditor editor) - { - k_AdditionalPropertiesState.UnregisterEditor(editor); - } - - [SetAdditionalPropertiesVisibility] - internal static void SetAdditionalPropertiesVisibility(bool value) - { - if (value) - k_AdditionalPropertiesState.ShowAll(); - else - k_AdditionalPropertiesState.HideAll(); - } - - internal static bool ShowAdditionalProperties() - { - return k_AdditionalPropertiesState[WaterSurfaceUI.AdditionalProperties.Global]; - } - - [MenuItem("CONTEXT/WaterSurface/Show All Additional Properties...", false, 100)] + [MenuItem("CONTEXT/WaterSurface/Open Preferences > Graphics...", false, 100)] static void ShowAllAdditionalProperties(MenuCommand menuCommand) { CoreRenderPipelinePreferences.Open(); @@ -407,14 +386,21 @@ static void ResetWaterSurface(MenuCommand menuCommand) static WaterSurfaceUI() { + var emptyDrawer = + CED.Group( + (s, e) => { }); + Inspector = CED.Group( - CED.FoldoutGroup(generalHeader, Expandable.General, k_ExpandedState, WaterSurfaceEditor.WaterSurfaceGeneralSection), - CED.FoldoutGroup(simulationHeader, Expandable.Simulation, k_ExpandedState, WaterSurfaceEditor.WaterSurfaceSimulationSection), + CED.AdditionalPropertiesFoldoutGroup(generalHeader, Expandable.General, k_ExpandedState, + AdditionalProperties.General, k_AdditionalPropertiesState, CED.Group(WaterSurfaceEditor.WaterSurfaceGeneralSection), emptyDrawer), + CED.AdditionalPropertiesFoldoutGroup(simulationHeader, Expandable.Simulation, k_ExpandedState, + AdditionalProperties.Simulation, k_AdditionalPropertiesState, CED.Group(WaterSurfaceEditor.WaterSurfaceSimulationSection), emptyDrawer), CED.FoldoutGroup(deformationHeader, 
Expandable.Deformation, k_ExpandedState, WaterSurfaceEditor.WaterSurfaceDeformationSection), - CED.FoldoutGroup(appearanceHeader, Expandable.Appearance, k_ExpandedState, WaterSurfaceEditor.WaterSurfaceAppearanceSection), + CED.AdditionalPropertiesFoldoutGroup(appearanceHeader, Expandable.Appearance, k_ExpandedState, + AdditionalProperties.Appearance, k_AdditionalPropertiesState, CED.Group(WaterSurfaceEditor.WaterSurfaceAppearanceSection), emptyDrawer), CED.FoldoutGroup(foamHeader, Expandable.Foam, k_ExpandedState, WaterSurfaceEditor.WaterSurfaceFoamSection), CED.FoldoutGroup(miscellaneousHeader, Expandable.Miscellaneous, k_ExpandedState, WaterSurfaceEditor.WaterSurfaceMiscellaneousSection) - ); + ); } } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/HDWizard.UIElement.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/HDWizard.UIElement.cs index ed2417fe064..ba90aab1e4e 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/HDWizard.UIElement.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/HDWizard.UIElement.cs @@ -329,11 +329,6 @@ class ConfigInfoLine : VisualElementUpdatable { static class Style { - const string k_IconFolder = @"Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/WizardResources/"; - public static readonly Texture ok = CoreEditorUtils.LoadIcon(k_IconFolder, "OK"); - public static readonly Texture error = CoreEditorUtils.LoadIcon(k_IconFolder, "Error"); - public static readonly Texture warning = CoreEditorUtils.LoadIcon(k_IconFolder, "Warning"); - public const int k_IndentStepSize = 15; } @@ -360,13 +355,23 @@ public ConfigInfoLine(string label, string error, MessageType messageType, strin { var statusOK = new Image() { - image = Style.ok, - name = "StatusOK" + image = CoreEditorStyles.iconComplete, + name = "StatusOK", + style = + { + height = 16, + width = 16 + } }; var statusKO = new Image() { - image = Style.error, - name = 
"StatusError" + image = CoreEditorStyles.iconFail, + name = "StatusError", + style = + { + height = 16, + width = 16 + } }; testRow.Add(statusOK); testRow.Add(statusKO); diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/WizardResources/Error.png b/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/WizardResources/Error.png deleted file mode 100644 index 8bba82fc9a5..00000000000 Binary files a/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/WizardResources/Error.png and /dev/null differ diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/WizardResources/Error.png.meta b/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/WizardResources/Error.png.meta deleted file mode 100644 index 89af7e523c2..00000000000 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/WizardResources/Error.png.meta +++ /dev/null @@ -1,115 +0,0 @@ -fileFormatVersion: 2 -guid: b9ba8ba73504d5344a9b8a1c64107c86 -TextureImporter: - internalIDToNameTable: [] - externalObjects: {} - serializedVersion: 10 - mipmaps: - mipMapMode: 0 - enableMipMap: 0 - sRGBTexture: 1 - linearTexture: 0 - fadeOut: 0 - borderMipMap: 0 - mipMapsPreserveCoverage: 0 - alphaTestReferenceValue: 0.5 - mipMapFadeDistanceStart: 1 - mipMapFadeDistanceEnd: 3 - bumpmap: - convertToNormalMap: 0 - externalNormalMap: 0 - heightScale: 0.25 - normalMapFilter: 0 - isReadable: 0 - streamingMipmaps: 0 - streamingMipmapsPriority: 0 - grayScaleToAlpha: 0 - generateCubemap: 6 - cubemapConvolution: 0 - seamlessCubemap: 0 - textureFormat: 1 - maxTextureSize: 2048 - textureSettings: - serializedVersion: 2 - filterMode: -1 - aniso: -1 - mipBias: -100 - wrapU: 1 - wrapV: 1 - wrapW: -1 - nPOTScale: 0 - lightmap: 0 - compressionQuality: 50 - spriteMode: 1 - spriteExtrude: 1 - spriteMeshType: 1 - alignment: 0 - spritePivot: {x: 0.5, y: 0.5} - spritePixelsToUnits: 100 - spriteBorder: {x: 0, y: 0, z: 0, w: 0} - 
spriteGenerateFallbackPhysicsShape: 0 - alphaUsage: 1 - alphaIsTransparency: 1 - spriteTessellationDetail: -1 - textureType: 8 - textureShape: 1 - singleChannelComponent: 0 - maxTextureSizeSet: 0 - compressionQualitySet: 0 - textureFormatSet: 0 - platformSettings: - - serializedVersion: 3 - buildTarget: DefaultTexturePlatform - maxTextureSize: 32 - resizeAlgorithm: 0 - textureFormat: -1 - textureCompression: 1 - compressionQuality: 50 - crunchedCompression: 0 - allowsAlphaSplitting: 0 - overridden: 0 - androidETC2FallbackOverride: 0 - forceMaximumCompressionQuality_BC6H_BC7: 0 - - serializedVersion: 3 - buildTarget: Standalone - maxTextureSize: 32 - resizeAlgorithm: 0 - textureFormat: -1 - textureCompression: 1 - compressionQuality: 50 - crunchedCompression: 0 - allowsAlphaSplitting: 0 - overridden: 0 - androidETC2FallbackOverride: 0 - forceMaximumCompressionQuality_BC6H_BC7: 0 - - serializedVersion: 3 - buildTarget: Windows Store Apps - maxTextureSize: 32 - resizeAlgorithm: 0 - textureFormat: -1 - textureCompression: 1 - compressionQuality: 50 - crunchedCompression: 0 - allowsAlphaSplitting: 0 - overridden: 0 - androidETC2FallbackOverride: 0 - forceMaximumCompressionQuality_BC6H_BC7: 0 - spriteSheet: - serializedVersion: 2 - sprites: [] - outline: [] - physicsShape: [] - bones: [] - spriteID: 5e97eb03825dee720800000000000000 - internalID: 0 - vertices: [] - indices: - edges: [] - weights: [] - secondaryTextures: [] - spritePackingTag: - pSDRemoveMatte: 0 - pSDShowRemoveMatteOption: 0 - userData: - assetBundleName: - assetBundleVariant: diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/WizardResources/OK.png b/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/WizardResources/OK.png deleted file mode 100644 index 77a552e3a4b..00000000000 Binary files a/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/WizardResources/OK.png and /dev/null differ diff --git 
a/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/WizardResources/OK.png.meta b/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/WizardResources/OK.png.meta deleted file mode 100644 index e4d60ce96e5..00000000000 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Wizard/WizardResources/OK.png.meta +++ /dev/null @@ -1,115 +0,0 @@ -fileFormatVersion: 2 -guid: a8359114ca9cc0a448b8172a6a236330 -TextureImporter: - internalIDToNameTable: [] - externalObjects: {} - serializedVersion: 10 - mipmaps: - mipMapMode: 0 - enableMipMap: 0 - sRGBTexture: 1 - linearTexture: 0 - fadeOut: 0 - borderMipMap: 0 - mipMapsPreserveCoverage: 0 - alphaTestReferenceValue: 0.5 - mipMapFadeDistanceStart: 1 - mipMapFadeDistanceEnd: 3 - bumpmap: - convertToNormalMap: 0 - externalNormalMap: 0 - heightScale: 0.25 - normalMapFilter: 0 - isReadable: 0 - streamingMipmaps: 0 - streamingMipmapsPriority: 0 - grayScaleToAlpha: 0 - generateCubemap: 6 - cubemapConvolution: 0 - seamlessCubemap: 0 - textureFormat: 1 - maxTextureSize: 2048 - textureSettings: - serializedVersion: 2 - filterMode: -1 - aniso: -1 - mipBias: -100 - wrapU: 1 - wrapV: 1 - wrapW: -1 - nPOTScale: 0 - lightmap: 0 - compressionQuality: 50 - spriteMode: 1 - spriteExtrude: 1 - spriteMeshType: 1 - alignment: 0 - spritePivot: {x: 0.5, y: 0.5} - spritePixelsToUnits: 100 - spriteBorder: {x: 0, y: 0, z: 0, w: 0} - spriteGenerateFallbackPhysicsShape: 0 - alphaUsage: 1 - alphaIsTransparency: 1 - spriteTessellationDetail: -1 - textureType: 8 - textureShape: 1 - singleChannelComponent: 0 - maxTextureSizeSet: 0 - compressionQualitySet: 0 - textureFormatSet: 0 - platformSettings: - - serializedVersion: 3 - buildTarget: DefaultTexturePlatform - maxTextureSize: 32 - resizeAlgorithm: 0 - textureFormat: -1 - textureCompression: 1 - compressionQuality: 50 - crunchedCompression: 0 - allowsAlphaSplitting: 0 - overridden: 0 - androidETC2FallbackOverride: 0 - forceMaximumCompressionQuality_BC6H_BC7: 0 - - 
serializedVersion: 3 - buildTarget: Standalone - maxTextureSize: 32 - resizeAlgorithm: 0 - textureFormat: -1 - textureCompression: 1 - compressionQuality: 50 - crunchedCompression: 0 - allowsAlphaSplitting: 0 - overridden: 0 - androidETC2FallbackOverride: 0 - forceMaximumCompressionQuality_BC6H_BC7: 0 - - serializedVersion: 3 - buildTarget: Windows Store Apps - maxTextureSize: 32 - resizeAlgorithm: 0 - textureFormat: -1 - textureCompression: 1 - compressionQuality: 50 - crunchedCompression: 0 - allowsAlphaSplitting: 0 - overridden: 0 - androidETC2FallbackOverride: 0 - forceMaximumCompressionQuality_BC6H_BC7: 0 - spriteSheet: - serializedVersion: 2 - sprites: [] - outline: [] - physicsShape: [] - bones: [] - spriteID: 5e97eb03825dee720800000000000000 - internalID: 0 - vertices: [] - indices: - edges: [] - weights: [] - secondaryTextures: [] - spritePackingTag: - pSDRemoveMatte: 0 - pSDShowRemoveMatteOption: 0 - userData: - assetBundleName: - assetBundleVariant: diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugDisplay.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugDisplay.cs index c3863ce4d1b..947dec97932 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugDisplay.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugDisplay.cs @@ -231,6 +231,21 @@ public enum VolumetricCloudsDebug Depth, } + /// + /// List of Depth Pyramid Full Screen Debug views. + /// + public enum DepthPyramidDebugView + { + /// + /// Closest depth. + /// + ClosestDepth, + /// + /// Checkerboard of minimum and maximum depth. + /// + CheckerboardDepth, + } + /// /// Class managing debug display in HDRP. /// @@ -280,6 +295,8 @@ public partial class DebugData public Vector4 fullScreenDebugDepthRemap = new Vector4(0.0f, 1.0f, 0.0f, 0.0f); /// Current full screen debug mode mip level (when applicable). 
public float fullscreenDebugMip = 0.0f; + /// Enable to show checkerboard depths instead of closest depths (when applicable). + public DepthPyramidDebugView depthPyramidView = DepthPyramidDebugView.ClosestDepth; /// Index of the light used for contact shadows display. public int fullScreenContactShadowLightIndex = 0; /// XR single pass test mode. @@ -378,6 +395,7 @@ public partial class DebugData internal int lightClusterCategoryDebug; internal int historyBufferFrameIndex = 0; internal int stpDebugModeEnumIndex; + internal int depthPyramidViewEnumIndex; private float m_DebugGlobalMipBiasOverride = 0.0f; @@ -1204,6 +1222,11 @@ static class LightingStrings public static readonly NameAndTooltip AreaLights = new() { name = "Area Lights", tooltip = "Temporarily enables or disables Area Lights in your Scene." }; public static readonly NameAndTooltip ReflectionProbes = new() { name = "Reflection Probes", tooltip = "Temporarily enables or disables Reflection Probes in your Scene." }; + // Lighting - Mat Cap + public static readonly NameAndTooltip MatCapHeader = new() { name = "Mat Cap Mode", tooltip = "Settings for Scene View MatCap" }; + public static readonly NameAndTooltip MatCapViewMixAlbedoLabel = new() { name = "Mix Albedo", tooltip = "Enable to make HDRP mix the albedo of the Material with its material capture." }; + public static readonly NameAndTooltip MatCapIntensityScaleLabel = new() { name = "Intensity scale", tooltip = "Set the intensity of the material capture. This increases the brightness of the Scene. This is useful if the albedo darkens the Scene considerably." }; + public static readonly NameAndTooltip Exposure = new() { name = "Exposure", tooltip = "Allows the selection of an Exposure debug mode to use." }; public static readonly NameAndTooltip HDROutput = new() { name = "HDR", tooltip = "Allows the selection of an HDR debug mode to use." 
}; public static readonly NameAndTooltip HDROutputDebugMode = new() { name = "DebugMode", tooltip = "Use the drop-down to select a debug mode for HDR Output." }; @@ -1241,6 +1264,7 @@ static class LightingStrings public static readonly NameAndTooltip FullscreenDebugMode = new() { name = "Fullscreen Debug Mode", tooltip = "Use the drop-down to select a rendering mode to display as an overlay on the screen." }; public static readonly NameAndTooltip ScreenSpaceShadowIndex = new() { name = "Screen Space Shadow Index", tooltip = "Select the index of the screen space shadows to view with the slider. There must be a Light in the scene that uses Screen Space Shadows." }; public static readonly NameAndTooltip DepthPyramidDebugMip = new() { name = "Debug Mip", tooltip = "Enable to view a lower-resolution mipmap." }; + public static readonly NameAndTooltip DepthPyramidDebugView = new() { name = "Debug View", tooltip = "Use the down-down to select which depth pyramid data to show in this view." }; public static readonly NameAndTooltip DepthPyramidEnableRemap = new() { name = "Enable Depth Remap", tooltip = "Enable remapping of displayed depth values for better vizualization." 
}; public static readonly NameAndTooltip DepthPyramidRangeMin = new() { name = "Depth Range Min Value", tooltip = "Distance at which depth values remap starts (0 is near plane, 1 is far plane)" }; public static readonly NameAndTooltip DepthPyramidRangeMax = new() { name = "Depth Range Max Value", tooltip = "Distance at which depth values remap ends (0 is near plane, 1 is far plane)" }; @@ -1434,6 +1458,29 @@ void RegisterLightingDebug() lighting.children.Add(hdrFoldout); lighting.children.Add(new DebugUI.EnumField { nameAndTooltip = LightingStrings.LightingDebugMode, getter = () => (int)data.lightingDebugSettings.debugLightingMode, setter = value => SetDebugLightingMode((DebugLightingMode)value), autoEnum = typeof(DebugLightingMode), getIndex = () => data.lightingDebugModeEnumIndex, setIndex = value => { data.ResetExclusiveEnumIndices(); data.lightingDebugModeEnumIndex = value; } }); + + lighting.children.Add(new DebugUI.Container() + { + children = + { + new DebugUI.BoolField + { + nameAndTooltip = LightingStrings.MatCapViewMixAlbedoLabel, + getter = () => data.lightingDebugSettings.matCapMixAlbedo, + setter = value => data.lightingDebugSettings.matCapMixAlbedo = value + }, + new DebugUI.FloatField + { + nameAndTooltip = LightingStrings.MatCapIntensityScaleLabel, + getter = () => data.lightingDebugSettings.matCapMixScale, + setter = value => data.lightingDebugSettings.matCapMixScale = value, + isHiddenCallback = () => !data.lightingDebugSettings.matCapMixAlbedo + }, + }, + isHiddenCallback = () => data.lightingDebugSettings.debugLightingMode != DebugLightingMode.MatcapView + }); + + lighting.children.Add(new DebugUI.BitField { nameAndTooltip = LightingStrings.LightHierarchyDebugMode, getter = () => data.lightingDebugSettings.debugLightFilterMode, setter = value => SetDebugLightFilterMode((DebugLightFilterMode)value), enumType = typeof(DebugLightFilterMode)}); list.Add(lighting); @@ -1565,6 +1612,16 @@ void RegisterLightingDebug() children = { new 
DebugUI.FloatField { nameAndTooltip = LightingStrings.DepthPyramidDebugMip, getter = () => data.fullscreenDebugMip, setter = value => data.fullscreenDebugMip = value, min = () => 0f, max = () => 1f, incStep = 0.05f }, + new DebugUI.EnumField() + { + isHiddenCallback = () => data.fullScreenDebugMode != FullScreenDebugMode.DepthPyramid, + nameAndTooltip = LightingStrings.DepthPyramidDebugView, + getter = () => (int)data.depthPyramidView, + setter = value => { data.depthPyramidView = (DepthPyramidDebugView)value; }, + autoEnum = typeof(DepthPyramidDebugView), + getIndex = () => data.depthPyramidViewEnumIndex, + setIndex = value => { data.depthPyramidViewEnumIndex = value; }, + }, new DebugUI.BoolField { nameAndTooltip = LightingStrings.DepthPyramidEnableRemap, getter = () => data.enableDebugDepthRemap, setter = value => data.enableDebugDepthRemap = value }, new DebugUI.Container() { diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugDisplay.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugDisplay.hlsl index fd00349a856..f02fdc6d901 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugDisplay.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugDisplay.hlsl @@ -16,8 +16,6 @@ // Local shader variables static SHADOW_TYPE g_DebugShadowAttenuation = 0; -StructuredBuffer _DebugDepthPyramidOffsets; - #include "Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/PBRValidator.hlsl" // When displaying lux meter we compress the light in order to be able to display value higher than 65504 diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugFullScreen.shader b/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugFullScreen.shader index e0887ec8e85..d9e20a1529e 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugFullScreen.shader +++ 
b/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugFullScreen.shader @@ -35,7 +35,7 @@ Shader "Hidden/HDRP/DebugFullScreen" float _QuadOverdrawMaxQuadCost; float _VertexDensityMaxPixelCost; uint _DebugContactShadowLightIndex; - int _DebugDepthPyramidMip; + float4 _DebugDepthPyramidParams; // (mip index, offset_x, offset_y, unused) float _MinMotionVector; float4 _MotionVecIntensityParams; float _FogVolumeOverdrawMaxValue; @@ -440,11 +440,13 @@ Shader "Hidden/HDRP/DebugFullScreen" } if (_FullScreenDebugMode == FULLSCREENDEBUGMODE_DEPTH_PYRAMID) { + int debugDepthPyramidMip = _DebugDepthPyramidParams.x; + int2 debugDepthPyramidOffset = int2(_DebugDepthPyramidParams.yz); + // Reuse depth display function from DebugViewMaterial - int2 mipOffset = _DebugDepthPyramidOffsets[_DebugDepthPyramidMip]; - uint2 remappedPos = (uint2)(input.texcoord.xy * _DebugViewportSize.xy); - uint2 pixCoord = (uint2)remappedPos.xy >> _DebugDepthPyramidMip; - float depth = LOAD_TEXTURE2D_X(_CameraDepthTexture, pixCoord + mipOffset).r; + uint2 samplePosition = (uint2)((input.texcoord.xy / _RTHandleScale.xy) * _DebugViewportSize.xy); + uint2 pixCoord = (uint2)samplePosition >> debugDepthPyramidMip; + float depth = LOAD_TEXTURE2D_X(_CameraDepthTexture, pixCoord + debugDepthPyramidOffset).r; PositionInputs posInput = GetPositionInput(input.positionCS.xy, _ScreenSize.zw, depth, UNITY_MATRIX_I_VP, UNITY_MATRIX_V); // We square the factors to have more precision near zero which is where people usually want to visualize depth. 
diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/LightingDebug.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/LightingDebug.cs index dd13429c2fb..f046f431d6d 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/LightingDebug.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/LightingDebug.cs @@ -359,6 +359,22 @@ public bool IsDebugDisplayEnabled() /// Light category for cluster debug view. public ClusterLightCategoryDebug clusterLightCategory = ClusterLightCategoryDebug.All; + + /// Enable to make HDRP mix the albedo of the Material with its material capture. + public bool matCapMixAlbedo = false ; + + /// Set the intensity of the material capture. This increases the brightness of the Scene. This is useful if the albedo darkens the Scene considerably. + public float matCapMixScale = 1.0f; + +#if UNITY_EDITOR + public LightingDebugSettings() + { + var matCapMode = HDRenderPipelinePreferences.matCapMode; + matCapMixAlbedo = matCapMode.mixAlbedo.value; + matCapMixScale = matCapMode.viewScale.value; + } +#endif + // Internal APIs internal bool IsDebugDisplayRemovePostprocess() { diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/.buginfo b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/.buginfo new file mode 100644 index 00000000000..484b54b848d --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/.buginfo @@ -0,0 +1 @@ +area: Lighting diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightLoop/LightLoop.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightLoop/LightLoop.cs index 9f013e328ad..4686d32f250 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightLoop/LightLoop.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightLoop/LightLoop.cs @@ -1389,11 
+1389,10 @@ void PreprocessVisibleLights(ScriptableRenderContext renderContext, CommandBuffe if (hdCamera.visualSky.skyRenderer?.GetType() == typeof(PhysicallyBasedSkyRenderer)) { - // Lights with 0 intensity are culled by unity, but we still want to show them - // in the PBR sky, so we need to allocate space for the cookie + // Allocate space in the cookie atlas for PBR sky surface textures foreach (var directional in lightEntities.directionalLights) { - if (directional.legacyLight.intensity == 0.0f && directional.interactsWithSky) + if (directional.interactsWithSky) m_TextureCaches.lightCookieManager.ReserveSpace(directional.surfaceTexture); } } @@ -1773,8 +1772,6 @@ internal void ReserveCookieAtlasTexture(HDAdditionalLightData hdLightData, Light { case LightType.Directional: { - if (hdLightData.interactsWithSky) - m_TextureCaches.lightCookieManager.ReserveSpace(hdLightData.surfaceTexture); m_TextureCaches.lightCookieManager.ReserveSpace(light?.cookie); break; } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/ScreenSpaceLighting/BilateralUpsample.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/ScreenSpaceLighting/BilateralUpsample.hlsl index b55be87654f..48f2c0e0b04 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/ScreenSpaceLighting/BilateralUpsample.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/ScreenSpaceLighting/BilateralUpsample.hlsl @@ -86,17 +86,17 @@ float BilUpSingle_Uniform(float HiDepth, float4 LowDepths, float4 lowValue) // them in this structure struct NeighborhoodUpsampleData3x3 { - // Low resolution depths + // Low resolution scene depths float4 lowDepthA; float4 lowDepthB; float lowDepthC; - // The low resolution masks - float4 lowMasksA; - float4 lowMasksB; - float lowMasksC; + // The low resolution depth values + float4 lowDepthValueA; + float4 lowDepthValueB; + float lowDepthValueC; - // The low resolution values + // 
The low resolution color values float4 lowValue0; float4 lowValue1; float4 lowValue2; @@ -113,60 +113,10 @@ struct NeighborhoodUpsampleData3x3 float lowWeightC; }; -void EvaluateMaskValidity(float linearHighDepth, float lowDepth, int currentIndex, - inout float inputMask, inout int closestNeighhor, - inout float currentDistance) -{ - if (inputMask == 0.0f) - return; - - // Convert the depths to linear - float candidateLinearDepth = Linear01Depth(lowDepth, _ZBufferParams); - - // Compute the distance between the two values - float candidateDistance = abs(linearHighDepth - candidateLinearDepth); - - // Evaluate if this becomes the closest neighbor - if (candidateDistance < currentDistance) - { - closestNeighhor = currentIndex; - currentDistance = candidateDistance; - } - - bool validSample = candidateDistance < (linearHighDepth * 0.3); - inputMask = validSample ? 1.0f : 0.0f; -} - -void OverrideMaskValues(float highDepth, inout NeighborhoodUpsampleData3x3 data, - out bool rejectedNeighborhood, out int closestNeighbor) -{ - // First of all compute the linear version of the high depth - float linearHighDepth = Linear01Depth(highDepth, _ZBufferParams); - float currentDistance = 1.0f; - - closestNeighbor = 4; // Index of the closest neighbor (center by default) - - // The center has precedence over the other pixels - EvaluateMaskValidity(linearHighDepth, data.lowDepthB.x, 4, data.lowMasksB.x, closestNeighbor, currentDistance); - - // Then the plus - EvaluateMaskValidity(linearHighDepth, data.lowDepthA.y, 1, data.lowMasksA.y, closestNeighbor, currentDistance); - EvaluateMaskValidity(linearHighDepth, data.lowDepthA.w, 3, data.lowMasksA.w, closestNeighbor, currentDistance); - EvaluateMaskValidity(linearHighDepth, data.lowDepthB.y, 5, data.lowMasksB.y, closestNeighbor, currentDistance); - EvaluateMaskValidity(linearHighDepth, data.lowDepthB.w, 7, data.lowMasksB.w, closestNeighbor, currentDistance); - - // Then the cross - EvaluateMaskValidity(linearHighDepth, 
data.lowDepthA.x, 0, data.lowMasksA.x, closestNeighbor, currentDistance); - EvaluateMaskValidity(linearHighDepth, data.lowDepthA.z, 2, data.lowMasksA.z, closestNeighbor, currentDistance); - EvaluateMaskValidity(linearHighDepth, data.lowDepthB.z, 6, data.lowMasksB.z, closestNeighbor, currentDistance); - EvaluateMaskValidity(linearHighDepth, data.lowDepthC, 8, data.lowMasksC, closestNeighbor, currentDistance); - - // Flag that tells us which pixel holds valid information - rejectedNeighborhood = (currentDistance >= (linearHighDepth * 0.3)); -} - // The bilateral upscale function (3x3 neighborhood) -float4 BilUpColor3x3(float highDepth, in NeighborhoodUpsampleData3x3 data) +// Perform joint bilateral upsampling using the scene depth as guide signal +// https://bartwronski.com/2019/09/22/local-linear-models-guided-filter/ +void BilUpColor3x3(float highDepth, in NeighborhoodUpsampleData3x3 data, out float4 outColor, out float outDepth) { float4 combinedWeightsA = data.lowWeightA / (abs(highDepth - data.lowDepthA) + _UpsampleTolerance); float4 combinedWeightsB = data.lowWeightB / (abs(highDepth - data.lowDepthB) + _UpsampleTolerance); @@ -186,8 +136,15 @@ float4 BilUpColor3x3(float highDepth, in NeighborhoodUpsampleData3x3 data) + data.lowValue6 * combinedWeightsB.z + data.lowValue7 * combinedWeightsB.w + data.lowValue8 * combinedWeightsC - + float4(_NoiseFilterStrength, _NoiseFilterStrength, _NoiseFilterStrength, 0.0); - return WeightedSum / TotalWeight; + + float4(_NoiseFilterStrength.xxx, 0.0f); + + float WeightedDepth = dot(data.lowDepthValueA, combinedWeightsA) + + dot(data.lowDepthValueB, combinedWeightsB) + + data.lowDepthValueC * combinedWeightsC + + _NoiseFilterStrength; + + outColor = WeightedSum / TotalWeight; + outDepth = WeightedDepth / TotalWeight; } // Due to compiler issues, it is not possible to use arrays to store the neighborhood values, we then store them in this structure diff --git 
a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDDynamicShadowAtlas.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDDynamicShadowAtlas.cs index 519e26142f7..2afd5bdb1c7 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDDynamicShadowAtlas.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDDynamicShadowAtlas.cs @@ -234,7 +234,7 @@ internal struct ShadowBlitParameters } - public unsafe void BlitCachedIntoAtlas(RenderGraph renderGraph, TextureHandle cachedAtlasTexture, int cachedAtlasSize, Material blitMaterial, string passName, HDProfileId profileID) + public unsafe void BlitCachedIntoAtlas(RenderGraph renderGraph, TextureHandle cachedAtlasTexture, Vector2Int cachedAtlasSize, Material blitMaterial, string passName, HDProfileId profileID) { if (m_MixedRequestsPendingBlits.Length > 0) { @@ -242,7 +242,7 @@ public unsafe void BlitCachedIntoAtlas(RenderGraph renderGraph, TextureHandle ca { passData.requestsWaitingBlits = m_MixedRequestsPendingBlits; passData.blitMaterial = blitMaterial; - passData.cachedShadowAtlasSize = new Vector2Int(cachedAtlasSize, cachedAtlasSize); + passData.cachedShadowAtlasSize = cachedAtlasSize; passData.sourceCachedAtlas = builder.ReadTexture(cachedAtlasTexture); passData.atlasTexture = builder.WriteTexture(GetShadowMapDepthTexture(renderGraph)); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowManager.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowManager.cs index 1ae0aa74fd8..8bd8233abac 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowManager.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowManager.cs @@ -1316,14 +1316,12 @@ internal void RenderShadows(RenderGraph renderGraph, in ShaderVariablesGlobal gl { // Punctual 
result.cachedPunctualShadowResult = cachedShadowManager.punctualShadowAtlas.RenderShadows(renderGraph, cullResults, globalCB, hdCamera.frameSettings, "Cached Punctual Lights Shadows rendering"); - cachedShadowManager.punctualShadowAtlas.AddBlitRequestsForUpdatedShadows(m_Atlas); BlitCachedShadows(renderGraph, ShadowMapType.PunctualAtlas); result.punctualShadowResult = m_Atlas.RenderShadows(renderGraph, cullResults, globalCB, hdCamera.frameSettings, "Punctual Lights Shadows rendering"); if (ShaderConfig.s_AreaLights == 1) { cachedShadowManager.areaShadowAtlas.RenderShadowMaps(renderGraph, cullResults, globalCB, hdCamera.frameSettings, "Cached Area Lights Shadows rendering"); - cachedShadowManager.areaShadowAtlas.AddBlitRequestsForUpdatedShadows(m_AreaLightShadowAtlas); BlitCachedShadows(renderGraph, ShadowMapType.AreaLightAtlas); m_AreaLightShadowAtlas.RenderShadowMaps(renderGraph, cullResults, globalCB, hdCamera.frameSettings, "Area Light Shadows rendering"); result.areaShadowResult = m_AreaLightShadowAtlas.BlurShadows(renderGraph); @@ -1337,8 +1335,6 @@ internal void RenderShadows(RenderGraph renderGraph, in ShaderVariablesGlobal gl { cachedShadowManager.UpdateDirectionalCacheTexture(renderGraph); cachedShadowManager.directionalLightAtlas.RenderShadows(renderGraph, cullResults, globalCB, hdCamera.frameSettings, "Cached Directional Lights Shadows rendering"); - - cachedShadowManager.directionalLightAtlas.AddBlitRequestsForUpdatedShadows(m_CascadeAtlas); } BlitCachedShadows(renderGraph, ShadowMapType.CascadedDirectional); } @@ -1434,26 +1430,26 @@ internal static void BindDefaultShadowGlobalResources(RenderGraph renderGraph) void BlitCachedShadows(RenderGraph renderGraph) { - m_Atlas.BlitCachedIntoAtlas(renderGraph, cachedShadowManager.punctualShadowAtlas.GetOutputTexture(renderGraph), cachedShadowManager.punctualShadowAtlas.width, m_BlitShadowMaterial, "Blit Punctual Mixed Cached Shadows", HDProfileId.BlitPunctualMixedCachedShadowMaps); + 
m_Atlas.BlitCachedIntoAtlas(renderGraph, cachedShadowManager.punctualShadowAtlas.GetOutputTexture(renderGraph), new Vector2Int(cachedShadowManager.punctualShadowAtlas.width, cachedShadowManager.punctualShadowAtlas.height), m_BlitShadowMaterial, "Blit Punctual Mixed Cached Shadows", HDProfileId.BlitPunctualMixedCachedShadowMaps); if (cachedShadowManager.DirectionalHasCachedAtlas()) { - m_CascadeAtlas.BlitCachedIntoAtlas(renderGraph, cachedShadowManager.directionalLightAtlas.GetOutputTexture(renderGraph), cachedShadowManager.directionalLightAtlas.width, m_BlitShadowMaterial, "Blit Directional Mixed Cached Shadows", HDProfileId.BlitDirectionalMixedCachedShadowMaps); + m_CascadeAtlas.BlitCachedIntoAtlas(renderGraph, cachedShadowManager.directionalLightAtlas.GetOutputTexture(renderGraph), new Vector2Int(cachedShadowManager.directionalLightAtlas.width, cachedShadowManager.directionalLightAtlas.height), m_BlitShadowMaterial, "Blit Directional Mixed Cached Shadows", HDProfileId.BlitDirectionalMixedCachedShadowMaps); } if (ShaderConfig.s_AreaLights == 1) { - m_AreaLightShadowAtlas.BlitCachedIntoAtlas(renderGraph, cachedShadowManager.areaShadowAtlas.GetOutputTexture(renderGraph), cachedShadowManager.areaShadowAtlas.width, m_BlitShadowMaterial, "Blit Area Mixed Cached Shadows", HDProfileId.BlitAreaMixedCachedShadowMaps); + m_AreaLightShadowAtlas.BlitCachedIntoAtlas(renderGraph, cachedShadowManager.areaShadowAtlas.GetOutputTexture(renderGraph), new Vector2Int(cachedShadowManager.areaShadowAtlas.width, cachedShadowManager.areaShadowAtlas.height), m_BlitShadowMaterial, "Blit Area Mixed Cached Shadows", HDProfileId.BlitAreaMixedCachedShadowMaps); } } void BlitCachedShadows(RenderGraph renderGraph, ShadowMapType shadowAtlas) { if (shadowAtlas == ShadowMapType.PunctualAtlas) - m_Atlas.BlitCachedIntoAtlas(renderGraph, cachedShadowManager.punctualShadowAtlas.GetOutputTexture(renderGraph), cachedShadowManager.punctualShadowAtlas.width, m_BlitShadowMaterial, "Blit Punctual Mixed Cached 
Shadows", HDProfileId.BlitPunctualMixedCachedShadowMaps); + m_Atlas.BlitCachedIntoAtlas(renderGraph, cachedShadowManager.punctualShadowAtlas.GetOutputTexture(renderGraph), new Vector2Int(cachedShadowManager.punctualShadowAtlas.width, cachedShadowManager.punctualShadowAtlas.height), m_BlitShadowMaterial, "Blit Punctual Mixed Cached Shadows", HDProfileId.BlitPunctualMixedCachedShadowMaps); if (shadowAtlas == ShadowMapType.CascadedDirectional && cachedShadowManager.DirectionalHasCachedAtlas()) - m_CascadeAtlas.BlitCachedIntoAtlas(renderGraph, cachedShadowManager.directionalLightAtlas.GetOutputTexture(renderGraph), cachedShadowManager.directionalLightAtlas.width, m_BlitShadowMaterial, "Blit Directional Mixed Cached Shadows", HDProfileId.BlitDirectionalMixedCachedShadowMaps); + m_CascadeAtlas.BlitCachedIntoAtlas(renderGraph, cachedShadowManager.directionalLightAtlas.GetOutputTexture(renderGraph), new Vector2Int(cachedShadowManager.directionalLightAtlas.width, cachedShadowManager.directionalLightAtlas.height), m_BlitShadowMaterial, "Blit Directional Mixed Cached Shadows", HDProfileId.BlitDirectionalMixedCachedShadowMaps); if (shadowAtlas == ShadowMapType.AreaLightAtlas && ShaderConfig.s_AreaLights == 1) - m_AreaLightShadowAtlas.BlitCachedIntoAtlas(renderGraph, cachedShadowManager.areaShadowAtlas.GetShadowMapDepthTexture(renderGraph), cachedShadowManager.areaShadowAtlas.width, m_BlitShadowMaterial, "Blit Area Mixed Cached Shadows", HDProfileId.BlitAreaMixedCachedShadowMaps); + m_AreaLightShadowAtlas.BlitCachedIntoAtlas(renderGraph, cachedShadowManager.areaShadowAtlas.GetShadowMapDepthTexture(renderGraph), new Vector2Int(cachedShadowManager.areaShadowAtlas.width, cachedShadowManager.areaShadowAtlas.height), m_BlitShadowMaterial, "Blit Area Mixed Cached Shadows", HDProfileId.BlitAreaMixedCachedShadowMaps); } } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricClouds.cs 
b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricClouds.cs index b7d5c57c81c..d63049451e3 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricClouds.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricClouds.cs @@ -19,9 +19,6 @@ public partial class HDRenderPipeline ComputeShader m_VolumetricCloudsCS; ComputeShader m_VolumetricCloudsTraceCS; - // Prepass kernels - int m_CloudDownscaleDepthKernel; - // Cloud rendering kernels int m_CloudRenderKernel; @@ -45,17 +42,9 @@ public partial class HDRenderPipeline Material m_CloudCombinePass; LocalKeyword m_OutputFogTransmittanceKeyword; - - // Animation time is shared for all cameras, but only updated by the main camera - internal struct VolumetricCloudsAnimationData - { - internal float time; - public Vector2 cloudOffset; - public float verticalShapeOffset; - public float verticalErosionOffset; - } - - internal VolumetricCloudsAnimationData m_CloudsAnimationData; + + float m_CloudsAnimationLastTime; + internal VolumetricClouds.AnimationData m_CloudsAnimationData; struct VolumetricCloudsCameraData { @@ -84,7 +73,6 @@ void InitializeVolumetricClouds() // Grab the kernels we need m_VolumetricCloudsCS = runtimeShaders.volumetricCloudsCS; - m_CloudDownscaleDepthKernel = m_VolumetricCloudsCS.FindKernel("DownscaleDepth"); m_ReprojectCloudsKernel = m_VolumetricCloudsCS.FindKernel("ReprojectClouds"); m_ReprojectCloudsRejectionKernel = m_VolumetricCloudsCS.FindKernel("ReprojectCloudsRejection"); @@ -108,9 +96,9 @@ void InitializeVolumetricClouds() AllocatePresetTextures(); // Initialize cloud animation + m_CloudsAnimationLastTime = -1.0f; m_CloudsAnimationData = new() { - time = -1.0f, cloudOffset = new Vector2(0.0f, 0.0f), verticalShapeOffset = 0.0f, verticalErosionOffset = 0.0f, @@ -364,6 +352,7 @@ void 
UpdateShaderVariablesClouds(ref ShaderVariablesClouds cb, HDCamera hdCamera sunAngleDifference = Quaternion.Angle(additionalLightData.previousTransform.rotation, additionalLightData.transform.localToWorldMatrix.rotation); cb._CloudHistoryInvalidation = Mathf.Lerp(1.0f, 0.0f, Mathf.Clamp((sunAngleDifference) / 10.0f, 0.0f, 1.0f)); cb._TemporalAccumulationFactor = settings.temporalAccumulationFactor.value; + cb._ImprovedTransmittanceBlend = settings.perceptualBlending.value; if (settings.fadeInMode.value == VolumetricClouds.CloudFadeInMode.Automatic) { @@ -395,11 +384,13 @@ void UpdateShaderVariablesClouds(ref ShaderVariablesClouds cb, HDCamera hdCamera cb._EnableFastToneMapping = cameraData.enableExposureControl ? 1 : 0; + bool quarterRes = cameraData.intermediateWidth != cameraData.finalWidth; cb._LowResolutionEvaluation = cameraData.lowResolution ? 1 : 0; cb._EnableIntegration = cameraData.enableIntegration ? 1 : 0; cb._CameraSpace = hdCamera.planet.renderingSpace == RenderingSpace.Camera ? 1 : 0; cb._ValidSceneDepth = cameraData.cameraType != TVolumetricCloudsCameraType.Sky ? 1 : 0; cb._IntermediateResolutionScale = cameraData.intermediateWidth == cameraData.finalWidth ? 1u : 2u; + cb._ReprojDepthMipOffset = hdCamera.depthBufferMipChainInfo.mipLevelOffsetsCheckerboard[quarterRes ? 
1 : 0]; unsafe { @@ -432,6 +423,7 @@ struct VolumetricCloudCommonData // Resolution parameters public TVolumetricCloudsCameraType cameraType; public bool enableExposureControl; + public bool perceptualBlending; public bool microErosion; public bool simplePreset; public bool pbrSkyActive; @@ -474,6 +466,7 @@ void FillVolumetricCloudsCommonData(HDCamera hdCamera, bool enableExposureContro commonData.renderKernel = m_CloudRenderKernel; commonData.pbrSkyActive = hdCamera.volumeStack.GetComponent().skyType.value == (int)SkyType.PhysicallyBased; commonData.traceForSky = cameraType == TVolumetricCloudsCameraType.Sky; + commonData.perceptualBlending = cameraType == TVolumetricCloudsCameraType.Default && !hdCamera.msaaEnabled && settings.perceptualBlending.value > 0.0f; // Static textures commonData.simplePreset = settings.cloudControl.value == VolumetricClouds.CloudControl.Simple; @@ -515,8 +508,8 @@ void UpdateVolumetricClouds(HDCamera hdCamera, in VolumetricClouds settings) if (EvaluateVolumetricCloudsHistoryValidity(hdCamera)) { float totalTime = Application.isPlaying ? 
Time.time : Time.realtimeSinceStartup; - float deltaTime = totalTime - m_CloudsAnimationData.time; - if (m_CloudsAnimationData.time == -1.0f) + float deltaTime = totalTime - m_CloudsAnimationLastTime; + if (m_CloudsAnimationLastTime == -1.0f) deltaTime = 0.0f; #if UNITY_EDITOR @@ -533,7 +526,7 @@ void UpdateVolumetricClouds(HDCamera hdCamera, in VolumetricClouds settings) Vector2 windDirection = new Vector2(Mathf.Cos(theta), Mathf.Sin(theta)); // Animate the offsets - m_CloudsAnimationData.time = totalTime; + m_CloudsAnimationLastTime = totalTime; m_CloudsAnimationData.cloudOffset += deltaTime * settings.globalWindSpeed.GetValue(hdCamera) * windDirection; m_CloudsAnimationData.verticalShapeOffset += deltaTime * settings.verticalShapeWindSpeed.value; m_CloudsAnimationData.verticalErosionOffset += deltaTime * settings.verticalErosionWindSpeed.value; @@ -641,7 +634,7 @@ internal struct VolumetricCloudsOutput } void RenderVolumetricClouds(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle colorBuffer, TextureHandle depthPyramid, - TextureHandle volumetricLighting, ref TransparentPrepassOutput transparentPrepass, ref TextureHandle opticalFogTransmittance) + ref TransparentPrepassOutput transparentPrepass, ref TextureHandle opticalFogTransmittance) { // If the current volume does not enable the feature, quit right away. 
VolumetricClouds settings = hdCamera.volumeStack.GetComponent(); @@ -670,11 +663,11 @@ void RenderVolumetricClouds(RenderGraph renderGraph, HDCamera hdCamera, TextureH // Render the clouds if (accumulationClouds) - transparentPrepass.clouds = RenderVolumetricClouds_Accumulation(renderGraph, hdCamera, cameraType, colorBuffer, depthPyramid, volumetricLighting); + transparentPrepass.clouds = RenderVolumetricClouds_Accumulation(renderGraph, hdCamera, cameraType, colorBuffer, depthPyramid); else if (fullResolutionClouds) - transparentPrepass.clouds = RenderVolumetricClouds_FullResolution(renderGraph, hdCamera, cameraType, colorBuffer, depthPyramid, volumetricLighting); + transparentPrepass.clouds = RenderVolumetricClouds_FullResolution(renderGraph, hdCamera, cameraType, colorBuffer, depthPyramid); else // realtime reflection - transparentPrepass.clouds = RenderVolumetricClouds_LowResolution(renderGraph, hdCamera, cameraType, colorBuffer, depthPyramid, volumetricLighting); + transparentPrepass.clouds = RenderVolumetricClouds_LowResolution(renderGraph, hdCamera, cameraType, colorBuffer, depthPyramid); // Push the texture to the debug menu if (m_CurrentDebugDisplaySettings.data.volumetricCloudDebug == VolumetricCloudsDebug.Lighting) @@ -683,13 +676,6 @@ void RenderVolumetricClouds(RenderGraph renderGraph, HDCamera hdCamera, TextureH PushFullScreenDebugTexture(m_RenderGraph, transparentPrepass.clouds.depthBuffer, FullScreenDebugMode.VolumetricClouds, GraphicsFormat.R32_SFloat); } - class AccumulateOpticalFogTransmittancePassData - { - public TextureHandle cloudsLighting; - public TextureHandle opticalFogTransmittance; - public Material cloudCombinePass; - } - void PreRenderVolumetricClouds(RenderGraph renderGraph, HDCamera hdCamera) { if (m_CurrentDebugDisplaySettings.DebugHideSky(hdCamera)) @@ -709,38 +695,60 @@ void PreRenderVolumetricClouds(RenderGraph renderGraph, HDCamera hdCamera) RenderVolumetricCloudsShadows(renderGraph, hdCamera, in settings); } - // Computes a 
half res buffer of the scene depth (TODO: share that with other effects) - static void DoVolumetricCloudsDepthDownscale(CommandBuffer cmd, int kernel, int traceTX, int traceTY, int viewCount, in VolumetricCloudCommonData commonData, - RTHandle depthPyramid, RTHandle halfResDepthBuffer) + static GraphicsFormat GetCloudsColorFormat(VolumetricClouds settings, bool isHistoryBuffer) { - using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.VolumetricCloudsDepthDownscale))) - { - // Compute the alternative version of the mip 1 of the depth (min instead of max that is required to handle high frequency meshes (vegetation, hair) - cmd.SetComputeTextureParam(commonData.volumetricCloudsCS, kernel, HDShaderIDs._DepthTexture, depthPyramid); - cmd.SetComputeTextureParam(commonData.volumetricCloudsCS, kernel, HDShaderIDs._HalfResDepthBufferRW, halfResDepthBuffer); - cmd.DispatchCompute(commonData.volumetricCloudsCS, kernel, traceTX, traceTY, viewCount); - } + // When neighborhood clamping is disabled, using R11G11B10 format for reprojection causes color shift + if (!settings.ghostingReduction.value && isHistoryBuffer) return GraphicsFormat.R16G16B16A16_SFloat; + return (GraphicsFormat)currentAsset?.currentPlatformRenderPipelineSettings.colorBufferFormat; + } + + void CreateTracingTextures(RenderGraph renderGraph, RenderGraphBuilder builder, VolumetricClouds settings, float scale, out TextureHandle cloudsLighting, out TextureHandle cloudsDepth) + { + cloudsLighting = builder.CreateTransientTexture(new TextureDesc(Vector2.one * scale, true, true) + { colorFormat = GetCloudsColorFormat(settings, false), enableRandomWrite = true, name = "Traced Clouds Lighting" }); + + cloudsDepth = builder.CreateTransientTexture(new TextureDesc(Vector2.one * scale, true, true) + { colorFormat = GraphicsFormat.R16G16_SFloat, enableRandomWrite = true, name = "Traced Clouds Depth" }); + } + + void CreateIntermediateTextures(RenderGraph renderGraph, RenderGraphBuilder builder, VolumetricClouds 
settings, out TextureHandle intermediate1, out TextureHandle intermediate2) + { + intermediate1 = builder.CreateTransientTexture(new TextureDesc(Vector2.one * 0.5f, true, true) + { colorFormat = GetCloudsColorFormat(settings, false), enableRandomWrite = true, name = "Temporary Clouds Lighting Buffer 1" }); + + intermediate2 = builder.CreateTransientTexture(new TextureDesc(Vector2.one * 0.5f, true, true) + { colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Temporary Clouds Lighting Buffer 2" }); + } + + void CreateOutputTextures(RenderGraph renderGraph, RenderGraphBuilder builder, VolumetricClouds settings, out TextureHandle cloudsLighting, out TextureHandle cloudsDepth) + { + cloudsLighting = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true) + { colorFormat = GetCloudsColorFormat(settings, false), enableRandomWrite = true, name = "Volumetric Clouds Lighting Texture" })); + + cloudsDepth = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true) + { colorFormat = GraphicsFormat.R16G16_SFloat, enableRandomWrite = true, name = "Volumetric Clouds Depth Texture" })); } static void DoVolumetricCloudsTrace(CommandBuffer cmd, int traceTX, int traceTY, int viewCount, in VolumetricCloudCommonData commonData, - RTHandle volumetricLightingTexture, RTHandle sceneDepth, GraphicsBuffer ambientProbe, + GraphicsBuffer ambientProbe, RTHandle colorBuffer, RTHandle depthPyramid, RTHandle cloudsLightingOutput, RTHandle cloudsDepthOutput) { using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.VolumetricCloudsTrace))) { + CoreUtils.SetKeyword(cmd, "PERCEPTUAL_TRANSMITTANCE", commonData.perceptualBlending); CoreUtils.SetKeyword(cmd, "CLOUDS_SIMPLE_PRESET", commonData.simplePreset); CoreUtils.SetKeyword(cmd, "CLOUDS_MICRO_EROSION", commonData.microErosion); CoreUtils.SetKeyword(cmd, "PHYSICALLY_BASED_SUN", commonData.pbrSkyActive); CoreUtils.SetKeyword(cmd, "TRACE_FOR_SKY", 
commonData.traceForSky); - cmd.SetComputeTextureParam(commonData.volumetricCloudsTraceCS, commonData.renderKernel, HDShaderIDs._VBufferLighting, volumetricLightingTexture); - cmd.SetComputeTextureParam(commonData.volumetricCloudsTraceCS, commonData.renderKernel, HDShaderIDs._VolumetricCloudsSourceDepth, sceneDepth); + cmd.SetComputeBufferParam(commonData.volumetricCloudsTraceCS, commonData.renderKernel, HDShaderIDs._VolumetricCloudsAmbientProbeBuffer, ambientProbe); + cmd.SetComputeTextureParam(commonData.volumetricCloudsTraceCS, commonData.renderKernel, HDShaderIDs._CameraColorTexture, colorBuffer); + cmd.SetComputeTextureParam(commonData.volumetricCloudsTraceCS, commonData.renderKernel, HDShaderIDs._CameraDepthTexture, depthPyramid); cmd.SetComputeTextureParam(commonData.volumetricCloudsTraceCS, commonData.renderKernel, HDShaderIDs._Worley128RGBA, commonData.worley128RGBA); cmd.SetComputeTextureParam(commonData.volumetricCloudsTraceCS, commonData.renderKernel, HDShaderIDs._ErosionNoise, commonData.erosionNoise); cmd.SetComputeTextureParam(commonData.volumetricCloudsTraceCS, commonData.renderKernel, HDShaderIDs._CloudMapTexture, commonData.cloudMapTexture); cmd.SetComputeTextureParam(commonData.volumetricCloudsTraceCS, commonData.renderKernel, HDShaderIDs._CloudLutTexture, commonData.cloudLutTexture); - cmd.SetComputeBufferParam(commonData.volumetricCloudsTraceCS, commonData.renderKernel, HDShaderIDs._VolumetricCloudsAmbientProbeBuffer, ambientProbe); // Output buffers cmd.SetComputeTextureParam(commonData.volumetricCloudsTraceCS, commonData.renderKernel, HDShaderIDs._CloudsLightingTextureRW, cloudsLightingOutput); @@ -751,7 +759,7 @@ static void DoVolumetricCloudsTrace(CommandBuffer cmd, int traceTX, int traceTY, } static void DoVolumetricCloudsReproject(CommandBuffer cmd, int kernel, int traceTX, int traceTY, int viewCount, in VolumetricCloudCommonData commonData, - RTHandle halfResCloudsLighting, RTHandle halfResCloudsDepth, RTHandle halfResDepthBuffer, + 
RTHandle tracedCloudsLighting, RTHandle tracedCloudsDepth, RTHandle depthPyramid, bool withHistory, bool clearHistory, RTHandle previousHistory0Buffer, RTHandle previousHistory1Buffer, RTHandle lightingOutput, RTHandle additionalOutput) { @@ -772,10 +780,10 @@ static void DoVolumetricCloudsReproject(CommandBuffer cmd, int kernel, int trace cmd.SetComputeTextureParam(commonData.volumetricCloudsCS, kernel, HDShaderIDs._HistoryVolumetricClouds1Texture, previousHistory1Buffer); } - // Re-project the result from the previous frame - cmd.SetComputeTextureParam(commonData.volumetricCloudsCS, kernel, HDShaderIDs._CloudsLightingTexture, halfResCloudsLighting); - cmd.SetComputeTextureParam(commonData.volumetricCloudsCS, kernel, HDShaderIDs._CloudsDepthTexture, halfResCloudsDepth); - cmd.SetComputeTextureParam(commonData.volumetricCloudsCS, kernel, HDShaderIDs._HalfResDepthBuffer, halfResDepthBuffer); + // Input textures + cmd.SetComputeTextureParam(commonData.volumetricCloudsCS, kernel, HDShaderIDs._CameraDepthTexture, depthPyramid); + cmd.SetComputeTextureParam(commonData.volumetricCloudsCS, kernel, HDShaderIDs._CloudsLightingTexture, tracedCloudsLighting); + cmd.SetComputeTextureParam(commonData.volumetricCloudsCS, kernel, HDShaderIDs._CloudsDepthTexture, tracedCloudsDepth); // Output textures cmd.SetComputeTextureParam(commonData.volumetricCloudsCS, kernel, HDShaderIDs._CloudsLightingTextureRW, lightingOutput); @@ -787,7 +795,7 @@ static void DoVolumetricCloudsReproject(CommandBuffer cmd, int kernel, int trace } static void DoVolumetricCloudsUpscale(CommandBuffer cmd, int kernel, int traceTX, int traceTY, int viewCount, in VolumetricCloudCommonData commonData, - RTHandle currentHistory0Buffer, RTHandle currentHistory1Buffer, RTHandle colorBuffer, RTHandle currentDepthBuffer, + RTHandle currentHistory0Buffer, RTHandle currentHistory1Buffer, RTHandle colorBuffer, RTHandle depthPyramid, RTHandle cloudsLighting, RTHandle cloudsDepth) { using (new ProfilingScope(cmd, 
ProfilingSampler.Get(HDProfileId.VolumetricCloudsUpscale))) @@ -795,9 +803,8 @@ static void DoVolumetricCloudsUpscale(CommandBuffer cmd, int kernel, int traceTX // Compute the final resolution parameters cmd.SetComputeTextureParam(commonData.volumetricCloudsCS, kernel, HDShaderIDs._VolumetricCloudsTexture, currentHistory0Buffer); cmd.SetComputeTextureParam(commonData.volumetricCloudsCS, kernel, HDShaderIDs._DepthStatusTexture, currentHistory1Buffer); - cmd.SetComputeTextureParam(commonData.volumetricCloudsCS, kernel, HDShaderIDs._CameraColorTexture, colorBuffer); - cmd.SetComputeTextureParam(commonData.volumetricCloudsCS, kernel, HDShaderIDs._DepthTexture, currentDepthBuffer); + cmd.SetComputeTextureParam(commonData.volumetricCloudsCS, kernel, HDShaderIDs._CameraDepthTexture, depthPyramid); // Output clouds texture (scattering + transmittance) cmd.SetComputeTextureParam(commonData.volumetricCloudsCS, kernel, HDShaderIDs._VolumetricCloudsLightingTextureRW, cloudsLighting); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricCloudsAccumulation.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricCloudsAccumulation.cs index 957cf38e244..ae1fd0f1622 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricCloudsAccumulation.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricCloudsAccumulation.cs @@ -5,35 +5,29 @@ namespace UnityEngine.Rendering.HighDefinition { public partial class HDRenderPipeline { - static RTHandle VolumetricCloudsHistoryBufferAllocatorFunction(HDCameraFrameHistoryType type, string viewName, int frameIndex, RTHandleSystem rtHandleSystem, bool fullscale) + // We need to store these as globals to avoid GC alloc in history allocator callbacks + static bool s_FullscaleHistory; + 
static GraphicsFormat s_FormatHistory; + static HDCameraFrameHistoryType s_TypeHistory; + + static RTHandle VolumetricCloudsHistoryBufferAllocatorFunction(string viewName, int frameIndex, RTHandleSystem rtHandleSystem) { - int index = type == HDCameraFrameHistoryType.VolumetricClouds0 ? 0 : 1; - return rtHandleSystem.Alloc(Vector2.one * (fullscale ? 1.0f : 0.5f), TextureXR.slices, colorFormat: GraphicsFormat.R16G16B16A16_SFloat, dimension: TextureXR.dimension, + return rtHandleSystem.Alloc(Vector2.one * (s_FullscaleHistory ? 1.0f : 0.5f), TextureXR.slices, colorFormat: s_FormatHistory, dimension: TextureXR.dimension, enableRandomWrite: true, useMipMap: false, autoGenerateMips: false, - name: string.Format("{0}_CloudsHistory{1}Buffer{2}", viewName, index, frameIndex)); + name: string.Format("{0}_CloudsHistory{1}Buffer{2}", viewName, s_TypeHistory, frameIndex)); } - static RTHandle VolumetricClouds0HistoryBufferAllocatorFunctionDownscaled(string viewName, int frameIndex, RTHandleSystem rtHandleSystem) - => VolumetricCloudsHistoryBufferAllocatorFunction(HDCameraFrameHistoryType.VolumetricClouds0, viewName, frameIndex, rtHandleSystem, false); - static RTHandle VolumetricClouds0HistoryBufferAllocatorFunction(string viewName, int frameIndex, RTHandleSystem rtHandleSystem) - => VolumetricCloudsHistoryBufferAllocatorFunction(HDCameraFrameHistoryType.VolumetricClouds0, viewName, frameIndex, rtHandleSystem, true); - - static RTHandle VolumetricClouds1HistoryBufferAllocatorFunctionDownscaled(string viewName, int frameIndex, RTHandleSystem rtHandleSystem) - => VolumetricCloudsHistoryBufferAllocatorFunction(HDCameraFrameHistoryType.VolumetricClouds1, viewName, frameIndex, rtHandleSystem, false); - static RTHandle VolumetricClouds1HistoryBufferAllocatorFunction(string viewName, int frameIndex, RTHandleSystem rtHandleSystem) - => VolumetricCloudsHistoryBufferAllocatorFunction(HDCameraFrameHistoryType.VolumetricClouds1, viewName, frameIndex, rtHandleSystem, true); - - static 
RTHandle RequestVolumetricCloudsHistoryTexture(HDCamera hdCamera, bool current, HDCameraFrameHistoryType type, bool fullscale) + static RTHandle RequestVolumetricCloudsHistoryTexture(HDCamera hdCamera, bool current, HDCameraFrameHistoryType type, bool fullscale, VolumetricClouds settings) { RTHandle texture = current ? hdCamera.GetCurrentFrameRT((int)type) : hdCamera.GetPreviousFrameRT((int)type); if (texture != null) return texture; - - // Do that to avoid GC.alloc - System.Func allocator = type == HDCameraFrameHistoryType.VolumetricClouds0 ? - (fullscale ? VolumetricClouds0HistoryBufferAllocatorFunction : VolumetricClouds0HistoryBufferAllocatorFunctionDownscaled) : - (fullscale ? VolumetricClouds1HistoryBufferAllocatorFunction : VolumetricClouds1HistoryBufferAllocatorFunctionDownscaled); - return hdCamera.AllocHistoryFrameRT((int)type, allocator, 2); + + s_TypeHistory = type; + s_FullscaleHistory = fullscale; + s_FormatHistory = type == HDCameraFrameHistoryType.VolumetricClouds0 ? GetCloudsColorFormat(settings, true) : GraphicsFormat.R16G16B16A16_SFloat; + + return hdCamera.AllocHistoryFrameRT((int)type, VolumetricCloudsHistoryBufferAllocatorFunction, 2); } private int CombineVolumetricCloudsHistoryStateToMask(HDCamera hdCamera) @@ -66,12 +60,10 @@ struct VolumetricCloudsParameters_Accumulation public int finalHeight; public int viewCount; - public bool downscaleDepth; public bool historyValidity; public Vector2Int previousViewportSize; // Compute shader and kernels - public int depthDownscaleKernel; public int reprojectKernel; public int upscaleClouds; @@ -108,7 +100,6 @@ VolumetricCloudsParameters_Accumulation PrepareVolumetricCloudsParameters_Accumu parameters.viewCount = hdCamera.viewCount; parameters.historyValidity = historyValidity; - parameters.downscaleDepth = downscaling == 0.5f; float historyScale = hdCamera.intermediateDownscaling * (hdCamera.volumetricCloudsFullscaleHistory ? 
1.0f : 2.0f); parameters.previousViewportSize = new Vector2Int( @@ -117,12 +108,13 @@ VolumetricCloudsParameters_Accumulation PrepareVolumetricCloudsParameters_Accumu ); // Compute shader and kernels - parameters.depthDownscaleKernel = m_CloudDownscaleDepthKernel; parameters.reprojectKernel = settings.ghostingReduction.value ? m_ReprojectCloudsRejectionKernel : m_ReprojectCloudsKernel; - if (downscaling == 0.5f) - parameters.upscaleClouds = hdCamera.msaaEnabled ? m_UpscaleCloudsKernel : m_UpscaleCloudsPerceptualKernel; + + bool quarterRes = downscaling == 0.5f; + if (parameters.commonData.perceptualBlending) + parameters.upscaleClouds = quarterRes ? m_UpscaleCloudsPerceptualKernel : m_CombineCloudsPerceptualKernel; else - parameters.upscaleClouds = hdCamera.msaaEnabled ? m_CombineCloudsKernel : m_CombineCloudsPerceptualKernel; + parameters.upscaleClouds = quarterRes ? m_UpscaleCloudsKernel : m_CombineCloudsKernel; // Update the constant buffer VolumetricCloudsCameraData cameraData; @@ -138,16 +130,12 @@ VolumetricCloudsParameters_Accumulation PrepareVolumetricCloudsParameters_Accumu cameraData.enableIntegration = true; UpdateShaderVariablesClouds(ref parameters.commonData.cloudsCB, hdCamera, settings, cameraData, cloudModelData, false); - // If this is a default camera, we want the improved blending, otherwise we don't (in the case of a planar) - parameters.commonData.cloudsCB._ImprovedTransmittanceBlend = parameters.commonData.cameraType == TVolumetricCloudsCameraType.Default ? perceptualBlending : 0.0f; - parameters.commonData.cloudsCB._CubicTransmittance = parameters.commonData.cameraType == TVolumetricCloudsCameraType.Default && hdCamera.msaaEnabled ? 
perceptualBlending : 0; - return parameters; } - static void TraceVolumetricClouds_Accumulation(CommandBuffer cmd, VolumetricCloudsParameters_Accumulation parameters, GraphicsBuffer ambientProbe, - RTHandle colorBuffer, RTHandle depthPyramid, RTHandle halfResDepthBuffer, RTHandle volumetricLightingTexture, - RTHandle intermediateCloudsLighting, RTHandle intermediateCloudsDepth, + static void TraceVolumetricClouds_Accumulation(CommandBuffer cmd, VolumetricCloudsParameters_Accumulation parameters, + GraphicsBuffer ambientProbe, RTHandle colorBuffer, RTHandle depthPyramid, + RTHandle tracedCloudsLighting, RTHandle tracedCloudsDepth, RTHandle currentHistory0Buffer, RTHandle previousHistory0Buffer, RTHandle currentHistory1Buffer, RTHandle previousHistory1Buffer, RTHandle cloudsLighting, RTHandle cloudsDepth) @@ -172,30 +160,20 @@ static void TraceVolumetricClouds_Accumulation(CommandBuffer cmd, VolumetricClou // The ideal approach would be to have a function for that returns the converted size from a viewport and texture size. // but for now we do it like this. 
Vector2Int previousViewportSize = previousHistory0Buffer.GetScaledSize(parameters.previousViewportSize); - parameters.commonData.cloudsCB._HistoryViewportSize = new Vector2(previousViewportSize.x, previousViewportSize.y); - parameters.commonData.cloudsCB._HistoryBufferSize = new Vector2(previousHistory0Buffer.rt.width, previousHistory0Buffer.rt.height); + parameters.commonData.cloudsCB._HistoryViewportScale.Set(previousViewportSize.x / (float)previousHistory0Buffer.rt.width, previousViewportSize.y / (float)previousHistory0Buffer.rt.height); // Bind the constant buffer (global as we need it for the .shader as well) ConstantBuffer.PushGlobal(cmd, parameters.commonData.cloudsCB, HDShaderIDs._ShaderVariablesClouds); ConstantBuffer.Set(parameters.commonData.volumetricCloudsCS, HDShaderIDs._ShaderVariablesClouds); - // Depth downscale - if (parameters.downscaleDepth) - { - DoVolumetricCloudsDepthDownscale(cmd, parameters.depthDownscaleKernel, intermediateTX, intermediateTY, parameters.viewCount, in parameters.commonData, - depthPyramid, halfResDepthBuffer); - } - else - halfResDepthBuffer = depthPyramid; - // Ray-march the clouds for this frame DoVolumetricCloudsTrace(cmd, traceTX, traceTY, parameters.viewCount, in parameters.commonData, - volumetricLightingTexture, halfResDepthBuffer, ambientProbe, - intermediateCloudsLighting, intermediateCloudsDepth); + ambientProbe, colorBuffer, depthPyramid, + tracedCloudsLighting, tracedCloudsDepth); // We only reproject for realtime clouds DoVolumetricCloudsReproject(cmd, parameters.reprojectKernel, intermediateTX, intermediateTY, parameters.viewCount, in parameters.commonData, - intermediateCloudsLighting, intermediateCloudsDepth, halfResDepthBuffer, + tracedCloudsLighting, tracedCloudsDepth, depthPyramid, true, !parameters.historyValidity, previousHistory0Buffer, previousHistory1Buffer, currentHistory0Buffer, currentHistory1Buffer); @@ -213,7 +191,6 @@ class VolumetricCloudsAccumulationData public TextureHandle depthPyramid; 
public TextureHandle motionVectors; public BufferHandle ambientProbeBuffer; - public TextureHandle volumetricLighting; // History and history output public TextureHandle previousHistoryBuffer0; @@ -222,17 +199,15 @@ class VolumetricCloudsAccumulationData public TextureHandle currentHistoryBuffer1; // Intermediate buffers - public TextureHandle intermediateCloudsLighting; - public TextureHandle halfResDepthBuffer; - public TextureHandle intermediateCloudsDepth; + public TextureHandle tracedCloudsLighting; + public TextureHandle tracedCloudsDepth; // Cloud pass output public TextureHandle cloudsLighting; public TextureHandle cloudsDepth; } - VolumetricCloudsOutput RenderVolumetricClouds_Accumulation(RenderGraph renderGraph, HDCamera hdCamera, TVolumetricCloudsCameraType cameraType, - TextureHandle colorBuffer, TextureHandle depthPyramid, TextureHandle volumetricLighting) + VolumetricCloudsOutput RenderVolumetricClouds_Accumulation(RenderGraph renderGraph, HDCamera hdCamera, TVolumetricCloudsCameraType cameraType, TextureHandle colorBuffer, TextureHandle depthPyramid) { using (var builder = renderGraph.AddRenderPass("Volumetric Clouds", out var passData, ProfilingSampler.Get(HDProfileId.VolumetricClouds))) { @@ -262,37 +237,23 @@ VolumetricCloudsOutput RenderVolumetricClouds_Accumulation(RenderGraph renderGra passData.colorBuffer = builder.ReadTexture(colorBuffer); passData.depthPyramid = builder.ReadTexture(depthPyramid); passData.ambientProbeBuffer = builder.ReadBuffer(renderGraph.ImportBuffer(m_CloudsDynamicProbeBuffer)); - passData.volumetricLighting = builder.ReadTexture(volumetricLighting); // History and pass output hdCamera.intermediateDownscaling = downscaling; - passData.currentHistoryBuffer0 = renderGraph.ImportTexture(RequestVolumetricCloudsHistoryTexture(hdCamera, true, HDCameraFrameHistoryType.VolumetricClouds0, fullscaleHistory)); - passData.previousHistoryBuffer0 = renderGraph.ImportTexture(RequestVolumetricCloudsHistoryTexture(hdCamera, false, 
HDCameraFrameHistoryType.VolumetricClouds0, fullscaleHistory)); - passData.currentHistoryBuffer1 = renderGraph.ImportTexture(RequestVolumetricCloudsHistoryTexture(hdCamera, true, HDCameraFrameHistoryType.VolumetricClouds1, fullscaleHistory)); - passData.previousHistoryBuffer1 = renderGraph.ImportTexture(RequestVolumetricCloudsHistoryTexture(hdCamera, false, HDCameraFrameHistoryType.VolumetricClouds1, fullscaleHistory)); - - if (passData.parameters.downscaleDepth) - passData.halfResDepthBuffer = builder.CreateTransientTexture(new TextureDesc(Vector2.one * downscaling, true, true) - { colorFormat = GraphicsFormat.R32_SFloat, enableRandomWrite = true, name = "Half Res Scene Depth" }); - - // Intermediate textures - passData.intermediateCloudsLighting = builder.CreateTransientTexture(new TextureDesc(Vector2.one * downscaling * 0.5f, true, true) - { colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Intermediate Clouds Lighting" }); - passData.intermediateCloudsDepth = builder.CreateTransientTexture(new TextureDesc(Vector2.one * downscaling * 0.5f, true, true) - { colorFormat = GraphicsFormat.R32_SFloat, enableRandomWrite = true, name = "Intermediate Clouds Depth" }); - - // Output of the clouds - passData.cloudsLighting = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true) - { colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Volumetric Clouds Lighting Texture" })); - passData.cloudsDepth = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true) - { colorFormat = GraphicsFormat.R32_SFloat, enableRandomWrite = true, name = "Volumetric Clouds Depth Texture" })); + passData.currentHistoryBuffer0 = renderGraph.ImportTexture(RequestVolumetricCloudsHistoryTexture(hdCamera, true, HDCameraFrameHistoryType.VolumetricClouds0, fullscaleHistory, settings)); + passData.previousHistoryBuffer0 = 
renderGraph.ImportTexture(RequestVolumetricCloudsHistoryTexture(hdCamera, false, HDCameraFrameHistoryType.VolumetricClouds0, fullscaleHistory, settings)); + passData.currentHistoryBuffer1 = renderGraph.ImportTexture(RequestVolumetricCloudsHistoryTexture(hdCamera, true, HDCameraFrameHistoryType.VolumetricClouds1, fullscaleHistory, settings)); + passData.previousHistoryBuffer1 = renderGraph.ImportTexture(RequestVolumetricCloudsHistoryTexture(hdCamera, false, HDCameraFrameHistoryType.VolumetricClouds1, fullscaleHistory, settings)); + + CreateTracingTextures(renderGraph, builder, settings, downscaling * 0.5f, out passData.tracedCloudsLighting, out passData.tracedCloudsDepth); + CreateOutputTextures(renderGraph, builder, settings, out passData.cloudsLighting, out passData.cloudsDepth); builder.SetRenderFunc( (VolumetricCloudsAccumulationData data, RenderGraphContext ctx) => { - TraceVolumetricClouds_Accumulation(ctx.cmd, data.parameters, data.ambientProbeBuffer, - data.colorBuffer, data.depthPyramid, data.halfResDepthBuffer, data.volumetricLighting, - data.intermediateCloudsLighting, data.intermediateCloudsDepth, + TraceVolumetricClouds_Accumulation(ctx.cmd, data.parameters, + data.ambientProbeBuffer, data.colorBuffer, data.depthPyramid, + data.tracedCloudsLighting, data.tracedCloudsDepth, data.currentHistoryBuffer0, data.previousHistoryBuffer0, data.currentHistoryBuffer1, data.previousHistoryBuffer1, data.cloudsLighting, data.cloudsDepth); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricCloudsFullResolution.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricCloudsFullResolution.cs index d78880aa3ec..206b54181a1 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricCloudsFullResolution.cs +++ 
b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricCloudsFullResolution.cs @@ -35,7 +35,7 @@ VolumetricCloudsParameters_FullResolution PrepareVolumetricCloudsParameters_Full parameters.viewCount = viewCount; // Compute shader and kernels - parameters.combineKernel = hdCamera.msaaEnabled ? m_CombineCloudsKernel : m_CombineCloudsPerceptualKernel; + parameters.combineKernel = parameters.commonData.perceptualBlending ? m_CombineCloudsKernel : m_CombineCloudsPerceptualKernel; // Update the constant buffer VolumetricCloudsCameraData cameraData; @@ -51,16 +51,11 @@ VolumetricCloudsParameters_FullResolution PrepareVolumetricCloudsParameters_Full cameraData.enableIntegration = true; UpdateShaderVariablesClouds(ref parameters.commonData.cloudsCB, hdCamera, settings, cameraData, cloudModelData, false); - // If this is a default camera, we want the improved blending, otherwise we don't (in the case of a planar) - float perceptualBlending = settings.perceptualBlending.value; - parameters.commonData.cloudsCB._ImprovedTransmittanceBlend = parameters.commonData.cameraType == TVolumetricCloudsCameraType.Default ? perceptualBlending : 0.0f; - parameters.commonData.cloudsCB._CubicTransmittance = parameters.commonData.cameraType == TVolumetricCloudsCameraType.Default && hdCamera.msaaEnabled ? 
perceptualBlending : 0; - return parameters; } - static void TraceVolumetricClouds_FullResolution(CommandBuffer cmd, VolumetricCloudsParameters_FullResolution parameters, GraphicsBuffer ambientProbeBuffer, - RTHandle colorBuffer, RTHandle depthPyramid, RTHandle volumetricLightingTexture, + static void TraceVolumetricClouds_FullResolution(CommandBuffer cmd, VolumetricCloudsParameters_FullResolution parameters, + GraphicsBuffer ambientProbeBuffer, RTHandle colorBuffer, RTHandle depthPyramid, RTHandle intermediateCloudsLighting, RTHandle intermediateCloudsDepth, RTHandle cloudsLighting, RTHandle cloudsDepth) { @@ -78,7 +73,7 @@ static void TraceVolumetricClouds_FullResolution(CommandBuffer cmd, VolumetricCl // Ray-march the clouds for this frame DoVolumetricCloudsTrace(cmd, finalTX, finalTY, parameters.viewCount, in parameters.commonData, - volumetricLightingTexture, depthPyramid, ambientProbeBuffer, + ambientProbeBuffer, colorBuffer, depthPyramid, intermediateCloudsLighting, intermediateCloudsDepth); DoVolumetricCloudsUpscale(cmd, parameters.combineKernel, finalTX, finalTY, parameters.viewCount, in parameters.commonData, @@ -95,18 +90,17 @@ class VolumetricCloudsFullResolutionData public TextureHandle colorBuffer; public TextureHandle depthPyramid; public BufferHandle ambientProbeBuffer; - public TextureHandle volumetricLighting; // Intermediate buffers - public TextureHandle intermediateLightingBuffer; - public TextureHandle intermediateBufferDepth; + public TextureHandle tracedCloudsLighting; + public TextureHandle tracedCloudsDepth; // Output buffer public TextureHandle cloudsLighting; public TextureHandle cloudsDepth; } - VolumetricCloudsOutput RenderVolumetricClouds_FullResolution(RenderGraph renderGraph, HDCamera hdCamera, TVolumetricCloudsCameraType cameraType, TextureHandle colorBuffer, TextureHandle depthPyramid, TextureHandle volumetricLighting) + VolumetricCloudsOutput RenderVolumetricClouds_FullResolution(RenderGraph renderGraph, HDCamera hdCamera, 
TVolumetricCloudsCameraType cameraType, TextureHandle colorBuffer, TextureHandle depthPyramid) { using (var builder = renderGraph.AddRenderPass("Volumetric Clouds Full Resolution", out var passData, ProfilingSampler.Get(HDProfileId.VolumetricClouds))) { @@ -120,25 +114,16 @@ VolumetricCloudsOutput RenderVolumetricClouds_FullResolution(RenderGraph renderG passData.colorBuffer = builder.ReadTexture(colorBuffer); passData.depthPyramid = builder.ReadTexture(depthPyramid); passData.ambientProbeBuffer = builder.ReadBuffer(renderGraph.ImportBuffer(m_CloudsDynamicProbeBuffer)); - passData.volumetricLighting = builder.ReadTexture(volumetricLighting); - - passData.intermediateLightingBuffer = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true) - { colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Temporary Clouds Lighting Buffer 0" }); - passData.intermediateBufferDepth = builder.CreateTransientTexture(new TextureDesc(Vector2.one, true, true) - { colorFormat = GraphicsFormat.R32_SFloat, enableRandomWrite = true, name = "Temporary Clouds Depth Buffer 0" }); - - // Output of the clouds - passData.cloudsLighting = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true) - { colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Volumetric Clouds Lighting Texture" })); - passData.cloudsDepth = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true) - { colorFormat = GraphicsFormat.R32_SFloat, enableRandomWrite = true, name = "Volumetric Clouds Depth Texture" })); + + CreateTracingTextures(renderGraph, builder, settings, 1.0f, out passData.tracedCloudsLighting, out passData.tracedCloudsDepth); + CreateOutputTextures(renderGraph, builder, settings, out passData.cloudsLighting, out passData.cloudsDepth); builder.SetRenderFunc( (VolumetricCloudsFullResolutionData data, RenderGraphContext ctx) => { - 
TraceVolumetricClouds_FullResolution(ctx.cmd, data.parameters, data.ambientProbeBuffer, - data.colorBuffer, data.depthPyramid, data.volumetricLighting, - data.intermediateLightingBuffer, data.intermediateBufferDepth, + TraceVolumetricClouds_FullResolution(ctx.cmd, data.parameters, + data.ambientProbeBuffer, data.colorBuffer, data.depthPyramid, + data.tracedCloudsLighting, data.tracedCloudsDepth, data.cloudsLighting, data.cloudsDepth); }); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricCloudsLowResolution.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricCloudsLowResolution.cs index 2777dc8b886..3b00d1f2e71 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricCloudsLowResolution.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricCloudsLowResolution.cs @@ -17,7 +17,6 @@ struct VolumetricCloudsParameters_LowResolution public int viewCount; // Used kernels - public int depthDownscaleKernel; public int preUpscaleKernel; public int upscaleKernel; @@ -50,9 +49,8 @@ VolumetricCloudsParameters_LowResolution PrepareVolumetricCloudsParameters_LowRe parameters.viewCount = viewCount; // Compute shader and kernels - parameters.depthDownscaleKernel = m_CloudDownscaleDepthKernel; parameters.preUpscaleKernel = m_PreUpscaleCloudsKernel; - parameters.upscaleKernel = hdCamera.msaaEnabled ? 
m_UpscaleCloudsKernel : m_UpscaleCloudsPerceptualKernel; + parameters.upscaleKernel = m_UpscaleCloudsKernel; // Update the constant buffer VolumetricCloudsCameraData cameraData; @@ -71,10 +69,10 @@ VolumetricCloudsParameters_LowResolution PrepareVolumetricCloudsParameters_LowRe return parameters; } - static void TraceVolumetricClouds_LowResolution(CommandBuffer cmd, VolumetricCloudsParameters_LowResolution parameters, GraphicsBuffer ambientProbeBuffer, - RTHandle colorBuffer, RTHandle depthPyramid, RTHandle volumetricLightingTexture, - RTHandle intermediateCloudsLighting, RTHandle intermediateLightingBuffer1, RTHandle intermediateLightingBuffer2, - RTHandle halfResDepthBuffer, RTHandle intermediateCloudsDepth, + static void TraceVolumetricClouds_LowResolution(CommandBuffer cmd, VolumetricCloudsParameters_LowResolution parameters, + GraphicsBuffer ambientProbeBuffer, RTHandle colorBuffer, RTHandle depthPyramid, + RTHandle tracedCloudsLighting, RTHandle tracedCloudsDepth, + RTHandle intermediateLightingBuffer1, RTHandle intermediateLightingBuffer2, RTHandle cloudsLighting, RTHandle cloudsDepth) { // Compute the number of tiles to evaluate @@ -97,18 +95,14 @@ static void TraceVolumetricClouds_LowResolution(CommandBuffer cmd, VolumetricClo ConstantBuffer.Set(parameters.commonData.volumetricCloudsCS, HDShaderIDs._ShaderVariablesClouds); ConstantBuffer.Set(parameters.commonData.volumetricCloudsTraceCS, HDShaderIDs._ShaderVariablesClouds); - // Depth downscale - DoVolumetricCloudsDepthDownscale(cmd, parameters.depthDownscaleKernel, intermediateTX, intermediateTY, parameters.viewCount, in parameters.commonData, - depthPyramid, halfResDepthBuffer); - // Ray-march the clouds for this frame DoVolumetricCloudsTrace(cmd, traceTX, traceTY, parameters.viewCount, in parameters.commonData, - volumetricLightingTexture, halfResDepthBuffer, ambientProbeBuffer, - intermediateCloudsLighting, intermediateCloudsDepth); + ambientProbeBuffer, colorBuffer, depthPyramid, + 
tracedCloudsLighting, tracedCloudsDepth); // We only reproject for realtime clouds DoVolumetricCloudsReproject(cmd, parameters.preUpscaleKernel, intermediateTX, intermediateTY, parameters.viewCount, in parameters.commonData, - intermediateCloudsLighting, intermediateCloudsDepth, halfResDepthBuffer, + tracedCloudsLighting, tracedCloudsDepth, depthPyramid, false, false, null, null, // no history reprojection intermediateLightingBuffer1, intermediateLightingBuffer2); @@ -126,21 +120,19 @@ class VolumetricCloudsLowResolutionData public TextureHandle depthPyramid; public TextureHandle maxZMask; public BufferHandle ambientProbeBuffer; - public TextureHandle volumetricLighting; // Intermediate buffers - public TextureHandle halfResCloudsLighting; + public TextureHandle tracedCloudsLighting; public TextureHandle intermediateLightingBuffer1; public TextureHandle intermediateLightingBuffer2; - public TextureHandle halfResDepthBuffer; - public TextureHandle halfResCloudsDepth; + public TextureHandle tracedCloudsDepth; // Output buffer public TextureHandle cloudsLighting; public TextureHandle cloudsDepth; } - VolumetricCloudsOutput RenderVolumetricClouds_LowResolution(RenderGraph renderGraph, HDCamera hdCamera, TVolumetricCloudsCameraType cameraType, TextureHandle colorBuffer, TextureHandle depthPyramid, TextureHandle volumetricLighting) + VolumetricCloudsOutput RenderVolumetricClouds_LowResolution(RenderGraph renderGraph, HDCamera hdCamera, TVolumetricCloudsCameraType cameraType, TextureHandle colorBuffer, TextureHandle depthPyramid) { using (var builder = renderGraph.AddRenderPass("Volumetric Clouds Low Resolution", out var passData, ProfilingSampler.Get(HDProfileId.VolumetricClouds))) { @@ -154,32 +146,18 @@ VolumetricCloudsOutput RenderVolumetricClouds_LowResolution(RenderGraph renderGr passData.colorBuffer = builder.ReadTexture(colorBuffer); passData.depthPyramid = builder.ReadTexture(depthPyramid); passData.ambientProbeBuffer = 
builder.ReadBuffer(renderGraph.ImportBuffer(m_CloudsDynamicProbeBuffer)); - passData.volumetricLighting = builder.ReadTexture(volumetricLighting); - - // Intermediate buffers - passData.halfResCloudsLighting = builder.CreateTransientTexture(new TextureDesc(Vector2.one * 0.5f, true, true) - { colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Half Res Clouds Lighting" }); - passData.intermediateLightingBuffer1 = builder.CreateTransientTexture(new TextureDesc(Vector2.one * 0.5f, true, true) - { colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Temporary Clouds Lighting Buffer 1" }); - passData.intermediateLightingBuffer2 = builder.CreateTransientTexture(new TextureDesc(Vector2.one * 0.5f, true, true) - { colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Temporary Clouds Lighting Buffer 2" }); - passData.halfResDepthBuffer = builder.CreateTransientTexture(new TextureDesc(Vector2.one * 0.5f, true, true) - { colorFormat = GraphicsFormat.R32_SFloat, enableRandomWrite = true, name = "Half Res Scene Depth" }); - passData.halfResCloudsDepth = builder.CreateTransientTexture(new TextureDesc(Vector2.one * 0.5f, true, true) - { colorFormat = GraphicsFormat.R32_SFloat, enableRandomWrite = true, name = "Half Res Clouds Depth" }); - - // Output of the clouds - passData.cloudsLighting = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true) - { colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, name = "Volumetric Clouds Lighting Texture" })); - passData.cloudsDepth = builder.WriteTexture(renderGraph.CreateTexture(new TextureDesc(Vector2.one, true, true) - { colorFormat = GraphicsFormat.R32_SFloat, enableRandomWrite = true, name = "Volumetric Clouds Depth Texture" })); + + CreateTracingTextures(renderGraph, builder, settings, 0.25f, out passData.tracedCloudsLighting, out passData.tracedCloudsDepth); + 
CreateIntermediateTextures(renderGraph, builder, settings, out passData.intermediateLightingBuffer1, out passData.intermediateLightingBuffer2); + CreateOutputTextures(renderGraph, builder, settings, out passData.cloudsLighting, out passData.cloudsDepth); builder.SetRenderFunc( (VolumetricCloudsLowResolutionData data, RenderGraphContext ctx) => { - TraceVolumetricClouds_LowResolution(ctx.cmd, data.parameters, data.ambientProbeBuffer, - data.colorBuffer, data.depthPyramid, data.volumetricLighting, - data.halfResCloudsLighting, data.intermediateLightingBuffer1, data.intermediateLightingBuffer2, data.halfResDepthBuffer, data.halfResCloudsDepth, + TraceVolumetricClouds_LowResolution(ctx.cmd, data.parameters, + data.ambientProbeBuffer, data.colorBuffer, data.depthPyramid, + data.tracedCloudsLighting, data.tracedCloudsDepth, + data.intermediateLightingBuffer1, data.intermediateLightingBuffer2, data.cloudsLighting, data.cloudsDepth); }); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricCloudsSky.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricCloudsSky.cs index 2eba60e0179..3a6fc34046c 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricCloudsSky.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/HDRenderPipeline.VolumetricCloudsSky.cs @@ -9,14 +9,14 @@ TextureDesc GetVolumetricCloudsIntermediateLightingBufferDesc() { int skyResolution = (int)m_Asset.currentPlatformRenderPipelineSettings.lightLoopSettings.skyReflectionSize; return new TextureDesc(skyResolution, skyResolution, false, true) - { colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true }; + { colorFormat = GraphicsFormat.B10G11R11_UFloatPack32, enableRandomWrite = true }; } TextureDesc 
GetVolumetricCloudsIntermediateDepthBufferDesc() { int skyResolution = (int)m_Asset.currentPlatformRenderPipelineSettings.lightLoopSettings.skyReflectionSize; return new TextureDesc(skyResolution, skyResolution, false, true) - { colorFormat = GraphicsFormat.R32_SFloat, enableRandomWrite = true }; + { colorFormat = GraphicsFormat.R16G16_SFloat, enableRandomWrite = true }; } TextureDesc GetVolumetricCloudsIntermediateCubeTextureDesc() @@ -26,6 +26,13 @@ TextureDesc GetVolumetricCloudsIntermediateCubeTextureDesc() { slices = TextureXR.slices, dimension = TextureDimension.Cube, colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = true, useMipMap = true, autoGenerateMips = false }; } + TextureDesc GetVolumetricCloudsMetalCopyBufferDesc() + { + int skyResolution = (int)m_Asset.currentPlatformRenderPipelineSettings.lightLoopSettings.skyReflectionSize; + return new TextureDesc(skyResolution, skyResolution, false, true) + { colorFormat = GraphicsFormat.R16G16B16A16_SFloat, enableRandomWrite = false }; + } + class VolumetricCloudsSkyLowPassData { // Resolution parameters @@ -116,10 +123,11 @@ static void TraceVolumetricClouds_Sky_Low(CommandBuffer cmd, VolumetricCloudsSky // Ray-march the clouds for this frame DoVolumetricCloudsTrace(cmd, traceTX, traceTY, 1, in passData.commonData, - TextureXR.GetBlackTextureArray(), TextureXR.GetBlackTexture(), passData.ambientProbeBuffer, + passData.ambientProbeBuffer, TextureXR.GetBlackTextureArray(), TextureXR.GetBlackTextureArray(), passData.intermediateLightingBuffer, passData.intermediateDepthBuffer); mpb.SetTexture(HDShaderIDs._VolumetricCloudsLightingTexture, passData.intermediateLightingBuffer); + mpb.SetTexture(HDShaderIDs._VolumetricCloudsDepthTexture, passData.intermediateDepthBuffer); CoreUtils.SetRenderTarget(cmd, passData.output, ClearFlag.None, miplevel: 2, cubemapFace: passData.cubemapFace); CoreUtils.DrawFullScreen(cmd, passData.cloudCombinePass, mpb, 3); } @@ -140,8 +148,8 @@ class 
VolumetricCloudsSkyHighPassData public Matrix4x4[] pixelCoordToViewDir; - public TextureHandle intermediateLightingBuffer0; - public TextureHandle intermediateLightingBuffer1; + public TextureHandle intermediateLightingBuffer; + public TextureHandle cameraColorCopy; public TextureHandle intermediateDepthBuffer; public TextureHandle output; public BufferHandle ambientProbeBuffer; @@ -183,11 +191,11 @@ void PrepareVolumetricCloudsSkyHighPassData(RenderGraph renderGraph, RenderGraph UpdateShaderVariablesClouds(ref data.commonData.cloudsCB, hdCamera, settings, cameraData, cloudModelData, false); int skyResolution = (int)m_Asset.currentPlatformRenderPipelineSettings.lightLoopSettings.skyReflectionSize; - data.intermediateLightingBuffer0 = builder.CreateTransientTexture(GetVolumetricCloudsIntermediateLightingBufferDesc()); + data.intermediateLightingBuffer = builder.CreateTransientTexture(GetVolumetricCloudsIntermediateLightingBufferDesc()); data.intermediateDepthBuffer = builder.CreateTransientTexture(GetVolumetricCloudsIntermediateDepthBufferDesc()); if (SystemInfo.graphicsDeviceType == GraphicsDeviceType.Metal) { - data.intermediateLightingBuffer1 = builder.CreateTransientTexture(GetVolumetricCloudsIntermediateLightingBufferDesc()); + data.cameraColorCopy = builder.CreateTransientTexture(GetVolumetricCloudsMetalCopyBufferDesc()); data.output = builder.ReadWriteTexture(output); } else @@ -211,26 +219,28 @@ static void RenderVolumetricClouds_Sky_High(CommandBuffer cmd, VolumetricCloudsS // Ray-march the clouds for this frame DoVolumetricCloudsTrace(cmd, finalTX, finalTY, 1, in passData.commonData, - TextureXR.GetBlackTextureArray(), TextureXR.GetBlackTexture(), passData.ambientProbeBuffer, - passData.intermediateLightingBuffer0, passData.intermediateDepthBuffer); + passData.ambientProbeBuffer, TextureXR.GetBlackTextureArray(), TextureXR.GetBlackTextureArray(), + passData.intermediateLightingBuffer, passData.intermediateDepthBuffer); if (SystemInfo.graphicsDeviceType == 
GraphicsDeviceType.Metal) { // On Intel GPUs on OSX, due to the fact that we cannot always rely on pre-exposure the hardware blending fails and turns into Nans when // the values are close to the max fp16 value. We do the blending manually on metal to avoid that behavior. // Copy the target face of the cubemap into a temporary texture - cmd.CopyTexture(passData.output, (int)passData.cubemapFace, 0, passData.intermediateLightingBuffer1, 0, 0); + cmd.CopyTexture(passData.output, (int)passData.cubemapFace, 0, passData.cameraColorCopy, 0, 0); // Output the result into the output buffer - mpb.SetTexture(HDShaderIDs._CameraColorTexture, passData.intermediateLightingBuffer1); - mpb.SetTexture(HDShaderIDs._VolumetricCloudsLightingTexture, passData.intermediateLightingBuffer0); + mpb.SetTexture(HDShaderIDs._CameraColorTexture, passData.cameraColorCopy); + mpb.SetTexture(HDShaderIDs._VolumetricCloudsLightingTexture, passData.intermediateLightingBuffer); + mpb.SetTexture(HDShaderIDs._VolumetricCloudsDepthTexture, passData.intermediateDepthBuffer); CoreUtils.SetRenderTarget(cmd, passData.output, ClearFlag.None, 0, passData.cubemapFace); CoreUtils.DrawFullScreen(cmd, passData.cloudCombinePass, mpb, 1); } else { // Output the result into the output buffer - mpb.SetTexture(HDShaderIDs._VolumetricCloudsLightingTexture, passData.intermediateLightingBuffer0); + mpb.SetTexture(HDShaderIDs._VolumetricCloudsLightingTexture, passData.intermediateLightingBuffer); + mpb.SetTexture(HDShaderIDs._VolumetricCloudsDepthTexture, passData.intermediateDepthBuffer); CoreUtils.SetRenderTarget(cmd, passData.output, ClearFlag.None, 0, passData.cubemapFace); CoreUtils.DrawFullScreen(cmd, passData.cloudCombinePass, mpb, 2); } @@ -346,7 +356,7 @@ internal void RenderVolumetricClouds_Sky(RenderGraph renderGraph, HDCamera hdCam passData.input = builder.ReadTexture(intermediateCubemap); if (SystemInfo.graphicsDeviceType == GraphicsDeviceType.Metal) { - passData.intermediateBuffer = 
builder.CreateTransientTexture(GetVolumetricCloudsIntermediateLightingBufferDesc()); + passData.intermediateBuffer = builder.CreateTransientTexture(GetVolumetricCloudsMetalCopyBufferDesc()); passData.output = builder.ReadWriteTexture(skyboxCubemap); } else diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricClouds.Migration.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricClouds.Migration.cs index 9843df749d1..d41f006e54b 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricClouds.Migration.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricClouds.Migration.cs @@ -57,7 +57,6 @@ enum Version }), MigrationStep.New(Version.SharedRenderingSpace, (VolumetricClouds c) => { - c.perceptualBlending.value = 0.0f; if (c.active == false || c.enable.value == false) return; diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricClouds.compute b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricClouds.compute index 056a2c73282..eff0b2c9372 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricClouds.compute +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricClouds.compute @@ -1,20 +1,17 @@ #pragma only_renderers d3d11 playstation xboxone xboxseries vulkan metal switch -// Depth related kernels -#pragma kernel DownscaleDepth - // Trace to intermediate -#pragma kernel ReprojectClouds REPROJECT_CLOUDS=ReprojectClouds -#pragma kernel ReprojectCloudsRejection REPROJECT_CLOUDS=ReprojectCloudsRejection WITH_REJECTION +#pragma kernel ReprojectClouds REPROJECT_CLOUDS=ReprojectClouds +#pragma kernel ReprojectCloudsRejection REPROJECT_CLOUDS=ReprojectCloudsRejection 
WITH_REJECTION #pragma kernel PreUpscaleClouds // Intermediate to Full resolution -#pragma kernel UpscaleClouds UPSCALE_CLOUDS=UpscaleClouds -#pragma kernel UpscaleCloudsPerceptual UPSCALE_CLOUDS=UpscaleCloudsPerceptual PERCEPTUAL_TRANSMITTANCE +#pragma kernel UpscaleClouds UPSCALE_CLOUDS=UpscaleClouds +#pragma kernel UpscaleCloudsPerceptual UPSCALE_CLOUDS=UpscaleCloudsPerceptual PERCEPTUAL_TRANSMITTANCE // Full resolution combination -#pragma kernel CombineClouds COMBINE_CLOUDS=CombineClouds -#pragma kernel CombineCloudsPerceptual COMBINE_CLOUDS=CombineCloudsPerceptual PERCEPTUAL_TRANSMITTANCE +#pragma kernel CombineClouds COMBINE_CLOUDS=CombineClouds +#pragma kernel CombineCloudsPerceptual COMBINE_CLOUDS=CombineCloudsPerceptual PERCEPTUAL_TRANSMITTANCE // #define WITHOUT_LDS // #pragma enable_d3d11_debug_symbols @@ -27,37 +24,16 @@ #include "Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsUtilities.hlsl" #include "Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsDenoising.hlsl" -// Input textures -TEXTURE2D_X(_CameraColorTexture); -TEXTURE2D_X(_DepthTexture); +// Buffer that holds the offset for every level of the depth pyramid +StructuredBuffer _DepthPyramidMipLevelOffsets; // History buffers TEXTURE2D_X(_HistoryVolumetricClouds0Texture); TEXTURE2D_X(_HistoryVolumetricClouds1Texture); // Output texture -RW_TEXTURE2D_X(float, _HalfResDepthBufferRW); -RW_TEXTURE2D_X(float4, _CloudsLightingTextureRW); -RW_TEXTURE2D_X(float3, _CloudsAdditionalTextureRW); - -[numthreads(8, 8, 1)] -void DownscaleDepth(uint3 intermediateCoord : SV_DispatchThreadID, uint2 groupThreadId : SV_GroupThreadID, uint2 groupId : SV_GroupID) -{ - UNITY_XR_ASSIGN_VIEW_INDEX(intermediateCoord.z); - - // If this is bigger than the trace size, we are done - if (any(intermediateCoord.xy >= uint2(_IntermediateScreenSize.xy))) - return; - - // TODO USE LDS for this - float depth0 = 
LOAD_TEXTURE2D_X(_DepthTexture, intermediateCoord.xy * 2.0).x; - float depth1 = LOAD_TEXTURE2D_X(_DepthTexture, intermediateCoord.xy * 2.0 + int2(0, 1)).x; - float depth2 = LOAD_TEXTURE2D_X(_DepthTexture, intermediateCoord.xy * 2.0 + int2(1, 1)).x; - float depth3 = LOAD_TEXTURE2D_X(_DepthTexture, intermediateCoord.xy * 2.0 + int2(1, 0)).x; - - // Combine it with the current shift to define which half res depth should be used - _HalfResDepthBufferRW[COORD_TEXTURE2D_X(intermediateCoord.xy)] = min(min(depth0, depth1), min(depth2, depth3)); -} +RW_TEXTURE2D_X(float3, _CloudsLightingTextureRW); +RW_TEXTURE2D_X(float4, _CloudsAdditionalTextureRW); [numthreads(8, 8, 1)] void REPROJECT_CLOUDS(uint3 dispatchThreadId : SV_DispatchThreadID, @@ -71,8 +47,13 @@ void REPROJECT_CLOUDS(uint3 dispatchThreadId : SV_DispatchThreadID, uint2 intermediateCoord = dispatchThreadId.xy; uint2 fullResCoord = intermediateCoord * _IntermediateResolutionScale; uint2 traceCoord = intermediateCoord / 2; + uint2 localOffset = uint2(intermediateCoord.x & 1, intermediateCoord.y & 1); + +#ifdef WITHOUT_LDS + uint2 threadCoord = traceCoord; +#else + uint2 threadCoord = groupThreadId; -#ifndef WITHOUT_LDS // Only 36 workers of the 64 do the pre-fetching if (groupIndex < 36) { @@ -83,178 +64,94 @@ void REPROJECT_CLOUDS(uint3 dispatchThreadId : SV_DispatchThreadID, GroupMemoryBarrierWithGroupSync(); #endif -#ifdef WITHOUT_LDS - // Average the depth of the cloud - float currentCloudDepth = LOAD_TEXTURE2D_X(_CloudsDepthTexture, traceCoord).x; -#else - // Average the depth of the cloud - float currentCloudDepth = GetCloudDepth_LDS(groupThreadId, int2(0, 0)); -#endif - - // Compute the motionVector of the clouds - float2 motionVector = EvaluateCloudMotionVectors(fullResCoord, currentCloudDepth, 1.0); + // 1. 
Init various stuff + float currentSceneDepth = LOAD_TEXTURE2D_X(_CameraDepthTexture, _ReprojDepthMipOffset + intermediateCoord).x; + float currentCloudDepth = GetCloudDepth(threadCoord, int2(0, 0)); - // Compute the history pixel coordinate to tap from - float2 historyCoord = (intermediateCoord.xy + 0.5) - motionVector * _IntermediateScreenSize.xy; - float2 clampedHistoryUV = clamp(historyCoord, 0.0, _IntermediateScreenSize.xy - 0.5f) / _IntermediateScreenSize.xy; + bool validTracing = all(localOffset == ComputeCheckerBoardOffset(traceCoord, _SubPixelIndex)); - // Read the volumetric cloud value from the previous frame - float2 ratioScale = _HistoryViewportSize / _HistoryBufferSize; - float2 historySampleCoords = clampedHistoryUV * ratioScale; + float4 finalColor = GetCloudLighting(threadCoord, int2(0, 0)); + float finalCloudDepth = currentCloudDepth; + float finalSampleCount = 1.0; - // Grab the history values - float4 previousResult = SAMPLE_TEXTURE2D_X_LOD(_HistoryVolumetricClouds0Texture, s_linear_clamp_sampler, historySampleCoords, 0); - float3 previousResult1 = SAMPLE_TEXTURE2D_X_LOD(_HistoryVolumetricClouds1Texture, s_linear_clamp_sampler, historySampleCoords, 0).xyz; - - // Inverse the exposure of the previous frame and apply the current one (need to be done in linear space) - previousResult.xyz *= GetInversePreviousExposureMultiplier() * GetCurrentExposureMultiplier(); - - // Unpack the second buffer - float previousSampleCount = previousResult1.x; - float previousDepth = previousResult1.y; - float previousCloudDepth = previousResult1.z; - - // Reproject previous cloud depth in case near plane has changed - previousCloudDepth = saturate(previousCloudDepth * _NearPlaneReprojection); - - // This tracks if the history is considered valid - bool validHistory = previousSampleCount >= 0.5f; + // 2. 
Check history validity + float2 motionVector = EvaluateCloudMotionVectors(fullResCoord, currentCloudDepth, 1.0); + float2 historyUV = (intermediateCoord.xy + 0.5) / _IntermediateScreenSize.xy - motionVector; - // The history is invalid if we are requesting a value outside the frame - if(historyCoord.x < 0.0 || historyCoord.x >= _IntermediateScreenSize.x || historyCoord.y < 0.0 || historyCoord.y >= _IntermediateScreenSize.y) - validHistory = false; + float4 history = SAMPLE_TEXTURE2D_X_LOD(_HistoryVolumetricClouds1Texture, s_linear_clamp_sampler, historyUV * _HistoryViewportScale, 0); + float previousSampleCount = history.x; - // Read the resolution of the current pixel - float currentDepth = LOAD_TEXTURE2D_X(_HalfResDepthBuffer, intermediateCoord).x; - - // Compare the depth of the current pixel to the one of its history, if they are too different, we cannot consider this history valid - float linearPrevDepth = Linear01Depth(previousDepth, _ZBufferParams); - float linearCurrentDepth = Linear01Depth(currentDepth, _ZBufferParams); - - // We only need to check if the pixel depth coherence if the clouds can be behind and in front of the pixel - if (abs(linearPrevDepth - linearCurrentDepth) > linearCurrentDepth * 0.2) - validHistory = false; - - float validityFactor = 1.0; -#ifdef WITH_REJECTION - // We need to validate that within the 3x3 trace region, at least one of the pixels is not a background pixel (including the clouds) - float4 lightingMin = float4(FLT_MAX, FLT_MAX, FLT_MAX, 1.0); - float4 lightingMax = float4(0, 0, 0, 0.0); - for (int y = -1; y <= 1; ++y) + // History is invalid if sample is out of screen or scene depth was too different + if (all(historyUV == saturate(historyUV)) && previousSampleCount >= 0.5f && EvaluateDepthDifference(history.y, currentSceneDepth)) { - for (int x = -1; x <= 1; ++x) - { - #ifdef WITHOUT_LDS - float4 cloudLigting = LOAD_TEXTURE2D_X(_CloudsLightingTexture, traceCoord + int2(x, y)); - #else - float4 cloudLigting = 
GetCloudLighting_LDS(groupThreadId, int2(x, y)); - #endif - lightingMin = min(lightingMin, cloudLigting); - lightingMax = max(lightingMax, cloudLigting); - } - } + float4 previousColor = SAMPLE_TEXTURE2D_X_LOD(_HistoryVolumetricClouds0Texture, s_linear_clamp_sampler, historyUV * _HistoryViewportScale, 0); + previousColor.xyz *= GetInversePreviousExposureMultiplier() * GetCurrentExposureMultiplier(); + previousColor.a = history.a; - if (currentDepth == UNITY_RAW_FAR_CLIP_VALUE) - previousResult = ClipCloudsToRegion(previousResult, lightingMin, lightingMax, validityFactor); -#endif + float previousCloudDepth = history.z; + previousCloudDepth = saturate(previousCloudDepth * _NearPlaneReprojection); - // Compute the local index that tells us the index of this pixel, the strategy for reprojection is a bit different in both cases - int localIndex = (intermediateCoord.x & 1) + (intermediateCoord.y & 1) * 2; - int currentIndex = ComputeCheckerBoardIndex(traceCoord, _SubPixelIndex); - if (localIndex == currentIndex) - { - // We need to validate that within the 3x3 trace region, at least one of the pixels is not a background pixel (incluing the clouds) - float cloudNeighborhood = 0.0f; + // Color clamp the history with neighborhood + float validityFactor = 1.0; + + #ifdef WITH_REJECTION + float4 lightingMin = float4(FLT_MAX, FLT_MAX, FLT_MAX, 1.0); + float4 lightingMax = float4(0, 0, 0, 0.0); for (int y = -1; y <= 1; ++y) { for (int x = -1; x <= 1; ++x) { - #ifdef WITHOUT_LDS - if (LOAD_TEXTURE2D_X(_CloudsDepthTexture, traceCoord + int2(x, y)).x != 0.0f) - cloudNeighborhood += 1.0f; - #else - if (GetCloudDepth_LDS(groupThreadId, int2(x, y)) != 0.0f) - cloudNeighborhood += 1.0f; - #endif + CloudReprojectionData data = GetCloudReprojectionDataSample(threadCoord, int2(x, y)); + if ((data.pixelDepth == UNITY_RAW_FAR_CLIP_VALUE) == (currentSceneDepth == UNITY_RAW_FAR_CLIP_VALUE)) + { + lightingMin = min(lightingMin, data.cloudLighting); + lightingMax = max(lightingMax, 
data.cloudLighting); + } } } - // If the target coordinate is out of the screen, we cannot use the history - float accumulationFactor = 0.0; - float sampleCount = 1.0; - if (validHistory && cloudNeighborhood != 0.0f) + previousColor = ClipCloudsToRegion(previousColor, lightingMin, lightingMax, validityFactor); + #endif + + if (validTracing) { // Define our accumation value - accumulationFactor = previousSampleCount >= 16.0 ? 0.94117647058 : (previousSampleCount / (previousSampleCount + 1.0)); - accumulationFactor *= _TemporalAccumulationFactor * validityFactor * _CloudHistoryInvalidation; - sampleCount = min(previousSampleCount + 1.0, 16.0); - } + float accumulationFactor = validityFactor * previousSampleCount / (previousSampleCount + 1.0); + accumulationFactor *= _TemporalAccumulationFactor * _CloudHistoryInvalidation; - // Accumulate the result with the previous frame - #ifdef WITHOUT_LDS - previousResult = accumulationFactor * previousResult + (1.0 - accumulationFactor) * LOAD_TEXTURE2D_X(_CloudsLightingTexture, traceCoord); - #else - previousResult = accumulationFactor * previousResult + (1.0 - accumulationFactor) * GetCloudLighting_LDS(groupThreadId, int2(0, 0)); - #endif - previousSampleCount = sampleCount; - previousDepth = currentDepth; - - // If there are no clouds in the new pixel, we force the depth to zero. Otherwise, we are likely on a pixel - // which state is not stable and we take the maximum of both frames as we cannot interpolate between the depths. - previousCloudDepth = previousResult.w == 1.0 ? 
UNITY_RAW_FAR_CLIP_VALUE : currentCloudDepth; + finalColor = lerp(finalColor, previousColor, accumulationFactor); + finalSampleCount = min(previousSampleCount + 1.0, 16.0); + } + else + { + finalColor = previousColor; + finalCloudDepth = previousCloudDepth; + finalSampleCount = validityFactor * previousSampleCount * _CloudHistoryInvalidation; + } } - else + else if (!validTracing) { - // Reduce the history validity a bit - previousSampleCount *= validityFactor * _CloudHistoryInvalidation; + // Bilateral upscale in case we have no data + NeighborhoodUpsampleData3x3 upsampleData; + uint localIndex = (intermediateCoord.x & 1) + ((intermediateCoord.y & 1) << 1); + FillCloudReprojectionNeighborhoodData(threadCoord, localIndex, upsampleData); - // If the target coordinate is out of the screen or the depth that was used to generate it - // is too different from the one of the current pixel, we cannot use the history - if (!validHistory) - { - // Structure that will hold everything - NeighborhoodUpsampleData3x3 upsampleData; - #ifdef WITHOUT_LDS - FillCloudReprojectionNeighborhoodData_NOLDS(traceCoord, localIndex, upsampleData); - #else - FillCloudReprojectionNeighborhoodData(groupThreadId, localIndex, upsampleData); - #endif - // Make sure that at least one of the pixels in the neighborhood can be used - bool rejectNeighborhood; - int closestNeighbor = 4; - OverrideMaskValues(currentDepth, upsampleData, rejectNeighborhood, closestNeighbor); - - if (rejectNeighborhood) - { - // We don't have any valid history and there is no neighbor that is usable, we consider that we have no clouds. 
- previousResult = float4(0.0, 0.0, 0.0, 1.0); - previousSampleCount = 0.0f; - } - else - { - // We don't have any history for this pixel, but there is at least one neighbor that can be used in the current frame tracing - previousSampleCount = 1.0f; - previousResult = BilUpColor3x3(currentDepth, upsampleData); + bool isSky = currentSceneDepth == UNITY_RAW_FAR_CLIP_VALUE; + upsampleData.lowWeightA *= ((upsampleData.lowDepthA == UNITY_RAW_FAR_CLIP_VALUE) == isSky); + upsampleData.lowWeightB *= ((upsampleData.lowDepthB == UNITY_RAW_FAR_CLIP_VALUE) == isSky); + upsampleData.lowWeightC *= ((upsampleData.lowDepthC == UNITY_RAW_FAR_CLIP_VALUE) == isSky); - // Due to numerical precision issues, upscaling a bunch of 1.0 can lead to a slightly lower number, this fixes it. - if (EvaluateRegionEmptiness(upsampleData) == 1.0) - previousResult = float4(0, 0, 0, 1); - } - #ifdef WITHOUT_LDS - previousCloudDepth = LOAD_TEXTURE2D_X(_CloudsDepthTexture, traceCoord + IndexToLocalOffsetCoords[closestNeighbor]).x; - #else - previousCloudDepth = GetCloudDepth_LDS(groupThreadId, IndexToLocalOffsetCoords[closestNeighbor]); - #endif - } - previousDepth = currentDepth; + // Depth are not converted to linear 01 space on purpose here + // But it would be slower without noticeable quality improvement + BilUpColor3x3(currentSceneDepth, upsampleData, finalColor, finalCloudDepth); } - // Make sure this doesn't go outside of the [0, 1] interval - previousResult.w = saturate(previousResult.w); + // 3. Export + finalColor.a = saturate(finalColor.a); + finalCloudDepth = finalColor.a == 1.0 ? 
UNITY_RAW_FAR_CLIP_VALUE : finalCloudDepth; - // Accumulate the result with the previous frame - _CloudsLightingTextureRW[COORD_TEXTURE2D_X(intermediateCoord)] = previousResult; - _CloudsAdditionalTextureRW[COORD_TEXTURE2D_X(intermediateCoord)] = float3(previousSampleCount, previousDepth, previousCloudDepth); + _CloudsLightingTextureRW[COORD_TEXTURE2D_X(intermediateCoord)] = finalColor.xyz; + _CloudsAdditionalTextureRW[COORD_TEXTURE2D_X(intermediateCoord)] = float4(finalSampleCount, currentSceneDepth, finalCloudDepth, finalColor.a); } [numthreads(8, 8, 1)] @@ -268,8 +165,13 @@ void PreUpscaleClouds(uint3 dispatchThreadId : SV_DispatchThreadID, // Compute the set of coordinates we need uint2 intermediateCoord = dispatchThreadId.xy; uint2 traceCoord = intermediateCoord / 2; + uint2 localOffset = uint2(intermediateCoord.x & 1, intermediateCoord.y & 1); + +#ifdef WITHOUT_LDS + uint2 threadCoord = traceCoord; +#else + uint2 threadCoord = groupThreadId; -#ifndef WITHOUT_LDS // Only 36 workers of the 64 do the pre-fetching if (groupIndex < 36) { @@ -280,122 +182,57 @@ void PreUpscaleClouds(uint3 dispatchThreadId : SV_DispatchThreadID, GroupMemoryBarrierWithGroupSync(); #endif -#ifdef WITHOUT_LDS - // Average the depth of the cloud - float currentCloudDepth = LOAD_TEXTURE2D_X(_CloudsDepthTexture, traceCoord).x; -#else - // Average the depth of the cloud - float currentCloudDepth = GetCloudDepth_LDS(groupThreadId, int2(0, 0)); -#endif + // Read the resolution of the current pixel + float currentSceneDepth = LOAD_TEXTURE2D_X(_CameraDepthTexture, _ReprojDepthMipOffset + intermediateCoord).x; + float currentCloudDepth = GetCloudDepth(threadCoord, int2(0, 0)); + + bool validTracing = all(localOffset == ComputeCheckerBoardOffset(traceCoord, _SubPixelIndex)); - float cloudCloudDepth = 0; - float currentDepth = LOAD_TEXTURE2D_X(_HalfResDepthBuffer, intermediateCoord).x; - float4 cloudLighting = 0; + float finalCloudDepth = 0; + float4 finalColor = 0; // Compute the local index 
that tells us the index of this pixel, the strategy for reprojection is a bit different in both cases - int localIndex = (intermediateCoord.x & 1) + (intermediateCoord.y & 1) * 2; - int currentIndex = _EnableIntegration ? ComputeCheckerBoardIndex(intermediateCoord / 2, _SubPixelIndex) : 0; - if (localIndex == currentIndex) + if (validTracing) { // Accumulate the result with the previous frame - #ifdef WITHOUT_LDS - cloudLighting = LOAD_TEXTURE2D_X(_CloudsLightingTexture, traceCoord); - #else - cloudLighting = GetCloudLighting_LDS(groupThreadId, int2(0, 0)); - #endif - cloudCloudDepth = currentCloudDepth; + finalColor = GetCloudLighting(threadCoord, int2(0, 0)); + finalCloudDepth = currentCloudDepth; } else { // Structure that will hold everything NeighborhoodUpsampleData3x3 upsampleData; - #ifdef WITHOUT_LDS - FillCloudReprojectionNeighborhoodData_NOLDS(traceCoord, localIndex, upsampleData); - #else - FillCloudReprojectionNeighborhoodData(groupThreadId, localIndex, upsampleData); - #endif - // Make sure that at least one of the pixels in the neighborhood can be used - float rejectNeighborhood; - int closestNeighbor; - OverrideMaskValues(currentDepth, upsampleData, rejectNeighborhood, closestNeighbor); + uint localIndex = (intermediateCoord.x & 1) + ((intermediateCoord.y & 1) << 1); + FillCloudReprojectionNeighborhoodData(threadCoord, localIndex, upsampleData); - // 1.0 if we were able to produce a value 0.0 if we failed to - if (rejectNeighborhood) - { - // We don't have any valid history and there is no neighbor that is usable - cloudLighting = 0.0f; - } - else - { - // We don't have any history for this pixel, but there is at least one neighbor that can be used in the current frame tracing - cloudLighting = BilUpColor3x3(currentDepth, upsampleData); - } - #ifdef WITHOUT_LDS - cloudCloudDepth = LOAD_TEXTURE2D_X(_CloudsDepthTexture, traceCoord + IndexToLocalOffsetCoords[closestNeighbor]).x; - #else - cloudCloudDepth = GetCloudDepth_LDS(groupThreadId, 
IndexToLocalOffsetCoords[closestNeighbor]); - #endif - - // Due to numerical precision issues, upscaling a bunch of 1.0 can lead to a slightly lower number, this fixes it. - if (EvaluateRegionEmptiness(upsampleData) == 1.0) - cloudLighting = float4(0, 0, 0, 1); + BilUpColor3x3(currentSceneDepth, upsampleData, finalColor, finalCloudDepth); } // Make sure this doesn't go outside of the [0, 1] interval - cloudLighting.w = saturate(cloudLighting.w); + finalColor.w = saturate(finalColor.w); // Accumulate the result with the previous frame - _CloudsLightingTextureRW[COORD_TEXTURE2D_X(intermediateCoord)] = cloudLighting; - _CloudsAdditionalTextureRW[COORD_TEXTURE2D_X(intermediateCoord)] = float3(1, currentDepth, cloudCloudDepth); + _CloudsLightingTextureRW[COORD_TEXTURE2D_X(intermediateCoord)] = finalColor.xyz; + _CloudsAdditionalTextureRW[COORD_TEXTURE2D_X(intermediateCoord)] = float4(1, currentSceneDepth, finalCloudDepth, finalColor.a); } -// Constant buffer where all variables should land -CBUFFER_START(VolumetricCloudsUpscaleConstantBuffer) - float2 _UpperScreenSize; -CBUFFER_END - -RW_TEXTURE2D_X(float4, _VolumetricCloudsLightingTextureRW); -RW_TEXTURE2D_X(float, _VolumetricCloudsDepthTextureRW); - -void FillLDSUpscale(uint groupIndex, uint2 groupOrigin) -{ - // Define which value we will be acessing with this worker thread - int acessCoordX = groupIndex % 6; - int acessCoordY = groupIndex / 6; - - // Everything we are accessing is in intermediate res (half rez). 
- uint2 traceGroupOrigin = groupOrigin / 2; - - // The initial position of the access - int2 originXY = traceGroupOrigin - int2(1, 1); - - // Compute the sample position - int2 sampleCoord = int2(clamp(originXY.x + acessCoordX, 0, _IntermediateScreenSize.x - 1), clamp(originXY.y + acessCoordY, 0, _IntermediateScreenSize.y - 1)); - - // Read the sample value - float4 sampleVal = LOAD_TEXTURE2D_X(_VolumetricCloudsTexture, sampleCoord); - float3 depthStatusValue = LOAD_TEXTURE2D_X(_DepthStatusTexture, sampleCoord).xyz; - - // Store into the LDS - gs_cacheR[groupIndex] = sampleVal.r; - gs_cacheG[groupIndex] = sampleVal.g; - gs_cacheB[groupIndex] = sampleVal.b; - gs_cacheA[groupIndex] = sampleVal.a; - gs_cacheDP[groupIndex] = depthStatusValue.y; - gs_cachePS[groupIndex] = saturate(depthStatusValue.x); - gs_cacheDC[groupIndex] = depthStatusValue.z; -} +RW_TEXTURE2D_X(float3, _VolumetricCloudsLightingTextureRW); +RW_TEXTURE2D_X(float2, _VolumetricCloudsDepthTextureRW); [numthreads(8, 8, 1)] void UPSCALE_CLOUDS(uint3 finalCoord : SV_DispatchThreadID, - int groupIndex : SV_GroupIndex, - uint2 groupThreadId : SV_GroupThreadID, - uint2 groupId : SV_GroupID) + int groupIndex : SV_GroupIndex, + uint2 groupThreadId : SV_GroupThreadID, + uint2 groupId : SV_GroupID) { UNITY_XR_ASSIGN_VIEW_INDEX(finalCoord.z); int2 halfResCoord = finalCoord.xy / 2; -#ifndef WITHOUT_LDS +#ifdef WITHOUT_LDS + int2 threadCoord = halfResCoord; +#else + int2 threadCoord = groupThreadId; + // Only 36 workers of the 64 do the pre-fetching if (groupIndex < 36) { @@ -412,71 +249,42 @@ void UPSCALE_CLOUDS(uint3 finalCoord : SV_DispatchThreadID, return; // Grab the depth value of the pixel - float highDepth = LOAD_TEXTURE2D_X(_DepthTexture, finalCoord.xy).x; - - // Compute the index of the pixel in the 2x2 region (L->R, T->B) - uint subRegionIndex = (finalCoord.x & 1) + (finalCoord.y & 1) * 2; + float sceneDepth = LOAD_TEXTURE2D_X(_CameraDepthTexture, finalCoord.xy).x; // Structure that will hold everything 
NeighborhoodUpsampleData3x3 upsampleData; + uint localIndex = (finalCoord.x & 1) + (finalCoord.y & 1) * 2; + FillCloudUpscaleNeighborhoodData(threadCoord, localIndex, upsampleData); -#ifndef WITHOUT_LDS - // Fill the sample data - FillCloudUpscaleNeighborhoodData(groupThreadId.xy, subRegionIndex, upsampleData); -#else - // Fill the sample data - FillCloudUpscaleNeighborhoodData_NOLDS(halfResCoord, subRegionIndex, upsampleData); -#endif + // Solves edge filtering in most cases + bool isSky = sceneDepth == UNITY_RAW_FAR_CLIP_VALUE; + upsampleData.lowWeightA *= ((upsampleData.lowDepthA == UNITY_RAW_FAR_CLIP_VALUE) == isSky); + upsampleData.lowWeightB *= ((upsampleData.lowDepthB == UNITY_RAW_FAR_CLIP_VALUE) == isSky); + upsampleData.lowWeightC *= ((upsampleData.lowDepthC == UNITY_RAW_FAR_CLIP_VALUE) == isSky); - // This flags allows us to track if at least one of the contributing pixels has some clouds - int closestNeighbor; - bool rejectedNeighborhood; - OverrideMaskValues(highDepth, upsampleData, rejectedNeighborhood, closestNeighbor); + // Convert the depths to linear, helps when scene depth has checkerboard pattern + float linearSceneDepth = Linear01Depth(sceneDepth, _ZBufferParams); + upsampleData.lowDepthA = Linear01Depth(upsampleData.lowDepthA, _ZBufferParams); + upsampleData.lowDepthB = Linear01Depth(upsampleData.lowDepthB, _ZBufferParams); + upsampleData.lowDepthC = Linear01Depth(upsampleData.lowDepthC, _ZBufferParams); // Do the bilateral upscale - float4 currentClouds = BilUpColor3x3(highDepth, upsampleData); - - // Read the fallback value and use it if we defined that it was impossible for us to do something about it - if (rejectedNeighborhood) - currentClouds = float4(0.0, 0.0, 0.0, 1.0); - - // De-tonemap the inscattering value - currentClouds.w = saturate(currentClouds.w); - - // Due to numerical precision issues, upscaling a bunch of 1.0 can lead to a slightly lower number, this fixes it. 
- if (EvaluateRegionEmptiness(upsampleData) == 1.0) - currentClouds = float4(0, 0, 0, 1); - - // We cannot simply pick the low res depth that ressembles the most the full resolution pixel - // Two cases are possible: - // - The final pixel doesn't have any clouds. In this case, we don't care about the depth. - // - The final pixel has some amount of clouds (which doesn't neccesarly map to any specific pixel in the neighborhood as it has been interpolated). - // In the second this case we need to take the average depth (in linear space) of the contributing pixels (the one that have some clouds in them). - #ifdef WITHOUT_LDS - float cloudDepth = currentClouds.w != 1.0 ? EvaluateUpscaledCloudDepth_NOLDS(halfResCoord, upsampleData) : UNITY_RAW_FAR_CLIP_VALUE; - #else - //float cloudDepth = GetCloudDepth_LDS(groupThreadId, IndexToLocalOffsetCoords[closestNeighbor]); - float cloudDepth = currentClouds.w != 1.0 ? EvaluateUpscaledCloudDepth(groupThreadId, upsampleData) : UNITY_RAW_FAR_CLIP_VALUE; - #endif - -#if defined(PERCEPTUAL_TRANSMITTANCE) - // Estimate the transmittance that shall be used - float4 currentColor = _CameraColorTexture[COORD_TEXTURE2D_X(finalCoord.xy)]; - float finalTransmittance = EvaluateFinalTransmittance(currentColor.rgb, currentClouds.w); -#else - float finalTransmittance = _CubicTransmittance ? currentClouds.a * currentClouds.a : currentClouds.a; -#endif + float4 finalColor; + float finalCloudDepth; + BilUpColor3x3(linearSceneDepth, upsampleData, finalColor, finalCloudDepth); + + finalColor.a = EvaluateFinalTransmittance(finalCoord.xy, finalColor.a); // Store the upscaled result only, composite in later pass. 
- _VolumetricCloudsLightingTextureRW[COORD_TEXTURE2D_X(finalCoord.xy)] = float4(currentClouds.xyz, finalTransmittance); - _VolumetricCloudsDepthTextureRW[COORD_TEXTURE2D_X(finalCoord.xy)] = cloudDepth; + _VolumetricCloudsLightingTextureRW[COORD_TEXTURE2D_X(finalCoord.xy)] = finalColor.rgb; + _VolumetricCloudsDepthTextureRW[COORD_TEXTURE2D_X(finalCoord.xy)] = float2(finalCloudDepth, finalColor.a); } [numthreads(8, 8, 1)] void COMBINE_CLOUDS(uint3 finalCoord : SV_DispatchThreadID, - int groupIndex : SV_GroupIndex, - uint2 groupThreadId : SV_GroupThreadID, - uint2 groupId : SV_GroupID) + int groupIndex : SV_GroupIndex, + uint2 groupThreadId : SV_GroupThreadID, + uint2 groupId : SV_GroupID) { UNITY_XR_ASSIGN_VIEW_INDEX(finalCoord.z); @@ -484,27 +292,12 @@ void COMBINE_CLOUDS(uint3 finalCoord : SV_DispatchThreadID, if (any(finalCoord.xy >= uint2(_FinalScreenSize.xy))) return; - // Grab the depth value of the pixel - float highDepth = LOAD_TEXTURE2D_X(_DepthTexture, finalCoord.xy).x; - float4 sampleVal = LOAD_TEXTURE2D_X(_VolumetricCloudsTexture, finalCoord.xy); - - // Do the bilateral upscale - float4 currentClouds = sampleVal; - - // De-tonemap the inscattering value - currentClouds.w = saturate(currentClouds.w); - - float cloudsDepth = LOAD_TEXTURE2D_X(_DepthStatusTexture, finalCoord.xy).z; + float3 color = LOAD_TEXTURE2D_X(_VolumetricCloudsTexture, finalCoord.xy).xyz; + float2 depthTransmittance = LOAD_TEXTURE2D_X(_DepthStatusTexture, finalCoord.xy).zw; -#if defined(PERCEPTUAL_TRANSMITTANCE) - // Estimate the transmittance that shall be used - float4 currentColor = _CameraColorTexture[COORD_TEXTURE2D_X(finalCoord.xy)]; - float finalTransmittance = EvaluateFinalTransmittance(currentColor.rgb, currentClouds.w); -#else - float finalTransmittance = _CubicTransmittance ? 
currentClouds.a * currentClouds.a : currentClouds.a; -#endif + depthTransmittance.y = EvaluateFinalTransmittance(finalCoord.xy, depthTransmittance.y); // Store the upscaled result only, composite in later pass. - _VolumetricCloudsLightingTextureRW[COORD_TEXTURE2D_X(finalCoord.xy)] = float4(currentClouds.xyz, finalTransmittance); - _VolumetricCloudsDepthTextureRW[COORD_TEXTURE2D_X(finalCoord.xy)] = cloudsDepth; + _VolumetricCloudsLightingTextureRW[COORD_TEXTURE2D_X(finalCoord.xy)] = color; + _VolumetricCloudsDepthTextureRW[COORD_TEXTURE2D_X(finalCoord.xy)] = depthTransmittance; } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricClouds.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricClouds.cs index 2346dfb24a9..b321f312df5 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricClouds.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricClouds.cs @@ -11,6 +11,26 @@ namespace UnityEngine.Rendering.HighDefinition [HDRPHelpURL("Override-Volumetric-Clouds")] public sealed partial class VolumetricClouds : VolumeComponent { + /// + /// Struct holding animation data for volumetric clouds. + /// Animation data is shared for all cameras, but only updated by the main camera. + /// + public struct AnimationData + { + internal Vector2 cloudOffset; + internal float verticalShapeOffset; + internal float verticalErosionOffset; + } + + /// + /// Override current clouds animation data. Can be used to synchronize clouds over the network. + /// + public static AnimationData animationData + { + get => HDRenderPipeline.currentPipeline.m_CloudsAnimationData; + set { HDRenderPipeline.currentPipeline.m_CloudsAnimationData = value; } + } + /// /// Control mode for the volumetric clouds. 
/// @@ -437,13 +457,13 @@ public CloudPresets cloudPreset /// Enable/Disable the volumetric clouds ghosting reduction. When enabled, reduces significantly the ghosting of the volumetric clouds, but may introduce some flickering at lower temporal accumulation factors. ///
    [Tooltip("Enable/Disable the volumetric clouds ghosting reduction. When enabled, reduces significantly the ghosting of the volumetric clouds, but may introduce some flickering at lower temporal accumulation factors.")] - public BoolParameter ghostingReduction = new BoolParameter(false); + public BoolParameter ghostingReduction = new BoolParameter(true); /// /// Specifies the strength of the perceptual blending for the volumetric clouds. This value should be treated as flag and only be set to 0.0 or 1.0. /// [Tooltip("Specifies the strength of the perceptual blending for the volumetric clouds. This value should be treated as flag and only be set to 0.0 or 1.0.")] - public ClampedFloatParameter perceptualBlending = new ClampedFloatParameter(0.0f, 0.0f, 1.0f); + public ClampedFloatParameter perceptualBlending = new ClampedFloatParameter(1.0f, 0.0f, 1.0f); /// /// When enabled, HDRP evaluates the Volumetric Clouds' shadows. The Volumetric Clouds shadow is rendered independently of the shadow map toggle of the directional light. 
diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsCombine.shader b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsCombine.shader index 7d4483e3770..0fcdc0ec9ff 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsCombine.shader +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsCombine.shader @@ -21,7 +21,6 @@ Shader "Hidden/HDRP/VolumetricCloudsCombine" // Aerial perspective is already applied during cloud tracing #define ATMOSPHERE_NO_AERIAL_PERSPECTIVE - TEXTURE2D_X(_CameraColorTexture); TEXTURE2D_X(_VolumetricCloudsLightingTexture); TEXTURE2D_X(_VolumetricCloudsDepthTexture); TEXTURECUBE(_VolumetricCloudsTexture); @@ -59,7 +58,7 @@ Shader "Hidden/HDRP/VolumetricCloudsCombine" ZWrite Off // If this is a background pixel, we want the cloud value, otherwise we do not. 
- Blend 0 One OneMinusSrcAlpha, Zero One + Blend 0 One SrcAlpha, Zero One Blend 1 DstColor Zero // Multiply to combine the transmittance HLSLPROGRAM @@ -77,10 +76,11 @@ Shader "Hidden/HDRP/VolumetricCloudsCombine" UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(input); // Read cloud data - color = LOAD_TEXTURE2D_X(_VolumetricCloudsLightingTexture, input.positionCS.xy); - color.a = 1.0f - color.a; + float3 clouds = LOAD_TEXTURE2D_X(_VolumetricCloudsLightingTexture, input.positionCS.xy).xyz; + float transmittance = LOAD_TEXTURE2D_X(_VolumetricCloudsDepthTexture, input.positionCS.xy).y; - float transmittance = 1.0f - color.a; + color.rgb = clouds; + color.a = transmittance; float deviceDepth = LOAD_TEXTURE2D_X(_VolumetricCloudsDepthTexture, input.positionCS.xy).x; float linearDepth = DecodeInfiniteDepth(deviceDepth, _CloudNearPlane); @@ -95,7 +95,7 @@ Shader "Hidden/HDRP/VolumetricCloudsCombine" // Apply fog float3 volColor, volOpacity; EvaluateAtmosphericScattering(posInput, V, volColor, volOpacity); - color.rgb = color.rgb * (1 - volOpacity) + volColor * color.a; + color.rgb = color.rgb * (1 - volOpacity) + volColor * (1 - color.a); // Output transmittance for lens flares #if defined(OUTPUT_TRANSMITTANCE_BUFFER) @@ -109,6 +109,7 @@ Shader "Hidden/HDRP/VolumetricCloudsCombine" Pass { // Pass 1 + // Sky high on metal Cull Off ZWrite Off ZTest Always @@ -116,13 +117,17 @@ Shader "Hidden/HDRP/VolumetricCloudsCombine" HLSLPROGRAM + TEXTURE2D_X(_CameraColorTexture); + float4 Frag(Varyings input) : SV_Target { - // Composite the result via hardware blending. - float4 clouds = LOAD_TEXTURE2D_X(_VolumetricCloudsLightingTexture, input.positionCS.xy); + // Composite the result via manual blending. 
+ float3 clouds = LOAD_TEXTURE2D_X(_VolumetricCloudsLightingTexture, input.positionCS.xy).xyz; + float alpha = LOAD_TEXTURE2D_X(_VolumetricCloudsDepthTexture, input.positionCS.xy).y; clouds.rgb *= GetInverseCurrentExposureMultiplier(); - float4 color = LOAD_TEXTURE2D_X(_CameraColorTexture, input.positionCS.xy); - return float4(clouds.xyz + color.xyz * clouds.w, 1.0); + + float3 color = LOAD_TEXTURE2D_X(_CameraColorTexture, input.positionCS.xy).xyz; + return float4(clouds + color * alpha, 1.0); } ENDHLSL } @@ -130,6 +135,7 @@ Shader "Hidden/HDRP/VolumetricCloudsCombine" Pass { // Pass 2 + // Sky high Cull Off ZWrite Off ZTest Always @@ -142,9 +148,11 @@ Shader "Hidden/HDRP/VolumetricCloudsCombine" float4 Frag(Varyings input) : SV_Target { // Composite the result via hardware blending. - float4 clouds = LOAD_TEXTURE2D_X(_VolumetricCloudsLightingTexture, input.positionCS.xy); + float3 clouds = LOAD_TEXTURE2D_X(_VolumetricCloudsLightingTexture, input.positionCS.xy).xyz; + float alpha = LOAD_TEXTURE2D_X(_VolumetricCloudsDepthTexture, input.positionCS.xy).y; clouds.rgb *= GetInverseCurrentExposureMultiplier(); - return clouds; + + return float4(clouds, alpha); } ENDHLSL } @@ -152,6 +160,7 @@ Shader "Hidden/HDRP/VolumetricCloudsCombine" Pass { // Pass 3 + // Sky low - blit to cubemap Cull Off ZWrite Off Blend Off @@ -159,7 +168,10 @@ Shader "Hidden/HDRP/VolumetricCloudsCombine" HLSLPROGRAM float4 Frag(Varyings input) : SV_Target { - return LOAD_TEXTURE2D_X(_VolumetricCloudsLightingTexture, input.positionCS.xy); + float3 clouds = LOAD_TEXTURE2D_X(_VolumetricCloudsLightingTexture, input.positionCS.xy).xyz; + float alpha = LOAD_TEXTURE2D_X(_VolumetricCloudsDepthTexture, input.positionCS.xy).y; + + return float4(clouds, alpha); } ENDHLSL } @@ -167,6 +179,7 @@ Shader "Hidden/HDRP/VolumetricCloudsCombine" Pass { // Pass 4 + // Sky low - pre upscale Cull Off ZWrite Off Blend Off @@ -186,12 +199,15 @@ Shader "Hidden/HDRP/VolumetricCloudsCombine" Pass { // Pass 5 + // Sky low - 
upscale metal Cull Off ZWrite Off Blend Off HLSLPROGRAM + TEXTURE2D_X(_CameraColorTexture); + float4 Frag(Varyings input) : SV_Target { // Construct the view direction @@ -211,6 +227,7 @@ Shader "Hidden/HDRP/VolumetricCloudsCombine" Pass { // Pass 6 + // Sky low - upscale Cull Off ZWrite Off // If this is a background pixel, we want the cloud value, otherwise we do not. @@ -242,7 +259,7 @@ Shader "Hidden/HDRP/VolumetricCloudsCombine" ZWrite Off // If this is a background pixel, we want the cloud value, otherwise we do not. - Blend One OneMinusSrcAlpha, Zero One + Blend One SrcAlpha, Zero One Blend 1 One OneMinusSrcAlpha // before refraction Blend 2 One OneMinusSrcAlpha // before refraction alpha @@ -275,10 +292,11 @@ Shader "Hidden/HDRP/VolumetricCloudsCombine" UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(input); // Read cloud data - color = LOAD_TEXTURE2D_X(_VolumetricCloudsLightingTexture, input.positionCS.xy); - color.a = 1.0f - color.a; + float3 clouds = LOAD_TEXTURE2D_X(_VolumetricCloudsLightingTexture, input.positionCS.xy).xyz; + float transmittance = LOAD_TEXTURE2D_X(_VolumetricCloudsDepthTexture, input.positionCS.xy).y; - float transmittance = 1.0f - color.a; + color.rgb = clouds; + color.a = 1 - transmittance; float deviceDepth = LOAD_TEXTURE2D_X(_VolumetricCloudsDepthTexture, input.positionCS.xy).x; float linearDepth = min(DecodeInfiniteDepth(deviceDepth, _CloudNearPlane), _ProjectionParams.z); @@ -299,6 +317,8 @@ Shader "Hidden/HDRP/VolumetricCloudsCombine" // Sort clouds with refractive objects ComputeRefractionSplitColor(posInput, color, outBeforeRefractionColor, outBeforeRefractionAlpha); + color.a = 1 - color.a; // That avoids precision issues when the sun is behind the clouds + // Output transmittance for lens flares #if defined(OUTPUT_TRANSMITTANCE_BUFFER) // channel 1 is used when fog multiple scattering is enabled and we don't want clouds in this opacity (it doesn't work well with water and transparent sorting) diff --git 
a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsDef.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsDef.cs index 5603bf10237..cfa362b63e7 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsDef.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsDef.cs @@ -90,9 +90,9 @@ unsafe struct ShaderVariablesClouds // Quarter/Trace resolution public Vector4 _TraceScreenSize; // Resolution of the history buffer size - public Vector2 _HistoryViewportSize; - // Resolution of the history depth buffer - public Vector2 _HistoryBufferSize; + public Vector2 _HistoryViewportScale; + // Offset in depth pyramid + public Vector2Int _ReprojDepthMipOffset; // Flag that defines if the clouds should be evaluated at full resolution public int _LowResolutionEvaluation; @@ -131,8 +131,7 @@ unsafe struct ShaderVariablesClouds public float _FadeInDistance; // Flag that allows to know if we should be using the improved transmittance blending public float _ImprovedTransmittanceBlend; - // Flag that defines if the transmittance should follow a cubic profile (For MSAA) - public float _CubicTransmittance; + public float _PaddingVC0; [HLSLArray(3 * 4, typeof(Vector4))] public fixed float _DistanceBasedWeights[12 * 4]; diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsDef.cs.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsDef.cs.hlsl index 56367d59dda..cab1f1d419f 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsDef.cs.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsDef.cs.hlsl @@ -42,8 +42,8 @@ 
CBUFFER_START(ShaderVariablesClouds) float4 _FinalScreenSize; float4 _IntermediateScreenSize; float4 _TraceScreenSize; - float2 _HistoryViewportSize; - float2 _HistoryBufferSize; + float2 _HistoryViewportScale; + int2 _ReprojDepthMipOffset; int _LowResolutionEvaluation; int _EnableIntegration; int _ValidSceneDepth; @@ -61,7 +61,7 @@ CBUFFER_START(ShaderVariablesClouds) float _FadeInStart; float _FadeInDistance; float _ImprovedTransmittanceBlend; - float _CubicTransmittance; + float _PaddingVC0; float4 _DistanceBasedWeights[12]; CBUFFER_END diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsDenoising.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsDenoising.hlsl index 0f125b32e03..c6c805821f4 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsDenoising.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsDenoising.hlsl @@ -38,16 +38,61 @@ float2 EvaluateCloudMotionVectors(float2 fullResCoord, float deviceDepth, float return velocity; } +bool EvaluateDepthDifference(float depthPrev, float depthCurr) +{ + if ((depthPrev == UNITY_RAW_FAR_CLIP_VALUE) != (depthCurr == UNITY_RAW_FAR_CLIP_VALUE)) + return false; + else + { + float linearDepthP = Linear01Depth(depthPrev, _ZBufferParams); + float linearDepthC = Linear01Depth(depthCurr, _ZBufferParams); + //return abs(linearDepthP - linearDepthC) <= linearDepthC * 0.2; + return abs(linearDepthP/linearDepthC - 1.0f) <= 0.2; + } +} + +float4 ClipCloudsToRegion(float4 history, float4 minimum, float4 maximum, inout float validityFactor) +{ + // The transmittance is overriden using a clamp + float clampedTransmittance = clamp(history.w, minimum.w, maximum.w); + + // The lighting is overriden using a clip + float3 center = 0.5 * (maximum.xyz + minimum.xyz); + float3 extents = 0.5 * 
(maximum.xyz - minimum.xyz); + + // This is actually `distance`, however the keyword is reserved + float3 offset = history.xyz - center; + float3 v_unit = offset.xyz / extents.xyz; + float3 absUnit = abs(v_unit); + float maxUnit = Max3(absUnit.x, absUnit.y, absUnit.z); + + // We make the history less valid if we had to clip it + validityFactor *= maxUnit > 1.0 ? 0.5 : 1.0; + + if (maxUnit > 1.0) + return float4(center + (offset / maxUnit), clampedTransmittance); + else + return float4(history.xyz, clampedTransmittance); +} + +float4 Linear01Depth(float4 depth, float4 zBufferParam) +{ + return float4( + Linear01Depth(depth.x, zBufferParam), + Linear01Depth(depth.y, zBufferParam), + Linear01Depth(depth.z, zBufferParam), + Linear01Depth(depth.w, zBufferParam)); +} + +#ifndef WITHOUT_LDS // Our dispatch is a 8x8 tile. We can access up to 3x3 values at dispatch's half resolution // around the center pixel which represents a total of 36 uniques values for the tile. -groupshared float gs_cacheR[36]; -groupshared float gs_cacheG[36]; -groupshared float gs_cacheB[36]; -groupshared float gs_cacheA[36]; +groupshared float4 gs_cacheRGBA[36]; groupshared float gs_cacheDP[36]; groupshared float gs_cacheDC[36]; groupshared float gs_cachePS[36]; +// Init LDS void FillCloudReprojectionLDS(uint groupIndex, uint2 groupOrigin) { // Define which value we will be acessing with this worker thread @@ -61,24 +106,48 @@ void FillCloudReprojectionLDS(uint groupIndex, uint2 groupOrigin) int2 originXY = traceGroupOrigin - int2(1, 1); // Compute the sample position - int2 sampleCoord = int2(clamp(originXY.x + acessCoordX, 0, _TraceScreenSize.x - 1), clamp(originXY.y + acessCoordY, 0, _TraceScreenSize.y - 1)); + int2 sampleCoord = clamp(originXY + int2(acessCoordX, acessCoordY), 0, _TraceScreenSize.xy - 1); // The representative coordinate to use depends if we are using the checkerboard integration pattern (or not) - int checkerBoardIndex = ComputeCheckerBoardIndex(sampleCoord, _SubPixelIndex); 
- int2 representativeCoord = sampleCoord * 2 + (_EnableIntegration ? (int2)HalfResolutionIndexToOffset(checkerBoardIndex) : int2(0, 0)); + int2 representativeCoord = sampleCoord * 2; + if (_EnableIntegration) + representativeCoord += ComputeCheckerBoardOffset(sampleCoord, _SubPixelIndex); // Read the sample values - float sampleDP = LOAD_TEXTURE2D_X(_HalfResDepthBuffer, representativeCoord).x; - float4 sampleVal = LOAD_TEXTURE2D_X(_CloudsLightingTexture, sampleCoord); - float sampleDC = LOAD_TEXTURE2D_X(_CloudsDepthTexture, sampleCoord).x; + float sampleDP = LOAD_TEXTURE2D_X(_CameraDepthTexture, _ReprojDepthMipOffset + representativeCoord).x; + float3 sampleVal = LOAD_TEXTURE2D_X(_CloudsLightingTexture, sampleCoord).xyz; + float2 sampleDC_TR = LOAD_TEXTURE2D_X(_CloudsDepthTexture, sampleCoord).xy; // Store into the LDS - gs_cacheR[groupIndex] = sampleVal.r; - gs_cacheG[groupIndex] = sampleVal.g; - gs_cacheB[groupIndex] = sampleVal.b; - gs_cacheA[groupIndex] = sampleVal.a; + gs_cacheRGBA[groupIndex] = float4(sampleVal.rgb, sampleDC_TR.y); gs_cacheDP[groupIndex] = sampleDP; - gs_cacheDC[groupIndex] = sampleDC; + gs_cacheDC[groupIndex] = sampleDC_TR.x; +} + +void FillLDSUpscale(uint groupIndex, uint2 groupOrigin) +{ + // Define which value we will be acessing with this worker thread + int acessCoordX = groupIndex % 6; + int acessCoordY = groupIndex / 6; + + // Everything we are accessing is in intermediate res (half rez). 
+ uint2 traceGroupOrigin = groupOrigin / 2; + + // The initial position of the access + int2 originXY = traceGroupOrigin - int2(1, 1); + + // Compute the sample position + int2 sampleCoord = clamp(originXY + int2(acessCoordX, acessCoordY), 0, _IntermediateScreenSize.xy - 1); + + // Read the sample value + float3 sampleVal = LOAD_TEXTURE2D_X(_VolumetricCloudsTexture, sampleCoord).xyz; + float4 depthStatusValue = LOAD_TEXTURE2D_X(_DepthStatusTexture, sampleCoord); + + // Store into the LDS + gs_cacheRGBA[groupIndex] = float4(sampleVal.rgb, depthStatusValue.a); + gs_cacheDP[groupIndex] = depthStatusValue.y; + gs_cachePS[groupIndex] = saturate(depthStatusValue.x); + gs_cacheDC[groupIndex] = depthStatusValue.z; } uint OffsetToLDSAdress(uint2 groupThreadId, int2 offset) @@ -87,17 +156,36 @@ uint OffsetToLDSAdress(uint2 groupThreadId, int2 offset) uint2 tapAddress = (uint2)((int2)(groupThreadId / 2 + 1) + offset); return clamp((uint)(tapAddress.x) % 6 + tapAddress.y * 6, 0, 35); } +#endif + +#ifdef WITHOUT_LDS +float GetCloudDepth(int2 traceCoord, int2 offset) +{ + traceCoord = clamp(traceCoord + offset, 0, _TraceScreenSize.xy - 1); + return LOAD_TEXTURE2D_X(_CloudsDepthTexture, traceCoord).x; +} -float GetCloudDepth_LDS(uint2 groupThreadId, int2 offset) +float4 GetCloudLighting(int2 traceCoord, int2 offset) +{ + float4 cloudLighting; + traceCoord = clamp(traceCoord + offset, 0, _TraceScreenSize.xy - 1); + + cloudLighting.xyz = LOAD_TEXTURE2D_X(_CloudsLightingTexture, traceCoord).xyz; + cloudLighting.a = LOAD_TEXTURE2D_X(_CloudsDepthTexture, traceCoord).y; + return cloudLighting; +} +#else +float GetCloudDepth(uint2 groupThreadId, int2 offset) { return gs_cacheDC[OffsetToLDSAdress(groupThreadId, offset)]; } -float4 GetCloudLighting_LDS(uint2 groupThreadId, int2 offset) +float4 GetCloudLighting(uint2 groupThreadId, int2 offset) { uint ldsTapAddress = OffsetToLDSAdress(groupThreadId, offset); - return float4(gs_cacheR[ldsTapAddress], gs_cacheG[ldsTapAddress], 
gs_cacheB[ldsTapAddress], gs_cacheA[ldsTapAddress]); + return gs_cacheRGBA[ldsTapAddress]; } +#endif struct CloudReprojectionData { @@ -106,13 +194,28 @@ struct CloudReprojectionData float cloudDepth; }; +#ifdef WITHOUT_LDS +CloudReprojectionData GetCloudReprojectionDataSample(int2 traceCoord, int2 offset) +{ + traceCoord = clamp(traceCoord + offset, 0, _TraceScreenSize.xy - 1); + + float3 color = LOAD_TEXTURE2D_X(_CloudsLightingTexture, traceCoord).xyz; + float2 depthTransmittance = LOAD_TEXTURE2D_X(_CloudsDepthTexture, traceCoord).xy; + + int2 representativeCoord = traceCoord * 2 + ComputeCheckerBoardOffset(traceCoord, _SubPixelIndex); + + CloudReprojectionData outVal; + outVal.cloudLighting.rgb = color; + outVal.cloudLighting.a = depthTransmittance.y; + outVal.cloudDepth = depthTransmittance.x; + outVal.pixelDepth = LOAD_TEXTURE2D_X(_CameraDepthTexture, _ReprojDepthMipOffset + representativeCoord).x; + return outVal; +} +#else CloudReprojectionData GetCloudReprojectionDataSample(uint index) { CloudReprojectionData outVal; - outVal.cloudLighting.r = gs_cacheR[index]; - outVal.cloudLighting.g = gs_cacheG[index]; - outVal.cloudLighting.b = gs_cacheB[index]; - outVal.cloudLighting.a = gs_cacheA[index]; + outVal.cloudLighting = gs_cacheRGBA[index]; outVal.pixelDepth = gs_cacheDP[index]; outVal.cloudDepth = gs_cacheDC[index]; return outVal; @@ -122,160 +225,65 @@ CloudReprojectionData GetCloudReprojectionDataSample(uint2 groupThreadId, int2 o { return GetCloudReprojectionDataSample(OffsetToLDSAdress(groupThreadId, offset)); } - -float4 ClipCloudsToRegion(float4 history, float4 minimum, float4 maximum, inout float validityFactor) -{ - // The transmittance is overriden using a clamp - float clampedTransmittance = clamp(history.w, minimum.w, maximum.w); - - // The lighting is overriden using a clip - float3 center = 0.5 * (maximum.xyz + minimum.xyz); - float3 extents = 0.5 * (maximum.xyz - minimum.xyz); - - // This is actually `distance`, however the keyword is reserved 
- float3 offset = history.xyz - center; - float3 v_unit = offset.xyz / extents.xyz; - float3 absUnit = abs(v_unit); - float maxUnit = Max3(absUnit.x, absUnit.y, absUnit.z); - - // We make the history less valid if we had to clip it - validityFactor *= maxUnit > 1.0 ? 0.5 : 1.0; - - if (maxUnit > 1.0) - return float4(center + (offset / maxUnit), clampedTransmittance); - else - return float4(history.xyz, clampedTransmittance); -} - -// Function that fills the struct as we cannot use arrays -void FillCloudReprojectionNeighborhoodData_NOLDS(int2 traceCoord, int subRegionIdx, out NeighborhoodUpsampleData3x3 neighborhoodData) -{ - // Fill the sample data - neighborhoodData.lowValue0 = LOAD_TEXTURE2D_X(_CloudsLightingTexture, traceCoord + int2(-1, -1)); - neighborhoodData.lowValue1 = LOAD_TEXTURE2D_X(_CloudsLightingTexture, traceCoord + int2(0, -1)); - neighborhoodData.lowValue2 = LOAD_TEXTURE2D_X(_CloudsLightingTexture, traceCoord + int2(1, -1)); - - neighborhoodData.lowValue3 = LOAD_TEXTURE2D_X(_CloudsLightingTexture, traceCoord + int2(-1, 0)); - neighborhoodData.lowValue4 = LOAD_TEXTURE2D_X(_CloudsLightingTexture, traceCoord + int2(0, 0)); - neighborhoodData.lowValue5 = LOAD_TEXTURE2D_X(_CloudsLightingTexture, traceCoord + int2(1, 0)); - - neighborhoodData.lowValue6 = LOAD_TEXTURE2D_X(_CloudsLightingTexture, traceCoord + int2(-1, 1)); - neighborhoodData.lowValue7 = LOAD_TEXTURE2D_X(_CloudsLightingTexture, traceCoord + int2(0, 1)); - neighborhoodData.lowValue8 = LOAD_TEXTURE2D_X(_CloudsLightingTexture, traceCoord + int2(1, 1)); - - int2 traceTapCoord = traceCoord + int2(-1, -1); - int checkerBoardIndex = ComputeCheckerBoardIndex(traceTapCoord, _SubPixelIndex); - int2 representativeCoord = traceTapCoord * 2 + HalfResolutionIndexToOffset(checkerBoardIndex); - neighborhoodData.lowDepthA.x = LOAD_TEXTURE2D_X(_HalfResDepthBuffer, representativeCoord).x; - neighborhoodData.lowWeightA.x = _DistanceBasedWeights[subRegionIdx * 3 + 0].x; - - traceTapCoord = traceCoord + int2(0, 
-1); - checkerBoardIndex = ComputeCheckerBoardIndex(traceTapCoord, _SubPixelIndex); - representativeCoord = traceTapCoord * 2 + HalfResolutionIndexToOffset(checkerBoardIndex); - neighborhoodData.lowDepthA.y = LOAD_TEXTURE2D_X(_HalfResDepthBuffer, representativeCoord).x; - neighborhoodData.lowWeightA.y = _DistanceBasedWeights[subRegionIdx * 3 + 0].y; - - traceTapCoord = traceCoord + int2(1, -1); - checkerBoardIndex = ComputeCheckerBoardIndex(traceTapCoord, _SubPixelIndex); - representativeCoord = traceTapCoord * 2 + HalfResolutionIndexToOffset(checkerBoardIndex); - neighborhoodData.lowDepthA.z = LOAD_TEXTURE2D_X(_HalfResDepthBuffer, representativeCoord).x; - neighborhoodData.lowWeightA.z = _DistanceBasedWeights[subRegionIdx * 3 + 0].z; - - traceTapCoord = traceCoord + int2(-1, 0); - checkerBoardIndex = ComputeCheckerBoardIndex(traceTapCoord, _SubPixelIndex); - representativeCoord = traceTapCoord * 2 + HalfResolutionIndexToOffset(checkerBoardIndex); - neighborhoodData.lowDepthA.w = LOAD_TEXTURE2D_X(_HalfResDepthBuffer, representativeCoord).x; - neighborhoodData.lowWeightA.w = _DistanceBasedWeights[subRegionIdx * 3 + 0].w; - - traceTapCoord = traceCoord + int2(0, 0); - checkerBoardIndex = ComputeCheckerBoardIndex(traceTapCoord, _SubPixelIndex); - representativeCoord = traceTapCoord * 2 + HalfResolutionIndexToOffset(checkerBoardIndex); - neighborhoodData.lowDepthB.x = LOAD_TEXTURE2D_X(_HalfResDepthBuffer, representativeCoord).x; - neighborhoodData.lowWeightB.x = _DistanceBasedWeights[subRegionIdx * 3 + 1].x; - - traceTapCoord = traceCoord + int2(1, 0); - checkerBoardIndex = ComputeCheckerBoardIndex(traceTapCoord, _SubPixelIndex); - representativeCoord = traceTapCoord * 2 + HalfResolutionIndexToOffset(checkerBoardIndex); - neighborhoodData.lowDepthB.y = LOAD_TEXTURE2D_X(_HalfResDepthBuffer, representativeCoord).x; - neighborhoodData.lowWeightB.y = _DistanceBasedWeights[subRegionIdx * 3 + 1].y; - - traceTapCoord = traceCoord + int2(-1, 1); - checkerBoardIndex = 
ComputeCheckerBoardIndex(traceTapCoord, _SubPixelIndex); - representativeCoord = traceTapCoord * 2 + HalfResolutionIndexToOffset(checkerBoardIndex); - neighborhoodData.lowDepthB.z = LOAD_TEXTURE2D_X(_HalfResDepthBuffer, representativeCoord).x; - neighborhoodData.lowWeightB.z = _DistanceBasedWeights[subRegionIdx * 3 + 1].z; - - traceTapCoord = traceCoord + int2(0, 1); - checkerBoardIndex = ComputeCheckerBoardIndex(traceTapCoord, _SubPixelIndex); - representativeCoord = traceTapCoord * 2 + HalfResolutionIndexToOffset(checkerBoardIndex); - neighborhoodData.lowDepthB.w = LOAD_TEXTURE2D_X(_HalfResDepthBuffer, representativeCoord).x; - neighborhoodData.lowWeightB.w = _DistanceBasedWeights[subRegionIdx * 3 + 1].w; - - traceTapCoord = traceCoord + int2(1, 1); - checkerBoardIndex = ComputeCheckerBoardIndex(traceTapCoord, _SubPixelIndex); - representativeCoord = traceTapCoord * 2 + HalfResolutionIndexToOffset(checkerBoardIndex); - neighborhoodData.lowDepthC = LOAD_TEXTURE2D_X(_HalfResDepthBuffer, representativeCoord).x; - neighborhoodData.lowWeightC = _DistanceBasedWeights[subRegionIdx * 3 + 2].x; - - // In the reprojection case, all masks are valid - neighborhoodData.lowMasksA = 1.0f; - neighborhoodData.lowMasksB = 1.0f; - neighborhoodData.lowMasksC = 1.0f; -} +#endif // Function that fills the struct as we cannot use arrays -void FillCloudReprojectionNeighborhoodData(int2 groupThreadId, int subRegionIdx, out NeighborhoodUpsampleData3x3 neighborhoodData) +void FillCloudReprojectionNeighborhoodData(int2 threadCoord, int subRegionIdx, out NeighborhoodUpsampleData3x3 neighborhoodData) { // Fill the sample data - CloudReprojectionData data = GetCloudReprojectionDataSample(groupThreadId, int2(-1, -1)); + CloudReprojectionData data = GetCloudReprojectionDataSample(threadCoord, int2(-1, -1)); neighborhoodData.lowValue0 = data.cloudLighting; + neighborhoodData.lowDepthValueA.x = data.cloudDepth; neighborhoodData.lowDepthA.x = data.pixelDepth; neighborhoodData.lowWeightA.x = 
_DistanceBasedWeights[subRegionIdx * 3 + 0].x; - data = GetCloudReprojectionDataSample(groupThreadId, int2(0, -1)); + data = GetCloudReprojectionDataSample(threadCoord, int2(0, -1)); neighborhoodData.lowValue1 = data.cloudLighting; + neighborhoodData.lowDepthValueA.y = data.cloudDepth; neighborhoodData.lowDepthA.y = data.pixelDepth; neighborhoodData.lowWeightA.y = _DistanceBasedWeights[subRegionIdx * 3 + 0].y; - data = GetCloudReprojectionDataSample(groupThreadId, int2(1, -1)); + data = GetCloudReprojectionDataSample(threadCoord, int2(1, -1)); neighborhoodData.lowValue2 = data.cloudLighting; + neighborhoodData.lowDepthValueA.z = data.cloudDepth; neighborhoodData.lowDepthA.z = data.pixelDepth; neighborhoodData.lowWeightA.z = _DistanceBasedWeights[subRegionIdx * 3 + 0].z; - data = GetCloudReprojectionDataSample(groupThreadId, int2(-1, 0)); + data = GetCloudReprojectionDataSample(threadCoord, int2(-1, 0)); neighborhoodData.lowValue3 = data.cloudLighting; + neighborhoodData.lowDepthValueA.w = data.cloudDepth; neighborhoodData.lowDepthA.w = data.pixelDepth; neighborhoodData.lowWeightA.w = _DistanceBasedWeights[subRegionIdx * 3 + 0].w; - data = GetCloudReprojectionDataSample(groupThreadId, int2(0, 0)); + data = GetCloudReprojectionDataSample(threadCoord, int2(0, 0)); neighborhoodData.lowValue4 = data.cloudLighting; + neighborhoodData.lowDepthValueB.x = data.cloudDepth; neighborhoodData.lowDepthB.x = data.pixelDepth; neighborhoodData.lowWeightB.x = _DistanceBasedWeights[subRegionIdx * 3 + 1].x; - data = GetCloudReprojectionDataSample(groupThreadId, int2(1, 0)); + data = GetCloudReprojectionDataSample(threadCoord, int2(1, 0)); neighborhoodData.lowValue5 = data.cloudLighting; + neighborhoodData.lowDepthValueB.y = data.cloudDepth; neighborhoodData.lowDepthB.y = data.pixelDepth; neighborhoodData.lowWeightB.y = _DistanceBasedWeights[subRegionIdx * 3 + 1].y; - data = GetCloudReprojectionDataSample(groupThreadId, int2(-1, 1)); + data = GetCloudReprojectionDataSample(threadCoord, 
int2(-1, 1)); neighborhoodData.lowValue6 = data.cloudLighting; + neighborhoodData.lowDepthValueB.z = data.cloudDepth; neighborhoodData.lowDepthB.z = data.pixelDepth; neighborhoodData.lowWeightB.z = _DistanceBasedWeights[subRegionIdx * 3 + 1].z; - data = GetCloudReprojectionDataSample(groupThreadId, int2(0, 1)); + data = GetCloudReprojectionDataSample(threadCoord, int2(0, 1)); neighborhoodData.lowValue7 = data.cloudLighting; + neighborhoodData.lowDepthValueB.w = data.cloudDepth; neighborhoodData.lowDepthB.w = data.pixelDepth; neighborhoodData.lowWeightB.w = _DistanceBasedWeights[subRegionIdx * 3 + 1].w; - data = GetCloudReprojectionDataSample(groupThreadId, int2(1, 1)); + data = GetCloudReprojectionDataSample(threadCoord, int2(1, 1)); neighborhoodData.lowValue8 = data.cloudLighting; + neighborhoodData.lowDepthValueC = data.cloudDepth; neighborhoodData.lowDepthC = data.pixelDepth; neighborhoodData.lowWeightC = _DistanceBasedWeights[subRegionIdx * 3 + 2].x; - - // In the reprojection case, all masks are valid - neighborhoodData.lowMasksA = 1.0f; - neighborhoodData.lowMasksB = 1.0f; - neighborhoodData.lowMasksC = 1.0f; } struct CloudUpscaleData @@ -286,13 +294,27 @@ struct CloudUpscaleData float cloudDepth; }; +#ifdef WITHOUT_LDS +CloudUpscaleData GetCloudUpscaleDataSample(int2 intermediateCoord, int2 offset) +{ + intermediateCoord = clamp(intermediateCoord + offset, 0, _IntermediateScreenSize.xy - 1); + + float3 lightingVal = LOAD_TEXTURE2D_X(_VolumetricCloudsTexture, intermediateCoord).xyz; + float4 depthStatusValue = LOAD_TEXTURE2D_X(_DepthStatusTexture, intermediateCoord); + + CloudUpscaleData outVal; + outVal.cloudLighting.rgb = lightingVal; + outVal.cloudLighting.a = depthStatusValue.a; + outVal.pixelDepth = depthStatusValue.y; + outVal.pixelStatus = saturate(depthStatusValue.x); + outVal.cloudDepth = depthStatusValue.z; + return outVal; +} +#else CloudUpscaleData GetCloudUpscaleDataSample(uint index) { CloudUpscaleData outVal; - outVal.cloudLighting.r = 
gs_cacheR[index]; - outVal.cloudLighting.g = gs_cacheG[index]; - outVal.cloudLighting.b = gs_cacheB[index]; - outVal.cloudLighting.a = gs_cacheA[index]; + outVal.cloudLighting = gs_cacheRGBA[index]; outVal.pixelDepth = gs_cacheDP[index]; outVal.pixelStatus = gs_cachePS[index]; outVal.cloudDepth = gs_cacheDC[index]; @@ -303,270 +325,65 @@ CloudUpscaleData GetCloudUpscaleDataSample(uint2 groupThreadId, int2 offset) { return GetCloudUpscaleDataSample(OffsetToLDSAdress(groupThreadId, offset)); } +#endif // Function that fills the struct as we cannot use arrays -void FillCloudUpscaleNeighborhoodData(int2 groupThreadId, int subRegionIdx, out NeighborhoodUpsampleData3x3 neighborhoodData) +void FillCloudUpscaleNeighborhoodData(int2 threadCoord, int subRegionIdx, out NeighborhoodUpsampleData3x3 neighborhoodData) { // Fill the sample data - CloudUpscaleData data = GetCloudUpscaleDataSample(groupThreadId, int2(-1, -1)); + CloudUpscaleData data = GetCloudUpscaleDataSample(threadCoord, int2(-1, -1)); neighborhoodData.lowValue0 = data.cloudLighting; + neighborhoodData.lowDepthValueA.x = data.cloudDepth; neighborhoodData.lowDepthA.x = data.pixelDepth; - neighborhoodData.lowMasksA.x = data.pixelStatus; - neighborhoodData.lowWeightA.x = _DistanceBasedWeights[subRegionIdx * 3 + 0].x; + neighborhoodData.lowWeightA.x = data.pixelStatus * _DistanceBasedWeights[subRegionIdx * 3 + 0].x; - data = GetCloudUpscaleDataSample(groupThreadId, int2(0, -1)); + data = GetCloudUpscaleDataSample(threadCoord, int2(0, -1)); neighborhoodData.lowValue1 = data.cloudLighting; + neighborhoodData.lowDepthValueA.y = data.cloudDepth; neighborhoodData.lowDepthA.y = data.pixelDepth; - neighborhoodData.lowMasksA.y = data.pixelStatus; - neighborhoodData.lowWeightA.y = _DistanceBasedWeights[subRegionIdx * 3 + 0].y; + neighborhoodData.lowWeightA.y = data.pixelStatus * _DistanceBasedWeights[subRegionIdx * 3 + 0].y; - data = GetCloudUpscaleDataSample(groupThreadId, int2(1, -1)); + data = 
GetCloudUpscaleDataSample(threadCoord, int2(1, -1)); neighborhoodData.lowValue2 = data.cloudLighting; + neighborhoodData.lowDepthValueA.z = data.cloudDepth; neighborhoodData.lowDepthA.z = data.pixelDepth; - neighborhoodData.lowMasksA.z = data.pixelStatus; - neighborhoodData.lowWeightA.z = _DistanceBasedWeights[subRegionIdx * 3 + 0].z; + neighborhoodData.lowWeightA.z = data.pixelStatus * _DistanceBasedWeights[subRegionIdx * 3 + 0].z; - data = GetCloudUpscaleDataSample(groupThreadId, int2(-1, 0)); + data = GetCloudUpscaleDataSample(threadCoord, int2(-1, 0)); neighborhoodData.lowValue3 = data.cloudLighting; + neighborhoodData.lowDepthValueA.w = data.cloudDepth; neighborhoodData.lowDepthA.w = data.pixelDepth; - neighborhoodData.lowMasksA.w = data.pixelStatus; - neighborhoodData.lowWeightA.w = _DistanceBasedWeights[subRegionIdx * 3 + 0].w; + neighborhoodData.lowWeightA.w = data.pixelStatus * _DistanceBasedWeights[subRegionIdx * 3 + 0].w; - data = GetCloudUpscaleDataSample(groupThreadId, int2(0, 0)); + data = GetCloudUpscaleDataSample(threadCoord, int2(0, 0)); neighborhoodData.lowValue4 = data.cloudLighting; + neighborhoodData.lowDepthValueB.x = data.cloudDepth; neighborhoodData.lowDepthB.x = data.pixelDepth; - neighborhoodData.lowMasksB.x = data.pixelStatus; - neighborhoodData.lowWeightB.x = _DistanceBasedWeights[subRegionIdx * 3 + 1].x; + neighborhoodData.lowWeightB.x = data.pixelStatus * _DistanceBasedWeights[subRegionIdx * 3 + 1].x; - data = GetCloudUpscaleDataSample(groupThreadId, int2(1, 0)); + data = GetCloudUpscaleDataSample(threadCoord, int2(1, 0)); neighborhoodData.lowValue5 = data.cloudLighting; + neighborhoodData.lowDepthValueB.y = data.cloudDepth; neighborhoodData.lowDepthB.y = data.pixelDepth; - neighborhoodData.lowMasksB.y = data.pixelStatus; - neighborhoodData.lowWeightB.y = _DistanceBasedWeights[subRegionIdx * 3 + 1].y; + neighborhoodData.lowWeightB.y = data.pixelStatus * _DistanceBasedWeights[subRegionIdx * 3 + 1].y; - data = 
GetCloudUpscaleDataSample(groupThreadId, int2(-1, 1)); + data = GetCloudUpscaleDataSample(threadCoord, int2(-1, 1)); neighborhoodData.lowValue6 = data.cloudLighting; + neighborhoodData.lowDepthValueB.z = data.cloudDepth; neighborhoodData.lowDepthB.z = data.pixelDepth; - neighborhoodData.lowMasksB.z = data.pixelStatus; - neighborhoodData.lowWeightB.z = _DistanceBasedWeights[subRegionIdx * 3 + 1].z; + neighborhoodData.lowWeightB.z = data.pixelStatus * _DistanceBasedWeights[subRegionIdx * 3 + 1].z; - data = GetCloudUpscaleDataSample(groupThreadId, int2(0, 1)); + data = GetCloudUpscaleDataSample(threadCoord, int2(0, 1)); neighborhoodData.lowValue7 = data.cloudLighting; + neighborhoodData.lowDepthValueB.w = data.cloudDepth; neighborhoodData.lowDepthB.w = data.pixelDepth; - neighborhoodData.lowMasksB.w = data.pixelStatus; - neighborhoodData.lowWeightB.w = _DistanceBasedWeights[subRegionIdx * 3 + 1].w; + neighborhoodData.lowWeightB.w = data.pixelStatus * _DistanceBasedWeights[subRegionIdx * 3 + 1].w; - data = GetCloudUpscaleDataSample(groupThreadId, int2(1, 1)); + data = GetCloudUpscaleDataSample(threadCoord, int2(1, 1)); neighborhoodData.lowValue8 = data.cloudLighting; + neighborhoodData.lowDepthValueC = data.cloudDepth; neighborhoodData.lowDepthC = data.pixelDepth; - neighborhoodData.lowMasksC = data.pixelStatus; - neighborhoodData.lowWeightC = _DistanceBasedWeights[subRegionIdx * 3 + 2].x; + neighborhoodData.lowWeightC = data.pixelStatus * _DistanceBasedWeights[subRegionIdx * 3 + 2].x; } -// Function that fills the struct as we cannot use arrays -void FillCloudUpscaleNeighborhoodData_NOLDS(int2 traceCoord, int subRegionIdx, out NeighborhoodUpsampleData3x3 neighborhoodData) -{ - // Fill the sample data (TOP LEFT) - float4 lightingVal = LOAD_TEXTURE2D_X(_VolumetricCloudsTexture, traceCoord + int2(-1, -1)); - float3 depthStatusValue = LOAD_TEXTURE2D_X(_DepthStatusTexture, traceCoord + int2(-1, -1)).xyz; - neighborhoodData.lowValue0 = lightingVal; - 
neighborhoodData.lowDepthA.x = depthStatusValue.y; - neighborhoodData.lowMasksA.x = saturate(depthStatusValue.x); - neighborhoodData.lowWeightA.x = _DistanceBasedWeights[subRegionIdx * 3 + 0].x; - - // Fill the sample data (TOP CENTER) - lightingVal = LOAD_TEXTURE2D_X(_VolumetricCloudsTexture, traceCoord + int2(0, -1)); - depthStatusValue = LOAD_TEXTURE2D_X(_DepthStatusTexture, traceCoord + int2(0, -1)).xyz; - neighborhoodData.lowValue1 = lightingVal; - neighborhoodData.lowDepthA.y = depthStatusValue.y; - neighborhoodData.lowMasksA.y = saturate(depthStatusValue.x); - neighborhoodData.lowWeightA.y = _DistanceBasedWeights[subRegionIdx * 3 + 0].y; - - // Fill the sample data (TOP RIGHT) - lightingVal = LOAD_TEXTURE2D_X(_VolumetricCloudsTexture, traceCoord + int2(1, -1)); - depthStatusValue = LOAD_TEXTURE2D_X(_DepthStatusTexture, traceCoord + int2(1, -1)).xyz; - neighborhoodData.lowValue2 = lightingVal; - neighborhoodData.lowDepthA.z = depthStatusValue.y; - neighborhoodData.lowMasksA.z = saturate(depthStatusValue.x); - neighborhoodData.lowWeightA.z = depthStatusValue.z; - neighborhoodData.lowWeightA.z = _DistanceBasedWeights[subRegionIdx * 3 + 0].z; - - // Fill the sample data (MID LEFT) - lightingVal = LOAD_TEXTURE2D_X(_VolumetricCloudsTexture, traceCoord + int2(-1, 0)); - depthStatusValue = LOAD_TEXTURE2D_X(_DepthStatusTexture, traceCoord + int2(-1, 0)).xyz; - neighborhoodData.lowValue3 = lightingVal; - neighborhoodData.lowDepthA.w = depthStatusValue.y; - neighborhoodData.lowMasksA.w = saturate(depthStatusValue.x); - neighborhoodData.lowWeightA.w = _DistanceBasedWeights[subRegionIdx * 3 + 0].w; - - // Fill the sample data (MID CENTER) - lightingVal = LOAD_TEXTURE2D_X(_VolumetricCloudsTexture, traceCoord + int2(0, 0)); - depthStatusValue = LOAD_TEXTURE2D_X(_DepthStatusTexture, traceCoord + int2(0, 0)).xyz; - neighborhoodData.lowValue4 = lightingVal; - neighborhoodData.lowDepthB.x = depthStatusValue.y; - neighborhoodData.lowMasksB.x = saturate(depthStatusValue.x); - 
neighborhoodData.lowWeightB.x = _DistanceBasedWeights[subRegionIdx * 3 + 1].x; - - // Fill the sample data (MID RIGHT) - lightingVal = LOAD_TEXTURE2D_X(_VolumetricCloudsTexture, traceCoord + int2(1, 0)); - depthStatusValue = LOAD_TEXTURE2D_X(_DepthStatusTexture, traceCoord + int2(1, 0)).xyz; - neighborhoodData.lowValue5 = lightingVal; - neighborhoodData.lowDepthB.y = depthStatusValue.y; - neighborhoodData.lowMasksB.y = saturate(depthStatusValue.x); - neighborhoodData.lowWeightB.y = _DistanceBasedWeights[subRegionIdx * 3 + 1].y; - - // Fill the sample data (BOTTOM LEFT) - lightingVal = LOAD_TEXTURE2D_X(_VolumetricCloudsTexture, traceCoord + int2(-1, 1)); - depthStatusValue = LOAD_TEXTURE2D_X(_DepthStatusTexture, traceCoord + int2(-1, 1)).xyz; - neighborhoodData.lowValue6 = lightingVal; - neighborhoodData.lowDepthB.z = depthStatusValue.y; - neighborhoodData.lowMasksB.z = saturate(depthStatusValue.x); - neighborhoodData.lowWeightB.z = depthStatusValue.z; - neighborhoodData.lowWeightB.z = _DistanceBasedWeights[subRegionIdx * 3 + 1].z; - - // Fill the sample data (BOTTOM CENTER) - lightingVal = LOAD_TEXTURE2D_X(_VolumetricCloudsTexture, traceCoord + int2(0, 1)); - depthStatusValue = LOAD_TEXTURE2D_X(_DepthStatusTexture, traceCoord + int2(0, 1)).xyz; - neighborhoodData.lowValue7 = lightingVal; - neighborhoodData.lowDepthB.w = depthStatusValue.y; - neighborhoodData.lowMasksB.w = saturate(depthStatusValue.x); - neighborhoodData.lowWeightB.w = depthStatusValue.z; - neighborhoodData.lowWeightB.w = _DistanceBasedWeights[subRegionIdx * 3 + 1].w; - - // Fill the sample data (BOTTOM CENTER) - lightingVal = LOAD_TEXTURE2D_X(_VolumetricCloudsTexture, traceCoord + int2(1, 1)); - depthStatusValue = LOAD_TEXTURE2D_X(_DepthStatusTexture, traceCoord + int2(1, 1)).xyz; - neighborhoodData.lowValue8 = lightingVal; - neighborhoodData.lowDepthC = depthStatusValue.y; - neighborhoodData.lowMasksC = saturate(depthStatusValue.x); - neighborhoodData.lowWeightC = 
_DistanceBasedWeights[subRegionIdx * 3 + 2].x; -} - -float EvaluateUpscaledCloudDepth(int2 groupThreadId, NeighborhoodUpsampleData3x3 nhd) -{ - // There are some cases where we need to provide a depth value for the volumetric clouds (mainly for the fog now, but this may change in the future) - // Given that the cloud value for a final pixel doesn't come from a single half resolution pixel (it is interpolated), we also need to interpolate - // the depth. That said, we cannot interpolate cloud values with non-cloudy pixels values and we need to exclude them from the evaluation - // Also, we should be doing the interpolation in linear space to be accurate, but it becomes more expensive and experimentally I didn't find - // any artifact of doing it in logarithmic space. - float finalDepth = 0.0f; - float sumWeight = 0.0f; - - // Top left - float weight = (nhd.lowValue0.w != 1.0 ? 1.0 : 0.0) * nhd.lowMasksA.x; - finalDepth += weight * GetCloudDepth_LDS(groupThreadId, int2(-1, -1)); - sumWeight += weight; - - // Top center - weight = (nhd.lowValue1.w != 1.0 ? 1.0 : 0.0) * nhd.lowMasksA.y; - finalDepth += weight * GetCloudDepth_LDS(groupThreadId, int2(0, -1)); - sumWeight += weight; - - // Top right - weight = (nhd.lowValue2.w != 1.0 ? 1.0 : 0.0) * nhd.lowMasksA.z; - finalDepth += weight * GetCloudDepth_LDS(groupThreadId, int2(1, -1)); - sumWeight += weight; - - // Mid left - weight = (nhd.lowValue3.w != 1.0 ? 1.0 : 0.0) * nhd.lowMasksA.w; - finalDepth += weight * GetCloudDepth_LDS(groupThreadId, int2(-1, 0)); - sumWeight += weight; - - // Mid center - weight = (nhd.lowValue4.w != 1.0 ? 1.0 : 0.0) * nhd.lowMasksB.x; - finalDepth += weight * GetCloudDepth_LDS(groupThreadId, int2(0, 0)); - sumWeight += weight; - - // Mid right - weight = (nhd.lowValue5.w != 1.0 ? 1.0 : 0.0) * nhd.lowMasksB.y; - finalDepth += weight * GetCloudDepth_LDS(groupThreadId, int2(1, 0)); - sumWeight += weight; - - // Bottom left - weight = (nhd.lowValue6.w != 1.0 ? 
1.0 : 0.0) * nhd.lowMasksB.z; - finalDepth += weight * GetCloudDepth_LDS(groupThreadId, int2(-1, 1)); - sumWeight += weight; - - // Bottom mid - weight = (nhd.lowValue7.w != 1.0 ? 1.0 : 0.0) * nhd.lowMasksB.w; - finalDepth += weight * GetCloudDepth_LDS(groupThreadId, int2(0, 1)); - sumWeight += weight; - - // Bottom mid - weight = (nhd.lowValue8.w != 1.0 ? 1.0 : 0.0) * nhd.lowMasksC; - finalDepth += weight * GetCloudDepth_LDS(groupThreadId, int2(1, 1)); - sumWeight += weight; - - return finalDepth / sumWeight; -} - -float EvaluateUpscaledCloudDepth_NOLDS(int2 halfResCoord, NeighborhoodUpsampleData3x3 nhd) -{ - float finalDepth = 0.0f; - float sumWeight = 0.0f; - - // Top left - float weight = (nhd.lowValue0.w != 1.0 ? 1.0 : 0.0) * nhd.lowMasksA.x; - finalDepth += weight * LOAD_TEXTURE2D_X(_DepthStatusTexture, halfResCoord + int2(-1, -1)).z; - sumWeight += weight; - - // Top center - weight = (nhd.lowValue1.w != 1.0 ? 1.0 : 0.0) * nhd.lowMasksA.y; - finalDepth += weight * LOAD_TEXTURE2D_X(_DepthStatusTexture, halfResCoord + int2(0, -1)).z; - sumWeight += weight; - - // Top right - weight = (nhd.lowValue2.w != 1.0 ? 1.0 : 0.0) * nhd.lowMasksA.z; - finalDepth += weight * LOAD_TEXTURE2D_X(_DepthStatusTexture, halfResCoord + int2(1, -1)).z; - sumWeight += weight; - - // Mid left - weight = (nhd.lowValue3.w != 1.0 ? 1.0 : 0.0) * nhd.lowMasksA.w; - finalDepth += weight * LOAD_TEXTURE2D_X(_DepthStatusTexture, halfResCoord + int2(-1, 0)).z; - sumWeight += weight; - - // Mid center - weight = (nhd.lowValue4.w != 1.0 ? 1.0 : 0.0) * nhd.lowMasksB.x; - finalDepth += weight * LOAD_TEXTURE2D_X(_DepthStatusTexture, halfResCoord + int2(0, 0)).z; - sumWeight += weight; - - // Mid right - weight = (nhd.lowValue5.w != 1.0 ? 1.0 : 0.0) * nhd.lowMasksB.y; - finalDepth += weight * LOAD_TEXTURE2D_X(_DepthStatusTexture, halfResCoord + int2(1, 0)).z; - sumWeight += weight; - - // Bottom left - weight = (nhd.lowValue6.w != 1.0 ? 
1.0 : 0.0) * nhd.lowMasksB.z; - finalDepth += weight * LOAD_TEXTURE2D_X(_DepthStatusTexture, halfResCoord + int2(-1, 1)).z; - sumWeight += weight; - - // Bottom mid - weight = (nhd.lowValue7.w != 1.0 ? 1.0 : 0.0) * nhd.lowMasksB.w; - finalDepth += weight * LOAD_TEXTURE2D_X(_DepthStatusTexture, halfResCoord + int2(0, 1)).z; - sumWeight += weight; - - // Bottom mid - weight = (nhd.lowValue8.w != 1.0 ? 1.0 : 0.0) * nhd.lowMasksC; - finalDepth += weight * LOAD_TEXTURE2D_X(_DepthStatusTexture, halfResCoord + int2(1, 1)).z; - sumWeight += weight; - - return sumWeight != 0.0f ? finalDepth / sumWeight : 0.0f; -} - -// This function will return something strictly smaller than 0 if any of the lower res pixels -// have some amound of clouds. -float EvaluateRegionEmptiness(NeighborhoodUpsampleData3x3 data) -{ - float emptyRegionFlag = 1.0f; - emptyRegionFlag *= lerp(1.0, data.lowValue0.w, data.lowWeightA.x != 0.0 ? 1.0 : 0.0); - emptyRegionFlag *= lerp(1.0, data.lowValue1.w, data.lowWeightA.y != 0.0 ? 1.0 : 0.0); - emptyRegionFlag *= lerp(1.0, data.lowValue2.w, data.lowWeightA.z != 0.0 ? 1.0 : 0.0); - emptyRegionFlag *= lerp(1.0, data.lowValue3.w, data.lowWeightA.w != 0.0 ? 1.0 : 0.0); - emptyRegionFlag *= lerp(1.0, data.lowValue4.w, data.lowWeightB.x != 0.0 ? 1.0 : 0.0); - emptyRegionFlag *= lerp(1.0, data.lowValue5.w, data.lowWeightB.y != 0.0 ? 1.0 : 0.0); - emptyRegionFlag *= lerp(1.0, data.lowValue6.w, data.lowWeightB.z != 0.0 ? 1.0 : 0.0); - emptyRegionFlag *= lerp(1.0, data.lowValue7.w, data.lowWeightB.w != 0.0 ? 1.0 : 0.0); - emptyRegionFlag *= lerp(1.0, data.lowValue8.w, data.lowWeightC != 0.0 ? 
1.0 : 0.0); - return emptyRegionFlag; -} #endif // VOLUMETRIC_CLOUDS_DENOISING_H diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsTrace.compute b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsTrace.compute index aee473467b6..ea496f36d93 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsTrace.compute +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsTrace.compute @@ -7,6 +7,7 @@ #pragma multi_compile _ CLOUDS_MICRO_EROSION #pragma multi_compile _ CLOUDS_SIMPLE_PRESET #pragma multi_compile _ TRACE_FOR_SKY +#pragma multi_compile _ PERCEPTUAL_TRANSMITTANCE // #pragma enable_d3d11_debug_symbols @@ -15,12 +16,9 @@ #include "Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsUtilities.hlsl" #include "Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/AtmosphericScattering/AtmosphericScattering.hlsl" -// Input textures -TEXTURE2D_X(_VolumetricCloudsSourceDepth); - // Output texture -RW_TEXTURE2D_X(float4, _CloudsLightingTextureRW); -RW_TEXTURE2D_X(float, _CloudsDepthTextureRW); +RW_TEXTURE2D_X(float3, _CloudsLightingTextureRW); +RW_TEXTURE2D_X(float2, _CloudsDepthTextureRW); CloudRay BuildRay(uint2 intermediateCoord) { @@ -40,7 +38,9 @@ CloudRay BuildRay(uint2 intermediateCoord) #ifndef TRACE_FOR_SKY if (_ValidSceneDepth) { - float depthValue = LOAD_TEXTURE2D_X(_VolumetricCloudsSourceDepth, intermediateCoord.xy).x; + // TODO: Neighbor analysis to represent full depth range + // Ref: Creating the Atmospheric World of Red Dead Redemption 2, slide 55 + float depthValue = LOAD_TEXTURE2D_X(_CameraDepthTexture, _ReprojDepthMipOffset + intermediateCoord).x; if (depthValue != UNITY_RAW_FAR_CLIP_VALUE) ray.maxRayLength = LinearEyeDepth(positionCS * _ScreenSize.zw, 
depthValue, _InvProjParams) * rcp(dot(ray.direction, -UNITY_MATRIX_V[2].xyz)); } @@ -71,13 +71,8 @@ void RenderClouds(uint3 traceCoord : SV_DispatchThreadID, int groupIndex : SV_Gr if (_LowResolutionEvaluation) { intermediateCoord = traceCoord.xy * 2; - if (_EnableIntegration) - { - // Compute the half res coordinate that matches this thread (as we virtually do the computation in half res space) - int checkerBoardIndex = ComputeCheckerBoardIndex(traceCoord.xy, _SubPixelIndex); - intermediateCoord += HalfResolutionIndexToOffset(checkerBoardIndex); - } + intermediateCoord += ComputeCheckerBoardOffset(traceCoord.xy, _SubPixelIndex); } // Given that the rendering resolution is not guaranteed to be an even number, we need to clamp to the intermediate resolution in this case @@ -98,16 +93,21 @@ void RenderClouds(uint3 traceCoord : SV_DispatchThreadID, int groupIndex : SV_Gr float3 V = ray.direction; float2 positionNDC = intermediateCoord * _IntermediateScreenSize.zw; + // We have to transform transmittance in the same way as during final combine + // This is still faster than evaluating atmospheric scattering at full res + float2 finalCoord = _LowResolutionEvaluation ? intermediateCoord * 2 : intermediateCoord; + float transmittance = EvaluateFinalTransmittance(finalCoord, result.transmittance); + float3 skyColor, skyOpacity; EvaluateAtmosphericScattering(V, positionNDC, result.meanDistance, skyColor, skyOpacity); - result.inScattering.xyz = result.inScattering.xyz * (1 - skyOpacity) + skyColor * (1 - result.transmittance); + result.inScattering.xyz = result.inScattering.xyz * (1 - skyOpacity) + skyColor * (1 - transmittance); } #endif // Output the result - _CloudsLightingTextureRW[COORD_TEXTURE2D_X(traceCoord.xy)] = float4(result.inScattering, result.transmittance); + _CloudsLightingTextureRW[COORD_TEXTURE2D_X(traceCoord.xy)] = result.inScattering; // Compute the cloud depth float depth = result.invalidRay ? 
UNITY_RAW_FAR_CLIP_VALUE : EncodeInfiniteDepth(result.meanDistance, _CloudNearPlane); - _CloudsDepthTextureRW[COORD_TEXTURE2D_X(traceCoord.xy)] = depth; + _CloudsDepthTextureRW[COORD_TEXTURE2D_X(traceCoord.xy)] = float2(depth, result.transmittance); } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsUtilities.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsUtilities.hlsl index 69d3ae40e05..391f070df86 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsUtilities.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsUtilities.hlsl @@ -48,20 +48,27 @@ float ConvertCloudDepth(float3 position) return hClip.z / hClip.w; } -float EvaluateFinalTransmittance(float3 color, float transmittance) +TEXTURE2D_X(_CameraColorTexture); + +// Tweak the transmittance to improve situation where the sun is behind the clouds +float EvaluateFinalTransmittance(float2 finalCoord, float transmittance) { + #ifdef PERCEPTUAL_TRANSMITTANCE // Due to the high intensity of the sun, we often need apply the transmittance in a tonemapped space // As we only produce one transmittance, we evaluate the approximation on the luminance of the color - float luminance = Luminance(color); - - // Apply the tone mapping and then the transmittance - float resultLuminance = luminance / (1.0 + luminance) * transmittance; + float luminance = Luminance(_CameraColorTexture[COORD_TEXTURE2D_X(finalCoord.xy)]); + if (luminance > 0.0f) + { + // Apply the transmittance in tonemapped space + float resultLuminance = FastTonemapPerChannel(luminance) * transmittance; + resultLuminance = FastTonemapPerChannelInvert(resultLuminance); - // reverse the tone mapping - resultLuminance = resultLuminance / (1.0 - resultLuminance); + // This approach only makes sense if the color is not 
black + transmittance = lerp(transmittance, resultLuminance / luminance, _ImprovedTransmittanceBlend); + } + #endif - // This approach only makes sense if the color is not black - return luminance > 0.0 ? lerp(transmittance, resultLuminance / luminance, _ImprovedTransmittanceBlend) : transmittance; + return saturate(transmittance); } /// Tracing @@ -351,7 +358,7 @@ void EvaluateCloudProperties(float3 positionPS, float noiseMipOffset, float eros float ambientOcclusionBlend = saturate(1.0 - max(erosionFactor, shapeFactor) * 0.5); properties.ambientOcclusion = lerp(1.0, properties.ambientOcclusion, ambientOcclusionBlend); - // Apply the erosion for nifer details + // Apply the erosion for nicer details if (!cheapVersion) { float3 erosionCoords = AnimateErosionNoisePosition(positionPS) / NOISE_TEXTURE_NORMALIZATION_FACTOR * _ErosionScale; @@ -415,12 +422,9 @@ float3 EvaluateSunLuminance(float3 positionWS, float3 sunDirection, float3 sunCo // Compute the size of the current step float intervalSize = totalLightDistance / (float)_NumLightSteps; - - // Sums the ex - float extinctionSum = 0; + float opticalDepth = 0; // Collect total density along light ray. 
- float lastDist = 0; for (int j = 0; j < _NumLightSteps; j++) { // Here we intentionally do not take the right step size for the first step @@ -428,23 +432,18 @@ float3 EvaluateSunLuminance(float3 positionWS, float3 sunDirection, float3 sunCo float dist = intervalSize * (0.25 + j); // Evaluate the current sample point - float3 currentSamplePointWS = positionWS + sunDirection * dist; + float3 currentSamplePointPS = ConvertToPS(positionWS) + sunDirection * dist; // Get the cloud properties at the sample point CloudProperties lightRayCloudProperties; - EvaluateCloudProperties(ConvertToPS (currentSamplePointWS), 3.0f * j / _NumLightSteps, 0.0, true, true, lightRayCloudProperties); - - // Normally we would evaluate the transmittance at each step and multiply them - // but given the fact that exp exp (extinctionA) * exp(extinctionB) = exp(extinctionA + extinctionB) - // We can sum the extinctions and do the extinction only once - extinctionSum += max(lightRayCloudProperties.density * lightRayCloudProperties.sigmaT, 1e-6); + EvaluateCloudProperties(currentSamplePointPS, 3.0f * j / _NumLightSteps, 0.0, true, true, lightRayCloudProperties); - // Move on to the next step - lastDist = dist; + opticalDepth += lightRayCloudProperties.density * lightRayCloudProperties.sigmaT; } // Compute the luminance for each octave + // https://magnuswrenninge.com/wp-content/uploads/2010/03/Wrenninge-OzTheGreatAndVolumetric.pdf float3 sunColorXPowderEffect = sunColor * powderEffect; - float3 extinction = intervalSize * extinctionSum * _ScatteringTint.xyz; + float3 extinction = intervalSize * opticalDepth * _ScatteringTint.xyz; for (int o = 0; o < NUM_MULTI_SCATTERING_OCTAVES; ++o) { float msFactor = PositivePow(_MultiScattering, o); @@ -478,8 +477,9 @@ void EvaluateCloud(CloudProperties cloudProperties, EnvironmentLighting envLight // Add the environement lighting contribution totalLuminance += lerp(envLighting.ambientTermBottom, envLighting.ambientTermTop, cloudProperties.height) * 
cloudProperties.ambientOcclusion; - // Note: This is an alterated version of the "Energy-conserving analytical integration" - // For some reason the divison by the clamped extinction just makes it all wrong. + // "Energy-conserving analytical integration" + // See slide 28 at http://www.frostbite.com/2015/08/physically-based-unified-volumetric-rendering-in-frostbite/ + // No division by clamped extinction because albedo == 1 => sigma_s == sigma_e so it simplifies const float3 integScatt = (totalLuminance - totalLuminance * transmittance); volumetricRay.inScattering += integScatt * volumetricRay.transmittance; volumetricRay.transmittance *= transmittance; @@ -538,8 +538,8 @@ VolumetricRayResult TraceVolumetricRay(CloudRay cloudRay) float meanDistanceDivider = 0.0f; // Current position for the evaluation, apply blue noise to start position - float currentDistance = 0; - float3 currentPositionWS = cloudRay.originWS + rayMarchRange.start * cloudRay.direction; + float currentDistance = cloudRay.integrationNoise * stepS; + float3 currentPositionWS = cloudRay.originWS + (rayMarchRange.start + currentDistance) * cloudRay.direction; // Initialize the values for the optimized ray marching bool activeSampling = true; @@ -558,7 +558,7 @@ VolumetricRayResult TraceVolumetricRay(CloudRay cloudRay) { // If the density is null, we can skip as there will be no contribution CloudProperties cloudProperties; - EvaluateCloudProperties(ConvertToPS (currentPositionWS), 0.0f, erosionMipOffset, false, false, cloudProperties); + EvaluateCloudProperties(ConvertToPS(currentPositionWS), 0.0f, erosionMipOffset, false, false, cloudProperties); // Apply the fade in function to the density cloudProperties.density *= densityAttenuationValue; @@ -566,9 +566,9 @@ VolumetricRayResult TraceVolumetricRay(CloudRay cloudRay) if (cloudProperties.density > CLOUD_DENSITY_TRESHOLD) { // Contribute to the average depth (must be done first in case we end up inside a cloud at the next step) - float 
transmitanceXdensity = volumetricRay.transmittance * cloudProperties.density; - volumetricRay.meanDistance += (rayMarchRange.start + currentDistance) * transmitanceXdensity; - meanDistanceDivider += transmitanceXdensity; + // page 43: https://media.contentapi.ea.com/content/dam/eacom/frostbite/files/s2016-pbs-frostbite-sky-clouds-new.pdf + volumetricRay.meanDistance += (rayMarchRange.start + currentDistance) * volumetricRay.transmittance; + meanDistanceDivider += volumetricRay.transmittance; // Evaluate the cloud at the position EvaluateCloud(cloudProperties, cloudRay.envLighting, currentPositionWS, stepS, currentDistance / totalDistance, volumetricRay); @@ -591,9 +591,8 @@ VolumetricRayResult TraceVolumetricRay(CloudRay cloudRay) activeSampling = false; // Do the next step - float relativeStepSize = lerp(cloudRay.integrationNoise, 1.0, saturate(currentIndex)); - currentPositionWS += cloudRay.direction * stepS * relativeStepSize; - currentDistance += stepS * relativeStepSize; + currentPositionWS += cloudRay.direction * stepS; + currentDistance += stepS; } else { diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Material/TerrainLit/TerrainLitTemplate.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/Material/TerrainLit/TerrainLitTemplate.hlsl index 8b5fcd493f1..ee3374bc52a 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Material/TerrainLit/TerrainLitTemplate.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Material/TerrainLit/TerrainLitTemplate.hlsl @@ -27,6 +27,10 @@ #endif #ifdef SCENESELECTIONPASS #include "Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/PickingSpaceTransforms.hlsl" +#elif SHADERPASS == SHADERPASS_LIGHT_TRANSPORT + // Use Unity's built-in matrices for meta pass rendering + #define SCENEPICKINGPASS + #include "Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/PickingSpaceTransforms.hlsl" #endif #include 
"Packages/com.unity.render-pipelines.high-definition/Runtime/Material/Material.hlsl" diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/TemporalAntialiasing.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/TemporalAntialiasing.hlsl index 1a6ef147b7c..70de30c7f4a 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/TemporalAntialiasing.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/TemporalAntialiasing.hlsl @@ -66,8 +66,8 @@ static float2 NeighbourOffsets[8]; void SetNeighbourOffsets(float4 neighbourOffsets[4]) { - UNITY_UNROLL for (int i = 0; i < 16; ++i) - NeighbourOffsets[i/2][i%2] = neighbourOffsets[i/4][i%4]; + UNITY_UNROLL for (uint i = 0; i < 16; ++i) + NeighbourOffsets[i / 2][i % 2] = neighbourOffsets[i / 4][i % 4]; } float2 ClampAndScaleForBilinearWithCustomScale(float2 uv, float2 scale) @@ -595,9 +595,9 @@ CTYPE FilterCentralColor(NeighbourhoodSamples samples, float centralWeight, floa { CTYPE filtered = samples.central * centralWeight; - for (int i = 0; i < NEIGHBOUR_COUNT; ++i) + for (uint i = 0; i < NEIGHBOUR_COUNT; ++i) { - float w = weights[i/4][i%4]; + float w = weights[i / 4][i % 4]; filtered += samples.neighbours[i] * w; } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Camera/HDCamera.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Camera/HDCamera.cs index 6f4447713cb..826d2667be0 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Camera/HDCamera.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Camera/HDCamera.cs @@ -1210,7 +1210,8 @@ internal void Update(FrameSettings currentFrameSettings, HDRenderPipeline hdrp, // The condition inside controls whether we perform init/deinit or not. 
HDRenderPipeline.ReinitializeVolumetricBufferParams(this); - bool isCurrentColorPyramidRequired = frameSettings.IsEnabled(FrameSettingsField.Refraction) || frameSettings.IsEnabled(FrameSettingsField.Distortion); + bool ssmsEnabled = Fog.IsMultipleScatteringEnabled(this, out _); + bool isCurrentColorPyramidRequired = frameSettings.IsEnabled(FrameSettingsField.Refraction) || frameSettings.IsEnabled(FrameSettingsField.Distortion) || ssmsEnabled; bool isHistoryColorPyramidRequired = IsSSREnabled(transparent: false) || IsSSREnabled(transparent: true) || IsSSGIEnabled(); bool isVolumetricHistoryRequired = IsVolumetricReprojectionEnabled(); @@ -1335,7 +1336,7 @@ internal void Update(FrameSettings currentFrameSettings, HDRenderPipeline hdrp, Vector2Int nonScaledViewport = new Vector2Int(actualWidth, actualHeight); - m_DepthBufferMipChainInfo.ComputePackedMipChainInfo(nonScaledViewport); + m_DepthBufferMipChainInfo.ComputePackedMipChainInfo(nonScaledViewport, hdrp.RequiredCheckerboardMipCountInDepthPyramid(this)); historyLowResScale = resetPostProcessingHistory ? 0.5f : lowResScale; historyLowResScaleForScreenSpaceLighting = resetPostProcessingHistory ? 
0.5f : lowResScaleForScreenSpaceLighting; diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDProfileId.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDProfileId.cs index 69a1668a3b7..4bd1d26732c 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDProfileId.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDProfileId.cs @@ -98,7 +98,6 @@ internal enum HDProfileId // Volumetric clouds VolumetricClouds, - VolumetricCloudsDepthDownscale, VolumetricCloudsTrace, VolumetricCloudsReproject, VolumetricCloudsPreUpscale, diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.Debug.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.Debug.cs index 8d908a578d2..aa6b6f4df87 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.Debug.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.Debug.cs @@ -145,7 +145,9 @@ unsafe void ApplyDebugDisplaySettings(HDCamera hdCamera, CommandBuffer cmd, bool // However debug mode like colorPickerModes and false color don't need DEBUG_DISPLAY and must work with the lighting. 
// So we will enabled DEBUG_DISPLAY independently - bool debugDisplayEnabledOrSceneLightingDisabled = m_CurrentDebugDisplaySettings.IsDebugDisplayEnabled() || CoreUtils.IsSceneLightingDisabled(hdCamera.camera); + bool isSceneLightingDisabled = CoreUtils.IsSceneLightingDisabled(hdCamera.camera); + bool debugDisplayEnabledOrSceneLightingDisabled = m_CurrentDebugDisplaySettings.IsDebugDisplayEnabled() || isSceneLightingDisabled; + // Enable globally the keyword DEBUG_DISPLAY on shader that support it with multi-compile CoreUtils.SetKeyword(cmd, "DEBUG_DISPLAY", debugDisplayEnabledOrSceneLightingDisabled); @@ -173,12 +175,6 @@ unsafe void ApplyDebugDisplaySettings(HDCamera hdCamera, CommandBuffer cmd, bool var debugEmissiveColor = new Vector4(lightingDebugSettings.overrideEmissiveColor ? 1.0f : 0.0f, lightingDebugSettings.overrideEmissiveColorValue.r, lightingDebugSettings.overrideEmissiveColorValue.g, lightingDebugSettings.overrideEmissiveColorValue.b); var debugTrueMetalColor = new Vector4(materialDebugSettings.materialValidateTrueMetal ? 
1.0f : 0.0f, materialDebugSettings.materialValidateTrueMetalColor.r, materialDebugSettings.materialValidateTrueMetalColor.g, materialDebugSettings.materialValidateTrueMetalColor.b); - DebugLightingMode debugLightingMode = m_CurrentDebugDisplaySettings.GetDebugLightingMode(); - if (CoreUtils.IsSceneLightingDisabled(hdCamera.camera)) - { - debugLightingMode = DebugLightingMode.MatcapView; - } - ref var cb = ref m_ShaderVariablesDebugDisplayCB; var debugMaterialIndices = m_CurrentDebugDisplaySettings.GetDebugMaterialIndexes(); @@ -202,6 +198,31 @@ unsafe void ApplyDebugDisplaySettings(HDCamera hdCamera, CommandBuffer cmd, bool } } + DebugLightingMode debugLightingMode = m_CurrentDebugDisplaySettings.GetDebugLightingMode(); + + // Mat Cap Mode Logic + { + bool matCapMixAlbedo = false; + float matCapMixScale = 1.0f; + + if (debugLightingMode == DebugLightingMode.MatcapView) + { + matCapMixAlbedo = m_CurrentDebugDisplaySettings.data.lightingDebugSettings.matCapMixAlbedo; + matCapMixScale = m_CurrentDebugDisplaySettings.data.lightingDebugSettings.matCapMixScale; + } +#if UNITY_EDITOR + else if (isSceneLightingDisabled) + { + // Forcing the MatCap Mode when scene view lighting is disabled. Also use the default values + debugLightingMode = DebugLightingMode.MatcapView; + matCapMixAlbedo = HDRenderPipelinePreferences.matCapMode.mixAlbedo.value; + matCapMixScale = HDRenderPipelinePreferences.matCapMode.viewScale.value; + } +#endif + cb._MatcapMixAlbedo = matCapMixAlbedo ? 
1 : 0; + cb._MatcapViewScale = matCapMixScale; + } + cb._DebugLightingMode = (int)debugLightingMode; cb._DebugLightLayersMask = (int)m_CurrentDebugDisplaySettings.GetDebugLightLayersMask(); cb._DebugShadowMapMode = (int)m_CurrentDebugDisplaySettings.GetDebugShadowMapMode(); @@ -215,13 +236,6 @@ unsafe void ApplyDebugDisplaySettings(HDCamera hdCamera, CommandBuffer cmd, bool cb._ColorPickerMode = (int)m_CurrentDebugDisplaySettings.GetDebugColorPickerMode(); cb._DebugFullScreenMode = (int)m_CurrentDebugDisplaySettings.data.fullScreenDebugMode; -#if UNITY_EDITOR - cb._MatcapMixAlbedo = HDRenderPipelinePreferences.matcapViewMixAlbedo ? 1 : 0; - cb._MatcapViewScale = HDRenderPipelinePreferences.matcapViewScale; -#else - cb._MatcapMixAlbedo = 0; - cb._MatcapViewScale = 1.0f; -#endif cb._DebugViewportSize = hdCamera.screenSize; cb._DebugLightingAlbedo = debugAlbedo; cb._DebugLightingSmoothness = debugSmoothness; @@ -561,8 +575,7 @@ class ResolveFullScreenDebugPassData public DebugDisplaySettings debugDisplaySettings; public Material debugFullScreenMaterial; public HDCamera hdCamera; - public int depthPyramidMip; - public ComputeBuffer depthPyramidOffsets; + public Vector4 depthPyramidParams; public TextureHandle output; public TextureHandle input; public TextureHandle depthPyramid; @@ -580,14 +593,24 @@ TextureHandle ResolveFullScreenDebug(RenderGraph renderGraph, TextureHandle inpu passData.debugFullScreenMaterial = m_DebugFullScreen; passData.input = builder.ReadTexture(inputFullScreenDebug); passData.depthPyramid = builder.ReadTexture(depthPyramid); + { + int mipCount = hdCamera.depthBufferMipChainInfo.mipLevelCount; + int mipIndex = Mathf.Min(Mathf.FloorToInt(m_CurrentDebugDisplaySettings.data.fullscreenDebugMip * mipCount), mipCount - 1); + Vector2Int mipOffset = hdCamera.depthBufferMipChainInfo.mipLevelOffsets[mipIndex]; + if (m_CurrentDebugDisplaySettings.data.depthPyramidView == DepthPyramidDebugView.CheckerboardDepth && 
hdCamera.depthBufferMipChainInfo.mipLevelCountCheckerboard != 0) + { + mipIndex = Mathf.Min(mipIndex, hdCamera.depthBufferMipChainInfo.mipLevelCountCheckerboard - 1); + mipOffset = hdCamera.depthBufferMipChainInfo.mipLevelOffsetsCheckerboard[mipIndex]; + } + passData.depthPyramidParams = new Vector4(mipIndex, mipOffset.x, mipOffset.y, 0.0f); + } + if (IsComputeThicknessNeeded(hdCamera)) passData.thickness = builder.ReadTexture(HDComputeThickness.Instance.GetThicknessTextureArray()); else passData.thickness = builder.ReadTexture(renderGraph.defaultResources.blackTextureArrayXR); - passData.thicknessReindex = builder.ReadBuffer(renderGraph.ImportBuffer(HDComputeThickness.Instance.GetReindexMap())); - passData.depthPyramidMip = (int)(m_CurrentDebugDisplaySettings.data.fullscreenDebugMip * hdCamera.depthBufferMipChainInfo.mipLevelCount); - passData.depthPyramidOffsets = hdCamera.depthBufferMipChainInfo.GetOffsetBufferData(m_DepthPyramidMipLevelOffsetsBuffer); + // On Vulkan, not binding the Random Write Target will result in an invalid drawcall. // To avoid that, if the compute buffer is invalid, we bind a dummy compute buffer anyway. 
if (m_DebugFullScreenComputeBuffer.IsValid()) @@ -613,8 +636,7 @@ TextureHandle ResolveFullScreenDebug(RenderGraph renderGraph, TextureHandle inpu mpb.SetVector(HDShaderIDs._FullScreenDebugDepthRemap, new Vector4(data.debugDisplaySettings.data.fullScreenDebugDepthRemap.x, data.debugDisplaySettings.data.fullScreenDebugDepthRemap.y, data.hdCamera.camera.nearClipPlane, data.hdCamera.camera.farClipPlane)); else // Setup neutral value mpb.SetVector(HDShaderIDs._FullScreenDebugDepthRemap, new Vector4(0.0f, 1.0f, 0.0f, 1.0f)); - mpb.SetInt(HDShaderIDs._DebugDepthPyramidMip, data.depthPyramidMip); - mpb.SetBuffer(HDShaderIDs._DebugDepthPyramidOffsets, data.depthPyramidOffsets); + mpb.SetVector(HDShaderIDs._DebugDepthPyramidParams, data.depthPyramidParams); mpb.SetInt(HDShaderIDs._DebugContactShadowLightIndex, data.debugDisplaySettings.data.fullScreenContactShadowLightIndex); mpb.SetFloat(HDShaderIDs._TransparencyOverdrawMaxPixelCost, (float)data.debugDisplaySettings.data.transparencyDebugSettings.maxPixelCost); mpb.SetFloat(HDShaderIDs._FogVolumeOverdrawMaxValue, (float)volumetricSliceCount); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.Prepass.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.Prepass.cs index 67854ef972f..2145654868c 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.Prepass.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.Prepass.cs @@ -10,7 +10,7 @@ public partial class HDRenderPipeline Material m_MSAAResolveMaterial, m_MSAAResolveMaterialDepthOnly; Material m_CameraMotionVectorsMaterial; Material m_DecalNormalBufferMaterial; - Material m_DownsampleDepthMaterialHalfresCheckerboard; + Material m_DownsampleDepthMaterialLoad; Material m_DownsampleDepthMaterialGather; Material[] m_ComputeThicknessOpaqueMaterial; Material[] 
m_ComputeThicknessTransparentMaterial; @@ -35,7 +35,7 @@ void InitializePrepass(HDRenderPipelineAsset hdAsset) m_MSAAResolveMaterialDepthOnly.EnableKeyword("_DEPTH_ONLY"); m_CameraMotionVectorsMaterial = CoreUtils.CreateEngineMaterial(runtimeShaders.cameraMotionVectorsPS); m_DecalNormalBufferMaterial = CoreUtils.CreateEngineMaterial(runtimeShaders.decalNormalBufferPS); - m_DownsampleDepthMaterialHalfresCheckerboard = CoreUtils.CreateEngineMaterial(runtimeShaders.downsampleDepthPS); + m_DownsampleDepthMaterialLoad = CoreUtils.CreateEngineMaterial(runtimeShaders.downsampleDepthPS); m_DownsampleDepthMaterialGather = CoreUtils.CreateEngineMaterial(runtimeShaders.downsampleDepthPS); m_DownsampleDepthMaterialGather.EnableKeyword("GATHER_DOWNSAMPLE"); m_ComputeThicknessOpaqueMaterial = new Material[m_MaxXRViewsCount]; @@ -64,7 +64,7 @@ void CleanupPrepass() CoreUtils.Destroy(m_MSAAResolveMaterialDepthOnly); CoreUtils.Destroy(m_CameraMotionVectorsMaterial); CoreUtils.Destroy(m_DecalNormalBufferMaterial); - CoreUtils.Destroy(m_DownsampleDepthMaterialHalfresCheckerboard); + CoreUtils.Destroy(m_DownsampleDepthMaterialLoad); CoreUtils.Destroy(m_DownsampleDepthMaterialGather); m_ComputeThicknessReindexMap.Dispose(); for (int viewId = 0; viewId < m_MaxXRViewsCount; ++viewId) @@ -410,21 +410,9 @@ PrepassOutput RenderPrepass(RenderGraph renderGraph, if (depthBufferModified) m_IsDepthBufferCopyValid = false; - // Only on consoles is safe to read and write from/to the depth atlas - bool mip1FromDownsampleForLowResTrans = SystemInfo.graphicsDeviceType == GraphicsDeviceType.PlayStation4 || - SystemInfo.graphicsDeviceType == GraphicsDeviceType.PlayStation5 || - SystemInfo.graphicsDeviceType == GraphicsDeviceType.PlayStation5NGGC || - SystemInfo.graphicsDeviceType == GraphicsDeviceType.XboxOne || - SystemInfo.graphicsDeviceType == GraphicsDeviceType.XboxOneD3D12 || - SystemInfo.graphicsDeviceType == GraphicsDeviceType.GameCoreXboxOne || - SystemInfo.graphicsDeviceType == 
GraphicsDeviceType.GameCoreXboxSeries; - - mip1FromDownsampleForLowResTrans = mip1FromDownsampleForLowResTrans && hdCamera.frameSettings.IsEnabled(FrameSettingsField.LowResTransparent) && hdCamera.isLowResScaleHalf; - - DownsampleDepthForLowResTransparency(renderGraph, hdCamera, mip1FromDownsampleForLowResTrans, ref result); - // In both forward and deferred, everything opaque should have been rendered at this point so we can safely copy the depth buffer for later processing. - GenerateDepthPyramid(renderGraph, hdCamera, mip1FromDownsampleForLowResTrans, ref result); + GenerateDepthPyramid(renderGraph, hdCamera, ref result); + DownsampleDepthForLowResTransparency(renderGraph, hdCamera, ref result); // In case we don't have MSAA, we always run camera motion vectors when is safe to assume Object MV are rendered if (!needCameraMVBeforeResolve) @@ -1468,18 +1456,35 @@ class DownsampleDepthForLowResPassData { public bool useGatherDownsample; public float downsampleScale; + public Vector2Int loadOffset; public Material downsampleDepthMaterial; public TextureHandle depthTexture; - public TextureHandle depthPyramidTexture; public TextureHandle downsampledDepthBuffer; public Rect viewport; + } + + internal int RequiredCheckerboardMipCountInDepthPyramid(HDCamera hdCamera) + { + int mipCount = 0; + + // lowres transparency needs 1 mip + if (hdCamera.frameSettings.IsEnabled(FrameSettingsField.LowResTransparent) + && hdCamera.isLowResScaleHalf + && m_Asset.currentPlatformRenderPipelineSettings.lowresTransparentSettings.checkerboardDepthBuffer) + { + mipCount = Mathf.Max(mipCount, 1); + } + + // Volumetric clouds need 1 mip + if (HasVolumetricClouds(hdCamera)) + { + mipCount = Mathf.Max(mipCount, 1); + } - // Data needed for potentially writing - public Vector2Int mip0Offset; - public bool computesMip1OfAtlas; + return mipCount; } - void DownsampleDepthForLowResTransparency(RenderGraph renderGraph, HDCamera hdCamera, bool computeMip1OfPyramid, ref PrepassOutput output) + void 
DownsampleDepthForLowResTransparency(RenderGraph renderGraph, HDCamera hdCamera, ref PrepassOutput output) { // If the depth buffer hasn't been already copied by the decal depth buffer pass, then we do the copy here. CopyDepthBufferIfNeeded(renderGraph, hdCamera, ref output); @@ -1489,34 +1494,19 @@ void DownsampleDepthForLowResTransparency(RenderGraph renderGraph, HDCamera hdCa passData.useGatherDownsample = false; if (hdCamera.isLowResScaleHalf) { - if (m_Asset.currentPlatformRenderPipelineSettings.lowresTransparentSettings.checkerboardDepthBuffer) - { - m_DownsampleDepthMaterialHalfresCheckerboard.EnableKeyword("CHECKERBOARD_DOWNSAMPLE"); - } - else - { - m_DownsampleDepthMaterialHalfresCheckerboard.DisableKeyword("CHECKERBOARD_DOWNSAMPLE"); - } - if (computeMip1OfPyramid) - { - passData.mip0Offset = hdCamera.depthBufferMipChainInfo.mipLevelOffsets[1]; - m_DownsampleDepthMaterialHalfresCheckerboard.EnableKeyword("OUTPUT_FIRST_MIP_OF_MIPCHAIN"); - } - passData.downsampleDepthMaterial = m_DownsampleDepthMaterialHalfresCheckerboard; + passData.downsampleDepthMaterial = m_DownsampleDepthMaterialLoad; + passData.loadOffset = m_Asset.currentPlatformRenderPipelineSettings.lowresTransparentSettings.checkerboardDepthBuffer + ? 
hdCamera.depthBufferMipChainInfo.mipLevelOffsetsCheckerboard[1] + : hdCamera.depthBufferMipChainInfo.mipLevelOffsets[1]; } else { - m_DownsampleDepthMaterialGather.EnableKeyword("GATHER_DOWNSAMPLE"); passData.downsampleDepthMaterial = m_DownsampleDepthMaterialGather; passData.useGatherDownsample = true; } - - passData.computesMip1OfAtlas = computeMip1OfPyramid; passData.downsampleScale = hdCamera.lowResScale; passData.viewport = hdCamera.lowResViewport; - passData.depthTexture = builder.ReadTexture(output.resolvedDepthBuffer); - if (computeMip1OfPyramid) - passData.depthPyramidTexture = builder.WriteTexture(output.depthPyramidTexture); + passData.depthTexture = builder.ReadTexture(output.depthPyramidTexture); passData.downsampledDepthBuffer = builder.UseDepthBuffer(renderGraph.CreateTexture( new TextureDesc(Vector2.one * hdCamera.lowResScale, true, true) { depthBufferBits = DepthBits.Depth32, name = "LowResDepthBuffer" }), DepthAccess.Write); @@ -1524,36 +1514,24 @@ void DownsampleDepthForLowResTransparency(RenderGraph renderGraph, HDCamera hdCa builder.SetRenderFunc( (DownsampleDepthForLowResPassData data, RenderGraphContext context) => { - if (data.computesMip1OfAtlas) - { - data.downsampleDepthMaterial.SetVector(HDShaderIDs._DstOffset, new Vector4(data.mip0Offset.x, data.mip0Offset.y, 0.0f, 0.0f)); - context.cmd.SetRandomWriteTarget(1, data.depthPyramidTexture); - } - + Vector4 scaleBias = Vector4.zero; if (data.useGatherDownsample) { float downsampleScaleInv = 1.0f / data.downsampleScale; RenderTexture srcTexture = data.depthTexture; RenderTexture destTexture = data.downsampledDepthBuffer; - float uvScaleX = 1.0f; - float uvScaleY = 1.0f; - if (!DynamicResolutionHandler.instance.HardwareDynamicResIsEnabled()) - { - uvScaleX = ((float)destTexture.width / (float)srcTexture.width) * downsampleScaleInv; - uvScaleY = ((float)destTexture.height / (float)srcTexture.height) * downsampleScaleInv; - } - - data.downsampleDepthMaterial.SetVector(HDShaderIDs._ScaleBias, new 
Vector4(uvScaleX, uvScaleY, 0.0f, 0.0f)); + scaleBias.x = ((float)destTexture.width / (float)srcTexture.width) * downsampleScaleInv; + scaleBias.y = ((float)destTexture.height / (float)srcTexture.height) * downsampleScaleInv; + } + else + { + scaleBias.z = data.loadOffset.x; + scaleBias.w = data.loadOffset.y; } + context.cmd.SetGlobalVector(HDShaderIDs._ScaleBias, scaleBias); context.cmd.SetViewport(data.viewport); - context.cmd.SetGlobalTexture(HDShaderIDs._SourceDownsampleDepth, data.depthTexture); context.cmd.DrawProcedural(Matrix4x4.identity, data.downsampleDepthMaterial, 0, MeshTopology.Triangles, 3, 1, null); - - if (data.computesMip1OfAtlas) - { - context.cmd.ClearRandomWriteTargets(); - } }); output.downsampledDepthBuffer = passData.downsampledDepthBuffer; @@ -1565,11 +1543,9 @@ class GenerateDepthPyramidPassData public TextureHandle depthTexture; public HDUtils.PackedMipChainInfo mipInfo; public MipGenerator mipGenerator; - - public bool mip0AlreadyComputed; } - void GenerateDepthPyramid(RenderGraph renderGraph, HDCamera hdCamera, bool mip0AlreadyComputed, ref PrepassOutput output) + void GenerateDepthPyramid(RenderGraph renderGraph, HDCamera hdCamera, ref PrepassOutput output) { if (!hdCamera.frameSettings.IsEnabled(FrameSettingsField.OpaqueObjects)) { @@ -1585,12 +1561,11 @@ void GenerateDepthPyramid(RenderGraph renderGraph, HDCamera hdCamera, bool mip0A passData.depthTexture = builder.WriteTexture(output.depthPyramidTexture); passData.mipInfo = hdCamera.depthBufferMipChainInfo; passData.mipGenerator = m_MipGenerator; - passData.mip0AlreadyComputed = mip0AlreadyComputed; builder.SetRenderFunc( (GenerateDepthPyramidPassData data, RenderGraphContext context) => { - data.mipGenerator.RenderMinDepthPyramid(context.cmd, data.depthTexture, data.mipInfo, data.mip0AlreadyComputed); + data.mipGenerator.RenderMinDepthPyramid(context.cmd, data.depthTexture, data.mipInfo); }); output.depthPyramidTexture = passData.depthTexture; diff --git 
a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs index 608af364621..9ce84ea8433 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs @@ -50,7 +50,7 @@ void RecordRenderGraph(RenderRequest renderRequest, // We need to initialize the MipChainInfo here, so it will be available to any render graph pass that wants to use it during setup // Be careful, ComputePackedMipChainInfo needs the render texture size and not the viewport size. Otherwise it would compute the wrong size. - hdCamera.depthBufferMipChainInfo.ComputePackedMipChainInfo(RTHandles.rtHandleProperties.currentRenderTargetSize); + hdCamera.depthBufferMipChainInfo.ComputePackedMipChainInfo(RTHandles.rtHandleProperties.currentRenderTargetSize, RequiredCheckerboardMipCountInDepthPyramid(hdCamera)); // Bind the depth pyramid offset info for the HDSceneDepth node in ShaderGraph. This can be used by users in custom passes. 
commandBuffer.SetGlobalBuffer(HDShaderIDs._DepthPyramidMipLevelOffsets, hdCamera.depthBufferMipChainInfo.GetOffsetBufferData(m_DepthPyramidMipLevelOffsetsBuffer)); @@ -240,7 +240,7 @@ void RecordRenderGraph(RenderRequest renderRequest, colorBuffer = RenderOpaqueFog(m_RenderGraph, hdCamera, colorBuffer, volumetricLighting, msaa, in prepassOutput, in transparentPrepass, ref opticalFogTransmittance); - RenderClouds(m_RenderGraph, hdCamera, colorBuffer, prepassOutput.depthBuffer, volumetricLighting, in prepassOutput, ref transparentPrepass, ref opticalFogTransmittance); + RenderClouds(m_RenderGraph, hdCamera, colorBuffer, prepassOutput.depthBuffer, in prepassOutput, ref transparentPrepass, ref opticalFogTransmittance); colorBuffer = RenderTransparency(m_RenderGraph, hdCamera, colorBuffer, prepassOutput.resolvedNormalBuffer, vtFeedbackBuffer, currentColorPyramid, volumetricLighting, rayCountTexture, opticalFogTransmittance, m_SkyManager.GetSkyReflection(hdCamera), gpuLightListOutput, transparentPrepass, ref prepassOutput, shadowResult, cullingResults, customPassCullingResults, aovRequest, aovCustomPassBuffers); @@ -1940,14 +1940,14 @@ TextureHandle RenderOpaqueFog(RenderGraph renderGraph, HDCamera hdCamera, Textur return m_SkyManager.RenderOpaqueAtmosphericScattering(renderGraph, hdCamera, in refractionOutput, colorBuffer, msaa ? 
prepassOutput.depthAsColor : prepassOutput.depthPyramidTexture, volumetricLighting, prepassOutput.depthBuffer, prepassOutput.normalBuffer, ref opticalFogTransmittance); } - void RenderClouds(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle colorBuffer, TextureHandle depthStencilBuffer, TextureHandle volumetricLighting, in PrepassOutput prepassOutput, ref TransparentPrepassOutput transparentPrepass, ref TextureHandle opticalFogTransmittance) + void RenderClouds(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle colorBuffer, TextureHandle depthStencilBuffer, in PrepassOutput prepassOutput, ref TransparentPrepassOutput transparentPrepass, ref TextureHandle opticalFogTransmittance) { if (m_CurrentDebugDisplaySettings.DebugHideSky(hdCamera)) return; m_SkyManager.RenderClouds(renderGraph, hdCamera, colorBuffer, depthStencilBuffer, ref opticalFogTransmittance); - RenderVolumetricClouds(m_RenderGraph, hdCamera, colorBuffer, prepassOutput.depthPyramidTexture, volumetricLighting, ref transparentPrepass, ref opticalFogTransmittance); + RenderVolumetricClouds(m_RenderGraph, hdCamera, colorBuffer, prepassOutput.depthPyramidTexture, ref transparentPrepass, ref opticalFogTransmittance); } class GenerateColorPyramidData diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs index fef5b7e5343..1b9914c962b 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs @@ -1423,6 +1423,7 @@ public struct Target public CameraSettings cameraSettings; public List<(HDProbe.RenderData, HDProbe)> viewDependentProbesData; public bool cullingResultIsShared; + public XRPass xrPass; } private void VisitRenderRequestRecursive(List requests, List visitStatus, int requestIndex, List 
renderIndices) @@ -1475,7 +1476,10 @@ internal void Reset() } } - bool PrepareAndCullCamera(Camera camera, XRPass xrPass, bool cameraRequestedDynamicRes, + bool PrepareAndCullCamera( + Camera camera, + XRPass xrPass, + bool cameraRequestedDynamicRes, List renderRequests, ScriptableRenderContext renderContext, out RenderRequest renderRequest, @@ -1534,7 +1538,7 @@ bool PrepareAndCullCamera(Camera camera, XRPass xrPass, bool cameraRequestedDyna if (needCulling) { - skipRequest = !TryCull(camera, hdCamera, renderContext, m_SkyManager, cullingParameters, m_Asset, ref cullingResults); + skipRequest = !TryCull(camera, hdCamera, renderContext, m_SkyManager, cullingParameters, m_Asset, xrPass, ref cullingResults); } } @@ -1552,7 +1556,11 @@ bool PrepareAndCullCamera(Camera camera, XRPass xrPass, bool cameraRequestedDyna hdCamera.UpdateGlobalMipBiasCB(ref m_ShaderVariablesGlobalCB, GetGlobalMipBias(hdCamera)); ConstantBuffer.PushGlobal(m_ShaderVariablesGlobalCB, HDShaderIDs._ShaderVariablesGlobal); // Execute custom render - BeginCameraRendering(renderContext, camera); + if (xrPass.isFirstCameraPass) + { + BeginCameraRendering(renderContext, camera); + } + additionalCameraData.ExecuteCustomRender(renderContext, hdCamera); } @@ -1561,7 +1569,10 @@ bool PrepareAndCullCamera(Camera camera, XRPass xrPass, bool cameraRequestedDyna // Submit render context and free pooled resources for this request renderContext.Submit(); m_CullingResultsPool.Release(cullingResults); - UnityEngine.Rendering.RenderPipeline.EndCameraRendering(renderContext, camera); + if (xrPass.isLastCameraPass) + { + EndCameraRendering(renderContext, camera); + } return false; } @@ -1601,7 +1612,8 @@ bool PrepareAndCullCamera(Camera camera, XRPass xrPass, bool cameraRequestedDyna index = renderRequests.Count, cameraSettings = CameraSettings.From(hdCamera), viewDependentProbesData = ListPool<(HDProbe.RenderData, HDProbe)>.Get(), - cullingResultIsShared = cullingResultIsShared + cullingResultIsShared = 
cullingResultIsShared, + xrPass = xrPass // TODO: store DecalCullResult }; renderRequests.Add(renderRequest); @@ -1819,10 +1831,7 @@ ScriptableRenderContext renderContext out var hdCamera, out var cullingParameters ) - && TryCull( - camera, hdCamera, renderContext, m_SkyManager, cullingParameters, m_Asset, - ref _cullingResults - ) + && TryCull(camera, hdCamera, renderContext, m_SkyManager, cullingParameters, m_Asset, XRSystem.emptyPass, ref _cullingResults) )) { // Skip request and free resources @@ -1906,7 +1915,8 @@ ref _cullingResults dependsOnRenderRequestIndices = ListPool.Get(), index = renderRequests.Count, cameraSettings = cameraSettings[j], - viewDependentProbesData = ListPool<(HDProbe.RenderData, HDProbe)>.Get() + viewDependentProbesData = ListPool<(HDProbe.RenderData, HDProbe)>.Get(), + xrPass = XRSystem.emptyPass, // TODO: store DecalCullResult }; @@ -2387,9 +2397,12 @@ protected override void Render(ScriptableRenderContext renderContext, Camera[] c cmd.SetInvertCulling(false); } - // EndCameraRendering callback should be executed outside of any profiling scope in case user code submits the renderContext - EndCameraRendering(renderContext, renderRequest.hdCamera.camera); - + if (renderRequest.xrPass.isLastCameraPass) + { + // EndCameraRendering callback should be executed outside of any profiling scope in case user code submits the renderContext + EndCameraRendering(renderContext, renderRequest.hdCamera.camera); + } + EndRenderRequest(renderRequest, cmd); // Render XR mirror view once all render requests have been completed @@ -2508,7 +2521,7 @@ protected override void ProcessRenderRequests(ScriptableRenderConte //temporarily disable AOV requests if(hdCam != null) { - existingAOVs = (AOVRequestDataCollection) hdCam.aovRequests; + existingAOVs = (AOVRequestDataCollection)hdCam.aovRequests; hdCam.SetAOVRequests(s_EmptyAOVRequests); } @@ -2588,7 +2601,7 @@ protected override void ProcessRenderRequests(ScriptableRenderConte } else { - 
Debug.LogWarning("RenderRequest type: " + typeof(RequestData).FullName + " is either invalid or unsupported by HDRP"); + Debug.LogWarning("RenderRequest type: " + typeof(RequestData).FullName + " is either invalid or unsupported by HDRP"); } } @@ -2686,7 +2699,7 @@ AOVRequestData aovRequest } } - Func flagsFunc = delegate (HDCamera hdCamera, HDAdditionalLightData data, Light light) + Func flagsFunc = delegate (HDCamera hdCamera, HDAdditionalLightData data, Light light) { uint result = 0u; @@ -3057,6 +3070,7 @@ static bool TryCull( SkyManager skyManager, ScriptableCullingParameters cullingParams, HDRenderPipelineAsset hdrp, + XRPass xrPass, ref HDCullingResults cullingResults ) { @@ -3094,8 +3108,11 @@ ref HDCullingResults cullingResults QualitySettings.maximumLODLevel = hdCamera.frameSettings.GetResolvedMaximumLODLevel(hdrp); #endif - // This needs to be called before culling, otherwise in the case where users generate intermediate renderers, it can provoke crashes. - BeginCameraRendering(renderContext, camera); + if (xrPass.isFirstCameraPass) + { + // This needs to be called before culling, otherwise in the case where users generate intermediate renderers, it can provoke crashes.
+ BeginCameraRendering(renderContext, camera); + } DecalSystem.CullRequest decalCullRequest = null; if (hdCamera.frameSettings.IsEnabled(FrameSettingsField.Decals)) diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDStringConstants.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDStringConstants.cs index ea45d0e6549..d4eff01b202 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDStringConstants.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDStringConstants.cs @@ -229,8 +229,7 @@ static class HDShaderIDs public static readonly int _AmbientOcclusionTexture = Shader.PropertyToID("_AmbientOcclusionTexture"); public static readonly int _AmbientOcclusionTextureRW = Shader.PropertyToID("_AmbientOcclusionTextureRW"); public static readonly int _MultiAmbientOcclusionTexture = Shader.PropertyToID("_MultiAmbientOcclusionTexture"); - public static readonly int _DebugDepthPyramidMip = Shader.PropertyToID("_DebugDepthPyramidMip"); - public static readonly int _DebugDepthPyramidOffsets = Shader.PropertyToID("_DebugDepthPyramidOffsets"); + public static readonly int _DebugDepthPyramidParams = Shader.PropertyToID("_DebugDepthPyramidParams"); public static readonly int _UseTileLightList = Shader.PropertyToID("_UseTileLightList"); @@ -378,6 +377,7 @@ static class HDShaderIDs public static readonly int _InputVal = Shader.PropertyToID("_InputVal"); public static readonly int _Sizes = Shader.PropertyToID("_Sizes"); public static readonly int _ScaleBias = Shader.PropertyToID("_ScaleBias"); + public static readonly int _DstOffset = Shader.PropertyToID("_DstOffset"); // MSAA shader properties public static readonly int _ColorTextureMS = Shader.PropertyToID("_ColorTextureMS"); @@ -520,7 +520,6 @@ static class HDShaderIDs public static readonly int _ComputeThicknessScale = Shader.PropertyToID("_ComputeThicknessScale"); public static readonly 
int _ComputeThicknessShowOverlapCount = Shader.PropertyToID("_ComputeThicknessShowOverlapCount"); public static readonly int _VolumetricCloudsDebugMode = Shader.PropertyToID("_VolumetricCloudsDebugMode"); - public static readonly int _SourceDownsampleDepth = Shader.PropertyToID("_SourceDownsampleDepth"); public static readonly int _InputCubemap = Shader.PropertyToID("_InputCubemap"); public static readonly int _Mipmap = Shader.PropertyToID("_Mipmap"); @@ -556,7 +555,6 @@ static class HDShaderIDs public static readonly int _PixelCoordToViewDirWS = Shader.PropertyToID("_PixelCoordToViewDirWS"); // Clouds - public static readonly int _VolumetricCloudsSourceDepth = Shader.PropertyToID("_VolumetricCloudsSourceDepth"); public static readonly int _CloudsLightingTexture = Shader.PropertyToID("_CloudsLightingTexture"); public static readonly int _CloudsLightingTextureRW = Shader.PropertyToID("_CloudsLightingTextureRW"); public static readonly int _HalfResDepthBufferRW = Shader.PropertyToID("_HalfResDepthBufferRW"); @@ -676,8 +674,8 @@ static class HDShaderIDs public static readonly int _SrcOffsetAndLimit = Shader.PropertyToID("_SrcOffsetAndLimit"); public static readonly int _SrcScaleBias = Shader.PropertyToID("_SrcScaleBias"); public static readonly int _SrcUvLimits = Shader.PropertyToID("_SrcUvLimits"); - public static readonly int _DstOffset = Shader.PropertyToID("_DstOffset"); public static readonly int _DepthMipChain = Shader.PropertyToID("_DepthMipChain"); + public static readonly int _DepthPyramidConstants = Shader.PropertyToID("DepthPyramidConstants"); public static readonly int _VBufferDensity = Shader.PropertyToID("_VBufferDensity"); public static readonly int _VBufferLighting = Shader.PropertyToID("_VBufferLighting"); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/LineRendering/Kernels/StageRasterBin.compute 
b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/LineRendering/Kernels/StageRasterBin.compute index 289aa33bc62..e25cb635245 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/LineRendering/Kernels/StageRasterBin.compute +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/LineRendering/Kernels/StageRasterBin.compute @@ -103,16 +103,18 @@ void Main(Group group) const SegmentRecord s_segment = LoadSegmentRecord(_SegmentRecordBuffer, s_segmentIndex); - const float w0 = asfloat(_Vertex1RecordBuffer.Load((s_segment.vertexIndex0 << 4) + 12u)); - const float w1 = asfloat(_Vertex1RecordBuffer.Load((s_segment.vertexIndex1 << 4) + 12u)); - // Choose the larger of the two widths, enforce a minimum of one tile. - const float screenSpaceWidthPadding = ceil(max(w0, w1)); + float screenSpaceWidthPadding = 1; uint2 v_segmentMin, v_segmentMax; if(s_segmentIndex < gs_ActiveSegmentCount) + { + const float w0 = asfloat(_Vertex1RecordBuffer.Load((s_segment.vertexIndex0 << 4) + 12u)); + const float w1 = asfloat(_Vertex1RecordBuffer.Load((s_segment.vertexIndex1 << 4) + 12u)); + screenSpaceWidthPadding = ceil(max(w0, w1)); GetSegmentBoundingBox(s_segment, screenSpaceWidthPadding, v_segmentMin, v_segmentMax); + } else { // Fall back to a zero-sized AABB to prevent uninitialized memory creating a huge one. 
diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/LineRendering/Kernels/StageRasterFine.compute b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/LineRendering/Kernels/StageRasterFine.compute index 79d65e5434b..cd80333beb3 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/LineRendering/Kernels/StageRasterFine.compute +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/LineRendering/Kernels/StageRasterFine.compute @@ -213,7 +213,7 @@ Fragment GetFragment(Segment segment, float2 coord) // Unfortunately need a NaN guard here. fragment.colorAndAlpha = max(fragment.colorAndAlpha, 0); - fragment.colorAndAlpha.a = fragment.coverage; + fragment.colorAndAlpha.a *= fragment.coverage; } #if COMPILING_SHADER diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/LineRendering/Kernels/StageSetupSegment.compute b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/LineRendering/Kernels/StageSetupSegment.compute index d3ef3e1907c..fa274be91e1 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/LineRendering/Kernels/StageSetupSegment.compute +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/LineRendering/Kernels/StageSetupSegment.compute @@ -147,7 +147,7 @@ void Main(Group group) const float2 positionSS1 = _ScreenSize.xy * (0.5 + 0.5 * positionNDC1.xy); // Depth test against each segment vertex. 
- if(positionNDC1.z < LoadCameraDepth(float2(positionSS1.x, _ScreenSize.y - positionSS1.y))) + if(positionNDC1.z < LoadCameraDepth(float2(positionSS1.x, _ScreenSize.y - positionSS1.y)) && positionNDC0.z < LoadCameraDepth(float2(positionSS0.x, _ScreenSize.y - positionSS0.y))) { culled = true; } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/RayTracingCommon.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/RayTracingCommon.hlsl index cb9b92c634f..09a6b648ffc 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/RayTracingCommon.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/RayTracingCommon.hlsl @@ -61,9 +61,19 @@ struct StandardBSDFData uint isUnlit; }; +uint2 ComputeCheckerBoardOffset(uint2 traceCoord, uint subPixelIndex) +{ + uint checker = (traceCoord.x & 1) ^ (traceCoord.y & 1); + subPixelIndex = (subPixelIndex + checker) & 0x3; + return uint2(((subPixelIndex >> 1) ^ subPixelIndex) & 1, subPixelIndex >> 1); +} + // This function compute the checkerboard undersampling position +// Warning: This function is broken, but keeping it to not break anything +// Use ComputeCheckerBoardOffset instead uint ComputeCheckerBoardIndex(uint2 traceCoord, uint subPixelIndex) { + // TODO: missing parenthesis around the & operations uint localOffset = (traceCoord.x & 1 + traceCoord.y & 1) & 1; uint checkerBoardLocation = (subPixelIndex + localOffset) & 0x3; return checkerBoardLocation; diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/DepthPyramid.compute b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/DepthPyramid.compute index c0657d2d341..57e79ae6baf 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/DepthPyramid.compute +++ 
b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/DepthPyramid.compute @@ -1,47 +1,129 @@ #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl" #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/TextureXR.hlsl" +#include "Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/DepthPyramidConstants.cs.hlsl" #pragma only_renderers d3d11 playstation xboxone xboxseries vulkan metal switch -#pragma kernel KDepthDownsample8DualUav KERNEL_SIZE=8 KERNEL_NAME=KDepthDownsample8DualUav +#pragma multi_compile_local _ ENABLE_CHECKERBOARD -RW_TEXTURE2D_X(float, _DepthMipChain); +#pragma kernel KDepthDownsample8DualUav KERNEL_NAME=KDepthDownsample8DualUav -CBUFFER_START(cb) - uint4 _SrcOffsetAndLimit; // {x, y, w - 1, h - 1} - uint4 _DstOffset; // {x, y, 0, 0} -CBUFFER_END +RW_TEXTURE2D_X(float, _DepthMipChain); #if UNITY_REVERSED_Z -# define MIN_DEPTH(l, r) max(l, r) +#define MIN_DEPTH(A, B) max((A), (B)) +#define MIN3_DEPTH(A, B, C) Max3((A), (B), (C)) +#define MAX_DEPTH(A, B) min((A), (B)) +#define MAX3_DEPTH(A, B, C) Min3((A), (B), (C)) #else -# define MIN_DEPTH(l, r) min(l, r) +#define MIN_DEPTH(A, B) min((A), (B)) +#define MIN3_DEPTH(A, B, C) Min3((A), (B), (C)) +#define MAX_DEPTH(A, B) max((A), (B)) +#define MAX3_DEPTH(A, B, C) Max3((A), (B), (C)) #endif -// Downsample a depth texture by taking the min value of sampled pixels -// The size of the dispatch is (DstMipSize / KernelSize). 
-[numthreads(KERNEL_SIZE, KERNEL_SIZE, 1)] -void KERNEL_NAME(uint3 dispatchThreadId : SV_DispatchThreadID) +uint2 CoordInTileByIndex(uint i) { - UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z); + // decode i = [yxxyyx] (we want each pair of bits to have an x and a y) + return uint2( + (i & 1) | ((i >> 2) & 6), + ((i >> 1) & 3) | ((i >> 3) & 4)); +} - uint2 srcOffset = _SrcOffsetAndLimit.xy; - uint2 srcLimit = _SrcOffsetAndLimit.zw; - uint2 dstOffset = _DstOffset.xy; +groupshared float s_minDepth[32]; +#ifdef ENABLE_CHECKERBOARD +groupshared float s_maxDepth[32]; +#endif - // Upper-left pixel coordinate of quad that this thread will read - uint2 srcPixelUL = srcOffset + (dispatchThreadId.xy << 1); +void SubgroupMergeDepths(uint threadID, uint bitIndex, inout float minDepth, inout float maxDepth) +{ + uint highIndex = threadID >> (bitIndex + 1); + uint lowIndex = threadID & ((1 << (bitIndex + 1)) - 1); - float p00 = _DepthMipChain[COORD_TEXTURE2D_X(min(srcPixelUL + uint2(0u, 0u), srcLimit))]; - float p10 = _DepthMipChain[COORD_TEXTURE2D_X(min(srcPixelUL + uint2(1u, 0u), srcLimit))]; - float p01 = _DepthMipChain[COORD_TEXTURE2D_X(min(srcPixelUL + uint2(0u, 1u), srcLimit))]; - float p11 = _DepthMipChain[COORD_TEXTURE2D_X(min(srcPixelUL + uint2(1u, 1u), srcLimit))]; - float4 depths = float4(p00, p10, p01, p11); + if (lowIndex == (1 << bitIndex)) + { + s_minDepth[highIndex] = minDepth; +#ifdef ENABLE_CHECKERBOARD + s_maxDepth[highIndex] = maxDepth; +#endif + } + GroupMemoryBarrierWithGroupSync(); - // Select the closest sample - float minDepth = MIN_DEPTH(MIN_DEPTH(depths.x, depths.y), MIN_DEPTH(depths.z, depths.w)); + if (lowIndex == 0) + { + minDepth = MIN_DEPTH(minDepth, s_minDepth[highIndex]); +#ifdef ENABLE_CHECKERBOARD + maxDepth = MAX_DEPTH(maxDepth, s_maxDepth[highIndex]); +#endif + } + GroupMemoryBarrierWithGroupSync(); +} - _DepthMipChain[COORD_TEXTURE2D_X(dstOffset + dispatchThreadId.xy)] = minDepth; +float CheckerboardDepth(uint2 coord, float minDepth, float 
maxDepth) +{ + return ((coord.x ^ coord.y) & 1) ? minDepth : maxDepth; } -#undef MIN_DEPTH +// Downsample a depth texture by taking the min value of sampled pixels +[numthreads(64, 1, 1)] +void KERNEL_NAME(uint threadID : SV_GroupThreadID, uint3 groupID : SV_GroupID) +{ + UNITY_XR_ASSIGN_VIEW_INDEX(groupID.z); + + // assign threads to pixels in a swizzle-like pattern + int2 dstCoord0 = (groupID.xy << 3) | CoordInTileByIndex(threadID); + + int2 readOffsetUL = dstCoord0 << 1; + float p00 = _DepthMipChain[COORD_TEXTURE2D_X(_SrcOffset + min(readOffsetUL + int2(0, 0), _SrcLimit))]; + float p10 = _DepthMipChain[COORD_TEXTURE2D_X(_SrcOffset + min(readOffsetUL + int2(1, 0), _SrcLimit))]; + float p01 = _DepthMipChain[COORD_TEXTURE2D_X(_SrcOffset + min(readOffsetUL + int2(0, 1), _SrcLimit))]; + float p11 = _DepthMipChain[COORD_TEXTURE2D_X(_SrcOffset + min(readOffsetUL + int2(1, 1), _SrcLimit))]; + float minDepth = MIN3_DEPTH(p00, p10, MIN_DEPTH(p01, p11)); + float maxDepth = MAX3_DEPTH(p00, p10, MAX_DEPTH(p01, p11)); + + // write dst0 + if (all(dstCoord0 < _DstSize0)) + { + _DepthMipChain[COORD_TEXTURE2D_X(_MinDstOffset0 + dstCoord0)] = minDepth; +#ifdef ENABLE_CHECKERBOARD + if (_CbDstCount >= 1) + _DepthMipChain[COORD_TEXTURE2D_X(_CbDstOffset0 + dstCoord0)] = CheckerboardDepth(dstCoord0, minDepth, maxDepth); +#endif + } + + // merge to thread 0 in subgroup size 4 + SubgroupMergeDepths(threadID, 0, minDepth, maxDepth); + SubgroupMergeDepths(threadID, 1, minDepth, maxDepth); + if (_MinDstCount >= 2 && (threadID & 0x3) == 0) + { + int2 dstCoord1 = dstCoord0 >> 1; + if (all(dstCoord1 < _DstSize1)) + { + _DepthMipChain[COORD_TEXTURE2D_X(_MinDstOffset1 + dstCoord1)] = minDepth; +#ifdef ENABLE_CHECKERBOARD + if (_CbDstCount >= 2) + _DepthMipChain[COORD_TEXTURE2D_X(_CbDstOffset1 + dstCoord1)] = CheckerboardDepth(dstCoord1, minDepth, maxDepth); +#endif + } + } + + // merge to thread 0 in subgroup size 16 + SubgroupMergeDepths(threadID, 2, minDepth, maxDepth); + 
SubgroupMergeDepths(threadID, 3, minDepth, maxDepth); + if (_MinDstCount >= 3 && (threadID & 0xf) == 0) + { + int2 dstCoord2 = dstCoord0 >> 2; + if (all(dstCoord2 < _DstSize2)) + _DepthMipChain[COORD_TEXTURE2D_X(_MinDstOffset2 + dstCoord2)] = minDepth; + } + + // merge to thread 0 + SubgroupMergeDepths(threadID, 4, minDepth, maxDepth); + SubgroupMergeDepths(threadID, 5, minDepth, maxDepth); + if (_MinDstCount >= 4 && (threadID & 0x3f) == 0) + { + int2 dstCoord3 = dstCoord0 >> 3; + if (all(dstCoord3 < _DstSize3)) + _DepthMipChain[COORD_TEXTURE2D_X(_MinDstOffset3 + dstCoord3)] = minDepth; + } +} diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/DepthPyramidConstants.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/DepthPyramidConstants.cs new file mode 100644 index 00000000000..ad8cdb9c544 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/DepthPyramidConstants.cs @@ -0,0 +1,27 @@ +namespace UnityEngine.Rendering.HighDefinition +{ + [GenerateHLSL(needAccessors = false, generateCBuffer = true)] + internal unsafe struct DepthPyramidConstants + { + public uint _MinDstCount; + public uint _CbDstCount; + public uint _DepthPyramidPad0; + public uint _DepthPyramidPad1; + + public Vector2Int _SrcOffset; + public Vector2Int _SrcLimit; + + public Vector2Int _DstSize0; + public Vector2Int _DstSize1; + public Vector2Int _DstSize2; + public Vector2Int _DstSize3; + + public Vector2Int _MinDstOffset0; + public Vector2Int _MinDstOffset1; + public Vector2Int _MinDstOffset2; + public Vector2Int _MinDstOffset3; + + public Vector2Int _CbDstOffset0; + public Vector2Int _CbDstOffset1; + } +} diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/DepthPyramidConstants.cs.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/DepthPyramidConstants.cs.hlsl new file 
mode 100644 index 00000000000..66957bf9095 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/DepthPyramidConstants.cs.hlsl @@ -0,0 +1,29 @@ +// +// This file was automatically generated. Please don't edit by hand. Execute Editor command [ Edit > Rendering > Generate Shader Includes ] instead +// + +#ifndef DEPTHPYRAMIDCONSTANTS_CS_HLSL +#define DEPTHPYRAMIDCONSTANTS_CS_HLSL +// Generated from UnityEngine.Rendering.HighDefinition.DepthPyramidConstants +// PackingRules = Exact +CBUFFER_START(DepthPyramidConstants) + uint _MinDstCount; + uint _CbDstCount; + uint _DepthPyramidPad0; + uint _DepthPyramidPad1; + int2 _SrcOffset; + int2 _SrcLimit; + int2 _DstSize0; + int2 _DstSize1; + int2 _DstSize2; + int2 _DstSize3; + int2 _MinDstOffset0; + int2 _MinDstOffset1; + int2 _MinDstOffset2; + int2 _MinDstOffset3; + int2 _CbDstOffset0; + int2 _CbDstOffset1; +CBUFFER_END + + +#endif diff --git a/Tests/SRPTests/Packages/com.unity.testing.graphics-performance/LICENSE.md.meta b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/DepthPyramidConstants.cs.hlsl.meta similarity index 61% rename from Tests/SRPTests/Packages/com.unity.testing.graphics-performance/LICENSE.md.meta rename to Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/DepthPyramidConstants.cs.hlsl.meta index 7dc26a5354d..faf0ea8ae8c 100644 --- a/Tests/SRPTests/Packages/com.unity.testing.graphics-performance/LICENSE.md.meta +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/DepthPyramidConstants.cs.hlsl.meta @@ -1,6 +1,6 @@ fileFormatVersion: 2 -guid: 245bb42c6352c484c82bcfe1b8467b05 -TextScriptImporter: +guid: a959eef549ac6b04bbebaf3004f2536e +ShaderIncludeImporter: externalObjects: {} userData: assetBundleName: diff --git a/Tests/SRPTests/Packages/com.unity.testing.graphics-performance/Editor/Common/EditorLogWatcher.cs.meta 
b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/DepthPyramidConstants.cs.meta similarity index 83% rename from Tests/SRPTests/Packages/com.unity.testing.graphics-performance/Editor/Common/EditorLogWatcher.cs.meta rename to Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/DepthPyramidConstants.cs.meta index 45723f3fc92..9e997336f74 100644 --- a/Tests/SRPTests/Packages/com.unity.testing.graphics-performance/Editor/Common/EditorLogWatcher.cs.meta +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/DepthPyramidConstants.cs.meta @@ -1,5 +1,5 @@ fileFormatVersion: 2 -guid: 1de3d930bbdeb444fb3244c98c41f863 +guid: 90584c2e6462b3f42bca3c8697990981 MonoImporter: externalObjects: {} serializedVersion: 2 diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/MipGenerator.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/MipGenerator.cs index d3af7c3e822..f0aea90f924 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/MipGenerator.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/RenderPass/MipGenerator.cs @@ -19,9 +19,6 @@ class MipGenerator int m_ColorDownsampleKernel; int m_ColorGaussianKernel; - int[] m_SrcOffset; - int[] m_DstOffset; - public MipGenerator(HDRenderPipeline renderPipeline) { m_TempColorTargets = new RTHandle[xrMaxSliceCount]; @@ -33,8 +30,6 @@ public MipGenerator(HDRenderPipeline renderPipeline) m_ColorDownsampleKernel = m_ColorPyramidCS.FindKernel("KColorDownsample"); m_ColorGaussianKernel = m_ColorPyramidCS.FindKernel("KColorGaussian"); - m_SrcOffset = new int[4]; - m_DstOffset = new int[4]; m_ColorPyramidPS = renderPipeline.runtimeShaders.colorPyramidPS; m_ColorPyramidPSMat = CoreUtils.CreateEngineMaterial(m_ColorPyramidPS); m_PropertyBlock = new MaterialPropertyBlock(); @@ 
-66,41 +61,60 @@ int xrMaxSliceCount // Generates an in-place depth pyramid // TODO: Mip-mapping depth is problematic for precision at lower mips, generate a packed atlas instead - public void RenderMinDepthPyramid(CommandBuffer cmd, RenderTexture texture, HDUtils.PackedMipChainInfo info, bool mip1AlreadyComputed) + public void RenderMinDepthPyramid(CommandBuffer cmd, RenderTexture texture, HDUtils.PackedMipChainInfo info) { HDUtils.CheckRTCreated(texture); var cs = m_DepthPyramidCS; int kernel = m_DepthDownsampleKernel; - // TODO: Do it 1x MIP at a time for now. In the future, do 4x MIPs per pass, or even use a single pass. + cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._DepthMipChain, texture); + // Note: Gather() doesn't take a LOD parameter and we cannot bind an SRV of a MIP level, // and we don't support Min samplers either. So we are forced to perform 4x loads. - for (int i = 1; i < info.mipLevelCount; i++) + for (int dstIndex0 = 1; dstIndex0 < info.mipLevelCount;) { - if (mip1AlreadyComputed && i == 1) continue; - - Vector2Int dstSize = info.mipLevelSizes[i]; - Vector2Int dstOffset = info.mipLevelOffsets[i]; - Vector2Int srcSize = info.mipLevelSizes[i - 1]; - Vector2Int srcOffset = info.mipLevelOffsets[i - 1]; - Vector2Int srcLimit = srcOffset + srcSize - Vector2Int.one; - - m_SrcOffset[0] = srcOffset.x; - m_SrcOffset[1] = srcOffset.y; - m_SrcOffset[2] = srcLimit.x; - m_SrcOffset[3] = srcLimit.y; - - m_DstOffset[0] = dstOffset.x; - m_DstOffset[1] = dstOffset.y; - m_DstOffset[2] = 0; - m_DstOffset[3] = 0; + int minCount = Mathf.Min(info.mipLevelCount - dstIndex0, 4); + int cbCount = 0; + if (dstIndex0 < info.mipLevelCountCheckerboard) + { + cbCount = info.mipLevelCountCheckerboard - dstIndex0; + Debug.Assert(dstIndex0 == 1, "expected to make checkerboard mips on the first pass"); + Debug.Assert(cbCount <= minCount, "expected fewer checkerboard mips than min mips"); + Debug.Assert(cbCount <= 2, "expected 2 or fewer checkerboard mips for now"); + } - 
cmd.SetComputeIntParams(cs, HDShaderIDs._SrcOffsetAndLimit, m_SrcOffset); - cmd.SetComputeIntParams(cs, HDShaderIDs._DstOffset, m_DstOffset); - cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._DepthMipChain, texture); + Vector2Int srcOffset = info.mipLevelOffsets[dstIndex0 - 1]; + Vector2Int srcSize = info.mipLevelSizes[dstIndex0 - 1]; + int dstIndex1 = Mathf.Min(dstIndex0 + 1, info.mipLevelCount - 1); + int dstIndex2 = Mathf.Min(dstIndex0 + 2, info.mipLevelCount - 1); + int dstIndex3 = Mathf.Min(dstIndex0 + 3, info.mipLevelCount - 1); + DepthPyramidConstants cb = new DepthPyramidConstants + { + _MinDstCount = (uint)minCount, + _CbDstCount = (uint)cbCount, + _SrcOffset = srcOffset, + _SrcLimit = srcSize - Vector2Int.one, + _DstSize0 = info.mipLevelSizes[dstIndex0], + _DstSize1 = info.mipLevelSizes[dstIndex1], + _DstSize2 = info.mipLevelSizes[dstIndex2], + _DstSize3 = info.mipLevelSizes[dstIndex3], + _MinDstOffset0 = info.mipLevelOffsets[dstIndex0], + _MinDstOffset1 = info.mipLevelOffsets[dstIndex1], + _MinDstOffset2 = info.mipLevelOffsets[dstIndex2], + _MinDstOffset3 = info.mipLevelOffsets[dstIndex3], + _CbDstOffset0 = info.mipLevelOffsetsCheckerboard[dstIndex0], + _CbDstOffset1 = info.mipLevelOffsetsCheckerboard[dstIndex1], + }; + ConstantBuffer.Push(cmd, cb, cs, HDShaderIDs._DepthPyramidConstants); + + CoreUtils.SetKeyword(cmd, cs, "ENABLE_CHECKERBOARD", cbCount != 0); + + Vector2Int dstSize = info.mipLevelSizes[dstIndex0]; cmd.DispatchCompute(cs, kernel, HDUtils.DivRoundUp(dstSize.x, 8), HDUtils.DivRoundUp(dstSize.y, 8), texture.volumeDepth); + + dstIndex0 += minCount; } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Utility/HDUtils.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Utility/HDUtils.cs index 1c299826ae6..329cd0c9594 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Utility/HDUtils.cs +++ 
b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Utility/HDUtils.cs @@ -630,44 +630,70 @@ internal static void RestoreRenderPipelineAsset(bool wasUnsetFromQuality, Render internal struct PackedMipChainInfo { public Vector2Int textureSize; - public int mipLevelCount; + public int mipLevelCount; // mips contain min (closest) depth + public int mipLevelCountCheckerboard; public Vector2Int[] mipLevelSizes; - public Vector2Int[] mipLevelOffsets; + public Vector2Int[] mipLevelOffsets; // mips contain min (closest) depth + public Vector2Int[] mipLevelOffsetsCheckerboard; private Vector2 cachedTextureScale; private Vector2Int cachedHardwareTextureSize; + private int cachedCheckerboardMipCount; private bool m_OffsetBufferWillNeedUpdate; public void Allocate() { mipLevelOffsets = new Vector2Int[15]; + mipLevelOffsetsCheckerboard = new Vector2Int[15]; mipLevelSizes = new Vector2Int[15]; m_OffsetBufferWillNeedUpdate = true; } + enum PackDirection + { + Right, + Down, + } + + static Vector2Int NextMipBegin(Vector2Int prevMipBegin, Vector2Int prevMipSize, PackDirection dir) + { + Vector2Int mipBegin = prevMipBegin; + if (dir == PackDirection.Right) + mipBegin.x += prevMipSize.x; + else + mipBegin.y += prevMipSize.y; + return mipBegin; + } + // We pack all MIP levels into the top MIP level to avoid the Pow2 MIP chain restriction. // We compute the required size iteratively. // This function is NOT fast, but it is illustrative, and can be optimized later. - public void ComputePackedMipChainInfo(Vector2Int viewportSize) + public void ComputePackedMipChainInfo(Vector2Int viewportSize, int checkerboardMipCount) { + // only support up to 2 mips of checkerboard data being created + checkerboardMipCount = Mathf.Clamp(checkerboardMipCount, 0, 2); + bool isHardwareDrsOn = DynamicResolutionHandler.instance.HardwareDynamicResIsEnabled(); Vector2Int hardwareTextureSize = isHardwareDrsOn ? 
DynamicResolutionHandler.instance.ApplyScalesOnSize(viewportSize) : viewportSize; Vector2 textureScale = isHardwareDrsOn ? new Vector2((float)viewportSize.x / (float)hardwareTextureSize.x, (float)viewportSize.y / (float)hardwareTextureSize.y) : new Vector2(1.0f, 1.0f); // No work needed. - if (cachedHardwareTextureSize == hardwareTextureSize && cachedTextureScale == textureScale) + if (cachedHardwareTextureSize == hardwareTextureSize && cachedTextureScale == textureScale && cachedCheckerboardMipCount == checkerboardMipCount) return; cachedHardwareTextureSize = hardwareTextureSize; cachedTextureScale = textureScale; + cachedCheckerboardMipCount = checkerboardMipCount; mipLevelSizes[0] = hardwareTextureSize; mipLevelOffsets[0] = Vector2Int.zero; + mipLevelOffsetsCheckerboard[0] = mipLevelOffsets[0]; int mipLevel = 0; Vector2Int mipSize = hardwareTextureSize; - + bool hasCheckerboard = (checkerboardMipCount != 0); + int maxCheckboardLevelCount = hasCheckerboard ? (1 + checkerboardMipCount) : 0; do { mipLevel++; @@ -678,26 +704,40 @@ public void ComputePackedMipChainInfo(Vector2Int viewportSize) mipLevelSizes[mipLevel] = mipSize; + Vector2Int prevMipSize = mipLevelSizes[mipLevel - 1]; Vector2Int prevMipBegin = mipLevelOffsets[mipLevel - 1]; - Vector2Int prevMipEnd = prevMipBegin + mipLevelSizes[mipLevel - 1]; + Vector2Int prevMipBeginCheckerboard = mipLevelOffsetsCheckerboard[mipLevel - 1]; - Vector2Int mipBegin = new Vector2Int(); - - if ((mipLevel & 1) != 0) // Odd + Vector2Int mipBegin = prevMipBegin; + Vector2Int mipBeginCheckerboard = prevMipBeginCheckerboard; + if (mipLevel == 1) { - mipBegin.x = prevMipBegin.x; - mipBegin.y = prevMipEnd.y; + // first mip always below full resolution + mipBegin = NextMipBegin(prevMipBegin, prevMipSize, PackDirection.Down); + + // pack checkerboard next to it if present + if (hasCheckerboard) + mipBeginCheckerboard = NextMipBegin(mipBegin, mipSize, PackDirection.Right); + else + mipBeginCheckerboard = mipBegin; } - else // Even + 
else { - mipBegin.x = prevMipEnd.x; - mipBegin.y = prevMipBegin.y; + // alternate directions, mip 2 starts with down if checkerboard, right if not + bool isOdd = ((mipLevel & 1) != 0); + PackDirection dir = (isOdd ^ hasCheckerboard) ? PackDirection.Down : PackDirection.Right; + + mipBegin = NextMipBegin(prevMipBegin, prevMipSize, dir); + mipBeginCheckerboard = NextMipBegin(prevMipBeginCheckerboard, prevMipSize, dir); } mipLevelOffsets[mipLevel] = mipBegin; + mipLevelOffsetsCheckerboard[mipLevel] = mipBeginCheckerboard; hardwareTextureSize.x = Math.Max(hardwareTextureSize.x, mipBegin.x + mipSize.x); hardwareTextureSize.y = Math.Max(hardwareTextureSize.y, mipBegin.y + mipSize.y); + hardwareTextureSize.x = Math.Max(hardwareTextureSize.x, mipBeginCheckerboard.x + mipSize.x); + hardwareTextureSize.y = Math.Max(hardwareTextureSize.y, mipBeginCheckerboard.y + mipSize.y); } while ((mipSize.x > 1) || (mipSize.y > 1)); @@ -705,6 +745,7 @@ public void ComputePackedMipChainInfo(Vector2Int viewportSize) (int)Mathf.Ceil((float)hardwareTextureSize.x * textureScale.x), (int)Mathf.Ceil((float)hardwareTextureSize.y * textureScale.y)); mipLevelCount = mipLevel + 1; + mipLevelCountCheckerboard = hasCheckerboard ? 
(1 + checkerboardMipCount) : 0; m_OffsetBufferWillNeedUpdate = true; } @@ -722,6 +763,10 @@ public ComputeBuffer GetOffsetBufferData(ComputeBuffer mipLevelOffsetsBuffer) internal static int DivRoundUp(int x, int y) => (x + y - 1) / y; + internal static Vector2Int DivRoundUp(Vector2Int n, int d) => new Vector2Int(HDUtils.DivRoundUp(n.x, d), HDUtils.DivRoundUp(n.y, d)); + internal static Vector2Int DivRoundUp(Vector2Int n, Vector2Int d) => new Vector2Int(HDUtils.DivRoundUp(n.x, d.x), HDUtils.DivRoundUp(n.y, d.y)); + internal static Vector3Int DivRoundUp(Vector3Int n, int d) => new Vector3Int(HDUtils.DivRoundUp(n.x, d), HDUtils.DivRoundUp(n.y, d), HDUtils.DivRoundUp(n.z, d)); + internal static bool IsQuaternionValid(Quaternion q) => (q[0] * q[0] + q[1] * q[1] + q[2] * q[2] + q[3] * q[3]) > float.Epsilon; diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/DownsampleDepth.shader b/Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/DownsampleDepth.shader index 18493c50f78..1a7c500bb24 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/DownsampleDepth.shader +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/DownsampleDepth.shader @@ -4,9 +4,7 @@ Shader "Hidden/HDRP/DownsampleDepth" #pragma target 4.5 #pragma editor_sync_compilation - #pragma multi_compile_local_fragment MIN_DOWNSAMPLE CHECKERBOARD_DOWNSAMPLE #pragma multi_compile_local_fragment _ GATHER_DOWNSAMPLE - #pragma multi_compile_local_fragment _ OUTPUT_FIRST_MIP_OF_MIPCHAIN #pragma only_renderers d3d11 playstation xboxone xboxseries vulkan metal switch #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl" #include "Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/ShaderVariables.hlsl" @@ -43,52 +41,16 @@ Shader "Hidden/HDRP/DownsampleDepth" #endif } - float MaxDepth(float4 depths) - { -#if UNITY_REVERSED_Z - return Min3(depths.x, depths.y, 
min(depths.z, depths.w)); -#else - return Max3(depths.x, depths.y, max(depths.z, depths.w)); -#endif - } - - TEXTURE2D_X(_SourceDownsampleDepth); - -#ifdef OUTPUT_FIRST_MIP_OF_MIPCHAIN - #ifdef SHADER_API_PSSL - RW_TEXTURE2D_X(float, _OutputTexture) : register(u0); - #else - RW_TEXTURE2D_X(float, _OutputTexture) : register(u1); - #endif - float4 _DstOffset; -#endif - float4 _ScaleBias; // x: uv offset x, uv offset y, uv x scale, uv y scale, void Frag(Varyings input, out float outputDepth : SV_Depth) { UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(input); #ifdef GATHER_DOWNSAMPLE - float4 depths = GATHER_RED_TEXTURE2D_X(_SourceDownsampleDepth, s_linear_clamp_sampler, input.texcoord * _ScaleBias.xy + _ScaleBias.zw); + float4 depths = GATHER_RED_TEXTURE2D_X(_CameraDepthTexture, s_linear_clamp_sampler, input.texcoord * _ScaleBias.xy + _ScaleBias.zw); outputDepth = MinDepth(depths); #else - uint2 fullResUpperCorner = uint2((((float2)input.positionCS.xy - 0.5f) * 2.0) + 0.5f); - float4 depths; - depths.x = LOAD_TEXTURE2D_X_LOD(_SourceDownsampleDepth, fullResUpperCorner, 0); - depths.y = LOAD_TEXTURE2D_X_LOD(_SourceDownsampleDepth, fullResUpperCorner + uint2(0, 1), 0); - depths.z = LOAD_TEXTURE2D_X_LOD(_SourceDownsampleDepth, fullResUpperCorner + uint2(1, 0), 0); - depths.w = LOAD_TEXTURE2D_X_LOD(_SourceDownsampleDepth, fullResUpperCorner + uint2(1, 1), 0); - - float minDepth = MinDepth(depths); - #if MIN_DOWNSAMPLE - outputDepth = minDepth; - #elif CHECKERBOARD_DOWNSAMPLE - outputDepth = (uint(input.positionCS.x + input.positionCS.y) & 1) > 0 ? 
minDepth : MaxDepth(depths); - #endif - - #ifdef OUTPUT_FIRST_MIP_OF_MIPCHAIN - _OutputTexture[COORD_TEXTURE2D_X(_DstOffset.xy + input.positionCS.xy)] = minDepth; - #endif + outputDepth = LOAD_TEXTURE2D_X_LOD(_CameraDepthTexture, uint2(input.positionCS.xy + _ScaleBias.zw), 0).r; #endif } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Sky/PhysicallyBasedSky/PhysicallyBasedSky.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Sky/PhysicallyBasedSky/PhysicallyBasedSky.cs index dc7d2b76fb2..93e91bb8e1b 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Sky/PhysicallyBasedSky/PhysicallyBasedSky.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Sky/PhysicallyBasedSky/PhysicallyBasedSky.cs @@ -168,11 +168,11 @@ public enum RenderingMode /// Horizon tint. Does not affect the precomputation. [Tooltip("Specifies a color that HDRP uses to tint the sky at the horizon. Does not affect the precomputation.")] - public ColorParameter horizonTint = new ColorParameter(Color.white, hdr: false, showAlpha: false, showEyeDropper: false); + public ColorParameter horizonTint = new ColorParameter(Color.white, hdr: false, showAlpha: false, showEyeDropper: true); /// Zenith tint. Does not affect the precomputation. [Tooltip("Specifies a color that HDRP uses to tint the point in the sky directly above the observer (the zenith). Does not affect the precomputation.")] - public ColorParameter zenithTint = new ColorParameter(Color.white, hdr: false, showAlpha: false, showEyeDropper: false); + public ColorParameter zenithTint = new ColorParameter(Color.white, hdr: false, showAlpha: false, showEyeDropper: true); /// Horizon-zenith shift. Does not affect the precomputation. [Tooltip("Controls how HDRP blends between the Horizon Tint and Zenith Tint. 
Does not affect the precomputation.")] diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Sky/PhysicallyBasedSky/PhysicallyBasedSkyRenderer.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Sky/PhysicallyBasedSky/PhysicallyBasedSkyRenderer.cs index f3fca312b6e..698e05c3a7a 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Sky/PhysicallyBasedSky/PhysicallyBasedSkyRenderer.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Sky/PhysicallyBasedSky/PhysicallyBasedSkyRenderer.cs @@ -311,6 +311,7 @@ public void BindBuffers(MaterialPropertyBlock mpb) static PrecomputationCache s_PrecomputationCache = new PrecomputationCache(); + const int k_MaxCelestialBodies = 16; static GraphicsBuffer s_CelestialBodyBuffer; static CelestialBodyData[] s_CelestialBodyData; static int s_DataFrameUpdate = -1; @@ -408,7 +409,6 @@ void UpdateCelestialBodyBuffer(CommandBuffer cmd, BuiltinSkyParameters builtinPa { if (s_CelestialBodyBuffer == null) { - int k_MaxCelestialBodies = 16; int stride = System.Runtime.InteropServices.Marshal.SizeOf(typeof(CelestialBodyData)); s_CelestialBodyBuffer = new GraphicsBuffer(GraphicsBuffer.Target.Structured, k_MaxCelestialBodies, stride); s_CelestialBodyData = new CelestialBodyData[k_MaxCelestialBodies]; @@ -428,12 +428,14 @@ void UpdateCelestialBodyBuffer(CommandBuffer cmd, BuiltinSkyParameters builtinPa { FillCelestialBodyData(cmd, light, ref s_CelestialBodyData[lightCount++]); exposure = Mathf.Max(light.legacyLight.intensity * -light.transform.forward.y, exposure); + if (lightCount >= k_MaxCelestialBodies) break; } } uint bodyCount = lightCount; foreach (var light in directionalLights) { + if (bodyCount >= k_MaxCelestialBodies) break; if (light.legacyLight.enabled && light.interactsWithSky && light.legacyLight.intensity == 0.0f) FillCelestialBodyData(cmd, light, ref s_CelestialBodyData[bodyCount++]); } diff --git 
a/Packages/com.unity.render-pipelines.high-definition/Runtime/Sky/SkyManager.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Sky/SkyManager.cs index 0c1e2de29f9..2498566bebb 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Sky/SkyManager.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Sky/SkyManager.cs @@ -1,6 +1,7 @@ using System; using System.Linq; using System.Collections.Generic; +using UnityEngine.SceneManagement; using UnityEngine.Experimental.Rendering; using UnityEngine.Rendering.RenderGraphModule; @@ -179,8 +180,9 @@ class SkyManager public static Dictionary cloudTypesDict { get { if (m_CloudTypesDict == null) UpdateCloudTypes(); return m_CloudTypesDict; } } // This list will hold the static lighting sky that should be used for baking ambient probe. - // In practice we will always use the last one registered but we use a list to be able to roll back to the previous one once the user deletes the superfluous instances. - private static List m_StaticLightingSkies = new List(); + // We can have multiple but we only want to use the one from the active scene + private static Dictionary m_StaticLightingSkies = new (); + private static StaticLightingSky m_ActiveStaticSky; // Only show the procedural sky upgrade message once static bool logOnce = true; @@ -1236,7 +1238,7 @@ public void UpdateEnvironment(RenderGraph renderGraph, HDCamera hdCamera, Light // because we only maintain one static sky. Since we don't care that the static lighting may be a bit different in the preview we never recompute // and we use the one from the main camera. 
bool forceStaticUpdate = false; - StaticLightingSky staticLightingSky = GetStaticLightingSky(); + m_ActiveStaticSky = m_StaticLightingSkies.GetValueOrDefault(SceneManager.GetActiveScene().GetHashCode(), null); #if UNITY_EDITOR // In the editor, we might need the static sky ready for baking lightmaps/lightprobes regardless of the current ambient mode so we force it to update in this case if it's not been computed yet.. // We always force an update of the static sky when we're in scene view mode. Previous behaviour was to prevent forced updates if the hash of the static sky was non-null, but this was preventing @@ -1246,9 +1248,12 @@ public void UpdateEnvironment(RenderGraph renderGraph, HDCamera hdCamera, Light #endif if ((ambientMode == SkyAmbientMode.Static || forceStaticUpdate) && hdCamera.camera.cameraType != CameraType.Preview) { - m_StaticLightingSky.skySettings = staticLightingSky != null ? staticLightingSky.skySettings : null; - m_StaticLightingSky.cloudSettings = staticLightingSky != null ? staticLightingSky.cloudSettings : null; - m_StaticLightingSky.volumetricClouds = staticLightingSky != null ? 
staticLightingSky.volumetricClouds : null; + if (m_ActiveStaticSky != null) + { + m_StaticLightingSky.skySettings = m_ActiveStaticSky.skySettings; + m_StaticLightingSky.cloudSettings = m_ActiveStaticSky.cloudSettings; + m_StaticLightingSky.volumetricClouds = m_ActiveStaticSky.volumetricClouds; + } UpdateEnvironment(renderGraph, hdCamera, m_StaticLightingSky, sunLight, m_StaticSkyUpdateRequired || m_UpdateRequired, true, true, SkyAmbientMode.Static); m_StaticSkyUpdateRequired = false; } @@ -1595,34 +1600,25 @@ public TextureHandle RenderOpaqueAtmosphericScattering(RenderGraph renderGraph, static public StaticLightingSky GetStaticLightingSky() { - if (m_StaticLightingSkies.Count == 0) - return null; - else - return m_StaticLightingSkies[m_StaticLightingSkies.Count - 1]; + return m_ActiveStaticSky; } static public void RegisterStaticLightingSky(StaticLightingSky staticLightingSky) { - if (!m_StaticLightingSkies.Contains(staticLightingSky)) + #if UNITY_EDITOR + if (staticLightingSky.staticLightingSkyUniqueID == (int)SkyType.Procedural && !skyTypesDict.TryGetValue((int)SkyType.Procedural, out var dummy)) { - if (m_StaticLightingSkies.Count != 0) - { - Debug.LogWarning("One Static Lighting Sky component was already set for baking, only the latest one will be used."); - } - - if (staticLightingSky.staticLightingSkyUniqueID == (int)SkyType.Procedural && !skyTypesDict.TryGetValue((int)SkyType.Procedural, out var dummy)) - { - Debug.LogError("You are using the deprecated Procedural Sky for static lighting in your Scene. You can still use it but, to do so, you must install it separately. To do this, open the Package Manager window and import the 'Procedural Sky' sample from the HDRP package page, then close and re-open your project without saving."); - return; - } - - m_StaticLightingSkies.Add(staticLightingSky); + Debug.LogError("You are using the deprecated Procedural Sky for static lighting in your Scene. You can still use it but, to do so, you must install it separately. 
To do this, open the Package Manager window and import the 'Procedural Sky' sample from the HDRP package page, then close and re-open your project without saving."); + return; } + #endif + + m_StaticLightingSkies[staticLightingSky.gameObject.scene.GetHashCode()] = staticLightingSky; } static public void UnRegisterStaticLightingSky(StaticLightingSky staticLightingSky) { - m_StaticLightingSkies.Remove(staticLightingSky); + m_StaticLightingSkies.Remove(staticLightingSky.gameObject.scene.GetHashCode()); } public Texture2D ExportSkyToTexture(Camera camera) diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Sky/StaticLightingSky.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Sky/StaticLightingSky.cs index db6512ec698..ee04eea0f21 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Sky/StaticLightingSky.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Sky/StaticLightingSky.cs @@ -37,6 +37,10 @@ public class StaticLightingSky : MonoBehaviour VolumetricClouds m_VolumetricClouds; VolumetricClouds m_VolumetricCloudSettingsFromProfile; + // Reflection Probes + [SerializeField, Range(1, 5), Tooltip("Controls how many times a reflection includes other reflections. 
A value of 1 results in the Scene being rendered once so mirrored reflections will be black.")] + internal int bounces = 1; + internal SkySettings skySettings { get diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Tools/.buginfo b/Packages/com.unity.render-pipelines.high-definition/Runtime/Tools/.buginfo new file mode 100644 index 00000000000..beeea491820 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Tools/.buginfo @@ -0,0 +1 @@ +area: Material diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Utilities/HDRenderPipelinePreferences.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Utilities/HDRenderPipelinePreferences.cs index 8991ed25bb4..40c8e11ba09 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Utilities/HDRenderPipelinePreferences.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Utilities/HDRenderPipelinePreferences.cs @@ -1,90 +1,36 @@ using System; +using UnityEditor; namespace UnityEngine.Rendering.HighDefinition { // This file can't be in the editor assembly as we need to access it in runtime-editor-specific - // places like OnGizmo etc and we don't want to add the editor assembly as a dependency of the - // runtime one #if UNITY_EDITOR - using UnityEditor; - using AntialiasingMode = HDAdditionalCameraData.AntialiasingMode; - - [InitializeOnLoad] static class HDRenderPipelinePreferences { - static bool m_Loaded = false; - - static bool s_MatcapMixAlbedo; - public static bool matcapViewMixAlbedo + public class MatCapModeEditorPreferences { - get => s_MatcapMixAlbedo; - set + static class Keys { - if (s_MatcapMixAlbedo == value) return; - s_MatcapMixAlbedo = value; - EditorPrefs.SetBool(Keys.matcapViewMixAlbedo, s_MatcapMixAlbedo); + internal const string matcapViewMixAlbedo = "HDRP.SceneView.MatcapMixAlbedo"; + internal const string matcapViewScale = "HDRP.SceneView.MatcapViewScale"; } - } - static float s_MatcapScale; 
- public static float matcapViewScale - { - get => s_MatcapScale; - set - { - if (s_MatcapScale == value) return; - s_MatcapScale = value; - EditorPrefs.SetFloat(Keys.matcapViewScale, s_MatcapScale); - } - } - - #region Decal Gizmo Color - - static readonly Color k_DecalGizmoColorBase = new Color(1, 1, 1, 8f / 255); - static Func GetColorPrefDecalGizmoColor; - public static Color decalGizmoColor => GetColorPrefDecalGizmoColor(); + public Observable mixAlbedo = new(true); + public Observable viewScale = new(1.0f); - #endregion - - static class Keys - { - internal const string sceneViewAntialiasing = "HDRP.SceneView.Antialiasing"; - internal const string sceneViewStopNaNs = "HDRP.SceneView.StopNaNs"; - internal const string matcapViewMixAlbedo = "HDRP.SceneView.MatcapMixAlbedo"; - internal const string matcapViewScale = "HDRP.SceneView.MatcapViewScale"; - } - - [SettingsProvider] - static SettingsProvider PreferenceGUI() - { - return new SettingsProvider("Preferences/HD Render Pipeline", SettingsScope.User) + public MatCapModeEditorPreferences() { - guiHandler = searchContext => - { - if (!m_Loaded) - Load(); - - matcapViewMixAlbedo = EditorGUILayout.Toggle("Mix Albedo in the Matcap", matcapViewMixAlbedo); - if (matcapViewMixAlbedo) - matcapViewScale = EditorGUILayout.FloatField("Matcap intensity scale", matcapViewScale); - } - }; - } + mixAlbedo.value = EditorPrefs.GetBool(Keys.matcapViewMixAlbedo, true); + mixAlbedo.onValueChanged += value => EditorPrefs.SetBool(Keys.matcapViewMixAlbedo, value); - static HDRenderPipelinePreferences() - { - Load(); + viewScale.value = EditorPrefs.GetFloat(Keys.matcapViewScale, 1.0f); + viewScale.onValueChanged += value => EditorPrefs.SetFloat(Keys.matcapViewScale, value); + } } - static void Load() - { - s_MatcapMixAlbedo = EditorPrefs.GetBool(Keys.matcapViewMixAlbedo, true); - s_MatcapScale = EditorPrefs.GetFloat(Keys.matcapViewScale, 1.0f); - GetColorPrefDecalGizmoColor = 
CoreRenderPipelinePreferences.RegisterPreferenceColor("Scene/Decal", k_DecalGizmoColorBase); - - m_Loaded = true; - } + private static Lazy s_MatCapModeEditorPreferences = new(() => new MatCapModeEditorPreferences()); + public static MatCapModeEditorPreferences matCapMode => s_MatCapModeEditorPreferences.Value; } #endif } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/VirtualTexturing/.buginfo b/Packages/com.unity.render-pipelines.high-definition/Runtime/VirtualTexturing/.buginfo new file mode 100644 index 00000000000..9d12516e2df --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/VirtualTexturing/.buginfo @@ -0,0 +1 @@ +area: Texture diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Water/HDRenderPipeline.WaterSystem.Foam.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Water/HDRenderPipeline.WaterSystem.Foam.cs index e5f68649e77..2ba77001938 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Water/HDRenderPipeline.WaterSystem.Foam.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Water/HDRenderPipeline.WaterSystem.Foam.cs @@ -193,7 +193,7 @@ void UpdateWaterFoamSimulation(CommandBuffer cmd, WaterSurface currentWater) return; // First we must ensure, that the texture is there (if it should be) and at the right resolution - currentWater.CheckFoamResources(); + currentWater.CheckFoamResources(cmd); // Skip if there are is foam to render if (!currentWater.foam) diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Water/WaterSurface/WaterSurface.Foam.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Water/WaterSurface/WaterSurface.Foam.cs index c4cf6211619..c49a3797f7a 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Water/WaterSurface/WaterSurface.Foam.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Water/WaterSurface/WaterSurface.Foam.cs @@ -123,7 +123,7 
@@ public enum WaterFoamResolution internal RTHandle[] foamBuffers = new RTHandle[2]; internal float4 previousFoamRegionScaleOffset; - internal void CheckFoamResources() + internal void CheckFoamResources(CommandBuffer cmd) { if (foam) { @@ -137,6 +137,9 @@ internal void CheckFoamResources() { foamBuffers[0] = RTHandles.Alloc(resolution, resolution, 1, dimension: TextureDimension.Tex2D, colorFormat: GraphicsFormat.R16G16_SFloat, enableRandomWrite: true, wrapMode: TextureWrapMode.Clamp); foamBuffers[1] = RTHandles.Alloc(resolution, resolution, 1, dimension: TextureDimension.Tex2D, colorFormat: GraphicsFormat.R16G16_SFloat, enableRandomWrite: true, wrapMode: TextureWrapMode.Clamp); + + // Clear buffer 0 only + CoreUtils.SetRenderTarget(cmd, foamBuffers[0], ClearFlag.Color, Color.black); } } else diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Water/WaterSurface/WaterSurface.Simulation.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Water/WaterSurface/WaterSurface.Simulation.cs index c8e20817632..5c6bdfa9967 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Water/WaterSurface/WaterSurface.Simulation.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Water/WaterSurface/WaterSurface.Simulation.cs @@ -148,6 +148,20 @@ public DateTime simulationStart } } + /// Current simulation time in seconds. + public float simulationTime + { + get + { + return simulation?.simulationTime ?? 
0.0f; + } + set + { + if (simulation != null) + simulation.simulationTime = value; + } + } + internal int numActiveBands => HDRenderPipeline.EvaluateBandCount(surfaceType, ripples); // Optional CPU simulation data diff --git a/Packages/com.unity.render-pipelines.high-definition/package.json b/Packages/com.unity.render-pipelines.high-definition/package.json index 10290581596..2a98203a6f6 100644 --- a/Packages/com.unity.render-pipelines.high-definition/package.json +++ b/Packages/com.unity.render-pipelines.high-definition/package.json @@ -5,13 +5,9 @@ "unity": "6000.0", "displayName": "High Definition RP", "dependencies": { - "com.unity.mathematics": "1.2.4", - "com.unity.collections": "2.2.0", - "com.unity.burst": "1.8.9", "com.unity.modules.video": "1.0.0", "com.unity.modules.animation": "1.0.0", "com.unity.modules.imageconversion": "1.0.0", - "com.unity.modules.terrain": "1.0.0", "com.unity.render-pipelines.core": "17.0.3", "com.unity.shadergraph": "17.0.3", "com.unity.visualeffectgraph": "17.0.3", diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/Images/Inspectors/light-inspector.png b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/Inspectors/light-inspector.png deleted file mode 100644 index 125f15ca015..00000000000 Binary files a/Packages/com.unity.render-pipelines.universal/Documentation~/Images/Inspectors/light-inspector.png and /dev/null differ diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/Images/renderingdebugger-gpuculling-heatmap.jpg b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/renderingdebugger-gpuculling-heatmap.jpg new file mode 100644 index 00000000000..024d6b6f534 Binary files /dev/null and b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/renderingdebugger-gpuculling-heatmap.jpg differ diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/Images/renderingdebugger-gpuculling-overlay.jpg 
b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/renderingdebugger-gpuculling-overlay.jpg new file mode 100644 index 00000000000..5898fbc4083 Binary files /dev/null and b/Packages/com.unity.render-pipelines.universal/Documentation~/Images/renderingdebugger-gpuculling-overlay.jpg differ diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/Lights-2D-intro.md b/Packages/com.unity.render-pipelines.universal/Documentation~/Lights-2D-intro.md index 1ddf1db50d6..1a1d872cd4e 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/Lights-2D-intro.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/Lights-2D-intro.md @@ -16,7 +16,7 @@ The 2D lighting model was designed specifically to work with 2D worlds that are The lighting calculation in 2D Lights is not physics based as it is with 3D Lights. The details of the lighting model calculation can be found here. ### No interoperability with 3D Lights and 3D Renderers -Currently both 3D and 2D Lights can only affect 3D and 2D Renderers respectively. 2D Lighting does not work on or effect 3D Renderers such as the [Mesh Renderer](https://docs.unity3d.com/Manual/class-MeshRenderer.html), while 3D Lighting will similarly have no effect on 2D Renderers such as the [Sprite Renderer](https://docs.unity3d.com/Manual/class-SpriteRenderer.html). While interoperability between the respective Lights and Renderers may be developed in the future, currently a combination of 2D and 3D Lights and 2D and 3D Renderers in a single scene can be achieved by using the camera stacking technique. +3D and 2D Lights can only affect 3D and 2D Renderers respectively. 2D Lighting does not work on or affect 3D Renderers such as the [Mesh Renderer](https://docs.unity3d.com/Manual/class-MeshRenderer.html), while 3D Lighting will similarly have no effect on 2D Renderers such as the [Sprite Renderer](https://docs.unity3d.com/Manual/class-SpriteRenderer.html).
Currently, to achieve a combination of 2D and 3D Lights and 2D and 3D Renderers in a single Scene, you can use multiple cameras and have one of the cameras render to a [Render Texture](https://docs.unity3d.com/Manual/class-RenderTexture.html), and sample that texture in a material rendered by another camera. ## Technical details of the 2D Lighting graphics pipeline The 2D Lighting graphics pipeline rendering process can be broken down into 2 distinct phases: diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/TableOfContents.md b/Packages/com.unity.render-pipelines.universal/Documentation~/TableOfContents.md index 02da07cd287..e18e880a843 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/TableOfContents.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/TableOfContents.md @@ -55,11 +55,13 @@ * [Rendering](rendering-in-universalrp.md) * [Rendering Layers](features/rendering-layers.md) * [Lighting](lighting.md) + * [Lighting in URP](lighting/lighting-in-urp.md) * [Light component reference](light-component.md) - * [Lighting Mode](urp-lighting-mode.md) - * [View and control a light from its perspective](lights-placement-tool.md) * [The Universal Additional Light Data component](universal-additional-light-data.md) + * [Lighting Mode](urp-lighting-mode.md) * [Shadows in the Universal Render Pipeline](Shadows-in-URP.md) + * [Reflection probes](lighting/reflection-probes.md) + * [View and control a light from its perspective](lights-placement-tool.md) * [Adaptive Probe Volumes (APV)](probevolumes.md) * [Understanding Adaptive Probe Volumes](probevolumes-concept.md) * [Use Adaptive Probe Volumes](probevolumes-use.md) @@ -67,16 +69,15 @@ * [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md) * [Bake multiple scenes together with Baking Sets](probevolumes-usebakingsets.md) * [Changing lighting at runtime](probe-volumes-change-lighting-at-runtime.md) - * [Choose how to change 
lighting at runtime](probevolumes-understand-changing-lighting-at-runtime.md) - * [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) - * [Update light from the sky at runtime with sky occlusion](probevolumes-skyocclusion.md) + * [Choose how to change lighting at runtime](probevolumes-understand-changing-lighting-at-runtime.md) + * [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) + * [Update light from the sky at runtime with sky occlusion](probevolumes-skyocclusion.md) * [Streaming](probevolumes-streaming.md) * [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) * [Adaptive Probe Volume Inspector window reference](probevolumes-inspector-reference.md) * [Adaptive Probe Volumes panel reference](probevolumes-lighting-panel-reference.md) * [Probe Volumes Options Override reference](probevolumes-options-override-reference.md) * [Probe Adjustment Volume component reference](probevolumes-adjustment-volume-component-reference.md) - * [Reflection probes](lighting/reflection-probes.md) * [Lens flares](shared/lens-flare/lens-flare.md) * [Choose a lens flare type](shared/lens-flare/choose-a-lens-flare-type.md) * [Add lens flares](shared/lens-flare/lens-flare-component.md) @@ -99,6 +100,8 @@ * [Render a camera's output to a Render Texture](rendering-to-a-render-texture.md) * [Customize a camera](universal-additional-camera-data.md) * [Camera component properties](camera-component-reference.md) + * [Physical Camera properties](cameras/physical-camera-reference.md) + * [Render Requests](User-Render-Requests.md) * [Post-processing](integration-with-post-processing.md) * [How to configure](integration-with-post-processing.md#post-proc-how-to) * [HDR Output](post-processing/hdr-output.md) @@ -178,7 +181,6 @@ * [Access a texture in a custom render pass](render-graph-read-write-texture.md) * [Transfer a texture between render 
passes](render-graph-pass-textures-between-passes.md) * [URP blit best practices](customize/blit-overview.md) - * [Perform a full screen blit in URP](renderer-features/how-to-fullscreen-blit.md) * [Draw objects in a render pass](render-graph-draw-objects-in-a-pass.md) * [Use frame data](accessing-frame-data.md) * [Analyze a render graph](render-graph-view.md) @@ -197,10 +199,15 @@ * [Write a Scriptable Render Pass in Compatibility Mode](renderer-features/write-a-scriptable-render-pass.md) * [Example of a complete Scriptable Renderer Feature in Compatibility Mode](renderer-features/create-custom-renderer-feature-compatibility-mode.md) * [Scriptable Render Pass Compatibility Mode API reference](renderer-features/scriptable-renderer-features/scriptable-render-pass-reference.md) + * [Perform a full screen blit in URP in Compatibility mode](renderer-features/how-to-fullscreen-blit.md) * [Optimization](urp-optimization.md) * [Rendering Debugger](features/rendering-debugger.md) * [Add controls to the Rendering Debugger](features/rendering-debugger-add-controls.md) * [Optimize for better performance](optimize-for-better-performance.md) + * [Reduce rendering work on the CPU](reduce-rendering-work-on-cpu.md) + * [Use the GPU Resident Drawer](gpu-resident-drawer.md) + * [Make a GameObject compatible with the GPU Resident Drawer](make-object-compatible-gpu-rendering.md) + * [Use GPU occlusion culling](gpu-culling.md) * [Update Quality Setting Presets for URP](birp-onboarding/quality-presets.md) * [2D graphics features](2d-index.md) * [Introduction to the 2D Lighting system](Lights-2D-intro.md) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/User-Render-Requests.md b/Packages/com.unity.render-pipelines.universal/Documentation~/User-Render-Requests.md new file mode 100644 index 00000000000..401fe8a13bd --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/User-Render-Requests.md @@ -0,0 +1,88 @@ +# Render Requests + +For a 
general documentation, see the [Core Package](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@latest/User-Render-Requests.html) about Render Requests. + +## Use UniversalRenderPipeline.SingleCameraRequest + +`UniversalRenderPipeline.SingleCameraRequest` renders a single camera, without taking into account the full stack of cameras. + +You can still hook into callbacks from [RenderPipelineManager](https://docs.unity3d.com/ScriptReference/Rendering.RenderPipelineManager.html). + +The following code sample shows that you can hook into [RenderPipelineManager.endContextRendering](https://docs.unity3d.com/ScriptReference/Rendering.RenderPipelineManager-endContextRendering.html) when using `UniversalRenderPipeline.SingleCameraRequest`. + +To try out this example: + +- Attach the script to a **GameObject** in the **Scene**. +- Configure the **cams** and **rts**. +- Set **useSingleCameraRequestValues** to true or false depending on which type of render request you want to use. +- Select **Enter Play Mode**. +- See the **Console** log.
+ +``` +using System.Collections; +using System.Collections.Generic; +using System.Text; +using UnityEngine; +using UnityEngine.Rendering; +using UnityEngine.Rendering.Universal; + +public class SingleCameraRenderRequestExample : MonoBehaviour +{ + public Camera[] cams; + public RenderTexture[] rts; + + void Start() + { + if (cams == null || cams.Length == 0 || rts == null || cams.Length != rts.Length) + { + Debug.LogError("Invalid setup"); + return; + } + + StartCoroutine(RenderSingleRequestNextFrame()); + RenderPipelineManager.endContextRendering += OnEndContextRendering; + } + + void OnEndContextRendering(ScriptableRenderContext context, List cameras) + { + var stb = new StringBuilder($"Cameras Count from EndContextRendering: {cameras.Count}."); + foreach (var cam in cameras) + { + stb.AppendLine($"- {cam.name}"); + } + Debug.Log(stb.ToString()); + } + + void OnDestroy() + { + RenderPipelineManager.endContextRendering -= OnEndContextRendering; + } + + IEnumerator RenderSingleRequestNextFrame() + { + yield return new WaitForEndOfFrame(); + + SendSingleRenderRequests(); + + yield return new WaitForEndOfFrame(); + + StartCoroutine(RenderSingleRequestNextFrame()); + } + + void SendSingleRenderRequests() + { + for (int i = 0; i < cams.Length; i++) + { + UniversalRenderPipeline.SingleCameraRequest request = + new UniversalRenderPipeline.SingleCameraRequest(); + + // Check if the request is supported by the active render pipeline + if (RenderPipeline.SupportsRenderRequest(cams[i], request)) + { + request.destination = rts[i]; + RenderPipeline.SubmitRenderRequest(cams[i], request); + } + } + } +} +``` diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/camera-component-reference.md b/Packages/com.unity.render-pipelines.universal/Documentation~/camera-component-reference.md index 66a199a4cb5..ffc06009b56 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/camera-component-reference.md +++ 
b/Packages/com.unity.render-pipelines.universal/Documentation~/camera-component-reference.md @@ -22,51 +22,24 @@ Overlay cameras expose the following properties: ## Projection -| **Property** || **Description** | -| ------------------------ | - | :-------------------------------------------------------------------- | -| **Projection** || Control how the camera simulates perspective. | -|| *Perspective* | Render objects with perspective intact. | -|| *Orthographic* | Render objects uniformly, with no sense of perspective. | -| **Field of View Axis** || Set the axis Unity measures the camera's field of view along.

    Available options:
    • **Vertical**
    • **Horizontal**
    This property is only visible when **Projection** is set to **Perspective**. | -| **Field of View** || Set the width of the camera's view angle, measured in degrees along the selected axis.

    This property is only visible when **Projection** is set to **Perspective**. | -| **Size** || Set the viewport size of the camera.

    This property is only visible when **Projection** is set to **Orthographic**. | -| **Clipping Planes** || Set the distances from the camera where rendering starts and stops. | -|| *Near* | The closest point relative to the camera that drawing will occur. | -|| *Far* | The furthest point relative to the camera that drawing will occur. | -| **Physical Camera** || Enable Physical Camera properties for this camera.

    When the Physical Camera properties are enabled, Unity calculates the **Field of View** using the properties that simulate real-world camera attributes: **Focal Length**, **Sensor Size**, and **Lens Shift**.

    Physical Camera properties are not visible in the Inspector until you tick this box.

    This property is only visible when **Projection** is set to **Perspective**. | +| **Property** | **Description** | +| ------------------------ | :-------------------------------------------------------------------- | +| **Projection** | Control how the camera simulates perspective. | +|     **Perspective** | Render objects with perspective intact. | +|     **Orthographic** | Render objects uniformly, with no sense of perspective. | +| **Field of View Axis** | Set the axis Unity measures the camera's field of view along.

    Available options:
    • **Vertical**
    • **Horizontal**
    This property is only visible when **Projection** is set to **Perspective**. | +| **Field of View** | Set the width of the camera's view angle, measured in degrees along the selected axis.

    This property is only visible when **Projection** is set to **Perspective**. | +| **Size** | Set the viewport size of the camera.

    This property is only visible when **Projection** is set to **Orthographic**. | +| **Clipping Planes** | Set the distances from the camera where rendering starts and stops. | +|     **Near** | The closest point relative to the camera where drawing occurs. | +|     **Far** | The furthest point relative to the camera where drawing occurs. | +| **Physical Camera** | Displays additional properties for the camera in the Inspector to simulate a physical camera. A physical camera calculates the Field of View with properties simulating real-world camera attributes: **Focal Length**, **Sensor Size**, and **Shift**.

    The **Physical Camera** property is only available when **Projection** is set to **Perspective**. | ### Physical Camera -| **Property** || **Description** | -| ------------------------ | - | :-------------------------------------------------------------------- | -| **Camera Body** || -|| *Sensor Type* | Specify the real-world camera format you want the camera to simulate. Choose the desired format from the list.

    When you choose a camera format, Unity sets the the **Sensor Size > X** and **Y** properties to the correct values automatically.

    If you change the **Sensor Size** values manually, Unity automatically sets this property to **Custom**. | -|| *Sensor Size* | Set the size, in millimeters, of the camera sensor.

    Unity sets the **X** and **Y** values automatically when you choose the **Sensor Type**. You can enter custom values if needed. | -||         *X* | Set the horizontal size of the camera sensor. | -||         *Y* | Set the vertical size of the camera sensor. | -|| *ISO* | Set the light sensitivity of the camera sensor. | -|| *Shutter Speed* | Set the amount of time the camera sensor captures light. | -||                 *Shutter Speed Unit* | Select the unit of measurement for **Shutter Speed**.

    Available options:
    • **Second**
    • **1/Second**
    | -|| *Gate Fit* | Options for changing the size of the **resolution gate** (size/aspect ratio of the game view) relative to the **film gate** (size/aspect ratio of the Physical Camera sensor).

    For further information about resolution gate and film gate, refer to the documentation on [Physical Cameras](https://docs.unity3d.com/Manual/PhysicalCameras.html). | -||                 *Vertical* | Fits the resolution gate to the height of the film gate.

    If the sensor aspect ratio is larger than the game view aspect ratio, Unity crops the rendered image at the sides.

    If the sensor aspect ratio is smaller than the game view aspect ratio, Unity overscans the rendered image at the sides.

    When you choose this setting, changing the sensor width (**Sensor Size > X property**) has no effect on the rendered image. | -||                 *Horizontal* | Fits the resolution gate to the width of the film gate.

    If the sensor aspect ratio is larger than the game view aspect ratio, Unity overscans the rendered image on the top and bottom.

    If the sensor aspect ratio is smaller than the game view aspect ratio, Unity crops the rendered image on the top and bottom.

    When you choose this setting, changing the sensor height (**Sensor Size > Y** property) has no effect on the rendered image. | -||                 *Fill* | Fits the resolution gate to either the width or height of the film gate, whichever is smaller. This crops the rendered image. | -||                 *Overscan* | Fits the resolution gate to either the width or height of the film gate, whichever is larger. This overscans the rendered image. | -||                 *None* | Ignores the resolution gate and uses the film gate only. This stretches the rendered image to fit the game view aspect ratio. | -| **Lens** || -|| *Focal Length* | Set the distance, in millimeters, between the camera sensor and the camera lens.

    Lower values result in a wider **Field of View**, and vice versa.

    When you change this value, Unity automatically updates the **Field of View** property accordingly. | -|| *Shift* | Shift the lens horizontally or vertically from center. Values are multiples of the sensor size; for example, a shift of 0.5 along the X axis offsets the sensor by half its horizontal size.

    You can use lens shifts to correct distortion that occurs when the camera is at an angle to the subject (for example, converging parallel lines).

    Shift the lens along either axis to make the camera frustum [oblique](https://docs.unity3d.com/Manual/ObliqueFrustum.html). | -||         *X* | Set the horizontal offset of the lens from the camera sensor. | -||         *Y* | Set the vertical offset of the lens from the camera sensor. | -|| *Aperture* | Set the f-stop (f-number) of the lens. A lower value gives a wider lens aperture. | -|| *Focus Distance* | Set the distance from the camera where objects appear sharp when you enable Depth of Field. | -| **Aperture Shape** || -|| *Blade Count* | The number of blades in the lens aperture. A higher value gives a rounder aperture shape. | -|| *Curvature* | Set the curvature of the lens aperture blades. | -|| *Barrel Clipping* | Set the self-occlusion of the lens. A higher value creates a cat's eye effect. | -|| *Anamorphism* | Set the stretch of the sensor. A higher value increases the stretch of the sensor to simulate an anamorphic look. | +The **Physical Camera** property adds additional properties to the camera to simulate a real-world camera. For more information, refer to the [Physical Camera reference](cameras/physical-camera-reference.md). @@ -77,20 +50,20 @@ Overlay cameras expose the following properties: | **Renderer** | Select which renderer this camera uses. | | **Post Processing** | Enable post-processing effects. | | **Anti-Aliasing** | Select the method that this camera uses for post-process anti-aliasing. A camera can still use Multisample Anti-aliasing (MSAA), which is a hardware feature, at the same time as post-process anti-aliasing unless you use Temporal Anti-aliasing.

    The following Anti-aliasing options are available:
    • **None**: This camera can process MSAA but does not process any post-process anti-aliasing.
    • **Fast Approximate Anti-aliasing (FXAA)**: Performs a full screen pass which smooths edges on a per-pixel level.
    • **Subpixel Morphological Anti-aliasing (SMAA)**: Finds edge patterns in the image and blends the pixels on these edges according to those patterns.
    • **Temporal Anti-aliasing (TAA)**: Uses previous frames accumulated into a color history buffer to smooth edges over the course of multiple frames.
    For more information, refer to [Anti-aliasing in the Universal Render Pipeline](anti-aliasing.md).

    This property is only visible when **Render Type** is set to **Base**. | -|         *Quality (SMAA)* | Select the quality of SMAA. The difference in resource intensity is fairly small between **Low** and **High**.

    Available options:
    • **Low**
    • **Medium**
    • **High**
    This property only appears when you select **Subpixel Morphological Anti-aliasing (SMAA)** from the **Anti-aliasing** drop-down. | -|         *Quality (TAA)* | Select the quality of TAA.

    Available options:
    • **Very Low**
    • **Low**
    • **Medium**
    • **High**
    • **Very High**
    This property only appears when you select **Temporal Anti-aliasing (TAA)** from the **Anti-aliasing** drop-down. | -|         *Contrast Adaptive Sharpening* | Enable high quality post sharpening to reduce TAA blur.

    This setting is overridden when you enable either [AMD FidelityFX Super Resolution (FSR) or Scalable Temporal Post-Processing (STP)](universalrp-asset.md#quality) upscaling in the URP Asset as they both handle sharpening as part of the upscaling process.

    This property only appears when you select **Temporal Anti-aliasing (TAA)** from the **Anti-aliasing** drop-down. | -|         *Base Blend Factor* | Set how much the history buffer blends with the current frame result. Higher values mean more history contribution, which improves the anti-aliasing, but also increases the chance of ghosting.

    This property only appears when you select **Temporal Anti-aliasing (TAA)** from the **Anti-aliasing** drop-down and enable **Show Additional Properties** in the Inspector. | -|         *Jitter Scale* | Set the scale of the jitter applied when TAA is enabled. A lower value reduces visible flickering and jittering, but also reduces the effectiveness of the anti-aliasing.

    This property only appears when you select **Temporal Anti-aliasing (TAA)** from the **Anti-aliasing** drop-down and enable **Show Additional Properties** in the Inspector. | -|         *Mip Bias* | Set how much texture mipmap selection is biased when rendering.

    A positive bias makes a texture appear more blurry, while a negative bias sharpens the texture. However, a lower value also has a negative impact on performance.

    **Note**: Requires mipmaps in textures.

    This property only appears when you select **Temporal Anti-aliasing (TAA)** from the **Anti-aliasing** drop-down and enable **Show Additional Properties** in the Inspector. | -|         *Variance Clamp Scale* | Set the size of the color volume Unity uses to find nearby pixels when the color history is incorrect or unavailable. To do this, the clamp limits how much a pixel's color can vary from the color of the pixels that surround it.

    Lower values can reduce ghosting, but produce more flickering. Higher values reduce flickering, but are prone to blur and ghosting.

    This property only appears when you select **Temporal Anti-aliasing (TAA)** from the **Anti-aliasing** drop-down and enable **Show Additional Properties** in the Inspector. | -| **Stop NaNs** | Enable the checkbox to make this camera replace values that are Not a Number (NaN) with a black pixel. This stops certain effects from breaking, but is a resource-intensive process. Only enable this feature if you experience NaN issues that you can not fix.

    This property is only visible when **Render Type** is set to **Base**. | -| **Dithering** | Enable the checkbox to apply 8-bit dithering to the final render. This can help reduce banding on wide gradients and low light areas.

    This property is only visible when **Render Type** is set to **Base**. | +|         **Quality (SMAA)** | Select the quality of SMAA. The difference in resource intensity is fairly small between **Low** and **High**.

    Available options:
    • **Low**
    • **Medium**
    • **High**
    This property only appears when you select **Subpixel Morphological Anti-aliasing (SMAA)** from the **Anti-aliasing** drop-down. | +|         **Quality (TAA)** | Select the quality of TAA.

    Available options:
    • **Very Low**
    • **Low**
    • **Medium**
    • **High**
    • **Very High**
    This property only appears when you select **Temporal Anti-aliasing (TAA)** from the **Anti-aliasing** drop-down. | +|         **Contrast Adaptive Sharpening** | Enable high quality post sharpening to reduce TAA blur.

    This setting is overridden when you enable either [AMD FidelityFX Super Resolution (FSR) or Scalable Temporal Post-Processing (STP)](universalrp-asset.md#quality) upscaling in the URP Asset as they both handle sharpening as part of the upscaling process.

    This property only appears when you select **Temporal Anti-aliasing (TAA)** from the **Anti-aliasing** drop-down. | +|         **Base Blend Factor** | Set how much the history buffer blends with the current frame result. Higher values mean more history contribution, which improves the anti-aliasing, but also increases the chance of ghosting.

    This property only appears when you select **Temporal Anti-aliasing (TAA)** from the **Anti-aliasing** drop-down and enable **Show Additional Properties** in the Inspector. | +|         **Jitter Scale** | Set the scale of the jitter applied when TAA is enabled. A lower value reduces visible flickering and jittering, but also reduces the effectiveness of the anti-aliasing.

    This property only appears when you select **Temporal Anti-aliasing (TAA)** from the **Anti-aliasing** drop-down and enable **Show Additional Properties** in the Inspector. | +|         **Mip Bias** | Set how much texture mipmap selection is biased when rendering.

    A positive bias makes a texture appear more blurry, while a negative bias sharpens the texture. However, a lower value also has a negative impact on performance.

    **Note**: Requires mipmaps in textures.

    This property only appears when you select **Temporal Anti-aliasing (TAA)** from the **Anti-aliasing** drop-down and enable **Show Additional Properties** in the Inspector. | +|         **Variance Clamp Scale** | Set the size of the color volume Unity uses to find nearby pixels when the color history is incorrect or unavailable. The clamp limits how much a pixel's color can vary from the color of the surrounding pixels.

    Lower values can reduce ghosting, but produce more flickering. Higher values reduce flickering, but are prone to blur and ghosting.

    This property only appears when you select **Temporal Anti-aliasing (TAA)** from the **Anti-aliasing** drop-down and enable **Show Additional Properties** in the Inspector. | +| **Stop NaNs** | Replaces Not a Number (NaN) values with a black pixel for the camera. This stops certain effects from breaking, but is a resource-intensive process which causes a negative performance impact. Only enable this feature if you experience NaN issues you can't fix.

    The Stop NaNs pass executes at the start of the post-processing passes. You must enable **Post Processing** for the camera to use **Stop NaNs**.

    Only available when **Render Type** is set to **Base**. | +| **Dithering** | Enable to apply 8-bit dithering to the final render to help reduce banding on wide gradients and low light areas.

    This property is only visible when **Render Type** is set to **Base**. | | **Clear Depth** | Enable to clear depth from previous camera on rendering.

    This property is only visible when **Render Type** is set to **Overlay**. | | **Render Shadows** | Enable shadow rendering. | | **Priority** | A camera with a higher priority is drawn on top of a camera with a lower priority. Priority has a range from -100 to 100.

    This property is only visible when **Render Type** is set to **Base**. | | **Opaque Texture** | Control whether the camera creates a CameraOpaqueTexture, which is a copy of the rendered view.

    Available options:
    • **Off**: Camera does not create a CameraOpaqueTexture.
    • **On**: Camera creates a CameraOpaqueTexture.
    • **Use Pipeline Settings**: The Render Pipeline Asset determines the value of this setting.
    This property is only visible when **Render Type** is set to **Base**. | -| **Depth Texture** | Control whether the camera creates `_CameraDepthTexture`, which is a copy of the rendered depth values.

    Available options:
    • **Off**: Camera does not create a CameraDepthTexture.
    • **On**: Camera creates a CameraDepthTexture.
    • **Use Pipeline Settings**: The Render Pipeline Asset determines the value of this setting.
    **Note**: `_CameraDepthTexture` is set between the `AfterRenderingSkybox` and `BeforeRenderingTransparents` events, or at the `BeforeRenderingOpaques` event if you use a depth prepass. For more information on the order of events in the rendering loop, refer to [Injection points](/customize/custom-pass-injection-points.md). | +| **Depth Texture** | Control whether the camera creates `_CameraDepthTexture`, which is a copy of the rendered depth values.

    Available options:
    • **Off**: Camera does not create a CameraDepthTexture.
    • **On**: Camera creates a CameraDepthTexture.
    • **Use Pipeline Settings**: The Render Pipeline Asset determines the value of this setting.
    **Note**: `_CameraDepthTexture` is set between the `AfterRenderingSkybox` and `BeforeRenderingTransparents` events, or at the `BeforeRenderingOpaques` event if you use a depth prepass. For more information on the order of events in the rendering loop, refer to [Injection points](customize/custom-pass-injection-points.md). | | **Culling Mask** | Select which Layers the camera renders to. | | **Occlusion Culling** | Enable Occlusion Culling. | @@ -112,13 +85,13 @@ You can use the stack property add Overlay cameras to the stack and they will re | **Property** | **Description** | | -------------------------- | ------------------------------------------------------------ | | **Background Type** | Control how to initialize the color buffer at the start of this camera's render loop. For more information, refer to [the documentation on clearing](cameras-advanced.md#clearing).

    This property is only visible when **Render Type** is set to **Base**. | -|         *Skybox* | Initializes the color buffer by clearing to a Skybox. Defaults to a background color if no Skybox is found. | -|         *Solid Color* | Initializes the color buffer by clearing to a given color.
    If you select this property, Unity shows the following extra property:
    **Background**: The camera clears its color buffer to this color before rendering. | -|         *Uninitialized* | Does not initialize the color buffer. This means that the load action for that specific RenderTarget will be `DontCare` instead of `Load` or `Clear`. `DontCare` specifies that the previous contents of the RenderTarget don't need to be preserved.

    Only use this option in order to optimize performance in situations where your camera or Camera Stack will draw to every pixel in the color buffer, otherwise the behaviour of pixels the camera doesn't draw is undefined.

    **Note**: The results might look different between Editor and Player, as the Editor doesn't run on Tile-Based Deferred Rendering (TBDR) GPUs (found in mobile devices). If you use this option on TBDR GPUs, it causes uninitialized tile memory and the content is undefined. | +|         **Skybox** | Initializes the color buffer by clearing to a Skybox. Defaults to a background color if no Skybox is found. | +|         **Solid Color** | Initializes the color buffer by clearing to a given color.
    If you select this property, Unity shows the following extra property:
    **Background**: The camera clears its color buffer to this color before rendering. | +|         **Uninitialized** | Does not initialize the color buffer. This means that the load action for that specific RenderTarget will be `DontCare` instead of `Load` or `Clear`. `DontCare` specifies that the previous contents of the RenderTarget don't need to be preserved.

    Only use this option in order to optimize performance in situations where your camera or Camera Stack will draw to every pixel in the color buffer, otherwise the behaviour of pixels the camera doesn't draw is undefined.

    **Note**: The results might look different between Editor and Player, as the Editor doesn't run on Tile-Based Deferred Rendering (TBDR) GPUs (found in mobile devices). If you use this option on TBDR GPUs, it causes uninitialized tile memory and the content is undefined. | | **Volumes** | The settings in this section define how Volumes affect this camera. | -|         *Update Mode* | Select how Unity updates Volumes.

    Available options:
    • **Every Frame**: Update Volumes with every frame Unity renders.
    • **Via Scripting**: Only update volumes when triggered by a script.
    • **Use Pipeline Settings**: Use the default setting for the Render Pipeline.
    | -|         *Volume Mask* | Use the drop-down to set the Layer Mask that defines which Volumes affect this camera. | -|         *Volume Trigger* | Assign a Transform that the [Volume](Volumes.md) system uses to handle the position of this camera. For example, if your application uses a third person view of a character, set this property to the character's Transform. The camera then uses the post-processing and scene settings for Volumes that the character enters. If you do not assign a Transform, the camera uses its own Transform instead. | +|         **Update Mode** | Select how Unity updates Volumes.

    Available options:
    • **Every Frame**: Update Volumes with every frame Unity renders.
    • **Via Scripting**: Only update volumes when triggered by a script.
    • **Use Pipeline Settings**: Use the default setting for the Render Pipeline.
    | +|         **Volume Mask** | Use the drop-down to set the Layer Mask that defines which Volumes affect this camera. | +|         **Volume Trigger** | Assign a Transform that the [Volume](Volumes.md) system uses to handle the position of this camera. For example, if your application uses a third person view of a character, set this property to the character's Transform. The camera then uses the post-processing and scene settings for Volumes that the character enters. If you do not assign a Transform, the camera uses its own Transform instead. | @@ -142,10 +115,10 @@ This section is only available if you set the **Render Type** to **Base** | **Target Display** | Select which external device to render to. | | **Target Eye** | Select the target eye for this camera.

    Available options:
    • **Both**: Allows XR rendering from the selected camera.
    • **None**: Disables XR rendering for the selected camera.
    | | **Viewport Rect** | Four values that indicate where on the screen this camera view is drawn. Measured in Viewport Coordinates (values 0-1). | -|         *X* | The beginning horizontal position Unity uses to draw the camera view. | -|         *Y* | The beginning vertical position Unity uses to draw the camera view. | -|         *W* (Width) | Width of the camera output on the screen. | -|         *H* (Height) | Height of the camera output on the screen. | +|     **X** | The beginning horizontal position Unity uses to draw the camera view. | +|     **Y** | The beginning vertical position Unity uses to draw the camera view. | +|     **W** | Width of the camera output on the screen. | +|     **H** | Height of the camera output on the screen. | | **HDR Rendering** | Enable High Dynamic Range rendering for this camera. | | **MSAA** | Enable [Multisample Anti-aliasing](anti-aliasing.md#msaa) for this camera. | | **Allow Dynamic Resolution** | Enable Dynamic Resolution rendering for this camera. | diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/cameras.md b/Packages/com.unity.render-pipelines.universal/Documentation~/cameras.md index f8c3996208a..b034682e25d 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/cameras.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/cameras.md @@ -4,10 +4,10 @@ A Camera in Unity works like a camera in the real world: it captures a view of o | Page | Description | |-|-| -| [Cameras in URP](/cameras/camera-differences-in-urp.md)|Understand the differences between Unity's built-in camera and the URP camera.| +| [Cameras in URP](cameras/camera-differences-in-urp.md)| Understand the differences between Unity's built-in camera and the URP camera. | | [Understand camera render order](cameras-advanced.md) | Understand the order in which URP clears camera buffers and performs culling and rendering operations. 
| | [Camera render types](camera-types-and-render-type.md) | Understand the difference between the Base and Overlay camera types. | | [Anti-aliasing in URP](anti-aliasing.md) | Apply anti-aliasing effects to a camera. | -| [Use multiple cameras](cameras-multiple.md)| Set up and use more than one camera in a scene to use in a camera stack, a split screen effect, post-processing, or output to a render texture.| -| [Customize a camera](universal-additional-camera-data.md)| Use the Universal Additional Camera Data component to customise a camera's behavior.| -| [Camera component properties](camera-component-reference.md)| Understand how each camera property works in URP.| +| [Use multiple cameras](cameras-multiple.md)| Set up and use more than one camera in a scene to use in a camera stack, a split screen effect, post-processing, or output to a render texture. | +| [Customize a camera](universal-additional-camera-data.md)| Use the Universal Additional Camera Data component to customise a camera's behavior. | +| [Camera component properties](camera-component-reference.md)| Understand how each camera property works in URP. | diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/cameras/apply-different-post-proc-to-cameras.md b/Packages/com.unity.render-pipelines.universal/Documentation~/cameras/apply-different-post-proc-to-cameras.md index 73de81df8db..7831e223eb9 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/cameras/apply-different-post-proc-to-cameras.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/cameras/apply-different-post-proc-to-cameras.md @@ -24,7 +24,7 @@ With the scene set up, the following steps show how to create and apply a post-p 3. Select the **Layer** dropdown and choose one of the layers created when you set up the scene. 4. Select the camera you want to apply this effect to. 5. 
In the Inspector window, go to **Environment** > **Volume Mask** and select the same layer that you chose for the GameObject. -6. Repeat stpes 1-5 for each GameObject and Camera pair that your scene requires. +6. Repeat steps 1-5 for each GameObject and Camera pair that your scene requires. > [!NOTE] > Some effects apply to all cameras in a scene by default. As a result of this, you might need to add the same effect to each volume. This overrides the effects from other volumes on individual cameras with the new values that you set. diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/cameras/camera-stacking-concepts.md b/Packages/com.unity.render-pipelines.universal/Documentation~/cameras/camera-stacking-concepts.md index 1fa87f935db..e8cf8405574 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/cameras/camera-stacking-concepts.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/cameras/camera-stacking-concepts.md @@ -17,6 +17,10 @@ You should only apply post-processing to the last camera in the stack, so the fo * URP renders the post-processing effects only once, not repeatedly for each camera. * The visual effects are consistent, because all the cameras in the stack receive the same post-processing. +## Limitations + +You cannot use a mix of different types of renderers (2D and 3D) for cameras in a camera stack. + ## Additional resources * [Set up a camera stack](../camera-stacking.md) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/cameras/physical-camera-reference.md b/Packages/com.unity.render-pipelines.universal/Documentation~/cameras/physical-camera-reference.md new file mode 100644 index 00000000000..263b61eef2f --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/cameras/physical-camera-reference.md @@ -0,0 +1,59 @@ +# Physical Camera reference + +The physical camera properties enable the URP camera to simulate a real-world camera. 
These properties correspond to features of real-world cameras and work in the same way. + +For more information about how to use some of these properties to create the camera effect you desire, refer to [Using Physical Cameras](xref:PhysicalCameras). + +> [!NOTE] +> When the Physical Camera is in use, Unity calculates the Field of View with the following properties: +> +> * **Sensor Size** +> * **Focal Length** +> * **Shift** + +The Physical Camera properties are split into the following sections: + +* [Camera Body](#camera-body) +* [Lens](#lens) +* [Aperture Shape](#aperture-shape) + +## Camera Body + +| **Property** | **Description** | +| ------------ | --------------- | +| **Sensor Type** | Specify the real-world camera format you want the camera to simulate. When you choose a camera format, Unity sets the **Sensor Size** > **X** and **Y** properties to the correct values automatically.

    URP offers the following camera format presets:
    • **8mm**:
      • **X**: 4.8
      • **Y**: 3.5
    • **Super 8mm**:
      • **X**: 5.79
      • **Y**: 4.01
    • **16mm**:
      • **X**: 10.26
      • **Y**: 7.49
    • **Super 16mm**:
      • **X**: 12.522
      • **Y**: 7.417
    • **35mm 2-perf**:
      • **X**: 21.95
      • **Y**: 9.35
    • **35mm Academy**:
      • **X**: 21.946
      • **Y**: 16.002
    • **Super-35**:
      • **X**: 24.89
      • **Y**: 18.66
    • **35mm TV Projection**:
      • **X**: 20.726
      • **Y**: 15.545
    • **35mm Full Aperture**:
      • **X**: 24.892
      • **Y**: 18.669
    • **35mm 1.85 Projection**:
      • **X**: 20.955
      • **Y**: 11.328
    • **35mm Anamorphic**:
      • **X**: 21.946
      • **Y**: 18.593
    • **65mm ALEXA**:
      • **X**: 54.12
      • **Y**: 25.59
    • **70mm**:
      • **X**: 52.476
      • **Y**: 23.012
    • **70mm IMAX**:
      • **X**: 70.41
      • **Y**: 52.63
    • **Custom**:
      • Set the **X** and **Y** values manually

      • If you change the **Sensor Size** values manually, Unity automatically sets this property to **Custom**. | +| **Sensor Size** | Set the size, in millimeters, of the camera sensor.

        Unity sets the **X** and **Y** values automatically when you choose the **Sensor Type**. | +|     **X** | The horizontal size of the camera sensor. | +|     **Y** | The vertical size of the camera sensor. | +| **ISO** | The light sensitivity of the camera sensor. | +| **Shutter Speed** | The amount of time the camera sensor captures light. | +|     **Unit** | The unit of measurement for **Shutter Speed**.

        Available options:
        • **Second**
        • **1/Second**
        | +| **Gate Fit** | Options for changing the size of the **resolution gate** (size/aspect ratio of the game view) relative to the **film gate** (size/aspect ratio of the Physical Camera sensor).

        For more information about resolution gate and film gate, refer to the documentation on [Physical Cameras](https://docs.unity3d.com/Manual/PhysicalCameras.html). | +|     **Vertical** | Fits the resolution gate to the height of the film gate.

        If the sensor aspect ratio is larger than the game view aspect ratio, Unity crops the rendered image at the sides.

        If the sensor aspect ratio is smaller than the game view aspect ratio, Unity overscans the rendered image at the sides.

        When you choose this setting, any change to the sensor width (**Sensor Size** > **X**) has no effect on the rendered image. | +|     **Horizontal** | Fits the resolution gate to the width of the film gate.

        If the sensor aspect ratio is larger than the game view aspect ratio, Unity overscans the rendered image on the top and bottom.

        If the sensor aspect ratio is smaller than the game view aspect ratio, Unity crops the rendered image on the top and bottom.

        When you choose this setting, any change to the sensor height (**Sensor Size** > **Y**) has no effect on the rendered image. | +|     **Fill** | Fits the resolution gate to either the width or height of the film gate, whichever is smaller. This crops the rendered image. | +|     **Overscan** | Fits the resolution gate to either the width or height of the film gate, whichever is larger. This overscans the rendered image. | +|     **None** | Ignores the resolution gate and uses the film gate only. This stretches the rendered image to fit the game view aspect ratio. | + +## Lens + +| **Property** | **Description** | +| ------------ | --------------- | +| **Focal Length** | The distance, in millimeters, between the camera sensor and the camera lens.

        Lower values result in a wider **Field of View**, and vice versa.

        When you change this value, Unity automatically updates the **Field of View** property accordingly. | +| **Shift** | Shifts the lens horizontally or vertically from center. Values are multiples of the sensor size; for example, a shift of 0.5 along the X axis offsets the sensor by half its horizontal size.

        You can use lens shifts to correct distortion that occurs when the camera is at an angle to the subject (for example, converging parallel lines).

        Shift the lens along either axis to make the camera frustum [oblique](https://docs.unity3d.com/Manual/ObliqueFrustum.html). | +|     **X** | The lens's horizontal offset from the camera sensor. | +|     **Y** | The lens's vertical offset from the camera sensor. | +| **Aperture** | The f-stop (f-number) of the lens. A lower value gives a wider lens aperture. | +| **Focus Distance** | The distance from the camera where objects appear sharp when you enable Depth of Field. | + +> [!NOTE] +> When Physical Camera properties are in use at the same time as [Depth of Field](../post-processing-depth-of-field.md) post-processing, the Lens properties directly affect the Depth of Field effect. This requires you to adjust both the Depth of Field properties and the Lens properties to create the effect you want. + +## Aperture Shape + +| **Property** | **Description** | +| ------------ | --------------- | +| **Blade Count** | The number of blades in the lens aperture. A higher value gives a rounder aperture shape. | +| **Curvature** | The curvature of the lens aperture blades. | +| **Barrel Clipping** | The self-occlusion of the lens. A higher value creates a cat's eye effect. | +| **Anamorphism** | The amount of vertical stretch of the camera sensor to make the sensor taller or shorter. A higher value increases the stretch of the sensor to simulate an anamorphic look. 
| diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/compatibility-mode.md b/Packages/com.unity.render-pipelines.universal/Documentation~/compatibility-mode.md index 60d142cb6db..b92886bc0eb 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/compatibility-mode.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/compatibility-mode.md @@ -1,11 +1,13 @@ ## Compatibility Mode -If you enable **Compatibility Mode (Render Graph Disabled)** in [URP graphics settings](urp-global-settings.md), you can write a Scriptable Render Pass without using the [render graph API](render-graph.md). +If you enable **Compatibility Mode (Render Graph Disabled)** in URP graphics settings, you can write a Scriptable Render Pass without using the [render graph API](render-graph.md). The setting is in **Project Settings** > **Graphics** > **Pipeline Specific Settings** > **URP** > **Render Graph**. -> **Note**: Unity no longer develops or improves the rendering path that doesn't use the render graph API. Use the render graph API instead when developing new graphics features. +> [!NOTE] +> Unity no longer develops or improves the rendering path that doesn't use the [render graph API](render-graph.md). Use the render graph API instead when developing new graphics features. 
|Page|Description| |-|-| |[Write a Scriptable Render Pass in Compatibility Mode](renderer-features/write-a-scriptable-render-pass.md)|An example of creating a Scriptable Render Pass in Compatibility Mode.| |[Example of a complete Scriptable Renderer Feature in Compatibility Mode](renderer-features/create-custom-renderer-feature-compatibility-mode.md)|An example of a complete Scriptable Renderer Feature in Compatibility Mode.| |[Scriptable Render Pass API reference](renderer-features/scriptable-renderer-features/scriptable-render-pass-reference.md)|Reference for the Scriptable Render Pass API.| +|[Perform a full screen blit in URP in Compatibility mode](renderer-features/how-to-fullscreen-blit.md)|An example that describes how to create a custom Renderer Feature that performs a full screen blit.| diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/customize/blit-overview.md b/Packages/com.unity.render-pipelines.universal/Documentation~/customize/blit-overview.md index 11614be5ded..a2936afc821 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/customize/blit-overview.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/customize/blit-overview.md @@ -22,4 +22,4 @@ Use the [Blitter API](https://docs.unity3d.com/Packages/com.unity.render-pipelin ## Custom full-screen blit example -The [How to perform a full screen blit in URP](../renderer-features/how-to-fullscreen-blit.md) example shows how to create a custom Renderer Feature that performs a full screen blit. The example works in XR and is compatible with SRP APIs. +The [How to perform a full screen blit in URP in Compatibility Mode](../renderer-features/how-to-fullscreen-blit.md) example shows how to create a custom Renderer Feature that performs a full screen blit. The example works in XR and is compatible with SRP APIs. 
diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/features/rendering-debugger.md b/Packages/com.unity.render-pipelines.universal/Documentation~/features/rendering-debugger.md index 4ddddd51622..9e7d101ea2b 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/features/rendering-debugger.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/features/rendering-debugger.md @@ -60,6 +60,8 @@ The **Rendering Debugger** window contains the following sections: * [Lighting](#lighting) +* [GPU Resident Drawer](#gpu-resident-drawer) + * [Render Graph](#render-graph) * [Probe Volume](#probe-volume-panel) @@ -145,10 +147,11 @@ The properties in this section let you visualize different rendering features. | **Property** | **Description** | | ------------------------------ | ------------------------------------------------------------ | -| **Map Overlays** | Specifies which render pipeline texture to overlay on the screen. The options are:
        • **None**: Renders the scene normally without a texture overlay.
        • **Depth**: Overlays the camera's depth texture on the screen.
        • **Additional Lights Shadow Map**: Overlays the [shadow map](https://docs.unity3d.com/Manual/shadow-mapping.html) that contains shadows cast by lights other than the main directional light.
        • **Main Light Shadow Map**: Overlays the shadow map that contains shadows cast by the main directional light.
        | +| **Map Overlays** | Specifies which render pipeline texture to overlay on the screen. The options are:
        • **None**: Renders the scene normally without a texture overlay.
        • **Depth**: Overlays the camera's depth texture on the screen.
        • **Motion Vector**: Overlays the camera's motion vector texture on the screen.
        • **Additional Lights Shadow Map**: Overlays the [shadow map](https://docs.unity3d.com/Manual/shadow-mapping.html) that contains shadows cast by lights other than the main directional light.
        • **Main Light Shadow Map**: Overlays the shadow map that contains shadows cast by the main directional light.
        • **Additional Lights Cookie Atlas**: Overlays the light cookie atlas texture that contains patterns cast by lights other than the main directional light.
        • **Reflection Probe Atlas**: Overlays the reflection probe atlas texture that contains the reflection textures at the probe locations.
        | | **  Map Size** | The width and height of the overlay texture as a percentage of the view window URP displays it in. For example, a value of **50** fills up a quarter of the screen (50% of the width and 50% of the height). | | **HDR** | Indicates whether to use [high dynamic range (HDR)](https://docs.unity3d.com/Manual/HDR.html) to render the scene. Enabling this property only has an effect if you enable **HDR** in your URP Asset. | | **MSAA** | Indicates whether to use [Multisample Anti-aliasing (MSAA)](./../anti-aliasing.md#msaa) to render the scene. Enabling this property only has an effect if:
        • You set **Anti Aliasing (MSAA)** to a value other than **Disabled** in your URP Asset.
        • You use the Game View. MSAA has no effect in the Scene View.
        | +| **TAA Debug Mode** | Specifies which Temporal Anti-aliasing debug mode to use. The options are:
        • **None**: Renders the scene normally without a debug mode.
        • **Show Raw Frame**: Renders the screen with the color input Temporal Anti-aliasing currently uses.
        • **Show Raw Frame No Jitter**: Renders the screen with the color input TAA currently uses, and disables camera jitter.
        • **Show Clamped History**: Renders the screen with the color history that TAA has accumulated and corrected.
        | | **Post-processing** | Specifies how URP applies post-processing. The options are:
        • **Disabled**: Disables post-processing.
        • **Auto**: Unity enables or disables post-processing depending on the currently active debug modes. If color changes from post-processing would change the meaning of a debug mode's pixel, Unity disables post-processing. If no debug modes are active, or if color changes from post-processing don't change the meaning of the active debug modes' pixels, Unity enables post-processing.
        • **Enabled**: Applies post-processing to the image that the camera captures.
        | | **Additional Wireframe Modes** | Specifies whether and how to render wireframes for meshes in your scene. The options are:
        • **None**: Doesn't render wireframes.
        • **Wireframe**: Exclusively renders edges for meshes in your scene. In this mode, you can see the wireframe for meshes through the wireframe for closer meshes.
        • **Solid Wireframe**: Exclusively renders edges and faces for meshes in your scene. In this mode, the faces of each wireframe mesh hide edges behind them.
        • **Shaded Wireframe**: Renders edges for meshes as an overlay. In this mode, Unity renders the scene in color and overlays the wireframe over the top.
        | | **Overdraw** | Indicates whether to render the overdraw debug view. This is useful to check where Unity draws pixels over one other. | @@ -198,6 +201,62 @@ The properties in this section let you visualize different settings and elements | **Lighting Debug Mode** | Specifies which lighting and shadow information to overlay on-screen to debug. The options are:
        • **None**: Renders the scene normally without a debug overlay.
        • **Shadow Cascades**: Overlays shadow cascade information so you can determine which shadow cascade each pixel uses. Use this to debug shadow cascade distances. For information on which color represents which shadow cascade, refer to the [Shadows section of the URP Asset](../universalrp-asset.md#shadows).
        • **Lighting Without Normal Maps**: Renders the scene to visualize lighting. This mode uses neutral materials and disables normal maps. This and the **Lighting With Normal Maps** mode are useful for debugging lighting issues caused by normal maps.
        • **Lighting With Normal Maps**: Renders the scene to visualize lighting. This mode uses neutral materials and allows normal maps.
        • **Reflections**: Renders the scene to visualize reflections. This mode applies perfectly smooth, reflective materials to every Mesh Renderer.
        • **Reflections With Smoothness**: Renders the scene to visualize reflections. This mode applies reflective materials without an overridden smoothness to every GameObject.
        | | **Lighting Features** | Specifies flags for which lighting features contribute to the final lighting result. Use this to view and debug specific lighting features in your scene. The options are:
        • **Nothing**: Shortcut to disable all flags.
        • **Everything**: Shortcut to enable all flags.
        • **Global Illumination**: Indicates whether to render [global illumination](https://docs.unity3d.com/Manual/realtime-gi-using-enlighten.html).
        • **Main Light**: Indicates whether the main directional [Light](../light-component.md) contributes to lighting.
        • **Additional Lights**: Indicates whether lights other than the main directional light contribute to lighting.
        • **Vertex Lighting**: Indicates whether additional lights that use per-vertex lighting contribute to lighting.
        • **Emission**: Indicates whether [emissive](https://docs.unity3d.com/Manual/StandardShaderMaterialParameterEmission.html) materials contribute to lighting.
        • **Ambient Occlusion**: Indicates whether [ambient occlusion](../post-processing-ssao.md) contributes to lighting.
        | +### GPU Resident Drawer + +The properties in this section let you visualize settings that [reduce rendering work on the CPU](../reduce-rendering-work-on-cpu.md). + +#### Occlusion Culling + +|**Property**|**Sub-property**|**Description**| +|-|-|-| +| **Occlusion Test Overlay** || Display a heatmap of culled instances. The heatmap displays blue if there are few culled instances, through to red if there are many culled instances. If you enable this setting, culling might be slower. | +| **Occlusion Test Overlay Count Visible** || Display a heatmap of instances that Unity doesn't cull. The heatmap displays blue if there are many culled instances, through to red if there are few culled instances. This setting only has an effect if you enable **Occlusion Test Overlay**. | +| **Override Occlusion Test To Always Pass** || Set occluded objects as unoccluded. This setting affects both the Rendering Debugger and the scene. | +| **Occluder Context Stats** || Display the [**Occlusion Context Stats**](#occlusion-context-stats) section. | +| **Occluder Debug View** || Display an overlay with the occlusion textures and mipmaps Unity generates. | +|| **Occluder Debug View Index** | Set the occlusion texture to display. | +|| **Occluder Debug View Range Min** | Set the brightness of the minimum depth value. Increase this value to brighten objects that are far away from the view. | +|| **Occluder Debug View Range Max** | Set the brightness of the maximum depth value. Decrease this value to darken objects that are close to the view. | + +![](../Images/renderingdebugger-gpuculling-heatmap.jpg)
        +The Rendering Debugger with **Occlusion Test Overlay** enabled. The red areas are where Unity culls many objects. The blue area is where Unity culls few objects. + +![](../Images/renderingdebugger-gpuculling-overlay.jpg)
        +The Rendering Debugger with **Occluder Debug View** enabled. The overlay displays each mipmap level of the occlusion texture. + +#### Occlusion Context Stats + +The **Occlusion Context Stats** section lists the occlusion textures Unity generates. + +|**Property**|**Description**| +|-|-| +| **Active Occlusion Contexts** | The number of occlusion textures. | +| **View Instance ID** | The instance ID of the camera Unity renders the view from, to create the occlusion texture. | +| **Subview Count** | The number of subviews. The value might be 2 or more if you use XR. | +| **Size Per Subview** | The size of the subview texture in bytes. | + +#### GPU Resident Drawer Settings + +|**Section**|**Property**|**Sub-property**|**Description**| +|-|-|-|-| +|**Display Culling Stats**|||Display information about the cameras Unity uses to create occlusion textures.| +|**Instance Culler Stats**|||| +||**View Count**|| The number of views Unity uses for GPU culling. Unity uses one view per shadow cascade or shadow map. For example, Unity uses three views for a Directional Light that generates three shadow cascades. | +||**Per View Stats**||| +|||**View Type**| The object or shadow split Unity renders the view from. | +|||**View Instance ID**| The instance ID of the camera or light Unity renders the view from. | +|||**Split Index**| The shadow split index value. This value is 0 if the object doesn't have shadow splits. | +|||**Visible Instances**| How many objects are visible in this split. | +|||**Draw Commands**| How many draw commands Unity uses for this split. | +|**Occlusion Culling Events**|||| +||**View Instance ID**|| The instance ID of the camera Unity renders the view from. | +||**Event type**|| The type of render pass.
        • **OccluderUpdate**: The GPU samples the depth buffer and creates a new occlusion texture and its mipmap.
        • **OcclusionTest**: The GPU tests all the instances against the occlusion texture.
        | +||**Occluder Version**|| How many times Unity updates the occlusion texture in this frame. | +||**Subview Mask**|| A bitmask that represents which subviews are affected in this frame. | +||**Occlusion Test**|| Which test the GPU runs against the occlusion texture.
        • **TestNone**: Unity found no occluders, so all instances are visible.
        • **TestAll**: Unity tests all instances against the occlusion texture.
        • **TestCulled**: Unity tests only instances that the previous **TestAll** test culled.
        | +||**Visible Instances**|| The number of visible instances after occlusion culling. | +||**Culled Instances**|| The number of culled instances after occlusion culling. | + ### Render Graph The properties in this section let you change how the [render graph system](../render-graph.md) works. diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/gpu-culling.md b/Packages/com.unity.render-pipelines.universal/Documentation~/gpu-culling.md new file mode 100644 index 00000000000..4d61380d2d6 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/gpu-culling.md @@ -0,0 +1,39 @@ +# Use GPU occlusion culling + +GPU occlusion culling means Unity uses the GPU instead of the CPU to exclude objects from rendering when they're occluded behind other objects. Unity uses this information to speed up rendering in scenes that have a lot of occlusion. + +The GPU Resident Drawer works only with the following: + +- The [Forward+](rendering/forward-plus-rendering-path.md) rendering path. +- [Graphics APIs](https://docs.unity3d.com/6000.0/Documentation/Manual/GraphicsAPIs.html) and platforms that support compute shaders. + +## How GPU occlusion culling works + +Unity generates depth textures from the perspective of cameras and lights in the scene. + +The GPU then uses the depth textures from the current frame and the previous frame to cull objects. Unity renders only objects that are unoccluded in either frame. Unity culls the remaining objects, which are occluded in both frames. + +Whether GPU occlusion culling speeds up rendering depends on your scene. GPU occlusion culling is most effective in the following setups: + +- Multiple objects use the same mesh, so Unity can group them into a single draw call. +- The scene has a lot of occlusion, especially if the occluded objects have a high number of vertices. 
+ +If occlusion culling doesn't have a big effect on your scene, rendering time might increase because of the extra work the GPU does to set up GPU occlusion culling. + +## Enable GPU occlusion culling + +1. Go to **Graphics**, select the **URP** tab, then in the **Render Graph** section make sure **Compatibility Mode (Render Graph Disabled)** is disabled. +2. [Enable the GPU Resident Drawer](gpu-resident-drawer.md#enable-the-gpu-resident-drawer). +3. In the active [Universal Renderer](urp-universal-renderer.md), enable **GPU Occlusion**. + +## Analyze GPU occlusion culling + +You can use the following to analyze GPU occlusion culling: + +- [Rendering Statistics](https://docs.unity3d.com/Manual/RenderingStatistics.html) overlay to check rendering speed increases. +- [Rendering Debugger](features/rendering-debugger.md#gpu-resident-drawer) to troubleshoot issues. + +## Additional resources + +- [Reduce rendering work on the CPU](reduce-rendering-work-on-cpu.md) +- [Occlusion culling](https://docs.unity3d.com/Manual/OcclusionCulling.html) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/gpu-resident-drawer.md b/Packages/com.unity.render-pipelines.universal/Documentation~/gpu-resident-drawer.md new file mode 100644 index 00000000000..b10a6e56289 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/gpu-resident-drawer.md @@ -0,0 +1,58 @@ + +# Use the GPU Resident Drawer + +The GPU Resident Drawer automatically uses the [`BatchRendererGroup`](https://docs.unity3d.com/Manual/batch-renderer-group.html) API to draw GameObjects with GPU instancing, which reduces the number of draw calls and frees CPU processing time. For more information, refer to [How BatchRendererGroup works](https://docs.unity3d.com/Manual/batch-renderer-group-how.html). + +The GPU Resident Drawer works only with the following: + +- The [Forward+](rendering/forward-plus-rendering-path.md) rendering path. 
+- [Graphics APIs](https://docs.unity3d.com/6000.0/Documentation/Manual/GraphicsAPIs.html) and platforms that support compute shaders. +- GameObjects that have a [Mesh Renderer component](https://docs.unity3d.com/Manual/class-MeshRenderer.html). + +Otherwise, Unity falls back to drawing the GameObject without GPU instancing. + +If you enable the GPU Resident Drawer, the following applies: + +- Build times are longer because Unity compiles all the `BatchRendererGroup` shader variants into your build. + +## Enable the GPU Resident Drawer + +Follow these steps: + +1. Go to **Project Settings** > **Graphics**, then in the **Shader Stripping** section set **BatchRendererGroup Variants** to **Keep All**. +2. Go to the active [URP Asset](universalrp-asset.md) and enable **SRP Batcher**. +3. Double-click the renderer in the **Renderer List** to open the Universal Renderer, then set **Rendering Path** to **Forward+**. +4. Set **GPU Resident Drawer** to **Instanced Drawing**. + +If you change or create GameObjects each frame, the GPU Resident Drawer updates with the changes. + +To include or exclude GameObjects from the GPU Resident Drawer, refer to [Make a GameObject compatible with the GPU Resident Drawer](make-object-compatible-gpu-rendering.md). + +## Analyze the GPU Resident Drawer + +To analyze the results of the GPU Resident Drawer, you can use the following: + +- [Frame Debugger](https://docs.unity3d.com/Manual/FrameDebugger.html). The GPU Resident Drawer groups GameObjects into draw calls with the name **Hybrid Batch Group**. +- [Rendering Debugger](features/rendering-debugger.md#gpu-resident-drawer). +- [Rendering Statistics](https://docs.unity3d.com/Manual/RenderingStatistics.html) to check if the number of frames per second increases, and the CPU processing time and SetPass calls decreases. +- [Unity Profiler](optimize-for-better-performance.md) + +## Optimize the GPU Resident Drawer + +How much the GPU Resident Drawer speeds up rendering depends on your scene. 
The GPU Resident Drawer is most effective in the following setups: + +- The scene is large. +- Multiple GameObjects use the same mesh, so Unity can group them into a single draw call. + +Rendering usually speeds up less in the Scene view and the Game view, compared to Play mode or your final built project. + +The following might speed up the GPU Resident Drawer: + +- Go to **Project Settings** > **Player**, then in the **Other Settings** section disable **Static Batching**. +- Go to **Window** > **Panels** > **Lighting**, then in the **Lightmapping Settings** section enable **Fixed Lightmap Size** and disable **Use Mipmap Limits**. + +## Additional resources + +- [Reduce rendering work on the CPU](reduce-rendering-work-on-cpu.md) +- [Graphics performance fundamentals](https://docs.unity3d.com/Manual/OptimizingGraphicsPerformance.html) +- [GPU occlusion culling](gpu-culling.md) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/light-component.md b/Packages/com.unity.render-pipelines.universal/Documentation~/light-component.md index 96d395610bf..4efd28a8ef2 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/light-component.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/light-component.md @@ -8,8 +8,6 @@ This page contains information on Light components in the Universal Render Pipel The Light Inspector includes the following groups of properties: -![](Images/Inspectors/light-inspector.png) - * [General](#General) * [Shape](#Shape) * [Emission](#Emission) @@ -21,25 +19,33 @@ The Light Inspector includes the following groups of properties: | Property:| Function: | |:---|:---| | **Type**| The current type of light. Possible values are **Directional**, **Point**, **Spot** and **Area**.| -| **Mode**| Specify the [Light Mode](https://docs.unity3d.com/Manual/LightModes.html) used to determine if and how a light is "baked". 
Possible modes are **Realtime**, **Mixed** and **Baked**.| +| **Mode**| Specify the [Light Mode](https://docs.unity3d.com/Manual/LightModes.html) used to determine if and how a light is "baked".

        Options:
        • **Realtime**
        • **Mixed**
        • **Baked**

        **Note**: If **Type** is set to **Area**, this property is automatically set to **Baked**. | +| **Rendering Layers** | Set which rendering layers the light applies to.

        **Note**: This property is only available if **Mode** is set to **Realtime** or **Mixed**. | ### Shape | Property:| Function: | |:---|:---| -| **Spot Angle**| Define the angle (in degrees) at the base of a spot light’s cone (**Spot** light only). | +| **Inner/Outer Spot Angle**| The inner and outer angles (in degrees) at the base of a spot light’s cone (**Spot** light only). | +| **Shape** | The shape of the area light.

        Available options:
        • **Rectangle**
        • **Disc**
        | +|     **Width** | The width of the area light.

        **Note**: This property is only available if **Shape** is set to **Rectangle**. | +|     **Height** | The height of the area light.

        **Note**: This property is only available if **Shape** is set to **Rectangle**. | +|     **Radius** | The radius of the area light.

        **Note**: This property is only available if **Shape** is set to **Disc**. | ### Emission | Property:| Function: | |:---|:---| -| **Color**| Use the color picker to set the color of the emitted light. | +| **Light Appearance** | Select the method used to create the color of the light.

        Available options:
        • **Color**
        • **Filter and Temperature**
        | +|     **Color**| The color of the emitted light. Set this property with the color slider.

        **Note**: This property is only available if **Light Appearance** is set to **Color**. | +|     **Filter**| The color of the tint for the light source. Set this property with the color slider.

        **Note**: This property is only available if **Light Appearance** is set to **Filter and Temperature**. | +|     **Temperature**| The temperature (in Kelvin) of the light. Set this property with the slider or enter a specific value.

        **Note**: This property is only available if **Light Appearance** is set to **Filter and Temperature**. | | **Intensity**| Set the brightness of the light. The default value for a **Directional** light is 0.5. The default value for a **Point**, **Spot** or **Area** light is 1. | | **Indirect Multiplier**| Use this value to vary the intensity of indirect light. Indirect light is light that has bounced from one object to another. The **Indirect Multiplier** defines the brightness of bounced light calculated by the global illumination (GI) system. If you set **Indirect Multiplier** to a value lower than **1**, the bounced light becomes dimmer with every bounce. A value higher than **1** makes light brighter with each bounce. This is useful, for example, when a dark surface in shadow (such as the interior of a cave) needs to be brighter in order to make detail visible. | | **Range**| Define how far the light emitted from the center of the object travels (**Point** and **Spot** lights only). | | **Cookie** | The RGB texture this Light projects into the scene. Use cookies to create silhouettes or patterned illumination. The texture format to use depends on the type of Light:
        • Directional: 2D texture
        • Spot: 2D texture
        • Point: [cubemap texture](https://docs.unity3d.com/Manual/class-Cubemap.html)

        **Note**: URP doesn't support light cookies for Area lights.

        For more information about light cookies, refer to [Cookies](https://docs.unity3d.com/Manual/Cookies.html). | -|   **Cookie Size** | The per-axis scale Unity applies to the cookie texture. Use this property to set the size of the cookie.

        This property is available only if you set **Type** to **Directional** and assign a texture to **Cookie**. | -|   **Cookie Offset** | The per-axis offset Unity applies to the cookie texture. Use this property to move the cookie without moving the light itself. You can also animate this property to scroll the cookie.

        This property is available only if you set **Type** to **Directional** and assign a texture to **Cookie**. | +|   **Cookie Size** | The per-axis scale Unity applies to the cookie texture. Use this property to set the size of the cookie.

        **Note**: This property is available only if you set **Type** to **Directional** and assign a texture to **Cookie**. | +|   **Cookie Offset** | The per-axis offset Unity applies to the cookie texture. Use this property to move the cookie without moving the light itself. You can also animate this property to scroll the cookie.

        **Note**: This property is available only if you set **Type** to **Directional** and assign a texture to **Cookie**. | ## Rendering @@ -56,15 +62,17 @@ The Light Inspector includes the following groups of properties: | Property:| Function: | |:---|:---| | **Shadow Type**| Determine whether this Light casts Hard Shadows, Soft Shadows, or no shadows at all. For information on hard and soft shadows, refer to documentation on [Lights](https://docs.unity3d.com/Manual/class-Light.html). | -|    Baked Shadow Angle| If **Type** is set to **Directional** and **Shadow Type** is set to **Soft Shadows**, this property adds some artificial softening to the edges of shadows and gives them a more natural look. | -|    Baked Shadow Radius| If **Type** is set to **Point** or **Spot** and **Shadow Type** is set to **Soft Shadows**, this property adds some artificial softening to the edges of shadows and gives them a more natural look. | +|    **Baked Shadow Angle** | If **Type** is set to **Directional** and **Shadow Type** is set to **Soft Shadows**, this property adds some artificial softening to the edges of shadows and gives them a more natural look.

        **Note**: This property is only available if **Mode** is set to **Mixed** or **Baked**. | +|    **Baked Shadow Radius** | If **Type** is set to **Point** or **Spot** and **Shadow Type** is set to **Soft Shadows**, this property adds some artificial softening to the edges of shadows and gives them a more natural look.

        **Note**: This property is only available if **Mode** is set to **Mixed** or **Baked**. | |    Realtime Shadows| These properties are available when **Shadow Type** is set to **Hard Shadows** or **Soft Shadows**. Use these properties to control real-time shadow rendering settings. | -|        Strength| Use the slider to control how dark the shadows cast by this Light are, represented by a value between 0 and 1. This is set to 1 by default. | -|        Bias| Controls whether to use shadow bias settings from the URP Asset, or whether to define custom shadow bias settings for this Light. Possible values are **Use Pipeline Settings** or **Custom**.| -|        Depth| Controls the distance at which the shadows will be pushed away from the light. Useful for avoiding false self-shadowing artifacts. This property is visible only when **Bias** is set to **Custom**.| -|        Normal| Controls the distance at which the shadow casting surfaces will be shrunk along the surface normal. Useful for avoiding false self-shadowing artifacts. This property is visible only when **Bias** is set to **Custom**.| -|        Near Plane| Use the slider to control the value for the near clip plane when rendering shadows, defined as a value between 0.1 and 10. This value is clamped to 0.1 units or 1% of the light’s **Range** property, whichever is lower. This is set to 0.2 by default. | -|        Soft Shadows Quality | Select the soft shadows quality. With the **Use Pipeline Settings** option selected Unity uses the value from the URP Asset. Options **Low**, **Medium**, and **High** let you specify the soft shadow quality value for this Light. For more information on the values, refer to the [Soft Shadows](universalrp-asset.md#soft-shadows) section. | +|        **Strength**| Use the slider to control how dark the shadows cast by this Light are, represented by a value between 0 and 1. This is set to 1 by default. 
| +|        **Bias**| Controls whether to use shadow bias settings from the URP Asset, or whether to define custom shadow bias settings for this Light. Possible values are **Use Pipeline Settings** or **Custom**.| +|        **Depth**| Controls the distance at which the shadows will be pushed away from the light. Useful for avoiding false self-shadowing artifacts. This property is visible only when **Bias** is set to **Custom**.| +|        **Normal**| Controls the distance at which the shadow casting surfaces will be shrunk along the surface normal. Useful for avoiding false self-shadowing artifacts. This property is visible only when **Bias** is set to **Custom**.| +|        **Near Plane**| Use the slider to control the value for the near clip plane when rendering shadows, defined as a value between 0.1 and 10. This value is clamped to 0.1 units or 1% of the light’s **Range** property, whichever is lower. This is set to 0.2 by default. | +|        **Soft Shadows Quality** | Select the soft shadows quality. With the **Use Pipeline Settings** option selected Unity uses the value from the URP Asset. Options **Low**, **Medium**, and **High** let you specify the soft shadow quality value for this Light. For more information on the values, refer to the [Soft Shadows](universalrp-asset.md#soft-shadows) section. | +|    **Custom Shadow Layers** | Enable to specify the layer for shadows from the light separately to the layer for the light itself.

        **Note**: This property is only available if **Mode** is set to **Mixed** or **Baked**, and **Shadow Type** is set to **Hard Shadows** or **Soft Shadows**. | +|        **Layer** | The layer for shadows from the light. | ## Preset diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/lighting.md b/Packages/com.unity.render-pipelines.universal/Documentation~/lighting.md index 5bda4738b9e..bb651ca0c64 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/lighting.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/lighting.md @@ -2,17 +2,17 @@ Using the Universal Render Pipeline (URP), you can achieve realistic lighting that is suitable for a range of art styles. -All of Unity's render pipelines share common lighting functionality, but each render pipeline has some important differences. - -Areas where the Universal Render Pipeline (URP) differs from Unity's common lighting functionality are: - -* The [Light component inspector](light-component.md), which displays some URP-specific controls. -* The [Universal Additional Light Data](universal-additional-light-data.md) component, which allows Unity to store Light-related data that is specific to URP. -* Enlighten Realtime Global Illumination is supported in URP from version 12. For more information, refer to [Realtime Global Illumination using Enlighten](https://docs.unity3d.com/Manual/realtime-gi-using-enlighten.html). - -For a full comparison of lighting features between Unity's Built-in Render Pipeline and URP, and an up to date list of lighting features that are currently under research, check the [feature comparison chart](universalrp-builtin-feature-comparison.md). - -For a general introduction to lighting in Unity and examples of common lighting workflows, refer to the [Lighting section of the Unity Manual](https://docs.unity3d.com/Manual/LightingOverview.html). 
+| Page | Description | +|-|-| +| [Lighting in URP](lighting/lighting-in-urp.md) | Understand the differences between Unity's common lighting functionality and the lighting functionality in URP.| +| [Light component reference](light-component.md) | Understand how each lighting property works in URP. | +| [The Universal Additional Light Data component](universal-additional-light-data.md) | Use the Universal Additional Light Data component to override lighting settings in URP. | +| [Lighting Mode](urp-lighting-mode.md) | Understand which lighting modes URP supports. | +| [Shadows in the URP](Shadows-in-URP.md) | How to work with shadows in URP. | +| [Reflection Probes](lighting/reflection-probes.md) | Configure the URP-specific behavior of Reflection Probes. | +| [View and control a light from its perspective](lights-placement-tool.md) | Use the Light Placement Tool to view a Scene from a light source's perspective. | +| [Adaptive Probe Volumes (APV)](probevolumes.md) | Understand how to use Adaptive Probe Volumes in URP. | +| [Lens flares](shared/lens-flare/lens-flare.md) | Understand how to use lens flares in URP. | ## Configure lighting for better performance diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/lighting/lighting-in-urp.md b/Packages/com.unity.render-pipelines.universal/Documentation~/lighting/lighting-in-urp.md new file mode 100644 index 00000000000..174f905ec2f --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/lighting/lighting-in-urp.md @@ -0,0 +1,12 @@ +# Lighting in the Universal Render Pipeline + +All of Unity's render pipelines share common lighting functionality, but each render pipeline has some important differences. + +Areas where the Universal Render Pipeline (URP) differs from Unity's common lighting functionality are: + +* The [Light component inspector](light-component.md), which displays some URP-specific controls. 
+* The [Universal Additional Light Data](universal-additional-light-data.md) component, which allows Unity to store Light-related data that is specific to URP. + +For a full comparison of lighting features between Unity's Built-in Render Pipeline and URP, and an up to date list of lighting features that are currently under research, check the [Render pipeline feature comparison](https://docs.unity3d.com/Manual/render-pipelines-feature-comparison.html). + +For a general introduction to lighting in Unity and examples of common lighting workflows, refer to the [Lighting section of the Unity Manual](https://docs.unity3d.com/Manual/LightingOverview.html). diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/make-object-compatible-gpu-rendering.md b/Packages/com.unity.render-pipelines.universal/Documentation~/make-object-compatible-gpu-rendering.md new file mode 100644 index 00000000000..3f704feb4ae --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/make-object-compatible-gpu-rendering.md @@ -0,0 +1,26 @@ +# Make a GameObject compatible with the GPU Resident Drawer + +To make a GameObject compatible with the [GPU Resident Drawer](gpu-resident-drawer.md), check it has the following properties: + +- Has a [Mesh Renderer component](https://docs.unity3d.com/Manual/class-MeshRenderer.html). +- In the Mesh Renderer component, **Light Probes** isn't set to **Use Proxy Volume**. +- Uses only static global illumination, not real time global illumination. +- Uses a shader that supports DOTS instancing. Refer to [Supporting DOTS Instancing](https://docs.unity3d.com/Manual/dots-instancing-shaders.html) for more information. +- Doesn't move position after one camera finishes rendering and before another camera starts rendering. +- Doesn't use the `MaterialPropertyBlock` API. +- Doesn't have a script that uses a per-instance callback, for example `OnRenderObject`. 
+ +## Exclude a GameObject from the GPU Resident Drawer + +To exclude a GameObject from the GPU Resident Drawer, add a **Disallow GPU Driven Rendering** component to the GameObject. + +1. Select the GameObject. +2. In the **Inspector** window, select **Add Component**. +3. Select **Disallow GPU Driven Rendering**. + +Select **Apply to Children Recursively** to exclude both the GameObject and its children. + +## Additional resources + +- [Mesh Renderer component](https://docs.unity3d.com/Manual/class-MeshRenderer.html) + diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-lighting-panel-reference.md b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-lighting-panel-reference.md index 1bc09fc9455..73da9854283 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-lighting-panel-reference.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/probevolumes-lighting-panel-reference.md @@ -98,9 +98,9 @@ This section appears only if you enable **Lighting Scenarios** under **Light Pro | **Property** | **Description** | |-|-| -| **Sky Occlusion** | Enable [sky occlusion](probevolumes-skyocclusion.md) | +| **Sky Occlusion** | Enable [sky occlusion](probevolumes-skyocclusion.md). | | **Samples** | Set the number of samples Unity uses to calculate the light each probe receives from the sky. Higher values increase the accuracy of the sky occlusion data, but increasing baking time. The default value is 2048. | -| **Bounces** | The number of times Unity bounces light from the sky off objects when calculating the sky occlusion data. Higher values increase the accuracy of the sky occlusion data, but increase baking time. Use higher values if objects block the direct view from probes to the sky. The default value is 2. | +| **Bounces** | Set the number of times Unity bounces light from the sky off objects when calculating the sky occlusion data. 
Higher values increase the accuracy of the sky occlusion data, but increase baking time. Use higher values if objects block the direct view from probes to the sky. The default value is 2. | | **Albedo Override** | Set the brightness of the single color Unity uses to represent objects the sky light bounces off, instead of the actual color of the objects. Higher values brighten the baked sky occlusion lighting. The default value is 0.6. | | **Sky Direction** | Enable Unity storing and using more accurate data about the directions from probes towards the sky. Refer to [Add dynamic color and shadows from the sky](probevolumes-skyocclusion.md#enable-more-accurate-sky-direction-data) for more information. | diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/reduce-rendering-work-on-cpu.md b/Packages/com.unity.render-pipelines.universal/Documentation~/reduce-rendering-work-on-cpu.md new file mode 100644 index 00000000000..31271cd9021 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/reduce-rendering-work-on-cpu.md @@ -0,0 +1,13 @@ +# Reduce rendering work on the CPU + +You can use the GPU Resident Drawer or GPU occlusion culling to speed up rendering. When you enable these features, Unity optimizes the rendering pipeline so the CPU has less work to do each frame, and the GPU draws GameObjects more efficiently. 
+ +|Page|Description| +|-|-| +|[Use the GPU Resident Drawer](gpu-resident-drawer.md)|Automatically use the `BatchRendererGroup` API to use instancing and reduce the number of draw calls.| +|[Make a GameObject compatible with the GPU Resident Drawer](make-object-compatible-gpu-rendering.md)|Include or exclude a GameObject from the GPU Resident Drawer.| +|[Use GPU occlusion culling](gpu-culling.md)|Use the GPU instead of the CPU to exclude GameObjects from rendering when they're occluded behind other GameObjects.| + +## Additional resources + +- [Graphics performance fundamentals](https://docs.unity3d.com/Manual/OptimizingGraphicsPerformance.html) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-viewer-reference.md b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-viewer-reference.md index 5977151dfec..d01ab1df0a0 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-viewer-reference.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/render-graph-viewer-reference.md @@ -86,7 +86,7 @@ You can also use the Search bar to find a resource by name. |**Clear**|Displays **True** if URP clears the texture.| |**BindMS**|Whether the texture is bound as a multisampled texture. Refer to [RenderTextureDescriptor.BindMS](https://docs.unity3d.com/ScriptReference/RenderTextureDescriptor-bindMS.html) for more information.| |**Samples**|How many times Multisample Anti-aliasing (MSAA) samples the texture. Refer to [Anti-aliasing](anti-aliasing.md#multisample-anti-aliasing-msaa) for more information.| -|**Memoryless**|Displays **True** if the resource is stored in tile memory on mobile platforms that use tile-based deferred rendering. See [Render graph system introduction](render-graph-introduction.md) for more information.| +|**Memoryless**|Displays **True** if the resource is stored in tile memory on mobile platforms that use tile-based deferred rendering. 
Refer to [Render graph system introduction](render-graph-introduction.md) for more information.| ## Pass List diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/create-custom-renderer-feature-compatibility-mode.md b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/create-custom-renderer-feature-compatibility-mode.md index bee21bf3e57..02b03222635 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/create-custom-renderer-feature-compatibility-mode.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/create-custom-renderer-feature-compatibility-mode.md @@ -1,8 +1,8 @@ # Example of a complete Scriptable Renderer Feature in Compatibility Mode -This section describes how to create a complete [Scriptable Renderer Feature](./scriptable-renderer-features/intro-to-scriptable-renderer-features.md) for a URP Renderer, if you enable **Compatibility Mode (Render Graph Disabled)** in [URP graphics settings](../urp-global-settings.md). +This section describes how to create a complete [Scriptable Renderer Feature](./scriptable-renderer-features/intro-to-scriptable-renderer-features.md) for a URP Renderer. -> **Note**: Unity no longer develops or improves the rendering path that doesn't use the render graph API. Use the render graph API instead when developing new graphics features. 
+[!include[](../snippets/note-compatibility-mode.md)] This walkthrough contains the following sections: diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/how-to-fullscreen-blit.md b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/how-to-fullscreen-blit.md index 24f2e449a22..4b86d06538d 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/how-to-fullscreen-blit.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/how-to-fullscreen-blit.md @@ -1,7 +1,9 @@ -# Perform a full screen blit in URP +# Perform a full screen blit in URP in Compatibility Mode The example on this page describes how to create a custom Renderer Feature that performs a full screen blit. +[!include[](../snippets/note-compatibility-mode.md)] + ## Example overview This example implements the following solution: @@ -195,3 +197,7 @@ Follow these steps to create a [custom Renderer Feature](https://docs.unity3d.co > **Note**: To visualize the example in XR, configure the project to use XR SDK. [Add the MockHMD XR Plugin to the project](https://docs.unity3d.com/Packages/com.unity.xr.mock-hmd@latest/index.html). Set the **Render Mode** property to **Single Pass Instanced**. The example is complete. + +## Additional resources + +For more information on performing the blit operation in Compatibility Mode, refer to the [Using textures section in the URP 14 (Unity 2022) documentation](https://docs.unity3d.com/Packages/com.unity.render-pipelines.universal@14.0/manual/working-with-textures.html). 
\ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/scriptable-render-pass-reference.md b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/scriptable-render-pass-reference.md index 9ccb2ed5374..1a53cbd2614 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/scriptable-render-pass-reference.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/scriptable-renderer-features/scriptable-render-pass-reference.md @@ -1,8 +1,8 @@ ## Scriptable Render Pass Compatibility Mode API reference -You can use the following methods within a Scriptable Render Pass to handle its core functions, if you enable **Compatibility Mode (Render Graph Disabled)** in [URP graphics settings](../../urp-global-settings.md). +You can use the following methods within a Scriptable Render Pass to handle its core functions. -> **Note**: Unity no longer develops or improves the rendering path that doesn't use the render graph API. Use the render graph API instead when developing new graphics features. 
+[!include[](../../snippets/note-compatibility-mode.md)] | **Method** | **Description** | | ---------- | --------------- | diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/write-a-scriptable-render-pass.md b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/write-a-scriptable-render-pass.md index 5eba4ac4f5d..0e842c3bac3 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/write-a-scriptable-render-pass.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/renderer-features/write-a-scriptable-render-pass.md @@ -1,8 +1,6 @@ # Write a Scriptable Render Pass in Compatibility Mode -If you enable **Compatibility Mode (Render Graph Disabled)** in [URP graphics settings](../urp-global-settings.md), you can write a Scriptable Render Pass without using the [render graph API](../render-graph.md). - -> **Note**: Unity no longer develops or improves the rendering path that doesn't use the render graph API. Use the render graph API instead when developing new graphics features. +[!include[](../snippets/note-compatibility-mode.md)] The following example is a `ScriptableRenderPass` instance that performs the following steps: diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/rendering-to-a-render-texture.md b/Packages/com.unity.render-pipelines.universal/Documentation~/rendering-to-a-render-texture.md index 8eb8b0be1a3..c1b84c5b984 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/rendering-to-a-render-texture.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/rendering-to-a-render-texture.md @@ -18,9 +18,8 @@ If you have a Camera that is rendering to a Render Texture, you must have a seco The first Camera now renders its view to the Render Texture. The second Camera renders the scene including the Render Texture to the screen. 
-You can set the Output Target for a Camera in a script by setting the `cameraOutput` property of the Camera's [Universal Additional Camera Data](xref:UnityEngine.Rendering.Universal.UniversalAdditionalCameraData) component, like this: +You can set the output target for a camera in a script by setting the `targetTexture` property of the camera: ```c# -myUniversalAdditionalCameraData.cameraOutput = CameraOutput.Texture; myCamera.targetTexture = myRenderTexture; ``` diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/shading-model.md b/Packages/com.unity.render-pipelines.universal/Documentation~/shading-model.md index aff4029b2ac..235c3005d16 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/shading-model.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/shading-model.md @@ -52,7 +52,7 @@ The URP Baked Lit shader is the only shader that uses the Baked Lit shading mode ## Shaders with no lighting -URP comes with some Shaders that are Unlit. This means that they have no directional lights and no baked lighting. Because there are no light calculations, these shaders compile faster than Shaders with lighting. If you know in advance that your GameObject or visual doesn’t need lighting, choose an Unlit shader to save calculation and build time in your final product. +URP comes with some unlit-type shaders. Materials with unlit-type shaders are not affected by either real-time or baked lighting. Unlit shaders let you create a unique visual look for the objects in your scene. Unlit shaders have significantly faster compilation speed compared with lit shaders.
The following URP Shaders have no lighting: * [Unlit](unlit-shader.md) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/snippets/note-compatibility-mode.md b/Packages/com.unity.render-pipelines.universal/Documentation~/snippets/note-compatibility-mode.md new file mode 100644 index 00000000000..98c2814d6d7 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/snippets/note-compatibility-mode.md @@ -0,0 +1,2 @@ +> [!NOTE] +> Unity no longer develops or improves the rendering path that doesn't use the [render graph API](render-graph.md). Use the render graph API instead when developing new graphics features. To use the instructions on this page, enable **Compatibility Mode (Render Graph Disabled)** in URP graphics settings (**Project Settings** > **Graphics**). \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/universalrp-asset.md b/Packages/com.unity.render-pipelines.universal/Documentation~/universalrp-asset.md index 1a7dfad4a1c..61283b30854 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/universalrp-asset.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/universalrp-asset.md @@ -42,17 +42,20 @@ To show all additional properties in all sections: The **Rendering** settings control the core part of the pipeline rendered frame. -| **Property** | **Description** | -| ----------------------- | ------------------------------------------------------------ | -| **Depth Texture** | Enables URP to create a `_CameraDepthTexture`. URP then uses this [depth texture](https://docs.unity3d.com/Manual/SL-DepthTextures.html) by default for all Cameras in your scene. You can override this for individual cameras in the [Camera Inspector](camera-component-reference.md). | -| **Opaque Texture** | Enable this to create a `_CameraOpaqueTexture` as default for all cameras in your scene. 
This works like the [GrabPass](https://docs.unity3d.com/Manual/SL-GrabPass.html) in the built-in render pipeline. The **Opaque Texture** provides a snapshot of the scene right before URP renders any transparent meshes. You can use this in transparent Shaders to create effects like frosted glass, water refraction, or heat waves. You can override this for individual cameras in the [Camera Inspector](camera-component-reference.md). | -| **Opaque Downsampling** | Set the sampling mode on the opaque texture to one of the following:
        **None**: Produces a copy of the opaque pass in the same resolution as the camera.
        **2x Bilinear**: Produces a half-resolution image with bilinear filtering.
        **4x Box**: Produces a quarter-resolution image with box filtering. This produces a softly blurred copy.
        **4x Bilinear**: Produces a quarter-resolution image with bi-linear filtering. | -| **Terrain Holes** | If you disable this option, the URP removes all Terrain hole Shader variants when you build for the Unity Player, which decreases build time. | -| **SRP Batcher** | Check this box to enable the SRP Batcher. This is useful if you have many different Materials that use the same Shader. The SRP Batcher is an inner loop that speeds up CPU rendering without affecting GPU performance. When you use the SRP Batcher, it replaces the SRP rendering code inner loop. If both **SRP Batcher** and **Dynamic Batching** are enabled, SRP Batcher will take precedence over dynamic batching as long as the shader is SRP Batcher compatible.

        **Note**: If assets or shaders in a project are not optimized for use with the SRP Batcher, low performance devices might be more performant when you disable the SRP Batcher. | -| **Dynamic Batching** | Enable [Dynamic Batching](https://docs.unity3d.com/Manual/DrawCallBatching.html), to make the render pipeline automatically batch small dynamic objects that share the same Material. This is useful for platforms and graphics APIs that do not support GPU instancing. If your targeted hardware does support GPU instancing, disable **Dynamic Batching**. You can change this at run time. | -| **Debug Level** | Set the level of debug information that the render pipeline generates. The values are:
        **Disabled**: Debugging is disabled. This is the default.
        **Profiling**: Makes the render pipeline provide detailed information tags, which you can find in the FrameDebugger. | -| **Shader Variant Log Level** | Set the level of information about Shader Stripping and Shader Variants you want to display when Unity finishes a build. Values are:
        **Disabled**: Unity doesn’t log anything.
        **Only Universal**: Unity logs information for all of the [URP Shaders](shaders-in-universalrp.md).
        **All**: Unity logs information for all Shaders in your build.
        You can check the information in Console panel when your build has finished. | -| **Store Actions** | Defines if Unity discards or stores the render targets of the DrawObjects Passes. Selecting the **Store** option significantly increases the memory bandwidth on mobile and tile-based GPUs.
        **Auto**: Unity uses the **Discard** option by default, and falls back to the **Store** option if it detects any injected Passes.
        **Discard**: Unity discards the render targets of render Passes that are not reused later (lower memory bandwidth).
        **Store**: Unity stores all render targets of each Pass (higher memory bandwidth). | +|**Property**|**Sub-property**|**Description**| +|-|-|-| +| **Depth Texture**|| Enables URP to create a `_CameraDepthTexture`. URP then uses this [depth texture](https://docs.unity3d.com/Manual/SL-DepthTextures.html) by default for all Cameras in your scene. You can override this for individual cameras in the [Camera Inspector](camera-component-reference.md). | +| **Opaque Texture**|| Enable this to create a `_CameraOpaqueTexture` as default for all cameras in your scene. This works like the [GrabPass](https://docs.unity3d.com/Manual/SL-GrabPass.html) in the built-in render pipeline. The **Opaque Texture** provides a snapshot of the scene right before URP renders any transparent meshes. You can use this in transparent Shaders to create effects like frosted glass, water refraction, or heat waves. You can override this for individual cameras in the [Camera Inspector](camera-component-reference.md). | +| **Opaque Downsampling**|| Set the sampling mode on the opaque texture to one of the following:
        **None**: Produces a copy of the opaque pass in the same resolution as the camera.
        **2x Bilinear**: Produces a half-resolution image with bilinear filtering.
        **4x Box**: Produces a quarter-resolution image with box filtering. This produces a softly blurred copy.
        **4x Bilinear**: Produces a quarter-resolution image with bi-linear filtering. | +| **Terrain Holes**|| If you disable this option, the URP removes all Terrain hole Shader variants when you build for the Unity Player, which decreases build time. | +| **GPU Resident Drawer**||The GPU Resident Drawer automatically uses the [`BatchRendererGroup`](https://docs.unity3d.com/Manual/batch-renderer-group.html) API to draw GameObjects with GPU instancing. Refer to [Use the GPU Resident Drawer](gpu-resident-drawer.md) for more information.

        • **Disabled**: Unity doesn't automatically draw GameObjects with GPU instancing.
        • **Instanced Drawing**: Unity automatically draws GameObjects with GPU instancing.
        | +|| **Small-Mesh Screen-Percentage** | Set the screen percentage Unity uses to cull small GameObjects, to speed up rendering. Unity culls GameObjects that fill less of the screen than this value. This setting might not work if you use your own [Level of Detail (LOD) meshes](https://docs.unity3d.com/Manual/LevelOfDetail.html). Set the value to 0 to stop Unity culling small GameObjects.

        To prevent Unity culling an individual GameObject that covers less screen space than this value, go to the **Inspector** window for the GameObject and add a **Disallow Small Mesh Culling** component. | +|| **GPU Occlusion Culling** | Enable Unity to use the GPU instead of the CPU to exclude GameObjects from rendering when they're hidden behind other GameObjects. Refer to [Use GPU occlusion culling](gpu-culling.md) for more information. | +| **SRP Batcher**|| Enable the SRP Batcher. This is useful if you have many different Materials that use the same Shader. The SRP Batcher is an inner loop that speeds up CPU rendering without affecting GPU performance. When you use the SRP Batcher, it replaces the SRP rendering code inner loop. If both **SRP Batcher** and **Dynamic Batching** are enabled, SRP Batcher will take precedence over dynamic batching as long as the shader is SRP Batcher compatible.

        **Note**: If assets or shaders in a project are not optimized for use with the SRP Batcher, low performance devices might be more performant when you disable the SRP Batcher. | +| **Dynamic Batching**|| Enable [Dynamic Batching](https://docs.unity3d.com/Manual/DrawCallBatching.html), to make the render pipeline automatically batch small dynamic objects that share the same Material. This is useful for platforms and graphics APIs that do not support GPU instancing. If your targeted hardware does support GPU instancing, disable **Dynamic Batching**. You can change this at run time. | +| **Debug Level**|| Set the level of debug information that the render pipeline generates. The values are:
        **Disabled**: Debugging is disabled. This is the default.
        **Profiling**: Makes the render pipeline provide detailed information tags, which you can find in the FrameDebugger. | +| **Shader Variant Log Level**||Set the level of information about Shader Stripping and Shader Variants you want to display when Unity finishes a build. Values are:
        **Disabled**: Unity doesn’t log anything.
        **Only Universal**: Unity logs information for all of the [URP Shaders](shaders-in-universalrp.md).
        **All**: Unity logs information for all Shaders in your build.
        You can check the information in Console panel when your build has finished. | +| **Store Actions**|| Defines if Unity discards or stores the render targets of the DrawObjects Passes.
        **Auto**: Unity uses the **Discard** option by default, and falls back to the **Store** option if it detects any injected Passes.
        **Discard**: Unity discards the render targets of render Passes that are not reused later (lower memory bandwidth).
        **Store**: Unity stores all render targets of each Pass. **Store** significantly increases the memory bandwidth on mobile and tile-based GPUs. | ### Quality @@ -83,26 +86,30 @@ These settings affect the lights in your scene. If you disable some of these settings, the relevant [keywords](https://docs.unity3d.com/Manual/shader-keywords) are [stripped from the Shader variables](shader-stripping.md). If there are settings that you know for certain you won’t use in your game or app, you can disable them to improve performance and reduce build time. -| **Property** | **Sub-property** | **Description** | -|-|-|-| -| **Main Light** || These settings affect the main [Directional Light](https://docs.unity3d.com/Manual/Lighting.html) in your scene. You can select this by assigning it as a [Sun Source](https://docs.unity3d.com/Manual/GlobalIllumination.html) in the Lighting Inspector. If you don’t assign a sun source, the URP treats the brightest directional light in the scene as the main light. You can choose between [Pixel Lighting](https://docs.unity3d.com/Manual/LightPerformance.html) and _None_. If you choose None, URP doesn’t render a main light, even if you’ve set a sun source. | -| **Cast Shadows** || Check this box to make the main light cast shadows in your scene. | -| **Shadow Resolution** || This controls how large the shadow map texture for the main light is. High resolutions give sharper, more detailed shadows. If memory or rendering time is an issue, try a lower resolution. | -| **Light Probe System** ||
        • **Light Probe Groups (Legacy)**: Use the same [Light Probe Group system](https://docs.unity3d.com/Manual/class-LightProbeGroup.html) as the Built-In Render Pipeline.
        • **Adaptive Probe Volumes**: Use [Adaptive Probe Volumes](probevolumes.md).
        | -|| **Memory Budget** | Limits the width and height of the textures that store baked Global Illumination data, which determines the amount of memory Unity sets aside to store baked Adaptive Probe Volume data. These textures have a fixed depth.
        Options:
        • **Memory Budget Low**
        • **Memory Budget Medium**
        • **Memory Budget High**
        | -|| **SH Bands** | Determines the [spherical harmonics (SH) bands](https://docs.unity3d.com/Manual/LightProbes-TechnicalInformation.html) Unity uses to store probe data. L2 provides more precise results, but uses more system resources.
        Options:
        • **Spherical Harmonics L1**
        • **Spherical Harmonics L2**
        | -| **Lighting Scenarios** || Enable to use Lighting Scenarios. Refer to [Bake different lighting setups using Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) for more information. | -|| **Scenario Blending** | Enable blending between different Lighting Scenarios. This uses more memory and makes rendering slower. | -|| **Scenario Blending Memory Budget** | Limits the width and height of the textures that Unity uses to blend between Lighting Scenarios. This determines the amount of memory Unity sets aside to store Lighting Scenario blending data, and store data while doing the blending operation. These textures have a fixed depth.
        Options:
        • **Memory Budget Low**
        • **Memory Budget Medium**
        • **Memory Budget High** | -|| **Enable GPU Streaming** | Enable to stream Adaptive Probe Volume data from CPU memory to GPU memory at runtime. Refer to [Streaming Adaptive Probe Volumes](probevolumes-streaming.md) for more information. | -|| **Enable Disk Streaming** | Enable to stream Adaptive Probe Volume data from disk to CPU memory at runtime. [Streaming Adaptive Probe Volumes](probevolumes-streaming.md) for more information. | -| **Estimated GPU Memory Cost** || Indicates the amount of texture data used by Adaptive Probe Volumes in your project. This includes textures used both for Global Illumination and Lighting Scenario blending. | -| **Additional Lights** || Here, you can choose to have additional lights to supplement your main light. Choose between [Per Vertex](https://docs.unity3d.com/Manual/LightPerformance.html), [Per Pixel](https://docs.unity3d.com/Manual/LightPerformance.html), or **Disabled**. | -|| **Per Object Limit** | This slider sets the limit for how many additional lights can affect each GameObject. | -|| **Cast Shadows** | Check this box to make the additional lights cast shadows in your scene. | -|| **Shadow Resolution** | This controls the size of the textures that cast directional shadows for the additional lights. This is a sprite atlas that packs up to 16 shadow maps. High resolutions give sharper, more detailed shadows. If memory or rendering time is an issue, try a lower resolution. | -| **Use Rendering Layers** || With this option selected, you can configure certain Lights to affect only specific GameObjects. For more information on Rendering Layers and how to use them, refer to the documentation on [Rendering Layers](features/rendering-layers.md) -| **Mixed Lighting** || Enable [Mixed Lighting](https://docs.unity3d.com/Manual/LightMode-Mixed.html) to configure the pipeline to include mixed lighting shader variants in the build. 
| +| Property | Description | +| --------------------- | ------------------------------------------------------------ | +| **Main Light** | These settings affect the main [Directional Light](https://docs.unity3d.com/Manual/Lighting.html) in your scene. You can select this by assigning it as a [Sun Source](https://docs.unity3d.com/Manual/GlobalIllumination.html) in the Lighting Inspector. If you don’t assign a sun source, the URP treats the brightest directional light in the scene as the main light. You can choose between [Pixel Lighting](https://docs.unity3d.com/Manual/LightPerformance.html) and **None**. If you choose None, URP doesn’t render a main light, even if you’ve set a sun source. | +|     **Cast Shadows** | Check this box to make the main light cast shadows in your scene. | +|     **Shadow Resolution** | This controls how large the shadow map texture for the main light is. High resolutions give sharper, more detailed shadows. If memory or rendering time is an issue, try a lower resolution. | +| **Light Probe System** |
        • **Light Probe Groups (Legacy)**: Use the same [Light Probe Group system](https://docs.unity3d.com/Manual/class-LightProbeGroup.html) as the Built-In Render Pipeline.
        • **Adaptive Probe Volumes**: Use [Adaptive Probe Volumes](probevolumes.md).
        | +| **Memory Budget** | Limits the width and height of the textures that store baked Global Illumination data, which determines the amount of memory Unity sets aside to store baked Adaptive Probe Volume data. These textures have a fixed depth.
        Options:
        • **Memory Budget Low**
        • **Memory Budget Medium**
        • **Memory Budget High**
        | +| **SH Bands** | Determines the [spherical harmonics (SH) bands](https://docs.unity3d.com/Manual/LightProbes-TechnicalInformation.html) Unity uses to store probe data. L2 provides more precise results, but uses more system resources.
        Options:
        • **Spherical Harmonics L1**
        • **Spherical Harmonics L2**
        | +| **Enable Streaming** | Enable to stream Adaptive Probe Volume data from CPU memory to GPU memory at runtime. Refer to [Streaming Adaptive Probe Volumes](probevolumes-streaming.md) for more information. | +| **Estimated GPU Memory Cost** | Indicates the amount of texture data used by Adaptive Probe Volumes in your project. | +| **Additional Lights** | Here, you can choose to have additional lights to supplement your main light. Choose between [Per Vertex](https://docs.unity3d.com/Manual/LightPerformance.html), [Per Pixel](https://docs.unity3d.com/Manual/LightPerformance.html), or **Disabled**. | +|     **Per Object Limit** | This slider sets the limit for how many additional lights can affect each GameObject. | +|     **Cast Shadows** | Check this box to make the additional lights cast shadows in your scene. | +|     **Shadow Atlas Resolution** | This controls the size of the textures that cast directional shadows for the additional lights. This is a sprite atlas that packs up to 16 shadow maps. High resolutions give sharper, more detailed shadows. If memory or rendering time is an issue, try a lower resolution. | +|     **Shadow Resolution Tiers** | Set the resolution of the shadows cast by additional lights at various tiers.

        Resolutions must have a value of 128 or greater, and are rounded to the next power of two.

        **Note**: This property is only visible when the **Cast Shadows** property is enabled for Additional Lights. | +|     **Cookie Atlas Resolution** | The size of the cookie atlas the additional lights use. All additional lights are packed into a single cookie atlas.

        This property is only visible when the **Light Cookies** property is enabled. | +|     **Cookie Atlas Format** | The format of the cookie atlas for additional lights. All additional lights are packed into a single cookie atlas.

        Available options:
        • **Grayscale Low**
        • **Grayscale High**
        • **Color Low**
        • **Color High**
        • **Color HDR**
        This property is only visible when the **Light Cookies** property is enabled. | +| **Reflection Probes** | | +|     **Probe Blending** | Smooth the transitions between Reflection Probes. For more information, refer to [Reflection Probe Blending](lighting/reflection-probes.md#reflection-probe-blending). | +|     **Box Projection** | Create reflections on objects based on their position within the probe's box, while still using a single probe as the reflection source. For more information, refer to [Advanced Reflection Probe features](xref:AdvancedRefProbe). | +| **Mixed Lighting** | Enable [Mixed Lighting](https://docs.unity3d.com/Manual/LightMode-Mixed.html) to configure the pipeline to include mixed lighting shader variants in the build. | +| **Use Rendering Layers** | With this option selected, you can configure certain Lights to affect only specific GameObjects. For more information on Rendering Layers and how to use them, refer to the documentation on [Rendering Layers](features/rendering-layers.md). | +| **Light Cookies** | Enables [light cookies](https://docs.unity3d.com/Manual/Cookies.html). This property enables **Cookie Atlas Resolution** and **Cookie Atlas Format** for additional lights. | +| **SH Evaluation Mode** | Defines the spherical harmonic (SH) lighting evaluation type.

        Available options:
        • **Auto**: Unity selects a mode automatically.
        • **Per Vertex**: Evaluate lighting per vertex.
        • **Mixed**: Evaluate lighting partially per vertex, partially per pixel.
        • **Per Pixel**: Evaluate lighting per pixel.
        | ### Shadows @@ -117,14 +124,14 @@ The **Shadows** section has the following properties. | **Max Distance** | The maximum distance from the Camera at which Unity renders the shadows. Unity does not render shadows farther than this distance.
        **Note**: This property is in metric units regardless of the value in the **Working Unit** property. | | **Working Unit** | The unit in which Unity measures the shadow cascade distances. | | **Cascade Count** | The number of [shadow cascades](https://docs.unity3d.com/Manual/shadow-cascades.html). With shadow cascades, you can avoid crude shadows close to the Camera and keep the Shadow Resolution reasonably low. For more information, refer to the documentation on [Shadow Cascades](https://docs.unity3d.com/Manual/shadow-cascades.html). Increasing the number of cascades reduces the performance. Cascade settings only affects the main light. | -|     Split 1 | The distance where cascade 1 ends and cascade 2 starts. | -|     Split 2 | The distance where cascade 2 ends and cascade 3 starts. | -|     Split 3 | The distance where cascade 3 ends and cascade 4 starts. | -|     Last Border | The size of the area where Unity fades out the shadows. Unity starts fading out shadows at the distance **Max Distance** - **Last Border**, at **Max Distance** the shadows fade to zero. | +|     **Split** **1** | The distance where cascade 1 ends and cascade 2 starts. | +|     **Split** **2** | The distance where cascade 2 ends and cascade 3 starts. | +|     **Split** **3** | The distance where cascade 3 ends and cascade 4 starts. | +|     **Last** **Border** | The size of the area where Unity fades out the shadows. Unity starts fading out shadows at the distance **Max Distance** - **Last Border**, at **Max Distance** the shadows fade to zero. | | **Depth Bias** | Use this setting to reduce [shadow acne](https://docs.unity3d.com/Manual/ShadowPerformance.html). | | **Normal Bias** | Use this setting to reduce [shadow acne](https://docs.unity3d.com/Manual/ShadowPerformance.html). | | **Soft Shadows** | Select this check box to enable extra processing of the shadow maps to give them a smoother look.
        **Performance impact**: high.
        When this option is disabled, Unity samples the shadow map once with the default hardware filtering. | -|     Quality | Select the quality level of soft shadow processing.
        Available options:
        **Low**: good balance of quality and performance for mobile platforms. Filtering method: 4 PCF taps.
        **Medium**: good balance of quality and performance for desktop platforms. Filtering method: 5x5 tent filter. This is the default value.
        **High**: best quality, higher performance impact. Filtering method: 7x7 tent filter. | +|     **Quality** | Select the quality level of soft shadow processing.
        Available options:
        **Low**: good balance of quality and performance for mobile platforms. Filtering method: 4 PCF taps.
        **Medium**: good balance of quality and performance for desktop platforms. Filtering method: 5x5 tent filter. This is the default value.
        **High**: best quality, higher performance impact. Filtering method: 7x7 tent filter. | | **Conservative Enclosing Sphere** | Enable this option to improve shadow frustum culling and prevent Unity from excessively culling shadows in the corners of the shadow cascades.
        Disable this option only for compatibility purposes of existing projects created in previous Unity versions.
        If you enable this option in an existing project, you might need to adjust the shadows cascade distances because the shadow culling enclosing spheres change their size and position.
        **Performance impact**: enabling this option is likely to improve performance, because the option minimizes the overlap of shadow cascades, which reduces the number of redundant static shadow casters. | ### Post-processing @@ -133,11 +140,11 @@ This section allows you to fine-tune global post-processing settings. | Property | Description | | ---------------- | ------------------------------------------------------------ | -| **Post Processing** | This check box turns post-processing on (check box selected) or off (check box cleared) for the current URP asset.
        If you clear this check box, Unity excludes post-processing shaders and textures from the build, unless one of the following conditions is true:
        • Other assets in the build refer to the assets related to post-processing.
        • A different URP asset has the Post Processing property enabled.
        | -| **Post Process Data** | The asset containing references to shaders and Textures that the Renderer uses for post-processing.
        **Note**: Changes to this property are necessary only for advanced customization use cases. | | **Grading Mode** | Select the [color grading](https://docs.unity3d.com/Manual/PostProcessing-ColorGrading.html) mode to use for the Project.
        • **High Dynamic Range**: This mode works best for high precision grading similar to movie production workflows. Unity applies color grading before tonemapping.
        • **Low Dynamic Range**: This mode follows a more classic workflow. Unity applies a limited range of color grading after tonemapping.
        | | **LUT Size** | Set the size of the internal and external [look-up textures (LUTs)](https://docs.unity3d.com/Manual/PostProcessing-ColorGrading.html) that the Universal Render Pipeline uses for color grading. Higher sizes provide more precision, but have a potential cost of performance and memory use. You cannot mix and match LUT sizes, so decide on a size before you start the color grading process.
        The default value, **32**, provides a good balance of speed and quality. | | **Fast sRGB/Linear Conversions** | Select this option to use faster, but less accurate approximation functions when converting between the sRGB and Linear color spaces.| +| **Data Driven Lens Flare** | Allocate the shader variants and memory URP needs for [lens flares](shared/lens-flare/lens-flare-srp-reference.md) effect. | +| **Screen Space Lens Flare** | Allocate the shader variants and memory URP needs for [screen space lens flares](shared/lens-flare/reference-screen-space-lens-flare.md). | ### Volumes diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/urp-global-settings.md b/Packages/com.unity.render-pipelines.universal/Documentation~/urp-global-settings.md index f73bb3c0fe1..13bb68de376 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/urp-global-settings.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/urp-global-settings.md @@ -12,11 +12,11 @@ The check boxes in this section define which shader variants Unity strips when y | **Property** | **Description** | | --------------------------| ------------------------------------------------------------ | -| Shader Variant Log Level | Select what information about Shader variants Unity saves in logs when you build your Unity Project.
        Options:
        • Disabled: Unity doesn't save any shader variant information.
        • Only SRP Shaders: Unity saves only shader variant information for URP shaders.
        • All Shaders: Unity saves shader variant information for every shader type. | -| Strip Debug Variants | When enabled, Unity strips all debug view shader variants when you build the Player. This decreases build time, but prevents the use of Rendering Debugger in Player builds. | -| Strip Unused Post Processing Variants | When enabled, Unity assumes that the Player does not create new [Volume Profiles](Volume-Profile.md) at runtime. With this assumption, Unity only keeps the shader variants that the existing [Volume Profiles](Volume-Profile.md) use, and strips all the other variants. Unity keeps shader variants used in Volume Profiles even if the scenes in the project do not use the Profiles. | -| Strip Unused Variants | When enabled, Unity performs shader stripping in a more efficient way. This option reduces the amount of shader variants in the Player by a factor of 2 if the project uses the following URP features:
        • Rendering Layers
        • Native Render Pass
        • Reflection Probe Blending
        • Reflection Probe Box Projection
        • SSAO Renderer Feature
        • Decal Renderer Feature
        • Certain post-processing effects
        Disable this option only if you notice issues in the Player. | -| Strip Screen Coord Override Variants | When enabled, Unity strips Screen Coordinates Override shader variants in Player builds. | +| **Shader Variant Log Level** | Select what information about Shader variants Unity saves in logs when you build your Unity Project.
        Options:
        • Disabled: Unity doesn't save any shader variant information.
        • Only SRP Shaders: Unity saves only shader variant information for URP shaders.
        • All Shaders: Unity saves shader variant information for every shader type. | +| **Strip Debug Variants** | When enabled, Unity strips all debug view shader variants when you build the Player. This decreases build time, but prevents the use of Rendering Debugger in Player builds. | +| **Strip Unused Post Processing Variants** | When enabled, Unity assumes that the Player does not create new [Volume Profiles](Volume-Profile.md) at runtime. With this assumption, Unity only keeps the shader variants that the existing [Volume Profiles](Volume-Profile.md) use, and strips all the other variants. Unity keeps shader variants used in Volume Profiles even if the scenes in the project do not use the Profiles. | +| **Strip Unused Variants** | When enabled, Unity performs shader stripping in a more efficient way. This option reduces the amount of shader variants in the Player by a factor of 2 if the project uses the following URP features:
        • Rendering Layers
        • Native Render Pass
        • Reflection Probe Blending
        • Reflection Probe Box Projection
        • SSAO Renderer Feature
        • Decal Renderer Feature
        • Certain post-processing effects
        Disable this option only if you notice issues in the Player. | +| **Strip Screen Coord Override Variants** | When enabled, Unity strips Screen Coordinates Override shader variants in Player builds. | ## Default Volume Profile diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/urp-universal-renderer.md b/Packages/com.unity.render-pipelines.universal/Documentation~/urp-universal-renderer.md index 674eadde382..9e656e3741f 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/urp-universal-renderer.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/urp-universal-renderer.md @@ -100,7 +100,7 @@ With this check box selected, the Renderer processes the Stencil buffer values. For more information on how Unity works with the Stencil buffer, refer to [ShaderLab: Stencil](https://docs.unity3d.com/Manual/SL-Stencil.html). -In URP, you can use bits 0-15 of the stencil buffer for custom rendering effects. +In URP, you can use bits 0 to 3 of the stencil buffer for custom rendering effects. This means you can use stencil indices 0 to 15. ### Compatibility diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/volume-component-reference.md b/Packages/com.unity.render-pipelines.universal/Documentation~/volume-component-reference.md index e8ccea580e3..7b4ed4308dc 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/volume-component-reference.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/volume-component-reference.md @@ -2,8 +2,6 @@ Volumes components contain properties that control how they affect Cameras and how they interact with other Volumes. -![](/Images/Inspectors/Volume1.png) - | Property | Description | | :----------------- | :----------------------------------------------------------- | | **Mode** | Use the drop-down to select the method that URP uses to calculate whether this Volume can affect a Camera:
        • **Global**: Makes the Volume have no boundaries and allow it to affect every Camera in the scene.
        • **Local**: Allows you to specify boundaries for the Volume so that the Volume only affects Cameras inside the boundaries. Add a Collider to the Volume's GameObject and use that to set the boundaries. | diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/writing-shaders-urp-unlit-texture.md b/Packages/com.unity.render-pipelines.universal/Documentation~/writing-shaders-urp-unlit-texture.md index 5ee7d062a95..93026b35dc0 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/writing-shaders-urp-unlit-texture.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/writing-shaders-urp-unlit-texture.md @@ -9,7 +9,7 @@ Use the Unity shader source file from section [URP unlit shader with color input ```c++ Properties { - [MainTexture] _BaseMap("Base Map", 2D) = "white" + [MainTexture] _BaseMap("Base Map", 2D) = "white" {} } ``` @@ -78,7 +78,7 @@ Shader "Example/URPUnlitShaderTexture" // called Base Map. Properties { - [MainTexture] _BaseMap("Base Map", 2D) = "white" + [MainTexture] _BaseMap("Base Map", 2D) = "white" {} } SubShader diff --git a/Packages/com.unity.render-pipelines.universal/Editor/Camera/UniversalRenderPipelineCameraUI.Rendering.Skin.cs b/Packages/com.unity.render-pipelines.universal/Editor/Camera/UniversalRenderPipelineCameraUI.Rendering.Skin.cs index 89dc0efd55e..7bf85895ca2 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/Camera/UniversalRenderPipelineCameraUI.Rendering.Skin.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/Camera/UniversalRenderPipelineCameraUI.Rendering.Skin.cs @@ -16,7 +16,7 @@ public class Styles public static GUIContent antialiasingQuality = EditorGUIUtility.TrTextContent("Quality", "The quality level to use for the selected anti-aliasing method."); public static GUIContent taaContrastAdaptiveSharpening = EditorGUIUtility.TrTextContent("Contrast Adaptive Sharpening", "Enables high quality post sharpening to reduce TAA blur. 
The FSR upscaling overrides this setting if enabled."); - public static readonly GUIContent taaBaseBlendFactor = EditorGUIUtility.TrTextContent("Base blend factor", "Determines how much the history buffer is blended together with current frame result. Higher values means more history contribution, which leads to better anti aliasing, but also more prone to ghosting."); + public static readonly GUIContent taaBaseBlendFactor = EditorGUIUtility.TrTextContent("Base Blend Factor", "Determines how much the history buffer is blended together with current frame result. Higher values means more history contribution, which leads to better anti aliasing, but also more prone to ghosting."); public static readonly GUIContent taaJitterScale = EditorGUIUtility.TrTextContent("Jitter Scale", "Determines the scale to the jitter applied when TAA is enabled. Lowering this value will lead to less visible flickering and jittering, but also will produce more aliased images."); public static readonly GUIContent taaMipBias = EditorGUIUtility.TrTextContent("Mip Bias", "Determines how much texture mip map selection is biased when rendering. Lowering this can slightly reduce blur on textures at the cost of performance. Requires mip maps in textures."); public static readonly GUIContent taaVarianceClampScale = EditorGUIUtility.TrTextContent("Variance Clamp Scale", "Determines the strength of the history color rectification clamp. Lower values can reduce ghosting, but produce more flickering. 
Higher values reduce flickering, but are prone to blur and ghosting."); diff --git a/Packages/com.unity.render-pipelines.universal/Editor/Converter/ReadonlyMaterialConverter.cs b/Packages/com.unity.render-pipelines.universal/Editor/Converter/ReadonlyMaterialConverter.cs index fe099eb907a..6711b944ac1 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/Converter/ReadonlyMaterialConverter.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/Converter/ReadonlyMaterialConverter.cs @@ -3,6 +3,7 @@ using System.Linq; using System.Text; using UnityEditor.SceneManagement; +using UnityEditor.Search; using UnityEngine; using UnityEngine.SceneManagement; using Object = UnityEngine.Object; @@ -46,40 +47,43 @@ internal class ReadonlyMaterialConverter : RenderPipelineConverter public override void OnInitialize(InitializeConverterContext ctx, Action callback) { - var context = Search.SearchService.CreateContext("asset", "urp=convert-readonly a=URPConverterIndex"); - - Search.SearchService.Request(context, (c, items) => - { - // we're going to do this step twice in order to get them ordered, but it should be fast - var orderedRequest = items.OrderBy(req => + Search.SearchService.Request + ( + Search.SearchService.CreateContext("asset", "urp=convert-readonly a=URPConverterIndex"), + (searchContext, items) => { - GlobalObjectId.TryParse(req.id, out var gid); - return gid.assetGUID; - }); + // we're going to do this step twice in order to get them ordered, but it should be fast + var orderedRequest = items.OrderBy(req => + { + GlobalObjectId.TryParse(req.id, out var gid); + return gid.assetGUID; + }); - foreach (var r in orderedRequest) - { - if (r == null || !GlobalObjectId.TryParse(r.id, out var gid)) + foreach (var r in orderedRequest) { - continue; - } + if (string.IsNullOrEmpty(r?.id) || + !GlobalObjectId.TryParse(r.id, out var gid)) + { + continue; + } - var label = r.provider.fetchLabel(r, r.context); - var description = r.provider.fetchDescription(r, 
r.context); + var label = r.provider.fetchLabel(r, r.context); + var description = r.provider.fetchDescription(r, r.context); - var item = new ConverterItemDescriptor() - { - name = description.Split('/').Last().Split('.').First(), - info = $"{label}", - }; - guids.Add(gid.ToString()); + var item = new ConverterItemDescriptor() + { + name = description.Split('/').Last().Split('.').First(), + info = $"{label}", + }; + guids.Add(gid.ToString()); - ctx.AddAssetToConvert(item); - } + ctx.AddAssetToConvert(item); + } - callback.Invoke(); - }); - context?.Dispose(); + callback.Invoke(); + searchContext?.Dispose(); + } + ); } public override void OnRun(ref RunItemContext ctx) diff --git a/Packages/com.unity.render-pipelines.universal/Editor/Converter/RenderPipelineConvertersEditor.cs b/Packages/com.unity.render-pipelines.universal/Editor/Converter/RenderPipelineConvertersEditor.cs index f405014db47..e1e450f785c 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/Converter/RenderPipelineConvertersEditor.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/Converter/RenderPipelineConvertersEditor.cs @@ -355,6 +355,15 @@ void HideConverterLayout(VisualElement element) } void ToggleAllNone(ClickEvent evt, int index, bool value, VisualElement item) { + void ToggleSelection(Label labelSelected, Label labelNotSelected) + { + labelSelected.AddToClassList("selected"); + labelSelected.RemoveFromClassList("not_selected"); + + labelNotSelected.AddToClassList("not_selected"); + labelNotSelected.RemoveFromClassList("selected"); + } + var conv = m_ConverterStates[index]; if (conv.items.Count > 0) { @@ -363,22 +372,18 @@ void ToggleAllNone(ClickEvent evt, int index, bool value, VisualElement item) convItem.isActive = value; } UpdateSelectedConverterItems(index, item); + + var allLabel = item.Q