diff --git a/.gitattributes b/.gitattributes index 1f8c6889b1a..2ccd6ba96a3 100644 --- a/.gitattributes +++ b/.gitattributes @@ -337,38 +337,6 @@ Editor/Resources/unity[[:space:]]editor[[:space:]]resources filter=lfs diff=lfs **/SRP_SmokeTest/**/*.tiff filter=lfs diff=lfs merge=lfs -text **/SRP_SmokeTest/**/*.ttf filter=lfs diff=lfs merge=lfs -text **/SRP_SmokeTest/**/*.vfx filter=lfs diff=lfs merge=lfs -text -**/UniversalGfxTestStereo/**/*.bytes filter=lfs diff=lfs merge=lfs -text -**/UniversalGfxTestStereo/**/*.cube filter=lfs diff=lfs merge=lfs -text -**/UniversalGfxTestStereo/**/*.dds filter=lfs diff=lfs merge=lfs -text -**/UniversalGfxTestStereo/**/*.exp filter=lfs diff=lfs merge=lfs -text -**/UniversalGfxTestStereo/**/*.FBX filter=lfs diff=lfs merge=lfs -text -**/UniversalGfxTestStereo/**/*.fbx filter=lfs diff=lfs merge=lfs -text -**/UniversalGfxTestStereo/**/*.hdr filter=lfs diff=lfs merge=lfs -text -**/UniversalGfxTestStereo/**/*.jpeg filter=lfs diff=lfs merge=lfs -text -**/UniversalGfxTestStereo/**/*.jpg filter=lfs diff=lfs merge=lfs -text -**/UniversalGfxTestStereo/**/*.png filter=lfs diff=lfs merge=lfs -text -**/UniversalGfxTestStereo/**/*.psd filter=lfs diff=lfs merge=lfs -text -**/UniversalGfxTestStereo/**/*.tga filter=lfs diff=lfs merge=lfs -text -**/UniversalGfxTestStereo/**/*.tif filter=lfs diff=lfs merge=lfs -text -**/UniversalGfxTestStereo/**/*.tiff filter=lfs diff=lfs merge=lfs -text -**/UniversalGfxTestStereo/**/*.ttf filter=lfs diff=lfs merge=lfs -text -**/UniversalGfxTestStereo/**/*.vfx filter=lfs diff=lfs merge=lfs -text -**/UniversalUpgradeTest/**/*.bytes filter=lfs diff=lfs merge=lfs -text -**/UniversalUpgradeTest/**/*.cube filter=lfs diff=lfs merge=lfs -text -**/UniversalUpgradeTest/**/*.dds filter=lfs diff=lfs merge=lfs -text -**/UniversalUpgradeTest/**/*.exp filter=lfs diff=lfs merge=lfs -text -**/UniversalUpgradeTest/**/*.FBX filter=lfs diff=lfs merge=lfs -text -**/UniversalUpgradeTest/**/*.fbx filter=lfs diff=lfs merge=lfs -text 
-**/UniversalUpgradeTest/**/*.hdr filter=lfs diff=lfs merge=lfs -text -**/UniversalUpgradeTest/**/*.jpeg filter=lfs diff=lfs merge=lfs -text -**/UniversalUpgradeTest/**/*.jpg filter=lfs diff=lfs merge=lfs -text -**/UniversalUpgradeTest/**/*.png filter=lfs diff=lfs merge=lfs -text -**/UniversalUpgradeTest/**/*.psd filter=lfs diff=lfs merge=lfs -text -**/UniversalUpgradeTest/**/*.tga filter=lfs diff=lfs merge=lfs -text -**/UniversalUpgradeTest/**/*.tif filter=lfs diff=lfs merge=lfs -text -**/UniversalUpgradeTest/**/*.tiff filter=lfs diff=lfs merge=lfs -text -**/UniversalUpgradeTest/**/*.ttf filter=lfs diff=lfs merge=lfs -text -**/UniversalUpgradeTest/**/*.vfx filter=lfs diff=lfs merge=lfs -text **/com.unity.template-hd/Assets/Scenes/SampleScene/LightingData.asset filter=lfs diff=lfs merge=lfs -text **/HDRP_PerformanceTests/Assets/Scenes/Lighting/Cloud.asset filter=lfs diff=lfs merge=lfs -text diff --git a/Packages/com.unity.render-pipelines.core/Documentation~/TableOfContents.md b/Packages/com.unity.render-pipelines.core/Documentation~/TableOfContents.md index 720f9b57464..510bc01d3eb 100644 --- a/Packages/com.unity.render-pipelines.core/Documentation~/TableOfContents.md +++ b/Packages/com.unity.render-pipelines.core/Documentation~/TableOfContents.md @@ -2,6 +2,12 @@ * [What's new](whats-new.md) * [12](whats-new-12.md) * [13](whats-new-13.md) +* [Creating a custom render pipeline](srp-custom.md) + * [Create a custom Scriptable Render Pipeline](srp-custom-getting-started.md) + * [Create a Render Pipeline Asset and Render Pipeline Instance in a custom render pipeline](srp-creating-render-pipeline-asset-and-render-pipeline-instance.md) + * [Create a simple render loop in a custom render pipeline](srp-creating-simple-render-loop.md) + * [Execute rendering commands in a custom render pipeline](srp-using-scriptable-render-context.md) + * [Scriptable Render Pipeline callbacks reference](srp-callbacks-reference.md) * Camera components * [Free Camera](Free-Camera.md) * 
[Camera Switcher](Camera-Switcher.md) diff --git a/Packages/com.unity.render-pipelines.core/Documentation~/index.md b/Packages/com.unity.render-pipelines.core/Documentation~/index.md index ddf86b8c2c4..d1724a3269f 100644 --- a/Packages/com.unity.render-pipelines.core/Documentation~/index.md +++ b/Packages/com.unity.render-pipelines.core/Documentation~/index.md @@ -1,7 +1,5 @@ # SRP Core -![](https://blogs.unity3d.com/wp-content/uploads/2018/01/image5_rs.png) - The Scriptable Render Pipeline (SRP) is a Unity feature that allows you to write C# scripts to control the way Unity renders each frame. SRP Core is a package that makes it easier to create or customize an SRP. SRP Core contains reusable code, including boilerplate code for working with platform-specific graphics APIs, utility functions for common rendering operations, and the shader libraries used in the High Definition Render Pipeline (HDRP) and Universal Render Pipeline (URP). diff --git a/Packages/com.unity.render-pipelines.core/Documentation~/srp-callbacks-reference.md b/Packages/com.unity.render-pipelines.core/Documentation~/srp-callbacks-reference.md new file mode 100644 index 00000000000..1d810b7fc6f --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Documentation~/srp-callbacks-reference.md @@ -0,0 +1,12 @@ +# Scriptable Render Pipeline callbacks reference + +When working with SRP, use these to make Unity call your C# code at specific times. + +* [RenderPipeline.Render](xref:UnityEngine.Rendering.RenderPipeline.Render(UnityEngine.Rendering.ScriptableRenderContext,UnityEngine.Camera[])) is the main entry point to the SRP. Unity calls this method automatically. If you are writing a custom render pipeline, this is where you begin to write your code. 
+* The [RenderPipelineManager](xref:UnityEngine.Rendering.RenderPipelineManager) class has the following events that you can subscribe to, so that you can execute custom code at specific points in the render loop: + * [beginFrameRendering](xref:UnityEngine.Rendering.RenderPipeline.BeginFrameRendering(UnityEngine.Rendering.ScriptableRenderContext,UnityEngine.Camera[])) - **Note:** This can generate garbage. Use `beginContextRendering` instead. + * [endFrameRendering](xref:UnityEngine.Rendering.RenderPipeline.EndFrameRendering(UnityEngine.Rendering.ScriptableRenderContext,UnityEngine.Camera[])) - **Note:** This can generate garbage. Use `endContextRendering` instead. + * [beginContextRendering](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.RenderPipelineManager-beginContextRendering.html) + * [endContextRendering](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.RenderPipelineManager-endContextRendering.html) + * [beginCameraRendering](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.RenderPipelineManager-beginCameraRendering.html) + * [endCameraRendering](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.RenderPipelineManager-endCameraRendering.html) \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.core/Documentation~/srp-creating-render-pipeline-asset-and-render-pipeline-instance.md b/Packages/com.unity.render-pipelines.core/Documentation~/srp-creating-render-pipeline-asset-and-render-pipeline-instance.md new file mode 100644 index 00000000000..0a3479951b0 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Documentation~/srp-creating-render-pipeline-asset-and-render-pipeline-instance.md @@ -0,0 +1,122 @@ +--- +uid: um-srp-creating-render-pipeline-asset-and-render-pipeline-instance +--- + +# Create a Render Pipeline Asset and Render Pipeline Instance in a custom render pipeline + +If you are creating your own render pipeline based on 
the Scriptable Render Pipeline (SRP), your Project must contain: + +* A script that inherits from [RenderPipelineAsset](xref:UnityEngine.Rendering.RenderPipelineAsset) and overrides its `CreatePipeline()` method. This script defines your Render Pipeline Asset. +* A script that inherits from [RenderPipeline](xref:UnityEngine.Rendering.RenderPipeline), and overrides its `Render()` method. This script defines your Render Pipeline Instance, and is where you write your custom rendering code. +* A Render Pipeline Asset that you have created from your [RenderPipelineAsset](xref:UnityEngine.Rendering.RenderPipelineAsset) script. This asset acts as a factory class for your Render Pipeline Instance. + +Because these elements are so closely related, you should create them at the same time. + +## Creating a basic Render Pipeline Asset and Render Pipeline Instance + +The following example shows how to create a script for a basic custom Render Pipeline Asset that instantiates the Render Pipeline Instance, a script that defines the Render Pipeline Instance, and the Render Pipeline Asset itself. + +1. Create a C# script called _ExampleRenderPipelineAsset.cs_. + +2. Copy and paste the following code into the new script: + + ```lang-csharp + using UnityEngine; + using UnityEngine.Rendering; + + // The CreateAssetMenu attribute lets you create instances of this class in the Unity Editor. + [CreateAssetMenu(menuName = "Rendering/ExampleRenderPipelineAsset")] + public class ExampleRenderPipelineAsset : RenderPipelineAsset + { + // Unity calls this method before rendering the first frame. + // If a setting on the Render Pipeline Asset changes, Unity destroys the current Render Pipeline Instance and calls this method again before rendering the next frame. + protected override RenderPipeline CreatePipeline() { + // Instantiate the Render Pipeline that this custom SRP uses for rendering. + return new ExampleRenderPipelineInstance(); + } + } + ``` + +3. 
Create a C# script called _ExampleRenderPipelineInstance.cs_. + +4. Copy and paste the following code into the new script: + + + ```lang-csharp + using UnityEngine; + using UnityEngine.Rendering; + + public class ExampleRenderPipelineInstance : RenderPipeline + { + public ExampleRenderPipelineInstance() { + } + + protected override void Render (ScriptableRenderContext context, Camera[] cameras) { + // This is where you can write custom rendering code. Customize this method to customize your SRP. + } + } + ``` + +5. In the Project view, either click the add (+) button, or open the context menu and navigate to **Create**, and then choose **Rendering** > **Example Render Pipeline Asset**. Unity creates a new Render Pipeline Asset in the Project view. + +## Creating a configurable Render Pipeline Asset and Render Pipeline Instance + +By default, a Render Pipeline Asset stores information about which Render Pipeline Instance to use for rendering, and the default Materials and Shaders to use in the Editor. In your `RenderPipelineAsset` script, you can extend your Render Pipeline Asset so that it stores additional data, and you can have multiple different Render Pipeline Assets with different configurations in your Project. For example, you might use a Render Pipeline Asset to hold configuration data for each different tier of hardware. The High Definition Render Pipeline (HDRP) and the Universal Render Pipeline (URP) include examples of this. + +The following example shows how to create a `RenderPipelineAsset` script that defines a Render Pipeline Asset with public data that you can set for each instance using the Inspector, and a Render Pipeline Instance that receives a Render Pipeline Asset in its constructor and uses data from that Render Pipeline Asset. + +1. Create a C# script called _ExampleRenderPipelineAsset.cs_. + +2. 
Copy and paste the following code into the new script: + + ```lang-csharp + using UnityEngine; + using UnityEngine.Rendering; + + // The CreateAssetMenu attribute lets you create instances of this class in the Unity Editor. + [CreateAssetMenu(menuName = "Rendering/ExampleRenderPipelineAsset")] + public class ExampleRenderPipelineAsset : RenderPipelineAsset + { + // This data can be defined in the Inspector for each Render Pipeline Asset + public Color exampleColor; + public string exampleString; + + // Unity calls this method before rendering the first frame. + // If a setting on the Render Pipeline Asset changes, Unity destroys the current Render Pipeline Instance and calls this method again before rendering the next frame. + protected override RenderPipeline CreatePipeline() { + // Instantiate the Render Pipeline that this custom SRP uses for rendering, and pass a reference to this Render Pipeline Asset. + // The Render Pipeline Instance can then access the configuration data defined above. + return new ExampleRenderPipelineInstance(this); + } + } + ``` + +3. Create a C# script called _ExampleRenderPipelineInstance.cs_. + +4. Copy and paste the following code into the new script: + + ```lang-csharp + using UnityEngine; + using UnityEngine.Rendering; + + public class ExampleRenderPipelineInstance : RenderPipeline + { + // Use this variable to a reference to the Render Pipeline Asset that was passed to the constructor + private ExampleRenderPipelineAsset renderPipelineAsset; + + // The constructor has an instance of the ExampleRenderPipelineAsset class as its parameter. + public ExampleRenderPipelineInstance(ExampleRenderPipelineAsset asset) { + renderPipelineAsset = asset; + } + + protected override void Render(ScriptableRenderContext context, Camera[] cameras) { + // This is an example of using the data from the Render Pipeline Asset. + Debug.Log(renderPipelineAsset.exampleString); + + // This is where you can write custom rendering code. 
Customize this method to customize your SRP. + } + } + + ``` + +5. In the Project view, either click the add (+) button, or open the context menu and navigate to **Create**, and then choose **Rendering** > **Example Render Pipeline Asset**. Unity creates a new Render Pipeline Asset in the Project view. \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.core/Documentation~/srp-creating-simple-render-loop.md b/Packages/com.unity.render-pipelines.core/Documentation~/srp-creating-simple-render-loop.md new file mode 100644 index 00000000000..36a6e2f319d --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Documentation~/srp-creating-simple-render-loop.md @@ -0,0 +1,279 @@ +--- +uid: um-srp-creating-simple-render-loop +--- + +# Create a simple render loop in a custom render pipeline + +A render loop is the term for all of the rendering operations that take place in a single frame. This page contains information on creating a simple render loop in a custom render pipeline that is based on Unity's Scriptable Render Pipeline. + +The code examples on this page demonstrate the basic principles of using the Scriptable Render Pipeline. You can use this information to build your own custom Scriptable Render Pipeline, or to understand how Unity's prebuilt Scriptable Render Pipelines work. + +## Preparing your project + +Before you begin writing the code for your render loop, you must prepare your project. + +The steps are as follows: + +1. [Create an SRP-compatible shader](#creating-unity-shader). +2. [Create one or more GameObjects to render](#creating-gameobject). +3. [Create the basic structure of your custom SRP](#creating-srp). +4. *Optional:* If you plan to extend your simple custom SRP to add more complex functionality, install the SRP Core package. 
The SRP Core package includes the [SRP Core shader library](https://docs.unity3d.com/Packages/com.unity.render-pipelines.core@17.0/api/index.html) (which you can use to make your shaders SRP Batcher compatible), and utility functions for common operations. + + + +### Creating an SRP-compatible shader + +In the Scriptable Render Pipeline, you use the `LightMode` Pass tag to determine how to draw geometry. For more information on Pass tags, see [ShaderLab: assigning tags to a Pass](https://docs.unity3d.com/6000.0/Documentation/Manual/SL-PassTags). + +This task shows you how to create a very simple unlit Shader object with a LightMode Pass tag value of `ExampleLightModeTag`. + +1. Create a new shader asset in your project. For instructions on creating a shader asset, see [Shader assets](https://docs.unity3d.com/6000.0/Documentation/Manual/class-Shader). +2. In your Project view, double click the shader asset to open the shader source code in a text editor. +3. Replace the existing code with the following: + +``` +// This defines a simple unlit Shader object that is compatible with a custom Scriptable Render Pipeline. +// It applies a hardcoded color, and demonstrates the use of the LightMode Pass tag. +// It is not compatible with SRP Batcher. 
+ +Shader "Examples/SimpleUnlitColor" +{ + SubShader + { + Pass + { + // The value of the LightMode Pass tag must match the ShaderTagId in ScriptableRenderContext.DrawRenderers + Tags { "LightMode" = "ExampleLightModeTag"} + + HLSLPROGRAM + #pragma vertex vert + #pragma fragment frag + + float4x4 unity_MatrixVP; + float4x4 unity_ObjectToWorld; + + struct Attributes + { + float4 positionOS : POSITION; + }; + + struct Varyings + { + float4 positionCS : SV_POSITION; + }; + + Varyings vert (Attributes IN) + { + Varyings OUT; + float4 worldPos = mul(unity_ObjectToWorld, IN.positionOS); + OUT.positionCS = mul(unity_MatrixVP, worldPos); + return OUT; + } + + float4 frag (Varyings IN) : SV_TARGET + { + return float4(0.5,1,0.5,1); + } + ENDHLSL + } + } +} +``` + + + +### Creating a GameObject to render + +To test that your render loop works, you must create something to render. This task shows you how to put GameObjects in your scene that use the SRP-compatible shader that you created in the previous task. + +1. Create a new material asset in your Unity project. For instructions see [Materials](https://docs.unity3d.com/6000.0/Documentation/Manual/class-Material). +2. Assign the shader asset to the material asset. For instructions, see [Materials](https://docs.unity3d.com/6000.0/Documentation/Manual/class-Material). +3. Create a cube in your scene. For instructions, see [Primitive objects](https://docs.unity3d.com/6000.0/Documentation/Manual/PrimitiveObjects). +4. Assign the material to it. For instructions, see [Materials](https://docs.unity3d.com/6000.0/Documentation/Manual/class-Material). + + + +### Creating the basic structure of your custom SRP + +The final stage of preparation is to create the basic source files needed for your custom SRP, and tell Unity to begin rendering using the custom SRP. + +1. 
Create a class that inherits from `RenderPipeline` and a compatible Render Pipeline Asset, following the instructions in [Creating a Render Pipeline Instance and Render Pipeline Asset](srp-creating-render-pipeline-asset-and-render-pipeline-instance.md) +2. Set the active Render Pipeline Asset, following the instructions in [How to get, set, and configure the active render pipeline](https://docs.unity3d.com/6000.0/Documentation/Manual/srp-setting-render-pipeline-asset.html). Unity will begin rendering using the custom SRP immediately, which means that your Scene view and Game view will be blank until you add code to your custom SRP. + +## Creating the render loop + +In a simple render loop, the basic operations are: + +* [Clearing the render target](#clearing), which means removing the geometry that was drawn during the last frame. +* [Culling](#culling), which means filtering out geometry that is not visible to a Camera. +* [Drawing](#drawing), which means telling the GPU what geometry to draw, and how to draw it. + + + +### Clearing the render target + +Clearing means removing the things that were drawn during the last frame. The render target is usually the screen; however, you can also render to textures to create a "picture in picture" effect. These examples demonstrate how to render to the screen, which is Unity's default behavior. + +To clear the render target in the Scriptable Render Pipeline, you do the following: + +1. Configure a `CommandBuffer` with a `Clear` command. +2. Add the `CommandBuffer` to the queue of commands on the `ScriptableRenderContext`; to do this, call [ScriptableRenderContext.ExecuteCommandBuffer](xref:UnityEngine.Rendering.ScriptableRenderContext.ExecuteCommandBuffer(UnityEngine.Rendering.CommandBuffer)). +3. Instruct the graphics API to perform the queue of commands on the `ScriptableRenderContext`; to do this, call [ScriptableRenderContext.Submit](xref:UnityEngine.Rendering.ScriptableRenderContext.Submit). 
+ +As with all Scriptable Render Pipeline operations, you use the [RenderPipeline.Render](xref:UnityEngine.Rendering.RenderPipeline.Render(UnityEngine.Rendering.ScriptableRenderContext,UnityEngine.Camera[])) method as the entry point for this code. This example code demonstrates how to do this: + +```lang-csharp +/* +This is a simplified example of a custom Scriptable Render Pipeline. +It demonstrates how a basic render loop works. +It shows the clearest workflow, rather than the most efficient runtime performance. +*/ + +using UnityEngine; +using UnityEngine.Rendering; + +public class ExampleRenderPipeline : RenderPipeline { + public ExampleRenderPipeline() { + } + + protected override void Render (ScriptableRenderContext context, Camera[] cameras) { + // Create and schedule a command to clear the current render target + var cmd = new CommandBuffer(); + cmd.ClearRenderTarget(true, true, Color.black); + context.ExecuteCommandBuffer(cmd); + cmd.Release(); + + // Instruct the graphics API to perform all scheduled commands + context.Submit(); + } +} +``` + + + +### Culling + +Culling is the process of filtering out geometry that is not visible to a Camera. + +To cull in the Scriptable Render Pipeline, you do the following: + +1. Populate a [ScriptableCullingParameters](xref:UnityEngine.Rendering.ScriptableCullingParameters) struct with data about a Camera; to do this, call [Camera.TryGetCullingParameters](xref:UnityEngine.Camera.TryGetCullingParameters(UnityEngine.Rendering.ScriptableCullingParameters&)). +2. Optional: Manually update the values of the `ScriptableCullingParameters` struct. +3. Call [ScriptableRenderContext.Cull](xref:UnityEngine.Rendering.ScriptableRenderContext.Cull(UnityEngine.Rendering.ScriptableCullingParameters&)), and store the results in a `CullingResults` struct. 
+ +This example code extends the example above, and demonstrates how to clear the render target and then perform a culling operation: + +```lang-csharp +/* +This is a simplified example of a custom Scriptable Render Pipeline. +It demonstrates how a basic render loop works. +It shows the clearest workflow, rather than the most efficient runtime performance. +*/ + +using UnityEngine; +using UnityEngine.Rendering; + +public class ExampleRenderPipeline : RenderPipeline { + public ExampleRenderPipeline() { + } + + protected override void Render (ScriptableRenderContext context, Camera[] cameras) { + // Create and schedule a command to clear the current render target + var cmd = new CommandBuffer(); + cmd.ClearRenderTarget(true, true, Color.black); + context.ExecuteCommandBuffer(cmd); + cmd.Release(); + + // Iterate over all Cameras + foreach (Camera camera in cameras) + { + // Get the culling parameters from the current Camera + camera.TryGetCullingParameters(out var cullingParameters); + + // Use the culling parameters to perform a cull operation, and store the results + var cullingResults = context.Cull(ref cullingParameters); + } + + // Instruct the graphics API to perform all scheduled commands + context.Submit(); + } +} +``` + + + +### Drawing + +Drawing is the process of instructing the graphics API to draw a given set of geometry with given settings. + +To draw in SRP, you do the following: + +1. Perform a culling operation, as described above, and store the results in a `CullingResults` struct. +2. Create and configure [FilteringSettings](xref:UnityEngine.Rendering.FilteringSettings) struct, which describes how to filter the culling results. +3. Create and configure a [DrawingSettings](xref:UnityEngine.Rendering.DrawingSettings) struct, which describes which geometry to draw and how to draw it. +4. *Optional*: By default, Unity sets the render state based on the Shader object. 
If you want to override the render state for some or all of the geometry that you are about to draw, you can use a [RenderStateBlock](xref:UnityEngine.Rendering.RenderStateBlock) struct to do this. +5. Call [ScriptableRenderContext.DrawRenderers](xref:UnityEngine.Rendering.ScriptableRenderContext.DrawRenderers(UnityEngine.Rendering.CullingResults,UnityEngine.Rendering.DrawingSettings&,UnityEngine.Rendering.FilteringSettings&)), and pass the structs that you created as parameters. Unity draws the filtered set of geometry, according to the settings. + +This example code builds on the examples above, and demonstrates how to clear the render target, perform a culling operation, and draw the resulting geometry: + +```lang-csharp +/* +This is a simplified example of a custom Scriptable Render Pipeline. +It demonstrates how a basic render loop works. +It shows the clearest workflow, rather than the most efficient runtime performance. +*/ + +using UnityEngine; +using UnityEngine.Rendering; + +public class ExampleRenderPipeline : RenderPipeline { + public ExampleRenderPipeline() { + } + + protected override void Render (ScriptableRenderContext context, Camera[] cameras) { + // Create and schedule a command to clear the current render target + var cmd = new CommandBuffer(); + cmd.ClearRenderTarget(true, true, Color.black); + context.ExecuteCommandBuffer(cmd); + cmd.Release(); + + // Iterate over all Cameras + foreach (Camera camera in cameras) + { + // Get the culling parameters from the current Camera + camera.TryGetCullingParameters(out var cullingParameters); + + // Use the culling parameters to perform a cull operation, and store the results + var cullingResults = context.Cull(ref cullingParameters); + + // Update the value of built-in shader variables, based on the current Camera + context.SetupCameraProperties(camera); + + // Tell Unity which geometry to draw, based on its LightMode Pass tag value + ShaderTagId shaderTagId = new ShaderTagId("ExampleLightModeTag"); + + 
// Tell Unity how to sort the geometry, based on the current Camera + var sortingSettings = new SortingSettings(camera); + + // Create a DrawingSettings struct that describes which geometry to draw and how to draw it + DrawingSettings drawingSettings = new DrawingSettings(shaderTagId, sortingSettings); + + // Tell Unity how to filter the culling results, to further specify which geometry to draw + // Use FilteringSettings.defaultValue to specify no filtering + FilteringSettings filteringSettings = FilteringSettings.defaultValue; + + // Schedule a command to draw the geometry, based on the settings you have defined + context.DrawRenderers(cullingResults, ref drawingSettings, ref filteringSettings); + + // Schedule a command to draw the Skybox if required + if (camera.clearFlags == CameraClearFlags.Skybox && RenderSettings.skybox != null) + { + context.DrawSkybox(camera); + } + + // Instruct the graphics API to perform all scheduled commands + context.Submit(); + } + } +} +``` diff --git a/Packages/com.unity.render-pipelines.core/Documentation~/srp-custom-getting-started.md b/Packages/com.unity.render-pipelines.core/Documentation~/srp-custom-getting-started.md new file mode 100644 index 00000000000..79d91be72fb --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Documentation~/srp-custom-getting-started.md @@ -0,0 +1,40 @@ +--- +uid: um-srp-custom-getting-started +--- + +# Create a custom render pipeline + +This page contains information on how to get started with creating your own custom render pipeline based on the Scriptable Render Pipeline (SRP). + + +## Creating a new project and installing the packages needed for a custom render pipeline + +These instructions show you how to create a custom render pipeline using the SRP Core package. 
SRP Core is a package made by Unity that contains reusable code to help you make your own render pipeline, including boilerplate code for working with platform-specific graphics APIs, utility functions for common rendering operations, and the shader library that URP and HDRP use. + +1. Create a new Unity Project. +2. Use Git to create a clone of the [SRP source code repository](https://github.com/Unity-Technologies/Graphics). You can place the SRP source code in any location on your disk, as long as it is not in one of the [reserved Project sub-folders](https://docs.unity3d.com/6000.0/Documentation/Manual/upm-ui-local.html#PkgLocation). +3. Use Git to update your copy of the SRP source code to a branch that is compatible with your version of the Unity Editor. Read [Using the latest version](https://github.com/Unity-Technologies/Graphics#branches-and-package-releases) in the SRP repository documentation for information on branches and versions. +4. Open your Project in Unity, and install the following packages from the SRP source code folder on your disk, in the following order. For information on installing packages from disk, see [Installing a package from a local folder](https://docs.unity3d.com/6000.0/Documentation/Manual/upm-ui-local.html). + * _com.unity.render-pipelines.core_. + * Optional: _com.unity.render-pipelines.shadergraph_. Install this package if you intend to use Shader Graph or modify the Shader Graph source code as part of your custom SRP. + * Optional: _com.unity.render-pipelines.visualeffectgraph_. Install this package if you intend to use Visual Effect Graph or modify the Visual Effect Graph source code as part of your custom SRP. + +You can now debug and modify the scripts in your copy of the SRP source code, and see the results of your changes in your Unity Project. 
## Creating a custom version of URP or HDRP + +The Universal Render Pipeline (URP) and the High Definition Render Pipeline (HDRP) offer extensive customization options to help you achieve the graphics and performance you need. However, if you want even more control, you can create a custom version of one of these render pipelines, and modify the source code. + +Follow steps 1-3 in the section above, **Creating a new Project and installing the packages needed for a custom SRP**. When you reach step 4, install the following packages in the following order: + +**URP:** + +* _com.unity.render-pipelines.core_ +* _com.unity.render-pipelines.shadergraph_ +* _com.unity.render-pipelines.universal_ + +**HDRP:** + +* _com.unity.render-pipelines.core_ +* _com.unity.render-pipelines.shadergraph_ +* _com.unity.render-pipelines.high-definition_ \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.core/Documentation~/srp-custom.md b/Packages/com.unity.render-pipelines.core/Documentation~/srp-custom.md new file mode 100644 index 00000000000..bb126137f62 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Documentation~/srp-custom.md @@ -0,0 +1,19 @@ +--- +uid: um-srp-custom +--- + +# Creating a custom render pipeline + +Unity provides two prebuilt render pipelines based on the Scriptable Render Pipeline (SRP): the High Definition Render Pipeline (HDRP), and the Universal Render Pipeline (URP). HDRP and URP offer extensive customization options; however, if you want even more control over your rendering pipeline, you can create your own custom render pipeline based on SRP. + +| **Page** | **Description** | +| --- | --- | +| [Create a custom Scriptable Render Pipeline](srp-custom-getting-started.md) | Install the packages needed for a custom render pipeline based on SRP, or create a custom version of URP or HDRP. 
| [Create a Render Pipeline Asset and Render Pipeline Instance in a custom render pipeline](srp-creating-render-pipeline-asset-and-render-pipeline-instance.md) | Create scripts that inherit from `RenderPipelineAsset` and `RenderPipeline`, then create a Render Pipeline Asset. | +| [Create a simple render loop in the Scriptable Render Pipeline](srp-creating-simple-render-loop.md) | Create a simple loop to clear the render target, perform a culling operation, and draw geometry. | +| [Extend a Scriptable Render Pipeline with command buffers or API calls](srp-using-scriptable-render-context.md) | Use the `ScriptableRenderContext` API to configure and schedule rendering commands. | +| [Scriptable Render Pipeline callbacks reference](srp-callbacks-reference.md) | Learn about the callbacks you can use to call your C# code at specific times. | + +## Additional resources + +- [Render pipelines](https://docs.unity3d.com/6000.0/Documentation/Manual/render-pipelines.html) diff --git a/Packages/com.unity.render-pipelines.core/Documentation~/srp-using-scriptable-render-context.md b/Packages/com.unity.render-pipelines.core/Documentation~/srp-using-scriptable-render-context.md new file mode 100644 index 00000000000..dee775b6564 --- /dev/null +++ b/Packages/com.unity.render-pipelines.core/Documentation~/srp-using-scriptable-render-context.md @@ -0,0 +1,54 @@ +--- +uid: um-srp-using-scriptable-render-context +--- + +# Execute rendering commands in a custom render pipeline + +This page explains how to schedule and execute rendering commands in the Scriptable Render Pipeline (SRP), either by using CommandBuffers or by making direct API calls to the ScriptableRenderContext. The information on this page is applicable to the Universal Render Pipeline (URP), the High Definition Render Pipeline (HDRP), and custom render pipelines that are based on SRP. + +In SRP, you use C# scripts to configure and schedule rendering commands. 
You then tell Unity's low-level graphics architecture to execute them, which sends instructions to the graphics API. + +The main way of doing this is by making API calls to the ScriptableRenderContext, but you can also execute CommandBuffers immediately. + +## Using the ScriptableRenderContext APIs + +In SRP, the ScriptableRenderContext class acts as an interface between the C# render pipeline code and Unity's low-level graphics code. SRP rendering works using delayed execution; you use the ScriptableRenderContext to build up a list of rendering commands, and then you tell Unity to execute them. Unity's low-level graphics architecture then sends instructions to the graphics API. + +To schedule rendering commands, you can: + +* Pass [CommandBuffers](xref:UnityEngine.Rendering.CommandBuffer) to the ScriptableRenderContext, using [ScriptableRenderContext.ExecuteCommandBuffer](xref:UnityEngine.Rendering.ScriptableRenderContext.ExecuteCommandBuffer(UnityEngine.Rendering.CommandBuffer)) +* Make direct API calls to the Scriptable Render Context, such as [ScriptableRenderContext.Cull](xref:UnityEngine.Rendering.ScriptableRenderContext.Cull(UnityEngine.Rendering.ScriptableCullingParameters&)) or [ScriptableRenderContext.DrawRenderers](xref:UnityEngine.Rendering.ScriptableRenderContext.DrawRenderers(UnityEngine.Rendering.CullingResults,UnityEngine.Rendering.DrawingSettings&,UnityEngine.Rendering.FilteringSettings&)) + +To tell Unity to perform the commands that you have scheduled, call [ScriptableRenderContext.Submit](xref:UnityEngine.Rendering.ScriptableRenderContext.Submit). Note that it does not matter whether you used a CommandBuffer to schedule the command, or whether you scheduled the command by calling an API; Unity schedules all rendering commands on the ScriptableRenderContext in the same way, and does not execute any of them until you call `Submit()`. 
+ +This example code demonstrates how to schedule and perform a command to clear the current render target, using a CommandBuffer. + +```lang-csharp +using UnityEngine; +using UnityEngine.Rendering; + +public class ExampleRenderPipeline : RenderPipeline +{ + public ExampleRenderPipeline() { + } + + protected override void Render(ScriptableRenderContext context, Camera[] cameras) { + // Create and schedule a command to clear the current render target + var cmd = new CommandBuffer(); + cmd.ClearRenderTarget(true, true, Color.red); + context.ExecuteCommandBuffer(cmd); + cmd.Release(); + + // Tell the Scriptable Render Context to tell the graphics API to perform the scheduled commands + context.Submit(); + } +} +``` + +## Executing CommandBuffers immediately + +You can execute CommandBuffers immediately without using the ScriptableRenderContext, by calling [Graphics.ExecuteCommandBuffer](xref:UnityEngine.Graphics.ExecuteCommandBuffer(UnityEngine.Rendering.CommandBuffer)). Calls to this API take place outside of the render pipeline. + +## Additional information + +For more information on commands that you can schedule using CommandBuffers, see [CommandBuffers API documentation](xref:UnityEngine.Rendering.CommandBuffer). diff --git a/Packages/com.unity.render-pipelines.core/Editor/Debugging/DebugState.cs b/Packages/com.unity.render-pipelines.core/Editor/Debugging/DebugState.cs index 97f92fa444b..0c7e971bc50 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Debugging/DebugState.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Debugging/DebugState.cs @@ -237,7 +237,7 @@ public override void SetValue(object value, DebugUI.IValueField field) /// /// Unsigned Integer Debug State. 
/// - [Serializable, DebugState(typeof(DebugUI.UIntField))] + [Serializable, DebugState(typeof(DebugUI.UIntField), typeof(DebugUI.MaskField))] public sealed class DebugStateUInt : DebugState { } /// diff --git a/Packages/com.unity.render-pipelines.core/Editor/Debugging/DebugUIDrawer.Builtins.cs b/Packages/com.unity.render-pipelines.core/Editor/Debugging/DebugUIDrawer.Builtins.cs index 41cb26d9125..cb89a14027a 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Debugging/DebugUIDrawer.Builtins.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Debugging/DebugUIDrawer.Builtins.cs @@ -399,6 +399,33 @@ protected override Enum DoGUI(Rect rect, GUIContent label, DebugUI.BitField fiel } + /// + /// Builtin Drawer for Maskfield Debug Items. + /// + [DebugUIDrawer(typeof(DebugUI.MaskField))] + public sealed class DebugUIDrawerMaskField : DebugUIFieldDrawer + { + /// + /// Does the field of the given type + /// + /// The rect to draw the field + /// The label for the field + /// The field + /// The state + /// The current value from the UI + protected override uint DoGUI(Rect rect, GUIContent label, DebugUI.MaskField field, DebugStateUInt state) + { + uint value = field.GetValue(); + + var enumNames = new string[field.enumNames.Length]; + for (int i = 0; i < enumNames.Length; i++) + enumNames[i] = field.enumNames[i].text; + var mask = EditorGUI.MaskField(rect, label, (int)value, enumNames); + + return (uint)mask; + } + } + /// /// Builtin Drawer for Foldout Debug Items. 
/// diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.LightTransport.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.LightTransport.cs index 0f238494b50..a0be9e1a2b9 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.LightTransport.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.LightTransport.cs @@ -367,29 +367,28 @@ public static BakeContext New(InputExtraction.BakeInput input, NativeArray RunPlacement() + static NativeList RunPlacement(ref bool canceledByUser) { // Overwrite loaded settings with data from profile. Note that the m_BakingSet.profile is already patched up if isFreezingPlacement float prevBrickSize = ProbeReferenceVolume.instance.MinBrickSize(); @@ -142,12 +142,15 @@ static NativeList RunPlacement() // Run subdivision ProbeSubdivisionResult result; using (new BakingSetupProfiling(BakingSetupProfiling.Stages.BakeBricks)) - result = GetWorldSubdivision(); + result = GetWorldSubdivision(ref canceledByUser); + + if (canceledByUser) + return new NativeList(Allocator.Temp); // Compute probe positions NativeList positions; using (new BakingSetupProfiling(BakingSetupProfiling.Stages.ApplySubdivisionResults)) - positions = ApplySubdivisionResults(result); + positions = ApplySubdivisionResults(result, ref canceledByUser); // Restore loaded asset settings ProbeReferenceVolume.instance.SetSubdivisionDimensions(prevBrickSize, prevMaxSubdiv, prevOffset); @@ -155,25 +158,31 @@ static NativeList RunPlacement() return positions; } - static ProbeSubdivisionResult GetWorldSubdivision() + static ProbeSubdivisionResult GetWorldSubdivision(ref bool canceledByUser) { if (isFreezingPlacement) return GetBricksFromLoaded(); var ctx = PrepareProbeSubdivisionContext(); - return BakeBricks(ctx, m_BakingBatch.contributors); + return BakeBricks(ctx, m_BakingBatch.contributors, ref canceledByUser); } - static 
NativeList ApplySubdivisionResults(ProbeSubdivisionResult results) + static NativeList ApplySubdivisionResults(ProbeSubdivisionResult results, ref bool canceledByUser) { - int cellIdx = 0, freq = 10; // Don't refresh progress bar at every iteration because it's slow + int cellIdx = 0, freq = 10; BakingSetupProfiling.GetProgressRange(out float progress0, out float progress1); var positions = new NativeList(Allocator.Persistent); foreach ((var position, var bounds, var bricks) in results.cells) { - if (++cellIdx % freq == 0) - EditorUtility.DisplayProgressBar("Baking Probe Volumes", $"Subdividing cell {cellIdx} out of {results.cells.Count}", Mathf.Lerp(progress0, progress1, cellIdx / (float)results.cells.Count)); + if (cellIdx++ % freq == 0) // Don't refresh progress bar at every iteration because it's slow + { + if (EditorUtility.DisplayCancelableProgressBar("Baking Probe Volumes", $"Subdividing cell {cellIdx} out of {results.cells.Count}", Mathf.Lerp(progress0, progress1, cellIdx / (float)results.cells.Count))) + { + canceledByUser = true; + return positions; + } + } int positionStart = positions.Length; @@ -282,18 +291,30 @@ static internal ProbeSubdivisionContext PrepareProbeSubdivisionContext(bool live return ctx; } - static internal ProbeSubdivisionResult BakeBricks(ProbeSubdivisionContext ctx, in GIContributors contributors) + static internal ProbeSubdivisionResult BakeBricks(ProbeSubdivisionContext ctx, in GIContributors contributors, ref bool canceledByUser) { var result = new ProbeSubdivisionResult(); if (ctx.probeVolumes.Count == 0) return result; + int cellIdx = 0, freq = 100; + BakingSetupProfiling.GetProgressRange(out float progress0, out float progress1); + using (var gpuResources = ProbePlacement.AllocateGPUResources(ctx.probeVolumes.Count, ctx.profile)) { // subdivide all the cells and generate brick positions foreach (var cell in ctx.cells) { + if (cellIdx++ % freq == 0) // Don't refresh progress bar at every iteration because it's slow + { + if 
(EditorUtility.DisplayCancelableProgressBar("Generating Probe Volume Bricks", $"Processing cell {cellIdx} out of {ctx.cells.Count}", Mathf.Lerp(progress0, progress1, cellIdx / (float)ctx.cells.Count))) + { + canceledByUser = true; + return new ProbeSubdivisionResult(); + } + } + var scenesInCell = new HashSet(); // Calculate overlaping probe volumes to avoid unnecessary work diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.cs index 25b6190c4e3..86e259d05be 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeGIBaking.cs @@ -910,18 +910,23 @@ internal static bool PrepareBaking() return false; s_AdjustmentVolumes = GetAdjustementVolumes(); - requests = AdditionalGIBakeRequestsManager.GetProbeNormalizationRequests(); + bool canceledByUser = false; // Note: this could be executed in the baking delegate to be non blocking using (new BakingSetupProfiling(BakingSetupProfiling.Stages.PlaceProbes)) - positions = RunPlacement(); + positions = RunPlacement(ref canceledByUser); - if (positions.Length == 0) + if (positions.Length == 0 || canceledByUser) { positions.Dispose(); + Clear(); CleanBakeData(); + + if (canceledByUser) + Lightmapping.Cancel(); + return false; } } @@ -1311,7 +1316,7 @@ static void FixSeams(NativeArray positionRemap, NativeArray positi { uint renderingLayerMask = renderingLayerMasks[positionRemap[index]]; bool commonRenderingLayer = (renderingLayerMask & probeRenderingLayerMask) != 0; - if (!commonRenderingLayer) continue; // We do not use this probe contribution if it does not share at least a common rendering layer + if (!commonRenderingLayer) continue; // We do not use this probe contribution if it does not share at least a common rendering layer } // Do the lerp in compressed format to match 
result on GPU diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeSubdivisionContext.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeSubdivisionContext.cs index b4c56aaa56d..cc2880ee44b 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeSubdivisionContext.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeSubdivisionContext.cs @@ -114,7 +114,8 @@ IEnumerator Subdivide() ctx.cells.Clear(); ctx.cells.Add(cell); - var result = AdaptiveProbeVolumes.BakeBricks(ctx, contributors); + bool canceledByUser = false; + var result = AdaptiveProbeVolumes.BakeBricks(ctx, contributors, ref canceledByUser); if (result.cells.Count != 0) ProbeReferenceVolume.instance.realtimeSubdivisionInfo[cell.bounds] = result.cells[0].bricks; diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBuildProcessor.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBuildProcessor.cs index f81326f32b3..65fbc5026a8 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBuildProcessor.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeBuildProcessor.cs @@ -1,12 +1,14 @@ using System.IO; using System.Collections.Generic; using UnityEditor.Build; +using UnityEditor.Build.Reporting; using UnityEngine; using UnityEngine.Rendering; +using UnityEngine.SceneManagement; namespace UnityEditor.Rendering { - class ProbeVolumeBuildProcessor : BuildPlayerProcessor + class ProbeVolumeBuildProcessor : BuildPlayerProcessor, IProcessSceneWithReport { const string kTempAPVStreamingAssetsPath = "TempAPVStreamingAssets"; @@ -35,11 +37,11 @@ void CopyStreamableAsset(ProbeVolumeStreamableAsset asset, string basePath) File.Copy(assetPath, Path.Combine(basePath, asset.assetGUID + ".bytes")); } - void GetProbeVolumeProjectSettings(BuildPlayerContext 
buildPlayerContext, out bool supportProbeVolume, out ProbeVolumeSHBands maxSHBands) + void GetProbeVolumeProjectSettings(BuildTarget target, out bool supportProbeVolume, out ProbeVolumeSHBands maxSHBands) { // Grab all assets used for the build. List srpAssets = new List(); - buildPlayerContext.BuildPlayerOptions.target.TryGetRenderPipelineAssets(srpAssets); + target.TryGetRenderPipelineAssets(srpAssets); maxSHBands = ProbeVolumeSHBands.SphericalHarmonicsL1; supportProbeVolume = false; @@ -60,7 +62,7 @@ void GetProbeVolumeProjectSettings(BuildPlayerContext buildPlayerContext, out bo public override void PrepareForBuild(BuildPlayerContext buildPlayerContext) { - GetProbeVolumeProjectSettings(buildPlayerContext, out bool supportProbeVolume, out var maxSHBands); + GetProbeVolumeProjectSettings(buildPlayerContext.BuildPlayerOptions.target, out bool supportProbeVolume, out var maxSHBands); if (!supportProbeVolume) return; @@ -131,5 +133,67 @@ public override void PrepareForBuild(BuildPlayerContext buildPlayerContext) buildPlayerContext.AddAdditionalPathToStreamingAssets(tempStreamingAssetsPath, AdaptiveProbeVolumes.kAPVStreamingAssetsPath); } + + private static bool IsBundleBuild(BuildReport report, bool isPlaying) + { + // We are entering playmode, so not building a bundle. + if (isPlaying) + return false; + + // Addressable builds do not provide a BuildReport. Because the Addressables package + // only supports AssetBundle builds, we infer that this is not a player build. + if (report == null) + return true; + + return report.summary.buildType == BuildType.AssetBundle; + } + + // This codepath handles the case of building asset bundles, i.e. not a full player build. It updates the references + // to individual data assets in the baking sets for each scene, such that the assets are included in the bundle. + public override int callbackOrder => 1; + public void OnProcessScene(Scene scene, BuildReport report) + { + // Only run for bundle builds. 
+ if (!IsBundleBuild(report, Application.isPlaying)) + return; + + // Only run when APV is enabled. + GetProbeVolumeProjectSettings(EditorUserBuildSettings.activeBuildTarget, out bool supportProbeVolume, out var maxSHBands); + if (!supportProbeVolume) + return; + + // Reload the map from scene to baking set if we couldn't find the specific baking set. + if (ProbeVolumeBakingSet.sceneToBakingSet == null || ProbeVolumeBakingSet.sceneToBakingSet.Count == 0) + ProbeVolumeBakingSet.SyncBakingSets(); + + // Get the baking set for the scene. + var bakingSet = ProbeVolumeBakingSet.GetBakingSetForScene(scene.GetGUID()); + if (bakingSet == null || !bakingSet.cellSharedDataAsset.IsValid()) + return; + + bool useStreamingAsset = !GraphicsSettings.GetRenderPipelineSettings().probeVolumeDisableStreamingAssets; + if (useStreamingAsset) + { + Debug.LogWarning( + "Attempted to build an Asset Bundle containing Adaptive Probe Volume data, but streaming assets are enabled. This is unsupported. " + + "To use Adaptive Probe Volumes with Asset Bundles, please check 'Probe Volume Disable Streaming Assets' under Graphics Settings."); + } + + // Update all the asset references. 
+ bakingSet.cellSharedDataAsset.UpdateAssetReference(useStreamingAsset); + bakingSet.cellBricksDataAsset.UpdateAssetReference(useStreamingAsset); + + bool stripSupportData = true; + if (!stripSupportData) + bakingSet.cellSupportDataAsset.UpdateAssetReference(false); + + foreach (var scenario in bakingSet.scenarios) + { + scenario.Value.cellDataAsset.UpdateAssetReference(useStreamingAsset); + if (maxSHBands == ProbeVolumeSHBands.SphericalHarmonicsL2) + scenario.Value.cellOptionalDataAsset.UpdateAssetReference(useStreamingAsset); + scenario.Value.cellProbeOcclusionDataAsset.UpdateAssetReference(useStreamingAsset); + } + } } } diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeEditor.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeEditor.cs index 968b6aa8377..20de07edc84 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeEditor.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeEditor.cs @@ -152,7 +152,7 @@ public override void OnInspectorGUI() drawInspector = false; } - if (!ProbeReferenceVolume.instance.isInitialized || !ProbeReferenceVolume.instance.enabledBySRP) + if (!ProbeVolumeGlobalSettingsStripper.ProbeVolumeSupportedForBuild()) { APVDisabledHelpBox(); drawInspector = false; diff --git a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeLightingTab.cs b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeLightingTab.cs index 12ab759b1b8..d2b6049bdeb 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeLightingTab.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Lighting/ProbeVolume/ProbeVolumeLightingTab.cs @@ -207,7 +207,7 @@ public override void OnDisable() EditorSceneManager.sceneOpened -= OnSceneOpened; // We keep allocated acceleration structures while the Lighting window is open in order to 
make subsequent bakes faster, but when the window closes we dispose of them - // Unless a bake is running, in which case we leave disposing to CleanBakeData() + // Unless a bake is running, in which case we leave disposing to CleanBakeData() if (!AdaptiveProbeVolumes.isRunning && !Lightmapping.isRunning) AdaptiveProbeVolumes.Dispose(); } @@ -869,7 +869,9 @@ internal static void BakeAPVButton() else { if (GUILayout.Button(Styles.generateAPV)) - AdaptiveProbeVolumes.BakeAsync(); + { + EditorApplication.delayCall += () => AdaptiveProbeVolumes.BakeAsync(); + } } } #endregion @@ -1010,7 +1012,7 @@ internal bool PrepareAPVBake() if (ProbeReferenceVolume.instance.supportLightingScenarios && !activeSet.m_LightingScenarios.Contains(activeSet.lightingScenario)) activeSet.SetActiveScenario(activeSet.m_LightingScenarios[0], false); - + // Layout has changed and is incompatible. if (activeSet.HasValidSharedData() && !activeSet.freezePlacement && !activeSet.CheckCompatibleCellLayout()) { diff --git a/Packages/com.unity.render-pipelines.core/Editor/Settings/PropertyDrawers/DefaultVolumeProfileSettingsPropertyDrawer.cs b/Packages/com.unity.render-pipelines.core/Editor/Settings/PropertyDrawers/DefaultVolumeProfileSettingsPropertyDrawer.cs index ae699840ad8..f25f8af4faa 100644 --- a/Packages/com.unity.render-pipelines.core/Editor/Settings/PropertyDrawers/DefaultVolumeProfileSettingsPropertyDrawer.cs +++ b/Packages/com.unity.render-pipelines.core/Editor/Settings/PropertyDrawers/DefaultVolumeProfileSettingsPropertyDrawer.cs @@ -10,8 +10,13 @@ namespace UnityEditor.Rendering /// public abstract class DefaultVolumeProfileSettingsPropertyDrawer : PropertyDrawer { + // UUM-77758: Due to how PropertyDrawers are created and cached, there is no way to retrieve them reliably + // later. We know that only one DefaultVolumeProfile exists at any given time, so we can access it through + // static variables. 
+ static SerializedProperty s_DefaultVolumeProfileSerializedProperty; + static DefaultVolumeProfileEditor s_DefaultVolumeProfileEditor; + VisualElement m_Root; - DefaultVolumeProfileEditor m_Editor; /// SerializedObject representing the settings object protected SerializedObject m_SettingsSerializedObject; @@ -76,8 +81,12 @@ protected void CreateDefaultVolumeProfileEditor() if (profile == VolumeManager.instance.globalDefaultProfile) VolumeProfileUtils.EnsureAllOverridesForDefaultProfile(profile); - m_Editor = new DefaultVolumeProfileEditor(profile, m_SettingsSerializedObject); - m_EditorContainer.Add(m_Editor.Create()); + if (s_DefaultVolumeProfileSerializedProperty != m_VolumeProfileSerializedProperty) + { + s_DefaultVolumeProfileSerializedProperty = m_VolumeProfileSerializedProperty; + s_DefaultVolumeProfileEditor = new DefaultVolumeProfileEditor(profile, m_SettingsSerializedObject); + } + m_EditorContainer.Add(s_DefaultVolumeProfileEditor.Create()); m_EditorContainer.Q("volume-override-info-box").text = volumeInfoBoxLabel.text; } @@ -86,9 +95,10 @@ protected void CreateDefaultVolumeProfileEditor() /// protected void DestroyDefaultVolumeProfileEditor() { - if (m_Editor != null) - m_Editor.Destroy(); - m_Editor = null; + if (s_DefaultVolumeProfileEditor != null) + s_DefaultVolumeProfileEditor.Destroy(); + s_DefaultVolumeProfileEditor = null; + s_DefaultVolumeProfileSerializedProperty = null; m_EditorContainer?.Clear(); } @@ -112,21 +122,20 @@ public abstract class DefaultVolumeProfileSettingsContextMenu protected abstract string defaultVolumeProfilePath { get; } - void IRenderPipelineGraphicsSettingsContextMenu.PopulateContextMenu(TSetting setting, PropertyDrawer drawer, ref GenericMenu menu) + void IRenderPipelineGraphicsSettingsContextMenu.PopulateContextMenu(TSetting setting, PropertyDrawer _, ref GenericMenu menu) { menu.AddSeparator(""); - var volumeDrawer = drawer as DefaultVolumeProfileSettingsPropertyDrawer; bool canCreateNewAsset = 
RenderPipelineManager.currentPipeline is TRenderPipeline; VolumeProfileUtils.AddVolumeProfileContextMenuItems(ref menu, setting.volumeProfile, - volumeDrawer.m_Editor.allEditors, + s_DefaultVolumeProfileEditor.allEditors, overrideStateOnReset: true, defaultVolumeProfilePath: defaultVolumeProfilePath, onNewVolumeProfileCreated: createdProfile => { - volumeDrawer.m_VolumeProfileSerializedProperty.objectReferenceValue = createdProfile; - volumeDrawer.m_VolumeProfileSerializedProperty.serializedObject.ApplyModifiedProperties(); + s_DefaultVolumeProfileSerializedProperty.objectReferenceValue = createdProfile; + s_DefaultVolumeProfileSerializedProperty.serializedObject.ApplyModifiedProperties(); VolumeProfile initialAsset = null; @@ -139,7 +148,7 @@ void IRenderPipelineGraphicsSettingsContextMenu.PopulateContextMenu(TS } VolumeProfileUtils.UpdateGlobalDefaultVolumeProfile(createdProfile, initialAsset); }, - onComponentEditorsExpandedCollapsed: volumeDrawer.m_Editor.RebuildListViews, + onComponentEditorsExpandedCollapsed: s_DefaultVolumeProfileEditor.RebuildListViews, canCreateNewAsset); } } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Debugging/DebugUI.Fields.cs b/Packages/com.unity.render-pipelines.core/Runtime/Debugging/DebugUI.Fields.cs index 00b45b73d25..db504af0bcf 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Debugging/DebugUI.Fields.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Debugging/DebugUI.Fields.cs @@ -446,6 +446,47 @@ public Type enumType } } + /// + /// Maskfield enumeration field. 
+ /// + public class MaskField : EnumField + { + /// + /// Fills the enum using the provided names + /// + /// names to fill the enum + public void Fill(string[] names) + { + using (ListPool.Get(out var tmpNames)) + using (ListPool.Get(out var tmpValues)) + { + for (int i=0; i<(names.Length); ++i) + { + tmpNames.Add(new GUIContent(names[i])); + tmpValues.Add(i); + } + enumNames = tmpNames.ToArray(); + enumValues = tmpValues.ToArray(); + } + } + + /// + /// Assigns a value to the maskfield. + /// + /// value for the maskfield + public override void SetValue(uint value) + { + Assert.IsNotNull(setter); + var validValue = ValidateValue(value); + + if (!validValue.Equals(getter())) + { + setter(validValue); + onValueChanged?.Invoke(this, validValue); + } + } + } + /// /// Color field. /// diff --git a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceData/InstanceData.cs b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceData/InstanceData.cs index 03f08897dfd..00805d473f9 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceData/InstanceData.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/GPUDriven/InstanceData/InstanceData.cs @@ -630,19 +630,19 @@ internal unsafe struct SmallIntegerArray : IDisposable public SmallIntegerArray(int length, Allocator allocator) { + m_FixedArray = default; + m_List = default; Length = length; Valid = true; - if (Length <= 8) // 32 bytes fixed array + if (Length <= m_FixedArray.Capacity) { m_FixedArray = new FixedList32Bytes(); m_FixedArray.Length = Length; - m_List = default; m_IsEmbedded = true; } else { - m_FixedArray = default; m_List = new UnsafeList(Length, allocator, NativeArrayOptions.UninitializedMemory); m_List.Resize(Length); m_IsEmbedded = false; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Streaming.cs 
b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Streaming.cs index 8541fa84d32..21ac285f4dd 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Streaming.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeReferenceVolume.Streaming.cs @@ -470,7 +470,7 @@ void ComputeBestToBeLoadedCells(Vector3 cameraPosition, Vector3 cameraDirection) } } - void ComputeWorseLoadedCells(Vector3 cameraPosition, Vector3 cameraDirection) + void ComputeStreamingScoreAndWorseLoadedCells(Vector3 cameraPosition, Vector3 cameraDirection) { m_WorseLoadedCells.Clear(); m_WorseLoadedCells.Reserve(m_LoadedCells.size); // Pre-reserve to avoid Insert allocating every time. @@ -660,6 +660,7 @@ public void UpdateCellStreaming(CommandBuffer cmd, Camera camera, ProbeVolumesOp int shChunkBudget = m_Pool.GetRemainingChunkCount(); int cellCountToLoad = Mathf.Min(numberOfCellsLoadedPerFrame, bestUnloadedCells.size); + bool didRecomputeScoresForLoadedCells = false; if (m_SupportGPUStreaming) { if (m_IndexDefragmentationInProgress) @@ -691,9 +692,10 @@ public void UpdateCellStreaming(CommandBuffer cmd, Camera camera, ProbeVolumesOp } else { - ComputeWorseLoadedCells(cameraPositionCellSpace, m_FrozenCameraDirection); + ComputeStreamingScoreAndWorseLoadedCells(cameraPositionCellSpace, m_FrozenCameraDirection); worseLoadedCells = m_WorseLoadedCells; } + didRecomputeScoresForLoadedCells = true; int pendingUnloadCount = 0; while (m_TempCellToLoadList.size < cellCountToLoad) @@ -755,10 +757,24 @@ public void UpdateCellStreaming(CommandBuffer cmd, Camera camera, ProbeVolumesOp { var cellInfo = m_ToBeLoadedCells[m_TempCellToLoadList.size]; // m_TempCellToLoadList.size get incremented in TryLoadCell if (!TryLoadCell(cellInfo, ref shChunkBudget, ref indexChunkBudget, m_TempCellToLoadList)) + { + if (i > 0) // Only warn once + { + Debug.LogWarning("Max Memory Budget for Adaptive Probe 
Volumes has been reached, but there is still more data to load. Consider either increasing the Memory Budget, enabling GPU Streaming, or reducing the probe count."); + } break; + } } } + // If we intend to blend scenarios, compute the streaming scores for the already loaded cells. + // These will be used to determine which of the loaded cells to perform blending on first. + // We only need to do this if we didn't already do it above. + if (!didRecomputeScoresForLoadedCells && supportScenarioBlending) + { + ComputeStreamingScore(cameraPositionCellSpace, m_FrozenCameraDirection, m_LoadedCells); + } + if (m_LoadMaxCellsPerFrame) ComputeMinMaxStreamingScore(); @@ -861,7 +877,9 @@ void UpdateBlendingCellStreaming(CommandBuffer cmd) var worstCellLoaded = m_LoadedBlendingCells[m_LoadedBlendingCells.size - m_TempBlendingCellToUnloadList.size - 1]; var bestCellToBeLoaded = m_ToBeLoadedBlendingCells[m_TempBlendingCellToLoadList.size]; - if (bestCellToBeLoaded.blendingInfo.blendingScore >= (worstNoTurnover ?? worstCellLoaded).blendingInfo.blendingScore) // We are in a "stable" state + // The best cell to be loaded has WORSE score than the worst cell already loaded. + // This means all cells waiting to be loaded are worse than the ones we already have - we are in a "stable" state. + if (bestCellToBeLoaded.blendingInfo.blendingScore >= (worstNoTurnover ?? worstCellLoaded).blendingInfo.blendingScore) { if (worstNoTurnover == null) // Disable turnover break; @@ -876,13 +894,21 @@ void UpdateBlendingCellStreaming(CommandBuffer cmd) break; } + // If we encounter a cell that is still being streamed in (and thus hasn't had a chance to be blended yet), bail + // we don't want to keep unloading cells before they get blended, or we will never get any work done. + // This branch is only ever true when disk streaming is being used. 
+ if (worstCellLoaded.streamingInfo.IsBlendingStreaming()) + break; + UnloadBlendingCell(worstCellLoaded, m_TempBlendingCellToUnloadList); if (probeVolumeDebug.verboseStreamingLog) LogStreaming($"Unloading blending cell {worstCellLoaded.desc.index}"); - // Loading can still fail cause all cells don't have the same chunk count - if (TryLoadBlendingCell(bestCellToBeLoaded, m_TempBlendingCellToLoadList) && turnoverOffset != -1) + bool loadOk = TryLoadBlendingCell(bestCellToBeLoaded, m_TempBlendingCellToLoadList); + + // Handle turnover. Loading can still fail cause all cells don't have the same chunk count. + if (loadOk && turnoverOffset != -1) { // swap to ensure loaded cells are at the start of m_ToBeLoadedBlendingCells m_ToBeLoadedBlendingCells[turnoverOffset] = m_ToBeLoadedBlendingCells[m_TempBlendingCellToLoadList.size-1]; diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeBakingSet.Editor.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeBakingSet.Editor.cs index eaafdce5c95..0ff45e67338 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeBakingSet.Editor.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeBakingSet.Editor.cs @@ -22,7 +22,7 @@ internal class SceneBakeData [SerializeField] SerializedDictionary m_SceneBakeData = new(); - static Dictionary sceneToBakingSet = new Dictionary(); + internal static Dictionary sceneToBakingSet = new Dictionary(); /// /// Tries to add a scene to the baking set. 
diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeGlobalSettings.cs b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeGlobalSettings.cs index 287f77cfa65..5627919b626 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeGlobalSettings.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Lighting/ProbeVolume/ProbeVolumeGlobalSettings.cs @@ -87,7 +87,7 @@ class ProbeVolumeGlobalSettings : IRenderPipelineGraphicsSettings { [SerializeField, HideInInspector] int m_Version = 1; - [SerializeField, Tooltip("Enabling this will make APV baked data assets compatible with Addressables and Asset Bundles. This will also make Disk Streaming unavailable.")] + [SerializeField, Tooltip("Enabling this will make APV baked data assets compatible with Addressables and Asset Bundles. This will also make Disk Streaming unavailable. After changing this setting, a clean rebuild may be required for data assets to be included in Adressables and Asset Bundles.")] bool m_ProbeVolumeDisableStreamingAssets; public int version { get => m_Version; } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/NativePassCompiler.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/NativePassCompiler.cs index ce5a9605d87..57bc3c19b51 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/NativePassCompiler.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/NativePassCompiler.cs @@ -816,6 +816,9 @@ void DetermineLoadStoreActions(ref NativePassData nativePass) ref readonly var resourceData = ref contextData.UnversionedResourceData(fragment.resource); bool isImported = resourceData.isImported; + int destroyPassID = resourceData.lastUsePassID; + bool usedAfterThisNativePass = (destroyPassID >= (nativePass.lastGraphPass + 1)); + if (fragment.accessFlags.HasFlag(AccessFlags.Read) || 
partialWrite) { // The resource is already allocated before this pass so we need to load it @@ -825,6 +828,18 @@ void DetermineLoadStoreActions(ref NativePassData nativePass) #if UNITY_EDITOR currLoadAudit = new LoadAudit(LoadReason.LoadPreviouslyWritten, resourceData.firstUsePassID); #endif + + // Once we decide to load a resource, we must default to the Store action if the resource is used after the current native pass. + // If we were to use the DontCare action in this case, the driver would effectively be allowed to discard the + // contents of the resource. This is true even when we're only performing reads on it. + if (usedAfterThisNativePass) + { + currAttachment.storeAction = RenderBufferStoreAction.Store; +#if UNITY_EDITOR + currStoreAudit = new StoreAudit(StoreReason.StoreUsedByLaterPass, destroyPassID); +#endif + } + } // It's first used this native pass so we need to clear it so reads/partial writes return the correct clear value // the clear colors are part of the resource description and set-up when executing the graph we don't need to care about that here. @@ -864,19 +879,22 @@ void DetermineLoadStoreActions(ref NativePassData nativePass) // Simple non-msaa case if (nativePass.samples <= 1) { - // The resource is still used after this renderpass so we need to store it imported resources always need to be stored - // as we don't know what happens with them and assume the contents are somewhow used outside the graph - int destroyPassID = resourceData.lastUsePassID; - if (destroyPassID >= nativePass.lastGraphPass + 1) + if (usedAfterThisNativePass) { + // The resource is still used after this native pass so we need to store it.
currAttachment.storeAction = RenderBufferStoreAction.Store; #if UNITY_EDITOR currStoreAudit = new StoreAudit(StoreReason.StoreUsedByLaterPass, destroyPassID); #endif } - // It's last used during this native pass just discard it unless it's imported in which case we need to store else { + // This is the last native pass that uses the resource. + // If it's imported, we store it because its contents may be used outside the graph. + // Otherwise, we can safely discard its contents. + // + // The one exception to this, is the user declared discard flag which allows us to assume an imported + // resource is not used outside the graph. if (isImported) { if (resourceData.discard) @@ -912,7 +930,6 @@ void DetermineLoadStoreActions(ref NativePassData nativePass) // In theory the opposite could also be true (use MSAA after resolve data is no longer needed) but we consider it sufficiently strange to not // consider it here. - int destroyPassID = resourceData.lastUsePassID; currAttachment.storeAction = RenderBufferStoreAction.DontCare; //Check if we're the last pass writing it by checking the output version of the current pass is the higherst version the resource will reach diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/PassesData.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/PassesData.cs index 23066848f12..95dcddfebd3 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/PassesData.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/Compiler/PassesData.cs @@ -786,7 +786,7 @@ static bool CanMergeNativeSubPass(CompilerContextData contextData, NativePassDat // which could have been implied by leaving the flag to None if (!passToMerge.fragmentInfoHasDepth && nativePass.hasDepth) { - flags = SubPassFlags.ReadOnlyDepth; + flags = SubPassFlags.ReadOnlyDepthStencil; } // MRT attachments @@ -807,7 +807,7 @@ static bool CanMergeNativeSubPass(CompilerContextData contextData, 
NativePassDat { flags = (graphPassFragment.accessFlags.HasFlag(AccessFlags.Write)) ? SubPassFlags.None - : SubPassFlags.ReadOnlyDepth; + : SubPassFlags.ReadOnlyDepthStencil; } // It's a color attachment else @@ -902,7 +902,7 @@ public static void TryMergeNativeSubPass(CompilerContextData contextData, ref Na // which could have been implied by leaving the flag to None if (!passToMerge.fragmentInfoHasDepth && nativePass.hasDepth) { - desc.flags = SubPassFlags.ReadOnlyDepth; + desc.flags = SubPassFlags.ReadOnlyDepthStencil; } // MRT attachments @@ -919,7 +919,7 @@ public static void TryMergeNativeSubPass(CompilerContextData contextData, ref Na { desc.flags = (graphPassFragment.accessFlags.HasFlag(AccessFlags.Write)) ? SubPassFlags.None - : SubPassFlags.ReadOnlyDepth; + : SubPassFlags.ReadOnlyDepthStencil; } // It's a color attachment else @@ -1028,7 +1028,7 @@ static void UpdateNativeSubPassesAttachments(CompilerContextData contextData, re // which could have been implied by leaving the flag to None if (!currGraphPass.fragmentInfoHasDepth && nativePass.hasDepth) { - nativeSubPassDescriptor.flags = SubPassFlags.ReadOnlyDepth; + nativeSubPassDescriptor.flags = SubPassFlags.ReadOnlyDepthStencil; } // MRT attachments @@ -1046,7 +1046,7 @@ static void UpdateNativeSubPassesAttachments(CompilerContextData contextData, re { nativeSubPassDescriptor.flags = (graphPassFragment.accessFlags.HasFlag(AccessFlags.Write)) ? 
SubPassFlags.None - : SubPassFlags.ReadOnlyDepth; + : SubPassFlags.ReadOnlyDepthStencil; } // It's a color attachment else diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.cs index cb676f2e0b5..1c8d8913001 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraph.cs @@ -2572,6 +2572,9 @@ void CleanupDebugData() internal void SetGlobal(TextureHandle h, int globalPropertyId) { + if (!h.IsValid()) + throw new ArgumentException("Attempting to register an invalid texture handle as a global"); + registeredGlobals[globalPropertyId] = h; } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphDefaultResources.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphDefaultResources.cs index c0a0363a87d..4ce0a173f70 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphDefaultResources.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphDefaultResources.cs @@ -38,7 +38,7 @@ internal RenderGraphDefaultResources() { m_BlackTexture2D = RTHandles.Alloc(Texture2D.blackTexture); m_WhiteTexture2D = RTHandles.Alloc(Texture2D.whiteTexture); - m_ShadowTexture2D = RTHandles.Alloc(1, 1, depthBufferBits: DepthBits.Depth32, isShadowMap: true, name: "DefaultShadowTexture"); + m_ShadowTexture2D = RTHandles.Alloc(1, 1, colorFormat:Experimental.Rendering.GraphicsFormat.None, depthBufferBits: DepthBits.Depth32, isShadowMap: true, name: "DefaultShadowTexture"); } internal void Cleanup() diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphPass.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphPass.cs index 9336280d115..093e6d82437 100644 --- 
a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphPass.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphPass.cs @@ -366,7 +366,7 @@ void ComputeTextureHash(ref HashFNV1A32 generator, in ResourceHandle handle, Ren { // The only info we have is from the provided desc upon importing. ref var desc = ref res.desc; - generator.Append((int) desc.colorFormat); + generator.Append((int) desc.format); generator.Append((int) desc.dimension); generator.Append((int) desc.msaaSamples); generator.Append(desc.width); @@ -380,7 +380,7 @@ void ComputeTextureHash(ref HashFNV1A32 generator, in ResourceHandle handle, Ren else { var desc = resources.GetTextureResourceDesc(handle); - generator.Append((int) desc.colorFormat); + generator.Append((int) desc.format); generator.Append((int) desc.dimension); generator.Append((int) desc.msaaSamples); generator.Append(desc.clearBuffer); diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphResourceRegistry.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphResourceRegistry.cs index 1318c85c64a..f26aea44706 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphResourceRegistry.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphResourceRegistry.cs @@ -544,12 +544,12 @@ internal TextureHandle ImportTexture(in RTHandle rt, RenderTargetInfo info, in I // Store the info in the descriptor structure to avoid having a separate info structure being saved per resource // This descriptor will then be used to reconstruct the info (see GetRenderTargetInfo) but is not a full featured descriptor. // This is ok as this descriptor will never be used to create textures (as they are imported into the graph and thus externally created). 
+ + texResource.desc.format = info.format; texResource.desc.width = info.width; texResource.desc.height = info.height; texResource.desc.slices = info.volumeDepth; - - texResource.desc.msaaSamples = (MSAASamples)info.msaaSamples; - texResource.desc.colorFormat = info.format; + texResource.desc.msaaSamples = (MSAASamples)info.msaaSamples; texResource.desc.bindTextureMS = info.bindMS; texResource.desc.clearBuffer = importParams.clearOnFirstUse; texResource.desc.clearColor = importParams.clearColor; @@ -665,7 +665,7 @@ internal TextureHandle ImportBackbuffer(RenderTargetIdentifier rt, in RenderTarg texResource.desc.slices = info.volumeDepth; texResource.desc.msaaSamples = (MSAASamples)info.msaaSamples; texResource.desc.bindTextureMS = info.bindMS; - texResource.desc.colorFormat = info.format; + texResource.desc.format = info.format; texResource.desc.clearBuffer = importParams.clearOnFirstUse; texResource.desc.clearColor = importParams.clearColor; texResource.desc.discardBuffer = importParams.discardOnLastUse; @@ -721,8 +721,7 @@ internal void GetRenderTargetInfo(in ResourceHandle res, out RenderTargetInfo ou outInfo.width = handle.m_RT.width; outInfo.height = handle.m_RT.height; outInfo.volumeDepth = handle.m_RT.volumeDepth; - // If it's depth only, graphics format is null but depthStencilFormat is the real format - outInfo.format = (handle.m_RT.graphicsFormat != GraphicsFormat.None) ? 
handle.m_RT.graphicsFormat : handle.m_RT.depthStencilFormat; + outInfo.format = GetFormat(handle.m_RT.graphicsFormat, handle.m_RT.depthStencilFormat); outInfo.msaaSamples = handle.m_RT.antiAliasing; outInfo.bindMS = handle.m_RT.bindTextureMS; } @@ -735,8 +734,7 @@ internal void GetRenderTargetInfo(in ResourceHandle res, out RenderTargetInfo ou if (handle.m_ExternalTexture is RenderTexture) { RenderTexture rt = (RenderTexture)handle.m_ExternalTexture; - // If it's depth only, graphics format is null but depthStencilFormat is the real format - outInfo.format = (rt.graphicsFormat != GraphicsFormat.None) ? rt.graphicsFormat : rt.depthStencilFormat; + outInfo.format = GetFormat(rt.graphicsFormat, rt.depthStencilFormat); outInfo.msaaSamples = rt.antiAliasing; } else @@ -758,7 +756,7 @@ internal void GetRenderTargetInfo(in ResourceHandle res, out RenderTargetInfo ou var desc = GetTextureResourceDesc(res, true); outInfo = new RenderTargetInfo(); #if DEVELOPMENT_BUILD || UNITY_EDITOR - if (desc.width == 0 || desc.height == 0 || desc.slices == 0 || desc.msaaSamples == 0 || desc.colorFormat == GraphicsFormat.None) + if (desc.width == 0 || desc.height == 0 || desc.slices == 0 || desc.msaaSamples == 0 || desc.format == GraphicsFormat.None) { throw new Exception("Invalid imported texture. 
A RTHandle wrapping an RenderTargetIdentifier was imported without providing valid RenderTargetInfo."); } @@ -768,7 +766,7 @@ internal void GetRenderTargetInfo(in ResourceHandle res, out RenderTargetInfo ou outInfo.volumeDepth = desc.slices; outInfo.msaaSamples = (int)desc.msaaSamples; - outInfo.format = desc.colorFormat; + outInfo.format = desc.format; outInfo.bindMS = desc.bindTextureMS; } else @@ -788,15 +786,24 @@ internal void GetRenderTargetInfo(in ResourceHandle res, out RenderTargetInfo ou outInfo.msaaSamples = (int)desc.msaaSamples; outInfo.bindMS = desc.bindTextureMS; + outInfo.format = desc.format; + } + } - if (desc.isShadowMap || desc.depthBufferBits != DepthBits.None) - { - var format = desc.isShadowMap ? DefaultFormat.Shadow : DefaultFormat.DepthStencil; - outInfo.format = SystemInfo.GetGraphicsFormat(format); - } - else + internal GraphicsFormat GetFormat(GraphicsFormat color, GraphicsFormat depthStencil) + { + ValidateFormat(color, depthStencil); + return (depthStencil != GraphicsFormat.None) ? depthStencil : color; + } + + [Conditional("DEVELOPMENT_BUILD"), Conditional("UNITY_EDITOR")] + internal void ValidateFormat(GraphicsFormat color, GraphicsFormat depthStencil) + { + if (RenderGraph.enableValidityChecks) + { + if (color != GraphicsFormat.None && depthStencil != GraphicsFormat.None) { - outInfo.format = desc.colorFormat; + throw new Exception("Invalid imported texture. Both a color and a depthStencil format are provided. The texture needs to either have a color format or a depth stencil format."); } } } @@ -1057,7 +1064,7 @@ bool CreateTextureCallback(InternalRenderGraphContext rgContext, IRenderGraphRes if ((forceManualClearOfResource && resource.desc.clearBuffer) || m_RenderGraphDebug.clearRenderTargetsAtCreation) { bool debugClear = m_RenderGraphDebug.clearRenderTargetsAtCreation && !resource.desc.clearBuffer; - var clearFlag = resource.desc.depthBufferBits != DepthBits.None ? 
ClearFlag.DepthStencil : ClearFlag.Color; + var clearFlag = GraphicsFormatUtility.IsDepthStencilFormat(resource.desc.format) ? ClearFlag.DepthStencil : ClearFlag.Color; var clearColor = debugClear ? Color.magenta : resource.desc.clearColor; CoreUtils.SetRenderTarget(rgContext.cmd, resource.graphicsResource, clearFlag, clearColor); executedWork = true; @@ -1094,7 +1101,7 @@ void ReleaseTextureCallback(InternalRenderGraphContext rgContext, IRenderGraphRe if (m_RenderGraphDebug.clearRenderTargetsAtRelease) { - var clearFlag = resource.desc.depthBufferBits != DepthBits.None ? ClearFlag.DepthStencil : ClearFlag.Color; + var clearFlag = GraphicsFormatUtility.IsDepthStencilFormat(resource.desc.format)? ClearFlag.DepthStencil : ClearFlag.Color; CoreUtils.SetRenderTarget(rgContext.cmd, resource.graphicsResource, clearFlag, Color.magenta); } } @@ -1104,9 +1111,9 @@ void ValidateTextureDesc(in TextureDesc desc) { if(RenderGraph.enableValidityChecks) { - if (desc.colorFormat == GraphicsFormat.None && desc.depthBufferBits == DepthBits.None) + if (desc.format == GraphicsFormat.None ) { - throw new ArgumentException("Texture was created with an invalid color format."); + throw new ArgumentException("Texture was created with no format. The texture needs to either have a color format or a depth stencil format."); } if (desc.dimension == TextureDimension.None) diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphResourceTexture.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphResourceTexture.cs index a68ce2e4504..ce22c2e771d 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphResourceTexture.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphResourceTexture.cs @@ -168,10 +168,8 @@ public struct TextureDesc public Vector2 scale; ///Texture scale function. public ScaleFunc func; - ///Depth buffer bit depth.
- public DepthBits depthBufferBits; - ///Color format. - public GraphicsFormat colorFormat; + ///Color or depth stencil format. + public GraphicsFormat format; ///Filtering mode. public FilterMode filterMode; ///Addressing mode. @@ -225,6 +223,34 @@ public struct TextureDesc ///Texture needs to be discarded on last use. public bool discardBuffer; + + ///Depth buffer bit depth of the format. The setter convert the bits to valid depth stencil format and sets the format. The getter gets the depth bits of the format. + public DepthBits depthBufferBits + { + get { return (DepthBits)GraphicsFormatUtility.GetDepthBits(format); } + set + { + if (value == DepthBits.None) + { + if( !GraphicsFormatUtility.IsDepthStencilFormat(format) ) + return; + else + format = GraphicsFormat.None; + } + else + { + format = GraphicsFormatUtility.GetDepthStencilFormat((int)value); + } + } + } + + ///Color format. Sets the format. The getter checks if format is a color format. Returns the format if a color format, otherwise returns GraphicsFormat.None. + public GraphicsFormat colorFormat + { + get { return GraphicsFormatUtility.IsDepthStencilFormat(format) ? GraphicsFormat.None : format; } + set { format = value; } + } + void InitDefaultValues(bool dynamicResolution, bool xrReady) { useDynamicScale = dynamicResolution; @@ -322,8 +348,7 @@ public TextureDesc(RenderTextureDescriptor input) slices = input.volumeDepth; scale = Vector2.one; func = null; - depthBufferBits = (DepthBits)input.depthBufferBits; - colorFormat = input.graphicsFormat; + format = (input.depthStencilFormat != GraphicsFormat.None) ? 
input.depthStencilFormat : input.graphicsFormat; filterMode = FilterMode.Bilinear; wrapMode = TextureWrapMode.Clamp; dimension = input.dimension; @@ -387,8 +412,7 @@ public override int GetHashCode() hashCode.Append(mipMapBias); hashCode.Append(slices); - hashCode.Append((int) depthBufferBits); - hashCode.Append((int) colorFormat); + hashCode.Append((int) format); hashCode.Append((int) filterMode); hashCode.Append((int) wrapMode); hashCode.Append((int) dimension); @@ -450,20 +474,20 @@ public override void CreateGraphicsResource() // Textures are going to be reused under different aliases along the frame so we can't provide a specific name upon creation. // The name in the desc is going to be used for debugging purpose and render graph visualization. if (name == "") - name = $"RenderGraphTexture_{m_TextureCreationIndex++}"; + name = $"RenderGraphTexture_{m_TextureCreationIndex++}"; switch (desc.sizeMode) { case TextureSizeMode.Explicit: - graphicsResource = RTHandles.Alloc(desc.width, desc.height, desc.slices, desc.depthBufferBits, desc.colorFormat, desc.filterMode, desc.wrapMode, desc.dimension, desc.enableRandomWrite, + graphicsResource = RTHandles.Alloc(desc.width, desc.height, desc.format, desc.slices, desc.filterMode, desc.wrapMode, desc.dimension, desc.enableRandomWrite, desc.useMipMap, desc.autoGenerateMips, desc.isShadowMap, desc.anisoLevel, desc.mipMapBias, desc.msaaSamples, desc.bindTextureMS, desc.useDynamicScale, desc.useDynamicScaleExplicit, desc.memoryless, desc.vrUsage, name); break; case TextureSizeMode.Scale: - graphicsResource = RTHandles.Alloc(desc.scale, desc.slices, desc.depthBufferBits, desc.colorFormat, desc.filterMode, desc.wrapMode, desc.dimension, desc.enableRandomWrite, + graphicsResource = RTHandles.Alloc(desc.scale, desc.format, desc.slices, desc.filterMode, desc.wrapMode, desc.dimension, desc.enableRandomWrite, desc.useMipMap, desc.autoGenerateMips, desc.isShadowMap, desc.anisoLevel, desc.mipMapBias, desc.msaaSamples, 
desc.bindTextureMS, desc.useDynamicScale, desc.useDynamicScaleExplicit, desc.memoryless, desc.vrUsage, name); break; case TextureSizeMode.Functor: - graphicsResource = RTHandles.Alloc(desc.func, desc.slices, desc.depthBufferBits, desc.colorFormat, desc.filterMode, desc.wrapMode, desc.dimension, desc.enableRandomWrite, + graphicsResource = RTHandles.Alloc(desc.func, desc.format, desc.slices, desc.filterMode, desc.wrapMode, desc.dimension, desc.enableRandomWrite, desc.useMipMap, desc.autoGenerateMips, desc.isShadowMap, desc.anisoLevel, desc.mipMapBias, desc.msaaSamples, desc.bindTextureMS, desc.useDynamicScale, desc.useDynamicScaleExplicit, desc.memoryless, desc.vrUsage, name); break; } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphUtilsBlit.cs b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphUtilsBlit.cs index db299248ccf..ebd4505e7c1 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphUtilsBlit.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/RenderGraph/RenderGraphUtilsBlit.cs @@ -9,6 +9,8 @@ namespace UnityEngine.Rendering.RenderGraphModule.Util /// public static partial class RenderGraphUtils { + static MaterialPropertyBlock s_PropertyBlock = new MaterialPropertyBlock(); + /// /// Checks if the shader features required by the MSAA version of the copy pass is supported on current platform. /// @@ -29,7 +31,7 @@ class CopyPassData /// individual samples will be copied. /// /// Copy is intentionally limited in functionally so it can be implemented using frame buffer fetch for optimal performance on tile based GPUs. If you are looking for a more generic - /// function please use the AddBlitPass function. Blit will automatically decide (based on the arguments) whether to use normal rendering or to instead call copy internally. + /// function please use the AddBlitPass function. /// /// For XR textures you will have to copy for each eye seperatly. 
/// @@ -735,45 +737,25 @@ static void BlitMaterialRenderFunc(BlitMaterialPassData data, UnsafeGraphContext s_BlitScaleBias.z = data.offset.x; s_BlitScaleBias.w = data.offset.y; - if (data.propertyBlock != null) - { - data.propertyBlock.SetTexture(data.sourceTexturePropertyID, data.source); - if (data.sourceSlice == -1) - data.propertyBlock.SetInt(data.sourceSlicePropertyID, 0); - if (data.sourceMip == -1) - data.propertyBlock.SetInt(data.sourceMipPropertyID, 0); - data.propertyBlock.SetVector(data.scaleBiasPropertyID, s_BlitScaleBias); - } - else - { - data.material.SetTexture(data.sourceTexturePropertyID, data.source); - if (data.sourceSlice == -1) - data.material.SetInt(data.sourceSlicePropertyID, 0); - if (data.sourceMip == -1) - data.material.SetInt(data.sourceMipPropertyID, 0); - data.material.SetVector(data.scaleBiasPropertyID, s_BlitScaleBias); - } + CommandBuffer unsafeCmd = CommandBufferHelpers.GetNativeCommandBuffer(context.cmd); + if (data.propertyBlock == null) data.propertyBlock = s_PropertyBlock; + + data.propertyBlock.SetTexture(data.sourceTexturePropertyID, data.source); + if (data.sourceSlice == -1) + data.propertyBlock.SetInt(data.sourceSlicePropertyID, 0); + if (data.sourceMip == -1) + data.propertyBlock.SetInt(data.sourceMipPropertyID, 0); + data.propertyBlock.SetVector(data.scaleBiasPropertyID, s_BlitScaleBias); - CommandBuffer unsafeCmd = CommandBufferHelpers.GetNativeCommandBuffer(context.cmd); for (int currSlice = 0; currSlice < data.numSlices; currSlice++) { for (int currMip = 0; currMip < data.numMips; currMip++) { - if (data.propertyBlock != null) - { - if (data.sourceSlice != -1) - data.propertyBlock.SetInt(data.sourceSlicePropertyID, data.sourceSlice + currSlice); - if (data.sourceMip != -1) - data.propertyBlock.SetInt(data.sourceMipPropertyID, data.sourceMip + currMip); - } - else - { - if (data.sourceSlice != -1) - data.material.SetInt(data.sourceSlicePropertyID, data.sourceSlice + currSlice); - if (data.sourceMip != -1) - 
data.material.SetInt(data.sourceMipPropertyID, data.sourceMip + currMip); - } + if (data.sourceSlice != -1) + data.propertyBlock.SetInt(data.sourceSlicePropertyID, data.sourceSlice + currSlice); + if (data.sourceMip != -1) + data.propertyBlock.SetInt(data.sourceMipPropertyID, data.sourceMip + currMip); context.cmd.SetRenderTarget(data.destination, data.destinationMip + currMip, CubemapFace.Unknown, data.destinationSlice + currSlice); switch (data.geometry) diff --git a/Packages/com.unity.render-pipelines.core/Runtime/STP/STP.cs b/Packages/com.unity.render-pipelines.core/Runtime/STP/STP.cs index 9cc4250bf2d..96178b8b12b 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/STP/STP.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/STP/STP.cs @@ -1150,7 +1150,7 @@ public static TextureHandle Execute(RenderGraph renderGraph, ref Config config) passData.intermediateColor = UseTexture(builder, renderGraph.CreateTexture(new TextureDesc(intermediateSize.x, intermediateSize.y, config.enableHwDrs, config.enableTexArray) { name = "STP Intermediate Color", - colorFormat = GraphicsFormat.A2B10G10R10_UNormPack32, + format = GraphicsFormat.A2B10G10R10_UNormPack32, enableRandomWrite = true }), AccessFlags.WriteAll); @@ -1158,7 +1158,7 @@ public static TextureHandle Execute(RenderGraph renderGraph, ref Config config) passData.intermediateConvergence = UseTexture(builder, renderGraph.CreateTexture(new TextureDesc(convergenceSize.x, convergenceSize.y, config.enableHwDrs, config.enableTexArray) { name = "STP Intermediate Convergence", - colorFormat = GraphicsFormat.R8_UNorm, + format = GraphicsFormat.R8_UNorm, enableRandomWrite = true }), AccessFlags.WriteAll); @@ -1241,7 +1241,7 @@ public static TextureHandle Execute(RenderGraph renderGraph, ref Config config) passData.intermediateWeights = UseTexture(builder, renderGraph.CreateTexture(new TextureDesc(intermediateSize.x, intermediateSize.y, config.enableHwDrs, config.enableTexArray) { name = "STP Intermediate 
Weights", - colorFormat = GraphicsFormat.R8_UNorm, + format = GraphicsFormat.R8_UNorm, enableRandomWrite = true }), AccessFlags.WriteAll); diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Textures/BufferedRTHandleSystem.cs b/Packages/com.unity.render-pipelines.core/Runtime/Textures/BufferedRTHandleSystem.cs index b962e6684ce..b4a7a26b51b 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Textures/BufferedRTHandleSystem.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Textures/BufferedRTHandleSystem.cs @@ -162,7 +162,7 @@ RTHandle Alloc(ref RenderTextureDescriptor d, FilterMode fMode, TextureWrapMode d.width, d.height, d.volumeDepth, - (DepthBits)d.depthBufferBits, + (DepthBits) d.depthBufferBits, d.graphicsFormat, fMode, wMode, diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Textures/RTHandleSystem.cs b/Packages/com.unity.render-pipelines.core/Runtime/Textures/RTHandleSystem.cs index 780b157d602..5dd596af50e 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Textures/RTHandleSystem.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Textures/RTHandleSystem.cs @@ -423,7 +423,7 @@ void DemandResize(RTHandle rth) rt.width, rt.height, rt.volumeDepth, - rt.graphicsFormat, + (rt.depthStencilFormat!=GraphicsFormat.None)? 
rt.depthStencilFormat : rt.graphicsFormat, rt.dimension, rth.m_Name, mips: rt.useMipMap, @@ -505,7 +505,10 @@ void Resize(int width, int height, bool sizeChanged) renderTexture.height = Mathf.Max(scaledSize.y, 1); // Regenerate the name - renderTexture.name = CoreUtils.GetRenderTargetAutoName(renderTexture.width, renderTexture.height, renderTexture.volumeDepth, renderTexture.graphicsFormat, renderTexture.dimension, rth.m_Name, mips: renderTexture.useMipMap, enableMSAA: rth.m_EnableMSAA, msaaSamples: (MSAASamples)renderTexture.antiAliasing, dynamicRes: renderTexture.useDynamicScale, dynamicResExplicit: renderTexture.useDynamicScaleExplicit); + renderTexture.name = CoreUtils.GetRenderTargetAutoName(renderTexture.width, renderTexture.height, renderTexture.volumeDepth + , (renderTexture.depthStencilFormat != GraphicsFormat.None) ? renderTexture.depthStencilFormat : renderTexture.graphicsFormat + , renderTexture.dimension, rth.m_Name, mips: renderTexture.useMipMap, enableMSAA: rth.m_EnableMSAA + , msaaSamples: (MSAASamples)renderTexture.antiAliasing, dynamicRes: renderTexture.useDynamicScale, dynamicResExplicit: renderTexture.useDynamicScaleExplicit); // Create the render texture renderTexture.Create(); @@ -561,7 +564,60 @@ public RTHandle Alloc( string name = "" ) { - return Alloc(width, height, wrapMode, wrapMode, wrapMode, slices, depthBufferBits, colorFormat, filterMode, dimension, enableRandomWrite, useMipMap, + var format = (depthBufferBits != DepthBits.None) ? GraphicsFormatUtility.GetDepthStencilFormat((int)depthBufferBits) : colorFormat; + + return Alloc(width, height, format, wrapMode, wrapMode, wrapMode, slices, filterMode, dimension, enableRandomWrite, useMipMap, + autoGenerateMips, isShadowMap, anisoLevel, mipMapBias, msaaSamples, bindTextureMS, useDynamicScale, useDynamicScaleExplicit, memoryless, vrUsage, name); + } + + /// + /// Allocate a new fixed sized RTHandle. + /// + /// With of the RTHandle. + /// Heigh of the RTHandle. 
+ /// GraphicsFormat of a color or depth stencil buffer. + /// Number of slices of the RTHandle. + /// Filtering mode of the RTHandle. + /// Addressing mode of the RTHandle. + /// Texture dimension of the RTHandle. + /// Set to true to enable UAV random read writes on the texture. + /// Set to true if the texture should have mipmaps. + /// Set to true to automatically generate mipmaps. + /// Set to true if the depth buffer should be used as a shadow map. + /// Anisotropic filtering level. + /// Bias applied to mipmaps during filtering. + /// Number of MSAA samples for the RTHandle. + /// Set to true if the texture needs to be bound as a multisampled texture in the shader. + /// [See Dynamic Resolution documentation](https://docs.unity3d.com/Manual/DynamicResolution.html) + /// [See Dynamic Resolution documentation](https://docs.unity3d.com/Manual/DynamicResolution.html) + /// Use this property to set the render texture memoryless modes. + /// Special treatment of the VR eye texture used in stereoscopic rendering. + /// Name of the RTHandle. + /// A new RTHandle. 
+ public RTHandle Alloc( + int width, + int height, + GraphicsFormat format, + int slices = 1, + FilterMode filterMode = FilterMode.Point, + TextureWrapMode wrapMode = TextureWrapMode.Repeat, + TextureDimension dimension = TextureDimension.Tex2D, + bool enableRandomWrite = false, + bool useMipMap = false, + bool autoGenerateMips = true, + bool isShadowMap = false, + int anisoLevel = 1, + float mipMapBias = 0f, + MSAASamples msaaSamples = MSAASamples.None, + bool bindTextureMS = false, + bool useDynamicScale = false, + bool useDynamicScaleExplicit = false, + RenderTextureMemoryless memoryless = RenderTextureMemoryless.None, + VRTextureUsage vrUsage = VRTextureUsage.None, + string name = "" + ) + { + return Alloc(width, height, format, wrapMode, wrapMode, wrapMode, slices, filterMode, dimension, enableRandomWrite, useMipMap, autoGenerateMips, isShadowMap, anisoLevel, mipMapBias, msaaSamples, bindTextureMS, useDynamicScale, useDynamicScaleExplicit, memoryless, vrUsage, name); } @@ -617,6 +673,85 @@ public RTHandle Alloc( VRTextureUsage vrUsage = VRTextureUsage.None, string name = "" ) + { + var format = (depthBufferBits != DepthBits.None) ? GraphicsFormatUtility.GetDepthStencilFormat((int)depthBufferBits) : colorFormat; + + return Alloc( + width, + height, + format, + wrapModeU, + wrapModeV, + wrapModeW, + slices, + filterMode, + dimension, + enableRandomWrite, + useMipMap, + autoGenerateMips , + isShadowMap, + anisoLevel, + mipMapBias, + msaaSamples, + bindTextureMS, + useDynamicScale, + useDynamicScaleExplicit, + memoryless, + vrUsage, + name + ); + } + + /// + /// Allocate a new fixed sized RTHandle. + /// + /// With of the RTHandle. + /// Heigh of the RTHandle. + /// GraphicsFormat of the color or a depth stencil buffer. + /// U coordinate wrapping mode of the RTHandle. + /// V coordinate wrapping mode of the RTHandle. + /// W coordinate wrapping mode of the RTHandle. + /// Number of slices of the RTHandle. + /// Filtering mode of the RTHandle. 
+ /// Texture dimension of the RTHandle. + /// Set to true to enable UAV random read writes on the texture. + /// Set to true if the texture should have mipmaps. + /// Set to true to automatically generate mipmaps. + /// Set to true if the depth buffer should be used as a shadow map. + /// Anisotropic filtering level. + /// Bias applied to mipmaps during filtering. + /// Number of MSAA samples for the RTHandle. + /// Set to true if the texture needs to be bound as a multisampled texture in the shader. + /// [See Dynamic Resolution documentation](https://docs.unity3d.com/Manual/DynamicResolution.html) + /// [See Dynamic Resolution documentation](https://docs.unity3d.com/Manual/DynamicResolution.html) + /// Use this property to set the render texture memoryless modes. + /// Special treatment of the VR eye texture used in stereoscopic rendering. + /// Name of the RTHandle. + /// A new RTHandle. + public RTHandle Alloc( + int width, + int height, + GraphicsFormat format, + TextureWrapMode wrapModeU, + TextureWrapMode wrapModeV, + TextureWrapMode wrapModeW = TextureWrapMode.Repeat, + int slices = 1, + FilterMode filterMode = FilterMode.Point, + TextureDimension dimension = TextureDimension.Tex2D, + bool enableRandomWrite = false, + bool useMipMap = false, + bool autoGenerateMips = true, + bool isShadowMap = false, + int anisoLevel = 1, + float mipMapBias = 0f, + MSAASamples msaaSamples = MSAASamples.None, + bool bindTextureMS = false, + bool useDynamicScale = false, + bool useDynamicScaleExplicit = false, + RenderTextureMemoryless memoryless = RenderTextureMemoryless.None, + VRTextureUsage vrUsage = VRTextureUsage.None, + string name = "" + ) { bool enableMSAA = msaaSamples != MSAASamples.None; if (!enableMSAA && bindTextureMS == true) @@ -625,63 +760,52 @@ public RTHandle Alloc( bindTextureMS = false; } - // We need to handle this in an explicit way since GraphicsFormat does not expose depth formats. 
TODO: Get rid of this branch once GraphicsFormat'll expose depth related formats RenderTexture rt; - if (isShadowMap || depthBufferBits != DepthBits.None) + if (isShadowMap) { - RenderTextureFormat format = isShadowMap ? RenderTextureFormat.Shadowmap : RenderTextureFormat.Depth; - GraphicsFormat stencilFormat = !isShadowMap && SystemInfo.IsFormatSupported(GraphicsFormat.R8_UInt, GraphicsFormatUsage.StencilSampling) ? GraphicsFormat.R8_UInt : GraphicsFormat.None; + RenderTextureFormat rtFormat = RenderTextureFormat.Shadowmap; + + rt = new RenderTexture(width, height, GraphicsFormatUtility.GetDepthBits(format), rtFormat, RenderTextureReadWrite.Linear); + rt.name = CoreUtils.GetRenderTargetAutoName(width, height, slices, rtFormat, name, mips: useMipMap, enableMSAA: enableMSAA, msaaSamples: msaaSamples); + + } + else if (GraphicsFormatUtility.IsDepthStencilFormat(format)) + { + //depth texture + rt = new RenderTexture(width, height, GraphicsFormat.None, format); + + rt.stencilFormat = GetStencilFormat(format); + + rt.name = CoreUtils.GetRenderTargetAutoName(width, height, slices, format, dimension, name, mips: useMipMap, enableMSAA: enableMSAA, msaaSamples: msaaSamples, dynamicRes: useDynamicScale, dynamicResExplicit: useDynamicScaleExplicit); - rt = new RenderTexture(width, height, (int)depthBufferBits, format, RenderTextureReadWrite.Linear) - { - hideFlags = HideFlags.HideAndDontSave, - volumeDepth = slices, - filterMode = filterMode, - wrapModeU = wrapModeU, - wrapModeV = wrapModeV, - wrapModeW = wrapModeW, - dimension = dimension, - enableRandomWrite = enableRandomWrite, - useMipMap = useMipMap, - autoGenerateMips = autoGenerateMips, - anisoLevel = anisoLevel, - mipMapBias = mipMapBias, - stencilFormat = stencilFormat, - antiAliasing = (int)msaaSamples, - bindTextureMS = bindTextureMS, - useDynamicScale = m_HardwareDynamicResRequested && useDynamicScale, - useDynamicScaleExplicit = m_HardwareDynamicResRequested && useDynamicScaleExplicit, - memorylessMode = 
memoryless, - vrUsage = vrUsage, - name = CoreUtils.GetRenderTargetAutoName(width, height, slices, format, name, mips: useMipMap, enableMSAA: enableMSAA, msaaSamples: msaaSamples) - }; } else { - rt = new RenderTexture(width, height, (int)depthBufferBits, colorFormat) - { - hideFlags = HideFlags.HideAndDontSave, - volumeDepth = slices, - filterMode = filterMode, - wrapModeU = wrapModeU, - wrapModeV = wrapModeV, - wrapModeW = wrapModeW, - dimension = dimension, - enableRandomWrite = enableRandomWrite, - useMipMap = useMipMap, - autoGenerateMips = autoGenerateMips, - anisoLevel = anisoLevel, - mipMapBias = mipMapBias, - antiAliasing = (int)msaaSamples, - bindTextureMS = bindTextureMS, - useDynamicScale = m_HardwareDynamicResRequested && useDynamicScale, - useDynamicScaleExplicit = m_HardwareDynamicResRequested && useDynamicScaleExplicit, - memorylessMode = memoryless, - vrUsage = vrUsage, - name = CoreUtils.GetRenderTargetAutoName(width, height, slices, colorFormat, dimension, name, mips: useMipMap, enableMSAA: enableMSAA, msaaSamples: msaaSamples, dynamicRes: useDynamicScale, dynamicResExplicit: useDynamicScaleExplicit) - }; + //color texture + rt = new RenderTexture(width, height, format, GraphicsFormat.None); + + rt.name = CoreUtils.GetRenderTargetAutoName(width, height, slices, format, dimension, name, mips: useMipMap, enableMSAA: enableMSAA, msaaSamples: msaaSamples, dynamicRes: useDynamicScale, dynamicResExplicit: useDynamicScaleExplicit); } + rt.enableRandomWrite = enableRandomWrite; + rt.useMipMap = useMipMap; + rt.autoGenerateMips = autoGenerateMips; + rt.anisoLevel = anisoLevel; + rt.mipMapBias = mipMapBias; + rt.antiAliasing = (int)msaaSamples; + rt.bindTextureMS = bindTextureMS; + rt.useDynamicScale = m_HardwareDynamicResRequested && useDynamicScale; + rt.useDynamicScaleExplicit = m_HardwareDynamicResRequested && useDynamicScaleExplicit; + rt.memorylessMode = memoryless; + rt.vrUsage = vrUsage; + rt.hideFlags = HideFlags.HideAndDontSave; + rt.volumeDepth 
= slices; + rt.filterMode = filterMode; + rt.wrapModeU = wrapModeU; + rt.wrapModeV = wrapModeV; + rt.wrapModeW = wrapModeW; + rt.dimension = dimension; + rt.Create(); var newRT = new RTHandle(this); @@ -713,15 +837,10 @@ public RTHandle Alloc(int width, int height, RTHandleAllocInfo info) info.bindTextureMS = false; } - // We need to handle this in an explicit way since GraphicsFormat does not expose depth formats. TODO: Get rid of this branch once GraphicsFormat'll expose depth related formats RenderTexture rt; - var colorFormat = GraphicsFormatUtility.IsDepthFormat(info.format) ? GraphicsFormat.None : info.format; + var colorFormat = GraphicsFormatUtility.IsDepthStencilFormat(info.format) ? GraphicsFormat.None : info.format; var depthStencilFormat = (colorFormat == GraphicsFormat.None) ? info.format : GraphicsFormat.None; - - var stencilFormat = GraphicsFormat.None; - if (GraphicsFormatUtility.IsStencilFormat(info.format) && SystemInfo.IsFormatSupported(GraphicsFormat.R8_UInt, GraphicsFormatUsage.StencilSampling)) - stencilFormat = GraphicsFormat.R8_UInt; - + rt = new RenderTexture(width, height, colorFormat, depthStencilFormat) { hideFlags = HideFlags.HideAndDontSave, @@ -740,7 +859,7 @@ public RTHandle Alloc(int width, int height, RTHandleAllocInfo info) bindTextureMS = info.bindTextureMS, useDynamicScale = m_HardwareDynamicResRequested && info.useDynamicScale, memorylessMode = info.memoryless, - stencilFormat = stencilFormat, + stencilFormat = GetStencilFormat(depthStencilFormat), vrUsage = info.vrUsage, name = CoreUtils.GetRenderTargetAutoName(width, height, info.slices, info.format, info.dimension, info.name, mips: info.useMipMap, enableMSAA: enableMSAA, msaaSamples: info.msaaSamples, dynamicRes: info.useDynamicScale) }; @@ -782,9 +901,8 @@ public Vector2Int CalculateDimensions(Vector2 scaleFactor) /// Allocate a new automatically sized RTHandle. /// /// Constant scale for the RTHandle size computation. 
+ /// GraphicsFormat of a color or depth stencil buffer. /// Number of slices of the RTHandle. - /// Bit depths of a depth buffer. - /// GraphicsFormat of a color buffer. /// Filtering mode of the RTHandle. /// Addressing mode of the RTHandle. /// Texture dimension of the RTHandle. @@ -804,9 +922,8 @@ public Vector2Int CalculateDimensions(Vector2 scaleFactor) /// A new RTHandle. public RTHandle Alloc( Vector2 scaleFactor, - int slices = 1, - DepthBits depthBufferBits = DepthBits.None, - GraphicsFormat colorFormat = GraphicsFormat.R8G8B8A8_SRGB, + GraphicsFormat format, + int slices = 1, FilterMode filterMode = FilterMode.Point, TextureWrapMode wrapMode = TextureWrapMode.Repeat, TextureDimension dimension = TextureDimension.Tex2D, @@ -830,8 +947,7 @@ public RTHandle Alloc( var rth = AllocAutoSizedRenderTexture(actualDimensions.x, actualDimensions.y, slices, - depthBufferBits, - colorFormat, + format, filterMode, wrapMode, dimension, @@ -856,6 +972,77 @@ public RTHandle Alloc( return rth; } + /// + /// Allocate a new automatically sized RTHandle. + /// + /// Constant scale for the RTHandle size computation. + /// Number of slices of the RTHandle. + /// Bit depths of a depth buffer. + /// GraphicsFormat of a color buffer. + /// Filtering mode of the RTHandle. + /// Addressing mode of the RTHandle. + /// Texture dimension of the RTHandle. + /// Set to true to enable UAV random read writes on the texture. + /// Set to true if the texture should have mipmaps. + /// Set to true to automatically generate mipmaps. + /// Set to true if the depth buffer should be used as a shadow map. + /// Anisotropic filtering level. + /// Bias applied to mipmaps during filtering. + /// Number of MSAA samples. + /// Set to true if the texture needs to be bound as a multisampled texture in the shader. 
+ /// [See Dynamic Resolution documentation](https://docs.unity3d.com/Manual/DynamicResolution.html) + /// [See Dynamic Resolution documentation](https://docs.unity3d.com/Manual/DynamicResolution.html) + /// Use this property to set the render texture memoryless modes. + /// Special treatment of the VR eye texture used in stereoscopic rendering. + /// Name of the RTHandle. + /// A new RTHandle. + public RTHandle Alloc( + Vector2 scaleFactor, + int slices = 1, + DepthBits depthBufferBits = DepthBits.None, + GraphicsFormat colorFormat = GraphicsFormat.R8G8B8A8_SRGB, + FilterMode filterMode = FilterMode.Point, + TextureWrapMode wrapMode = TextureWrapMode.Repeat, + TextureDimension dimension = TextureDimension.Tex2D, + bool enableRandomWrite = false, + bool useMipMap = false, + bool autoGenerateMips = true, + bool isShadowMap = false, + int anisoLevel = 1, + float mipMapBias = 0f, + MSAASamples msaaSamples = MSAASamples.None, + bool bindTextureMS = false, + bool useDynamicScale = false, + bool useDynamicScaleExplicit = false, + RenderTextureMemoryless memoryless = RenderTextureMemoryless.None, + VRTextureUsage vrUsage = VRTextureUsage.None, + string name = "" + ) + { + var format = (depthBufferBits != DepthBits.None) ? GraphicsFormatUtility.GetDepthStencilFormat((int)depthBufferBits) : colorFormat; + + return Alloc(scaleFactor, + format, + slices, + filterMode, + wrapMode, + dimension, + enableRandomWrite, + useMipMap, + autoGenerateMips, + isShadowMap, + anisoLevel, + mipMapBias, + msaaSamples, + bindTextureMS, + useDynamicScale, + useDynamicScaleExplicit, + memoryless, + vrUsage, + name + ); + } + /// /// Allocate a new automatically sized RTHandle. /// @@ -945,14 +1132,82 @@ public RTHandle Alloc( VRTextureUsage vrUsage = VRTextureUsage.None, string name = "" ) + { + var format = (depthBufferBits != DepthBits.None) ? 
GraphicsFormatUtility.GetDepthStencilFormat((int)depthBufferBits) : colorFormat; + + return Alloc(scaleFunc, + format, + slices, + filterMode, + wrapMode, + dimension, + enableRandomWrite, + useMipMap, + autoGenerateMips, + isShadowMap, + anisoLevel, + mipMapBias, + msaaSamples, + bindTextureMS, + useDynamicScale, + useDynamicScaleExplicit, + memoryless, + vrUsage, + name + ); + } + + /// + /// Allocate a new automatically sized RTHandle. + /// + /// Function used for the RTHandle size computation. + /// GraphicsFormat of a color or depth stencil buffer. + /// Number of slices of the RTHandle. + /// Filtering mode of the RTHandle. + /// Addressing mode of the RTHandle. + /// Texture dimension of the RTHandle. + /// Set to true to enable UAV random read writes on the texture. + /// Set to true if the texture should have mipmaps. + /// Set to true to automatically generate mipmaps. + /// Set to true if the depth buffer should be used as a shadow map. + /// Anisotropic filtering level. + /// Bias applied to mipmaps during filtering. + /// Number of MSAA samples. + /// Set to true if the texture needs to be bound as a multisampled texture in the shader. + /// [See Dynamic Resolution documentation](https://docs.unity3d.com/Manual/DynamicResolution.html) + /// [See Dynamic Resolution documentation](https://docs.unity3d.com/Manual/DynamicResolution.html) + /// Use this property to set the render texture memoryless modes. + /// Special treatment of the VR eye texture used in stereoscopic rendering. + /// Name of the RTHandle. + /// A new RTHandle. 
+ public RTHandle Alloc( + ScaleFunc scaleFunc, + GraphicsFormat format, + int slices = 1, + FilterMode filterMode = FilterMode.Point, + TextureWrapMode wrapMode = TextureWrapMode.Repeat, + TextureDimension dimension = TextureDimension.Tex2D, + bool enableRandomWrite = false, + bool useMipMap = false, + bool autoGenerateMips = true, + bool isShadowMap = false, + int anisoLevel = 1, + float mipMapBias = 0f, + MSAASamples msaaSamples = MSAASamples.None, + bool bindTextureMS = false, + bool useDynamicScale = false, + bool useDynamicScaleExplicit = false, + RenderTextureMemoryless memoryless = RenderTextureMemoryless.None, + VRTextureUsage vrUsage = VRTextureUsage.None, + string name = "" + ) { var actualDimensions = CalculateDimensions(scaleFunc); var rth = AllocAutoSizedRenderTexture(actualDimensions.x, actualDimensions.y, - slices, - depthBufferBits, - colorFormat, + slices, + format, filterMode, wrapMode, dimension, @@ -1000,9 +1255,8 @@ public RTHandle Alloc(ScaleFunc scaleFunc, RTHandleAllocInfo info) RTHandle AllocAutoSizedRenderTexture( int width, int height, - int slices, - DepthBits depthBufferBits, - GraphicsFormat colorFormat, + int slices, + GraphicsFormat format, FilterMode filterMode, TextureWrapMode wrapMode, TextureDimension dimension, @@ -1035,59 +1289,49 @@ string name Debug.LogWarning("RTHandle that is MSAA-enabled cannot allocate MSAA RT with 'enableRandomWrite = true'."); enableRandomWrite = false; } - - // We need to handle this in an explicit way since GraphicsFormat does not expose depth formats. TODO: Get rid of this branch once GraphicsFormat'll expose depth related formats + RenderTexture rt; - if (isShadowMap || depthBufferBits != DepthBits.None) + + bool isDepthStencilFormat = GraphicsFormatUtility.IsDepthStencilFormat(format); + + if (isShadowMap) { - RenderTextureFormat format = isShadowMap ? 
RenderTextureFormat.Shadowmap : RenderTextureFormat.Depth; - GraphicsFormat stencilFormat = !isShadowMap && SystemInfo.IsFormatSupported(GraphicsFormat.R8_UInt, GraphicsFormatUsage.StencilSampling) ? GraphicsFormat.R8_UInt : GraphicsFormat.None; - rt = new RenderTexture(width, height, (int)depthBufferBits, format, RenderTextureReadWrite.Linear) - { - hideFlags = HideFlags.HideAndDontSave, - volumeDepth = slices, - filterMode = filterMode, - wrapMode = wrapMode, - dimension = dimension, - enableRandomWrite = enableRandomWrite, - useMipMap = useMipMap, - autoGenerateMips = autoGenerateMips, - anisoLevel = anisoLevel, - mipMapBias = mipMapBias, - antiAliasing = (int)msaaSamples, - bindTextureMS = bindTextureMS, - useDynamicScale = m_HardwareDynamicResRequested && useDynamicScale, - useDynamicScaleExplicit = m_HardwareDynamicResRequested && useDynamicScaleExplicit, - memorylessMode = memoryless, - stencilFormat = stencilFormat, - vrUsage = vrUsage, - name = CoreUtils.GetRenderTargetAutoName(width, height, slices, colorFormat, dimension, name, mips: useMipMap, enableMSAA: enableMSAA, msaaSamples: msaaSamples, dynamicRes: useDynamicScale, dynamicResExplicit: useDynamicScaleExplicit) - }; + Assert.IsTrue(isDepthStencilFormat, "RTHandle is created as shadowMap but the format is not a depth stencil format."); + + rt = new RenderTexture(width, height, GraphicsFormatUtility.GetDepthBits(format), RenderTextureFormat.Shadowmap, RenderTextureReadWrite.Linear); + } - else + else if(isDepthStencilFormat) { - rt = new RenderTexture(width, height, (int)depthBufferBits, colorFormat) - { - hideFlags = HideFlags.HideAndDontSave, - volumeDepth = slices, - filterMode = filterMode, - wrapMode = wrapMode, - dimension = dimension, - enableRandomWrite = enableRandomWrite, - useMipMap = useMipMap, - autoGenerateMips = autoGenerateMips, - anisoLevel = anisoLevel, - mipMapBias = mipMapBias, - antiAliasing = (int)msaaSamples, - bindTextureMS = bindTextureMS, - useDynamicScale = 
m_HardwareDynamicResRequested && useDynamicScale, - useDynamicScaleExplicit = m_HardwareDynamicResRequested && useDynamicScaleExplicit, - memorylessMode = memoryless, - vrUsage = vrUsage, - name = CoreUtils.GetRenderTargetAutoName(width, height, slices, colorFormat, dimension, name, mips: useMipMap, enableMSAA: enableMSAA, msaaSamples: msaaSamples, dynamicRes: useDynamicScale, dynamicResExplicit: useDynamicScaleExplicit) - }; + //depth texture + rt = new RenderTexture(width, height, GraphicsFormat.None, format); + + rt.stencilFormat = GetStencilFormat(format); + + }else + { + //color texture + rt = new RenderTexture(width, height, format, GraphicsFormat.None); } + rt.name = CoreUtils.GetRenderTargetAutoName(width, height, slices, format, dimension, name, mips: useMipMap, enableMSAA: enableMSAA, msaaSamples: msaaSamples, dynamicRes: useDynamicScale, dynamicResExplicit: useDynamicScaleExplicit); + rt.hideFlags = HideFlags.HideAndDontSave; + rt.volumeDepth = slices; + rt.filterMode = filterMode; + rt.wrapMode = wrapMode; + rt.dimension = dimension; + rt.enableRandomWrite = enableRandomWrite; + rt.useMipMap = useMipMap; + rt.autoGenerateMips = autoGenerateMips; + rt.anisoLevel = anisoLevel; + rt.mipMapBias = mipMapBias; + rt.antiAliasing = (int)msaaSamples; + rt.bindTextureMS = bindTextureMS; + rt.useDynamicScale = m_HardwareDynamicResRequested && useDynamicScale; + rt.useDynamicScaleExplicit = m_HardwareDynamicResRequested && useDynamicScaleExplicit; + rt.memorylessMode = memoryless; + rt.vrUsage = vrUsage; + rt.Create(); var rth = new RTHandle(this); @@ -1120,13 +1364,9 @@ RTHandle AllocAutoSizedRenderTexture(int width, int height, RTHandleAllocInfo in RenderTexture rt; { - var colorFormat = GraphicsFormatUtility.IsDepthFormat(info.format) ? GraphicsFormat.None : info.format; + var colorFormat = GraphicsFormatUtility.IsDepthStencilFormat(info.format) ? GraphicsFormat.None : info.format; var depthStencilFormat = (colorFormat == GraphicsFormat.None) ? 
info.format : GraphicsFormat.None; - var stencilFormat = GraphicsFormat.None; - if (GraphicsFormatUtility.IsStencilFormat(info.format) && SystemInfo.IsFormatSupported(GraphicsFormat.R8_UInt, GraphicsFormatUsage.StencilSampling)) - stencilFormat = GraphicsFormat.R8_UInt; - rt = new RenderTexture(width, height, colorFormat, depthStencilFormat) { hideFlags = HideFlags.HideAndDontSave, @@ -1143,7 +1383,7 @@ RTHandle AllocAutoSizedRenderTexture(int width, int height, RTHandleAllocInfo in bindTextureMS = info.bindTextureMS, useDynamicScale = m_HardwareDynamicResRequested && info.useDynamicScale, memorylessMode = info.memoryless, - stencilFormat = stencilFormat, + stencilFormat = GetStencilFormat(depthStencilFormat), vrUsage = info.vrUsage, name = CoreUtils.GetRenderTargetAutoName(width, height, info.slices, info.format, info.dimension, info.name, mips: info.useMipMap, enableMSAA: enableMSAA, msaaSamples: info.msaaSamples, dynamicRes: info.useDynamicScale) }; @@ -1248,5 +1488,11 @@ internal string DumpRTInfo() return result; } + + private GraphicsFormat GetStencilFormat(GraphicsFormat depthStencilFormat) + { + return (GraphicsFormatUtility.IsStencilFormat(depthStencilFormat) && SystemInfo.IsFormatSupported(GraphicsFormat.R8_UInt, GraphicsFormatUsage.StencilSampling)) ? + GraphicsFormat.R8_UInt : GraphicsFormat.None; + } } } diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Textures/RTHandles.cs b/Packages/com.unity.render-pipelines.core/Runtime/Textures/RTHandles.cs index 78bf1d3f9e5..45074362626 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Textures/RTHandles.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Textures/RTHandles.cs @@ -118,6 +118,77 @@ public static RTHandle Alloc( ); } + /// + /// Allocate a new fixed sized RTHandle with the default RTHandle System. + /// + /// With of the RTHandle. + /// Heigh of the RTHandle. + /// GraphicsFormat of a color or depth stencil buffer. + /// Number of slices of the RTHandle. 
+ /// Filtering mode of the RTHandle. + /// Addressing mode of the RTHandle. + /// Texture dimension of the RTHandle. + /// Set to true to enable UAV random read writes on the texture. + /// Set to true if the texture should have mipmaps. + /// Set to true to automatically generate mipmaps. + /// Set to true if the depth buffer should be used as a shadow map. + /// Anisotropic filtering level. + /// Bias applied to mipmaps during filtering. + /// Number of MSAA samples for the RTHandle. + /// Set to true if the texture needs to be bound as a multisampled texture in the shader. + /// [See Dynamic Resolution documentation](https://docs.unity3d.com/Manual/DynamicResolution.html) + /// [See Dynamic Resolution documentation](https://docs.unity3d.com/Manual/DynamicResolution.html) + /// Use this property to set the render texture memoryless modes. + /// Special treatment of the VR eye texture used in stereoscopic rendering. + /// Name of the RTHandle. + /// A new RTHandle. + public static RTHandle Alloc( + int width, + int height, + GraphicsFormat format, + int slices = 1, + FilterMode filterMode = FilterMode.Point, + TextureWrapMode wrapMode = TextureWrapMode.Repeat, + TextureDimension dimension = TextureDimension.Tex2D, + bool enableRandomWrite = false, + bool useMipMap = false, + bool autoGenerateMips = true, + bool isShadowMap = false, + int anisoLevel = 1, + float mipMapBias = 0, + MSAASamples msaaSamples = MSAASamples.None, + bool bindTextureMS = false, + bool useDynamicScale = false, + bool useDynamicScaleExplicit = false, + RenderTextureMemoryless memoryless = RenderTextureMemoryless.None, + VRTextureUsage vrUsage = VRTextureUsage.None, + string name = "" + ) + { + return s_DefaultInstance.Alloc( + width, + height, + format, + slices, + filterMode, + wrapMode, + dimension, + enableRandomWrite, + useMipMap, + autoGenerateMips, + isShadowMap, + anisoLevel, + mipMapBias, + msaaSamples, + bindTextureMS, + useDynamicScale, + useDynamicScaleExplicit, + memoryless, + 
vrUsage, + name + ); + } + /// /// Allocate a new fixed sized RTHandle with the default RTHandle System. /// @@ -328,6 +399,74 @@ public static RTHandle Alloc( ); } + /// + /// Allocate a new automatically sized RTHandle for the default RTHandle System. + /// + /// Constant scale for the RTHandle size computation. + /// GraphicsFormat of a color or depth stencil buffer. + /// Number of slices of the RTHandle. + /// Filtering mode of the RTHandle. + /// Addressing mode of the RTHandle. + /// Texture dimension of the RTHandle. + /// Set to true to enable UAV random read writes on the texture. + /// Set to true if the texture should have mipmaps. + /// Set to true to automatically generate mipmaps. + /// Set to true if the depth buffer should be used as a shadow map. + /// Anisotropic filtering level. + /// Bias applied to mipmaps during filtering. + /// Number of MSAA samples. + /// Set to true if the texture needs to be bound as a multisampled texture in the shader. + /// [See Dynamic Resolution documentation](https://docs.unity3d.com/Manual/DynamicResolution.html) + /// [See Dynamic Resolution documentation](https://docs.unity3d.com/Manual/DynamicResolution.html) + /// Use this property to set the render texture memoryless modes. + /// Special treatment of the VR eye texture used in stereoscopic rendering. + /// Name of the RTHandle. + /// A new RTHandle. 
+ public static RTHandle Alloc( + Vector2 scaleFactor, + GraphicsFormat format, + int slices = 1, + FilterMode filterMode = FilterMode.Point, + TextureWrapMode wrapMode = TextureWrapMode.Repeat, + TextureDimension dimension = TextureDimension.Tex2D, + bool enableRandomWrite = false, + bool useMipMap = false, + bool autoGenerateMips = true, + bool isShadowMap = false, + int anisoLevel = 1, + float mipMapBias = 0, + MSAASamples msaaSamples = MSAASamples.None, + bool bindTextureMS = false, + bool useDynamicScale = false, + bool useDynamicScaleExplicit = false, + RenderTextureMemoryless memoryless = RenderTextureMemoryless.None, + VRTextureUsage vrUsage = VRTextureUsage.None, + string name = "" + ) + { + return s_DefaultInstance.Alloc( + scaleFactor, + format, + slices, + filterMode, + wrapMode, + dimension, + enableRandomWrite, + useMipMap, + autoGenerateMips, + isShadowMap, + anisoLevel, + mipMapBias, + msaaSamples, + bindTextureMS, + useDynamicScale, + useDynamicScaleExplicit, + memoryless, + vrUsage, + name + ); + } + /// /// Allocate a new automatically sized RTHandle for the default RTHandle System. /// @@ -351,11 +490,12 @@ public static RTHandle Alloc( string name = "" ) { + var format = GraphicsFormatUtility.IsDepthStencilFormat(descriptor.depthStencilFormat) ? descriptor.depthStencilFormat : descriptor.graphicsFormat; + return s_DefaultInstance.Alloc( scaleFactor, - descriptor.volumeDepth, - (DepthBits)descriptor.depthBufferBits, - descriptor.graphicsFormat, + format, + descriptor.volumeDepth, filterMode, wrapMode, descriptor.dimension, @@ -457,6 +597,74 @@ public static RTHandle Alloc( ); } + /// + /// Allocate a new automatically sized RTHandle for the default RTHandle System. + /// + /// Function used for the RTHandle size computation. + /// GraphicsFormat of a color or depth stencil buffer. + /// Number of slices of the RTHandle. + /// Filtering mode of the RTHandle. + /// Addressing mode of the RTHandle. + /// Texture dimension of the RTHandle. 
+ /// Set to true to enable UAV random read writes on the texture. + /// Set to true if the texture should have mipmaps. + /// Set to true to automatically generate mipmaps. + /// Set to true if the depth buffer should be used as a shadow map. + /// Anisotropic filtering level. + /// Bias applied to mipmaps during filtering. + /// Number of MSAA samples. + /// Set to true if the texture needs to be bound as a multisampled texture in the shader. + /// [See Dynamic Resolution documentation](https://docs.unity3d.com/Manual/DynamicResolution.html) + /// [See Dynamic Resolution documentation](https://docs.unity3d.com/Manual/DynamicResolution.html) + /// Use this property to set the render texture memoryless modes. + /// Special treatment of the VR eye texture used in stereoscopic rendering. + /// Name of the RTHandle. + /// A new RTHandle. + public static RTHandle Alloc( + ScaleFunc scaleFunc, + GraphicsFormat format, + int slices = 1, + FilterMode filterMode = FilterMode.Point, + TextureWrapMode wrapMode = TextureWrapMode.Repeat, + TextureDimension dimension = TextureDimension.Tex2D, + bool enableRandomWrite = false, + bool useMipMap = false, + bool autoGenerateMips = true, + bool isShadowMap = false, + int anisoLevel = 1, + float mipMapBias = 0, + MSAASamples msaaSamples = MSAASamples.None, + bool bindTextureMS = false, + bool useDynamicScale = false, + bool useDynamicScaleExplicit = false, + RenderTextureMemoryless memoryless = RenderTextureMemoryless.None, + VRTextureUsage vrUsage = VRTextureUsage.None, + string name = "" + ) + { + return s_DefaultInstance.Alloc( + scaleFunc, + format, + slices, + filterMode, + wrapMode, + dimension, + enableRandomWrite, + useMipMap, + autoGenerateMips, + isShadowMap, + anisoLevel, + mipMapBias, + msaaSamples, + bindTextureMS, + useDynamicScale, + useDynamicScaleExplicit, + memoryless, + vrUsage, + name + ); + } + /// /// Allocate a new automatically sized RTHandle for the default RTHandle System. 
/// @@ -483,12 +691,12 @@ public static RTHandle Alloc( Assert.IsFalse(descriptor.graphicsFormat != GraphicsFormat.None && descriptor.depthStencilFormat != GraphicsFormat.None, "The RenderTextureDescriptor used to create RTHandle " + name + " contains both graphicsFormat and depthStencilFormat which is not allowed."); - var actualFormat = descriptor.graphicsFormat != GraphicsFormat.None ? descriptor.graphicsFormat : descriptor.depthStencilFormat; + var format = (descriptor.depthStencilFormat!=GraphicsFormat.None) ? descriptor.depthStencilFormat : descriptor.graphicsFormat; + return s_DefaultInstance.Alloc( scaleFunc, + format, descriptor.volumeDepth, - (DepthBits)descriptor.depthBufferBits, - actualFormat, filterMode, wrapMode, descriptor.dimension, diff --git a/Packages/com.unity.render-pipelines.core/Runtime/Utilities/LightUnitUtils.cs b/Packages/com.unity.render-pipelines.core/Runtime/Utilities/LightUnitUtils.cs index 5f902c1b05a..a7f93f6d2b2 100644 --- a/Packages/com.unity.render-pipelines.core/Runtime/Utilities/LightUnitUtils.cs +++ b/Packages/com.unity.render-pipelines.core/Runtime/Utilities/LightUnitUtils.cs @@ -258,7 +258,7 @@ public static float NitsToLumen(float nits, float area) /// Intensity in Candela. public static float LuxToCandela(float lux, float distance) { - return lux / (distance * distance); + return lux * (distance * distance); } /// @@ -269,7 +269,7 @@ public static float LuxToCandela(float lux, float distance) /// Intensity in Lux. 
public static float CandelaToLux(float candela, float distance) { - return candela * distance * distance; + return candela / (distance * distance); } /// diff --git a/Packages/com.unity.render-pipelines.core/ShaderLibrary/AreaLighting.hlsl b/Packages/com.unity.render-pipelines.core/ShaderLibrary/AreaLighting.hlsl index 31da3fc52df..5340aa84beb 100644 --- a/Packages/com.unity.render-pipelines.core/ShaderLibrary/AreaLighting.hlsl +++ b/Packages/com.unity.render-pipelines.core/ShaderLibrary/AreaLighting.hlsl @@ -100,7 +100,7 @@ real3 ComputeEdgeFactor(real3 V1, real3 V2) real V1oV2 = dot(V1, V2); real3 V1xV2 = cross(V1, V2); // Plane normal (tangent to the unit sphere) real sqLen = saturate(1 - V1oV2 * V1oV2); // length(V1xV2) = abs(sin(angle)) - real rcpLen = rsqrt(max(FLT_MIN, sqLen)); // Make sure it is finite + real rcpLen = rsqrt(max(FLT_EPS, sqLen)); // Make sure it is finite #if 0 real y = rcpLen * acos(V1oV2); #else diff --git a/Packages/com.unity.render-pipelines.core/Tests/Editor/NativePassCompilerRenderGraphTests.cs b/Packages/com.unity.render-pipelines.core/Tests/Editor/NativePassCompilerRenderGraphTests.cs index ce7e6e196c3..6664980c909 100644 --- a/Packages/com.unity.render-pipelines.core/Tests/Editor/NativePassCompilerRenderGraphTests.cs +++ b/Packages/com.unity.render-pipelines.core/Tests/Editor/NativePassCompilerRenderGraphTests.cs @@ -19,7 +19,7 @@ TextureDesc SimpleTextureDesc(string name, int w, int h, int samples) { TextureDesc result = new TextureDesc(w, h); result.msaaSamples = (MSAASamples)samples; - result.colorFormat = GraphicsFormat.R8G8B8A8_UNorm; + result.format = GraphicsFormat.R8G8B8A8_UNorm; result.name = name; return result; } @@ -933,7 +933,7 @@ public void TransientTexturesCantBeReused() { width = 1920, height = 1080, - colorFormat = GraphicsFormat.B10G11R11_UFloatPack32, + format = GraphicsFormat.B10G11R11_UFloatPack32, clearBuffer = true, clearColor = Color.red, name = "Transient Texture" diff --git 
a/Packages/com.unity.render-pipelines.core/Tests/Editor/RenderGraphTests.cs b/Packages/com.unity.render-pipelines.core/Tests/Editor/RenderGraphTests.cs index 885bae6e51a..6dabf0c4071 100644 --- a/Packages/com.unity.render-pipelines.core/Tests/Editor/RenderGraphTests.cs +++ b/Packages/com.unity.render-pipelines.core/Tests/Editor/RenderGraphTests.cs @@ -12,6 +12,23 @@ #endif namespace UnityEngine.Rendering.Tests { + [InitializeOnLoad] + class RenderGraphTestsOnLoad + { + static bool IsGraphicsAPISupported() + { + var gfxAPI = SystemInfo.graphicsDeviceType; + if (gfxAPI == GraphicsDeviceType.OpenGLCore) + return false; + return true; + } + + static RenderGraphTestsOnLoad() + { + ConditionalIgnoreAttribute.AddConditionalIgnoreMapping("IgnoreGraphicsAPI", !IsGraphicsAPISupported()); + } + } + class RenderGraphTests { // For RG Record/Hash/Compile testing, use m_RenderGraph @@ -806,7 +823,7 @@ static void RenderFunc3(RenderGraphTestPassData data, RenderGraphContext context var hash0 = m_RenderGraph.ComputeGraphHash(); m_RenderGraph.ClearCompiledGraph(); - texture0 = m_RenderGraph.CreateTexture(new TextureDesc(Vector2.one) { colorFormat = GraphicsFormat.R8G8B8A8_UNorm }); + texture0 = m_RenderGraph.CreateTexture(new TextureDesc(Vector2.one) { format = GraphicsFormat.R8G8B8A8_UNorm }); using (var builder = m_RenderGraph.AddRenderPass("TestPass0", out var passData)) { @@ -842,7 +859,7 @@ static void RenderFunc3(RenderGraphTestPassData data, RenderGraphContext context static void RecordRenderGraph(RenderGraph renderGraph) { - TextureHandle texture0 = renderGraph.CreateTexture(new TextureDesc(Vector2.one) { colorFormat = GraphicsFormat.R8G8B8A8_UNorm }); + TextureHandle texture0 = renderGraph.CreateTexture(new TextureDesc(Vector2.one) { format = GraphicsFormat.R8G8B8A8_UNorm }); using (var builder = renderGraph.AddRenderPass("TestPass0", out var passData)) { @@ -901,6 +918,58 @@ public void GetDescAndInfoForImportedTextureWorks() CoreUtils.Destroy(renderTexture); } + [Test] + 
public void TextureDescFormatPropertiesWork() + { + var formatR32 = GraphicsFormat.R32_SFloat; + + var textureDesc = new TextureDesc(16, 16); + textureDesc.format = formatR32; + + Assert.AreEqual(textureDesc.colorFormat,formatR32); + Assert.AreEqual(textureDesc.depthBufferBits, DepthBits.None); + + textureDesc.depthBufferBits = DepthBits.None; + + //No change expected + Assert.AreEqual(textureDesc.colorFormat, formatR32); + Assert.AreEqual(textureDesc.depthBufferBits, DepthBits.None); + + textureDesc.depthBufferBits = DepthBits.Depth32; + + //Not entirely sure what the platform will select but at least it should be 24 or more (not 0) + Assert.IsTrue((int)textureDesc.depthBufferBits >= 24); + Assert.AreEqual(textureDesc.colorFormat, GraphicsFormat.None); + + textureDesc.format = formatR32; + + Assert.AreEqual(textureDesc.colorFormat, formatR32); + Assert.AreEqual(textureDesc.depthBufferBits, DepthBits.None); + + textureDesc.format = GraphicsFormat.D16_UNorm; + + Assert.AreEqual(textureDesc.depthBufferBits, DepthBits.Depth16); + Assert.AreEqual(textureDesc.colorFormat, GraphicsFormat.None); + + { + var importedTexture = m_RenderGraph.CreateTexture(textureDesc); + + var importedDesc = importedTexture.GetDescriptor(m_RenderGraph); + Assert.AreEqual(textureDesc.format, importedDesc.format); + } + + textureDesc.colorFormat = formatR32; + Assert.AreEqual(textureDesc.depthBufferBits, DepthBits.None); + Assert.AreEqual(textureDesc.colorFormat, textureDesc.format); + + { + var importedTexture = m_RenderGraph.CreateTexture(textureDesc); + + var importedDesc = importedTexture.GetDescriptor(m_RenderGraph); + Assert.AreEqual(textureDesc.format, importedDesc.format); + } + } + [Test] public void ImportingBuiltinRenderTextureTypeWithNoInfoThrows() { @@ -912,6 +981,25 @@ public void ImportingBuiltinRenderTextureTypeWithNoInfoThrows() var importedTexture = m_RenderGraph.ImportTexture(renderTextureHandle); }); + renderTextureHandle.Release(); + } + + [Test] + public void 
ImportingRenderTextureWithColorAndDepthThrows() + { + // Create a new RTHandle texture + var desc = new RenderTextureDescriptor(16, 16, GraphicsFormat.R8G8B8A8_UNorm, GraphicsFormat.D32_SFloat_S8_UInt); + var rt = new RenderTexture(desc) { name = "RenderTextureWithColorAndDepth"}; + + var renderTextureHandle = RTHandles.Alloc(rt); + + Assert.Throws(() => + { + var importedTexture = m_RenderGraph.ImportTexture(renderTextureHandle); + }); + + renderTextureHandle.Release(); + rt.Release(); } [Test] @@ -1191,12 +1279,11 @@ class TestBuffersImport { public BufferHandle bufferHandle; public ComputeShader computeShader; - } private const string kPathToComputeShader = "Packages/com.unity.render-pipelines.core/Tests/Editor/BufferCopyTest.compute"; - [Test] + [Test, ConditionalIgnore("IgnoreGraphicsAPI", "Compute Shaders are not supported for this Graphics API.")] public void ImportingBufferWorks() { // We need a real ScriptableRenderContext and a camera to execute the render graph diff --git a/Packages/com.unity.render-pipelines.core/Tests/Runtime/LightUnitTests.cs b/Packages/com.unity.render-pipelines.core/Tests/Runtime/LightUnitTests.cs index f0a266b6eaa..bb64a6fdb62 100644 --- a/Packages/com.unity.render-pipelines.core/Tests/Runtime/LightUnitTests.cs +++ b/Packages/com.unity.render-pipelines.core/Tests/Runtime/LightUnitTests.cs @@ -93,16 +93,16 @@ public void PointLightUnitConversion() Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Candela, LightUnit.Candela), Is.EqualTo(3f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lumen, LightUnit.Candela), Is.EqualTo(0.238732412f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Candela, LightUnit.Lumen), Is.EqualTo(37.6991119f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lumen, LightUnit.Lux), Is.EqualTo(2.14859176f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, 
LightUnit.Lumen), Is.EqualTo(4.18879032f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lumen, LightUnit.Lux), Is.EqualTo(0.0265258234f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, LightUnit.Lumen), Is.EqualTo(339.292023f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lumen, LightUnit.Ev100), Is.EqualTo(0.933466434f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Ev100, LightUnit.Lumen), Is.EqualTo(12.566371f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Candela, LightUnit.Lux), Is.EqualTo(27f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, LightUnit.Candela), Is.EqualTo(0.333333343f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Candela, LightUnit.Lux), Is.EqualTo(0.333333343f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, LightUnit.Candela), Is.EqualTo(27.0f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Candela, LightUnit.Ev100), Is.EqualTo(4.58496237f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Ev100, LightUnit.Candela), Is.EqualTo(1f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, LightUnit.Ev100), Is.EqualTo(1.41503751f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Ev100, LightUnit.Lux), Is.EqualTo(9f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, LightUnit.Ev100), Is.EqualTo(7.75488758f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Ev100, LightUnit.Lux), Is.EqualTo(0.111111112f).Within(epsilon)); } [Test] @@ -116,31 +116,31 @@ public void SpotLightUnitConversion() Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Candela, LightUnit.Candela), 
Is.EqualTo(3f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lumen, LightUnit.Candela), Is.EqualTo(0.238732412f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Candela, LightUnit.Lumen), Is.EqualTo(37.6991119f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lumen, LightUnit.Lux), Is.EqualTo(2.14859176f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, LightUnit.Lumen), Is.EqualTo(4.18879032f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lumen, LightUnit.Lux), Is.EqualTo(0.0265258234f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, LightUnit.Lumen), Is.EqualTo(339.292023f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lumen, LightUnit.Ev100), Is.EqualTo(0.933466434f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Ev100, LightUnit.Lumen), Is.EqualTo(12.566371f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Candela, LightUnit.Lux), Is.EqualTo(27f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, LightUnit.Candela), Is.EqualTo(0.333333343f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Candela, LightUnit.Lux), Is.EqualTo(0.333333343f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, LightUnit.Candela), Is.EqualTo(27.0f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Candela, LightUnit.Ev100), Is.EqualTo(4.58496237f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Ev100, LightUnit.Candela), Is.EqualTo(1f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, LightUnit.Ev100), Is.EqualTo(1.41503751f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 
3f, LightUnit.Ev100, LightUnit.Lux), Is.EqualTo(9f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, LightUnit.Ev100), Is.EqualTo(7.75488758f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Ev100, LightUnit.Lux), Is.EqualTo(0.111111112f).Within(epsilon)); l.enableSpotReflector = true; Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Candela, LightUnit.Candela), Is.EqualTo(3f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lumen, LightUnit.Candela), Is.EqualTo(14.0125141f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Candela, LightUnit.Lumen), Is.EqualTo(0.642283022f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lumen, LightUnit.Lux), Is.EqualTo(126.112625f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, LightUnit.Lumen), Is.EqualTo(0.0713647828f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lumen, LightUnit.Lux), Is.EqualTo(1.55694604f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, LightUnit.Lumen), Is.EqualTo(5.78054714f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lumen, LightUnit.Ev100), Is.EqualTo(6.80864382f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Ev100, LightUnit.Lumen), Is.EqualTo(0.214094341f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Candela, LightUnit.Lux), Is.EqualTo(27f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, LightUnit.Candela), Is.EqualTo(0.333333343f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Candela, LightUnit.Lux), Is.EqualTo(0.333333343f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, LightUnit.Candela), 
Is.EqualTo(27.0f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Candela, LightUnit.Ev100), Is.EqualTo(4.58496237f).Within(epsilon)); Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Ev100, LightUnit.Candela), Is.EqualTo(1f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, LightUnit.Ev100), Is.EqualTo(1.41503751f).Within(epsilon)); - Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Ev100, LightUnit.Lux), Is.EqualTo(9f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Lux, LightUnit.Ev100), Is.EqualTo(7.75488758f).Within(epsilon)); + Assert.That(LightUnitUtils.ConvertIntensity(l, 3f, LightUnit.Ev100, LightUnit.Lux), Is.EqualTo(0.111111112f).Within(epsilon)); } [Test] diff --git a/Packages/com.unity.render-pipelines.core/package.json b/Packages/com.unity.render-pipelines.core/package.json index ccc4cb1f669..94cb2dbebe0 100644 --- a/Packages/com.unity.render-pipelines.core/package.json +++ b/Packages/com.unity.render-pipelines.core/package.json @@ -8,7 +8,7 @@ "com.unity.burst": "1.8.14", "com.unity.mathematics": "1.3.2", "com.unity.ugui": "2.0.0", - "com.unity.collections": "2.4.1", + "com.unity.collections": "2.4.3", "com.unity.modules.physics": "1.0.0", "com.unity.modules.terrain": "1.0.0", "com.unity.modules.jsonserialize": "1.0.0", diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDRP-Sample-Projects.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDRP-Sample-Projects.md index b804920dc43..acb3fff95f2 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDRP-Sample-Projects.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/HDRP-Sample-Projects.md @@ -24,8 +24,7 @@ Graphics Features used: Relevant links : * [Photogrammetry](https://unity.com/solutions/photogrammetry) - * [Photogrammetry in Unity: Making Real-World Objects into Digital 
Assets](https://blogs.unity3d.com/2018/03/12/photogrammetry-in-unity-making-real-world-objects-into-digital-assets/) - * [Creating Assets with Photogrammetry using a rotating surface](https://blogs.unity3d.com/2019/03/11/creating-assets-with-photogrammetry-using-a-rotating-surface/) + * [Photogrammetry in Unity: Making Real-World Objects into Digital Assets](https://unity.com/blog/engine-platform/photogrammetry-real-world-objects-into-digital-assets) * [Siggraph 2017 - Photogrammetry workflow and the tech behind the de-lighting tool](https://www.youtube.com/watch?v=Ny9ZXt_2v2Y) @@ -47,10 +46,7 @@ Graphics features used: * GPU Lightmapper Relevant links : -* [Now Available: The Spaceship Demo Project using VFX Graph and High-Definition Render Pipeline](https://blogs.unity3d.com/2019/08/19/now-available-the-spaceship-demo-project-using-vfx-graph-and-high-definition-render-pipeline/) * [VFX Graph: building visual effects in the Spaceship Demo | Unite Now 2020](https://www.youtube.com/watch?v=Od16mcOzl2M) -* [Visual Effect Graph Samples](https://blogs.unity3d.com/2019/03/06/visual-effect-graph-samples/) - ## VR Alchemy Lab diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/HDSampleBuffer.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/HDSampleBuffer.png new file mode 100644 index 00000000000..42f7f619f25 Binary files /dev/null and b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/HDSampleBuffer.png differ diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SROShaderPass.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SROShaderPass.png new file mode 100644 index 00000000000..7cdc5c57568 Binary files /dev/null and b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SROShaderPass.png differ diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SRPBatcher.png 
b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SRPBatcher.png new file mode 100644 index 00000000000..a44675d6cd7 Binary files /dev/null and b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SRPBatcher.png differ diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SRP_Batcher_batch_information.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SRP_Batcher_batch_information.png new file mode 100644 index 00000000000..6555dba5cb2 Binary files /dev/null and b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SRP_Batcher_batch_information.png differ diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SRP_Batcher_loop.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SRP_Batcher_loop.png new file mode 100644 index 00000000000..54cfd55c8f5 Binary files /dev/null and b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SRP_Batcher_loop.png differ diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SRP_batcher_shader_compatibility.png b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SRP_batcher_shader_compatibility.png new file mode 100644 index 00000000000..235eb2a0cb8 Binary files /dev/null and b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Images/SRP_batcher_shader_compatibility.png differ diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Optimization.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Optimization.md index d1a4073d9a8..0612b96828b 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Optimization.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Optimization.md @@ -4,4 +4,6 @@ Optimize the High Definition Render Pipeline (HDRP) to improve the 
performance o |Page|Description| |-|-| +|[Optimizing draw calls](reduce-draw-calls-landing-hdrp.md) | Techniques for speeding up rendering by reducing the number of drawing commands the CPU sends to the GPU. | |[Reduce rendering work on the CPU](reduce-rendering-work-on-cpu.md)|Use the GPU Resident Drawer or GPU occlusion culling to speed up rendering.| + diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher-Enable.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher-Enable.md new file mode 100644 index 00000000000..22521aa3ea1 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher-Enable.md @@ -0,0 +1,16 @@ +# Enable the SRP Batcher + +When you use HDRP, Unity enables the SRP Batcher by default. Disabling the SRP Batcher isn't recommended. However, you can temporarily disable the SRP Batcher for debugging purposes. + +To enable and disable the SRP Batcher at build time using the Editor: + +1. In the Project window, select the [HDRP Asset](https://docs.unity3d.com/Packages/com.unity.render-pipelines.high-definition@latest/index.html?subfolder=/manual/HDRP-Asset.html). +2. In the Inspector for the asset, enter [Debug mode](https://docs.unity3d.com/6000.0/Documentation/Manual/InspectorOptions). In Debug mode, you can see the properties of the HDRP Asset, including the SRP Batcher property. +3. Select **Enable** **SRP Batcher** to enable or disable the SRP Batcher. 
+ +To enable or disable the SRP Batcher at runtime, toggle the following global variable in your C# code: + +``` +GraphicsSettings.useScriptableRenderPipelineBatching = true; +``` + diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher-Incompatible.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher-Incompatible.md new file mode 100644 index 00000000000..6ace4589bae --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher-Incompatible.md @@ -0,0 +1,32 @@ +# Remove SRP Batcher compatibility for GameObjects + +In some rare cases, you might want to intentionally make particular GameObjects incompatible with the SRP Batcher. For example, if you want to use [GPU instancing](https://docs.unity3d.com/6000.0/Documentation/Manual/GPUInstancing), which isn't compatible with the SRP Batcher. If you want to render many identical meshes with the exact same material, GPU instancing can be more efficient than the SRP Batcher. To use GPU instancing, you must either: + +* Use [Graphics.RenderMeshInstanced](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Graphics.RenderMeshInstanced). +* Manually remove SRP Batcher compatibility and enable GPU instancing for the material. + +There are two ways to remove compatibility with the SRP Batcher from a GameObject: + +* Make the shader incompatible. +* Make the renderer incompatible. + +**Tip**: If you use GPU instancing instead of the SRP Batcher, use the [Profiler](https://docs.unity3d.com/6000.0/Documentation/Manual/Profiler) to make sure that GPU instancing is more efficient for your application than the SRP Batcher. + +## Removing shader compatibility + +You can make both hand-written and Shader Graph shaders incompatible with the SRP Batcher. 
However, for Shader Graph shaders, if you change and recompile the Shader Graph often, it's simpler to make the [renderer incompatible](#removing-renderer-compatibility) instead. + +To make a Unity shader incompatible with the SRP Batcher, you need to make changes to the shader source file: + +1. For hand-written shaders, open the shader source file. For Shader Graph shaders, copy the Shader Graph's compiled shader source code into a new shader source file. Use the new shader source file in your application instead of the Shader Graph. +2. Add a new [material property](https://docs.unity3d.com/6000.0/Documentation/Manual/SL-Properties) declaration into the shader’s `Properties` block. Don't declare the new material property in the `UnityPerMaterial` constant buffer. + +The material property doesn't need to do anything; just having a material property that doesn't exist in the `UnityPerMaterial` constant buffer makes the shader incompatible with the SRP Batcher. + +**Warning**: If you use a Shader Graph, be aware that every time you edit and recompile the Shader Graph, you must repeat this process. + + + +## Removing renderer compatibility + +You can make individual renderers incompatible with the SRP Batcher. To do this, add a `MaterialPropertyBlock` to the renderer. diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher-Materials.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher-Materials.md new file mode 100644 index 00000000000..8350d44af94 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher-Materials.md @@ -0,0 +1,26 @@ +# Check whether a GameObject is compatible with the SRP Batcher + +## GameObject compatibility + +In any given scene, some GameObjects are compatible with the SRP Batcher, and some aren't. Compatible GameObjects use the SRP Batcher code path, and non-compatible GameObjects use the standard SRP code path. 
For more information, see [How the SRP Batcher works](#how-the-srp-batcher-works). + +A GameObject must meet the following requirements to be compatible with the SRP Batcher code path: + +* The GameObject must contain either a mesh or a skinned mesh. It can't be a particle. +* The GameObject mustn't use [MaterialPropertyBlocks](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/MaterialPropertyBlock). +* The shader that the GameObject uses must be compatible with the SRP Batcher. For more information, see [Shader compatibility](#shader-compatibility). + + + +## Shader compatibility + +All lit and unlit shaders in the the Universal Render Pipeline (URP) and the High Definition Render Pipeline (HDRP) fit this requirement (except for the particle versions of these shaders). + +For a custom shader to be compatible with the SRP Batcher it must meet the following requirements: + +* The shader must declare all built-in engine properties in a single constant buffer named `UnityPerDraw`. For example, `unity_ObjectToWorld`, or `unity_SHAr`. +* The shader must declare all material properties in a single constant buffer named `UnityPerMaterial`. + +You can check the compatibility status of a shader in the Inspector panel. + +![You can check the compatibility of your shaders in the Inspector panel for the specific shader.](Images/SRP_batcher_shader_compatibility.png) \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher-Profile.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher-Profile.md new file mode 100644 index 00000000000..a06de988ff0 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher-Profile.md @@ -0,0 +1,17 @@ + +# Troubleshoot the SRP Batcher + +You can check the status of SRP batches in the [Frame Debugger](https://docs.unity3d.com/6000.0/Documentation/Manual/FrameDebugger) window. 
Each SRP Batch displays how many draw calls Unity used, which keywords Unity attached to the shader, and the reason why Unity didn't batch that draw call with the previous one. + +To check the status of SRP Batcher batches: + +1. In the Editor, open the Frame Debugger (menu: **Window** > **Analysis** > **Frame Debugger**). +2. In the Frame Debugger, go to **Render Camera** > **Render Opaques**. +3. Expand the **RenderLoopNewBatcher. Draw** list. +4. Select on the **SRP Batch** you want to inspect. + +In the example below, the reason is: **Nodes have different shaders**. This means that the shader for that SRP batch is different to the one in the previous SRP batch. Because the SRP Batcher used a different shader, the SRP Batcher created a new batch. If several SRP batches have a low number of draw calls, it often means the project uses too many shader variants. + +![In the Frame Debugger window, you can find details about individual SRP batches, including why the SRP Batcher created a new SRP batch instead of continuing the existing one.](Images/SRP_Batcher_batch_information.png) + +If you write your own Scriptable Render Pipeline, instead of using either the Universal Render Pipeline or the High Definition Render Pipeline, try to write a generic multi-purpose shader with a minimal number of keywords. This is optimal because you can use as many material properties as you want. \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher-landing.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher-landing.md new file mode 100644 index 00000000000..f8f24a027e0 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher-landing.md @@ -0,0 +1,12 @@ +# Scriptable Render Pipeline (SRP) Batcher + +Resources for using the Scriptable Render Pipeline (SRP) Batcher to reduce the number of render state changes between draw calls. 
+ +| **Page** | **Description** | +| --- | --- | +| [Introduction to the SRP Batcher](SRPBatcher.md) | Understand how the SRP Batcher reduces render state changes between draw calls. | +| [Check whether a GameObject is compatible with the SRP Batcher](SRPBatcher-Materials.md) | Find out if Unity can include a GameObject and a shader in the SRP Batcher. | +| [Enable the SRP Batcher](SRPBatcher-Enable.md) | Enable the SRP Batcher in the HDRP Asset. | +| [Troubleshoot the SRP Batcher](SRPBatcher-Profile.md) | Use the Frame Debugger to solve common issues with the SRP Batcher, such as a low number of draw calls in batches. | +| [Remove SRP Batcher compatibility for GameObjects](SRPBatcher-Incompatible.md) | Make a shader or renderer incompatible with the SRP Batcher, for example if you want to use [GPU instancing](https://docs.unity3d.com/6000.0/Documentation/Manual/GPUInstancing.md). | + diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher.md new file mode 100644 index 00000000000..20e951df42f --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/SRPBatcher.md @@ -0,0 +1,45 @@ +--- +uid: um-srp-batcher +--- + +# Scriptable Render Pipeline Batcher + +The Scriptable Render Pipeline (SRP) Batcher is a [draw call optimization](reduce-draw-calls-landing-hdrp.md) that significantly improves performance for applications that use an SRP. The SRP Batcher reduces the CPU time Unity requires to prepare and dispatch draw calls for materials that use the same shader variant. + +![The Scriptable Render Pipeline (SRP) Batcher reduces the CPU time Unity requires to render scenes with many materials that use the same shader variant.](Images/SRPBatcher.png) + +## Requirements and compatibility + +This section includes information about the render pipeline compatibility of the SRP Batcher. 
+ +### Render pipeline compatibility + +| **Feature** | **Universal Render Pipeline (URP)** | **High Definition Render Pipeline (HDRP)** | **Custom Scriptable Render Pipeline (SRP)** | **Built-in Render Pipeline** | +| --------------- | ---------------------------- | ----------------------------------- | ------------------------------------------ | ------------------------------------------- | +| **SRP Batcher** | Yes | Yes | Yes | No | + + + + +## How the SRP Batcher works + +The traditional way to optimize draw calls is to reduce the number of them. Instead, the SRP Batcher reduces render-state changes between draw calls. To do this, the SRP Batcher combines a sequence of `bind` and `draw` GPU commands. Each sequence of commands is called an SRP batch. + +![The batching of bind and draw commands reduces the GPU setup between draw calls.](Images/SROShaderPass.png) + +To achieve optimal performance for your rendering, each SRP batch should contain as many `bind` and `draw` commands as possible. To achieve this, use as few shader variants as possible. You can still use as many different materials with the same shader as you want. + +When Unity detects a new material during the render loop, the CPU collects all properties and binds them to the GPU in constant buffers. The number of GPU buffers depends on how the shader declares its constant buffers. + +The SRP Batcher is a low-level render loop that makes material data persist in GPU memory. If the material content doesn't change, theSRP Batcher doesn't make any render-state changes. Instead, the SRP Batcher uses a dedicated code path to update the Unity Engine properties in a large GPU buffer, like this: + +![The SRP Batcher rendering workflow. The SRP Batcher uses a dedicated code path to update the Unity Engine properties in a large GPU buffer.](Images/SRP_Batcher_loop.png) + +Here, the CPU only handles the Unity Engine properties, labeled **Per Object large buffer** in the above diagram. 
All materials have persistent constant buffers located in GPU memory, which are ready to use. This speeds up rendering because: + +* All material content now persists in GPU memory. +* Dedicated code manages a large per-object GPU constant buffer for all per-object properties. + + + + diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/TableOfContents.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/TableOfContents.md index 128dd8c3476..12fee589791 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/TableOfContents.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/TableOfContents.md @@ -302,6 +302,33 @@ * [Known issues](Known-Issues.md) * [Stencil Buffer Usage](Stencil-Usage.md) * [Optimization](Optimization.md) + * [Optimizing draw calls](reduce-draw-calls-landing-hdrp.md) + * [Scriptable Render Pipeline Batcher](SRPBatcher-landing.md) + * [Introduction to the SRP Batcher](SRPBatcher.md) + * [Check whether a GameObject is compatible with the SRP Batcher](SRPBatcher-Materials.md) + * [Enable the SRP Batcher](SRPBatcher-Enable.md) + * [Troubleshoot the SRP Batcher](SRPBatcher-Profile.md) + * [Remove SRP Batcher compatibility for GameObjects](SRPBatcher-Incompatible.md) + * [BatchRendererGroup](batch-renderer-group.md) + * [How BatchRendererGroup works](batch-renderer-group-how.md) + * [Set up your project for BatchRendererGroup](batch-renderer-group-getting-started.md) + * [Creating a renderer with BatchRendererGroup](batch-renderer-group-creating-a-renderer.md) + * [Initializing a BatchRendererGroup object](batch-renderer-group-initializing.md) + * [Registering meshes and materials](batch-renderer-group-registering-meshes-and-materials.md) + * [Creating batches](batch-renderer-group-creating-batches.md) + * [Creating draw commands](batch-renderer-group-creating-draw-commands.md) + * [Writing custom shaders for the 
BatchRendererGroup](batch-renderer-group-writing-shaders.md) + * [DOTS Instancing shaders](dots-instancing-shaders.md) + * [Support DOTS Instancing in a custom shader](dots-instancing-shaders-support.md) + * [Declare DOTS Instancing properties in a custom shader](dots-instancing-shaders-declare.md) + * [Access DOTS Instancing properties in a custom shader](dots-instancing-shaders-access.md) + * [Best practice for DOTS Instancing shaders](dots-instancing-shaders-best-practice.md) + * [DOTS Instancing shader samples](dots-instancing-shaders-samples.md) + * [Example of a DOTS Instancing shader that accesses per-instance data](dots-instancing-shaders-per-instance.md) + * [Example of a DOTS Instancing shader that accesses constant data](dots-instancing-shaders-constant.md) + * [Example of using UNITY_DOTS_INSTANCED_PROP macros in a DOTS Instancing shader](dots-instancing-shaders-unity-dots-instanced-prop.md) + * [Macros reference](dots-instancing-shaders-macros.md) + * [Shader functions reference](dots-instancing-shaders-functions.md) * [Reduce rendering work on the CPU](reduce-rendering-work-on-cpu.md) * [Use the GPU Resident Drawer](gpu-resident-drawer.md) * [Make a GameObject compatible with the GPU Resident Drawer](make-object-compatible-gpu-rendering.md) diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Upgrading-from-2023.2-to-unity-6-preview.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Upgrading-from-2023.2-to-unity-6-preview.md index 3211ee940b9..bcd199fcc60 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/Upgrading-from-2023.2-to-unity-6-preview.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/Upgrading-from-2023.2-to-unity-6-preview.md @@ -43,3 +43,8 @@ Note that this change will result in a loss of precision for the attenuation val To disable `PrecomputedAtmosphericAttenuation`, first you need to install the HDRP config package which can be done 
using the [Render Pipeline Wizard](Render-Pipeline-Wizard.md). For more info, see [HDRP Config](configure-a-project-using-the-hdrp-config-package.md). Once installed, go to ShaderConfig.cs and set `PrecomputedAtmosphericAttenuation` to 0. + +## Physically Based Depth Of Field + +We improved the performance of the PBR DoF and removed the parameter "High Quality Filtering" as it was too costly to be used in a reasonable scenario. The replacement of this option is the resolution dropdown which allows you to use full resolution physically based depth of field whereas before it was maxed at half resolution. This allows for more precise depth of field and fewer artifacts but it's still very costly. +The PBR DoF now also takes into account the aperture shape defined in the physical camera settings (blade count, etc.) diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-creating-a-renderer.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-creating-a-renderer.md new file mode 100644 index 00000000000..4df0677f9cd --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-creating-a-renderer.md @@ -0,0 +1,14 @@ +--- +uid: um-batch-renderer-group-creating-a-renderer +--- + +# Creating a renderer with the BatchRendererGroup API + +This section of the documentation explains how to use BatchRendererGroup (BRG) to create a renderer. + +| **Topic** | **Description** | +| ------------------------------------------------------------ | ------------------------------------------------------------ | +| [Initialize a BatchRendererGroup object](batch-renderer-group-initializing.md) | Explains how to initialize a BatchRendererGroup object with a minimal OnPerformCulling callback. | +| [Register meshes and materials](batch-renderer-group-registering-meshes-and-materials.md) | Explains how to register meshes and materials so you can use them in a renderer. 
| +| [Create batches](batch-renderer-group-creating-batches.md) | Explains how to create batches of instance data that describe what to render. | +| [Create draw commands](batch-renderer-group-creating-draw-commands.md) | Explains how to create draw commands and complete your simple renderer. | diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-creating-batches.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-creating-batches.md new file mode 100644 index 00000000000..fa9e737000d --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-creating-batches.md @@ -0,0 +1,205 @@ +--- +uid: um-batch-renderer-group-creating-batches +--- + +# Create batches with the BatchRendererGroup API + +BatchRendererGroup (BRG) doesn't automatically provide any instance data. Instance data includes many properties which are normally built in for GameObjects, such as transform matrices, light probe coefficients, and lightmap texture coordinates. This means features like ambient lighting only work if you provide instance data yourself. To do this, you add and configure batches. A batch is a collection of instances, where each instance corresponds to a single thing to render. What the instance actually represents depends on what you want to render. For example, in a prop object renderer, an instance could represent a single prop, and in a terrain renderer, it could represent a single terrain patch. + +Each batch has a set of metadata values and a single [GraphicsBuffer](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/GraphicsBuffer), which every instance in the batch shares. To load data for an instance, the typical process is to use the metadata values to load from the correct location in the GraphicsBuffer. 
The `UNITY_ACCESS_DOTS_INSTANCED_PROP` family of shader macros work with this scheme (see [Accessing DOTS Instanced properties](dots-instancing-shaders.md#accessing-dots-instanced-properties)). However, you don't need to use this per-instance data loading scheme, and you are free to implement your own scheme if you want. + +To create a batch, use [BatchRendererGroup.AddBatch](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchRendererGroup.AddBatch). The method receives an array of metadata values as well as a handle to a GraphicsBuffer. Unity passes the metadata values to the shader when it renders instances from the batch, and binds the GraphicsBuffer as `unity_DOTSInstanceData`. For metadata values that the shader uses but you don't pass in when you create a batch, Unity sets them to zero. + +You can't modify batch metadata values after you create them, so any metadata values you pass to the batch are final. If you need to change any metadata values, create a new batch and remove the old one. You can modify the batch's GraphicsBuffer at any time. To do this, use [SetBatchBuffer](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchRendererGroup.SetBatchBuffer). This can be useful to resize buffers and allocate a larger buffer if the existing one runs out of space. + +**Note**: You don't need to specify the size of a batch when you create one. Instead, you have to make sure that the shader can correctly handle the instance indices you pass to it. What this means depends on the shader. For Unity-provided SRP shaders, this means that there must be valid instance data in the buffer at the index you pass. + +See the following code sample for an example of how to create a batch with metadata values and a GraphicsBuffer of instance data. This code sample builds on the one in [Registering meshes and materials](batch-renderer-group-registering-meshes-and-materials.md). 
+ +```lang-csharp +using System; +using Unity.Collections; +using Unity.Collections.LowLevel.Unsafe; +using Unity.Jobs; +using UnityEngine; +using UnityEngine.Rendering; + +public class SimpleBRGExample : MonoBehaviour +{ + public Mesh mesh; + public Material material; + + private BatchRendererGroup m_BRG; + + private GraphicsBuffer m_InstanceData; + private BatchID m_BatchID; + private BatchMeshID m_MeshID; + private BatchMaterialID m_MaterialID; + + // Some helper constants to make calculations more convenient. + private const int kSizeOfMatrix = sizeof(float) * 4 * 4; + private const int kSizeOfPackedMatrix = sizeof(float) * 4 * 3; + private const int kSizeOfFloat4 = sizeof(float) * 4; + private const int kBytesPerInstance = (kSizeOfPackedMatrix * 2) + kSizeOfFloat4; + private const int kExtraBytes = kSizeOfMatrix * 2; + private const int kNumInstances = 3; + + // The PackedMatrix is a convenience type that converts matrices into + // the format that Unity-provided SRP shaders expect. 
+ struct PackedMatrix + { + public float c0x; + public float c0y; + public float c0z; + public float c1x; + public float c1y; + public float c1z; + public float c2x; + public float c2y; + public float c2z; + public float c3x; + public float c3y; + public float c3z; + + public PackedMatrix(Matrix4x4 m) + { + c0x = m.m00; + c0y = m.m10; + c0z = m.m20; + c1x = m.m01; + c1y = m.m11; + c1z = m.m21; + c2x = m.m02; + c2y = m.m12; + c2z = m.m22; + c3x = m.m03; + c3y = m.m13; + c3z = m.m23; + } + } + + private void Start() + { + m_BRG = new BatchRendererGroup(this.OnPerformCulling, IntPtr.Zero); + m_MeshID = m_BRG.RegisterMesh(mesh); + m_MaterialID = m_BRG.RegisterMaterial(material); + + AllocateInstanceDataBuffer(); + PopulateInstanceDataBuffer(); + } + + private void AllocateInstanceDataBuffer() + { + m_InstanceData = new GraphicsBuffer(GraphicsBuffer.Target.Raw, + BufferCountForInstances(kBytesPerInstance, kNumInstances, kExtraBytes), + sizeof(int)); + } + + private void PopulateInstanceDataBuffer() + { + // Place a zero matrix at the start of the instance data buffer, so loads from address 0 return zero. + var zero = new Matrix4x4[1] { Matrix4x4.zero }; + + // Create transform matrices for three example instances. + var matrices = new Matrix4x4[kNumInstances] + { + Matrix4x4.Translate(new Vector3(-2, 0, 0)), + Matrix4x4.Translate(new Vector3(0, 0, 0)), + Matrix4x4.Translate(new Vector3(2, 0, 0)), + }; + + // Convert the transform matrices into the packed format that the shader expects. + var objectToWorld = new PackedMatrix[kNumInstances] + { + new PackedMatrix(matrices[0]), + new PackedMatrix(matrices[1]), + new PackedMatrix(matrices[2]), + }; + + // Also create packed inverse matrices. + var worldToObject = new PackedMatrix[kNumInstances] + { + new PackedMatrix(matrices[0].inverse), + new PackedMatrix(matrices[1].inverse), + new PackedMatrix(matrices[2].inverse), + }; + + // Make all instances have unique colors. 
+ var colors = new Vector4[kNumInstances] + { + new Vector4(1, 0, 0, 1), + new Vector4(0, 1, 0, 1), + new Vector4(0, 0, 1, 1), + }; + + // In this simple example, the instance data is placed into the buffer like this: + // Offset | Description + // 0 | 64 bytes of zeroes, so loads from address 0 return zeroes + // 64 | 32 uninitialized bytes to make working with SetData easier, otherwise unnecessary + // 96 | unity_ObjectToWorld, three packed float3x4 matrices + // 240 | unity_WorldToObject, three packed float3x4 matrices + // 384 | _BaseColor, three float4s + + // Calculates start addresses for the different instanced properties. unity_ObjectToWorld starts + // at address 96 instead of 64, because the computeBufferStartIndex parameter of SetData + // is expressed as source array elements, so it is easier to work in multiples of sizeof(PackedMatrix). + uint byteAddressObjectToWorld = kSizeOfPackedMatrix * 2; + uint byteAddressWorldToObject = byteAddressObjectToWorld + kSizeOfPackedMatrix * kNumInstances; + uint byteAddressColor = byteAddressWorldToObject + kSizeOfPackedMatrix * kNumInstances; + + // Upload the instance data to the GraphicsBuffer so the shader can load them. + m_InstanceData.SetData(zero, 0, 0, 1); + m_InstanceData.SetData(objectToWorld, 0, (int)(byteAddressObjectToWorld / kSizeOfPackedMatrix), objectToWorld.Length); + m_InstanceData.SetData(worldToObject, 0, (int)(byteAddressWorldToObject / kSizeOfPackedMatrix), worldToObject.Length); + m_InstanceData.SetData(colors, 0, (int)(byteAddressColor / kSizeOfFloat4), colors.Length); + + // Set up metadata values to point to the instance data. Set the most significant bit 0x80000000 in each + // which instructs the shader that the data is an array with one value per instance, indexed by the instance index. + // Any metadata values that the shader uses that are not set here will be 0. When a value of 0 is used with + // UNITY_ACCESS_DOTS_INSTANCED_PROP (i.e. 
without a default), the shader interprets the + // 0x00000000 metadata value and loads from the start of the buffer. The start of the buffer is + // a zero matrix so this sort of load is guaranteed to return zero, which is a reasonable default value. + var metadata = new NativeArray<MetadataValue>(3, Allocator.Temp); + metadata[0] = new MetadataValue { NameID = Shader.PropertyToID("unity_ObjectToWorld"), Value = 0x80000000 | byteAddressObjectToWorld, }; + metadata[1] = new MetadataValue { NameID = Shader.PropertyToID("unity_WorldToObject"), Value = 0x80000000 | byteAddressWorldToObject, }; + metadata[2] = new MetadataValue { NameID = Shader.PropertyToID("_BaseColor"), Value = 0x80000000 | byteAddressColor, }; + + // Finally, create a batch for the instances and make the batch use the GraphicsBuffer with the + // instance data as well as the metadata values that specify where the properties are. + m_BatchID = m_BRG.AddBatch(metadata, m_InstanceData.bufferHandle); + } + + // Raw buffers are allocated in ints. This is a utility method that calculates + // the required number of ints for the data. + int BufferCountForInstances(int bytesPerInstance, int numInstances, int extraBytes = 0) + { + // Round byte counts to int multiples + bytesPerInstance = (bytesPerInstance + sizeof(int) - 1) / sizeof(int) * sizeof(int); + extraBytes = (extraBytes + sizeof(int) - 1) / sizeof(int) * sizeof(int); + int totalBytes = bytesPerInstance * numInstances + extraBytes; + return totalBytes / sizeof(int); + } + + + private void OnDisable() + { + m_BRG.Dispose(); + } + + public unsafe JobHandle OnPerformCulling( + BatchRendererGroup rendererGroup, + BatchCullingContext cullingContext, + BatchCullingOutput cullingOutput, + IntPtr userContext) + { + // This simple example doesn't use jobs, so it can just return an empty JobHandle. + // Performance-sensitive applications should use Burst jobs to implement + // culling and draw command output. 
In this case, this function would return a + // handle here that completes when the Burst jobs finish. + return new JobHandle(); + + } +} +``` + +Now that you have registered all the required resources with a BatchRendererGroup object, you can create draw commands. For more information, see the next topic, [Creating draw commands](batch-renderer-group-creating-draw-commands.md). \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-creating-draw-commands.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-creating-draw-commands.md new file mode 100644 index 00000000000..3d2fe8ed04d --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-creating-draw-commands.md @@ -0,0 +1,259 @@ +--- +uid: um-batch-renderer-group-creating-draw-commands +--- + +# Create draw commands with the BatchRendererGroup API + +To create draw commands, use the [BatchRendererGroup.OnPerformCulling](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchRendererGroup.OnPerformCulling) callback. Specifically, you use the callback's [BatchCullingOutput](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchCullingOutput) parameter. This parameter contains a [NativeArray](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Unity.Collections.NativeArray_1) with a single element. This layout means you can directly modify the contents of the array element without Unity copying data unnecessarily. The element in the `NativeArray` is a [BatchCullingOutputDrawCommands](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchCullingOutputDrawCommands) struct that contains the [actual draw commands](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchCullingOutputDrawCommands-drawCommands). 
+ +Your `OnPerformCulling` implementation can generate as many or as few draw commands as you want. A simple implementation that only uses a single mesh and material could only output a single draw command, a more complex implementation could output thousands, each with different meshes and materials. + +**Note**: To provide maximum flexibility, Unity doesn't preallocate the arrays in the `BatchCullingOutputDrawCommands` output struct and stores them as raw pointers so you can easily allocate them and use them from [Burst](https://docs.unity3d.com/Packages/com.unity.burst@latest) jobs. You must allocate the arrays using [UnsafeUtility.Malloc](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Unity.Collections.LowLevel.Unsafe.UnsafeUtility.Malloc) with the [Allocator.TempJob](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Unity.Collections.Allocator.TempJob) allocator. The callback shouldn't release the memory. Instead, Unity releases the memory after it finishes rendering using the draw commands. + +See the following code sample for an example of how to create draw commands. This code sample builds on the one in [Creating batches](batch-renderer-group-creating-batches.md). + +```lang-csharp +using System; +using Unity.Collections; +using Unity.Collections.LowLevel.Unsafe; +using Unity.Jobs; +using UnityEngine; +using UnityEngine.Rendering; + +public class SimpleBRGExample : MonoBehaviour +{ + public Mesh mesh; + public Material material; + + private BatchRendererGroup m_BRG; + + private GraphicsBuffer m_InstanceData; + private BatchID m_BatchID; + private BatchMeshID m_MeshID; + private BatchMaterialID m_MaterialID; + + // Some helper constants to make calculations more convenient. 
+ private const int kSizeOfMatrix = sizeof(float) * 4 * 4; + private const int kSizeOfPackedMatrix = sizeof(float) * 4 * 3; + private const int kSizeOfFloat4 = sizeof(float) * 4; + private const int kBytesPerInstance = (kSizeOfPackedMatrix * 2) + kSizeOfFloat4; + private const int kExtraBytes = kSizeOfMatrix * 2; + private const int kNumInstances = 3; + + // The PackedMatrix is a convenience type that converts matrices into + // the format that Unity-provided SRP shaders expect. + struct PackedMatrix + { + public float c0x; + public float c0y; + public float c0z; + public float c1x; + public float c1y; + public float c1z; + public float c2x; + public float c2y; + public float c2z; + public float c3x; + public float c3y; + public float c3z; + + public PackedMatrix(Matrix4x4 m) + { + c0x = m.m00; + c0y = m.m10; + c0z = m.m20; + c1x = m.m01; + c1y = m.m11; + c1z = m.m21; + c2x = m.m02; + c2y = m.m12; + c2z = m.m22; + c3x = m.m03; + c3y = m.m13; + c3z = m.m23; + } + } + + private void Start() + { + m_BRG = new BatchRendererGroup(this.OnPerformCulling, IntPtr.Zero); + m_MeshID = m_BRG.RegisterMesh(mesh); + m_MaterialID = m_BRG.RegisterMaterial(material); + + AllocateInstanceDateBuffer(); + PopulateInstanceDataBuffer(); + } + + private void AllocateInstanceDateBuffer() + { + m_InstanceData = new GraphicsBuffer(GraphicsBuffer.Target.Raw, + BufferCountForInstances(kBytesPerInstance, kNumInstances, kExtraBytes), + sizeof(int)); + } + + private void PopulateInstanceDataBuffer() + { + // Place a zero matrix at the start of the instance data buffer, so loads from address 0 return zero. + var zero = new Matrix4x4[1] { Matrix4x4.zero }; + + // Create transform matrices for three example instances. + var matrices = new Matrix4x4[kNumInstances] + { + Matrix4x4.Translate(new Vector3(-2, 0, 0)), + Matrix4x4.Translate(new Vector3(0, 0, 0)), + Matrix4x4.Translate(new Vector3(2, 0, 0)), + }; + + // Convert the transform matrices into the packed format that shaders expects. 
+ var objectToWorld = new PackedMatrix[kNumInstances] + { + new PackedMatrix(matrices[0]), + new PackedMatrix(matrices[1]), + new PackedMatrix(matrices[2]), + }; + + // Also create packed inverse matrices. + var worldToObject = new PackedMatrix[kNumInstances] + { + new PackedMatrix(matrices[0].inverse), + new PackedMatrix(matrices[1].inverse), + new PackedMatrix(matrices[2].inverse), + }; + + // Make all instances have unique colors. + var colors = new Vector4[kNumInstances] + { + new Vector4(1, 0, 0, 1), + new Vector4(0, 1, 0, 1), + new Vector4(0, 0, 1, 1), + }; + + // In this simple example, the instance data is placed into the buffer like this: + // Offset | Description + // 0 | 64 bytes of zeroes, so loads from address 0 return zeroes + // 64 | 32 uninitialized bytes to make working with SetData easier, otherwise unnecessary + // 96 | unity_ObjectToWorld, three packed float3x4 matrices + // 240 | unity_WorldToObject, three packed float3x4 matrices + // 384 | _BaseColor, three float4s + + // Calculates start addresses for the different instanced properties. unity_ObjectToWorld starts at + // address 96 instead of 64 which means 32 bits are left uninitialized. This is because the + // computeBufferStartIndex parameter requires the start offset to be divisible by the size of the source + // array element type. In this case, it's the size of PackedMatrix, which is 48. + uint byteAddressObjectToWorld = kSizeOfPackedMatrix * 2; + uint byteAddressWorldToObject = byteAddressObjectToWorld + kSizeOfPackedMatrix * kNumInstances; + uint byteAddressColor = byteAddressWorldToObject + kSizeOfPackedMatrix * kNumInstances; + + // Upload the instance data to the GraphicsBuffer so the shader can load them. 
+ m_InstanceData.SetData(zero, 0, 0, 1); + m_InstanceData.SetData(objectToWorld, 0, (int)(byteAddressObjectToWorld / kSizeOfPackedMatrix), objectToWorld.Length); + m_InstanceData.SetData(worldToObject, 0, (int)(byteAddressWorldToObject / kSizeOfPackedMatrix), worldToObject.Length); + m_InstanceData.SetData(colors, 0, (int)(byteAddressColor / kSizeOfFloat4), colors.Length); + + // Set up metadata values to point to the instance data. Set the most significant bit 0x80000000 in each + // which instructs the shader that the data is an array with one value per instance, indexed by the instance index. + // Any metadata values that the shader uses but are not set here will be zero. When such a value is used with + // UNITY_ACCESS_DOTS_INSTANCED_PROP (i.e. without a default), the shader interprets the + // 0x00000000 metadata value and loads from the start of the buffer. The start of the buffer + // is a zero matrix so this sort of load is guaranteed to return zero, which is a reasonable default value. + var metadata = new NativeArray<MetadataValue>(3, Allocator.Temp); + metadata[0] = new MetadataValue { NameID = Shader.PropertyToID("unity_ObjectToWorld"), Value = 0x80000000 | byteAddressObjectToWorld, }; + metadata[1] = new MetadataValue { NameID = Shader.PropertyToID("unity_WorldToObject"), Value = 0x80000000 | byteAddressWorldToObject, }; + metadata[2] = new MetadataValue { NameID = Shader.PropertyToID("_BaseColor"), Value = 0x80000000 | byteAddressColor, }; + + // Finally, create a batch for the instances, and make the batch use the GraphicsBuffer with the + // instance data, as well as the metadata values that specify where the properties are. + m_BatchID = m_BRG.AddBatch(metadata, m_InstanceData.bufferHandle); + } + + // Raw buffers are allocated in ints. This is a utility method that calculates + // the required number of ints for the data. 
+ int BufferCountForInstances(int bytesPerInstance, int numInstances, int extraBytes = 0) + { + // Round byte counts to int multiples + bytesPerInstance = (bytesPerInstance + sizeof(int) - 1) / sizeof(int) * sizeof(int); + extraBytes = (extraBytes + sizeof(int) - 1) / sizeof(int) * sizeof(int); + int totalBytes = bytesPerInstance * numInstances + extraBytes; + return totalBytes / sizeof(int); + } + + + private void OnDisable() + { + m_BRG.Dispose(); + } + + public unsafe JobHandle OnPerformCulling( + BatchRendererGroup rendererGroup, + BatchCullingContext cullingContext, + BatchCullingOutput cullingOutput, + IntPtr userContext) + { + // UnsafeUtility.Malloc() requires an alignment, so use the largest integer type's alignment + // which is a reasonable default. + int alignment = UnsafeUtility.AlignOf<long>(); + + // Acquire a pointer to the BatchCullingOutputDrawCommands struct so you can easily + // modify it directly. + var drawCommands = (BatchCullingOutputDrawCommands*)cullingOutput.drawCommands.GetUnsafePtr(); + + // Allocate memory for the output arrays. In a more complicated implementation, you would calculate + // the amount of memory to allocate dynamically based on what is visible. + // This example assumes that all of the instances are visible and thus allocates + // memory for each of them. The necessary allocations are as follows: + // - a single draw command (which draws kNumInstances instances) + // - a single draw range (which covers our single draw command) + // - kNumInstances visible instance indices. + // You must always allocate the arrays using Allocator.TempJob. 
+ drawCommands->drawCommands = (BatchDrawCommand*)UnsafeUtility.Malloc(UnsafeUtility.SizeOf<BatchDrawCommand>(), alignment, Allocator.TempJob); + drawCommands->drawRanges = (BatchDrawRange*)UnsafeUtility.Malloc(UnsafeUtility.SizeOf<BatchDrawRange>(), alignment, Allocator.TempJob); + drawCommands->visibleInstances = (int*)UnsafeUtility.Malloc(kNumInstances * sizeof(int), alignment, Allocator.TempJob); + drawCommands->drawCommandPickingInstanceIDs = null; + + drawCommands->drawCommandCount = 1; + drawCommands->drawRangeCount = 1; + drawCommands->visibleInstanceCount = kNumInstances; + + // This example doesn't use depth sorting, so it leaves instanceSortingPositions as null. + drawCommands->instanceSortingPositions = null; + drawCommands->instanceSortingPositionFloatCount = 0; + + // Configure the single draw command to draw kNumInstances instances + // starting from offset 0 in the array, using the batch, material and mesh + // IDs registered in the Start() method. It doesn't set any special flags. + drawCommands->drawCommands[0].visibleOffset = 0; + drawCommands->drawCommands[0].visibleCount = kNumInstances; + drawCommands->drawCommands[0].batchID = m_BatchID; + drawCommands->drawCommands[0].materialID = m_MaterialID; + drawCommands->drawCommands[0].meshID = m_MeshID; + drawCommands->drawCommands[0].submeshIndex = 0; + drawCommands->drawCommands[0].splitVisibilityMask = 0xff; + drawCommands->drawCommands[0].flags = 0; + drawCommands->drawCommands[0].sortingPosition = 0; + + // Configure the single draw range to cover the single draw command which + // is at offset 0. + drawCommands->drawRanges[0].drawCommandsBegin = 0; + drawCommands->drawRanges[0].drawCommandsCount = 1; + + // This example doesn't care about shadows or motion vectors, so it leaves everything + // at the default zero values, except the renderingLayerMask which it sets to all ones + // so Unity renders the instances regardless of mask settings. 
+ drawCommands->drawRanges[0].filterSettings = new BatchFilterSettings { renderingLayerMask = 0xffffffff, }; + + // Finally, write the actual visible instance indices to the array. In a more complicated + // implementation, this output would depend on what is visible, but this example + // assumes that everything is visible. + for (int i = 0; i < kNumInstances; ++i) + drawCommands->visibleInstances[i] = i; + + // This simple example doesn't use jobs, so it returns an empty JobHandle. + // Performance-sensitive applications are encouraged to use Burst jobs to implement + // culling and draw command output. In this case, this function returns a + // handle here that completes when the Burst jobs finish. + return new JobHandle(); + } +} +``` + +This is the final, complete, code sample for BRG. If you attach this Component to a GameObject, set a mesh and [DOTS Instancing](dots-instancing-shaders.md)-compatible material in the Inspector, and enter Play Mode, Unity renders three instances of the mesh using the material. \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-getting-started.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-getting-started.md new file mode 100644 index 00000000000..814bd0bfccb --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-getting-started.md @@ -0,0 +1,15 @@ +--- +uid: um-batch-renderer-group-getting-started +--- + +# Set up your project for the BatchRendererGroup API + +Before you use BRG, your project must support it. BRG requires your project to: + +* Use the SRP Batcher. To enable the SRP Batcher, see [Using the SRP Batcher](SRPBatcher.md#using-the-srp-batcher). +* Keep BRG [shader variants](https://docs.unity3d.com/6000.0/Documentation/Manual/shader-variants). 
To do this, select **Edit** > **Project Settings** > **Graphics**, and set **BatchRendererGroup variants** to **Keep all**. +* Allow [unsafe code](https://docs.microsoft.com/en-us/dotnet/csharp/language-reference/unsafe-code). To do this, enable the **Allow ‘unsafe’ Code** [Player Setting](https://docs.unity3d.com/6000.0/Documentation/Manual/class-PlayerSettings). + +**Note:** The BatchRendererGroup uses [DOTS Instancing shaders](dots-instancing-shaders.md), but it doesn't require any DOTS packages. The name reflects the new data-oriented way to load instance data, and also helps with backward compatibility with existing Hybrid Renderer compatible shaders. + +For information on how to use BRG to create a basic renderer, see [Creating a renderer with BatchRendererGroup](batch-renderer-group-creating-a-renderer.md). diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-how.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-how.md new file mode 100644 index 00000000000..f40ece8701e --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-how.md @@ -0,0 +1,57 @@ +--- +uid: um-batch-renderer-group-how +--- + +# Introduction to the BatchRendererGroup API + +BRG is the perfect tool to: + +* Render DOTS [Entities](https://docs.unity3d.com/Packages/com.unity.entities@latest). For more information how Entities uses BRG, refer to [Entities Graphics Performance](https://docs.unity3d.com/Packages/com.unity.entities.graphics@1.2/manual/entities-graphics-performance.html). +* Render a large number of environment objects where using individual GameObjects would be too resource-intensive. For example, procedurally-placed plants or rocks. +* Render custom terrain patches. You can use different meshes or materials to display different levels of detail. 
+ +## Render pipeline compatibility + +The following table shows which render pipelines support BRG. + +| **Feature name** | **Universal Render Pipeline (URP)** | **High Definition Render Pipeline (HDRP)** | **Custom SRP** | **Built-in Render Pipeline** | +| ------------------ | ---------------------------- | ----------------------------------- | ------------------------------------------ | -------------- | +| BatchRendererGroup | Yes (1) | Yes (1) | Yes (1) | No | + +**Notes**: + +1. If the project uses the SRP Batcher. + +## Platform compatibility + +Unity supports BRG on: + +* Windows using DirectX 11 +* Windows using DirectX 12 +* Windows using Vulkan +* Universal Windows Platform +* Linux using Vulkan +* macOS using Metal +* iOS +* Android (Vulkan and OpenGL ES 3.x) +* PlayStation 4 +* PlayStation 5 +* Xbox One +* Xbox Series X and Xbox Series S +* Nintendo Switch + +## How BatchRendererGroup works + +To render to the screen, BatchRendererGroup (BRG) generates [draw commands](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchDrawCommand) which are a BRG-specific concept that contains everything Unity needs to efficiently create optimized, instanced draw calls. + +To determine when to render the instances in a draw command, BRG uses [filter settings](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchFilterSettings). Filter settings control when to render instances themselves, but also when to render certain facets of each instance such as its shadows and motion vectors + +Because the same filter settings can often apply to a large number of draw commands, BRG uses [draw ranges](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchDrawRange) to apply filter settings to a range of draw commands. A draw range combines a contiguous number of draw commands with an instance of filter settings that apply to them. 
Draw ranges are especially useful if the filter settings determine that Unity shouldn't render the draw commands, because this makes it possible for Unity to efficiently skip rendering for every draw command in the range. + +There is no restriction on which instances are in which draw calls. It's possible to render the same instance, an object with the same instance index and batchID, many times with different meshes and materials. One example where this can be useful is drawing different sub-meshes with different materials, but using the same instance indices to share properties like transform matrices between the draws. + +For information on how to create a renderer with BRG, see [Creating a renderer with BatchRendererGroup](batch-renderer-group-creating-a-renderer.md). + +## Technical limitations + +In most cases, Unity renders a draw command as a single, platform-level, instanced draw call for each compatible [DrawRenderers](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.ScriptableRenderContext.DrawRenderers) call in the Scriptable Render Pipeline. However, that isn't possible when the graphics API has a lower size limit for draw calls than the draw command's `visibleCount`. In these situations, Unity splits the draw command into multiple instanced draw calls. 
\ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-initializing.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-initializing.md new file mode 100644 index 00000000000..23eab8c305c --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-initializing.md @@ -0,0 +1,55 @@ +--- +uid: um-batch-renderer-group-initializing +--- + +# Initialize a BatchRendererGroup object + +The first step to render using BRG is to create an instance of [BatchRendererGroup](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchRendererGroup) and initialize it with an implementation of [OnPerformCulling](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchRendererGroup.OnPerformCulling). + +The OnPerformCulling callback is the main entry point of BRG and Unity calls it whenever it culls visible objects. For information on the parameters it receives, see [OnPerformCulling](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchRendererGroup.OnPerformCulling). Typically, there are two tasks that the OnPerformCulling callback needs to perform: + +* Visibility culling to determine which of its instances are visible based on the [BatchCullingContext](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchCullingContext) parameter. +* Output the actual draw commands to render those instances. To do this you write to the [BatchCullingOutput](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchCullingOutput) parameter. + +In simple implementations, you can do these tasks directly in the OnPerformCulling callback, but for high-performance implementations, it's best practice to do most of this work in [Burst](https://docs.unity3d.com/Packages/com.unity.burst@latest) jobs. 
The OnPerformCulling callback should return a [JobHandle](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Unity.Jobs.JobHandle) that completes after the jobs write the output into the [BatchCullingOutput](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchCullingOutput) parameter. If your implementation doesn't use jobs, you can return an empty JobHandle. + +See the following code sample for an example of how to create a BatchRendererGroup object and initialize it with the most minimum OnPerformCulling callback that compiles. + +```lang-csharp +using System; +using Unity.Collections; +using Unity.Collections.LowLevel.Unsafe; +using Unity.Jobs; +using UnityEngine; +using UnityEngine.Rendering; + +public class SimpleBRGExample : MonoBehaviour +{ + private BatchRendererGroup m_BRG; + + private void Start() + { + m_BRG = new BatchRendererGroup(this.OnPerformCulling, IntPtr.Zero); + } + + private void OnDisable() + { + m_BRG.Dispose(); + } + + public unsafe JobHandle OnPerformCulling( + BatchRendererGroup rendererGroup, + BatchCullingContext cullingContext, + BatchCullingOutput cullingOutput, + IntPtr userContext) + { + // This example doesn't use jobs, so it can return an empty JobHandle. + // Performance-sensitive applications should use Burst jobs to implement + // culling and draw command output. In this case, this function would return a + // handle here that completes when the Burst jobs finish. + return new JobHandle(); + } +} +``` + +Before you use OnPerformCulling to create draw commands, you need to provide your BatchRendererGroup object any meshes you want it to draw, and any materials you want it to use. For more information, see the next topic, [Registering meshes and materials](batch-renderer-group-registering-meshes-and-materials.md). 
\ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-registering-meshes-and-materials.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-registering-meshes-and-materials.md new file mode 100644 index 00000000000..353f017bd8b --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-registering-meshes-and-materials.md @@ -0,0 +1,75 @@ +--- +uid: um-batch-renderer-group-registering-meshes-and-materials +--- + +# Register meshes and materials with the BatchRendererGroup API + +[Mesh](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Mesh) and [Material](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Material) are managed C# objects in Unity which means you can't use them from [Burst](https://docs.unity3d.com/Packages/com.unity.burst@latest) C# code. This means that to use them in BRG draw commands, you must pre-register with the BRG. + +To register Mesh and Material objects, use [BatchRendererGroup.RegisterMesh](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchRendererGroup.RegisterMesh) and [BatchRendererGroup.RegisterMaterial](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchRendererGroup.RegisterMaterial) respectively. These functions return a [BatchMeshID](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchMeshID) and a [BatchMaterialID](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchMaterialID), respectively, which are plain data structs that contain a Burst-compatible handle. They are strongly typed to help prevent errors from accidentally using the wrong handle type. + +You can register Mesh and Material objects at any time, including run time. 
The only requirements are: + +* You need to register Mesh and Material objects before the BatchRendererGroup can use them to render. +* The Material must support DOTS Instancing. + +You can also unregister Mesh and Material objects if you no longer need them. This is necessary if you want to unload any Mesh or Material objects. [BatchRendererGroup.Dispose](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchRendererGroup.Dispose) automatically unregisters all registered assets. + +**Note**: You can't serialize a BatchMeshID or a BatchMaterialID. They are only valid with the BatchRendererGroup you register them with and become invalid if you unregister them, or if the BatchRendererGroup no longer exists. BatchMeshID and BatchMaterialID also become invalid if something forces Unity to unload the Mesh or Material objects, which happens when Unity unloads the Scene that the Mesh or Material objects are part of. + +It is possible to register the same Mesh or Material object multiple times. This is useful in situations where you want to register Meshes or Materials without having to know which Meshes and Materials have been registered already. In this situation, the BatchRenderer keeps an internal count of the number of registrations in the following way: + +* Each time you register a Mesh or Material object, the BatchRendererGroup increases its reference count by 1. +* Each time you unregister a Mesh or Material object, the BatchRendererGroup decreases its reference count by 1. If this causes the reference count to reach 0, the BatchRendererGroup unregisters the Mesh or Material. If you want to use the Mesh or Material in future draw commands, you must register it again. +* A RegisterMesh or RegisterMaterial call with an already registered Mesh or Material returns the same BatchMeshID or BatchMaterialID as the previous call. 
However, if the BatchRendererGroup completely unregistered the Mesh or Material, registering it again could return a different ID. + +**Note**: BRG checks for modifications to Mesh or Material objects after the first OnPerformCulling callback method in a frame. This means that Unity takes any modification that occurs before that point into account. This includes changes you make in the first callback itself, but not changes that occur in any jobs scheduled by the callback. Modifying Mesh or Material objects after that point causes undefined behavior. + +See the following code sample for an example of how to register meshes and materials with a BatchRendererGroup object. This code sample builds on the one in [Initializing a BatchRendererGroup object](batch-renderer-group-initializing.md). + +```lang-csharp +using System; +using Unity.Collections; +using Unity.Collections.LowLevel.Unsafe; +using Unity.Jobs; +using UnityEngine; +using UnityEngine.Rendering; + +public class SimpleBRGExample : MonoBehaviour +{ + public Mesh mesh; + public Material material; + + private BatchRendererGroup m_BRG; + + private BatchMeshID m_MeshID; + private BatchMaterialID m_MaterialID; + + private void Start() + { + m_BRG = new BatchRendererGroup(this.OnPerformCulling, IntPtr.Zero); + m_MeshID = m_BRG.RegisterMesh(mesh); + m_MaterialID = m_BRG.RegisterMaterial(material); + } + + private void OnDisable() + { + m_BRG.Dispose(); + } + + public unsafe JobHandle OnPerformCulling( + BatchRendererGroup rendererGroup, + BatchCullingContext cullingContext, + BatchCullingOutput cullingOutput, + IntPtr userContext) + { + // This simple example doesn't use jobs, so it can return an empty JobHandle. + // Performance-sensitive applications should use Burst jobs to implement + // culling and draw command output. In this case, this function would return a + // handle that completes when the Burst jobs finish. 
+ return new JobHandle(); + } +} +``` + +Before you create any draw commands that use the registered Meshes and Materials, you need to provide data, like transform matrices, to use for the draw command instances. To provide data to use for each instance, BatchRendererGroup uses a concept called batches. For more information, see the next topic, [Creating batches](batch-renderer-group-creating-batches.md). \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-writing-shaders.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-writing-shaders.md new file mode 100644 index 00000000000..b5c15805205 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group-writing-shaders.md @@ -0,0 +1,14 @@ +# Writing custom shaders for the BatchRendererGroup API + +Resources for writing custom shaders that support Data-Oriented Technology Stack (DOTS) Instancing, so they can be used with the `BatchRendererGroup` API. + +| **Page** | **Description** | +| --- | --- | +| [DOTS Instancing shaders](dots-instancing-shaders.md) | Learn about the characteristics and advantages of shaders that support DOTS Instancing. | +| [Support DOTS Instancing in a custom shader](dots-instancing-shaders-support.md) | Add defines and properties to a custom shader to make it compatible with DOTS Instancing. | +| [Declare DOTS Instancing properties in a custom shader](dots-instancing-shaders-declare.md) | Declare a DOTS Instancing property block in custom shader code. | +| [Access DOTS Instancing properties in a custom shader](dots-instancing-shaders-access.md) | Use built-in macros to access DOTS Instancing properties in custom shader code. | +| [Best practice for DOTS Instancing shaders](dots-instancing-shaders-best-practice.md) | Structure data to avoid DOTS Instancing shaders behaving in unexpected ways. 
| +| [DOTS Instancing shader samples](dots-instancing-shaders-samples.md) | Examples of using built-in macros to access per-instance data and constant data in DOTS Instancing shaders. | +| [DOTS Instancing shader macros reference](dots-instancing-shaders-macros.md) | Explore the built-in macros you can use to access DOTS Instancing properties in a custom shader. | +| [DOTS Instancing shader functions reference](dots-instancing-shaders-functions.md) | Explore the built-in functions you can use to load the values of constants directly from the draw command data. | diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group.md new file mode 100644 index 00000000000..d0e74f26c44 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/batch-renderer-group.md @@ -0,0 +1,19 @@ +--- +uid: um-batch-renderer-group +--- + +# BatchRendererGroup API + +BatchRendererGroup (BRG) is an API for high-performance custom rendering in projects that use a Scriptable Render Pipeline (SRP) and the [SRP Batcher](SRPBatcher.md). + +| **Page** | **Description** | +| ------------------------------------------------------------ | ------------------------------------------------------------ | +| [Introduction to the BatchRendererGroup API](batch-renderer-group-how.md) | Explains how BRG renders to the screen and introduces BRG-specific concepts. | +| [Set up your project for the BatchRendererGroup API](batch-renderer-group-getting-started.md) | Describes the requirements and compatibility of BRG and explains how to set up your project to support BRG. | +| [Creating a renderer with the BatchRendererGroup API](batch-renderer-group-creating-a-renderer.md) | A section that explains how to use BRG to create a simple custom renderer. 
| +| [Writing custom shaders for the BatchRendererGroup API](batch-renderer-group-writing-shaders.md) | Describes the new data-oriented way shaders can load instance data. | + +## Additional resources + +* [Reduce rendering work on the CPU](reduce-rendering-work-on-cpu.md) +* [Optimizing draw calls](reduce-draw-calls-landing-hdrp.md) diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/configure-hdrp-for-virtual-reality.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/configure-hdrp-for-virtual-reality.md index 355cf5c8c21..b7c1de80f26 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/configure-hdrp-for-virtual-reality.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/configure-hdrp-for-virtual-reality.md @@ -22,7 +22,7 @@ You can also watch the presentation from Unite Copenhagen (October 2019) to lear * Open VR* **Note**: Valve is currently developing their OpenVR Unity XR plugin for 2019.3 and beyond. -For more information, see [Unity XR platform updates](https://blogs.unity3d.com/2020/01/24/unity-xr-platform-updates/) on the Unity blog, and [XR Plugin Architecture](https://docs.unity3d.com/Manual/XRPluginArchitecture.html) in the Unity Manual. +For more information, refer to [XR Plugin Architecture](https://docs.unity3d.com/Manual/XRPluginArchitecture.html). The XR Plugin architecture links to the OpenVR desktop package and has further info and recommendations. 
## Resolution Control diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/custom-post-processing-create-apply.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/custom-post-processing-create-apply.md index 1dcc385dcba..de697675796 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/custom-post-processing-create-apply.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/custom-post-processing-create-apply.md @@ -18,6 +18,10 @@ Note that the file name of both the C# post-process volume and the shader need t This creates each template file in the **Project** window in the **Assets** folder. +**Note**: When using **Full screen Shader Graph**, if you need **Scene Color**, use the **Post Process Input** source for the **HD Sample Buffer** node. + +![](Images/HDSampleBuffer.png) + ## Apply a custom post-processing effect diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-access.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-access.md new file mode 100644 index 00000000000..b53992bda2d --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-access.md @@ -0,0 +1,11 @@ +# Access DOTS Instancing properties in a custom shader + +To access DOTS Instanced properties, your shader can use one of the access macros that Unity provides. The access macros assume that instance data in `unity_DOTSInstanceData` uses the following layout: + +* The 31 least significant bits of the metadata value contain the byte address of the first instance in the batch within the `unity_DOTSInstanceData` buffer. +* If the most significant bit of the metadata value is `0`, every instance uses the value from instance index zero. This means each instance loads directly from the byte address in the metadata value. 
In this case, the buffer only needs to store a single value, instead of one value per instance. +* If the most significant bit of the metadata value is `1`, the address should contain an array where you can find the value for instance index `instanceID` using `AddressOfInstance0 + sizeof(PropertyType) * instanceID`. In this case, you should ensure that every rendered instance index has valid data in the buffer. Otherwise, out-of-bounds access and undefined behavior can occur. + +You can also set the metadata value directly, which is useful if you want to use a custom data source that doesn't use the above layout, such as a texture. + +For an example of how to use these macros, see [Access macro example](dots-instancing-shaders-samples.md). diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-best-practice.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-best-practice.md new file mode 100644 index 00000000000..68bf0eaad01 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-best-practice.md @@ -0,0 +1,47 @@ + +# Best practice for DOTS Instancing shaders + +It is best practice to initialize the first 64 bytes of all `unity_DOTSInstanceData` buffers to zero and leave them unused. This is because the default metadata value that Unity uses for all metadata values not specified during batch creation is zero. Specifically, when a shader loads a zero metadata value from the `UNITY_ACCESS_DOTS_INSTANCED_PROP` macro, the shader loads this value from the address `zero` because the instance index will be disregarded. Ensuring that the first 64 bytes, which is the size of the largest value type (a float4x4 matrix), are zeroes guarantees that such loads predictably return a result of zero. Otherwise, the shader could load something unpredictable, depending on what happens to be located at address zero. 
+ +When using DOTS Instancing, Shader Graphs and Shaders that Unity provides use a special convention for transform matrices. To save GPU memory and bandwidth, they store these matrices using only 12 floats instead of the full 16, because four floats are always constant. These shaders expect floats formatted in such a way that the x, y, and z of each column in the matrix are stored in order. In other words, the first three floats are the x, y, and z of the first column, the next three floats are the x, y, and z of the second column, and so on. The matrices don't store the `w` element of each column. The transform matrices this affects are: + +* `unity_ObjectToWorld` +* `unity_WorldToObject` +* `unity_MatrixPreviousM` +* `unity_MatrixPreviousMI` + +The following code sample includes a struct that converts regular four-by-four matrices into the 12 floats convention. + +```lang-csharp +struct PackedMatrix +{ + public float c0x; + public float c0y; + public float c0z; + public float c1x; + public float c1y; + public float c1z; + public float c2x; + public float c2y; + public float c2z; + public float c3x; + public float c3y; + public float c3z; + + public PackedMatrix(Matrix4x4 m) + { + c0x = m.m00; + c0y = m.m10; + c0z = m.m20; + c1x = m.m01; + c1y = m.m11; + c1z = m.m21; + c2x = m.m02; + c2y = m.m12; + c2z = m.m22; + c3x = m.m03; + c3y = m.m13; + c3z = m.m23; + } +} +``` diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-constant.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-constant.md new file mode 100644 index 00000000000..b559720f276 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-constant.md @@ -0,0 +1,25 @@ +# Example of a DOTS Instancing shader that accesses constant data + +In this example: + +* The metadata value for `Color` is `0x00001000`. +* The instance index is `5`. 
+* Data for instance 0 starts at address 0x1000. +* The most significant bit is not set so data for instance 5 is at the same address as instance 0. + +Because the most significant bit is not set, the accessor macros that fall back to defaults don't access `unity_DOTSInstanceData`. This means that: + +* `c0` will contain the value from `unity_DOTSInstanceData` address `0x1000`. +* `c1` will contain the value of the regular material property **Color**, and cause a compile error if the Color property doesn't exist. +* `c2` will contain `(1, 2, 3, 4)` because that was passed as the explicit default value. + +``` +void ExampleConstant() +{ + // rawMetadataValue will contain 0x00001000 + uint rawMetadataValue = UNITY_DOTS_INSTANCED_METADATA_NAME(float4, Color); + float4 c0 = UNITY_ACCESS_DOTS_INSTANCED_PROP(float4, Color); + float4 c1 = UNITY_ACCESS_DOTS_INSTANCED_PROP_WITH_DEFAULT(float4, Color); + float4 c2 = UNITY_ACCESS_DOTS_INSTANCED_PROP_WITH_CUSTOM_DEFAULT(float4, Color, float4(1, 2, 3, 4)); +} +``` \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-declare.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-declare.md new file mode 100644 index 00000000000..ebc66c35443 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-declare.md @@ -0,0 +1,29 @@ +# Declare DOTS Instancing properties in a custom shader + +To load instance data, such as transform matrices, the shader needs to define DOTS Instanced properties. 
Below is an example of a simple DOTS Instanced property block: + +``` +UNITY_DOTS_INSTANCING_START(MaterialPropertyMetadata) + UNITY_DOTS_INSTANCED_PROP(float4, Color) +UNITY_DOTS_INSTANCING_END(MaterialPropertyMetadata) +``` + +To mark the beginning and end of the property block, use the `UNITY_DOTS_INSTANCING_START` and `UNITY_DOTS_INSTANCING_END` macros followed by the name of the block. The example uses the name `MaterialPropertyMetadata`. There are three allowed block names: + +* BuiltinPropertyMetadata +* MaterialPropertyMetadata +* UserPropertyMetadata + +The shader can declare one of each, so a DOTS Instanced shader can have between zero and three of such blocks. Unity-defined shader code doesn't use UserPropertyMetadata so this name is guaranteed to be free for you to use. URP and HDRP define BuiltinPropertyMetadata for every shader they provide and define MaterialPropertyMetadata for most of them too, so it's best practice to use UserPropertyMetadata. Your custom shaders can use all three possible names, even all at once. + +The block can contain any number of DOTS Instanced property definitions formatted like: + +``` +UNITY_DOTS_INSTANCED_PROP(PropertyType, PropertyName) +``` + +`PropertyType` can be any HLSL built-in type (like uint, float4, float4x4, or int2x4) except a bool vector, and `PropertyName` is the name of the DOTS Instanced property. DOTS Instanced properties are completely separate from [regular material properties](https://docs.unity3d.com/6000.0/Documentation/Manual/SL-Properties), and you can give them the same name as another regular material property. This is possible because the `UNITY_DOTS_INSTANCED_PROP` macro generates special constant names which Unity recognizes that don't conflict with other property names. Shaders that Unity provides give DOTS Instanced properties the same names as regular material properties, but you don't need to follow this convention. 
+ +Internally, Unity provides the shader with a 32-bit integer metadata value for every DOTS Instanced property the shader declares. Unity sets the metadata value when your code makes a [BatchRendererGroup.AddBatch](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchRendererGroup.AddBatch) call to create the batch associated with the draw. The metadata value defaults to `0` if Unity doesn't set it. The shader also has access to `ByteAddressBuffer unity_DOTSInstanceData` which Unity sets to the GraphicsBuffer you pass as an argument to `BatchRendererGroup.AddBatch`. This buffer is typically where the shader loads the instance data from. Multiple batches can share a single GraphicsBuffer, but it is also possible for each batch to use its own separate GraphicsBuffer for `unity_DOTSInstanceData`. + +**Note**: Unity doesn't provide any DOTS Instanced data automatically. It's your responsibility to make sure that the `unity_DOTSInstanceData` buffer of each batch contains the correct data. Instance data must include many properties that Unity normally provides for GameObjects, such as transform matrices, light probe coefficients, and lightmap texture coordinates. diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-functions.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-functions.md new file mode 100644 index 00000000000..9ef9c3e842e --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-functions.md @@ -0,0 +1,13 @@ +# DOTS Instancing shader functions reference + +Alongside the access macros, Unity provides shader functions that load the values of constants directly from the draw command data. Shaders that Unity provides use these functions. 
+ +Unity provides the following shader functions: + +| **Shader function** | **Description** | +| -------------------------------------------- | ------------------------------------------------------------ | +| `LoadDOTSInstancedData_RenderingLayer` | Returns the [renderingLayerMask](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchFilterSettings-renderingLayerMask) for the draw command. | +| `LoadDOTSInstancedData_MotionVectorsParams` | Returns the [motion vector generation mode](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchFilterSettings-motionMode) for the draw command. This is formatted as a float4, which is what Unity shaders expect. | +| `LoadDOTSInstancedData_WorldTransformParams` | Returns whether to draw the instance with a flipped triangle winding. See [FlipWinding](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchDrawCommandFlags.FlipWinding). | +| `LoadDOTSInstancedData_LightData` | Returns whether the scene's main Directional Light is active for the instance. The main light can be deactivated for multiple reasons, for example if the light is already included in light maps. | +| `LoadDOTSInstancedData_LODFade` | Returns the 8 bit crossfade value you set if the [LODCrossFade flag](https://docs.unity3d.com/6000.0/Documentation/ScriptReference/Rendering.BatchDrawCommandFlags.LODCrossFade) is set. If the flag is not set, the return value is undefined. 
| \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-macros.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-macros.md new file mode 100644 index 00000000000..a57bb62925f --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-macros.md @@ -0,0 +1,11 @@ +# DOTS Instancing shader macros reference + +Unity provides the following access macros: + +| **Access macro** | **Description** | +| ------------------------------------------------------------ | ------------------------------------------------------------ | +| `UNITY_ACCESS_DOTS_INSTANCED_PROP(PropertyType, PropertyName)` | Returns the value loaded from `unity_DOTSInstanceData` using the layout described above. Shaders that Unity provides use this version for DOTS Instanced built-in properties that don’t have a default value to fall back on. | +| `UNITY_ACCESS_DOTS_INSTANCED_PROP_WITH_DEFAULT(PropertyType, PropertyName)` | Returns the same as `UNITY_ACCESS_DOTS_INSTANCED_PROP`, except if the most significant bit of the metadata value is zero, it returns a default value. The default value is the value of the regular material property with the same name as the DOTS Instanced property, which is why Shaders that Unity provides use the convention where DOTS Instanced properties have the same name as regular material properties. When using the default value, the access macro doesn't access `unity_DOTSInstanceData` at all. Shaders that Unity provides use this access macro for DOTS Instanced material properties, so the loads fall back to the value set on the material. 
| +| `UNITY_ACCESS_DOTS_INSTANCED_PROP_WITH_CUSTOM_DEFAULT(PropertyType, PropertyName, DefaultValue)` | Returns the same as `UNITY_ACCESS_DOTS_INSTANCED_PROP` unless the most significant bit of the metadata value is zero, in which case this macro returns `DefaultValue` instead, and doesn't access `unity_DOTSInstanceData`. | +| `UNITY_DOTS_INSTANCED_METADATA_NAME(PropertyType, PropertyName)` | Returns the metadata value directly without accessing anything. This is useful for custom instance data loading schemes. | + diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-per-instance.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-per-instance.md new file mode 100644 index 00000000000..43eeb6f5f24 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-per-instance.md @@ -0,0 +1,22 @@ +# Example of a DOTS Instancing shader that accesses per-instance data + +In this example: + +* The metadata value for `Color` is `0x80001000`. +* The instance index is `5`. +* Data for instance 0 starts at address 0x1000. +* Data for instance 5 is at address 0x1000 + 5 * sizeof(float4) = 0x1050 + +Because the most significant bit is already set, the accessor macros don't load defaults. This means that `c0`, `c1`, and `c2` will all have the same value, loaded from `unity_DOTSInstanceData` address `0x1050`. 
+ +``` +void ExamplePerInstance() +{ + // rawMetadataValue will contain 0x80001000 + uint rawMetadataValue = UNITY_DOTS_INSTANCED_METADATA_NAME(float4, Color); + + float4 c0 = UNITY_ACCESS_DOTS_INSTANCED_PROP(float4, Color); + float4 c1 = UNITY_ACCESS_DOTS_INSTANCED_PROP_WITH_DEFAULT(float4, Color); + float4 c2 = UNITY_ACCESS_DOTS_INSTANCED_PROP_WITH_CUSTOM_DEFAULT(float4, Color, float4(1, 2, 3, 4)); +} +``` diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-samples.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-samples.md new file mode 100644 index 00000000000..5b98c5f18c4 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-samples.md @@ -0,0 +1,9 @@ +# DOTS Instancing shader examples + +Examples of using built-in macros to access per-instance data and constant data in DOTS Instancing shaders. + +| **Page** | **Description** | +| --- | --- | +| [Example of a DOTS Instancing shader that accesses per-instance data](dots-instancing-shaders-per-instance.md) | An example of accessing per-instance color data in a DOTS Instancing shader. | +| [Example of a DOTS Instancing shader that accesses constant data](dots-instancing-shaders-constant.md) | An example of accessing constant color data in a DOTS Instancing shader. | +| [Example of using UNITY_DOTS_INSTANCED_PROP macros in a DOTS Instancing shader](dots-instancing-shaders-unity-dots-instanced-prop.md) | An example of using the `UNITY_ACCESS_DOTS_INSTANCED_PROP` to specify if a property can be instanced or not at compile time. 
| diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-support.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-support.md new file mode 100644 index 00000000000..a8df6dc11f8 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-support.md @@ -0,0 +1,9 @@ +# Support DOTS Instancing in a custom shader + +To support DOTS Instancing, a shader needs to do the following: + +* Use shader model 4.5 or newer. Specify `#pragma target 4.5` or higher. +* Support the `DOTS_INSTANCING_ON` keyword. Declare this with `#pragma multi_compile _ DOTS_INSTANCING_ON`. +* Declare at least one block of DOTS Instanced properties each of which has at least one property. For more information, see [Declaring DOTS Instanced properties](dots-instancing-shaders-declare.md). + +**Note**: Shader Graphs and shaders that Unity provides in URP and HDRP support DOTS Instancing. diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-unity-dots-instanced-prop.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-unity-dots-instanced-prop.md new file mode 100644 index 00000000000..4bd95370c76 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders-unity-dots-instanced-prop.md @@ -0,0 +1,30 @@ +# Example of using UNITY_DOTS_INSTANCED_PROP macros in a DOTS Instancing shader + +The `UNITY_DOTS_INSTANCED_PROP` macro has 3 variants: + +* `UNITY_DOTS_INSTANCED_PROP_OVERRIDE_DISABLED(PropertyType, PropertyName)` +* `UNITY_DOTS_INSTANCED_PROP_OVERRIDE_SUPPORTED(PropertyType, PropertyName)` +* `UNITY_DOTS_INSTANCED_PROP_OVERRIDE_REQUIRED(PropertyType, PropertyName)` +
These macros allow you to specify if a property can be instanced or not at compile-time. 
It allows the access macros such as `UNITY_ACCESS_DOTS_INSTANCED_PROP` to expand to more optimal code and can have significant impact on low-end platforms. + +Here is an example of a DOTS Instanced property block using all the macro variants above: + +``` +UNITY_DOTS_INSTANCING_START(MaterialPropertyMetadata) + UNITY_DOTS_INSTANCED_PROP_OVERRIDE_SUPPORTED(float4, Color) + UNITY_DOTS_INSTANCED_PROP_OVERRIDE_DISABLED(float4, SpecColor) + UNITY_DOTS_INSTANCED_PROP_OVERRIDE_REQUIRED(float4, EmissionColor) +UNITY_DOTS_INSTANCING_END(MaterialPropertyMetadata) +``` + +- The `Color` property can either be instanced or not. The correct loading path is selected dynamically depending on the property metadata high-bit. +- The `SpecColor` property is not instantiable. This declaration won't add an uint32 field in the constant buffer. It is equivalent to not declaring anything at all. It can be useful to quickly disable instancing on a property without the need to modify other parts of the code. +- The `EmissionColor` property must be instanced. The property is always loaded from the `unity_DOTSInstanceData` buffer, and no dynamic branch is ever emitted when accessing the property. + +By default, `UNITY_DOTS_INSTANCED_PROP` is the same as `UNITY_DOTS_INSTANCED_PROP_OVERRIDE_SUPPORTED`. This default behavior can be changed by uncommenting the define `UNITY_DOTS_INSTANCED_PROP_OVERRIDE_DISABLED_BY_DEFAULT` in "com.unity.render-pipelines.core\ShaderLibrary\UnityDOTSInstancing.hlsl". When you do this, the define is enabled, and `UNITY_DOTS_INSTANCED_PROP` becomes the same as `UNITY_DOTS_INSTANCED_PROP_OVERRIDE_DISABLED`. + +**Note**: When uncommenting the define `UNITY_DOTS_INSTANCED_PROP_OVERRIDE_DISABLED_BY_DEFAULT`, you might need to clear the Library folder to make sure the shaders are correctly recompiled. + +On low-end devices, instanced properties can have a significant performance cost. Loading from an SSBO for example, can be a lot slower than a normal constant buffer load. 
This is because on many low-end devices, this type of buffer load goes through texture samplers, whereas constant buffer loads use faster hardware unless a dynamic index is used to access the buffer. Instanced properties are always loaded with dynamic indexing since it depends on the property metadata, this means they always go through the texture samplers on low-end devices. +To better optimize your project for low-end devices, you can disable property instancing by default. To do this, enable the define `UNITY_DOTS_INSTANCED_PROP_OVERRIDE_DISABLED_BY_DEFAULT`, this sets property instancing to be disabled as default. Once this is done, you can then enable property instancing manually only for the properties that require it. diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders.md new file mode 100644 index 00000000000..5fcb89633b4 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/dots-instancing-shaders.md @@ -0,0 +1,18 @@ +--- +uid: um-dots-instancing-shaders +--- + +# DOTS Instancing shaders + +To render large instance counts efficiently, BRG uses a new shader instancing mode called DOTS Instancing. Every shader that BRG uses must support DOTS Instancing. In traditional instanced shaders, the shader is passed an array for each instanced property in a constant or uniform buffer, such that each element in each array contains the property value for a single instance in the draw. In DOTS Instanced shaders, Unity passes one 32-bit integer to the shader for each DOTS Instanced property. This 32-bit integer is called a metadata value. This integer can represent anything you want, but typically it represents an offset in the buffer from where the shader loads property data for the instance that the shader is rendering. 
+ +DOTS Instancing has many advantages compared to traditional instancing, including the following: + +* The instance data is stored in a GraphicsBuffer and remains persistent on the GPU, which means that Unity doesn't need to set it up again each time it renders the instance. Setting up data only when an instance actually changes can significantly improve performance in cases where instance data changes rarely or not at all. This is much more efficient than traditional instancing, which requires an engine to set up all data for every instance every frame. +* The process for setting up instance data is separate from setting up the draw call. This makes draw call setup lightweight and efficient. BRG makes this possible with a special fast path of the SRP Batcher that only does a minimal amount of work for each draw call. The responsibility for this work moves to you and gives you more control over what to render in each draw call. +* The size of a draw call is no longer limited by how much instance data can fit in a constant or uniform buffer. This makes it possible for BRG to render larger instance counts with a single draw call. +
 **Note**: The number of instance indices still limits the draw call size, since each index still requires some data. However, an index consumes far less memory than a full set of instanced properties which means many more instances can fit inside a constant or uniform buffer. For example, each index requires 16 bytes so if the memory limit for a buffer on a particular platform is 64kb, 4096 indices can fit in the buffer. +* If every instance uses the same value for a given property, it is possible to have all instances load the value from the same place in memory. This saves memory and the number of GPU cycles spent duplicating the value for each instance. + + + diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/reduce-draw-calls-landing-hdrp.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/reduce-draw-calls-landing-hdrp.md new file mode 100644 index 00000000000..31eccf1d096 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/reduce-draw-calls-landing-hdrp.md @@ -0,0 +1,8 @@ +# Optimizing draw calls + +Techniques for speeding up rendering by reducing the number of drawing commands the CPU sends to the GPU. + +| **Page**| **Description**| +|-|-| +| [Scriptable Render Pipeline Batcher](SRPBatcher-landing.md) | Resources for using the Scriptable Render Pipeline (SRP) Batcher to reduce the number of render state changes between draw calls. | +| [BatchRendererGroup API](batch-renderer-group.md)| Resources for using the `BatchRendererGroup` API to reduce the number of batches in the SRP Batcher. 
| diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/rendering-debugger-window-reference.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/rendering-debugger-window-reference.md index 18feeaa26c5..7cd61d6725d 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/rendering-debugger-window-reference.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/rendering-debugger-window-reference.md @@ -133,7 +133,7 @@ The **Material** panel has tools that you can use to visualize different Materia These parameters only appear when you set the Material Debug Option to Rendering Layers. - Filter with Light Layers from Selected Light + Filter Light Layers by Light Enable the checkbox to visualize GameObjects that the selected light affects. diff --git a/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/shader-properties/surface-options/eye-material-type.md b/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/shader-properties/surface-options/eye-material-type.md index 2def46fadec..ad8b6b8111d 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/shader-properties/surface-options/eye-material-type.md +++ b/Packages/com.unity.render-pipelines.high-definition/Documentation~/snippets/shader-properties/surface-options/eye-material-type.md @@ -2,5 +2,5 @@ Material Type -Specifies the method HDRP uses to calculate the lighting in the eye's iris:
Eye: Uses a low resource-intensity method to calculate caustics.
Eye Cinematic: Refracts incoming light to create more realistic lighting. This is the algorithm used in The Heretic. This method is more resource-intensive than Eye.
Eye Cinematic With Caustic: Uses refracted light direction and caustic approximation to shade the iris. This is the algorithm used in Enemies. This method is the the most resource intensive. +Specifies the method HDRP uses to calculate the lighting in the eye's iris:
Eye: Uses a low resource-intensity method to calculate caustics.
Eye Cinematic: Refracts incoming light to create more realistic lighting. This is the algorithm used in The Heretic. This method is more resource-intensive than Eye.
Eye Cinematic With Caustic: Uses refracted light direction and caustic approximation to shade the iris. This is the algorithm used in Enemies. This method is the most resource intensive. diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/Reflection/Volume/ReflectionProxyVolumeComponentEditor.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/Reflection/Volume/ReflectionProxyVolumeComponentEditor.cs index 30be6cc4a37..8aa8885da5b 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/Reflection/Volume/ReflectionProxyVolumeComponentEditor.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Lighting/Reflection/Volume/ReflectionProxyVolumeComponentEditor.cs @@ -70,30 +70,12 @@ static void DrawGizmosSelected(ReflectionProxyVolumeComponent proxyVolumeCompone case ProxyShape.Box: s_BoxHandle.Value.center = Quaternion.Inverse(tr.rotation) * tr.position; s_BoxHandle.Value.size = prox.boxSize; - EditorGUI.BeginChangeCheck(); s_BoxHandle.Value.DrawHull(true); - s_BoxHandle.Value.DrawHandle(); - if (EditorGUI.EndChangeCheck()) - { - Undo.RecordObjects(new Object[] {tr, comp}, "Update Proxy Volume Size"); - tr.position = tr.rotation * s_BoxHandle.Value.center; - prox.boxSize = s_BoxHandle.Value.size; - } - break; case ProxyShape.Sphere: - s_SphereHandle.Value.center = Quaternion.Inverse(tr.rotation) * tr.position; + s_SphereHandle.Value.center = tr.position; s_SphereHandle.Value.radius = prox.sphereRadius; - EditorGUI.BeginChangeCheck(); s_SphereHandle.Value.DrawHull(true); - s_SphereHandle.Value.DrawHandle(); - if (EditorGUI.EndChangeCheck()) - { - Undo.RecordObjects(new Object[] {tr, comp}, "Update Proxy Volume Size"); - tr.position = tr.rotation * s_SphereHandle.Value.center; - prox.sphereRadius = s_SphereHandle.Value.radius; - } - break; case ProxyShape.Infinite: break; @@ -102,5 +84,47 @@ static void DrawGizmosSelected(ReflectionProxyVolumeComponent proxyVolumeCompone } } } + + 
void OnSceneGUI() + { + foreach (var comp in s_TypedTargets) + { + var tr = comp.transform; + var prox = comp.proxyVolume; + + using (new Handles.DrawingScope(Matrix4x4.TRS(Vector3.zero, comp.transform.rotation, Vector3.one))) + { + switch (prox.shape) + { + case ProxyShape.Box: + EditorGUI.BeginChangeCheck(); + s_BoxHandle.Value.center = Quaternion.Inverse(tr.rotation) * tr.position; + s_BoxHandle.Value.size = prox.boxSize; + s_BoxHandle.Value.DrawHandle(); + if (EditorGUI.EndChangeCheck()) + { + Undo.RecordObjects(new Object[] { tr, comp }, "Update Proxy Volume Size"); + tr.position = tr.rotation * s_BoxHandle.Value.center; + prox.boxSize = s_BoxHandle.Value.size; + } + break; + case ProxyShape.Sphere: + EditorGUI.BeginChangeCheck(); + s_SphereHandle.Value.center = Quaternion.Inverse(tr.rotation) * tr.position; + s_SphereHandle.Value.radius = prox.sphereRadius; + s_SphereHandle.Value.DrawHandle(); + if (EditorGUI.EndChangeCheck()) + { + Undo.RecordObjects(new Object[] { tr, comp }, "Update Proxy Volume Size"); + tr.position = tr.rotation * s_SphereHandle.Value.center; + prox.sphereRadius = s_SphereHandle.Value.radius; + } + break; + case ProxyShape.Infinite: + break; + } + } + } + } } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/Canvas/ShaderGraph/HDCanvasSubTarget.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/Canvas/ShaderGraph/HDCanvasSubTarget.cs index 3c9397b9c0d..8b7ee534bc1 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/Material/Canvas/ShaderGraph/HDCanvasSubTarget.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/Material/Canvas/ShaderGraph/HDCanvasSubTarget.cs @@ -67,6 +67,8 @@ protected override DefineCollection GetAdditionalDefines() var result = new DefineCollection(); if (canvasData.alphaClip) result.Add(CoreKeywordDescriptors.AlphaTest, 1); + + result.Add(base.GetAdditionalDefines()); return result; } public override void GetActiveBlocks(ref 
TargetActiveBlockContext context) diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/PostProcessing/DepthOfFieldEditor.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/PostProcessing/DepthOfFieldEditor.cs index 7193e8321f8..1e8d19c6766 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/PostProcessing/DepthOfFieldEditor.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/PostProcessing/DepthOfFieldEditor.cs @@ -53,6 +53,8 @@ static partial class Styles SerializedDataParameter m_Resolution; SerializedDataParameter m_PhysicallyBased; SerializedDataParameter m_LimitManualRangeNearBlur; + SerializedDataParameter m_AdaptiveSamplingWeight; + SerializedDataParameter m_CoCStabilization; public override void OnEnable() { @@ -77,7 +79,8 @@ public override void OnEnable() m_Resolution = Unpack(o.Find("m_Resolution")); m_PhysicallyBased = Unpack(o.Find("m_PhysicallyBased")); m_LimitManualRangeNearBlur = Unpack(o.Find("m_LimitManualRangeNearBlur")); - + m_AdaptiveSamplingWeight = Unpack(o.Find("m_AdaptiveSamplingWeight")); + m_CoCStabilization = Unpack(o.Find(x => x.coCStabilization)); base.OnEnable(); } @@ -175,19 +178,6 @@ void DrawFocusSettings(int mode) } } - void PropertyPBRDofResolution(SerializedDataParameter property) - { - using (var scope = new OverridablePropertyScope(property, Styles.PbrDofResolutionTitle, this)) - { - if (!scope.displayed) - return; - - bool isHighResolution = property.value.intValue <= (int)DepthOfFieldResolution.Half; - isHighResolution = EditorGUILayout.Toggle(Styles.PbrDofResolutionTitle, isHighResolution); - property.value.intValue = isHighResolution ? 
Math.Min((int)DepthOfFieldResolution.Half, property.value.intValue) : (int)DepthOfFieldResolution.Quarter; - } - } - void DrawQualitySettings() { using (new QualityScope(this)) @@ -197,12 +187,10 @@ void DrawQualitySettings() PropertyField(m_FarSampleCount, Styles.k_FarSampleCount); PropertyField(m_FarMaxBlur, Styles.k_FarMaxBlur); PropertyField(m_PhysicallyBased); + PropertyField(m_Resolution); if (m_PhysicallyBased.value.boolValue) - PropertyPBRDofResolution(m_Resolution); - else - PropertyField(m_Resolution); + PropertyField(m_AdaptiveSamplingWeight); - PropertyField(m_HighQualityFiltering); if (m_PhysicallyBased.value.boolValue) { if (BeginAdditionalPropertiesScope()) @@ -212,11 +200,17 @@ void DrawQualitySettings() } EndAdditionalPropertiesScope(); } + else + { + PropertyField(m_HighQualityFiltering); + } if (m_FocusMode.value.intValue == (int)DepthOfFieldMode.Manual && !m_PhysicallyBased.value.boolValue) { PropertyField(m_LimitManualRangeNearBlur); } + + PropertyField(m_CoCStabilization); } } @@ -230,7 +224,6 @@ public override QualitySettingsBlob SaveCustomQualitySettingsAsObject(QualitySet settings.Save(m_FarSampleCount); settings.Save(m_FarMaxBlur); settings.Save(m_Resolution); - settings.Save(m_HighQualityFiltering); settings.Save(m_PhysicallyBased); settings.Save(m_LimitManualRangeNearBlur); @@ -244,7 +237,6 @@ public override void LoadSettingsFromObject(QualitySettingsBlob settings) settings.TryLoad(ref m_FarSampleCount); settings.TryLoad(ref m_FarMaxBlur); settings.TryLoad(ref m_Resolution); - settings.TryLoad(ref m_HighQualityFiltering); settings.TryLoad(ref m_PhysicallyBased); settings.TryLoad(ref m_LimitManualRangeNearBlur); } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDEditorUtils.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDEditorUtils.cs index bea27bc4811..381a8db2506 100644 --- 
a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDEditorUtils.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDEditorUtils.cs @@ -395,10 +395,10 @@ internal static HDCamera[] GetDisplayedCameras() var type = assembly.GetType("UnityEditor.GameView"); var targetDisplayProp = type.GetProperty("targetDisplay"); - foreach (EditorWindow gameView in Resources.FindObjectsOfTypeAll(type)) + // This is an optimization to retrieve only the first gameView, to avoid a call to expensive Resources.FindObjectsOfTypeAll causing lots of slow UI in OnInspectorGUI calls. + EditorWindow gameView = EditorWindow.GetWindow(type); + if (gameView.hasFocus) { - if (!gameView.hasFocus) continue; - var targetDisplay = (int)targetDisplayProp.GetValue(gameView); foreach (var camera in HDCamera.GetHDCameras()) { diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.Skin.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.Skin.cs index b229fa96120..cc23e11f955 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.Skin.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.Skin.cs @@ -55,6 +55,7 @@ public class Styles public static readonly GUIContent sampleCountQuality = EditorGUIUtility.TrTextContent("Sample Count"); public static readonly GUIContent pbrResolutionQualityTitle = EditorGUIUtility.TrTextContent("Enable High Resolution"); public static readonly GUIContent resolutionQuality = EditorGUIUtility.TrTextContent("Resolution"); + public static readonly GUIContent adaptiveSamplingWeight = EditorGUIUtility.TrTextContent("Adaptive Sampling Weight"); public static readonly GUIContent highQualityPrefiltering = EditorGUIUtility.TrTextContent("High Quality Prefiltering"); public static readonly GUIContent highQualityFiltering = 
EditorGUIUtility.TrTextContent("High Quality Filtering"); public static readonly GUIContent dofPhysicallyBased = EditorGUIUtility.TrTextContent("Physically Based"); diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.cs index e7b93a2756b..097913ed3f2 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/HDRenderPipelineUI.cs @@ -383,7 +383,7 @@ static void Drawer_SectionShadows(SerializedHDRenderPipelineAsset serialized, Ed EditorGUI.BeginChangeCheck(); EditorGUILayout.DelayedIntField(serialized.renderPipelineSettings.hdShadowInitParams.maxShadowRequests, Styles.maxRequestContent); if (EditorGUI.EndChangeCheck()) - serialized.renderPipelineSettings.hdShadowInitParams.maxShadowRequests.intValue = Mathf.Max(1, serialized.renderPipelineSettings.hdShadowInitParams.maxShadowRequests.intValue); + serialized.renderPipelineSettings.hdShadowInitParams.maxShadowRequests.intValue = Mathf.Max(1, Mathf.Min(65536, serialized.renderPipelineSettings.hdShadowInitParams.maxShadowRequests.intValue)); if (!serialized.renderPipelineSettings.supportedLitShaderMode.hasMultipleDifferentValues) { @@ -1126,16 +1126,15 @@ static void DrawDepthOfFieldQualitySetting(SerializedHDRenderPipelineAsset seria } EditorGUILayout.PropertyField(serialized.renderPipelineSettings.postProcessQualitySettings.DoFPhysicallyBased.GetArrayElementAtIndex(tier), Styles.dofPhysicallyBased); + EditorGUILayout.PropertyField(serialized.renderPipelineSettings.postProcessQualitySettings.DoFResolution.GetArrayElementAtIndex(tier), Styles.resolutionQuality); if (serialized.renderPipelineSettings.postProcessQualitySettings.DoFPhysicallyBased.GetArrayElementAtIndex(tier).boolValue) { - int currentResolution = 
serialized.renderPipelineSettings.postProcessQualitySettings.DoFResolution.GetArrayElementAtIndex(tier).intValue; - bool isHighResolution = currentResolution <= (int)DepthOfFieldResolution.Half; - isHighResolution = EditorGUILayout.Toggle(Styles.pbrResolutionQualityTitle, isHighResolution); - serialized.renderPipelineSettings.postProcessQualitySettings.DoFResolution.GetArrayElementAtIndex(tier).intValue = isHighResolution ? Math.Min((int)DepthOfFieldResolution.Half, currentResolution) : (int)DepthOfFieldResolution.Quarter; + EditorGUILayout.PropertyField(serialized.renderPipelineSettings.postProcessQualitySettings.AdaptiveSamplingWeight.GetArrayElementAtIndex(tier), Styles.adaptiveSamplingWeight); } else - EditorGUILayout.PropertyField(serialized.renderPipelineSettings.postProcessQualitySettings.DoFResolution.GetArrayElementAtIndex(tier), Styles.resolutionQuality); - EditorGUILayout.PropertyField(serialized.renderPipelineSettings.postProcessQualitySettings.DoFHighFilteringQuality.GetArrayElementAtIndex(tier), Styles.highQualityFiltering); + { + EditorGUILayout.PropertyField(serialized.renderPipelineSettings.postProcessQualitySettings.DoFHighFilteringQuality.GetArrayElementAtIndex(tier), Styles.highQualityFiltering); + } EditorGUILayout.PropertyField(serialized.renderPipelineSettings.postProcessQualitySettings.LimitManualRangeNearBlur.GetArrayElementAtIndex(tier), Styles.limitNearBlur); } diff --git a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Settings/SerializedPostProcessingQualitySettings.cs b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Settings/SerializedPostProcessingQualitySettings.cs index 04bd61670f0..5e1886115e9 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Settings/SerializedPostProcessingQualitySettings.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Editor/RenderPipeline/Settings/SerializedPostProcessingQualitySettings.cs @@ -16,6 +16,7 @@ 
class SerializedPostProcessingQualitySettings public SerializedProperty DoFHighFilteringQuality; public SerializedProperty DoFPhysicallyBased; public SerializedProperty LimitManualRangeNearBlur; + public SerializedProperty AdaptiveSamplingWeight; // Motion Blur public SerializedProperty MotionBlurSampleCount; @@ -41,6 +42,7 @@ public SerializedPostProcessingQualitySettings(SerializedProperty root) DoFHighFilteringQuality = root.Find((GlobalPostProcessingQualitySettings s) => s.DoFHighQualityFiltering); DoFPhysicallyBased = root.Find((GlobalPostProcessingQualitySettings s) => s.DoFPhysicallyBased); LimitManualRangeNearBlur = root.Find((GlobalPostProcessingQualitySettings s) => s.LimitManualRangeNearBlur); + AdaptiveSamplingWeight = root.Find((GlobalPostProcessingQualitySettings s) => s.AdaptiveSamplingWeight); // Motion Blur MotionBlurSampleCount = root.Find((GlobalPostProcessingQualitySettings s) => s.MotionBlurSampleCount); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugDisplay.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugDisplay.cs index 947dec97932..c8173c9842c 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugDisplay.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugDisplay.cs @@ -136,6 +136,8 @@ public enum FullScreenDebugMode ColorLog, /// Display Depth of Field circle of confusion. DepthOfFieldCoc, + /// Display Depth of Field tile classification. Red is slow in-focus, green is fast de-focus and blue is fast in-focus. + DepthOfFieldTileClassification, /// Display Transparency Overdraw. TransparencyOverdraw, /// Display Quad Overdraw. 
@@ -1242,7 +1244,7 @@ static class LightingStrings public static readonly NameAndTooltip LightHierarchyDebugMode = new() { name = "Light Hierarchy Debug Mode", tooltip = "Use the drop-down to select a light type to show the direct lighting for or a Reflection Probe type to show the indirect lighting for." }; public static readonly NameAndTooltip LightLayersVisualization = new() { name = "Light Layers Visualization", tooltip = "Visualize the light layers of GameObjects in your Scene." }; - public static readonly NameAndTooltip LightLayersUseSelectedLight = new() { name = "Filter with Light Layers from Selected Light", tooltip = "Highlight Renderers affected by the selected light." }; + public static readonly NameAndTooltip LightLayersUseSelectedLight = new() { name = "Filter Light Layers by Light", tooltip = "Highlight Renderers affected by the selected light." }; public static readonly NameAndTooltip LightLayersSwitchToLightShadowLayers = new() { name = "Use Light's Shadow Layer Mask", tooltip = "Highlight Renderers that cast shadows for the selected light." }; public static readonly NameAndTooltip LightLayersFilterLayers = new() { name = "Filter Layers", tooltip = "Use the drop-down to filter light layers that you want to visialize." }; public static readonly NameAndTooltip LightLayersColor = new() { name = "Layers Color", tooltip = "Select the display color of each light layer." 
}; @@ -2335,7 +2337,7 @@ internal bool DebugNeedsExposure() (data.fullScreenDebugMode == FullScreenDebugMode.PreRefractionColorPyramid || data.fullScreenDebugMode == FullScreenDebugMode.FinalColorPyramid || data.fullScreenDebugMode == FullScreenDebugMode.VolumetricClouds || data.fullScreenDebugMode == FullScreenDebugMode.TransparentScreenSpaceReflections || data.fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceReflections || data.fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceReflectionsPrev || data.fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceReflectionsAccum || data.fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceReflectionSpeedRejection || data.fullScreenDebugMode == FullScreenDebugMode.LightCluster || data.fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceShadows || data.fullScreenDebugMode == FullScreenDebugMode.NanTracker || data.fullScreenDebugMode == FullScreenDebugMode.ColorLog || data.fullScreenDebugMode == FullScreenDebugMode.ScreenSpaceGlobalIllumination || data.fullScreenDebugMode == FullScreenDebugMode.LensFlareScreenSpace || - data.fullScreenDebugMode == FullScreenDebugMode.VolumetricFog || data.fullScreenDebugMode == FullScreenDebugMode.STP); + data.fullScreenDebugMode == FullScreenDebugMode.VolumetricFog || data.fullScreenDebugMode == FullScreenDebugMode.STP || data.fullScreenDebugMode == FullScreenDebugMode.DepthOfFieldTileClassification); } } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugDisplay.cs.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugDisplay.cs.hlsl index 84541180343..7c512c5ee54 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugDisplay.cs.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugDisplay.cs.hlsl @@ -37,21 +37,22 @@ #define FULLSCREENDEBUGMODE_NAN_TRACKER (27) #define FULLSCREENDEBUGMODE_COLOR_LOG (28) #define FULLSCREENDEBUGMODE_DEPTH_OF_FIELD_COC (29) -#define 
FULLSCREENDEBUGMODE_TRANSPARENCY_OVERDRAW (30) -#define FULLSCREENDEBUGMODE_QUAD_OVERDRAW (31) -#define FULLSCREENDEBUGMODE_LOCAL_VOLUMETRIC_FOG_OVERDRAW (32) -#define FULLSCREENDEBUGMODE_VERTEX_DENSITY (33) -#define FULLSCREENDEBUGMODE_REQUESTED_VIRTUAL_TEXTURE_TILES (34) -#define FULLSCREENDEBUGMODE_LENS_FLARE_DATA_DRIVEN (35) -#define FULLSCREENDEBUGMODE_LENS_FLARE_SCREEN_SPACE (36) -#define FULLSCREENDEBUGMODE_COMPUTE_THICKNESS (37) -#define FULLSCREENDEBUGMODE_HIGH_QUALITY_LINES (38) -#define FULLSCREENDEBUGMODE_STP (39) -#define FULLSCREENDEBUGMODE_MAX_RENDERING_FULL_SCREEN_DEBUG (40) -#define FULLSCREENDEBUGMODE_MIN_MATERIAL_FULL_SCREEN_DEBUG (41) -#define FULLSCREENDEBUGMODE_VALIDATE_DIFFUSE_COLOR (42) -#define FULLSCREENDEBUGMODE_VALIDATE_SPECULAR_COLOR (43) -#define FULLSCREENDEBUGMODE_MAX_MATERIAL_FULL_SCREEN_DEBUG (44) +#define FULLSCREENDEBUGMODE_DEPTH_OF_FIELD_TILE_CLASSIFICATION (30) +#define FULLSCREENDEBUGMODE_TRANSPARENCY_OVERDRAW (31) +#define FULLSCREENDEBUGMODE_QUAD_OVERDRAW (32) +#define FULLSCREENDEBUGMODE_LOCAL_VOLUMETRIC_FOG_OVERDRAW (33) +#define FULLSCREENDEBUGMODE_VERTEX_DENSITY (34) +#define FULLSCREENDEBUGMODE_REQUESTED_VIRTUAL_TEXTURE_TILES (35) +#define FULLSCREENDEBUGMODE_LENS_FLARE_DATA_DRIVEN (36) +#define FULLSCREENDEBUGMODE_LENS_FLARE_SCREEN_SPACE (37) +#define FULLSCREENDEBUGMODE_COMPUTE_THICKNESS (38) +#define FULLSCREENDEBUGMODE_HIGH_QUALITY_LINES (39) +#define FULLSCREENDEBUGMODE_STP (40) +#define FULLSCREENDEBUGMODE_MAX_RENDERING_FULL_SCREEN_DEBUG (41) +#define FULLSCREENDEBUGMODE_MIN_MATERIAL_FULL_SCREEN_DEBUG (42) +#define FULLSCREENDEBUGMODE_VALIDATE_DIFFUSE_COLOR (43) +#define FULLSCREENDEBUGMODE_VALIDATE_SPECULAR_COLOR (44) +#define FULLSCREENDEBUGMODE_MAX_MATERIAL_FULL_SCREEN_DEBUG (45) // Generated from UnityEngine.Rendering.HighDefinition.ShaderVariablesDebugDisplay // PackingRules = Exact diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugFullScreen.shader 
b/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugFullScreen.shader index 323557ce27c..089f050ff1f 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugFullScreen.shader +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Debug/DebugFullScreen.shader @@ -398,6 +398,11 @@ Shader "Hidden/HDRP/DebugFullScreen" return float4(color, 1.0); } + if (_FullScreenDebugMode == FULLSCREENDEBUGMODE_DEPTH_OF_FIELD_TILE_CLASSIFICATION) + { + float3 color = SAMPLE_TEXTURE2D_X(_DebugFullScreenTexture, s_point_clamp_sampler, input.texcoord).rgb; + return float4(color, 1.0); + } if (_FullScreenDebugMode == FULLSCREENDEBUGMODE_CONTACT_SHADOWS) { uint2 samplePosition = (uint2)((input.texcoord.xy / _RTHandleScale.xy) * _DebugViewportSize.xy); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightEvaluation.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightEvaluation.hlsl index 5def133f046..d1fb561e8fa 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightEvaluation.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/LightEvaluation.hlsl @@ -316,9 +316,6 @@ SHADOW_TYPE EvaluateShadow_Directional( LightLoopContext lightLoopContext, Posit shadow = lightLoopContext.shadowValue; #ifdef SHADOWS_SHADOWMASK - float3 camToPixel = posInput.positionWS - GetPrimaryCameraPosition(); - float distanceCamToPixel2 = dot(camToPixel, camToPixel); - int shadowSplitIndex = lightLoopContext.shadowContext.shadowSplitIndex; if (shadowSplitIndex < 0) { @@ -326,7 +323,12 @@ SHADOW_TYPE EvaluateShadow_Directional( LightLoopContext lightLoopContext, Posit } else if (shadowSplitIndex == int(_CascadeShadowCount) - 1) { - float fade = lightLoopContext.shadowContext.fade; + // float fade = lightLoopContext.shadowContext.fade; + float3 camToPixel = posInput.positionWS - GetPrimaryCameraPosition(); + float distanceCamToPixel2 = 
dot(camToPixel, camToPixel); + + HDDirectionalShadowData dsd = lightLoopContext.shadowContext.directionalShadowData; + float fade = saturate(distanceCamToPixel2 * dsd.fadeScale + dsd.fadeBias); // In the transition code (both dithering and blend) we use shadow = lerp( shadow, 1.0, fade ) for last transition // mean if we expend the code we have (shadow * (1 - fade) + fade). Here to make transition with shadow mask // we will remove fade and add fade * shadowMask which mean we do a lerp with shadow mask diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowAlgorithms.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowAlgorithms.hlsl index a49b0d15241..61556bdfd96 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowAlgorithms.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowAlgorithms.hlsl @@ -257,6 +257,12 @@ int EvalShadow_GetSplitIndex(HDShadowContext shadowContext, int index, float3 po void LoadDirectionalShadowDatas(inout HDShadowData sd, HDShadowContext shadowContext, int index) { + sd.rot0 = shadowContext.shadowDatas[index].rot0; + sd.rot1 = shadowContext.shadowDatas[index].rot1; + sd.rot2 = shadowContext.shadowDatas[index].rot2; + + sd.shadowToWorld = shadowContext.shadowDatas[index].shadowToWorld; + sd.proj = shadowContext.shadowDatas[index].proj; sd.pos = shadowContext.shadowDatas[index].pos; sd.worldTexelSize = shadowContext.shadowDatas[index].worldTexelSize; diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowManager.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowManager.cs index 8bd8233abac..0034a8ba3e4 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowManager.cs +++ 
b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowManager.cs @@ -111,6 +111,9 @@ unsafe struct HDDirectionalShadowData [HLSLArray(4, typeof(float))] [SurfaceDataAttributes(precision = FieldPrecision.Real)] public fixed float cascadeBorders[4]; + + public float fadeScale; + public float fadeBias; } struct HDShadowCullingSplit @@ -702,6 +705,13 @@ public void InitShadowManager(HDRenderPipeline renderPipeline, HDShadowInitParam // Even when shadows are disabled (maxShadowRequests == 0) we need to allocate compute buffers to avoid having // resource not bound errors when dispatching a compute shader. + if (initParams.maxShadowRequests > 65536) + { + initParams.maxShadowRequests = 65536; +#if UNITY_EDITOR || DEVELOPMENT_BUILD + Debug.LogWarning("The 'Maximum Shadows on Screen' value has been clamped to 65536 in order not to exceed the maximum size of the buffer."); +#endif + } m_ShadowDataBuffer = new ComputeBuffer(Mathf.Max(initParams.maxShadowRequests, 1), System.Runtime.InteropServices.Marshal.SizeOf(typeof(HDShadowData))); m_DirectionalShadowDataBuffer = new ComputeBuffer(1, System.Runtime.InteropServices.Marshal.SizeOf(typeof(HDDirectionalShadowData))); m_MaxShadowRequests = initParams.maxShadowRequests; @@ -1153,7 +1163,10 @@ unsafe public void PrepareGPUShadowDatas(CullingResults cullResults, HDCamera ca else m_DirectionalShadowData.cascadeDirection = Vector4.zero; - m_DirectionalShadowData.cascadeDirection.w = camera.volumeStack.GetComponent().cascadeShadowSplitCount.value; + HDShadowSettings shadowSettings = camera.volumeStack.GetComponent(); + m_DirectionalShadowData.cascadeDirection.w = shadowSettings.cascadeShadowSplitCount.value; + + GetShadowFadeScaleAndBias(shadowSettings, out m_DirectionalShadowData.fadeScale, out m_DirectionalShadowData.fadeBias); if (m_ShadowRequestCount > 0) { @@ -1164,6 +1177,45 @@ unsafe public void PrepareGPUShadowDatas(CullingResults cullResults, HDCamera ca } } + void 
GetShadowFadeScaleAndBias(HDShadowSettings shadowSettings, out float scale, out float bias) + { + float maxShadowDistance = shadowSettings.maxShadowDistance.value; + float maxShadowDistanceSq = maxShadowDistance * maxShadowDistance; + float cascadeBorder; + int splitCount = shadowSettings.cascadeShadowSplitCount.value; + if (splitCount == 4) + cascadeBorder = shadowSettings.cascadeShadowBorder3.value; + else if (splitCount == 3) + cascadeBorder = shadowSettings.cascadeShadowBorder2.value; + else if (splitCount == 2) + cascadeBorder = shadowSettings.cascadeShadowBorder1.value; + else + cascadeBorder = shadowSettings.cascadeShadowBorder0.value; + + GetScaleAndBiasForLinearDistanceFade(maxShadowDistanceSq, cascadeBorder, out scale, out bias); + } + + void GetScaleAndBiasForLinearDistanceFade(float fadeDistance, float border, out float scale, out float bias) + { + // To avoid division from zero + // This values ensure that fade within cascade will be 0 and outside 1 + if (border < 0.0001f) + { + float multiplier = 1000f; // To avoid blending if difference is in fractions + scale = multiplier; + bias = -fadeDistance * multiplier; + return; + } + + border = 1 - border; + border *= border; + + // Fade with distance calculation is just a linear fade from 90% of fade distance to fade distance. 90% arbitrarily chosen but should work well enough. 
+ float distanceFadeNear = border * fadeDistance; + scale = 1.0f / (fadeDistance - distanceFadeNear); + bias = -distanceFadeNear / (fadeDistance - distanceFadeNear); + } + public void PushGlobalParameters(CommandBuffer cmd) { // This code must be in sync with HDShadowContext.hlsl diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowManager.cs.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowManager.cs.hlsl index b48233628bb..2a5f61bb325 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowManager.cs.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/Shadow/HDShadowManager.cs.hlsl @@ -11,6 +11,8 @@ struct HDDirectionalShadowData float4 sphereCascades[4]; real4 cascadeDirection; real cascadeBorders[4]; + float fadeScale; + float fadeBias; }; // Generated from UnityEngine.Rendering.HighDefinition.HDShadowData diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsTrace.compute b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsTrace.compute index 9bf63da4cc4..f82c83cf341 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsTrace.compute +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsTrace.compute @@ -106,6 +106,7 @@ void RenderClouds(uint3 traceCoord : SV_DispatchThreadID, int groupIndex : SV_Gr _CloudsLightingTextureRW[COORD_TEXTURE2D_X(traceCoord.xy)] = result.scattering; // Compute the cloud depth - float depth = result.invalidRay ? UNITY_RAW_FAR_CLIP_VALUE : EncodeInfiniteDepth(result.meanDistance, _CloudNearPlane); + float cloudDepth = result.meanDistance * dot(ray.direction, -UNITY_MATRIX_V[2].xyz); // Distance to depth + float depth = result.invalidRay ? 
UNITY_RAW_FAR_CLIP_VALUE : EncodeInfiniteDepth(cloudDepth, _CloudNearPlane); _CloudsDepthTextureRW[COORD_TEXTURE2D_X(traceCoord.xy)] = float2(depth, result.transmittance); } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsUtilities.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsUtilities.hlsl index d9eaeeca600..610fee6b10b 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsUtilities.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Lighting/VolumetricClouds/VolumetricCloudsUtilities.hlsl @@ -63,8 +63,12 @@ float EvaluateFinalTransmittance(float2 finalCoord, float transmittance) float resultLuminance = FastTonemapPerChannel(luminance) * transmittance; resultLuminance = FastTonemapPerChannelInvert(resultLuminance); + // By softening the transmittance attenuation curve for pixels adjacent to cloud boundaries when the luminance is super high, + // We can prevent sun flicker and improve perceptual blending. 
(https://www.desmos.com/calculator/vmly6erwdo) + float finalTransmittance = max(resultLuminance / luminance, pow(transmittance, 6)); + // This approach only makes sense if the color is not black - transmittance = lerp(transmittance, resultLuminance / luminance, _ImprovedTransmittanceBlend); + transmittance = lerp(transmittance, finalTransmittance, _ImprovedTransmittanceBlend); } #endif diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Components/DepthOfField.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Components/DepthOfField.cs index bda5bb28cb2..a0edc34be87 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Components/DepthOfField.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Components/DepthOfField.cs @@ -73,12 +73,6 @@ public enum FocusDistanceMode [HDRPHelpURL("Post-Processing-Depth-of-Field")] public sealed class DepthOfField : VolumeComponentWithQuality, IPostProcessComponent { - // Sampling ratios for adaptive sampling. - // X: ratio of the sharp part tiles of PBR dof that have high variance of CoC. - // Y: ratio of the blurry / sharp tiles that have low variance of CoC. - internal static Vector2 s_HighQualityAdaptiveSamplingWeights = new Vector2(4.0f, 1.0f); - internal static Vector2 s_LowQualityAdaptiveSamplingWeights = new Vector2(1.0f, 0.75f); - /// /// Specifies the mode that HDRP uses to set the focus for the depth of field effect. /// @@ -237,7 +231,7 @@ public bool highQualityFiltering } set { m_HighQualityFiltering.value = value; } } - + /// /// When enabled, HDRP uses a more accurate but slower physically based method to compute the depth of field effect. /// @@ -258,6 +252,27 @@ public bool physicallyBased set { m_PhysicallyBased.value = value; } } + /// + /// The adaptive sampling weight is a factor that modifies the number of samples in the depth of field depending + /// on the radius of the blur. 
Higher values will reduce the noise in the depth of field but increases its cost. + /// + public float adaptiveSamplingWeight + { + get + { + if (!UsesQualitySettings()) + { + return m_AdaptiveSamplingWeight.value; + } + else + { + int qualityLevel = (int)quality.levelAndOverride.level; + return GetPostProcessingQualitySettings().AdaptiveSamplingWeight[qualityLevel]; + } + } + set { m_AdaptiveSamplingWeight.value = value; } + } + /// /// Adjust near blur CoC based on depth distance when manual, non-physical mode is used. /// @@ -297,7 +312,6 @@ public DepthOfFieldResolution resolution } } - [Header("Near Blur")] [Tooltip("Sets the number of samples to use for the near field.")] [SerializeField, FormerlySerializedAs("nearSampleCount")] @@ -335,11 +349,25 @@ public DepthOfFieldResolution resolution [SerializeField] BoolParameter m_PhysicallyBased = new BoolParameter(false); + [AdditionalProperty] + [Tooltip("When enabled, HDRP uses a more accurate but slower physically based algorithm to compute the depth of field effect.")] + [SerializeField] + FloatParameter m_AdaptiveSamplingWeight = new ClampedFloatParameter(0.75f, 0.5f, 4f); + [AdditionalProperty] [Tooltip("Adjust near blur CoC based on depth distance when manual, non-physical mode is used.")] [SerializeField] BoolParameter m_LimitManualRangeNearBlur = new BoolParameter(false); + /// + /// Enables the Circle of Confusion Reprojection used when anti-aliasing or an upsampling technique requiring jittering (TAA, DLSS, STP, etc.) is enabled. Disabling this option can get rid of ghosting artifacts in the depth of field." + /// + [AdditionalProperty] + [Tooltip("Enables the CoC Reprojection used when anti-aliasing or an upsampling technique requiring jittering (TAA, DLSS, STP, etc.) is enabled. 
Disabling this option can get rid of ghosting artifacts in the depth of field.")] + [SerializeField] + [InspectorName("CoC Stabilization")] + public BoolParameter coCStabilization = new BoolParameter(true); + /// /// Tells if the effect needs to be rendered or not. /// diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/BloomPrefilter.compute b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/BloomPrefilter.compute index 38edc4b5986..9681b96b00e 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/BloomPrefilter.compute +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/BloomPrefilter.compute @@ -27,7 +27,8 @@ float3 BilinearSample(float2 uv, float2 offset, out float weight) #ifdef ENABLE_ALPHA // When alpha is enabled, regions with zero alpha should not generate any bloom / glow. Therefore we pre-multipy the color with the alpha channel here and the rest // of the computations remain float3. Still, when bloom is applied to the final image, bloom will still be spread on regions with zero alpha (see UberPost.compute) - c.xyz *= c.w; + // Note that the alpha channel in the color target could be greater than 1.0 or NaN or negative. The alpha here is opacity so we clamp it to handle an unexpected input. 
+ c.xyz *= saturate(c.w); #endif c.xyz = QuadraticThreshold(c.xyz, _BloomThreshold.x, _BloomThreshold.yzw); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DepthOfFieldCoCReproject.compute b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DepthOfFieldCoCReproject.compute index 25e0b03d84f..bbc5aa95134 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DepthOfFieldCoCReproject.compute +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DepthOfFieldCoCReproject.compute @@ -65,10 +65,10 @@ void KMain(uint3 dispatchThreadId : SV_DispatchThreadID) // CoC dilation: determine the closest point in the four neighbors float3 closest = float3(0.0, 0.0, coc0); - closest = coc1 < closest.z ? float3(-1.0, 0.0, coc1) : closest; - closest = coc2 < closest.z ? float3( 0.0, -1.0, coc2) : closest; - closest = coc3 < closest.z ? float3( 0.0, 1.0, coc3) : closest; - closest = coc4 < closest.z ? float3( 1.0, 0.0, coc4) : closest; + closest = abs(coc1) < abs(closest.z) ? float3(-1.0, 0.0, coc1) : closest; + closest = abs(coc2) < abs(closest.z) ? float3( 0.0, -1.0, coc2) : closest; + closest = abs(coc3) < abs(closest.z) ? float3( 0.0, 1.0, coc3) : closest; + closest = abs(coc4) < abs(closest.z) ? 
float3( 1.0, 0.0, coc4) : closest; // Sample the history buffer with the motion vector at the closest point float2 motionVector; @@ -88,7 +88,9 @@ void KMain(uint3 dispatchThreadId : SV_DispatchThreadID) float cocMax = Max3(Max3(coc0, coc1, coc2), coc3, coc4); cocHis = clamp(cocHis, cocMin, cocMax); - float outputCoC = lerp(coc0, cocHis, MotionBlending); + float outputCoC = coc0; + if (sign(coc0) == sign(cocHis)) + outputCoC = lerp(coc0, cocHis, MotionBlending); #if defined(SHADER_API_XBOXONE) // In some cases, it looks like the compiler reorganizes code so that we end up at the end with a NaN in the history (disabling compiler optimizations get rid of the NaN). diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DepthOfFieldCommon.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DepthOfFieldCommon.hlsl index b31b2e6579e..1d85ed153fb 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DepthOfFieldCommon.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DepthOfFieldCommon.hlsl @@ -1,6 +1,7 @@ #ifndef DEPTH_OF_FIELD_COMMON #define DEPTH_OF_FIELD_COMMON +#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl" #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/TextureXR.hlsl" #include "Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/ShaderVariables.hlsl" @@ -9,6 +10,26 @@ struct TileData uint position; }; +struct CoCTileData +{ + float minFarCoC; + float maxFarCoC; + float minNearCoC; + float maxNearCoC; +}; + +CoCTileData LoadCoCTileData(TEXTURE2D_X(tileTexture), uint2 coords) +{ + float4 data = tileTexture[COORD_TEXTURE2D_X(coords)]; + CoCTileData tileData = {data.x, data.y, data.z, data.w}; + return tileData; +} + +float4 PackCoCTileData(CoCTileData data) +{ + return float4(data.minFarCoC, data.maxFarCoC, data.minNearCoC, 
data.maxNearCoC); +} + uint PackKernelCoord(float2 coords) { return uint(f32tof16(coords.x) | f32tof16(coords.y) << 16); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFApertureShape.compute b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFApertureShape.compute new file mode 100644 index 00000000000..76df386427d --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFApertureShape.compute @@ -0,0 +1,51 @@ +#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl" +#include "Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/ShaderVariables.hlsl" +#include "Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/PostProcessDefines.hlsl" +#include "Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DepthOfFieldCommon.hlsl" + +#pragma only_renderers d3d11 playstation xboxone xboxseries vulkan metal switch + +#pragma kernel ComputeShapeBuffer + +#pragma multi_compile _ ENABLE_ALPHA + +#define GROUP_RES 8u +#define GROUP_SIZE (GROUP_RES * GROUP_RES) + +CBUFFER_START(cb0) +float4 _Params; +CBUFFER_END + +#define BladeCount _Params.x +#define NGonFactor _Params.y +#define Rotation _Params.z +#define Anamorphism _Params.w +#define ResScale 0.0 +#define OneOverResScale 0.0 +#define AdaptiveSamplingWeights float2(0.0, 0.0) +#define MaxColorMip 0.0 +#include "Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFGatherUtils.hlsl" + +float2 AngleToApertureShape(float angle) +{ + // Transform to rotated ngon + // "CryEngine 3 Graphics Gems" [Sousa13] + float n = BladeCount; + float nt = cos(PI / n); + float dt = cos(angle - (TWO_PI / n) * floor((n * angle + PI) / TWO_PI)); + float r = PositivePow(nt / dt, NGonFactor); + float u = r * cos(angle - Rotation); + float v = r * sin(angle - Rotation); + + v *= 1.0 + 
Anamorphism; + u *= 1.0 - Anamorphism; + + return float2(u, v); +} + +[numthreads(64, 1, 1)] +void ComputeShapeBuffer(uint3 dispatchThreadId : SV_DispatchThreadID) +{ + float t = (dispatchThreadId.x / (float)_ApertureShapeTableCount) * 2 * PI; + _ApertureShapeTable[dispatchThreadId.x] = AngleToApertureShape(t); +} diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFApertureShape.compute.meta b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFApertureShape.compute.meta new file mode 100644 index 00000000000..da5fd563b37 --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFApertureShape.compute.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: dd5acecb27e20334fa3be332e85172df +ComputeShaderImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFCoCMinMax.compute b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFCoCMinMax.compute index 98ab9d9de38..730835d6b66 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFCoCMinMax.compute +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFCoCMinMax.compute @@ -11,42 +11,50 @@ TEXTURE2D_X(_InputTexture); // output texture with min / max tiles RW_TEXTURE2D_X(float4, _OutputTexture); +float2 _OutputResolution; + // min-max tile size #define TILE_RES 8u #define GROUP_RES 8u #define GROUP_SIZE (GROUP_RES * GROUP_RES) -float4 InitMinMaxTile() +#define BIG_NUMBER 1200 // should be small enough to fit in fp16 + +CoCTileData InitMinMaxTile() { - // x: min far coc - // y: max far coc - // z: min near coc - // w: max near coc - const float bigNumber = 1000; // should be small enough to fit in fp16 - return float4(bigNumber, 0, -bigNumber, 0); + 
CoCTileData tileData; + + tileData.minFarCoC = BIG_NUMBER; + tileData.maxFarCoC = 0; + tileData.minNearCoC = -BIG_NUMBER; + tileData.maxNearCoC = 0; + + return tileData; } -void UpdateMinMaxTile(inout float4 tile, float CoC) +void UpdateMinMaxTile(inout CoCTileData tile, float CoC) { if (CoC >= 0) { - tile.x = min(tile.x, CoC); - tile.y = max(tile.y, CoC); + tile.minFarCoC = min(tile.minFarCoC, CoC); + tile.maxFarCoC = max(tile.maxFarCoC, CoC); } else { - tile.z = max(tile.z, CoC); - tile.w = min(tile.w, CoC); + tile.minNearCoC = max(tile.minNearCoC, CoC); + tile.maxNearCoC = min(tile.maxNearCoC, CoC); } } [numthreads(GROUP_RES, GROUP_RES, 1)] void KMainCoCMinMax(uint3 dispatchThreadId : SV_DispatchThreadID) { + if (any(dispatchThreadId.xy > uint2(_OutputResolution))) + return; UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z); - float4 minMaxTile = InitMinMaxTile(); + CoCTileData minMaxTile = InitMinMaxTile(); for (uint j = 0; j < TILE_RES; j++) { @@ -60,5 +68,5 @@ void KMainCoCMinMax(uint3 dispatchThreadId : SV_DispatchThreadID) } } - _OutputTexture[COORD_TEXTURE2D_X(dispatchThreadId.xy)] = minMaxTile; + _OutputTexture[COORD_TEXTURE2D_X(dispatchThreadId.xy)] = PackCoCTileData(minMaxTile); } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFCombine.compute b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFCombine.compute index 469d33297c9..9b2552ec649 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFCombine.compute +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFCombine.compute @@ -6,7 +6,7 @@ #pragma only_renderers d3d11 playstation xboxone xboxseries vulkan metal switch -#pragma kernel KMain +#pragma kernel UpsampleFastTiles #pragma multi_compile _ ENABLE_ALPHA #pragma multi_compile _ FORCE_POINT_SAMPLING @@ -17,6 +17,8 @@ CBUFFER_START(cb0) float4 _Params; float4 _Params2; +uint 
_DebugTileClassification; +uint3 _Padding; CBUFFER_END #define NumRings _Params.x @@ -31,53 +33,35 @@ RW_TEXTURE2D_X(CTYPE, _OutputTexture); #define ResScale 1.0f #define OneOverResScale 1.0f -#define MaxColorMip 0.0f +#define MaxColorMip 0.0 #define AdaptiveSamplingWeights _Params2.xy #define BlurResolution _Params2.z #define InvBlurResolution _Params2.w #include "Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFGatherUtils.hlsl" +// TODO: indirect classification [numthreads(GROUP_RES, GROUP_RES, 1)] -void KMain(uint3 dispatchThreadId : SV_DispatchThreadID) +void UpsampleFastTiles(uint3 dispatchThreadId : SV_DispatchThreadID) { UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z); PositionInputs posInputs = GetPositionInput(float2(dispatchThreadId.xy), _PostProcessScreenSize.zw, uint2(GROUP_RES, GROUP_RES)); - CTYPE output = 0; - int tileClass = GetTileClass(posInputs.positionSS); - if (tileClass == SLOW_INFOCUS_TILE) - { - SampleData centerSample; - centerSample.color = GetColorSample(posInputs.positionSS, 0); - centerSample.CoC = GetCoCRadius(posInputs.positionSS); - - DoFTile tileData; - LoadTileData(posInputs.positionSS, centerSample, NumRings, tileData); - - float4 outColor; - float outAlpha; - DoFGatherRings(posInputs, tileData, centerSample, outColor, outAlpha); - output.xyz = outColor.xyz; -#ifdef ENABLE_ALPHA - ComposeAlpha(output, centerSample.color.xyz, outAlpha); -#endif - } - else if (tileClass == FAST_DEFOCUS_TILE) + if (tileClass == FAST_DEFOCUS_TILE) { float2 uv = (posInputs.positionSS + 0.5) * _PostProcessScreenSize.zw; uv = ClampAndScaleUV(uv, _PostProcessScreenSize.zw * BlurResolution, 0.5f, _RTHandlePostProcessScale.xy); - output = SAMPLE_TEXTURE2D_X_LOD(_InputNearTexture, s_linear_clamp_sampler, uv, 0.0).CTYPE_SWIZZLE; - } - else - { - output = GetColorSample(posInputs.positionSS, 0); + CTYPE output = SAMPLE_TEXTURE2D_X_LOD(_InputNearTexture, s_linear_clamp_sampler, uv, 0.0).CTYPE_SWIZZLE; + 
_OutputTexture[COORD_TEXTURE2D_X(posInputs.positionSS)] = (CTYPE)output; } // Helper function to visualize tile types in case it is needed for debugging - //DebugTiles(posInputs.positionSS, output.xyz); - - _OutputTexture[COORD_TEXTURE2D_X(posInputs.positionSS)] = (CTYPE)output; + if (_DebugTileClassification != 0) + { + CTYPE debug = _OutputTexture[COORD_TEXTURE2D_X(posInputs.positionSS)]; + DebugTiles(tileClass, debug.xyz); + _OutputTexture[COORD_TEXTURE2D_X(posInputs.positionSS)] = debug; + } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFComputeSlowTiles.compute b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFComputeSlowTiles.compute new file mode 100644 index 00000000000..99d826e780e --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFComputeSlowTiles.compute @@ -0,0 +1,66 @@ +#include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Common.hlsl" +#include "Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/ShaderVariables.hlsl" +#include "Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/PostProcessDefines.hlsl" +#include "Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DepthOfFieldCommon.hlsl" +#include "Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/Raytracing/Shaders/RaytracingSampling.hlsl" + +#pragma only_renderers d3d11 playstation xboxone xboxseries vulkan metal switch + +#pragma kernel ComputeSlowTiles + +#pragma multi_compile _ ENABLE_ALPHA + +#define GROUP_RES 8u +#define GROUP_SIZE (GROUP_RES * GROUP_RES) + +CBUFFER_START(cb0) +float4 _Params; +float4 _Params2; +CBUFFER_END + +#define NumRings _Params.x +#define MaxCoCRadius _Params.y +#define Anamorphism _Params.z + +// Here we write the final output +RW_TEXTURE2D_X(CTYPE, _OutputTexture); + +#define ResScale 1.0f +#define 
OneOverResScale 1.0f +#define MaxColorMip 0.0 +#define AdaptiveSamplingWeights _Params2.xy +#define BlurResolution _Params2.z +#define InvBlurResolution _Params2.w +#include "Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFGatherUtils.hlsl" + +[numthreads(GROUP_RES, GROUP_RES, 1)] +void ComputeSlowTiles(uint3 dispatchThreadId : SV_DispatchThreadID) +{ + UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z); + + PositionInputs posInputs = GetPositionInput(float2(dispatchThreadId.xy), _PostProcessScreenSize.zw, uint2(GROUP_RES, GROUP_RES)); + + CTYPE output = GetColorSample(posInputs.positionSS, 0); + + int tileClass = GetTileClass(posInputs.positionSS); + + if (tileClass == SLOW_INFOCUS_TILE) + { + SampleData centerSample; + centerSample.color = output; + centerSample.CoC = GetCoCRadius(posInputs.positionSS); + + DoFTile tileData; + LoadTileData(posInputs.positionSS, centerSample, NumRings, tileData); + + float4 outColor; + float outAlpha; + DoFGatherRings(posInputs, tileData, centerSample, outColor, outAlpha); + output.xyz = outColor.xyz; + #ifdef ENABLE_ALPHA + ComposeAlpha(output, centerSample.color.xyz, outAlpha); + #endif + } + + _OutputTexture[COORD_TEXTURE2D_X(posInputs.positionSS)] = (CTYPE)output; +} diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFComputeSlowTiles.compute.meta b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFComputeSlowTiles.compute.meta new file mode 100644 index 00000000000..1a250da624b --- /dev/null +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFComputeSlowTiles.compute.meta @@ -0,0 +1,7 @@ +fileFormatVersion: 2 +guid: b89f86a76de81ee42ae16daad78eb382 +ComputeShaderImporter: + externalObjects: {} + userData: + assetBundleName: + assetBundleVariant: diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFGather.compute 
b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFGather.compute index 44da3e82fd4..508ea385b73 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFGather.compute +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFGather.compute @@ -10,7 +10,7 @@ #pragma multi_compile _ ENABLE_ALPHA #pragma multi_compile _ HIGH_QUALITY -#pragma multi_compile _ FORCE_POINT_SAMPLING +#pragma multi_compile _ FORCE_POINT_SAMPLING CBUFFER_START(cb0) float4 _Params; @@ -46,17 +46,14 @@ void KMain(uint3 dispatchThreadId : SV_DispatchThreadID) centerSample.color = GetColorSample(posInputs.positionSS, 0); centerSample.CoC = GetCoCRadius(posInputs.positionSS); - -#ifndef HIGH_QUALITY int tileClass = GetTileClass(posInputs.positionSS); - if (ResScale != 1.0 && tileClass != FAST_DEFOCUS_TILE) + if (tileClass != FAST_DEFOCUS_TILE) { // Early exit: these tiles will be computed at full res in the combine pass // This might create small artifacts during upscale of the half-res tiles (bilinear fetch at the border picks unblurred values and this might be visible), so it's disabled in high quality mode _OutputTexture[COORD_TEXTURE2D_X(posInputs.positionSS)] = (CTYPE)centerSample.color; return; } -#endif DoFTile tileData; LoadTileData(posInputs.positionSS, centerSample, NumRings, tileData); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFGatherUtils.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFGatherUtils.hlsl index c1f2a0da29a..5309ddf66c2 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFGatherUtils.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFGatherUtils.hlsl @@ -2,12 +2,16 @@ #define DOF_GATHER_UTILS #include "Packages/com.unity.render-pipelines.core/ShaderLibrary/Random.hlsl" 
+#include "Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DepthOfFieldCommon.hlsl" // Input textures TEXTURE2D_X(_InputTexture); TEXTURE2D_X(_InputCoCTexture); TEXTURE2D_X(_TileList); +RWStructuredBuffer _ApertureShapeTable; +uint _ApertureShapeTableCount; + #define FAST_INFOCUS_TILE 0 #define SLOW_INFOCUS_TILE 1 #define FAST_DEFOCUS_TILE 2 @@ -119,19 +123,23 @@ CTYPE GetColorSample(float2 sampleTC, float lod) void LoadTileData(float2 sampleTC, SampleData centerSample, float rings, inout DoFTile tileData) { - float4 cocRanges = LOAD_TEXTURE2D_X(_TileList, ResScale * sampleTC / TILE_RES) * OneOverResScale; + CoCTileData cocRanges = LoadCoCTileData(_TileList, uint2(ResScale * sampleTC / TILE_RES)); + + cocRanges.minFarCoC *= OneOverResScale; + cocRanges.maxFarCoC *= OneOverResScale; + cocRanges.minNearCoC *= OneOverResScale; + cocRanges.maxNearCoC *= OneOverResScale; // Note: for the far-field, we don't need to search further than than the central CoC. // If there is a larger CoC that overlaps the central pixel then it will have greater depth - float limit = min(cocRanges.y, 2 * abs(centerSample.CoC)); - tileData.maxRadius = max(limit, -cocRanges.w); + float limit = min(cocRanges.maxFarCoC, 2 * abs(centerSample.CoC)); + tileData.maxRadius = max(limit, -cocRanges.maxNearCoC); // Detect tiles than need more samples - tileData.numSamples = rings; - tileData.numSamples = tileData.maxRadius > 0 ? tileData.numSamples : 0; + tileData.numSamples = tileData.maxRadius > 0 ? rings : 0; #ifdef ADAPTIVE_SAMPLING - float minRadius = min(cocRanges.x, -cocRanges.z) * OneOverResScale; + float minRadius = min(cocRanges.minFarCoC, -cocRanges.minNearCoC) * OneOverResScale; tileData.numSamples = (int)ceil((minRadius / tileData.maxRadius < 0.1) ? 
tileData.numSamples * AdaptiveSamplingWeights.x : tileData.numSamples * AdaptiveSamplingWeights.y); #endif @@ -147,9 +155,11 @@ void LoadTileData(float2 sampleTC, SampleData centerSample, float rings, inout D #endif } -float2 PointInCircle(float angle) +float2 PointOnCircle(float angle01) { - return float2(cos(angle), sin(angle)) * float2 (1 - Anamorphism, 1 + Anamorphism); + angle01 %= 1; + uint index = angle01 * _ApertureShapeTableCount; + return _ApertureShapeTable[index]; } void ResolveColorAndAlpha(inout float4 outColor, inout float outAlpha, CTYPE defaultValue) @@ -282,7 +292,7 @@ void DoFGatherRings(PositionInputs posInputs, DoFTile tileData, SampleData cente float dR = rcp((float)tileData.numSamples); int noiseOffset = _TaaFrameInfo.w != 0.0 ? _TaaFrameInfo.z : 0; int halfSamples = tileData.numSamples >> 1; - float dAng = PI * rcp(halfSamples); + float dAng = rcp(halfSamples); // Select the appropriate mip to sample based on the amount of samples. Lower sample counts will be faster at the cost of "leaking" float lod = min(MaxColorMip, log2(2 * PI * tileData.maxRadius * rcp(tileData.numSamples))); @@ -319,12 +329,12 @@ void DoFGatherRings(PositionInputs posInputs, DoFTile tileData, SampleData cente #endif SampleData sampleData[2]; - const float offset[2] = { 0, PI }; + const float offset[2] = { 0, 0.5 }; UNITY_UNROLL for (int j = 0; j < 2; j++) { - float2 sampleTC = posInputs.positionSS + sampleRadius * PointInCircle(offset[j] + (i + r2) * dAng); + float2 sampleTC = posInputs.positionSS + sampleRadius * PointOnCircle(offset[j] + (i + r2) * dAng); sampleData[j].color = GetColorSample(sampleTC, lod); sampleData[j].CoC = GetCoCRadius(sampleTC); bgEstimate += float4(sampleData[j].color.xyz, 1); @@ -364,32 +374,35 @@ void DoFGatherRings(PositionInputs posInputs, DoFTile tileData, SampleData cente int GetTileClass(float2 sampleTC) { - float4 cocRanges = LOAD_TEXTURE2D_X(_TileList, ResScale * sampleTC / TILE_RES); - float minRadius = min(abs(cocRanges.x), 
-cocRanges.z); - float maxRadius = max(abs(cocRanges.y), -cocRanges.w); + CoCTileData cocRanges = LoadCoCTileData(_TileList, ResScale * sampleTC / TILE_RES); + float minRadius = min(abs(cocRanges.minFarCoC), -cocRanges.minNearCoC); + float maxRadius = max(abs(cocRanges.maxFarCoC), -cocRanges.maxNearCoC); + // If the CoC radius of the tile is less than 1 px, then we're in focus and we can just copy the tile. if (minRadius < 1 && maxRadius < 1) return FAST_INFOCUS_TILE; - else if (minRadius > 2.5 && maxRadius > 2.5) + // If the CoC radius of the tile is always more than 1 px, then the tile is fully defocus (either by near or far blur) + // We can also just copy this tile from the near/far gather pass. + else if (minRadius > 1 && maxRadius > 1) return FAST_DEFOCUS_TILE; + // Worst case, the tile contains both in focus and defocus pixels, we need to compute the blur for each pixel in the tile. else return SLOW_INFOCUS_TILE; } -void DebugTiles(float2 sampleTC, inout float3 output) +void DebugTiles(int tileClass, inout float3 output) { - int tileClass = GetTileClass(sampleTC); if (tileClass == SLOW_INFOCUS_TILE) { - output.xyz = lerp(output.xyz, float3(1, 0, 0), 0.9); + output.xyz = lerp(output.xyz, float3(1, 0, 0), 0.95); } else if (tileClass == FAST_DEFOCUS_TILE) { - output.xyz = lerp(output.xyz, float3(0, 0, 1), 0.9); + output.xyz = lerp(output.xyz, float3(0, 0, 1), 0.95); } - else + else // FAST_INFOCUS_TILE { - output.xyz = lerp(output.xyz, float3(0, 1, 0), 0.9); + output.xyz = lerp(output.xyz, float3(0, 1, 0), 0.95); } } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFMinMaxDilate.compute b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFMinMaxDilate.compute index 2dac5ef1584..edc10ead1ad 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFMinMaxDilate.compute +++ 
b/Packages/com.unity.render-pipelines.high-definition/Runtime/PostProcessing/Shaders/DoFMinMaxDilate.compute @@ -15,15 +15,15 @@ RW_TEXTURE2D_X(float4, _OutputTexture); #define GROUP_RES 8u #define GROUP_SIZE (GROUP_RES * GROUP_RES) -void DilateTile(inout float4 tileInfo, float4 tile) +void DilateTile(inout CoCTileData tileInfo, CoCTileData tile) { // far field - tileInfo.x = tileInfo.x; - tileInfo.y = max(tileInfo.y, tile.y); + tileInfo.minFarCoC = min(tileInfo.minFarCoC, tile.minFarCoC); + tileInfo.maxFarCoC = max(tileInfo.maxFarCoC, tile.maxFarCoC); // near field - tileInfo.z = tileInfo.z; - tileInfo.w = min(tileInfo.w, tile.w); + tileInfo.minNearCoC = max(tileInfo.minNearCoC, tile.minNearCoC); + tileInfo.maxNearCoC = min(tileInfo.maxNearCoC, tile.maxNearCoC); } [numthreads(GROUP_RES, GROUP_RES, 1)] @@ -31,7 +31,7 @@ void KMain(uint3 dispatchThreadId : SV_DispatchThreadID) { UNITY_XR_ASSIGN_VIEW_INDEX(dispatchThreadId.z); - float4 tileInfo = _InputTexture[COORD_TEXTURE2D_X(dispatchThreadId.xy)]; + CoCTileData tileInfo = LoadCoCTileData(_InputTexture, dispatchThreadId.xy); int2 startIndex = max((int2)dispatchThreadId.xy - int2(1, 1), int2(0, 0)); int2 endIndex = min((int2)dispatchThreadId.xy + int2(1, 1), _PostProcessScreenSize.xy / TILE_RES - int2(1, 1)); @@ -40,10 +40,10 @@ void KMain(uint3 dispatchThreadId : SV_DispatchThreadID) { for (int j = startIndex.y; j <= endIndex.y; j++) { - float4 tile = LOAD_TEXTURE2D_X(_InputTexture, int2(i, j)); + CoCTileData tile = LoadCoCTileData(_InputTexture, int2(i, j)); DilateTile(tileInfo, tile); } } - _OutputTexture[COORD_TEXTURE2D_X(dispatchThreadId.xy)] = tileInfo; + _OutputTexture[COORD_TEXTURE2D_X(dispatchThreadId.xy)] = PackCoCTileData(tileInfo); } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/GlobalPostProcessingQualitySettings.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/GlobalPostProcessingQualitySettings.cs index f7427a9d283..65190473a9e 
100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/GlobalPostProcessingQualitySettings.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/GlobalPostProcessingQualitySettings.cs @@ -90,6 +90,10 @@ internal GlobalPostProcessingQualitySettings() LimitManualRangeNearBlur[(int)ScalableSettingLevelParameter.Level.Medium] = false; LimitManualRangeNearBlur[(int)ScalableSettingLevelParameter.Level.High] = false; + AdaptiveSamplingWeight[(int)ScalableSettingLevelParameter.Level.Low] = 0.5f; + AdaptiveSamplingWeight[(int)ScalableSettingLevelParameter.Level.Medium] = 0.75f; + AdaptiveSamplingWeight[(int)ScalableSettingLevelParameter.Level.High] = 2; + /* Motion Blur */ MotionBlurSampleCount[(int)ScalableSettingLevelParameter.Level.Low] = 4; MotionBlurSampleCount[(int)ScalableSettingLevelParameter.Level.Medium] = 8; @@ -135,6 +139,9 @@ internal GlobalPostProcessingQualitySettings() public bool[] DoFHighQualityFiltering = new bool[s_QualitySettingCount]; /// Use physically based Depth of field for each quality level. The array must have one entry per scalable setting level. public bool[] DoFPhysicallyBased = new bool[s_QualitySettingCount]; + /// Adjust the number of samples in the physically based depth of field depending on the radius of the blur. Higher values will decrease the noise but increase the rendering cost. + [Range(0.25f, 4f)] + public float[] AdaptiveSamplingWeight = new float[s_QualitySettingCount]; /// Adjust near blur CoC based on depth distance when manual, non-physical mode is used for each quality level. The array must have one entry per scalable setting level. 
public bool[] LimitManualRangeNearBlur = new bool[s_QualitySettingCount]; diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDProfileId.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDProfileId.cs index bb77a8b68ee..fbcbcb60148 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDProfileId.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDProfileId.cs @@ -278,6 +278,8 @@ internal enum HDProfileId DepthOfFieldGatherNear, DepthOfFieldPreCombine, DepthOfFieldCombine, + DepthOfFieldComputeSlowTiles, + DepthOfFieldApertureShape, LensFlareScreenSpace, LensFlareDataDriven, LensFlareComputeOcclusionDataDriven, diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.PostProcess.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.PostProcess.cs index 3f6adfee965..21349011862 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.PostProcess.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.PostProcess.cs @@ -48,6 +48,7 @@ public partial class HDRenderPipeline ComputeBuffer m_BokehIndirectCmd; ComputeBuffer m_NearBokehTileList; ComputeBuffer m_FarBokehTileList; + const int k_DepthOfFieldApertureShapeBufferSize = 256; // AMD-CAS data ComputeBuffer m_ContrastAdaptiveSharpen; @@ -560,6 +561,17 @@ public bool PerformsAntiAliasing() schedule = DynamicResolutionHandler.instance.upsamplerSchedule, }; } + // TAA can use CAS for sharpening without upsampling. 
+ if (hdCamera.taaSharpenMode == HDAdditionalCameraData.TAASharpenMode.ContrastAdaptiveSharpening && !hdCamera.DynResRequest.enabled) + { + Assertions.Assert.AreEqual(DynamicResolutionHandler.UpsamplerScheduleType.AfterPost, DynamicResolutionHandler.instance.upsamplerSchedule); + return new CurrentUpsamplerData + { + isAdvancedUpsampler = false, + regularUpsampler = DynamicResUpscaleFilter.ContrastAdaptiveSharpen, + schedule = DynamicResolutionHandler.instance.upsamplerSchedule, + }; + } } } @@ -2267,6 +2279,10 @@ struct DepthOfFieldParameters public int pbDoFDilateKernel; public ComputeShader pbDoFCombineCS; public int pbDoFCombineKernel; + public ComputeShader dofComputeSlowTilesCS; + public int dofComputeSlowTilesKernel; + public ComputeShader dofComputeApertureShapeCS; + public int dofComputeApertureShapeKernel; public int minMaxCoCTileSize; public BlueNoise.DitheredTextureSet ditheredTextureSet; @@ -2283,6 +2299,7 @@ struct DepthOfFieldParameters public DepthOfFieldResolution resolution; public DepthOfFieldMode focusMode; public Vector2 adaptiveSamplingWeights; + public bool dynamicResolutionEnabled; public Vector2 physicalCameraCurvature; public float physicalCameraAperture; @@ -2306,7 +2323,7 @@ struct DepthOfFieldParameters public bool useMipSafePath; } - DepthOfFieldParameters PrepareDoFParameters(HDCamera hdCamera) + DepthOfFieldParameters PrepareDoFParameters(HDCamera hdCamera, CurrentUpsamplerData? 
upsamplerData) { DepthOfFieldParameters parameters = new DepthOfFieldParameters(); @@ -2349,7 +2366,11 @@ DepthOfFieldParameters PrepareDoFParameters(HDCamera hdCamera) parameters.pbDoFGatherCS = runtimeShaders.dofGatherCS; parameters.pbDoFGatherKernel = parameters.pbDoFGatherCS.FindKernel("KMain"); parameters.pbDoFCombineCS = runtimeShaders.dofCombineCS; - parameters.pbDoFCombineKernel = parameters.pbDoFGatherCS.FindKernel("KMain"); + parameters.pbDoFCombineKernel = parameters.pbDoFCombineCS.FindKernel("UpsampleFastTiles"); + parameters.dofComputeSlowTilesCS = runtimeShaders.dofComputeSlowTilesCS; + parameters.dofComputeSlowTilesKernel = parameters.dofComputeSlowTilesCS.FindKernel("ComputeSlowTiles"); + parameters.dofComputeApertureShapeCS = runtimeShaders.dofComputeApertureShapeCS; + parameters.dofComputeApertureShapeKernel = parameters.dofComputeApertureShapeCS.FindKernel("ComputeShapeBuffer"); parameters.minMaxCoCTileSize = 8; parameters.camera = hdCamera; @@ -2439,6 +2460,7 @@ DepthOfFieldParameters PrepareDoFParameters(HDCamera hdCamera) parameters.dofPrecombineFarCS.EnableKeyword("ENABLE_ALPHA"); parameters.pbDoFGatherCS.EnableKeyword("ENABLE_ALPHA"); parameters.pbDoFCombineCS.EnableKeyword("ENABLE_ALPHA"); + parameters.dofComputeSlowTilesCS.EnableKeyword("ENABLE_ALPHA"); } if (parameters.resolution == DepthOfFieldResolution.Full) @@ -2446,6 +2468,10 @@ DepthOfFieldParameters PrepareDoFParameters(HDCamera hdCamera) parameters.dofPrefilterCS.EnableKeyword("FULL_RES"); parameters.dofCombineCS.EnableKeyword("FULL_RES"); } + else if (parameters.dynamicResolutionEnabled) + { + parameters.dofGatherCS.EnableKeyword("LOW_RESOLUTION"); + } else if (parameters.highQualityFiltering) { parameters.dofPrefilterCS.EnableKeyword("HIGH_QUALITY"); @@ -2484,10 +2510,6 @@ DepthOfFieldParameters PrepareDoFParameters(HDCamera hdCamera) parameters.dofCoCReprojectCS.EnableKeyword("ENABLE_MAX_BLENDING"); parameters.ditheredTextureSet = 
GetBlueNoiseManager().DitheredTextureSet256SPP(); - // PBR dof has special resolution requirements. Either half or full. - // The max here will constrain it to just quarter or half. - parameters.resolution = (DepthOfFieldResolution)Math.Max((int)parameters.resolution, (int)DepthOfFieldResolution.Half); - if (parameters.resolution != DepthOfFieldResolution.Quarter) { // Reasons for this flag: @@ -2498,14 +2520,15 @@ DepthOfFieldParameters PrepareDoFParameters(HDCamera hdCamera) parameters.pbDoFCombineCS.EnableKeyword("FORCE_POINT_SAMPLING"); } - if (parameters.highQualityFiltering) - { - parameters.pbDoFGatherCS.EnableKeyword("HIGH_QUALITY"); - } + // Sampling ratios for adaptive sampling. + // X: ratio of the sharp part tiles of PBR dof that have high variance of CoC. + // Y: ratio of the blurry / sharp tiles that have low variance of CoC. + parameters.adaptiveSamplingWeights = new Vector2( + m_DepthOfField.adaptiveSamplingWeight <= 1.0f ? m_DepthOfField.adaptiveSamplingWeight : 1.0f, + m_DepthOfField.adaptiveSamplingWeight > 1.0f ? m_DepthOfField.adaptiveSamplingWeight : 1.0f + ); - parameters.adaptiveSamplingWeights = (parameters.highQualityFiltering) - ? 
DepthOfField.s_HighQualityAdaptiveSamplingWeights - : DepthOfField.s_LowQualityAdaptiveSamplingWeights; + parameters.dynamicResolutionEnabled = upsamplerData != null && upsamplerData.Value.schedule != DynamicResolutionHandler.UpsamplerScheduleType.BeforePost; } if (hdCamera.msaaEnabled) @@ -2580,7 +2603,6 @@ static void DoDepthOfField(in DepthOfFieldParameters dofParameters, CommandBuffe bool bothLayersActive = nearLayerActive && farLayerActive; bool useTiles = dofParameters.useTiles; - bool hqFiltering = dofParameters.highQualityFiltering; const uint kIndirectNearOffset = 0u * sizeof(uint); const uint kIndirectFarOffset = 3u * sizeof(uint); @@ -3041,7 +3063,7 @@ static void ReprojectCoCHistory(in DepthOfFieldParameters parameters, CommandBuf fullresCoC = nextCoC; } - static void DoPhysicallyBasedDepthOfField(in DepthOfFieldParameters dofParameters, CommandBuffer cmd, RTHandle source, RTHandle destination, RTHandle fullresCoC, RTHandle prevCoCHistory, RTHandle nextCoCHistory, RTHandle motionVecTexture, RTHandle sourcePyramid, RTHandle depthBuffer, RTHandle minMaxCoCPing, RTHandle minMaxCoCPong, RTHandle scaledDof, bool taaEnabled, RTHandle depthMinMaxAvgMSAA) + static void DoPhysicallyBasedDepthOfField(in DepthOfFieldParameters dofParameters, CommandBuffer cmd, RTHandle source, RTHandle destination, RTHandle fullresCoC, RTHandle prevCoCHistory, RTHandle nextCoCHistory, RTHandle motionVecTexture, RTHandle sourcePyramid, RTHandle depthBuffer, RTHandle minMaxCoCPing, RTHandle minMaxCoCPong, RTHandle scaledDof, bool taaEnabled, RTHandle depthMinMaxAvgMSAA, BufferHandle shapeTable, bool debugTileClassification) { // Currently Physically Based DoF is performed at "full" resolution (ie does not utilize DepthOfFieldResolution) // However, to produce similar results when switching between various resolutions, or dynamic resolution, @@ -3114,32 +3136,13 @@ static void DoPhysicallyBasedDepthOfField(in DepthOfFieldParameters dofParameter } } - using (new ProfilingScope(cmd, 
ProfilingSampler.Get(HDProfileId.DepthOfFieldPyramid))) - { - // DoF color pyramid - if (sourcePyramid != null) - { - cs = dofParameters.dofMipCS; - kernel = dofParameters.dofMipColorKernel; - - cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._InputTexture, source, 0); - cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputTexture, sourcePyramid, 0); - cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputMip1, sourcePyramid, 1); - cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputMip2, sourcePyramid, 2); - cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputMip3, sourcePyramid, 3); - cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputMip4, sourcePyramid, 4); - - int tx = ((dofParameters.viewportSize.x >> 1) + 7) / 8; - int ty = ((dofParameters.viewportSize.y >> 1) + 7) / 8; - cmd.DispatchCompute(cs, kernel, tx, ty, dofParameters.camera.viewCount); - } - } - using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.DepthOfFieldDilate))) { int tileSize = dofParameters.minMaxCoCTileSize; - int tx = ((dofParameters.viewportSize.x / tileSize) + 7) / 8; - int ty = ((dofParameters.viewportSize.y / tileSize) + 7) / 8; + int tileCountX = Mathf.CeilToInt(dofParameters.viewportSize.x / (float)tileSize); + int tileCountY = Mathf.CeilToInt(dofParameters.viewportSize.y / (float)tileSize); + int tx = HDUtils.DivRoundUp(tileCountX, 8); + int ty = HDUtils.DivRoundUp(tileCountY, 8); // Min Max CoC tiles { @@ -3147,6 +3150,7 @@ static void DoPhysicallyBasedDepthOfField(in DepthOfFieldParameters dofParameter kernel = dofParameters.pbDoFMinMaxKernel; cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._InputTexture, fullresCoC, 0); + cmd.SetComputeVectorParam(cs, HDShaderIDs._OutputResolution, new Vector2(tileCountX, tileCountY)); cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputTexture, minMaxCoCPing, 0); cmd.DispatchCompute(cs, kernel, tx, ty, dofParameters.camera.viewCount); } @@ -3167,6 +3171,83 @@ static void 
DoPhysicallyBasedDepthOfField(in DepthOfFieldParameters dofParameter } } + // Compute the shape of the aperture into a buffer, sampling this buffer in the loop of the DoF + // is faster than computing sin/cos of each angle for the sampling and it let us handle the shape + // of the aperture with the blade count. + using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.DepthOfFieldApertureShape))) + { + cs = dofParameters.dofComputeApertureShapeCS; + kernel = dofParameters.dofComputeApertureShapeKernel; + float rotation = (dofParameters.physicalCameraAperture - Camera.kMinAperture) / (Camera.kMaxAperture - Camera.kMinAperture); + rotation *= (360f / dofParameters.physicalCameraBladeCount) * Mathf.Deg2Rad; // TODO: Crude approximation, make it correct + + float ngonFactor = 1f; + if (dofParameters.physicalCameraCurvature.y - dofParameters.physicalCameraCurvature.x > 0f) + ngonFactor = (dofParameters.physicalCameraAperture - dofParameters.physicalCameraCurvature.x) / (dofParameters.physicalCameraCurvature.y - dofParameters.physicalCameraCurvature.x); + + ngonFactor = Mathf.Clamp01(ngonFactor); + ngonFactor = Mathf.Lerp(ngonFactor, 0f, Mathf.Abs(dofParameters.physicalCameraAnamorphism)); + + cmd.SetComputeVectorParam(cs, HDShaderIDs._Params, new Vector4(dofParameters.physicalCameraBladeCount, ngonFactor, rotation, dofParameters.physicalCameraAnamorphism / 4f)); + cmd.SetComputeIntParam(cs, HDShaderIDs._ApertureShapeTableCount, k_DepthOfFieldApertureShapeBufferSize); + cmd.SetComputeBufferParam(cs, kernel, HDShaderIDs._ApertureShapeTable, shapeTable); + cmd.DispatchCompute(cs, kernel, k_DepthOfFieldApertureShapeBufferSize / 64, 1, 1); + } + + // Slow tiles refer to a tile that contain both in focus and defocus pixels which requires to gather the CoC + // per pixel + + // Compute the slow path tiles into the output buffer. 
+ // The output of this pass is used as input for the color pyramid below, this is to avoid some + // leaking artifacts on the border of the tiles. Blurring the slow tiles allows for the bilinear + // interpolation in the final upsample pass to get more correct data instead of sampling non-blurred tiles. + using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.DepthOfFieldComputeSlowTiles))) + { + cs = dofParameters.dofComputeSlowTilesCS; + kernel = dofParameters.dofComputeSlowTilesKernel; + float sampleCount = Mathf.Max(dofParameters.nearSampleCount, dofParameters.farSampleCount); + float anamorphism = dofParameters.physicalCameraAnamorphism / 4f; + + float mipLevel = 1 + Mathf.Ceil(Mathf.Log(maxCoc, 2)); + cmd.SetComputeVectorParam(cs, HDShaderIDs._Params, new Vector4(sampleCount, maxCoc, anamorphism, 0.0f)); + cmd.SetComputeVectorParam(cs, HDShaderIDs._Params2, new Vector4(dofParameters.adaptiveSamplingWeights.x, dofParameters.adaptiveSamplingWeights.y, (float)dofParameters.resolution, 1.0f/(float)dofParameters.resolution)); + cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._InputTexture, source); + cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._InputCoCTexture, fullresCoC); + cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._TileList, minMaxCoCPing, 0); + cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputTexture, destination); + cmd.SetComputeBufferParam(cs, kernel, HDShaderIDs._ApertureShapeTable, shapeTable); + cmd.SetComputeIntParam(cs, HDShaderIDs._ApertureShapeTableCount, k_DepthOfFieldApertureShapeBufferSize); + + cmd.DispatchCompute(cs, kernel, (dofParameters.viewportSize.x + 7) / 8, (dofParameters.viewportSize.y + 7) / 8, dofParameters.camera.viewCount); + } + + // When the DoF is at full resolution, we consider that this is the highest quality level so we remove + // the sampling from the pyramid which causes artifacts on the border of tiles in certain scenarios. 
+ if (dofParameters.resolution != DepthOfFieldResolution.Full) + { + // DoF color pyramid with the slow tiles inside + using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.DepthOfFieldPyramid))) + { + if (sourcePyramid != null) + { + cs = dofParameters.dofMipCS; + kernel = dofParameters.dofMipColorKernel; + + cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._InputTexture, destination, 0); + cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputTexture, sourcePyramid, 0); + cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputMip1, sourcePyramid, 1); + cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputMip2, sourcePyramid, 2); + cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputMip3, sourcePyramid, 3); + cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputMip4, sourcePyramid, 4); + + int tx = ((dofParameters.viewportSize.x >> 1) + 7) / 8; + int ty = ((dofParameters.viewportSize.y >> 1) + 7) / 8; + cmd.DispatchCompute(cs, kernel, tx, ty, dofParameters.camera.viewCount); + } + } + } + + // Blur far and near tiles with a "fast" blur using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.DepthOfFieldGatherNear))) { cs = dofParameters.pbDoFGatherCS; @@ -3185,10 +3266,14 @@ static void DoPhysicallyBasedDepthOfField(in DepthOfFieldParameters dofParameter BlueNoise.BindDitheredTextureSet(cmd, dofParameters.ditheredTextureSet); int scaledWidth = (dofParameters.viewportSize.x / (int)dofParameters.resolution + 7) / 8; int scaledHeight = (dofParameters.viewportSize.y / (int)dofParameters.resolution + 7) / 8; + cmd.SetComputeBufferParam(cs, kernel, HDShaderIDs._ApertureShapeTable, shapeTable); + cmd.SetComputeIntParam(cs, HDShaderIDs._ApertureShapeTableCount, k_DepthOfFieldApertureShapeBufferSize); cmd.DispatchCompute(cs, kernel, scaledWidth, scaledHeight, dofParameters.camera.viewCount); } + // Upscale near/far defocus tiles with a bilinear filter. 
The bilinear filtering leaking is reduced + // because the neighbouring tiles have already been blurred by the first slow tile pass. using (new ProfilingScope(cmd, ProfilingSampler.Get(HDProfileId.DepthOfFieldCombine))) { cs = dofParameters.pbDoFCombineCS; @@ -3204,6 +3289,7 @@ static void DoPhysicallyBasedDepthOfField(in DepthOfFieldParameters dofParameter cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._InputNearTexture, scaledDof); cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._TileList, minMaxCoCPing, 0); cmd.SetComputeTextureParam(cs, kernel, HDShaderIDs._OutputTexture, destination); + cmd.SetComputeIntParam(cs, HDShaderIDs._DebugTileClassification, debugTileClassification ? 1 : 0); cmd.DispatchCompute(cs, kernel, (dofParameters.viewportSize.x + 7) / 8, (dofParameters.viewportSize.y + 7) / 8, dofParameters.camera.viewCount); } @@ -3236,20 +3322,19 @@ class DepthofFieldData public BufferHandle bokehIndirectCmd; public BufferHandle nearBokehTileList; public BufferHandle farBokehTileList; + public BufferHandle apertureShapeTable; public bool taaEnabled; + public bool debugTileClassification; } TextureHandle DepthOfFieldPass(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthBuffer, TextureHandle motionVectors, TextureHandle depthBufferMipChain, TextureHandle source, TextureHandle depthMinMaxAvgMSAA, TextureHandle stencilTexture, CurrentUpsamplerData? 
upsamplerData) { bool postDoFTAAEnabled = false; bool isSceneView = hdCamera.camera.cameraType == CameraType.SceneView; - bool stabilizeCoC = m_AntialiasingFS && hdCamera.antialiasing == HDAdditionalCameraData.AntialiasingMode.TemporalAntialiasing; bool isOrtho = hdCamera.camera.orthographic; - - // If DLSS is enabled, we need to stabilize the CoC buffer (because the upsampled depth is jittered) - if (hdCamera.RequiresCameraJitter()) - stabilizeCoC = true; + // If jitter is enabled, we need to stabilize the CoC buffer (because the upsampled depth is jittered) + bool stabilizeCoC = hdCamera.RequiresCameraJitter() && m_DepthOfField.coCStabilization.value; // If Path tracing is enabled, then DoF is computed in the path tracer by sampling the lens aperure (when using the physical camera mode) bool isDoFPathTraced = (hdCamera.frameSettings.IsEnabled(FrameSettingsField.RayTracing) && @@ -3268,7 +3353,7 @@ TextureHandle DepthOfFieldPass(RenderGraph renderGraph, HDCamera hdCamera, Textu hdCamera.resetPostProcessingHistory = true; } - var dofParameters = PrepareDoFParameters(hdCamera); + var dofParameters = PrepareDoFParameters(hdCamera, upsamplerData); bool useHistoryMips = m_DepthOfField.physicallyBased; bool cocHistoryValid = GrabCoCHistory(hdCamera, out var prevCoC, out var nextCoC, useMips: useHistoryMips); @@ -3418,7 +3503,8 @@ TextureHandle DepthOfFieldPass(RenderGraph renderGraph, HDCamera hdCamera, Textu passData.pongFarRGB = builder.CreateTransientTexture(new TextureDesc(screenScale, IsDynamicResUpscaleTargetEnabled(), true) { colorFormat = GetPostprocessTextureFormat(hdCamera), enableRandomWrite = true, name = "Scaled DoF" }); - passData.pingFarRGB = builder.CreateTransientTexture(GetPostprocessOutputHandle(renderGraph, "DoF Source Pyramid", GetPostprocessTextureFormat(hdCamera), true)); + if (dofParameters.resolution != DepthOfFieldResolution.Full) + passData.pingFarRGB = builder.CreateTransientTexture(GetPostprocessOutputHandle(renderGraph, "DoF Source 
Pyramid", GetPostprocessTextureFormat(hdCamera), true)); // The size of the tile texture should be rounded-up, so we use a custom scale operator // We cannot use the tile size in the scale call callback (to avoid gc alloc), so for now we use an assert @@ -3431,14 +3517,18 @@ TextureHandle DepthOfFieldPass(RenderGraph renderGraph, HDCamera hdCamera, Textu passData.pongNearRGB = builder.CreateTransientTexture(new TextureDesc(scaler, IsDynamicResUpscaleTargetEnabled(), true) { colorFormat = GraphicsFormat.R16G16B16A16_SFloat, useMipMap = false, enableRandomWrite = true, name = "CoC Min Max Tiles" }); + passData.apertureShapeTable = builder.CreateTransientBuffer(new BufferDesc(k_DepthOfFieldApertureShapeBufferSize, sizeof(float) * 2)); + passData.debugTileClassification = m_CurrentDebugDisplaySettings.data.fullScreenDebugMode == FullScreenDebugMode.DepthOfFieldTileClassification; + builder.SetRenderFunc( (DepthofFieldData data, RenderGraphContext ctx) => { - DoPhysicallyBasedDepthOfField(data.parameters, ctx.cmd, data.source, data.destination, data.fullresCoC, data.prevCoC, data.nextCoC, data.motionVecTexture, data.pingFarRGB, data.depthBuffer, data.pingNearRGB, data.pongNearRGB, data.pongFarRGB, data.taaEnabled, data.depthMinMaxAvgMSAA); + DoPhysicallyBasedDepthOfField(data.parameters, ctx.cmd, data.source, data.destination, data.fullresCoC, data.prevCoC, data.nextCoC, data.motionVecTexture, data.pingFarRGB, data.depthBuffer, data.pingNearRGB, data.pongNearRGB, data.pongFarRGB, data.taaEnabled, data.depthMinMaxAvgMSAA, data.apertureShapeTable, data.debugTileClassification); }); source = passData.destination; PushFullScreenDebugTexture(renderGraph, debugCocTexture, debugCocTextureScales, FullScreenDebugMode.DepthOfFieldCoc); + PushFullScreenDebugTexture(renderGraph, passData.destination, hdCamera.postProcessRTScales, FullScreenDebugMode.DepthOfFieldTileClassification); } } diff --git 
a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs index 8d5ef7c03cf..bf5b745b436 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.RenderGraph.cs @@ -624,6 +624,7 @@ class SetFinalTargetPassData public CubemapFace finalTargetFace; public Rect finalViewport; public TextureHandle depthBuffer; + public Vector2 blitScaleBias; public bool flipY; } @@ -650,6 +651,7 @@ void SetFinalTarget(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle de if (passData.copyDepth) { passData.depthBuffer = builder.ReadTexture(depthBuffer); + passData.blitScaleBias = RTHandles.rtHandleProperties.rtHandleScale; passData.flipY = hdCamera.isMainGameView || hdCamera.flipYMode == HDAdditionalCameraData.FlipYMode.ForceFlipY; passData.copyDepthMaterial = m_CopyDepth; } @@ -673,7 +675,7 @@ void SetFinalTarget(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle de mpb.SetTexture(HDShaderIDs._InputDepth, depth); // When we are Main Game View we need to flip the depth buffer ourselves as we are after postprocess / blit that have already flipped the screen mpb.SetInt("_FlipY", data.flipY ? 
1 : 0); - mpb.SetVector(HDShaderIDs._BlitScaleBias, new Vector4(1.0f, 1.0f, 0.0f, 0.0f)); + mpb.SetVector(HDShaderIDs._BlitScaleBias, data.blitScaleBias); CoreUtils.DrawFullScreen(ctx.cmd, data.copyDepthMaterial, mpb); } } @@ -685,10 +687,9 @@ void SetFinalTarget(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle de class CopyXRDepthPassData { public Material copyDepth; - public Rect viewport; public TextureHandle depthBuffer; public TextureHandle output; - public float dynamicResolutionScale; + public Vector2 blitScaleBias; public bool flipY; } @@ -699,10 +700,10 @@ void CopyDepth(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthBu using (var builder = renderGraph.AddRenderPass(name, out var passData, ProfilingSampler.Get(profileID))) { passData.copyDepth = m_CopyDepth; - passData.viewport = hdCamera.finalViewport; passData.depthBuffer = builder.ReadTexture(depthBuffer); - passData.output = builder.WriteTexture(output); - passData.dynamicResolutionScale = copyForXR ? DynamicResolutionHandler.instance.GetCurrentScale() : 1.0f / DynamicResolutionHandler.instance.GetCurrentScale(); + passData.output = builder.UseDepthBuffer(output, DepthAccess.Write); + passData.blitScaleBias = copyForXR ? new Vector2(hdCamera.actualWidth / hdCamera.finalViewport.width, hdCamera.actualHeight / hdCamera.finalViewport.height) + : RTHandles.rtHandleProperties.rtHandleScale; passData.flipY = copyForXR; builder.SetRenderFunc( @@ -711,11 +712,9 @@ void CopyDepth(RenderGraph renderGraph, HDCamera hdCamera, TextureHandle depthBu var mpb = ctx.renderGraphPool.GetTempMaterialPropertyBlock(); mpb.SetTexture(HDShaderIDs._InputDepth, data.depthBuffer); - mpb.SetVector(HDShaderIDs._BlitScaleBias, new Vector4(data.dynamicResolutionScale, data.dynamicResolutionScale, 0.0f, 0.0f)); + mpb.SetVector(HDShaderIDs._BlitScaleBias, data.blitScaleBias); mpb.SetInt(HDShaderIDs._FlipY, data.flipY ? 
1 : 0); - ctx.cmd.SetRenderTarget(data.output, 0, CubemapFace.Unknown, -1); - ctx.cmd.SetViewport(data.viewport); CoreUtils.DrawFullScreen(ctx.cmd, data.copyDepth, mpb); }); } diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs index 4e3f6f6c14a..b356c045b1c 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDRenderPipeline.cs @@ -671,7 +671,7 @@ public HDRenderPipeline(HDRenderPipelineAsset asset) m_DepthPyramidMipLevelOffsetsBuffer = new ComputeBuffer(15, sizeof(int) * 2); m_CustomPassColorBuffer = new Lazy(() => RTHandles.Alloc(Vector2.one, TextureXR.slices, dimension: TextureXR.dimension, colorFormat: GetCustomBufferFormat(), enableRandomWrite: true, useDynamicScale: true, name: "CustomPassColorBuffer")); - m_CustomPassDepthBuffer = new Lazy(() => RTHandles.Alloc(Vector2.one, TextureXR.slices, dimension: TextureXR.dimension, colorFormat: GraphicsFormat.R32_UInt, useDynamicScale: true, name: "CustomPassDepthBuffer", depthBufferBits: DepthBits.Depth32)); + m_CustomPassDepthBuffer = new Lazy(() => RTHandles.Alloc(Vector2.one, TextureXR.slices, dimension: TextureXR.dimension, colorFormat: GraphicsFormat.None, useDynamicScale: true, name: "CustomPassDepthBuffer", depthBufferBits: DepthBits.Depth32)); // For debugging MousePositionDebug.instance.Build(); @@ -2383,12 +2383,6 @@ protected override void Render(ScriptableRenderContext renderContext, Camera[] c cmd.SetInvertCulling(false); } - if (renderRequest.xrPass.isLastCameraPass) - { - // EndCameraRendering callback should be executed outside of any profiling scope in case user code submits the renderContext - EndCameraRendering(renderContext, renderRequest.hdCamera.camera); - } - EndRenderRequest(renderRequest, cmd); // Render 
XR mirror view once all render requests have been completed @@ -2406,6 +2400,12 @@ protected override void Render(ScriptableRenderContext renderContext, Camera[] c renderContext.ExecuteCommandBuffer(cmd); CommandBufferPool.Release(cmd); renderContext.Submit(); + + if (renderRequest.xrPass.isLastCameraPass) + { + // EndCameraRendering callback should be executed outside of any profiling scope in case user code submits the renderContext + EndCameraRendering(renderContext, renderRequest.hdCamera.camera); + } } ScriptableRenderContext.PopDisableApiRenderers(); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDStringConstants.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDStringConstants.cs index 22730448f58..74ff0b9f7b6 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDStringConstants.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/HDStringConstants.cs @@ -969,6 +969,7 @@ static class HDShaderIDs public static readonly int _OutputVelocityMagnitudeHistory = Shader.PropertyToID("_OutputVelocityMagnitudeHistory"); public static readonly int _OutputDepthTexture = Shader.PropertyToID("_OutputDepthTexture"); public static readonly int _OutputMotionVectorTexture = Shader.PropertyToID("_OutputMotionVectorTexture"); + public static readonly int _OutputResolution = Shader.PropertyToID("_OutputResolution"); public static readonly int _TargetScale = Shader.PropertyToID("_TargetScale"); public static readonly int _Params = Shader.PropertyToID("_Params"); @@ -977,6 +978,7 @@ static class HDShaderIDs public static readonly int _Params3 = Shader.PropertyToID("_Params3"); public static readonly int _BokehKernel = Shader.PropertyToID("_BokehKernel"); public static readonly int _InputCoCTexture = Shader.PropertyToID("_InputCoCTexture"); + public static readonly int _DebugTileClassification = Shader.PropertyToID("_DebugTileClassification"); 
public static readonly int _InputHistoryCoCTexture = Shader.PropertyToID("_InputHistoryCoCTexture"); public static readonly int _OutputCoCTexture = Shader.PropertyToID("_OutputCoCTexture"); public static readonly int _OutputNearCoCTexture = Shader.PropertyToID("_OutputNearCoCTexture"); @@ -1004,6 +1006,8 @@ static class HDShaderIDs public static readonly int _InputNearAlphaTexture = Shader.PropertyToID("_InputNearAlphaTexture"); public static readonly int _CoCTargetScale = Shader.PropertyToID("_CoCTargetScale"); public static readonly int _DepthMinMaxAvg = Shader.PropertyToID("_DepthMinMaxAvg"); + public static readonly int _ApertureShapeTable = Shader.PropertyToID("_ApertureShapeTable"); + public static readonly int _ApertureShapeTableCount = Shader.PropertyToID("_ApertureShapeTableCount"); public static readonly int _FlareOcclusionTex = Shader.PropertyToID("_FlareOcclusionTex"); public static readonly int _FlareSunOcclusionTex = Shader.PropertyToID("_FlareSunOcclusionTex"); diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/LineRendering/Kernels/StageSetupSegment.compute b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/LineRendering/Kernels/StageSetupSegment.compute index fa274be91e1..613037d8b1c 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/LineRendering/Kernels/StageSetupSegment.compute +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/RenderPipeline/LineRendering/Kernels/StageSetupSegment.compute @@ -87,6 +87,11 @@ bool CulledLineLOD(uint segmentIndex) [numthreads(NUM_LANE_SEGMENT_SETUP, 1, 1)] void Main(Group group) { + #if defined(UNITY_STEREO_INSTANCING_ENABLED) + // See: [NOTE-HQ-LINES-SINGLE-PASS-STEREO] + unity_StereoEyeIndex = _ViewIndex; + #endif + if (group.groupIndex == 0u) { gs_SegmentOffset = _CounterBuffer.Load(COUNTER_GROUP_SEG_OFFSET); diff --git 
a/Packages/com.unity.render-pipelines.high-definition/Runtime/Settings/HDRenderPipelineRuntimeShaders.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Settings/HDRenderPipelineRuntimeShaders.cs index 291355ca854..eeb04ca2a2f 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Settings/HDRenderPipelineRuntimeShaders.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Settings/HDRenderPipelineRuntimeShaders.cs @@ -1378,6 +1378,22 @@ public ComputeShader dofCombineCS set => this.SetValueAndNotify(ref m_DofCombineCS, value); } + [SerializeField, ResourcePath("Runtime/PostProcessing/Shaders/DoFComputeSlowTiles.compute")] + private ComputeShader m_DofComputeSlowTilesCS; + public ComputeShader dofComputeSlowTilesCS + { + get => m_DofComputeSlowTilesCS; + set => this.SetValueAndNotify(ref m_DofComputeSlowTilesCS, value); + } + + [SerializeField, ResourcePath("Runtime/PostProcessing/Shaders/DoFApertureShape.compute")] + private ComputeShader m_DofComputeApertureShapeCS; + public ComputeShader dofComputeApertureShapeCS + { + get => m_DofComputeApertureShapeCS; + set => this.SetValueAndNotify(ref m_DofComputeApertureShapeCS, value); + } + [Header("Post-processing - Motion Blur")] [SerializeField, ResourcePath("Runtime/PostProcessing/Shaders/MotionBlurMotionVecPrep.compute")] private ComputeShader m_MotionBlurMotionVecPrepCS; diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/CopyDepthBuffer.shader b/Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/CopyDepthBuffer.shader index c320c6f10f6..69e24c42df4 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/CopyDepthBuffer.shader +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/ShaderLibrary/CopyDepthBuffer.shader @@ -71,8 +71,7 @@ Shader "Hidden/HDRP/CopyDepthBuffer" float Frag(Varyings input) : SV_Depth { UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(input); - uint2 
coord = uint2(input.texcoord.xy * _ScreenSize.xy); - return LOAD_TEXTURE2D_X(_InputDepthTexture, coord).x; + return SAMPLE_TEXTURE2D_X_LOD(_InputDepthTexture, s_point_clamp_sampler, input.texcoord.xy, 0).x; } ENDHLSL diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/Utilities/HDRenderUtilities.cs b/Packages/com.unity.render-pipelines.high-definition/Runtime/Utilities/HDRenderUtilities.cs index c75ae50f632..d33035474e1 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/Utilities/HDRenderUtilities.cs +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/Utilities/HDRenderUtilities.cs @@ -12,56 +12,76 @@ public static partial class HDRenderUtilities { /// Perform a rendering into . /// - /// How to perform standard rendering: + /// How to perform rendering into a 2D render target: /// + /// using UnityEngine; + /// using UnityEngine.Rendering; + /// using UnityEngine.Rendering.HighDefinition; + /// using UnityEngine.Experimental.Rendering; + /// /// class StandardRenderingExample /// { - /// public void Render() - /// { - /// // Copy default settings - /// var settings = CameraRenderSettings.Default; - /// // Adapt default settings to our custom usage - /// settings.position.position = new Vector3(0, 1, 0); - /// settings.camera.frustum.fieldOfView = 60.0f; - /// // Get our render target - /// var rt = new RenderTexture(128, 128, 1, GraphicsFormat.B8G8R8A8_SNorm); - /// HDRenderUtilities.Render(settings, rt); - /// // Do something with rt - /// rt.Release(); - /// } + /// public void Render() + /// { + /// // Copy default settings and adjust them for the custom rendering. 
+ /// var cameraSettings = CameraSettings.defaultCameraSettingsNonAlloc; + /// cameraSettings.frustum.fieldOfView = 60.0f; + /// var cameraPosition = CameraPositionSettings.NewDefault(); + /// cameraPosition.position = new Vector3(0, 1, 0); + /// + /// // Create the 2D render target + /// var rt = new RenderTexture(128, 128, 1, GraphicsFormat.B8G8R8A8_UNorm); + /// + /// // Perform the custom rendering into the render target + /// HDRenderUtilities.Render(cameraSettings, cameraPosition, rt); + /// + /// // Implement the custom render target processing. + /// + /// // Release the render target when the processing is done, RenderTexture variables are not garbage collected like normal managed types. + /// rt.Release(); + /// } /// } /// /// /// How to perform a cubemap rendering: /// + /// using UnityEngine; + /// using UnityEngine.Rendering; + /// using UnityEngine.Rendering.HighDefinition; + /// using UnityEngine.Experimental.Rendering; + /// /// class CubemapRenderExample /// { - /// public void Render() - /// { - /// // Copy default settings - /// var settings = CameraRenderSettings.Default; - /// // Adapt default settings to our custom usage - /// settings.position.position = new Vector3(0, 1, 0); - /// settings.camera.physical.iso = 800.0f; - /// // Frustum settings are ignored and driven by the cubemap rendering - /// // Get our render target - /// var rt = new RenderTexture(128, 128, 1, GraphicsFormat.B8G8R8A8_SNorm) - /// { - /// dimension = TextureDimension.Cube - /// }; - /// // The TextureDimension is detected and the renderer will perform a cubemap rendering. - /// HDRenderUtilities.Render(settings, rt); - /// // Do something with rt - /// rt.Release(); - /// } + /// public void Render() + /// { + /// // Copy the default settings and adjust them for the custom rendering. + /// // Frustum settings from cameraSettings are ignored because the render target is a cubemap. 
+ /// var cameraSettings = CameraSettings.defaultCameraSettingsNonAlloc; + /// var cameraPosition = CameraPositionSettings.NewDefault(); + /// cameraPosition.position = new Vector3(0, 1, 0); + /// + /// // Create the cubemap render target + /// var rt = new RenderTexture(128, 128, 1, GraphicsFormat.B8G8R8A8_UNorm) + /// { + /// dimension = TextureDimension.Cube + /// }; + /// + /// // Perform the custom rendering into the cubemap + /// HDRenderUtilities.Render(cameraSettings, cameraPosition, rt); + /// + /// // Implement the custom render target processing. + /// + /// // Release the render target when the processing is done, RenderTexture variables are not garbage collected like normal managed types. + /// rt.Release(); + /// } /// } /// /// /// Settings for the camera. /// Position for the camera. /// Target to render to. - /// Only used in the Editor fo cubemaps. - /// This is bitmask of only objects with these flags will be rendered + /// Only used in the Editor for cubemaps. + /// This is a bitmask of , only objects with these flags are rendered. /// public static void Render( CameraSettings settings, @@ -349,7 +369,7 @@ public static void Render( [Obsolete("Use CreateReflectionProbeRenderTarget with explicit format instead", true)] public static RenderTexture CreateReflectionProbeRenderTarget(int cubemapSize) { - RenderTexture rt = new RenderTexture(cubemapSize, cubemapSize, 1, GraphicsFormat.R16G16B16A16_SFloat) + RenderTexture rt = new RenderTexture(cubemapSize, cubemapSize, 0, GraphicsFormat.R16G16B16A16_SFloat) { dimension = TextureDimension.Cube, enableRandomWrite = true, @@ -368,7 +388,7 @@ public static RenderTexture CreateReflectionProbeRenderTarget(int cubemapSize) /// The texture to use as reflection probe target. 
public static RenderTexture CreateReflectionProbeRenderTarget(int cubemapSize, GraphicsFormat format) { - RenderTexture rt = new RenderTexture(cubemapSize, cubemapSize, 1, format) + RenderTexture rt = new RenderTexture(cubemapSize, cubemapSize, 0, format) { dimension = TextureDimension.Cube, enableRandomWrite = true, @@ -388,7 +408,7 @@ public static RenderTexture CreateReflectionProbeRenderTarget(int cubemapSize, G /// The texture used as planar reflection probe target public static RenderTexture CreatePlanarProbeRenderTarget(int planarSize, GraphicsFormat format) { - RenderTexture rt = new RenderTexture(planarSize, planarSize, 1, format) + RenderTexture rt = new RenderTexture(planarSize, planarSize, 0, format) { dimension = TextureDimension.Tex2D, enableRandomWrite = true, @@ -407,7 +427,7 @@ public static RenderTexture CreatePlanarProbeRenderTarget(int planarSize, Graphi /// The texture used as planar reflection probe target public static RenderTexture CreatePlanarProbeDepthRenderTarget(int planarSize) { - RenderTexture rt = new RenderTexture(planarSize, planarSize, 1, GraphicsFormat.R32_SFloat) + RenderTexture rt = new RenderTexture(planarSize, planarSize, 0, GraphicsFormat.R32_SFloat) { dimension = TextureDimension.Tex2D, enableRandomWrite = true, diff --git a/Packages/com.unity.render-pipelines.high-definition/Runtime/VFXGraph/Shaders/VFXDefines.hlsl b/Packages/com.unity.render-pipelines.high-definition/Runtime/VFXGraph/Shaders/VFXDefines.hlsl index 60a5b859ad0..1a1a4e660a3 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Runtime/VFXGraph/Shaders/VFXDefines.hlsl +++ b/Packages/com.unity.render-pipelines.high-definition/Runtime/VFXGraph/Shaders/VFXDefines.hlsl @@ -60,6 +60,10 @@ #define CULL_VERTEX(o) { o.VFX_VARYING_POSCS.x = VFX_NAN; return o; } #endif +#if HAS_STRIPS +#define HAS_STRIPS_DATA 1 +#endif + // Enable the support of global mip bias in the shader. 
// Only has effect if the global mip bias is enabled in shader config and DRS is enabled. #define SUPPORT_GLOBAL_MIP_BIAS diff --git a/Packages/com.unity.render-pipelines.high-definition/Tests/Editor/HDAnalyticsTests_Defaults.txt b/Packages/com.unity.render-pipelines.high-definition/Tests/Editor/HDAnalyticsTests_Defaults.txt index 3109433e317..73220fce423 100644 --- a/Packages/com.unity.render-pipelines.high-definition/Tests/Editor/HDAnalyticsTests_Defaults.txt +++ b/Packages/com.unity.render-pipelines.high-definition/Tests/Editor/HDAnalyticsTests_Defaults.txt @@ -152,6 +152,7 @@ {"postProcessQualitySettings.DoFResolution":"[Quarter,Half,Full]"}, {"postProcessQualitySettings.DoFHighQualityFiltering":"[False,True,True]"}, {"postProcessQualitySettings.DoFPhysicallyBased":"[False,False,False]"}, +{"postProcessQualitySettings.AdaptiveSamplingWeight":"[0.5,0.75,2]"}, {"postProcessQualitySettings.LimitManualRangeNearBlur":"[False,False,False]"}, {"postProcessQualitySettings.MotionBlurSampleCount":"[4,8,12]"}, {"postProcessQualitySettings.BloomRes":"[Quarter,Half,Half]"}, diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/TableOfContents.md b/Packages/com.unity.render-pipelines.universal/Documentation~/TableOfContents.md index 564e2cb8b21..118499346c8 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/TableOfContents.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/TableOfContents.md @@ -1,246 +1 @@ -* [Universal Render Pipeline](index.md) -* [Requirements](requirements.md) - * [Building for Closed platforms](Building-For-Consoles.md) -* [What's new in URP](whats-new/urp-whats-new.md) -* [Features]() - * [Feature list](urp-feature-list.md) - * [Feature Comparison with the Built-in Render Pipeline](universalrp-builtin-feature-comparison.md) -* [Getting started](InstallingAndConfiguringURP.md) - * [Create a project with URP](creating-a-new-project-with-urp.md) - * [Install URP into an existing 
Project](InstallURPIntoAProject.md) - * [Package samples](package-samples.md) - * [URP Package Samples](package-sample-urp-package-samples.md) - * [Scene Templates](scene-templates.md) - * [Quality Settings in URP](birp-onboarding/quality-settings-location.md) - * [Change Quality settings with code](quality/quality-settings-through-code.md) - * [Configure settings with the URP Config package](URP-Config-Package.md) - * [Understand performance](understand-performance.md) - * [Configure for better performance](configure-for-better-performance.md) -* [Render Pipeline Concepts](urp-concepts.md) - * [The URP Asset](universalrp-asset.md) - * [Universal Renderer](urp-universal-renderer.md) - * [Deferred Rendering Path](rendering/deferred-rendering-path.md) - * [Forward+ Rendering Path](rendering/forward-plus-rendering-path.md) - * [Graphics settings window reference in URP](urp-global-settings.md) - * [Pre-built effects (Renderer Features)](urp-renderer-feature.md) - * [How to add a Renderer Feature](urp-renderer-feature-how-to-add.md) - * [Render Objects Renderer Feature](renderer-features/renderer-feature-render-objects-landing.md) - * [Example: How to create a custom rendering effect using the Render Objects Renderer Feature](renderer-features/how-to-custom-effect-render-objects.md) - * [Render Objects Renderer Feature reference](renderer-features/renderer-feature-render-objects.md) - * [Decal Renderer Feature](renderer-feature-decal-landing.md) - * [Decal Renderer Feature](renderer-feature-decal.md) - * [Decal Shader Graph](decal-shader.md) - * [Screen Space Ambient Occlusion (SSAO) Renderer Feature](post-processing-ssao.md) - * [Screen Space Shadows Renderer Feature](renderer-feature-screen-space-shadows.md) - * [Full Screen Pass Renderer Feature](renderer-features/renderer-feature-full-screen-pass-landing.md) - * [How to create a custom post-processing effect](post-processing/post-processing-custom-effect-low-code.md) - * [Full Screen Pass Renderer Feature 
reference](renderer-features/renderer-feature-full-screen-pass.md) -* [Upgrade guides](upgrade-guides.md) - * [Render Pipeline Converter](features/rp-converter.md) - * [Upgrading to URP 17](upgrade-guide-unity-6.md) - * [Upgrading to URP 16](upgrade-guide-2023-2.md) - * [Upgrading to URP 15](upgrade-guide-2023-1.md) - * [Upgrading to URP 14](upgrade-guide-2022-2.md) - * [Upgrading to URP 13](upgrade-guide-2022-1.md) - * [Upgrading to URP 12.0.x](upgrade-guide-2021-2.md) - * [Upgrading to URP 11.0.x](upgrade-guide-11-0-x.md) - * [Upgrading to URP 10.1.x](upgrade-guide-10-1-x.md) - * [Upgrading to URP 10.0.x](upgrade-guide-10-0-x.md) - * [Upgrading to URP 9.0.x](upgrade-guide-9-0-x.md) - * [Upgrading to URP 8.2.0](upgrade-guide-8-2-0.md) - * [Upgrading to URP 8.1.0](upgrade-guide-8-1-0.md) - * [Upgrading to URP 8.0.0](upgrade-guide-8-0-0.md) - * [Upgrading to URP 7.4.0](upgrade-guide-7-4-0.md) - * [Upgrading to URP 7.3.0](upgrade-guide-7-3-0.md) - * [Upgrading to URP 7.2.0](upgrade-guide-7-2-0.md) - * [Upgrading from LWRP to URP](upgrade-lwrp-to-urp.md) -* [Rendering](rendering-in-universalrp.md) - * [Rendering Layers](features/rendering-layers.md) -* [Lighting](lighting.md) - * [Lighting in URP](lighting/lighting-in-urp.md) - * [Light component reference](light-component.md) - * [The Universal Additional Light Data component](universal-additional-light-data.md) - * [Lighting Mode](urp-lighting-mode.md) - * [Shadows in the Universal Render Pipeline](Shadows-in-URP.md) - * [Reflection probes](lighting/reflection-probes.md) - * [View and control a light from its perspective](lights-placement-tool.md) - * [Adaptive Probe Volumes (APV)](probevolumes.md) - * [Understanding Adaptive Probe Volumes](probevolumes-concept.md) - * [Use Adaptive Probe Volumes](probevolumes-use.md) - * [Display Adaptive Probe Volumes](probevolumes-showandadjust.md) - * [Configure the size and density of Adaptive Probe Volumes](probevolumes-changedensity.md) - * [Bake multiple scenes together with 
Baking Sets](probevolumes-usebakingsets.md) - * [Changing lighting at runtime](probe-volumes-change-lighting-at-runtime.md) - * [Choose how to change lighting at runtime](probevolumes-understand-changing-lighting-at-runtime.md) - * [Bake different lighting setups with Lighting Scenarios](probevolumes-bakedifferentlightingsetups.md) - * [Update light from the sky at runtime with sky occlusion](probevolumes-skyocclusion.md) - * [Streaming](probevolumes-streaming.md) - * [Fix issues with Adaptive Probe Volumes](probevolumes-fixissues.md) - * [Adaptive Probe Volume Inspector window reference](probevolumes-inspector-reference.md) - * [Adaptive Probe Volumes panel reference](probevolumes-lighting-panel-reference.md) - * [Probe Volumes Options Override reference](probevolumes-options-override-reference.md) - * [Probe Adjustment Volume component reference](probevolumes-adjustment-volume-component-reference.md) - * [Lens flares](shared/lens-flare/lens-flare.md) - * [Choose a lens flare type](shared/lens-flare/choose-a-lens-flare-type.md) - * [Add lens flares](shared/lens-flare/lens-flare-component.md) - * [Add screen space lens flares](shared/lens-flare/post-processing-screen-space-lens-flare.md) - * [Lens Flare (SRP) reference](shared/lens-flare/lens-flare-srp-reference.md) - * [Lens Flare (SRP) Data Asset reference](shared/lens-flare/lens-flare-asset.md) - * [Screen Space Lens Flare override reference](shared/lens-flare/reference-screen-space-lens-flare.md) -* [Cameras](cameras.md) - * [Cameras in URP](cameras/camera-differences-in-urp.md) - * [Understand camera render order](cameras-advanced.md) - * [Camera render types](camera-types-and-render-type.md) - * [Anti-aliasing in URP](anti-aliasing.md) - * [Motion vectors](features/motion-vectors.md) - * [Use multiple cameras](cameras-multiple.md) - * [Understand camera stacking](cameras/camera-stacking-concepts.md) - * [Set up a camera stack](camera-stacking.md) - * [Add and remove cameras in a camera 
stack](cameras/add-and-remove-cameras-in-a-stack.md) - * [Set up split-screen rendering](rendering-to-the-same-render-target.md) - * [Apply different post processing effects to separate cameras](cameras/apply-different-post-proc-to-cameras.md) - * [Render a camera's output to a Render Texture](rendering-to-a-render-texture.md) - * [Render a camera outside the rendering loop](User-Render-Requests.md) - * [Customize a camera](universal-additional-camera-data.md) - * [Camera component properties](camera-component-reference.md) - * [Physical Camera properties](cameras/physical-camera-reference.md) -* [Post-processing](integration-with-post-processing.md) - * [How to configure](integration-with-post-processing.md#post-proc-how-to) - * [Spatial-Temporal Post-processing](stp/stp-upscaler.md) - * [Spatial-Temporal Post-processing debug views](stp/stp-debug-views.md) - * [HDR Output](post-processing/hdr-output.md) - * [Custom Passes with HDR Output](post-processing/hdr-custom-pass.md) - * [Implement an HDR Output compatible custom overlay](post-processing/hdr-output-implement-custom-overlay.md) - * [Volumes](volumes-landing-page.md) - * [Understand Volumes](Volumes.md) - * [Set up a Volume](set-up-a-volume.md) - * [Create a Volume Profile](Volume-Profile.md) - * [Configure Volume Overrides](VolumeOverrides.md) - * [Volume component reference](volume-component-reference.md) - * [Effect List](EffectList.md) - * [Ambient Occlusion](post-processing-ssao.md) - * [Bloom](post-processing-bloom.md) - * [Channel Mixer](Post-Processing-Channel-Mixer.md) - * [Chromatic Aberration](post-processing-chromatic-aberration.md) - * [Color Adjustments](Post-Processing-Color-Adjustments.md) - * [Color Curves](Post-Processing-Color-Curves.md) - * [Depth of Field](post-processing-depth-of-field.md) - * [Film Grain](Post-Processing-Film-Grain.md) - * [Lens Distortion](Post-Processing-Lens-Distortion.md) - * [Lift, Gamma, and Gain](Post-Processing-Lift-Gamma-Gain.md) - * [Motion 
Blur](Post-Processing-Motion-Blur.md) - * [Panini Projection](Post-Processing-Panini-Projection.md) - * [Screen Space Lens Flare](shared/lens-flare/post-processing-screen-space-lens-flare.md) - * [Shadows Midtones Highlights](Post-Processing-Shadows-Midtones-Highlights.md) - * [Split Toning](Post-Processing-Split-Toning.md) - * [Tonemapping](post-processing-tonemapping.md) - * [Vignette](post-processing-vignette.md) - * [White Balance](Post-Processing-White-Balance.md) - * [Lens Flare](shared/lens-flare/lens-flare-component.md) - * [Custom Post-processing](post-processing/custom-post-processing.md) - * [How to create a custom post-processing effect](post-processing/post-processing-custom-effect-low-code.md) - -* [Shaders and Materials](shaders-in-universalrp.md) - * [Shading Models](shading-model.md) - * [Material Variants](materialvariant-URP.md) - * [Complex Lit](shader-complex-lit.md) - * [Lit](lit-shader.md) - * [Simple Lit](simple-lit-shader.md) - * [Baked Lit](baked-lit-shader.md) - * [Unlit](unlit-shader.md) - * [Terrain Lit](shader-terrain-lit.md) - * [Particles Lit](particles-lit-shader.md) - * [Particles Simple Lit](particles-simple-lit-shader.md) - * [Particles Unlit](particles-unlit-shader.md) - * [Decal](decal-shader.md) - * [Fullscreen](urp-shaders/master-stack-fullscreen.md) - * [Canvas](canvas-shader.md) - * [Upgrading shaders from Built-in](upgrading-your-shaders.md) - * [Upgrade custom shaders for URP compatibility](urp-shaders/birp-urp-custom-shader-upgrade-guide.md) - * [Shader stripping](shader-stripping.md) - * [Writing custom shaders](writing-custom-shaders-urp.md) - * [Creating a sample scene](writing-shaders-urp-basic-prerequisites.md) - * [URP basic unlit shader](writing-shaders-urp-basic-unlit-structure.md) - * [URP unlit shader with color input](writing-shaders-urp-unlit-color.md) - * [Drawing a texture](writing-shaders-urp-unlit-texture.md) - * [Visualizing normal vectors](writing-shaders-urp-unlit-normals.md) - * [Reconstruct the world 
space positions](writing-shaders-urp-reconstruct-world-position.md) - * [Shader methods in URP](use-built-in-shader-methods.md) - * [Import a file from the URP shader library](use-built-in-shader-methods-import.md) - * [Transform positions in a custom URP shader](use-built-in-shader-methods-transformations.md) - * [Use the camera in a custom URP shader](use-built-in-shader-methods-camera.md) - * [Use lighting in a custom URP shader](use-built-in-shader-methods-lighting.md) - * [Use shadows in a custom URP shader](use-built-in-shader-methods-shadows.md) - * [URP ShaderLab Pass tags](urp-shaders/urp-shaderlab-pass-tags.md) -* [Custom rendering and post-processing](customizing-urp.md) - * [Custom render pass workflow in URP](renderer-features/custom-rendering-pass-workflow-in-urp.md) - * [Scriptable Render Passes](renderer-features/scriptable-render-passes.md) - * [Introduction to Scriptable Render Passes](renderer-features/intro-to-scriptable-render-passes.md) - * [Render graph system](render-graph.md) - * [Introduction to the render graph system](render-graph-introduction.md) - * [Write a render pass using the render graph system](render-graph-write-render-pass.md) - * [Use textures](working-with-textures.md) - * [Create a render graph system texture](render-graph-create-a-texture.md) - * [Import a texture into the render graph system](render-graph-import-a-texture.md) - * [Access a texture in a custom render pass](render-graph-read-write-texture.md) - * [Transfer a texture between render passes](render-graph-pass-textures-between-passes.md) - * [URP blit best practices](customize/blit-overview.md) - * [Use frame data](render-graph-frame-data.md) - * [Get data from the current frame](accessing-frame-data.md) - * [Get data from previous frames](render-graph-get-previous-frames.md) - * [Add textures to previous frames](render-graph-add-textures-to-previous-frames.md) - * [Get the current framebuffer with framebuffer fetch](render-graph-framebuffer-fetch.md) - * [Draw 
objects in a render pass](render-graph-draw-objects-in-a-pass.md) - * [Use a compute shader in a render pass](render-graph-compute-shader.md) - * [Run a compute shader in a render pass](render-graph-compute-shader-run.md) - * [Create input data for a compute shader](render-graph-compute-shader-input.md) - * [Analyze a render graph](render-graph-view.md) - * [Use Compatibility Mode APIs in render graph render passes](render-graph-unsafe-pass.md) - * [Render Graph Viewer window reference](render-graph-viewer-reference.md) - * [Adding a Scriptable Render Pass to the frame rendering loop](inject-a-render-pass.md) - * [Scriptable Renderer Features](renderer-features/scriptable-renderer-features/scriptable-renderer-features-landing.md) - * [Introduction to Scriptable Renderer Features](renderer-features/scriptable-renderer-features/intro-to-scriptable-renderer-features.md) - * [Inject a custom render pass using a Scriptable Renderer Feature](renderer-features/scriptable-renderer-features/inject-a-pass-using-a-scriptable-renderer-feature.md) - * [Apply a Scriptable Renderer Feature to a specific camera type](renderer-features/scriptable-renderer-features/apply-scriptable-feature-to-specific-camera.md) - * [Example of a complete Scriptable Renderer Feature](renderer-features/create-custom-renderer-feature.md) - * [Scriptable Renderer Feature API reference](renderer-features/scriptable-renderer-features/scriptable-renderer-feature-reference.md) - * [Inject a render pass via scripting](customize/inject-render-pass-via-script.md) - * [Injection points reference](customize/custom-pass-injection-points.md) - * [Custom URP post-processing effect](post-processing/custom-post-processing-with-volume.md) - * [Compatibility mode](compatibility-mode.md) - * [Write a Scriptable Render Pass in Compatibility Mode](renderer-features/write-a-scriptable-render-pass.md) - * [Example of a complete Scriptable Renderer Feature in Compatibility 
Mode](renderer-features/create-custom-renderer-feature-compatibility-mode.md) - * [Scriptable Render Pass Compatibility Mode API reference](renderer-features/scriptable-renderer-features/scriptable-render-pass-reference.md) - * [Perform a full screen blit in URP in Compatibility mode](renderer-features/how-to-fullscreen-blit.md) -* [Optimization](urp-optimization.md) - * [Rendering Debugger](features/rendering-debugger.md) - * [Use the Rendering Debugger](features/rendering-debugger-use.md) - * [Add controls to the Rendering Debugger](features/rendering-debugger-add-controls.md) - * [Rendering Debugger reference](features/rendering-debugger-reference.md) - * [Optimize for better performance](optimize-for-better-performance.md) - * [Reduce rendering work on the CPU](reduce-rendering-work-on-cpu.md) - * [Use the GPU Resident Drawer](gpu-resident-drawer.md) - * [Make a GameObject compatible with the GPU Resident Drawer](make-object-compatible-gpu-rendering.md) - * [Use GPU occlusion culling](gpu-culling.md) - * [Update Quality Setting Presets for URP](birp-onboarding/quality-presets.md) - * [Optimization techniques for untethered XR devices](xr-untethered-device-optimization.md) -* [2D graphics features](2d-index.md) - * [Introduction to the 2D Lighting system](Lights-2D-intro.md) - * [Requirements and setup](Setup.md) - * [Configure the 2D Renderer Asset](2DRendererData-overview.md) - * [HDR emulation scale](HDREmulationScale.md) - * [Light Blend Styles](LightBlendStyles.md) - * [Using the Tilemap Renderer with URP 2D](2d/tilemap-renderer-2d-renderer.md) - * [Prepare sprites and upgrade projects for lighting](PrepShader.md) - * [Normal map and mask Textures](SecondaryTextures.md) - * [Common 2D Light Type properties](2DLightProperties.md) - * [Specific 2D Light Type properties](LightTypes.md) - * [Using the Tilemap Renderer with URP 2D](2d/tilemap-renderer-2d-renderer.md) - * [Create shadows with Shadow Caster 2D](2DShadows.md) - * [Create a 2D sprite Shader 
Graph](ShaderGraph.md) - * [Light a Visual Effect Graph asset](2d-visual-effect-graph-compatibility.md) - * [Using the Light Batching Debugger](2d-light-batching-debugger.md) - * [2D Pixel Perfect](2d-pixelperfect.md) - * [Using the Cinemachine Pixel Perfect extension](pixel-cinemachine.md) -* [Frequently asked questions (FAQ)](faq.md) -* [Known issues](known-issues.md) +* [The content has moved to the Unity Manual](index.md) diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/features/rendering-debugger-reference.md b/Packages/com.unity.render-pipelines.universal/Documentation~/features/rendering-debugger-reference.md index 1388ec56c5c..3a4fa8246f7 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/features/rendering-debugger-reference.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/features/rendering-debugger-reference.md @@ -122,10 +122,10 @@ The properties in this section let you visualize different Material properties. #### Material Filters -| **Property** | **Description** | -| --- | --- | -| **Material Override** | Select a Material property to visualize on every GameObject on screen.<br/>
The available options are:
  • Albedo
  • Specular
  • Alpha
  • Smoothness
  • AmbientOcclusion
  • Emission
  • NormalWorldSpace
  • NormalTangentSpace
  • LightingComplexity
  • Metallic
  • SpriteMask
With the **LightingComplexity** value selected, Unity shows how many Lights affect areas of the screen space. | -| **Vertex Attribute** | Select a vertex attribute of GameObjects to visualize on screen.
The available options are:
  • Texcoord0
  • Texcoord1
  • Texcoord2
  • Texcoord3
  • Color
  • Tangent
  • Normal
| +| **Property** | **Description** | +| --- |---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| **Material Override** | Select a Material property to visualize on every GameObject on screen.
The available options are:
  • Albedo
  • Specular
  • Alpha
  • Smoothness
  • AmbientOcclusion
  • Emission
  • NormalWorldSpace
  • NormalTangentSpace
  • LightingComplexity
  • Metallic
  • SpriteMask
  • RenderingLayerMasks
With the **LightingComplexity** value selected, Unity shows how many Lights affect areas of the screen space.
With the **RenderingLayerMasks** value selected, you can filter the layers you want to debug either manually using the **Filter Layers** option or by selecting a light with the **Filter Rendering Layers by Light** option. Additionally, you can override the debug colors using **Layers Color**. | +| **Vertex Attribute** | Select a vertex attribute of GameObjects to visualize on screen.
The available options are:
  • Texcoord0
  • Texcoord1
  • Texcoord2
  • Texcoord3
  • Color
  • Tangent
  • Normal
| #### Material Validation diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/index.md b/Packages/com.unity.render-pipelines.universal/Documentation~/index.md index 59d97e58a82..15966e444e1 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/index.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/index.md @@ -1,24 +1,8 @@ --- uid: urp-index --- -# Universal Render Pipeline overview +# The documentation has moved to the Unity Manual -![Universal Render Pipeline in action](Images/AssetShots/Beauty/Overview.png) +The documentation for the Universal Render Pipeline (URP) in Unity 6 Preview has moved to the [Unity Manual](https://docs.unity3d.com/6000.0/Documentation/Manual/urp/urp-introduction.html). -The Universal Render Pipeline (URP) is a prebuilt Scriptable Render Pipeline, made by Unity. URP provides artist-friendly workflows that let you quickly and easily create optimized graphics across a range of platforms, from mobile to high-end consoles and PCs. - -## Requirements - -For information about requirements and compatibility, refer to the [Requirements](requirements.md). - -## What's new in URP - -For information on what's new in the latest version of URP, refer to [What's new in URP](whats-new/urp-whats-new.md). - -## Getting started with URP - -For information on starting a new URP Project from scratch, or about installing URP in an existing Unity Project, check [Getting started](InstallingAndConfiguringURP.md). - -## Upgrading - -For information on upgrading from a previous version of URP to the current version, or for information about upgrading from the Lightweight Render Pipeline (LWRP) to URP, refer to [Upgrade guides](upgrade-guides.md). +The [scripting API](https://docs.unity3d.com/Packages/com.unity.render-pipelines.universal@17.0/api/index.html) documentation is still on this website. 
diff --git a/Packages/com.unity.render-pipelines.universal/Documentation~/urp-universal-renderer.md b/Packages/com.unity.render-pipelines.universal/Documentation~/urp-universal-renderer.md index 1a27e327db6..54375c7eaa1 100644 --- a/Packages/com.unity.render-pipelines.universal/Documentation~/urp-universal-renderer.md +++ b/Packages/com.unity.render-pipelines.universal/Documentation~/urp-universal-renderer.md @@ -91,7 +91,7 @@ This section contains properties related to URP's Native RenderPass API. | Property | Description | |:-|:-| -| **Native RenderPass** | Indicates whether to use URP's Native RenderPass API. When enabled, URP uses this API to structure render passes. As a result, you can use [programmable blending](https://docs.unity3d.com/Manual/SL-PlatformDifferences.html#using-shader-framebuffer-fetch) in custom URP shaders. For more information about the RenderPass API, refer to [ScriptableRenderContext.BeginRenderPass](https://docs.unity3d.com/ScriptReference/Rendering.ScriptableRenderContext.BeginRenderPass.html).

**Note**: Enabling this property has no effect on OpenGL ES. | +| **Native RenderPass** | Indicates whether to use URP's Native RenderPass API. When enabled, URP uses this API to structure render passes. As a result, you can use [programmable blending](https://docs.unity3d.com/Manual/SL-PlatformDifferences.html#using-shader-framebuffer-fetch) in custom URP shaders. For more information about the RenderPass API, refer to [ScriptableRenderContext.BeginRenderPass](https://docs.unity3d.com/ScriptReference/Rendering.ScriptableRenderContext.BeginRenderPass.html).

**Note**: Enabling this property has no effect on OpenGL ES.
**Note**: This property has no effect in the Unity Editor. It works only in the built Player (standalone player).| ### Shadows diff --git a/Packages/com.unity.render-pipelines.universal/Editor/2D/ShaderGraph/Targets/SpriteSubTargetUtility.cs b/Packages/com.unity.render-pipelines.universal/Editor/2D/ShaderGraph/Targets/SpriteSubTargetUtility.cs index 63ae3cd1108..2ee799a6bd5 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/2D/ShaderGraph/Targets/SpriteSubTargetUtility.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/2D/ShaderGraph/Targets/SpriteSubTargetUtility.cs @@ -2,11 +2,28 @@ using System.Linq; using UnityEditor.ShaderGraph; using UnityEngine.UIElements; +using static UnityEditor.Rendering.Universal.ShaderGraph.UniversalTarget; namespace UnityEditor.Rendering.Universal.ShaderGraph { internal static class SpriteSubTargetUtility { + public static RenderStateCollection GetDefaultRenderState(UniversalTarget target) + { + var result = CoreRenderStates.Default; + + // Add Z write + if (target.zWriteControl == ZWriteControl.ForceEnabled) + result.Add(RenderState.ZWrite(ZWrite.On)); + else + result.Add(RenderState.ZWrite(ZWrite.Off), new FieldCondition(UniversalFields.SurfaceTransparent, true)); + + // Add Z test + result.Add(RenderState.ZTest(target.zTestMode.ToString())); + + return result; + } + public static void AddDefaultFields(ref TargetFieldContext context, UniversalTarget target) { // Only support SpriteColor legacy block if BaseColor/Alpha are not active @@ -58,6 +75,29 @@ public static void AddDefaultPropertiesGUI(ref TargetPropertyGUIContext context, onChange(); }); + context.AddProperty("Depth Write", new EnumField(ZWriteControl.ForceDisabled) { value = target.zWriteControl }, (evt) => + { + if (Equals(target.zWriteControl, evt.newValue)) + return; + + registerUndo("Change Depth Write Control"); + target.zWriteControl = (ZWriteControl)evt.newValue; + onChange(); + }); + + if (target.zWriteControl == ZWriteControl.ForceEnabled) + { + 
context.AddProperty("Depth Test", new EnumField(ZTestModeForUI.LEqual) { value = (ZTestModeForUI)target.zTestMode }, (evt) => + { + if (Equals(target.zTestMode, evt.newValue)) + return; + + registerUndo("Change Depth Test"); + target.zTestMode = (ZTestMode)evt.newValue; + onChange(); + }); + } + context.AddProperty("Alpha Clipping", new Toggle() { value = target.alphaClip }, (evt) => { if (Equals(target.alphaClip, evt.newValue)) diff --git a/Packages/com.unity.render-pipelines.universal/Editor/2D/ShaderGraph/Targets/UniversalSpriteCustomLitSubTarget.cs b/Packages/com.unity.render-pipelines.universal/Editor/2D/ShaderGraph/Targets/UniversalSpriteCustomLitSubTarget.cs index f59055c488b..733351d7123 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/2D/ShaderGraph/Targets/UniversalSpriteCustomLitSubTarget.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/2D/ShaderGraph/Targets/UniversalSpriteCustomLitSubTarget.cs @@ -1,7 +1,5 @@ using System; -using System.Linq; using UnityEditor.ShaderGraph; - using Unity.Rendering.Universal; namespace UnityEditor.Rendering.Universal.ShaderGraph @@ -108,7 +106,7 @@ public static PassDescriptor Lit(UniversalTarget target) fieldDependencies = CoreFieldDependencies.Default, // Conditional State - renderStates = CoreRenderStates.Default, + renderStates = SpriteSubTargetUtility.GetDefaultRenderState(target), pragmas = CorePragmas._2DDefault, defines = new DefineCollection(), keywords = SpriteLitKeywords.Lit, diff --git a/Packages/com.unity.render-pipelines.universal/Editor/2D/ShaderGraph/Targets/UniversalSpriteLitSubTarget.cs b/Packages/com.unity.render-pipelines.universal/Editor/2D/ShaderGraph/Targets/UniversalSpriteLitSubTarget.cs index 13ed32fdffc..db890426073 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/2D/ShaderGraph/Targets/UniversalSpriteLitSubTarget.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/2D/ShaderGraph/Targets/UniversalSpriteLitSubTarget.cs @@ -2,9 +2,7 @@ using 
System.Collections.Generic; using UnityEditor.ShaderGraph; using UnityEditor.ShaderGraph.Legacy; - using Unity.Rendering.Universal; -using UnityEngine; namespace UnityEditor.Rendering.Universal.ShaderGraph { @@ -131,7 +129,7 @@ public static PassDescriptor Lit(UniversalTarget target) fieldDependencies = CoreFieldDependencies.Default, // Conditional State - renderStates = CoreRenderStates.Default, + renderStates = SpriteSubTargetUtility.GetDefaultRenderState(target), pragmas = CorePragmas._2DDefault, defines = new DefineCollection(), keywords = SpriteLitKeywords.Lit, diff --git a/Packages/com.unity.render-pipelines.universal/Editor/2D/ShaderGraph/Targets/UniversalSpriteUnlitSubTarget.cs b/Packages/com.unity.render-pipelines.universal/Editor/2D/ShaderGraph/Targets/UniversalSpriteUnlitSubTarget.cs index 95432efcf33..71969e5b8b5 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/2D/ShaderGraph/Targets/UniversalSpriteUnlitSubTarget.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/2D/ShaderGraph/Targets/UniversalSpriteUnlitSubTarget.cs @@ -1,9 +1,7 @@ using System; -using System.Linq; using System.Collections.Generic; using UnityEditor.ShaderGraph; using UnityEditor.ShaderGraph.Legacy; - using Unity.Rendering.Universal; namespace UnityEditor.Rendering.Universal.ShaderGraph @@ -126,7 +124,7 @@ public static PassDescriptor Unlit(UniversalTarget target) fieldDependencies = CoreFieldDependencies.Default, // Conditional State - renderStates = CoreRenderStates.Default, + renderStates = SpriteSubTargetUtility.GetDefaultRenderState(target), pragmas = CorePragmas._2DDefault, defines = new DefineCollection(), keywords = SpriteUnlitKeywords.Unlit, diff --git a/Packages/com.unity.render-pipelines.universal/Editor/RendererFeatures/RenderObjectsPassFeatureEditor.cs b/Packages/com.unity.render-pipelines.universal/Editor/RendererFeatures/RenderObjectsPassFeatureEditor.cs index 099c18e6bc6..5b2861bfa4b 100644 --- 
a/Packages/com.unity.render-pipelines.universal/Editor/RendererFeatures/RenderObjectsPassFeatureEditor.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/RendererFeatures/RenderObjectsPassFeatureEditor.cs @@ -300,7 +300,7 @@ void DoCameraOverride(ref Rect rect) EditorGUI.BeginChangeCheck(); var newOffset = EditorGUI.Vector3Field(rect, Styles.positionOffset, new Vector3(offset.x, offset.y, offset.z)); if (EditorGUI.EndChangeCheck()) - m_CameraOffset.vector4Value = new Vector4(newOffset.x, newOffset.y, newOffset.z, 1f); + m_CameraOffset.vector4Value = new Vector4(newOffset.x, newOffset.y, newOffset.z, 0f); rect.y += Styles.defaultLineSpace; //Restore prev camera projections EditorGUI.PropertyField(rect, m_RestoreCamera, Styles.restoreCamera); diff --git a/Packages/com.unity.render-pipelines.universal/Editor/ShaderBuildPreprocessor.cs b/Packages/com.unity.render-pipelines.universal/Editor/ShaderBuildPreprocessor.cs index 664d3810b75..6432e9e1169 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/ShaderBuildPreprocessor.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/ShaderBuildPreprocessor.cs @@ -93,6 +93,22 @@ enum VolumeFeatures } + /// + /// This class is used solely to make sure Shader Prefiltering data inside the + /// URP Assets get updated before anything (Like Asset Bundles) are built. + /// + class UpdateShaderPrefilteringDataBeforeBuild : IPreprocessShaders + { + public int callbackOrder => -100; + + public UpdateShaderPrefilteringDataBeforeBuild() + { + ShaderBuildPreprocessor.GatherShaderFeatures(Debug.isDebugBuild); + } + + public void OnProcessShader(Shader shader, ShaderSnippetData snippetData, IList compilerDataList){} + } + /// /// Preprocess Build class used to determine the shader features used in the project. /// Also called when building Asset Bundles. 
@@ -223,7 +239,7 @@ public void OnPostprocessBuild(BuildReport report) // Gathers all the shader features and updates the prefiltering // settings for all URP Assets in the quality settings - private static void GatherShaderFeatures(bool isDevelopmentBuild) + internal static void GatherShaderFeatures(bool isDevelopmentBuild) { GetGlobalAndPlatformSettings(isDevelopmentBuild); GetSupportedFeaturesFromVolumes(); diff --git a/Packages/com.unity.render-pipelines.universal/Editor/ShaderGraph/Includes/ShaderPassDecal.hlsl b/Packages/com.unity.render-pipelines.universal/Editor/ShaderGraph/Includes/ShaderPassDecal.hlsl index ff333bee9d2..1aee62bd95f 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/ShaderGraph/Includes/ShaderPassDecal.hlsl +++ b/Packages/com.unity.render-pipelines.universal/Editor/ShaderGraph/Includes/ShaderPassDecal.hlsl @@ -248,14 +248,7 @@ void Frag(PackedVaryings packedInput, half3 normalWS = half3(LoadSceneNormals(positionCS.xy)); #endif - float2 positionSS = input.positionCS.xy * _ScreenSize.zw; - -#if defined(SUPPORTS_FOVEATED_RENDERING_NON_UNIFORM_RASTER) - UNITY_BRANCH if (_FOVEATED_RENDERING_NON_UNIFORM_RASTER) - { - positionSS = RemapFoveatedRenderingNonUniformToLinearCS(input.positionCS.xy, true) * _ScreenSize.zw; - } -#endif + float2 positionSS = FoveatedRemapNonUniformToLinearCS(input.positionCS.xy) * _ScreenSize.zw; #ifdef DECAL_PROJECTOR float3 positionWS = ComputeWorldSpacePosition(positionSS, depth, UNITY_MATRIX_I_VP); diff --git a/Packages/com.unity.render-pipelines.universal/Editor/ShaderGraph/Targets/UniversalTarget.cs b/Packages/com.unity.render-pipelines.universal/Editor/ShaderGraph/Targets/UniversalTarget.cs index 7dfd89564e7..4d863ecc5e4 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/ShaderGraph/Targets/UniversalTarget.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/ShaderGraph/Targets/UniversalTarget.cs @@ -514,7 +514,7 @@ public override void GetPropertiesGUI(ref 
TargetPropertyGUIContext context, Acti } // this is a copy of ZTestMode, but hides the "Disabled" option, which is invalid - enum ZTestModeForUI + internal enum ZTestModeForUI { Never = 1, Less = 2, @@ -1433,9 +1433,6 @@ public static class Uniforms // used by sprite targets, NOT used by lit/unlit anymore public static readonly RenderStateCollection Default = new RenderStateCollection { - { RenderState.ZTest(ZTest.LEqual) }, - { RenderState.ZWrite(ZWrite.On), new FieldCondition(UniversalFields.SurfaceOpaque, true) }, - { RenderState.ZWrite(ZWrite.Off), new FieldCondition(UniversalFields.SurfaceTransparent, true) }, { RenderState.Cull(Cull.Back), new FieldCondition(Fields.DoubleSided, false) }, { RenderState.Cull(Cull.Off), new FieldCondition(Fields.DoubleSided, true) }, { RenderState.Blend(Blend.One, Blend.Zero), new FieldCondition(UniversalFields.SurfaceOpaque, true) }, diff --git a/Packages/com.unity.render-pipelines.universal/Editor/UniversalRenderPipelineMaterialUpgrader.cs b/Packages/com.unity.render-pipelines.universal/Editor/UniversalRenderPipelineMaterialUpgrader.cs index f2c13d645d5..15f93c2d219 100644 --- a/Packages/com.unity.render-pipelines.universal/Editor/UniversalRenderPipelineMaterialUpgrader.cs +++ b/Packages/com.unity.render-pipelines.universal/Editor/UniversalRenderPipelineMaterialUpgrader.cs @@ -290,6 +290,12 @@ public override void OnClicked(int index) { EditorGUIUtility.PingObject(AssetDatabase.LoadAssetAtPath(m_AssetsToConvert[index])); } + + internal static void DisableKeywords(Material material) + { + // LOD fade is now controlled by the render pipeline, and not the individual material, so disable it. 
+ material.DisableKeyword("LOD_FADE_CROSSFADE"); + } } /// @@ -457,6 +463,7 @@ public static void UpdateStandardMaterialKeywords(Material material) UpdateSurfaceTypeAndBlendMode(material); UpdateDetailScaleOffset(material); BaseShaderGUI.SetupMaterialBlendMode(material); + UniversalRenderPipelineMaterialUpgrader.DisableKeywords(material); } /// @@ -481,6 +488,7 @@ public static void UpdateStandardSpecularMaterialKeywords(Material material) UpdateSurfaceTypeAndBlendMode(material); UpdateDetailScaleOffset(material); BaseShaderGUI.SetupMaterialBlendMode(material); + UniversalRenderPipelineMaterialUpgrader.DisableKeywords(material); } static void UpdateDetailScaleOffset(Material material) @@ -592,6 +600,7 @@ internal static void UpdateMaterialKeywords(Material material) MaterialEditor.FixupEmissiveFlag(material); bool shouldEmissionBeEnabled = (material.globalIlluminationFlags & MaterialGlobalIlluminationFlags.EmissiveIsBlack) == 0; CoreUtils.SetKeyword(material, "_EMISSION", shouldEmissionBeEnabled); + UniversalRenderPipelineMaterialUpgrader.DisableKeywords(material); } private static void UpdateMaterialSpecularSource(Material material) @@ -625,22 +634,23 @@ public class TerrainUpgrader : MaterialUpgrader /// The name of the old shader. 
public TerrainUpgrader(string oldShaderName) { - RenameShader(oldShaderName, ShaderUtils.GetShaderPath(ShaderPathID.TerrainLit)); + RenameShader(oldShaderName, ShaderUtils.GetShaderPath(ShaderPathID.TerrainLit), UniversalRenderPipelineMaterialUpgrader.DisableKeywords); } + } internal class SpeedTreeUpgrader : MaterialUpgrader { internal SpeedTreeUpgrader(string oldShaderName) { - RenameShader(oldShaderName, ShaderUtils.GetShaderPath(ShaderPathID.SpeedTree7)); + RenameShader(oldShaderName, ShaderUtils.GetShaderPath(ShaderPathID.SpeedTree7), UniversalRenderPipelineMaterialUpgrader.DisableKeywords); } } internal class SpeedTreeBillboardUpgrader : MaterialUpgrader { internal SpeedTreeBillboardUpgrader(string oldShaderName) { - RenameShader(oldShaderName, ShaderUtils.GetShaderPath(ShaderPathID.SpeedTree7Billboard)); + RenameShader(oldShaderName, ShaderUtils.GetShaderPath(ShaderPathID.SpeedTree7Billboard), UniversalRenderPipelineMaterialUpgrader.DisableKeywords); } } @@ -684,6 +694,7 @@ public ParticleUpgrader(string oldShaderName) public static void UpdateStandardSurface(Material material) { UpdateSurfaceBlendModes(material); + UniversalRenderPipelineMaterialUpgrader.DisableKeywords(material); } /// @@ -693,6 +704,7 @@ public static void UpdateStandardSurface(Material material) public static void UpdateUnlit(Material material) { UpdateSurfaceBlendModes(material); + UniversalRenderPipelineMaterialUpgrader.DisableKeywords(material); } /// @@ -749,7 +761,7 @@ public class AutodeskInteractiveUpgrader : MaterialUpgrader /// The name of the old shader. 
public AutodeskInteractiveUpgrader(string oldShaderName) { - RenameShader(oldShaderName, "Universal Render Pipeline/Autodesk Interactive/AutodeskInteractive"); + RenameShader(oldShaderName, "Universal Render Pipeline/Autodesk Interactive/AutodeskInteractive", UniversalRenderPipelineMaterialUpgrader.DisableKeywords); } /// diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Passes/Utility/Light2DLookupTexture.cs b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Passes/Utility/Light2DLookupTexture.cs index 57c7ad0ba02..6d80e1d0a22 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Passes/Utility/Light2DLookupTexture.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Passes/Utility/Light2DLookupTexture.cs @@ -4,8 +4,49 @@ namespace UnityEngine.Rendering.Universal { internal static class Light2DLookupTexture { + internal static readonly string k_LightLookupProperty = "_LightLookup"; + internal static readonly string k_FalloffLookupProperty = "_FalloffLookup"; + internal static readonly int k_LightLookupID = Shader.PropertyToID(k_LightLookupProperty); + internal static readonly int k_FalloffLookupID = Shader.PropertyToID(k_FalloffLookupProperty); + private static Texture2D s_PointLightLookupTexture; private static Texture2D s_FalloffLookupTexture; + private static RTHandle m_LightLookupRTHandle = null; + private static RTHandle m_FalloffRTHandle = null; + + public static RTHandle GetLightLookupTexture_Rendergraph() + { + if (s_PointLightLookupTexture == null || m_LightLookupRTHandle == null) + { + var lightLookupTexture = GetLightLookupTexture(); + + m_LightLookupRTHandle?.Release(); + m_LightLookupRTHandle = RTHandles.Alloc(lightLookupTexture); + } + + return m_LightLookupRTHandle; + } + + public static RTHandle GetFallOffLookupTexture_Rendergraph() + { + if (s_FalloffLookupTexture == null || m_FalloffRTHandle == null) + { + var fallOffLookupTexture = GetFalloffLookupTexture(); + + 
m_FalloffRTHandle?.Release(); + m_FalloffRTHandle = RTHandles.Alloc(fallOffLookupTexture); + } + + return m_FalloffRTHandle; + } + + public static void Release() + { + m_FalloffRTHandle?.Release(); + m_LightLookupRTHandle?.Release(); + m_FalloffRTHandle = null; + m_LightLookupRTHandle = null; + } public static Texture GetLightLookupTexture() { @@ -14,13 +55,13 @@ public static Texture GetLightLookupTexture() return s_PointLightLookupTexture; } - public static Texture GetFallOffLookupTexture() + public static Texture GetFalloffLookupTexture() { if (s_FalloffLookupTexture == null) s_FalloffLookupTexture = CreateFalloffLookupTexture(); return s_FalloffLookupTexture; - } + } private static Texture2D CreatePointLightLookupTexture() { @@ -34,6 +75,7 @@ private static Texture2D CreatePointLightLookupTexture() textureFormat = GraphicsFormat.R32G32B32A32_SFloat; var texture = new Texture2D(WIDTH, HEIGHT, textureFormat, TextureCreationFlags.None); + texture.name = k_LightLookupProperty; texture.filterMode = FilterMode.Bilinear; texture.wrapMode = TextureWrapMode.Clamp; var center = new Vector2(WIDTH / 2.0f, HEIGHT / 2.0f); @@ -82,6 +124,7 @@ private static Texture2D CreateFalloffLookupTexture() const GraphicsFormat textureFormat = GraphicsFormat.R8G8B8A8_SRGB; var texture = new Texture2D(WIDTH, HEIGHT - 64, textureFormat, TextureCreationFlags.None); + texture.name = k_FalloffLookupProperty; texture.filterMode = FilterMode.Bilinear; texture.wrapMode = TextureWrapMode.Clamp; for (var y = 0; y < HEIGHT; y++) diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Passes/Utility/RendererLighting.cs b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Passes/Utility/RendererLighting.cs index 20ab8a8fd0a..2d944974328 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Passes/Utility/RendererLighting.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Passes/Utility/RendererLighting.cs @@ -705,7 +705,6 @@ private static Material 
CreateLightMaterial(Renderer2DData rendererData, Light2D SetBlendModes(material, BlendMode.One, BlendMode.One); else { - material.SetInt("_HandleZTest", (int)CompareFunction.Disabled); SetBlendModes(material, BlendMode.SrcAlpha, BlendMode.One); } } diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/CopyCameraSortingLayerPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/CopyCameraSortingLayerPass.cs index 3932dd6c8a3..675b02c1d0f 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/CopyCameraSortingLayerPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/CopyCameraSortingLayerPass.cs @@ -1,5 +1,4 @@ using System; -using UnityEngine.Experimental.Rendering; using UnityEngine.Rendering.RenderGraphModule; namespace UnityEngine.Rendering.Universal diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/DrawLight2DPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/DrawLight2DPass.cs index 27d64342f36..d5e57cdf532 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/DrawLight2DPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/DrawLight2DPass.cs @@ -1,6 +1,5 @@ using System; using System.Collections.Generic; -using UnityEngine.Experimental.Rendering; using UnityEngine.Rendering.RenderGraphModule; using CommonResourceData = UnityEngine.Rendering.Universal.UniversalResourceData; @@ -18,34 +17,13 @@ internal class DrawLight2DPass : ScriptableRenderPass internal static readonly int k_InverseHDREmulationScaleID = Shader.PropertyToID("_InverseHDREmulationScale"); internal static readonly string k_NormalMapID = "_NormalMap"; internal static readonly string k_ShadowMapID = "_ShadowTex"; - internal static readonly string k_LightLookupID = "_LightLookup"; - internal static readonly string k_FalloffLookupID = "_FalloffLookup"; TextureHandle[] 
intermediateTexture = new TextureHandle[1]; - internal static RTHandle m_FallOffRTHandle = null; - internal static RTHandle m_LightLookupRTHandle = null; - private int lightLookupInstanceID; - private int fallOffLookupInstanceID; internal static MaterialPropertyBlock s_PropertyBlock = new MaterialPropertyBlock(); public void Setup(RenderGraph renderGraph, ref Renderer2DData rendererData) { - // Reallocate external texture if needed - var fallOffLookupTexture = Light2DLookupTexture.GetFallOffLookupTexture(); - if (fallOffLookupInstanceID != fallOffLookupTexture.GetInstanceID()) - { - m_FallOffRTHandle = RTHandles.Alloc(fallOffLookupTexture); - fallOffLookupInstanceID = fallOffLookupTexture.GetInstanceID(); - } - - var lightLookupTexture = Light2DLookupTexture.GetLightLookupTexture(); - if (lightLookupInstanceID != lightLookupTexture.GetInstanceID()) - { - m_LightLookupRTHandle = RTHandles.Alloc(lightLookupTexture); - lightLookupInstanceID = lightLookupTexture.GetInstanceID(); - } - foreach (var light in rendererData.lightCullResult.visibleLights) { if (light.useCookieSprite && light.m_CookieSpriteTexture != null) @@ -53,12 +31,6 @@ public void Setup(RenderGraph renderGraph, ref Renderer2DData rendererData) } } - public void Dispose() - { - m_FallOffRTHandle?.Release(); - m_LightLookupRTHandle?.Release(); - } - [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)] public override void Execute(ScriptableRenderContext context, ref RenderingData renderingData) { @@ -109,10 +81,6 @@ private static void Execute(RasterCommandBuffer cmd, PassData passData, ref Laye if (breakBatch && LightBatch.isBatchingSupported) RendererLighting.lightBatch.Flush(cmd); - // Set material properties - lightMaterial.SetTexture(k_LightLookupID, passData.lightLookUp); - lightMaterial.SetTexture(k_FalloffLookupID, passData.fallOffLookUp); - if (passData.layerBatch.lightStats.useNormalMap) s_PropertyBlock.SetTexture(k_NormalMapID, passData.normalMap); @@ -190,10 +158,6 @@ 
internal static void ExecuteUnsafe(UnsafeCommandBuffer cmd, PassData passData, r //if (breakBatch && LightBatch.isBatchingSupported) // RendererLighting.lightBatch.Flush(cmd); - // Set material properties - lightMaterial.SetTexture(k_LightLookupID, passData.lightLookUp); - lightMaterial.SetTexture(k_FalloffLookupID, passData.fallOffLookUp); - if (passData.layerBatch.lightStats.useNormalMap) s_PropertyBlock.SetTexture(k_NormalMapID, passData.normalMap); @@ -233,8 +197,6 @@ internal class PassData internal TextureHandle normalMap; internal TextureHandle shadowMap; - internal TextureHandle fallOffLookUp; - internal TextureHandle lightLookUp; // TODO: Optimize and remove low level pass // For low level shadow and light pass @@ -287,11 +249,6 @@ public void Render(RenderGraph graph, ContextContainer frameData, Renderer2DData passData.isVolumetric = isVolumetric; passData.normalMap = layerBatch.lightStats.useNormalMap ? universal2DResourceData.normalsTexture[batchIndex] : TextureHandle.nullHandle; passData.shadowMap = layerBatch.lightStats.useShadows ? universal2DResourceData.shadowsTexture : TextureHandle.nullHandle; - passData.fallOffLookUp = graph.ImportTexture(m_FallOffRTHandle); - passData.lightLookUp = graph.ImportTexture(m_LightLookupRTHandle); - - builder.UseTexture(passData.fallOffLookUp); - builder.UseTexture(passData.lightLookUp); builder.AllowPassCulling(false); builder.AllowGlobalStateModification(true); @@ -336,11 +293,6 @@ public void Render(RenderGraph graph, ContextContainer frameData, Renderer2DData passData.isVolumetric = isVolumetric; passData.normalMap = layerBatch.lightStats.useNormalMap ? universal2DResourceData.normalsTexture[batchIndex] : TextureHandle.nullHandle; passData.shadowMap = layerBatch.lightStats.useShadows ? 
universal2DResourceData.shadowsTexture : TextureHandle.nullHandle; - passData.fallOffLookUp = graph.ImportTexture(m_FallOffRTHandle); - passData.lightLookUp = graph.ImportTexture(m_LightLookupRTHandle); - - builder.UseTexture(passData.fallOffLookUp); - builder.UseTexture(passData.lightLookUp); builder.AllowPassCulling(false); builder.AllowGlobalStateModification(true); diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/DrawRenderer2DPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/DrawRenderer2DPass.cs index af02f5dac9c..88dcf58aece 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/DrawRenderer2DPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/DrawRenderer2DPass.cs @@ -8,17 +8,13 @@ namespace UnityEngine.Rendering.Universal internal class DrawRenderer2DPass : ScriptableRenderPass { static readonly string k_RenderPass = "Renderer2D Pass"; - static readonly string k_SetLightGlobalPass = "SetLightGlobals Pass"; + static readonly string k_SetLightBlendTexture = "SetLightBlendTextures"; private static readonly ProfilingSampler m_ProfilingSampler = new ProfilingSampler(k_RenderPass); - private static readonly ProfilingSampler m_SetLightGlobalProfilingSampler = new ProfilingSampler(k_SetLightGlobalPass); + private static readonly ProfilingSampler m_SetLightBlendTextureProfilingSampler = new ProfilingSampler(k_SetLightBlendTexture); private static readonly ShaderTagId k_CombinedRenderingPassName = new ShaderTagId("Universal2D"); private static readonly ShaderTagId k_LegacyPassName = new ShaderTagId("SRPDefaultUnlit"); -#if UNITY_EDITOR - private static readonly int k_DefaultWhiteTextureID = Shader.PropertyToID("_DefaultWhiteTex"); -#endif - private static readonly List k_ShaderTags = new List() {k_LegacyPassName, k_CombinedRenderingPassName}; @@ -97,7 +93,7 @@ public void Render(RenderGraph graph, ContextContainer frameData, Renderer2DData // 
Preset global light textures for first batch if (batchIndex == 0) { - using (var builder = graph.AddRasterRenderPass(k_SetLightGlobalPass, out var passData, m_SetLightGlobalProfilingSampler)) + using (var builder = graph.AddRasterRenderPass(k_SetLightBlendTexture, out var passData, m_SetLightBlendTextureProfilingSampler)) { if (layerBatch.lightStats.useAnyLights) { @@ -185,8 +181,6 @@ void SetGlobalLightTextures(RenderGraph graph, IRasterRenderGraphBuilder builder if (cameraData.cameraType == CameraType.Preview) isLitView = false; - builder.SetGlobalTextureAfterPass(graph.defaultResources.whiteTexture, k_DefaultWhiteTextureID); - if (isLitView) #endif { diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/DrawShadow2DPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/DrawShadow2DPass.cs index faf66ca656b..eb711bf8216 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/DrawShadow2DPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/DrawShadow2DPass.cs @@ -35,9 +35,6 @@ private static void ExecuteShadowPass(UnsafeCommandBuffer cmd, DrawLight2DPass.P var projectedShadowMaterial = passData.rendererData.GetProjectedShadowMaterial(); var projectedUnshadowMaterial = passData.rendererData.GetProjectedUnshadowMaterial(); - projectedShadowMaterial.SetTexture(DrawLight2DPass.k_FalloffLookupID, passData.fallOffLookUp); - projectedUnshadowMaterial.SetTexture(DrawLight2DPass.k_FalloffLookupID, passData.fallOffLookUp); - ShadowRendering.PrerenderShadows(cmd, passData.rendererData, ref passData.layerBatch, light, 0, light.shadowIntensity); } } @@ -62,8 +59,6 @@ public void Render(RenderGraph graph, ContextContainer frameData, Renderer2DData passData.shadowMap = shadowTexture; passData.shadowDepth = depthTexture; passData.normalMap = layerBatch.lightStats.useNormalMap ? 
universal2DResourceData.normalsTexture[batchIndex] : TextureHandle.nullHandle; - passData.fallOffLookUp = graph.ImportTexture(DrawLight2DPass.m_FallOffRTHandle); - passData.lightLookUp = graph.ImportTexture(DrawLight2DPass.m_LightLookupRTHandle); if (!isVolumetric) { @@ -88,8 +83,6 @@ public void Render(RenderGraph graph, ContextContainer frameData, Renderer2DData builder.UseTexture(shadowTexture, AccessFlags.Write); builder.UseTexture(depthTexture, AccessFlags.Write); - builder.UseTexture(passData.fallOffLookUp); - builder.UseTexture(passData.lightLookUp); foreach (var light in layerBatch.shadowLights) { diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/GlobalLightTexturePass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/GlobalLightTexturePass.cs new file mode 100644 index 00000000000..5dcb53d1d5f --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/GlobalLightTexturePass.cs @@ -0,0 +1,41 @@ +using UnityEngine.Rendering.RenderGraphModule; + +namespace UnityEngine.Rendering.Universal +{ + internal class GlobalLightTexturePass : ScriptableRenderPass + { + static readonly string k_SetGlobalLightTexture = "SetGlobalLightTextures"; + private static readonly ProfilingSampler m_SetGlobalLightTextureProfilingSampler = new ProfilingSampler(k_SetGlobalLightTexture); + +#if UNITY_EDITOR + private static readonly int k_DefaultWhiteTextureID = Shader.PropertyToID("_DefaultWhiteTex"); +#endif + + class PassData + { + } + + internal static void SetGlobals(RenderGraph graph) + { + using (var builder = graph.AddRasterRenderPass(k_SetGlobalLightTexture, out var passData, m_SetGlobalLightTextureProfilingSampler)) + { + // Set light lookup and fall off textures as global + var lightLookupTexture = graph.ImportTexture(Light2DLookupTexture.GetLightLookupTexture_Rendergraph()); + var fallOffTexture = graph.ImportTexture(Light2DLookupTexture.GetFallOffLookupTexture_Rendergraph()); + + 
builder.SetGlobalTextureAfterPass(lightLookupTexture, Light2DLookupTexture.k_LightLookupID); + builder.SetGlobalTextureAfterPass(fallOffTexture, Light2DLookupTexture.k_FalloffLookupID); +#if UNITY_EDITOR + builder.SetGlobalTextureAfterPass(graph.defaultResources.whiteTexture, k_DefaultWhiteTextureID); +#endif + + builder.AllowPassCulling(false); + builder.AllowGlobalStateModification(true); + + builder.SetRenderFunc((PassData data, RasterGraphContext context) => + { + }); + } + } + } +} diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/GlobalLightTexturePass.cs.meta b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/GlobalLightTexturePass.cs.meta new file mode 100644 index 00000000000..4590c2b150e --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/GlobalLightTexturePass.cs.meta @@ -0,0 +1,2 @@ +fileFormatVersion: 2 +guid: 6625e53e7e2d40e41bb473bb7c20d739 \ No newline at end of file diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/Renderer2DRendergraph.cs b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/Renderer2DRendergraph.cs index b4eb61ad0a3..e356bd0110b 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/Renderer2DRendergraph.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/2D/Rendergraph/Renderer2DRendergraph.cs @@ -1,43 +1,21 @@ using UnityEngine.Experimental.Rendering; using UnityEngine.Rendering.RenderGraphModule; using static UnityEngine.Rendering.Universal.UniversalResourceDataBase; - using CommonResourceData = UnityEngine.Rendering.Universal.UniversalResourceData; namespace UnityEngine.Rendering.Universal { - internal enum Renderer2DResource - { - BackBufferColor, - BackBufferDepth, - - // intermediate camera targets - CameraColor, - CameraDepth, - - // intermediate depth for usage in passes with render texture scale - IntermediateDepth, - - LightTexture0, - 
LightTexture1, - LightTexture2, - LightTexture3, - - NormalsTexture, - ShadowsTexture, - UpscaleTexture, - CameraSortingLayerTexture, - - InternalColorLut, - AfterPostProcessColor, - OverlayUITexture, - DebugScreenColor, - DebugScreenDepth - } - internal sealed partial class Renderer2D : ScriptableRenderer { - RTHandle m_RenderGraphCameraColorHandle; + // TODO RENDERGRAPH: Once all cameras will run in a single RenderGraph we should remove all RTHandles and use per frame RG textures. + // We use 2 camera color handles so we can handle the edge case when a pass might want to read and write the same target. + // This is not allowed so we just swap the current target, this keeps camera stacking working and avoids an extra blit pass. + static int m_CurrentColorHandle = 0; + RTHandle[] m_RenderGraphCameraColorHandles = new RTHandle[] + { + null, null + }; + RTHandle m_RenderGraphCameraDepthHandle; RTHandle m_RenderGraphBackbufferColorHandle; RTHandle m_RenderGraphBackbufferDepthHandle; @@ -63,6 +41,24 @@ private struct ImportResourceSummary internal ImportResourceParams backBufferDepthParams; } + private RTHandle currentRenderGraphCameraColorHandle + { + get + { + return m_RenderGraphCameraColorHandles[m_CurrentColorHandle]; + } + } + + // get the next m_RenderGraphCameraColorHandles and make it the new current for future accesses + private RTHandle nextRenderGraphCameraColorHandle + { + get + { + m_CurrentColorHandle = (m_CurrentColorHandle + 1) % 2; + return currentRenderGraphCameraColorHandle; + } + } + ImportResourceSummary GetImportResourceSummary(RenderGraph renderGraph, UniversalCameraData cameraData) { ImportResourceSummary output = new ImportResourceSummary(); @@ -236,8 +232,8 @@ void CreateResources(RenderGraph renderGraph) } var renderTextureScale = m_Renderer2DData.lightRenderTextureScale; - var width = (int)(cameraData.cameraTargetDescriptor.width * renderTextureScale); - var height = (int)(cameraData.cameraTargetDescriptor.height * renderTextureScale); + 
var width = (int)Mathf.Max(1, cameraData.cameraTargetDescriptor.width * renderTextureScale); + var height = (int)Mathf.Max(1, cameraData.cameraTargetDescriptor.height * renderTextureScale); // Intermediate depth desc (size of renderTextureScale) { @@ -322,7 +318,8 @@ void CreateResources(RenderGraph renderGraph) cameraTargetDescriptor.autoGenerateMips = false; cameraTargetDescriptor.depthBufferBits = (int)DepthBits.None; - RenderingUtils.ReAllocateHandleIfNeeded(ref m_RenderGraphCameraColorHandle, cameraTargetDescriptor, cameraTargetFilterMode, TextureWrapMode.Clamp, name: "_CameraTargetAttachment"); + RenderingUtils.ReAllocateHandleIfNeeded(ref m_RenderGraphCameraColorHandles[0], cameraTargetDescriptor, cameraTargetFilterMode, TextureWrapMode.Clamp, name: "_CameraTargetAttachmentA"); + RenderingUtils.ReAllocateHandleIfNeeded(ref m_RenderGraphCameraColorHandles[1], cameraTargetDescriptor, cameraTargetFilterMode, TextureWrapMode.Clamp, name: "_CameraTargetAttachmentB"); commonResourceData.activeColorID = ActiveID.Camera; } else @@ -343,13 +340,7 @@ void CreateResources(RenderGraph renderGraph) m_CopyDepthPass.m_CopyResolvedDepth = resolveDepth; if (hasMSAA) - { depthDescriptor.bindMS = !resolveDepth; - } - - // binding MS surfaces is not supported by the GLES backend, and it won't be fixed after investigating - if (IsGLDevice()) - depthDescriptor.bindMS = false; depthDescriptor.graphicsFormat = GraphicsFormat.None; depthDescriptor.depthStencilFormat = k_DepthStencilFormat; @@ -365,7 +356,7 @@ void CreateResources(RenderGraph renderGraph) cameraData.baseCamera.TryGetComponent(out var baseCameraData); var baseRenderer = (Renderer2D)baseCameraData.scriptableRenderer; - m_RenderGraphCameraColorHandle = baseRenderer.m_RenderGraphCameraColorHandle; + m_RenderGraphCameraColorHandles = baseRenderer.m_RenderGraphCameraColorHandles; m_RenderGraphCameraDepthHandle = baseRenderer.m_RenderGraphCameraDepthHandle; m_RenderGraphBackbufferColorHandle = 
baseRenderer.m_RenderGraphBackbufferColorHandle; m_RenderGraphBackbufferDepthHandle = baseRenderer.m_RenderGraphBackbufferDepthHandle; @@ -381,7 +372,7 @@ void CreateResources(RenderGraph renderGraph) importSummary.cameraColorParams.discardOnLastUse = lastCameraInTheStack; importSummary.cameraDepthParams.discardOnLastUse = lastCameraInTheStack; - commonResourceData.cameraColor = renderGraph.ImportTexture(m_RenderGraphCameraColorHandle, importSummary.cameraColorParams); + commonResourceData.cameraColor = renderGraph.ImportTexture(currentRenderGraphCameraColorHandle, importSummary.cameraColorParams); commonResourceData.cameraDepth = renderGraph.ImportTexture(m_RenderGraphCameraDepthHandle, importSummary.cameraDepthParams); } @@ -531,6 +522,9 @@ private void OnMainRendering(RenderGraph renderGraph) var cameraSortingLayerBoundsIndex = Render2DLightingPass.GetCameraSortingLayerBoundsIndex(m_Renderer2DData); + // Set Global Light Textures + GlobalLightTexturePass.SetGlobals(renderGraph); + // Main render passes // Normal Pass @@ -624,7 +618,8 @@ private void OnAfterRendering(RenderGraph renderGraph) commonResourceData.debugScreenDepth = UniversalRenderer.CreateRenderGraphTexture(renderGraph, depthDesc, "_DebugScreenDepth", false); } - bool applyPostProcessing = postProcessingData.isEnabled && m_PostProcessPasses.isCreated; + bool applyPostProcessing = cameraData.postProcessEnabled && m_PostProcessPasses.isCreated; + bool anyPostProcessing = postProcessingData.isEnabled && m_PostProcessPasses.isCreated; cameraData.camera.TryGetComponent(out var ppc); bool isPixelPerfectCameraEnabled = ppc != null && ppc.enabled && ppc.cropFrame != PixelPerfectCamera.CropFrame.None; @@ -633,28 +628,40 @@ private void OnAfterRendering(RenderGraph renderGraph) // When using Upscale Render Texture on a Pixel Perfect Camera, we want all post-processing effects done with a low-res RT, // and only upscale the low-res RT to fullscreen when blitting it to camera target. 
Also, final post processing pass is not run in this case, // so FXAA is not supported (you don't want to apply FXAA when everything is intentionally pixelated). - bool requireFinalPostProcessPass = cameraData.resolveFinalTarget && !ppcUpscaleRT && applyPostProcessing && cameraData.antialiasing == AntialiasingMode.FastApproximateAntialiasing; + bool applyFinalPostProcessing = cameraData.resolveFinalTarget && !ppcUpscaleRT && anyPostProcessing && cameraData.antialiasing == AntialiasingMode.FastApproximateAntialiasing; bool hasPassesAfterPostProcessing = activeRenderPassQueue.Find(x => x.renderPassEvent == RenderPassEvent.AfterRenderingPostProcessing) != null; bool needsColorEncoding = DebugHandler == null || !DebugHandler.HDRDebugViewIsActive(cameraData.resolveFinalTarget); - var finalColorHandle = commonResourceData.activeColorTexture; - if (applyPostProcessing) { + TextureHandle activeColor = commonResourceData.activeColorTexture; + + // if the postprocessing pass is trying to read and write to the same CameraColor target, we need to swap so it writes to a different target, + // since reading a pass attachment is not possible. Normally this would be possible using temporary RenderGraph managed textures. + // The reason why in this case we need to use "external" RTHandles is to preserve the results for camera stacking. + // TODO RENDERGRAPH: Once all cameras will run in a single RenderGraph we can just use temporary RenderGraph textures as intermediate buffer. 
+ ImportResourceParams importColorParams = new ImportResourceParams(); + importColorParams.clearOnFirstUse = true; + importColorParams.clearColor = Color.black; + importColorParams.discardOnLastUse = cameraData.resolveFinalTarget; // check if last camera in the stack + + commonResourceData.cameraColor = renderGraph.ImportTexture(nextRenderGraphCameraColorHandle, importColorParams); + postProcessPass.RenderPostProcessingRenderGraph( renderGraph, frameData, - commonResourceData.activeColorTexture, + activeColor, commonResourceData.internalColorLut, commonResourceData.overlayUITexture, - commonResourceData.afterPostProcessColor, - requireFinalPostProcessPass, + commonResourceData.activeColorTexture, + applyFinalPostProcessing, resolveToDebugScreen, needsColorEncoding); - finalColorHandle = commonResourceData.afterPostProcessColor; } + var finalColorHandle = commonResourceData.activeColorTexture; + RecordCustomRenderGraphPasses(renderGraph, RenderPassEvent2D.AfterRenderingPostProcessing); // Do PixelPerfect upscaling when using the Stretch Fill option if (requirePixelPerfectUpscale) @@ -669,12 +676,18 @@ private void OnAfterRendering(RenderGraph renderGraph) if (createColorTexture) { - if (requireFinalPostProcessPass) + if (applyFinalPostProcessing) postProcessPass.RenderFinalPassRenderGraph(renderGraph, frameData, in finalColorHandle, commonResourceData.overlayUITexture, in finalBlitTarget, needsColorEncoding); - else + else if (cameraData.resolveFinalTarget) m_FinalBlitPass.Render(renderGraph, frameData, cameraData, finalColorHandle, finalBlitTarget, commonResourceData.overlayUITexture); finalColorHandle = finalBlitTarget; + + if (cameraData.resolveFinalTarget) + { + commonResourceData.activeColorID = ActiveID.BackBuffer; + commonResourceData.activeDepthID = ActiveID.BackBuffer; + } } // We can explicitly render the overlay UI from URP when HDR output is not enabled. 
@@ -684,22 +697,27 @@ private void OnAfterRendering(RenderGraph renderGraph) if (shouldRenderUI && !outputToHDR) m_DrawOverlayUIPass.RenderOverlay(renderGraph, frameData, in finalColorHandle, in finalDepthHandle); + // If HDR debug views are enabled, DebugHandler will perform the blit from debugScreenColor (== finalColorHandle) to backBufferColor. DebugHandler?.Setup(renderGraph, cameraData.isPreviewCamera); DebugHandler?.Render(renderGraph, cameraData, finalColorHandle, commonResourceData.overlayUITexture, commonResourceData.backBufferColor); + if (cameraData.isSceneViewCamera) + DrawRenderGraphWireOverlay(renderGraph, frameData, commonResourceData.backBufferColor); + if (drawGizmos) - DrawRenderGraphGizmos(renderGraph, frameData, commonResourceData.backBufferColor, commonResourceData.activeDepthTexture, GizmoSubset.PostImageEffects); + DrawRenderGraphGizmos(renderGraph, frameData, commonResourceData.activeColorTexture, commonResourceData.activeDepthTexture, GizmoSubset.PostImageEffects); } private void CleanupRenderGraphResources() { - m_RenderGraphCameraColorHandle?.Release(); + m_RenderGraphCameraColorHandles[0]?.Release(); + m_RenderGraphCameraColorHandles[1]?.Release(); m_RenderGraphCameraDepthHandle?.Release(); m_RenderGraphBackbufferColorHandle?.Release(); m_RenderGraphBackbufferDepthHandle?.Release(); m_CameraSortingLayerHandle?.Release(); - m_LightPass.Dispose(); + Light2DLookupTexture.Release(); } } } diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Debug/DebugDisplaySettingsMaterial.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Debug/DebugDisplaySettingsMaterial.cs index 3b1067a02fb..00174b5ad53 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Debug/DebugDisplaySettingsMaterial.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Debug/DebugDisplaySettingsMaterial.cs @@ -250,6 +250,81 @@ public float albedoSaturationTolerance /// public float metallicMaxValue { get; set; } = 0.9f; + /// + /// 
Current value for filtering layers based on the selected light's rendering layers. + /// + public bool renderingLayersSelectedLight { get; set; } = false; + + /// + /// Current value for filtering layers based on the selected light's shadow layers. + /// + public bool selectedLightShadowLayerMask { get; set; } = false; + + /// + /// Current value for filtering layers. + /// + public uint renderingLayerMask { get; set; } = 0; + + /// Rendering Layers Debug Colors. + public Vector4[] debugRenderingLayersColors = new Vector4[] + { + new Vector4(230, 159, 0) / 255, + new Vector4(86, 180, 233) / 255, + new Vector4(255, 182, 291) / 255, + new Vector4(0, 158, 115) / 255, + new Vector4(240, 228, 66) / 255, + new Vector4(0, 114, 178) / 255, + new Vector4(213, 94, 0) / 255, + new Vector4(170, 68, 170) / 255, + new Vector4(1.0f, 0.5f, 0.5f), + new Vector4(0.5f, 1.0f, 0.5f), + new Vector4(0.5f, 0.5f, 1.0f), + new Vector4(0.5f, 1.0f, 1.0f), + new Vector4(0.75f, 0.25f, 1.0f), + new Vector4(0.25f, 1.0f, 0.75f), + new Vector4(0.25f, 0.25f, 0.75f), + new Vector4(0.75f, 0.25f, 0.25f), + new Vector4(0.0f, 0.0f, 0.0f), + new Vector4(0.0f, 0.0f, 0.0f), + new Vector4(0.0f, 0.0f, 0.0f), + new Vector4(0.0f, 0.0f, 0.0f), + new Vector4(0.0f, 0.0f, 0.0f), + new Vector4(0.0f, 0.0f, 0.0f), + new Vector4(0.0f, 0.0f, 0.0f), + new Vector4(0.0f, 0.0f, 0.0f), + new Vector4(0.0f, 0.0f, 0.0f), + new Vector4(0.0f, 0.0f, 0.0f), + new Vector4(0.0f, 0.0f, 0.0f), + new Vector4(0.0f, 0.0f, 0.0f), + new Vector4(0.0f, 0.0f, 0.0f), + new Vector4(0.0f, 0.0f, 0.0f), + new Vector4(0.0f, 0.0f, 0.0f), + new Vector4(0.0f, 0.0f, 0.0f), + }; + + /// + /// Get the RenderingLayerMask used by the selected light + /// + /// Bitmask representing the RenderingLayerMask for the selected light. 
+ public uint GetDebugLightLayersMask() + { +#if UNITY_EDITOR + if (renderingLayersSelectedLight) + { + if (UnityEditor.Selection.activeGameObject == null) + return 0; + var light = UnityEditor.Selection.activeGameObject.GetComponent(); + if (light == null) + return 0; + + if (selectedLightShadowLayerMask) + return light.shadowRenderingLayers; + return light.renderingLayers; + } +#endif + return 0xFFFF; + } + /// /// Current material validation mode. /// @@ -271,10 +346,15 @@ static class Strings { public const string AlbedoSettingsContainerName = "Albedo Settings"; public const string MetallicSettingsContainerName = "Metallic Settings"; + public const string RenderingLayerMasksSettingsContainerName = "Rendering Layer Masks Settings"; public static readonly NameAndTooltip MaterialOverride = new() { name = "Material Override", tooltip = "Use the drop-down to select a Material property to visualize on every GameObject on screen." }; public static readonly NameAndTooltip VertexAttribute = new() { name = "Vertex Attribute", tooltip = "Use the drop-down to select a 3D GameObject attribute, like Texture Coordinates or Vertex Color, to visualize on screen." }; public static readonly NameAndTooltip MaterialValidationMode = new() { name = "Material Validation Mode", tooltip = "Debug and validate material properties." 
}; + public static readonly NameAndTooltip RenderingLayersSelectedLight = new() { name = "Filter Rendering Layers by Light", tooltip = "Highlight Renderers affected by Selected Light" }; + public static readonly NameAndTooltip SelectedLightShadowLayerMask = new() { name = "Use Light's Shadow Layer Mask", tooltip = "Highlight Renderers that cast shadows for the Selected Light" }; + public static readonly NameAndTooltip RenderingLayerColors = new() { name = "Layers Color", tooltip = "Select the display color for each Rendering Layer" }; + public static readonly NameAndTooltip FilterRenderingLayerMask = new() { name = "Filter Layers", tooltip = "Use the dropdown to filter Rendering Layers that you want to visualize" }; public static readonly NameAndTooltip ValidationPreset = new() { name = "Validation Preset", tooltip = "Validate using a list of preset surfaces and inputs based on real-world surfaces." }; public static readonly NameAndTooltip AlbedoCustomColor = new() { name = "Target Color", tooltip = "Custom target color for albedo validation." }; public static readonly NameAndTooltip AlbedoMinLuminance = new() { name = "Min Luminance", tooltip = "Any values set below this field are invalid and appear red on screen." 
}; @@ -318,6 +398,31 @@ internal static class WidgetFactory onValueChanged = (_, _) => DebugManager.instance.ReDrawOnScreenDebug() }; + internal static DebugUI.Widget CreateRenderingLayersSelectedLight (SettingsPanel panel) => new DebugUI.BoolField + { + nameAndTooltip = Strings.RenderingLayersSelectedLight, + getter = () => (bool)panel.data.renderingLayersSelectedLight, + setter = (value) => panel.data.renderingLayersSelectedLight = value, + flags = DebugUI.Flags.EditorOnly, + }; + + internal static DebugUI.Widget CreateSelectedLightShadowLayerMask(SettingsPanel panel) => new DebugUI.BoolField + { + nameAndTooltip = Strings.SelectedLightShadowLayerMask, + getter = () => (bool)panel.data.selectedLightShadowLayerMask, + setter = value => panel.data.selectedLightShadowLayerMask = value, + flags = DebugUI.Flags.EditorOnly, + isHiddenCallback = () => !panel.data.renderingLayersSelectedLight + }; + + internal static DebugUI.Widget CreateFilterRenderingLayerMasks (SettingsPanel panel) => new DebugUI.MaskField + { + nameAndTooltip = Strings.FilterRenderingLayerMask, + getter = () => panel.data.renderingLayerMask, + setter = value => panel.data.renderingLayerMask = value, + isHiddenCallback = () => panel.data.renderingLayersSelectedLight + }; + internal static DebugUI.Widget CreateAlbedoPreset(SettingsPanel panel) => new DebugUI.EnumField { nameAndTooltip = Strings.ValidationPreset, @@ -396,6 +501,31 @@ public SettingsPanel(DebugDisplaySettingsMaterial data) { AddWidget(new DebugUI.RuntimeDebugShadersMessageBox()); + DebugUI.MaskField filterRenderingLayerWidget = (DebugUI.MaskField)WidgetFactory.CreateFilterRenderingLayerMasks(this); + var renderingLayers = new List(); + for (int i = 0; i < 32; i++) + renderingLayers.Add($"Unused Rendering Layer {i}"); + var names = UnityEngine.RenderingLayerMask.GetDefinedRenderingLayerNames(); + for (int i = 0; i < names.Length; i++) + { + var index = UnityEngine.RenderingLayerMask.NameToRenderingLayer(names[i]); + renderingLayers[index] 
= names[i]; + } + filterRenderingLayerWidget.Fill(renderingLayers.ToArray()); + + var layersColor = new DebugUI.Foldout() { nameAndTooltip = Strings.RenderingLayerColors, flags = DebugUI.Flags.EditorOnly }; + for (int i = 0; i < renderingLayers.Count; i++) + { + int index = i; + layersColor.children.Add(new DebugUI.ColorField + { + displayName = renderingLayers[i], + flags = DebugUI.Flags.EditorOnly, + getter = () => this.data.debugRenderingLayersColors[index], + setter = value => this.data.debugRenderingLayersColors[index] = value + }); + } + AddWidget(new DebugUI.Foldout { displayName = "Material Filters", @@ -405,6 +535,18 @@ public SettingsPanel(DebugDisplaySettingsMaterial data) children = { WidgetFactory.CreateMaterialOverride(this), + new DebugUI.Container() + { + displayName = Strings.RenderingLayerMasksSettingsContainerName, + isHiddenCallback = () => data.materialDebugMode != DebugMaterialMode.RenderingLayerMasks, + children = + { + WidgetFactory.CreateRenderingLayersSelectedLight(this), + WidgetFactory.CreateSelectedLightShadowLayerMask(this), + filterRenderingLayerWidget, + layersColor, + } + }, WidgetFactory.CreateVertexAttribute(this) } }); diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Debug/DebugHandler.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Debug/DebugHandler.cs index fefcd4e40bf..8ac8e1c019f 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Debug/DebugHandler.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Debug/DebugHandler.cs @@ -2,6 +2,7 @@ using System.Collections; using System.Collections.Generic; using System.Diagnostics; +using System.Runtime.InteropServices; using UnityEditor; using UnityEngine.Experimental.Rendering; using UnityEngine.Rendering.RenderGraphModule; @@ -89,6 +90,8 @@ class DebugHandler : IDebugDisplaySettingsQuery RTHandle m_DebugFontTexture; + private GraphicsBuffer m_debugDisplayConstant; + readonly UniversalRenderPipelineDebugDisplaySettings 
m_DebugDisplaySettings; DebugDisplaySettingsLighting LightingSettings => m_DebugDisplaySettings.lightingSettings; @@ -178,6 +181,8 @@ internal DebugHandler() { m_DebugFontTexture = RTHandles.Alloc(m_RuntimeTextures.debugFontTexture); } + + m_debugDisplayConstant = new GraphicsBuffer(GraphicsBuffer.Target.Constant, 32, Marshal.SizeOf(typeof(Vector4))); } public void Dispose() @@ -186,6 +191,7 @@ public void Dispose() m_DebugScreenColorHandle?.Release(); m_DebugScreenDepthHandle?.Release(); m_DebugFontTexture?.Release(); + m_debugDisplayConstant.Dispose(); CoreUtils.Destroy(m_HDRDebugViewMaterial); CoreUtils.Destroy(m_ReplacementMaterial); } @@ -240,6 +246,15 @@ internal void SetupShaderProperties(RasterCommandBuffer cmd, int passIndex = 0) cmd.DisableShaderKeyword("_DEBUG_ENVIRONMENTREFLECTIONS_OFF"); } + m_debugDisplayConstant.SetData(MaterialSettings.debugRenderingLayersColors, 0, 0, 32); + + cmd.SetGlobalConstantBuffer(m_debugDisplayConstant, "_DebugDisplayConstant", 0, m_debugDisplayConstant.count * m_debugDisplayConstant.stride); + + if (MaterialSettings.renderingLayersSelectedLight) + cmd.SetGlobalInt("_DebugRenderingLayerMask", (int)MaterialSettings.GetDebugLightLayersMask()); + else + cmd.SetGlobalInt("_DebugRenderingLayerMask", (int)MaterialSettings.renderingLayerMask); + switch (RenderingSettings.sceneOverrideMode) { case DebugSceneOverrideMode.Overdraw: diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Decal/DBuffer/DBufferDepthCopyPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Decal/DBuffer/DBufferDepthCopyPass.cs index f7b051746e4..0fe7c49fd11 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Decal/DBuffer/DBufferDepthCopyPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Decal/DBuffer/DBufferDepthCopyPass.cs @@ -19,31 +19,27 @@ public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer { UniversalResourceData resourceData = frameData.Get(); UniversalCameraData 
cameraData = frameData.Get(); + var universalRenderer = cameraData.renderer as UniversalRenderer; - UniversalRenderer renderer = (UniversalRenderer)cameraData.renderer; + bool isDeferred = universalRenderer.renderingModeActual == RenderingMode.Deferred; + bool useDepthPriming = universalRenderer.useDepthPriming; + bool isMsaa = cameraData.cameraTargetDescriptor.msaaSamples > 1; - TextureHandle cameraDepthTexture = resourceData.cameraDepthTexture; - - TextureHandle src, dest; - if (renderer.renderingModeActual == RenderingMode.Deferred) - { - src = resourceData.activeDepthTexture; - dest = cameraDepthTexture; - } - else + // We must create a temporary depth buffer for dbuffer rendering if the existing one isn't compatible. + // The deferred path always has compatible depth + // The forward path only has compatible depth when depth priming is enabled without MSAA + bool hasCompatibleDepth = isDeferred || (useDepthPriming && !isMsaa); + if (!hasCompatibleDepth) { var depthDesc = cameraData.cameraTargetDescriptor; depthDesc.graphicsFormat = GraphicsFormat.None; //Depth only rendering depthDesc.depthStencilFormat = cameraData.cameraTargetDescriptor.depthStencilFormat; depthDesc.msaaSamples = 1; - var depthTarget = UniversalRenderer.CreateRenderGraphTexture(renderGraph, depthDesc, DBufferRenderPass.s_DBufferDepthName, true); - resourceData.dBufferDepth = depthTarget; + resourceData.dBufferDepth = UniversalRenderer.CreateRenderGraphTexture(renderGraph, depthDesc, DBufferRenderPass.s_DBufferDepthName, true); - src = cameraDepthTexture; - dest = cameraData.cameraTargetDescriptor.msaaSamples > 1 ? 
depthTarget : resourceData.activeDepthTexture; + // Copy the current depth data into the new attachment + Render(renderGraph, resourceData.dBufferDepth, resourceData.cameraDepthTexture, resourceData, cameraData, false, "Copy DBuffer Depth"); } - - Render(renderGraph, dest, src, resourceData, cameraData, false); } } } diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Decal/DBuffer/DBufferRenderPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Decal/DBuffer/DBufferRenderPass.cs index 07ad6a4f11d..50b4faf397d 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Decal/DBuffer/DBufferRenderPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Decal/DBuffer/DBufferRenderPass.cs @@ -230,13 +230,10 @@ public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer UniversalCameraData cameraData = frameData.Get(); UniversalLightData lightData = frameData.Get(); - UniversalRenderer renderer = (UniversalRenderer)cameraData.renderer; - TextureHandle cameraDepthTexture = resourceData.cameraDepthTexture; TextureHandle cameraNormalsTexture = resourceData.cameraNormalsTexture; - TextureHandle depthTarget = renderer.renderingModeActual == RenderingMode.Deferred ? resourceData.activeDepthTexture : - (cameraData.cameraTargetDescriptor.msaaSamples > 1 ? resourceData.dBufferDepth : resourceData.activeDepthTexture); + TextureHandle depthTarget = resourceData.dBufferDepth.IsValid() ? 
resourceData.dBufferDepth : resourceData.activeDepthTexture; using (var builder = renderGraph.AddRasterRenderPass(passName, out var passData, profilingSampler)) { @@ -275,14 +272,14 @@ public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer builder.SetRenderAttachment(dbufferHandles[2], 2, AccessFlags.Write); } - builder.SetRenderAttachmentDepth(depthTarget, AccessFlags.Write); - if (cameraData.cameraTargetDescriptor.msaaSamples > 1) - builder.SetGlobalTextureAfterPass(depthTarget, Shader.PropertyToID("_CameraDepthTexture")); + builder.SetRenderAttachmentDepth(depthTarget, AccessFlags.Read); if (cameraDepthTexture.IsValid()) builder.UseTexture(cameraDepthTexture, AccessFlags.Read); if (cameraNormalsTexture.IsValid()) builder.UseTexture(cameraNormalsTexture, AccessFlags.Read); + if (passData.decalLayers) + builder.UseTexture(resourceData.renderingLayersTexture, AccessFlags.Read); if (resourceData.ssaoTexture.IsValid()) builder.UseGlobalTexture(s_SSAOTextureID); diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/History/RawDepthHistory.cs b/Packages/com.unity.render-pipelines.universal/Runtime/History/RawDepthHistory.cs index 15f660014ec..968351b7fb2 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/History/RawDepthHistory.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/History/RawDepthHistory.cs @@ -102,7 +102,7 @@ internal RenderTextureDescriptor GetHistoryDescriptor(ref RenderTextureDescripto // Return true if the RTHandles were reallocated. 
internal bool Update(ref RenderTextureDescriptor cameraDesc, bool xrMultipassEnabled) { - if (cameraDesc.width > 0 && cameraDesc.height > 0 && cameraDesc.graphicsFormat != GraphicsFormat.None) + if (cameraDesc.width > 0 && cameraDesc.height > 0 && (cameraDesc.depthStencilFormat != GraphicsFormat.None || cameraDesc.graphicsFormat != GraphicsFormat.None) ) { var historyDesc = GetHistoryDescriptor(ref cameraDesc); diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/AdditionalLightsShadowCasterPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/AdditionalLightsShadowCasterPass.cs index 37b9d0ef90f..410bed22609 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/AdditionalLightsShadowCasterPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/AdditionalLightsShadowCasterPass.cs @@ -57,6 +57,7 @@ private static class AdditionalShadowsConstantBuffer int renderTargetWidth; int renderTargetHeight; + private RenderTextureDescriptor m_AdditionalLightShadowDescriptor; ProfilingSampler m_ProfilingSetupSampler = new ProfilingSampler("Setup Additional Shadows"); private PassData m_PassData; @@ -583,7 +584,7 @@ public bool Setup(UniversalRenderingData renderingData, UniversalCameraData came m_AdditionalLightShadowSliceIndexTo_WorldShadowMatrix[globalShadowSliceIndex] = sliceTransform * m_AdditionalLightShadowSliceIndexTo_WorldShadowMatrix[globalShadowSliceIndex]; } - ShadowUtils.ShadowRTReAllocateIfNeeded(ref m_AdditionalLightsShadowmapHandle, renderTargetWidth, renderTargetHeight, k_ShadowmapBufferBits, name: k_AdditionalLightShadowMapTextureName); + UpdateTextureDescriptorIfNeeded(); m_MaxShadowDistanceSq = cameraData.maxShadowDistance * cameraData.maxShadowDistance; m_CascadeBorder = shadowData.mainLightShadowCascadeBorder; @@ -593,6 +594,17 @@ public bool Setup(UniversalRenderingData renderingData, UniversalCameraData came return true; } + private void UpdateTextureDescriptorIfNeeded() + { + if 
( m_AdditionalLightShadowDescriptor.width != renderTargetWidth + || m_AdditionalLightShadowDescriptor.height != renderTargetHeight + || m_AdditionalLightShadowDescriptor.depthBufferBits != k_ShadowmapBufferBits + || m_AdditionalLightShadowDescriptor.colorFormat != RenderTextureFormat.Shadowmap) + { + m_AdditionalLightShadowDescriptor = new RenderTextureDescriptor(renderTargetWidth, renderTargetHeight, RenderTextureFormat.Shadowmap, k_ShadowmapBufferBits); + } + } + bool SetupForEmptyRendering(bool stripShadowsOffVariants, UniversalShadowData shadowData) { if (!stripShadowsOffVariants) @@ -602,10 +614,6 @@ bool SetupForEmptyRendering(bool stripShadowsOffVariants, UniversalShadowData sh m_CreateEmptyShadowmap = true; useNativeRenderPass = false; - // Required for scene view camera(URP renderer not initialized) - if (ShadowUtils.ShadowRTReAllocateIfNeeded(ref m_EmptyAdditionalLightShadowmapTexture, k_EmptyShadowMapDimensions, k_EmptyShadowMapDimensions, k_ShadowmapBufferBits, name: k_EmptyAdditionalLightShadowMapTextureName)) - m_EmptyShadowmapNeedsClear = true; - // initialize _AdditionalShadowParams for (int i = 0; i < m_AdditionalLightIndexToShadowParams.Length; ++i) m_AdditionalLightIndexToShadowParams[i] = c_DefaultShadowParams; @@ -617,29 +625,37 @@ bool SetupForEmptyRendering(bool stripShadowsOffVariants, UniversalShadowData sh [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)] public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor) { - - if (m_CreateEmptyShadowmap && !m_EmptyShadowmapNeedsClear) - { - // UUM-63146 - glClientWaitSync: Expected application to have kicked everything until job: 96089 (possibly by calling glFlush)" are thrown in the Android Player on some devices with PowerVR Rogue GE8320 - // Resetting of target would clean up the color attachment buffers and depth attachment buffers, which inturn is preventing the leak in the said platform. 
This is likely a symptomatic fix, but is solving the problem for now. - - if (Application.platform == RuntimePlatform.Android && PlatformAutoDetect.isRunningOnPowerVRGPU) - ResetTarget(); - - return; - } - // Disable obsolete warning for internal usage #pragma warning disable CS0618 + if (m_CreateEmptyShadowmap) { + // Required for scene view camera(URP renderer not initialized) + if (ShadowUtils.ShadowRTReAllocateIfNeeded(ref m_EmptyAdditionalLightShadowmapTexture, k_EmptyShadowMapDimensions, k_EmptyShadowMapDimensions, k_ShadowmapBufferBits, name: k_EmptyAdditionalLightShadowMapTextureName)) + m_EmptyShadowmapNeedsClear = true; + + if (!m_EmptyShadowmapNeedsClear) + { + // UUM-63146 - glClientWaitSync: Expected application to have kicked everything until job: 96089 (possibly by calling glFlush)" are thrown in the Android Player on some devices with PowerVR Rogue GE8320 + // Resetting of target would clean up the color attachment buffers and depth attachment buffers, which inturn is preventing the leak in the said platform. This is likely a symptomatic fix, but is solving the problem for now. 
+ + if (Application.platform == RuntimePlatform.Android && PlatformAutoDetect.isRunningOnPowerVRGPU) + ResetTarget(); + + return; + } + ConfigureTarget(m_EmptyAdditionalLightShadowmapTexture); m_EmptyShadowmapNeedsClear = false; } else + { + ShadowUtils.ShadowRTReAllocateIfNeeded(ref m_AdditionalLightsShadowmapHandle, renderTargetWidth, renderTargetHeight, k_ShadowmapBufferBits, name: k_AdditionalLightShadowMapTextureName); ConfigureTarget(m_AdditionalLightsShadowmapHandle); + } ConfigureClear(ClearFlag.All, Color.black); + #pragma warning restore CS0618 } @@ -663,6 +679,7 @@ public override void Execute(ScriptableRenderContext context, ref RenderingData UniversalCameraData cameraData = frameData.Get(); UniversalLightData lightData = frameData.Get(); InitPassData(ref m_PassData, cameraData, lightData, shadowData); + m_PassData.allocatedShadowAtlasSize = m_AdditionalLightsShadowmapHandle.referenceSize; InitRendererLists(ref universalRenderingData.cullResults, ref m_PassData, context, default(RenderGraph), false); RenderAdditionalShadowmapAtlas(CommandBufferHelpers.GetRasterCommandBuffer(universalRenderingData.commandBuffer), ref m_PassData, false); universalRenderingData.commandBuffer.SetGlobalTexture(m_AdditionalLightsShadowmapID, m_AdditionalLightsShadowmapHandle.nameID); @@ -725,8 +742,7 @@ void RenderAdditionalShadowmapAtlas(RasterCommandBuffer cmd, ref PassData data, if (shadowSlicesCount > 0) cmd.SetKeyword(ShaderGlobalKeywords.CastingPunctualLightShadow, true); - float lastDepthBias = -10f; - float lastNormalBias = -10f; + Vector4 lastShadowBias = new Vector4(-10f, -10f, -10f, -10f); for (int globalShadowSliceIndex = 0; globalShadowSliceIndex < shadowSlicesCount; ++globalShadowSliceIndex) { int additionalLightIndex = m_ShadowSliceToAdditionalLightIndex[globalShadowSliceIndex]; @@ -743,13 +759,10 @@ void RenderAdditionalShadowmapAtlas(RasterCommandBuffer cmd, ref PassData data, Vector4 shadowBias = ShadowUtils.GetShadowBias(ref shadowLight, visibleLightIndex, 
data.shadowData, shadowSliceData.projectionMatrix, shadowSliceData.resolution); // Update the bias when rendering the first slice or when the bias has changed - if ( globalShadowSliceIndex == 0 - || !ShadowUtils.FastApproximately(shadowBias.x, lastDepthBias) - || !ShadowUtils.FastApproximately(shadowBias.y, lastNormalBias)) + if (globalShadowSliceIndex == 0 || !ShadowUtils.FastApproximately(shadowBias, lastShadowBias)) { ShadowUtils.SetShadowBias(cmd, shadowBias); - lastDepthBias = shadowBias.x; - lastNormalBias = shadowBias.y; + lastShadowBias = shadowBias; } // Update light position @@ -784,12 +797,12 @@ void RenderAdditionalShadowmapAtlas(RasterCommandBuffer cmd, ref PassData data, ShadowUtils.SetSoftShadowQualityShaderKeywords(cmd, data.shadowData); if (anyShadowSliceRenderer) - SetupAdditionalLightsShadowReceiverConstants(cmd, data.useStructuredBuffer, softShadows); + SetupAdditionalLightsShadowReceiverConstants(cmd, data.allocatedShadowAtlasSize, data.useStructuredBuffer, softShadows); } } // Set constant buffer data that will be used during the lighting/shadowing pass - void SetupAdditionalLightsShadowReceiverConstants(RasterCommandBuffer cmd, bool useStructuredBuffer, bool softShadows) + void SetupAdditionalLightsShadowReceiverConstants(RasterCommandBuffer cmd, Vector2Int allocatedShadowAtlasSize, bool useStructuredBuffer, bool softShadows) { if (useStructuredBuffer) { @@ -814,7 +827,6 @@ void SetupAdditionalLightsShadowReceiverConstants(RasterCommandBuffer cmd, bool if (softShadows) { - Vector2Int allocatedShadowAtlasSize = m_AdditionalLightsShadowmapHandle.referenceSize; Vector2 invShadowAtlasSize = Vector2.one / allocatedShadowAtlasSize; Vector2 invHalfShadowAtlasSize = invShadowAtlasSize * 0.5f; @@ -842,6 +854,7 @@ private class PassData internal TextureHandle shadowmapTexture; internal int shadowmapID; internal bool useStructuredBuffer; + internal Vector2Int allocatedShadowAtlasSize; internal bool emptyShadowmap; @@ -916,7 +929,7 @@ internal 
TextureHandle Render(RenderGraph graph, ContextContainer frameData) builder.UseRendererList(passData.shadowRendererListsHdl[globalShadowSliceIndex]); } - shadowTexture = UniversalRenderer.CreateRenderGraphTexture(graph, m_AdditionalLightsShadowmapHandle.rt.descriptor, "_AdditionalLightsShadowmapTexture", true, ShadowUtils.m_ForceShadowPointSampling ? FilterMode.Point : FilterMode.Bilinear); + shadowTexture = UniversalRenderer.CreateRenderGraphTexture(graph, m_AdditionalLightShadowDescriptor, k_AdditionalLightShadowMapTextureName, true, ShadowUtils.m_ForceShadowPointSampling ? FilterMode.Point : FilterMode.Bilinear); builder.SetRenderAttachmentDepth(shadowTexture, AccessFlags.Write); } else @@ -924,6 +937,9 @@ internal TextureHandle Render(RenderGraph graph, ContextContainer frameData) shadowTexture = graph.defaultResources.defaultShadowTexture; } + TextureDesc descriptor = shadowTexture.GetDescriptor(graph); + passData.allocatedShadowAtlasSize = new Vector2Int(descriptor.width, descriptor.height); + // RENDERGRAPH TODO: Need this as shadowmap is only used as Global Texture and not a buffer, so would get culled by RG builder.AllowPassCulling(false); builder.AllowGlobalStateModification(true); diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/CopyColorPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/CopyColorPass.cs index e48f02221b2..5927b0948a2 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/CopyColorPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/CopyColorPass.cs @@ -62,13 +62,13 @@ public static void ConfigureDescriptor(Downsampling downsamplingMethod, ref Rend descriptor.depthBufferBits = 0; if (downsamplingMethod == Downsampling._2xBilinear) { - descriptor.width /= 2; - descriptor.height /= 2; + descriptor.width = Mathf.Max(1, descriptor.width / 2); + descriptor.height = Mathf.Max(1, descriptor.height / 2); } else if (downsamplingMethod == Downsampling._4xBox || 
downsamplingMethod == Downsampling._4xBilinear) { - descriptor.width /= 4; - descriptor.height /= 4; + descriptor.width = Mathf.Max(1, descriptor.width / 4); + descriptor.height = Mathf.Max(1, descriptor.height / 4); } filterMode = downsamplingMethod == Downsampling.None ? FilterMode.Point : FilterMode.Bilinear; @@ -216,7 +216,7 @@ private void RenderInternal(RenderGraph renderGraph, in TextureHandle destinatio using (var builder = renderGraph.AddRasterRenderPass(passName, out var passData, profilingSampler)) { passData.destination = destination; - builder.SetRenderAttachment(destination, 0, AccessFlags.Write); + builder.SetRenderAttachment(destination, 0, AccessFlags.WriteAll); passData.source = source; builder.UseTexture(source, AccessFlags.Read); passData.useProceduralBlit = useProceduralBlit; diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/CopyDepthPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/CopyDepthPass.cs index f01112f490e..80bc10ea532 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/CopyDepthPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/CopyDepthPass.cs @@ -23,6 +23,8 @@ public class CopyDepthPass : ScriptableRenderPass internal bool CopyToDepth { get; set; } // In XR CopyDepth, we need a special workaround to handle dummy color issue in RenderGraph. 
internal bool CopyToDepthXR { get; set; } + // We need to know if we're copying to the backbuffer in order to handle y-flip correctly + internal bool CopyToBackbuffer { get; set; } Material m_CopyDepthMaterial; internal bool m_CopyResolvedDepth; @@ -59,6 +61,7 @@ public CopyDepthPass(RenderPassEvent evt, Shader copyDepthShader, bool shouldCle m_CopyResolvedDepth = copyResolvedDepth; m_ShouldClear = shouldClear; CopyToDepthXR = false; + CopyToBackbuffer = false; } /// @@ -105,12 +108,12 @@ public override void OnCameraSetup(CommandBuffer cmd, ref RenderingData renderin private class PassData { internal TextureHandle source; - internal TextureHandle destination; internal UniversalCameraData cameraData; internal Material copyDepthMaterial; internal int msaaSamples; internal bool copyResolvedDepth; internal bool copyToDepth; + internal bool isDstBackbuffer; } /// @@ -122,10 +125,12 @@ public override void Execute(ScriptableRenderContext context, ref RenderingData m_PassData.copyDepthMaterial = m_CopyDepthMaterial; m_PassData.msaaSamples = MssaSamples; m_PassData.copyResolvedDepth = m_CopyResolvedDepth; - m_PassData.copyToDepth = CopyToDepth; + m_PassData.copyToDepth = CopyToDepth || CopyToDepthXR; + m_PassData.isDstBackbuffer = CopyToBackbuffer || CopyToDepthXR; m_PassData.cameraData = cameraData; var cmd = renderingData.commandBuffer; cmd.SetGlobalTexture(ShaderConstants._CameraDepthAttachment, source.nameID); + #if ENABLE_VR && ENABLE_XR_MODULE if (m_PassData.cameraData.xr.enabled) { @@ -133,10 +138,10 @@ public override void Execute(ScriptableRenderContext context, ref RenderingData cmd.SetFoveatedRenderingMode(FoveatedRenderingMode.Disabled); } #endif - ExecutePass(CommandBufferHelpers.GetRasterCommandBuffer(cmd), m_PassData, this.source, this.destination); + ExecutePass(CommandBufferHelpers.GetRasterCommandBuffer(cmd), m_PassData, this.source); } - private static void ExecutePass(RasterCommandBuffer cmd, PassData passData, RTHandle source, RTHandle destination) + 
private static void ExecutePass(RasterCommandBuffer cmd, PassData passData, RTHandle source) { var copyDepthMaterial = passData.copyDepthMaterial; var msaaSamples = passData.msaaSamples; @@ -191,27 +196,20 @@ private static void ExecutePass(RasterCommandBuffer cmd, PassData passData, RTHa break; } - bool outputDepth = copyToDepth || destination.rt.graphicsFormat == GraphicsFormat.None; - cmd.SetKeyword(ShaderGlobalKeywords._OUTPUT_DEPTH, outputDepth); + cmd.SetKeyword(ShaderGlobalKeywords._OUTPUT_DEPTH, copyToDepth); + + // We must perform a yflip if we're rendering into the backbuffer and we have a flipped source texture. + bool yflip = passData.cameraData.IsHandleYFlipped(source) && passData.isDstBackbuffer; Vector2 viewportScale = source.useScaling ? new Vector2(source.rtHandleProperties.rtHandleScale.x, source.rtHandleProperties.rtHandleScale.y) : Vector2.one; - // We y-flip if - // 1) we are blitting from render texture to back buffer(UV starts at bottom) and - // 2) renderTexture starts UV at top - bool isGameViewFinalTarget = passData.cameraData.cameraType == CameraType.Game && destination.nameID == BuiltinRenderTextureType.CameraTarget; -#if ENABLE_VR && ENABLE_XR_MODULE - if (passData.cameraData.xr.enabled) - { - isGameViewFinalTarget |= new RenderTargetIdentifier(destination.nameID, 0, CubemapFace.Unknown, 0) == new RenderTargetIdentifier(passData.cameraData.xr.renderTarget, 0, CubemapFace.Unknown, 0); - } -#endif - bool yflip = passData.cameraData.IsHandleYFlipped(source) != passData.cameraData.IsHandleYFlipped(destination); Vector4 scaleBias = yflip ? new Vector4(viewportScale.x, -viewportScale.y, 0, viewportScale.y) : new Vector4(viewportScale.x, viewportScale.y, 0, 0); - if (isGameViewFinalTarget) + + // When we render to the backbuffer, we update the viewport to cover the entire screen just in case it hasn't been updated already. 
+ if (passData.isDstBackbuffer) cmd.SetViewport(passData.cameraData.pixelRect); copyDepthMaterial.SetTexture(ShaderConstants._CameraDepthAttachment, source); - copyDepthMaterial.SetFloat(ShaderConstants._ZWriteShaderHandle, outputDepth ? 1.0f : 0.0f); + copyDepthMaterial.SetFloat(ShaderConstants._ZWriteShaderHandle, copyToDepth ? 1.0f : 0.0f); Blitter.BlitTexture(cmd, source, scaleBias, copyDepthMaterial, 0); } } @@ -267,12 +265,12 @@ public void Render(RenderGraph renderGraph, TextureHandle destination, TextureHa passData.cameraData = cameraData; passData.copyResolvedDepth = m_CopyResolvedDepth; passData.copyToDepth = CopyToDepth || CopyToDepthXR; + passData.isDstBackbuffer = CopyToBackbuffer || CopyToDepthXR; if (CopyToDepth) { // Writes depth using custom depth output - passData.destination = destination; - builder.SetRenderAttachmentDepth(destination, AccessFlags.Write); + builder.SetRenderAttachmentDepth(destination, AccessFlags.WriteAll); #if UNITY_EDITOR // binding a dummy color target as a workaround to an OSX issue in Editor scene view (UUM-47698). // Also required for preview camera rendering for grid drawn with builtin RP (UUM-55171). 
@@ -283,8 +281,7 @@ public void Render(RenderGraph renderGraph, TextureHandle destination, TextureHa else if (CopyToDepthXR) { // Writes depth using custom depth output - passData.destination = destination; - builder.SetRenderAttachmentDepth(destination, AccessFlags.Write); + builder.SetRenderAttachmentDepth(destination, AccessFlags.WriteAll); #if ENABLE_VR && ENABLE_XR_MODULE // binding a dummy color target as a workaround to NRP depth only rendering limitation: @@ -296,8 +293,7 @@ public void Render(RenderGraph renderGraph, TextureHandle destination, TextureHa else { // Writes depth as "grayscale color" output - passData.destination = destination; - builder.SetRenderAttachment(destination, 0, AccessFlags.Write); + builder.SetRenderAttachment(destination, 0, AccessFlags.WriteAll); } passData.source = source; @@ -312,7 +308,7 @@ public void Render(RenderGraph renderGraph, TextureHandle destination, TextureHa builder.SetRenderFunc((PassData data, RasterGraphContext context) => { - ExecutePass(context.cmd, data, data.source, data.destination); + ExecutePass(context.cmd, data, data.source); }); } } diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/DepthNormalOnlyPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/DepthNormalOnlyPass.cs index 180b93aa115..0ac86cb9e1b 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/DepthNormalOnlyPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/DepthNormalOnlyPass.cs @@ -19,10 +19,14 @@ public class DepthNormalOnlyPass : ScriptableRenderPass internal RenderingLayerUtils.MaskSize renderingLayersMaskSize { get; set; } private FilteringSettings m_FilteringSettings; private PassData m_PassData; - // Constants + + // Statics private static readonly List k_DepthNormals = new List { new ShaderTagId("DepthNormals"), new ShaderTagId("DepthNormalsOnly") }; private static readonly RTHandle[] k_ColorAttachment1 = new RTHandle[1]; private static readonly 
RTHandle[] k_ColorAttachment2 = new RTHandle[2]; + private static readonly int s_CameraDepthTextureID = Shader.PropertyToID("_CameraDepthTexture"); + private static readonly int s_CameraNormalsTextureID = Shader.PropertyToID("_CameraNormalsTexture"); + private static readonly int s_CameraRenderingLayersTextureID = Shader.PropertyToID("_CameraRenderingLayersTexture"); /// /// Creates a new DepthNormalOnlyPass instance. @@ -182,7 +186,7 @@ private RendererListParams InitRendererListParams(UniversalRenderingData renderi return new RendererListParams(renderingData.cullResults, drawSettings, m_FilteringSettings); } - internal void Render(RenderGraph renderGraph, ContextContainer frameData, TextureHandle cameraNormalsTexture, TextureHandle cameraDepthTexture, TextureHandle renderingLayersTexture, uint batchLayerMask = uint.MaxValue, bool postSetGlobalTextures = true) + internal void Render(RenderGraph renderGraph, ContextContainer frameData, TextureHandle cameraNormalsTexture, TextureHandle cameraDepthTexture, TextureHandle renderingLayersTexture, uint batchLayerMask, bool setGlobalDepth, bool setGlobalTextures) { UniversalRenderingData renderingData = frameData.Get(); UniversalCameraData cameraData = frameData.Get(); @@ -210,16 +214,17 @@ internal void Render(RenderGraph renderGraph, ContextContainer frameData, Textur if (cameraData.xr.enabled) builder.EnableFoveatedRasterization(cameraData.xr.supportsFoveatedRendering && cameraData.xrUniversal.canFoveateIntermediatePasses); - UniversalRenderer universalRenderer = cameraData.renderer as UniversalRenderer; - if (postSetGlobalTextures && universalRenderer != null) + if (setGlobalTextures) { - var renderingMode = universalRenderer.renderingModeActual; - if (cameraNormalsTexture.IsValid() && renderingMode != RenderingMode.Deferred) - builder.SetGlobalTextureAfterPass(cameraNormalsTexture, Shader.PropertyToID("_CameraNormalsTexture")); - if (cameraDepthTexture.IsValid() && renderingMode != RenderingMode.Deferred) - 
builder.SetGlobalTextureAfterPass(cameraDepthTexture, Shader.PropertyToID("_CameraDepthTexture")); + builder.SetGlobalTextureAfterPass(cameraNormalsTexture, s_CameraNormalsTextureID); + + if (passData.enableRenderingLayers) + builder.SetGlobalTextureAfterPass(renderingLayersTexture, s_CameraRenderingLayersTextureID); } + if (setGlobalDepth) + builder.SetGlobalTextureAfterPass(cameraDepthTexture, s_CameraDepthTextureID); + // TODO RENDERGRAPH: culling? force culling off for testing builder.AllowPassCulling(false); // Required here because of RenderingLayerUtils.SetupProperties diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/DepthOnlyPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/DepthOnlyPass.cs index 7888b58795c..6e48fba5877 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/DepthOnlyPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/DepthOnlyPass.cs @@ -12,8 +12,6 @@ namespace UnityEngine.Rendering.Universal.Internal /// public class DepthOnlyPass : ScriptableRenderPass { - private static readonly ShaderTagId k_ShaderTagId = new ShaderTagId("DepthOnly"); - private RTHandle destination { get; set; } private GraphicsFormat depthStencilFormat; internal ShaderTagId shaderTagId { get; set; } = k_ShaderTagId; @@ -21,6 +19,10 @@ public class DepthOnlyPass : ScriptableRenderPass private PassData m_PassData; FilteringSettings m_FilteringSettings; + // Statics + private static readonly ShaderTagId k_ShaderTagId = new ShaderTagId("DepthOnly"); + private static readonly int s_CameraDepthTextureID = Shader.PropertyToID("_CameraDepthTexture"); + /// /// Creates a new DepthOnlyPass instance. 
/// @@ -119,7 +121,7 @@ private RendererListParams InitRendererListParams(UniversalRenderingData renderi return new RendererListParams(renderingData.cullResults, drawSettings, m_FilteringSettings); } - internal void Render(RenderGraph renderGraph, ContextContainer frameData, ref TextureHandle cameraDepthTexture, uint batchLayerMask = uint.MaxValue) + internal void Render(RenderGraph renderGraph, ContextContainer frameData, ref TextureHandle cameraDepthTexture, uint batchLayerMask, bool setGlobalDepth) { UniversalRenderingData renderingData = frameData.Get(); UniversalCameraData cameraData = frameData.Get(); @@ -134,6 +136,9 @@ internal void Render(RenderGraph renderGraph, ContextContainer frameData, ref Te builder.SetRenderAttachmentDepth(cameraDepthTexture, AccessFlags.Write); + if (setGlobalDepth) + builder.SetGlobalTextureAfterPass(cameraDepthTexture, s_CameraDepthTextureID); + // TODO RENDERGRAPH: culling? force culling off for testing builder.AllowPassCulling(false); builder.AllowGlobalStateModification(true); diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/DrawSkyboxPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/DrawSkyboxPass.cs index 8b19673bdb1..ffd5571530e 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/DrawSkyboxPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/DrawSkyboxPass.cs @@ -12,8 +12,6 @@ namespace UnityEngine.Rendering.Universal /// public class DrawSkyboxPass : ScriptableRenderPass { - static readonly int s_CameraDepthTextureID = Shader.PropertyToID("_CameraDepthTexture"); - /// /// Creates a new DrawSkyboxPass instance. 
/// @@ -131,7 +129,7 @@ private void InitPassData(ref PassData passData, in XRPass xr, in RendererListHa passData.skyRendererListHandle = handle; } - internal void Render(RenderGraph renderGraph, ContextContainer frameData, ScriptableRenderContext context, TextureHandle colorTarget, TextureHandle depthTarget, Material skyboxMaterial, bool hasDepthCopy = false) + internal void Render(RenderGraph renderGraph, ContextContainer frameData, ScriptableRenderContext context, TextureHandle colorTarget, TextureHandle depthTarget, Material skyboxMaterial) { UniversalCameraData cameraData = frameData.Get(); UniversalResourceData resourceData = frameData.Get(); @@ -156,9 +154,6 @@ internal void Render(RenderGraph renderGraph, ContextContainer frameData, Script builder.SetRenderAttachment(colorTarget, 0, AccessFlags.Write); builder.SetRenderAttachmentDepth(depthTarget, AccessFlags.Write); - if (hasDepthCopy) - builder.UseGlobalTexture(s_CameraDepthTextureID); - builder.AllowPassCulling(false); if (cameraData.xr.enabled) { diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/GBufferPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/GBufferPass.cs index 74cde264182..1804d2443b7 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/GBufferPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/GBufferPass.cs @@ -10,13 +10,15 @@ namespace UnityEngine.Rendering.Universal.Internal // Render all tiled-based deferred lights. 
internal class GBufferPass : ScriptableRenderPass { - internal static readonly int s_CameraNormalsTextureID = Shader.PropertyToID("_CameraNormalsTexture"); - static ShaderTagId s_ShaderTagLit = new ShaderTagId("Lit"); - static ShaderTagId s_ShaderTagSimpleLit = new ShaderTagId("SimpleLit"); - static ShaderTagId s_ShaderTagUnlit = new ShaderTagId("Unlit"); - static ShaderTagId s_ShaderTagComplexLit = new ShaderTagId("ComplexLit"); - static ShaderTagId s_ShaderTagUniversalGBuffer = new ShaderTagId("UniversalGBuffer"); - static ShaderTagId s_ShaderTagUniversalMaterialType = new ShaderTagId("UniversalMaterialType"); + // Statics + private static readonly int s_CameraNormalsTextureID = Shader.PropertyToID("_CameraNormalsTexture"); + private static readonly int s_CameraRenderingLayersTextureID = Shader.PropertyToID("_CameraRenderingLayersTexture"); + private static readonly ShaderTagId s_ShaderTagLit = new ShaderTagId("Lit"); + private static readonly ShaderTagId s_ShaderTagSimpleLit = new ShaderTagId("SimpleLit"); + private static readonly ShaderTagId s_ShaderTagUnlit = new ShaderTagId("Unlit"); + private static readonly ShaderTagId s_ShaderTagComplexLit = new ShaderTagId("ComplexLit"); + private static readonly ShaderTagId s_ShaderTagUniversalGBuffer = new ShaderTagId("UniversalGBuffer"); + private static readonly ShaderTagId s_ShaderTagUniversalMaterialType = new ShaderTagId("UniversalMaterialType"); DeferredLights m_DeferredLights; @@ -148,6 +150,7 @@ public override void Execute(ScriptableRenderContext context, ref RenderingData } static void ExecutePass(RasterCommandBuffer cmd, PassData data, RendererList rendererList, RendererList errorRendererList) + { bool usesRenderingLayers = data.deferredLights.UseRenderingLayers && !data.deferredLights.HasRenderingLayerPrepass; if (usesRenderingLayers) @@ -223,7 +226,7 @@ private void InitRendererLists( ref PassData passData, ScriptableRenderContext c } } - internal void Render(RenderGraph renderGraph, ContextContainer 
frameData, TextureHandle cameraColor, TextureHandle cameraDepth) + internal void Render(RenderGraph renderGraph, ContextContainer frameData, TextureHandle cameraColor, TextureHandle cameraDepth, bool setGlobalTextures) { UniversalResourceData resourceData = frameData.Get(); UniversalRenderingData renderingData = frameData.Get(); @@ -232,6 +235,8 @@ internal void Render(RenderGraph renderGraph, ContextContainer frameData, Textur TextureHandle[] gbuffer; + bool useCameraRenderingLayersTexture = m_DeferredLights.UseRenderingLayers && !m_DeferredLights.UseLightLayers; + using (var builder = renderGraph.AddRasterRenderPass(passName, out var passData, profilingSampler)) { // Note: This code is pretty confusing as passData.gbuffer[i] and gbuffer[i] actually point to the same array but seem to be mixed in this code. @@ -244,7 +249,7 @@ internal void Render(RenderGraph renderGraph, ContextContainer frameData, Textur if (i == m_DeferredLights.GBufferNormalSmoothnessIndex && m_DeferredLights.HasNormalPrepass) gbuffer[i] = resourceData.cameraNormalsTexture; - else if (m_DeferredLights.UseRenderingLayers && i == m_DeferredLights.GBufferRenderingLayers && !m_DeferredLights.UseLightLayers) + else if (i == m_DeferredLights.GBufferRenderingLayers && useCameraRenderingLayersTexture) gbuffer[i] = resourceData.renderingLayersTexture; else if (i != m_DeferredLights.GBufferLightingIndex) { @@ -271,6 +276,14 @@ internal void Render(RenderGraph renderGraph, ContextContainer frameData, Textur builder.UseRendererList(passData.rendererListHdl); builder.UseRendererList(passData.objectsWithErrorRendererListHdl); + if (setGlobalTextures) + { + builder.SetGlobalTextureAfterPass(resourceData.cameraNormalsTexture, s_CameraNormalsTextureID); + + if (useCameraRenderingLayersTexture) + builder.SetGlobalTextureAfterPass(resourceData.renderingLayersTexture, s_CameraRenderingLayersTextureID); + } + builder.AllowPassCulling(false); builder.AllowGlobalStateModification(true); diff --git 
a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/MainLightShadowCasterPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/MainLightShadowCasterPass.cs index 492219dd06f..ab3d091d064 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/MainLightShadowCasterPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/MainLightShadowCasterPass.cs @@ -43,6 +43,8 @@ private static class MainLightShadowConstantBuffer ShadowSliceData[] m_CascadeSlices; Vector4[] m_CascadeSplitDistances; + private RenderTextureDescriptor m_MainLightShadowDescriptor; + bool m_CreateEmptyShadowmap; bool m_EmptyShadowmapNeedsClear = false; @@ -164,7 +166,7 @@ public bool Setup(UniversalRenderingData renderingData, UniversalCameraData came return SetupForEmptyRendering(cameraData.renderer.stripShadowsOffVariants); } - ShadowUtils.ShadowRTReAllocateIfNeeded(ref m_MainLightShadowmapTexture, renderTargetWidth, renderTargetHeight, k_ShadowmapBufferBits, name: k_MainLightShadowMapTextureName); + UpdateTextureDescriptorIfNeeded(); m_MaxShadowDistanceSq = cameraData.maxShadowDistance * cameraData.maxShadowDistance; m_CascadeBorder = shadowData.mainLightShadowCascadeBorder; @@ -174,6 +176,17 @@ public bool Setup(UniversalRenderingData renderingData, UniversalCameraData came return true; } + private void UpdateTextureDescriptorIfNeeded() + { + if ( m_MainLightShadowDescriptor.width != renderTargetWidth + || m_MainLightShadowDescriptor.height != renderTargetHeight + || m_MainLightShadowDescriptor.depthBufferBits != k_ShadowmapBufferBits + || m_MainLightShadowDescriptor.colorFormat != RenderTextureFormat.Shadowmap) + { + m_MainLightShadowDescriptor = new RenderTextureDescriptor(renderTargetWidth, renderTargetHeight, RenderTextureFormat.Shadowmap, k_ShadowmapBufferBits); + } + } + bool SetupForEmptyRendering(bool stripShadowsOffVariants) { if (!stripShadowsOffVariants) @@ -182,10 +195,6 @@ bool SetupForEmptyRendering(bool 
stripShadowsOffVariants) m_CreateEmptyShadowmap = true; useNativeRenderPass = false; - // Required for scene view camera(URP renderer not initialized) - if(ShadowUtils.ShadowRTReAllocateIfNeeded(ref m_EmptyMainLightShadowmapTexture, k_EmptyShadowMapDimensions, k_EmptyShadowMapDimensions, k_ShadowmapBufferBits, name: k_EmptyMainLightShadowMapTextureName)) - m_EmptyShadowmapNeedsClear = true; - return true; } @@ -193,29 +202,36 @@ bool SetupForEmptyRendering(bool stripShadowsOffVariants) [Obsolete(DeprecationMessage.CompatibilityScriptingAPIObsolete, false)] public override void Configure(CommandBuffer cmd, RenderTextureDescriptor cameraTextureDescriptor) { + // Disable obsolete warning for internal usage + #pragma warning disable CS0618 - if (m_CreateEmptyShadowmap && !m_EmptyShadowmapNeedsClear) + if (m_CreateEmptyShadowmap) { - // UUM-63146 - glClientWaitSync: Expected application to have kicked everything until job: 96089 (possibly by calling glFlush)" are thrown in the Android Player on some devices with PowerVR Rogue GE8320 - // Resetting of target would clean up the color attachment buffers and depth attachment buffers, which inturn is preventing the leak in the said platform. This is likely a symptomatic fix, but is solving the problem for now. 
+ // Required for scene view camera(URP renderer not initialized) + if (ShadowUtils.ShadowRTReAllocateIfNeeded(ref m_EmptyMainLightShadowmapTexture, k_EmptyShadowMapDimensions, k_EmptyShadowMapDimensions, k_ShadowmapBufferBits, name: k_EmptyMainLightShadowMapTextureName)) + m_EmptyShadowmapNeedsClear = true; - if (Application.platform == RuntimePlatform.Android && PlatformAutoDetect.isRunningOnPowerVRGPU) - ResetTarget(); + if (!m_EmptyShadowmapNeedsClear) + { + // UUM-63146 - glClientWaitSync: Expected application to have kicked everything until job: 96089 (possibly by calling glFlush)" are thrown in the Android Player on some devices with PowerVR Rogue GE8320 + // Resetting of target would clean up the color attachment buffers and depth attachment buffers, which inturn is preventing the leak in the said platform. This is likely a symptomatic fix, but is solving the problem for now. + if (Application.platform == RuntimePlatform.Android && PlatformAutoDetect.isRunningOnPowerVRGPU) + ResetTarget(); - return; - } + return; + } - // Disable obsolete warning for internal usage - #pragma warning disable CS0618 - if (m_CreateEmptyShadowmap) - { ConfigureTarget(m_EmptyMainLightShadowmapTexture); m_EmptyShadowmapNeedsClear = false; } else + { + ShadowUtils.ShadowRTReAllocateIfNeeded(ref m_MainLightShadowmapTexture, renderTargetWidth, renderTargetHeight, k_ShadowmapBufferBits, name: k_MainLightShadowMapTextureName); ConfigureTarget(m_MainLightShadowmapTexture); + } ConfigureClear(ClearFlag.All, Color.black); + #pragma warning restore CS0618 } @@ -459,7 +475,7 @@ internal TextureHandle Render(RenderGraph graph, ContextContainer frameData) builder.UseRendererList(passData.shadowRendererListsHandle[cascadeIndex]); } - shadowTexture = UniversalRenderer.CreateRenderGraphTexture(graph, m_MainLightShadowmapTexture.rt.descriptor, "_MainLightShadowmapTexture", true, ShadowUtils.m_ForceShadowPointSampling ? 
FilterMode.Point : FilterMode.Bilinear); + shadowTexture = UniversalRenderer.CreateRenderGraphTexture(graph, m_MainLightShadowDescriptor, k_MainLightShadowMapTextureName, true, ShadowUtils.m_ForceShadowPointSampling ? FilterMode.Point : FilterMode.Bilinear); builder.SetRenderAttachmentDepth(shadowTexture, AccessFlags.Write); } else diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPass.cs index 640c8cb0e5c..b2aab74716a 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPass.cs @@ -32,6 +32,7 @@ internal partial class PostProcessPass : ScriptableRenderPass RTHandle m_StreakTmpTexture; RTHandle m_StreakTmpTexture2; RTHandle m_ScreenSpaceLensFlareResult; + RTHandle m_UserLut; const string k_RenderPostProcessingTag = "Blit PostProcessing Effects"; const string k_RenderFinalPostProcessingTag = "Blit Final PostProcessing"; @@ -265,6 +266,7 @@ public void Dispose() m_StreakTmpTexture?.Release(); m_StreakTmpTexture2?.Release(); m_ScreenSpaceLensFlareResult?.Release(); + m_UserLut?.Release(); } /// diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPassRenderGraph.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPassRenderGraph.cs index e8868e4b4f0..f9cdb2eabf6 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPassRenderGraph.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/PostProcessPassRenderGraph.cs @@ -8,7 +8,6 @@ namespace UnityEngine.Rendering.Universal internal partial class PostProcessPass : ScriptableRenderPass { static readonly int s_CameraDepthTextureID = Shader.PropertyToID("_CameraDepthTexture"); - static readonly int s_CameraOpaqueTextureID = Shader.PropertyToID("_CameraOpaqueTexture"); private class 
UpdateCameraResolutionPassData { @@ -375,8 +374,10 @@ public void RenderBloomTexture(RenderGraph renderGraph, in TextureHandle source, throw new ArgumentOutOfRangeException(); } - int tw = m_Descriptor.width >> downres; - int th = m_Descriptor.height >> downres; + //We should set the limit the downres result to ensure we dont turn 1x1 textures, which should technically be valid + //into 0x0 textures which will be invalid + int tw = Mathf.Max(1, m_Descriptor.width >> downres); + int th = Mathf.Max(1, m_Descriptor.height >> downres); // Determine the iteration count int maxSize = Mathf.Max(tw, th); @@ -944,7 +945,7 @@ private void RenderTemporalAA(RenderGraph renderGraph, UniversalResourceData res private void RenderSTP(RenderGraph renderGraph, UniversalResourceData resourceData, UniversalCameraData cameraData, ref TextureHandle source, out TextureHandle destination) { - TextureHandle cameraDepth = resourceData.cameraDepth; + TextureHandle cameraDepth = resourceData.cameraDepthTexture; TextureHandle motionVectors = resourceData.motionVectorColor; Debug.Assert(motionVectors.IsValid(), "MotionVectors are invalid. STP requires a motion vector texture."); @@ -1789,6 +1790,27 @@ private class UberPostPassData internal bool enableAlphaOutput; } + TextureHandle TryGetCachedUserLutTextureHandle(RenderGraph renderGraph) + { + if (m_ColorLookup.texture.value == null) + { + if (m_UserLut != null) + { + m_UserLut.Release(); + m_UserLut = null; + } + } + else + { + if (m_UserLut == null || m_UserLut.externalTexture != m_ColorLookup.texture.value) + { + m_UserLut?.Release(); + m_UserLut = RTHandles.Alloc(m_ColorLookup.texture.value); + } + } + return m_UserLut != null ? 
renderGraph.ImportTexture(m_UserLut) : TextureHandle.nullHandle; + } + public void RenderUberPost(RenderGraph renderGraph, ContextContainer frameData, UniversalCameraData cameraData, UniversalPostProcessingData postProcessingData, in TextureHandle sourceTexture, in TextureHandle destTexture, in TextureHandle lutTexture, in TextureHandle overlayUITexture, bool requireHDROutput, bool enableAlphaOutput, bool resolveToDebugScreen) { var material = m_Materials.uber; @@ -1800,8 +1822,7 @@ public void RenderUberPost(RenderGraph renderGraph, ContextContainer frameData, float postExposureLinear = Mathf.Pow(2f, m_ColorAdjustments.postExposure.value); Vector4 lutParams = new Vector4(1f / lutWidth, 1f / lutHeight, lutHeight - 1f, postExposureLinear); - RTHandle userLutRThdl = m_ColorLookup.texture.value ? RTHandles.Alloc(m_ColorLookup.texture.value) : null; - TextureHandle userLutTexture = userLutRThdl != null ? renderGraph.ImportTexture(userLutRThdl) : TextureHandle.nullHandle; + TextureHandle userLutTexture = TryGetCachedUserLutTextureHandle(renderGraph); Vector4 userLutParams = !m_ColorLookup.IsActive() ? 
Vector4.zero : new Vector4(1f / m_ColorLookup.texture.value.width, @@ -1811,17 +1832,6 @@ public void RenderUberPost(RenderGraph renderGraph, ContextContainer frameData, using (var builder = renderGraph.AddRasterRenderPass("Blit Post Processing", out var passData, ProfilingSampler.Get(URPProfileId.RG_UberPost))) { - UniversalResourceData resourceData = frameData.Get(); - - // Only the UniversalRenderer guarantees that global textures will be available at this point - bool isUniversalRenderer = (cameraData.renderer as UniversalRenderer) != null; - - if (cameraData.requiresDepthTexture && isUniversalRenderer) - builder.UseGlobalTexture(s_CameraDepthTextureID); - - if (cameraData.requiresOpaqueTexture && isUniversalRenderer) - builder.UseGlobalTexture(s_CameraOpaqueTextureID); - builder.AllowGlobalStateModification(true); passData.destinationTexture = destTexture; builder.SetRenderAttachment(destTexture, 0, AccessFlags.Write); diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/ScreenSpaceAmbientOcclusionPass.cs b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/ScreenSpaceAmbientOcclusionPass.cs index 56e0c255fd8..b01de5a8d28 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/Passes/ScreenSpaceAmbientOcclusionPass.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/Passes/ScreenSpaceAmbientOcclusionPass.cs @@ -308,7 +308,6 @@ private class SSAOPassData internal TextureHandle AOTexture; internal TextureHandle finalTexture; internal TextureHandle blurTexture; - internal TextureHandle cameraDepthTexture; internal TextureHandle cameraNormalsTexture; } @@ -334,9 +333,7 @@ public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer out TextureHandle finalTexture); // Get the resources - UniversalRenderer universalRenderer = cameraData.renderer as UniversalRenderer; - bool isDeferred = universalRenderer != null && universalRenderer.renderingModeActual == RenderingMode.Deferred; - TextureHandle 
cameraDepthTexture = isDeferred ? resourceData.activeDepthTexture : resourceData.cameraDepthTexture; + TextureHandle cameraDepthTexture = resourceData.cameraDepthTexture; TextureHandle cameraNormalsTexture = resourceData.cameraNormalsTexture; // Update keywords and other shader params @@ -354,7 +351,6 @@ public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer passData.AOTexture = aoTexture; passData.finalTexture = finalTexture; passData.blurTexture = blurTexture; - passData.cameraDepthTexture = isDeferred ? cameraDepthTexture : TextureHandle.nullHandle; // Declare input textures builder.UseTexture(passData.AOTexture, AccessFlags.ReadWrite); @@ -380,9 +376,6 @@ public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer builder.SetRenderFunc((SSAOPassData data, UnsafeGraphContext rgContext) => { - if (data.cameraDepthTexture.IsValid()) - data.material.SetTexture(s_CameraDepthTextureID, data.cameraDepthTexture); - CommandBuffer cmd = CommandBufferHelpers.GetNativeCommandBuffer(rgContext.cmd); RenderBufferLoadAction finalLoadAction = data.afterOpaque ? 
RenderBufferLoadAction.Load : RenderBufferLoadAction.DontCare; @@ -390,9 +383,6 @@ public override void RecordRenderGraph(RenderGraph renderGraph, ContextContainer if (data.cameraColor.IsValid()) PostProcessUtils.SetSourceSize(cmd, data.cameraColor); - if (data.cameraDepthTexture.IsValid()) - data.material.SetTexture(s_CameraDepthTextureID, data.cameraDepthTexture); - if (data.cameraNormalsTexture.IsValid()) data.material.SetTexture(s_CameraNormalsTextureID, data.cameraNormalsTexture); diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/RTHandleUtils.cs b/Packages/com.unity.render-pipelines.universal/Runtime/RTHandleUtils.cs index 4592fa15720..2ce3872a328 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/RTHandleUtils.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/RTHandleUtils.cs @@ -6,6 +6,7 @@ using Unity.Collections.LowLevel.Unsafe; using UnityEngine.Rendering; using UnityEngine.Rendering.RenderGraphModule; +using UnityEngine.Experimental.Rendering; namespace UnityEngine.Rendering.Universal { @@ -175,12 +176,16 @@ internal int GetHashCodeWithNameHash(in TextureDesc texDesc) internal static TextureDesc CreateTextureDesc(RenderTextureDescriptor desc, TextureSizeMode textureSizeMode = TextureSizeMode.Explicit, int anisoLevel = 1, float mipMapBias = 0, FilterMode filterMode = FilterMode.Point, TextureWrapMode wrapMode = TextureWrapMode.Clamp, string name = "") - { + { + Assertions.Assert.IsFalse(desc.graphicsFormat != GraphicsFormat.None && desc.depthStencilFormat != GraphicsFormat.None, + "The RenderTextureDescriptor used to create a TextureDesc contains both graphicsFormat and depthStencilFormat which is not allowed."); + + var format = (desc.depthStencilFormat != GraphicsFormat.None) ? 
desc.depthStencilFormat : desc.graphicsFormat; + TextureDesc rgDesc = new TextureDesc(desc.width, desc.height); rgDesc.sizeMode = textureSizeMode; rgDesc.slices = desc.volumeDepth; - rgDesc.depthBufferBits = (DepthBits)desc.depthBufferBits; - rgDesc.colorFormat = desc.graphicsFormat; + rgDesc.format = format; rgDesc.filterMode = filterMode; rgDesc.wrapMode = wrapMode; rgDesc.dimension = desc.dimension; diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/RenderingLayerUtils.cs b/Packages/com.unity.render-pipelines.universal/Runtime/RenderingLayerUtils.cs index 3cec1a8cf1c..f71303af750 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/RenderingLayerUtils.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/RenderingLayerUtils.cs @@ -74,7 +74,7 @@ internal static bool RequireRenderingLayers(List rend // Make sure texture has enough bits to encode all rendering layers in urp global settings if (UniversalRenderPipelineGlobalSettings.instance) { - int count = RenderingLayerMask.GetLastDefinedRenderingLayerIndex(); + int count = RenderingLayerMask.GetRenderingLayerCount(); MaskSize maskSize = GetMaskSize(count); combinedMaskSize = Combine(combinedMaskSize, maskSize); } @@ -109,7 +109,16 @@ public static GraphicsFormat GetFormat(MaskSize maskSize) case MaskSize.Bits8: return GraphicsFormat.R8_UNorm; case MaskSize.Bits16: - return GraphicsFormat.R16_UNorm; + { + //webgpu does not support r16_unorm as a render target format +#if UNITY_2023_2_OR_NEWER + if (SystemInfo.graphicsDeviceType == GraphicsDeviceType.WebGPU) + { + return GraphicsFormat.R32_SFloat; + } +#endif + return GraphicsFormat.R16_UNorm; + } case MaskSize.Bits24: case MaskSize.Bits32: return GraphicsFormat.R32_SFloat; diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/RenderingUtils.cs b/Packages/com.unity.render-pipelines.universal/Runtime/RenderingUtils.cs index abd50953912..b759431eb72 100644 --- 
a/Packages/com.unity.render-pipelines.universal/Runtime/RenderingUtils.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/RenderingUtils.cs @@ -610,9 +610,11 @@ internal static bool RTHandleNeedsReAlloc( return true; if (!scaled && (handle.rt.width != descriptor.width || handle.rt.height != descriptor.height)) return true; + + var rtHandleFormat = (handle.rt.descriptor.depthStencilFormat != GraphicsFormat.None) ? handle.rt.descriptor.depthStencilFormat : handle.rt.descriptor.graphicsFormat; + return - (DepthBits)handle.rt.descriptor.depthBufferBits != descriptor.depthBufferBits || - (handle.rt.descriptor.depthBufferBits == (int)DepthBits.None && handle.rt.descriptor.graphicsFormat != descriptor.colorFormat) || + rtHandleFormat != descriptor.format || handle.rt.descriptor.dimension != descriptor.dimension || handle.rt.descriptor.enableRandomWrite != descriptor.enableRandomWrite || handle.rt.descriptor.useMipMap != descriptor.useMipMap || @@ -819,6 +821,8 @@ public static bool ReAllocateHandleIfNeeded( float mipMapBias = 0, string name = "") { + Assertions.Assert.IsTrue(descriptor.graphicsFormat == GraphicsFormat.None ^ descriptor.depthStencilFormat == GraphicsFormat.None); + TextureDesc requestRTDesc = RTHandleResourcePool.CreateTextureDesc(descriptor, TextureSizeMode.Explicit, anisoLevel, 0, filterMode, wrapMode, name); if (RTHandleNeedsReAlloc(handle, requestRTDesc, false)) { diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/ShadowUtils.cs b/Packages/com.unity.render-pipelines.universal/Runtime/ShadowUtils.cs index c60a56df5ee..894a6267f47 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/ShadowUtils.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/ShadowUtils.cs @@ -446,7 +446,7 @@ static Vector4 GetShadowBias(ref VisibleLight shadowLight, int shadowLightIndex, normalBias *= kernelRadius; } - return new Vector4(depthBias, normalBias, 0.0f, 0.0f); + return new Vector4(depthBias, normalBias, 
(float)shadowLight.lightType, 0.0f); } @@ -762,6 +762,14 @@ internal static bool FastApproximately(float a, float b) return Mathf.Abs(a - b) < 0.000001f; } + internal static bool FastApproximately(Vector4 a, Vector4 b) + { + return FastApproximately(a.x, b.x) + && FastApproximately(a.y, b.y) + && FastApproximately(a.z, b.z) + && FastApproximately(a.w, b.w); + } + internal const int kMinimumPunctualLightHardShadowResolution = 8; internal const int kMinimumPunctualLightSoftShadowResolution = 16; // Minimal shadow map resolution required to have meaningful shadows visible during lighting diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/StpUtils.cs b/Packages/com.unity.render-pipelines.universal/Runtime/StpUtils.cs index 5c174cf5f18..e0cfb953104 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/StpUtils.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/StpUtils.cs @@ -128,7 +128,7 @@ static internal void Execute(RenderGraph renderGraph, UniversalResourceData reso debugView = renderGraph.CreateTexture(new TextureDesc(cameraData.pixelWidth, cameraData.pixelHeight, false, (cameraData.xr.enabled && cameraData.xr.singlePassEnabled)) { name = "STP Debug View", - colorFormat = GraphicsFormat.R8G8B8A8_UNorm, + format = GraphicsFormat.R8G8B8A8_UNorm, clearBuffer = true, enableRandomWrite = true }); diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalAdditionalCameraData.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalAdditionalCameraData.cs index fbed5a4b341..150386bdf07 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalAdditionalCameraData.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalAdditionalCameraData.cs @@ -292,7 +292,6 @@ public static string GetName(this CameraRenderType type) /// [DisallowMultipleComponent] [RequireComponent(typeof(Camera))] - [ImageEffectAllowedInSceneView] [ExecuteAlways] // NOTE: This is required to get 
calls to OnDestroy() always. Graphics resources are released in OnDestroy(). [URPHelpURL("universal-additional-camera-data")] public class UniversalAdditionalCameraData : MonoBehaviour, ISerializationCallbackReceiver, IAdditionalData @@ -849,11 +848,31 @@ public void OnDrawGizmos() /// public void OnDestroy() { + //You cannot call scriptableRenderer here. If you where not in URP, this will actually create the renderer. + //This can occurs in cross pipeline but also on Dedicated Server where the gfx device do not run. (UUM-75237) + //Use GetRawRenderer() instead. + m_Camera.DestroyVolumeStack(this); - if (camera.cameraType != CameraType.SceneView ) - scriptableRenderer?.ReleaseRenderTargets(); + if (camera.cameraType != CameraType.SceneView) + GetRawRenderer()?.ReleaseRenderTargets(); m_History?.Dispose(); m_History = null; } + + + ScriptableRenderer GetRawRenderer() + { + if (UniversalRenderPipeline.asset is null) + return null; + + ReadOnlySpan renderers = UniversalRenderPipeline.asset.renderers; + if (renderers == null || renderers.IsEmpty) + return null; + + if (m_RendererIndex >= renderers.Length || m_RendererIndex < 0) + return null; + + return renderers[m_RendererIndex]; + } } } diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipeline.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipeline.cs index 9e44a29b3bf..49758329e2c 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipeline.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipeline.cs @@ -664,6 +664,12 @@ internal static void RenderSingleCameraInternal(ScriptableRenderContext context, return; } + if (camera.targetTexture.width == 0 || camera.targetTexture.height == 0 || camera.pixelWidth == 0 || camera.pixelHeight == 0) + { + Debug.LogWarning($"Camera '{camera.name}' has an invalid render target size (width: {camera.targetTexture.width}, height: {camera.targetTexture.height}) or 
pixel dimensions (width: {camera.pixelWidth}, height: {camera.pixelHeight}). Camera will be skipped."); + return; + } + var frameData = GetRenderer(camera, additionalCameraData).frameData; var cameraData = CreateCameraData(frameData, camera, additionalCameraData, true); InitializeAdditionalCameraData(camera, additionalCameraData, true, cameraData); diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineCore.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineCore.cs index ee23e2fa62c..a6da8d184a8 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineCore.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderPipelineCore.cs @@ -1537,7 +1537,7 @@ internal static RenderTextureDescriptor CreateRenderTextureDescriptor(Camera cam { desc = new RenderTextureDescriptor(cameraData.scaledWidth, cameraData.scaledHeight); desc.graphicsFormat = MakeRenderTextureGraphicsFormat(isHdrEnabled, requestHDRColorBufferPrecision, needsAlpha); - desc.depthBufferBits = 32; + desc.depthStencilFormat = SystemInfo.GetGraphicsFormat(DefaultFormat.DepthStencil); desc.msaaSamples = msaaSamples; desc.sRGB = (QualitySettings.activeColorSpace == ColorSpace.Linear); } diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderer.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderer.cs index 23758db9acc..249f38e9774 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderer.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRenderer.cs @@ -39,12 +39,10 @@ public enum DepthPrimingMode /// public sealed partial class UniversalRenderer : ScriptableRenderer { - #if UNITY_SWITCH || UNITY_ANDROID || UNITY_EMBEDDED_LINUX || UNITY_QNX - const GraphicsFormat k_DepthStencilFormat = GraphicsFormat.D24_UNorm_S8_UInt; - const int k_DepthBufferBits = 24; - #else - const GraphicsFormat 
k_DepthStencilFormat = GraphicsFormat.D32_SFloat_S8_UInt; - const int k_DepthBufferBits = 32; +#if UNITY_SWITCH || UNITY_ANDROID || UNITY_EMBEDDED_LINUX || UNITY_QNX + const GraphicsFormat k_DepthStencilFormatDefault = GraphicsFormat.D24_UNorm_S8_UInt; +#else + const GraphicsFormat k_DepthStencilFormatDefault = GraphicsFormat.D32_SFloat_S8_UInt; #endif const int k_FinalBlitPassQueueOffset = 1; @@ -176,6 +174,9 @@ protected internal override bool SupportsMotionVectors() internal LayerMask opaqueLayerMask { get; set; } internal LayerMask transparentLayerMask { get; set; } + internal GraphicsFormat cameraDepthTextureFormat { get => (m_CameraDepthTextureFormat != DepthFormat.Default) ? (GraphicsFormat)m_CameraDepthTextureFormat : k_DepthStencilFormatDefault; } + internal GraphicsFormat cameraDepthAttachmentFormat { get => (m_CameraDepthAttachmentFormat != DepthFormat.Default) ? (GraphicsFormat)m_CameraDepthAttachmentFormat : k_DepthStencilFormatDefault; } + /// /// Constructor for the Universal Renderer. /// @@ -567,6 +568,20 @@ private void SetupFinalPassDebug(UniversalCameraData cameraData) bool IsDepthPrimingEnabled(UniversalCameraData cameraData) { +#if UNITY_EDITOR + // We need to disable depth-priming for DrawCameraMode.Wireframe, since depth-priming forces ZTest to Equal + // for opaques rendering, which breaks wireframe rendering. 
+ if (cameraData.isSceneViewCamera) + { + foreach (var sceneViewObject in UnityEditor.SceneView.sceneViews) + { + var sceneView = sceneViewObject as UnityEditor.SceneView; + if (sceneView != null && sceneView.camera == cameraData.camera && sceneView.cameraMode.drawMode == UnityEditor.DrawCameraMode.Wireframe) + return false; + } + } +#endif + // depth priming requires an extra depth copy, disable it on platforms not supporting it (like GLES when MSAA is on) if (!CanCopyDepth(cameraData)) return false; @@ -662,7 +677,7 @@ public override void Setup(ScriptableRenderContext context, ref RenderingData re RenderingUtils.ReAllocateHandleIfNeeded(ref DebugHandler.DebugScreenColorHandle, colorDesc, name: "_DebugScreenColor"); RenderTextureDescriptor depthDesc = cameraData.cameraTargetDescriptor; - DebugHandler.ConfigureDepthDescriptorForDebugScreen(ref depthDesc, k_DepthStencilFormat, cameraData.pixelWidth, cameraData.pixelHeight); + DebugHandler.ConfigureDepthDescriptorForDebugScreen(ref depthDesc, cameraDepthTextureFormat, cameraData.pixelWidth, cameraData.pixelHeight); RenderingUtils.ReAllocateHandleIfNeeded(ref DebugHandler.DebugScreenDepthHandle, depthDesc, name: "_DebugScreenDepth"); } @@ -677,6 +692,11 @@ public override void Setup(ScriptableRenderContext context, ref RenderingData re if (cameraData.cameraType != CameraType.Game) useRenderPassEnabled = false; +#if UNITY_EDITOR + useRenderPassEnabled = false; // UUM-73849 : Disable Native Render Pass in the editor for compatibility mode. + // (Compatibility mode is no longer in development. Disable it to prevent unexpected problems.) 
+#endif + // Because of the shortcutting done by depth only offscreen cameras, useDepthPriming must be computed early useDepthPriming = IsDepthPrimingEnabled(cameraData); @@ -889,7 +909,7 @@ public override void Setup(ScriptableRenderContext context, ref RenderingData re if (useRenderPassEnabled || useDepthPriming) createColorTexture |= createDepthTexture; - // If gfxAPI yflips intermediate texture, we can't mix-use backbuffer(not flipped) and render texture(flipped) due to different flip state/clipspace y. + // If gfxAPI yflips intermediate texture, we can't mix-use backbuffer(not flipped) and render texture(flipped) due to different flip state/clipspace y. // This introduces the final blit pass. if(SystemInfo.graphicsUVStartsAtTop) createColorTexture |= createDepthTexture; @@ -1029,7 +1049,7 @@ public override void Setup(ScriptableRenderContext context, ref RenderingData re if (requiresDepthPrepass && this.renderingModeActual != RenderingMode.Deferred) { depthDescriptor.graphicsFormat = GraphicsFormat.None; - depthDescriptor.depthStencilFormat = (m_CameraDepthTextureFormat != DepthFormat.Default) ? 
(GraphicsFormat)m_CameraDepthTextureFormat : k_DepthStencilFormat; + depthDescriptor.depthStencilFormat = cameraDepthTextureFormat; } else { @@ -1376,7 +1396,7 @@ public override void Setup(ScriptableRenderContext context, ref RenderingData re bool outputToHDR = cameraData.isHDROutputActive; if (shouldRenderUI && outputToHDR) { - m_DrawOffscreenUIPass.Setup(cameraData, k_DepthStencilFormat); + m_DrawOffscreenUIPass.Setup(cameraData, cameraDepthTextureFormat); EnqueuePass(m_DrawOffscreenUIPass); } @@ -1465,7 +1485,7 @@ public override void Setup(ScriptableRenderContext context, ref RenderingData re if (!depthTargetResolved && cameraData.xr.copyDepth) { m_XRCopyDepthPass.Setup(m_ActiveCameraDepthAttachment, m_TargetDepthHandle); - m_XRCopyDepthPass.CopyToDepth = true; + m_XRCopyDepthPass.CopyToDepthXR = true; EnqueuePass(m_XRCopyDepthPass); } } @@ -1484,10 +1504,11 @@ public override void Setup(ScriptableRenderContext context, ref RenderingData re // Scene view camera should always resolve target (not stacked) m_FinalDepthCopyPass.Setup(m_DepthTexture, k_CameraTarget); m_FinalDepthCopyPass.MssaSamples = 0; + m_FinalDepthCopyPass.CopyToBackbuffer = cameraData.isGameCamera; // Turning off unnecessary NRP in Editor because of MSAA mistmatch between CameraTargetDescriptor vs camera backbuffer // NRP layer considers this being a pass with MSAA samples by checking CameraTargetDescriptor taken from RP asset // while the camera backbuffer has a single sample - m_FinalDepthCopyPass.useNativeRenderPass = false; + m_FinalDepthCopyPass.useNativeRenderPass = false; EnqueuePass(m_FinalDepthCopyPass); } #endif @@ -1567,7 +1588,11 @@ private void SetupRawColorDepthHistory(UniversalCameraData cameraData, ref Rende depthHistory.Update(ref tempColorDepthDesc, xrMultipassEnabled); } else - depthHistory.Update(ref cameraTargetDescriptor, xrMultipassEnabled); + { + var tempColorDepthDesc = cameraData.cameraTargetDescriptor; + tempColorDepthDesc.graphicsFormat = GraphicsFormat.None; + 
depthHistory.Update(ref tempColorDepthDesc, xrMultipassEnabled); + } if (depthHistory.GetCurrentTexture(multipassId) != null) { @@ -1698,8 +1723,6 @@ private struct RenderPassInputSummary private RenderPassInputSummary GetRenderPassInputs(bool isTemporalAAEnabled, bool postProcessingEnabled) { - RenderPassEvent beforeMainRenderingEvent = m_RenderingMode == RenderingMode.Deferred ? RenderPassEvent.BeforeRenderingGbuffer : RenderPassEvent.BeforeRenderingOpaques; - RenderPassInputSummary inputSummary = new RenderPassInputSummary(); inputSummary.requiresDepthNormalAtEvent = RenderPassEvent.BeforeRenderingOpaques; inputSummary.requiresDepthTextureEarliestEvent = RenderPassEvent.BeforeRenderingPostProcessing; @@ -1710,7 +1733,7 @@ private RenderPassInputSummary GetRenderPassInputs(bool isTemporalAAEnabled, boo bool needsNormals = (pass.input & ScriptableRenderPassInput.Normal) != ScriptableRenderPassInput.None; bool needsColor = (pass.input & ScriptableRenderPassInput.Color) != ScriptableRenderPassInput.None; bool needsMotion = (pass.input & ScriptableRenderPassInput.Motion) != ScriptableRenderPassInput.None; - bool eventBeforeMainRendering = pass.renderPassEvent <= beforeMainRenderingEvent; + bool eventBeforeRenderingOpaques = pass.renderPassEvent < RenderPassEvent.AfterRenderingOpaques; // TODO: Need a better way to handle this, probably worth to recheck after render graph // DBuffer requires color texture created as it does not handle y flip correctly @@ -1720,7 +1743,11 @@ private RenderPassInputSummary GetRenderPassInputs(bool isTemporalAAEnabled, boo } inputSummary.requiresDepthTexture |= needsDepth; - inputSummary.requiresDepthPrepass |= needsNormals || needsDepth && eventBeforeMainRendering; + + // A depth prepass is always required when normals are needed because URP's forward passes don't support rendering into the normals texture + // If depth is needed without normals, we only need a prepass when the event consuming depth occurs before opaque rendering is 
completed. + inputSummary.requiresDepthPrepass |= needsNormals || (needsDepth && eventBeforeRenderingOpaques); + inputSummary.requiresNormalsTexture |= needsNormals; inputSummary.requiresColorTexture |= needsColor; inputSummary.requiresMotionVectors |= needsMotion; @@ -1798,7 +1825,7 @@ void CreateCameraRenderTarget(ScriptableRenderContext context, ref RenderTexture depthDescriptor.bindMS = false; depthDescriptor.graphicsFormat = GraphicsFormat.None; - depthDescriptor.depthStencilFormat = (m_CameraDepthAttachmentFormat != DepthFormat.Default) ? (GraphicsFormat)m_CameraDepthAttachmentFormat : k_DepthStencilFormat; + depthDescriptor.depthStencilFormat = cameraDepthAttachmentFormat; RenderingUtils.ReAllocateHandleIfNeeded(ref m_CameraDepthAttachment, depthDescriptor, FilterMode.Point, TextureWrapMode.Clamp, name: "_CameraDepthAttachment"); cmd.SetGlobalTexture(m_CameraDepthAttachment.name, m_CameraDepthAttachment.nameID); diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererRenderGraph.cs b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererRenderGraph.cs index 169086c0a6c..3045fcb00c1 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererRenderGraph.cs +++ b/Packages/com.unity.render-pipelines.universal/Runtime/UniversalRendererRenderGraph.cs @@ -236,8 +236,7 @@ public static TextureHandle CreateRenderGraphTexture(RenderGraph renderGraph, Re rgDesc.dimension = desc.dimension; rgDesc.clearBuffer = clear; rgDesc.bindTextureMS = desc.bindMS; - rgDesc.colorFormat = desc.graphicsFormat; - rgDesc.depthBufferBits = (DepthBits)desc.depthBufferBits; + rgDesc.format = (desc.depthStencilFormat != GraphicsFormat.None) ? 
desc.depthStencilFormat : desc.graphicsFormat; rgDesc.slices = desc.volumeDepth; rgDesc.msaaSamples = (MSAASamples)desc.msaaSamples; rgDesc.name = name; @@ -248,7 +247,6 @@ public static TextureHandle CreateRenderGraphTexture(RenderGraph renderGraph, Re rgDesc.vrUsage = desc.vrUsage; rgDesc.useDynamicScale = desc.useDynamicScale; rgDesc.useDynamicScaleExplicit = desc.useDynamicScaleExplicit; - // TODO RENDERGRAPH: depthStencilFormat handling? return renderGraph.CreateTexture(rgDesc); } @@ -261,8 +259,7 @@ internal static TextureHandle CreateRenderGraphTexture(RenderGraph renderGraph, rgDesc.clearBuffer = clear; rgDesc.clearColor = color; rgDesc.bindTextureMS = desc.bindMS; - rgDesc.colorFormat = desc.graphicsFormat; - rgDesc.depthBufferBits = (DepthBits)desc.depthBufferBits; + rgDesc.format = (desc.depthStencilFormat != GraphicsFormat.None) ? desc.depthStencilFormat : desc.graphicsFormat; rgDesc.slices = desc.volumeDepth; rgDesc.msaaSamples = (MSAASamples)desc.msaaSamples; rgDesc.name = name; @@ -285,7 +282,7 @@ bool CameraHasPostProcessingWithDepth(UniversalCameraData cameraData) return ShouldApplyPostProcessing(cameraData.postProcessEnabled) && cameraData.postProcessingRequiresDepthTexture; } - void RequiresColorAndDepthTextures(RenderGraph renderGraph, out bool createColorTexture, out bool createDepthTexture, UniversalCameraData cameraData, ref RenderPassInputSummary renderPassInputs) + void RequiresColorAndDepthAttachments(RenderGraph renderGraph, out bool createColorTexture, out bool createDepthTexture, UniversalCameraData cameraData, ref RenderPassInputSummary renderPassInputs) { bool isPreviewCamera = cameraData.isPreviewCamera; bool requiresDepthPrepass = RequireDepthPrepass(cameraData, ref renderPassInputs); @@ -440,7 +437,7 @@ void CreateRenderGraphCameraRenderTargets(RenderGraph renderGraph, bool isCamera // We configure this for the first camera of the stack and overlay camera will reuse create color/depth var // to pick the correct target, as if there 
is an intermediate texture, overlay cam should use them if (cameraData.renderType == CameraRenderType.Base) - RequiresColorAndDepthTextures(renderGraph, out m_CreateColorTexture, out m_CreateDepthTexture, cameraData, ref renderPassInputs); + RequiresColorAndDepthAttachments(renderGraph, out m_CreateColorAttachment, out m_CreateDepthAttachment, cameraData, ref renderPassInputs); // The final output back buffer should be cleared by the graph on first use only if we have no final blit pass. @@ -449,7 +446,7 @@ void CreateRenderGraphCameraRenderTargets(RenderGraph renderGraph, bool isCamera // with a Viewport Rect smaller than the full screen. So the existing backbuffer contents need to be preserved in this case. // Finally for non-base cameras the backbuffer should never be cleared. (Note that there might still be two base cameras // rendering to the same screen. See e.g. test foundation 014 that renders a minimap) - bool clearBackbufferOnFirstUse = (cameraData.renderType == CameraRenderType.Base) && !m_CreateColorTexture; + bool clearBackbufferOnFirstUse = (cameraData.renderType == CameraRenderType.Base) && !m_CreateColorAttachment; // force the clear if we are rendering to an offscreen depth texture clearBackbufferOnFirstUse |= isCameraTargetOffscreenDepth; @@ -460,9 +457,9 @@ void CreateRenderGraphCameraRenderTargets(RenderGraph renderGraph, bool isCamera // We cannot use directly !cameraData.rendersOverlayUI but this is similar logic bool isNativeUIOverlayRenderingAfterURP = !SupportedRenderingFeatures.active.rendersUIOverlay && cameraData.resolveToScreen; bool isNativeRenderingAfterURP = UnityEngine.Rendering.Watermark.IsVisible() || isNativeUIOverlayRenderingAfterURP; - // If MSAA > 1, no extra native rendering after SRP and we target the BB directly (!m_CreateColorTexture) + // If MSAA > 1, no extra native rendering after SRP and we target the BB directly (!m_CreateColorAttachment) // then we can discard MSAA buffers and only resolve, otherwise we must store 
and resolve - bool noStoreOnlyResolveBBColor = !m_CreateColorTexture && !isNativeRenderingAfterURP && (cameraData.cameraTargetDescriptor.msaaSamples > 1); + bool noStoreOnlyResolveBBColor = !m_CreateColorAttachment && !isNativeRenderingAfterURP && (cameraData.cameraTargetDescriptor.msaaSamples > 1); ImportResourceParams importBackbufferColorParams = new ImportResourceParams(); importBackbufferColorParams.clearOnFirstUse = clearBackbufferOnFirstUse; @@ -505,7 +502,7 @@ void CreateRenderGraphCameraRenderTargets(RenderGraph renderGraph, bool isCamera int oldSamples = cameraData.cameraTargetDescriptor.msaaSamples; #if !UNITY_EDITOR // for safety do this only for the NRP path, even though works also on non NRP, but would need extensive testing - if (m_CreateColorTexture && renderGraph.nativeRenderPassesEnabled && Screen.msaaSamples > 1) + if (m_CreateColorAttachment && renderGraph.nativeRenderPassesEnabled && Screen.msaaSamples > 1) { oldSamples = Mathf.Max(Screen.msaaSamples, oldSamples); msaaSamplesChangedThisFrame = true; @@ -535,10 +532,11 @@ void CreateRenderGraphCameraRenderTargets(RenderGraph renderGraph, bool isCamera importInfo.volumeDepth = 1; importInfo.msaaSamples = numSamples; - importInfo.format = UniversalRenderPipeline.MakeRenderTextureGraphicsFormat(cameraData.isHdrEnabled, cameraData.hdrColorBufferPrecision, Graphics.preserveFramebufferAlpha); + importInfo.format = cameraData.cameraTargetDescriptor.graphicsFormat; importInfoDepth = importInfo; - importInfoDepth.format = SystemInfo.GetGraphicsFormat(DefaultFormat.DepthStencil); + + importInfoDepth.format = cameraData.cameraTargetDescriptor.depthStencilFormat; } else { @@ -583,7 +581,7 @@ void CreateRenderGraphCameraRenderTargets(RenderGraph renderGraph, bool isCamera #region Intermediate Camera Target - if (m_CreateColorTexture && !isCameraTargetOffscreenDepth) + if (m_CreateColorAttachment && !isCameraTargetOffscreenDepth) { var cameraTargetDescriptor = cameraData.cameraTargetDescriptor; 
cameraTargetDescriptor.useMipMap = false; @@ -625,28 +623,19 @@ void CreateRenderGraphCameraRenderTargets(RenderGraph renderGraph, bool isCamera resourceData.activeColorID = UniversalResourceData.ActiveID.BackBuffer; } + bool depthTextureIsDepthFormat = RequireDepthPrepass(cameraData, ref renderPassInputs) && (renderingModeActual != RenderingMode.Deferred); - if (m_CreateDepthTexture) + if (m_CreateDepthAttachment) { var depthDescriptor = cameraData.cameraTargetDescriptor; depthDescriptor.useMipMap = false; depthDescriptor.autoGenerateMips = false; - depthDescriptor.bindMS = false; - bool hasMSAA = depthDescriptor.msaaSamples > 1 && (SystemInfo.supportsMultisampledTextures != 0); + bool hasMSAA = depthDescriptor.msaaSamples > 1; bool resolveDepth = RenderingUtils.MultisampleDepthResolveSupported() && renderGraph.nativeRenderPassesEnabled; - // TODO RENDERGRAPH: once all passes are ported to RasterCommandBuffers we need to reenable depth resolve - m_CopyDepthPass.m_CopyResolvedDepth = resolveDepth && m_CopyDepthMode == CopyDepthMode.AfterTransparents; - - if (hasMSAA) - { - // if depth priming is enabled the copy depth primed pass is meant to do the MSAA resolve, so we want to bind the MS surface - if (IsDepthPrimingEnabled(cameraData)) - depthDescriptor.bindMS = true; - else - depthDescriptor.bindMS = !(resolveDepth && m_CopyDepthMode == CopyDepthMode.AfterTransparents); - } + // If we aren't using hardware depth resolves and we have MSAA, we need to resolve depth manually by binding as an MSAA texture. 
+ depthDescriptor.bindMS = !resolveDepth && hasMSAA; // binding MS surfaces is not supported by the GLES backend, and it won't be fixed after investigating // the high performance impact of potential fixes, which would make it more expensive than depth prepass (fogbugz 1339401 for more info) @@ -654,13 +643,18 @@ void CreateRenderGraphCameraRenderTargets(RenderGraph renderGraph, bool isCamera depthDescriptor.bindMS = false; depthDescriptor.graphicsFormat = GraphicsFormat.None; - depthDescriptor.depthStencilFormat = k_DepthStencilFormat; + depthDescriptor.depthStencilFormat = cameraDepthAttachmentFormat; RenderingUtils.ReAllocateHandleIfNeeded(ref m_RenderGraphCameraDepthHandle, depthDescriptor, FilterMode.Point, TextureWrapMode.Clamp, name: "_CameraDepthAttachment"); importDepthParams.discardOnLastUse = lastCameraInTheStack; resourceData.cameraDepth = renderGraph.ImportTexture(m_RenderGraphCameraDepthHandle, importDepthParams); resourceData.activeDepthID = UniversalResourceData.ActiveID.Camera; + + // Configure the copy depth pass based on the allocated depth texture + m_CopyDepthPass.MssaSamples = depthDescriptor.msaaSamples; + m_CopyDepthPass.CopyToDepth = depthTextureIsDepthFormat; + m_CopyDepthPass.m_CopyResolvedDepth = !depthDescriptor.bindMS; } else { @@ -668,7 +662,7 @@ void CreateRenderGraphCameraRenderTargets(RenderGraph renderGraph, bool isCamera } #endregion - CreateCameraDepthCopyTexture(renderGraph, cameraData.cameraTargetDescriptor, RequireDepthPrepass(cameraData, ref renderPassInputs) && this.renderingModeActual != RenderingMode.Deferred); + CreateCameraDepthCopyTexture(renderGraph, cameraData.cameraTargetDescriptor, depthTextureIsDepthFormat); CreateCameraNormalsTexture(renderGraph, cameraData.cameraTargetDescriptor); @@ -745,17 +739,21 @@ private void RenderRawColorDepthHistory(RenderGraph renderGraph, UniversalCamera { // Fall back to R32_Float if depth copy is disabled. 
var tempColorDepthDesc = cameraData.cameraTargetDescriptor; - tempColorDepthDesc.colorFormat = RenderTextureFormat.RFloat; tempColorDepthDesc.graphicsFormat = GraphicsFormat.R32_SFloat; tempColorDepthDesc.depthBufferBits = 0; depthHistory.Update(ref tempColorDepthDesc, xrMultipassEnabled); } else - depthHistory.Update(ref cameraData.cameraTargetDescriptor, xrMultipassEnabled); + { + var tempColorDepthDesc = cameraData.cameraTargetDescriptor; + tempColorDepthDesc.graphicsFormat = GraphicsFormat.None; + depthHistory.Update(ref tempColorDepthDesc, xrMultipassEnabled); + } if (depthHistory.GetCurrentTexture(multipassId) != null) { var depthHistoryTarget = renderGraph.ImportTexture(depthHistory.GetCurrentTexture(multipassId)); + // See pass create in UniversalRenderer() for execution order. m_HistoryRawDepthCopyPass.Render(renderGraph, frameData, depthHistoryTarget, resourceData.cameraDepth, false); } @@ -845,8 +843,8 @@ internal override void OnFinishRenderGraphRendering(CommandBuffer cmd) /// public override bool supportsGPUOcclusion => m_RenderingMode != RenderingMode.Deferred; - private static bool m_CreateColorTexture; - private static bool m_CreateDepthTexture; + private static bool m_CreateColorAttachment; + private static bool m_CreateDepthAttachment; private void OnOffscreenDepthTextureRendering(RenderGraph renderGraph, ScriptableRenderContext context, UniversalResourceData resourceData, UniversalCameraData cameraData) { @@ -894,6 +892,14 @@ private void OnBeforeRendering(RenderGraph renderGraph) SetupRenderGraphCameraProperties(renderGraph, resourceData.isActiveTargetBackBuffer); RecordCustomRenderGraphPasses(renderGraph, RenderPassEvent.AfterRenderingShadows); + + bool requiredColorGradingLutPass = cameraData.postProcessEnabled && m_PostProcessPasses.isCreated; + if (requiredColorGradingLutPass) + { + TextureHandle internalColorLut; + m_PostProcessPasses.colorGradingLutPass.Render(renderGraph, frameData, out internalColorLut); + resourceData.internalColorLut 
= internalColorLut; + } } private void UpdateInstanceOccluders(RenderGraph renderGraph, UniversalCameraData cameraData, TextureHandle depthTexture) @@ -946,7 +952,8 @@ private void InstanceOcclusionTest(RenderGraph renderGraph, UniversalCameraData } // Records the depth copy pass along with the specified custom passes in a way that properly handles depth read dependencies - private void RecordCustomPassesWithDepthCopy(RenderGraph renderGraph, UniversalResourceData resourceData, RenderPassEvent earliestDepthReadEvent, RenderPassEvent currentEvent) + // This function will also trigger motion vector rendering if required by the current frame since its availability is intended to match depth's. + private void RecordCustomPassesWithDepthCopyAndMotion(RenderGraph renderGraph, UniversalResourceData resourceData, RenderPassEvent earliestDepthReadEvent, RenderPassEvent currentEvent, bool renderMotionVectors) { // Custom passes typically come before built-in passes but there's an exception for passes that require depth. // In cases where custom passes passes may depend on depth, we split the event range and execute the depth copy as late as possible while still ensuring valid depth reads. 
@@ -955,7 +962,7 @@ private void RecordCustomPassesWithDepthCopy(RenderGraph renderGraph, UniversalR RecordCustomRenderGraphPassesInEventRange(renderGraph, startEvent, splitEvent); - m_CopyDepthPass.Render(renderGraph, frameData, resourceData.cameraDepthTexture, resourceData.activeDepthTexture, true); + ExecuteScheduledDepthCopyWithMotion(renderGraph, resourceData, renderMotionVectors); RecordCustomRenderGraphPassesInEventRange(renderGraph, splitEvent, endEvent); } @@ -972,12 +979,16 @@ private bool AllowPartialDepthNormalsPrepass(bool isDeferred, RenderPassEvent re (requiresDepthNormalEvent <= RenderPassEvent.BeforeRenderingOpaques)); } - // Enumeration of possible positions within the frame where the depth copy pass can be scheduled + // Enumeration of possible positions within the frame where the depth copy can occur private enum DepthCopySchedule { + // In some cases, we can render depth directly to the depth texture during the depth prepass + DuringPrepass, + AfterPrepass, AfterGBuffer, AfterOpaques, + AfterSkybox, AfterTransparents, // None is always the last value so we can easily check if the depth has already been copied in the current frame via comparison @@ -1007,8 +1018,8 @@ private DepthCopySchedule CalculateDepthCopySchedule(RenderPassEvent earliestDep if ((earliestDepthReadEvent < RenderPassEvent.AfterRenderingOpaques) || (m_CopyDepthMode == CopyDepthMode.ForcePrepass)) { - // The forward path never needs to copy depth this early in the frame since its prepass writes directly into the depth texture. - Debug.Assert(renderingModeActual == RenderingMode.Deferred); + // The forward path never needs to copy depth this early in the frame unless we're using depth priming. 
+ Debug.Assert((renderingModeActual == RenderingMode.Deferred) || useDepthPriming); if (hasFullPrepass) { @@ -1027,7 +1038,10 @@ private DepthCopySchedule CalculateDepthCopySchedule(RenderPassEvent earliestDep } else if ((earliestDepthReadEvent < RenderPassEvent.AfterRenderingTransparents) || (m_CopyDepthMode == CopyDepthMode.AfterOpaques)) { - schedule = DepthCopySchedule.AfterOpaques; + if (earliestDepthReadEvent < RenderPassEvent.AfterRenderingSkybox) + schedule = DepthCopySchedule.AfterOpaques; + else + schedule = DepthCopySchedule.AfterSkybox; } else if ((earliestDepthReadEvent < RenderPassEvent.BeforeRenderingPostProcessing) || (m_CopyDepthMode == CopyDepthMode.AfterTransparents)) { @@ -1050,27 +1064,29 @@ private struct TextureCopySchedules internal ColorCopySchedule color; } - private TextureCopySchedules CalculateTextureCopySchedules(UniversalCameraData cameraData, RenderPassInputSummary renderPassInputs, bool isDeferred, bool requiresDepthPrepass) + private TextureCopySchedules CalculateTextureCopySchedules(UniversalCameraData cameraData, RenderPassInputSummary renderPassInputs, bool isDeferred, bool requiresDepthPrepass, bool hasFullPrepass) { // If Camera's PostProcessing is enabled and if there any enabled PostProcessing requires depth texture as shader read resource (Motion Blur/DoF) bool cameraHasPostProcessingWithDepth = CameraHasPostProcessingWithDepth(cameraData); - bool hasFullPrepass = requiresDepthPrepass && !AllowPartialDepthNormalsPrepass(isDeferred, renderPassInputs.requiresDepthNormalAtEvent); - // Determine if we read the contents of the depth texture at some point in the frame bool depthTextureUsed = (cameraData.requiresDepthTexture || cameraHasPostProcessingWithDepth || renderPassInputs.requiresDepthTexture) || DebugHandlerRequireDepthPass(frameData.Get()); - // In forward, the depth prepass writes directly to the depth texture so no copy is needed. 
- // In deferred, the depth prepass writes to the depth attachment so a copy must happen later in the frame if depth reads are required. - bool depthTextureRequiresCopy = (isDeferred || !requiresDepthPrepass); + // Assume the depth texture is unused and no copy is needed until we determine otherwise + DepthCopySchedule depth = DepthCopySchedule.None; - // We must schedule an explicit copy depth pass when the depth texture is read during the frame, and not populated directly by an earlier pass. - bool requiresDepthCopyPass = depthTextureUsed && depthTextureRequiresCopy; + // If the depth texture is read during the frame, determine when the copy should occur + if (depthTextureUsed) + { + // In forward, the depth prepass typically writes directly to the depth texture so no copy is needed. However, when depth priming is enabled, + // the prepass targets the depth attachment instead, so we still have to perform a depth copy to populate the depth texture. + // In deferred, the depth prepass writes to the depth attachment so a copy must happen later in the frame. + bool depthTextureRequiresCopy = (isDeferred || (!requiresDepthPrepass || useDepthPriming)); - // Schedule a depth copy pass if required - DepthCopySchedule depth = requiresDepthCopyPass ? CalculateDepthCopySchedule(renderPassInputs.requiresDepthTextureEarliestEvent, hasFullPrepass) - : DepthCopySchedule.None; + depth = depthTextureRequiresCopy ? 
CalculateDepthCopySchedule(renderPassInputs.requiresDepthTextureEarliestEvent, hasFullPrepass) + : DepthCopySchedule.DuringPrepass; + } bool requiresColorCopyPass = cameraData.requiresOpaqueTexture || renderPassInputs.requiresColorTexture; requiresColorCopyPass &= !cameraData.isPreviewCamera; @@ -1087,6 +1103,24 @@ private TextureCopySchedules CalculateTextureCopySchedules(UniversalCameraData c return schedules; } + private void CopyDepthToDepthTexture(RenderGraph renderGraph, UniversalResourceData resourceData) + { + m_CopyDepthPass.Render(renderGraph, frameData, resourceData.cameraDepthTexture, resourceData.activeDepthTexture, true); + } + + private void RenderMotionVectors(RenderGraph renderGraph, UniversalResourceData resourceData) + { + m_MotionVectorPass.Render(renderGraph, frameData, resourceData.cameraDepthTexture, resourceData.motionVectorColor, resourceData.motionVectorDepth); + } + + private void ExecuteScheduledDepthCopyWithMotion(RenderGraph renderGraph, UniversalResourceData resourceData, bool renderMotionVectors) + { + CopyDepthToDepthTexture(renderGraph, resourceData); + + if (renderMotionVectors) + RenderMotionVectors(renderGraph, resourceData); + } + private void OnMainRendering(RenderGraph renderGraph, ScriptableRenderContext context) { UniversalResourceData resourceData = frameData.Get(); @@ -1110,17 +1144,22 @@ private void OnMainRendering(RenderGraph renderGraph, ScriptableRenderContext co renderPassInputs.requiresNormalsTexture = true; #if UNITY_EDITOR - if (ProbeReferenceVolume.instance.IsProbeSamplingDebugEnabled()) + if (ProbeReferenceVolume.instance.IsProbeSamplingDebugEnabled() && cameraData.isSceneViewCamera) renderPassInputs.requiresNormalsTexture = true; #endif bool isDeferred = this.renderingModeActual == RenderingMode.Deferred; bool requiresDepthPrepass = RequireDepthPrepass(cameraData, ref renderPassInputs); + bool isDepthOnlyPrepass = requiresDepthPrepass && !renderPassInputs.requiresNormalsTexture; + bool isDepthNormalPrepass = 
requiresDepthPrepass && renderPassInputs.requiresNormalsTexture; - TextureCopySchedules copySchedules = CalculateTextureCopySchedules(cameraData, renderPassInputs, isDeferred, requiresDepthPrepass); + // The depth prepass is considered "full" (renders the entire scene, not a partial subset), when we either: + // - Have a depth only prepass (URP always renders the full scene in depth only mode) + // - Have a depth normals prepass that does not allow the partial prepass optimization + bool hasFullPrepass = isDepthOnlyPrepass || (isDepthNormalPrepass && !AllowPartialDepthNormalsPrepass(isDeferred, renderPassInputs.requiresDepthNormalAtEvent)); - bool requiredColorGradingLutPass = cameraData.postProcessEnabled && m_PostProcessPasses.isCreated; + TextureCopySchedules copySchedules = CalculateTextureCopySchedules(cameraData, renderPassInputs, isDeferred, requiresDepthPrepass, hasFullPrepass); bool needsOccluderUpdate = cameraData.useGPUOcclusionCulling; @@ -1137,9 +1176,11 @@ private void OnMainRendering(RenderGraph renderGraph, ScriptableRenderContext co if (requiresDepthPrepass) { - // TODO RENDERGRAPH: is this always a valid assumption for deferred rendering? - TextureHandle depthTarget = (renderingModeActual == RenderingMode.Deferred) ? resourceData.activeDepthTexture : resourceData.cameraDepthTexture; - depthTarget = (useDepthPriming && (cameraData.renderType == CameraRenderType.Base || cameraData.clearDepth)) ? resourceData.activeDepthTexture : depthTarget; + // If we're in deferred mode, prepasses always render directly to the depth attachment rather than the camera depth texture. + // In non-deferred mode, we only render to the depth attachment directly when depth priming is enabled and we're starting with an empty depth buffer. 
+ bool isDepthPrimingTarget = (useDepthPriming && (cameraData.renderType == CameraRenderType.Base || cameraData.clearDepth)); + bool renderToAttachment = (isDeferred || isDepthPrimingTarget); + TextureHandle depthTarget = renderToAttachment ? resourceData.activeDepthTexture : resourceData.cameraDepthTexture; var passCount = needsOccluderUpdate ? 2 : 1; for (int passIndex = 0; passIndex < passCount; ++passIndex) @@ -1154,15 +1195,22 @@ private void OnMainRendering(RenderGraph renderGraph, ScriptableRenderContext co batchLayerMask = occlusionTest.GetBatchLayerMask(); } + // The prepasses are executed multiple times when GRD occlusion is active. + // We only want to set global textures after all executions are complete. bool isLastPass = (passIndex == (passCount - 1)); - if (renderPassInputs.requiresNormalsTexture) - DepthNormalPrepassRender(renderGraph, renderPassInputs, depthTarget, batchLayerMask, isLastPass); + + // When we render to the depth attachment, a copy must happen later to populate the camera depth texture and the copy will handle setting globals. + // If we're rendering to the camera depth texture, we can set the globals immediately. + bool setGlobalDepth = isLastPass && !renderToAttachment; + + // There's no special copy logic for the camera normals texture, so we can set the global as long as we're not performing a partial prepass. + // In the case of a partial prepass, the global will be set later by the gbuffer pass once it completes the data in the texture. 
+ bool setGlobalTextures = isLastPass && (!isDeferred || hasFullPrepass); + + if (isDepthNormalPrepass) + DepthNormalPrepassRender(renderGraph, renderPassInputs, depthTarget, batchLayerMask, setGlobalDepth, setGlobalTextures); else - { - m_DepthPrepass.Render(renderGraph, frameData, ref depthTarget, batchLayerMask); - if (isLastPass && !useDepthPriming && depthTarget.IsValid()) - RenderGraphUtils.SetGlobalTexture(renderGraph, Shader.PropertyToID("_CameraDepthTexture"), depthTarget, "Set Global Depth Texture"); - } + m_DepthPrepass.Render(renderGraph, frameData, ref depthTarget, batchLayerMask, setGlobalDepth); if (needsOccluderUpdate) { @@ -1176,27 +1224,16 @@ private void OnMainRendering(RenderGraph renderGraph, ScriptableRenderContext co needsOccluderUpdate = false; } - // depth priming still needs to copy depth because the prepass doesn't target anymore CameraDepthTexture - // TODO: this is unoptimal, investigate optimizations - if (useDepthPriming) - { - TextureHandle depth = resourceData.cameraDepth; - TextureHandle cameraDepthTexture = resourceData.cameraDepthTexture; - m_PrimedDepthCopyPass.Render(renderGraph, frameData, cameraDepthTexture, depth, true); - } - + // After the prepass completes, we should copy depth if necessary and also render motion vectors. (they're expected to be available whenever depth is) + // In the case where depth is rendered as part of the prepass and no copy is necessary, we still need to render motion vectors here to ensure they're available + // with depth before any user passes are executed. 
if (copySchedules.depth == DepthCopySchedule.AfterPrepass) - m_CopyDepthPass.Render(renderGraph, frameData, resourceData.cameraDepthTexture, resourceData.activeDepthTexture, true); + ExecuteScheduledDepthCopyWithMotion(renderGraph, resourceData, renderPassInputs.requiresMotionVectors); + else if ((copySchedules.depth == DepthCopySchedule.DuringPrepass) && renderPassInputs.requiresMotionVectors) + RenderMotionVectors(renderGraph, resourceData); RecordCustomRenderGraphPasses(renderGraph, RenderPassEvent.AfterRenderingPrePasses); - if (requiredColorGradingLutPass) - { - TextureHandle internalColorLut; - m_PostProcessPasses.colorGradingLutPass.Render(renderGraph, frameData, out internalColorLut); - resourceData.internalColorLut = internalColorLut; - } - #if ENABLE_VR && ENABLE_XR_MODULE if (cameraData.xr.hasValidOcclusionMesh) m_XROcclusionMeshPass.Render(renderGraph, frameData, resourceData.activeColorTexture, resourceData.activeDepthTexture); @@ -1205,25 +1242,28 @@ if (isDeferred) { m_DeferredLights.Setup(m_AdditionalLightsShadowCasterPass); - if (m_DeferredLights != null) - { - // We need to be sure there are no custom passes in between GBuffer/Deferred passes, if there are - we disable fb fetch just to be safe` - m_DeferredLights.UseFramebufferFetch = renderGraph.nativeRenderPassesEnabled; - m_DeferredLights.HasNormalPrepass = renderPassInputs.requiresNormalsTexture; - m_DeferredLights.HasDepthPrepass = requiresDepthPrepass; - m_DeferredLights.ResolveMixedLightingMode(lightData); - m_DeferredLights.IsOverlay = cameraData.renderType == CameraRenderType.Overlay; - } + + // We need to be sure there are no custom passes in between GBuffer/Deferred passes, if there are - we disable fb fetch just to be safe + m_DeferredLights.UseFramebufferFetch = renderGraph.nativeRenderPassesEnabled; + m_DeferredLights.HasNormalPrepass = isDepthNormalPrepass; + m_DeferredLights.HasDepthPrepass = 
requiresDepthPrepass; + m_DeferredLights.ResolveMixedLightingMode(lightData); + m_DeferredLights.IsOverlay = cameraData.renderType == CameraRenderType.Overlay; RecordCustomRenderGraphPasses(renderGraph, RenderPassEvent.BeforeRenderingGbuffer); - m_GBufferPass.Render(renderGraph, frameData, resourceData.activeColorTexture, resourceData.activeDepthTexture); + // When we have a partial depth normals prepass, we must wait until the gbuffer pass to set global textures. + // In this case, the incoming global texture data is incomplete and the gbuffer pass is required to complete it. + bool setGlobalTextures = isDepthNormalPrepass && !hasFullPrepass; + m_GBufferPass.Render(renderGraph, frameData, resourceData.activeColorTexture, resourceData.activeDepthTexture, setGlobalTextures); // In addition to regularly scheduled depth copies here, we also need to copy depth when native render passes aren't available. // This is required because deferred lighting must read depth as a texture, but it must also bind depth as a depth write attachment at the same time. // When native render passes are available, we write depth into an internal gbuffer slice and read via framebuffer fetch so a depth copy is no longer required. 
- if (!renderGraph.nativeRenderPassesEnabled || (copySchedules.depth == DepthCopySchedule.AfterGBuffer)) - m_CopyDepthPass.Render(renderGraph, frameData, resourceData.cameraDepthTexture, resourceData.activeDepthTexture, true); + if (copySchedules.depth == DepthCopySchedule.AfterGBuffer) + ExecuteScheduledDepthCopyWithMotion(renderGraph, resourceData, renderPassInputs.requiresMotionVectors); + else if (!renderGraph.nativeRenderPassesEnabled) + CopyDepthToDepthTexture(renderGraph, resourceData); RecordCustomRenderGraphPasses(renderGraph, RenderPassEvent.AfterRenderingGbuffer, RenderPassEvent.BeforeRenderingDeferredLights); @@ -1291,14 +1331,10 @@ private void OnMainRendering(RenderGraph renderGraph, ScriptableRenderContext co } if (copySchedules.depth == DepthCopySchedule.AfterOpaques) - RecordCustomPassesWithDepthCopy(renderGraph, resourceData, renderPassInputs.requiresDepthTextureEarliestEvent, RenderPassEvent.AfterRenderingOpaques); + RecordCustomPassesWithDepthCopyAndMotion(renderGraph, resourceData, renderPassInputs.requiresDepthTextureEarliestEvent, RenderPassEvent.AfterRenderingOpaques, renderPassInputs.requiresMotionVectors); else RecordCustomRenderGraphPasses(renderGraph, RenderPassEvent.AfterRenderingOpaques); - // Depends on the camera (copy) depth texture. Depth is reprojected to calculate motion vectors. - if (renderPassInputs.requiresMotionVectors && m_CopyDepthMode != CopyDepthMode.AfterTransparents) - m_MotionVectorPass.Render(renderGraph, frameData, resourceData.cameraDepthTexture, resourceData.motionVectorColor, resourceData.motionVectorDepth); - RecordCustomRenderGraphPasses(renderGraph, RenderPassEvent.BeforeRenderingSkybox); if (cameraData.camera.clearFlags == CameraClearFlags.Skybox && cameraData.renderType != CameraRenderType.Overlay) @@ -1306,13 +1342,12 @@ private void OnMainRendering(RenderGraph renderGraph, ScriptableRenderContext co cameraData.camera.TryGetComponent(out Skybox cameraSkybox); Material skyboxMaterial = cameraSkybox != null ? 
cameraSkybox.material : RenderSettings.skybox; if (skyboxMaterial != null) - { - // The depth texture is only available in the skybox pass when it's populated earlier in the frame. - bool isDepthTextureAvailable = copySchedules.depth < DepthCopySchedule.AfterTransparents; - m_DrawSkyboxPass.Render(renderGraph, frameData, context, resourceData.activeColorTexture, resourceData.activeDepthTexture, skyboxMaterial, isDepthTextureAvailable); - } + m_DrawSkyboxPass.Render(renderGraph, frameData, context, resourceData.activeColorTexture, resourceData.activeDepthTexture, skyboxMaterial); } + if (copySchedules.depth == DepthCopySchedule.AfterSkybox) + ExecuteScheduledDepthCopyWithMotion(renderGraph, resourceData, renderPassInputs.requiresMotionVectors); + RecordCustomRenderGraphPasses(renderGraph, RenderPassEvent.AfterRenderingSkybox); if (copySchedules.color == ColorCopySchedule.AfterSkybox) @@ -1350,17 +1385,10 @@ private void OnMainRendering(RenderGraph renderGraph, ScriptableRenderContext co } if (copySchedules.depth == DepthCopySchedule.AfterTransparents) - RecordCustomPassesWithDepthCopy(renderGraph, resourceData, renderPassInputs.requiresDepthTextureEarliestEvent, RenderPassEvent.AfterRenderingTransparents); + RecordCustomPassesWithDepthCopyAndMotion(renderGraph, resourceData, renderPassInputs.requiresDepthTextureEarliestEvent, RenderPassEvent.AfterRenderingTransparents, renderPassInputs.requiresMotionVectors); else RecordCustomRenderGraphPasses(renderGraph, RenderPassEvent.AfterRenderingTransparents); - // TODO: Postprocess pass should be able configure its render pass inputs per camera per frame (settings) BEFORE building any of the graph - // TODO: Alternatively we could always build the graph (a potential graph) and cull away unused passes if "record + cull" is fast enough. 
- // TODO: Currently we just override "requiresMotionVectors" for TAA in GetRenderPassInputs() - // Depends on camera (copy) depth texture - if (renderPassInputs.requiresMotionVectors && m_CopyDepthMode == CopyDepthMode.AfterTransparents) - m_MotionVectorPass.Render(renderGraph, frameData, resourceData.cameraDepthTexture, resourceData.motionVectorColor, resourceData.motionVectorDepth); - if (context.HasInvokeOnRenderObjectCallbacks()) m_OnRenderObjectCallbackPass.Render(renderGraph, resourceData.activeColorTexture, resourceData.activeDepthTexture); @@ -1380,7 +1408,7 @@ private void OnMainRendering(RenderGraph renderGraph, ScriptableRenderContext co if (shouldRenderUI && outputToHDR) { TextureHandle overlayUI; - m_DrawOffscreenUIPass.RenderOffscreen(renderGraph, frameData, k_DepthStencilFormat, out overlayUI); + m_DrawOffscreenUIPass.RenderOffscreen(renderGraph, frameData, cameraDepthAttachmentFormat, out overlayUI); resourceData.overlayUITexture = overlayUI; } } @@ -1438,7 +1466,7 @@ private void OnAfterRendering(RenderGraph renderGraph) resourceData.debugScreenColor = CreateRenderGraphTexture(renderGraph, colorDesc, "_DebugScreenColor", false); RenderTextureDescriptor depthDesc = cameraData.cameraTargetDescriptor; - DebugHandler.ConfigureDepthDescriptorForDebugScreen(ref depthDesc, k_DepthStencilFormat, cameraData.pixelWidth, cameraData.pixelHeight); + DebugHandler.ConfigureDepthDescriptorForDebugScreen(ref depthDesc, cameraDepthAttachmentFormat, cameraData.pixelWidth, cameraData.pixelHeight); resourceData.debugScreenDepth = CreateRenderGraphTexture(renderGraph, depthDesc, "_DebugScreenDepth", false); } @@ -1605,7 +1633,8 @@ private void OnAfterRendering(RenderGraph renderGraph) { TextureHandle cameraDepthTexture = resourceData.cameraDepthTexture; m_FinalDepthCopyPass.MssaSamples = 0; - m_FinalDepthCopyPass.Render(renderGraph, frameData, resourceData.activeDepthTexture, cameraDepthTexture, false); + m_FinalDepthCopyPass.CopyToBackbuffer = cameraData.isGameCamera; 
+ m_FinalDepthCopyPass.Render(renderGraph, frameData, resourceData.activeDepthTexture, cameraDepthTexture, false, "Final Depth Copy"); } #endif if (cameraData.isSceneViewCamera) @@ -1636,17 +1665,7 @@ bool RequireDepthPrepass(UniversalCameraData cameraData, ref RenderPassInputSumm requiresDepthPrepass |= isGizmosEnabled; requiresDepthPrepass |= cameraData.isPreviewCamera; requiresDepthPrepass |= renderPassInputs.requiresDepthPrepass; - requiresDepthPrepass |= renderPassInputs.requiresNormalsTexture; - - // Current aim of depth prepass is to generate a copy of depth buffer, it is NOT to prime depth buffer and reduce overdraw on non-mobile platforms. - // When deferred renderer is enabled, depth buffer is already accessible so depth prepass is not needed. - // The only exception is for generating depth-normal textures: SSAO pass needs it and it must run before forward-only geometry. - // DepthNormal prepass will render: - // - forward-only geometry when deferred renderer is enabled - // - all geometry when forward renderer is enabled - if (requiresDepthPrepass && this.renderingModeActual == RenderingMode.Deferred && !renderPassInputs.requiresNormalsTexture) - requiresDepthPrepass = false; - + requiresDepthPrepass |= renderPassInputs.requiresNormalsTexture; // This must be checked explicitly because some features inject normal requirements later in the frame requiresDepthPrepass |= depthPrimingEnabled; return requiresDepthPrepass; } @@ -1662,7 +1681,7 @@ bool RequireDepthTexture(UniversalCameraData cameraData, bool requiresDepthPrepa // Deferred renderer always need to access depth buffer. createDepthTexture |= (renderingModeActual == RenderingMode.Deferred && !useRenderPassEnabled); // Some render cases (e.g. Material previews) have shown we need to create a depth texture when we're forcing a prepass. 
- createDepthTexture |= depthPrimingEnabled; + createDepthTexture |= depthPrimingEnabled || cameraData.isPreviewCamera; // TODO: seems like with mrt depth is not taken from first target. Investigate if this is needed createDepthTexture |= m_RenderingLayerProvidesRenderObjectPass; @@ -1689,14 +1708,12 @@ void CreateCameraDepthCopyTexture(RenderGraph renderGraph, RenderTextureDescript if (isDepthTexture) { depthDescriptor.graphicsFormat = GraphicsFormat.None; - depthDescriptor.depthStencilFormat = k_DepthStencilFormat; - depthDescriptor.depthBufferBits = k_DepthBufferBits; + depthDescriptor.depthStencilFormat = cameraDepthTextureFormat; } else { depthDescriptor.graphicsFormat = GraphicsFormat.R32_SFloat; depthDescriptor.depthStencilFormat = GraphicsFormat.None; - depthDescriptor.depthBufferBits = 0; } resourceData.cameraDepthTexture = CreateRenderGraphTexture(renderGraph, depthDescriptor, "_CameraDepthTexture", true); @@ -1770,7 +1787,7 @@ void CreateAfterPostProcessTexture(RenderGraph renderGraph, RenderTextureDescrip resourceData.afterPostProcessColor = CreateRenderGraphTexture(renderGraph, desc, "_AfterPostProcessTexture", true); } - void DepthNormalPrepassRender(RenderGraph renderGraph, RenderPassInputSummary renderPassInputs, TextureHandle depthTarget, uint batchLayerMask, bool postSetGlobalTextures) + void DepthNormalPrepassRender(RenderGraph renderGraph, RenderPassInputSummary renderPassInputs, TextureHandle depthTarget, uint batchLayerMask, bool setGlobalDepth, bool setGlobalTextures) { UniversalResourceData resourceData = frameData.Get(); @@ -1797,7 +1814,7 @@ void DepthNormalPrepassRender(RenderGraph renderGraph, RenderPassInputSummary re TextureHandle normalsTexture = resourceData.cameraNormalsTexture; TextureHandle renderingLayersTexture = resourceData.renderingLayersTexture; - m_DepthNormalPrepass.Render(renderGraph, frameData, normalsTexture, depthTarget, renderingLayersTexture, batchLayerMask, postSetGlobalTextures); + 
m_DepthNormalPrepass.Render(renderGraph, frameData, normalsTexture, depthTarget, renderingLayersTexture, batchLayerMask, setGlobalDepth, setGlobalTextures); if (m_RequiresRenderingLayer) SetRenderingLayersGlobalTextures(renderGraph); diff --git a/Packages/com.unity.render-pipelines.universal/Runtime/VFXGraph/Shaders/VFXDefines.hlsl b/Packages/com.unity.render-pipelines.universal/Runtime/VFXGraph/Shaders/VFXDefines.hlsl index fcd8386eb1f..5282940ea3e 100644 --- a/Packages/com.unity.render-pipelines.universal/Runtime/VFXGraph/Shaders/VFXDefines.hlsl +++ b/Packages/com.unity.render-pipelines.universal/Runtime/VFXGraph/Shaders/VFXDefines.hlsl @@ -39,3 +39,7 @@ #else #define CULL_VERTEX(o) { o.VFX_VARYING_POSCS.x = VFX_NAN; return o; } #endif + +#if HAS_STRIPS +#define HAS_STRIPS_DATA 1 +#endif diff --git a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/DebugViewEnums.cs b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/DebugViewEnums.cs index b5b54755980..2a3e0f85c76 100644 --- a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/DebugViewEnums.cs +++ b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/DebugViewEnums.cs @@ -34,6 +34,8 @@ public enum DebugMaterialMode Metallic, /// Display material sprite mask. SpriteMask, + /// Display rendering layer masks. 
+ RenderingLayerMasks, } /// diff --git a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/DebugViewEnums.cs.hlsl b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/DebugViewEnums.cs.hlsl index 34bb6464235..9f37d25c6fc 100644 --- a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/DebugViewEnums.cs.hlsl +++ b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/DebugViewEnums.cs.hlsl @@ -53,6 +53,7 @@ #define DEBUGMATERIALMODE_LIGHTING_COMPLEXITY (9) #define DEBUGMATERIALMODE_METALLIC (10) #define DEBUGMATERIALMODE_SPRITE_MASK (11) +#define DEBUGMATERIALMODE_RENDERING_LAYER_MASKS (12) // // UnityEngine.Rendering.Universal.DebugMaterialValidationMode: static fields diff --git a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/Debugging2D.hlsl b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/Debugging2D.hlsl index 39c84eed570..e3d6621f069 100644 --- a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/Debugging2D.hlsl +++ b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/Debugging2D.hlsl @@ -8,8 +8,9 @@ #if defined(DEBUG_DISPLAY) -#define SETUP_DEBUG_TEXTURE_DATA_2D(inputData, positionWS, positionCS, texture) SetupDebugDataTexture(inputData, positionWS, positionCS, texture##_TexelSize, texture##_MipInfo, texture##_StreamInfo, GetMipCount(TEXTURE2D_ARGS(texture, sampler##texture))) -#define SETUP_DEBUG_DATA_2D(inputData, positionWS, positionCS) SetupDebugData(inputData, positionWS, positionCS) +#define SETUP_DEBUG_TEXTURE_DATA_2D_NO_TS(inputData, positionWS, positionCS, texture) SetupDebugDataTexture(inputData, positionWS, positionCS, float4(0.1, 0.1, 1.0, 1.0), texture##_MipInfo, texture##_StreamInfo, GetMipCount(TEXTURE2D_ARGS(texture, sampler##texture))) +#define SETUP_DEBUG_TEXTURE_DATA_2D(inputData, positionWS, positionCS, texture) SetupDebugDataTexture(inputData, positionWS, positionCS, texture##_TexelSize, texture##_MipInfo, 
texture##_StreamInfo, GetMipCount(TEXTURE2D_ARGS(texture, sampler##texture))) +#define SETUP_DEBUG_DATA_2D(inputData, positionWS, positionCS) SetupDebugData(inputData, positionWS, positionCS) void SetupDebugData(inout InputData2D inputData, float3 positionWS, float4 positionCS) { @@ -150,6 +151,7 @@ bool CanDebugOverrideOutputColor(inout SurfaceData2D surfaceData, inout InputDat #else +#define SETUP_DEBUG_TEXTURE_DATA_2D_NO_TS(inputData, positionWS, positionCS, texture) #define SETUP_DEBUG_TEXTURE_DATA_2D(inputData, positionWS, positionCS, texture) #define SETUP_DEBUG_DATA_2D(inputData, positionWS, positionCS) diff --git a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/Debugging3D.hlsl b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/Debugging3D.hlsl index 978b854924a..ba4d53899a9 100644 --- a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/Debugging3D.hlsl +++ b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/Debugging3D.hlsl @@ -131,6 +131,27 @@ bool CalculateValidationColorForDebug(in InputData inputData, in SurfaceData sur } } +float3 GetRenderingLayerMasksDebugColor(float4 positionCS, float3 normalWS) +{ + uint stripeSize = 8; + int renderingLayers = GetMeshRenderingLayer() & _DebugRenderingLayerMask; + uint layerId = 0, layerCount = countbits(renderingLayers); + float4 debugColor = float4(1, 1, 1, 1); + for (uint i = 0; (i < 32) && (layerId < layerCount); i++) + { + if (renderingLayers & (1U << i)) + { + uint t = (positionCS.y / stripeSize) % layerCount; + if (t == layerId) + debugColor.rgb = _DebugRenderingLayerMaskColors[i].rgb; + layerId++; + } + } + float shading = saturate(dot(normalWS, TransformViewToWorldDir(float3(0.0f, 0.0f, 1.0f), true))); + shading = Remap(0.0f, 1.0f, 0.6, 1.0f, shading); + return shading * debugColor.xyz; +} + bool CalculateColorForDebugMaterial(in InputData inputData, in SurfaceData surfaceData, inout half4 debugColor) { // Debug materials... 
@@ -175,6 +196,10 @@ bool CalculateColorForDebugMaterial(in InputData inputData, in SurfaceData surfa debugColor = half4(surfaceData.metallic.rrr, 1); return true; + case DEBUGMATERIALMODE_RENDERING_LAYER_MASKS: + debugColor.xyz = GetRenderingLayerMasksDebugColor(inputData.positionCS, inputData.normalWS).xyz; + return true; + default: return TryGetDebugColorInvalidMode(debugColor); } diff --git a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/DebuggingCommon.hlsl b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/DebuggingCommon.hlsl index d4452fb653b..76f06d43d5a 100644 --- a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/DebuggingCommon.hlsl +++ b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Debug/DebuggingCommon.hlsl @@ -36,6 +36,12 @@ half _DebugValidateAlbedoSaturationTolerance = 0.214; half _DebugValidateAlbedoHueTolerance = 0.104; half3 _DebugValidateAlbedoCompareColor = half3(0.5, 0.5, 0.5); +uint _DebugRenderingLayerMask = 0; + +CBUFFER_START(_DebugDisplayConstant) +float4 _DebugRenderingLayerMaskColors [32]; +CBUFFER_END + half _DebugValidateMetallicMinValue = 0; half _DebugValidateMetallicMaxValue = 0.9; diff --git a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Shadows.hlsl b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Shadows.hlsl index 17b986aa64b..338886a6d66 100644 --- a/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Shadows.hlsl +++ b/Packages/com.unity.render-pipelines.universal/ShaderLibrary/Shadows.hlsl @@ -106,7 +106,16 @@ CBUFFER_END #endif #endif -float4 _ShadowBias; // x: depth bias, y: normal bias +// x: depth bias, +// y: normal bias, +// z: light type (Spot = 0, Directional = 1, Point = 2, Area/Rectangle = 3, Disc = 4, Pyramid = 5, Box = 6, Tube = 7) +// w: unused +float4 _ShadowBias; + +half IsPointLight() +{ + return _ShadowBias.z > 1.0 && _ShadowBias.z <= 2.0 ? 
1 : 0; +} #define BEYOND_SHADOW_FAR(shadowCoord) shadowCoord.z <= 0.0 || shadowCoord.z >= 1.0 diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/2D/Light2D-Point.shader b/Packages/com.unity.render-pipelines.universal/Shaders/2D/Light2D-Point.shader deleted file mode 100644 index 1d886dde282..00000000000 --- a/Packages/com.unity.render-pipelines.universal/Shaders/2D/Light2D-Point.shader +++ /dev/null @@ -1,147 +0,0 @@ -Shader "Hidden/Light2D-Point" -{ - Properties - { - [HideInInspector] _SrcBlend("__src", Float) = 1.0 - [HideInInspector] _DstBlend("__dst", Float) = 0.0 - [Enum(UnityEngine.Rendering.CompareFunction)] _HandleZTest ("_HandleZTest", Int) = 4 - } - - SubShader - { - Tags { "Queue" = "Transparent" "RenderType" = "Transparent" "RenderPipeline" = "UniversalPipeline" } - - Pass - { - Blend [_SrcBlend][_DstBlend] - ZWrite Off - ZTest [_HandleZTest] - Cull Off - - HLSLPROGRAM - #pragma vertex vert - #pragma fragment frag - #pragma multi_compile_local USE_POINT_LIGHT_COOKIES __ - #pragma multi_compile_local LIGHT_QUALITY_FAST __ - #pragma multi_compile_local USE_NORMAL_MAP __ - #pragma multi_compile_local USE_ADDITIVE_BLENDING __ - #pragma multi_compile_local USE_VOLUMETRIC __ - #pragma multi_compile USE_SHAPE_LIGHT_TYPE_0 __ - #pragma multi_compile USE_SHAPE_LIGHT_TYPE_1 __ - #pragma multi_compile USE_SHAPE_LIGHT_TYPE_2 __ - #pragma multi_compile USE_SHAPE_LIGHT_TYPE_3 __ - - #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl" - #include "Packages/com.unity.render-pipelines.universal/Shaders/2D/Include/LightingUtility.hlsl" - - struct Attributes - { - float3 positionOS : POSITION; - float4 color : COLOR; - float2 texcoord : TEXCOORD0; - }; - - struct Varyings - { - float4 positionCS : SV_POSITION; - half2 uv : TEXCOORD0; - half2 lookupUV : TEXCOORD1; // This is used for light relative direction - - NORMALS_LIGHTING_COORDS(TEXCOORD2, TEXCOORD3) - SHADOW_COORDS(TEXCOORD4) - LIGHT_OFFSET(TEXCOORD5) - }; - - 
UNITY_LIGHT2D_DATA - -#if USE_POINT_LIGHT_COOKIES - TEXTURE2D(_PointLightCookieTex); - SAMPLER(sampler_PointLightCookieTex); -#endif - - TEXTURE2D(_FalloffLookup); - SAMPLER(sampler_FalloffLookup); - - TEXTURE2D(_LightLookup); - SAMPLER(sampler_LightLookup); - half4 _LightLookup_TexelSize; - - NORMALS_LIGHTING_VARIABLES - SHADOW_VARIABLES - - half _IsFullSpotlight; - half _InverseHDREmulationScale; - - Varyings vert(Attributes a) - { - Varyings output = (Varyings)0; - output.positionCS = TransformObjectToHClip(a.positionOS); - output.uv = a.texcoord; - output.lightOffset = a.color; - -#if USE_STRUCTURED_BUFFER_FOR_LIGHT2D_DATA - PerLight2D light = GetPerLight2D(output.lightOffset); -#endif - - float4 worldSpacePos; - worldSpacePos.xyz = TransformObjectToWorld(a.positionOS); - worldSpacePos.w = 1; - - float4 lightSpacePos = mul(_L2D_INVMATRIX, worldSpacePos); - float halfTexelOffset = 0.5 * _LightLookup_TexelSize.x; - output.lookupUV = 0.5 * (lightSpacePos.xy + 1) + halfTexelOffset; - - TRANSFER_NORMALS_LIGHTING(output, worldSpacePos, _L2D_POSITION.xyz, _L2D_POSITION.w) - TRANSFER_SHADOWS(output) - - return output; - } - - FragmentOutput frag(Varyings i) - { - -#if USE_STRUCTURED_BUFFER_FOR_LIGHT2D_DATA - PerLight2D light = GetPerLight2D(i.lightOffset); -#endif - - half4 lookupValue = SAMPLE_TEXTURE2D(_LightLookup, sampler_LightLookup, i.lookupUV); // r = distance, g = angle, b = x direction, a = y direction - - // Inner Radius - half attenuation = saturate(_L2D_INNER_RADIUS_MULT * lookupValue.r); // This is the code to take care of our inner radius - - // Spotlight - half isFullSpotlight = _L2D_INNER_ANGLE == 1.0f; - half spotAttenuation = saturate((_L2D_OUTER_ANGLE - lookupValue.g + _IsFullSpotlight) * (1.0f / (_L2D_OUTER_ANGLE - _L2D_INNER_ANGLE))); - attenuation = attenuation * spotAttenuation; - - half2 mappedUV; - mappedUV.x = attenuation; - mappedUV.y = _L2D_FALLOFF_INTENSITY; - attenuation = SAMPLE_TEXTURE2D(_FalloffLookup, sampler_FalloffLookup, 
mappedUV).r; - -#if USE_POINT_LIGHT_COOKIES - half4 cookieColor = SAMPLE_TEXTURE2D(_PointLightCookieTex, sampler_PointLightCookieTex, i.lookupUV); - half4 lightColor = cookieColor * _L2D_COLOR; -#else - half4 lightColor = _L2D_COLOR; -#endif - -#if USE_ADDITIVE_BLENDING || USE_VOLUMETRIC - lightColor *= attenuation; -#else - lightColor.a = attenuation; -#endif - - APPLY_NORMALS_LIGHTING(i, lightColor, _L2D_POSITION.xyz, _L2D_POSITION.w); - APPLY_SHADOWS(i, lightColor, _L2D_SHADOW_INTENSITY); - -#if USE_VOLUMETRIC - lightColor *= _L2D_VOLUME_OPACITY; -#endif - - return ToFragmentOutput(lightColor * _InverseHDREmulationScale); - } - ENDHLSL - } - } -} diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/2D/Light2D-Point.shader.meta b/Packages/com.unity.render-pipelines.universal/Shaders/2D/Light2D-Point.shader.meta deleted file mode 100644 index d7861553c45..00000000000 --- a/Packages/com.unity.render-pipelines.universal/Shaders/2D/Light2D-Point.shader.meta +++ /dev/null @@ -1,9 +0,0 @@ -fileFormatVersion: 2 -guid: e35a31e1679aeff489e202f5cc4853d5 -ShaderImporter: - externalObjects: {} - defaultTextures: [] - nonModifiableTextures: [] - userData: - assetBundleName: - assetBundleVariant: diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/2D/Light2D-Shape.shader b/Packages/com.unity.render-pipelines.universal/Shaders/2D/Light2D-Shape.shader deleted file mode 100644 index 15713b9866a..00000000000 --- a/Packages/com.unity.render-pipelines.universal/Shaders/2D/Light2D-Shape.shader +++ /dev/null @@ -1,136 +0,0 @@ -Shader "Hidden/Light2D-Shape" -{ - Properties - { - [HideInInspector] _SrcBlend("__src", Float) = 1.0 - [HideInInspector] _DstBlend("__dst", Float) = 0.0 - [Enum(UnityEngine.Rendering.CompareFunction)] _HandleZTest ("_HandleZTest", Int) = 4 - } - - SubShader - { - Tags { "Queue" = "Transparent" "RenderType" = "Transparent" "RenderPipeline" = "UniversalPipeline" } - - Pass - { - Blend [_SrcBlend][_DstBlend] - ZWrite Off - ZTest 
[_HandleZTest] - Cull Off - - HLSLPROGRAM - #pragma vertex vert - #pragma fragment frag - #pragma multi_compile_local USE_NORMAL_MAP __ - #pragma multi_compile_local LIGHT_QUALITY_FAST __ - #pragma multi_compile_local USE_ADDITIVE_BLENDING __ - #pragma multi_compile_local USE_VOLUMETRIC __ - #pragma multi_compile USE_SHAPE_LIGHT_TYPE_0 __ - #pragma multi_compile USE_SHAPE_LIGHT_TYPE_1 __ - #pragma multi_compile USE_SHAPE_LIGHT_TYPE_2 __ - #pragma multi_compile USE_SHAPE_LIGHT_TYPE_3 __ - - - #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Core.hlsl" - #include "Packages/com.unity.render-pipelines.universal/Shaders/2D/Include/LightingUtility.hlsl" - - struct Attributes - { - float3 positionOS : POSITION; - float4 color : COLOR; - float2 uv : TEXCOORD0; - }; - - struct Varyings - { - float4 positionCS : SV_POSITION; - half4 color : COLOR; - half2 uv : TEXCOORD0; - - SHADOW_COORDS(TEXCOORD1) - NORMALS_LIGHTING_COORDS(TEXCOORD2, TEXCOORD3) - LIGHT_OFFSET(TEXCOORD4) - }; - - UNITY_LIGHT2D_DATA - - half _InverseHDREmulationScale; - - TEXTURE2D(_CookieTex); // This can either be a sprite texture uv or a falloff texture - SAMPLER(sampler_CookieTex); - - TEXTURE2D(_FalloffLookup); - SAMPLER(sampler_FalloffLookup); - - NORMALS_LIGHTING_VARIABLES - SHADOW_VARIABLES - - Varyings vert(Attributes a) - { - Varyings o = (Varyings)0; - o.lightOffset = a.color; - -#if USE_STRUCTURED_BUFFER_FOR_LIGHT2D_DATA - PerLight2D light = GetPerLight2D(o.lightOffset); -#endif - - float3 positionOS = a.positionOS; - - positionOS.x = positionOS.x + _L2D_FALLOFF_DISTANCE * a.color.r; - positionOS.y = positionOS.y + _L2D_FALLOFF_DISTANCE * a.color.g; - - o.positionCS = TransformObjectToHClip(positionOS); - o.color = _L2D_COLOR * _InverseHDREmulationScale; - o.color.a = a.color.a; -#if USE_VOLUMETRIC - o.color.a = _L2D_COLOR.a * _L2D_VOLUME_OPACITY; -#endif - - // If Sprite use UV. - o.uv = (_L2D_LIGHT_TYPE == 2) ? 
a.uv : float2(a.color.a, _L2D_FALLOFF_INTENSITY); - - float4 worldSpacePos; - worldSpacePos.xyz = TransformObjectToWorld(positionOS); - worldSpacePos.w = 1; - TRANSFER_NORMALS_LIGHTING(o, worldSpacePos, _L2D_POSITION.xyz, _L2D_POSITION.w) - TRANSFER_SHADOWS(o) - - return o; - } - - FragmentOutput frag(Varyings i) : SV_Target - { -#if USE_STRUCTURED_BUFFER_FOR_LIGHT2D_DATA - PerLight2D light = GetPerLight2D(i.lightOffset); -#endif - - half4 lightColor = i.color; - - if (_L2D_LIGHT_TYPE == 2) - { - half4 cookie = SAMPLE_TEXTURE2D(_CookieTex, sampler_CookieTex, i.uv); -#if USE_ADDITIVE_BLENDING - lightColor *= cookie * cookie.a; -#else - lightColor *= cookie; -#endif - } - else - { -#if USE_ADDITIVE_BLENDING - lightColor *= SAMPLE_TEXTURE2D(_FalloffLookup, sampler_FalloffLookup, i.uv).r; -#elif USE_VOLUMETRIC - lightColor.a = i.color.a * SAMPLE_TEXTURE2D(_FalloffLookup, sampler_FalloffLookup, i.uv).r; -#else - lightColor.a = SAMPLE_TEXTURE2D(_FalloffLookup, sampler_FalloffLookup, i.uv).r; -#endif - } - - APPLY_NORMALS_LIGHTING(i, lightColor, _L2D_POSITION.xyz, _L2D_POSITION.w); - APPLY_SHADOWS(i, lightColor, _L2D_SHADOW_INTENSITY); - return ToFragmentOutput(lightColor); - } - ENDHLSL - } - } -} diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/2D/Light2D-Shape.shader.meta b/Packages/com.unity.render-pipelines.universal/Shaders/2D/Light2D-Shape.shader.meta deleted file mode 100644 index ab340501148..00000000000 --- a/Packages/com.unity.render-pipelines.universal/Shaders/2D/Light2D-Shape.shader.meta +++ /dev/null @@ -1,9 +0,0 @@ -fileFormatVersion: 2 -guid: d79e1c784eaf80c4585c0be7391f757a -ShaderImporter: - externalObjects: {} - defaultTextures: [] - nonModifiableTextures: [] - userData: - assetBundleName: - assetBundleVariant: diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/2D/Light2D.shader b/Packages/com.unity.render-pipelines.universal/Shaders/2D/Light2D.shader index bca2ce7b58c..1dacff8dafe 100644 --- 
a/Packages/com.unity.render-pipelines.universal/Shaders/2D/Light2D.shader +++ b/Packages/com.unity.render-pipelines.universal/Shaders/2D/Light2D.shader @@ -4,7 +4,6 @@ Shader "Hidden/Light2D" { [HideInInspector] _SrcBlend("__src", Float) = 1.0 [HideInInspector] _DstBlend("__dst", Float) = 0.0 - [Enum(UnityEngine.Rendering.CompareFunction)] _HandleZTest("_HandleZTest", Int) = 0 } SubShader @@ -15,7 +14,7 @@ Shader "Hidden/Light2D" { Blend [_SrcBlend][_DstBlend] ZWrite Off - ZTest [_HandleZTest] + ZTest Off Cull Off HLSLPROGRAM diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/2D/Sprite-Lit-Default.shader b/Packages/com.unity.render-pipelines.universal/Shaders/2D/Sprite-Lit-Default.shader index 4374894c2d6..970bac7efb8 100644 --- a/Packages/com.unity.render-pipelines.universal/Shaders/2D/Sprite-Lit-Default.shader +++ b/Packages/com.unity.render-pipelines.universal/Shaders/2D/Sprite-Lit-Default.shader @@ -64,7 +64,6 @@ Shader "Universal Render Pipeline/2D/Sprite-Lit-Default" TEXTURE2D(_MainTex); SAMPLER(sampler_MainTex); - float4 _MainTex_TexelSize; UNITY_TEXTURE_STREAMING_DEBUG_VARS_FOR_TEX(_MainTex); TEXTURE2D(_MaskTex); @@ -72,8 +71,6 @@ Shader "Universal Render Pipeline/2D/Sprite-Lit-Default" // NOTE: Do not ifdef the properties here as SRP batcher can not handle different layouts. CBUFFER_START(UnityPerMaterial) - half4 _MainTex_ST; - half4 _NormalMap_ST; // Is this the right way to do this? 
half4 _Color; CBUFFER_END @@ -105,7 +102,7 @@ Shader "Universal Render Pipeline/2D/Sprite-Lit-Default" #if defined(DEBUG_DISPLAY) o.positionWS = TransformObjectToWorld(v.positionOS); #endif - o.uv = TRANSFORM_TEX(v.uv, _MainTex); + o.uv = v.uv; o.lightingUV = half2(ComputeScreenPos(o.positionCS / o.positionCS.w).xy); o.color = v.color * _Color * unity_SpriteColor; @@ -124,7 +121,7 @@ Shader "Universal Render Pipeline/2D/Sprite-Lit-Default" InitializeSurfaceData(main.rgb, main.a, mask, surfaceData); InitializeInputData(i.uv, i.lightingUV, inputData); - SETUP_DEBUG_TEXTURE_DATA_2D(inputData, i.positionWS, i.positionCS, _MainTex); + SETUP_DEBUG_TEXTURE_DATA_2D_NO_TS(inputData, i.positionWS, i.positionCS, _MainTex); return CombinedShapeLightShared(surfaceData, inputData); } @@ -174,8 +171,6 @@ Shader "Universal Render Pipeline/2D/Sprite-Lit-Default" // NOTE: Do not ifdef the properties here as SRP batcher can not handle different layouts. CBUFFER_START( UnityPerMaterial ) - half4 _MainTex_ST; - half4 _NormalMap_ST; // Is this the right way to do this? half4 _Color; CBUFFER_END @@ -188,7 +183,7 @@ Shader "Universal Render Pipeline/2D/Sprite-Lit-Default" attributes.positionOS = UnityFlipSprite(attributes.positionOS, unity_SpriteProps.xy); o.positionCS = TransformObjectToHClip(attributes.positionOS); - o.uv = TRANSFORM_TEX(attributes.uv, _NormalMap); + o.uv = attributes.uv; o.color = attributes.color; o.normalWS = -GetViewForwardDir(); o.tangentWS = TransformObjectToWorldDir(attributes.tangent.xyz); @@ -247,13 +242,10 @@ Shader "Universal Render Pipeline/2D/Sprite-Lit-Default" TEXTURE2D(_MainTex); SAMPLER(sampler_MainTex); - float4 _MainTex_TexelSize; UNITY_TEXTURE_STREAMING_DEBUG_VARS_FOR_TEX(_MainTex); // NOTE: Do not ifdef the properties here as SRP batcher can not handle different layouts. CBUFFER_START( UnityPerMaterial ) - half4 _MainTex_ST; - half4 _NormalMap_ST; // Is this the right way to do this? 
half4 _Color; CBUFFER_END @@ -269,7 +261,7 @@ Shader "Universal Render Pipeline/2D/Sprite-Lit-Default" #if defined(DEBUG_DISPLAY) o.positionWS = TransformObjectToWorld(attributes.positionOS); #endif - o.uv = TRANSFORM_TEX(attributes.uv, _MainTex); + o.uv = attributes.uv; o.color = attributes.color * _Color * unity_SpriteColor; return o; } @@ -285,7 +277,7 @@ Shader "Universal Render Pipeline/2D/Sprite-Lit-Default" InitializeSurfaceData(mainTex.rgb, mainTex.a, surfaceData); InitializeInputData(i.uv, inputData); - SETUP_DEBUG_TEXTURE_DATA_2D(inputData, i.positionWS, i.positionCS, _MainTex); + SETUP_DEBUG_TEXTURE_DATA_2D_NO_TS(inputData, i.positionWS, i.positionCS, _MainTex); if(CanDebugOverrideOutputColor(surfaceData, inputData, debugColor)) { diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/2D/Sprite-Unlit-Default.shader b/Packages/com.unity.render-pipelines.universal/Shaders/2D/Sprite-Unlit-Default.shader index 856b135a5a8..ae87a2594a4 100644 --- a/Packages/com.unity.render-pipelines.universal/Shaders/2D/Sprite-Unlit-Default.shader +++ b/Packages/com.unity.render-pipelines.universal/Shaders/2D/Sprite-Unlit-Default.shader @@ -61,12 +61,10 @@ Shader "Universal Render Pipeline/2D/Sprite-Unlit-Default" TEXTURE2D(_MainTex); SAMPLER(sampler_MainTex); - float4 _MainTex_TexelSize; UNITY_TEXTURE_STREAMING_DEBUG_VARS_FOR_TEX(_MainTex); // NOTE: Do not ifdef the properties here as SRP batcher can not handle different layouts. 
CBUFFER_START(UnityPerMaterial) - half4 _MainTex_ST; half4 _Color; CBUFFER_END @@ -82,7 +80,7 @@ Shader "Universal Render Pipeline/2D/Sprite-Unlit-Default" #if defined(DEBUG_DISPLAY) o.positionWS = TransformObjectToWorld(v.positionOS); #endif - o.uv = TRANSFORM_TEX(v.uv, _MainTex); + o.uv = v.uv; o.color = v.color * _Color * unity_SpriteColor; return o; } @@ -98,7 +96,7 @@ Shader "Universal Render Pipeline/2D/Sprite-Unlit-Default" InitializeSurfaceData(mainTex.rgb, mainTex.a, surfaceData); InitializeInputData(i.uv, inputData); - SETUP_DEBUG_TEXTURE_DATA_2D(inputData, i.positionWS, i.positionCS, _MainTex); + SETUP_DEBUG_TEXTURE_DATA_2D_NO_TS(inputData, i.positionWS, i.positionCS, _MainTex); if(CanDebugOverrideOutputColor(surfaceData, inputData, debugColor)) { @@ -152,12 +150,10 @@ Shader "Universal Render Pipeline/2D/Sprite-Unlit-Default" TEXTURE2D(_MainTex); SAMPLER(sampler_MainTex); - float4 _MainTex_TexelSize; UNITY_TEXTURE_STREAMING_DEBUG_VARS_FOR_TEX(_MainTex); // NOTE: Do not ifdef the properties here as SRP batcher can not handle different layouts. 
CBUFFER_START( UnityPerMaterial ) - half4 _MainTex_ST; half4 _Color; CBUFFER_END @@ -166,14 +162,14 @@ Shader "Universal Render Pipeline/2D/Sprite-Unlit-Default" Varyings o = (Varyings)0; UNITY_SETUP_INSTANCE_ID(attributes); UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o); - UNITY_SKINNED_VERTEX_COMPUTE(v); + UNITY_SKINNED_VERTEX_COMPUTE(attributes); attributes.positionOS = UnityFlipSprite(attributes.positionOS, unity_SpriteProps.xy); o.positionCS = TransformObjectToHClip(attributes.positionOS); #if defined(DEBUG_DISPLAY) o.positionWS = TransformObjectToWorld(attributes.positionOS); #endif - o.uv = TRANSFORM_TEX(attributes.uv, _MainTex); + o.uv = attributes.uv; o.color = attributes.color * _Color * unity_SpriteColor; return o; } @@ -189,7 +185,7 @@ Shader "Universal Render Pipeline/2D/Sprite-Unlit-Default" InitializeSurfaceData(mainTex.rgb, mainTex.a, surfaceData); InitializeInputData(i.uv, inputData); - SETUP_DEBUG_TEXTURE_DATA_2D(inputData, i.positionWS, i.positionCS, _MainTex); + SETUP_DEBUG_TEXTURE_DATA_2D_NO_TS(inputData, i.positionWS, i.positionCS, _MainTex); if(CanDebugOverrideOutputColor(surfaceData, inputData, debugColor)) { diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/Nature/SpeedTree7.shader b/Packages/com.unity.render-pipelines.universal/Shaders/Nature/SpeedTree7.shader index d4a86234e09..d58791e6cec 100644 --- a/Packages/com.unity.render-pipelines.universal/Shaders/Nature/SpeedTree7.shader +++ b/Packages/com.unity.render-pipelines.universal/Shaders/Nature/SpeedTree7.shader @@ -44,7 +44,7 @@ Shader "Universal Render Pipeline/Nature/SpeedTree7" #pragma multi_compile_fragment _ _SHADOWS_SOFT _SHADOWS_SOFT_LOW _SHADOWS_SOFT_MEDIUM _SHADOWS_SOFT_HIGH #pragma multi_compile _ LOD_FADE_CROSSFADE #pragma multi_compile_fragment _ _SCREEN_SPACE_OCCLUSION - #pragma multi_compile_vertex LOD_FADE_PERCENTAGE + #pragma multi_compile LOD_FADE_PERCENTAGE #pragma multi_compile_fragment _ DEBUG_DISPLAY #pragma multi_compile_fragment _ _LIGHT_COOKIES #pragma 
multi_compile _ _LIGHT_LAYERS @@ -150,7 +150,7 @@ Shader "Universal Render Pipeline/Nature/SpeedTree7" //#pragma multi_compile _ _ADDITIONAL_LIGHT_SHADOWS #pragma multi_compile_fragment _ _SHADOWS_SOFT _SHADOWS_SOFT_LOW _SHADOWS_SOFT_MEDIUM _SHADOWS_SOFT_HIGH #pragma multi_compile _ LOD_FADE_CROSSFADE - #pragma multi_compile_vertex LOD_FADE_PERCENTAGE + #pragma multi_compile LOD_FADE_PERCENTAGE #pragma multi_compile_fragment _ _GBUFFER_NORMALS_OCT #pragma multi_compile_fragment _ _RENDER_PASS_ENABLED #include_with_pragmas "Packages/com.unity.render-pipelines.universal/ShaderLibrary/ProbeVolumeVariants.hlsl" diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/Nature/SpeedTree8.shader b/Packages/com.unity.render-pipelines.universal/Shaders/Nature/SpeedTree8.shader index 2144e2f3dbc..64cbed64bcc 100644 --- a/Packages/com.unity.render-pipelines.universal/Shaders/Nature/SpeedTree8.shader +++ b/Packages/com.unity.render-pipelines.universal/Shaders/Nature/SpeedTree8.shader @@ -57,7 +57,7 @@ Shader "Universal Render Pipeline/Nature/SpeedTree8" #pragma multi_compile _ _ADDITIONAL_LIGHTS_VERTEX _ADDITIONAL_LIGHTS #pragma multi_compile _ _LIGHT_LAYERS #pragma multi_compile _ _FORWARD_PLUS - #pragma multi_compile_vertex LOD_FADE_PERCENTAGE + #pragma multi_compile LOD_FADE_PERCENTAGE #pragma multi_compile_fragment _ _ADDITIONAL_LIGHT_SHADOWS #pragma multi_compile_fragment _ _REFLECTION_PROBE_BLENDING #pragma multi_compile_fragment _ _REFLECTION_PROBE_BOX_PROJECTION @@ -77,9 +77,9 @@ Shader "Universal Render Pipeline/Nature/SpeedTree8" #pragma shader_feature_local_vertex _WINDQUALITY_NONE _WINDQUALITY_FAST _WINDQUALITY_BETTER _WINDQUALITY_BEST _WINDQUALITY_PALM #pragma shader_feature_local EFFECT_BILLBOARD + #pragma shader_feature_local EFFECT_BUMP #pragma shader_feature_local_fragment EFFECT_HUE_VARIATION #pragma shader_feature_local_fragment EFFECT_SUBSURFACE - #pragma shader_feature_local_fragment EFFECT_BUMP #pragma shader_feature_local_fragment 
EFFECT_EXTRA_TEX #define ENABLE_WIND @@ -141,7 +141,7 @@ Shader "Universal Render Pipeline/Nature/SpeedTree8" #pragma multi_compile_fragment _ _REFLECTION_PROBE_BOX_PROJECTION #pragma multi_compile_fragment _ _SHADOWS_SOFT _SHADOWS_SOFT_LOW _SHADOWS_SOFT_MEDIUM _SHADOWS_SOFT_HIGH #pragma multi_compile _ LOD_FADE_CROSSFADE - #pragma multi_compile_vertex LOD_FADE_PERCENTAGE + #pragma multi_compile LOD_FADE_PERCENTAGE #pragma multi_compile_fragment _ _GBUFFER_NORMALS_OCT #pragma multi_compile_fragment _ _RENDER_PASS_ENABLED #include_with_pragmas "Packages/com.unity.render-pipelines.universal/ShaderLibrary/ProbeVolumeVariants.hlsl" @@ -152,9 +152,9 @@ Shader "Universal Render Pipeline/Nature/SpeedTree8" #pragma shader_feature_local_vertex _WINDQUALITY_NONE _WINDQUALITY_FAST _WINDQUALITY_BETTER _WINDQUALITY_BEST _WINDQUALITY_PALM #pragma shader_feature_local EFFECT_BILLBOARD + #pragma shader_feature_local EFFECT_BUMP #pragma shader_feature_local_fragment EFFECT_HUE_VARIATION #pragma shader_feature_local_fragment EFFECT_SUBSURFACE - #pragma shader_feature_local_fragment EFFECT_BUMP #pragma shader_feature_local_fragment EFFECT_EXTRA_TEX #define ENABLE_WIND @@ -184,7 +184,7 @@ Shader "Universal Render Pipeline/Nature/SpeedTree8" #pragma multi_compile_instancing #pragma instancing_options assumeuniformscaling maxcount:50 - #pragma shader_feature_local_vertex _WINDQUALITY_NONE _WINDQUALITY_FAST _WINDQUALITY_BETTER _WINDQUALITY_BEST _WINDQUALITY_PALMlity and backface normal mode enum keywords) + #pragma shader_feature_local_vertex _WINDQUALITY_NONE _WINDQUALITY_FAST _WINDQUALITY_BETTER _WINDQUALITY_BEST _WINDQUALITY_PALM #pragma shader_feature_local EFFECT_BILLBOARD #define ENABLE_WIND @@ -245,7 +245,7 @@ Shader "Universal Render Pipeline/Nature/SpeedTree8" #pragma fragment SpeedTree8FragDepthNormal #pragma shader_feature_local_vertex _WINDQUALITY_NONE _WINDQUALITY_FAST _WINDQUALITY_BETTER _WINDQUALITY_BEST _WINDQUALITY_PALM - #pragma shader_feature_local_fragment EFFECT_BUMP 
+ #pragma shader_feature_local EFFECT_BUMP #pragma multi_compile _ LOD_FADE_CROSSFADE #pragma multi_compile_instancing diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/Nature/SpeedTree8Passes.hlsl b/Packages/com.unity.render-pipelines.universal/Shaders/Nature/SpeedTree8Passes.hlsl index 81870755154..ebbf1cbb8b1 100644 --- a/Packages/com.unity.render-pipelines.universal/Shaders/Nature/SpeedTree8Passes.hlsl +++ b/Packages/com.unity.render-pipelines.universal/Shaders/Nature/SpeedTree8Passes.hlsl @@ -3,6 +3,7 @@ #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/Lighting.hlsl" #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/UnityGBuffer.hlsl" +#include "Packages/com.unity.shadergraph/ShaderGraphLibrary/Nature/SpeedTreeCommon.hlsl" #include "SpeedTreeUtility.hlsl" #if defined(LOD_FADE_CROSSFADE) #include "Packages/com.unity.render-pipelines.universal/ShaderLibrary/LODCrossFade.hlsl" @@ -96,8 +97,26 @@ struct SpeedTreeFragmentInput void InitializeData(inout SpeedTreeVertexInput input, float lodValue) { -#if defined(LOD_FADE_PERCENTAGE) && (!defined(LOD_FADE_CROSSFADE) && !defined(EFFECT_BILLBOARD)) - input.vertex.xyz = lerp(input.vertex.xyz, input.texcoord2.xyz, lodValue); +#if !defined(EFFECT_BILLBOARD) + #if defined(LOD_FADE_PERCENTAGE) && (!defined(LOD_FADE_CROSSFADE)) + input.vertex.xyz = lerp(input.vertex.xyz, input.texcoord2.xyz, lodValue); + #endif + + // geometry type + float geometryType = (int) (input.texcoord3.w + 0.25); + bool leafTwo = false; + if (geometryType > GEOM_TYPE_FACINGLEAF) + { + geometryType -= 2; + leafTwo = true; + } + + // leaf facing + if (geometryType == GEOM_TYPE_FACINGLEAF) + { + float3 anchor = float3(input.texcoord1.zw, input.texcoord2.w); + input.vertex.xyz = DoLeafFacing(input.vertex.xyz, anchor); + } #endif // wind @@ -117,14 +136,6 @@ void InitializeData(inout SpeedTreeVertexInput input, float lodValue) float3 windyPosition = input.vertex.xyz; #ifndef EFFECT_BILLBOARD - // 
geometry type - float geometryType = (int)(input.texcoord3.w + 0.25); - bool leafTwo = false; - if (geometryType > GEOM_TYPE_FACINGLEAF) - { - geometryType -= 2; - leafTwo = true; - } // leaves if (geometryType > GEOM_TYPE_FROND) @@ -132,15 +143,7 @@ void InitializeData(inout SpeedTreeVertexInput input, float lodValue) // remove anchor position float3 anchor = float3(input.texcoord1.zw, input.texcoord2.w); windyPosition -= anchor; - - if (geometryType == GEOM_TYPE_FACINGLEAF) - { - // face camera-facing leaf to camera - float offsetLen = length(windyPosition); - windyPosition = mul(windyPosition.xyz, (float3x3)UNITY_MATRIX_IT_MV); // inv(MV) * windyPosition - windyPosition = normalize(windyPosition) * offsetLen; // make sure the offset vector is still scaled - } - + // leaf wind #if defined(_WINDQUALITY_FAST) || defined(_WINDQUALITY_BETTER) || defined(_WINDQUALITY_BEST) #ifdef _WINDQUALITY_BEST diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/ShadowCasterPass.hlsl b/Packages/com.unity.render-pipelines.universal/Shaders/ShadowCasterPass.hlsl index 3d2b44cad7d..18690d1c7bf 100644 --- a/Packages/com.unity.render-pipelines.universal/Shaders/ShadowCasterPass.hlsl +++ b/Packages/com.unity.render-pipelines.universal/Shaders/ShadowCasterPass.hlsl @@ -44,11 +44,17 @@ float4 GetShadowPositionHClip(Attributes input) float4 positionCS = TransformWorldToHClip(ApplyShadowBias(positionWS, normalWS, lightDirectionWS)); #if UNITY_REVERSED_Z - positionCS.z = min(positionCS.z, positionCS.w * UNITY_NEAR_CLIP_VALUE); + float clamped = min(positionCS.z, positionCS.w * UNITY_NEAR_CLIP_VALUE); #else - positionCS.z = max(positionCS.z, positionCS.w * UNITY_NEAR_CLIP_VALUE); + float clamped = max(positionCS.z, positionCS.w * UNITY_NEAR_CLIP_VALUE); #endif + // The current implementation of vertex clamping in Universal RP is the same as in Unity Built-In RP. 
+ // This does not work well with Point Lights, which is why it is disabled in Built-In RP + // (see: https://github.cds.internal.unity3d.com/unity/unity/blob/a9c916ba27984da43724ba18e70f51469e0c34f5/Runtime/Camera/Shadows.cpp#L1685-L1686) + // We follow the same convention in Universal RP: + positionCS.z = lerp(clamped, positionCS.z, IsPointLight()); + return positionCS; } diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/Utils/BlitHDROverlay.shader b/Packages/com.unity.render-pipelines.universal/Shaders/Utils/BlitHDROverlay.shader index 835feba119c..85b6e9f0f86 100644 --- a/Packages/com.unity.render-pipelines.universal/Shaders/Utils/BlitHDROverlay.shader +++ b/Packages/com.unity.render-pipelines.universal/Shaders/Utils/BlitHDROverlay.shader @@ -40,7 +40,7 @@ Shader "Hidden/Universal/BlitHDROverlay" float4 FragBlitHDR(Varyings input, SamplerState s) { float4 color = FragBlit(input, s); - UNITY_BRANCH if(!_HDR_OVERLAY) + if(!_HDR_OVERLAY) { return color; } diff --git a/Packages/com.unity.render-pipelines.universal/Shaders/Utils/FallbackError.shader b/Packages/com.unity.render-pipelines.universal/Shaders/Utils/FallbackError.shader index 0961ae524cc..95677334f3d 100644 --- a/Packages/com.unity.render-pipelines.universal/Shaders/Utils/FallbackError.shader +++ b/Packages/com.unity.render-pipelines.universal/Shaders/Utils/FallbackError.shader @@ -24,7 +24,6 @@ Shader "Hidden/Universal Render Pipeline/FallbackError" // ------------------------------------- // Unity defined keywords #pragma multi_compile _ STEREO_INSTANCING_ON STEREO_MULTIVIEW_ON - #pragma multi_compile _ DOTS_INSTANCING_ON //-------------------------------------- // GPU Instancing diff --git a/Packages/com.unity.render-pipelines.universal/Tests/Editor/RenderingLayersTests.cs b/Packages/com.unity.render-pipelines.universal/Tests/Editor/RenderingLayersTests.cs new file mode 100644 index 00000000000..4dd69483e98 --- /dev/null +++ 
b/Packages/com.unity.render-pipelines.universal/Tests/Editor/RenderingLayersTests.cs @@ -0,0 +1,96 @@ +using System.Collections.Generic; +using NUnit.Framework; +using UnityEditor.Rendering; +using UnityEngine; +using UnityEngine.Rendering.Universal; + +[TestFixture] +class RenderingLayersTests +{ + string[] m_DefinedLayers; + int[] m_DefinedValues; + int m_LayersSize; + + [OneTimeSetUp] + public void OneTimeSetup() + { + m_DefinedLayers = RenderingLayerMask.GetDefinedRenderingLayerNames(); + m_DefinedValues = RenderingLayerMask.GetDefinedRenderingLayerValues(); + m_LayersSize = RenderingLayerMask.GetRenderingLayerCount(); + } + + [OneTimeTearDown] + public void OneTimeTeardown() + { + var diff = RenderingLayerMask.GetRenderingLayerCount() - m_LayersSize; + if (diff > 0) + for (int i = 0; i < diff; i++) + RenderPipelineEditorUtility.TryRemoveLastRenderingLayerName(); + else + for (int i = 0; i < -diff; i++) + RenderPipelineEditorUtility.TryAddRenderingLayerName(string.Empty); + + for (int i = 1; i < RenderingLayerMask.GetRenderingLayerCount(); i++) + RenderPipelineEditorUtility.TrySetRenderingLayerName(i, string.Empty); + + for (int i = 0; i < m_DefinedValues.Length; i++) + { + var value = m_DefinedValues[i]; + if (RenderingLayerMask.defaultRenderingLayerMask == value) + continue; + var name = m_DefinedLayers[i]; + var index = Mathf.FloorToInt(Mathf.Log(value, 2)); + RenderPipelineEditorUtility.TrySetRenderingLayerName(index, name); + } + } + + [SetUp] + public void Setup() + { + var layerCount = RenderingLayerMask.GetRenderingLayerCount() - 1; + for (int i = 0; i < layerCount; i++) + RenderPipelineEditorUtility.TryRemoveLastRenderingLayerName(); + } + + static TestCaseData[] s_MaskSizeTestCases = + { + new TestCaseData(7) + .SetName("Given a Rendering Layers size of 7, the mask size is 8 bits") + .Returns((int)RenderingLayerUtils.MaskSize.Bits8), + new TestCaseData(8) + .SetName("Given a Rendering Layers size of 8, the mask size is 8 bits") + 
.Returns((int)RenderingLayerUtils.MaskSize.Bits8), + new TestCaseData(9) + .SetName("Given a Rendering Layers size of 9, the mask size is 16 bits") + .Returns((int)RenderingLayerUtils.MaskSize.Bits16), + new TestCaseData(16) + .SetName("Given a Rendering Layers size of 16, the mask size is 16 bits") + .Returns((int)RenderingLayerUtils.MaskSize.Bits16), + new TestCaseData(17) + .SetName("Given a Rendering Layers size of 17, the mask size is 24 bits") + .Returns((int)RenderingLayerUtils.MaskSize.Bits24), + new TestCaseData(24) + .SetName("Given a Rendering Layers size of 24, the mask size is 24 bits") + .Returns((int)RenderingLayerUtils.MaskSize.Bits24), + new TestCaseData(25) + .SetName("Given a Rendering Layers size of 25, the mask size is 32 bits") + .Returns((int)RenderingLayerUtils.MaskSize.Bits32), + new TestCaseData(32) + .SetName("Given a Rendering Layers size of 32, the mask size is 32 bits") + .Returns((int)RenderingLayerUtils.MaskSize.Bits32) + }; + + [Test, TestCaseSource(nameof(s_MaskSizeTestCases))] + public int MaskSizeTest(int tagManagerLayerCount) + { + var currentLayerCount = RenderingLayerMask.GetRenderingLayerCount(); + var requiredLayers = tagManagerLayerCount - currentLayerCount; + if (requiredLayers > 0) + for (int i = 0; i < requiredLayers; i++) + RenderPipelineEditorUtility.TryAddRenderingLayerName($"Layer {currentLayerCount + i}"); + + var urpRenderer = ScriptableObject.CreateInstance().InternalCreateRenderer() as UniversalRenderer; + RenderingLayerUtils.RequireRenderingLayers(urpRenderer, new List(), 0, out var evt, out var maskSize); + return (int)maskSize; + } +} diff --git a/Packages/com.unity.render-pipelines.universal/Tests/Editor/RenderingLayersTests.cs.meta b/Packages/com.unity.render-pipelines.universal/Tests/Editor/RenderingLayersTests.cs.meta new file mode 100644 index 00000000000..1e27ae40ad9 --- /dev/null +++ b/Packages/com.unity.render-pipelines.universal/Tests/Editor/RenderingLayersTests.cs.meta @@ -0,0 +1,3 @@ 
+fileFormatVersion: 2 +guid: 7bd051581c284375b0ba2d185aca3541 +timeCreated: 1724681009 \ No newline at end of file diff --git a/Packages/com.unity.shadergraph/Documentation~/Camera-Node.md b/Packages/com.unity.shadergraph/Documentation~/Camera-Node.md index a3b78be8907..7fa343c192a 100644 --- a/Packages/com.unity.shadergraph/Documentation~/Camera-Node.md +++ b/Packages/com.unity.shadergraph/Documentation~/Camera-Node.md @@ -28,7 +28,7 @@ The following example code represents one possible outcome of this node. ``` float3 _Camera_Position = _WorldSpaceCameraPos; -float3 _Camera_Direction = -1 * mul(UNITY_MATRIX_M, transpose(mul(UNITY_MATRIX_I_M, UNITY_MATRIX_I_V)) [2].xyz); +float3 _Camera_Direction = -UNITY_MATRIX_V[2].xyz; float _Camera_Orthographic = unity_OrthoParams.w; float _Camera_NearPlane = _ProjectionParams.y; float _Camera_FarPlane = _ProjectionParams.z; diff --git a/Packages/com.unity.shadergraph/Documentation~/First-Shader-Graph.md b/Packages/com.unity.shadergraph/Documentation~/First-Shader-Graph.md index 5db5d01622a..303b7dcda8b 100644 --- a/Packages/com.unity.shadergraph/Documentation~/First-Shader-Graph.md +++ b/Packages/com.unity.shadergraph/Documentation~/First-Shader-Graph.md @@ -105,11 +105,7 @@ Older tutorials use an outdated format of Shader Graph with master nodes. 
When l To keep exploring how to use Shader Graph to author shaders, check out these blog posts: -- [Art That Moves: Creating Animated Materials with Shader Graph](https://blogs.unity3d.com/2018/10/05/art-that-moves-creating-animated-materials-with-shader-graph/) -- [Shader Graph Updates and Sample Project ](https://blogs.unity3d.com/2018/08/07/shader-graph-updates-and-sample-project/) -- [Custom Lighting in Shader Graph: Expanding Your Graphs in 2019](https://blogs.unity3d.com/2019/07/31/custom-lighting-in-shader-graph-expanding-your-graphs-in-2019/) -- [Unity 2018.3 Shader Graph Update: Lit Master Node](https://blogs.unity3d.com/2018/12/19/unity-2018-3-shader-graph-update-lit-master-node/) -- [Creating an Interactive Vertex Effect using Shader Graph](https://blogs.unity3d.com/2019/02/12/creating-an-interactive-vertex-effect-using-shader-graph/) -- [Introduction to Shader Graph: Build your shaders with a visual editor](https://blogs.unity3d.com/2018/02/27/introduction-to-shader-graph-build-your-shaders-with-a-visual-editor/) - -You can also visit the [Unity YouTube Channel](https://www.youtube.com/channel/UCG08EqOAXJk_YXPDsAvReSg) and look for [video tutorials on Shader Graph](https://www.youtube.com/user/Unity3D/search?query=shader+graph), or head to our [user forum](https://forum.unity.com/forums/shader-graph.346/) to find the latest information and conversations about Shader Graph. 
+- [Art That Moves: Creating Animated Materials with Shader Graph](https://unity.com/blog/engine-platform/creating-animated-materials-with-shader-graph) +- [Custom Lighting in Shader Graph: Expanding Your Graphs in 2019](https://unity.com/blog/engine-platform/custom-lighting-in-shader-graph-expanding-your-graphs-in-2019) + +You can also visit the [Unity YouTube Channel](https://www.youtube.com/channel/UCG08EqOAXJk_YXPDsAvReSg) and look for [video tutorials on Shader Graph](https://www.youtube.com/user/Unity3D/search?query=shader+graph), or head to our [user forum](https://discussions.unity.com/tags/c/unity-engine/52/shader-graph) to find the latest information and conversations about Shader Graph. diff --git a/Packages/com.unity.shadergraph/Editor/Data/Graphs/AbstractShaderProperty.cs b/Packages/com.unity.shadergraph/Editor/Data/Graphs/AbstractShaderProperty.cs index 2f73ee25a15..2faa21f55d4 100644 --- a/Packages/com.unity.shadergraph/Editor/Data/Graphs/AbstractShaderProperty.cs +++ b/Packages/com.unity.shadergraph/Editor/Data/Graphs/AbstractShaderProperty.cs @@ -27,7 +27,7 @@ internal virtual string GetHLSLVariableName(bool isSubgraphProperty, GenerationM if (mode == GenerationMode.VFX) { // Per-element exposed properties are provided by the properties structure filled by VFX. - if (overrideHLSLDeclaration) + if (overrideHLSLDeclaration && hlslDeclarationOverride != HLSLDeclaration.Global) return $"PROP.{referenceName}"; // For un-exposed global properties, just read from the cbuffer. 
else diff --git a/Packages/com.unity.shadergraph/Editor/Data/Graphs/GroupData.cs b/Packages/com.unity.shadergraph/Editor/Data/Graphs/GroupData.cs index acab2ec9abb..363ce4000bb 100644 --- a/Packages/com.unity.shadergraph/Editor/Data/Graphs/GroupData.cs +++ b/Packages/com.unity.shadergraph/Editor/Data/Graphs/GroupData.cs @@ -5,7 +5,7 @@ namespace UnityEditor.ShaderGraph { [Serializable] - public class GroupData : JsonObject + public class GroupData : JsonObject, IRectInterface { [SerializeField] string m_Title; @@ -16,6 +16,7 @@ public string title set { m_Title = value; } } + [SerializeField] Vector2 m_Position; @@ -25,6 +26,16 @@ public Vector2 position set { m_Position = value; } } + Rect IRectInterface.rect + { + get => new Rect(position, Vector2.one); + set + { + position = value.position; + } + } + + public GroupData() : base() { } public GroupData(string title, Vector2 position) diff --git a/Packages/com.unity.shadergraph/Editor/Data/Graphs/SerializableTexture.cs b/Packages/com.unity.shadergraph/Editor/Data/Graphs/SerializableTexture.cs index aa8cf5fc7c9..0fa209ea59c 100644 --- a/Packages/com.unity.shadergraph/Editor/Data/Graphs/SerializableTexture.cs +++ b/Packages/com.unity.shadergraph/Editor/Data/Graphs/SerializableTexture.cs @@ -92,7 +92,8 @@ public Texture texture public void OnBeforeSerialize() { - m_SerializedTexture = EditorJsonUtility.ToJson(new TextureHelper { texture = texture }, false); + var textureHelper = texture ? 
new TextureHelper { texture = texture } : null; + m_SerializedTexture = EditorJsonUtility.ToJson(textureHelper, false); } public void OnAfterDeserialize() diff --git a/Packages/com.unity.shadergraph/Editor/Data/Implementation/NodeUtils.cs b/Packages/com.unity.shadergraph/Editor/Data/Implementation/NodeUtils.cs index 699fdd14b99..d593322dfad 100644 --- a/Packages/com.unity.shadergraph/Editor/Data/Implementation/NodeUtils.cs +++ b/Packages/com.unity.shadergraph/Editor/Data/Implementation/NodeUtils.cs @@ -819,6 +819,7 @@ public static string GetHLSLSafeName(string input) static HashSet m_ShaderGraphKeywords = new HashSet() { + "_Weight", "Gradient", "UnitySamplerState", "UnityTexture2D", @@ -865,10 +866,14 @@ public static string ConvertToValidHLSLIdentifier(string originalId, Func name; + public string[] synonyms; protected virtual string documentationPage => name; diff --git a/Packages/com.unity.shadergraph/Editor/Data/Nodes/Artistic/Normal/NormalFromHeightNode.cs b/Packages/com.unity.shadergraph/Editor/Data/Nodes/Artistic/Normal/NormalFromHeightNode.cs index bb610eb0795..bd51305338a 100644 --- a/Packages/com.unity.shadergraph/Editor/Data/Nodes/Artistic/Normal/NormalFromHeightNode.cs +++ b/Packages/com.unity.shadergraph/Editor/Data/Nodes/Artistic/Normal/NormalFromHeightNode.cs @@ -95,7 +95,7 @@ public void GenerateNodeFunction(FunctionRegistry registry, GenerationMode gener s.AppendLine("$precision3 crossY = cross(worldDerivativeY, TangentMatrix[2].xyz);"); s.AppendLine("$precision d = dot(worldDerivativeX, crossY);"); s.AppendLine("$precision sgn = d < 0.0 ? (-1.0f) : 1.0f;"); - s.AppendLine("$precision surface = sgn / max(0.000000000000001192093f, abs(d));"); + s.AppendLine("$precision surface = sgn / max({0}, abs(d));", concretePrecision == ConcretePrecision.Single ? 
"0.000000000000001192093f" : "0.00006103515625f"); s.AppendNewLine(); s.AppendLine("$precision dHdx = ddx(In);"); s.AppendLine("$precision dHdy = ddy(In);"); diff --git a/Packages/com.unity.shadergraph/Editor/Data/Nodes/BlockNode.cs b/Packages/com.unity.shadergraph/Editor/Data/Nodes/BlockNode.cs index c607ae0c566..47b67cb2c54 100644 --- a/Packages/com.unity.shadergraph/Editor/Data/Nodes/BlockNode.cs +++ b/Packages/com.unity.shadergraph/Editor/Data/Nodes/BlockNode.cs @@ -31,6 +31,23 @@ class BlockNode : AbstractMaterialNode [NonSerialized] BlockFieldDescriptor m_Descriptor; + public override string displayName + { + get + { + string displayName = ""; + if (m_Descriptor != null) + { + displayName = m_Descriptor.shaderStage.ToString(); + if (!string.IsNullOrEmpty(displayName)) + displayName += " "; + displayName += m_Descriptor.displayName; + } + + return displayName; + } + } + public override bool canCutNode => false; public override bool canCopyNode => false; @@ -83,7 +100,6 @@ public void Init(BlockFieldDescriptor fieldDescriptor) ? 
$"{fieldDescriptor.tag}.{fieldDescriptor.name}" : $"{BlockFields.VertexDescription.name}.{k_CustomBlockDefaultName}"; - // TODO: This exposes the MaterialSlot API // TODO: This needs to be removed but is currently required by HDRP for DiffusionProfileInputMaterialSlot if (m_Descriptor is CustomSlotBlockFieldDescriptor customSlotDescriptor) diff --git a/Packages/com.unity.shadergraph/Editor/Data/Nodes/GeometryNode.cs b/Packages/com.unity.shadergraph/Editor/Data/Nodes/GeometryNode.cs index ee190969ecb..0eb4a81b832 100644 --- a/Packages/com.unity.shadergraph/Editor/Data/Nodes/GeometryNode.cs +++ b/Packages/com.unity.shadergraph/Editor/Data/Nodes/GeometryNode.cs @@ -9,6 +9,7 @@ namespace UnityEditor.ShaderGraph { + using PopupList = UnityEditor.ShaderGraph.Drawing.Controls.PopupList; abstract class GeometryNode : AbstractMaterialNode { public GeometryNode() diff --git a/Packages/com.unity.shadergraph/Editor/Data/Nodes/Input/Geometry/ViewVectorNode.cs b/Packages/com.unity.shadergraph/Editor/Data/Nodes/Input/Geometry/ViewVectorNode.cs index 9281e1d4503..6792878ea7a 100644 --- a/Packages/com.unity.shadergraph/Editor/Data/Nodes/Input/Geometry/ViewVectorNode.cs +++ b/Packages/com.unity.shadergraph/Editor/Data/Nodes/Input/Geometry/ViewVectorNode.cs @@ -10,6 +10,7 @@ namespace UnityEditor.ShaderGraph { + using PopupList = UnityEditor.ShaderGraph.Drawing.Controls.PopupList; [Title("Input", "Geometry", "View Vector")] class ViewVectorNode : CodeFunctionNode { diff --git a/Packages/com.unity.shadergraph/Editor/Data/Nodes/Input/Scene/CameraNode.cs b/Packages/com.unity.shadergraph/Editor/Data/Nodes/Input/Scene/CameraNode.cs index 180f2fc25b0..d81006db62b 100644 --- a/Packages/com.unity.shadergraph/Editor/Data/Nodes/Input/Scene/CameraNode.cs +++ b/Packages/com.unity.shadergraph/Editor/Data/Nodes/Input/Scene/CameraNode.cs @@ -49,7 +49,7 @@ public override string GetVariableNameForSlot(int slotId) switch (slotId) { case OutputSlot1Id: - return "(-1 * mul((float3x3)UNITY_MATRIX_M, 
transpose(mul(UNITY_MATRIX_I_M, UNITY_MATRIX_I_V)) [2].xyz))"; + return "-UNITY_MATRIX_V[2].xyz"; case OutputSlot2Id: return "unity_OrthoParams.w"; case OutputSlot3Id: diff --git a/Packages/com.unity.shadergraph/Editor/Drawing/Inspector/PropertyDrawers/AbstractMaterialNodePropertyDrawer.cs b/Packages/com.unity.shadergraph/Editor/Drawing/Inspector/PropertyDrawers/AbstractMaterialNodePropertyDrawer.cs index 565428fefdb..445fb27322f 100644 --- a/Packages/com.unity.shadergraph/Editor/Drawing/Inspector/PropertyDrawers/AbstractMaterialNodePropertyDrawer.cs +++ b/Packages/com.unity.shadergraph/Editor/Drawing/Inspector/PropertyDrawers/AbstractMaterialNodePropertyDrawer.cs @@ -38,7 +38,7 @@ internal virtual void AddCustomNodeProperties(VisualElement parentElement, Abstr VisualElement CreateGUI(AbstractMaterialNode node, InspectableAttribute attribute, out VisualElement propertyVisualElement) { VisualElement nodeSettings = new VisualElement(); - var nameLabel = PropertyDrawerUtils.CreateLabel($"{node.name} Node", 0, FontStyle.Bold); + var nameLabel = PropertyDrawerUtils.CreateLabel($"{node.displayName} Node", 0, FontStyle.Bold); nodeSettings.Add(nameLabel); if (node.sgVersion < node.latestVersion) { diff --git a/Packages/com.unity.shadergraph/Editor/Drawing/Views/GraphEditorView.cs b/Packages/com.unity.shadergraph/Editor/Drawing/Views/GraphEditorView.cs index 9b60c99501b..15971a2afc6 100644 --- a/Packages/com.unity.shadergraph/Editor/Drawing/Views/GraphEditorView.cs +++ b/Packages/com.unity.shadergraph/Editor/Drawing/Views/GraphEditorView.cs @@ -217,13 +217,13 @@ public GraphEditorView(EditorWindow editorWindow, GraphData graph, MessageManage GUILayout.Space(6); - m_UserViewSettings.isInspectorVisible = GUILayout.Toggle(m_UserViewSettings.isInspectorVisible, new GUIContent(EditorGUIUtility.FindTexture("d_UnityEditor.InspectorWindow"), "Graph Inspector"), EditorStyles.toolbarButton); + m_UserViewSettings.isInspectorVisible = 
GUILayout.Toggle(m_UserViewSettings.isInspectorVisible, new GUIContent(EditorGUIUtility.TrIconContent("d_UnityEditor.InspectorWindow").image, "Graph Inspector"), EditorStyles.toolbarButton); GUILayout.Space(6); m_UserViewSettings.isPreviewVisible = GUILayout.Toggle(m_UserViewSettings.isPreviewVisible, new GUIContent(EditorGUIUtility.FindTexture("PreMatSphere"), "Main Preview"), EditorStyles.toolbarButton); - if (GUILayout.Button(new GUIContent(EditorGUIUtility.FindTexture("_Help"), "Open Shader Graph User Manual"), EditorStyles.toolbarButton)) + if (GUILayout.Button(new GUIContent(EditorGUIUtility.TrIconContent("_Help").image, "Open Shader Graph User Manual"), EditorStyles.toolbarButton)) { Application.OpenURL(UnityEngine.Rendering.ShaderGraph.Documentation.GetPageLink("index")); //Application.OpenURL("https://docs.unity3d.com/Packages/com.unity.shadergraph@17.0/manual/index.html"); // TODO : point to latest? diff --git a/Packages/com.unity.shadergraph/Editor/Drawing/Views/MaterialGraphView.cs b/Packages/com.unity.shadergraph/Editor/Drawing/Views/MaterialGraphView.cs index ca1cfa3d964..c0ebe52df15 100644 --- a/Packages/com.unity.shadergraph/Editor/Drawing/Views/MaterialGraphView.cs +++ b/Packages/com.unity.shadergraph/Editor/Drawing/Views/MaterialGraphView.cs @@ -52,7 +52,7 @@ public MaterialGraphView() m_UndoRedoPerformedMethodInfo = graphViewType?.GetMethod("UndoRedoPerformed", BindingFlags.FlattenHierarchy | BindingFlags.Instance | BindingFlags.NonPublic, null, - new Type[] { }, + new Type[] { typeof(UndoRedoInfo).MakeByRefType()}, null); } @@ -74,12 +74,12 @@ void OnTransformChanged(GraphView graphView) } } - protected override bool canCutSelection + protected internal override bool canCutSelection { get { return selection.OfType().Any(x => x.node.canCutNode) || selection.OfType().Any() || selection.OfType().Any() || selection.OfType().Any() || selection.OfType().Any(); } } - protected override bool canCopySelection + protected internal override bool 
canCopySelection { get { return selection.OfType().Any(x => x.node.canCopyNode) || selection.OfType().Any() || selection.OfType().Any() || selection.OfType().Any() || selection.OfType().Any(); } } @@ -321,7 +321,7 @@ public override void BuildContextualMenu(ContextualMenuPopulateEvent evt) { evt.menu.AppendSeparator(); var sc = ShaderGraphShortcuts.GetKeycodeForContextMenu(ShaderGraphShortcuts.summonDocumentationShortcutID); - evt.menu.AppendAction($"Open Documentation _{sc}", SeeDocumentation, SeeDocumentationStatus); + evt.menu.AppendAction($"Open Documentation {sc}", SeeDocumentation, SeeDocumentationStatus); } if (selection.OfType().Count() == 1 && selection.OfType().First().node is SubGraphNode) { @@ -687,14 +687,17 @@ void ApplyColor(Color pickedColor) m.Invoke(null, new object[] { (Action)ApplyColor, defaultColor, true, false }); } - protected override bool canDeleteSelection + protected internal override bool canDeleteSelection { get { - return selection.Any(x => !(x is IShaderNodeView nodeView) || nodeView.node.canDeleteNode); + return selection.Any(x => + { + if (x is ContextView) return false; //< context view must not be deleted. 
( eg, Vertex, Fragment ) + return !(x is IShaderNodeView nodeView) || nodeView.node.canDeleteNode; + }); } } - public void GroupSelection() { var title = "New Group"; @@ -1107,7 +1110,8 @@ void DeleteSelectionImplementation(string operationName, GraphView.AskUser askUs internal void RestorePersistentSelectionAfterUndoRedo() { wasUndoRedoPerformed = true; - m_UndoRedoPerformedMethodInfo?.Invoke(this, new object[] { }); + UndoRedoInfo info = new UndoRedoInfo(); + m_UndoRedoPerformedMethodInfo?.Invoke(this, new object[] {info}); } #region Drag and drop @@ -1486,7 +1490,12 @@ internal static void InsertCopyPasteGraph(this MaterialGraphView graphView, Copy { var nodeList = copyGraph.GetNodes(); - ClampNodesWithinView(graphView, new List().Union(nodeList).Union(copyGraph.stickyNotes)); + ClampNodesWithinView(graphView, + new List() + .Union(nodeList) + .Union(copyGraph.stickyNotes) + .Union(copyGraph.groups) + ); graphView.graph.PasteGraph(copyGraph, remappedNodes, remappedEdges); diff --git a/Packages/com.unity.shadergraph/Editor/Drawing/Views/ShaderGroup.cs b/Packages/com.unity.shadergraph/Editor/Drawing/Views/ShaderGroup.cs index 92c3b46df6b..02b2fda1376 100644 --- a/Packages/com.unity.shadergraph/Editor/Drawing/Views/ShaderGroup.cs +++ b/Packages/com.unity.shadergraph/Editor/Drawing/Views/ShaderGroup.cs @@ -53,5 +53,13 @@ public override bool AcceptsElement(GraphElement element, ref string reasonWhyNo return true; } + + protected override void SetScopePositionOnly(Rect newPos) + { + base.SetScopePositionOnly(newPos); + + userData.position = newPos.position; + } + } } diff --git a/Packages/com.unity.shadergraph/Editor/Drawing/Views/ShaderPort.cs b/Packages/com.unity.shadergraph/Editor/Drawing/Views/ShaderPort.cs index 6ed25a7540d..ee3915b96a1 100644 --- a/Packages/com.unity.shadergraph/Editor/Drawing/Views/ShaderPort.cs +++ b/Packages/com.unity.shadergraph/Editor/Drawing/Views/ShaderPort.cs @@ -59,7 +59,7 @@ public MaterialSlot slot } } - public Action OnDisconnect; + 
public new Action OnDisconnect; public override void Disconnect(Edge edge) { diff --git a/Packages/com.unity.shadergraph/Editor/Generation/Processors/Generator.cs b/Packages/com.unity.shadergraph/Editor/Generation/Processors/Generator.cs index d45a80a9cd5..34598d64272 100644 --- a/Packages/com.unity.shadergraph/Editor/Generation/Processors/Generator.cs +++ b/Packages/com.unity.shadergraph/Editor/Generation/Processors/Generator.cs @@ -1046,7 +1046,7 @@ void ProcessStackForPass(ContextData contextData, BlockFieldDescriptor[] passBlo { m_GraphData.ForeachHLSLProperty(h => { - if (!h.IsObjectType()) + if (!h.IsObjectType() && h.declaration != HLSLDeclaration.Global) h.AppendTo(propertyBuilder); }); } diff --git a/Packages/com.unity.shadergraph/Editor/ShaderGraphPreferences.cs b/Packages/com.unity.shadergraph/Editor/ShaderGraphPreferences.cs index bf31971231d..c000726cbe9 100644 --- a/Packages/com.unity.shadergraph/Editor/ShaderGraphPreferences.cs +++ b/Packages/com.unity.shadergraph/Editor/ShaderGraphPreferences.cs @@ -87,49 +87,46 @@ static void OpenGUI() if (!m_Loaded) Load(); - var previousLabelWidth = EditorGUIUtility.labelWidth; - EditorGUIUtility.labelWidth = 256; - - EditorGUILayout.Space(); - EditorGUI.BeginChangeCheck(); - var actualLimit = ShaderGraphProjectSettings.instance.shaderVariantLimit; - var willPreviewVariantBeIgnored = ShaderGraphPreferences.previewVariantLimit > actualLimit; + using (new SettingsWindow.GUIScope()) + { + var actualLimit = ShaderGraphProjectSettings.instance.shaderVariantLimit; + var willPreviewVariantBeIgnored = ShaderGraphPreferences.previewVariantLimit > actualLimit; - var variantLimitLabel = willPreviewVariantBeIgnored - ? new GUIContent("Preview Variant Limit", EditorGUIUtility.IconContent("console.infoicon").image, $"The Preview Variant Limit is higher than the Shader Variant Limit in Project Settings: {actualLimit}. 
The Preview Variant Limit will be ignored.") - : new GUIContent("Preview Variant Limit"); + var variantLimitLabel = willPreviewVariantBeIgnored + ? new GUIContent("Preview Variant Limit", EditorGUIUtility.IconContent("console.infoicon").image, $"The Preview Variant Limit is higher than the Shader Variant Limit in Project Settings: {actualLimit}. The Preview Variant Limit will be ignored.") + : new GUIContent("Preview Variant Limit"); - var variantLimitValue = EditorGUILayout.DelayedIntField(variantLimitLabel, previewVariantLimit); - variantLimitValue = Mathf.Max(0, variantLimitValue); - if (EditorGUI.EndChangeCheck()) - { - previewVariantLimit = variantLimitValue; - } + var variantLimitValue = EditorGUILayout.DelayedIntField(variantLimitLabel, previewVariantLimit); + variantLimitValue = Mathf.Max(0, variantLimitValue); + if (EditorGUI.EndChangeCheck()) + { + previewVariantLimit = variantLimitValue; + } - EditorGUI.BeginChangeCheck(); - var autoAddRemoveBlocksValue = EditorGUILayout.Toggle("Automatically Add and Remove Block Nodes", autoAddRemoveBlocks); - if (EditorGUI.EndChangeCheck()) - { - autoAddRemoveBlocks = autoAddRemoveBlocksValue; - } + EditorGUI.BeginChangeCheck(); + var autoAddRemoveBlocksValue = EditorGUILayout.Toggle("Automatically Add and Remove Block Nodes", autoAddRemoveBlocks); + if (EditorGUI.EndChangeCheck()) + { + autoAddRemoveBlocks = autoAddRemoveBlocksValue; + } - EditorGUI.BeginChangeCheck(); - var allowDeprecatedBehaviorsValue = EditorGUILayout.Toggle("Enable Deprecated Nodes", allowDeprecatedBehaviors); - if (EditorGUI.EndChangeCheck()) - { - allowDeprecatedBehaviors = allowDeprecatedBehaviorsValue; - } + EditorGUI.BeginChangeCheck(); + var allowDeprecatedBehaviorsValue = EditorGUILayout.Toggle("Enable Deprecated Nodes", allowDeprecatedBehaviors); + if (EditorGUI.EndChangeCheck()) + { + allowDeprecatedBehaviors = allowDeprecatedBehaviorsValue; + } - EditorGUI.BeginChangeCheck(); - var zoomStepSizeValue = EditorGUILayout.Slider(new 
GUIContent("Zoom Step Size", $"Default is 0.5"), zoomStepSize, 0.0f, 1f); - if (EditorGUI.EndChangeCheck()) - { - zoomStepSize = zoomStepSizeValue; + EditorGUI.BeginChangeCheck(); + var zoomStepSizeValue = EditorGUILayout.Slider(new GUIContent("Zoom Step Size", $"Default is 0.5"), zoomStepSize, 0.0f, 1f); + if (EditorGUI.EndChangeCheck()) + { + zoomStepSize = zoomStepSizeValue; + } } - EditorGUIUtility.labelWidth = previousLabelWidth; } static void Load() diff --git a/Packages/com.unity.shadergraph/Editor/ShaderGraphShortcuts.cs b/Packages/com.unity.shadergraph/Editor/ShaderGraphShortcuts.cs index dec2e509d3d..3009ac8823f 100644 --- a/Packages/com.unity.shadergraph/Editor/ShaderGraphShortcuts.cs +++ b/Packages/com.unity.shadergraph/Editor/ShaderGraphShortcuts.cs @@ -2,6 +2,7 @@ using System; using System.Collections.Generic; using System.Reflection; +using System.Text; using UnityEditor.Experimental.GraphView; using UnityEditor.ShaderGraph.Drawing; using UnityEditor.ShortcutManagement; @@ -83,30 +84,34 @@ static void CheckBindings(string name) { if (reservedShortcuts.Contains((keyCombo.keyCode, keyCombo.modifiers))) { - string shortcut = ""; - bool isOSXEditor = Application.platform == RuntimePlatform.OSXEditor; // maybe not correct. - if (keyCombo.action) shortcut += $"{(isOSXEditor ? "Cmd" : "Ctrl")} + "; - if (keyCombo.shift) shortcut += "Shift + "; - if (keyCombo.alt) shortcut += "Alt + "; - shortcut += keyCombo.keyCode; - throw new Exception($"The binding for {name} ({shortcut}) conflicts with a built-in shortcut. Please go to Edit->Shortcuts... and change the binding."); + throw new Exception($"The binding for {name} ({keyCombo}) conflicts with a built-in shortcut. Please go to Edit->Shortcuts... 
and change the binding."); } } } internal static string GetKeycodeForContextMenu(string id) { + const string kKeycodePrefixAlt = "&"; + const string kKeycodePrefixShift = "#"; + const string kKeycodePrefixAction = "%"; + const string kKeycodePrefixControl = "^"; + const string kKeycodePrefixNoModifier = "_"; + var binding = ShortcutManager.instance.GetShortcutBinding(id); - bool isOSXEditor = Application.platform == RuntimePlatform.OSXEditor; // maybe not correct. foreach (var keyCombo in binding.keyCombinationSequence) { - string shortcut = ""; - if (keyCombo.action) shortcut += $"{(isOSXEditor ? "Cmd" : "Ctrl")}+"; - if (keyCombo.shift) shortcut += "Shift+"; - if (keyCombo.alt) shortcut += "Alt+"; - shortcut += keyCombo.keyCode; - return shortcut; + var sb = new StringBuilder(); + + if (keyCombo.alt) sb.Append(kKeycodePrefixAlt); + if (keyCombo.shift) sb.Append(kKeycodePrefixShift); + if (keyCombo.action) sb.Append(kKeycodePrefixAction); + if (keyCombo.control) sb.Append(kKeycodePrefixControl); + if (keyCombo.modifiers == ShortcutModifiers.None) sb.Append(kKeycodePrefixNoModifier); + + sb.Append(keyCombo.keyCode); + return sb.ToString(); } + return ""; } @@ -199,7 +204,8 @@ static void Group(ShortcutArguments args) CheckBindings(nodeGroupShortcutID); var graphView = GetGraphView(); foreach(var selected in graphView.selection) - if (selected is IShaderNodeView nodeView && nodeView.node is AbstractMaterialNode) + if ((selected is IShaderNodeView nodeView && nodeView.node is AbstractMaterialNode) + || selected.GetType() == typeof(Drawing.StickyNote)) { graphView.GroupSelection(); break; @@ -213,7 +219,8 @@ static void UnGroup(ShortcutArguments args) CheckBindings(nodeUnGroupShortcutID); var graphView = GetGraphView(); foreach (var selected in graphView.selection) - if (selected is IShaderNodeView nodeView && nodeView.node is AbstractMaterialNode) + if ((selected is IShaderNodeView nodeView && nodeView.node is AbstractMaterialNode) + || selected.GetType() == 
typeof(Drawing.StickyNote)) { graphView.RemoveFromGroupNode(); break; @@ -226,16 +233,36 @@ static void ToggleNodePreviews(ShortcutArguments args) { CheckBindings(nodePreviewShortcutID); bool shouldHide = false; - foreach (var selected in GetGraphView().selection) - if (selected is IShaderNodeView nodeView) - { - if (nodeView.node.previewExpanded && nodeView.node.hasPreview) + // Toggle all node previews if none are selected. Otherwise, update only the selected node previews. + var selection = GetGraphView().selection; + if (selection.Count == 0) + { + var graph = GetGraphView().graph; + var nodes = graph.GetNodes(); + foreach (AbstractMaterialNode node in nodes) + if (node.previewExpanded && node.hasPreview) { shouldHide = true; break; } - } - GetGraphView().SetPreviewExpandedForSelectedNodes(!shouldHide); + + graph.owner.RegisterCompleteObjectUndo("Toggle Previews"); + foreach (AbstractMaterialNode node in nodes) + node.previewExpanded = !shouldHide; + } + else + { + foreach (var selected in selection) + if (selected is IShaderNodeView nodeView) + { + if (nodeView.node.previewExpanded && nodeView.node.hasPreview) + { + shouldHide = true; + break; + } + } + GetGraphView().SetPreviewExpandedForSelectedNodes(!shouldHide); + } } internal const string nodeCollapsedShortcutID = "ShaderGraph/Selection: Toggle Node Collapsed"; diff --git a/Packages/com.unity.shadergraph/Tests/Editor/UnitTests/MaterialGraphTests.cs b/Packages/com.unity.shadergraph/Tests/Editor/UnitTests/MaterialGraphTests.cs index 902ce212374..5f2048cf5f3 100644 --- a/Packages/com.unity.shadergraph/Tests/Editor/UnitTests/MaterialGraphTests.cs +++ b/Packages/com.unity.shadergraph/Tests/Editor/UnitTests/MaterialGraphTests.cs @@ -1,7 +1,9 @@ using System.Linq; +using System.Reflection; using NUnit.Framework; using UnityEngine; using UnityEditor.Graphing; +using UnityEditor.ShaderGraph.Drawing; namespace UnityEditor.ShaderGraph.UnitTests { @@ -23,5 +25,16 @@ public void TestCreateMaterialGraph() 
Assert.AreEqual(0, graph.GetNodes().Count()); } + + [Test] + public void TestUndoRedoPerformedMethod() + { + var view = new MaterialGraphView(); + var viewType = typeof(MaterialGraphView); + var fieldInfo = viewType.GetField("m_UndoRedoPerformedMethodInfo", BindingFlags.NonPublic | BindingFlags.Instance); + var fieldInfoValue = fieldInfo.GetValue(view); + + Assert.IsNotNull(fieldInfoValue, "m_UndoRedoPerformedMethodInfo must not be null."); + } } } diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Blackboard.md b/Packages/com.unity.visualeffectgraph/Documentation~/Blackboard.md index 3975f1def37..d459a5eaf42 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/Blackboard.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Blackboard.md @@ -1,89 +1,146 @@ # Blackboard -The Blackboard is a utility panel in the [Visual Effect Graph window](VisualEffectGraphWindow.md) that allows you to manage **properties**. Here, you can define, order, and categorize properties. You can also expose properties so that you can access them from outside the graph. +The Blackboard is a utility panel in the [Visual Effect Graph window](VisualEffectGraphWindow.md) that allows you to manage: +- [Properties](Properties.md) +- [Custom Attributes](#attributes) +- Built-in particle [attributes](Attributes.md) -Properties you define in the Blackboard are global variables that you can use multiple times throughout the graph. For example, you can define a bounding box property once and then use it for multiple particle systems in the graph. +To toggle the Blackboard visibility, you can either use the `SHIFT + 1` shortcut or click on the **Blackboard** button in the Visual Effect Graph [Toolbar](VisualEffectGraphWindow.md#Toolbar). +Drag window's edge to resize and drag the window's header to move it. -![Blackboard-Properties](Images/Blackboard-Properties.png) +## Properties +You can create/delete, order and categorize properties. 
+For each property the following options are available: -Properties in the Blackboard are either **constants** or **exposed**. If you make a property exposed, you can see and edit it on the [Visual Effect Component](VisualEffectComponent.md) as well as via the C# API. +| **Setting** | **Description** | +|-------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| **Exposed** | Specifies whether the property is exposed or not. When enabled, you can see and edit the property on the [Visual Effect Component](VisualEffectComponent.md) as well as via the [C# API](ComponentAPI.md).
When the property is exposed, a green dot is displayed (bright green if the property is used and dimmed out if not used). | +| **Value** | Specifies the default value of the property. The Visual Effect Graph uses this value if you do not expose the property or if you expose the property, but do not override it. | +| **Tooltip** | Specifies text that appears when you hover the property in the graph or in the Visual Effect component inspector. | +| **Mode** | For numerical types you can set a **Range** mode (with minimum and maximum values) which turns the value control into a slider.
And for UInt type you can also choose **Enum** which turns the control into a dropdown | -To differentiate between exposed properties and constants, the Blackboard displays a green dot on the left of an exposed property's label. -## Using the Blackboard +To use a property in the graph you can either drag and drop it in the graph, or find it by name in the node search. +You can use the same property as many times as you want in the graph (even in different systems) -To open the Blackboard, click the **Blackboard** button in the Visual Effect Graph window [Toolbar](VisualEffectGraphWindow.md#Toolbar). To resize the Blackboard, click on any edge or corner and drag. To reposition the Blackboard, click on the header and drag. +![Blackboard-Properties](Images/blackboard-properties.png) -### Menu Category +### Add a property +To add a property, click the plus `+` button in the top-left corner of the Blackboard. +A menu will open offering two options: **Property** and [**Attribute**](#attributes). Choose the **Property** option and then select the desired type. -In order to set the Menu path of the currently edited Subgraph, you can double-click the sub-title of the blackboard and enter the desired Category Name, then validate using the Return Key +You can also convert an inline [Operator](Operators.md) to a property. To do this, either use the shortcut `SHIFT + X` or right-click on the Node in the graph and select either: +- **Convert to Property** if you want to create a constant. +- **Convert to Exposed Property** if you want to create an exposed property +Regardless of the option you choose, you can enable or disable the **Exposed** setting later. -![Blackboard-Category](Images/Blackboard-Category.gif) +### Arranging properties +1. To rename a property you can: + - Select property and press F2 + - Double-click the property name + - Right-click the property name and select **Rename** from the context menu. + - In the inspector change the **Exposed Name** value +2. 
To reorder a property, drag&drop it somewhere above or below. +If you drop it over a category or inside a category the property will move to that category +3. To delete a property: + - Right-click the property then select **Delete** from the context menu. + - Select the property then press the **Delete** key (for macOS, **Cmd** + **Delete** key). -### Creating properties +### Property categories +Categories allow you to sort properties into groups so you can manage them more easily. You can **rename** and **delete** categories in the same way as you can for properties. +Category creation works the same way as for Properties, just pick the **Category** type at the top of the menu. +You can drag&drop properties from one category to another. If you don't want a property to be part of a category, drop it at the top of the properties. -To create a property, click the plus (**+**) button in the top-right of the Blackboard then select a property type from the menu. +### Property Nodes +Property Nodes look slightly different to standard [operators](Operators.md). They display the property name and a green dot if the property is exposed. +The value is either the default value set in the blackboard or the overriden value set in the component inspector. -You can also convert an inline Operator to a property. To do this, right-click on the Node and select either: +You can expand them to use a sub-member of the property value. -- **Convert to Property** if you want to create a constant. -- **Convert to Exposed Property** if you want to create an exposed property +![PropertyNode](Images/PropertyNode.png) +
On the left are unexposed properties, on the right exposed properties (notice the green dot)
-Regardless of the option you choose, you can enable or disable the **Exposed** setting later. +
-### Editing properties +### Exposed Properties in the Inspector +When you enable the **Exposed** setting for a property, the property becomes visible in the **Properties** section of the Inspector for a [Visual Effect](VisualEffectComponent.md) component. +Properties appear in the same order and categories that you set in the Blackboard. -To edit a property in the Blackboard, click the folding arrow to the left of the property. This exposes settings that you can use to edit the property. Different properties expose different settings. The core settings are: +![Properties-Inspector](Images/blackboard-properties-inspector.png) -| **Setting** | **Description** | -| ----------- | ------------------------------------------------------------ | -| **Exposed** | Specifies whether the property is exposed or not. When enabled, you can see and edit the property on the [Visual Effect Component](VisualEffectComponent.md) as well as via the C# API. | -| **Value** | Specifies the default value of the property. The Visual Effect Graph uses this value if you do not expose the property or if you expose the property, but do not override it. | -| **Tooltip** | Specifies text that appears when you hover over the property in the Inspector for the Visual Effect. | +#### Overriding property values per GameObject +For each VFX component in the scene, you can override any exposed property value. +When the GameObject holding the Visual Effect is selected in the hierarchy, in the inspector you'll see all exposed properties listed. +Then if you change a property value, the override checkbox will be checked and the value overriden. +To revert your change, you can simply uncheck the override checkbox, then the default value for that property, set in the blackboard, will be used. -### Filtering properties -Float, Int and Uint properties have some filter mode : -* default. does nothing special. You will edit the property value in a textfield. -* Range. 
You will specify a minimum and a maximum value in the blackboard and you will edit the property with a slider instead of just a textfield -* Enum. Exclusive to uint, You will specify a list of names in the blackboard and you will edit the property with a popup menu. +## Attributes +An [attribute](Attributes.md) is a short name for **Particle Attribute** which are independent values carried by each particle. +The blackboard now lets you manage both built-in attributes and custom attributes. -### Arranging properties +The icon on the left represents the type of the attribute. In the screenshot below, we can see that the attribute is a `Vector3` -* To **rename** a property: - 1. Either double click the property name or right-click the property name and select **Rename** from the context menu. - 2. In the editable field, type the new name. - 3. Finally, to validate the change, press the **Enter** key or click away from the field. -* To **reorder** properties, **drag and drop** them in the Blackboard. -* To **delete** a property, either: - * Right-click the property then select **Delete** from the context menu. - * Select the property then press the **Delete** key (for macOS, **Cmd** + **Delete** key). +![Attribute](Images/blackboard-attribute.png) -### Property categories +To use an attribute in the graph you can drag&drop it in the graph. +* If you drop it in a context, it will create a `Set Attribute` block. +* If you drop it in the void, it will create a `Get Attribute` operator. -Categories allow you to sort properties into groups so you can manage them more easily. You can **rename**, **reorder**, and **delete** categories in the same way as you can for properties. +You can also find the attribute in the node search window. -* To **create** a category, click the plus (**+**) button in the top-right of the Blackboard, then select **Category** from the menu. 
-* You can **drag and drop** properties from one category to another, or if you want a property to not be part of any category, to the top of the window. +![](Images/blackboard-dragdrop-attribute.gif) -## Property Nodes +### Custom Attributes +For each custom attribute you can: +- **Rename**: all usage of the custom attribute will be updated automatically +- **Change type**: all usage of the custom attribute will be updated automatically +- **Add a description**: it will be displayed as a tooltip in the graph -Property Nodes look slightly different to standard Nodes. They display the property name and a green dot if the property is exposed. +### Built-in Attributes +This section lists all available built-in attributes. The small locker icon on the right indicates that you cannot modify them. +For each built-in attribute you have the follwing information: +- Name +- Type +- Access (is it read-only or not) +- Description -You can expand them to use a sub-member of the property value. +![Built-in Attributes](Images/blackboard-builtin-attributes.png) -![PropertyNode](Images/PropertyNode.png) +### Custom HLSL +To avoid typo when writing [custom HLSL](CustomHLSL-Common.md) code, you can drag&drop an attribute inside the HLSL code editor (in Unity). + +## Common Features + +- **Duplicate**: you can duplicate a property or a custom attribute with the shortcut `Ctrl + D` (macOS: `Cmd + D`) or with the context menu +- **Multi-Selection**: you can multi-select properties or custom attributes with `Shift + Click` or `Ctrl + Click`(macOS: `Cmd + Click`) and then drag&drop in the graph for instance +- **Copy/Paste**: you can copy/paste properties and custom attributes across different VFX Graphs using `Ctrl + C` and `Ctrl + V` +- **Hover Property**: if you hover a property, all corresponding nodes in the graph will get highlighted in orange +And same if you hover a node in the graph, corresponding property will be highlighted in the blackboard. 
+- **Hover Attribute**: if you hover an attribute, all nodes using it in the graph will get highlighted in orange +And same if you hover a node in the graph, corresponding attributes will be highlighted in the blackboard. +This applies for both custom and built-in attributes. +- **Select unused**: when right clicking on categories (or at the top of the blackboard) a context menu offers options to select unused properties or unused custom attributes. +This can be helpful when you want to cleanup your VFX. -## Exposed Properties in the Inspector +- ![Select Unused](Images/blackboard-unused.png) -When you enable the **Exposed** setting for a property, the property becomes visible in the **Properties** section of the Inspector for a [Visual Effect](VisualEffectComponent.md). Properties appear in the same order and categories that you set in the Blackboard. +> [!TIP] +> A handy workflow for custom attribute is to duplicate a built-in attribute with `Ctrl + D` (macOS: `Cmd + D`). +This will create a new custom attribute with a name like `orignalname_1` and with same type. -![Properties-Inspector](Images/Properties-Inspector.png) +## Filter +Near the top of the blackboard, there are three tabs which let you filter which kind of elements to display: +- **All**: Properties and Attributes are displayed +- **Properties**: Only properties are displayed +- **Attributes**: Only attributes are displayed -### Overriding property values +![Filter](Images/blackboard-filter.png) -To edit a property value, you need to override it. To do this, enable the checkbox to the left of the property's name. When you enable this checkbox, the Visual Effect Graph uses the value that you specify in the Inspector. If you disable this checkbox, the Visual Effect Graph uses the default value that you set in the Blackboard. +## Subgraph Category +When dealing with a subgraph, the blackboard lets you specify a category that will be used in the node search. 
+To change the category, double-click the sub-title of the blackboard and enter the desired category name, then validate using the `Return` key -### Using Gizmos +To create multiple category levels, use the `/` character. For example, `MySubgraphs/Math`. -You can use Gizmos to edit certain advanced property types. To enable Gizmo editing, click the **Show Property Gizmos** button. To use a Gizmo to edit a compatible property, click the **Edit** button next to the property. +![Blackboard-Category](Images/blackboard-subgraph.png) diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Block-CustomHLSL.md b/Packages/com.unity.visualeffectgraph/Documentation~/Block-CustomHLSL.md index a9616bf1c3d..b16d8f88d73 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/Block-CustomHLSL.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Block-CustomHLSL.md @@ -3,7 +3,7 @@ Menu Path : **HLSL > Custom HLSL** The **Custom HLSL** Block allows you to write an HLSL function that takes inputs and can read and write to particle attributes. -For information about the Custom HLSL Operator and Custom HLSL Block, refer to [Custom HLSL Nodes](CustomHLSL-Common.md). +For general information about Custom HLSL nodes, refer to [Custom HLSL Nodes](CustomHLSL-Common.md). ## Block compatibility This Block is compatible with the following Contexts: @@ -36,25 +36,25 @@ void ApplyGradient(inout VFXAttributes attributes, in VFXGradient gradient, in f ``` ## Particle attributes -Use the Custom HLSL Block block to alter any writable particle attribute with a custom algorithm. +Use the Custom HLSL block to alter any writable particle attribute with a custom algorithm. For performance reasons it's important for VFX Graph to detect which attributes are read and which are written. -By convention the `VFXAttributes` function parameter is named `attributes` (like in the example above) but you can name it as you wish. 
+By convention the `VFXAttributes` function's parameter is named `attributes` (like in the example above) but you can name it as you wish. To find all available attributes and their access rights, refer to [reference attributes](Reference-Attributes.md). ## Use macros to generate random numbers VFX Graph exposes the following macros that you can use to generate random numbers: -| **Macro** | **Type** | **Description** | -|--------------------|----------|--------------------------------------| -| `VFXRAND` | float | Generate a random scalar value for each particle. | -| `VFXRAND2` | float2 | Generate a random 2D vector value for each particle. | -| `VFXRAND3` | float3 | Generate a random 3D vector value for each particle. | -| `VFXRAND4` | float4 | Generate a random 4D vector value for each particle.| -| `VFXFIXED_RAND` | float | Generate a random scalar value for each VFX Graph system. | -| `VFXFIXED_RAND2` | float2 | Generate a random 2D vector value for each VFX Graph system. | -| `VFXFIXED_RAND3` | float3 | Generate a random 3D vector value for each VFX Graph system. | -| `VFXFIXED_RAND4` | float4 | Generate a random 4D vector value for each VFX Graph system. | +| **Macro** | **Type** | **Description** | +|----------------|----------|--------------------------------------------------------------| +| VFXRAND | float | Generate a random scalar value for each particle. | +| VFXRAND2 | float2 | Generate a random 2D vector value for each particle. | +| VFXRAND3 | float3 | Generate a random 3D vector value for each particle. | +| VFXRAND4 | float4 | Generate a random 4D vector value for each particle. | +| VFXFIXED_RAND | float | Generate a random scalar value for each VFX Graph system. | +| VFXFIXED_RAND2 | float2 | Generate a random 2D vector value for each VFX Graph system. | +| VFXFIXED_RAND3 | float3 | Generate a random 3D vector value for each VFX Graph system. | +| VFXFIXED_RAND4 | float4 | Generate a random 4D vector value for each VFX Graph system. 
| To generate a random scalar value (range from 0 to 1) for each particle, use the following syntax: @@ -64,9 +64,8 @@ float randomValue = VFXRAND; ## Use the same HLSL code in multiple VFX Graph systems - If you share the same HLSL code in multiple systems you may have compilation errors because the `VFXAttributes` may have different definition (depending on the particle layout). -To overcome these compilation error you can wrap the function with conditional compilation macro specifying which attribute is required. +To overcome this compilation error you can wrap the function with conditional compilation macro specifying which attribute is required. For example, the following code uses `VFX_USE_COLOR_CURRENT` and `VFX_USE_VELOCITY_CURRENT` to check for the `velocity` and `color` attributes: ````csharp #if defined(VFX_USE_COLOR_CURRENT) && defined(VFX_USE_VELOCITY_CURRENT) @@ -77,3 +76,21 @@ void Speed(inout VFXAttributes attributes, in float speedFactor, in VFXGradient } #endif ```` + +## Include other HLSL files +You can include any valid HLSL file with the standard `#include` directive. +The path to the included file can be: +- **Relative** to the VFX asset where the block will be used +- **Absolute** starting from the `Assets/` folder. +- **Absolute** starting from the `Packages/` folder. + +For the Custom HLSL block, you can use the `#include` directive in both cases, embedded code or hlsl file. + +```c++ +#include "HLSL/common.hlsl" + +void SomeFunctionName(inout VFXAttributes attributes, in float someValue) +{ +... 
+} +``` diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Block-IncrementStripIndexOnStart.md b/Packages/com.unity.visualeffectgraph/Documentation~/Block-IncrementStripIndexOnStart.md index 986524929f3..251755b829e 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/Block-IncrementStripIndexOnStart.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Block-IncrementStripIndexOnStart.md @@ -2,7 +2,7 @@ Menu Path: **Spawn > Custom > Increment Strip Index On Start** -The **Increment Strip Index On Start** Block helps to manage the initialization of Particle Strips. A Particle Strip comprises of linked groups of particles and the number of these groups is defined by the strip's stripIndex attribute. This Block increments the Particle Strip's stripIndex attribute (unsigned integer) each time the start event of the Spawn Context triggers. This adds a new linked group of particles to the Particle Strip. +The **Increment Strip Index On Start** Block helps to manage the initialization of Particle Strips. A Particle Strip comprises of linked groups of particles and the amount of these groups is defined by the strip's stripIndex attribute. Each particle with the same stripIndex attribute will a be part of the same strip, incrementing the stripIndex will create a new strip. This Block increments the Particle Strip's stripIndex attribute (unsigned integer) each time the start event of the Spawn Context triggers. This adds a new linked group of particles to the Particle Strip. The stripIndex attribute returns to zero when a stop event triggers or if stripIndex reaches the **Strip Max Count**. This goes back to the first strip group index. 
diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(ChangeSpeed).md b/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(ChangeSpeed).md index adf7746ed54..335b9fb9d2e 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(ChangeSpeed).md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(ChangeSpeed).md @@ -1,5 +1,7 @@ # Velocity from Direction & Speed (Change Speed) +> [!IMPORTANT] +> This feature is experimental. To use this feature, open the **Preferences** window, go to the **Visual Effects** tab, and enable **Experimental Operators/Blocks**. Menu Path : **Velocity > [Set/Add] Velocity from Direction & Speed (Change Speed)** The **Velocity from Direction And Speed : Change Speed** Block calculates a velocity for the particle based on the direction attribute. diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(NewDirection).md b/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(NewDirection).md index 663e65f8807..1b16ce6c763 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(NewDirection).md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(NewDirection).md @@ -1,5 +1,8 @@ # Velocity from Direction & Speed (New Direction) +> [!IMPORTANT] +> This feature is experimental. To use this feature, open the **Preferences** window, go to the **Visual Effects** tab, and enable **Experimental Operators/Blocks**. + Menu Path : **Velocity > [Set/Add] Velocity from Direction & Speed (New Direction)** The **Velocity from Direction And Speed (New Direction)** Block calculates a velocity for the particle based on a blend ratio between a given direction, and the direction attribute. 
diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(RandomDirection).md b/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(RandomDirection).md index cebf390f063..69da873c2b1 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(RandomDirection).md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(RandomDirection).md @@ -1,5 +1,8 @@ # Velocity from Direction & Speed (Random Direction) +> [!IMPORTANT] +> This feature is experimental. To use this feature, open the **Preferences** window, go to the **Visual Effects** tab, and enable **Experimental Operators/Blocks**. + Menu Path : **Velocity > [Set/Add] Velocity from Direction & Speed (Random Direction)** The **Velocity from Direction And Speed (Random Direction)** Block calculates a velocity for the particle based on a blend ratio between a random direction (per-particle), and the direction attribute. diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(Spherical).md b/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(Spherical).md index 303f16ffbe9..c9c6e5e2616 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(Spherical).md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(Spherical).md @@ -1,5 +1,8 @@ # Velocity from Direction & Speed (Spherical) +> [!IMPORTANT] +> This feature is experimental. To use this feature, open the **Preferences** window, go to the **Visual Effects** tab, and enable **Experimental Operators/Blocks**. 
+ Menu Path : **Velocity > [Set/Add] Velocity from Direction & Speed (Spherical)** The **Velocity from Direction And Speed (Spherical)** Block calculates a velocity for the particle based on a blend ratio between the direction attribute and a spherical vector. diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(Tangent).md b/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(Tangent).md index a951cf12914..c9e6e6f332d 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(Tangent).md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Block-VelocityFromDirectionAndSpeed(Tangent).md @@ -1,5 +1,8 @@ # Velocity from Direction & Speed (Tangent) +> [!IMPORTANT] +> This feature is experimental. To use this feature, open the **Preferences** window, go to the **Visual Effects** tab, and enable **Experimental Operators/Blocks**. + Menu Path : **Velocity > [Set/Add] Velocity from Direction & Speed (Tangent)** The **Velocity from Direction And Speed (Tangent)** Block calculates a velocity for the particle based on a blend ratio between the direction attribute and a tangent vector. diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/CustomHLSL-Common.md b/Packages/com.unity.visualeffectgraph/Documentation~/CustomHLSL-Common.md index 1e051b304e8..11d37b0c92c 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/CustomHLSL-Common.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/CustomHLSL-Common.md @@ -1,9 +1,9 @@ ## Custom HLSL Nodes (block and operator) These Custom HLSL nodes let you execute custom HLSL code during particle simulation. -You can use an [operator](Operator-CustomHLSL.md) for horizontal flow or [block](Block-CustomHLSL.md) for vertical flow (in contexts). -To be valid and correctly interpreted by the VFX Graph some conventions must be adopted. 
+You can use an [operator](Operator-CustomHLSL.md) for horizontal flow or a [block](Block-CustomHLSL.md) for vertical flow (in contexts). +![Custom HLSL](Images/custom-hlsl.png) ## Node settings | **Setting name** | UI | Location | Action | @@ -14,17 +14,15 @@ To be valid and correctly interpreted by the VFX Graph some conventions must be ## HLSL Code -The HLSL code can be either embedded in the node or an HLSL file can be used. -In both cases you are allowed to include any other valid HLSL file (using #include ""). -You can also provide multiple functions in the same HLSL source (embedded or file), in this case, you'll have to pick the desired one in a choice list in the node. +The HLSL code can be either **embedded** in the node or an **HLSL file** can be used. +You can provide multiple functions in the same HLSL source (embedded or file), in this case, you'll have to pick the desired one in a choice list in the node. +To be valid and correctly interpreted by the VFX Graph, some conventions must be adopted. ## Function declaration To be properly recognized by VFX Graph the function must fulfill the following requirements: - Return a supported type [Supported types](#Supported-types) -- Each function parameter must have the `in`, `out` or `inout` access modifier - Each function parameter must be of a [Supported types](#Supported-types) - If you declare multiple functions, they must have unique names. -- **Function can take a maximum of 4 parameters** ## Inline documentation You can specify a tooltip for each function parameter using the three slash comment notation as shown below: @@ -38,38 +36,65 @@ These comments must be right above the function declaration. /// b: the tooltip for parameter b float Distance(in float3 a, in float3 b) { - return distance(a, b); + return distance(a, b); } ``` -You may want to write some helper function that you don't want to be exposed in the node's choice list. 
In that case, simply put this special comment: +You may want to write a helper function that you don't want to be exposed in the node's choice list. +In that case, simply put this special comment above the function declaration: ```csharp /// Hidden +float SomeFunction(in float a) +{ + ... +} ``` +> [!IMPORTANT] +> When you need to implement helper functions, you must use a HLSL file, not the embedded HLSL code. ## Supported types -| **HLSL Type** | **Port Type** | **Description** | -|-----------------------|----------------|-------------------------------------------------------| -| **bool** | bool | A scalar value represented as a boolean. | -| **uint** | uint | A scalar value represented as an unsigned integer. | -| **int** | int | A scalar value represented as a integer. | -| **float** | float | A scalar value represented as a float. | -| **float2** | Vector2 | A structure containing two float. | -| **float3** | Vector3 | A structure containing three float. | -| **float4** | Vector4 | A structure containing four float. | -| **float4x4** | Matrix4x4 | A structure representing a matrix. | -| **VFXSampler2D** | Texture2D | A two-dimensional texture. | -| **VFXSampler3D** | Texture3D | A three-dimensional texture. | -| **VFXGradient** | Gradient | A structure that describes a gradient that can be sampled. | -| **VFXCurve** | AnimationCurve | A structure that describes a curve that can be sampled. | -| **StructuredBuffer** | GraphicsBuffer | A read-only buffer for storing an array of structures or basic HLSL data types. | -| **ByteAddressBuffer** | GraphicsBuffer | A read-only raw buffer. | +### Basic types + +| **HLSL Type** | **Port Type** | **Description** | +|-----------------------|----------------|----------------------------------------------------------------------------------| +| **bool** | bool | A scalar value represented as a boolean. | +| **uint** | uint | A scalar value represented as an unsigned integer. 
| +| **int** | int | A scalar value represented as a integer. | +| **float** | float | A scalar value represented as a float. | +| **float2** | Vector2 | A structure containing two float. | +| **float3** | Vector3 | A structure containing three float. | +| **float4** | Vector4 | A structure containing four float. | +| **float4x4** | Matrix4x4 | A structure representing a matrix. | +| **VFXGradient** | Gradient | A structure that describes a gradient that can be sampled. | +| **VFXCurve** | AnimationCurve | A structure that describes a curve that can be sampled. | + +### Texture types + +| **HLSL Type** | **Port Type** | **Description** | +|------------------------|------------------|----------------------------------------------------------------------------------| +| **VFXSampler2D** | Texture2D | A structure containing a sampler state and a two-dimensional texture. | +| **VFXSampler3D** | Texture3D | A structure containing a sampler state and a three-dimensional texture. | +| **VFXSampler2DArray** | Texture2DArray | A structure containing a sampler state and an array of two-dimensional textures. | +| **VFXSamplerCube** | TextureCube | A structure containing a sampler state and a cube texture. | + +### Buffers + +| **HLSL Type** | **Port Type** | **Description** | +|-----------------------------|----------------|----------------------------------------------------------------------------------| +| **StructuredBuffer** | GraphicsBuffer | A read-only buffer for storing an array of structures or basic HLSL data types. | +| **ByteAddressBuffer** | GraphicsBuffer | A read-only raw buffer. | +| **Buffer** | GraphicsBuffer | A read-only raw buffer for basic HLSL types. | +| **AppendStructuredBuffer** | GraphicsBuffer | A read-only buffer where you can append new entries. | +| **ConsumeStructuredBuffer** | GraphicsBuffer | A read-only buffer where you can remove entries. | +| **RWBuffer** | GraphicsBuffer | A read-write raw buffer for basic HLSL types. 
| +| **RWStructuredBuffer** | GraphicsBuffer | A read-write buffer for storing an array of structures or basic HLSL data types. | +| **RWByteAddressBuffer** | GraphicsBuffer | A read-write raw buffer. | ## Sampling ### Textures -To sample a texture you must use the VFX Graph structure called VFXSampler2D (or VFXSample3D) which is defined as shown below: +The simplest way to sample a texture is to use the VFX Graph structure called VFXSampler2D (or VFXSample3D) which is defined as shown below: ```csharp struct VFXSampler2D { @@ -77,7 +102,7 @@ struct VFXSampler2D SamplerState s; }; ``` -The easiest way to sample a texture is to use a function provided by the VFX Graph common HLSL code: `SampleTexture(VFXSampler2D texure, float2 coordinates)`. +VFX Graph provides this function: `float4 SampleTexture(VFXSampler2D texure, float2 coordinates)`. But you can also use HLSL built-in functions to sample a texture using the VFXSampler2D fields. In that case, since this is used in a compute shader you must specify which mipmap level to sample (use `SampleLevel` for instance). @@ -85,24 +110,35 @@ In that case, since this is used in a compute shader you must specify which mipm You can use two types of buffers: `ByteAddressBuffer` and `StructuredBuffer<>`. In both cases the usage is the same as in any HLSL code: - `ByteAddressBuffer`: use the `Load` function -````csharp +```csharp uint char = buffer.Load(attributes.particleId % count); -```` +``` - `StructuredBuffer<>`: use classic index accessor -````csharp +```csharp float angle = phase + freq * buffer[attributes.particleId % bufferSize]; -```` +``` ### Gradient Gradients are handled specifically in VFX Graph (they are packed in a single texture) so you must use a dedicated function to sample them. Here is the function definition: `SampleGradient(VFXGradient gradient, float t)` -````csharp +```csharp float3 color = SampleGradient(grad, t); -```` +``` ### Curve Sampling a curve is really similar to sampling a gradient. 
Here is the function definition: `SampleCurve(VFXCurve curve, float t)` -````csharp +```csharp float r = SampleCurve(curve, t); -```` +``` + +## HLSL Code Editor +You can edit your HLSL code directly inside Unity Editor by clicking the `Edit` button on the node in the graph (see screenshot [above](#custom-hlsl-nodes-block-and-operator)). +The HLSL Code Editor supports the following shortcuts: +- `Ctrl + Z` and `Ctrl + Y` for Undo/Redo (independent from the Unity Editor undo stack) +- `Ctrl + S` to save the current HLSL code +- `Ctrl + Mouse Wheel` to change the font size + +>If you need to write down the name of a particle attribute, you can drag&drop drop the attribute from the blackboard to the code editor. This way you avoid any risk of typo. + +![Custom HLSL Editor](Images/custom-hlsl-editor.png) diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/ExperimentalFeatures.md b/Packages/com.unity.visualeffectgraph/Documentation~/ExperimentalFeatures.md index 7ba4280f9a3..e39ba2d7817 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/ExperimentalFeatures.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/ExperimentalFeatures.md @@ -4,7 +4,7 @@ Some features in Visual Effect Graph are in an experimental state, because impro By default, experimental features are disabled, meaning that they do not appear in the search results when you create Blocks, operators, or Contexts. -To enable experimental features, go to **Edit** > **Preferences** > **Visual Effects** and enable **Experimental Operators/Blockers**. This gives you access to all nodes marked as experimental in the graph. +To enable experimental features, go to **Edit** > [**Preferences**](VisualEffectPreferences.md) > **Visual Effects** and enable **Experimental Operators/Blockers**. This gives you access to all nodes marked as experimental in the graph. 
![](Images/Experimental-features-enable.png) diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/Blackboard-Category.gif b/Packages/com.unity.visualeffectgraph/Documentation~/Images/Blackboard-Category.gif deleted file mode 100644 index 4e8a6d9566b..00000000000 Binary files a/Packages/com.unity.visualeffectgraph/Documentation~/Images/Blackboard-Category.gif and /dev/null differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/Blackboard-Properties.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/Blackboard-Properties.png deleted file mode 100644 index f88e57e0db2..00000000000 Binary files a/Packages/com.unity.visualeffectgraph/Documentation~/Images/Blackboard-Properties.png and /dev/null differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/Bounds-Init.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/Bounds-Init.png new file mode 100644 index 00000000000..b5cedfd869d Binary files /dev/null and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/Bounds-Init.png differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/ControlPanelIcon.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/ControlPanelIcon.png new file mode 100644 index 00000000000..e93d697026d Binary files /dev/null and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/ControlPanelIcon.png differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/Operator-ProbabilitySamplingExample.gif b/Packages/com.unity.visualeffectgraph/Documentation~/Images/Operator-ProbabilitySamplingExample.gif deleted file mode 100644 index 03d4629479f..00000000000 Binary files a/Packages/com.unity.visualeffectgraph/Documentation~/Images/Operator-ProbabilitySamplingExample.gif and /dev/null differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/Operator-ProbabilitySamplingExample.mp4 
b/Packages/com.unity.visualeffectgraph/Documentation~/Images/Operator-ProbabilitySamplingExample.mp4 new file mode 100644 index 00000000000..4fa087cb92d Binary files /dev/null and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/Operator-ProbabilitySamplingExample.mp4 differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/Operator-RandomNumberExample.mp4 b/Packages/com.unity.visualeffectgraph/Documentation~/Images/Operator-RandomNumberExample.mp4 new file mode 100644 index 00000000000..460ab5b3ff8 Binary files /dev/null and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/Operator-RandomNumberExample.mp4 differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/Properties-Inspector.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/Properties-Inspector.png deleted file mode 100644 index de7f1c12025..00000000000 Binary files a/Packages/com.unity.visualeffectgraph/Documentation~/Images/Properties-Inspector.png and /dev/null differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/PropertyBinder_example.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/PropertyBinder_example.png new file mode 100644 index 00000000000..96c67a70c1a Binary files /dev/null and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/PropertyBinder_example.png differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/PropertyNode.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/PropertyNode.png index 744ff028c5b..9f569ffff0a 100644 Binary files a/Packages/com.unity.visualeffectgraph/Documentation~/Images/PropertyNode.png and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/PropertyNode.png differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/PropertyNodes.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/PropertyNodes.png deleted file mode 100644 index ea83b111b7b..00000000000 
Binary files a/Packages/com.unity.visualeffectgraph/Documentation~/Images/PropertyNodes.png and /dev/null differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-attribute.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-attribute.png new file mode 100644 index 00000000000..2c2b9cd182c Binary files /dev/null and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-attribute.png differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-builtin-attributes.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-builtin-attributes.png new file mode 100644 index 00000000000..bb0e51301b5 Binary files /dev/null and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-builtin-attributes.png differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-create-custom-attribute.gif b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-create-custom-attribute.gif new file mode 100644 index 00000000000..ef871b6caba Binary files /dev/null and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-create-custom-attribute.gif differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-dragdrop-attribute.gif b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-dragdrop-attribute.gif new file mode 100644 index 00000000000..ec3edcd0779 Binary files /dev/null and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-dragdrop-attribute.gif differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-filter.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-filter.png new file mode 100644 index 00000000000..c9b7463fb6f Binary files /dev/null and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-filter.png differ diff --git 
a/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-properties-inspector.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-properties-inspector.png new file mode 100644 index 00000000000..3a8a6c069fa Binary files /dev/null and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-properties-inspector.png differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-properties.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-properties.png new file mode 100644 index 00000000000..39d8293f957 Binary files /dev/null and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-properties.png differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-subgraph.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-subgraph.png new file mode 100644 index 00000000000..61a5462f184 Binary files /dev/null and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-subgraph.png differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-unused.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-unused.png new file mode 100644 index 00000000000..c4efb33381e Binary files /dev/null and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/blackboard-unused.png differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/custom-hlsl-editor.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/custom-hlsl-editor.png new file mode 100644 index 00000000000..ab3677d48c7 Binary files /dev/null and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/custom-hlsl-editor.png differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Images/custom-hlsl.png b/Packages/com.unity.visualeffectgraph/Documentation~/Images/custom-hlsl.png new file mode 100644 index 00000000000..2246a03d330 
Binary files /dev/null and b/Packages/com.unity.visualeffectgraph/Documentation~/Images/custom-hlsl.png differ diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Operator-CustomHLSL.md b/Packages/com.unity.visualeffectgraph/Documentation~/Operator-CustomHLSL.md index 65d79380eb0..3eb137a2ce1 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/Operator-CustomHLSL.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Operator-CustomHLSL.md @@ -3,14 +3,16 @@ Menu Path : **Operator > HLSL > Custom HLSL** The **Custom HLSL Operator** allows you to write an HLSL function that takes **inputs** and produce **outputs**. -See common documentation for *Custom HLSL Operator* and *Custom HLSL Block* [here](CustomHLSL-Common.md). +For general information about Custom HLSL nodes, refer to [Custom HLSL Nodes](CustomHLSL-Common.md). -## Function specific constraints -- Return type cannot be `void` -- Function parameters must not be of type `VFXAttributes` +## Specific constraints +- Function must either return a value or have at least one out/inout parameter +- Function do not support parameters of type `VFXAttributes` +- **Function can take a maximum of 4 parameters** -To each function parameter will match an operator input, and the return value will match the operator output. -If you use the access modifier `out` or `inout` for some input parameters, then they will also generate an output port. +Each function parameter with no access modifier or `in`/`inout` access modifier will match an input port. +The return value will match the operator output. +If you use the access modifier `out` or `inout` for some function parameters, then they will generate an output port. Here is an example of a valid function declaration: ```csharp @@ -40,3 +42,31 @@ float Distance(in float3 a, in float3 b) return distance(a, b); } ``` + +You can also give a name to the output for the return value. 
+In the sample below, the output port in the graph will be named `sqr` +```csharp +/// return: sqr +float Square(in float t) +{ + return t * t; +} +``` + +## Include other HLSL files +You can include any valid HLSL file with the standard `#include` directive. +The path to the included file can be: +- **Relative** to the VFX asset where the block will be used +- **Absolute** starting from the `Assets/` folder. +- **Absolute** starting from the `Packages/` folder. + +For the Custom HLSL operator, the `#include` directive is only supported when used with hlsl file (not embedded code). + +```c++ +#include "HLSL/common.hlsl" + +float SomeFunctionName(in float someValue) +{ + // use any function or variable declared in the included file +} +``` diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Operator-GetAttributeParticleCountInStrip.md b/Packages/com.unity.visualeffectgraph/Documentation~/Operator-GetAttributeParticleCountInStrip.md index 1708be127ea..1dd65640b2d 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/Operator-GetAttributeParticleCountInStrip.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Operator-GetAttributeParticleCountInStrip.md @@ -15,3 +15,5 @@ The **Get Attribute: particleCountInStrip** returns the particleCountInStrip, wh ## Details The value the attribute returns uses the system’s space (either local-space or world-space). + +This operator can return wrong values when used in the Initialize context, if the strip index property is not constant. \ No newline at end of file diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Operator-PointCache.md b/Packages/com.unity.visualeffectgraph/Documentation~/Operator-PointCache.md index e2193371eb2..847cbdc38c1 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/Operator-PointCache.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Operator-PointCache.md @@ -1,5 +1,8 @@ # Point Cache +> [!IMPORTANT] +> This feature is experimental. 
To use this feature, open the **Preferences** window, go to the **Visual Effects** tab, and enable **Experimental Operators/Blocks**. + Menu Path : **Operator > Utility > Point Cache** The **Point Cache** Operator exposes the attribute maps and the point count stored into a [Point Cache asset](point-cache-asset.md). @@ -22,3 +25,5 @@ Based on the **Asset**, the number of AttributeMap outputs changes to match the ## Remarks If the attribute this Operator is trying to read from has not been written to, it returns the default standard value for its type. + +You can use the [Point Cache Bake Tool](point-cache-bake-tool.md) provided by VFX Graph to generate point cache from meshes or textures. diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Operator-ProbabilitySampling.md b/Packages/com.unity.visualeffectgraph/Documentation~/Operator-ProbabilitySampling.md deleted file mode 100644 index d64d16167d1..00000000000 --- a/Packages/com.unity.visualeffectgraph/Documentation~/Operator-ProbabilitySampling.md +++ /dev/null @@ -1,130 +0,0 @@ -# Probability Sampling - - - -Menu Path : **Operator > Logic > Probability Sampling** - -The **Probability Sampling** Operator performs a kind of switch/case operation where a weight controls the probability of selecting a case. If all weights are equal, this Operator produces a uniform distribution of the different output values. - -![img](Images/Operator-ProbabilitySamplingExample.gif) - -## Operator settings - -| **Setting** | **Description** | -| --------------------- | ------------------------------------------------------------ | -| **Integrated Random** | (**Inspector**) Specifies whether this Operator generates the random number itself, or if it allows you to input a custom random number instead. | -| **Seed** | Defines the scope of the random number. For more information, see [Random Number](Operator-RandomNumber.md#oprerator-settings).
This setting only appears if you enable **Integrated Random**. | -| **Constant** | Specifies whether the generated random number is constant or not. For more information, see [Random Number](Operator-RandomNumber.md#oprerator-settings).
This setting only appears if you enable **Integrated Random**. | -| **Entry Count** | The number of cases to test. The maximum value is **32**. | - -## Operator properties - -| **Input** | **Type** | **Description** | -| ------------ | --------------------------------------- | ------------------------------------------------------------ | -| **Weight 0** | float | The weight for the first value. The larger this value is compared to the rest of the weights, the higher the chance the Operator selects the first value. | -| **Value 0** | [Configurable](#operator-configuration) | The value to output if the Operator selects **Weight 0**. | -| **Weight 1** | float | The weight for the second value. The larger this value is compared to the rest of the weights, the higher the chance the Operator selects the second value. | -| **Value 1** | [Configurable](#operator-configuration) | The value to output if the Operator selects **Weight 1**. | -| **Weight N** | float | To expose more cases, increase the **Entry Count**. | -| **Value N** | [Configurable](#operator-configuration) | To expose more cases, increase the **Entry Count**. | -| **Rand** | float | The value this Operator uses to choose a value from amongst the weights. This should be between **0** and **1**.This property only appears if you disable **Integrated Random**. | -| **Hash** | uint | The value this Operator uses to create a constant random value. This property only appears if you enable **Constant**. | - -| **Output** | **Type** | **Description** | -| ---------- | --------------------------------------- | ------------------------------------------------------------ | -| **Output** | [Configurable](#operator-configuration) | The value where the corresponding case entry is equal to **Input** value or, if there isn’t any match, **Default**. | - -## Operator configuration - -To view the Node's configuration, click the **cog** icon in the Node's header. 
- -| **Property** | **Description** | -| ------------ | ------------------------------------------------------------ | -| **Type** | The value type this Operator uses. For the list of types this property supports, see [Available types](#available-types). | - -### Available types - -You can use the following types for your **Input values** and **Output** ports: - -- **Bool** -- **Int** -- **Uint** -- **Float** -- **Vector2** -- **Vector3** -- **Vector4** -- **Gradient** -- **AnimationCurve** -- **Matrix** -- **OrientedBox** -- **Color** -- **Direction** -- **Position** -- **Vector** -- **Transform** -- **Circle** -- **ArcCircle** -- **Sphere** -- **ArcSphere** -- **AABox** -- **Plane** -- **Cylinder** -- **Cone** -- **ArcCone** -- **Torus** -- **ArcTorus** -- **Line** -- **Flipbook** -- **Camera** - -This list does not include any type that corresponds to a buffer or texture because it is not possible to assign these types as local variables in generated HLSL code. - -## Details - -The internal algorithm of this Operator can be described by this sample code : - -``` -//Input - -float[] weight = { 0.2f, 1.2f, 0.7f }; - -char[] values = { 'a', 'b', 'c' }; - -//Compute prefix sum of height - -float[] prefixSumOfWeight = new float[height.Length]; - -prefixSumOfHeight[0] = weight[0]; - -for (int i = 1; i < weight.Length; ++i) - - prefixSumOfHeight[i] = weight[i] + weight[i - 1]; - -//Pick a random value [0, sum of all height] - -var rand = Random.Range(0.0f, weight[weight.Length - 1]); - -//Evaluate probability sampling - -char r = 'z'; - -for (int i = 0; i < weight.Length; ++i) - -{ - - if (rand < prefixSumOfWeight[i] || i == weight.Length - 1) - - { - - r = values[i]; - - break; - - } - -} - -//Output - -Debug.Log("Result : " + r.ToString()); -``` diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Operator-RandomNumber.md b/Packages/com.unity.visualeffectgraph/Documentation~/Operator-RandomNumber.md index bfe1e04ed7e..97c47089d36 100644 --- 
a/Packages/com.unity.visualeffectgraph/Documentation~/Operator-RandomNumber.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Operator-RandomNumber.md @@ -8,21 +8,53 @@ You can define the scope of the Operator to generate random values on a per-part Note that every random number this Operator generates also depends on the global seed in the Visual Effect component. Running the same effect with the same seed allows for deterministic behavior in random number generations. + + ## Operator settings | **Property** | **Type** | **Description** | | ------------ | -------- | ------------------------------------------------------------ | -| **Seed** | Enum | Defines the scope of the random number. The options are:
•**Per Particle**: The Operator generates a different number every time.
•**Per Component**: The Operator generates a random number every frame and uses it for every particle in the same component.
•**Per Particle Strip**: The Operator generates the same number every time based on the value in the **Seed** input port. If you use this option, the Operator implicitly enables **Constant** and does not allow you to disable it. | +| **Seed** | Enum | Defines the scope of the random number. The options are:
•**Per Particle**: The Operator generates a different number every time.
•**Per VFX Component**: The Operator generates a random number every frame and uses it for every particle in the same component.
•**Per Particle Strip**: The Operator generates the same number every time based on the value in the **Seed** input port. If you use this option, the Operator implicitly enables **Constant** and does not allow you to disable it. | | **Constant** | boolean | Specifies whether the generated random number is constant or not.
When enabled, the Operator generates the same number every time based on the **Seed** Operator property.
This setting only appears if you set **Seed** to **Per Particle** or **Per Component**. If you set **Seed** to **Per Particle Strip**, the Operator implicitly enables this setting and does not allow you to disable it. | +| **Output** | **Type** | **Description** | +| ---------- | --------------------------------------- | ------------------------------------- | +| **Output** | [Configurable](#operator-configuration) | The output random per axis dimension. | + + + ## Operator properties -| **Input** | **Type** | **Description** | -| --------- | -------- | ------------------------------------------------------------ | -| **Min** | float | The minimum value of the generated random number | -| **Max** | float | The maximum value of the generated random number | -| **Seed** | uint | Specifies a seed that the Operator uses to generate random values.
This property only appears is you enable **Constant**. | +| **Input** | **Type** | **Description** | +| -------------------- | -------- | ------------------------------------------------------------ | +| **Min** | float | The minimum value of the generated random number | +| **Max** | float | The maximum value of the generated random number | +| **Seed** | uint | Specifies a seed that the Operator uses to generate random values.
This property only appears if you enable **Constant**.
If the output type has more than one dimension and **Independent Seed** is enabled, there is one **Seed** for every single channel. If two seeds are equal, the resulting random values are identical in this mode. | +| **Independent Seed** | bool | When enabled, you can customize the Seed per channel; otherwise, the Seed is randomly generated for each channel.
This property only appears if output type has more than one dimension. | | **Output** | **Type** | **Description** | | ---------- | -------- | -------------------------------------------------------- | | **r** | float | The generated random number between **Min** and **Max**. | + +## Operator configuration + +To view the Node's configuration, click the **cog** icon in the Node's header. + +| **Property** | **Description** | +| ------------ | ------------------------------------------------------------ | +| **Type** | The value type this Operator uses. For the list of types this property supports, see [Available types](#available-types). | + + + +### Available types + +You can use the following types: + +- **Float** +- **Vector2** +- **Vector3** +- **Vector4** +- **Color** +- **Bool** +- **Int** +- **Uint** diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Operator-RandomSelectorWeighted.md b/Packages/com.unity.visualeffectgraph/Documentation~/Operator-RandomSelectorWeighted.md new file mode 100644 index 00000000000..dc210e4ed38 --- /dev/null +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Operator-RandomSelectorWeighted.md @@ -0,0 +1,116 @@ +# Random Selector Weighted + + + +Menu Path : **Operator > Logic > Random Selector Weighted** + +The **Random Selector Weighted** Operator performs a kind of switch/case operation where a weight controls the probability of selecting a case. If all weights are equal, this Operator produces a uniform distribution of the different output values. + + + +## Operator settings + +| **Setting** | **Description** | +| ------------------ | ------------------------------------------------------------ | +| **Mode** | (**Inspector**) Specifies whether this Operator generates the random number itself, or if it allows you to input a custom random number instead. | +| **Seed** | Defines the scope of the random number. For more information, see [Random Number](Operator-RandomNumber.md#oprerator-settings).
This setting only appears if you set **Mode** to **Random**. | +| **Constant** | Specifies whether the generated random number is constant or not. For more information, see [Random Number](Operator-RandomNumber.md#oprerator-settings).
This setting only appears if you set **Mode** to **Random**. | +| **Entry Count** | The number of cases to test. The maximum value is **32**. | +| **Weighted** | (**Inspector**) When enabled, reveals the weight value associated with each entry. When disabled, all weights are considered equal. | +| **Clamp Weighted** | (**Inspector**) Clamps each weight value to the 0 to 1 range. | + +## Operator properties + +| **Input** | **Type** | **Description** | +| ------------ | --------------------------------------- | ------------------------------------------------------------ | +| **Weight 0** | float | The weight for the first value. The larger this value is compared to the rest of the weights, the higher the chance the Operator selects the first value. The property only appears if you enable **Weighted**. | +| **Value 0** | [Configurable](#operator-configuration) | The value to output if the Operator selects **Weight 0**. | +| **Weight 1** | float | The weight for the second value. The larger this value is compared to the rest of the weights, the higher the chance the Operator selects the second value. The property only appears if you enable **Weighted**. | +| **Value 1** | [Configurable](#operator-configuration) | The value to output if the Operator selects **Weight 1**. | +| **Weight N** | float | To expose more cases, increase the **Entry Count**. | +| **Value N** | [Configurable](#operator-configuration) | To expose more cases, increase the **Entry Count**. | +| **S** | float | The value this Operator uses to choose a value from amongst the weights. This should be between **0** and **1**. This property only appears if you set **Mode** to **Custom**. | +| **Seed** | uint | The value this Operator uses to create a constant random value. This property only appears if you enable **Constant**. 
| + +| **Output** | **Type** | **Description** | +| ---------- | --------------------------------------- | ------------------------------------------------------------ | +| **Output** | [Configurable](#operator-configuration) | The value where the corresponding case entry is equal to **Input** value or, if there isn’t any match, **Default**. | + +## Operator configuration + +To view the Node's configuration, click the **cog** icon in the Node's header. + +| **Property** | **Description** | +| ------------ | ------------------------------------------------------------ | +| **Type** | The value type this Operator uses. For the list of types this property supports, see [Available types](#available-types). | + +### Available types + +You can use the following types for your **Input values** and **Output** ports: + +- **Bool** +- **Int** +- **Uint** +- **Float** +- **Vector2** +- **Vector3** +- **Vector4** +- **Gradient** +- **AnimationCurve** +- **Matrix** +- **OrientedBox** +- **Color** +- **Direction** +- **Position** +- **Vector** +- **Transform** +- **Circle** +- **ArcCircle** +- **Sphere** +- **ArcSphere** +- **AABox** +- **Plane** +- **Cylinder** +- **Cone** +- **ArcCone** +- **Torus** +- **ArcTorus** +- **Line** +- **Flipbook** +- **Camera** + +This list does not include any type that corresponds to a buffer or texture because it is not possible to assign these types as local variables in generated HLSL code. 
+ +## Details + +The internal algorithm of this Operator can be described by this sample code : + +``` +//Input +float[] weight = { 0.2f, 1.2f, 0.7f }; +char[] values = { 'a', 'b', 'c' }; + +//Compute prefix sum of height +float[] prefixSumOfWeight = new float[height.Length]; +prefixSumOfHeight[0] = weight[0]; + +for (int i = 1; i < weight.Length; ++i) + prefixSumOfHeight[i] = weight[i] + weight[i - 1]; + +//Pick a random value [0, sum of all height] +var rand = Random.Range(0.0f, weight[weight.Length - 1]); + +//Evaluate probability sampling +char r = 'z'; +for (int i = 0; i < weight.Length; ++i) +{ + if (rand < prefixSumOfWeight[i] || i == weight.Length - 1) + { + r = values[i]; + break; + } +} + +//Output +Debug.Log("Result : " + r.ToString()); +``` diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Operator-RatioOverStrip.md b/Packages/com.unity.visualeffectgraph/Documentation~/Operator-RatioOverStrip.md new file mode 100644 index 00000000000..2cd21833de8 --- /dev/null +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Operator-RatioOverStrip.md @@ -0,0 +1,19 @@ +# Ratio Over Strip + +Menu Path : **Operator > Attribute > Ratio Over Strip** + +The **Ratio Over Strip** Operator returns the ratio of the particle index relative to the total count of particles in that strip, as a value between 0.0 and 1.0. + +``` +t = particleIndexInStrip / (particleCountInStrip - 1) +``` + +## Operator properties + +| **Output** | **Type** | **Description** | +| ---------- | -------- | ------------------------------------------------- | +| **t** | float | The ratio of the particle index relative to the total particle count in that strip. | + +## Details + +If the system you use this Operator in does not have strips, Unity returns 0 instead. 
diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Operator-SampleAttributeMap.md b/Packages/com.unity.visualeffectgraph/Documentation~/Operator-SampleAttributeMap.md index 2fbb49b1f47..7599862a42e 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/Operator-SampleAttributeMap.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Operator-SampleAttributeMap.md @@ -1,5 +1,8 @@ # Sample Attribute Map +> [!IMPORTANT] +> This feature is experimental. To use this feature, open the **Preferences** window, go to the **Visual Effects** tab, and enable **Experimental Operators/Blocks**. + **Menu Path : Operator > Sampling > Attribute Map** The Sample Attribute Map Operator enables you to sample an [attribute map](point-cache-in-vfx-graph.md#attribute-map) from a [Point Cache](point-cache-in-vfx-graph.md). diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Properties.md b/Packages/com.unity.visualeffectgraph/Documentation~/Properties.md index e14160a3c58..98f2aea80af 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/Properties.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Properties.md @@ -59,13 +59,14 @@ For Example, a Position type carries a Vector3 value and a Spaceable Property. I Depending on the [System Simulation Space](Systems.md#system-spaces), the value will be automatically transformed to the simulation space if required. -> Tip: You can use the Change Space Operator to manually change a Property Space. +> [!TIP] +> You can use the Change Space Operator to manually change a Property Space. ## Property Nodes Property Nodes are [Operators](Operators.md) that give access to Graph-Wide Properties defined in the [Blackboard](Blackboard.md). These properties allow you to reuse the same value throughout the graph at different places. -![](Images/PropertyNodes.png) +![](Images/PropertyNode.png) * Property Nodes display a Green dot left to the Property name if the property is exposed. 
* To create a Property Node: diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/PropertyBinders.md b/Packages/com.unity.visualeffectgraph/Documentation~/PropertyBinders.md index 5ea05be5bc8..2b022969cb1 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/PropertyBinders.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/PropertyBinders.md @@ -12,29 +12,34 @@ You can add Property Binders through a common MonoBehaviour called **VFX Propert You can also add Property binders through the **Add Component** menu. Unity creates a VFX Property Binder component automatically if one does not already exist. + +![](Images/PropertyBinder_example.png) +>Here is an example of using Property Binder to bind Game Object's **transform** and **position** to VFX graph's exposed properties of similar type. + + ## Built-in Property Binders The Visual Effect Graph package comes with the following built-in property binders: * Audio - * **Audio Spectrum to AttributeMap :** Bakes the Audio Spectrum to an Attribute map and binds it to a Texture2D and uint Count properties. + * **Audio Spectrum to AttributeMap**: Bakes the Audio Spectrum to an Attribute map and binds it to a Texture2D and uint Count properties. * GameObject: - * **Enabled** : Binds the Enabled flag of a Game Object to a bool property. + * **Enabled**: Binds the Enabled flag of a Game Object to a bool property. * Point Cache: - * **Hierarchy to AttributeMap** : Binds positions an target positions of a hierarchy of transforms to Texture2Ds AttributeMaps and uint Count. + * **Hierarchy to AttributeMap**: Binds positions and target positions of a hierarchy of transforms to Texture2Ds AttributeMaps and uint Count. * **Multiple Position Binder**: Binds positions of a list of transforms to a Texture2D AttributeMap and uint Count. * Input: - * **Axis** : Binds the float value of an Input Axis to a float property. - * **Button** : Binds the bool value of a button press state to a bool property. 
- * **Key** : Binds the bool value of a keyboard key press state to a bool property. - * **Mouse** : Binds the general values of a mouse (Position, Velocity, Clicks) to exposed properties. - * **Touch** : Binds a input values of a Touch Input (Position, Velocity) to exposed properties. + * **Axis**: Binds the float value of an Input Axis to a float property. + * **Button**: Binds the bool value of a button press state to a bool property. + * **Key**: Binds the bool value of a keyboard key press state to a bool property. + * **Mouse**: Binds the general values of a mouse (Position, Velocity, Clicks) to exposed properties. + * **Touch**: Binds input values of a Touch Input (Position, Velocity) to exposed properties. * Utility: - * **Light** : Binds Light Properties (Color, Brightness, Radius) to exposed properties. - * **Plane** : Binds Plane Properties (Position, Normal) to exposed properties. - * **Terrain** : Binds Terrain Properties (Size, Height Map) to exposed properties. + * **Light**: Binds Light Properties (Color, Brightness, Radius) to exposed properties. + * **Plane**: Binds Plane Properties (Position, Normal) to exposed properties. + * **Terrain**: Binds Terrain Properties (Size, Height Map) to exposed properties. * Transform: - * **Position**: Binds game object position to vector exposed property. + * **Position**: Binds game object position to vector exposed property. In the binder, you need to add "*_position*" at the end of your property name for it to work as expected. * **Position (previous)**: Binds previous game object position to vector exposed property. * **Transform**: Binds game object transform to transform exposed property. * **Velocity**: Binds game object velocity to vector exposed property. 
diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/TableOfContents.md b/Packages/com.unity.visualeffectgraph/Documentation~/TableOfContents.md index c8f19c15c3d..e8c394b5b7e 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/TableOfContents.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/TableOfContents.md @@ -253,7 +253,6 @@ * [Nor](Operator-LogicNor.md) * [Not](Operator-LogicNot.md) * [Or](Operator-LogicOr.md) - * [Probability Sampling](Operator-ProbabilitySampling.md) * [Switch](Operator-Switch.md) * Math * Arithmetic @@ -355,6 +354,7 @@ * [Value Noise](Operator-ValueNoise.md) * Random * [Random Number](Operator-RandomNumber.md) + * [Random Selector](Operator-RandomSelectorWeighted.md) * Sampling * [Buffer Count](Operator-BufferCount.md) * [Get Mesh Index Count](Operator-MeshIndexCount.md) @@ -385,6 +385,7 @@ * [Sample Texture3D](Operator-SampleTexture3D.md) * [Sample TextureCube](Operator-SampleTextureCube.md) * [Sample TextureCubeArray](Operator-SampleTextureCubeArray.md) + * [Sample Attribute Map](Operator-SampleAttributeMap.md) * Spawn * [Spawn State](Operator-SpawnState.md) * Utility diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/Type-Direction.md b/Packages/com.unity.visualeffectgraph/Documentation~/Type-Direction.md index c69a256c9d7..a01bb7666f2 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/Type-Direction.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/Type-Direction.md @@ -1,6 +1,6 @@ # Direction -A world-space or local-space three-component direction vector. +A world-space or local-space three-component direction vector. 
It indicates the initial orientation of the particle and is commonly used with blocks like [Set Velocity from Direction & Speed](Block-VelocityFromDirectionAndSpeed(ChangeSpeed).md) and [Set Position (Sphere)](Block-SetPosition(Sphere).md), [Set Position (Cone)](Block-SetPosition(Cone).md) where direction is set by shape's underlying surface normals. ## Properties diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/VisualEffectComponent.md b/Packages/com.unity.visualeffectgraph/Documentation~/VisualEffectComponent.md index 57adcb43620..e16a63edb69 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/VisualEffectComponent.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/VisualEffectComponent.md @@ -1,6 +1,6 @@ # Visual Effect (Component) -The Visual Effect Component creates an instance of a Visual Effect in the scene, based on a Visual Effect Graph Asset. It controls how the effect plays, renders and let the user customize the instance by editing [Exposed Properties](PropertiesAndBlackboard.md#exposed-properties). +The Visual Effect Component creates an instance of a Visual Effect in the scene, based on a Visual Effect Graph Asset. It controls how the effect plays, renders and let the user customize the instance by editing [Exposed Properties](Blackboard.md#creating-properties). ## How to create a Visual Effect diff --git a/Packages/com.unity.visualeffectgraph/Documentation~/visual-effect-bounds.md b/Packages/com.unity.visualeffectgraph/Documentation~/visual-effect-bounds.md index 09e5faa9ed7..1df6074f2a0 100644 --- a/Packages/com.unity.visualeffectgraph/Documentation~/visual-effect-bounds.md +++ b/Packages/com.unity.visualeffectgraph/Documentation~/visual-effect-bounds.md @@ -1,20 +1,31 @@ -# Visual effect bounds +# Visual Effect Bounds Unity uses the bounds of a visual effect to determine whether to render it or not. If a camera can't see the bounds of an effect, then it culls and doesn't render the effect. 
The cumulative bounds of each System within a visual effect define the bounds of the visual effect. It's important that the bounds of each System correctly encapsulate the System: - If the bounds are too large, cameras process the visual effect even if individual particles aren't on screen. This results in wasted resources. - If the bounds are too small, Unity may cull the visual effect even if some of the effect's particles are still on screen. -Each System in a visual effect defines its bounds in the [Initialize Context](Context-Initialize.md). By default, Unity calculates the bounds of each System automatically, but you can change this behavior and use other methods to define the bounds. The Initialize Context's **Bounds Setting Mode** property controls the method the visual effect uses. The bound calculation methods are: +**Culling Flags** can be set in the [Visual Effect Asset Inspector](VisualEffectGraphAsset.md#visual-effect-asset-inspector). The [Culling state](performance-debug-panel.md#particle-system-info) of a system can be known using the [Profiling and Debug Panel](performance-debug-panel.md) -- **Manual**: You set the bounds directly in the Initialize Context. You can calculate the bounds dynamically using Operators and send the output to the Initialize Context's **Bounds** input ports. +Each System in a visual effect defines its bounds in the [Initialize Context](Context-Initialize.md). By default, Recorded mode is enabled and default values are used to set bounds, waiting to be overwritten with a recording, but you can change this behavior and use other methods to define the bounds. The Initialize Context's **Bounds Setting Mode** property controls the method the visual effect uses. The bounds calculation methods are: + +- **Manual**: You set the bounds directly in the Initialize Context. You can calculate the bounds dynamically using Operators and send the output to the Initialize Context's **Bounds** input ports. 
Bounds are compatible with [AABox type](Type-AABox.md) Operators or Properties. - **Recorded**: Allows you to record the System from the VFX Control panel. For information on how to do this, see [Bounds Recording](#bounds-recording). In this mode, you can also calculate the bounds using Operators and pass them to the Initialize Context, like in **Manual**. This overrides any recorded bounds. -- **Automatic**: Unity calculates the bounds automatically. Note: This will force the culling flags of the VFX asset to "Always recompute bounds and simulate". +- **Automatic**: Unity calculates the bounds automatically. Note: This will force the culling flags of the VFX asset to "Always recompute bounds and simulate". Automatic bounds calculations can have a negative impact on performance and should be avoided when possible. + +![](Images/Bounds-Init.png) + +The Initialize Context also contains a **Bounds Padding** input port. This is a Vector3 that enlarges the per-axis bounds of the System. If a System uses **Automatic** bounds or a recording is in progress in **Recorded** mode, Unity calculates the bounds of the System during the Update Context. This means that any changes to the size, position, scale, or pivot of particles that occur in the Output Context don't affect the bounds during that frame. Adding padding to the bounds helps to mitigate this effect. -The Initialize Context also contains a **Bounds Padding** input port. This is a Vector3 that enlarges the per-axis bounds of the System. If a System uses **Recorded** or **Automatic** bounds, Unity calculates the bounds of the System during the Update Context. This means that any changes to the size, position, scale, or pivot of particles that occur in the Output Context don't affect the bounds during that frame. Adding padding to the bounds helps to mitigate this effect. 
## Bounds recording -The [Target Visual Effect GameObject panel](VisualEffectGraphWindow.md#target-visual-effect-gameobject) in the Visual Effect Graph window includes the **Bounds Recording** section which helps you set the bounds of your Systems. If you set a System's **Bounds Setting Mode** to **Recorded**, the tool calculates the bounds of the System as the visual effect plays. +The [VFX Control panel](VisualEffectGraphWindow.md#vfx-control) in the Visual Effect Graph window includes the **Bounds Recording** section which helps you set the bounds of your Systems. If you set a System's **Bounds Setting Mode** to **Recorded**, the tool calculates the bounds of the System as the visual effect plays. + +You can click on the icon at the top right corner of the [Toolbar](VisualEffectGraphWindow.md#toolbar) to open the VFX Control panel. + +![](Images/ControlPanelIcon.png) + +Then, you will need to have an [attached VFX from the scene](GettingStarted.md#attaching-a-visual-effect-from-the-scene-to-the-current-graph) to properly use VFX Control panel's functionalities. Once done, you can click on the red recording button to start recording your bounds to match your particles' behavior. ![](Images/Bounds-Not-Recording.png) @@ -30,4 +41,4 @@ You can visualize the bounds that the recorder is saving. When the recorder is a > A visual effect and a preview of the bounds Unity is recording. -While recording, you can **Pause**, **Play**, **Restart**, or event change the **Play Rate**. This enables you to speed up the recording or simulate various spawn positions. When you are happy with the calculated bounds, click **Apply Bounds** to apply the recorded bounds to the System. \ No newline at end of file +While recording, you can **Pause**, **Play**, **Restart**, or even change the **Play Rate**. This enables you to speed up the recording or simulate various spawn positions. 
When you are happy with the calculated bounds, click **Apply Bounds** to apply the recorded bounds to the System. Applying recorded bounds can be done during or after the recording. To end the recording, click a second time on the recording button. diff --git a/Packages/com.unity.visualeffectgraph/Editor/Compiler/VFXCodeGenerator.cs b/Packages/com.unity.visualeffectgraph/Editor/Compiler/VFXCodeGenerator.cs index b9f131ec1f9..ec4c91fdc1c 100644 --- a/Packages/com.unity.visualeffectgraph/Editor/Compiler/VFXCodeGenerator.cs +++ b/Packages/com.unity.visualeffectgraph/Editor/Compiler/VFXCodeGenerator.cs @@ -1070,7 +1070,7 @@ internal static IEnumerable GetInstancingAdditionalDefines(VFXContext co } else { - if (particleData.IsAttributeStored(VFXAttribute.Alive)) + if (particleData.IsAttributeStored(VFXAttribute.Alive) || particleData.hasStrip) { yield return "#define VFX_INSTANCING_FIXED_SIZE " + Math.Max(particleData.alignedCapacity, nbThreadsPerGroup); } diff --git a/Packages/com.unity.visualeffectgraph/Editor/Core/VFXLibrary.cs b/Packages/com.unity.visualeffectgraph/Editor/Core/VFXLibrary.cs index 4489c39b945..c5148cbba60 100644 --- a/Packages/com.unity.visualeffectgraph/Editor/Core/VFXLibrary.cs +++ b/Packages/com.unity.visualeffectgraph/Editor/Core/VFXLibrary.cs @@ -207,6 +207,9 @@ public static VFXTypeAttribute GetAttributeFromSlotType(Type type) public static VFXModelDescriptor GetSlot(System.Type type) { + if (type == null) + return null; + LoadSlotsIfNeeded(); VFXModelDescriptor desc; m_SlotDescs.TryGetValue(type, out desc); diff --git a/Packages/com.unity.visualeffectgraph/Editor/Core/VFXSerializer.cs b/Packages/com.unity.visualeffectgraph/Editor/Core/VFXSerializer.cs index 07931929798..36eca60d439 100644 --- a/Packages/com.unity.visualeffectgraph/Editor/Core/VFXSerializer.cs +++ b/Packages/com.unity.visualeffectgraph/Editor/Core/VFXSerializer.cs @@ -49,7 +49,12 @@ public virtual void OnAfterDeserialize() public static Type GetType(string name) { - return 
Type.GetType(name); + var type = Type.GetType(name); + if (type == null + && !string.IsNullOrEmpty(name) + && !name.Contains("Unity.VisualEffectGraph.Runtime,", StringComparison.InvariantCulture)) //Don't log error from actual VFX package like IncrementStripIndexOnStart sanitization is handled automatically for those + Debug.LogErrorFormat("Unable to find type: {0}", name); + return type; } public override bool Equals(object obj) diff --git a/Packages/com.unity.visualeffectgraph/Editor/Data/VFXDataParticle.cs b/Packages/com.unity.visualeffectgraph/Editor/Data/VFXDataParticle.cs index acdf333c577..917f9d5da45 100644 --- a/Packages/com.unity.visualeffectgraph/Editor/Data/VFXDataParticle.cs +++ b/Packages/com.unity.visualeffectgraph/Editor/Data/VFXDataParticle.cs @@ -756,6 +756,22 @@ int GetGlobalSortingCriterionAndVoteCount(out SortingCriterion globalSortCriteri return voteResult.Value; } + private bool NeedsStripData(VFXContext context) + { + bool needsStripData = false; + + if (context.ownedType == VFXDataType.ParticleStrip) + { + needsStripData = true; + } + else if (context is VFXAbstractParticleOutput output) + { + needsStripData = output.HasStripsData(); + } + + return needsStripData; + } + public override void FillDescs( IVFXErrorReporter reporter, VFXCompilationMode compilationMode, @@ -1223,7 +1239,7 @@ int GetBufferIndex(VFXTask task, string baseName) if (attributeSourceBufferIndex != -1 && context.contextType == VFXContextType.Init) bufferMappings.Add(new VFXMapping("sourceAttributeBuffer", attributeSourceBufferIndex)); - if (stripDataIndex != -1 && context.ownedType == VFXDataType.ParticleStrip) + if (stripDataIndex != -1 && NeedsStripData(context)) bufferMappings.Add(new VFXMapping("stripDataBuffer", stripDataIndex)); if (sharedAabbBufferIndex != -1 && (context.contextType == VFXContextType.Update || diff --git a/Packages/com.unity.visualeffectgraph/Editor/Gizmo/VFXGizmoUtility.cs b/Packages/com.unity.visualeffectgraph/Editor/Gizmo/VFXGizmoUtility.cs 
index 7e73312303b..4f3bfb4b8c6 100644 --- a/Packages/com.unity.visualeffectgraph/Editor/Gizmo/VFXGizmoUtility.cs +++ b/Packages/com.unity.visualeffectgraph/Editor/Gizmo/VFXGizmoUtility.cs @@ -125,23 +125,10 @@ static VFXGizmoUtility() public static bool HasGizmo(Type type) { - return s_DrawFunctions.ContainsKey(type); - } + if (type == null) + return false; - static Type GetGizmoType(Type type) - { - if (type.IsAbstract) - return null; - Type baseType = type.BaseType; - while (baseType != null) - { - if (baseType.IsGenericType && !baseType.IsGenericTypeDefinition && baseType.GetGenericTypeDefinition() == typeof(VFXGizmo<>)) - { - return baseType.GetGenericArguments()[0]; - } - baseType = baseType.BaseType; - } - return null; + return s_DrawFunctions.ContainsKey(type); } public static VFXGizmo CreateGizmoInstance(Context context) diff --git a/Packages/com.unity.visualeffectgraph/Editor/GraphView/Elements/VFXStickyNote.cs b/Packages/com.unity.visualeffectgraph/Editor/GraphView/Elements/VFXStickyNote.cs index 13d9d846fc3..ae0c893d58c 100644 --- a/Packages/com.unity.visualeffectgraph/Editor/GraphView/Elements/VFXStickyNote.cs +++ b/Packages/com.unity.visualeffectgraph/Editor/GraphView/Elements/VFXStickyNote.cs @@ -88,7 +88,7 @@ public VFXStickyNoteController controller VFXStickyNoteController m_Controller; public VFXStickyNote() : base(Vector2.zero) { - styleSheets.Add(Resources.Load("StickyNote")); + styleSheets.Add(VFXView.LoadStyleSheet("VFXStickynote")); this.RegisterCallback(OnUIChange); } diff --git a/Packages/com.unity.visualeffectgraph/Editor/Models/Blocks/Implementations/HLSL/HLSLParser.cs b/Packages/com.unity.visualeffectgraph/Editor/Models/Blocks/Implementations/HLSL/HLSLParser.cs index 109db3e9dc8..fbb3c049618 100644 --- a/Packages/com.unity.visualeffectgraph/Editor/Models/Blocks/Implementations/HLSL/HLSLParser.cs +++ b/Packages/com.unity.visualeffectgraph/Editor/Models/Blocks/Implementations/HLSL/HLSLParser.cs @@ -176,7 +176,7 @@ public 
HLSLMissingIncludeFile(string filePath) class HLSLFunctionParameter { // Match inout/in/out accessor then any whitespace then the parameter type then optionally a template type any whitespace and then the parameter name - static readonly Regex s_ParametersParser = new Regex(@"(?(inout|in|out)\b)?\s*(?\w+)(?:[<](?