Skip to content
This repository was archived by the owner on Nov 30, 2020. It is now read-only.

Commit 85478d6

Browse files
committed
WIP for TAA + stereo
* Shaders are mostly done (clamping needs to be amended, and maybe usage of _ST float4s) * OnPreRender used for right eye in multi pass * Setting up jittered matrices for stereo proj * detecting stereo and enabling Single-pass path What remains is configuring the history textures correctly for TAA.
1 parent 063561d commit 85478d6

6 files changed

Lines changed: 307 additions & 18 deletions

File tree

PostProcessing/Runtime/Effects/TemporalAntialiasing.cs

Lines changed: 35 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -41,15 +41,18 @@ enum Pass
4141

4242
// Ping-pong between two history textures as we can't read & write the same
// target in the same pass.
// TODO(stereo): history needs to become per-eye (left/right). One option is a
// jagged array, e.g. `readonly RenderTexture[][] m_HistoryTextures = new RenderTexture[2][];`
// with each eye getting its own ping-pong pair — confirm layout before enabling.
readonly RenderTexture[] m_HistoryTextures = new RenderTexture[2];

// Index of the history slot to write this frame; flips between 0 and 1.
int m_HistoryPingPong;
4649

4750
// Returns true when the current device can run temporal anti-aliasing.
// TAA needs at least two simultaneous render targets and motion-vector
// support; OpenGL ES 2.0 is excluded explicitly as it provides neither
// reliably.
// NOTE(review): the former single-pass-stereo exclusion was disabled as part
// of the stereo WIP; the commented-out guard is deleted here rather than kept
// as dead code. Restore `!RuntimeUtilities.isSinglePassStereoEnabled` if
// single-pass stereo regresses before the stereo path is finished.
public bool IsSupported()
{
    return SystemInfo.supportedRenderTargetCount >= 2
        && SystemInfo.supportsMotionVectors
        && SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLES2;
}
5457

5558
internal DepthTextureMode GetCameraFlags()
@@ -96,6 +99,36 @@ public Matrix4x4 GetJitteredProjectionMatrix(Camera camera)
9699
return cameraProj;
97100
}
98101

102+
// Applies a jittered projection matrix to the context camera for the mono
// (non-VR) rendering path. The camera's current projection is saved as the
// non-jittered matrix first, and transparents are explicitly kept on the
// un-jittered transform.
public void ConfiguredJitteredProjectionMatrix(PostProcessRenderContext context)
{
    var cam = context.camera;

    // Preserve the unmodified projection before jittering it.
    cam.nonJitteredProjectionMatrix = cam.projectionMatrix;
    cam.projectionMatrix = GetJitteredProjectionMatrix(cam);
    cam.useJitteredProjectionMatrixForTransparentRendering = false;
}
109+
110+
// Applies jittered projection matrices to both stereo eyes of the context
// camera. A single pixel-space jitter offset is generated and shared by the
// two eyes this frame; afterwards the stored jitter is rescaled into
// per-eye UV space (presumably consumed later by the TAA shader, matching
// the mono path — TODO confirm).
public void ConfiguredStereoJitteredProjectionMatrices(PostProcessRenderContext context)
{
    var cam = context.camera;

    // Pixel-space jitter for this frame, shared across eyes.
    jitter = GenerateRandomOffset() * jitterSpread;

    for (var eye = Camera.StereoscopicEye.Left; eye <= Camera.StereoscopicEye.Right; eye++)
    {
        // Snapshot the device-generated projection as the non-jittered
        // reference for this eye.
        cam.CopyStereoDeviceProjectionMatrixToNonJittered(eye);
        var deviceProj = cam.GetStereoNonJitteredProjectionMatrix(eye);

        // Currently no support for a custom jitter func here: VR devices
        // would need to provide the original projection matrix as input
        // along with the jitter.
        cam.SetStereoProjectionMatrix(
            eye,
            RuntimeUtilities.GenerateJitteredProjectionMatrixFromOriginal(context, deviceProj, jitter));
    }

    // Rescale the stored jitter from pixels to per-eye UV space.
    jitter = new Vector2(jitter.x / context.singleEyeWidth, jitter.y / context.height);
    cam.useJitteredProjectionMatrixForTransparentRendering = false;
}
131+
99132
RenderTexture CheckHistory(int id, PostProcessRenderContext context)
100133
{
101134
var rt = m_HistoryTextures[id];

PostProcessing/Runtime/PostProcessLayer.cs

Lines changed: 150 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -257,6 +257,8 @@ void OnPreCull()
257257
// is switched off and the FOV or any other camera property changes.
258258
m_Camera.ResetProjectionMatrix();
259259
m_Camera.nonJitteredProjectionMatrix = m_Camera.projectionMatrix;
260+
if (XR.XRSettings.isDeviceActive)
261+
m_Camera.ResetStereoProjectionMatrices();
260262

261263
context.Reset();
262264
context.camera = m_Camera;
@@ -362,15 +364,142 @@ void OnPreCull()
362364
m_LegacyCmdBuffer.ReleaseTemporaryRT(tempRt);
363365
}
364366

367+
// Rebuilds the legacy command buffers for the RIGHT eye in multi-pass stereo.
// NOTE(review): this mirrors the OnPreCull setup used for mono/left-eye
// rendering — the left eye's buffers are assumed to have been populated
// already (see the TODO below); verify against the multi-pass frame order.
void OnPreRender()
{
    // Unused in scriptable render pipelines
    // we only execute this for right eye
    if (RuntimeUtilities.scriptableRenderPipelineActive ||
        (m_Camera.stereoActiveEye != Camera.MonoOrStereoscopicEye.Right))
        return;

    var context = m_CurrentContext;
    var sourceFormat = m_Camera.allowHDR ? RenderTextureFormat.DefaultHDR : RenderTextureFormat.Default;

    // Resets the projection matrix from previous frame in case TAA was enabled.
    // We also need to force reset the non-jittered projection matrix here as it's not done
    // when ResetProjectionMatrix() is called and will break transparent rendering if TAA
    // is switched off and the FOV or any other camera property changes.
    // NOTE(review): intentionally disabled for the right-eye pass — resetting
    // here would clobber the stereo matrices set up during OnPreCull.
    //m_Camera.ResetProjectionMatrix();
    //m_Camera.nonJitteredProjectionMatrix = m_Camera.projectionMatrix;

    context.Reset();
    context.camera = m_Camera;
    context.sourceFormat = sourceFormat;

    // TODO: I assume these are all executed for left eye before coming here
    m_LegacyCmdBufferBeforeReflections.Clear();
    m_LegacyCmdBufferOpaque.Clear();
    m_LegacyCmdBuffer.Clear();

    SetupContext(context);

    // Lighting & opaque-only effects: count how many opaque-only passes will
    // run so source/destination ping-ponging can target the camera RT on the
    // final pass.
    int opaqueOnlyEffects = 0;
    bool hasCustomOpaqueOnlyEffects = HasOpaqueOnlyEffects(context);
    bool isAmbientOcclusionDeferred = ambientOcclusion.IsEnabledAndSupported(context) && ambientOcclusion.IsAmbientOnly(context);
    bool isAmbientOcclusionOpaque = ambientOcclusion.IsEnabledAndSupported(context) && !ambientOcclusion.IsAmbientOnly(context);
    bool isFogActive = fog.IsEnabledAndSupported(context);

    // Ambient-only AO is done in a separate command buffer, before reflections
    if (isAmbientOcclusionDeferred)
    {
        context.command = m_LegacyCmdBufferBeforeReflections;
        ambientOcclusion.RenderAmbientOnly(context);
    }
    else if (isAmbientOcclusionOpaque)
    {
        opaqueOnlyEffects++;
    }

    opaqueOnlyEffects += isFogActive ? 1 : 0;
    opaqueOnlyEffects += hasCustomOpaqueOnlyEffects ? 1 : 0;

    var cameraTarget = new RenderTargetIdentifier(BuiltinRenderTextureType.CameraTarget);

    if (opaqueOnlyEffects > 0)
    {
        var cmd = m_LegacyCmdBufferOpaque;
        context.command = cmd;

        // We need to use the internal Blit method to copy the camera target or it'll fail
        // on tiled GPU as it won't be able to resolve
        int tempTarget0 = m_TargetPool.Get();
        cmd.GetTemporaryRT(tempTarget0, context.width, context.height, 24, FilterMode.Bilinear, sourceFormat);
        cmd.Blit(cameraTarget, tempTarget0);
        context.source = tempTarget0;

        // Second temporary only needed when more than one opaque effect must
        // ping-pong between targets.
        int tempTarget1 = -1;

        if (opaqueOnlyEffects > 1)
        {
            tempTarget1 = m_TargetPool.Get();
            cmd.GetTemporaryRT(tempTarget1, context.width, context.height, 24, FilterMode.Bilinear, sourceFormat);
            context.destination = tempTarget1;
        }
        else
        {
            // Single effect renders straight back to the camera target.
            context.destination = cameraTarget;
        }

        if (isAmbientOcclusionOpaque)
        {
            ambientOcclusion.RenderAfterOpaque(context);
            opaqueOnlyEffects--;
            // Swap source/destination; the last remaining effect writes to
            // the camera target.
            var prevSource = context.source;
            context.source = context.destination;
            context.destination = opaqueOnlyEffects == 1 ? cameraTarget : prevSource;
        }

        // TODO: Insert SSR here

        if (isFogActive)
        {
            fog.Render(context);
            opaqueOnlyEffects--;
            var prevSource = context.source;
            context.source = context.destination;
            context.destination = opaqueOnlyEffects == 1 ? cameraTarget : prevSource;
        }

        if (hasCustomOpaqueOnlyEffects)
        {
            RenderOpaqueOnly(context);
        }

        if (opaqueOnlyEffects > 1)
            cmd.ReleaseTemporaryRT(tempTarget1);

        cmd.ReleaseTemporaryRT(tempTarget0);
    }

    // Post-transparency stack
    // Same as before, first blit needs to use the builtin Blit command to properly handle
    // tiled GPUs
    int tempRt = m_TargetPool.Get();
    m_LegacyCmdBuffer.GetTemporaryRT(tempRt, context.width, context.height, 24, FilterMode.Bilinear, sourceFormat);
    m_LegacyCmdBuffer.Blit(cameraTarget, tempRt, RuntimeUtilities.copyMaterial, stopNaNPropagation ? 3 : 2);
    m_NaNKilled = stopNaNPropagation;

    context.command = m_LegacyCmdBuffer;
    context.source = tempRt;
    context.destination = cameraTarget;
    Render(context);
    m_LegacyCmdBuffer.ReleaseTemporaryRT(tempRt);
}
489+
365490
// Restores the camera's projection matrices after rendering, undoing the
// jitter that TAA applied during OnPreCull/Render.
void OnPostRender()
{
    // Not used when a scriptable render pipeline drives rendering.
    if (RuntimeUtilities.scriptableRenderPipelineActive)
        return;

    // Only TAA touches the projection matrices, so there is nothing to
    // restore unless it was active this frame.
    if (!m_CurrentContext.IsTemporalAntialiasingActive())
        return;

    m_Camera.ResetProjectionMatrix();

    // In VR the per-eye stereo matrices were jittered as well.
    if (XR.XRSettings.isDeviceActive)
        m_Camera.ResetStereoProjectionMatrices();
}
374503

375504
PostProcessBundle GetBundle<T>()
376505
where T : PostProcessEffectSettings
@@ -544,10 +673,19 @@ public void Render(PostProcessRenderContext context)
544673
{
545674
if (!RuntimeUtilities.scriptableRenderPipelineActive)
546675
{
547-
var camera = context.camera;
548-
camera.nonJitteredProjectionMatrix = camera.projectionMatrix;
549-
camera.projectionMatrix = temporalAntialiasing.GetJitteredProjectionMatrix(camera);
550-
camera.useJitteredProjectionMatrixForTransparentRendering = false;
676+
if (XR.XRSettings.isDeviceActive)
677+
{
678+
// We only need to configure all of this once for stereo, during OnPreCull
679+
if (context.camera.stereoActiveEye != Camera.MonoOrStereoscopicEye.Right)
680+
temporalAntialiasing.ConfiguredStereoJitteredProjectionMatrices(context);
681+
}
682+
else
683+
temporalAntialiasing.ConfiguredJitteredProjectionMatrix(context);
684+
685+
//var camera = context.camera;
686+
//camera.nonJitteredProjectionMatrix = camera.projectionMatrix;
687+
//camera.projectionMatrix = temporalAntialiasing.GetJitteredProjectionMatrix(camera);
688+
//camera.useJitteredProjectionMatrixForTransparentRendering = false;
551689
}
552690

553691
var taaTarget = m_TargetPool.Get();

PostProcessing/Runtime/PostProcessRenderContext.cs

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -26,11 +26,20 @@ public Camera camera
2626
m_height = xrDesc.height;
2727
// we should create eye-specific params
2828
// in order to support knowing the size of each eye
29+
30+
if ((xrDesc.vrUsage == VRTextureUsage.TwoEyes) &&
31+
(xrDesc.dimension != TextureDimension.Tex2DArray))
32+
{
33+
m_singleEyeWidth = m_width / 2;
34+
}
35+
else
36+
m_singleEyeWidth = m_width;
2937
}
3038
else
3139
{
3240
m_width = m_camera.pixelWidth;
3341
m_height = m_camera.pixelHeight;
42+
m_singleEyeWidth = m_width;
3443
}
3544
}
3645
}
@@ -81,6 +90,12 @@ public int height
8190
get { return m_height; }
8291
}
8392

93+
private int m_singleEyeWidth;
94+
public int singleEyeWidth
95+
{
96+
get { return m_singleEyeWidth; }
97+
}
98+
8499
// Are we currently rendering in the scene view?
85100
public bool isSceneView { get; internal set; }
86101

@@ -96,6 +111,7 @@ public void Reset()
96111
m_camera = null;
97112
m_width = 0;
98113
m_height = 0;
114+
m_singleEyeWidth = 0;
99115

100116
command = null;
101117
source = 0;

PostProcessing/Runtime/Utils/RuntimeUtilities.cs

Lines changed: 69 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -364,6 +364,75 @@ public static Matrix4x4 GetJitteredOrthographicProjectionMatrix(Camera camera, V
364364
return Matrix4x4.Ortho(left, right, bottom, top, camera.nearClipPlane, camera.farClipPlane);
365365
}
366366

367+
// We can represent a projection matrix by using the tangents of the frustum half angles.
368+
// The 'traditional' representation of the values in a projection matrix for
369+
// left, right, top and bottom are that they represent values on the near clip plane.
370+
// If we take the matrix element (0,0) as an example, it would be equal to
371+
// (2 * clipNearPlane) / (right - left). We can divide the term by clipNearPlane to get
372+
// 2 / ((right - left) / clipNearPlane), which gives us
373+
// 2 / (right/clipNearPlane - left/clipNearPlane). And we can substitue
374+
// tan(rightHalfAngle) = (right/clipNearPlane) and tan(leftHalfAngle) = (left/clipNearPlane).
375+
// Our new term for (0,0) is 2/(rTan - lTan).
376+
//
377+
// Since we have the calculated value for (0,0), we can use that to solve for (rTan - lTan).
378+
// (rTan - lTan) = 2 / proj(0,0)
379+
// We can also get the value for (rTan + lTan), as it is the numerator for term (0,2).
380+
// We have the denominator, so (rTan + lTan) = (rTan - lTan) * proj(0,2) = 2 * proj(0,2) / proj (0,0)
381+
// We can add (rTan + lTan) and (rTan - lTan) to get 2 * rTan, which can gives us rTan.
382+
// rTan = ((2 / proj(0,0)) + (2 * proj(0,2) / proj(0,0))) / 2 =>
383+
// rTan = (1 + proj(0,2)) / proj(0,0)
384+
//
385+
// We can derive lTan via proj(0,0) = 2 / (rTan - lTan), which gives us
386+
// lTan = rTan - 2/proj(0,0)
387+
// If we substitute our derivation for rTan in here, we get the conveniently symmetric:
388+
// lTan = ((1 + proj(0,2)) / proj(0,0)) - 2 / proj(0,0) =>
389+
// lTan = (-1 + proj(0,2)) / proj(0,0)
390+
//
391+
// We can repeat these calculations for the top and bottom tangents as well.
392+
public static Matrix4x4 GenerateJitteredProjectionMatrixFromOriginal(PostProcessRenderContext context, Matrix4x4 origProj, Vector2 jitter)
393+
{
394+
var rTan = (1.0f + origProj[0, 2]) / origProj[0, 0];
395+
var lTan = (-1.0f + origProj[0, 2]) / origProj[0, 0];
396+
397+
var tTan = (1.0f + origProj[1, 2]) / origProj[1, 1];
398+
var bTan = (-1.0f + origProj[1, 2]) / origProj[1, 1];
399+
400+
float tanVertFov = Math.Abs(tTan) + Math.Abs(bTan);
401+
float tanHorizFov = Math.Abs(lTan) + Math.Abs(rTan);
402+
403+
jitter.x *= tanHorizFov / context.singleEyeWidth;
404+
jitter.y *= tanVertFov / context.height;
405+
406+
float left = jitter.x + lTan;
407+
float right = jitter.x + rTan;
408+
float top = jitter.y + tTan;
409+
float bottom = jitter.y + bTan;
410+
411+
var jitteredMatrix = new Matrix4x4();
412+
413+
jitteredMatrix[0, 0] = 2f / (right - left);
414+
jitteredMatrix[0, 1] = 0f;
415+
jitteredMatrix[0, 2] = (right + left) / (right - left);
416+
jitteredMatrix[0, 3] = 0f;
417+
418+
jitteredMatrix[1, 0] = 0f;
419+
jitteredMatrix[1, 1] = 2f / (top - bottom);
420+
jitteredMatrix[1, 2] = (top + bottom) / (top - bottom);
421+
jitteredMatrix[1, 3] = 0f;
422+
423+
jitteredMatrix[2, 0] = 0f;
424+
jitteredMatrix[2, 1] = 0f;
425+
jitteredMatrix[2, 2] = origProj[2, 2];
426+
jitteredMatrix[2, 3] = origProj[2, 3];
427+
428+
jitteredMatrix[3, 0] = 0f;
429+
jitteredMatrix[3, 1] = 0f;
430+
jitteredMatrix[3, 2] = -1f;
431+
jitteredMatrix[3, 3] = 0f;
432+
433+
return jitteredMatrix;
434+
}
435+
367436
#endregion
368437

369438
#region Reflection

0 commit comments

Comments
 (0)