diff --git a/40_PathTracer/app_resources/pathtrace/beauty.hlsl b/40_PathTracer/app_resources/pathtrace/beauty.hlsl index a40d9db3a..75f6ac45e 100644 --- a/40_PathTracer/app_resources/pathtrace/beauty.hlsl +++ b/40_PathTracer/app_resources/pathtrace/beauty.hlsl @@ -32,7 +32,7 @@ struct CCascades inline uint16_t3 __getCoord(const uint16_t cascadeIx) { uint16_t3 coord = _static_cast(spirv::LaunchIdKHR); - coord.z = coord.z*uint16_t(6)+cascadeIx; + coord.z = coord.z+cascadeIx*uint16_t(spirv::LaunchSizeKHR.z); return coord; } @@ -249,7 +249,7 @@ void raygen() // fetch random variable from memory const float32_t3 randVec = randgen(0u,sampleIndex); // TODO: motion blur and lens DOF triplet - + // get our NDC coordinates and ray const float32_t2 pixelSizeNDC = promote(2.f)/float32_t2(spirv::LaunchSizeKHR.xy); const float32_t2 NDC = float32_t2(launchID.xy)*pixelSizeNDC - promote(1.f); diff --git a/40_PathTracer/app_resources/pathtrace/common.hlsl b/40_PathTracer/app_resources/pathtrace/common.hlsl index 471b67d00..19128a611 100644 --- a/40_PathTracer/app_resources/pathtrace/common.hlsl +++ b/40_PathTracer/app_resources/pathtrace/common.hlsl @@ -285,13 +285,43 @@ SPrimaryRay genPrimaryRay(const SSensorDynamics sensor, const float32_t2 pixelSi retval.ray.origin = hlsl::transpose(sensor.invView)[3]; float32_t3 viewDir; if (spirv::LaunchSizeKHR.z != 6u) + { viewDir = float32_t3(hlsl::mul(sensor.ndcToRay, adjNDC), -1.0); + viewDir = hlsl::normalize(viewDir); + retval.tMin = sensor.nearClip / hlsl::abs(viewDir.z); + } else { - // TODO: handle cubemap cameras + // TODO: handle cubemap cameras + const uint16_t faceID = spirv::LaunchIdKHR.z; + const float32_t2 coord = adjNDC.xy; + switch(faceID) + { + case 0: // +X + viewDir = float32_t3(1.0, -coord.y, -coord.x); + break; + case 1: // -X + viewDir = float32_t3(-1.0, -coord.y, coord.x); + break; + case 2: // +Y + viewDir = float32_t3(coord.x, 1.0, coord.y); + break; + case 3: // -Y + viewDir = float32_t3(coord.x, -1.0, -coord.y); + 
break; + case 4: // +Z + viewDir = float32_t3(coord.x, -coord.y, 1.0); + break; + case 5: // -Z + viewDir = float32_t3(-coord.x, -coord.y, -1.0); + break; + default: + viewDir = float32_t3(0,0,0); + } + viewDir = hlsl::normalize(viewDir); + const float32_t maxComponent = hlsl::max(hlsl::abs(viewDir.x), hlsl::max(hlsl::abs(viewDir.y), hlsl::abs(viewDir.z))); + retval.tMin = sensor.nearClip / maxComponent; } - viewDir = hlsl::normalize(viewDir); - retval.tMin = sensor.nearClip / hlsl::abs(viewDir.z); retval.ray.direction.setDirection(viewDir); } // rotate and scale with camera @@ -410,7 +440,7 @@ struct SArbitraryOutputValues // To follow through 2 or more reflections we'd need to multiply these 3x3 matrices together along the ray like so // `(I - 2 n_0 n_0^T) (I - 2 n_1 n_1^T) = I + 4 n_0 (n_0^T n_1) n_1^T - 2 (n_0 n_0^T + n_1 n_1^T)` // Theoretically because every series of reflections is just one reflection and a rotation, it could be possible to store this in 3 floats, due to the properties of SO(3) - // "The orthogonal group, consisting of all proper and improper rotations, is generated by reflections. Every proper rotation is the composition of two reflections, a special case of the Cartan–Dieudonné theorem." + // "The orthogonal group, consisting of all proper and improper rotations, is generated by reflections. Every proper rotation is the composition of two reflections, a special case of the Cartan–Dieudonné theorem." // I'm not sure how we could extend that for refractions but probably a similar form is possible - virtual object corresponding under transmission to what's seen under refraction. // The question is.. is it worth it? Do er really need objects warped by in a labyrynth of wonky mirrors to have warped normals? Or a ceiling reflected in a choppy swimming pool to inherit the pool's wave normals ? // NO because this is an input to a denoiser to stop it blurring lighting across surfaces oriented in different directions!
Doesn't matter what the reflection and refraction normals are as long as they're consistent. diff --git a/40_PathTracer/app_resources/present/default.hlsl b/40_PathTracer/app_resources/present/default.hlsl index dc857fb2d..c0d0c7684 100644 --- a/40_PathTracer/app_resources/present/default.hlsl +++ b/40_PathTracer/app_resources/present/default.hlsl @@ -22,11 +22,49 @@ float32_t4 present_default(SVertexAttributes vxAttr) : SV_Target0 float32_t3 uv; if (pc.isCubemap) { - const float32_t4 ndc = float32_t4(vxAttr.uv*2.f-float32_t2(1,1),1.f,1.f); - float32_t4 tmp = mul(pc.cubemap().invProjView,ndc); - float32_t3 dir = tmp.xyz/tmp.www; - // TODO: convert dir to cubemap face, and the UV coord - tint = float32_t3(1,0,1); // right now go magenta error colour + // TODO: currently unused + // const float32_t4 ndc = float32_t4(vxAttr.uv*2.f-float32_t2(1,1),1.f,1.f); + // float32_t4 tmp = mul(pc.cubemap().invProjView,ndc); + // float32_t3 dir = tmp.xyz/tmp.www; + + const uint32_t x = uint32_t(floor(vxAttr.uv.x * 4.0)); + const uint32_t y = uint32_t(floor(vxAttr.uv.y * 3.0)); + const float32_t one_third = 1.0/3.0; + if (y == 1) + { + float32_t2 coord = float32_t2(vxAttr.uv.x * 4.0, (vxAttr.uv.y - one_third) * 3.0); + uv.xy = float32_t2(coord.x - float32_t(x) * 1.0, coord.y); + switch (x) // tile index + { + case 0: // -X + uv.z = 1.f; + break; + case 1: // +Z + uv.z = 4.f; + break; + case 2: // +X + uv.z = 0.f; + break; + case 3: // -Z + uv.z = 5.f; + break; + } + } + else if (x == 1) + { + uv.xy = float32_t2((vxAttr.uv.x - 0.25) * 4.0, (vxAttr.uv.y - float32_t(y) * one_third) * 3.0); + switch (y) + { + case 0: // +Y + uv.z = 2.f; + break; + case 2: // -Y + uv.z = 3.f; + break; + } + } + else + return float32_t4(0,0,0,1); } else { diff --git a/40_PathTracer/src/renderer/present/CWindowPresenter.cpp b/40_PathTracer/src/renderer/present/CWindowPresenter.cpp index 274f3ae35..3f108659c 100644 --- a/40_PathTracer/src/renderer/present/CWindowPresenter.cpp +++ 
b/40_PathTracer/src/renderer/present/CWindowPresenter.cpp @@ -4,6 +4,8 @@ #include "renderer/present/CWindowPresenter.h" #include "renderer/shaders/session.hlsl" +#include "nbl/builtin/hlsl/math/thin_lens_projection.hlsl" + namespace nbl::this_example { using namespace nbl::core; @@ -193,8 +195,12 @@ auto CWindowPresenter::acquire_impl(const CSession* session, ISemaphore::SWaitIn uint16_t2 targetResolution = m_pushConstants.isCubemap ? maxResolution:sessionParams.uniforms.renderSize; if (m_pushConstants.isCubemap) { - // TODO: build default perspective projection matrix given aspect ratio and smaller axis (or diagonal) FOV of the viewer -// m_pushConstants.cubemap.invProjView = ; + const auto invView = math::linalg::promote_affine<4,4>(sessionParams.initDynamics.invView); + // TODO: consider handedness, right now right hand + const auto originalAspectRatio = float32_t(targetResolution.x) / float32_t(targetResolution.y); + const auto proj = buildProjectionMatrixPerspectiveFovRH(numbers::pi * 0.5f, originalAspectRatio, sessionParams.initDynamics.nearClip, sessionParams.initDynamics.tMax); + const auto invProj = hlsl::inverse(proj); + m_pushConstants.cubemap.invProjView = hlsl::mul(invView, invProj); } else {