// Headset.cpp — Vulkan Schnee 0.0.1, high-performance rendering engine.
// NOTE: this listing was recovered from generated (Doxygen) documentation;
// website navigation chrome has been removed from this header.
2
3#include <cmath>
9#include <array>
10#include <chrono>
11#include <future>
12#include <glm/gtc/matrix_transform.hpp>
13#include <glm/gtx/string_cast.hpp>
14#include <plog/Log.h>
15#include <vulkan/vulkan_core.h>
16
18
namespace
{
// Tracking origin: STAGE = floor-level, room-scale reference space.
constexpr XrReferenceSpaceType spaceType = XR_REFERENCE_SPACE_TYPE_STAGE;
// Color format shared by the render targets and the OpenXR swapchain
// (sRGB; i.e. the RGBA8UnormSrgb equivalent).
constexpr VkFormat colorFormat = VK_FORMAT_R8G8B8A8_SRGB;
// 32-bit float depth, no stencil — depth is later sampled for Hi-Z.
constexpr VkFormat depthFormat = VK_FORMAT_D32_SFLOAT;
} // namespace
25
26namespace EngineCore
27{
{
    // Headset initialization: builds the multiview render pass, the OpenXR
    // session / reference space / swapchain, the shared MSAA color and depth
    // targets, and one framebuffer per swapchain image.
    // NOTE(review): the signature line is not visible in this listing —
    // presumably the Headset constructor taking `context`; confirm in source.
#ifdef ENABLE_TRACY
    // NOTE(review): the Tracy macro that belongs in this guard did not
    // survive the listing; the guard is empty as shown.
#endif
    // Create named thread pool for xrWaitFrame timeout handling (visible in Tracy as "XR Wait 1")
    xrWaitThreadPool_ = std::make_unique<NamedThreadPool>(1, "XR Wait");

    const VkDevice device = context->getVkDevice();
    const VkSampleCountFlagBits multisampleCount = context->getMultisampleCount();

    // create Render Pass
    {
        // Multiview: bits 0 and 1 set -> the single subpass broadcasts to
        // array layers 0 and 1 (left/right eye) in one pass.
        constexpr uint32_t viewMask = 0b00000011;
        constexpr uint32_t correlationMask = 0b00000011;

        VkRenderPassMultiviewCreateInfo renderPassMultiviewCreateInfo{};
        renderPassMultiviewCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_MULTIVIEW_CREATE_INFO;
        renderPassMultiviewCreateInfo.subpassCount = 1u;
        renderPassMultiviewCreateInfo.pViewMasks = &viewMask;
        renderPassMultiviewCreateInfo.correlationMaskCount = 1u;
        renderPassMultiviewCreateInfo.pCorrelationMasks = &correlationMask;

        // Attachment 0: multisampled color target (resolved into attachment 2).
        VkAttachmentDescription colorAttachmentDescription{};
        colorAttachmentDescription.format = colorFormat;
        colorAttachmentDescription.samples = multisampleCount;
        colorAttachmentDescription.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
        colorAttachmentDescription.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE; // Use DONT_CARE for multisampled attachments for lazy memory allocation
        colorAttachmentDescription.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        colorAttachmentDescription.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        colorAttachmentDescription.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
        colorAttachmentDescription.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        VkAttachmentReference colorAttachmentReference{};
        colorAttachmentReference.attachment = 0u;
        colorAttachmentReference.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        // Attachment 1: multisampled depth target, stored for later Hi-Z sampling.
        VkAttachmentDescription depthAttachmentDescription{};
        depthAttachmentDescription.format = depthFormat;
        depthAttachmentDescription.samples = multisampleCount;
        depthAttachmentDescription.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
        depthAttachmentDescription.storeOp = VK_ATTACHMENT_STORE_OP_STORE; // Store for Hi-Z generation
        depthAttachmentDescription.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        depthAttachmentDescription.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        // Use UNDEFINED since we CLEAR the depth buffer - contents are discarded anyway.
        // This avoids synchronization issues with shared depth buffer across frames.
        depthAttachmentDescription.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
        depthAttachmentDescription.finalLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL; // For Hi-Z sampling

        // NOTE(review): not value-initialized ({}) like its siblings, but both
        // members are assigned below, so this is benign.
        VkAttachmentReference depthAttachmentReference;
        depthAttachmentReference.attachment = 1u;
        depthAttachmentReference.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

        // Attachment 2: single-sample resolve target (the OpenXR swapchain image).
        VkAttachmentDescription resolveAttachmentDescription{};
        resolveAttachmentDescription.format = colorFormat;
        resolveAttachmentDescription.samples = VK_SAMPLE_COUNT_1_BIT;
        resolveAttachmentDescription.loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        resolveAttachmentDescription.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
        resolveAttachmentDescription.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
        resolveAttachmentDescription.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
        // Use UNDEFINED as initial layout since the pre-render barrier transitions
        // the swapchain image from UNDEFINED to COLOR_ATTACHMENT_OPTIMAL before the render pass.
        // This is safe because the MSAA resolve operation will completely overwrite the image.
        resolveAttachmentDescription.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
        resolveAttachmentDescription.finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        VkAttachmentReference resolveAttachmentReference{};
        resolveAttachmentReference.attachment = 2u;
        resolveAttachmentReference.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

        VkSubpassDescription subpassDescription{};
        subpassDescription.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
        subpassDescription.colorAttachmentCount = 1u;
        subpassDescription.pColorAttachments = &colorAttachmentReference;
        subpassDescription.pDepthStencilAttachment = &depthAttachmentReference;
        subpassDescription.pResolveAttachments = &resolveAttachmentReference;

        // Order must match the attachment indices used in the references above.
        const std::array attachments = {
            colorAttachmentDescription,
            depthAttachmentDescription,
            resolveAttachmentDescription
        };

        // Subpass dependencies for external synchronization
        // These ensure proper synchronization between render passes across frames
        std::array<VkSubpassDependency, 2> dependencies{};

        // External -> Subpass 0: Synchronize with previous frame's render pass
        // Wait for previous color/depth writes to complete before starting new render pass
        dependencies[0].srcSubpass = VK_SUBPASS_EXTERNAL;
        dependencies[0].dstSubpass = 0;
        dependencies[0].srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT |
                                       VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
                                       VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
        dependencies[0].dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT |
                                       VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
                                       VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
        dependencies[0].srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
                                        VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
        dependencies[0].dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
                                        VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
        dependencies[0].dependencyFlags = VK_DEPENDENCY_BY_REGION_BIT;

        // Subpass 0 -> External: Ensure render pass completes before external operations
        // This synchronizes with the MirrorView's blit operation and SteamVR's compositor copy operations
        dependencies[1].srcSubpass = 0;
        dependencies[1].dstSubpass = VK_SUBPASS_EXTERNAL;
        dependencies[1].srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT |
                                       VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
                                       VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
        // Use TRANSFER_BIT to cover both blit and copy operations (MirrorView blit + SteamVR compositor copy)
        dependencies[1].dstStageMask = VK_PIPELINE_STAGE_TRANSFER_BIT;
        dependencies[1].srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT |
                                        VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
        dependencies[1].dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
        // Remove BY_REGION_BIT as the transfer operations may not be region-local
        dependencies[1].dependencyFlags = 0;

        VkRenderPassCreateInfo renderPassCreateInfo{VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO};
        renderPassCreateInfo.pNext = &renderPassMultiviewCreateInfo;
        renderPassCreateInfo.attachmentCount = static_cast<uint32_t>(attachments.size());
        renderPassCreateInfo.pAttachments = attachments.data();
        renderPassCreateInfo.subpassCount = 1u;
        renderPassCreateInfo.pSubpasses = &subpassDescription;
        renderPassCreateInfo.dependencyCount = static_cast<uint32_t>(dependencies.size());
        renderPassCreateInfo.pDependencies = dependencies.data();
        PLOG_FN_VK(vkCreateRenderPass(context->getVkDevice(), &renderPassCreateInfo, nullptr, &vkRenderPass));
        VulkanHelper::setObjectName(context->getVkDevice(), vkRenderPass, "Headset render pass");

        PLOGI << "Renderpass created!";
    }

    const uint32_t vkDrawQueueFamilyIndex = context->getVkGraphicsQueueFamilyIndex();

    // Create the OpenXR session, bound to our Vulkan instance/device/queue.
    {
        XrGraphicsBindingVulkan2KHR graphicsBinding{XR_TYPE_GRAPHICS_BINDING_VULKAN2_KHR};
        graphicsBinding.device = device;
        graphicsBinding.instance = context->getVkInstance();
        graphicsBinding.physicalDevice = context->getVkPhysicalDevice();
        graphicsBinding.queueFamilyIndex = vkDrawQueueFamilyIndex;
        graphicsBinding.queueIndex = 0u;

        XrSessionCreateInfo xrSessionCreateInfo{XR_TYPE_SESSION_CREATE_INFO};
        xrSessionCreateInfo.next = &graphicsBinding;
        xrSessionCreateInfo.systemId = context->getXrSystemId();
        PLOG_THROW_FN_XR(xrCreateSession(context->getXrInstance(), &xrSessionCreateInfo, &xrSession));
    }

    // Create the reference space (STAGE, identity pose) used for tracking.
    {
        XrReferenceSpaceCreateInfo referenceSpaceCreateInfo{XR_TYPE_REFERENCE_SPACE_CREATE_INFO};
        referenceSpaceCreateInfo.referenceSpaceType = spaceType;
        referenceSpaceCreateInfo.poseInReferenceSpace = OpenXrHelper::makeIdentityPose();
        PLOG_THROW_FN_XR(xrCreateReferenceSpace(xrSession, &referenceSpaceCreateInfo, &xrReferenceSpace));
    }

    const XrViewConfigurationType xrViewType = context->getXrViewType();

    // Query the per-eye image properties (standard two-call enumeration).
    {
        // NOTE(review): the reinterpret_cast implies eyeCount is not declared
        // as uint32_t; if its type differs in size/representation this is
        // undefined behavior. Prefer a local uint32_t and assign — TODO confirm
        // eyeCount's declared type.
        PLOG_THROW_FN_XR(xrEnumerateViewConfigurationViews(context->getXrInstance(), context->getXrSystemId(),
                                                           xrViewType, 0u, reinterpret_cast<uint32_t*>(&eyeCount),
                                                           nullptr));

        eyeImageInfos.resize(eyeCount);
        for(XrViewConfigurationView& eyeInfo : eyeImageInfos)
        {
            eyeInfo.type = XR_TYPE_VIEW_CONFIGURATION_VIEW;
            eyeInfo.next = nullptr;
        }

        PLOG_THROW_FN_XR(xrEnumerateViewConfigurationViews(context->getXrInstance(), context->getXrSystemId(),
                                                           xrViewType, eyeCount, reinterpret_cast<uint32_t*>(&eyeCount),
                                                           eyeImageInfos.data()));
    }

    // Pre-type the per-eye view structs that are filled each frame.
    {
        eyePoses.resize(eyeCount);
        for(XrView& eyePose : eyePoses)
        {
            eyePose.type = XR_TYPE_VIEW;
            eyePose.next = nullptr;
        }
    }

    // Verify the runtime supports our chosen color format for swapchains.
    {
        uint32_t swapchainFormatCount = 0u;
        PLOG_FN_XR(xrEnumerateSwapchainFormats(xrSession, 0u, &swapchainFormatCount, nullptr));

        std::vector<int64_t> swapchainFormats(swapchainFormatCount);
        PLOG_FN_XR(xrEnumerateSwapchainFormats(xrSession, swapchainFormatCount, &swapchainFormatCount,
                                               swapchainFormats.data()));

        bool doesSwapchainFormatExist = false;
        for(const int64_t& format : swapchainFormats)
        {
            if(format == static_cast<int64_t>(colorFormat))
            {
                doesSwapchainFormatExist = true;
                break;
            }
        }

        if(!doesSwapchainFormatExist)
        {
            THROW_ERROR("Color swapchain format does not exist!");
        }

        PLOGI << "Color format is supported by eyes";
    }

    const VkExtent2D eyeResolution = getEyeResolution(0u);

    // Two array layers throughout: one per eye, rendered via multiview.
    uint32_t layerCount = 2u;
    // color
    {

        VkImageCreateInfo imageCreateInfo{VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO};
        imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
        imageCreateInfo.extent.width = eyeResolution.width;
        imageCreateInfo.extent.height = eyeResolution.height;
        imageCreateInfo.extent.depth = 1u;
        imageCreateInfo.mipLevels = 1u;
        imageCreateInfo.arrayLayers = static_cast<uint32_t>(layerCount);
        imageCreateInfo.format = colorFormat;
        imageCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
        imageCreateInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
        imageCreateInfo.samples = multisampleCount;
        imageCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
        PLOG_THROW_FN_VK(vkCreateImage(device, &imageCreateInfo, nullptr, &colorBuffer.image));
        VulkanHelper::setObjectName(context->getVkDevice(), colorBuffer.image, "color buffer image");

        // get memory
        VkImageMemoryRequirementsInfo2 memoryRequirementsInfo{};
        memoryRequirementsInfo.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
        memoryRequirementsInfo.image = colorBuffer.image;
        memoryRequirementsInfo.pNext = nullptr;

        VkMemoryRequirements2 memoryRequirements{VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};
        vkGetImageMemoryRequirements2(device, &memoryRequirementsInfo, &memoryRequirements);

        uint32_t suiteableMemoryTypeIndex = 0u;
        if(!VulkanHelper::findSuitableMemoryType(context->getVkPhysicalDevice(), memoryRequirements,
                                                 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, suiteableMemoryTypeIndex))
        {
            THROW_ERROR("Could not find a suitable memory type for the image buffer");
        }

        // Allocate image memory
        VkMemoryAllocateInfo memoryAllocateInfo{VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
        memoryAllocateInfo.allocationSize = memoryRequirements.memoryRequirements.size;
        memoryAllocateInfo.memoryTypeIndex = suiteableMemoryTypeIndex;
        PLOG_FN_VK(vkAllocateMemory(device, &memoryAllocateInfo, nullptr, &colorBuffer.deviceMemory));
        VulkanHelper::setObjectName(context->getVkDevice(), colorBuffer.deviceMemory, "color buffer memory");

        VkBindImageMemoryInfo bindImageMemoryInfo{VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO};
        bindImageMemoryInfo.image = colorBuffer.image;
        bindImageMemoryInfo.memory = colorBuffer.deviceMemory;
        bindImageMemoryInfo.memoryOffset = 0u;
        bindImageMemoryInfo.pNext = nullptr;
        PLOG_FN_VK(vkBindImageMemory2(device, 1u, &bindImageMemoryInfo));

        // create image view
        VkImageViewCreateInfo imageViewCreateInfo{VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO};
        imageViewCreateInfo.image = colorBuffer.image;
        imageViewCreateInfo.format = colorFormat;
        imageViewCreateInfo.viewType = (layerCount == 1u ? VK_IMAGE_VIEW_TYPE_2D : VK_IMAGE_VIEW_TYPE_2D_ARRAY);
        imageViewCreateInfo.components = {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
                                          VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY};
        imageViewCreateInfo.subresourceRange.layerCount = static_cast<uint32_t>(layerCount);
        imageViewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        imageViewCreateInfo.subresourceRange.baseArrayLayer = 0u;
        imageViewCreateInfo.subresourceRange.baseMipLevel = 0u;
        imageViewCreateInfo.subresourceRange.levelCount = 1u;
        PLOG_FN_VK(vkCreateImageView(device, &imageViewCreateInfo, nullptr, &colorBuffer.imageView));
        VulkanHelper::setObjectName(context->getVkDevice(), colorBuffer.imageView, "color buffer image view");

        PLOGI << "Created color buffer";
    }

    // depth
    {

        VkImageCreateInfo imageCreateInfo{VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO};
        imageCreateInfo.imageType = VK_IMAGE_TYPE_2D;
        imageCreateInfo.extent.width = eyeResolution.width;
        imageCreateInfo.extent.height = eyeResolution.height;
        imageCreateInfo.extent.depth = 1u;
        imageCreateInfo.mipLevels = 1u;
        imageCreateInfo.arrayLayers = static_cast<uint32_t>(layerCount);
        imageCreateInfo.format = depthFormat;
        imageCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
        imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
        imageCreateInfo.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT; // SAMPLED for Hi-Z
        imageCreateInfo.samples = multisampleCount;
        imageCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
        PLOG_FN_VK(vkCreateImage(device, &imageCreateInfo, nullptr, &depthBuffer.image));
        VulkanHelper::setObjectName(context->getVkDevice(), depthBuffer.image, "depth buffer image");

        // get memory
        // NOTE(review): declared without {} unlike the color-buffer path; all
        // three members are assigned below, so behavior is unaffected.
        VkImageMemoryRequirementsInfo2 memoryRequirementsInfo;
        memoryRequirementsInfo.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2;
        memoryRequirementsInfo.image = depthBuffer.image;
        memoryRequirementsInfo.pNext = nullptr;

        VkMemoryRequirements2 memoryRequirements{VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2};
        vkGetImageMemoryRequirements2(device, &memoryRequirementsInfo, &memoryRequirements);

        uint32_t suiteableMemoryTypeIndex = 0u;
        if(!VulkanHelper::findSuitableMemoryType(context->getVkPhysicalDevice(), memoryRequirements,
                                                 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, suiteableMemoryTypeIndex))
        {
            THROW_ERROR("Could not find a suitable memory type for the image buffer");
        }

        // Allocate image memory
        VkMemoryAllocateInfo memoryAllocateInfo{VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO};
        memoryAllocateInfo.allocationSize = memoryRequirements.memoryRequirements.size;
        memoryAllocateInfo.memoryTypeIndex = suiteableMemoryTypeIndex;
        PLOG_FN_VK(vkAllocateMemory(device, &memoryAllocateInfo, nullptr, &depthBuffer.deviceMemory));
        VulkanHelper::setObjectName(context->getVkDevice(), depthBuffer.deviceMemory, "depth buffer device memory");

        VkBindImageMemoryInfo bindImageMemoryInfo{VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO};
        bindImageMemoryInfo.image = depthBuffer.image;
        bindImageMemoryInfo.memory = depthBuffer.deviceMemory;
        bindImageMemoryInfo.memoryOffset = 0u;
        bindImageMemoryInfo.pNext = nullptr;
        PLOG_FN_VK(vkBindImageMemory2(device, 1u, &bindImageMemoryInfo));

        // create image view
        VkImageViewCreateInfo imageViewCreateInfo{VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO};
        imageViewCreateInfo.image = depthBuffer.image;
        imageViewCreateInfo.format = depthFormat;
        imageViewCreateInfo.viewType = (layerCount == 1u ? VK_IMAGE_VIEW_TYPE_2D : VK_IMAGE_VIEW_TYPE_2D_ARRAY);
        imageViewCreateInfo.components = {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
                                          VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY};
        imageViewCreateInfo.subresourceRange.layerCount = static_cast<uint32_t>(layerCount);
        imageViewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
        imageViewCreateInfo.subresourceRange.baseArrayLayer = 0u;
        imageViewCreateInfo.subresourceRange.baseMipLevel = 0u;
        imageViewCreateInfo.subresourceRange.levelCount = 1u;
        PLOG_FN_VK(vkCreateImageView(device, &imageViewCreateInfo, nullptr, &depthBuffer.imageView));
        VulkanHelper::setObjectName(context->getVkDevice(), depthBuffer.imageView, "depth buffer image view");

        PLOGI << "Created depth buffer";
    }

    // One-time layout transitions so the first frame's render pass sees the
    // attachments in their expected layouts.
    {
        PLOGI << "Performing initial layout transitions for render target images...";

        // Create a temporary command pool for the setup commands
        VkCommandPoolCreateInfo poolInfo{VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO};
        poolInfo.queueFamilyIndex = vkDrawQueueFamilyIndex;
        poolInfo.flags = VK_COMMAND_POOL_CREATE_TRANSIENT_BIT;
        VkCommandPool tempPool;
        PLOG_FN_VK(vkCreateCommandPool(device, &poolInfo, nullptr, &tempPool));
        VulkanHelper::setObjectName(context->getVkDevice(), tempPool, "Render target image layout transition command pool");

        // Begin a one-time command buffer using your helper
        VkCommandBuffer cmd = VulkanHelper::beginSingleTimeCommands(device, tempPool);
        // NOTE(review): this names tempPool a second time; the string suggests
        // it was meant to name `cmd` ("... command buffer"). Likely a
        // copy-paste bug — the command buffer never gets a debug name.
        VulkanHelper::setObjectName(context->getVkDevice(), tempPool, "Render target image command buffer");


        // Transition the multisampled color buffer (both layers)
        VulkanHelper::transitionImageLayout(cmd, device, context->getGraphicsQueue(), tempPool,
                                            colorBuffer.image, colorFormat,
                                            VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                                            2u); // Pass layerCount = 2

        // Transition the multisampled depth buffer (both layers)
        VulkanHelper::transitionImageLayout(cmd, device, context->getGraphicsQueue(), tempPool,
                                            depthBuffer.image, depthFormat,
                                            VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                                            2u); // Pass layerCount = 2

        // End and submit the commands using your helper
        VulkanHelper::endSingleTimeCommands(device, context->getGraphicsQueue(), tempPool, cmd);

        // Clean up the temporary pool
        vkDestroyCommandPool(device, tempPool, nullptr);
        PLOGI << "Initial layout transitions complete.";
    }

    // Create the OpenXR swapchain: 2 array layers so the multiview render
    // pass can target both eyes in one image.
    {
        const XrViewConfigurationView& eyeImageInfo = eyeImageInfos.at(0u);

        XrSwapchainCreateInfo xrSwapchainCreateInfo{XR_TYPE_SWAPCHAIN_CREATE_INFO};
        xrSwapchainCreateInfo.format = colorFormat;
        xrSwapchainCreateInfo.sampleCount = eyeImageInfo.recommendedSwapchainSampleCount;
        xrSwapchainCreateInfo.width = eyeImageInfo.recommendedImageRectWidth;
        xrSwapchainCreateInfo.height = eyeImageInfo.recommendedImageRectHeight;
        xrSwapchainCreateInfo.arraySize = 2u; // multiview
        xrSwapchainCreateInfo.faceCount = 1u;
        xrSwapchainCreateInfo.mipCount = 1u;
        // Critical: Include COLOR_ATTACHMENT_BIT so we can render to these images
        // Also include TRANSFER_SRC_BIT for mirror view blitting
        xrSwapchainCreateInfo.usageFlags = XR_SWAPCHAIN_USAGE_SAMPLED_BIT | XR_SWAPCHAIN_USAGE_COLOR_ATTACHMENT_BIT | XR_SWAPCHAIN_USAGE_TRANSFER_SRC_BIT;

        PLOG_THROW_FN_XR(xrCreateSwapchain(xrSession, &xrSwapchainCreateInfo, &xrSwapchain));
        PLOGI << "OpenXR swapchain created";
    }

    // Enumerate swapchain images and build one render target (image view +
    // framebuffer) per image: attachments are MSAA color, MSAA depth, and the
    // swapchain image as resolve target.
    {
        uint32_t swapchainImageCount;
        PLOG_FN_XR(xrEnumerateSwapchainImages(xrSwapchain, 0u, &swapchainImageCount, nullptr));

        std::vector<XrSwapchainImageVulkan2KHR> swapchainImages(swapchainImageCount);
        for(XrSwapchainImageVulkan2KHR& swapchainImage : swapchainImages)
        {
            swapchainImage.type = XR_TYPE_SWAPCHAIN_IMAGE_VULKAN2_KHR;
        }

        XrSwapchainImageBaseHeader* data = reinterpret_cast<XrSwapchainImageBaseHeader*>(swapchainImages.data());
        PLOG_FN_XR(xrEnumerateSwapchainImages(xrSwapchain, static_cast<uint32_t>(swapchainImages.size()),
                                              &swapchainImageCount, data));

        PLOGI << "Found " << swapchainImages.size() << " xr swapchain images!";

        swapchainRenderTargets.resize(swapchainImages.size());
        constexpr uint32_t layerCount = 2u; // NOTE(review): shadows the function-scope layerCount (same value)
        VkExtent2D eyeResolution = getEyeResolution(0u); // NOTE(review): shadows the function-scope eyeResolution (same value)
        for(size_t renderTargetsIndex = 0u; renderTargetsIndex < swapchainRenderTargets.size(); renderTargetsIndex++)
        {
            RenderTarget& renderTarget = swapchainRenderTargets.at(renderTargetsIndex);

            const VkImage image = swapchainImages.at(renderTargetsIndex).image;
            renderTarget = RenderTarget();
            renderTarget.image = image;
            VulkanHelper::setObjectName(context->getVkDevice(), renderTarget.image, "OpenXR Swapchain Image " + std::to_string(renderTargetsIndex));

            VkImageViewCreateInfo imageViewCreateInfo{VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO};
            imageViewCreateInfo.image = image;
            imageViewCreateInfo.format = colorFormat;
            imageViewCreateInfo.viewType = (layerCount == 1u ? VK_IMAGE_VIEW_TYPE_2D : VK_IMAGE_VIEW_TYPE_2D_ARRAY);
            imageViewCreateInfo.components = {VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
                                              VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY};
            imageViewCreateInfo.subresourceRange.layerCount = layerCount;
            imageViewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
            imageViewCreateInfo.subresourceRange.baseArrayLayer = 0u;
            imageViewCreateInfo.subresourceRange.baseMipLevel = 0u;
            imageViewCreateInfo.subresourceRange.levelCount = 1u;
            PLOG_FN_VK(vkCreateImageView(device, &imageViewCreateInfo, nullptr, &renderTarget.imageView));

            // Attachment order matches the render pass: 0=color, 1=depth, 2=resolve.
            const std::array<VkImageView, 3> attachments = {colorBuffer.imageView, depthBuffer.imageView, renderTarget.imageView};

            // create a frame buffer
            VkFramebufferCreateInfo framebufferCreateInfo{VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO};
            framebufferCreateInfo.renderPass = vkRenderPass;
            framebufferCreateInfo.attachmentCount = static_cast<uint32_t>(attachments.size());
            framebufferCreateInfo.pAttachments = attachments.data();
            framebufferCreateInfo.width = eyeResolution.width;
            framebufferCreateInfo.height = eyeResolution.height;
            framebufferCreateInfo.layers = 1u; // must be 1 when the render pass uses multiview
            PLOG_FN_VK(vkCreateFramebuffer(device, &framebufferCreateInfo, nullptr, &renderTarget.framebuffer));
            PLOGI << "Created Frame buffer";
        }

        PLOGI << "Created swapchain and bound images to multiview";
    }

    // Fill the static parts of each eye's composition-layer projection view;
    // pose/fov are written per frame elsewhere.
    {
        eyeRenderInfos.resize(eyeCount);
        for(size_t eyeIndex = 0u; eyeIndex < eyeRenderInfos.size(); eyeIndex++)
        {
            XrCompositionLayerProjectionView& eyeRenderInfo = eyeRenderInfos.at(eyeIndex);
            eyeRenderInfo.type = XR_TYPE_COMPOSITION_LAYER_PROJECTION_VIEW;
            eyeRenderInfo.next = nullptr;

            const XrViewConfigurationView& eyeImageInfo = eyeImageInfos.at(eyeIndex);
            eyeRenderInfo.subImage.swapchain = xrSwapchain;
            eyeRenderInfo.subImage.imageArrayIndex = static_cast<uint32_t>(eyeIndex);
            eyeRenderInfo.subImage.imageRect.offset = {0, 0};
            eyeRenderInfo.subImage.imageRect.extent = {static_cast<int32_t>(eyeImageInfo.recommendedImageRectWidth),
                                                       static_cast<int32_t>(eyeImageInfo.recommendedImageRectHeight)};
        }

        eyes.resize(eyeCount);
    }

    // Create the sampler used for Hi-Z depth-pyramid sampling.
    {
        createHiZSampler(device);
    }
}
{
    // Teardown in roughly reverse creation order; every handle is reset to
    // its null handle afterwards, so a repeated call is a safe no-op.
    // NOTE(review): the signature line is not visible in this listing —
    // presumably the Headset destructor or a destroy() method; confirm.

    // Clean up Hi-Z sampler (pyramids are now per-frame in RenderProcess)
    if (hiZSampler_ != VK_NULL_HANDLE)
    {
        vkDestroySampler(context->getVkDevice(), hiZSampler_, nullptr);
        hiZSampler_ = VK_NULL_HANDLE;
    }

    colorBuffer.cleanup(context);
    depthBuffer.cleanup(context);

    // Destroy the OpenXR swapchain FIRST, before cleaning up render targets
    if(xrSwapchain != XR_NULL_HANDLE)
    {
        xrDestroySwapchain(xrSwapchain);
        xrSwapchain = XR_NULL_HANDLE;
    }

    // Now clean up the render targets (framebuffers and image views)
    for(auto& renderTarget : swapchainRenderTargets)
    {
        renderTarget.cleanup(context);
    }

    if (vkRenderPass != VK_NULL_HANDLE)
    {
        vkDestroyRenderPass(context->getVkDevice(), vkRenderPass, nullptr);
        vkRenderPass = VK_NULL_HANDLE;
    }

    if(xrReferenceSpace != XR_NULL_HANDLE)
    {
        xrDestroySpace(xrReferenceSpace);
        xrReferenceSpace = XR_NULL_HANDLE;
    }

    // Session last: the space and swapchain above belong to it.
    if(xrSession != XR_NULL_HANDLE)
    {
        xrDestroySession(xrSession);
        xrSession = XR_NULL_HANDLE;
    }
}
566
{
    // Releases the view, image, and backing memory owned by this buffer — in
    // that order (view before image, image before its memory). Each handle is
    // nulled, so a second call is a no-op.
    // NOTE(review): the signature line is not visible in this listing —
    // presumably ImageBuffer::cleanup(Context*); confirm in source.
    const VkDevice device = context->getVkDevice();
    if(imageView != VK_NULL_HANDLE)
    {
        vkDestroyImageView(device, imageView, nullptr);
        imageView = VK_NULL_HANDLE;
    }
    if(image != VK_NULL_HANDLE)
    {
        vkDestroyImage(device, image, nullptr);
        image = VK_NULL_HANDLE;
    }
    if(deviceMemory != VK_NULL_HANDLE)
    {
        vkFreeMemory(device, deviceMemory, nullptr);
        deviceMemory = VK_NULL_HANDLE;
    }
}
586
{
    // For each eye: copy the latest pose/fov from eyePoses into the
    // composition-layer projection views, then derive the eye's view matrix
    // (inverse of the pose transform) and projection matrix.
    // Near/far planes are hard-coded: 0.05 near, 1250.0 far (presumably
    // meters, per OpenXR convention — confirm against createProjectionMatrix).
    // NOTE(review): the signature line is not visible in this listing.
    TRACY_ZONE_SCOPED_NAMED("Get view matrix from openxr");
    for(size_t eyeIndex = 0u; eyeIndex < eyeCount; eyeIndex++)
    {
        XrCompositionLayerProjectionView& eyeRenderInfo = eyeRenderInfos.at(eyeIndex);
        const XrView& eyePose = eyePoses.at(eyeIndex);
        eyeRenderInfo.pose = eyePose.pose;
        eyeRenderInfo.fov = eyePose.fov;

        const XrPosef& pose = eyeRenderInfo.pose;

        // View matrix is the inverse of the eye's world-space pose.
        eyes.at(eyeIndex).eyeViewMatrix = glm::inverse(OpenXrHelper::poseToMatrix(pose));
        eyes.at(eyeIndex).eyeProjectionMatrix = OpenXrHelper::createProjectionMatrix(eyeRenderInfo.fov, 0.05f, 1250.0f);
    }
}
603
605{
606#ifdef ENABLE_TRACY
608#endif
609 WaitFrameResult waitResult;
611 waitResult.shouldCallBeginFrame = false;
612
613 // Poll openxr events
614 XrEventDataBuffer eventBuffer{XR_TYPE_EVENT_DATA_BUFFER};
615 {
616#ifdef ENABLE_TRACY
617 TRACY_ZONE_SCOPED_NAMED("Poll all xr events");
618#endif
619 while(xrPollEvent(context->getXrInstance(), &eventBuffer) == XR_SUCCESS)
620 {
621#ifdef ENABLE_TRACY
622 std::stringstream tracyZoneName;
623 tracyZoneName << "Poll xr Event: ";
624 const std::string zone = tracyZoneName.str();
625 ZoneName(zone.c_str(), zone.size());
626#endif
627 PLOGI_XR << "Polled event " << OpenXrHelper::GetXrStructureTypeName(eventBuffer.type);
628 switch(eventBuffer.type)
629 {
630 case XR_TYPE_EVENT_DATA_INSTANCE_LOSS_PENDING:
631 {
632 isExistRequested = true;
633#ifdef ENABLE_TRACY
634 tracyZoneName << "Xr Type Event Data Instance Loss Pending";
635 const std::string zoneName = tracyZoneName.str();
636 ZoneName(zoneName.c_str(), zoneName.size());
637#endif
638 return waitResult; // SkipFully, shouldCallBeginFrame = false
639 }
640 case XR_TYPE_EVENT_DATA_SESSION_STATE_CHANGED:
641 {
642 XrEventDataSessionStateChanged* event = reinterpret_cast<XrEventDataSessionStateChanged*>(&eventBuffer);
643 XrSessionState previousState = xrSessionState;
644 xrSessionState = event->state;
645
646 PLOGI_XR << "Session state changed to: " << OpenXrHelper::sessionStateToString(event->state);
647
648 // Session state transition synchronization
649 // Wait for GPU idle during state transitions to ensure:
650 // 1. All in-flight commands referencing swapchain images complete
651 // 2. SteamVR compositor internal state is clean before we resume rendering
652 // 3. No race conditions with BlankEyeBuffer or compositor copies
653
654 bool wasRendering = (previousState == XR_SESSION_STATE_VISIBLE ||
655 previousState == XR_SESSION_STATE_FOCUSED);
656 bool isRendering = (xrSessionState == XR_SESSION_STATE_VISIBLE ||
657 xrSessionState == XR_SESSION_STATE_FOCUSED);
658 bool isNotRendering = (xrSessionState == XR_SESSION_STATE_SYNCHRONIZED ||
659 xrSessionState == XR_SESSION_STATE_STOPPING ||
660 xrSessionState == XR_SESSION_STATE_IDLE);
661 bool wasNotRendering = (previousState == XR_SESSION_STATE_SYNCHRONIZED ||
662 previousState == XR_SESSION_STATE_READY ||
663 previousState == XR_SESSION_STATE_IDLE);
664
665 // Wait when transitioning FROM rendering TO non-rendering
666 // Ensures our GPU work completes before runtime takes over swapchain
667 if(wasRendering && isNotRendering)
668 {
669 PLOGI_XR << "Waiting for GPU idle before transitioning to non-rendering state";
670 vkDeviceWaitIdle(context->getVkDevice());
671 }
672
673 // Also wait when transitioning FROM non-rendering TO rendering
674 // Ensures SteamVR compositor's internal operations (BlankEyeBuffer, etc.)
675 // are complete before we start rendering to swapchain images again
676 if(wasNotRendering && isRendering)
677 {
678 PLOGI_XR << "Waiting for GPU idle before transitioning to rendering state";
679 vkDeviceWaitIdle(context->getVkDevice());
680
681#ifdef ENABLE_TRACY
682 // Reset Tracy Vulkan contexts to fix query pool state.
683 // Tracy's query pools have stale queries that weren't collected/reset
684 // while the session was in non-rendering state. Recreating the contexts
685 // resets the query pools and prevents validation errors.
686 if(renderer != nullptr)
687 {
688 PLOGI_XR << "Resetting Tracy contexts after state transition";
689 renderer->resetTracyContexts();
690 }
691#endif
692 }
693
694 if(xrSessionState == XR_SESSION_STATE_READY)
695 {
697 TRACY_ZONE_SCOPED_NAMED("XR_SESSION_STATE_READY");
698 }
699 else if(xrSessionState == XR_SESSION_STATE_STOPPING)
700 {
701 PLOGI_XR << "Session stopping, ending XR session";
702 endXrSession();
703 TRACY_ZONE_SCOPED_NAMED("XR_SESSION_STATE_STOPPING");
704 // Continue polling to process remaining state changes
705 }
706 else if(xrSessionState == XR_SESSION_STATE_IDLE)
707 {
708 // Session has ended, waiting for EXITING
709 PLOGI_XR << "Session idle after ending";
710 TRACY_ZONE_SCOPED_NAMED("XR_SESSION_STATE_IDLE");
711 // Continue polling to get EXITING state
712 }
713 else if(xrSessionState == XR_SESSION_STATE_EXITING)
714 {
715 // Graceful exit requested - this is not an error
716 PLOGI_XR << "Session exiting gracefully";
717 isExistRequested = true;
718 TRACY_ZONE_SCOPED_NAMED("XR_SESSION_STATE_EXITING");
719 return waitResult; // SkipFully, shouldCallBeginFrame = false
720 }
721 else if(xrSessionState == XR_SESSION_STATE_LOSS_PENDING)
722 {
723 // Session loss is an error condition
724 PLOGE_XR << "Session loss pending";
725 isExistRequested = true;
726 TRACY_ZONE_SCOPED_NAMED("XR_SESSION_STATE_LOSS_PENDING");
727 waitResult.result = BeginFrameResult::Error;
728 return waitResult;
729 }
730 break;
731 }
732 }
733
734 eventBuffer.type = XR_TYPE_EVENT_DATA_BUFFER;
735 }
736 }
737
738 if(xrSessionState != XR_SESSION_STATE_READY && xrSessionState != XR_SESSION_STATE_SYNCHRONIZED &&
739 xrSessionState != XR_SESSION_STATE_VISIBLE && xrSessionState != XR_SESSION_STATE_FOCUSED)
740 {
741 return waitResult; // SkipFully, shouldCallBeginFrame = false
742 }
743
744 {
745 TRACY_ZONE_SCOPED_NAMED("Wait Frame");
746
747 xrFrameState.type = XR_TYPE_FRAME_STATE;
748 XrFrameWaitInfo frameWaitInfo{XR_TYPE_FRAME_WAIT_INFO};
749
750 PLOGI_XR << "Calling xrWaitFrame (session state: " << OpenXrHelper::sessionStateToString(xrSessionState) << ")";
751
752 // Use async with timeout to prevent indefinite blocking when headset is removed
753 // This handles the race condition where xrWaitFrame is called before the runtime
754 // delivers the STOPPING event
755 auto waitFuture = xrWaitThreadPool_->submit_task([this, &frameWaitInfo]() {
756 TRACY_ZONE_SCOPED_NAMED("xrWaitFrame (Async)");
757 return xrWaitFrame(xrSession, &frameWaitInfo, &xrFrameState);
758 });
759
760 constexpr auto waitTimeout = std::chrono::seconds(5);
761 auto status = waitFuture.wait_for(waitTimeout);
762
763 if (status == std::future_status::timeout)
764 {
765 PLOGW_XR << "xrWaitFrame timed out after 5 seconds - headset may have been removed";
766 PLOGW_XR << "Requesting session exit to unblock...";
767
768 // Request exit to trigger the OpenXR state machine
769 // This should cause xrWaitFrame to return
770 XrResult exitResult = xrRequestExitSession(xrSession);
771 if (exitResult != XR_SUCCESS)
772 {
773 PLOGW_XR << "xrRequestExitSession failed: " << exitResult;
774 }
775
776 // Wait for the async call to complete (it should unblock after exit request)
777 XrResult result = waitFuture.get();
778 if (XR_FAILED(result))
779 {
780 PLOGE_XR << "xrWaitFrame failed after timeout: " << result;
781 }
782
783 isExistRequested = true;
784 return waitResult; // SkipFully, shouldCallBeginFrame = false
785 }
786
787 XrResult result = waitFuture.get();
788 if (XR_FAILED(result))
789 {
790 PLOGE_XR << "xrWaitFrame failed: " << result;
791 return waitResult; // SkipFully, shouldCallBeginFrame = false
792 }
793
794 PLOGI_XR << "xrWaitFrame returned successfully";
795 }
796
797 // xrWaitFrame succeeded - we need to call xrBeginFrame later
798 waitResult.shouldCallBeginFrame = true;
799
800 // Pre-determine if we'll be rendering based on shouldRender flag
801 // The actual begin frame and swapchain acquisition happens in beginXrFrameAfterWait
802 if(!xrFrameState.shouldRender)
803 {
805 }
806 else
807 {
809 }
810
811 return waitResult;
812}
813
// NOTE(review): the signature line was lost in extraction; per the header this is
//   Headset::BeginFrameResult Headset::beginXrFrameAfterWait(uint32_t& swapchainImageIndex)
// Second phase of frame setup: calls xrBeginFrame, locates the per-eye views,
// then acquires and waits on a swapchain image. MUST run after the previous
// frame's xrEndFrame has completed.
{
#ifdef ENABLE_TRACY
// NOTE(review): a Tracy zone macro on this line was dropped by extraction — confirm against VCS.
#endif

    // Call xrBeginFrame - this MUST happen after previous frame's xrEndFrame completes
    XrFrameBeginInfo frameBeginInfo{XR_TYPE_FRAME_BEGIN_INFO};
    PLOG_FN_XR(xrBeginFrame(xrSession, &frameBeginInfo));

    if(!xrFrameState.shouldRender)
    {
        // NOTE(review): extraction dropped this branch's statement — presumably an
        // early `return` with a skip-render result (see endXrFrameNoRender's
        // "SkipRender" comment); confirm against VCS.
    }

    // Locate both eye poses for the predicted display time in the reference space.
    xrViewState.type = XR_TYPE_VIEW_STATE;
    uint32_t viewCount;
    XrViewLocateInfo viewLocateInfo{XR_TYPE_VIEW_LOCATE_INFO};
    viewLocateInfo.viewConfigurationType = context->getXrViewType();
    viewLocateInfo.displayTime = xrFrameState.predictedDisplayTime;
    viewLocateInfo.space = xrReferenceSpace;
    PLOG_THROW_FN_XR(xrLocateViews(xrSession, &viewLocateInfo, &xrViewState, static_cast<uint32_t>(eyePoses.size()),
                                   &viewCount, eyePoses.data()));
    // Sanity check: the runtime must report exactly one view per eye.
    if(viewCount != eyeCount)
    {
        THROW_ERROR("View count is not equal eye count");
    }

    // Acquire the next swapchain image and block until it is ready for writing.
    XrSwapchainImageAcquireInfo swapchainImageAcquireInfo{XR_TYPE_SWAPCHAIN_IMAGE_ACQUIRE_INFO};
    PLOG_THROW_FN_XR(xrAcquireSwapchainImage(xrSwapchain, &swapchainImageAcquireInfo, &swapchainImageIndex));

    XrSwapchainImageWaitInfo swapchainImageWaitInfo{XR_TYPE_SWAPCHAIN_IMAGE_WAIT_INFO};
    swapchainImageWaitInfo.timeout = XR_INFINITE_DURATION;
    PLOG_THROW_FN_XR(xrWaitSwapchainImage(xrSwapchain, &swapchainImageWaitInfo));

    // NOTE(review): the function's final return statement was dropped by
    // extraction — presumably a render-fully result; confirm against VCS.
}
852
853Headset::BeginFrameResult Headset::beginXrFrame(uint32_t& swapchainImageIndex, Renderer *renderer)
854{
855 // Legacy function: combines both phases for backward compatibility
856 auto waitResult = waitForXrFrame(renderer);
857
858 if (!waitResult.shouldCallBeginFrame)
859 {
860 return waitResult.result;
861 }
862
863 return beginXrFrameAfterWait(swapchainImageIndex);
864}
865
// NOTE(review): the signature line was lost in extraction; per the call flow this
// is Headset's frame-end routine — it releases the acquired swapchain image and
// submits the composited projection layer via xrEndFrame. Confirm exact
// signature (const-ness) against VCS.
{
    {
        TRACY_ZONE_SCOPED_NAMED("Release Swapchain Image");
        // Hand the image acquired in beginXrFrameAfterWait back to the runtime.
        XrSwapchainImageReleaseInfo swapchainImageReleaseInfo{XR_TYPE_SWAPCHAIN_IMAGE_RELEASE_INFO};
        PLOG_THROW_FN_XR(xrReleaseSwapchainImage(xrSwapchain, &swapchainImageReleaseInfo));
    }

    {
        TRACY_ZONE_SCOPED_NAMED("End XR Frame");
        // Single projection layer covering all eyes, composited in the stage space.
        XrCompositionLayerProjection compositionLayerProjection{XR_TYPE_COMPOSITION_LAYER_PROJECTION};
        compositionLayerProjection.space = xrReferenceSpace;
        compositionLayerProjection.viewCount = static_cast<uint32_t>(eyeRenderInfos.size());
        compositionLayerProjection.views = eyeRenderInfos.data();

        std::vector<XrCompositionLayerBaseHeader*> layers;

        // Only submit the projection layer when the runtime asked us to render
        // AND the tracked poses are valid; otherwise end the frame with zero layers.
        const bool positionValid = xrViewState.viewStateFlags & XR_VIEW_STATE_POSITION_VALID_BIT;
        const bool orientationValid = xrViewState.viewStateFlags & XR_VIEW_STATE_ORIENTATION_VALID_BIT;
        if(xrFrameState.shouldRender && positionValid && orientationValid)
        {
            layers.push_back(reinterpret_cast<XrCompositionLayerBaseHeader*>(&compositionLayerProjection));
        }

        XrFrameEndInfo frameEndInfo{XR_TYPE_FRAME_END_INFO};
        frameEndInfo.displayTime = xrFrameState.predictedDisplayTime;
        frameEndInfo.layerCount = static_cast<uint32_t>(layers.size());
        frameEndInfo.layers = layers.data();
        frameEndInfo.environmentBlendMode = XR_ENVIRONMENT_BLEND_MODE_OPAQUE;
        PLOG_THROW_FN_XR(xrEndFrame(xrSession, &frameEndInfo));
    }
}
898
900{
901 // End frame without releasing swapchain image since none was acquired
902 // This is used when beginXrFrame returned SkipRender
903 XrFrameEndInfo frameEndInfo{XR_TYPE_FRAME_END_INFO};
904 frameEndInfo.displayTime = xrFrameState.predictedDisplayTime;
905 frameEndInfo.layerCount = 0; // No layers since we didn't render anything
906 frameEndInfo.layers = nullptr;
907 frameEndInfo.environmentBlendMode = XR_ENVIRONMENT_BLEND_MODE_OPAQUE;
908 PLOG_THROW_FN_XR(xrEndFrame(xrSession, &frameEndInfo));
909}
910
912{
913 XrSessionBeginInfo sessionBeginInfo{XR_TYPE_SESSION_BEGIN_INFO};
914 sessionBeginInfo.primaryViewConfigurationType = context->getXrViewType();
915 PLOG_THROW_FN_XR(xrBeginSession(xrSession, &sessionBeginInfo));
916}
917
918VkImage Headset::getSwapchainImage(size_t swapchainImageIndex) const {
919 return swapchainRenderTargets[swapchainImageIndex].image;
920}
921
922std::vector<RenderTarget> Headset::getSwapchainRenderTargets() {
924}
925
927{
928 PLOG_THROW_FN_XR(xrEndSession(xrSession));
929}
930
932{
933 if (xrSession != XR_NULL_HANDLE && !isExistRequested)
934 {
935 PLOGI << "Requesting XR session exit";
936 XrResult result = xrRequestExitSession(xrSession);
937 if (result == XR_SUCCESS)
938 {
939 isExistRequested = true;
940 }
941 else
942 {
943 PLOGW << "xrRequestExitSession failed with result: " << result;
944 }
945 }
946}
947
948uint32_t Headset::getEyeCount() const
949{
950 return eyeCount;
951}
952
953VkExtent2D Headset::getEyeResolution(size_t eyeIndex) const
954{
955 const XrViewConfigurationView& eyeInfo = eyeImageInfos.at(eyeIndex);
956 return {eyeInfo.recommendedImageRectWidth, eyeInfo.recommendedImageRectHeight};
957}
958
959VkRenderPass Headset::getRenderPass() const
960{
961 return vkRenderPass;
962}
963
965{
966 const VkDevice device = context->getVkDevice();
967 if(framebuffer != VK_NULL_HANDLE)
968 {
969 vkDestroyFramebuffer(device, framebuffer, nullptr);
970 framebuffer = VK_NULL_HANDLE;
971 }
972 if(imageView != VK_NULL_HANDLE)
973 {
974 vkDestroyImageView(device, imageView, nullptr);
975 imageView = VK_NULL_HANDLE;
976 }
977 // The VkImage is owned by the swapchain, so we don't destroy it here.
978 image = VK_NULL_HANDLE;
979}
980
981glm::mat4 Headset::getEyeViewMatrix(size_t eyeIndex) const
982{
983 // Apply player position offset: translate the view by negative player position
984 // This effectively moves the world opposite to player movement
985 glm::mat4 playerTranslation = glm::translate(glm::mat4(1.0f), -playerPosition_);
986 return eyes.at(eyeIndex).eyeViewMatrix * playerTranslation;
987}
988
989glm::mat4 Headset::getEyeProjectionMatrix(size_t eyeIndex) const
990{
991 return eyes.at(eyeIndex).eyeProjectionMatrix;
992}
993
994glm::mat4 Headset::getViewProjectionMatrix(size_t eyeIndex) const
995{
996 return getEyeProjectionMatrix(eyeIndex) * getEyeViewMatrix(eyeIndex);
997}
998
1000{
1001 return xrReferenceSpace;
1002}
1003
1004XrSession Headset::getSession() const
1005{
1006 return xrSession;
1007}
1008
1010{
1011 return isExistRequested;
1012}
1013XrFrameState Headset::getXrFrameState() const
1014{
1015 return xrFrameState;
1016}
1017
1018const RenderTarget* Headset::getRenderTarget(size_t swapchainImageIndex) const
1019{
1020 return &swapchainRenderTargets[swapchainImageIndex];
1021}
1022
1023void Headset::createHiZSampler(VkDevice device)
1024{
1025 // Use MIN reduction mode for reverse-Z depth buffer (1=near, 0=far)
1026 // MIN gives us the FARTHEST depth when filtering, which is correct for conservative occlusion culling
1027 // (we need the farthest depth to avoid culling objects visible through gaps)
1028
1029 VkSamplerReductionModeCreateInfo reductionModeInfo{VK_STRUCTURE_TYPE_SAMPLER_REDUCTION_MODE_CREATE_INFO};
1030 reductionModeInfo.reductionMode = VK_SAMPLER_REDUCTION_MODE_MIN;
1031
1032 // Calculate max mip levels based on eye resolution (same logic as per-frame pyramids)
1033 VkExtent2D eyeRes = getEyeResolution(0);
1034 uint32_t maxMipLevels = static_cast<uint32_t>(
1035 std::floor(std::log2(std::max(eyeRes.width, eyeRes.height)))) + 1;
1036
1037 VkSamplerCreateInfo samplerInfo{VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO};
1038 samplerInfo.pNext = &reductionModeInfo;
1039 samplerInfo.magFilter = VK_FILTER_NEAREST;
1040 samplerInfo.minFilter = VK_FILTER_NEAREST;
1041 samplerInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
1042 samplerInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
1043 samplerInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
1044 samplerInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
1045 samplerInfo.mipLodBias = 0.0f;
1046 samplerInfo.anisotropyEnable = VK_FALSE;
1047 samplerInfo.maxAnisotropy = 1.0f;
1048 samplerInfo.compareEnable = VK_FALSE;
1049 samplerInfo.compareOp = VK_COMPARE_OP_ALWAYS;
1050 samplerInfo.minLod = 0.0f;
1051 samplerInfo.maxLod = static_cast<float>(maxMipLevels);
1052 samplerInfo.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK; // 0.0 = far depth in reverse-Z for out-of-bounds
1053 samplerInfo.unnormalizedCoordinates = VK_FALSE;
1054
1055 PLOG_FN_VK(vkCreateSampler(device, &samplerInfo, nullptr, &hiZSampler_));
1056 VulkanHelper::setObjectName(device, hiZSampler_, "Hi-Z Sampler (MIN reduction)");
1057
1058 PLOGI << "Hi-Z sampler created with MIN reduction mode for reverse-Z";
1059}
1060
1061} // namespace EngineCore
#define TRACY_ZONE_SCOPED_FUNCTION
#define TRACY_ZONE_SCOPED_NAMED(name)
The application context is the core class which stores the basic openxr and vulkan objects.
const VkDevice getVkDevice() const
Gets the vulkan device.
VkExtent2D getEyeResolution(size_t eyeIndex) const
Gets eye resolution in x and y for a specified eye index. 0 is left.
Definition Headset.cpp:953
std::vector< XrView > eyePoses
Definition Headset.h:279
VkSampler hiZSampler_
Definition Headset.h:295
void beginXrSession() const
Definition Headset.cpp:911
glm::mat4 getEyeViewMatrix(size_t eyeIndex) const
Returns the view matrix of the specified eye.
Definition Headset.cpp:981
std::vector< RenderTarget > swapchainRenderTargets
The swapchain render targets. The images get rendered onto those render targets.
Definition Headset.h:308
std::unique_ptr< NamedThreadPool > xrWaitThreadPool_
Definition Headset.h:302
glm::mat4 getViewProjectionMatrix(size_t eyeIndex) const
Returns the product of the projection and view matrices for the specified eye.
Definition Headset.cpp:994
XrSession getSession() const
Gets the active xr session.
Definition Headset.cpp:1004
XrSpace xrReferenceSpace
The xr reference space.
Definition Headset.h:269
std::vector< XrCompositionLayerProjectionView > eyeRenderInfos
Definition Headset.h:310
void endXrSession() const
Ends the XR session, terminating the XR presentation on the headset.
Definition Headset.cpp:926
uint32_t eyeCount
Definition Headset.h:275
void endXrFrameNoRender()
Definition Headset.cpp:899
ImageBuffer colorBuffer
Definition Headset.h:290
std::vector< XrViewConfigurationView > eyeImageInfos
Definition Headset.h:277
XrViewState xrViewState
Definition Headset.h:247
XrSessionState xrSessionState
Definition Headset.h:250
void requestExitSession()
Requests the XR runtime to exit the session gracefully. This should be called when the application wants to shut down in an orderly fashion.
Definition Headset.cpp:931
std::vector< Eye > eyes
Definition Headset.h:316
void updateViewMatrix()
Updates the eye matrix for the headset.
Definition Headset.cpp:587
XrSession xrSession
The xr session.
Definition Headset.h:264
std::vector< RenderTarget > getSwapchainRenderTargets()
Definition Headset.cpp:922
Headset(const ApplicationContext *context=nullptr)
Definition Headset.cpp:28
WaitFrameResult waitForXrFrame(Renderer *renderer)
First part of frame setup: calls xrWaitFrame. Can be called before previous xrEndFrame completes....
Definition Headset.cpp:604
glm::mat4 getEyeProjectionMatrix(size_t eyeIndex) const
Gets the projection matrix for an eye.
Definition Headset.cpp:989
BeginFrameResult beginXrFrame(uint32_t &swapchainImageIndex, Renderer *renderer)
Legacy function that combines waitForXrFrame and beginXrFrameAfterWait. Kept for compatibility but pr...
Definition Headset.cpp:853
bool getIsExitRequested() const
Gets whether the headset has requested for the application to stop.
Definition Headset.cpp:1009
XrSwapchain xrSwapchain
Definition Headset.h:299
XrSpace getReferenceSpace() const
Gets reference space for the xr headset.
Definition Headset.cpp:999
ImageBuffer depthBuffer
Definition Headset.h:291
void createHiZSampler(VkDevice device)
Definition Headset.cpp:1023
BeginFrameResult beginXrFrameAfterWait(uint32_t &swapchainImageIndex)
Second part of frame setup: calls xrBeginFrame, acquires swapchain image. MUST be called after previo...
Definition Headset.cpp:814
const RenderTarget * getRenderTarget(size_t swapchainImageIndex) const
Definition Headset.cpp:1018
uint32_t getEyeCount() const
Gets eye count.
Definition Headset.cpp:948
VkImage getSwapchainImage(size_t swapchainImageIndex) const
Gets the swapchain image for the given index.
Definition Headset.cpp:918
XrFrameState xrFrameState
Definition Headset.h:248
const ApplicationContext * context
Definition Headset.h:254
glm::vec3 playerPosition_
Definition Headset.h:321
XrFrameState getXrFrameState() const
gets the current state of the frame
Definition Headset.cpp:1013
VkRenderPass getRenderPass() const
Gets render pass.
Definition Headset.cpp:959
VkRenderPass vkRenderPass
The vulkan render pass.
Definition Headset.h:259
static std::string sessionStateToString(XrSessionState state)
static XrPosef makeIdentityPose()
Makes identity pose.
static glm::mat4 createProjectionMatrix(XrFovf fov, float nearClip, float farClip)
static glm::mat4 poseToMatrix(const XrPosef &pose)
Converts an XrPosef to a glm::mat4.
static const char * GetXrStructureTypeName(XrStructureType type)
static void endSingleTimeCommands(VkDevice device, VkQueue graphicsQueue, VkCommandPool commandPool, VkCommandBuffer commandBuffer)
static bool findSuitableMemoryType(VkPhysicalDevice physicalDevice, VkMemoryRequirements2 requirements, VkMemoryPropertyFlags properties, uint32_t &out_typeIndex)
static void transitionImageLayout(VkCommandBuffer commandBuffer, VkDevice device, VkQueue graphicsQueue, VkCommandPool commandPool, VkImage image, VkFormat format, VkImageLayout oldLayout, VkImageLayout newLayout, uint32_t layerCount=1)
static void setObjectName(VkDevice device, VulkanObjectType objectHandle, const std::string &name)
static VkCommandBuffer beginSingleTimeCommands(VkDevice device, VkCommandPool commandPool)
Log category system implementation.
constexpr XrViewConfigurationType xrViewType
void cleanup(const ApplicationContext *context)
Definition Headset.cpp:567
Result from waitForXrFrame indicating what the main loop should do.
Definition Headset.h:58
A render target which contains all resources to access the rendered image.
Definition Headset.h:26
void cleanup(const ApplicationContext *context)
Definition Headset.cpp:964
VkFramebuffer framebuffer
Definition Headset.h:29
VkImageView imageView
Definition Headset.h:28