common.c (33786B)
/* See LICENSE for license details. */
#include <stdarg.h>
#include <stdio.h>

#include <glslang/Include/glslang_c_interface.h>
#include <glslang/Public/resource_limits_c.h>

#include "GLFW/glfw3.h"
VkResult glfwCreateWindowSurface(VkInstance instance, void *window,
                                 const VkAllocationCallbacks *allocator, VkSurfaceKHR *surface);

#define OUTPUT_FRAME_RATE    60.0f
#define OUTPUT_TIME_SECONDS  10.0f
#define CYCLE_T_UPDATE_SPEED 0.1f
#define BG_CLEAR_COLOUR      (v4){{0.12, 0.1, 0.1, 1}}

global f32 dt_for_frame;

read_only global const char *required_vulkan_extensions[] = {
	"VK_KHR_swapchain",
};

#define vulkan_info(format, ...)  info("[vulkan] "format,  __VA_ARGS__)
#define glslang_info(format, ...) info("[glslang] "format, __VA_ARGS__)
function void
info(const char *format, ...)
{
	va_list ap;
	va_start(ap, format);
	vfprintf(stdout, format, ap);
	va_end(ap);
}

function f32
get_frame_time_step(ViewerContext *ctx)
{
	f32 result = 0;
	/* NOTE(rnp): if we are outputting frames do a constant time step */
	if (ctx->output_frames_count > 0) {
		result = 1.0f / (OUTPUT_FRAME_RATE * OUTPUT_TIME_SECONDS * CYCLE_T_UPDATE_SPEED);
	} else {
		f64 now = glfwGetTime();
		result = (f32)(now - ctx->last_time);
		ctx->last_time = (f32)now;
	}
	ctx->do_update |= result != 0;
	return result;
}

function void
key_callback(GLFWwindow *window, s32 key, s32 scancode, s32 action, s32 modifiers)
{
	ViewerContext *ctx = glfwGetWindowUserPointer(window);
	if (key == GLFW_KEY_ESCAPE && action == GLFW_PRESS)
		ctx->should_exit = 1;
}

function void
fb_callback(GLFWwindow *w, s32 width, s32 height)
{
	ViewerContext *v = glfwGetWindowUserPointer(w);
	v->vulkan.swap_chain.framebuffer_resize = 1;
	v->window_size = (sv2){{width, height}};
}

function u32
vulkan_shader_kind_to_glslang_shader_kind(u32 kind)
{
	u32 result = ctz_u32(kind);
	return result;
}

#define glslang_log(a, ...) glslang_log_(a, arg_list(str8, __VA_ARGS__))
function void
glslang_log_(Arena arena, str8 *items, uz count)
{
	Stream sb = arena_stream(arena);
	stream_append_str8s_(&sb, items, count);
	str8 log = str8_trim_trailing(stream_to_str8(&sb), '\n');
	glslang_info("%.*s\n", (s32)log.len, log.data);
}

function str8
glsl_to_spirv(OS *os, Arena *arena, u32 kind, str8 shader_text, str8 name)
{
	/* NOTE(rnp): glslang's garbage c interface doesn't expose internal usage of strings with length */
	assert(shader_text.data[shader_text.len] == 0);

	glslang_input_t input = {
		.language = GLSLANG_SOURCE_GLSL,
		.stage = kind,
		.client = GLSLANG_CLIENT_VULKAN,
		.client_version = GLSLANG_TARGET_VULKAN_1_4,
		.target_language = GLSLANG_TARGET_SPV,
		.target_language_version = GLSLANG_TARGET_SPV_1_6,
		.code = (c8 *)shader_text.data,
		.default_version = 100,
		.default_profile = GLSLANG_NO_PROFILE,
		.force_default_version_and_profile = 0,
		.forward_compatible = 0,
		.messages = GLSLANG_MSG_DEFAULT_BIT,
		.resource = glslang_default_resource(),
	};
	glslang_shader_t *shader = glslang_shader_create(&input);

	str8 error = {0};
	if (glslang_shader_preprocess(shader, &input)) {
		if (!glslang_shader_parse(shader, &input))
			error = str8("parsing failed");
	} else {
		error = str8("preprocessing failed");
	}

	if (error.len) {
		glslang_log(*arena, name, str8(": "), error, str8("\n"),
		            c_str_to_str8((c8 *)glslang_shader_get_info_log(shader)),
		            c_str_to_str8((c8 *)glslang_shader_get_info_debug_log(shader)));
		glslang_shader_delete(shader);
		shader = 0;
	}

	str8 result = {0};
	if (shader) {
		glslang_program_t *program = glslang_program_create();
		glslang_program_add_shader(program, shader);
		s32 messages = GLSLANG_MSG_DEBUG_INFO_BIT|GLSLANG_MSG_SPV_RULES_BIT|GLSLANG_MSG_VULKAN_RULES_BIT;
		if (glslang_program_link(program, messages)) {
			glslang_spv_options_t options = {
				.validate = 1,
				.generate_debug_info = 1,
				//.disable_optimizer = 1,
			};

			glslang_program_add_source_text(program, kind, (c8 *)shader_text.data, shader_text.len);
			glslang_program_SPIRV_generate_with_options(program, kind, &options);

			u32 words = glslang_program_SPIRV_get_size(program);
			result.data = (u8 *)push_array(arena, u32, words);
			result.len = words * sizeof(u32);
			glslang_program_SPIRV_get(program, (u32 *)result.data);

			str8 spirv_msg = c_str_to_str8((c8 *)glslang_program_SPIRV_get_messages(program));
			if (spirv_msg.len) glslang_log(*arena, name, str8(": spirv info: "), spirv_msg);
		} else {
			glslang_log(*arena, name, str8(": shader linking failed\n"),
			            c_str_to_str8((c8 *)glslang_program_get_info_log(program)),
			            c_str_to_str8((c8 *)glslang_program_get_info_debug_log(program)));
		}
		glslang_shader_delete(shader);
		glslang_program_delete(program);
	}

	return result;
}

function VkShaderModule
compile_vulkan_shader(VkDevice device, OS *os, Arena arena, u32 kind, str8 shader, str8 name)
{
	VkShaderModule result = 0;

	str8 spirv = glsl_to_spirv(os, &arena, vulkan_shader_kind_to_glslang_shader_kind(kind), shader, name);

	VkShaderModuleCreateInfo create_info = {
		.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,
		.codeSize = (uz)spirv.len,
		.pCode = (u32 *)spirv.data,
	};
	if (spirv.len > 0) vkCreateShaderModule(device, &create_info, 0, &result);

	return result;
}

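/* NOTE: compiles each requested shader stage to SPIR-V and (re)builds the graphics
 * pipeline. The pipeline layout is replaced up front; the pipeline itself is only
 * replaced when every stage compiled, after waiting on the in-flight command buffer
 * fences. The temporary shader modules are always destroyed before returning. */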
function void
vulkan_pipeline_from_shader_text(VulkanPipeline *vp, VkDevice device, OS *os, Arena arena,
                                 str8 *shader_texts, u32 *shader_kinds, s32 count, str8 name)
{
	VkPipelineShaderStageCreateInfo *infos = push_array(&arena, typeof(*infos), count);

	b32 valid = 1;
	for (s32 i = 0; i < count; i++) {
		infos[i].sType  = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
		infos[i].stage  = shader_kinds[i];
		infos[i].module = compile_vulkan_shader(device, os, arena, shader_kinds[i], shader_texts[i], name);
		infos[i].pName  = "main";
		valid &= infos[i].module != 0;
	}

	/* TODO(rnp): only useful for window drawing not for compute only pipeline */
	VkDynamicState dynamic_states[] = {
		VK_DYNAMIC_STATE_VIEWPORT,
		VK_DYNAMIC_STATE_SCISSOR,
	};
	VkPipelineDynamicStateCreateInfo dynamic_state_info = {
		.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO,
		.dynamicStateCount = countof(dynamic_states),
		.pDynamicStates = dynamic_states,
	};

	/* TODO(rnp): this doesn't apply to compute only pipelines */
	VkPipelineVertexInputStateCreateInfo vertex_input_info = {
		.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO,
	};

	VkPipelineInputAssemblyStateCreateInfo input_assembly_info = {
		.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO,
		.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
	};

	VkPipelineViewportStateCreateInfo viewport_info = {
		.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO,
		.viewportCount = 1,
		.scissorCount = 1,
	};

	VkPipelineRasterizationStateCreateInfo rasterization_info = {
		.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO,
		.polygonMode = VK_POLYGON_MODE_FILL,
		.lineWidth = 1.0f,
		.cullMode = VK_CULL_MODE_BACK_BIT,
		.frontFace = VK_FRONT_FACE_CLOCKWISE,
	};

	/* NOTE(rnp): probably won't use if we aren't doing any 3D rendering */
	VkPipelineMultisampleStateCreateInfo multisampling_info = {
		.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO,
		.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT,
	};

	u32 colour_mask = VK_COLOR_COMPONENT_R_BIT|VK_COLOR_COMPONENT_G_BIT|VK_COLOR_COMPONENT_B_BIT|VK_COLOR_COMPONENT_A_BIT;
	VkPipelineColorBlendAttachmentState colour_attachment = {
		.colorWriteMask = colour_mask,
		.blendEnable = 1,
		.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA,
		.dstColorBlendFactor = VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
		.colorBlendOp = VK_BLEND_OP_ADD,
		.srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE,
		.dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO,
		.alphaBlendOp = VK_BLEND_OP_ADD,
	};

	VkPipelineColorBlendStateCreateInfo colour_blend_info = {
		.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO,
		.logicOp = VK_LOGIC_OP_COPY,
		.attachmentCount = 1,
		.pAttachments = &colour_attachment,
	};

	VkPipelineLayoutCreateInfo pipeline_layout_info = {
		.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
	};

	/* TODO(rnp): multiple pipelines */
	vkDestroyPipelineLayout(device, vp->pipeline_layout, 0);
	vkCreatePipelineLayout(device, &pipeline_layout_info, 0, &vp->pipeline_layout);

	VkGraphicsPipelineCreateInfo pipeline_info = {
		.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO,
		.stageCount = count,
		.pStages = infos,
		.pVertexInputState = &vertex_input_info,
		.pInputAssemblyState = &input_assembly_info,
		.pViewportState = &viewport_info,
		.pRasterizationState = &rasterization_info,
		.pMultisampleState = &multisampling_info,
		.pDepthStencilState = 0,
		.pColorBlendState = &colour_blend_info,
		.pDynamicState = &dynamic_state_info,
		.layout = vp->pipeline_layout,
		.renderPass = vp->render.render_pass,
		.subpass = 0,
	};

	if (valid) {
		/* TODO(rnp): have a set of default shaders for when compilation fails.
		 * Vulkan has no concept of a 0 shader which does nothing. Visual indication
		 * of brokeness is desirable for debugging */

		/* NOTE(rnp): this could be fancier to avoid it but for now we are okay
		 * with a stall when shaders are reloading */
		switch (vp->kind) {
		case VulkanPipelineKind_Render:{
			vkWaitForFences(device, countof(vp->render.command_buffer_fences),
			                vp->render.command_buffer_fences, 1, U64_MAX);
		}break;
		case VulkanPipelineKind_Compute:{
			vkWaitForFences(device, countof(vp->compute.command_buffer_fences),
			                vp->compute.command_buffer_fences, 1, U64_MAX);
		}break;
		InvalidDefaultCase;
		}

		vkDestroyPipeline(device, vp->pipeline, 0);
		vkCreateGraphicsPipelines(device, 0, 1, &pipeline_info, 0, &vp->pipeline);
		Stream sb = arena_stream(arena);
		stream_append_str8s(&sb, str8("loaded: "), name, str8("\n"));
		os_write_file(os->error_handle, stream_to_str8(&sb));
	}

	for (s32 i = 0; i < count; i++)
		vkDestroyShaderModule(device, infos[i].module, 0);
}

typedef struct ShaderReloadContext ShaderReloadContext;
struct ShaderReloadContext {
	VulkanPipeline *pipeline;
	VkDevice device;
	VkShaderStageFlagBits vulkan_kind;
	// TODO(rnp): probably shader kind

	str8 path;
	str8 name;
	str8 header;

	ShaderReloadContext *next;
};

function str8
shader_text_with_header(ShaderReloadContext *ctx, OS *os, Arena *arena)
{
	Stream sb = arena_stream(*arena);
	stream_append_str8s(&sb, str8("#version 460 core\n\n"), ctx->header);
	// TODO(rnp): custom behaviour per shader kind */

	//stream_append_str8(&sb, str8("\n#line 1\n"));

	str8 result = arena_stream_commit(arena, &sb);
	if (ctx->path.len) {
		str8 file = os_read_whole_file(arena, (c8 *)ctx->path.data);
		assert(file.data == result.data + result.len);
		result.len += file.len;
	}

	/* NOTE(rnp): glslang requires 0 terminated shader string */
	arena_commit(arena, 1);
	result.data[result.len] = 0;

	return result;
}

function FILE_WATCH_CALLBACK_FN(reload_shader)
{
	ShaderReloadContext *src = (typeof(src))user_data;

	s32 shader_count = 1;
	ShaderReloadContext *link = src->next;
	while (link != src) { shader_count++; link = link->next; }

	str8 *shader_texts = push_array(&arena, str8, shader_count);
	u32 *shader_kinds = push_array(&arena, u32, shader_count);

	s32 index = 0;
	do {
		shader_texts[index] = shader_text_with_header(link, os, &arena);
		shader_kinds[index] = link->vulkan_kind;
		index++;
		link = link->next;
	} while (link != src);

	vulkan_pipeline_from_shader_text(src->pipeline, src->device, os, arena, shader_texts,
	                                 shader_kinds, shader_count, src->name);

	return 1;
}

#if 0
function void
check_for_validation_layers(Arena arena)
{
	u32 layer_count;
	vkEnumerateInstanceLayerProperties(&layer_count, 0);

	VkLayerProperties *layers = push_array(&arena, typeof(*layers), layer_count);
	vkEnumerateInstanceLayerProperties(&layer_count, layers);

	printf("---------------------------------\n");
	printf("supported validation layers: %u\n", layer_count);
	for (u32 i = 0; i < layer_count; i++) {
		printf("%s\n", layers[i].layerName);
		printf(" %s\n", layers[i].description);
	}
	printf("---------------------------------\n");
}
#endif

struct VulkanSurfaceInfo {
	VkSurfaceCapabilitiesKHR capabilities;
	VkSurfaceFormatKHR *formats;
	VkPresentModeKHR *present_modes;
	u32 formats_count;
	u32 present_modes_count;
};

function void
fill_vulkan_surface_info(VkPhysicalDevice device, VkSurfaceKHR surface, struct VulkanSurfaceInfo *si, Arena *arena)
{
	vkGetPhysicalDeviceSurfaceCapabilitiesKHR(device, surface, &si->capabilities);

	vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &si->formats_count, 0);
	if (si->formats_count) {
		si->formats = push_array(arena, typeof(*si->formats), si->formats_count);
		vkGetPhysicalDeviceSurfaceFormatsKHR(device, surface, &si->formats_count, si->formats);
	}

	vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &si->present_modes_count, 0);
	if (si->present_modes_count) {
		si->present_modes = push_array(arena, typeof(*si->present_modes), si->present_modes_count);
		vkGetPhysicalDeviceSurfacePresentModesKHR(device, surface, &si->present_modes_count, si->present_modes);
	}
}

/* TODO(rnp): rewrite this to return a list of queues (da). need a presentation queue,
 * transfer queue, compute queue, graphics queue. If only one queue exists we must handle
 * that case */
function b32
device_is_valid(VkPhysicalDevice device, VkSurfaceKHR surface, struct VulkanSurfaceInfo *si, Arena *arena)
{
	b32 result = 0;

	u32 queue_family_count;
	vkGetPhysicalDeviceQueueFamilyProperties(device, &queue_family_count, 0);

	VkQueueFamilyProperties *queues = push_array(arena, typeof(*queues), queue_family_count);
	vkGetPhysicalDeviceQueueFamilyProperties(device, &queue_family_count, queues);

	/* TODO(rnp): this potentially needs to be returned */
	s32 presentation_index = -1;

	b32 has_graphics = 0, has_compute = 0, has_transfer = 0;
	for (u32 i = 0; i < queue_family_count; i++) {
		has_graphics |= queues[i].queueFlags & VK_QUEUE_GRAPHICS_BIT;
		has_compute  |= queues[i].queueFlags & VK_QUEUE_COMPUTE_BIT;
		has_transfer |= queues[i].queueFlags & VK_QUEUE_TRANSFER_BIT;

		u32 presentation_supported = 0;
		vkGetPhysicalDeviceSurfaceSupportKHR(device, i, surface, &presentation_supported);

		if (i == 0) assert(presentation_supported); // TODO(rnp): this is just a kludge until multi-queue

		if (presentation_supported)
			presentation_index = (s32)i;
	}
	result = has_graphics && has_compute && has_transfer && (presentation_index != -1);

	////////////////////////////////////////////////
	// NOTE(rnp): required device extension support
	if (result) {
		u32 extension_count;
		vkEnumerateDeviceExtensionProperties(device, 0, &extension_count, 0);
		VkExtensionProperties *device_extensions = push_array(arena, VkExtensionProperties, extension_count);
		vkEnumerateDeviceExtensionProperties(device, 0, &extension_count, device_extensions);

		for (u32 i = 0; i < countof(required_vulkan_extensions); i++) {
			b32 found = 0;
			for (u32 extension = 0; extension < extension_count; extension++) {
				if (c_str_equal(required_vulkan_extensions[i], device_extensions[extension].extensionName)) {
					found = 1;
					break;
				}
			}
			if (!found) { result = 0; break; }
		}

		if (result) {
			fill_vulkan_surface_info(device, surface, si, arena);
			result = si->formats_count != 0 && si->present_modes_count != 0;
		}
	}

	return result;
}

function s32
rate_graphics_device(VkPhysicalDevice device, VkSurfaceKHR surface, Arena arena)
{
	s32 result = -1;

	struct VulkanSurfaceInfo surface_info;
	if (device_is_valid(device, surface, &surface_info, &arena)) {
		VkPhysicalDeviceProperties dp;
		vkGetPhysicalDeviceProperties(device, &dp);

		result = 0;
		if (dp.deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU)
			result += 1000;
	}

	return result;
}

function VkPhysicalDevice
find_best_graphics_device(VulkanContext *vctx, Arena arena)
{
	u32 device_count;
	vkEnumeratePhysicalDevices(vctx->handle, &device_count, 0);

	VkPhysicalDevice *devices = push_array(&arena, typeof(*devices), device_count);
	vkEnumeratePhysicalDevices(vctx->handle, &device_count, devices);

	s32 best_score = -1;
	s32 best_index = -1;
	for (u32 i = 0; i < device_count; i++) {
		s32 score = rate_graphics_device(devices[i], vctx->swap_chain.surface, arena);
		if (score > best_score) {
			best_score = score;
			best_index = (s32)i;
		}
	}

	VkPhysicalDevice result = best_index >= 0 ? devices[best_index] : 0;
	if (result) {
		VkPhysicalDeviceProperties dp;
		vkGetPhysicalDeviceProperties(result, &dp);

		vulkan_info("selecting device: %s\n", dp.deviceName);
	} else {
		os_fatal(str8("failed to find a suitable graphics device\n"));
	}

	return result;
}

function VkExtent2D
swap_chain_extent_for_window(GLFWwindow *w, VkSurfaceCapabilitiesKHR *si)
{
	VkExtent2D result = si->currentExtent;
	if (result.width == U32_MAX)
		glfwGetFramebufferSize(w, (s32 *)&result.width, (s32 *)&result.height);

	result.width  = CLAMP(result.width,  si->minImageExtent.width,  si->maxImageExtent.width);
	result.height = CLAMP(result.height, si->minImageExtent.height, si->maxImageExtent.height);

	return result;
}

function void
swap_chain_from_existing(VulkanSwapChain *sc, VkDevice device, VkRenderPass render_pass, VkExtent2D extent,
                         VkSurfaceTransformFlagBitsKHR transform, u32 requested_images, Arena *arena)
{
	VkSwapchainCreateInfoKHR create_info = {
		.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
		.surface = sc->surface,
		.minImageCount = requested_images,
		.imageFormat = sc->surface_format.format,
		.imageColorSpace = sc->surface_format.colorSpace,
		.imageExtent = extent,
		.imageArrayLayers = 1,
		.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
		.preTransform = transform,
		.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
		.clipped = 1,
		.oldSwapchain = sc->swap_chain,
		/* NOTE(rnp): guaranteed available and sufficient for this application (normal v-sync) */
		.presentMode = VK_PRESENT_MODE_FIFO_KHR,
	};

	if (sc->exclusive_queue) {
		create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
		create_info.queueFamilyIndexCount = sc->queue_index;
		create_info.pQueueFamilyIndices = 0;
	} else {
		InvalidCodePath;
	}

	sc->surface_extent = extent;

	VkSwapchainKHR swap_chain;
	/* TODO(rnp): custom allocator? */
	vkCreateSwapchainKHR(device, &create_info, 0, &swap_chain);

	vkDeviceWaitIdle(device);
	vkDestroySwapchainKHR(device, sc->swap_chain, 0);
	sc->swap_chain = swap_chain;

	u32 image_count;
	vkGetSwapchainImagesKHR(device, sc->swap_chain, &image_count, 0);

	/* NOTE(rnp): just assume this doesn't change at runtime when the swap chain is recreated */
	if (sc->image_count == 0) {
		sc->image_count = image_count;
		sc->images = push_array(arena, typeof(*sc->images), image_count);
		sc->image_views = push_array(arena, typeof(*sc->image_views), image_count);
		sc->framebuffers = push_array(arena, typeof(*sc->framebuffers), image_count);
		sc->render_complete_semaphores = push_array(arena, VkSemaphore, image_count);
	} else {
		assert(sc->image_count == image_count);
		for (u32 i = 0; i < sc->image_count; i++) {
			vkDestroyFramebuffer(device, sc->framebuffers[i], 0);
			vkDestroyImageView(device, sc->image_views[i], 0);
			sc->image_views[i] = 0;
			sc->framebuffers[i] = 0;
		}
	}

	vkGetSwapchainImagesKHR(device, sc->swap_chain, &image_count, sc->images);

	VkImageViewCreateInfo view_info = {
		.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
		.viewType = VK_IMAGE_VIEW_TYPE_2D,
		.format = sc->surface_format.format,
		.components = {
			.r = VK_COMPONENT_SWIZZLE_IDENTITY,
			.g = VK_COMPONENT_SWIZZLE_IDENTITY,
			.b = VK_COMPONENT_SWIZZLE_IDENTITY,
			.a = VK_COMPONENT_SWIZZLE_IDENTITY,
		},
		.subresourceRange = {
			.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
			.baseMipLevel = 0,
			.levelCount = 1,
			.baseArrayLayer = 0,
			.layerCount = 1,
		},
	};

	VkFramebufferCreateInfo fb_info = {
		.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
		.renderPass = render_pass,
		.attachmentCount = 1,
		.width = sc->surface_extent.width,
		.height = sc->surface_extent.height,
		.layers = 1,
	};

	for (u32 i = 0; i < sc->image_count; i++) {
		view_info.image = sc->images[i];
		vkCreateImageView(device, &view_info, 0, sc->image_views + i);

		fb_info.pAttachments = sc->image_views + i;
		vkCreateFramebuffer(device, &fb_info, 0, sc->framebuffers + i);
	}
}

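/* NOTE: one-time startup: window and Vulkan instance/surface/device creation, render pass
 * and swap chain setup, command buffer/semaphore/fence allocation, and registration of the
 * shader hot-reload file watch for the render pipeline. */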
function void
init_viewer(ViewerContext *ctx)
{
	ctx->window_size = (sv2){.w = 640, .h = 640};

	if (!glfwInit()) os_fatal(str8("failed to start glfw\n"));

	glfwWindowHint(GLFW_CLIENT_API, GLFW_NO_API);

	ctx->window = glfwCreateWindow(ctx->window_size.w, ctx->window_size.h, "Camera CNN", 0, 0);
	if (!ctx->window) os_fatal(str8("failed to open window\n"));

	VulkanContext *v = &ctx->vulkan;
	VulkanSwapChain *sc = &v->swap_chain;

	VkApplicationInfo app_info = {
		.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
		.pApplicationName = "Camera CNN",
		.applicationVersion = VK_MAKE_API_VERSION(0, 1, 0, 0),
		.pEngineName = "No Engine",
		.engineVersion = VK_MAKE_API_VERSION(0, 4, 0, 0),
		.apiVersion = VK_MAKE_API_VERSION(0, 4, 0, 0),
	};

	u32 glfwExtensionCount = 0;
	const char **glfwExtensions = glfwGetRequiredInstanceExtensions(&glfwExtensionCount);

	/* TODO(rnp): debug only, and check for these before enabling */
	//check_for_validation_layers(ctx->arena);
	const char *validation_layers[] = {
		"VK_LAYER_KHRONOS_validation",
	};
	VkInstanceCreateInfo instance_create_info = {
		.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
		.pApplicationInfo = &app_info,
		.enabledExtensionCount = glfwExtensionCount,
		.ppEnabledExtensionNames = glfwExtensions,
		.ppEnabledLayerNames = validation_layers,
		DEBUG_DECL(.enabledLayerCount = countof(validation_layers))
	};
	vkCreateInstance(&instance_create_info, 0, &v->handle);

	glfwCreateWindowSurface(v->handle, ctx->window, 0, &sc->surface);

	v->physical_device = find_best_graphics_device(v, ctx->arena);

	/* TODO(rnp): for now just use queue 0; later we will need multiple queues for
	 * uploading, compute, and display */
	u32 queue_family_index = 0;
	VkDeviceQueueCreateInfo queue_create_info = {
		.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
		.queueFamilyIndex = queue_family_index,
		.queueCount = 1,
		.pQueuePriorities = (f32 []){1.0f},
	};

	VkPhysicalDeviceFeatures device_features = {0};
	VkDeviceCreateInfo device_create_info = {
		.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
		.pQueueCreateInfos = &queue_create_info,
		.queueCreateInfoCount = 1,
		.pEnabledFeatures = &device_features,
		.ppEnabledExtensionNames = required_vulkan_extensions,
		.enabledExtensionCount = countof(required_vulkan_extensions),
		/* TODO(rnp): same as instance_create_info */
		.ppEnabledLayerNames = validation_layers,
		.enabledLayerCount = countof(validation_layers),
	};
	vkCreateDevice(v->physical_device, &device_create_info, 0, &v->device);

	/* TODO(rnp): same as above, later we need to grab the correct queue */
	vkGetDeviceQueue(v->device, 0, 0, &v->queue);

	VulkanPipeline *rp = &v->render_pipeline;
	rp->kind = VulkanPipelineKind_Render;

	{
		Arena tmp = ctx->arena;
		struct VulkanSurfaceInfo surface_info;
		fill_vulkan_surface_info(v->physical_device, sc->surface, &surface_info, &tmp);
		sc->surface_format = surface_info.formats[0];
		for (u32 i = 0; i < surface_info.formats_count; i++) {
			VkSurfaceFormatKHR f = surface_info.formats[i];
			if (f.format == VK_FORMAT_B8G8R8A8_SRGB && f.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR) {
				sc->surface_format = f;
				break;
			}
		}

		VkAttachmentDescription colour_attachment = {
			.format = sc->surface_format.format,
			.samples = VK_SAMPLE_COUNT_1_BIT,
			.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
			.storeOp = VK_ATTACHMENT_STORE_OP_STORE,
			.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
			.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
			.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
			.finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
		};

		VkAttachmentReference colour_attachment_reference = {
			.attachment = 0,
			.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
		};

		VkSubpassDescription subpass = {
			.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
			.colorAttachmentCount = 1,
			.pColorAttachments = &colour_attachment_reference,
		};

		VkSubpassDependency dependency = {
			.srcSubpass = VK_SUBPASS_EXTERNAL,
			.dstSubpass = 0,
			.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
			.srcAccessMask = 0,
			.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
			.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
		};

		VkRenderPassCreateInfo render_pass_info = {
			.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
			.attachmentCount = 1,
			.pAttachments = &colour_attachment,
			.subpassCount = 1,
			.pSubpasses = &subpass,
			.dependencyCount = 1,
			.pDependencies = &dependency,
		};

		vkCreateRenderPass(v->device, &render_pass_info, 0, &rp->render.render_pass);

		VkExtent2D extent = swap_chain_extent_for_window(ctx->window, &surface_info.capabilities);

		sc->queue_index = 0;
		sc->exclusive_queue = 1;

		/* TODO(rnp): this could probably be higher */
		u32 image_count = surface_info.capabilities.minImageCount + 1;
		if (surface_info.capabilities.maxImageCount > 0 &&
		    image_count > surface_info.capabilities.maxImageCount)
		{
			image_count = surface_info.capabilities.maxImageCount;
		}

		swap_chain_from_existing(&v->swap_chain, v->device, rp->render.render_pass, extent,
		                         surface_info.capabilities.currentTransform, image_count, &ctx->arena);
	}

	VkCommandPoolCreateInfo command_pool_info = {
		.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
		.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
		.queueFamilyIndex = queue_family_index,
	};

	vkCreateCommandPool(v->device, &command_pool_info, 0, &v->command_pool);

	VkCommandBufferAllocateInfo command_buffer_info = {
		.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
		.commandPool = v->command_pool,
		.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
		.commandBufferCount = countof(rp->render.command_buffers),
	};
	vkAllocateCommandBuffers(v->device, &command_buffer_info, rp->render.command_buffers);

	VkSemaphoreCreateInfo semaphore_info = {.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO};
	VkFenceCreateInfo fence_info = {
		.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO,
		.flags = VK_FENCE_CREATE_SIGNALED_BIT,
	};

	for (u32 i = 0; i < sc->image_count; i++)
		vkCreateSemaphore(v->device, &semaphore_info, 0, sc->render_complete_semaphores + i);

	for EachElement(rp->render.command_buffer_fences, it) {
		vkCreateSemaphore(v->device, &semaphore_info, 0, sc->image_available_semaphores + it);
		vkCreateFence(v->device, &fence_info, 0, rp->render.command_buffer_fences + it);
	}

	ShaderReloadContext *render = push_struct(&ctx->arena, typeof(*render));
	render->pipeline = rp;
	render->device = v->device;
	render->path = str8("render.frag.glsl");
	render->name = str8("render (frag)");
	render->vulkan_kind = VK_SHADER_STAGE_FRAGMENT_BIT;
	render->header = str8(""
		"layout(location = 0) in vec3 colour;\n"
		"layout(location = 0) out vec4 out_colour;\n\n"
		"");

	render->next = push_struct(&ctx->arena, typeof(*render));
	render->next->vulkan_kind = VK_SHADER_STAGE_VERTEX_BIT;
	render->next->next = render;
	render->next->name = str8("render (vert)");
	render->next->header = str8(""
		"layout(location = 0) out vec3 f_colour;\n\n"
		"vec2 positions[3] = {\n"
		"\tvec2(0.0, -0.5),\n"
		"\tvec2(0.5, 0.5),\n"
		"\tvec2(-0.5, 0.5)\n"
		"};\n"
		"vec3 colours[3] = {\n"
		"\tvec3(1.0, 0.0, 0.0),\n"
		"\tvec3(0.0, 1.0, 0.0),\n"
		"\tvec3(0.0, 0.0, 1.0)\n"
		"};\n"
		"\n"
		"void main()\n"
		"{\n"
		"\tf_colour = colours[gl_VertexIndex];\n"
		"\tgl_Position = vec4(positions[gl_VertexIndex], 0.0, 1.0);\n"
		"}\n"
		"");
	reload_shader(&ctx->os, render->path, (sptr)render, ctx->arena);
	os_add_file_watch(&ctx->os, &ctx->arena, render->path, reload_shader, (sptr)render);

	glfwSetWindowUserPointer(ctx->window, ctx);
	glfwSetKeyCallback(ctx->window, key_callback);
	glfwSetFramebufferSizeCallback(ctx->window, fb_callback);

#if 0
	glfwSetScrollCallback(ctx->window, scroll_callback);
#endif
}

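/* NOTE: begin_frame()/end_frame() bracket one rendered frame: wait on and reset this
 * frame's fence, acquire a swap chain image, record the render pass into the per-frame
 * command buffer, then submit and present. end_frame() also recreates the swap chain
 * when the surface is out of date, suboptimal, or the framebuffer was resized. */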
function void
begin_frame(VulkanContext *v)
{
	VulkanSwapChain *sc = &v->swap_chain;
	VulkanPipeline *rp = &v->render_pipeline;

	u32 index = sc->current_frame_index;
	vkWaitForFences(v->device, 1, rp->render.command_buffer_fences + index, 0, U64_MAX);
	vkResetFences(v->device, 1, rp->render.command_buffer_fences + index);
	vkAcquireNextImageKHR(v->device, sc->swap_chain, U64_MAX,
	                      sc->image_available_semaphores[index], 0,
	                      &sc->framebuffer_index);

	/* TODO(rnp): random_uniform to scale speed */
	local_persist f32 h_cycle_t;
	h_cycle_t += CYCLE_T_UPDATE_SPEED * dt_for_frame;
	if (h_cycle_t > 1.0f) h_cycle_t -= 1.0f;
	v4 clear_colour = hsv_to_rgb((v4){{h_cycle_t, 0.7f, 0.5f, 1.0f}});

	VkCommandBufferBeginInfo begin_info = {.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO};

	VkRenderPassBeginInfo render_pass_info = {
		.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
		.renderPass = rp->render.render_pass,
		.framebuffer = sc->framebuffers[sc->framebuffer_index],
		.renderArea = {
			.extent = sc->surface_extent,
		},
		.clearValueCount = 1,
		.pClearValues = (VkClearValue *)clear_colour.E,
	};

	VkRect2D scissor = {.extent = sc->surface_extent};
	VkViewport viewport = {
		.x = 0.0f,
		.y = 0.0f,
		.width = (f32)sc->surface_extent.width,
		.height = (f32)sc->surface_extent.height,
		.minDepth = 0.0f,
		.maxDepth = 1.0f,
	};

	vkResetCommandBuffer(rp->render.command_buffers[index], 0);
	vkBeginCommandBuffer(rp->render.command_buffers[index], &begin_info);
	vkCmdBeginRenderPass(rp->render.command_buffers[index], &render_pass_info, VK_SUBPASS_CONTENTS_INLINE);
	vkCmdBindPipeline(rp->render.command_buffers[index], VK_PIPELINE_BIND_POINT_GRAPHICS, rp->pipeline);
	vkCmdSetViewport(rp->render.command_buffers[index], 0, 1, &viewport);
	vkCmdSetScissor(rp->render.command_buffers[index], 0, 1, &scissor);
}

function void
end_frame(VulkanContext *v, GLFWwindow *window, Arena arena)
{
	VulkanSwapChain *sc = &v->swap_chain;
	VulkanPipeline *rp = &v->render_pipeline;

	u32 index = sc->current_frame_index;
	VkPipelineStageFlags wait_stages[] = {VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT};
	VkSubmitInfo submit_info = {
		.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
		.signalSemaphoreCount = 1,
		.pSignalSemaphores = sc->render_complete_semaphores + sc->framebuffer_index,
		.waitSemaphoreCount = 1,
		.pWaitSemaphores = sc->image_available_semaphores + index,
		.pWaitDstStageMask = wait_stages,
		.commandBufferCount = 1,
		.pCommandBuffers = rp->render.command_buffers + index,
	};

	VkPresentInfoKHR present_info = {
		.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
		.waitSemaphoreCount = 1,
		.pWaitSemaphores = sc->render_complete_semaphores + sc->framebuffer_index,
		.swapchainCount = 1,
		.pSwapchains = &sc->swap_chain,
		.pImageIndices = &sc->framebuffer_index,
	};

	vkCmdEndRenderPass(rp->render.command_buffers[index]);
	vkEndCommandBuffer(rp->render.command_buffers[index]);
	vkQueueSubmit(v->queue, 1, &submit_info, rp->render.command_buffer_fences[index]);
	VkResult pres = vkQueuePresentKHR(v->queue, &present_info);
	if (pres == VK_ERROR_OUT_OF_DATE_KHR || pres == VK_SUBOPTIMAL_KHR || sc->framebuffer_resize) {
		sc->framebuffer_resize = 0;
		struct VulkanSurfaceInfo surface_info;
		fill_vulkan_surface_info(v->physical_device, sc->surface, &surface_info, &arena);
		/* TODO(rnp): handle surface format change */

		VkExtent2D extent = swap_chain_extent_for_window(window, &surface_info.capabilities);
		swap_chain_from_existing(&v->swap_chain, v->device, rp->render.render_pass, extent,
		                         surface_info.capabilities.currentTransform, sc->image_count, &arena);
	}

	sc->current_frame_index = (sc->current_frame_index + 1) % MAX_RENDER_FRAMES_IN_FLIGHT;
}

function void
viewer_frame_step(ViewerContext *ctx, f32 dt)
{
	ctx->should_exit |= (b32)glfwWindowShouldClose(ctx->window);
	dt_for_frame = dt;

	VulkanPipeline *rp = &ctx->vulkan.render_pipeline;
	begin_frame(&ctx->vulkan);
	vkCmdDraw(rp->render.command_buffers[ctx->vulkan.swap_chain.current_frame_index], 3, 1, 0, 0);
	end_frame(&ctx->vulkan, ctx->window, ctx->arena);
}
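
/* Usage sketch (assumption: the actual entry point lives in the platform layer and is
 * not part of this file); shows how the functions above are intended to compose:
 *
 *     ViewerContext ctx = {0};
 *     // ... set up ctx.arena and ctx.os, then:
 *     init_viewer(&ctx);
 *     while (!ctx.should_exit) {
 *         glfwPollEvents();
 *         viewer_frame_step(&ctx, get_frame_time_step(&ctx));
 *     }
 */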