From c74daec6bd12628bab560062c58cb6bd677d2829 Mon Sep 17 00:00:00 2001
From: Noah Metz
Date: Mon, 8 Jan 2024 12:42:59 -0700
Subject: [PATCH] Added UBO for shader matrices

---
 shader_src/basic.vert |   8 +-
 src/main.c            | 199 ++++++++++++++++++++++++++++++++++++++++--
 2 files changed, 199 insertions(+), 8 deletions(-)

diff --git a/shader_src/basic.vert b/shader_src/basic.vert
index 46d581b..7ce15ef 100644
--- a/shader_src/basic.vert
+++ b/shader_src/basic.vert
@@ -1,11 +1,17 @@
 #version 450
 
+layout(binding = 0) uniform UniformBufferObject {
+    mat4 model;
+    mat4 view;
+    mat4 proj;
+} ubo;
+
 layout(location = 0) in vec2 inPosition;
 layout(location = 1) in vec3 inColor;
 
 layout(location = 0) out vec3 fragColor;
 
 void main() {
-    gl_Position = vec4(inPosition, 0.0, 1.0);
+    gl_Position = ubo.proj * ubo.view * ubo.model * vec4(inPosition, 0.0, 1.0);
     fragColor = inColor;
 }
diff --git a/src/main.c b/src/main.c
index 502ce16..a333657 100644
--- a/src/main.c
+++ b/src/main.c
@@ -7,8 +7,8 @@
 
 #define GLM_FORCE_RADIANS
 #define GLM_FORCE_DEPTH_ZERO_TO_ONE
-#include 
-#include 
+#include 
+#include 
 
 #include 
 #include 
@@ -80,11 +80,17 @@ typedef struct VulkanContextStruct {
   VkSemaphore* render_finished_semaphores;
   VkFence* in_flight_fences;
 
+  AllocatedBuffer* uniform_buffers;
+  void** uniform_buffer_ptrs;
+  VkDescriptorPool descriptor_pool;
+  VkDescriptorSet* descriptor_sets;
+
   VkRenderPass render_pass;
 
   VkCommandPool graphics_command_pool;
   VkCommandPool transfer_command_pool;
 
+  VkDescriptorSetLayout triangle_descriptor_set;
   VkPipelineLayout triangle_pipeline_layout;
   VkPipeline triangle_pipeline;
   AllocatedBuffer triangle_vertex_buffer;
@@ -98,6 +104,12 @@ struct Vertex{
   vec3 color;
 };
 
+struct ShaderUBO {
+  mat4 model;
+  mat4 view;
+  mat4 proj;
+};
+
 const struct Vertex vertices[] = {
   {.pos = {-0.5f, -0.5f}, .color = {1.0f, 0.0f, 0.0f}},
   {.pos = { 0.5f, -0.5f}, .color = {0.0f, 1.0f, 0.0f}},
@@ -199,7 +211,7 @@ bool check_validation_layers(const char ** layers, uint32_t num_layers) {
 
   free(available_layers);
   return true;
-};
+}
 
 static VKAPI_ATTR VkBool32 VKAPI_CALL debug_callback(
     VkDebugUtilsMessageSeverityFlagBitsEXT severity,
@@ -214,7 +226,81 @@ static VKAPI_ATTR VkBool32 VKAPI_CALL debug_callback(
   fprintf(stderr, "Validation layer: %s\n", callback_data->pMessage);
 
   return VK_FALSE;
-};
+}
+
+VkDescriptorSet* create_descriptor_sets(VkDevice device, VkDescriptorSetLayout layout, VkDescriptorPool pool, AllocatedBuffer* uniform_buffers, uint32_t count) {
+  VkDescriptorSetLayout* layouts = malloc(sizeof(VkDescriptorSetLayout)*count);
+  if(layouts == 0) {
+    return 0;
+  }
+
+  VkDescriptorSet* sets = malloc(sizeof(VkDescriptorSet)*count);
+  if(sets == 0) {
+    free(layouts);
+    return 0;
+  }
+
+  for(uint32_t i = 0; i < count; i++) {
+    layouts[i] = layout;
+  }
+
+  VkDescriptorSetAllocateInfo alloc_info = {};
+  alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
+  alloc_info.descriptorPool = pool;
+  alloc_info.descriptorSetCount = count;
+  alloc_info.pSetLayouts = layouts;
+
+  VkResult result = vkAllocateDescriptorSets(device, &alloc_info, sets);
+  free(layouts);
+  if(result != VK_SUCCESS) {
+    free(sets);
+    return 0;
+  }
+
+  for(uint32_t i = 0; i < count; i++) {
+    VkDescriptorBufferInfo buffer_info = {};
+    buffer_info.buffer = uniform_buffers[i].buffer;
+    buffer_info.offset = 0;
+    buffer_info.range = sizeof(struct ShaderUBO);
+
+    VkWriteDescriptorSet descriptor_write = {};
+    descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
+    descriptor_write.dstSet = sets[i];
+    descriptor_write.dstBinding = 0;
+    descriptor_write.dstArrayElement = 0;
+    descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+    descriptor_write.descriptorCount = 1;
+    descriptor_write.pBufferInfo = &buffer_info;
+    descriptor_write.pImageInfo = 0;
+    descriptor_write.pTexelBufferView = 0;
+
+    vkUpdateDescriptorSets(device, 1, &descriptor_write, 0, 0);
+  }
+
+  return sets;
+}
+
+VkDescriptorSetLayout create_descriptor_set_layout(VkDevice device) {
+  VkDescriptorSetLayoutBinding layout_binding = {};
+  layout_binding.binding = 0;
+  layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+  layout_binding.descriptorCount = 1;
+  layout_binding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
+  layout_binding.pImmutableSamplers = 0;
+
+  VkDescriptorSetLayoutCreateInfo layout_info = {};
+  layout_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
+  layout_info.bindingCount = 1;
+  layout_info.pBindings = &layout_binding;
+
+  VkDescriptorSetLayout layout;
+  VkResult result = vkCreateDescriptorSetLayout(device, &layout_info, 0, &layout);
+  if(result != VK_SUCCESS) {
+    return VK_NULL_HANDLE;
+  }
+
+  return layout;
+}
 
 VkSurfaceKHR create_surface_khr(VkInstance instance, GLFWwindow* window) {
   VkSurfaceKHR surface;
@@ -364,6 +450,26 @@ VkDebugUtilsMessengerEXT create_debug_messenger(VkInstance instance) {
   return debug_messenger;
 }
 
+VkDescriptorPool create_descriptor_pool(VkDevice device, uint32_t size) {
+  VkDescriptorPoolSize pool_size = {};
+  pool_size.type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+  pool_size.descriptorCount = size;
+
+  VkDescriptorPoolCreateInfo pool_info = {};
+  pool_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
+  pool_info.poolSizeCount = 1;
+  pool_info.pPoolSizes = &pool_size;
+  pool_info.maxSets = size;
+
+  VkDescriptorPool pool;
+  VkResult result = vkCreateDescriptorPool(device, &pool_info, 0, &pool);
+  if(result != VK_SUCCESS) {
+    return VK_NULL_HANDLE;
+  }
+
+  return pool;
+}
+
 VkInstance create_instance() {
   VkInstance instance;
 
@@ -849,6 +955,27 @@ void deallocate_buffer(VkDevice device, AllocatedBuffer buffer) {
   vkFreeMemory(device, buffer.memory, 0);
 };
 
+AllocatedBuffer* allocate_buffers(VkPhysicalDevice physical_device, VkDevice device, VkDeviceSize size, VkBufferUsageFlags usage, VkMemoryPropertyFlags properties, uint32_t count) {
+  AllocatedBuffer* buffers = malloc(sizeof(AllocatedBuffer)*count);
+  if(buffers == 0) {
+    return 0;
+  }
+
+  for(uint32_t i = 0; i < count; i++) {
+    buffers[i] = allocate_buffer(physical_device, device, size, usage, properties);
+    if(buffers[i].memory == VK_NULL_HANDLE) {
+      for(uint32_t j = 0; j < i; j++) {
+        deallocate_buffer(device, buffers[j]);
+      }
+
+      free(buffers);
+      return 0;
+    }
+  }
+
+  return buffers;
+}
+
 VkResult command_copy_buffers(VkDevice device, VkCommandPool transfer_pool, VkQueue transfer_queue, VkBuffer source, VkBuffer dest, VkDeviceSize size) {
   VkCommandBufferAllocateInfo command_info = {};
   command_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
@@ -1087,7 +1214,7 @@ VkCommandPool create_command_pool(VkDevice device, uint32_t queue_family) {
   return command_pool;
 }
 
-VkResult record_command_buffer_triangle(VkCommandBuffer command_buffer, uint32_t image_index, VkRenderPass render_pass, VkFramebuffer* framebuffers, VkExtent2D extent, VkPipeline graphics_pipeline, VkBuffer vertex_buffer, VkBuffer index_buffer, uint32_t num_vertices) {
+VkResult record_command_buffer_triangle(VkCommandBuffer command_buffer, uint32_t image_index, VkRenderPass render_pass, VkFramebuffer* framebuffers, VkExtent2D extent, VkPipeline graphics_pipeline, VkPipelineLayout pipeline_layout, VkDescriptorSet descriptor_set, VkBuffer vertex_buffer, VkBuffer index_buffer, uint32_t num_vertices) {
   VkCommandBufferBeginInfo begin_info = {};
   begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
   begin_info.flags = 0;
@@ -1132,6 +1259,7 @@ VkResult record_command_buffer_triangle(VkCommandBuffer command_buffer, uint32_t
   scissor.extent = extent;
   vkCmdSetScissor(command_buffer, 0, 1, &scissor);
 
+  vkCmdBindDescriptorSets(command_buffer, VK_PIPELINE_BIND_POINT_GRAPHICS, pipeline_layout, 0, 1, &descriptor_set, 0, 0);
   vkCmdDrawIndexed(command_buffer, num_vertices, 1, 0, 0, 0);
 
   vkCmdEndRenderPass(command_buffer);
@@ -1355,7 +1483,51 @@ VulkanContext* init_vulkan(GLFWwindow* window, uint32_t max_frames_in_flight) {
     context->in_flight_fences = if_fences;
   }
 
-  VkPipelineLayout triangle_pipeline_layout = create_pipeline_layout(device, 0, 0, 0, 0);
+  AllocatedBuffer* uniform_buffers = allocate_buffers(context->physical_device, context->device, sizeof(struct ShaderUBO), VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, max_frames_in_flight);
+  if(uniform_buffers == 0) {
+    fprintf(stderr, "failed to create vulkan uniform buffers\n");
+    return 0;
+  } else {
+    context->uniform_buffers = uniform_buffers;
+    context->uniform_buffer_ptrs = malloc(sizeof(void*)*max_frames_in_flight);
+    if(context->uniform_buffer_ptrs == 0) {
+      fprintf(stderr, "failed to allocate cpu pointers for uniform buffers\n");
+      return 0;
+    }
+    for(uint32_t i = 0; i < max_frames_in_flight; i++) {
+      VkResult result = vkMapMemory(context->device, context->uniform_buffers[i].memory, 0, sizeof(struct ShaderUBO), 0, &context->uniform_buffer_ptrs[i]);
+      if(result != VK_SUCCESS) {
+        fprintf(stderr, "failed to map cpu pointer for uniform buffer\n");
+        return 0;
+      }
+    }
+  }
+
+  VkDescriptorPool descriptor_pool = create_descriptor_pool(context->device, max_frames_in_flight);
+  if(descriptor_pool == VK_NULL_HANDLE) {
+    fprintf(stderr, "failed to create vulkan descriptor pool\n");
+    return 0;
+  } else {
+    context->descriptor_pool = descriptor_pool;
+  }
+
+  VkDescriptorSetLayout triangle_descriptor_set = create_descriptor_set_layout(device);
+  if(triangle_descriptor_set == VK_NULL_HANDLE) {
+    fprintf(stderr, "failed to create vulkan descriptor set layout\n");
+    return 0;
+  } else {
+    context->triangle_descriptor_set = triangle_descriptor_set;
+  }
+
+  VkDescriptorSet* descriptor_sets = create_descriptor_sets(context->device, context->triangle_descriptor_set, context->descriptor_pool, context->uniform_buffers, max_frames_in_flight);
+  if(descriptor_sets == 0) {
+    fprintf(stderr, "failed to create vulkan descriptor sets\n");
+    return 0;
+  } else {
+    context->descriptor_sets = descriptor_sets;
+  }
+
+  VkPipelineLayout triangle_pipeline_layout = create_pipeline_layout(device, 1, &context->triangle_descriptor_set, 0, 0);
   if(triangle_pipeline_layout == VK_NULL_HANDLE) {
     fprintf(stderr, "failed to create vulkan pipeline layout\n");
     return 0;
@@ -1388,7 +1560,20 @@ VulkanContext* init_vulkan(GLFWwindow* window, uint32_t max_frames_in_flight) {
   return context;
 }
 
+VkResult update_ubo(void** buffers, uint32_t frame_index) {
+  struct ShaderUBO ubo = {};
+  glm_mat4_identity(ubo.proj);
+  glm_mat4_identity(ubo.view);
+  glm_mat4_identity(ubo.model);
+
+  memcpy(buffers[frame_index], (void*)&ubo, sizeof(ubo));
+
+  return VK_SUCCESS;
+}
+
 VkResult draw_frame(VulkanContext* context) {
+  update_ubo(context->uniform_buffer_ptrs, context->current_frame);
+
   VkResult result;
   result = vkWaitForFences(context->device, 1, &context->in_flight_fences[context->current_frame], VK_TRUE, UINT64_MAX);
   if(result != VK_SUCCESS) {
     return result;
   }
@@ -1411,7 +1596,7 @@ VkResult draw_frame(VulkanContext* context) {
     return result;
   }
 
-  result = record_command_buffer_triangle(context->swapchain_command_buffers[context->current_frame], image_index, context->render_pass, context->swapchain_framebuffers, context->swapchain_extent, context->triangle_pipeline, context->triangle_vertex_buffer.buffer, context->triangle_index_buffer.buffer, 6);
+  result = record_command_buffer_triangle(context->swapchain_command_buffers[context->current_frame], image_index, context->render_pass, context->swapchain_framebuffers, context->swapchain_extent, context->triangle_pipeline, context->triangle_pipeline_layout, context->descriptor_sets[context->current_frame], context->triangle_vertex_buffer.buffer, context->triangle_index_buffer.buffer, 6);
   if(result != VK_SUCCESS) {
     return result;
   }
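
Follow-up sketch, not part of the patch above: with the UBO plumbing in place, update_ubo can fill the three matrices with real transforms instead of identities. The version below is only an illustration and makes assumptions beyond what the patch contains: it adds a VkExtent2D parameter (the caller would pass context->swapchain_extent) for the aspect ratio, uses GLFW's glfwGetTime() to animate the model matrix, and uses cglm's glm_rotate/glm_lookat/glm_perspective helpers.

VkResult update_ubo(void** buffers, uint32_t frame_index, VkExtent2D extent) {
  struct ShaderUBO ubo = {};

  /* Spin the model around the Z axis over time (glfwGetTime() assumed available). */
  glm_mat4_identity(ubo.model);
  glm_rotate(ubo.model, (float)glfwGetTime() * glm_rad(90.0f), (vec3){0.0f, 0.0f, 1.0f});

  /* Camera at (2, 2, 2) looking at the origin, with Z as the up axis. */
  glm_lookat((vec3){2.0f, 2.0f, 2.0f}, (vec3){0.0f, 0.0f, 0.0f}, (vec3){0.0f, 0.0f, 1.0f}, ubo.view);

  /* Perspective projection; flip Y because Vulkan's clip-space Y points down. */
  glm_perspective(glm_rad(45.0f), (float)extent.width / (float)extent.height, 0.1f, 10.0f, ubo.proj);
  ubo.proj[1][1] *= -1.0f;

  /* The uniform buffers are host-coherent, so a plain memcpy is enough. */
  memcpy(buffers[frame_index], &ubo, sizeof(ubo));

  return VK_SUCCESS;
}

If the signature is extended this way, the call in draw_frame would become update_ubo(context->uniform_buffer_ptrs, context->current_frame, context->swapchain_extent).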