/* kiba-engine — renderer/vulkan/image.c: Vulkan image and image-view helpers. */
1 #include <kiba/renderer/vulkan/image.h>
2 
3 #include <kiba/renderer/vulkan/buffer.h>
4 #include <kiba/renderer/vulkan/command_buffer.h>
5 
6 b8 vulkan_image_create(vulkan_context *context,
7  u32 width,
8  u32 height,
9  VkFormat format,
10  VkImageTiling tiling,
11  VkImageUsageFlags usage,
12  VkMemoryPropertyFlags properties,
13  vulkan_image *image) {
14  VkExtent3D image_extent = {
15  .width = width,
16  .height = height,
17  .depth = 1,
18  };
19  VkImageCreateInfo image_info = {
20  .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
21  .imageType = VK_IMAGE_TYPE_2D,
22  .extent = image_extent,
23  .mipLevels = 1,
24  .arrayLayers = 1,
25  .format = format,
26  .tiling = tiling,
27  .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
28  .usage = usage,
29  .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
30  .samples = VK_SAMPLE_COUNT_1_BIT,
31  .flags = 0,
32  };
33  VK_CALL_B8(vkCreateImage(context->device.logical, &image_info, &context->alloc.vulkan_callbacks, &image->image));
34 
35  VkMemoryRequirements memory_requirements;
36  vkGetImageMemoryRequirements(context->device.logical, image->image, &memory_requirements);
37 
38  u32 memory_type = vulkan_get_memory_type(context, memory_requirements.memoryTypeBits, properties);
39  KB_ASSERT(memory_type != U32_MAX,
40  "memory type must be available for the vulkan_image memory"); // TODO should have fallback handling
41  VkMemoryAllocateInfo alloc_info = {
42  .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
43  .allocationSize = memory_requirements.size,
44  .memoryTypeIndex = memory_type,
45  };
46  VK_CALL_B8(
47  vkAllocateMemory(context->device.logical, &alloc_info, &context->alloc.vulkan_callbacks, &image->memory));
48  VK_CALL_B8(vkBindImageMemory(context->device.logical, image->image, image->memory, 0));
49  return true;
50 }
51 
52 VkFormat vulkan_image_fitting_format(vulkan_context *context,
53  VkFormat *candidates,
54  usize n,
55  VkImageTiling tiling,
56  VkFormatFeatureFlags features) {
57  KB_ASSERT(n > 0, "must provide at least one candidate");
58  for (usize i = 0; i < n; ++i) {
59  VkFormat format = candidates[i];
60  VkFormatProperties properties;
61  vkGetPhysicalDeviceFormatProperties(context->device.physical, format, &properties);
62  if (tiling == VK_IMAGE_TILING_OPTIMAL && KB_FLAGS_ALL_SET(properties.optimalTilingFeatures, features)) {
63  return format;
64  }
65  if (tiling == VK_IMAGE_TILING_LINEAR && KB_FLAGS_ALL_SET(properties.linearTilingFeatures, features)) {
66  return format;
67  }
68  return format;
69  }
70  return VK_FORMAT_MAX_ENUM;
71 }
72 
73 b8 vulkan_image_transition_layout(vulkan_context *context,
74  vulkan_image *image,
75  VkFormat format,
76  VkImageLayout src,
77  VkImageLayout dst) {
78  VkCommandBuffer command_buffer;
79  if (!vulkan_command_buffer_start_single_time_command(context, &command_buffer)) {
80  return false;
81  }
82 
83  VkImageSubresourceRange subresource_range = {
84  .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
85  .baseMipLevel = 0,
86  .levelCount = 1,
87  .baseArrayLayer = 0,
88  .layerCount = 1,
89  };
90  VkImageMemoryBarrier barrier = {
91  .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
92  .oldLayout = src,
93  .newLayout = dst,
94  .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
95  .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED, // these two could be set to transfer queue ownership
96  .image = image->image,
97  .subresourceRange = subresource_range,
98  };
99 
100  VkPipelineStageFlags src_stage;
101  VkPipelineStageFlags dst_stage;
102 
103  if (src == VK_IMAGE_LAYOUT_UNDEFINED && dst == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
104  barrier.srcAccessMask = 0;
105  barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
106  src_stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
107  dst_stage = VK_PIPELINE_STAGE_TRANSFER_BIT;
108  } else if (src == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL && dst == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
109  barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
110  barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
111  src_stage = VK_PIPELINE_STAGE_TRANSFER_BIT;
112  dst_stage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
113  } else if (src == VK_IMAGE_LAYOUT_UNDEFINED && dst == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
114  barrier.srcAccessMask = 0;
115  barrier.dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
116  src_stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
117  dst_stage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
118  } else if (src == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR && dst == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
119  barrier.srcAccessMask = 0;
120  barrier.dstAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
121  src_stage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
122  dst_stage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
123  } else if (src == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL && dst == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR) {
124  barrier.srcAccessMask = VK_ACCESS_SHADER_WRITE_BIT;
125  barrier.dstAccessMask = 0;
126  src_stage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
127  dst_stage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
128  } else {
129  KB_ASSERT(false, "unsupported combination of src({u32}) and dst({u32}) layout", src, dst);
130  }
131 
132  vkCmdPipelineBarrier(command_buffer, src_stage, dst_stage, 0, 0, KB_NULL, 0, KB_NULL, 1, &barrier);
133  UNUSED(format); // TODO for now, revisit in depth buffer section
134 
135  return vulkan_command_buffer_end_single_time_command(context, &command_buffer);
136 }
137 
138 b8 vulkan_image_copy_from_buffer(vulkan_context *context,
139  vulkan_buffer *buffer,
140  vulkan_image *image,
141  u32 width,
142  u32 height) {
143  VkCommandBuffer command_buffer;
144  if (!vulkan_command_buffer_start_single_time_command(context, &command_buffer)) {
145  return false;
146  }
147 
148  VkImageSubresourceLayers subresource = {
149  .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
150  .mipLevel = 0,
151  .baseArrayLayer = 0,
152  .layerCount = 1,
153  };
154  VkOffset3D image_offset = {0, 0, 0};
155  VkExtent3D image_extent = {
156  .width = width,
157  .height = height,
158  .depth = 1,
159  };
160  VkBufferImageCopy region = {
161  .bufferOffset = 0,
162  .bufferRowLength = 0,
163  .bufferImageHeight = 0,
164  .imageSubresource = subresource,
165  .imageOffset = image_offset,
166  .imageExtent = image_extent,
167  };
168  vkCmdCopyBufferToImage(command_buffer,
169  buffer->buffer,
170  image->image,
171  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
172  1,
173  &region);
174 
175  return vulkan_command_buffer_end_single_time_command(context, &command_buffer);
176 }
177 
178 void vulkan_image_destroy(vulkan_context *context, vulkan_image *image) {
179  vkDestroyImage(context->device.logical, image->image, &context->alloc.vulkan_callbacks);
180  vkFreeMemory(context->device.logical, image->memory, &context->alloc.vulkan_callbacks);
181 }
182 
183 b8 vulkan_image_view_create(vulkan_context *context,
184  VkImage image,
185  VkFormat format,
186  VkImageAspectFlags aspect_flags,
187  VkImageView *view) {
188  VkImageSubresourceRange subresource_range = {
189  .aspectMask = aspect_flags,
190  .baseMipLevel = 0,
191  .levelCount = 1,
192  .baseArrayLayer = 0,
193  .layerCount = 1,
194  };
195  VkImageViewCreateInfo view_info = {
196  .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
197  .image = image,
198  .viewType = VK_IMAGE_VIEW_TYPE_2D,
199  .format = format,
200  .subresourceRange = subresource_range,
201  };
202  VK_CALL_B8(vkCreateImageView(context->device.logical, &view_info, &context->alloc.vulkan_callbacks, view));
203  return true;
204 }
205 
206 void vulkan_image_view_destroy(vulkan_context *context, VkImageView *view) {
207  vkDestroyImageView(context->device.logical, *view, &context->alloc.vulkan_callbacks);
208 }
/*
 * Referenced helper macros (defined elsewhere in the engine):
 *   UNUSED(x)                      — mark parameter as unused        (defines.h:21)
 *   KB_FLAGS_ALL_SET(value, flags) — check if all flags are set      (defines.h:62)
 *   KB_NULL                        — value of an invalid ptr (nullptr) (defines.h:18)
 *   KB_ASSERT(expr, ...)           — runtime assertion with logging  (log.h:133)
 */