kiba-engine
conv.h
#pragma once

#include <kiba/defines.h>
#include <kiba/gpu/enums.h>
#include <vulkan/vulkan.h>

static inline VkAttachmentLoadOp vk_convert_attachment_load_op(enum gpu_attachment_ops op) {
    return KB_FLAGS_ALL_SET(op, GPU_ATTACHMENT_OP_LOAD) ? VK_ATTACHMENT_LOAD_OP_LOAD : VK_ATTACHMENT_LOAD_OP_CLEAR;
}

static inline VkAttachmentStoreOp vk_convert_attachment_store_op(enum gpu_attachment_ops op) {
    return KB_FLAGS_ALL_SET(op, GPU_ATTACHMENT_OP_STORE) ? VK_ATTACHMENT_STORE_OP_STORE
                                                         : VK_ATTACHMENT_STORE_OP_DONT_CARE;
}

static inline VkCompareOp vk_convert_compare_function(enum gpu_compare_func func) {
    switch (func) {
    case GPU_COMPARE_FUNC_NEVER:
        return VK_COMPARE_OP_NEVER;
    case GPU_COMPARE_FUNC_LESS:
        return VK_COMPARE_OP_LESS;
    case GPU_COMPARE_FUNC_EQUAL:
        return VK_COMPARE_OP_EQUAL;
    case GPU_COMPARE_FUNC_LESS_EQUAL:
        return VK_COMPARE_OP_LESS_OR_EQUAL;
    case GPU_COMPARE_FUNC_GREATER:
        return VK_COMPARE_OP_GREATER;
    case GPU_COMPARE_FUNC_NOT_EQUAL:
        return VK_COMPARE_OP_NOT_EQUAL;
    case GPU_COMPARE_FUNC_GREATER_EQUAL:
        return VK_COMPARE_OP_GREATER_OR_EQUAL;
    case GPU_COMPARE_FUNC_ALWAYS:
        return VK_COMPARE_OP_ALWAYS;
    }
    return VK_COMPARE_OP_ALWAYS;
}

static inline VkPrimitiveTopology vk_convert_primitive_topology(enum gpu_primitive_topology topology) {
    switch (topology) {
    case GPU_PIMITIVE_TOPOLOGY_POINT_LIST:
        return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
    case GPU_PIMITIVE_TOPOLOGY_LINE_LIST:
        return VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
    case GPU_PIMITIVE_TOPOLOGY_LINE_STRIP:
        return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
    case GPU_PIMITIVE_TOPOLOGY_TRIANGLE_LIST:
        return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
    case GPU_PIMITIVE_TOPOLOGY_TRIANGLE_STRIP:
        return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
    }
    return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
}

static inline VkFrontFace vk_convert_front_face(enum gpu_front_face front_face) {
    switch (front_face) {
    case GPU_FRONT_FACE_CCW:
        return VK_FRONT_FACE_COUNTER_CLOCKWISE;
    case GPU_FRONT_FACE_CW:
        return VK_FRONT_FACE_CLOCKWISE;
    }
    return VK_FRONT_FACE_COUNTER_CLOCKWISE;
}

static inline VkCullModeFlags vk_convert_cull_mode(enum gpu_cull_mode cull_mode) {
    switch (cull_mode) {
    case GPU_CULL_MODE_NONE:
        return VK_CULL_MODE_NONE;
    case GPU_CULL_MODE_FRONT:
        return VK_CULL_MODE_FRONT_BIT;
    case GPU_CULL_MODE_BACK:
        return VK_CULL_MODE_BACK_BIT;
    }
    return VK_CULL_MODE_NONE;
}

static inline VkPolygonMode vk_convert_polygon_mode(enum gpu_polygon_mode polygon_mode) {
    switch (polygon_mode) {
    case GPU_POLYGON_MODE_FILL:
        return VK_POLYGON_MODE_FILL;
    case GPU_POLYGON_MODE_LINE:
        return VK_POLYGON_MODE_LINE;
    case GPU_POLYGON_MODE_POINT:
        return VK_POLYGON_MODE_POINT;
    }
    return VK_POLYGON_MODE_FILL;
}

static inline VkVertexInputRate vk_convert_step_mode(enum gpu_step_mode step_mode) {
    switch (step_mode) {
    case GPU_STEP_MODE_VERTEX:
        return VK_VERTEX_INPUT_RATE_VERTEX;
    case GPU_STEP_MODE_INSTANCE:
        return VK_VERTEX_INPUT_RATE_INSTANCE;
    }
    return VK_VERTEX_INPUT_RATE_VERTEX;
}

static inline VkFormat vk_convert_vertex_format(enum gpu_vertex_format format) {
    switch (format) {
    case GPU_VERTEX_FLOAT32x1:
        return VK_FORMAT_R32_SFLOAT;
    case GPU_VERTEX_FLOAT32x2:
        return VK_FORMAT_R32G32_SFLOAT;
    case GPU_VERTEX_FLOAT32x3:
        return VK_FORMAT_R32G32B32_SFLOAT;
    case GPU_VERTEX_FLOAT32x4:
        return VK_FORMAT_R32G32B32A32_SFLOAT;
    }
    return VK_FORMAT_UNDEFINED;
}

static inline VkIndexType vk_convert_index_format(enum gpu_index_format format) {
    switch (format) {
    case GPU_INDEX_U16:
        return VK_INDEX_TYPE_UINT16;
    case GPU_INDEX_U32:
        return VK_INDEX_TYPE_UINT32;
    }
    return VK_INDEX_TYPE_UINT16;
}

static inline enum gpu_texture_format vk_convert_vk_surface_format(VkSurfaceFormatKHR format) {
    switch (format.colorSpace) {
    case VK_COLOR_SPACE_EXTENDED_SRGB_LINEAR_EXT:
        switch (format.format) {
        case VK_FORMAT_R16G16B16A16_SFLOAT:
            return GPU_TEXTURE_FORMAT_RGBA16_FLOAT;
        case VK_FORMAT_R16G16B16A16_SNORM:
            return GPU_TEXTURE_FORMAT_RGBA16_SNORM;
        case VK_FORMAT_R16G16B16A16_UNORM:
            return GPU_TEXTURE_FORMAT_RGBA16_UNORM;
        case VK_FORMAT_A2B10G10R10_UNORM_PACK32:
            return GPU_TEXTURE_FORMAT_RGB10A2_UNORM;
        default:
            break;
        }
        break;
    case VK_COLOR_SPACE_SRGB_NONLINEAR_KHR:
        switch (format.format) {
        case VK_FORMAT_B8G8R8A8_UNORM:
            return GPU_TEXTURE_FORMAT_BGRA8_UNORM;
        case VK_FORMAT_B8G8R8A8_SNORM:
            return GPU_TEXTURE_FORMAT_BGRA8_UNORM;
        case VK_FORMAT_R8G8B8A8_SRGB:
            return GPU_TEXTURE_FORMAT_RGBA8_UNORM_SRGB;
        case VK_FORMAT_R8G8B8A8_SNORM:
            return GPU_TEXTURE_FORMAT_RGBA8_SNORM;
        case VK_FORMAT_R8G8B8A8_UNORM:
            return GPU_TEXTURE_FORMAT_RGBA8_UNORM;
        default:
            break;
        }
        break;
    default:
        break;
    }
    KB_ERROR("unsupported surface format (color space: {u32}, format: {u32}), returning GPU_TEXTURE_FORMAT_RGBA8_UNORM "
             "as a fallback",
             format.colorSpace,
             format.format);
    return GPU_TEXTURE_FORMAT_RGBA8_UNORM;
}

static inline VkImageType vk_convert_texture_dimension(enum gpu_texture_dimension dim) {
    switch (dim) {
    case GPU_TEXTURE_DIMENSION_D1:
        return VK_IMAGE_TYPE_1D;
    case GPU_TEXTURE_DIMENSION_D2:
        return VK_IMAGE_TYPE_2D;
    case GPU_TEXTURE_DIMENSION_D3:
        return VK_IMAGE_TYPE_3D;
    }
    return VK_IMAGE_TYPE_2D;
}

static inline VkImageUsageFlagBits vk_convert_texture_use(enum gpu_texture_use usage) {
    VkImageUsageFlagBits ret = 0;
    if (KB_FLAGS_ALL_SET(usage, GPU_TEXTURE_USE_COPY_SRC)) {
        ret |= VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
    }
    if (KB_FLAGS_ALL_SET(usage, GPU_TEXTURE_USE_COPY_DST)) {
        ret |= VK_IMAGE_USAGE_TRANSFER_DST_BIT;
    }
    if (KB_FLAGS_ALL_SET(usage, GPU_TEXTURE_USE_RESOURCE)) {
        ret |= VK_IMAGE_USAGE_SAMPLED_BIT;
    }
    if (KB_FLAGS_ALL_SET(usage, GPU_TEXTURE_USE_COLOR_TARGET)) {
        ret |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
    }
    if (KB_FLAGS_ANY_SET(usage, GPU_TEXTURE_USE_DEPTH_STENCIL_READ | GPU_TEXTURE_USE_DEPTH_STENCIL_WRITE)) {
        ret |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
    }
    if (KB_FLAGS_ANY_SET(usage, GPU_TEXTURE_USE_STORAGE_READ | GPU_TEXTURE_USE_STORAGE_WRITE)) {
        ret |= VK_IMAGE_USAGE_STORAGE_BIT;
    }
    return ret;
}
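
/*
 * Illustrative sketch (not part of the engine's API): one way the converters
 * above could feed a VkImageCreateInfo when creating a 2D texture. Every
 * field value other than the converted ones is an assumption made for this
 * example only.
 *
 *     VkImageCreateInfo image_info = {
 *         .sType         = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
 *         .imageType     = vk_convert_texture_dimension(GPU_TEXTURE_DIMENSION_D2),
 *         .format        = vk_convert_texture_format(GPU_TEXTURE_FORMAT_RGBA8_UNORM), // defined further below
 *         .extent        = {.width = 1024, .height = 1024, .depth = 1},
 *         .mipLevels     = 1,
 *         .arrayLayers   = 1,
 *         .samples       = VK_SAMPLE_COUNT_1_BIT,
 *         .tiling        = VK_IMAGE_TILING_OPTIMAL,
 *         .usage         = vk_convert_texture_use(GPU_TEXTURE_USE_RESOURCE | GPU_TEXTURE_USE_COPY_DST),
 *         .sharingMode   = VK_SHARING_MODE_EXCLUSIVE,
 *         .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
 *     };
 */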

static inline VkImageLayout vk_optimal_image_layout(enum gpu_texture_use use, enum gpu_texture_format format) {
    switch (use) {
    case GPU_TEXTURE_USE_UNINITIALIZED:
        return VK_IMAGE_LAYOUT_UNDEFINED;
    case GPU_TEXTURE_USE_COPY_SRC:
        return VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
    case GPU_TEXTURE_USE_COPY_DST:
        return VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
    case GPU_TEXTURE_USE_COLOR_TARGET:
        return VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
    case GPU_TEXTURE_USE_DEPTH_STENCIL_WRITE:
        return VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
    case GPU_TEXTURE_USE_PRESENT:
        return VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
    default:
        break;
    }
    const b8 is_color = gpu_format_aspect_from_format(format) == GPU_TEXTURE_FORMAT_ASPECT_COLOR;
    if (is_color) {
        if (use == GPU_TEXTURE_USE_RESOURCE) {
            return VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
        }
        return VK_IMAGE_LAYOUT_GENERAL;
    }
    return VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL;
}

static inline void vk_map_texture_use_to_barrier_flags(enum gpu_texture_use use,
                                                       VkPipelineStageFlags *stages,
                                                       VkAccessFlags *access) {
    KB_ASSERT(use != GPU_TEXTURE_USE_UNKNOWN, "texture use must be known");
    if (use == GPU_TEXTURE_USE_UNINITIALIZED || use == GPU_TEXTURE_USE_PRESENT) {
        *stages |= VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
        return;
    }
    const VkPipelineStageFlags shader_stages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
                                               | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
                                               | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
    if (KB_FLAGS_ALL_SET(use, GPU_TEXTURE_USE_COPY_SRC)) {
        *stages |= VK_PIPELINE_STAGE_TRANSFER_BIT;
        *access |= VK_ACCESS_TRANSFER_READ_BIT;
    }
    if (KB_FLAGS_ALL_SET(use, GPU_TEXTURE_USE_COPY_DST)) {
        *stages |= VK_PIPELINE_STAGE_TRANSFER_BIT;
        *access |= VK_ACCESS_TRANSFER_WRITE_BIT;
    }
    if (KB_FLAGS_ALL_SET(use, GPU_TEXTURE_USE_RESOURCE)) {
        *stages |= shader_stages;
        *access |= VK_ACCESS_SHADER_READ_BIT;
    }
    if (KB_FLAGS_ALL_SET(use, GPU_TEXTURE_USE_COLOR_TARGET)) {
        *stages |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
        *access |= VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
    }
    if (KB_FLAGS_ALL_SET(use, GPU_TEXTURE_USE_DEPTH_STENCIL_READ)) {
        *stages |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
        *access |= VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT;
    }
    if (KB_FLAGS_ALL_SET(use, GPU_TEXTURE_USE_DEPTH_STENCIL_WRITE)) {
        *stages |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
        *access |= VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
    }
    if (KB_FLAGS_ALL_SET(use, GPU_TEXTURE_USE_STORAGE_READ)) {
        *stages |= shader_stages;
        *access |= VK_ACCESS_SHADER_READ_BIT;
    }
    if (KB_FLAGS_ALL_SET(use, GPU_TEXTURE_USE_STORAGE_WRITE)) {
        *stages |= shader_stages;
        *access |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
    }
}
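
/*
 * Illustrative sketch (not part of the engine's API): the stage/access
 * mapping above could be used to fill a VkImageMemoryBarrier for a layout
 * transition, e.g. copy destination -> sampled resource. `cmd`, `image`,
 * `range` and `fmt` are assumed to exist in the surrounding code; they are
 * placeholders for this example only.
 *
 *     VkPipelineStageFlags src_stages = 0, dst_stages = 0;
 *     VkImageMemoryBarrier barrier = {
 *         .sType               = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
 *         .oldLayout           = vk_optimal_image_layout(GPU_TEXTURE_USE_COPY_DST, fmt),
 *         .newLayout           = vk_optimal_image_layout(GPU_TEXTURE_USE_RESOURCE, fmt),
 *         .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
 *         .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
 *         .image               = image,
 *         .subresourceRange    = range,
 *     };
 *     vk_map_texture_use_to_barrier_flags(GPU_TEXTURE_USE_COPY_DST, &src_stages, &barrier.srcAccessMask);
 *     vk_map_texture_use_to_barrier_flags(GPU_TEXTURE_USE_RESOURCE, &dst_stages, &barrier.dstAccessMask);
 *     vkCmdPipelineBarrier(cmd, src_stages, dst_stages, 0, 0, NULL, 0, NULL, 1, &barrier);
 */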

static inline VkSampleCountFlagBits vk_convert_sample_count(usize count) {
    switch (count) {
    case 1:
        return VK_SAMPLE_COUNT_1_BIT;
    case 2:
        return VK_SAMPLE_COUNT_2_BIT;
    case 4:
        return VK_SAMPLE_COUNT_4_BIT;
    case 8:
        return VK_SAMPLE_COUNT_8_BIT;
    case 16:
        return VK_SAMPLE_COUNT_16_BIT;
    case 32:
        return VK_SAMPLE_COUNT_32_BIT;
    case 64:
        return VK_SAMPLE_COUNT_64_BIT;
    }
    KB_WARN("received unsupported sample count of {usize}, assuming sample count of 1", count);
    return VK_SAMPLE_COUNT_1_BIT;
}

static inline VkImageAspectFlagBits vk_convert_texture_format_aspect(enum gpu_texture_format_aspect aspects) {
    // TODO check if planes should be supported
    VkImageAspectFlagBits ret = 0;
    if (KB_FLAGS_ANY_SET(aspects, GPU_TEXTURE_FORMAT_ASPECT_COLOR)) {
        ret |= VK_IMAGE_ASPECT_COLOR_BIT;
    }
    if (KB_FLAGS_ANY_SET(aspects, GPU_TEXTURE_FORMAT_ASPECT_STENCIL)) {
        ret |= VK_IMAGE_ASPECT_STENCIL_BIT;
    }
    if (KB_FLAGS_ANY_SET(aspects, GPU_TEXTURE_FORMAT_ASPECT_DEPTH)) {
        ret |= VK_IMAGE_ASPECT_DEPTH_BIT;
    }
    return ret;
}

static inline VkImageViewType vk_convert_texture_view_dimension(enum gpu_texture_view_dimension dim) {
    switch (dim) {
    case GPU_TEXTURE_VIEW_DIMENSION_D1:
        return VK_IMAGE_VIEW_TYPE_1D;
    case GPU_TEXTURE_VIEW_DIMENSION_D2:
        return VK_IMAGE_VIEW_TYPE_2D;
    case GPU_TEXTURE_VIEW_DIMENSION_D2_ARRAY:
        return VK_IMAGE_VIEW_TYPE_2D_ARRAY;
    case GPU_TEXTURE_VIEW_DIMENSION_CUBE:
        return VK_IMAGE_VIEW_TYPE_CUBE;
    case GPU_TEXTURE_VIEW_DIMENSION_CUBE_ARRAY:
        return VK_IMAGE_VIEW_TYPE_CUBE_ARRAY;
    case GPU_TEXTURE_VIEW_DIMENSION_D3:
        return VK_IMAGE_VIEW_TYPE_3D;
    }
    return VK_IMAGE_VIEW_TYPE_2D;
}

static inline VkPresentModeKHR vk_convert_present_mode(enum gpu_present_mode present_mode) {
    switch (present_mode) {
    case GPU_PRESENT_MODE_FIFO:
        return VK_PRESENT_MODE_FIFO_KHR;
    case GPU_PRESENT_MODE_FIFO_RELAXED:
        return VK_PRESENT_MODE_FIFO_RELAXED_KHR;
    case GPU_PRESENT_MODE_IMMEDIATE:
        return VK_PRESENT_MODE_IMMEDIATE_KHR;
    case GPU_PRESENT_MODE_MAILBOX:
        return VK_PRESENT_MODE_MAILBOX_KHR;
    }
    return VK_PRESENT_MODE_FIFO_KHR;
}

static inline VkFormat vk_convert_texture_format(enum gpu_texture_format format) {
    // TODO return better formats depending on the device capabilities
    switch (format) {
    case GPU_TEXTURE_FORMAT_R8_UNORM:
        return VK_FORMAT_R8_UNORM;
    case GPU_TEXTURE_FORMAT_R8_SNORM:
        return VK_FORMAT_R8_SNORM;
    case GPU_TEXTURE_FORMAT_R8_UINT:
        return VK_FORMAT_R8_UINT;
    case GPU_TEXTURE_FORMAT_R8_SINT:
        return VK_FORMAT_R8_SINT;

    case GPU_TEXTURE_FORMAT_R16_UINT:
        return VK_FORMAT_R16_UINT;
    case GPU_TEXTURE_FORMAT_R16_SINT:
        return VK_FORMAT_R16_SINT;
    case GPU_TEXTURE_FORMAT_R16_UNORM:
        return VK_FORMAT_R16_UNORM;
    case GPU_TEXTURE_FORMAT_R16_SNORM:
        return VK_FORMAT_R16_SNORM;
    case GPU_TEXTURE_FORMAT_R16_FLOAT:
        return VK_FORMAT_R16_SFLOAT;
    case GPU_TEXTURE_FORMAT_RG8_UNORM:
        return VK_FORMAT_R8G8_UNORM;
    case GPU_TEXTURE_FORMAT_RG8_SNORM:
        return VK_FORMAT_R8G8_SNORM;
    case GPU_TEXTURE_FORMAT_RG8_UINT:
        return VK_FORMAT_R8G8_UINT;
    case GPU_TEXTURE_FORMAT_RG8_SINT:
        return VK_FORMAT_R8G8_SINT;

    case GPU_TEXTURE_FORMAT_R32_UINT:
        return VK_FORMAT_R32_UINT;
    case GPU_TEXTURE_FORMAT_R32_SINT:
        return VK_FORMAT_R32_SINT;
    case GPU_TEXTURE_FORMAT_R32_FLOAT:
        return VK_FORMAT_R32_SFLOAT;
    case GPU_TEXTURE_FORMAT_RG16_UINT:
        return VK_FORMAT_R16G16_UINT;
    case GPU_TEXTURE_FORMAT_RG16_SINT:
        return VK_FORMAT_R16G16_SINT;
    case GPU_TEXTURE_FORMAT_RG16_UNORM:
        return VK_FORMAT_R16G16_UNORM;
    case GPU_TEXTURE_FORMAT_RG16_SNORM:
        return VK_FORMAT_R16G16_SNORM;
    case GPU_TEXTURE_FORMAT_RG16_FLOAT:
        return VK_FORMAT_R16G16_SFLOAT;
    case GPU_TEXTURE_FORMAT_RGBA8_UNORM:
        return VK_FORMAT_R8G8B8A8_UNORM;
    case GPU_TEXTURE_FORMAT_RGBA8_UNORM_SRGB:
        return VK_FORMAT_R8G8B8A8_SRGB;
    case GPU_TEXTURE_FORMAT_RGBA8_SNORM:
        return VK_FORMAT_R8G8B8A8_SNORM;
    case GPU_TEXTURE_FORMAT_RGBA8_UINT:
        return VK_FORMAT_R8G8B8A8_UINT;
    case GPU_TEXTURE_FORMAT_RGBA8_SINT:
        return VK_FORMAT_R8G8B8A8_SINT;
    case GPU_TEXTURE_FORMAT_BGRA8_UNORM:
        return VK_FORMAT_B8G8R8A8_UNORM;
    case GPU_TEXTURE_FORMAT_BGRA8_UNORM_SRGB:
        return VK_FORMAT_B8G8R8A8_SRGB;

    case GPU_TEXTURE_FORMAT_RGB9E5_UFLOAT:
        return VK_FORMAT_E5B9G9R9_UFLOAT_PACK32;
    case GPU_TEXTURE_FORMAT_RGB10A2_UINT:
        return VK_FORMAT_A2B10G10R10_UINT_PACK32;
    case GPU_TEXTURE_FORMAT_RGB10A2_UNORM:
        return VK_FORMAT_A2B10G10R10_UNORM_PACK32;

    case GPU_TEXTURE_FORMAT_RG32_UINT:
        return VK_FORMAT_R32G32_UINT;
    case GPU_TEXTURE_FORMAT_RG32_SINT:
        return VK_FORMAT_R32G32_SINT;
    case GPU_TEXTURE_FORMAT_RG32_FLOAT:
        return VK_FORMAT_R32G32_SFLOAT;
    case GPU_TEXTURE_FORMAT_RGBA16_UINT:
        return VK_FORMAT_R16G16B16A16_UINT;
    case GPU_TEXTURE_FORMAT_RGBA16_SINT:
        return VK_FORMAT_R16G16B16A16_SINT;
    case GPU_TEXTURE_FORMAT_RGBA16_UNORM:
        return VK_FORMAT_R16G16B16A16_UNORM;
    case GPU_TEXTURE_FORMAT_RGBA16_SNORM:
        return VK_FORMAT_R16G16B16A16_SNORM;
    case GPU_TEXTURE_FORMAT_RGBA16_FLOAT:
        return VK_FORMAT_R16G16B16A16_SFLOAT;

    case GPU_TEXTURE_FORMAT_RGBA32_UINT:
        return VK_FORMAT_R32G32B32A32_UINT;
    case GPU_TEXTURE_FORMAT_RGBA32_SINT:
        return VK_FORMAT_R32G32B32A32_SINT;
    case GPU_TEXTURE_FORMAT_RGBA32_FLOAT:
        return VK_FORMAT_R32G32B32A32_SFLOAT;

    case GPU_TEXTURE_FORMAT_STENCIL8:
        return VK_FORMAT_D32_SFLOAT_S8_UINT; // or VK_FORMAT_S8_UINT or VK_FORMAT_D24_UNORM_S8_UINT
    case GPU_TEXTURE_FORMAT_DEPTH16_UNORM:
        return VK_FORMAT_D16_UNORM;
    case GPU_TEXTURE_FORMAT_DEPTH24_PLUS:
        return VK_FORMAT_D32_SFLOAT; // or VK_FORMAT_X8_D24_UNORM_PACK32
    case GPU_TEXTURE_FORMAT_DEPTH24_PLUS_STENCIL8:
        return VK_FORMAT_D32_SFLOAT_S8_UINT; // or VK_FORMAT_D24_UNORM_S8_UINT
    case GPU_TEXTURE_FORMAT_DEPTH32_FLOAT:
        return VK_FORMAT_D32_SFLOAT;
    case GPU_TEXTURE_FORMAT_DEPTH32_FLOAT_STENCIL8:
        return VK_FORMAT_D32_SFLOAT_S8_UINT;
    }
    return VK_FORMAT_B8G8R8A8_UNORM;
}

static inline void vk_map_buffer_use_to_barrier_flags(enum gpu_buffer_usage use,
                                                      VkPipelineStageFlags *stages,
                                                      VkAccessFlags *access) {
    KB_ASSERT(use, "buffer must have dedicated usage");
    const VkPipelineStageFlags shader_stages = VK_PIPELINE_STAGE_VERTEX_SHADER_BIT
                                               | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT
                                               | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
    if (KB_FLAGS_ALL_SET(use, GPU_BUFFER_USAGE_MAP_READ)) {
        *stages |= VK_PIPELINE_STAGE_HOST_BIT;
        *access |= VK_ACCESS_HOST_READ_BIT;
    }
    if (KB_FLAGS_ALL_SET(use, GPU_BUFFER_USAGE_MAP_WRITE)) {
        *stages |= VK_PIPELINE_STAGE_HOST_BIT;
        *access |= VK_ACCESS_HOST_WRITE_BIT;
    }
    if (KB_FLAGS_ALL_SET(use, GPU_BUFFER_USAGE_COPY_SRC)) {
        *stages |= VK_PIPELINE_STAGE_TRANSFER_BIT;
        *access |= VK_ACCESS_TRANSFER_READ_BIT;
    }
    if (KB_FLAGS_ALL_SET(use, GPU_BUFFER_USAGE_COPY_DST)) {
        *stages |= VK_PIPELINE_STAGE_TRANSFER_BIT;
        *access |= VK_ACCESS_TRANSFER_WRITE_BIT;
    }
    if (KB_FLAGS_ALL_SET(use, GPU_BUFFER_USAGE_UNIFORM)) {
        *stages |= shader_stages;
        *access |= VK_ACCESS_UNIFORM_READ_BIT;
    }
    if (KB_FLAGS_ALL_SET(use, GPU_BUFFER_USAGE_STORAGE)) {
        *stages |= shader_stages;
        *access |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
    }
    if (KB_FLAGS_ALL_SET(use, GPU_BUFFER_USAGE_INDEX)) {
        *stages |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
        *access |= VK_ACCESS_INDEX_READ_BIT;
    }
    if (KB_FLAGS_ALL_SET(use, GPU_BUFFER_USAGE_VERTEX)) {
        *stages |= VK_PIPELINE_STAGE_VERTEX_INPUT_BIT;
        *access |= VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT;
    }
    if (KB_FLAGS_ALL_SET(use, GPU_BUFFER_USAGE_INDIRECT)) {
        *stages |= VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT;
        *access |= VK_ACCESS_INDIRECT_COMMAND_READ_BIT;
    }
}

static inline VkBufferUsageFlagBits vk_convert_buffer_usage(enum gpu_buffer_usage usage) {
    VkBufferUsageFlagBits ret = 0;
    if (KB_FLAGS_ALL_SET(usage, GPU_BUFFER_USAGE_COPY_SRC)) {
        ret |= VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
    }
    if (KB_FLAGS_ALL_SET(usage, GPU_BUFFER_USAGE_COPY_DST)) {
        ret |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
    }
    if (KB_FLAGS_ALL_SET(usage, GPU_BUFFER_USAGE_INDEX)) {
        ret |= VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
    }
    if (KB_FLAGS_ALL_SET(usage, GPU_BUFFER_USAGE_VERTEX)) {
        ret |= VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
    }
    if (KB_FLAGS_ALL_SET(usage, GPU_BUFFER_USAGE_UNIFORM)) {
        ret |= VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT;
    }
    if (KB_FLAGS_ALL_SET(usage, GPU_BUFFER_USAGE_STORAGE)) {
        ret |= VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
    }
    if (KB_FLAGS_ALL_SET(usage, GPU_BUFFER_USAGE_INDIRECT)) {
        ret |= VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
    }
    return ret;
}
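
/*
 * Illustrative sketch (not part of the engine's API): combining the buffer
 * usage conversion above when describing a vertex buffer that is filled via
 * a staging copy. The buffer size is an assumption for this example only.
 *
 *     VkBufferCreateInfo buffer_info = {
 *         .sType       = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
 *         .size        = 64 * 1024,
 *         .usage       = vk_convert_buffer_usage(GPU_BUFFER_USAGE_VERTEX | GPU_BUFFER_USAGE_COPY_DST),
 *         .sharingMode = VK_SHARING_MODE_EXCLUSIVE,
 *     };
 */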