#include <kiba/gpu/command.h>
#include <kiba/gpu/device.h>
b8 gpu_command_encoder_create(gpu_device device,
                              gpu_command_encoder *encoder,
                              struct gpu_command_encoder_descriptor desc) {
    /* Signature reconstructed from the body; parameter order is an assumption. */
    gpu_command_encoder new_encoder;
    if (!gpu_device_resource_command_encoder_create(device, &new_encoder)) {
        KB_ERROR("failed to create command encoder as device resource");
        return false;
    }
    /* Mirror every device texture with a local tracker so the encoder can
     * detect usage transitions and queue barriers lazily. */
    const usize tex_tracker_size = array_size(device->textures);
    new_encoder->texture_trackers =
        array_create(struct gpu_texture_tracker, tex_tracker_size, &device->alloc);
    for (usize i = 0; i < tex_tracker_size; ++i) {
        struct gpu_texture_tracker new_tracker = {
            .texture = device->textures[i].texture,
            .start_use = GPU_TEXTURE_USE_UNINITIALIZED,
            .cur_use = GPU_TEXTURE_USE_UNINITIALIZED,
        };
        array_push(new_encoder->texture_trackers, new_tracker);
    }
    new_encoder->pending_barriers =
        array_create(struct gpu_texture_barrier, tex_tracker_size, &device->alloc);
    /* Same mirroring for buffers; zeroed usage doubles as "not used yet". */
    const usize buf_tracker_size = array_size(device->buffer_trackers);
    new_encoder->buffer_trackers =
        array_create(struct gpu_buffer_tracker, buf_tracker_size, &device->alloc);
    for (usize i = 0; i < buf_tracker_size; ++i) {
        struct gpu_buffer_tracker new_tracker = {
            .buffer = device->buffer_trackers[i].buffer,
            /* remaining members zero-initialized; their explicit initializers
             * are elided in the original listing */
        };
        array_push(new_encoder->buffer_trackers, new_tracker);
    }
    new_encoder->pending_buffer_barriers =
        array_create(struct gpu_buffer_barrier, buf_tracker_size, &device->alloc);
    new_encoder->desc = desc;
    if (!gpu_backend_command_encoder_create(&new_encoder->bce, &device->bd, desc)) {
        KB_ERROR("failed to create backend command encoder"); /* error path reconstructed */
        return false;
    }
    *encoder = new_encoder;
    return true;
}
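/*
 * Usage sketch (hypothetical caller, not part of this file; gpu_cmd_begin and
 * gpu_cmd_end are the reconstructed entry points further below):
 *
 *     gpu_command_encoder enc;
 *     struct gpu_command_encoder_descriptor desc = { .label = "frame" };
 *     if (gpu_command_encoder_create(device, &enc, desc) && gpu_cmd_begin(enc)) {
 *         // record copies / render passes here
 *         gpu_cmd_end(enc); // flushes remaining barriers and submits
 *     }
 */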
void gpu_command_encoder_destroy(gpu_command_encoder encoder) { /* name/signature reconstructed */
    gpu_device_resource_command_encoder_enqueue_destroy(encoder);
}

void gpu_command_encoder_destroy_resources(gpu_command_encoder encoder) { /* name/signature reconstructed */
    gpu_backend_command_encoder_destroy(&encoder->bce, &encoder->device->bd);
    array_destroy(&encoder->buffer_trackers);
    array_destroy(&encoder->texture_trackers);
}
static void gpu_command_encoder_set_texture_usage(gpu_command_encoder encoder,
                                                  gpu_texture texture,
                                                  enum gpu_texture_use new_use) {
    KB_ASSERT(array_size(encoder->texture_trackers) > texture->tracker_id,
              "texture must be created before command encoder was created");
    struct gpu_texture_tracker *tracker = &encoder->texture_trackers[texture->tracker_id];
    if (tracker->start_use == GPU_TEXTURE_USE_UNINITIALIZED) {
        /* First use inside this encoder; submission transitions into it later. */
        tracker->start_use = new_use;
    } else if (tracker->cur_use != GPU_TEXTURE_USE_UNINITIALIZED && tracker->cur_use != new_use) {
        /* Usage changed mid-encoder: queue a transition barrier. */
        struct gpu_texture_barrier barrier = {
            .tex = &texture->bt,
            .format = texture->desc.format,
            .source_use = tracker->cur_use,
            .target_use = new_use,
        };
        KB_ASSERT(array_push_checked(&encoder->pending_barriers, &barrier),
                  "TODO propagate error");
    }
    tracker->cur_use = new_use;
}
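/*
 * Worked example of the tracker state machine above (illustrative; SAMPLED and
 * COLOR_TARGET abbreviate hypothetical GPU_TEXTURE_USE_* enumerators):
 *
 *     set_texture_usage(tex, SAMPLED)       // start_use = cur_use = SAMPLED, no barrier
 *     set_texture_usage(tex, COLOR_TARGET)  // queues SAMPLED -> COLOR_TARGET barrier
 *     set_texture_usage(tex, COLOR_TARGET)  // usage unchanged, no barrier
 *
 * The buffer variant below follows the same scheme, with zero standing in for
 * the uninitialized state.
 */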
static void gpu_command_encoder_set_buffer_usage(gpu_command_encoder encoder,
                                                 gpu_buffer buffer,
                                                 enum gpu_buffer_usage new_usage) {
    KB_ASSERT(array_size(encoder->buffer_trackers) > buffer->tracker_id,
              "buffer must be created before command encoder was created");
    struct gpu_buffer_tracker *tracker = &encoder->buffer_trackers[buffer->tracker_id];
    if (!tracker->start_use) {
        tracker->start_use = new_usage;
    } else if (tracker->cur_use && tracker->cur_use != new_usage) {
        struct gpu_buffer_barrier barrier = {
            .buf = &buffer->bb, /* member name assumed by analogy with the texture barrier */
            .source_usage = tracker->cur_use,
            .target_usage = new_usage,
        };
        KB_ASSERT(array_push_checked(&encoder->pending_buffer_barriers, &barrier),
                  "TODO propagate error");
    }
    tracker->cur_use = new_usage;
}
static void gpu_command_encoder_insert_pending_texture_barriers(gpu_command_encoder encoder) {
    if (array_size(encoder->pending_barriers)) {
        gpu_backend_insert_texture_barriers(&encoder->bce, encoder->pending_barriers);
        array_resize(&encoder->pending_barriers, 0);
    }
}
static void gpu_command_encoder_insert_pending_buffer_barriers(gpu_command_encoder encoder) {
    if (array_size(encoder->pending_buffer_barriers)) {
        gpu_backend_insert_buffer_barriers(&encoder->bce, encoder->pending_buffer_barriers);
        array_resize(&encoder->pending_buffer_barriers, 0);
    }
}
b8 gpu_cmd_begin(gpu_command_encoder encoder) { /* signature reconstructed */
    if (gpu_backend_cmd_begin(&encoder->bce)) {
        gpu_cmd_debug_marker_begin(encoder, encoder->desc.label);
        return true;
    }
    return false; /* control flow after the marker call is elided in the listing */
}

/* (several source lines elided in the original listing) */
void gpu_cmd_copy_buffer_to_buffer(gpu_command_encoder encoder, gpu_buffer src, gpu_buffer dst,
                                   usize src_offset, usize dst_offset, usize size) { /* signature reconstructed */
    gpu_command_encoder_set_buffer_usage(encoder, src, GPU_BUFFER_USAGE_COPY_SRC);
    gpu_command_encoder_set_buffer_usage(encoder, dst, GPU_BUFFER_USAGE_COPY_DST);
    gpu_command_encoder_insert_pending_buffer_barriers(encoder);
    gpu_backend_cmd_copy_buffer_to_buffer(&encoder->bce, &src->bb, &dst->bb, src_offset, dst_offset, size);
}
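/*
 * Usage sketch (hypothetical names): uploading vertex data through a staging
 * buffer inside an open encoder.
 *
 *     gpu_cmd_copy_buffer_to_buffer(enc, staging, vertices, 0, 0, data_size);
 *
 * Both set_buffer_usage calls run before the flush, so a single barrier batch
 * covers the source and destination transitions of the copy.
 */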
b8 gpu_begin_render_pass(gpu_command_encoder encoder,
                         struct gpu_render_pass_descriptor desc) { /* signature reconstructed */
    for (usize i = 0; i < desc.color_attachment_count; ++i) {
        gpu_command_encoder_set_texture_usage(encoder,
                                              desc.color_attachments[i].view->texture,
                                              GPU_TEXTURE_USE_COLOR_TARGET);
    }
    gpu_command_encoder_insert_pending_texture_barriers(encoder);
    return gpu_backend_begin_render_pass(&encoder->bce, &encoder->device->bd, desc);
}
b8 gpu_end_render_pass(gpu_command_encoder encoder) {
    return gpu_backend_end_render_pass(&encoder->bce);
}
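/*
 * Usage sketch (hypothetical; only color_attachment_count and
 * color_attachments are taken from the code above, other descriptor fields
 * are unknown here):
 *
 *     struct gpu_render_pass_descriptor rp = {
 *         .color_attachment_count = 1,
 *         .color_attachments = &attachment, // holds a view of the target texture
 *     };
 *     if (gpu_begin_render_pass(enc, rp)) {
 *         // bind pipeline, draw, ...
 *         gpu_end_render_pass(enc);
 *     }
 */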
b8 gpu_cmd_end(gpu_command_encoder encoder) {
    /* Function name and signature reconstructed; the listing resumes mid-body. */
    gpu_cmd_debug_marker_end(encoder);
    const usize tracker_size = array_size(encoder->texture_trackers);
    KB_ASSERT(tracker_size <= array_size(encoder->device->textures),
              "at least all command buffer textures ({usize}) must still be available in the device ({usize})",
              tracker_size,
              array_size(encoder->device->textures));
    if (tracker_size > 0) {
        /* Pass 1: collect barriers that bring each texture from the state the
         * device last left it in into the state this encoder first used it in.
         * The declared type of init_barriers is assumed. */
        struct gpu_texture_barrier *init_barriers =
            array_create(struct gpu_texture_barrier, tracker_size, &encoder->device->alloc);
        for (usize i = 0; i < tracker_size; ++i) {
            struct gpu_texture_tracker encoder_tracker = encoder->texture_trackers[i];
            struct gpu_texture_tracker device_tracker = encoder->device->textures[i];
            if (device_tracker.texture && encoder_tracker.cur_use != GPU_TEXTURE_USE_UNINITIALIZED
                && encoder_tracker.start_use != device_tracker.cur_use) {
                struct gpu_texture_barrier barrier = {
                    .tex = &device_tracker.texture->bt,
                    .format = device_tracker.texture->desc.format,
                    .source_use = device_tracker.cur_use,
                    .target_use = encoder_tracker.start_use,
                };
                KB_ASSERT(array_push_checked(&init_barriers, &barrier),
                          "TODO propagate error");
                encoder->device->textures[i].cur_use = encoder_tracker.cur_use;
            }
        }
        if (array_size(init_barriers) > 0) {
            /* Record and submit a small one-off backend encoder carrying only
             * the init barriers so they execute before this encoder's work. */
            struct gpu_command_encoder_descriptor init_desc = {
                .label = "CE: texture init",
            };
            gpu_backend_command_encoder init; /* handle type assumed */
            gpu_backend_command_encoder_create(&init, &encoder->device->bd, init_desc);
            gpu_backend_cmd_begin(&init);
            gpu_backend_insert_texture_barriers(&init, init_barriers);
            gpu_backend_cmd_end(&init);
            gpu_backend_command_encoder_submit(&init, &encoder->device->bd);
            gpu_backend_command_encoder_destroy(&init, &encoder->device->bd);
        }
        array_destroy(&init_barriers);
    }
    /* Pass 2: transition textures back to the state the device expects. */
    for (usize i = 0; i < tracker_size; ++i) {
        struct gpu_texture_tracker encoder_tracker = encoder->texture_trackers[i];
        struct gpu_texture_tracker device_tracker = encoder->device->textures[i];
        if (device_tracker.texture && device_tracker.start_use != GPU_TEXTURE_USE_UNINITIALIZED
            && encoder_tracker.cur_use != device_tracker.start_use) {
            gpu_command_encoder_set_texture_usage(encoder, device_tracker.texture, device_tracker.start_use);
        }
    }
    gpu_command_encoder_insert_pending_texture_barriers(encoder);
    gpu_backend_cmd_end(&encoder->bce);
    return gpu_backend_command_encoder_submit(&encoder->bce, &encoder->device->bd);
}
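/*
 * Resulting queue order (illustrative): if this encoder first used texture T
 * as a color target while the device last left T in a different state,
 * submission looks like
 *
 *     [init encoder: device state -> encoder start_use]
 *         -> [this encoder's commands, ending with barriers that restore
 *            each texture's device-side start_use]
 *
 * which keeps the device-side trackers consistent across encoders.
 */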