vulkan: Add upload fastpath
author     Benjamin Otte <otte@redhat.com>
           Wed, 14 Jun 2023 01:21:24 +0000 (03:21 +0200)
committer  Benjamin Otte <otte@redhat.com>
           Wed, 14 Jun 2023 01:34:07 +0000 (03:34 +0200)
If the memory heap that the GPU uses allows CPU access
(which is the case on basically every integrated GPU, including phones),
we can avoid a staging buffer and write directly into the image memory.

Check for this case and do that automatically.
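
In this patch that check is wrapped by gsk_vulkan_memory_can_map(). As a rough
sketch only (not the code in this commit, and the function name is illustrative),
the underlying Vulkan query for whether a memory type is host-visible looks
roughly like this:

static gboolean
memory_type_is_host_visible (VkPhysicalDevice physical_device,
                             uint32_t         memory_type_index)
{
  VkPhysicalDeviceMemoryProperties props;

  /* Query the memory types and heaps exposed by the physical device */
  vkGetPhysicalDeviceMemoryProperties (physical_device, &props);

  /* Host-visible memory can be mapped with vkMapMemory() and written
   * to directly, so no staging buffer is needed */
  return (props.memoryTypes[memory_type_index].propertyFlags &
          VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
}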

Unfortunately we need to change the image tiling we use from
VK_IMAGE_TILING_OPTIMAL to VK_IMAGE_TILING_LINEAR; I haven't found a way
around that yet.
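
With linear tiling the row pitch reported by vkGetImageSubresourceLayout() can
be larger than width * 4, so callers of gsk_vulkan_image_map_memory() must
honor map->stride in both the direct and the staging path. A minimal caller
sketch (the image, uploader, pixels, width and height variables are
illustrative, not part of this commit):

GskVulkanImageMap map;
gsize y;

gsk_vulkan_image_map_memory (image, uploader, &map);

/* Copy row by row: map.stride may be larger than width * 4,
 * especially with VK_IMAGE_TILING_LINEAR */
for (y = 0; y < height; y++)
  memcpy ((guchar *) map.data + y * map.stride,
          pixels + y * width * 4,
          width * 4);

gsk_vulkan_image_unmap_memory (image, uploader, &map);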

gsk/vulkan/gskvulkanimage.c

index 5ffaf0fde65f1a3a33d87e56afefdc05a585ec0b..e88c4a559e5ca8cebf54300a4d96d8188bf42a20 100644
@@ -573,7 +573,7 @@ gsk_vulkan_image_new_for_upload (GskVulkanUploader *uploader,
   self = gsk_vulkan_image_new (uploader->vulkan,
                                width,
                                height,
-                               VK_IMAGE_TILING_OPTIMAL,
+                               VK_IMAGE_TILING_LINEAR,
                                VK_IMAGE_USAGE_TRANSFER_DST_BIT |
                                VK_IMAGE_USAGE_SAMPLED_BIT,
                                VK_IMAGE_LAYOUT_UNDEFINED,
@@ -585,25 +585,56 @@ gsk_vulkan_image_new_for_upload (GskVulkanUploader *uploader,
   return self;
 }
 
-void
-gsk_vulkan_image_map_memory (GskVulkanImage    *self,
-                             GskVulkanUploader *uploader,
-                             GskVulkanImageMap *map)
+static void
+gsk_vulkan_image_map_memory_direct (GskVulkanImage    *self,
+                                    GskVulkanUploader *uploader,
+                                    GskVulkanImageMap *map)
 {
-  gsize buffer_size = self->width * self->height * 4;
+  VkImageSubresource image_res;
+  VkSubresourceLayout image_layout;
 
-  g_assert (self->vk_image_layout == VK_IMAGE_LAYOUT_UNDEFINED ||
-            self->vk_image_layout == VK_IMAGE_LAYOUT_PREINITIALIZED);
+  image_res.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
+  image_res.mipLevel = 0;
+  image_res.arrayLayer = 0;
+
+  vkGetImageSubresourceLayout (gdk_vulkan_context_get_device (self->vulkan),
+                               self->vk_image, &image_res, &image_layout);
+
+  map->staging_buffer = NULL;
+  map->data = gsk_vulkan_memory_map (self->memory) + image_layout.offset;
+  map->stride = image_layout.rowPitch;
+}
+
+static void
+gsk_vulkan_image_unmap_memory_direct (GskVulkanImage    *self,
+                                      GskVulkanUploader *uploader,
+                                      GskVulkanImageMap *map)
+{
+  gsk_vulkan_memory_unmap (self->memory);
+
+  gsk_vulkan_uploader_add_image_barrier (uploader,
+                                         TRUE,
+                                         self,
+                                         VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL,
+                                         VK_ACCESS_SHADER_READ_BIT);
+}
+
+static void
+gsk_vulkan_image_map_memory_indirect (GskVulkanImage    *self,
+                                      GskVulkanUploader *uploader,
+                                      GskVulkanImageMap *map)
+{
+  gsize buffer_size = self->width * self->height * 4;
 
   map->staging_buffer = gsk_vulkan_buffer_new_staging (uploader->vulkan, buffer_size);
   map->data = gsk_vulkan_buffer_map (map->staging_buffer);
   map->stride = self->width * 4;
 }
 
-void
-gsk_vulkan_image_unmap_memory (GskVulkanImage    *self,
-                               GskVulkanUploader *uploader,
-                               GskVulkanImageMap *map)
+static void
+gsk_vulkan_image_unmap_memory_indirect (GskVulkanImage    *self,
+                                        GskVulkanUploader *uploader,
+                                        GskVulkanImageMap *map)
 {
   gsk_vulkan_buffer_unmap (map->staging_buffer);
 
@@ -659,6 +690,31 @@ gsk_vulkan_image_unmap_memory (GskVulkanImage    *self,
                                                         map->staging_buffer);
 }
 
+void
+gsk_vulkan_image_map_memory (GskVulkanImage    *self,
+                             GskVulkanUploader *uploader,
+                             GskVulkanImageMap *map)
+{
+  g_assert (self->vk_image_layout == VK_IMAGE_LAYOUT_UNDEFINED ||
+            self->vk_image_layout == VK_IMAGE_LAYOUT_PREINITIALIZED);
+
+  if (gsk_vulkan_memory_can_map (self->memory, TRUE))
+    gsk_vulkan_image_map_memory_direct (self, uploader, map);
+  else
+    gsk_vulkan_image_map_memory_indirect (self, uploader, map);
+}
+
+void
+gsk_vulkan_image_unmap_memory (GskVulkanImage    *self,
+                               GskVulkanUploader *uploader,
+                               GskVulkanImageMap *map)
+{
+  if (map->staging_buffer)
+    gsk_vulkan_image_unmap_memory_indirect (self, uploader, map);
+  else
+    gsk_vulkan_image_unmap_memory_direct (self, uploader, map);
+}
+
 GskVulkanImage *
 gsk_vulkan_image_new_for_swapchain (GdkVulkanContext *context,
                                     VkImage           image,