Vulkan: Use Vulkan Memory Allocator

This commit is contained in:
Connor McLaughlin 2022-09-26 20:44:23 +10:00
parent 56293e4d8f
commit 9d27f7095f
30 changed files with 603 additions and 1349 deletions

View file

@ -234,10 +234,6 @@ if(ENABLE_VULKAN)
vulkan/shader_cache.h vulkan/shader_cache.h
vulkan/shader_compiler.cpp vulkan/shader_compiler.cpp
vulkan/shader_compiler.h vulkan/shader_compiler.h
vulkan/staging_buffer.cpp
vulkan/staging_buffer.h
vulkan/staging_texture.cpp
vulkan/staging_texture.h
vulkan/stream_buffer.cpp vulkan/stream_buffer.cpp
vulkan/stream_buffer.h vulkan/stream_buffer.h
vulkan/swap_chain.cpp vulkan/swap_chain.cpp

View file

@ -92,12 +92,6 @@
<ClInclude Include="vulkan\shader_compiler.h"> <ClInclude Include="vulkan\shader_compiler.h">
<ExcludedFromBuild Condition="'$(Platform)'=='ARM64'">true</ExcludedFromBuild> <ExcludedFromBuild Condition="'$(Platform)'=='ARM64'">true</ExcludedFromBuild>
</ClInclude> </ClInclude>
<ClInclude Include="vulkan\staging_buffer.h">
<ExcludedFromBuild Condition="'$(Platform)'=='ARM64'">true</ExcludedFromBuild>
</ClInclude>
<ClInclude Include="vulkan\staging_texture.h">
<ExcludedFromBuild Condition="'$(Platform)'=='ARM64'">true</ExcludedFromBuild>
</ClInclude>
<ClInclude Include="vulkan\stream_buffer.h"> <ClInclude Include="vulkan\stream_buffer.h">
<ExcludedFromBuild Condition="'$(Platform)'=='ARM64'">true</ExcludedFromBuild> <ExcludedFromBuild Condition="'$(Platform)'=='ARM64'">true</ExcludedFromBuild>
</ClInclude> </ClInclude>
@ -180,12 +174,6 @@
<ClCompile Include="vulkan\shader_compiler.cpp"> <ClCompile Include="vulkan\shader_compiler.cpp">
<ExcludedFromBuild Condition="'$(Platform)'=='ARM64'">true</ExcludedFromBuild> <ExcludedFromBuild Condition="'$(Platform)'=='ARM64'">true</ExcludedFromBuild>
</ClCompile> </ClCompile>
<ClCompile Include="vulkan\staging_buffer.cpp">
<ExcludedFromBuild Condition="'$(Platform)'=='ARM64'">true</ExcludedFromBuild>
</ClCompile>
<ClCompile Include="vulkan\staging_texture.cpp">
<ExcludedFromBuild Condition="'$(Platform)'=='ARM64'">true</ExcludedFromBuild>
</ClCompile>
<ClCompile Include="vulkan\stream_buffer.cpp"> <ClCompile Include="vulkan\stream_buffer.cpp">
<ExcludedFromBuild Condition="'$(Platform)'=='ARM64'">true</ExcludedFromBuild> <ExcludedFromBuild Condition="'$(Platform)'=='ARM64'">true</ExcludedFromBuild>
</ClCompile> </ClCompile>
@ -221,4 +209,4 @@
</ClCompile> </ClCompile>
</ItemDefinitionGroup> </ItemDefinitionGroup>
<Import Project="..\..\dep\msvc\vsprops\Targets.props" /> <Import Project="..\..\dep\msvc\vsprops\Targets.props" />
</Project> </Project>

View file

@ -55,9 +55,6 @@
<ClInclude Include="vulkan\texture.h"> <ClInclude Include="vulkan\texture.h">
<Filter>vulkan</Filter> <Filter>vulkan</Filter>
</ClInclude> </ClInclude>
<ClInclude Include="vulkan\staging_buffer.h">
<Filter>vulkan</Filter>
</ClInclude>
<ClInclude Include="vulkan\stream_buffer.h"> <ClInclude Include="vulkan\stream_buffer.h">
<Filter>vulkan</Filter> <Filter>vulkan</Filter>
</ClInclude> </ClInclude>
@ -70,9 +67,6 @@
<ClInclude Include="vulkan\swap_chain.h"> <ClInclude Include="vulkan\swap_chain.h">
<Filter>vulkan</Filter> <Filter>vulkan</Filter>
</ClInclude> </ClInclude>
<ClInclude Include="vulkan\staging_texture.h">
<Filter>vulkan</Filter>
</ClInclude>
<ClInclude Include="dimensional_array.h" /> <ClInclude Include="dimensional_array.h" />
<ClInclude Include="vulkan\context.h"> <ClInclude Include="vulkan\context.h">
<Filter>vulkan</Filter> <Filter>vulkan</Filter>
@ -188,9 +182,6 @@
<ClCompile Include="vulkan\context.cpp"> <ClCompile Include="vulkan\context.cpp">
<Filter>vulkan</Filter> <Filter>vulkan</Filter>
</ClCompile> </ClCompile>
<ClCompile Include="vulkan\staging_buffer.cpp">
<Filter>vulkan</Filter>
</ClCompile>
<ClCompile Include="vulkan\stream_buffer.cpp"> <ClCompile Include="vulkan\stream_buffer.cpp">
<Filter>vulkan</Filter> <Filter>vulkan</Filter>
</ClCompile> </ClCompile>
@ -203,9 +194,6 @@
<ClCompile Include="vulkan\swap_chain.cpp"> <ClCompile Include="vulkan\swap_chain.cpp">
<Filter>vulkan</Filter> <Filter>vulkan</Filter>
</ClCompile> </ClCompile>
<ClCompile Include="vulkan\staging_texture.cpp">
<Filter>vulkan</Filter>
</ClCompile>
<ClCompile Include="vulkan\builders.cpp"> <ClCompile Include="vulkan\builders.cpp">
<Filter>vulkan</Filter> <Filter>vulkan</Filter>
</ClCompile> </ClCompile>
@ -277,4 +265,4 @@
<Filter>vulkan</Filter> <Filter>vulkan</Filter>
</None> </None>
</ItemGroup> </ItemGroup>
</Project> </Project>

View file

@ -1,8 +1,3 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#include "context.h" #include "context.h"
#include "../assert.h" #include "../assert.h"
#include "../log.h" #include "../log.h"
@ -21,7 +16,7 @@ namespace Vulkan {
enum : u32 enum : u32
{ {
TEXTURE_BUFFER_SIZE = 16 * 1024 * 1024, TEXTURE_BUFFER_SIZE = 32 * 1024 * 1024,
}; };
Context::Context(VkInstance instance, VkPhysicalDevice physical_device, bool owns_device) Context::Context(VkInstance instance, VkPhysicalDevice physical_device, bool owns_device)
@ -351,8 +346,8 @@ bool Context::Create(std::string_view gpu_name, const WindowInfo* wi, std::uniqu
// Attempt to create the device. // Attempt to create the device.
if (!g_vulkan_context->CreateDevice(surface, enable_validation_layer, nullptr, 0, nullptr, 0, nullptr) || if (!g_vulkan_context->CreateDevice(surface, enable_validation_layer, nullptr, 0, nullptr, 0, nullptr) ||
!g_vulkan_context->CreateGlobalDescriptorPool() || !g_vulkan_context->CreateCommandBuffers() || !g_vulkan_context->CreateAllocator() || !g_vulkan_context->CreateGlobalDescriptorPool() ||
!g_vulkan_context->CreateTextureStreamBuffer() || !g_vulkan_context->CreateCommandBuffers() || !g_vulkan_context->CreateTextureStreamBuffer() ||
(enable_surface && (*out_swap_chain = SwapChain::Create(wi_copy, surface, true)) == nullptr)) (enable_surface && (*out_swap_chain = SwapChain::Create(wi_copy, surface, true)) == nullptr))
{ {
// Since we are destroying the instance, we're also responsible for destroying the surface. // Since we are destroying the instance, we're also responsible for destroying the surface.
@ -410,6 +405,7 @@ void Context::Destroy()
g_vulkan_context->DestroyRenderPassCache(); g_vulkan_context->DestroyRenderPassCache();
g_vulkan_context->DestroyGlobalDescriptorPool(); g_vulkan_context->DestroyGlobalDescriptorPool();
g_vulkan_context->DestroyCommandBuffers(); g_vulkan_context->DestroyCommandBuffers();
g_vulkan_context->DestroyAllocator();
if (g_vulkan_context->m_device != VK_NULL_HANDLE) if (g_vulkan_context->m_device != VK_NULL_HANDLE)
vkDestroyDevice(g_vulkan_context->m_device, nullptr); vkDestroyDevice(g_vulkan_context->m_device, nullptr);
@ -474,6 +470,9 @@ bool Context::SelectDeviceExtensions(ExtensionList* extension_list, bool enable_
if (enable_surface && !SupportsExtension(VK_KHR_SWAPCHAIN_EXTENSION_NAME, true)) if (enable_surface && !SupportsExtension(VK_KHR_SWAPCHAIN_EXTENSION_NAME, true))
return false; return false;
m_optional_extensions.vk_ext_memory_budget = SupportsExtension(VK_EXT_MEMORY_BUDGET_EXTENSION_NAME, false);
m_optional_extensions.vk_khr_driver_properties = SupportsExtension(VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME, false);
return true; return true;
} }
@ -643,6 +642,37 @@ bool Context::CreateDevice(VkSurfaceKHR surface, bool enable_validation_layer, c
return true; return true;
} }
// Creates the Vulkan Memory Allocator instance used for all buffer/image allocations.
// Returns false if vmaCreateAllocator() fails; logs the VkResult on failure.
bool Context::CreateAllocator()
{
VmaAllocatorCreateInfo ci = {};
ci.vulkanApiVersion = VK_API_VERSION_1_1;
// EXTERNALLY_SYNCHRONIZED: VMA skips its internal locking; callers must serialize
// access to the allocator themselves.
ci.flags = VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT;
ci.physicalDevice = m_physical_device;
ci.device = m_device;
ci.instance = m_instance;
// Let VMA query heap budgets via VK_EXT_memory_budget when the extension is present.
if (m_optional_extensions.vk_ext_memory_budget)
ci.flags |= VMA_ALLOCATOR_CREATE_EXT_MEMORY_BUDGET_BIT;
VkResult res = vmaCreateAllocator(&ci, &m_allocator);
if (res != VK_SUCCESS)
{
LOG_VULKAN_ERROR(res, "vmaCreateAllocator failed: ");
return false;
}
return true;
}
// Destroys the VMA allocator created by CreateAllocator(). Safe to call when the
// allocator was never created (no-op on VK_NULL_HANDLE).
void Context::DestroyAllocator()
{
if (m_allocator == VK_NULL_HANDLE)
return;

vmaDestroyAllocator(m_allocator);
m_allocator = VK_NULL_HANDLE;
}
bool Context::CreateCommandBuffers() bool Context::CreateCommandBuffers()
{ {
VkResult res; VkResult res;
@ -1143,6 +1173,9 @@ void Context::ActivateCommandBuffer(u32 index)
m_current_frame = index; m_current_frame = index;
m_current_command_buffer = resources.command_buffer; m_current_command_buffer = resources.command_buffer;
// using the lower 32 bits of the fence index should be sufficient here, I hope...
vmaSetCurrentFrameIndex(m_allocator, static_cast<u32>(m_next_fence_counter));
} }
void Context::ExecuteCommandBuffer(bool wait_for_completion) void Context::ExecuteCommandBuffer(bool wait_for_completion)
@ -1169,6 +1202,13 @@ void Context::DeferBufferDestruction(VkBuffer object)
resources.cleanup_resources.push_back([this, object]() { vkDestroyBuffer(m_device, object, nullptr); }); resources.cleanup_resources.push_back([this, object]() { vkDestroyBuffer(m_device, object, nullptr); });
} }
// Queues a VMA-allocated buffer (and its allocation) for destruction once the GPU has
// finished with the current frame; cleanup runs when this frame's resources are recycled.
void Context::DeferBufferDestruction(VkBuffer object, VmaAllocation allocation)
{
FrameResources& resources = m_frame_resources[m_current_frame];
resources.cleanup_resources.push_back(
[this, object, allocation]() { vmaDestroyBuffer(m_allocator, object, allocation); });
}
void Context::DeferBufferViewDestruction(VkBufferView object) void Context::DeferBufferViewDestruction(VkBufferView object)
{ {
FrameResources& resources = m_frame_resources[m_current_frame]; FrameResources& resources = m_frame_resources[m_current_frame];
@ -1193,6 +1233,13 @@ void Context::DeferImageDestruction(VkImage object)
resources.cleanup_resources.push_back([this, object]() { vkDestroyImage(m_device, object, nullptr); }); resources.cleanup_resources.push_back([this, object]() { vkDestroyImage(m_device, object, nullptr); });
} }
// Queues a VMA-allocated image (and its allocation) for destruction once the GPU has
// finished with the current frame; cleanup runs when this frame's resources are recycled.
void Context::DeferImageDestruction(VkImage object, VmaAllocation allocation)
{
FrameResources& resources = m_frame_resources[m_current_frame];
resources.cleanup_resources.push_back(
[this, object, allocation]() { vmaDestroyImage(m_allocator, object, allocation); });
}
void Context::DeferImageViewDestruction(VkImageView object) void Context::DeferImageViewDestruction(VkImageView object)
{ {
FrameResources& resources = m_frame_resources[m_current_frame]; FrameResources& resources = m_frame_resources[m_current_frame];
@ -1267,97 +1314,6 @@ void Context::DisableDebugUtils()
} }
} }
// Finds a memory type index whose property flags contain all of `properties`, restricted
// to the candidate types in `bits` (as returned by vkGet*MemoryRequirements).
// Returns true and writes the index to *out_type_index on success, false if none matches.
bool Context::GetMemoryType(u32 bits, VkMemoryPropertyFlags properties, u32* out_type_index)
{
for (u32 i = 0; i < VK_MAX_MEMORY_TYPES; i++)
{
// Only consider types the resource is allowed to use.
if ((bits & (1 << i)) != 0)
{
u32 supported = m_device_memory_properties.memoryTypes[i].propertyFlags & properties;
if (supported == properties)
{
*out_type_index = i;
return true;
}
}
}

return false;
}
// Convenience overload: returns the matching memory type index, or panics (fatal) when
// no type in `bits` supports `properties`.
u32 Context::GetMemoryType(u32 bits, VkMemoryPropertyFlags properties)
{
u32 type_index = VK_MAX_MEMORY_TYPES;
if (!GetMemoryType(bits, properties, &type_index))
{
Log_ErrorPrintf("Unable to find memory type for %x:%x", bits, properties);
Panic("Unable to find memory type");
}

return type_index;
}
// Picks a host-visible memory type for CPU->GPU upload buffers, preferring coherent
// memory (no explicit flush needed). Falls back to non-coherent host-visible memory
// with a warning. Optionally reports coherency through *is_coherent.
u32 Context::GetUploadMemoryType(u32 bits, bool* is_coherent)
{
// Try for coherent memory first.
VkMemoryPropertyFlags flags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
u32 type_index;
if (!GetMemoryType(bits, flags, &type_index))
{
Log_WarningPrintf("Vulkan: Failed to find a coherent memory type for uploads, this will affect performance.");

// Try non-coherent memory.
flags &= ~VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
if (!GetMemoryType(bits, flags, &type_index))
{
// We shouldn't have any memory types that aren't host-visible.
Panic("Unable to get memory type for upload.");
type_index = 0;
}
}

if (is_coherent)
*is_coherent = ((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0);
return type_index;
}
// Picks a host-visible memory type for GPU->CPU readback buffers. Preference order:
// cached+coherent, then cached (caching matters more than coherency for reads),
// then plain host-visible. Optionally reports the chosen coherency/caching flags
// through *is_coherent / *is_cached.
u32 Context::GetReadbackMemoryType(u32 bits, bool* is_coherent, bool* is_cached)
{
// Try for cached and coherent memory first.
VkMemoryPropertyFlags flags =
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
u32 type_index;
if (!GetMemoryType(bits, flags, &type_index))
{
// For readbacks, caching is more important than coherency.
flags &= ~VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
if (!GetMemoryType(bits, flags, &type_index))
{
Log_WarningPrintf("Vulkan: Failed to find a cached memory type for readbacks, this will affect "
"performance.");

// Remove the cached bit as well.
flags &= ~VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
if (!GetMemoryType(bits, flags, &type_index))
{
// We shouldn't have any memory types that aren't host-visible.
// Fixed copy-paste error: this is the readback path, not the upload path.
Panic("Unable to get memory type for readback.");
type_index = 0;
}
}
}

if (is_coherent)
*is_coherent = ((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0);
if (is_cached)
*is_cached = ((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0);
return type_index;
}
VkRenderPass Context::GetRenderPass(VkFormat color_format, VkFormat depth_format, VkSampleCountFlagBits samples, VkRenderPass Context::GetRenderPass(VkFormat color_format, VkFormat depth_format, VkSampleCountFlagBits samples,
VkAttachmentLoadOp load_op) VkAttachmentLoadOp load_op)
{ {

View file

@ -1,8 +1,3 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#pragma once #pragma once
#include "../types.h" #include "../types.h"
@ -33,6 +28,12 @@ public:
NUM_COMMAND_BUFFERS = 2 NUM_COMMAND_BUFFERS = 2
}; };
struct OptionalExtensions
{
bool vk_ext_memory_budget : 1;
bool vk_khr_driver_properties : 1;
};
~Context(); ~Context();
// Determines if the Vulkan validation layer is available on the system. // Determines if the Vulkan validation layer is available on the system.
@ -71,6 +72,7 @@ public:
ALWAYS_INLINE VkInstance GetVulkanInstance() const { return m_instance; } ALWAYS_INLINE VkInstance GetVulkanInstance() const { return m_instance; }
ALWAYS_INLINE VkPhysicalDevice GetPhysicalDevice() const { return m_physical_device; } ALWAYS_INLINE VkPhysicalDevice GetPhysicalDevice() const { return m_physical_device; }
ALWAYS_INLINE VkDevice GetDevice() const { return m_device; } ALWAYS_INLINE VkDevice GetDevice() const { return m_device; }
ALWAYS_INLINE VmaAllocator GetAllocator() const { return m_allocator; }
ALWAYS_INLINE VkQueue GetGraphicsQueue() const { return m_graphics_queue; } ALWAYS_INLINE VkQueue GetGraphicsQueue() const { return m_graphics_queue; }
ALWAYS_INLINE u32 GetGraphicsQueueFamilyIndex() const { return m_graphics_queue_family_index; } ALWAYS_INLINE u32 GetGraphicsQueueFamilyIndex() const { return m_graphics_queue_family_index; }
ALWAYS_INLINE VkQueue GetPresentQueue() const { return m_present_queue; } ALWAYS_INLINE VkQueue GetPresentQueue() const { return m_present_queue; }
@ -118,15 +120,6 @@ public:
} }
ALWAYS_INLINE u32 GetMaxImageDimension2D() const { return m_device_properties.limits.maxImageDimension2D; } ALWAYS_INLINE u32 GetMaxImageDimension2D() const { return m_device_properties.limits.maxImageDimension2D; }
// Finds a memory type index for the specified memory properties and the bits returned by
// vkGetImageMemoryRequirements
bool GetMemoryType(u32 bits, VkMemoryPropertyFlags properties, u32* out_type_index);
u32 GetMemoryType(u32 bits, VkMemoryPropertyFlags properties);
// Finds a memory type for upload or readback buffers.
u32 GetUploadMemoryType(u32 bits, bool* is_coherent = nullptr);
u32 GetReadbackMemoryType(u32 bits, bool* is_coherent = nullptr, bool* is_cached = nullptr);
// Creates a simple render pass. // Creates a simple render pass.
VkRenderPass GetRenderPass(VkFormat color_format, VkFormat depth_format, VkSampleCountFlagBits samples, VkRenderPass GetRenderPass(VkFormat color_format, VkFormat depth_format, VkSampleCountFlagBits samples,
VkAttachmentLoadOp load_op); VkAttachmentLoadOp load_op);
@ -178,10 +171,12 @@ public:
// Schedule a vulkan resource for destruction later on. This will occur when the command buffer // Schedule a vulkan resource for destruction later on. This will occur when the command buffer
// is next re-used, and the GPU has finished working with the specified resource. // is next re-used, and the GPU has finished working with the specified resource.
void DeferBufferDestruction(VkBuffer object); void DeferBufferDestruction(VkBuffer object);
void DeferBufferDestruction(VkBuffer object, VmaAllocation allocation);
void DeferBufferViewDestruction(VkBufferView object); void DeferBufferViewDestruction(VkBufferView object);
void DeferDeviceMemoryDestruction(VkDeviceMemory object); void DeferDeviceMemoryDestruction(VkDeviceMemory object);
void DeferFramebufferDestruction(VkFramebuffer object); void DeferFramebufferDestruction(VkFramebuffer object);
void DeferImageDestruction(VkImage object); void DeferImageDestruction(VkImage object);
void DeferImageDestruction(VkImage object, VmaAllocation allocation);
void DeferImageViewDestruction(VkImageView object); void DeferImageViewDestruction(VkImageView object);
void DeferPipelineDestruction(VkPipeline pipeline); void DeferPipelineDestruction(VkPipeline pipeline);
@ -205,6 +200,8 @@ private:
u32 num_required_device_extensions, const char** required_device_layers, u32 num_required_device_extensions, const char** required_device_layers,
u32 num_required_device_layers, const VkPhysicalDeviceFeatures* required_features); u32 num_required_device_layers, const VkPhysicalDeviceFeatures* required_features);
bool CreateAllocator();
void DestroyAllocator();
bool CreateCommandBuffers(); bool CreateCommandBuffers();
void DestroyCommandBuffers(); void DestroyCommandBuffers();
bool CreateGlobalDescriptorPool(); bool CreateGlobalDescriptorPool();
@ -239,6 +236,7 @@ private:
VkInstance m_instance = VK_NULL_HANDLE; VkInstance m_instance = VK_NULL_HANDLE;
VkPhysicalDevice m_physical_device = VK_NULL_HANDLE; VkPhysicalDevice m_physical_device = VK_NULL_HANDLE;
VkDevice m_device = VK_NULL_HANDLE; VkDevice m_device = VK_NULL_HANDLE;
VmaAllocator m_allocator = VK_NULL_HANDLE;
VkCommandBuffer m_current_command_buffer = VK_NULL_HANDLE; VkCommandBuffer m_current_command_buffer = VK_NULL_HANDLE;
@ -290,6 +288,8 @@ private:
VkPhysicalDeviceFeatures m_device_features = {}; VkPhysicalDeviceFeatures m_device_features = {};
VkPhysicalDeviceProperties m_device_properties = {}; VkPhysicalDeviceProperties m_device_properties = {};
VkPhysicalDeviceMemoryProperties m_device_memory_properties = {}; VkPhysicalDeviceMemoryProperties m_device_memory_properties = {};
VkPhysicalDeviceDriverPropertiesKHR m_device_driver_properties = {};
OptionalExtensions m_optional_extensions = {};
}; };
} // namespace Vulkan } // namespace Vulkan

View file

@ -1,5 +1,9 @@
#pragma once #pragma once
#ifdef __cplusplus
extern "C" {
#endif
// We abuse the preprocessor here to only need to specify function names once. // We abuse the preprocessor here to only need to specify function names once.
// Function names are prefixed so to not conflict with system symbols at runtime. // Function names are prefixed so to not conflict with system symbols at runtime.
#define VULKAN_MODULE_ENTRY_POINT(name, required) extern PFN_##name ds_##name; #define VULKAN_MODULE_ENTRY_POINT(name, required) extern PFN_##name ds_##name;
@ -12,6 +16,10 @@
#undef VULKAN_INSTANCE_ENTRY_POINT #undef VULKAN_INSTANCE_ENTRY_POINT
#undef VULKAN_MODULE_ENTRY_POINT #undef VULKAN_MODULE_ENTRY_POINT
#ifdef __cplusplus
}
#endif
#define vkCreateInstance ds_vkCreateInstance #define vkCreateInstance ds_vkCreateInstance
#define vkGetInstanceProcAddr ds_vkGetInstanceProcAddr #define vkGetInstanceProcAddr ds_vkGetInstanceProcAddr
#define vkEnumerateInstanceExtensionProperties ds_vkEnumerateInstanceExtensionProperties #define vkEnumerateInstanceExtensionProperties ds_vkEnumerateInstanceExtensionProperties
@ -58,7 +66,6 @@
#define vkSetDebugUtilsObjectTagEXT ds_vkSetDebugUtilsObjectTagEXT #define vkSetDebugUtilsObjectTagEXT ds_vkSetDebugUtilsObjectTagEXT
#define vkSubmitDebugUtilsMessageEXT ds_vkSubmitDebugUtilsMessageEXT #define vkSubmitDebugUtilsMessageEXT ds_vkSubmitDebugUtilsMessageEXT
#define vkGetPhysicalDeviceProperties2 ds_vkGetPhysicalDeviceProperties2
#define vkGetPhysicalDeviceSurfaceCapabilities2KHR ds_vkGetPhysicalDeviceSurfaceCapabilities2KHR #define vkGetPhysicalDeviceSurfaceCapabilities2KHR ds_vkGetPhysicalDeviceSurfaceCapabilities2KHR
#define vkGetPhysicalDeviceDisplayPropertiesKHR ds_vkGetPhysicalDeviceDisplayPropertiesKHR #define vkGetPhysicalDeviceDisplayPropertiesKHR ds_vkGetPhysicalDeviceDisplayPropertiesKHR
#define vkGetPhysicalDeviceDisplayPlanePropertiesKHR ds_vkGetPhysicalDeviceDisplayPlanePropertiesKHR #define vkGetPhysicalDeviceDisplayPlanePropertiesKHR ds_vkGetPhysicalDeviceDisplayPlanePropertiesKHR
@ -68,6 +75,11 @@
#define vkGetDisplayPlaneCapabilitiesKHR ds_vkGetDisplayPlaneCapabilitiesKHR #define vkGetDisplayPlaneCapabilitiesKHR ds_vkGetDisplayPlaneCapabilitiesKHR
#define vkCreateDisplayPlaneSurfaceKHR ds_vkCreateDisplayPlaneSurfaceKHR #define vkCreateDisplayPlaneSurfaceKHR ds_vkCreateDisplayPlaneSurfaceKHR
// Vulkan 1.1 functions.
#define vkGetPhysicalDeviceFeatures2 ds_vkGetPhysicalDeviceFeatures2
#define vkGetPhysicalDeviceProperties2 ds_vkGetPhysicalDeviceProperties2
#define vkGetPhysicalDeviceMemoryProperties2 ds_vkGetPhysicalDeviceMemoryProperties2
#define vkDestroyDevice ds_vkDestroyDevice #define vkDestroyDevice ds_vkDestroyDevice
#define vkGetDeviceQueue ds_vkGetDeviceQueue #define vkGetDeviceQueue ds_vkGetDeviceQueue
#define vkQueueSubmit ds_vkQueueSubmit #define vkQueueSubmit ds_vkQueueSubmit
@ -194,7 +206,13 @@
#define vkAcquireNextImageKHR ds_vkAcquireNextImageKHR #define vkAcquireNextImageKHR ds_vkAcquireNextImageKHR
#define vkQueuePresentKHR ds_vkQueuePresentKHR #define vkQueuePresentKHR ds_vkQueuePresentKHR
// Vulkan 1.1 functions.
#define vkGetBufferMemoryRequirements2 ds_vkGetBufferMemoryRequirements2
#define vkGetImageMemoryRequirements2 ds_vkGetImageMemoryRequirements2
#define vkBindBufferMemory2 ds_vkBindBufferMemory2
#define vkBindImageMemory2 ds_vkBindImageMemory2
#ifdef SUPPORTS_VULKAN_EXCLUSIVE_FULLSCREEN #ifdef SUPPORTS_VULKAN_EXCLUSIVE_FULLSCREEN
#define vkAcquireFullScreenExclusiveModeEXT ds_vkAcquireFullScreenExclusiveModeEXT #define vkAcquireFullScreenExclusiveModeEXT ds_vkAcquireFullScreenExclusiveModeEXT
#define vkReleaseFullScreenExclusiveModeEXT ds_vkReleaseFullScreenExclusiveModeEXT #define vkReleaseFullScreenExclusiveModeEXT ds_vkReleaseFullScreenExclusiveModeEXT
#endif #endif

View file

@ -1,8 +1,3 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
// Expands the VULKAN_ENTRY_POINT macro for each function when this file is included. // Expands the VULKAN_ENTRY_POINT macro for each function when this file is included.
// Parameters: Function name, is required // Parameters: Function name, is required
// VULKAN_MODULE_ENTRY_POINT is for functions in vulkan-1.dll // VULKAN_MODULE_ENTRY_POINT is for functions in vulkan-1.dll
@ -16,13 +11,13 @@ VULKAN_MODULE_ENTRY_POINT(vkGetInstanceProcAddr, true)
VULKAN_MODULE_ENTRY_POINT(vkEnumerateInstanceExtensionProperties, true) VULKAN_MODULE_ENTRY_POINT(vkEnumerateInstanceExtensionProperties, true)
VULKAN_MODULE_ENTRY_POINT(vkEnumerateInstanceLayerProperties, true) VULKAN_MODULE_ENTRY_POINT(vkEnumerateInstanceLayerProperties, true)
VULKAN_MODULE_ENTRY_POINT(vkEnumerateInstanceVersion, false) VULKAN_MODULE_ENTRY_POINT(vkEnumerateInstanceVersion, false)
VULKAN_MODULE_ENTRY_POINT(vkDestroyInstance, true)
#endif // VULKAN_MODULE_ENTRY_POINT #endif // VULKAN_MODULE_ENTRY_POINT
#ifdef VULKAN_INSTANCE_ENTRY_POINT #ifdef VULKAN_INSTANCE_ENTRY_POINT
VULKAN_INSTANCE_ENTRY_POINT(vkGetDeviceProcAddr, true) VULKAN_INSTANCE_ENTRY_POINT(vkGetDeviceProcAddr, true)
VULKAN_INSTANCE_ENTRY_POINT(vkDestroyInstance, true)
VULKAN_INSTANCE_ENTRY_POINT(vkEnumeratePhysicalDevices, true) VULKAN_INSTANCE_ENTRY_POINT(vkEnumeratePhysicalDevices, true)
VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceFeatures, true) VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceFeatures, true)
VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceFormatProperties, true) VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceFormatProperties, true)
@ -79,7 +74,6 @@ VULKAN_INSTANCE_ENTRY_POINT(vkSetDebugUtilsObjectNameEXT, false)
VULKAN_INSTANCE_ENTRY_POINT(vkSetDebugUtilsObjectTagEXT, false) VULKAN_INSTANCE_ENTRY_POINT(vkSetDebugUtilsObjectTagEXT, false)
VULKAN_INSTANCE_ENTRY_POINT(vkSubmitDebugUtilsMessageEXT, false) VULKAN_INSTANCE_ENTRY_POINT(vkSubmitDebugUtilsMessageEXT, false)
VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceProperties2, false)
VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceSurfaceCapabilities2KHR, false) VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceSurfaceCapabilities2KHR, false)
VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceDisplayPropertiesKHR, false) VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceDisplayPropertiesKHR, false)
@ -90,7 +84,12 @@ VULKAN_INSTANCE_ENTRY_POINT(vkCreateDisplayModeKHR, false)
VULKAN_INSTANCE_ENTRY_POINT(vkGetDisplayPlaneCapabilitiesKHR, false) VULKAN_INSTANCE_ENTRY_POINT(vkGetDisplayPlaneCapabilitiesKHR, false)
VULKAN_INSTANCE_ENTRY_POINT(vkCreateDisplayPlaneSurfaceKHR, false) VULKAN_INSTANCE_ENTRY_POINT(vkCreateDisplayPlaneSurfaceKHR, false)
#endif // VULKAN_INSTANCE_ENTRY_POINT // Vulkan 1.1 functions.
VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceFeatures2, true)
VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceProperties2, true)
VULKAN_INSTANCE_ENTRY_POINT(vkGetPhysicalDeviceMemoryProperties2, true)
#endif // VULKAN_INSTANCE_ENTRY_POINT
#ifdef VULKAN_DEVICE_ENTRY_POINT #ifdef VULKAN_DEVICE_ENTRY_POINT
@ -220,9 +219,15 @@ VULKAN_DEVICE_ENTRY_POINT(vkGetSwapchainImagesKHR, false)
VULKAN_DEVICE_ENTRY_POINT(vkAcquireNextImageKHR, false) VULKAN_DEVICE_ENTRY_POINT(vkAcquireNextImageKHR, false)
VULKAN_DEVICE_ENTRY_POINT(vkQueuePresentKHR, false) VULKAN_DEVICE_ENTRY_POINT(vkQueuePresentKHR, false)
// Vulkan 1.1 functions.
VULKAN_DEVICE_ENTRY_POINT(vkGetBufferMemoryRequirements2, true)
VULKAN_DEVICE_ENTRY_POINT(vkGetImageMemoryRequirements2, true)
VULKAN_DEVICE_ENTRY_POINT(vkBindBufferMemory2, true)
VULKAN_DEVICE_ENTRY_POINT(vkBindImageMemory2, true)
#ifdef SUPPORTS_VULKAN_EXCLUSIVE_FULLSCREEN #ifdef SUPPORTS_VULKAN_EXCLUSIVE_FULLSCREEN
VULKAN_DEVICE_ENTRY_POINT(vkAcquireFullScreenExclusiveModeEXT, false) VULKAN_DEVICE_ENTRY_POINT(vkAcquireFullScreenExclusiveModeEXT, false)
VULKAN_DEVICE_ENTRY_POINT(vkReleaseFullScreenExclusiveModeEXT, false) VULKAN_DEVICE_ENTRY_POINT(vkReleaseFullScreenExclusiveModeEXT, false)
#endif #endif
#endif // VULKAN_DEVICE_ENTRY_POINT #endif // VULKAN_DEVICE_ENTRY_POINT

View file

@ -1,8 +1,3 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#include <atomic> #include <atomic>
#include <cstdarg> #include <cstdarg>
#include <cstdio> #include <cstdio>
@ -10,6 +5,8 @@
#include <cstring> #include <cstring>
#include <string> #include <string>
#define VMA_IMPLEMENTATION 1
#include "loader.h" #include "loader.h"
#ifndef _WIN32 #ifndef _WIN32
@ -20,6 +17,8 @@
#include <mach-o/dyld.h> #include <mach-o/dyld.h>
#endif #endif
extern "C" {
#define VULKAN_MODULE_ENTRY_POINT(name, required) PFN_##name ds_##name; #define VULKAN_MODULE_ENTRY_POINT(name, required) PFN_##name ds_##name;
#define VULKAN_INSTANCE_ENTRY_POINT(name, required) PFN_##name ds_##name; #define VULKAN_INSTANCE_ENTRY_POINT(name, required) PFN_##name ds_##name;
#define VULKAN_DEVICE_ENTRY_POINT(name, required) PFN_##name ds_##name; #define VULKAN_DEVICE_ENTRY_POINT(name, required) PFN_##name ds_##name;
@ -27,9 +26,9 @@
#undef VULKAN_DEVICE_ENTRY_POINT #undef VULKAN_DEVICE_ENTRY_POINT
#undef VULKAN_INSTANCE_ENTRY_POINT #undef VULKAN_INSTANCE_ENTRY_POINT
#undef VULKAN_MODULE_ENTRY_POINT #undef VULKAN_MODULE_ENTRY_POINT
}
namespace Vulkan { namespace Vulkan {
void ResetVulkanLibraryFunctionPointers() void ResetVulkanLibraryFunctionPointers()
{ {
#define VULKAN_MODULE_ENTRY_POINT(name, required) ds_##name = nullptr; #define VULKAN_MODULE_ENTRY_POINT(name, required) ds_##name = nullptr;
@ -55,11 +54,7 @@ bool LoadVulkanLibrary()
return true; return true;
} }
#if WINAPI_FAMILY_PARTITION(WINAPI_PARTITION_DESKTOP | WINAPI_PARTITION_SYSTEM | WINAPI_PARTITION_GAMES)
vulkan_module = LoadLibraryA("vulkan-1.dll"); vulkan_module = LoadLibraryA("vulkan-1.dll");
#else
vulkan_module = NULL;
#endif
if (!vulkan_module) if (!vulkan_module)
{ {
std::fprintf(stderr, "Failed to load vulkan-1.dll\n"); std::fprintf(stderr, "Failed to load vulkan-1.dll\n");
@ -118,14 +113,9 @@ bool LoadVulkanLibrary()
#if defined(__APPLE__) #if defined(__APPLE__)
// Check if a path to a specific Vulkan library has been specified. // Check if a path to a specific Vulkan library has been specified.
// Otherwise, try for a system-wide libvulkan.
char* libvulkan_env = getenv("LIBVULKAN_PATH"); char* libvulkan_env = getenv("LIBVULKAN_PATH");
if (libvulkan_env) if (libvulkan_env)
vulkan_module = dlopen(libvulkan_env, RTLD_NOW); vulkan_module = dlopen(libvulkan_env, RTLD_NOW);
else
vulkan_module = dlopen("libvulkan.dylib", RTLD_NOW);
// Fall back to the packaged MoltenVK.
if (!vulkan_module) if (!vulkan_module)
{ {
unsigned path_size = 0; unsigned path_size = 0;
@ -140,11 +130,23 @@ bool LoadVulkanLibrary()
if (pos != std::string::npos) if (pos != std::string::npos)
{ {
path.erase(pos); path.erase(pos);
path += "/../Frameworks/libMoltenVK.dylib"; path += "/../Frameworks/libvulkan.dylib";
vulkan_module = dlopen(path.c_str(), RTLD_NOW); vulkan_module = dlopen(path.c_str(), RTLD_NOW);
if (!vulkan_module)
{
path.erase(pos);
path += "/../Frameworks/libMoltenVK.dylib";
vulkan_module = dlopen(path.c_str(), RTLD_NOW);
}
} }
} }
} }
if (!vulkan_module)
{
vulkan_module = dlopen("libvulkan.dylib", RTLD_NOW);
if (!vulkan_module)
vulkan_module = dlopen("libMoltenVK.dylib", RTLD_NOW);
}
#else #else
// Names of libraries to search. Desktop should use libvulkan.so.1 or libvulkan.so. // Names of libraries to search. Desktop should use libvulkan.so.1 or libvulkan.so.
static const char* search_lib_names[] = {"libvulkan.so.1", "libvulkan.so"}; static const char* search_lib_names[] = {"libvulkan.so.1", "libvulkan.so"};
@ -240,4 +242,4 @@ bool LoadVulkanDeviceFunctions(VkDevice device)
return !required_functions_missing; return !required_functions_missing;
} }
} // namespace Vulkan } // namespace Vulkan

View file

@ -1,8 +1,3 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#pragma once #pragma once
#define VK_NO_PROTOTYPES #define VK_NO_PROTOTYPES
@ -81,6 +76,25 @@
#include "entry_points.h" #include "entry_points.h"
// We include vk_mem_alloc globally, so we don't accidentally include it before the vulkan header somewhere.
#if defined(__clang__)
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wnullability-completeness"
#elif defined(_MSC_VER)
#pragma warning(push, 0)
#endif
#define VMA_STATIC_VULKAN_FUNCTIONS 1
#define VMA_DYNAMIC_VULKAN_FUNCTIONS 0
#define VMA_STATS_STRING_ENABLED 0
#include "vulkan/vk_mem_alloc.h"
#if defined(__clang__)
#pragma clang diagnostic pop
#elif defined(_MSC_VER)
#pragma warning(pop)
#endif
namespace Vulkan { namespace Vulkan {
bool LoadVulkanLibrary(); bool LoadVulkanLibrary();

View file

@ -1,8 +1,3 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#include "shader_compiler.h" #include "shader_compiler.h"
#include "../assert.h" #include "../assert.h"
#include "../log.h" #include "../log.h"

View file

@ -1,8 +1,3 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#pragma once #pragma once
#include "../types.h" #include "../types.h"

View file

@ -1,253 +0,0 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#include "staging_buffer.h"
#include "../assert.h"
#include "context.h"
#include "util.h"
namespace Vulkan {
StagingBuffer::StagingBuffer() = default;

// Move construction: default-construct, then steal the other buffer's state via
// the move-assignment operator. The source is left holding the default values.
StagingBuffer::StagingBuffer(StagingBuffer&& move) : StagingBuffer()
{
  *this = std::move(move);
}

StagingBuffer::~StagingBuffer()
{
  // Defer destruction so in-flight GPU work referencing the buffer completes first.
  if (IsValid())
    Destroy(true);
}

// Move assignment: release any currently-owned buffer, then exchange state with
// the source. Since our members are at their defaults after Destroy(), the
// source ends up reset and we end up owning its resources.
StagingBuffer& StagingBuffer::operator=(StagingBuffer&& move)
{
  if (IsValid())
    Destroy(true);

  // Handles and sizing.
  std::swap(m_buffer, move.m_buffer);
  std::swap(m_memory, move.m_memory);
  std::swap(m_size, move.m_size);
  std::swap(m_type, move.m_type);
  std::swap(m_coherent, move.m_coherent);

  // Mapping state.
  std::swap(m_map_pointer, move.m_map_pointer);
  std::swap(m_map_offset, move.m_map_offset);
  std::swap(m_map_size, move.m_map_size);
  return *this;
}
// Maps [offset, offset + size) of the backing memory into the host address space.
// size == VK_WHOLE_SIZE maps everything from offset to the end of the buffer.
// Returns false (and logs) if vkMapMemory fails.
bool StagingBuffer::Map(VkDeviceSize offset, VkDeviceSize size)
{
  m_map_offset = offset;
  m_map_size = (size == VK_WHOLE_SIZE) ? (m_size - offset) : size;

  Assert(!m_map_pointer);
  Assert(m_map_offset + m_map_size <= m_size);

  void* mapped = nullptr;
  const VkResult res =
    vkMapMemory(g_vulkan_context->GetDevice(), m_memory, m_map_offset, m_map_size, 0, &mapped);
  if (res != VK_SUCCESS)
  {
    LOG_VULKAN_ERROR(res, "vkMapMemory failed: ");
    return false;
  }

  m_map_pointer = static_cast<char*>(mapped);
  return true;
}

// Releases the host mapping and clears the cached mapping state.
void StagingBuffer::Unmap()
{
  Assert(m_map_pointer);

  vkUnmapMemory(g_vulkan_context->GetDevice(), m_memory);
  m_map_pointer = nullptr;
  m_map_size = 0;
  m_map_offset = 0;
}
// Upload part 1: makes CPU writes in [offset, offset + size) visible to the device.
// 'offset' is relative to the start of the buffer (see Read()/Write()).
// No-op for coherent mappings or when the buffer is not currently mapped.
void StagingBuffer::FlushCPUCache(VkDeviceSize offset, VkDeviceSize size)
{
  Assert(offset >= m_map_offset);
  if (m_coherent || !IsMapped())
    return;

  // VkMappedMemoryRange::offset is relative to the start of the memory object,
  // not the mapped range. The buffer is bound at memory offset 0 (see
  // AllocateBuffer()), so the buffer-relative offset is used directly.
  // The previous code passed (offset - m_map_offset), which flushed the wrong
  // region whenever the buffer was mapped at a non-zero offset.
  VkMappedMemoryRange range = {VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, nullptr, m_memory, offset, size};
  vkFlushMappedMemoryRanges(g_vulkan_context->GetDevice(), 1, &range);
}
// Upload part 2: inserts a buffer memory barrier ordering prior host writes
// (VK_ACCESS_HOST_WRITE_BIT at the host stage) before the given destination
// access/stage. Skipped for coherent mappings. Implicit on command buffer
// submission, so this is rarely required.
void StagingBuffer::InvalidateGPUCache(VkCommandBuffer command_buffer, VkAccessFlagBits dest_access_flags,
                                       VkPipelineStageFlagBits dest_pipeline_stage, VkDeviceSize offset,
                                       VkDeviceSize size)
{
  if (m_coherent)
    return;

  // Either the explicit range is in bounds, or VK_WHOLE_SIZE covers offset..end.
  Assert((offset + size) <= m_size || (offset < m_size && size == VK_WHOLE_SIZE));
  Util::BufferMemoryBarrier(command_buffer, m_buffer, VK_ACCESS_HOST_WRITE_BIT, dest_access_flags, offset, size,
                            VK_PIPELINE_STAGE_HOST_BIT, dest_pipeline_stage);
}

// Readback part 0: inserts a barrier making the buffer available for GPU writes
// (no prior access to wait on, hence source access 0 / TOP_OF_PIPE).
// Skipped for coherent mappings.
void StagingBuffer::PrepareForGPUWrite(VkCommandBuffer command_buffer, VkAccessFlagBits dst_access_flags,
                                       VkPipelineStageFlagBits dst_pipeline_stage, VkDeviceSize offset,
                                       VkDeviceSize size)
{
  if (m_coherent)
    return;

  Assert((offset + size) <= m_size || (offset < m_size && size == VK_WHOLE_SIZE));
  Util::BufferMemoryBarrier(command_buffer, m_buffer, 0, dst_access_flags, offset, size,
                            VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, dst_pipeline_stage);
}

// Readback part 1: inserts a barrier ordering the given GPU writes before host
// reads (VK_ACCESS_HOST_READ_BIT at the host stage). Skipped for coherent
// mappings.
void StagingBuffer::FlushGPUCache(VkCommandBuffer command_buffer, VkAccessFlagBits src_access_flags,
                                  VkPipelineStageFlagBits src_pipeline_stage, VkDeviceSize offset, VkDeviceSize size)
{
  if (m_coherent)
    return;

  Assert((offset + size) <= m_size || (offset < m_size && size == VK_WHOLE_SIZE));
  Util::BufferMemoryBarrier(command_buffer, m_buffer, src_access_flags, VK_ACCESS_HOST_READ_BIT, offset, size,
                            src_pipeline_stage, VK_PIPELINE_STAGE_HOST_BIT);
}
// Readback part 2: makes device writes in [offset, offset + size) visible to the
// CPU. 'offset' is relative to the start of the buffer (see Read()/Write()).
// No-op for coherent mappings or when the buffer is not currently mapped.
void StagingBuffer::InvalidateCPUCache(VkDeviceSize offset, VkDeviceSize size)
{
  Assert(offset >= m_map_offset);
  if (m_coherent || !IsMapped())
    return;

  // VkMappedMemoryRange::offset is relative to the start of the memory object,
  // not the mapped range. The buffer is bound at memory offset 0 (see
  // AllocateBuffer()), so the buffer-relative offset is used directly.
  // The previous code passed (offset - m_map_offset), which invalidated the
  // wrong region whenever the buffer was mapped at a non-zero offset.
  VkMappedMemoryRange range = {VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, nullptr, m_memory, offset, size};
  vkInvalidateMappedMemoryRanges(g_vulkan_context->GetDevice(), 1, &range);
}
// Copies 'size' bytes out of the buffer into 'data'. 'offset' is from the start
// of the buffer, not from the map offset; the region must lie inside the current
// mapping. Optionally invalidates the CPU cache first so device writes are seen.
void StagingBuffer::Read(VkDeviceSize offset, void* data, size_t size, bool invalidate_caches)
{
  Assert((offset + size) <= m_size);
  Assert(offset >= m_map_offset && size <= (m_map_size + (offset - m_map_offset)));

  if (invalidate_caches)
    InvalidateCPUCache(offset, size);

  const VkDeviceSize map_relative_offset = offset - m_map_offset;
  memcpy(data, m_map_pointer + map_relative_offset, size);
}

// Copies 'size' bytes from 'data' into the buffer. 'offset' is from the start
// of the buffer, not from the map offset; the region must lie inside the current
// mapping. Optionally flushes the CPU cache afterwards so the device sees them.
void StagingBuffer::Write(VkDeviceSize offset, const void* data, size_t size, bool invalidate_caches)
{
  Assert((offset + size) <= m_size);
  Assert(offset >= m_map_offset && size <= (m_map_size + (offset - m_map_offset)));

  const VkDeviceSize map_relative_offset = offset - m_map_offset;
  memcpy(m_map_pointer + map_relative_offset, data, size);

  if (invalidate_caches)
    FlushCPUCache(offset, size);
}
// Creates a VkBuffer of 'size' bytes with the given usage flags and allocates
// host-visible backing memory for it, bound at memory offset 0. The memory type
// is chosen from the context's upload or readback pools depending on 'type';
// *out_coherent receives whether that type is host-coherent. On failure, any
// partially-created objects are destroyed and false is returned (with logging).
bool StagingBuffer::AllocateBuffer(Type type, VkDeviceSize size, VkBufferUsageFlags usage, VkBuffer* out_buffer,
                                   VkDeviceMemory* out_memory, bool* out_coherent)
{
  VkBufferCreateInfo buffer_create_info = {
    VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, // VkStructureType sType
    nullptr,                              // const void* pNext
    0,                                    // VkBufferCreateFlags flags
    size,                                 // VkDeviceSize size
    usage,                                // VkBufferUsageFlags usage
    VK_SHARING_MODE_EXCLUSIVE,            // VkSharingMode sharingMode
    0,                                    // uint32_t queueFamilyIndexCount
    nullptr                               // const uint32_t* pQueueFamilyIndices
  };
  VkResult res = vkCreateBuffer(g_vulkan_context->GetDevice(), &buffer_create_info, nullptr, out_buffer);
  if (res != VK_SUCCESS)
  {
    LOG_VULKAN_ERROR(res, "vkCreateBuffer failed: ");
    return false;
  }

  // Query size/alignment/memory-type requirements for the new buffer.
  VkMemoryRequirements requirements;
  vkGetBufferMemoryRequirements(g_vulkan_context->GetDevice(), *out_buffer, &requirements);

  // Pick a memory type appropriate for the transfer direction.
  u32 type_index;
  if (type == Type::Upload)
    type_index = g_vulkan_context->GetUploadMemoryType(requirements.memoryTypeBits, out_coherent);
  else
    type_index = g_vulkan_context->GetReadbackMemoryType(requirements.memoryTypeBits, out_coherent);

  VkMemoryAllocateInfo memory_allocate_info = {
    VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // VkStructureType sType
    nullptr,                                // const void* pNext
    requirements.size,                      // VkDeviceSize allocationSize
    type_index                              // uint32_t memoryTypeIndex
  };
  res = vkAllocateMemory(g_vulkan_context->GetDevice(), &memory_allocate_info, nullptr, out_memory);
  if (res != VK_SUCCESS)
  {
    // Clean up the buffer created above.
    LOG_VULKAN_ERROR(res, "vkAllocateMemory failed: ");
    vkDestroyBuffer(g_vulkan_context->GetDevice(), *out_buffer, nullptr);
    return false;
  }

  res = vkBindBufferMemory(g_vulkan_context->GetDevice(), *out_buffer, *out_memory, 0);
  if (res != VK_SUCCESS)
  {
    // Clean up both the buffer and the memory allocated above.
    LOG_VULKAN_ERROR(res, "vkBindBufferMemory failed: ");
    vkDestroyBuffer(g_vulkan_context->GetDevice(), *out_buffer, nullptr);
    vkFreeMemory(g_vulkan_context->GetDevice(), *out_memory, nullptr);
    return false;
  }

  return true;
}
// Allocates the buffer and adopts it into this object. On failure, the object
// is left unchanged (and invalid if it was not already valid).
bool StagingBuffer::Create(Type type, VkDeviceSize size, VkBufferUsageFlags usage)
{
  const bool allocated = AllocateBuffer(type, size, usage, &m_buffer, &m_memory, &m_coherent);
  if (!allocated)
    return false;

  m_size = size;
  m_type = type;
  return true;
}
// Releases the buffer and its memory, either immediately or deferred until the
// GPU has finished with them, and resets all state to defaults. Safe to call on
// an invalid buffer (no-op).
void StagingBuffer::Destroy(bool defer /* = true */)
{
  if (!IsValid())
    return;

  // Unmap before destroying
  if (m_map_pointer)
    Unmap();

  if (defer)
  {
    g_vulkan_context->DeferBufferDestruction(m_buffer);
    g_vulkan_context->DeferDeviceMemoryDestruction(m_memory);
  }
  else
  {
    vkDestroyBuffer(g_vulkan_context->GetDevice(), m_buffer, nullptr);
    vkFreeMemory(g_vulkan_context->GetDevice(), m_memory, nullptr);
  }

  // Reset to a default-constructed state.
  m_buffer = VK_NULL_HANDLE;
  m_memory = VK_NULL_HANDLE;
  m_type = Type::Upload;
  m_size = 0;
  m_coherent = false;
  m_map_pointer = nullptr;
  m_map_offset = 0;
  m_map_size = 0;
}
} // namespace Vulkan

View file

@ -1,91 +0,0 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#pragma once
#include "../types.h"
#include "loader.h"
#include <memory>
namespace Vulkan {
// Host-visible Vulkan buffer used for CPU<->GPU transfers. Owns a VkBuffer and
// its backing VkDeviceMemory, and tracks an optional persistent host mapping.
// Movable but not copyable; the destructor performs a deferred destroy so
// in-flight GPU work can finish first.
class StagingBuffer
{
public:
  // Transfer direction; selects which memory pool backs the buffer.
  enum class Type
  {
    Upload,   // CPU -> GPU
    Readback, // GPU -> CPU
    Mutable   // both directions
  };

  StagingBuffer();
  StagingBuffer(StagingBuffer&& move);
  StagingBuffer(const StagingBuffer&) = delete;
  virtual ~StagingBuffer();

  StagingBuffer& operator=(StagingBuffer&& move);
  StagingBuffer& operator=(const StagingBuffer&) = delete;

  ALWAYS_INLINE Type GetType() const { return m_type; }
  ALWAYS_INLINE VkDeviceSize GetSize() const { return m_size; }
  ALWAYS_INLINE VkBuffer GetBuffer() const { return m_buffer; }
  ALWAYS_INLINE bool IsMapped() const { return m_map_pointer != nullptr; }
  ALWAYS_INLINE const char* GetMapPointer() const { return m_map_pointer; }
  ALWAYS_INLINE char* GetMapPointer() { return m_map_pointer; }
  ALWAYS_INLINE VkDeviceSize GetMapOffset() const { return m_map_offset; }
  ALWAYS_INLINE VkDeviceSize GetMapSize() const { return m_map_size; }
  ALWAYS_INLINE bool IsValid() const { return (m_buffer != VK_NULL_HANDLE); }
  ALWAYS_INLINE bool IsCoherent() const { return m_coherent; }

  // Maps [offset, offset + size) into host address space; VK_WHOLE_SIZE maps to the end.
  bool Map(VkDeviceSize offset = 0, VkDeviceSize size = VK_WHOLE_SIZE);
  void Unmap();

  // Upload part 1: Prepare for device read from the CPU side
  void FlushCPUCache(VkDeviceSize offset = 0, VkDeviceSize size = VK_WHOLE_SIZE);

  // Upload part 2: Prepare for device read from the GPU side
  // Implicit when submitting the command buffer, so rarely needed.
  void InvalidateGPUCache(VkCommandBuffer command_buffer, VkAccessFlagBits dst_access_flags,
                          VkPipelineStageFlagBits dst_pipeline_stage, VkDeviceSize offset = 0,
                          VkDeviceSize size = VK_WHOLE_SIZE);

  // Readback part 0: Prepare for GPU usage (if necessary)
  void PrepareForGPUWrite(VkCommandBuffer command_buffer, VkAccessFlagBits dst_access_flags,
                          VkPipelineStageFlagBits dst_pipeline_stage, VkDeviceSize offset = 0,
                          VkDeviceSize size = VK_WHOLE_SIZE);

  // Readback part 1: Prepare for host readback from the GPU side
  void FlushGPUCache(VkCommandBuffer command_buffer, VkAccessFlagBits src_access_flags,
                     VkPipelineStageFlagBits src_pipeline_stage, VkDeviceSize offset = 0,
                     VkDeviceSize size = VK_WHOLE_SIZE);

  // Readback part 2: Prepare for host readback from the CPU side
  void InvalidateCPUCache(VkDeviceSize offset = 0, VkDeviceSize size = VK_WHOLE_SIZE);

  // offset is from the start of the buffer, not from the map offset
  void Read(VkDeviceSize offset, void* data, size_t size, bool invalidate_caches = true);
  void Write(VkDeviceSize offset, const void* data, size_t size, bool invalidate_caches = true);

  // Creates the buffer and its backing memory; replaces any existing buffer.
  bool Create(Type type, VkDeviceSize size, VkBufferUsageFlags usage);

  // Releases the buffer/memory (deferred by default) and resets all state.
  void Destroy(bool defer = true);

  // Allocates the resources needed to create a staging buffer.
  static bool AllocateBuffer(Type type, VkDeviceSize size, VkBufferUsageFlags usage, VkBuffer* out_buffer,
                             VkDeviceMemory* out_memory, bool* out_coherent);

protected:
  Type m_type = Type::Upload;
  VkBuffer m_buffer = VK_NULL_HANDLE;
  VkDeviceMemory m_memory = VK_NULL_HANDLE;
  VkDeviceSize m_size = 0;
  // True if the backing memory type is host-coherent (no explicit flush/invalidate needed).
  bool m_coherent = false;

  // Current host mapping, or nullptr when unmapped. Offset/size are buffer-relative.
  char* m_map_pointer = nullptr;
  VkDeviceSize m_map_offset = 0;
  VkDeviceSize m_map_size = 0;
};
} // namespace Vulkan

View file

@ -1,291 +0,0 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#include "staging_texture.h"
#include "../assert.h"
#include "context.h"
#include "util.h"
namespace Vulkan {
StagingTexture::StagingTexture() = default;

// Move construction: takes over the source's buffer and metadata, including its
// pending-flush state, and resets the source. The old code failed to transfer
// m_needs_flush, so a moved-to texture could miss a required Flush() and the
// moved-from texture kept a stale pending-flush flag.
StagingTexture::StagingTexture(StagingTexture&& move)
  : m_staging_buffer(std::move(move.m_staging_buffer)), m_flush_fence_counter(move.m_flush_fence_counter),
    m_width(move.m_width), m_height(move.m_height), m_texel_size(move.m_texel_size),
    m_map_stride(move.m_map_stride), m_needs_flush(move.m_needs_flush)
{
  move.m_flush_fence_counter = 0;
  move.m_width = 0;
  move.m_height = 0;
  move.m_texel_size = 0;
  move.m_map_stride = 0;
  move.m_needs_flush = false;
}

// Move assignment: destroys any currently-owned buffer, then exchanges state
// (including m_needs_flush, which the old code left behind).
StagingTexture& StagingTexture::operator=(StagingTexture&& move)
{
  if (IsValid())
    Destroy(true);

  std::swap(m_staging_buffer, move.m_staging_buffer);
  std::swap(m_flush_fence_counter, move.m_flush_fence_counter);
  std::swap(m_width, move.m_width);
  std::swap(m_height, move.m_height);
  std::swap(m_texel_size, move.m_texel_size);
  std::swap(m_map_stride, move.m_map_stride);
  std::swap(m_needs_flush, move.m_needs_flush);
  return *this;
}

StagingTexture::~StagingTexture()
{
  // Deferred destroy so in-flight GPU work referencing the buffer completes first.
  if (IsValid())
    Destroy(true);
}
// Creates a staging texture of width x height texels in the given format,
// backed by a persistently-mapped staging buffer sized for a tightly-packed
// image (stride = texel_size * width). Replaces any existing buffer on success;
// leaves the object untouched on failure.
bool StagingTexture::Create(StagingBuffer::Type type, VkFormat format, u32 width, u32 height)
{
  const u32 texel_size = Util::GetTexelSize(format);
  const u32 map_stride = texel_size * width;
  const u32 buffer_size = map_stride * height;

  // Transfer direction determines which buffer usages we need.
  VkBufferUsageFlags usage_flags;
  if (type == StagingBuffer::Type::Readback)
    usage_flags = VK_BUFFER_USAGE_TRANSFER_DST_BIT;
  else if (type == StagingBuffer::Type::Upload)
    usage_flags = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
  else
    usage_flags = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

  // Build the replacement first so failure leaves the current state intact.
  StagingBuffer new_buffer;
  if (!new_buffer.Create(type, buffer_size, usage_flags) || !new_buffer.Map())
    return false;

  if (IsValid())
    Destroy(true);

  m_staging_buffer = std::move(new_buffer);
  m_width = width;
  m_height = height;
  m_texel_size = texel_size;
  m_map_stride = map_stride;
  return true;
}
void StagingTexture::Destroy(bool defer /* = true */)
{
if (!IsValid())
return;
m_staging_buffer.Destroy(defer);
m_flush_fence_counter = 0;
m_width = 0;
m_height = 0;
m_texel_size = 0;
m_map_stride = 0;
}
// Records an image->buffer copy from (src_x, src_y) of the given layer/level of
// src_texture into (dst_x, dst_y) of this staging texture, on the supplied
// command buffer. The source texture is transitioned to TRANSFER_SRC_OPTIMAL
// for the copy and restored to its previous layout afterwards. The caller is
// responsible for tracking the flush (see the command-buffer-less overload).
void StagingTexture::CopyFromTexture(VkCommandBuffer command_buffer, Texture& src_texture, u32 src_x, u32 src_y,
                                     u32 src_layer, u32 src_level, u32 dst_x, u32 dst_y, u32 width, u32 height)
{
  // Only buffers created for readback (or both directions) can be copied into.
  Assert(m_staging_buffer.GetType() == StagingBuffer::Type::Readback ||
         m_staging_buffer.GetType() == StagingBuffer::Type::Mutable);
  Assert((src_x + width) <= src_texture.GetWidth() && (src_y + height) <= src_texture.GetHeight());
  Assert((dst_x + width) <= m_width && (dst_y + height) <= m_height);
  const Vulkan::Util::DebugScope debugScope(command_buffer,
                                            "StagingTexture::CopyFromTexture: {%u,%u} Lyr:%u Lvl:%u {%u,%u} %ux%u",
                                            src_x, src_y, src_layer, src_level, dst_x, dst_y, width, height);

  VkImageLayout old_layout = src_texture.GetLayout();
  src_texture.TransitionToLayout(command_buffer, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);

  // Issue the image->buffer copy. Depth textures copy the depth aspect; everything
  // else copies colour. bufferRowLength = m_width keeps rows at the map stride.
  VkBufferImageCopy image_copy = {};
  const VkImageAspectFlags aspect =
    Util ::IsDepthFormat(src_texture.GetFormat()) ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_COLOR_BIT;
  image_copy.bufferOffset = static_cast<VkDeviceSize>(dst_y * m_map_stride + dst_x * m_texel_size);
  image_copy.bufferRowLength = m_width;
  image_copy.bufferImageHeight = 0;
  image_copy.imageSubresource = {aspect, src_level, src_layer, 1};
  image_copy.imageOffset = {static_cast<int32_t>(src_x), static_cast<int32_t>(src_y), 0};
  image_copy.imageExtent = {width, height, 1u};
  vkCmdCopyImageToBuffer(command_buffer, src_texture.GetImage(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                         m_staging_buffer.GetBuffer(), 1, &image_copy);

  // Restore old source texture layout.
  src_texture.TransitionToLayout(command_buffer, old_layout);
}
// Convenience overload: records the copy on the context's current command
// buffer and marks the texture as needing a Flush() before CPU access.
void StagingTexture::CopyFromTexture(Texture& src_texture, u32 src_x, u32 src_y, u32 src_layer, u32 src_level,
                                     u32 dst_x, u32 dst_y, u32 width, u32 height)
{
  VkCommandBuffer command_buffer = g_vulkan_context->GetCurrentCommandBuffer();
  const Vulkan::Util::DebugScope debugScope(command_buffer,
                                            "StagingTexture::CopyFromTexture: {%u,%u} Lyr:%u Lvl:%u {%u,%u} %ux%u",
                                            src_x, src_y, src_layer, src_level, dst_x, dst_y, width, height);
  CopyFromTexture(command_buffer, src_texture, src_x, src_y, src_layer, src_level, dst_x, dst_y, width, height);

  // Remember which submission the copy belongs to so Flush() knows what to wait on.
  m_needs_flush = true;
  m_flush_fence_counter = g_vulkan_context->GetCurrentFenceCounter();
}
// Records a buffer->image copy from (src_x, src_y) of this staging texture into
// (dst_x, dst_y) of the given layer/level of dst_texture, on the supplied
// command buffer. The destination texture is transitioned to
// TRANSFER_DST_OPTIMAL for the copy and restored to its previous layout.
void StagingTexture::CopyToTexture(VkCommandBuffer command_buffer, u32 src_x, u32 src_y, Texture& dst_texture,
                                   u32 dst_x, u32 dst_y, u32 dst_layer, u32 dst_level, u32 width, u32 height)
{
  // Only buffers created for upload (or both directions) can be copied from.
  Assert(m_staging_buffer.GetType() == StagingBuffer::Type::Upload ||
         m_staging_buffer.GetType() == StagingBuffer::Type::Mutable);
  Assert((dst_x + width) <= dst_texture.GetWidth() && (dst_y + height) <= dst_texture.GetHeight());
  Assert((src_x + width) <= m_width && (src_y + height) <= m_height);

  // Flush caches before copying.
  m_staging_buffer.FlushCPUCache();

  VkImageLayout old_layout = dst_texture.GetLayout();
  dst_texture.TransitionToLayout(command_buffer, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

  // Issue the buffer->image copy. Select the aspect from the destination format,
  // matching CopyFromTexture(); the old code unconditionally used the colour
  // aspect, which is invalid when the destination is a depth texture.
  const VkImageAspectFlags aspect =
    Util::IsDepthFormat(dst_texture.GetFormat()) ? VK_IMAGE_ASPECT_DEPTH_BIT : VK_IMAGE_ASPECT_COLOR_BIT;
  VkBufferImageCopy image_copy = {};
  image_copy.bufferOffset = static_cast<VkDeviceSize>(src_y * m_map_stride + src_x * m_texel_size);
  image_copy.bufferRowLength = m_width;
  image_copy.bufferImageHeight = 0;
  image_copy.imageSubresource = {aspect, dst_level, dst_layer, 1};
  image_copy.imageOffset = {static_cast<int32_t>(dst_x), static_cast<int32_t>(dst_y), 0};
  image_copy.imageExtent = {width, height, 1u};
  vkCmdCopyBufferToImage(command_buffer, m_staging_buffer.GetBuffer(), dst_texture.GetImage(),
                         VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &image_copy);

  // Restore old destination texture layout.
  dst_texture.TransitionToLayout(command_buffer, old_layout);
}
// Convenience overload: records the copy on the context's current command
// buffer and marks the texture as needing a Flush() before the next CPU access.
void StagingTexture::CopyToTexture(u32 src_x, u32 src_y, Texture& dst_texture, u32 dst_x, u32 dst_y, u32 dst_layer,
                                   u32 dst_level, u32 width, u32 height)
{
  VkCommandBuffer command_buffer = g_vulkan_context->GetCurrentCommandBuffer();
  const Vulkan::Util::DebugScope debugScope(command_buffer,
                                            "StagingTexture::CopyToTexture: {%u,%u} | {%u,%u} Lyr:%u Lvl:%u %ux%u",
                                            src_x, src_y, dst_x, dst_y, dst_layer, dst_level, width, height);
  CopyToTexture(command_buffer, src_x, src_y, dst_texture, dst_x, dst_y, dst_layer, dst_level, width, height);

  // Remember which submission the copy belongs to so Flush() knows what to wait on.
  m_needs_flush = true;
  m_flush_fence_counter = g_vulkan_context->GetCurrentFenceCounter();
}
// Waits for the most recent CopyFromTexture()/CopyToTexture() to complete on
// the GPU, then (for readback-capable buffers) invalidates the CPU cache so the
// new data is visible. No-op when there is no pending copy.
void StagingTexture::Flush()
{
  if (!m_needs_flush)
    return;

  // Is this copy in the current command buffer?
  if (g_vulkan_context->GetCurrentFenceCounter() == m_flush_fence_counter)
  {
    // Execute the command buffer and wait for it to finish.
    g_vulkan_context->ExecuteCommandBuffer(true);
  }
  else
  {
    // Wait for the GPU to finish with it.
    g_vulkan_context->WaitForFenceCounter(m_flush_fence_counter);
  }

  // For readback textures, invalidate the CPU cache as there is new data there.
  if (m_staging_buffer.GetType() == StagingBuffer::Type::Readback ||
      m_staging_buffer.GetType() == StagingBuffer::Type::Mutable)
  {
    m_staging_buffer.InvalidateCPUCache();
  }

  m_needs_flush = false;
}
// Copies a width x height region starting at (src_x, src_y) out of the staging
// texture into out_ptr, writing out_stride bytes per destination row. Performs
// any pending Flush() first. Texels outside the region last copied from the GPU
// are undefined.
void StagingTexture::ReadTexels(u32 src_x, u32 src_y, u32 width, u32 height, void* out_ptr, u32 out_stride)
{
  Assert(m_staging_buffer.GetType() != StagingBuffer::Type::Upload);
  Assert((src_x + width) <= m_width && (src_y + height) <= m_height);
  PrepareForAccess();

  // Point at the first texel of the requested region.
  const char* src_row = m_staging_buffer.GetMapPointer() + src_y * m_map_stride + src_x * m_texel_size;

  // Fast path: full rows with a matching destination stride copy in one go.
  if (src_x == 0 && width == m_width && m_map_stride == out_stride)
  {
    std::memcpy(out_ptr, src_row, m_map_stride * height);
    return;
  }

  // Slow path: copy row by row.
  const size_t row_bytes = std::min<u32>(width * m_texel_size, m_map_stride);
  char* dst_row = reinterpret_cast<char*>(out_ptr);
  for (u32 row = 0; row < height; row++)
  {
    std::memcpy(dst_row, src_row, row_bytes);
    src_row += m_map_stride;
    dst_row += out_stride;
  }
}

// Copies the single texel at (x, y) into out_ptr (m_texel_size bytes).
void StagingTexture::ReadTexel(u32 x, u32 y, void* out_ptr)
{
  Assert(m_staging_buffer.GetType() != StagingBuffer::Type::Upload);
  Assert(x < m_width && y < m_height);
  PrepareForAccess();

  const char* texel = GetMappedPointer() + y * GetMappedStride() + x * m_texel_size;
  std::memcpy(out_ptr, texel, m_texel_size);
}
// Copies a width x height region from in_ptr (in_stride bytes per source row)
// into the staging texture at (dst_x, dst_y). Performs any pending Flush()
// first; call CopyToTexture() afterwards to update the GPU copy.
void StagingTexture::WriteTexels(u32 dst_x, u32 dst_y, u32 width, u32 height, const void* in_ptr, u32 in_stride)
{
  Assert(m_staging_buffer.GetType() != StagingBuffer::Type::Readback);
  Assert((dst_x + width) <= m_width && (dst_y + height) <= m_height);
  PrepareForAccess();

  // Point at the first texel of the destination region.
  char* dst_row = GetMappedPointer() + dst_y * m_map_stride + dst_x * m_texel_size;

  // Fast path: full rows with a matching source stride copy in one go.
  if (dst_x == 0 && width == m_width && m_map_stride == in_stride)
  {
    std::memcpy(dst_row, in_ptr, m_map_stride * height);
    return;
  }

  // Slow path: copy row by row.
  const size_t row_bytes = std::min<u32>(width * m_texel_size, m_map_stride);
  const char* src_row = reinterpret_cast<const char*>(in_ptr);
  for (u32 row = 0; row < height; row++)
  {
    std::memcpy(dst_row, src_row, row_bytes);
    dst_row += m_map_stride;
    src_row += in_stride;
  }
}

// Copies a single texel (m_texel_size bytes) from in_ptr into (x, y).
void StagingTexture::WriteTexel(u32 x, u32 y, const void* in_ptr)
{
  Assert(x < m_width && y < m_height);
  PrepareForAccess();

  char* texel = GetMappedPointer() + y * m_map_stride + x * m_texel_size;
  std::memcpy(texel, in_ptr, m_texel_size);
}
// Ensures the mapped data is up to date before the CPU touches it: flushes any
// copy that has been recorded but not yet waited on.
void StagingTexture::PrepareForAccess()
{
  Assert(IsMapped());
  if (!m_needs_flush)
    return;

  Flush();
}
} // namespace Vulkan

View file

@ -1,77 +0,0 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#pragma once
#include "staging_buffer.h"
#include "texture.h"
namespace Vulkan {
// 2D image data staged in a persistently-mapped host-visible buffer, used for
// uploading texel data to (or reading it back from) a GPU Texture. Tracks the
// fence counter of the last recorded copy so CPU access can wait for the GPU.
// Movable but not copyable.
class StagingTexture final
{
public:
  StagingTexture();
  StagingTexture(StagingTexture&& move);
  StagingTexture(const StagingTexture&) = delete;
  ~StagingTexture();

  StagingTexture& operator=(StagingTexture&& move);
  StagingTexture& operator=(const StagingTexture&) = delete;

  ALWAYS_INLINE bool IsValid() const { return m_staging_buffer.IsValid(); }
  ALWAYS_INLINE bool IsMapped() const { return m_staging_buffer.IsMapped(); }
  ALWAYS_INLINE const char* GetMappedPointer() const { return m_staging_buffer.GetMapPointer(); }
  ALWAYS_INLINE char* GetMappedPointer() { return m_staging_buffer.GetMapPointer(); }
  ALWAYS_INLINE u32 GetMappedStride() const { return m_map_stride; }
  ALWAYS_INLINE u32 GetWidth() const { return m_width; }
  ALWAYS_INLINE u32 GetHeight() const { return m_height; }

  // Creates a width x height staging texture for the given format; the buffer
  // type determines the supported transfer direction(s).
  bool Create(StagingBuffer::Type type, VkFormat format, u32 width, u32 height);
  void Destroy(bool defer = true);

  // Copies from the GPU texture object to the staging texture, which can be mapped/read by the CPU.
  // Both regions must be within the bounds of the specified textures.
  void CopyFromTexture(VkCommandBuffer command_buffer, Texture& src_texture, u32 src_x, u32 src_y, u32 src_layer,
                       u32 src_level, u32 dst_x, u32 dst_y, u32 width, u32 height);
  void CopyFromTexture(Texture& src_texture, u32 src_x, u32 src_y, u32 src_layer, u32 src_level, u32 dst_x, u32 dst_y,
                       u32 width, u32 height);

  // Copies from the staging texture to the GPU texture object. Both regions
  // must be within the bounds of the specified textures. The overload without a
  // command buffer uses the context's current command buffer and records the
  // fence counter for a later Flush().
  void CopyToTexture(VkCommandBuffer command_buffer, u32 src_x, u32 src_y, Texture& dst_texture, u32 dst_x, u32 dst_y,
                     u32 dst_layer, u32 dst_level, u32 width, u32 height);
  void CopyToTexture(u32 src_x, u32 src_y, Texture& dst_texture, u32 dst_x, u32 dst_y, u32 dst_layer, u32 dst_level,
                     u32 width, u32 height);

  // Flushes pending writes from the CPU to the GPU, and reads from the GPU to the CPU.
  // This may cause a command buffer flush depending on if one has occurred between the last
  // call to CopyFromTexture()/CopyToTexture() and the Flush() call.
  void Flush();

  // Reads the specified rectangle from the staging texture to out_ptr, with the specified stride
  // (length in bytes of each row). CopyFromTexture must be called first. The contents of any
  // texels outside of the rectangle used for CopyFromTexture is undefined.
  void ReadTexels(u32 src_x, u32 src_y, u32 width, u32 height, void* out_ptr, u32 out_stride);
  void ReadTexel(u32 x, u32 y, void* out_ptr);

  // Copies the texels from in_ptr to the staging texture, which can be read by the GPU, with the
  // specified stride (length in bytes of each row). After updating the staging texture with all
  // changes, call CopyToTexture() to update the GPU copy.
  void WriteTexels(u32 dst_x, u32 dst_y, u32 width, u32 height, const void* in_ptr, u32 in_stride);
  void WriteTexel(u32 x, u32 y, const void* in_ptr);

private:
  // Flushes any pending copy before the CPU reads or writes the mapped data.
  void PrepareForAccess();

  StagingBuffer m_staging_buffer;
  // Fence counter of the command buffer containing the last recorded copy.
  u64 m_flush_fence_counter = 0;
  u32 m_width = 0;
  u32 m_height = 0;
  u32 m_texel_size = 0;
  // Bytes per row of the mapped image data (texel_size * width).
  u32 m_map_stride = 0;
  // True when a copy has been recorded but not yet waited on via Flush().
  bool m_needs_flush = false;
};
} // namespace Vulkan

View file

@ -1,8 +1,3 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#include "stream_buffer.h" #include "stream_buffer.h"
#include "../align.h" #include "../align.h"
#include "../assert.h" #include "../assert.h"
@ -12,15 +7,20 @@
Log_SetChannel(Vulkan::StreamBuffer); Log_SetChannel(Vulkan::StreamBuffer);
namespace Vulkan { namespace Vulkan {
StreamBuffer::StreamBuffer() = default; StreamBuffer::StreamBuffer() = default;
StreamBuffer::StreamBuffer(StreamBuffer&& move) StreamBuffer::StreamBuffer(StreamBuffer&& move)
: m_usage(move.m_usage), m_size(move.m_size), m_current_offset(move.m_current_offset), : m_size(move.m_size), m_current_offset(move.m_current_offset), m_current_space(move.m_current_space),
m_current_space(move.m_current_space), m_current_gpu_position(move.m_current_gpu_position), m_buffer(move.m_buffer), m_current_gpu_position(move.m_current_gpu_position), m_allocation(move.m_allocation), m_buffer(move.m_buffer),
m_memory(move.m_memory), m_host_pointer(move.m_host_pointer), m_tracked_fences(std::move(move.m_tracked_fences)), m_host_pointer(move.m_host_pointer), m_tracked_fences(std::move(move.m_tracked_fences))
m_coherent_mapping(move.m_coherent_mapping)
{ {
move.m_size = 0;
move.m_current_offset = 0;
move.m_current_space = 0;
move.m_current_gpu_position = 0;
move.m_allocation = VK_NULL_HANDLE;
move.m_buffer = VK_NULL_HANDLE;
move.m_host_pointer = nullptr;
} }
StreamBuffer::~StreamBuffer() StreamBuffer::~StreamBuffer()
@ -34,130 +34,74 @@ StreamBuffer& StreamBuffer::operator=(StreamBuffer&& move)
if (IsValid()) if (IsValid())
Destroy(true); Destroy(true);
std::swap(m_usage, move.m_usage);
std::swap(m_size, move.m_size); std::swap(m_size, move.m_size);
std::swap(m_current_offset, move.m_current_offset); std::swap(m_current_offset, move.m_current_offset);
std::swap(m_current_space, move.m_current_space); std::swap(m_current_space, move.m_current_space);
std::swap(m_current_gpu_position, move.m_current_gpu_position); std::swap(m_current_gpu_position, move.m_current_gpu_position);
std::swap(m_buffer, move.m_buffer); std::swap(m_buffer, move.m_buffer);
std::swap(m_memory, move.m_memory);
std::swap(m_host_pointer, move.m_host_pointer); std::swap(m_host_pointer, move.m_host_pointer);
std::swap(m_tracked_fences, move.m_tracked_fences); std::swap(m_tracked_fences, move.m_tracked_fences);
std::swap(m_coherent_mapping, move.m_coherent_mapping);
return *this; return *this;
} }
bool StreamBuffer::Create(VkBufferUsageFlags usage, u32 size) bool StreamBuffer::Create(VkBufferUsageFlags usage, u32 size)
{ {
// Create the buffer descriptor const VkBufferCreateInfo bci = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
VkBufferCreateInfo buffer_create_info = { nullptr,
VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, // VkStructureType sType 0,
nullptr, // const void* pNext static_cast<VkDeviceSize>(size),
0, // VkBufferCreateFlags flags usage,
static_cast<VkDeviceSize>(size), // VkDeviceSize size VK_SHARING_MODE_EXCLUSIVE,
usage, // VkBufferUsageFlags usage 0,
VK_SHARING_MODE_EXCLUSIVE, // VkSharingMode sharingMode nullptr};
0, // uint32_t queueFamilyIndexCount
nullptr // const uint32_t* pQueueFamilyIndices
};
VkBuffer buffer = VK_NULL_HANDLE; VmaAllocationCreateInfo aci = {};
VkResult res = vkCreateBuffer(g_vulkan_context->GetDevice(), &buffer_create_info, nullptr, &buffer); aci.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
aci.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
aci.preferredFlags = VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
VmaAllocationInfo ai = {};
VkBuffer new_buffer = VK_NULL_HANDLE;
VmaAllocation new_allocation = VK_NULL_HANDLE;
VkResult res = vmaCreateBuffer(g_vulkan_context->GetAllocator(), &bci, &aci, &new_buffer, &new_allocation, &ai);
if (res != VK_SUCCESS) if (res != VK_SUCCESS)
{ {
LOG_VULKAN_ERROR(res, "vkCreateBuffer failed: "); LOG_VULKAN_ERROR(res, "vkCreateBuffer failed: ");
return false; return false;
} }
// Get memory requirements (types etc) for this buffer
VkMemoryRequirements memory_requirements;
vkGetBufferMemoryRequirements(g_vulkan_context->GetDevice(), buffer, &memory_requirements);
// Aim for a coherent mapping if possible.
u32 memory_type_index =
g_vulkan_context->GetUploadMemoryType(memory_requirements.memoryTypeBits, &m_coherent_mapping);
// Allocate memory for backing this buffer
VkMemoryAllocateInfo memory_allocate_info = {
VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, // VkStructureType sType
nullptr, // const void* pNext
memory_requirements.size, // VkDeviceSize allocationSize
memory_type_index // uint32_t memoryTypeIndex
};
VkDeviceMemory memory = VK_NULL_HANDLE;
res = vkAllocateMemory(g_vulkan_context->GetDevice(), &memory_allocate_info, nullptr, &memory);
if (res != VK_SUCCESS)
{
LOG_VULKAN_ERROR(res, "vkAllocateMemory failed: ");
vkDestroyBuffer(g_vulkan_context->GetDevice(), buffer, nullptr);
return false;
}
// Bind memory to buffer
res = vkBindBufferMemory(g_vulkan_context->GetDevice(), buffer, memory, 0);
if (res != VK_SUCCESS)
{
LOG_VULKAN_ERROR(res, "vkBindBufferMemory failed: ");
vkDestroyBuffer(g_vulkan_context->GetDevice(), buffer, nullptr);
vkFreeMemory(g_vulkan_context->GetDevice(), memory, nullptr);
return false;
}
// Map this buffer into user-space
void* mapped_ptr = nullptr;
res = vkMapMemory(g_vulkan_context->GetDevice(), memory, 0, size, 0, &mapped_ptr);
if (res != VK_SUCCESS)
{
LOG_VULKAN_ERROR(res, "vkMapMemory failed: ");
vkDestroyBuffer(g_vulkan_context->GetDevice(), buffer, nullptr);
vkFreeMemory(g_vulkan_context->GetDevice(), memory, nullptr);
return false;
}
// Unmap current host pointer (if there was a previous buffer)
if (m_host_pointer)
vkUnmapMemory(g_vulkan_context->GetDevice(), m_memory);
if (IsValid()) if (IsValid())
Destroy(true); Destroy(true);
// Replace with the new buffer // Replace with the new buffer
m_usage = usage;
m_size = size; m_size = size;
m_buffer = buffer;
m_memory = memory;
m_host_pointer = reinterpret_cast<u8*>(mapped_ptr);
m_current_offset = 0; m_current_offset = 0;
m_current_gpu_position = 0; m_current_gpu_position = 0;
m_tracked_fences.clear(); m_tracked_fences.clear();
m_allocation = new_allocation;
m_buffer = new_buffer;
m_host_pointer = static_cast<u8*>(ai.pMappedData);
return true; return true;
} }
void StreamBuffer::Destroy(bool defer) void StreamBuffer::Destroy(bool defer)
{ {
if (m_host_pointer)
{
vkUnmapMemory(g_vulkan_context->GetDevice(), m_memory);
m_host_pointer = nullptr;
}
if (m_buffer != VK_NULL_HANDLE) if (m_buffer != VK_NULL_HANDLE)
{ {
if (defer) if (defer)
g_vulkan_context->DeferBufferDestruction(m_buffer); g_vulkan_context->DeferBufferDestruction(m_buffer, m_allocation);
else else
vkDestroyBuffer(g_vulkan_context->GetDevice(), m_buffer, nullptr); vmaDestroyBuffer(g_vulkan_context->GetAllocator(), m_buffer, m_allocation);
m_buffer = VK_NULL_HANDLE;
}
if (m_memory != VK_NULL_HANDLE)
{
if (defer)
g_vulkan_context->DeferDeviceMemoryDestruction(m_memory);
else
vkFreeMemory(g_vulkan_context->GetDevice(), m_memory, nullptr);
m_memory = VK_NULL_HANDLE;
} }
m_size = 0;
m_current_offset = 0;
m_current_gpu_position = 0;
m_tracked_fences.clear();
m_buffer = VK_NULL_HANDLE;
m_allocation = VK_NULL_HANDLE;
m_host_pointer = nullptr;
} }
bool StreamBuffer::ReserveMemory(u32 num_bytes, u32 alignment) bool StreamBuffer::ReserveMemory(u32 num_bytes, u32 alignment)
@ -231,16 +175,11 @@ bool StreamBuffer::ReserveMemory(u32 num_bytes, u32 alignment)
void StreamBuffer::CommitMemory(u32 final_num_bytes) void StreamBuffer::CommitMemory(u32 final_num_bytes)
{ {
Assert((m_current_offset + final_num_bytes) <= m_size); DebugAssert((m_current_offset + final_num_bytes) <= m_size);
Assert(final_num_bytes <= m_current_space); DebugAssert(final_num_bytes <= m_current_space);
// For non-coherent mappings, flush the memory range // For non-coherent mappings, flush the memory range
if (!m_coherent_mapping) vmaFlushAllocation(g_vulkan_context->GetAllocator(), m_allocation, m_current_offset, final_num_bytes);
{
VkMappedMemoryRange range = {VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, nullptr, m_memory, m_current_offset,
final_num_bytes};
vkFlushMappedMemoryRanges(g_vulkan_context->GetDevice(), 1, &range);
}
m_current_offset += final_num_bytes; m_current_offset += final_num_bytes;
m_current_space -= final_num_bytes; m_current_space -= final_num_bytes;
@ -366,4 +305,4 @@ bool StreamBuffer::WaitForClearSpace(u32 num_bytes)
return true; return true;
} }
} // namespace Vulkan } // namespace Vulkan

View file

@ -1,8 +1,3 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#pragma once #pragma once
#include "../types.h" #include "../types.h"
@ -26,9 +21,9 @@ public:
ALWAYS_INLINE bool IsValid() const { return (m_buffer != VK_NULL_HANDLE); } ALWAYS_INLINE bool IsValid() const { return (m_buffer != VK_NULL_HANDLE); }
ALWAYS_INLINE VkBuffer GetBuffer() const { return m_buffer; } ALWAYS_INLINE VkBuffer GetBuffer() const { return m_buffer; }
ALWAYS_INLINE const VkBuffer* GetBufferPointer() const { return &m_buffer; } ALWAYS_INLINE const VkBuffer* GetBufferPointer() const { return &m_buffer; }
ALWAYS_INLINE VkDeviceMemory GetDeviceMemory() const { return m_memory; } ALWAYS_INLINE VmaAllocation GetAllocation() const { return m_allocation; }
ALWAYS_INLINE void* GetHostPointer() const { return m_host_pointer; } ALWAYS_INLINE u8* GetHostPointer() const { return m_host_pointer; }
ALWAYS_INLINE void* GetCurrentHostPointer() const { return m_host_pointer + m_current_offset; } ALWAYS_INLINE u8* GetCurrentHostPointer() const { return m_host_pointer + m_current_offset; }
ALWAYS_INLINE u32 GetCurrentSize() const { return m_size; } ALWAYS_INLINE u32 GetCurrentSize() const { return m_size; }
ALWAYS_INLINE u32 GetCurrentSpace() const { return m_current_space; } ALWAYS_INLINE u32 GetCurrentSpace() const { return m_current_space; }
ALWAYS_INLINE u32 GetCurrentOffset() const { return m_current_offset; } ALWAYS_INLINE u32 GetCurrentOffset() const { return m_current_offset; }
@ -47,20 +42,17 @@ private:
// Waits for as many fences as needed to allocate num_bytes bytes from the buffer. // Waits for as many fences as needed to allocate num_bytes bytes from the buffer.
bool WaitForClearSpace(u32 num_bytes); bool WaitForClearSpace(u32 num_bytes);
VkBufferUsageFlags m_usage = 0;
u32 m_size = 0; u32 m_size = 0;
u32 m_current_offset = 0; u32 m_current_offset = 0;
u32 m_current_space = 0; u32 m_current_space = 0;
u32 m_current_gpu_position = 0; u32 m_current_gpu_position = 0;
VmaAllocation m_allocation = VK_NULL_HANDLE;
VkBuffer m_buffer = VK_NULL_HANDLE; VkBuffer m_buffer = VK_NULL_HANDLE;
VkDeviceMemory m_memory = VK_NULL_HANDLE;
u8* m_host_pointer = nullptr; u8* m_host_pointer = nullptr;
// List of fences and the corresponding positions in the buffer // List of fences and the corresponding positions in the buffer
std::deque<std::pair<u64, u32>> m_tracked_fences; std::deque<std::pair<u64, u32>> m_tracked_fences;
bool m_coherent_mapping = false;
}; };
} // namespace Vulkan } // namespace Vulkan

View file

@ -1,8 +1,3 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#include "swap_chain.h" #include "swap_chain.h"
#include "../assert.h" #include "../assert.h"
#include "../log.h" #include "../log.h"

View file

@ -1,8 +1,3 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#pragma once #pragma once
#include "../types.h" #include "../types.h"

View file

@ -1,21 +1,22 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#include "texture.h" #include "texture.h"
#include "../align.h"
#include "../assert.h" #include "../assert.h"
#include "../log.h"
#include "../string_util.h"
#include "context.h" #include "context.h"
#include "util.h" #include "util.h"
#include <algorithm> #include <algorithm>
Log_SetChannel(Texture);
namespace Vulkan { static constexpr VkComponentMapping s_identity_swizzle{VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
Texture::Texture() = default; VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY};
Texture::Texture(Texture&& move) Vulkan::Texture::Texture() = default;
Vulkan::Texture::Texture(Texture&& move)
: m_width(move.m_width), m_height(move.m_height), m_levels(move.m_levels), m_layers(move.m_layers), : m_width(move.m_width), m_height(move.m_height), m_levels(move.m_levels), m_layers(move.m_layers),
m_format(move.m_format), m_samples(move.m_samples), m_view_type(move.m_view_type), m_layout(move.m_layout), m_format(move.m_format), m_samples(move.m_samples), m_view_type(move.m_view_type), m_layout(move.m_layout),
m_image(move.m_image), m_device_memory(move.m_device_memory), m_view(move.m_view) m_image(move.m_image), m_allocation(move.m_allocation), m_view(move.m_view)
{ {
move.m_width = 0; move.m_width = 0;
move.m_height = 0; move.m_height = 0;
@ -26,17 +27,17 @@ Texture::Texture(Texture&& move)
move.m_view_type = VK_IMAGE_VIEW_TYPE_2D; move.m_view_type = VK_IMAGE_VIEW_TYPE_2D;
move.m_layout = VK_IMAGE_LAYOUT_UNDEFINED; move.m_layout = VK_IMAGE_LAYOUT_UNDEFINED;
move.m_image = VK_NULL_HANDLE; move.m_image = VK_NULL_HANDLE;
move.m_device_memory = VK_NULL_HANDLE; move.m_allocation = VK_NULL_HANDLE;
move.m_view = VK_NULL_HANDLE; move.m_view = VK_NULL_HANDLE;
} }
Texture::~Texture() Vulkan::Texture::~Texture()
{ {
if (IsValid()) if (IsValid())
Destroy(true); Destroy(true);
} }
Vulkan::Texture& Texture::operator=(Texture&& move) Vulkan::Texture& Vulkan::Texture::operator=(Texture&& move)
{ {
if (IsValid()) if (IsValid())
Destroy(true); Destroy(true);
@ -50,85 +51,78 @@ Vulkan::Texture& Texture::operator=(Texture&& move)
std::swap(m_view_type, move.m_view_type); std::swap(m_view_type, move.m_view_type);
std::swap(m_layout, move.m_layout); std::swap(m_layout, move.m_layout);
std::swap(m_image, move.m_image); std::swap(m_image, move.m_image);
std::swap(m_device_memory, move.m_device_memory); std::swap(m_allocation, move.m_allocation);
std::swap(m_view, move.m_view); std::swap(m_view, move.m_view);
return *this; return *this;
} }
bool Texture::Create(u32 width, u32 height, u32 levels, u32 layers, VkFormat format, VkSampleCountFlagBits samples, bool Vulkan::Texture::Create(u32 width, u32 height, u32 levels, u32 layers, VkFormat format,
VkImageViewType view_type, VkImageTiling tiling, VkImageUsageFlags usage) VkSampleCountFlagBits samples, VkImageViewType view_type, VkImageTiling tiling,
VkImageUsageFlags usage, bool dedicated_memory /* = false */,
const VkComponentMapping* swizzle /* = nullptr */)
{ {
VkImageCreateInfo image_info = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO, const VkImageCreateInfo image_info = {VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
nullptr, nullptr,
0, 0,
VK_IMAGE_TYPE_2D, VK_IMAGE_TYPE_2D,
format, format,
{width, height, 1}, {width, height, 1},
levels, levels,
layers, layers,
samples, samples,
tiling, tiling,
usage, usage,
VK_SHARING_MODE_EXCLUSIVE, VK_SHARING_MODE_EXCLUSIVE,
0, 0,
nullptr, nullptr,
VK_IMAGE_LAYOUT_UNDEFINED}; VK_IMAGE_LAYOUT_UNDEFINED};
VmaAllocationCreateInfo aci = {};
aci.usage = VMA_MEMORY_USAGE_GPU_ONLY;
aci.flags = VMA_ALLOCATION_CREATE_WITHIN_BUDGET_BIT;
aci.requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
if (dedicated_memory)
aci.flags |= VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
VkImage image = VK_NULL_HANDLE; VkImage image = VK_NULL_HANDLE;
VkResult res = vkCreateImage(g_vulkan_context->GetDevice(), &image_info, nullptr, &image); VmaAllocation allocation = VK_NULL_HANDLE;
if (res != VK_SUCCESS) VkResult res = vmaCreateImage(g_vulkan_context->GetAllocator(), &image_info, &aci, &image, &allocation, nullptr);
if (res != VK_SUCCESS && dedicated_memory)
{ {
LOG_VULKAN_ERROR(res, "vkCreateImage failed: "); // try without dedicated memory
aci.flags &= ~VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT;
res = vmaCreateImage(g_vulkan_context->GetAllocator(), &image_info, &aci, &image, &allocation, nullptr);
}
if (res == VK_ERROR_OUT_OF_DEVICE_MEMORY)
{
Log_WarningPrintf("Failed to allocate device memory for %ux%u texture", width, height);
return false;
}
else if (res != VK_SUCCESS)
{
LOG_VULKAN_ERROR(res, "vmaCreateImage failed: ");
return false; return false;
} }
// Allocate memory to back this texture, we want device local memory in this case const VkImageViewCreateInfo view_info = {VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
VkMemoryRequirements memory_requirements; nullptr,
vkGetImageMemoryRequirements(g_vulkan_context->GetDevice(), image, &memory_requirements); 0,
image,
VkMemoryAllocateInfo memory_info = { view_type,
VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO, nullptr, memory_requirements.size, format,
g_vulkan_context->GetMemoryType(memory_requirements.memoryTypeBits, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT)}; swizzle ? *swizzle : s_identity_swizzle,
{Util::IsDepthFormat(format) ?
VkDeviceMemory device_memory; static_cast<VkImageAspectFlags>(VK_IMAGE_ASPECT_DEPTH_BIT) :
res = vkAllocateMemory(g_vulkan_context->GetDevice(), &memory_info, nullptr, &device_memory); static_cast<VkImageAspectFlags>(VK_IMAGE_ASPECT_COLOR_BIT),
if (res != VK_SUCCESS) 0, levels, 0, layers}};
{
LOG_VULKAN_ERROR(res, "vkAllocateMemory failed: ");
vkDestroyImage(g_vulkan_context->GetDevice(), image, nullptr);
return false;
}
res = vkBindImageMemory(g_vulkan_context->GetDevice(), image, device_memory, 0);
if (res != VK_SUCCESS)
{
LOG_VULKAN_ERROR(res, "vkBindImageMemory failed: ");
vkDestroyImage(g_vulkan_context->GetDevice(), image, nullptr);
vkFreeMemory(g_vulkan_context->GetDevice(), device_memory, nullptr);
return false;
}
VkImageViewCreateInfo view_info = {VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
nullptr,
0,
image,
view_type,
format,
{VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY,
VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY},
{Util::IsDepthFormat(format) ?
static_cast<VkImageAspectFlags>(VK_IMAGE_ASPECT_DEPTH_BIT) :
static_cast<VkImageAspectFlags>(VK_IMAGE_ASPECT_COLOR_BIT),
0, levels, 0, layers}};
VkImageView view = VK_NULL_HANDLE; VkImageView view = VK_NULL_HANDLE;
res = vkCreateImageView(g_vulkan_context->GetDevice(), &view_info, nullptr, &view); res = vkCreateImageView(g_vulkan_context->GetDevice(), &view_info, nullptr, &view);
if (res != VK_SUCCESS) if (res != VK_SUCCESS)
{ {
LOG_VULKAN_ERROR(res, "vkCreateImageView failed: "); LOG_VULKAN_ERROR(res, "vkCreateImageView failed: ");
vkDestroyImage(g_vulkan_context->GetDevice(), image, nullptr); vmaDestroyImage(g_vulkan_context->GetAllocator(), image, allocation);
vkFreeMemory(g_vulkan_context->GetDevice(), device_memory, nullptr);
return false; return false;
} }
@ -143,27 +137,27 @@ bool Texture::Create(u32 width, u32 height, u32 levels, u32 layers, VkFormat for
m_samples = samples; m_samples = samples;
m_view_type = view_type; m_view_type = view_type;
m_image = image; m_image = image;
m_device_memory = device_memory; m_allocation = allocation;
m_view = view; m_view = view;
return true; return true;
} }
bool Texture::Adopt(VkImage existing_image, VkImageViewType view_type, u32 width, u32 height, u32 levels, u32 layers, bool Vulkan::Texture::Adopt(VkImage existing_image, VkImageViewType view_type, u32 width, u32 height, u32 levels,
VkFormat format, VkSampleCountFlagBits samples) u32 layers, VkFormat format, VkSampleCountFlagBits samples,
const VkComponentMapping* swizzle /* = nullptr */)
{ {
// Only need to create the image view, this is mainly for swap chains. // Only need to create the image view, this is mainly for swap chains.
VkImageViewCreateInfo view_info = {VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO, const VkImageViewCreateInfo view_info = {VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
nullptr, nullptr,
0, 0,
existing_image, existing_image,
view_type, view_type,
format, format,
{VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY, swizzle ? *swizzle : s_identity_swizzle,
VK_COMPONENT_SWIZZLE_IDENTITY, VK_COMPONENT_SWIZZLE_IDENTITY}, {Util::IsDepthFormat(format) ?
{Util::IsDepthFormat(format) ? static_cast<VkImageAspectFlags>(VK_IMAGE_ASPECT_DEPTH_BIT) :
static_cast<VkImageAspectFlags>(VK_IMAGE_ASPECT_DEPTH_BIT) : static_cast<VkImageAspectFlags>(VK_IMAGE_ASPECT_COLOR_BIT),
static_cast<VkImageAspectFlags>(VK_IMAGE_ASPECT_COLOR_BIT), 0, levels, 0, layers}};
0, levels, 0, layers}};
// Memory is managed by the owner of the image. // Memory is managed by the owner of the image.
VkImageView view = VK_NULL_HANDLE; VkImageView view = VK_NULL_HANDLE;
@ -189,7 +183,7 @@ bool Texture::Adopt(VkImage existing_image, VkImageViewType view_type, u32 width
return true; return true;
} }
void Texture::Destroy(bool defer /* = true */) void Vulkan::Texture::Destroy(bool defer /* = true */)
{ {
if (m_view != VK_NULL_HANDLE) if (m_view != VK_NULL_HANDLE)
{ {
@ -201,20 +195,15 @@ void Texture::Destroy(bool defer /* = true */)
} }
// If we don't have device memory allocated, the image is not owned by us (e.g. swapchain) // If we don't have device memory allocated, the image is not owned by us (e.g. swapchain)
if (m_device_memory != VK_NULL_HANDLE) if (m_allocation != VK_NULL_HANDLE)
{ {
DebugAssert(m_image != VK_NULL_HANDLE); Assert(m_image != VK_NULL_HANDLE);
if (defer) if (defer)
g_vulkan_context->DeferImageDestruction(m_image); g_vulkan_context->DeferImageDestruction(m_image, m_allocation);
else else
vkDestroyImage(g_vulkan_context->GetDevice(), m_image, nullptr); vmaDestroyImage(g_vulkan_context->GetAllocator(), m_image, m_allocation);
m_image = VK_NULL_HANDLE; m_image = VK_NULL_HANDLE;
m_allocation = VK_NULL_HANDLE;
if (defer)
g_vulkan_context->DeferDeviceMemoryDestruction(m_device_memory);
else
vkFreeMemory(g_vulkan_context->GetDevice(), m_device_memory, nullptr);
m_device_memory = VK_NULL_HANDLE;
} }
m_width = 0; m_width = 0;
@ -225,17 +214,14 @@ void Texture::Destroy(bool defer /* = true */)
m_samples = VK_SAMPLE_COUNT_1_BIT; m_samples = VK_SAMPLE_COUNT_1_BIT;
m_view_type = VK_IMAGE_VIEW_TYPE_2D; m_view_type = VK_IMAGE_VIEW_TYPE_2D;
m_layout = VK_IMAGE_LAYOUT_UNDEFINED; m_layout = VK_IMAGE_LAYOUT_UNDEFINED;
m_image = VK_NULL_HANDLE;
m_device_memory = VK_NULL_HANDLE;
m_view = VK_NULL_HANDLE;
} }
void Texture::OverrideImageLayout(VkImageLayout new_layout) void Vulkan::Texture::OverrideImageLayout(VkImageLayout new_layout)
{ {
m_layout = new_layout; m_layout = new_layout;
} }
void Texture::TransitionToLayout(VkCommandBuffer command_buffer, VkImageLayout new_layout) void Vulkan::Texture::TransitionToLayout(VkCommandBuffer command_buffer, VkImageLayout new_layout)
{ {
if (m_layout == new_layout) if (m_layout == new_layout)
return; return;
@ -247,9 +233,9 @@ void Texture::TransitionToLayout(VkCommandBuffer command_buffer, VkImageLayout n
m_layout = new_layout; m_layout = new_layout;
} }
void Texture::TransitionSubresourcesToLayout(VkCommandBuffer command_buffer, u32 start_level, u32 num_levels, void Vulkan::Texture::TransitionSubresourcesToLayout(VkCommandBuffer command_buffer, u32 start_level, u32 num_levels,
u32 start_layer, u32 num_layers, VkImageLayout old_layout, u32 start_layer, u32 num_layers, VkImageLayout old_layout,
VkImageLayout new_layout) VkImageLayout new_layout)
{ {
const Vulkan::Util::DebugScope debugScope( const Vulkan::Util::DebugScope debugScope(
command_buffer, "Texture::TransitionSubresourcesToLayout: Lvl:[%u,%u) Lyr:[%u,%u) %s -> %s", start_level, command_buffer, "Texture::TransitionSubresourcesToLayout: Lvl:[%u,%u) Lyr:[%u,%u) %s -> %s", start_level,
@ -369,7 +355,7 @@ void Texture::TransitionSubresourcesToLayout(VkCommandBuffer command_buffer, u32
vkCmdPipelineBarrier(command_buffer, srcStageMask, dstStageMask, 0, 0, nullptr, 0, nullptr, 1, &barrier); vkCmdPipelineBarrier(command_buffer, srcStageMask, dstStageMask, 0, 0, nullptr, 0, nullptr, 1, &barrier);
} }
VkFramebuffer Texture::CreateFramebuffer(VkRenderPass render_pass) VkFramebuffer Vulkan::Texture::CreateFramebuffer(VkRenderPass render_pass)
{ {
const VkFramebufferCreateInfo ci = { const VkFramebufferCreateInfo ci = {
VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0u, render_pass, 1, &m_view, m_width, m_height, m_layers}; VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO, nullptr, 0u, render_pass, 1, &m_view, m_width, m_height, m_layers};
@ -384,8 +370,8 @@ VkFramebuffer Texture::CreateFramebuffer(VkRenderPass render_pass)
return fb; return fb;
} }
void Texture::UpdateFromBuffer(VkCommandBuffer cmdbuf, u32 level, u32 layer, u32 x, u32 y, u32 width, u32 height, void Vulkan::Texture::UpdateFromBuffer(VkCommandBuffer cmdbuf, u32 level, u32 layer, u32 x, u32 y, u32 width,
VkBuffer buffer, u32 buffer_offset, u32 row_length) u32 height, VkBuffer buffer, u32 buffer_offset, u32 row_length)
{ {
const VkImageLayout old_layout = m_layout; const VkImageLayout old_layout = m_layout;
const Vulkan::Util::DebugScope debugScope(cmdbuf, "Texture::UpdateFromBuffer: Lvl:%u Lyr:%u {%u,%u} %ux%u", level, const Vulkan::Util::DebugScope debugScope(cmdbuf, "Texture::UpdateFromBuffer: Lvl:%u Lyr:%u {%u,%u} %ux%u", level,
@ -404,4 +390,117 @@ void Texture::UpdateFromBuffer(VkCommandBuffer cmdbuf, u32 level, u32 layer, u32
TransitionToLayout(cmdbuf, old_layout); TransitionToLayout(cmdbuf, old_layout);
} }
} // namespace Vulkan u32 Vulkan::Texture::CalcUpdatePitch(u32 width) const
{
return Common::AlignUp(width * Vulkan::Util::GetTexelSize(m_format),
g_vulkan_context->GetBufferCopyRowPitchAlignment());
}
u32 Vulkan::Texture::CalcUpdateRowLength(u32 pitch) const
{
return pitch / Vulkan::Util::GetTexelSize(m_format);
}
bool Vulkan::Texture::BeginUpdate(u32 width, u32 height, void** out_buffer, u32* out_pitch)
{
const u32 pitch = CalcUpdatePitch(width);
const u32 required_size = pitch * height;
StreamBuffer& buffer = g_vulkan_context->GetTextureUploadBuffer();
if (required_size > buffer.GetCurrentSize())
return false;
// TODO: allocate temporary buffer if this fails...
if (!buffer.ReserveMemory(required_size, g_vulkan_context->GetBufferCopyOffsetAlignment()))
{
g_vulkan_context->ExecuteCommandBuffer(false);
if (!buffer.ReserveMemory(required_size, g_vulkan_context->GetBufferCopyOffsetAlignment()))
return false;
}
*out_buffer = buffer.GetCurrentHostPointer();
*out_pitch = pitch;
return true;
}
void Vulkan::Texture::EndUpdate(u32 x, u32 y, u32 width, u32 height)
{
const u32 pitch = CalcUpdatePitch(width);
const u32 required_size = pitch * height;
StreamBuffer& buffer = g_vulkan_context->GetTextureUploadBuffer();
const u32 buffer_offset = buffer.GetCurrentOffset();
buffer.CommitMemory(required_size);
UpdateFromBuffer(g_vulkan_context->GetCurrentCommandBuffer(), 0, 0, x, y, width, height, buffer.GetBuffer(),
buffer_offset, CalcUpdateRowLength(pitch));
}
bool Vulkan::Texture::Update(u32 x, u32 y, u32 width, u32 height, const void* data, u32 data_pitch)
{
const u32 pitch = CalcUpdatePitch(width);
const u32 row_length = CalcUpdateRowLength(pitch);
const u32 required_size = pitch * height;
StreamBuffer& sbuffer = g_vulkan_context->GetTextureUploadBuffer();
// If the texture is larger than half our streaming buffer size, use a separate buffer.
// Otherwise allocation will either fail, or require lots of cmdbuffer submissions.
if (required_size > (g_vulkan_context->GetTextureUploadBuffer().GetCurrentSize() / 2))
{
const u32 size = data_pitch * height;
const VkBufferCreateInfo bci = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
nullptr,
0,
static_cast<VkDeviceSize>(size),
VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
VK_SHARING_MODE_EXCLUSIVE,
0,
nullptr};
// Don't worry about setting the coherent bit for this upload, the main reason we had
// that set in StreamBuffer was for MoltenVK, which would upload the whole buffer on
// smaller uploads, but we're writing to the whole thing anyway.
VmaAllocationCreateInfo aci = {};
aci.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
aci.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
VmaAllocationInfo ai;
VkBuffer buffer;
VmaAllocation allocation;
VkResult res = vmaCreateBuffer(g_vulkan_context->GetAllocator(), &bci, &aci, &buffer, &allocation, &ai);
if (res != VK_SUCCESS)
{
LOG_VULKAN_ERROR(res, "vmaCreateBuffer() failed: ");
return VK_NULL_HANDLE;
}
// Immediately queue it for freeing after the command buffer finishes, since it's only needed for the copy.
g_vulkan_context->DeferBufferDestruction(buffer, allocation);
StringUtil::StrideMemCpy(ai.pMappedData, pitch, data, data_pitch, std::min(data_pitch, pitch), height);
vmaFlushAllocation(g_vulkan_context->GetAllocator(), allocation, 0, size);
UpdateFromBuffer(g_vulkan_context->GetCurrentCommandBuffer(), 0, 0, x, y, width, height, buffer, 0, row_length);
return true;
}
else
{
if (!sbuffer.ReserveMemory(required_size, g_vulkan_context->GetBufferCopyOffsetAlignment()))
{
g_vulkan_context->ExecuteCommandBuffer(false);
if (!sbuffer.ReserveMemory(required_size, g_vulkan_context->GetBufferCopyOffsetAlignment()))
{
Log_ErrorPrintf("Failed to reserve texture upload memory (%u bytes).", required_size);
return false;
}
}
const u32 buffer_offset = sbuffer.GetCurrentOffset();
StringUtil::StrideMemCpy(sbuffer.GetCurrentHostPointer(), pitch, data, data_pitch, std::min(data_pitch, pitch),
height);
sbuffer.CommitMemory(required_size);
UpdateFromBuffer(g_vulkan_context->GetCurrentCommandBuffer(), 0, 0, x, y, width, height, sbuffer.GetBuffer(),
buffer_offset, row_length);
return true;
}
}

View file

@ -1,8 +1,3 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#pragma once #pragma once
#include "../types.h" #include "../types.h"
#include "loader.h" #include "loader.h"
@ -24,7 +19,7 @@ public:
ALWAYS_INLINE bool IsValid() const { return (m_image != VK_NULL_HANDLE); } ALWAYS_INLINE bool IsValid() const { return (m_image != VK_NULL_HANDLE); }
/// An image is considered owned/managed if we control the memory. /// An image is considered owned/managed if we control the memory.
ALWAYS_INLINE bool IsOwned() const { return (m_device_memory != VK_NULL_HANDLE); } ALWAYS_INLINE bool IsOwned() const { return (m_allocation != VK_NULL_HANDLE); }
ALWAYS_INLINE u32 GetWidth() const { return m_width; } ALWAYS_INLINE u32 GetWidth() const { return m_width; }
ALWAYS_INLINE u32 GetHeight() const { return m_height; } ALWAYS_INLINE u32 GetHeight() const { return m_height; }
@ -37,14 +32,15 @@ public:
ALWAYS_INLINE VkImageLayout GetLayout() const { return m_layout; } ALWAYS_INLINE VkImageLayout GetLayout() const { return m_layout; }
ALWAYS_INLINE VkImageViewType GetViewType() const { return m_view_type; } ALWAYS_INLINE VkImageViewType GetViewType() const { return m_view_type; }
ALWAYS_INLINE VkImage GetImage() const { return m_image; } ALWAYS_INLINE VkImage GetImage() const { return m_image; }
ALWAYS_INLINE VkDeviceMemory GetDeviceMemory() const { return m_device_memory; } ALWAYS_INLINE VmaAllocation GetAllocation() const { return m_allocation; }
ALWAYS_INLINE VkImageView GetView() const { return m_view; } ALWAYS_INLINE VkImageView GetView() const { return m_view; }
bool Create(u32 width, u32 height, u32 levels, u32 layers, VkFormat format, VkSampleCountFlagBits samples, bool Create(u32 width, u32 height, u32 levels, u32 layers, VkFormat format, VkSampleCountFlagBits samples,
VkImageViewType view_type, VkImageTiling tiling, VkImageUsageFlags usage); VkImageViewType view_type, VkImageTiling tiling, VkImageUsageFlags usage, bool dedicated_memory = false,
const VkComponentMapping* swizzle = nullptr);
bool Adopt(VkImage existing_image, VkImageViewType view_type, u32 width, u32 height, u32 levels, u32 layers, bool Adopt(VkImage existing_image, VkImageViewType view_type, u32 width, u32 height, u32 levels, u32 layers,
VkFormat format, VkSampleCountFlagBits samples); VkFormat format, VkSampleCountFlagBits samples, const VkComponentMapping* swizzle = nullptr);
void Destroy(bool defer = true); void Destroy(bool defer = true);
@ -62,6 +58,12 @@ public:
void UpdateFromBuffer(VkCommandBuffer cmdbuf, u32 level, u32 layer, u32 x, u32 y, u32 width, u32 height, void UpdateFromBuffer(VkCommandBuffer cmdbuf, u32 level, u32 layer, u32 x, u32 y, u32 width, u32 height,
VkBuffer buffer, u32 buffer_offset, u32 row_length); VkBuffer buffer, u32 buffer_offset, u32 row_length);
u32 CalcUpdatePitch(u32 width) const;
u32 CalcUpdateRowLength(u32 pitch) const;
bool BeginUpdate(u32 width, u32 height, void** out_buffer, u32* out_pitch);
void EndUpdate(u32 x, u32 y, u32 width, u32 height);
bool Update(u32 x, u32 y, u32 width, u32 height, const void* data, u32 data_pitch);
private: private:
u32 m_width = 0; u32 m_width = 0;
u32 m_height = 0; u32 m_height = 0;
@ -73,7 +75,7 @@ private:
VkImageLayout m_layout = VK_IMAGE_LAYOUT_UNDEFINED; VkImageLayout m_layout = VK_IMAGE_LAYOUT_UNDEFINED;
VkImage m_image = VK_NULL_HANDLE; VkImage m_image = VK_NULL_HANDLE;
VkDeviceMemory m_device_memory = VK_NULL_HANDLE; VmaAllocation m_allocation = VK_NULL_HANDLE;
VkImageView m_view = VK_NULL_HANDLE; VkImageView m_view = VK_NULL_HANDLE;
}; };

View file

@ -1,8 +1,3 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#include "util.h" #include "util.h"
#include "../assert.h" #include "../assert.h"
#include "../log.h" #include "../log.h"
@ -552,8 +547,9 @@ DebugScope<VkQueue>::DebugScope(VkQueue context, const char* format, ...) : queu
va_end(ap); va_end(ap);
const float depth_phase = depth / static_cast<float>(max_depth); const float depth_phase = depth / static_cast<float>(max_depth);
BeginDebugScope(queue, str, BeginDebugScope(
Palette(depth_phase, {0.5f, 0.5f, 0.5f}, {0.5f, 0.5f, 0.5f}, {2.0f, 1.0f, 0.0f}, {0.5f, 0.20f, 0.25f})); queue, str,
Palette(depth_phase, {0.5f, 0.5f, 0.5f}, {0.5f, 0.5f, 0.5f}, {2.0f, 1.0f, 0.0f}, {0.5f, 0.20f, 0.25f}));
++depth; ++depth;
} }
} }

View file

@ -1,12 +1,8 @@
// Copyright 2016 Dolphin Emulator Project
// Copyright 2020 DuckStation Emulator Project
// Licensed under GPLv2+
// Refer to the LICENSE file included.
#pragma once #pragma once
#include "../string.h" #include "../string.h"
#include "../types.h" #include "../types.h"
#include "context.h"
#include "loader.h" #include "loader.h"
#include <algorithm> #include <algorithm>
#include <array> #include <array>
@ -163,6 +159,20 @@ inline void SetObjectName(VkDevice device, T object_handle, const char* format,
#endif #endif
} }
template<>
inline void SetObjectName(VkDevice device, VmaAllocation object_handle, const char* format, ...)
{
#ifdef ENABLE_VULKAN_DEBUG_OBJECTS
std::va_list ap;
SmallString str;
va_start(ap, format);
str.FormatVA(format, ap);
va_end(ap);
vmaSetAllocationName(g_vulkan_context->GetAllocator(), object_handle, str);
#endif
}
// Command buffer debug utils // Command buffer debug utils
inline void BeginDebugScope(VkCommandBuffer command_buffer, const char* scope_name, inline void BeginDebugScope(VkCommandBuffer command_buffer, const char* scope_name,
const std::array<float, 4>& scope_color = {0.5, 0.5, 0.5, 1.0}) const std::array<float, 4>& scope_color = {0.5, 0.5, 0.5, 1.0})

View file

@ -254,7 +254,7 @@ void GPU_HW_Vulkan::MapBatchVertexPointer(u32 required_vertices)
Panic("Failed to reserve vertex stream buffer memory"); Panic("Failed to reserve vertex stream buffer memory");
} }
m_batch_start_vertex_ptr = static_cast<BatchVertex*>(m_vertex_stream_buffer.GetCurrentHostPointer()); m_batch_start_vertex_ptr = reinterpret_cast<BatchVertex*>(m_vertex_stream_buffer.GetCurrentHostPointer());
m_batch_current_vertex_ptr = m_batch_start_vertex_ptr; m_batch_current_vertex_ptr = m_batch_start_vertex_ptr;
m_batch_end_vertex_ptr = m_batch_start_vertex_ptr + (m_vertex_stream_buffer.GetCurrentSpace() / sizeof(BatchVertex)); m_batch_end_vertex_ptr = m_batch_start_vertex_ptr + (m_vertex_stream_buffer.GetCurrentSpace() / sizeof(BatchVertex));
m_batch_base_vertex = m_vertex_stream_buffer.GetCurrentOffset() / sizeof(BatchVertex); m_batch_base_vertex = m_vertex_stream_buffer.GetCurrentOffset() / sizeof(BatchVertex);
@ -559,52 +559,50 @@ bool GPU_HW_Vulkan::CreateFramebuffer()
if (!m_vram_texture.Create(texture_width, texture_height, 1, 1, texture_format, samples, VK_IMAGE_VIEW_TYPE_2D, if (!m_vram_texture.Create(texture_width, texture_height, 1, 1, texture_format, samples, VK_IMAGE_VIEW_TYPE_2D,
VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_TILING_OPTIMAL,
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT |
VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT) || VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, true) ||
!m_vram_depth_texture.Create(texture_width, texture_height, 1, 1, depth_format, samples, VK_IMAGE_VIEW_TYPE_2D, !m_vram_depth_texture.Create(texture_width, texture_height, 1, 1, depth_format, samples, VK_IMAGE_VIEW_TYPE_2D,
VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_TILING_OPTIMAL,
VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT) || VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT) ||
!m_vram_read_texture.Create(texture_width, texture_height, 1, 1, texture_format, VK_SAMPLE_COUNT_1_BIT, !m_vram_read_texture.Create(texture_width, texture_height, 1, 1, texture_format, VK_SAMPLE_COUNT_1_BIT,
VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT) || VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT, true) ||
!m_display_texture.Create( !m_display_texture.Create(
((m_downsample_mode == GPUDownsampleMode::Adaptive) ? VRAM_WIDTH : GPU_MAX_DISPLAY_WIDTH) * m_resolution_scale, ((m_downsample_mode == GPUDownsampleMode::Adaptive) ? VRAM_WIDTH : GPU_MAX_DISPLAY_WIDTH) * m_resolution_scale,
GPU_MAX_DISPLAY_HEIGHT * m_resolution_scale, 1, 1, texture_format, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_VIEW_TYPE_2D, GPU_MAX_DISPLAY_HEIGHT * m_resolution_scale, 1, 1, texture_format, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_VIEW_TYPE_2D,
VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_TILING_OPTIMAL,
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
VK_IMAGE_USAGE_TRANSFER_DST_BIT) || VK_IMAGE_USAGE_TRANSFER_DST_BIT, true) ||
!m_vram_readback_texture.Create(VRAM_WIDTH, VRAM_HEIGHT, 1, 1, texture_format, VK_SAMPLE_COUNT_1_BIT, !m_vram_readback_texture.Create(VRAM_WIDTH, VRAM_HEIGHT, 1, 1, texture_format, VK_SAMPLE_COUNT_1_BIT,
VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_TILING_OPTIMAL, VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT) || VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT, true))
!m_vram_readback_staging_texture.Create(Vulkan::StagingBuffer::Type::Readback, texture_format, VRAM_WIDTH / 2,
VRAM_HEIGHT))
{ {
return false; return false;
} }
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_texture.GetImage(), "VRAM Texture"); Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_texture.GetImage(), "VRAM Texture");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_texture.GetView(), "VRAM Texture View"); Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_texture.GetView(), "VRAM Texture View");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_texture.GetDeviceMemory(), "VRAM Texture Memory"); Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_texture.GetAllocation(), "VRAM Texture Memory");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_depth_texture.GetImage(), "VRAM Depth Texture"); Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_depth_texture.GetImage(), "VRAM Depth Texture");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_depth_texture.GetView(), "VRAM Depth Texture View"); Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_depth_texture.GetView(), "VRAM Depth Texture View");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_depth_texture.GetDeviceMemory(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_depth_texture.GetAllocation(),
"VRAM Depth Texture Memory"); "VRAM Depth Texture Memory");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_read_texture.GetImage(), "VRAM Read Texture"); Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_read_texture.GetImage(), "VRAM Read Texture");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_read_texture.GetView(), "VRAM Read Texture View"); Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_read_texture.GetView(), "VRAM Read Texture View");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_read_texture.GetDeviceMemory(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_read_texture.GetAllocation(),
"VRAM Read Texture Memory"); "VRAM Read Texture Memory");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_display_texture.GetImage(), "Display Texture"); Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_display_texture.GetImage(), "Display Texture");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_display_texture.GetView(), "Display Texture View"); Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_display_texture.GetView(), "Display Texture View");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_display_texture.GetDeviceMemory(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_display_texture.GetAllocation(),
"Display Texture Memory"); "Display Texture Memory");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_readback_texture.GetImage(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_readback_texture.GetImage(),
"VRAM Readback Texture"); "VRAM Readback Texture");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_readback_texture.GetView(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_readback_texture.GetView(),
"VRAM Readback Texture View"); "VRAM Readback Texture View");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_readback_texture.GetDeviceMemory(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_readback_texture.GetAllocation(),
"VRAM Readback Texture Memory"); "VRAM Readback Texture Memory");
m_vram_render_pass = m_vram_render_pass =
@ -842,7 +840,6 @@ void GPU_HW_Vulkan::DestroyFramebuffer()
m_vram_texture.Destroy(false); m_vram_texture.Destroy(false);
m_vram_readback_texture.Destroy(false); m_vram_readback_texture.Destroy(false);
m_display_texture.Destroy(false); m_display_texture.Destroy(false);
m_vram_readback_staging_texture.Destroy(false);
} }
bool GPU_HW_Vulkan::CreateVertexBuffer() bool GPU_HW_Vulkan::CreateVertexBuffer()
@ -852,7 +849,7 @@ bool GPU_HW_Vulkan::CreateVertexBuffer()
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vertex_stream_buffer.GetBuffer(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vertex_stream_buffer.GetBuffer(),
"Vertex Stream Buffer"); "Vertex Stream Buffer");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vertex_stream_buffer.GetDeviceMemory(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vertex_stream_buffer.GetAllocation(),
"Vertex Stream Buffer Memory"); "Vertex Stream Buffer Memory");
return true; return true;
} }
@ -864,7 +861,7 @@ bool GPU_HW_Vulkan::CreateUniformBuffer()
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_uniform_stream_buffer.GetBuffer(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_uniform_stream_buffer.GetBuffer(),
"Uniform Stream Buffer"); "Uniform Stream Buffer");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_uniform_stream_buffer.GetDeviceMemory(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_uniform_stream_buffer.GetAllocation(),
"Uniform Stream Buffer Memory"); "Uniform Stream Buffer Memory");
return true; return true;
} }
@ -909,7 +906,7 @@ bool GPU_HW_Vulkan::CreateTextureBuffer()
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_texture_stream_buffer.GetBuffer(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_texture_stream_buffer.GetBuffer(),
"Texture Stream Buffer"); "Texture Stream Buffer");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_texture_stream_buffer.GetDeviceMemory(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_texture_stream_buffer.GetAllocation(),
"Texture Stream Buffer Memory"); "Texture Stream Buffer Memory");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_write_descriptor_set, "VRAM Write Descriptor Set"); Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_vram_write_descriptor_set, "VRAM Write Descriptor Set");
@ -1577,15 +1574,10 @@ void GPU_HW_Vulkan::ReadVRAM(u32 x, u32 y, u32 width, u32 height)
m_vram_readback_texture.TransitionToLayout(cmdbuf, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL); m_vram_readback_texture.TransitionToLayout(cmdbuf, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
m_vram_texture.TransitionToLayout(cmdbuf, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL); m_vram_texture.TransitionToLayout(cmdbuf, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL);
// Stage the readback. // Stage the readback and copy it into our shadow buffer (will execute command buffer and stall).
m_vram_readback_staging_texture.CopyFromTexture(m_vram_readback_texture, 0, 0, 0, 0, 0, 0, encoded_width, g_host_display->DownloadTexture(&m_vram_readback_texture, HostDisplayPixelFormat::RGBA8, 0, 0, encoded_width,
encoded_height); encoded_height, &m_vram_shadow[copy_rect.top * VRAM_WIDTH + copy_rect.left],
VRAM_WIDTH * sizeof(u16));
// And copy it into our shadow buffer (will execute command buffer and stall).
ExecuteCommandBuffer(true, true);
m_vram_readback_staging_texture.ReadTexels(0, 0, encoded_width, encoded_height,
&m_vram_shadow[copy_rect.top * VRAM_WIDTH + copy_rect.left],
VRAM_WIDTH * sizeof(u16));
} }
void GPU_HW_Vulkan::FillVRAM(u32 x, u32 y, u32 width, u32 height, u32 color) void GPU_HW_Vulkan::FillVRAM(u32 x, u32 y, u32 width, u32 height, u32 color)
@ -1831,7 +1823,7 @@ bool GPU_HW_Vulkan::CreateTextureReplacementStreamBuffer()
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_texture_replacment_stream_buffer.GetBuffer(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_texture_replacment_stream_buffer.GetBuffer(),
"Texture Replacement Stream Buffer"); "Texture Replacement Stream Buffer");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_texture_replacment_stream_buffer.GetDeviceMemory(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_texture_replacment_stream_buffer.GetAllocation(),
"Texture Replacement Stream Buffer Memory"); "Texture Replacement Stream Buffer Memory");
return true; return true;

View file

@ -1,6 +1,5 @@
#pragma once #pragma once
#include "common/dimensional_array.h" #include "common/dimensional_array.h"
#include "common/vulkan/staging_texture.h"
#include "common/vulkan/stream_buffer.h" #include "common/vulkan/stream_buffer.h"
#include "common/vulkan/texture.h" #include "common/vulkan/texture.h"
#include "gpu_hw.h" #include "gpu_hw.h"
@ -100,7 +99,6 @@ private:
Vulkan::Texture m_vram_depth_texture; Vulkan::Texture m_vram_depth_texture;
Vulkan::Texture m_vram_read_texture; Vulkan::Texture m_vram_read_texture;
Vulkan::Texture m_vram_readback_texture; Vulkan::Texture m_vram_readback_texture;
Vulkan::StagingTexture m_vram_readback_staging_texture;
Vulkan::Texture m_display_texture; Vulkan::Texture m_display_texture;
bool m_use_ssbos_for_vram_writes = false; bool m_use_ssbos_for_vram_writes = false;

View file

@ -213,7 +213,7 @@ void DisplaySettingsWidget::populateGPUAdaptersAndResolutions()
#endif #endif
#ifdef WITH_VULKAN #ifdef WITH_VULKAN
case GPURenderer::HardwareVulkan: case GPURenderer::HardwareVulkan:
aml = FrontendCommon::VulkanHostDisplay::StaticGetAdapterAndModeList(nullptr); aml = VulkanHostDisplay::StaticGetAdapterAndModeList(nullptr);
threaded_presentation_supported = true; threaded_presentation_supported = true;
break; break;
#endif #endif

View file

@ -136,7 +136,7 @@ std::unique_ptr<HostDisplay> Host::CreateDisplayForAPI(RenderAPI api)
{ {
#ifdef WITH_VULKAN #ifdef WITH_VULKAN
case RenderAPI::Vulkan: case RenderAPI::Vulkan:
return std::make_unique<FrontendCommon::VulkanHostDisplay>(); return std::make_unique<VulkanHostDisplay>();
#endif #endif
#ifdef WITH_OPENGL #ifdef WITH_OPENGL

View file

@ -67,7 +67,6 @@
#include "common/vulkan/context.h" #include "common/vulkan/context.h"
#include "common/vulkan/texture.h" #include "common/vulkan/texture.h"
#include "common/vulkan/stream_buffer.h" #include "common/vulkan/stream_buffer.h"
#include "common/vulkan/staging_texture.h"
#include "common/vulkan/util.h" #include "common/vulkan/util.h"
#include <cstdio> #include <cstdio>
@ -412,41 +411,8 @@ bool ImGui_ImplVulkan_CreateFontsTexture()
} }
} }
#if 0
const size_t upload_size = width * height * 4 * sizeof(unsigned char);
const VkBufferCreateInfo bci = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO, nullptr, 0,
static_cast<VkDeviceSize>(upload_size), VK_BUFFER_USAGE_TRANSFER_SRC_BIT, VK_SHARING_MODE_EXCLUSIVE, 0, nullptr};
VmaAllocationCreateInfo aci = {};
aci.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
aci.usage = VMA_MEMORY_USAGE_CPU_TO_GPU;
VmaAllocationInfo ai;
VkBuffer buffer;
VmaAllocation allocation;
VkResult res = vmaCreateBuffer(g_vulkan_context->GetAllocator(), &bci, &aci, &buffer, &allocation, &ai);
if (res != VK_SUCCESS)
return false;
std::memcpy(ai.pMappedData, pixels, upload_size);
vmaFlushAllocation(g_vulkan_context->GetAllocator(), allocation, 0, upload_size);
bd->FontTexture.TransitionToLayout(g_vulkan_context->GetCurrentInitCommandBuffer(), VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);
bd->FontTexture.UpdateFromBuffer(g_vulkan_context->GetCurrentInitCommandBuffer(), 0, 0, 0, 0, width, height, width, buffer, 0);
bd->FontTexture.TransitionToLayout(g_vulkan_context->GetCurrentInitCommandBuffer(), VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);
// Immediately queue it for freeing after the command buffer finishes, since it's only needed for the copy.
g_vulkan_context->DeferBufferDestruction(buffer, allocation);
#else
Vulkan::StagingTexture stex;
if (!stex.Create(Vulkan::StagingBuffer::Type::Upload, VK_FORMAT_R8G8B8A8_UNORM, static_cast<u32>(width), static_cast<u32>(height)))
return false;
const u32 stride = static_cast<u32>(width) * static_cast<u32>(sizeof(u32));
stex.WriteTexels(0, 0, static_cast<u32>(width), static_cast<u32>(height), pixels, stride);
stex.CopyToTexture(g_vulkan_context->GetCurrentCommandBuffer(), 0, 0, bd->FontTexture, 0, 0, 0, 0, width, height);
stex.Destroy(true);
#endif
// Store our identifier // Store our identifier
bd->FontTexture.Update(0, 0, width, height, pixels, sizeof(u32) * width);
io.Fonts->SetTexID((ImTextureID)&bd->FontTexture); io.Fonts->SetTexID((ImTextureID)&bd->FontTexture);
return true; return true;
} }

View file

@ -7,7 +7,6 @@
#include "common/vulkan/builders.h" #include "common/vulkan/builders.h"
#include "common/vulkan/context.h" #include "common/vulkan/context.h"
#include "common/vulkan/shader_cache.h" #include "common/vulkan/shader_cache.h"
#include "common/vulkan/staging_texture.h"
#include "common/vulkan/stream_buffer.h" #include "common/vulkan/stream_buffer.h"
#include "common/vulkan/swap_chain.h" #include "common/vulkan/swap_chain.h"
#include "common/vulkan/util.h" #include "common/vulkan/util.h"
@ -19,8 +18,6 @@
#include <array> #include <array>
Log_SetChannel(VulkanHostDisplay); Log_SetChannel(VulkanHostDisplay);
namespace FrontendCommon {
class VulkanHostDisplayTexture : public HostDisplayTexture class VulkanHostDisplayTexture : public HostDisplayTexture
{ {
public: public:
@ -38,45 +35,16 @@ public:
u32 GetSamples() const override { return m_texture.GetSamples(); } u32 GetSamples() const override { return m_texture.GetSamples(); }
HostDisplayPixelFormat GetFormat() const override { return m_format; } HostDisplayPixelFormat GetFormat() const override { return m_format; }
u32 CalcUpdatePitch(u32 width) const bool BeginUpdate(u32 width, u32 height, void** out_buffer, u32* out_pitch) override
{ {
return Common::AlignUp(width * HostDisplay::GetDisplayPixelFormatSize(m_format), return m_texture.BeginUpdate(width, height, out_buffer, out_pitch);
g_vulkan_context->GetBufferCopyRowPitchAlignment());
} }
bool BeginUpdate(u32 width, u32 height, void** out_buffer, u32* out_pitch) void EndUpdate(u32 x, u32 y, u32 width, u32 height) override { m_texture.EndUpdate(x, y, width, height); }
bool Update(u32 x, u32 y, u32 width, u32 height, const void* data, u32 pitch) override
{ {
const u32 pitch = CalcUpdatePitch(width); return m_texture.Update(x, y, width, height, data, pitch);
const u32 required_size = pitch * height;
Vulkan::StreamBuffer& buffer = g_vulkan_context->GetTextureUploadBuffer();
if (required_size > buffer.GetCurrentSize())
return false;
// TODO: allocate temporary buffer if this fails...
if (!buffer.ReserveMemory(required_size, g_vulkan_context->GetBufferCopyOffsetAlignment()))
{
g_vulkan_context->ExecuteCommandBuffer(false);
if (!buffer.ReserveMemory(required_size, g_vulkan_context->GetBufferCopyOffsetAlignment()))
return false;
}
*out_buffer = buffer.GetCurrentHostPointer();
*out_pitch = pitch;
return true;
}
void EndUpdate(u32 x, u32 y, u32 width, u32 height)
{
const u32 pitch = CalcUpdatePitch(width);
const u32 required_size = pitch * height;
Vulkan::StreamBuffer& buffer = g_vulkan_context->GetTextureUploadBuffer();
const u32 buffer_offset = buffer.GetCurrentOffset();
buffer.CommitMemory(required_size);
m_texture.UpdateFromBuffer(g_vulkan_context->GetCurrentCommandBuffer(), 0, 0, x, y, width, height,
buffer.GetBuffer(), buffer_offset,
HostDisplay::GetDisplayPixelFormatSize(m_format) / width);
} }
const Vulkan::Texture& GetTexture() const { return m_texture; } const Vulkan::Texture& GetTexture() const { return m_texture; }
@ -91,6 +59,18 @@ VulkanHostDisplay::VulkanHostDisplay() = default;
VulkanHostDisplay::~VulkanHostDisplay() VulkanHostDisplay::~VulkanHostDisplay()
{ {
if (!g_vulkan_context)
return;
g_vulkan_context->WaitForGPUIdle();
DestroyStagingBuffer();
DestroyResources();
Vulkan::ShaderCache::Destroy();
m_swap_chain.reset();
Vulkan::Context::Destroy();
AssertMsg(!g_vulkan_context, "Context should have been destroyed by now"); AssertMsg(!g_vulkan_context, "Context should have been destroyed by now");
AssertMsg(!m_swap_chain, "Swap chain should have been destroyed by now"); AssertMsg(!m_swap_chain, "Swap chain should have been destroyed by now");
} }
@ -223,39 +203,7 @@ std::unique_ptr<HostDisplayTexture> VulkanHostDisplay::CreateTexture(u32 width,
if (data) if (data)
{ {
const u32 row_size = width * GetDisplayPixelFormatSize(format); texture.Update(0, 0, width, height, data, data_stride);
const u32 data_upload_pitch = Common::AlignUp(row_size, g_vulkan_context->GetBufferCopyRowPitchAlignment());
const u32 data_size = data_upload_pitch * height;
Vulkan::StreamBuffer& buffer = g_vulkan_context->GetTextureUploadBuffer();
if (data_size < buffer.GetCurrentSize())
{
if (!buffer.ReserveMemory(data_size, g_vulkan_context->GetBufferCopyOffsetAlignment()))
{
g_vulkan_context->ExecuteCommandBuffer(false);
if (!buffer.ReserveMemory(data_size, g_vulkan_context->GetBufferCopyOffsetAlignment()))
goto use_staging;
}
StringUtil::StrideMemCpy(buffer.GetCurrentHostPointer(), data_upload_pitch, data, data_stride, row_size, height);
const u32 buffer_offset = buffer.GetCurrentOffset();
buffer.CommitMemory(data_size);
texture.UpdateFromBuffer(g_vulkan_context->GetCurrentCommandBuffer(), 0, 0, 0, 0, width, height,
buffer.GetBuffer(), buffer_offset,
data_upload_pitch / GetDisplayPixelFormatSize(format));
}
else
{
use_staging:
// TODO: Drop this thing completely. It's not using the buffer copy row pitch alignment.
Vulkan::StagingTexture staging_texture;
if (!staging_texture.Create(Vulkan::StagingBuffer::Type::Upload, vk_format, width, height))
return {};
staging_texture.WriteTexels(0, 0, width, height, data, data_stride);
staging_texture.CopyToTexture(g_vulkan_context->GetCurrentCommandBuffer(), 0, 0, texture, 0, 0, 0, 0, width,
height);
}
} }
else else
{ {
@ -271,22 +219,6 @@ std::unique_ptr<HostDisplayTexture> VulkanHostDisplay::CreateTexture(u32 width,
return std::make_unique<VulkanHostDisplayTexture>(std::move(texture), format); return std::make_unique<VulkanHostDisplayTexture>(std::move(texture), format);
} }
bool VulkanHostDisplay::DownloadTexture(const void* texture_handle, HostDisplayPixelFormat texture_format, u32 x, u32 y,
u32 width, u32 height, void* out_data, u32 out_data_stride)
{
Vulkan::Texture* texture = static_cast<Vulkan::Texture*>(const_cast<void*>(texture_handle));
if ((m_readback_staging_texture.GetWidth() < width || m_readback_staging_texture.GetHeight() < height) &&
!m_readback_staging_texture.Create(Vulkan::StagingBuffer::Type::Readback, texture->GetFormat(), width, height))
{
return false;
}
m_readback_staging_texture.CopyFromTexture(*texture, x, y, 0, 0, 0, 0, width, height);
m_readback_staging_texture.ReadTexels(0, 0, width, height, out_data, out_data_stride);
return true;
}
bool VulkanHostDisplay::SupportsDisplayPixelFormat(HostDisplayPixelFormat format) const bool VulkanHostDisplay::SupportsDisplayPixelFormat(HostDisplayPixelFormat format) const
{ {
const VkFormat vk_format = s_display_pixel_format_mapping[static_cast<u32>(format)]; const VkFormat vk_format = s_display_pixel_format_mapping[static_cast<u32>(format)];
@ -360,6 +292,121 @@ VkRenderPass VulkanHostDisplay::GetRenderPassForDisplay() const
} }
} }
bool VulkanHostDisplay::CheckStagingBufferSize(u32 required_size)
{
if (m_readback_staging_buffer_size >= required_size)
return true;
DestroyStagingBuffer();
const VkBufferCreateInfo bci = {VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
nullptr,
0u,
required_size,
VK_BUFFER_USAGE_TRANSFER_DST_BIT,
VK_SHARING_MODE_EXCLUSIVE,
0u,
nullptr};
VmaAllocationCreateInfo aci = {};
aci.usage = VMA_MEMORY_USAGE_GPU_TO_CPU;
aci.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
aci.preferredFlags = VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
VmaAllocationInfo ai = {};
VkResult res = vmaCreateBuffer(g_vulkan_context->GetAllocator(), &bci, &aci, &m_readback_staging_buffer,
&m_readback_staging_allocation, &ai);
if (res != VK_SUCCESS)
{
LOG_VULKAN_ERROR(res, "vmaCreateBuffer() failed: ");
return false;
}
m_readback_staging_buffer_map = static_cast<u8*>(ai.pMappedData);
return true;
}
void VulkanHostDisplay::DestroyStagingBuffer()
{
// unmapped as part of the buffer destroy
m_readback_staging_buffer_map = nullptr;
m_readback_staging_buffer_size = 0;
if (m_readback_staging_buffer != VK_NULL_HANDLE)
{
vmaDestroyBuffer(g_vulkan_context->GetAllocator(), m_readback_staging_buffer, m_readback_staging_allocation);
m_readback_staging_buffer = VK_NULL_HANDLE;
m_readback_staging_allocation = VK_NULL_HANDLE;
m_readback_staging_buffer_size = 0;
}
}
bool VulkanHostDisplay::DownloadTexture(const void* texture_handle, HostDisplayPixelFormat texture_format, u32 x, u32 y,
u32 width, u32 height, void* out_data, u32 out_data_stride)
{
Vulkan::Texture* texture = static_cast<Vulkan::Texture*>(const_cast<void*>(texture_handle));
const u32 pitch = texture->CalcUpdatePitch(width);
const u32 size = pitch * height;
const u32 level = 0;
if (!CheckStagingBufferSize(size))
{
Log_ErrorPrintf("Can't read back %ux%u", width, height);
return false;
}
{
const VkCommandBuffer cmdbuf = g_vulkan_context->GetCurrentCommandBuffer();
const Vulkan::Util::DebugScope debugScope(cmdbuf, "VulkanHostDisplay::DownloadTexture(%u,%u)", width, height);
VkImageLayout old_layout = texture->GetLayout();
if (old_layout != VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
texture->TransitionSubresourcesToLayout(cmdbuf, level, 1, 0, 1, old_layout, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
VkBufferImageCopy image_copy = {};
const VkImageAspectFlags aspect = Vulkan::Util::IsDepthFormat(static_cast<VkFormat>(texture->GetFormat())) ?
VK_IMAGE_ASPECT_DEPTH_BIT :
VK_IMAGE_ASPECT_COLOR_BIT;
image_copy.bufferOffset = 0;
image_copy.bufferRowLength = texture->CalcUpdateRowLength(pitch);
image_copy.bufferImageHeight = 0;
image_copy.imageSubresource = {aspect, level, 0u, 1u};
image_copy.imageOffset = {static_cast<s32>(x), static_cast<s32>(y), 0};
image_copy.imageExtent = {width, height, 1u};
// invalidate gpu cache
// TODO: Needed?
Vulkan::Util::BufferMemoryBarrier(cmdbuf, m_readback_staging_buffer, 0, VK_ACCESS_TRANSFER_WRITE_BIT, 0, size,
VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, VK_PIPELINE_STAGE_TRANSFER_BIT);
// do the copy
vkCmdCopyImageToBuffer(cmdbuf, texture->GetImage(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, m_readback_staging_buffer,
1, &image_copy);
// flush gpu cache
Vulkan::Util::BufferMemoryBarrier(cmdbuf, m_readback_staging_buffer, VK_ACCESS_TRANSFER_WRITE_BIT,
VK_ACCESS_HOST_READ_BIT, 0, size, VK_ACCESS_TRANSFER_WRITE_BIT,
VK_PIPELINE_STAGE_HOST_BIT);
if (old_layout != VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
{
texture->TransitionSubresourcesToLayout(cmdbuf, level, 1, 0, 1, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, old_layout);
}
}
g_vulkan_context->ExecuteCommandBuffer(true);
// invalidate cpu cache before reading
VkResult res = vmaInvalidateAllocation(g_vulkan_context->GetAllocator(), m_readback_staging_allocation, 0, size);
if (res != VK_SUCCESS)
LOG_VULKAN_ERROR(res, "vmaInvalidateAllocation() failed, readback may be incorrect: ");
StringUtil::StrideMemCpy(out_data, out_data_stride, m_readback_staging_buffer_map, pitch,
std::min(pitch, out_data_stride), height);
return true;
}
bool VulkanHostDisplay::CreateResources() bool VulkanHostDisplay::CreateResources()
{ {
static constexpr char fullscreen_quad_vertex_shader[] = R"( static constexpr char fullscreen_quad_vertex_shader[] = R"(
@ -431,7 +478,7 @@ void main()
plbuilder.AddDescriptorSet(m_post_process_descriptor_set_layout); plbuilder.AddDescriptorSet(m_post_process_descriptor_set_layout);
plbuilder.AddPushConstants(VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT, 0, plbuilder.AddPushConstants(VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT, 0,
PostProcessingShader::PUSH_CONSTANT_SIZE_THRESHOLD); FrontendCommon::PostProcessingShader::PUSH_CONSTANT_SIZE_THRESHOLD);
m_post_process_pipeline_layout = plbuilder.Create(device); m_post_process_pipeline_layout = plbuilder.Create(device);
if (m_post_process_pipeline_layout == VK_NULL_HANDLE) if (m_post_process_pipeline_layout == VK_NULL_HANDLE)
return false; return false;
@ -510,8 +557,6 @@ void VulkanHostDisplay::DestroyResources()
m_post_processing_ubo.Destroy(true); m_post_processing_ubo.Destroy(true);
m_post_processing_chain.ClearStages(); m_post_processing_chain.ClearStages();
m_readback_staging_texture.Destroy(false);
Vulkan::Util::SafeDestroyPipeline(m_display_pipeline); Vulkan::Util::SafeDestroyPipeline(m_display_pipeline);
Vulkan::Util::SafeDestroyPipeline(m_cursor_pipeline); Vulkan::Util::SafeDestroyPipeline(m_cursor_pipeline);
Vulkan::Util::SafeDestroyPipelineLayout(m_pipeline_layout); Vulkan::Util::SafeDestroyPipelineLayout(m_pipeline_layout);
@ -538,19 +583,7 @@ bool VulkanHostDisplay::UpdateImGuiFontTexture()
return ImGui_ImplVulkan_CreateFontsTexture(); return ImGui_ImplVulkan_CreateFontsTexture();
} }
void VulkanHostDisplay::DestroyRenderDevice() void VulkanHostDisplay::DestroyRenderDevice() {}
{
if (!g_vulkan_context)
return;
g_vulkan_context->WaitForGPUIdle();
DestroyResources();
Vulkan::ShaderCache::Destroy();
DestroyRenderSurface();
Vulkan::Context::Destroy();
}
bool VulkanHostDisplay::MakeRenderContextCurrent() bool VulkanHostDisplay::MakeRenderContextCurrent()
{ {
@ -687,10 +720,8 @@ bool VulkanHostDisplay::RenderScreenshot(u32 width, u32 height, std::vector<u32>
} }
Vulkan::Texture tex; Vulkan::Texture tex;
Vulkan::StagingTexture staging_tex;
if (!tex.Create(width, height, 1, 1, format, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_TILING_OPTIMAL, if (!tex.Create(width, height, 1, 1, format, VK_SAMPLE_COUNT_1_BIT, VK_IMAGE_VIEW_TYPE_2D, VK_IMAGE_TILING_OPTIMAL,
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT) || VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT))
!staging_tex.Create(Vulkan::StagingBuffer::Type::Readback, format, width, height))
{ {
return false; return false;
} }
@ -728,13 +759,11 @@ bool VulkanHostDisplay::RenderScreenshot(u32 width, u32 height, std::vector<u32>
vkCmdEndRenderPass(g_vulkan_context->GetCurrentCommandBuffer()); vkCmdEndRenderPass(g_vulkan_context->GetCurrentCommandBuffer());
Vulkan::Util::EndDebugScope(g_vulkan_context->GetCurrentCommandBuffer()); Vulkan::Util::EndDebugScope(g_vulkan_context->GetCurrentCommandBuffer());
tex.TransitionToLayout(g_vulkan_context->GetCurrentCommandBuffer(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL); tex.TransitionToLayout(g_vulkan_context->GetCurrentCommandBuffer(), VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL);
staging_tex.CopyFromTexture(tex, 0, 0, 0, 0, 0, 0, width, height); DownloadTexture(&tex, *out_format, 0, 0, width, height, out_pixels->data(), *out_stride);
staging_tex.ReadTexels(0, 0, width, height, out_pixels->data(), *out_stride);
// destroying these immediately should be safe since nothing's going to access them, and it's not part of the command // destroying these immediately should be safe since nothing's going to access them, and it's not part of the command
// stream // stream
vkDestroyFramebuffer(g_vulkan_context->GetDevice(), fb, nullptr); vkDestroyFramebuffer(g_vulkan_context->GetDevice(), fb, nullptr);
staging_tex.Destroy(false);
tex.Destroy(false); tex.Destroy(false);
return true; return true;
} }
@ -960,7 +989,7 @@ bool VulkanHostDisplay::SetPostProcessingChain(const std::string_view& config)
for (u32 i = 0; i < m_post_processing_chain.GetStageCount(); i++) for (u32 i = 0; i < m_post_processing_chain.GetStageCount(); i++)
{ {
const PostProcessingShader& shader = m_post_processing_chain.GetShaderStage(i); const FrontendCommon::PostProcessingShader& shader = m_post_processing_chain.GetShaderStage(i);
const std::string vs = shadergen.GeneratePostProcessingVertexShader(shader); const std::string vs = shadergen.GeneratePostProcessingVertexShader(shader);
const std::string ps = shadergen.GeneratePostProcessingFragmentShader(shader); const std::string ps = shadergen.GeneratePostProcessingFragmentShader(shader);
const bool use_push_constants = shader.UsePushConstants(); const bool use_push_constants = shader.UsePushConstants();
@ -1023,8 +1052,6 @@ bool VulkanHostDisplay::SetPostProcessingChain(const std::string_view& config)
} }
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_post_processing_ubo.GetBuffer(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_post_processing_ubo.GetBuffer(),
"Post Processing Uniform Buffer"); "Post Processing Uniform Buffer");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_post_processing_ubo.GetDeviceMemory(),
"Post Processing Uniform Buffer Memory");
return true; return true;
} }
@ -1053,7 +1080,7 @@ bool VulkanHostDisplay::CheckPostProcessingRenderTargets(u32 target_width, u32 t
"Post Processing Input Texture"); "Post Processing Input Texture");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_post_processing_input_texture.GetView(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_post_processing_input_texture.GetView(),
"Post Processing Input Texture View"); "Post Processing Input Texture View");
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_post_processing_input_texture.GetDeviceMemory(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), m_post_processing_input_texture.GetAllocation(),
"Post Processing Input Texture Memory"); "Post Processing Input Texture Memory");
} }
@ -1078,7 +1105,7 @@ bool VulkanHostDisplay::CheckPostProcessingRenderTargets(u32 target_width, u32 t
} }
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), pps.output_texture.GetImage(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), pps.output_texture.GetImage(),
"Post Processing Output Texture %u", i); "Post Processing Output Texture %u", i);
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), pps.output_texture.GetDeviceMemory(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), pps.output_texture.GetAllocation(),
"Post Processing Output Texture Memory %u", i); "Post Processing Output Texture Memory %u", i);
Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), pps.output_texture.GetView(), Vulkan::Util::SetObjectName(g_vulkan_context->GetDevice(), pps.output_texture.GetView(),
"Post Processing Output Texture View %u", i); "Post Processing Output Texture View %u", i);
@ -1154,7 +1181,7 @@ void VulkanHostDisplay::ApplyPostProcessingChain(VkFramebuffer target_fb, s32 fi
if (use_push_constants) if (use_push_constants)
{ {
u8 buffer[PostProcessingShader::PUSH_CONSTANT_SIZE_THRESHOLD]; u8 buffer[FrontendCommon::PostProcessingShader::PUSH_CONSTANT_SIZE_THRESHOLD];
Assert(pps.uniforms_size <= sizeof(buffer)); Assert(pps.uniforms_size <= sizeof(buffer));
m_post_processing_chain.GetShaderStage(i).FillUniformBuffer( m_post_processing_chain.GetShaderStage(i).FillUniformBuffer(
buffer, texture_width, texture_height, texture_view_x, texture_view_y, texture_view_width, texture_view_height, buffer, texture_width, texture_height, texture_view_x, texture_view_y, texture_view_width, texture_view_height,
@ -1201,5 +1228,3 @@ void VulkanHostDisplay::ApplyPostProcessingChain(VkFramebuffer target_fb, s32 fi
} }
} }
} }
} // namespace FrontendCommon

View file

@ -1,6 +1,5 @@
#pragma once #pragma once
#include "common/vulkan/loader.h" #include "common/vulkan/loader.h"
#include "common/vulkan/staging_texture.h"
#include "common/vulkan/stream_buffer.h" #include "common/vulkan/stream_buffer.h"
#include "common/vulkan/swap_chain.h" #include "common/vulkan/swap_chain.h"
#include "common/window_info.h" #include "common/window_info.h"
@ -14,8 +13,6 @@ class StreamBuffer;
class SwapChain; class SwapChain;
} // namespace Vulkan } // namespace Vulkan
namespace FrontendCommon {
class VulkanHostDisplay final : public HostDisplay class VulkanHostDisplay final : public HostDisplay
{ {
public: public:
@ -93,9 +90,11 @@ protected:
s32 texture_view_x, s32 texture_view_y, s32 texture_view_width, s32 texture_view_height, s32 texture_view_x, s32 texture_view_y, s32 texture_view_width, s32 texture_view_height,
u32 target_width, u32 target_height); u32 target_width, u32 target_height);
// Can be overridden by frontends.
VkRenderPass GetRenderPassForDisplay() const; VkRenderPass GetRenderPassForDisplay() const;
bool CheckStagingBufferSize(u32 required_size);
void DestroyStagingBuffer();
bool CreateResources() override; bool CreateResources() override;
void DestroyResources() override; void DestroyResources() override;
@ -122,18 +121,19 @@ protected:
VkSampler m_point_sampler = VK_NULL_HANDLE; VkSampler m_point_sampler = VK_NULL_HANDLE;
VkSampler m_linear_sampler = VK_NULL_HANDLE; VkSampler m_linear_sampler = VK_NULL_HANDLE;
Vulkan::StagingTexture m_readback_staging_texture; VmaAllocation m_readback_staging_allocation = VK_NULL_HANDLE;
VkBuffer m_readback_staging_buffer = VK_NULL_HANDLE;
u8* m_readback_staging_buffer_map = nullptr;
u32 m_readback_staging_buffer_size = 0;
VkDescriptorSetLayout m_post_process_descriptor_set_layout = VK_NULL_HANDLE; VkDescriptorSetLayout m_post_process_descriptor_set_layout = VK_NULL_HANDLE;
VkDescriptorSetLayout m_post_process_ubo_descriptor_set_layout = VK_NULL_HANDLE; VkDescriptorSetLayout m_post_process_ubo_descriptor_set_layout = VK_NULL_HANDLE;
VkPipelineLayout m_post_process_pipeline_layout = VK_NULL_HANDLE; VkPipelineLayout m_post_process_pipeline_layout = VK_NULL_HANDLE;
VkPipelineLayout m_post_process_ubo_pipeline_layout = VK_NULL_HANDLE; VkPipelineLayout m_post_process_ubo_pipeline_layout = VK_NULL_HANDLE;
PostProcessingChain m_post_processing_chain; FrontendCommon::PostProcessingChain m_post_processing_chain;
Vulkan::Texture m_post_processing_input_texture; Vulkan::Texture m_post_processing_input_texture;
VkFramebuffer m_post_processing_input_framebuffer = VK_NULL_HANDLE; VkFramebuffer m_post_processing_input_framebuffer = VK_NULL_HANDLE;
Vulkan::StreamBuffer m_post_processing_ubo; Vulkan::StreamBuffer m_post_processing_ubo;
std::vector<PostProcessingStage> m_post_processing_stages; std::vector<PostProcessingStage> m_post_processing_stages;
}; };
} // namespace FrontendCommon