
/* Refresh - XNA-inspired 3D Graphics Library with modern capabilities
*
* Copyright (c) 2020 Evan Hemsley
*
* This software is provided 'as-is', without any express or implied warranty.
* In no event will the authors be held liable for any damages arising from
* the use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you must not
* claim that you wrote the original software. If you use this software in a
* product, an acknowledgment in the product documentation would be
* appreciated but is not required.
*
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
*
* 3. This notice may not be removed or altered from any source distribution.
*
* Evan "cosmonaut" Hemsley <evan@moonside.games>
*
*/
#if REFRESH_DRIVER_VULKAN
/* Needed for VK_KHR_portability_subset */
#define VK_ENABLE_BETA_EXTENSIONS
#define VK_NO_PROTOTYPES
#include "vulkan/vulkan.h"
#include "Refresh_Driver.h"
#include <SDL.h>
#include <SDL_syswm.h>
#include <SDL_vulkan.h>
#define VULKAN_INTERNAL_clamp(val, min, max) SDL_max(min, SDL_min(val, max))
/* Global Vulkan Loader Entry Points */
static PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = NULL;
#define VULKAN_GLOBAL_FUNCTION(name) \
static PFN_##name name = NULL;
#include "Refresh_Driver_Vulkan_vkfuncs.h"
/* vkInstance/vkDevice function typedefs */
#define VULKAN_INSTANCE_FUNCTION(ext, ret, func, params) \
typedef ret (VKAPI_CALL *vkfntype_##func) params;
#define VULKAN_DEVICE_FUNCTION(ext, ret, func, params) \
typedef ret (VKAPI_CALL *vkfntype_##func) params;
#include "Refresh_Driver_Vulkan_vkfuncs.h"
typedef struct VulkanExtensions
{
/* Globally supported */
uint8_t KHR_swapchain;
/* Core since 1.1 */
uint8_t KHR_maintenance1;
uint8_t KHR_get_memory_requirements2;
/* Core since 1.2 */
uint8_t KHR_driver_properties;
/* EXT, probably not going to be Core */
uint8_t EXT_vertex_attribute_divisor;
/* Only required for special implementations (i.e. MoltenVK) */
uint8_t KHR_portability_subset;
} VulkanExtensions;
/* Defines */
#define STARTING_ALLOCATION_SIZE 64000000 /* 64MB */
#define MAX_ALLOCATION_SIZE 256000000 /* 256MB */
#define ALLOCATION_INCREMENT 16000000 /* 16MB */
#define TRANSFER_BUFFER_STARTING_SIZE 8000000 /* 8MB */
#define POOLED_TRANSFER_BUFFER_SIZE 16000000 /* 16MB */
#define UBO_BUFFER_SIZE 16777216 /* 16MB */
#define UBO_SECTION_SIZE 4096 /* 4KB */
#define DESCRIPTOR_POOL_STARTING_SIZE 128
#define DEFRAG_TIME 200
#define WINDOW_DATA "Refresh_VulkanWindowData"
#define IDENTITY_SWIZZLE \
{ \
VK_COMPONENT_SWIZZLE_IDENTITY, \
VK_COMPONENT_SWIZZLE_IDENTITY, \
VK_COMPONENT_SWIZZLE_IDENTITY, \
VK_COMPONENT_SWIZZLE_IDENTITY \
}
#define NULL_DESC_LAYOUT (VkDescriptorSetLayout) 0
#define NULL_PIPELINE_LAYOUT (VkPipelineLayout) 0
#define NULL_RENDER_PASS (Refresh_RenderPass*) 0
#define EXPAND_ELEMENTS_IF_NEEDED(arr, initialValue, type) \
if (arr->count == arr->capacity) \
{ \
if (arr->capacity == 0) \
{ \
arr->capacity = initialValue; \
} \
else \
{ \
arr->capacity *= 2; \
} \
arr->elements = (type*) SDL_realloc( \
arr->elements, \
arr->capacity * sizeof(type) \
); \
}
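/* Usage sketch: any struct with `elements`, `count`, and `capacity`
 * fields works with the macro above, e.g. (mirroring the hash array
 * inserts later in this file):
 *
 *   EXPAND_ELEMENTS_IF_NEEDED(arr, 4, RenderPassHashMap)
 *   arr->elements[arr->count] = map;
 *   arr->count += 1;
 *
 * Capacity doubles once exhausted; note that the SDL_realloc result is
 * not NULL-checked, consistent with allocation handling elsewhere in
 * this file.
 */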
#define EXPAND_ARRAY_IF_NEEDED(arr, elementType, newCount, capacity, newCapacity) \
if (newCount >= capacity) \
{ \
capacity = newCapacity; \
arr = (elementType*) SDL_realloc( \
arr, \
sizeof(elementType) * capacity \
); \
}
#define MOVE_ARRAY_CONTENTS_AND_RESET(i, dstArr, dstCount, srcArr, srcCount) \
for (i = 0; i < srcCount; i += 1) \
{ \
dstArr[i] = srcArr[i]; \
} \
dstCount = srcCount; \
srcCount = 0;
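/* Note: MOVE_ARRAY_CONTENTS_AND_RESET does not grow dstArr, so callers
 * must guarantee a destination capacity of at least srcCount before
 * invoking it. */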
/* Enums */
typedef enum VulkanResourceAccessType
{
/* Reads */
RESOURCE_ACCESS_NONE, /* For initialization */
RESOURCE_ACCESS_INDEX_BUFFER,
RESOURCE_ACCESS_VERTEX_BUFFER,
RESOURCE_ACCESS_INDIRECT_BUFFER,
RESOURCE_ACCESS_VERTEX_SHADER_READ_UNIFORM_BUFFER,
RESOURCE_ACCESS_VERTEX_SHADER_READ_SAMPLED_IMAGE,
RESOURCE_ACCESS_FRAGMENT_SHADER_READ_UNIFORM_BUFFER,
RESOURCE_ACCESS_FRAGMENT_SHADER_READ_SAMPLED_IMAGE,
RESOURCE_ACCESS_FRAGMENT_SHADER_READ_COLOR_ATTACHMENT,
RESOURCE_ACCESS_FRAGMENT_SHADER_READ_DEPTH_STENCIL_ATTACHMENT,
RESOURCE_ACCESS_COMPUTE_SHADER_READ_UNIFORM_BUFFER,
RESOURCE_ACCESS_COMPUTE_SHADER_READ_SAMPLED_IMAGE_OR_UNIFORM_TEXEL_BUFFER,
RESOURCE_ACCESS_COMPUTE_SHADER_READ_OTHER,
RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE,
RESOURCE_ACCESS_COLOR_ATTACHMENT_READ,
RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ,
RESOURCE_ACCESS_TRANSFER_READ,
RESOURCE_ACCESS_HOST_READ,
RESOURCE_ACCESS_PRESENT,
RESOURCE_ACCESS_END_OF_READ,
/* Writes */
RESOURCE_ACCESS_VERTEX_SHADER_WRITE,
RESOURCE_ACCESS_FRAGMENT_SHADER_WRITE,
RESOURCE_ACCESS_COLOR_ATTACHMENT_WRITE,
RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE,
RESOURCE_ACCESS_TRANSFER_WRITE,
RESOURCE_ACCESS_HOST_WRITE,
/* Read-Writes */
RESOURCE_ACCESS_COLOR_ATTACHMENT_READ_WRITE,
RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_WRITE,
RESOURCE_ACCESS_COMPUTE_SHADER_STORAGE_IMAGE_READ_WRITE,
RESOURCE_ACCESS_COMPUTE_SHADER_BUFFER_READ_WRITE,
RESOURCE_ACCESS_TRANSFER_READ_WRITE,
RESOURCE_ACCESS_GENERAL,
/* Count */
RESOURCE_ACCESS_TYPES_COUNT
} VulkanResourceAccessType;
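/* The ordering above is load-bearing: every value before
 * RESOURCE_ACCESS_END_OF_READ is a read-only access, so barrier logic
 * further down can presumably classify an access with a single
 * comparison:
 *
 *   uint8_t isReadAccess = (access < RESOURCE_ACCESS_END_OF_READ);
 */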
/* Conversions */
static const uint8_t DEVICE_PRIORITY[] =
{
0, /* VK_PHYSICAL_DEVICE_TYPE_OTHER */
3, /* VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU */
4, /* VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU */
2, /* VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU */
1 /* VK_PHYSICAL_DEVICE_TYPE_CPU */
};
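/* Indexed by VkPhysicalDeviceType; a higher value is preferred during
 * physical device selection, so discrete GPUs beat integrated GPUs,
 * which beat virtual GPUs and CPU implementations. */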
static VkFormat RefreshToVK_SurfaceFormat[] =
{
VK_FORMAT_R8G8B8A8_UNORM, /* R8G8B8A8_UNORM */
VK_FORMAT_B8G8R8A8_UNORM, /* B8G8R8A8_UNORM */
VK_FORMAT_R5G6B5_UNORM_PACK16, /* R5G6B5_UNORM */
VK_FORMAT_A1R5G5B5_UNORM_PACK16, /* A1R5G5B5_UNORM */
VK_FORMAT_B4G4R4A4_UNORM_PACK16, /* B4G4R4A4_UNORM */
VK_FORMAT_A2R10G10B10_UNORM_PACK32, /* A2R10G10B10_UNORM */
VK_FORMAT_R16G16_UNORM, /* R16G16_UNORM */
VK_FORMAT_R16G16B16A16_UNORM, /* R16G16B16A16_UNORM */
VK_FORMAT_R8_UNORM, /* R8_UNORM */
VK_FORMAT_BC1_RGBA_UNORM_BLOCK, /* BC1_UNORM */
VK_FORMAT_BC2_UNORM_BLOCK, /* BC2_UNORM */
VK_FORMAT_BC3_UNORM_BLOCK, /* BC3_UNORM */
VK_FORMAT_BC7_UNORM_BLOCK, /* BC7_UNORM */
VK_FORMAT_R8G8_SNORM, /* R8G8_SNORM */
VK_FORMAT_R8G8B8A8_SNORM, /* R8G8B8A8_SNORM */
VK_FORMAT_R16_SFLOAT, /* R16_SFLOAT */
VK_FORMAT_R16G16_SFLOAT, /* R16G16_SFLOAT */
VK_FORMAT_R16G16B16A16_SFLOAT, /* R16G16B16A16_SFLOAT */
VK_FORMAT_R32_SFLOAT, /* R32_SFLOAT */
VK_FORMAT_R32G32_SFLOAT, /* R32G32_SFLOAT */
VK_FORMAT_R32G32B32A32_SFLOAT, /* R32G32B32A32_SFLOAT */
VK_FORMAT_R8_UINT, /* R8_UINT */
VK_FORMAT_R8G8_UINT, /* R8G8_UINT */
VK_FORMAT_R8G8B8A8_UINT, /* R8G8B8A8_UINT */
VK_FORMAT_R16_UINT, /* R16_UINT */
VK_FORMAT_R16G16_UINT, /* R16G16_UINT */
VK_FORMAT_R16G16B16A16_UINT, /* R16G16B16A16_UINT */
VK_FORMAT_D16_UNORM, /* D16_UNORM */
VK_FORMAT_D32_SFLOAT, /* D32_SFLOAT */
VK_FORMAT_D16_UNORM_S8_UINT, /* D16_UNORM_S8_UINT */
VK_FORMAT_D32_SFLOAT_S8_UINT /* D32_SFLOAT_S8_UINT */
};
static VkFormat RefreshToVK_VertexFormat[] =
{
VK_FORMAT_R32_UINT, /* UINT */
VK_FORMAT_R32_SFLOAT, /* FLOAT */
VK_FORMAT_R32G32_SFLOAT, /* VECTOR2 */
VK_FORMAT_R32G32B32_SFLOAT, /* VECTOR3 */
VK_FORMAT_R32G32B32A32_SFLOAT, /* VECTOR4 */
VK_FORMAT_R8G8B8A8_UNORM, /* COLOR */
VK_FORMAT_R8G8B8A8_USCALED, /* BYTE4 */
VK_FORMAT_R16G16_SSCALED, /* SHORT2 */
VK_FORMAT_R16G16B16A16_SSCALED, /* SHORT4 */
VK_FORMAT_R16G16_SNORM, /* NORMALIZEDSHORT2 */
VK_FORMAT_R16G16B16A16_SNORM, /* NORMALIZEDSHORT4 */
VK_FORMAT_R16G16_SFLOAT, /* HALFVECTOR2 */
VK_FORMAT_R16G16B16A16_SFLOAT /* HALFVECTOR4 */
};
static VkIndexType RefreshToVK_IndexType[] =
{
VK_INDEX_TYPE_UINT16,
VK_INDEX_TYPE_UINT32
};
static VkPrimitiveTopology RefreshToVK_PrimitiveType[] =
{
VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP
};
static VkPolygonMode RefreshToVK_PolygonMode[] =
{
VK_POLYGON_MODE_FILL,
VK_POLYGON_MODE_LINE,
VK_POLYGON_MODE_POINT
};
static VkCullModeFlags RefreshToVK_CullMode[] =
{
VK_CULL_MODE_NONE,
VK_CULL_MODE_FRONT_BIT,
VK_CULL_MODE_BACK_BIT,
VK_CULL_MODE_FRONT_AND_BACK
};
static VkFrontFace RefreshToVK_FrontFace[] =
{
VK_FRONT_FACE_COUNTER_CLOCKWISE,
VK_FRONT_FACE_CLOCKWISE
};
static VkBlendFactor RefreshToVK_BlendFactor[] =
{
VK_BLEND_FACTOR_ZERO,
VK_BLEND_FACTOR_ONE,
VK_BLEND_FACTOR_SRC_COLOR,
VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
VK_BLEND_FACTOR_DST_COLOR,
VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
VK_BLEND_FACTOR_SRC_ALPHA,
VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
VK_BLEND_FACTOR_DST_ALPHA,
VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
VK_BLEND_FACTOR_CONSTANT_COLOR,
VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
VK_BLEND_FACTOR_CONSTANT_ALPHA,
VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
VK_BLEND_FACTOR_SRC_ALPHA_SATURATE
};
static VkBlendOp RefreshToVK_BlendOp[] =
{
VK_BLEND_OP_ADD,
VK_BLEND_OP_SUBTRACT,
VK_BLEND_OP_REVERSE_SUBTRACT,
VK_BLEND_OP_MIN,
VK_BLEND_OP_MAX
};
static VkCompareOp RefreshToVK_CompareOp[] =
{
VK_COMPARE_OP_NEVER,
VK_COMPARE_OP_LESS,
VK_COMPARE_OP_EQUAL,
VK_COMPARE_OP_LESS_OR_EQUAL,
VK_COMPARE_OP_GREATER,
VK_COMPARE_OP_NOT_EQUAL,
VK_COMPARE_OP_GREATER_OR_EQUAL,
VK_COMPARE_OP_ALWAYS
};
static VkStencilOp RefreshToVK_StencilOp[] =
{
VK_STENCIL_OP_KEEP,
VK_STENCIL_OP_ZERO,
VK_STENCIL_OP_REPLACE,
VK_STENCIL_OP_INCREMENT_AND_CLAMP,
VK_STENCIL_OP_DECREMENT_AND_CLAMP,
VK_STENCIL_OP_INVERT,
VK_STENCIL_OP_INCREMENT_AND_WRAP,
VK_STENCIL_OP_DECREMENT_AND_WRAP
};
static VkAttachmentLoadOp RefreshToVK_LoadOp[] =
{
VK_ATTACHMENT_LOAD_OP_LOAD,
VK_ATTACHMENT_LOAD_OP_CLEAR,
VK_ATTACHMENT_LOAD_OP_DONT_CARE
};
static VkAttachmentStoreOp RefreshToVK_StoreOp[] =
{
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_STORE_OP_DONT_CARE
};
static VkSampleCountFlagBits RefreshToVK_SampleCount[] =
{
VK_SAMPLE_COUNT_1_BIT,
VK_SAMPLE_COUNT_2_BIT,
VK_SAMPLE_COUNT_4_BIT,
VK_SAMPLE_COUNT_8_BIT,
VK_SAMPLE_COUNT_16_BIT,
VK_SAMPLE_COUNT_32_BIT,
VK_SAMPLE_COUNT_64_BIT
};
static VkVertexInputRate RefreshToVK_VertexInputRate[] =
{
VK_VERTEX_INPUT_RATE_VERTEX,
VK_VERTEX_INPUT_RATE_INSTANCE
};
static VkFilter RefreshToVK_Filter[] =
{
VK_FILTER_NEAREST,
VK_FILTER_LINEAR,
VK_FILTER_CUBIC_EXT
};
static VkSamplerMipmapMode RefreshToVK_SamplerMipmapMode[] =
{
VK_SAMPLER_MIPMAP_MODE_NEAREST,
VK_SAMPLER_MIPMAP_MODE_LINEAR
};
static VkSamplerAddressMode RefreshToVK_SamplerAddressMode[] =
{
VK_SAMPLER_ADDRESS_MODE_REPEAT,
VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER
};
static VkBorderColor RefreshToVK_BorderColor[] =
{
VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
VK_BORDER_COLOR_INT_OPAQUE_BLACK,
VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
VK_BORDER_COLOR_INT_OPAQUE_WHITE
};
/* Structures */
/* Memory Allocation */
typedef struct VulkanMemoryAllocation VulkanMemoryAllocation;
typedef struct VulkanBuffer VulkanBuffer;
typedef struct VulkanTexture VulkanTexture;
typedef struct VulkanMemoryFreeRegion
{
VulkanMemoryAllocation *allocation;
VkDeviceSize offset;
VkDeviceSize size;
uint32_t allocationIndex;
uint32_t sortedIndex;
} VulkanMemoryFreeRegion;
typedef struct VulkanMemoryUsedRegion
{
VulkanMemoryAllocation *allocation;
VkDeviceSize offset;
VkDeviceSize size;
VkDeviceSize resourceOffset; /* differs from offset based on alignment */
VkDeviceSize resourceSize; /* differs from size based on alignment */
VkDeviceSize alignment;
uint8_t isBuffer;
REFRESHNAMELESS union
{
VulkanBuffer *vulkanBuffer;
VulkanTexture *vulkanTexture;
};
} VulkanMemoryUsedRegion;
typedef struct VulkanMemorySubAllocator
{
uint32_t memoryTypeIndex;
VkDeviceSize nextAllocationSize;
VulkanMemoryAllocation **allocations;
uint32_t allocationCount;
VulkanMemoryFreeRegion **sortedFreeRegions;
uint32_t sortedFreeRegionCount;
uint32_t sortedFreeRegionCapacity;
} VulkanMemorySubAllocator;
struct VulkanMemoryAllocation
{
VulkanMemorySubAllocator *allocator;
VkDeviceMemory memory;
VkDeviceSize size;
VulkanMemoryUsedRegion **usedRegions;
uint32_t usedRegionCount;
uint32_t usedRegionCapacity;
VulkanMemoryFreeRegion **freeRegions;
uint32_t freeRegionCount;
uint32_t freeRegionCapacity;
uint8_t dedicated;
uint8_t availableForAllocation;
VkDeviceSize freeSpace;
VkDeviceSize usedSpace;
SDL_mutex *memoryLock;
};
typedef struct VulkanMemoryAllocator
{
VulkanMemorySubAllocator subAllocators[VK_MAX_MEMORY_TYPES];
} VulkanMemoryAllocator;
/* Memory Barriers */
typedef struct VulkanResourceAccessInfo
{
VkPipelineStageFlags stageMask;
VkAccessFlags accessMask;
VkImageLayout imageLayout;
} VulkanResourceAccessInfo;
static const VulkanResourceAccessInfo AccessMap[RESOURCE_ACCESS_TYPES_COUNT] =
{
/* RESOURCE_ACCESS_NONE */
{
0,
0,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_INDEX_BUFFER */
{
VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
VK_ACCESS_INDEX_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_VERTEX_BUFFER */
{
VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_INDIRECT_BUFFER */
{
VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_VERTEX_SHADER_READ_UNIFORM_BUFFER */
{
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_VERTEX_SHADER_READ_SAMPLED_IMAGE */
{
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
},
/* RESOURCE_ACCESS_FRAGMENT_SHADER_READ_UNIFORM_BUFFER */
{
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_ACCESS_UNIFORM_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_FRAGMENT_SHADER_READ_SAMPLED_IMAGE */
{
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
},
/* RESOURCE_ACCESS_FRAGMENT_SHADER_READ_COLOR_ATTACHMENT */
{
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
},
/* RESOURCE_ACCESS_FRAGMENT_SHADER_READ_DEPTH_STENCIL_ATTACHMENT */
{
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL
},
/* RESOURCE_ACCESS_COMPUTE_SHADER_READ_UNIFORM_BUFFER */
{
VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
VK_ACCESS_UNIFORM_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_COMPUTE_SHADER_READ_SAMPLED_IMAGE_OR_UNIFORM_TEXEL_BUFFER */
{
VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
},
/* RESOURCE_ACCESS_COMPUTE_SHADER_READ_OTHER */
{
VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE */
{
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
},
/* RESOURCE_ACCESS_COLOR_ATTACHMENT_READ */
{
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
},
/* RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ */
{
VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL
},
/* RESOURCE_ACCESS_TRANSFER_READ */
{
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_ACCESS_TRANSFER_READ_BIT,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
},
/* RESOURCE_ACCESS_HOST_READ */
{
VK_PIPELINE_STAGE_HOST_BIT,
VK_ACCESS_HOST_READ_BIT,
VK_IMAGE_LAYOUT_GENERAL
},
/* RESOURCE_ACCESS_PRESENT */
{
0,
0,
VK_IMAGE_LAYOUT_PRESENT_SRC_KHR
},
/* RESOURCE_ACCESS_END_OF_READ */
{
0,
0,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_VERTEX_SHADER_WRITE */
{
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
VK_ACCESS_SHADER_WRITE_BIT,
VK_IMAGE_LAYOUT_GENERAL
},
/* RESOURCE_ACCESS_FRAGMENT_SHADER_WRITE */
{
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_ACCESS_SHADER_WRITE_BIT,
VK_IMAGE_LAYOUT_GENERAL
},
/* RESOURCE_ACCESS_COLOR_ATTACHMENT_WRITE */
{
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
},
/* RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE */
{
VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
},
/* RESOURCE_ACCESS_TRANSFER_WRITE */
{
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_ACCESS_TRANSFER_WRITE_BIT,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
},
/* RESOURCE_ACCESS_HOST_WRITE */
{
VK_PIPELINE_STAGE_HOST_BIT,
VK_ACCESS_HOST_WRITE_BIT,
VK_IMAGE_LAYOUT_GENERAL
},
/* RESOURCE_ACCESS_COLOR_ATTACHMENT_READ_WRITE */
{
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
},
/* RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_WRITE */
{
VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
},
/* RESOURCE_ACCESS_COMPUTE_SHADER_STORAGE_IMAGE_READ_WRITE */
{
VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT,
VK_IMAGE_LAYOUT_GENERAL
},
/* RESOURCE_ACCESS_COMPUTE_SHADER_BUFFER_READ_WRITE */
{
VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_TRANSFER_READ_WRITE */
{
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_GENERAL */
{
VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
VK_IMAGE_LAYOUT_GENERAL
}
};
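/* A minimal sketch of how AccessMap is consumed (the actual barrier
 * helpers appear later in this file): the previous and next access
 * types index the table to fill out a VkImageMemoryBarrier, e.g.
 *
 *   const VulkanResourceAccessInfo *prevInfo = &AccessMap[prevAccess];
 *   const VulkanResourceAccessInfo *nextInfo = &AccessMap[nextAccess];
 *
 *   memoryBarrier.srcAccessMask = prevInfo->accessMask;
 *   memoryBarrier.dstAccessMask = nextInfo->accessMask;
 *   memoryBarrier.oldLayout = prevInfo->imageLayout;
 *   memoryBarrier.newLayout = nextInfo->imageLayout;
 *
 * with prevInfo->stageMask and nextInfo->stageMask feeding the source
 * and destination stage arguments of vkCmdPipelineBarrier.
 */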
/* Memory structures */
typedef struct VulkanBufferContainer /* cast from Refresh_Buffer */
{
VulkanBuffer *vulkanBuffer;
} VulkanBufferContainer;
struct VulkanBuffer
{
VkBuffer buffer;
VkDeviceSize size;
VulkanMemoryUsedRegion *usedRegion;
VulkanResourceAccessType resourceAccessType;
VkBufferUsageFlags usage;
uint8_t requireHostVisible;
uint8_t preferDeviceLocal;
uint8_t requireHostLocal;
SDL_atomic_t referenceCount; /* Tracks command buffer usage */
VulkanBufferContainer *container;
};
typedef enum VulkanUniformBufferType
{
UNIFORM_BUFFER_VERTEX,
UNIFORM_BUFFER_FRAGMENT,
UNIFORM_BUFFER_COMPUTE
} VulkanUniformBufferType;
/* Yes, the pool is made of multiple pools.
* For some reason it was considered a good idea to make VkDescriptorPool fixed-size.
*/
typedef struct VulkanUniformDescriptorPool
{
VkDescriptorPool* descriptorPools;
uint32_t descriptorPoolCount;
/* Decremented whenever a descriptor set is allocated and
 * incremented (by the new pool's set capacity) whenever a
 * descriptor pool is allocated. This lets us keep track of
 * when we need a new pool.
 */
uint32_t availableDescriptorSetCount;
} VulkanUniformDescriptorPool;
/* Uniform buffers are just one buffer that we carve slices out of. */
typedef struct VulkanUniformBufferObject
{
VulkanUniformBufferType type;
VkDescriptorSet descriptorSet;
VulkanBuffer *buffer;
uint32_t currentOffset;
uint8_t *mapPointer; /* uniform buffers are permanently mapped */
SDL_mutex *lock;
} VulkanUniformBufferObject;
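/* A sketch of the slice-carving idea, assuming `alignment` is derived
 * from the device's minimum uniform buffer offset alignment (the real
 * push logic lives in the uniform data functions later in this file):
 *
 *   SDL_LockMutex(ubo->lock);
 *   offset = ubo->currentOffset;
 *   SDL_memcpy(ubo->mapPointer + offset, data, dataLength);
 *   ubo->currentOffset += (dataLength + alignment - 1) & ~(alignment - 1);
 *   SDL_UnlockMutex(ubo->lock);
 *
 * The resulting offset is presumably bound as a dynamic offset at draw
 * time (note the per-command-buffer vertex/fragment/computeUniformOffset
 * fields below).
 */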
/* Renderer Structure */
typedef struct QueueFamilyIndices
{
uint32_t graphicsFamily;
uint32_t presentFamily;
uint32_t computeFamily;
uint32_t transferFamily;
} QueueFamilyIndices;
typedef struct VulkanSampler
{
VkSampler sampler;
SDL_atomic_t referenceCount;
} VulkanSampler;
typedef struct VulkanShaderModule
{
VkShaderModule shaderModule;
SDL_atomic_t referenceCount;
} VulkanShaderModule;
typedef struct VulkanTextureContainer /* Cast from Refresh_Texture */
{
VulkanTexture *vulkanTexture;
} VulkanTextureContainer;
struct VulkanTexture
{
VulkanMemoryUsedRegion *usedRegion;
VkImage image;
VkImageView view;
VkExtent2D dimensions;
uint8_t is3D;
uint8_t isCube;
uint32_t depth;
uint32_t layerCount;
uint32_t levelCount;
Refresh_SampleCount sampleCount;
VkFormat format;
VulkanResourceAccessType resourceAccessType;
VkImageUsageFlags usageFlags;
VkImageAspectFlags aspectFlags;
struct VulkanTexture *msaaTex;
SDL_atomic_t referenceCount;
VulkanTextureContainer *container;
};
typedef struct VulkanRenderTarget
{
VkImageView view;
} VulkanRenderTarget;
typedef struct VulkanFramebuffer
{
VkFramebuffer framebuffer;
SDL_atomic_t referenceCount;
} VulkanFramebuffer;
typedef struct VulkanSwapchainData
{
/* Window surface */
VkSurfaceKHR surface;
VkSurfaceFormatKHR surfaceFormat;
/* Swapchain for window surface */
VkSwapchainKHR swapchain;
VkFormat swapchainFormat;
VkComponentMapping swapchainSwizzle;
VkPresentModeKHR presentMode;
/* Swapchain images */
VkExtent2D extent;
VulkanTextureContainer *textureContainers;
uint32_t imageCount;
/* Synchronization primitives */
VkSemaphore imageAvailableSemaphore;
VkSemaphore renderFinishedSemaphore;
} VulkanSwapchainData;
typedef struct WindowData
{
void *windowHandle;
VkPresentModeKHR preferredPresentMode;
VulkanSwapchainData *swapchainData;
} WindowData;
typedef struct SwapChainSupportDetails
{
VkSurfaceCapabilitiesKHR capabilities;
VkSurfaceFormatKHR *formats;
uint32_t formatsLength;
VkPresentModeKHR *presentModes;
uint32_t presentModesLength;
} SwapChainSupportDetails;
typedef struct VulkanPresentData
{
WindowData *windowData;
uint32_t swapchainImageIndex;
} VulkanPresentData;
typedef struct DescriptorSetCache DescriptorSetCache;
typedef struct VulkanGraphicsPipelineLayout
{
VkPipelineLayout pipelineLayout;
DescriptorSetCache *vertexSamplerDescriptorSetCache;
DescriptorSetCache *fragmentSamplerDescriptorSetCache;
} VulkanGraphicsPipelineLayout;
typedef struct VulkanGraphicsPipeline
{
VkPipeline pipeline;
VulkanGraphicsPipelineLayout *pipelineLayout;
Refresh_PrimitiveType primitiveType;
uint32_t vertexUniformBlockSize;
uint32_t fragmentUniformBlockSize;
VulkanShaderModule *vertexShaderModule;
VulkanShaderModule *fragmentShaderModule;
SDL_atomic_t referenceCount;
} VulkanGraphicsPipeline;
typedef struct VulkanComputePipelineLayout
{
VkPipelineLayout pipelineLayout;
DescriptorSetCache *bufferDescriptorSetCache;
DescriptorSetCache *imageDescriptorSetCache;
} VulkanComputePipelineLayout;
typedef struct VulkanComputePipeline
{
VkPipeline pipeline;
VulkanComputePipelineLayout *pipelineLayout;
uint32_t uniformBlockSize; /* permanently set in Create function */
VulkanShaderModule *computeShaderModule;
SDL_atomic_t referenceCount;
} VulkanComputePipeline;
/* Cache structures */
/* Descriptor Set Layout Caches */
#define NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS 1031
typedef struct DescriptorSetLayoutHash
{
VkDescriptorType descriptorType;
uint32_t bindingCount;
VkShaderStageFlagBits stageFlag;
} DescriptorSetLayoutHash;
typedef struct DescriptorSetLayoutHashMap
{
DescriptorSetLayoutHash key;
VkDescriptorSetLayout value;
} DescriptorSetLayoutHashMap;
typedef struct DescriptorSetLayoutHashArray
{
DescriptorSetLayoutHashMap *elements;
int32_t count;
int32_t capacity;
} DescriptorSetLayoutHashArray;
typedef struct DescriptorSetLayoutHashTable
{
DescriptorSetLayoutHashArray buckets[NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS];
} DescriptorSetLayoutHashTable;
static inline uint64_t DescriptorSetLayoutHashTable_GetHashCode(DescriptorSetLayoutHash key)
{
const uint64_t HASH_FACTOR = 97;
uint64_t result = 1;
result = result * HASH_FACTOR + key.descriptorType;
result = result * HASH_FACTOR + key.bindingCount;
result = result * HASH_FACTOR + key.stageFlag;
return result;
}
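/* A simple polynomial hash (factor 97); collisions are acceptable
 * because each bucket is an array that Fetch scans linearly with a full
 * key comparison. */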
static inline VkDescriptorSetLayout DescriptorSetLayoutHashTable_Fetch(
DescriptorSetLayoutHashTable *table,
DescriptorSetLayoutHash key
) {
int32_t i;
uint64_t hashcode = DescriptorSetLayoutHashTable_GetHashCode(key);
DescriptorSetLayoutHashArray *arr = &table->buckets[hashcode % NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS];
for (i = 0; i < arr->count; i += 1)
{
const DescriptorSetLayoutHash *e = &arr->elements[i].key;
if ( key.descriptorType == e->descriptorType &&
key.bindingCount == e->bindingCount &&
key.stageFlag == e->stageFlag )
{
return arr->elements[i].value;
}
}
return VK_NULL_HANDLE;
}
static inline void DescriptorSetLayoutHashTable_Insert(
DescriptorSetLayoutHashTable *table,
DescriptorSetLayoutHash key,
VkDescriptorSetLayout value
) {
uint64_t hashcode = DescriptorSetLayoutHashTable_GetHashCode(key);
DescriptorSetLayoutHashArray *arr = &table->buckets[hashcode % NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS];
DescriptorSetLayoutHashMap map;
map.key = key;
map.value = value;
EXPAND_ELEMENTS_IF_NEEDED(arr, 4, DescriptorSetLayoutHashMap);
arr->elements[arr->count] = map;
arr->count += 1;
}
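/* Typical fetch-or-create flow against this cache (illustrative sketch;
 * the real call sites live in the pipeline layout creation paths):
 *
 *   layout = DescriptorSetLayoutHashTable_Fetch(&renderer->descriptorSetLayoutHashTable, key);
 *   if (layout == VK_NULL_HANDLE)
 *   {
 *       // create the layout with vkCreateDescriptorSetLayout, then cache it:
 *       DescriptorSetLayoutHashTable_Insert(&renderer->descriptorSetLayoutHashTable, key, layout);
 *   }
 */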
typedef struct RenderPassColorTargetDescription
{
VkFormat format;
Refresh_Vec4 clearColor;
Refresh_LoadOp loadOp;
Refresh_StoreOp storeOp;
} RenderPassColorTargetDescription;
typedef struct RenderPassDepthStencilTargetDescription
{
VkFormat format;
Refresh_LoadOp loadOp;
Refresh_StoreOp storeOp;
Refresh_LoadOp stencilLoadOp;
Refresh_StoreOp stencilStoreOp;
} RenderPassDepthStencilTargetDescription;
typedef struct RenderPassHash
{
RenderPassColorTargetDescription colorTargetDescriptions[MAX_COLOR_TARGET_BINDINGS];
uint32_t colorAttachmentCount;
RenderPassDepthStencilTargetDescription depthStencilTargetDescription;
Refresh_SampleCount colorAttachmentSampleCount;
} RenderPassHash;
typedef struct RenderPassHashMap
{
RenderPassHash key;
VkRenderPass value;
} RenderPassHashMap;
typedef struct RenderPassHashArray
{
RenderPassHashMap *elements;
int32_t count;
int32_t capacity;
} RenderPassHashArray;
static inline uint8_t RenderPassHash_Compare(
RenderPassHash *a,
RenderPassHash *b
) {
uint32_t i;
if (a->colorAttachmentCount != b->colorAttachmentCount)
{
return 0;
}
if (a->colorAttachmentSampleCount != b->colorAttachmentSampleCount)
{
return 0;
}
for (i = 0; i < a->colorAttachmentCount; i += 1)
{
if (a->colorTargetDescriptions[i].format != b->colorTargetDescriptions[i].format)
{
return 0;
}
if ( a->colorTargetDescriptions[i].clearColor.x != b->colorTargetDescriptions[i].clearColor.x ||
a->colorTargetDescriptions[i].clearColor.y != b->colorTargetDescriptions[i].clearColor.y ||
a->colorTargetDescriptions[i].clearColor.z != b->colorTargetDescriptions[i].clearColor.z ||
a->colorTargetDescriptions[i].clearColor.w != b->colorTargetDescriptions[i].clearColor.w )
{
return 0;
}
if (a->colorTargetDescriptions[i].loadOp != b->colorTargetDescriptions[i].loadOp)
{
return 0;
}
if (a->colorTargetDescriptions[i].storeOp != b->colorTargetDescriptions[i].storeOp)
{
return 0;
}
}
if (a->depthStencilTargetDescription.format != b->depthStencilTargetDescription.format)
{
return 0;
}
if (a->depthStencilTargetDescription.loadOp != b->depthStencilTargetDescription.loadOp)
{
return 0;
}
if (a->depthStencilTargetDescription.storeOp != b->depthStencilTargetDescription.storeOp)
{
return 0;
}
if (a->depthStencilTargetDescription.stencilLoadOp != b->depthStencilTargetDescription.stencilLoadOp)
{
return 0;
}
if (a->depthStencilTargetDescription.stencilStoreOp != b->depthStencilTargetDescription.stencilStoreOp)
{
return 0;
}
return 1;
}
static inline VkRenderPass RenderPassHashArray_Fetch(
RenderPassHashArray *arr,
RenderPassHash *key
) {
int32_t i;
for (i = 0; i < arr->count; i += 1)
{
RenderPassHash *e = &arr->elements[i].key;
if (RenderPassHash_Compare(e, key))
{
return arr->elements[i].value;
}
}
return VK_NULL_HANDLE;
}
static inline void RenderPassHashArray_Insert(
RenderPassHashArray *arr,
RenderPassHash key,
VkRenderPass value
) {
RenderPassHashMap map;
map.key = key;
map.value = value;
EXPAND_ELEMENTS_IF_NEEDED(arr, 4, RenderPassHashMap)
arr->elements[arr->count] = map;
arr->count += 1;
}
typedef struct FramebufferHash
{
VkImageView colorAttachmentViews[MAX_COLOR_TARGET_BINDINGS];
VkImageView colorMultiSampleAttachmentViews[MAX_COLOR_TARGET_BINDINGS];
uint32_t colorAttachmentCount;
VkImageView depthStencilAttachmentView;
uint32_t width;
uint32_t height;
} FramebufferHash;
typedef struct FramebufferHashMap
{
FramebufferHash key;
VulkanFramebuffer *value;
} FramebufferHashMap;
typedef struct FramebufferHashArray
{
FramebufferHashMap *elements;
int32_t count;
int32_t capacity;
} FramebufferHashArray;
static inline uint8_t FramebufferHash_Compare(
FramebufferHash *a,
FramebufferHash *b
) {
uint32_t i;
if (a->colorAttachmentCount != b->colorAttachmentCount)
{
return 0;
}
for (i = 0; i < a->colorAttachmentCount; i += 1)
{
if (a->colorAttachmentViews[i] != b->colorAttachmentViews[i])
{
return 0;
}
if (a->colorMultiSampleAttachmentViews[i] != b->colorMultiSampleAttachmentViews[i])
{
return 0;
}
}
if (a->depthStencilAttachmentView != b->depthStencilAttachmentView)
{
return 0;
}
if (a->width != b->width)
{
return 0;
}
if (a->height != b->height)
{
return 0;
}
return 1;
}
static inline VulkanFramebuffer* FramebufferHashArray_Fetch(
FramebufferHashArray *arr,
FramebufferHash *key
) {
int32_t i;
for (i = 0; i < arr->count; i += 1)
{
FramebufferHash *e = &arr->elements[i].key;
if (FramebufferHash_Compare(e, key))
{
return arr->elements[i].value;
}
}
return NULL;
}
static inline void FramebufferHashArray_Insert(
FramebufferHashArray *arr,
FramebufferHash key,
VulkanFramebuffer *value
) {
FramebufferHashMap map;
map.key = key;
map.value = value;
EXPAND_ELEMENTS_IF_NEEDED(arr, 4, FramebufferHashMap)
arr->elements[arr->count] = map;
arr->count += 1;
}
static inline void FramebufferHashArray_Remove(
FramebufferHashArray *arr,
uint32_t index
) {
if (index != arr->count - 1)
{
arr->elements[index] = arr->elements[arr->count - 1];
}
arr->count -= 1;
}
typedef struct RenderTargetHash
{
VulkanTexture *texture;
uint32_t depth;
uint32_t layer;
uint32_t level;
} RenderTargetHash;
typedef struct RenderTargetHashMap
{
RenderTargetHash key;
VulkanRenderTarget *value;
} RenderTargetHashMap;
typedef struct RenderTargetHashArray
{
RenderTargetHashMap *elements;
int32_t count;
int32_t capacity;
} RenderTargetHashArray;
static inline uint8_t RenderTargetHash_Compare(
RenderTargetHash *a,
RenderTargetHash *b
) {
if (a->texture != b->texture)
{
return 0;
}
if (a->layer != b->layer)
{
return 0;
}
if (a->level != b->level)
{
return 0;
}
if (a->depth != b->depth)
{
return 0;
}
return 1;
}
static inline VulkanRenderTarget* RenderTargetHash_Fetch(
RenderTargetHashArray *arr,
RenderTargetHash *key
) {
int32_t i;
for (i = 0; i < arr->count; i += 1)
{
RenderTargetHash *e = &arr->elements[i].key;
if (RenderTargetHash_Compare(e, key))
{
return arr->elements[i].value;
}
}
return NULL;
}
static inline void RenderTargetHash_Insert(
RenderTargetHashArray *arr,
RenderTargetHash key,
VulkanRenderTarget *value
) {
RenderTargetHashMap map;
map.key = key;
map.value = value;
EXPAND_ELEMENTS_IF_NEEDED(arr, 4, RenderTargetHashMap)
arr->elements[arr->count] = map;
arr->count += 1;
}
static inline void RenderTargetHash_Remove(
RenderTargetHashArray *arr,
uint32_t index
) {
if (index != arr->count - 1)
{
arr->elements[index] = arr->elements[arr->count - 1];
}
arr->count -= 1;
}
/* Descriptor Set Caches */
struct DescriptorSetCache
{
SDL_mutex *lock;
VkDescriptorSetLayout descriptorSetLayout;
uint32_t bindingCount;
VkDescriptorType descriptorType;
VkDescriptorPool *descriptorPools;
uint32_t descriptorPoolCount;
uint32_t nextPoolSize;
VkDescriptorSet *inactiveDescriptorSets;
uint32_t inactiveDescriptorSetCount;
uint32_t inactiveDescriptorSetCapacity;
};
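/* Sets are handed out from inactiveDescriptorSets; when that list runs
 * dry, a fresh fixed-size VkDescriptorPool of nextPoolSize sets is
 * allocated (hence "the pool is made of multiple pools" above). Used
 * sets are presumably recycled back onto the inactive list once the
 * owning command buffer completes. */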
/* Pipeline Caches */
#define NUM_PIPELINE_LAYOUT_BUCKETS 1031
typedef struct GraphicsPipelineLayoutHash
{
VkDescriptorSetLayout vertexSamplerLayout;
VkDescriptorSetLayout fragmentSamplerLayout;
VkDescriptorSetLayout vertexUniformLayout;
VkDescriptorSetLayout fragmentUniformLayout;
} GraphicsPipelineLayoutHash;
typedef struct GraphicsPipelineLayoutHashMap
{
GraphicsPipelineLayoutHash key;
VulkanGraphicsPipelineLayout *value;
} GraphicsPipelineLayoutHashMap;
typedef struct GraphicsPipelineLayoutHashArray
{
GraphicsPipelineLayoutHashMap *elements;
int32_t count;
int32_t capacity;
} GraphicsPipelineLayoutHashArray;
typedef struct GraphicsPipelineLayoutHashTable
{
GraphicsPipelineLayoutHashArray buckets[NUM_PIPELINE_LAYOUT_BUCKETS];
} GraphicsPipelineLayoutHashTable;
static inline uint64_t GraphicsPipelineLayoutHashTable_GetHashCode(GraphicsPipelineLayoutHash key)
{
const uint64_t HASH_FACTOR = 97;
uint64_t result = 1;
result = result * HASH_FACTOR + (uint64_t) key.vertexSamplerLayout;
result = result * HASH_FACTOR + (uint64_t) key.fragmentSamplerLayout;
result = result * HASH_FACTOR + (uint64_t) key.vertexUniformLayout;
result = result * HASH_FACTOR + (uint64_t) key.fragmentUniformLayout;
return result;
}
static inline VulkanGraphicsPipelineLayout* GraphicsPipelineLayoutHashArray_Fetch(
GraphicsPipelineLayoutHashTable *table,
GraphicsPipelineLayoutHash key
) {
int32_t i;
uint64_t hashcode = GraphicsPipelineLayoutHashTable_GetHashCode(key);
GraphicsPipelineLayoutHashArray *arr = &table->buckets[hashcode % NUM_PIPELINE_LAYOUT_BUCKETS];
for (i = 0; i < arr->count; i += 1)
{
const GraphicsPipelineLayoutHash *e = &arr->elements[i].key;
if ( key.vertexSamplerLayout == e->vertexSamplerLayout &&
key.fragmentSamplerLayout == e->fragmentSamplerLayout &&
key.vertexUniformLayout == e->vertexUniformLayout &&
key.fragmentUniformLayout == e->fragmentUniformLayout )
{
return arr->elements[i].value;
}
}
return NULL;
}
static inline void GraphicsPipelineLayoutHashArray_Insert(
GraphicsPipelineLayoutHashTable *table,
GraphicsPipelineLayoutHash key,
VulkanGraphicsPipelineLayout *value
) {
uint64_t hashcode = GraphicsPipelineLayoutHashTable_GetHashCode(key);
GraphicsPipelineLayoutHashArray *arr = &table->buckets[hashcode % NUM_PIPELINE_LAYOUT_BUCKETS];
GraphicsPipelineLayoutHashMap map;
map.key = key;
map.value = value;
EXPAND_ELEMENTS_IF_NEEDED(arr, 4, GraphicsPipelineLayoutHashMap)
arr->elements[arr->count] = map;
arr->count += 1;
}
typedef struct ComputePipelineLayoutHash
{
VkDescriptorSetLayout bufferLayout;
VkDescriptorSetLayout imageLayout;
VkDescriptorSetLayout uniformLayout;
} ComputePipelineLayoutHash;
typedef struct ComputePipelineLayoutHashMap
{
ComputePipelineLayoutHash key;
VulkanComputePipelineLayout *value;
} ComputePipelineLayoutHashMap;
typedef struct ComputePipelineLayoutHashArray
{
ComputePipelineLayoutHashMap *elements;
int32_t count;
int32_t capacity;
} ComputePipelineLayoutHashArray;
typedef struct ComputePipelineLayoutHashTable
{
ComputePipelineLayoutHashArray buckets[NUM_PIPELINE_LAYOUT_BUCKETS];
} ComputePipelineLayoutHashTable;
static inline uint64_t ComputePipelineLayoutHashTable_GetHashCode(ComputePipelineLayoutHash key)
{
const uint64_t HASH_FACTOR = 97;
uint64_t result = 1;
result = result * HASH_FACTOR + (uint64_t) key.bufferLayout;
result = result * HASH_FACTOR + (uint64_t) key.imageLayout;
result = result * HASH_FACTOR + (uint64_t) key.uniformLayout;
return result;
}
static inline VulkanComputePipelineLayout* ComputePipelineLayoutHashArray_Fetch(
ComputePipelineLayoutHashTable *table,
ComputePipelineLayoutHash key
) {
int32_t i;
uint64_t hashcode = ComputePipelineLayoutHashTable_GetHashCode(key);
ComputePipelineLayoutHashArray *arr = &table->buckets[hashcode % NUM_PIPELINE_LAYOUT_BUCKETS];
for (i = 0; i < arr->count; i += 1)
{
const ComputePipelineLayoutHash *e = &arr->elements[i].key;
if ( key.bufferLayout == e->bufferLayout &&
key.imageLayout == e->imageLayout &&
key.uniformLayout == e->uniformLayout )
{
return arr->elements[i].value;
}
}
return NULL;
}
static inline void ComputePipelineLayoutHashArray_Insert(
ComputePipelineLayoutHashTable *table,
ComputePipelineLayoutHash key,
VulkanComputePipelineLayout *value
) {
uint64_t hashcode = ComputePipelineLayoutHashTable_GetHashCode(key);
ComputePipelineLayoutHashArray *arr = &table->buckets[hashcode % NUM_PIPELINE_LAYOUT_BUCKETS];
ComputePipelineLayoutHashMap map;
map.key = key;
map.value = value;
EXPAND_ELEMENTS_IF_NEEDED(arr, 4, ComputePipelineLayoutHashMap)
arr->elements[arr->count] = map;
arr->count += 1;
}
/* Command structures */
typedef struct DescriptorSetData
{
DescriptorSetCache *descriptorSetCache;
VkDescriptorSet descriptorSet;
} DescriptorSetData;
typedef struct VulkanFencePool
{
SDL_mutex *lock;
VkFence *availableFences;
uint32_t availableFenceCount;
uint32_t availableFenceCapacity;
} VulkanFencePool;
typedef struct VulkanCommandPool VulkanCommandPool;
typedef struct VulkanCommandBuffer
{
VkCommandBuffer commandBuffer;
VulkanCommandPool *commandPool;
VulkanPresentData *presentDatas;
uint32_t presentDataCount;
uint32_t presentDataCapacity;
VkSemaphore *waitSemaphores;
uint32_t waitSemaphoreCount;
uint32_t waitSemaphoreCapacity;
VkSemaphore *signalSemaphores;
uint32_t signalSemaphoreCount;
uint32_t signalSemaphoreCapacity;
VulkanComputePipeline *currentComputePipeline;
VulkanGraphicsPipeline *currentGraphicsPipeline;
uint32_t vertexUniformOffset;
uint32_t fragmentUniformOffset;
uint32_t computeUniformOffset;
VulkanTexture *renderPassColorTargetTextures[MAX_COLOR_TARGET_BINDINGS];
uint32_t renderPassColorTargetCount;
VulkanTexture *renderPassDepthTexture; /* can be NULL */
VkDescriptorSet vertexSamplerDescriptorSet; /* updated by BindVertexSamplers */
VkDescriptorSet fragmentSamplerDescriptorSet; /* updated by BindFragmentSamplers */
VkDescriptorSet bufferDescriptorSet; /* updated by BindComputeBuffers */
VkDescriptorSet imageDescriptorSet; /* updated by BindComputeTextures */
/* FIXME: descriptor pools should be per-command-buffer */
DescriptorSetData *boundDescriptorSetDatas;
uint32_t boundDescriptorSetDataCount;
uint32_t boundDescriptorSetDataCapacity;
/* Keep track of compute resources for memory barriers */
VulkanBuffer **boundComputeBuffers;
uint32_t boundComputeBufferCount;
uint32_t boundComputeBufferCapacity;
VulkanTexture **boundComputeTextures;
uint32_t boundComputeTextureCount;
uint32_t boundComputeTextureCapacity;
/* Keep track of copy resources for memory barriers */
VulkanBuffer **copiedGpuBuffers;
uint32_t copiedGpuBufferCount;
uint32_t copiedGpuBufferCapacity;
VulkanTexture **copiedTextures;
uint32_t copiedTextureCount;
uint32_t copiedTextureCapacity;
/* Viewport/scissor state */
VkViewport currentViewport;
VkRect2D currentScissor;
/* Track used resources */
VulkanBuffer **usedBuffers;
uint32_t usedBufferCount;
uint32_t usedBufferCapacity;
VulkanTexture **usedTextures;
uint32_t usedTextureCount;
uint32_t usedTextureCapacity;
VulkanSampler **usedSamplers;
uint32_t usedSamplerCount;
uint32_t usedSamplerCapacity;
VulkanGraphicsPipeline **usedGraphicsPipelines;
uint32_t usedGraphicsPipelineCount;
uint32_t usedGraphicsPipelineCapacity;
VulkanComputePipeline **usedComputePipelines;
uint32_t usedComputePipelineCount;
uint32_t usedComputePipelineCapacity;
VulkanFramebuffer **usedFramebuffers;
uint32_t usedFramebufferCount;
uint32_t usedFramebufferCapacity;
/* Shader modules have references tracked by pipelines */
VkFence inFlightFence;
uint8_t autoReleaseFence;
} VulkanCommandBuffer;
struct VulkanCommandPool
{
SDL_threadID threadID;
VkCommandPool commandPool;
VulkanCommandBuffer **inactiveCommandBuffers;
uint32_t inactiveCommandBufferCapacity;
uint32_t inactiveCommandBufferCount;
};
#define NUM_COMMAND_POOL_BUCKETS 1031
typedef struct CommandPoolHash
{
SDL_threadID threadID;
} CommandPoolHash;
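/* Command pools are keyed by thread ID because a VkCommandPool (and the
 * command buffers allocated from it) must not be accessed by multiple
 * threads simultaneously; giving each recording thread its own pool
 * avoids external synchronization on the hot path. */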
typedef struct CommandPoolHashMap
{
CommandPoolHash key;
VulkanCommandPool *value;
} CommandPoolHashMap;
typedef struct CommandPoolHashArray
{
CommandPoolHashMap *elements;
uint32_t count;
uint32_t capacity;
} CommandPoolHashArray;
typedef struct CommandPoolHashTable
{
CommandPoolHashArray buckets[NUM_COMMAND_POOL_BUCKETS];
} CommandPoolHashTable;
static inline uint64_t CommandPoolHashTable_GetHashCode(CommandPoolHash key)
{
const uint64_t HASH_FACTOR = 97;
uint64_t result = 1;
result = result * HASH_FACTOR + (uint64_t) key.threadID;
return result;
}
static inline VulkanCommandPool* CommandPoolHashTable_Fetch(
CommandPoolHashTable *table,
CommandPoolHash key
) {
uint32_t i;
uint64_t hashcode = CommandPoolHashTable_GetHashCode(key);
CommandPoolHashArray *arr = &table->buckets[hashcode % NUM_COMMAND_POOL_BUCKETS];
for (i = 0; i < arr->count; i += 1)
{
const CommandPoolHash *e = &arr->elements[i].key;
if (key.threadID == e->threadID)
{
return arr->elements[i].value;
}
}
return NULL;
}
static inline void CommandPoolHashTable_Insert(
CommandPoolHashTable *table,
CommandPoolHash key,
VulkanCommandPool *value
) {
uint64_t hashcode = CommandPoolHashTable_GetHashCode(key);
CommandPoolHashArray *arr = &table->buckets[hashcode % NUM_COMMAND_POOL_BUCKETS];
CommandPoolHashMap map;
map.key = key;
map.value = value;
EXPAND_ELEMENTS_IF_NEEDED(arr, 4, CommandPoolHashMap)
arr->elements[arr->count] = map;
arr->count += 1;
}
/* Context */
typedef struct VulkanRenderer
{
VkInstance instance;
VkPhysicalDevice physicalDevice;
VkPhysicalDeviceProperties2 physicalDeviceProperties;
VkPhysicalDeviceDriverPropertiesKHR physicalDeviceDriverProperties;
VkDevice logicalDevice;
uint8_t unifiedMemoryWarning;
uint8_t supportsDebugUtils;
uint8_t debugMode;
VulkanExtensions supports;
VulkanMemoryAllocator *memoryAllocator;
VkPhysicalDeviceMemoryProperties memoryProperties;
WindowData **claimedWindows;
uint32_t claimedWindowCount;
uint32_t claimedWindowCapacity;
uint32_t queueFamilyIndex;
VkQueue unifiedQueue;
VulkanCommandBuffer **submittedCommandBuffers;
uint32_t submittedCommandBufferCount;
uint32_t submittedCommandBufferCapacity;
VulkanFencePool fencePool;
CommandPoolHashTable commandPoolHashTable;
DescriptorSetLayoutHashTable descriptorSetLayoutHashTable;
GraphicsPipelineLayoutHashTable graphicsPipelineLayoutHashTable;
ComputePipelineLayoutHashTable computePipelineLayoutHashTable;
RenderPassHashArray renderPassHashArray;
FramebufferHashArray framebufferHashArray;
RenderTargetHashArray renderTargetHashArray;
VkDescriptorPool defaultDescriptorPool;
VkDescriptorSetLayout emptyVertexSamplerLayout;
VkDescriptorSetLayout emptyFragmentSamplerLayout;
VkDescriptorSetLayout emptyComputeBufferDescriptorSetLayout;
VkDescriptorSetLayout emptyComputeImageDescriptorSetLayout;
VkDescriptorSet emptyVertexSamplerDescriptorSet;
VkDescriptorSet emptyFragmentSamplerDescriptorSet;
VkDescriptorSet emptyComputeBufferDescriptorSet;
VkDescriptorSet emptyComputeImageDescriptorSet;
VulkanUniformBufferObject *vertexUniformBufferObject;
VulkanUniformBufferObject *fragmentUniformBufferObject;
VulkanUniformBufferObject *computeUniformBufferObject;
VkDescriptorSetLayout vertexUniformDescriptorSetLayout;
VkDescriptorSetLayout fragmentUniformDescriptorSetLayout;
VkDescriptorSetLayout computeUniformDescriptorSetLayout;
uint32_t minUBOAlignment;
/* Some drivers don't support D16 for some reason. Fun! */
VkFormat D16Format;
VkFormat D16S8Format;
VulkanTexture **texturesToDestroy;
uint32_t texturesToDestroyCount;
uint32_t texturesToDestroyCapacity;
VulkanBuffer **buffersToDestroy;
uint32_t buffersToDestroyCount;
uint32_t buffersToDestroyCapacity;
VulkanSampler **samplersToDestroy;
uint32_t samplersToDestroyCount;
uint32_t samplersToDestroyCapacity;
VulkanGraphicsPipeline **graphicsPipelinesToDestroy;
uint32_t graphicsPipelinesToDestroyCount;
uint32_t graphicsPipelinesToDestroyCapacity;
VulkanComputePipeline **computePipelinesToDestroy;
uint32_t computePipelinesToDestroyCount;
uint32_t computePipelinesToDestroyCapacity;
VulkanShaderModule **shaderModulesToDestroy;
uint32_t shaderModulesToDestroyCount;
uint32_t shaderModulesToDestroyCapacity;
VulkanFramebuffer **framebuffersToDestroy;
uint32_t framebuffersToDestroyCount;
uint32_t framebuffersToDestroyCapacity;
SDL_mutex *allocatorLock;
SDL_mutex *disposeLock;
SDL_mutex *submitLock;
SDL_mutex *acquireCommandBufferLock;
SDL_mutex *renderPassFetchLock;
SDL_mutex *framebufferFetchLock;
SDL_mutex *renderTargetFetchLock;
uint8_t needDefrag;
uint64_t defragTimestamp;
uint8_t defragInProgress;
#define VULKAN_INSTANCE_FUNCTION(ext, ret, func, params) \
vkfntype_##func func;
#define VULKAN_DEVICE_FUNCTION(ext, ret, func, params) \
vkfntype_##func func;
#include "Refresh_Driver_Vulkan_vkfuncs.h"
} VulkanRenderer;
/* Forward declarations */
static uint8_t VULKAN_INTERNAL_DefragmentMemory(VulkanRenderer *renderer);
static void VULKAN_INTERNAL_BeginCommandBuffer(VulkanRenderer *renderer, VulkanCommandBuffer *commandBuffer);
static void VULKAN_UnclaimWindow(Refresh_Renderer *driverData, void *windowHandle);
static void VULKAN_Wait(Refresh_Renderer *driverData);
static void VULKAN_Submit(Refresh_Renderer *driverData, Refresh_CommandBuffer *commandBuffer);
static void VULKAN_INTERNAL_DestroyRenderTarget(VulkanRenderer *renderer, VulkanRenderTarget *renderTarget);
/* Error Handling */
static inline const char* VkErrorMessages(VkResult code)
{
#define ERR_TO_STR(e) \
case e: return #e;
switch (code)
{
ERR_TO_STR(VK_ERROR_OUT_OF_HOST_MEMORY)
ERR_TO_STR(VK_ERROR_OUT_OF_DEVICE_MEMORY)
ERR_TO_STR(VK_ERROR_FRAGMENTED_POOL)
ERR_TO_STR(VK_ERROR_OUT_OF_POOL_MEMORY)
ERR_TO_STR(VK_ERROR_INITIALIZATION_FAILED)
ERR_TO_STR(VK_ERROR_LAYER_NOT_PRESENT)
ERR_TO_STR(VK_ERROR_EXTENSION_NOT_PRESENT)
ERR_TO_STR(VK_ERROR_FEATURE_NOT_PRESENT)
ERR_TO_STR(VK_ERROR_TOO_MANY_OBJECTS)
ERR_TO_STR(VK_ERROR_DEVICE_LOST)
ERR_TO_STR(VK_ERROR_INCOMPATIBLE_DRIVER)
ERR_TO_STR(VK_ERROR_OUT_OF_DATE_KHR)
ERR_TO_STR(VK_ERROR_SURFACE_LOST_KHR)
ERR_TO_STR(VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT)
ERR_TO_STR(VK_SUBOPTIMAL_KHR)
default: return "Unhandled VkResult!";
}
#undef ERR_TO_STR
}
static inline void LogVulkanResultAsError(
const char* vulkanFunctionName,
VkResult result
) {
if (result != VK_SUCCESS)
{
Refresh_LogError(
"%s: %s",
vulkanFunctionName,
VkErrorMessages(result)
);
}
}
static inline void LogVulkanResultAsWarn(
const char* vulkanFunctionName,
VkResult result
) {
if (result != VK_SUCCESS)
{
Refresh_LogWarn(
"%s: %s",
vulkanFunctionName,
VkErrorMessages(result)
);
}
}
#define VULKAN_ERROR_CHECK(res, fn, ret) \
if (res != VK_SUCCESS) \
{ \
Refresh_LogError("%s %s", #fn, VkErrorMessages(res)); \
return ret; \
}
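/* Illustrative usage of VULKAN_ERROR_CHECK (hypothetical call site):
 *
 *   vulkanResult = renderer->vkCreateBuffer(renderer->logicalDevice, &createInfo, NULL, &buffer);
 *   VULKAN_ERROR_CHECK(vulkanResult, vkCreateBuffer, NULL)
 *
 * i.e. on any non-VK_SUCCESS result, log "vkCreateBuffer <error name>"
 * and bail out with the given return value.
 */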
/* Utility */
static inline VkFormat RefreshToVK_DepthFormat(
VulkanRenderer* renderer,
Refresh_TextureFormat format
) {
switch (format)
{
case REFRESH_TEXTUREFORMAT_D16_UNORM:
return renderer->D16Format;
case REFRESH_TEXTUREFORMAT_D16_UNORM_S8_UINT:
return renderer->D16S8Format;
case REFRESH_TEXTUREFORMAT_D32_SFLOAT:
return VK_FORMAT_D32_SFLOAT;
case REFRESH_TEXTUREFORMAT_D32_SFLOAT_S8_UINT:
return VK_FORMAT_D32_SFLOAT_S8_UINT;
default:
return VK_FORMAT_UNDEFINED;
}
}
static inline uint8_t IsRefreshDepthFormat(Refresh_TextureFormat format)
{
switch (format)
{
case REFRESH_TEXTUREFORMAT_D16_UNORM:
case REFRESH_TEXTUREFORMAT_D32_SFLOAT:
case REFRESH_TEXTUREFORMAT_D16_UNORM_S8_UINT:
case REFRESH_TEXTUREFORMAT_D32_SFLOAT_S8_UINT:
return 1;
default:
return 0;
}
}
static inline uint8_t IsDepthFormat(VkFormat format)
{
switch (format)
{
case VK_FORMAT_D16_UNORM:
case VK_FORMAT_D32_SFLOAT:
case VK_FORMAT_D16_UNORM_S8_UINT:
case VK_FORMAT_D32_SFLOAT_S8_UINT:
return 1;
default:
return 0;
}
}
static inline uint8_t IsStencilFormat(VkFormat format)
{
switch (format)
{
case VK_FORMAT_D16_UNORM_S8_UINT:
case VK_FORMAT_D32_SFLOAT_S8_UINT:
return 1;
default:
return 0;
}
}
static inline uint32_t VULKAN_INTERNAL_BytesPerPixel(VkFormat format)
{
switch (format)
{
case VK_FORMAT_R8_UNORM:
case VK_FORMAT_R8_UINT:
return 1;
case VK_FORMAT_R5G6B5_UNORM_PACK16:
case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
case VK_FORMAT_R16_SFLOAT:
case VK_FORMAT_R8G8_SNORM:
case VK_FORMAT_R8G8_UINT:
case VK_FORMAT_R16_UINT:
case VK_FORMAT_D16_UNORM:
return 2;
case VK_FORMAT_D16_UNORM_S8_UINT:
return 3;
case VK_FORMAT_R8G8B8A8_UNORM:
case VK_FORMAT_B8G8R8A8_UNORM:
case VK_FORMAT_R32_SFLOAT:
case VK_FORMAT_R16G16_UNORM:
case VK_FORMAT_R16G16_SFLOAT:
case VK_FORMAT_R8G8B8A8_SNORM:
case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
case VK_FORMAT_R8G8B8A8_UINT:
case VK_FORMAT_R16G16_UINT:
case VK_FORMAT_D32_SFLOAT:
return 4;
case VK_FORMAT_D32_SFLOAT_S8_UINT:
return 5;
case VK_FORMAT_R16G16B16A16_SFLOAT:
case VK_FORMAT_R16G16B16A16_UNORM:
case VK_FORMAT_R32G32_SFLOAT:
case VK_FORMAT_R16G16B16A16_UINT:
case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
return 8;
case VK_FORMAT_R32G32B32A32_SFLOAT:
case VK_FORMAT_BC2_UNORM_BLOCK:
case VK_FORMAT_BC3_UNORM_BLOCK:
case VK_FORMAT_BC7_UNORM_BLOCK:
return 16;
default:
Refresh_LogError("Texture format not recognized!");
return 0;
}
}
static inline uint32_t VULKAN_INTERNAL_TextureBlockSize(
VkFormat format
) {
switch (format)
{
case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
case VK_FORMAT_BC2_UNORM_BLOCK:
case VK_FORMAT_BC3_UNORM_BLOCK:
case VK_FORMAT_BC7_UNORM_BLOCK:
return 4;
case VK_FORMAT_R8G8B8A8_UNORM:
case VK_FORMAT_B8G8R8A8_UNORM:
case VK_FORMAT_R5G6B5_UNORM_PACK16:
case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
case VK_FORMAT_R16G16_UNORM:
case VK_FORMAT_R16G16B16A16_UNORM:
case VK_FORMAT_R8_UNORM:
case VK_FORMAT_R8G8_SNORM:
case VK_FORMAT_R8G8B8A8_SNORM:
case VK_FORMAT_R16_SFLOAT:
case VK_FORMAT_R16G16_SFLOAT:
case VK_FORMAT_R16G16B16A16_SFLOAT:
case VK_FORMAT_R32_SFLOAT:
case VK_FORMAT_R32G32_SFLOAT:
case VK_FORMAT_R32G32B32A32_SFLOAT:
case VK_FORMAT_R8_UINT:
case VK_FORMAT_R8G8_UINT:
case VK_FORMAT_R8G8B8A8_UINT:
case VK_FORMAT_R16_UINT:
case VK_FORMAT_R16G16_UINT:
case VK_FORMAT_R16G16B16A16_UINT:
case VK_FORMAT_D16_UNORM:
case VK_FORMAT_D32_SFLOAT:
case VK_FORMAT_D16_UNORM_S8_UINT:
case VK_FORMAT_D32_SFLOAT_S8_UINT:
return 1;
default:
Refresh_LogError("Unrecognized texture format!");
return 0;
}
}
static inline VkDeviceSize VULKAN_INTERNAL_BytesPerImage(
uint32_t width,
uint32_t height,
VkFormat format
) {
uint32_t blockSize = VULKAN_INTERNAL_TextureBlockSize(format);
/* widen to VkDeviceSize before multiplying to avoid 32-bit overflow on large images */
return ((VkDeviceSize) width * height * VULKAN_INTERNAL_BytesPerPixel(format)) / (blockSize * blockSize);
}
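/* Worked example: a 256x256 BC1 texture is 4x4-blocked at 8 bytes per block,
 * so BytesPerImage(256, 256, VK_FORMAT_BC1_RGBA_UNORM_BLOCK)
 * = (256 * 256 * 8) / (4 * 4) = 32768 bytes.
 */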
static inline Refresh_SampleCount VULKAN_INTERNAL_GetMaxMultiSampleCount(
VulkanRenderer *renderer,
Refresh_SampleCount multiSampleCount
) {
VkSampleCountFlags flags = renderer->physicalDeviceProperties.properties.limits.framebufferColorSampleCounts;
Refresh_SampleCount maxSupported = REFRESH_SAMPLECOUNT_1;
if (flags & VK_SAMPLE_COUNT_8_BIT)
{
maxSupported = REFRESH_SAMPLECOUNT_8;
}
else if (flags & VK_SAMPLE_COUNT_4_BIT)
{
maxSupported = REFRESH_SAMPLECOUNT_4;
}
else if (flags & VK_SAMPLE_COUNT_2_BIT)
{
maxSupported = REFRESH_SAMPLECOUNT_2;
}
return SDL_min(multiSampleCount, maxSupported);
}
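/* Note: Refresh_SampleCount values are ordered from 1x upward, so the SDL_min
 * above clamps the requested count to the highest count the device reports in
 * framebufferColorSampleCounts.
 */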
/* Memory Management */
/* Vulkan: Memory Allocation */
static inline VkDeviceSize VULKAN_INTERNAL_NextHighestAlignment(
VkDeviceSize n,
VkDeviceSize align
) {
return align * ((n + align - 1) / align);
}
static inline uint32_t VULKAN_INTERNAL_NextHighestAlignment32(
uint32_t n,
uint32_t align
) {
return align * ((n + align - 1) / align);
}
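/* Both helpers round n up to the next multiple of align (a power of two for
 * Vulkan memory requirements), e.g. NextHighestAlignment(13, 8) == 16 and
 * NextHighestAlignment(16, 8) == 16.
 */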
static void VULKAN_INTERNAL_MakeMemoryUnavailable(
VulkanRenderer* renderer,
VulkanMemoryAllocation *allocation
) {
uint32_t i, j;
VulkanMemoryFreeRegion *freeRegion;
allocation->availableForAllocation = 0;
for (i = 0; i < allocation->freeRegionCount; i += 1)
{
freeRegion = allocation->freeRegions[i];
/* close the gap in the sorted list */
if (allocation->allocator->sortedFreeRegionCount > 1)
{
for (j = freeRegion->sortedIndex; j < allocation->allocator->sortedFreeRegionCount - 1; j += 1)
{
allocation->allocator->sortedFreeRegions[j] =
allocation->allocator->sortedFreeRegions[j + 1];
allocation->allocator->sortedFreeRegions[j]->sortedIndex = j;
}
}
allocation->allocator->sortedFreeRegionCount -= 1;
}
}
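/* With its free regions pulled from the allocator's sorted list, an
 * unavailable allocation can never satisfy a new bind; defragmentation uses
 * this to drain an allocation before releasing it.
 */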
static void VULKAN_INTERNAL_RemoveMemoryFreeRegion(
VulkanRenderer *renderer,
VulkanMemoryFreeRegion *freeRegion
) {
uint32_t i;
SDL_LockMutex(renderer->allocatorLock);
if (freeRegion->allocation->availableForAllocation)
{
/* close the gap in the sorted list */
if (freeRegion->allocation->allocator->sortedFreeRegionCount > 1)
{
for (i = freeRegion->sortedIndex; i < freeRegion->allocation->allocator->sortedFreeRegionCount - 1; i += 1)
{
freeRegion->allocation->allocator->sortedFreeRegions[i] =
freeRegion->allocation->allocator->sortedFreeRegions[i + 1];
freeRegion->allocation->allocator->sortedFreeRegions[i]->sortedIndex = i;
}
}
freeRegion->allocation->allocator->sortedFreeRegionCount -= 1;
}
/* close the gap in the buffer list */
if (freeRegion->allocation->freeRegionCount > 1 && freeRegion->allocationIndex != freeRegion->allocation->freeRegionCount - 1)
{
freeRegion->allocation->freeRegions[freeRegion->allocationIndex] =
freeRegion->allocation->freeRegions[freeRegion->allocation->freeRegionCount - 1];
freeRegion->allocation->freeRegions[freeRegion->allocationIndex]->allocationIndex =
freeRegion->allocationIndex;
}
freeRegion->allocation->freeRegionCount -= 1;
freeRegion->allocation->freeSpace -= freeRegion->size;
SDL_free(freeRegion);
SDL_UnlockMutex(renderer->allocatorLock);
}
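/* Records a free region, recursively merging it with any adjacent free region
 * so that contiguous space coalesces, and keeps the allocator's sorted list
 * ordered largest-first so sortedFreeRegions[0] is always the best candidate.
 */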
static void VULKAN_INTERNAL_NewMemoryFreeRegion(
VulkanRenderer *renderer,
VulkanMemoryAllocation *allocation,
VkDeviceSize offset,
VkDeviceSize size
) {
VulkanMemoryFreeRegion *newFreeRegion;
VkDeviceSize newOffset, newSize;
int32_t insertionIndex = 0;
int32_t i;
SDL_LockMutex(renderer->allocatorLock);
/* look for an adjacent region to merge */
for (i = allocation->freeRegionCount - 1; i >= 0; i -= 1)
{
/* check left side */
if (allocation->freeRegions[i]->offset + allocation->freeRegions[i]->size == offset)
{
newOffset = allocation->freeRegions[i]->offset;
newSize = allocation->freeRegions[i]->size + size;
VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, allocation->freeRegions[i]);
VULKAN_INTERNAL_NewMemoryFreeRegion(renderer, allocation, newOffset, newSize);
SDL_UnlockMutex(renderer->allocatorLock);
return;
}
/* check right side */
if (allocation->freeRegions[i]->offset == offset + size)
{
newOffset = offset;
newSize = allocation->freeRegions[i]->size + size;
VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, allocation->freeRegions[i]);
VULKAN_INTERNAL_NewMemoryFreeRegion(renderer, allocation, newOffset, newSize);
SDL_UnlockMutex(renderer->allocatorLock);
return;
}
}
/* region is not contiguous with another free region, make a new one */
allocation->freeRegionCount += 1;
if (allocation->freeRegionCount > allocation->freeRegionCapacity)
{
allocation->freeRegionCapacity *= 2;
allocation->freeRegions = SDL_realloc(
allocation->freeRegions,
sizeof(VulkanMemoryFreeRegion*) * allocation->freeRegionCapacity
);
}
newFreeRegion = SDL_malloc(sizeof(VulkanMemoryFreeRegion));
newFreeRegion->offset = offset;
newFreeRegion->size = size;
newFreeRegion->allocation = allocation;
allocation->freeSpace += size;
allocation->freeRegions[allocation->freeRegionCount - 1] = newFreeRegion;
newFreeRegion->allocationIndex = allocation->freeRegionCount - 1;
if (allocation->availableForAllocation)
{
for (i = 0; i < allocation->allocator->sortedFreeRegionCount; i += 1)
{
if (allocation->allocator->sortedFreeRegions[i]->size < size)
{
/* this is where the new region should go */
break;
}
insertionIndex += 1;
}
if (allocation->allocator->sortedFreeRegionCount + 1 > allocation->allocator->sortedFreeRegionCapacity)
{
allocation->allocator->sortedFreeRegionCapacity *= 2;
allocation->allocator->sortedFreeRegions = SDL_realloc(
allocation->allocator->sortedFreeRegions,
sizeof(VulkanMemoryFreeRegion*) * allocation->allocator->sortedFreeRegionCapacity
);
}
/* perform insertion sort */
if (allocation->allocator->sortedFreeRegionCount > 0 && insertionIndex != allocation->allocator->sortedFreeRegionCount)
{
for (i = allocation->allocator->sortedFreeRegionCount; i > insertionIndex && i > 0; i -= 1)
{
allocation->allocator->sortedFreeRegions[i] = allocation->allocator->sortedFreeRegions[i - 1];
allocation->allocator->sortedFreeRegions[i]->sortedIndex = i;
}
}
allocation->allocator->sortedFreeRegionCount += 1;
allocation->allocator->sortedFreeRegions[insertionIndex] = newFreeRegion;
newFreeRegion->sortedIndex = insertionIndex;
}
SDL_UnlockMutex(renderer->allocatorLock);
}
static VulkanMemoryUsedRegion* VULKAN_INTERNAL_NewMemoryUsedRegion(
VulkanRenderer *renderer,
VulkanMemoryAllocation *allocation,
VkDeviceSize offset,
VkDeviceSize size,
VkDeviceSize resourceOffset,
VkDeviceSize resourceSize,
VkDeviceSize alignment
) {
VulkanMemoryUsedRegion *memoryUsedRegion;
SDL_LockMutex(renderer->allocatorLock);
if (allocation->usedRegionCount == allocation->usedRegionCapacity)
{
allocation->usedRegionCapacity *= 2;
allocation->usedRegions = SDL_realloc(
allocation->usedRegions,
allocation->usedRegionCapacity * sizeof(VulkanMemoryUsedRegion*)
);
}
memoryUsedRegion = SDL_malloc(sizeof(VulkanMemoryUsedRegion));
memoryUsedRegion->allocation = allocation;
memoryUsedRegion->offset = offset;
memoryUsedRegion->size = size;
memoryUsedRegion->resourceOffset = resourceOffset;
memoryUsedRegion->resourceSize = resourceSize;
memoryUsedRegion->alignment = alignment;
allocation->usedSpace += size;
allocation->usedRegions[allocation->usedRegionCount] = memoryUsedRegion;
allocation->usedRegionCount += 1;
SDL_UnlockMutex(renderer->allocatorLock);
return memoryUsedRegion;
}
static void VULKAN_INTERNAL_RemoveMemoryUsedRegion(
VulkanRenderer *renderer,
VulkanMemoryUsedRegion *usedRegion
) {
uint32_t i;
SDL_LockMutex(renderer->allocatorLock);
for (i = 0; i < usedRegion->allocation->usedRegionCount; i += 1)
{
if (usedRegion->allocation->usedRegions[i] == usedRegion)
{
/* plug the hole */
if (i != usedRegion->allocation->usedRegionCount - 1)
{
usedRegion->allocation->usedRegions[i] = usedRegion->allocation->usedRegions[usedRegion->allocation->usedRegionCount - 1];
}
break;
}
}
usedRegion->allocation->usedSpace -= usedRegion->size;
usedRegion->allocation->usedRegionCount -= 1;
VULKAN_INTERNAL_NewMemoryFreeRegion(
renderer,
usedRegion->allocation,
usedRegion->offset,
usedRegion->size
);
if (!usedRegion->allocation->dedicated)
{
renderer->needDefrag = 1;
renderer->defragTimestamp = SDL_GetTicks64() + DEFRAG_TIME; /* reset timer so we batch defrags */
}
SDL_free(usedRegion);
SDL_UnlockMutex(renderer->allocatorLock);
}
static uint8_t VULKAN_INTERNAL_FindMemoryType(
VulkanRenderer *renderer,
uint32_t typeFilter,
VkMemoryPropertyFlags requiredProperties,
VkMemoryPropertyFlags ignoredProperties,
uint32_t *memoryTypeIndex
) {
uint32_t i;
for (i = *memoryTypeIndex; i < renderer->memoryProperties.memoryTypeCount; i += 1)
{
if ( (typeFilter & (1 << i)) &&
(renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties &&
(renderer->memoryProperties.memoryTypes[i].propertyFlags & ignoredProperties) == 0 )
{
*memoryTypeIndex = i;
return 1;
}
}
return 0;
}
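/* *memoryTypeIndex is both input and output: callers start the scan at 0 and,
 * when a later bind fails, bump the index and call again to resume the search
 * from the next candidate memory type.
 */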
static uint8_t VULKAN_INTERNAL_FindBufferMemoryRequirements(
VulkanRenderer *renderer,
VkBuffer buffer,
VkMemoryPropertyFlags requiredMemoryProperties,
VkMemoryPropertyFlags ignoredMemoryProperties,
VkMemoryRequirements2KHR *pMemoryRequirements,
uint32_t *pMemoryTypeIndex
) {
VkBufferMemoryRequirementsInfo2KHR bufferRequirementsInfo;
bufferRequirementsInfo.sType =
VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR;
bufferRequirementsInfo.pNext = NULL;
bufferRequirementsInfo.buffer = buffer;
renderer->vkGetBufferMemoryRequirements2KHR(
renderer->logicalDevice,
&bufferRequirementsInfo,
pMemoryRequirements
);
return VULKAN_INTERNAL_FindMemoryType(
renderer,
pMemoryRequirements->memoryRequirements.memoryTypeBits,
requiredMemoryProperties,
ignoredMemoryProperties,
pMemoryTypeIndex
);
}
static uint8_t VULKAN_INTERNAL_FindImageMemoryRequirements(
VulkanRenderer *renderer,
VkImage image,
VkMemoryPropertyFlags requiredMemoryPropertyFlags,
VkMemoryRequirements2KHR *pMemoryRequirements,
uint32_t *pMemoryTypeIndex
) {
VkImageMemoryRequirementsInfo2KHR imageRequirementsInfo;
imageRequirementsInfo.sType =
VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR;
imageRequirementsInfo.pNext = NULL;
imageRequirementsInfo.image = image;
renderer->vkGetImageMemoryRequirements2KHR(
renderer->logicalDevice,
&imageRequirementsInfo,
pMemoryRequirements
);
return VULKAN_INTERNAL_FindMemoryType(
renderer,
pMemoryRequirements->memoryRequirements.memoryTypeBits,
requiredMemoryPropertyFlags,
0,
pMemoryTypeIndex
);
}
static void VULKAN_INTERNAL_DeallocateMemory(
VulkanRenderer *renderer,
VulkanMemorySubAllocator *allocator,
uint32_t allocationIndex
) {
uint32_t i;
VulkanMemoryAllocation *allocation = allocator->allocations[allocationIndex];
SDL_LockMutex(renderer->allocatorLock);
for (i = 0; i < allocation->freeRegionCount; i += 1)
{
VULKAN_INTERNAL_RemoveMemoryFreeRegion(
renderer,
allocation->freeRegions[i]
);
}
SDL_free(allocation->freeRegions);
/* no need to iterate used regions because deallocate
* only happens when there are 0 used regions
*/
SDL_free(allocation->usedRegions);
renderer->vkFreeMemory(
renderer->logicalDevice,
allocation->memory,
NULL
);
SDL_DestroyMutex(allocation->memoryLock);
SDL_free(allocation);
if (allocationIndex != allocator->allocationCount - 1)
{
allocator->allocations[allocationIndex] = allocator->allocations[allocator->allocationCount - 1];
}
allocator->allocationCount -= 1;
SDL_UnlockMutex(renderer->allocatorLock);
}
static uint8_t VULKAN_INTERNAL_AllocateMemory(
VulkanRenderer *renderer,
VkBuffer buffer,
VkImage image,
uint32_t memoryTypeIndex,
VkDeviceSize allocationSize,
uint8_t dedicated, /* indicates that one resource uses this memory and the memory shouldn't be moved */
uint8_t isHostVisible,
VulkanMemoryAllocation **pMemoryAllocation)
{
VulkanMemoryAllocation *allocation;
VulkanMemorySubAllocator *allocator = &renderer->memoryAllocator->subAllocators[memoryTypeIndex];
VkMemoryAllocateInfo allocInfo;
VkResult result;
allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
allocInfo.pNext = NULL;
allocInfo.memoryTypeIndex = memoryTypeIndex;
allocInfo.allocationSize = allocationSize;
allocation = SDL_malloc(sizeof(VulkanMemoryAllocation));
allocation->size = allocationSize;
allocation->freeSpace = 0; /* added by FreeRegions */
allocation->usedSpace = 0; /* added by UsedRegions */
allocation->memoryLock = SDL_CreateMutex();
allocator->allocationCount += 1;
allocator->allocations = SDL_realloc(
allocator->allocations,
sizeof(VulkanMemoryAllocation*) * allocator->allocationCount
);
allocator->allocations[
allocator->allocationCount - 1
] = allocation;
if (dedicated)
{
allocation->dedicated = 1;
allocation->availableForAllocation = 0;
}
else
{
allocInfo.pNext = NULL;
allocation->dedicated = 0;
allocation->availableForAllocation = 1;
}
allocation->usedRegions = SDL_malloc(sizeof(VulkanMemoryUsedRegion*));
allocation->usedRegionCount = 0;
allocation->usedRegionCapacity = 1;
allocation->freeRegions = SDL_malloc(sizeof(VulkanMemoryFreeRegion*));
allocation->freeRegionCount = 0;
allocation->freeRegionCapacity = 1;
allocation->allocator = allocator;
result = renderer->vkAllocateMemory(
renderer->logicalDevice,
&allocInfo,
NULL,
&allocation->memory
);
if (result != VK_SUCCESS)
{
/* Uh oh, we couldn't allocate, time to clean up */
SDL_free(allocation->freeRegions);
SDL_free(allocation->usedRegions);
SDL_DestroyMutex(allocation->memoryLock);
allocator->allocationCount -= 1;
allocator->allocations = SDL_realloc(
allocator->allocations,
sizeof(VulkanMemoryAllocation*) * allocator->allocationCount
);
SDL_free(allocation);
return 0;
}
VULKAN_INTERNAL_NewMemoryFreeRegion(
renderer,
allocation,
0,
allocation->size
);
*pMemoryAllocation = allocation;
return 1;
}
static uint8_t VULKAN_INTERNAL_BindBufferMemory(
VulkanRenderer *renderer,
VulkanMemoryUsedRegion *usedRegion,
VkDeviceSize alignedOffset,
VkBuffer buffer
) {
VkResult vulkanResult;
SDL_LockMutex(usedRegion->allocation->memoryLock);
vulkanResult = renderer->vkBindBufferMemory(
renderer->logicalDevice,
buffer,
usedRegion->allocation->memory,
alignedOffset
);
SDL_UnlockMutex(usedRegion->allocation->memoryLock);
VULKAN_ERROR_CHECK(vulkanResult, vkBindBufferMemory, 0)
return 1;
}
static uint8_t VULKAN_INTERNAL_BindImageMemory(
VulkanRenderer *renderer,
VulkanMemoryUsedRegion *usedRegion,
VkDeviceSize alignedOffset,
VkImage image
) {
VkResult vulkanResult;
SDL_LockMutex(usedRegion->allocation->memoryLock);
vulkanResult = renderer->vkBindImageMemory(
renderer->logicalDevice,
image,
usedRegion->allocation->memory,
alignedOffset
);
SDL_UnlockMutex(usedRegion->allocation->memoryLock);
VULKAN_ERROR_CHECK(vulkanResult, vkBindImageMemory, 0)
return 1;
}
static uint8_t VULKAN_INTERNAL_BindResourceMemory(
VulkanRenderer* renderer,
uint32_t memoryTypeIndex,
VkMemoryRequirements2KHR* memoryRequirements,
uint8_t forceDedicated,
VkDeviceSize resourceSize, /* may be different from requirements size! */
VkBuffer buffer, /* may be VK_NULL_HANDLE */
VkImage image, /* may be VK_NULL_HANDLE */
VulkanMemoryUsedRegion** pMemoryUsedRegion
) {
VulkanMemoryAllocation *allocation;
VulkanMemorySubAllocator *allocator;
VulkanMemoryFreeRegion *region;
VulkanMemoryUsedRegion *usedRegion;
VkDeviceSize requiredSize, allocationSize;
VkDeviceSize alignedOffset;
uint32_t newRegionSize, newRegionOffset;
uint8_t shouldAllocDedicated = forceDedicated;
uint8_t isHostVisible, allocationResult;
isHostVisible =
(renderer->memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags &
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
allocator = &renderer->memoryAllocator->subAllocators[memoryTypeIndex];
requiredSize = memoryRequirements->memoryRequirements.size;
if ( (buffer == VK_NULL_HANDLE && image == VK_NULL_HANDLE) ||
(buffer != VK_NULL_HANDLE && image != VK_NULL_HANDLE) )
{
Refresh_LogError("BindResourceMemory must be given exactly one of a VkBuffer or a VkImage");
return 0;
}
SDL_LockMutex(renderer->allocatorLock);
/* find the largest free region and use it */
if (!shouldAllocDedicated && allocator->sortedFreeRegionCount > 0)
{
region = allocator->sortedFreeRegions[0];
allocation = region->allocation;
alignedOffset = VULKAN_INTERNAL_NextHighestAlignment(
region->offset,
memoryRequirements->memoryRequirements.alignment
);
if (alignedOffset + requiredSize <= region->offset + region->size)
{
usedRegion = VULKAN_INTERNAL_NewMemoryUsedRegion(
renderer,
allocation,
region->offset,
requiredSize + (alignedOffset - region->offset),
alignedOffset,
resourceSize,
memoryRequirements->memoryRequirements.alignment
);
usedRegion->isBuffer = buffer != VK_NULL_HANDLE;
newRegionSize = region->size - ((alignedOffset - region->offset) + requiredSize);
newRegionOffset = alignedOffset + requiredSize;
/* remove and add modified region to re-sort */
VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, region);
/* if size is 0, no need to re-insert */
if (newRegionSize != 0)
{
VULKAN_INTERNAL_NewMemoryFreeRegion(
renderer,
allocation,
newRegionOffset,
newRegionSize
);
}
SDL_UnlockMutex(renderer->allocatorLock);
if (buffer != VK_NULL_HANDLE)
{
if (!VULKAN_INTERNAL_BindBufferMemory(
renderer,
usedRegion,
alignedOffset,
buffer
)) {
VULKAN_INTERNAL_RemoveMemoryUsedRegion(
renderer,
usedRegion
);
return 0;
}
}
else if (image != VK_NULL_HANDLE)
{
if (!VULKAN_INTERNAL_BindImageMemory(
renderer,
usedRegion,
alignedOffset,
image
)) {
VULKAN_INTERNAL_RemoveMemoryUsedRegion(
renderer,
usedRegion
);
return 0;
}
}
*pMemoryUsedRegion = usedRegion;
return 1;
}
}
/* No suitable free regions exist, allocate a new memory region */
if (shouldAllocDedicated)
{
allocationSize = requiredSize;
}
else if (requiredSize > allocator->nextAllocationSize)
{
/* round the allocation size up to the next ALLOCATION_INCREMENT boundary */
allocationSize =
VULKAN_INTERNAL_NextHighestAlignment(requiredSize, ALLOCATION_INCREMENT);
}
else
{
allocationSize = allocator->nextAllocationSize;
}
allocationResult = VULKAN_INTERNAL_AllocateMemory(
renderer,
buffer,
image,
memoryTypeIndex,
allocationSize,
shouldAllocDedicated,
isHostVisible,
&allocation
);
/* Uh oh, we're out of memory */
if (allocationResult == 0)
{
SDL_UnlockMutex(renderer->allocatorLock);
/* Responsibility of the caller to handle being out of memory */
return 2;
}
usedRegion = VULKAN_INTERNAL_NewMemoryUsedRegion(
renderer,
allocation,
0,
requiredSize,
0,
resourceSize,
memoryRequirements->memoryRequirements.alignment
);
usedRegion->isBuffer = buffer != VK_NULL_HANDLE;
region = allocation->freeRegions[0];
newRegionOffset = region->offset + requiredSize;
newRegionSize = region->size - requiredSize;
VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, region);
if (newRegionSize != 0)
{
VULKAN_INTERNAL_NewMemoryFreeRegion(
renderer,
allocation,
newRegionOffset,
newRegionSize
);
}
SDL_UnlockMutex(renderer->allocatorLock);
if (buffer != VK_NULL_HANDLE)
{
if (!VULKAN_INTERNAL_BindBufferMemory(
renderer,
usedRegion,
0,
buffer
)) {
VULKAN_INTERNAL_RemoveMemoryUsedRegion(
renderer,
usedRegion
);
return 0;
}
}
else if (image != VK_NULL_HANDLE)
{
if (!VULKAN_INTERNAL_BindImageMemory(
renderer,
usedRegion,
0,
image
)) {
VULKAN_INTERNAL_RemoveMemoryUsedRegion(
renderer,
usedRegion
);
return 0;
}
}
*pMemoryUsedRegion = usedRegion;
return 1;
}
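/* Return codes: 1 = bound, 0 = bind failed, 2 = vkAllocateMemory itself
 * failed. Callers respond to anything other than 1 by retrying with the next
 * memory type or with relaxed property flags.
 */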
static uint8_t VULKAN_INTERNAL_BindMemoryForImage(
VulkanRenderer* renderer,
VkImage image,
uint8_t isRenderTarget,
VulkanMemoryUsedRegion** usedRegion
) {
uint8_t bindResult = 0;
uint32_t memoryTypeIndex = 0;
VkMemoryPropertyFlags requiredMemoryPropertyFlags;
VkMemoryRequirements2KHR memoryRequirements =
{
VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
NULL
};
/* Prefer GPU allocation for textures */
requiredMemoryPropertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
while (VULKAN_INTERNAL_FindImageMemoryRequirements(
renderer,
image,
requiredMemoryPropertyFlags,
&memoryRequirements,
&memoryTypeIndex
)) {
bindResult = VULKAN_INTERNAL_BindResourceMemory(
renderer,
memoryTypeIndex,
&memoryRequirements,
isRenderTarget,
memoryRequirements.memoryRequirements.size,
VK_NULL_HANDLE,
image,
usedRegion
);
if (bindResult == 1)
{
break;
}
else /* Bind failed, try the next device-local memory type */
{
memoryTypeIndex += 1;
}
}
/* Bind _still_ failed, try again without device local */
if (bindResult != 1)
{
memoryTypeIndex = 0;
requiredMemoryPropertyFlags = 0;
if (isRenderTarget)
{
Refresh_LogWarn("RenderTarget is allocated in host memory, pre-allocate your targets!");
}
Refresh_LogWarn("Out of device local memory, falling back to host memory");
while (VULKAN_INTERNAL_FindImageMemoryRequirements(
renderer,
image,
requiredMemoryPropertyFlags,
&memoryRequirements,
&memoryTypeIndex
)) {
bindResult = VULKAN_INTERNAL_BindResourceMemory(
renderer,
memoryTypeIndex,
&memoryRequirements,
isRenderTarget,
memoryRequirements.memoryRequirements.size,
VK_NULL_HANDLE,
image,
usedRegion
);
if (bindResult == 1)
{
break;
}
else /* Bind failed, try the next memory type */
{
memoryTypeIndex += 1;
}
}
}
return bindResult;
}
static uint8_t VULKAN_INTERNAL_BindMemoryForBuffer(
VulkanRenderer* renderer,
VkBuffer buffer,
VkDeviceSize size,
uint8_t requireHostVisible,
uint8_t requireHostLocal,
uint8_t preferDeviceLocal,
uint8_t dedicatedAllocation,
VulkanMemoryUsedRegion** usedRegion
) {
uint8_t bindResult = 0;
uint32_t memoryTypeIndex = 0;
VkMemoryPropertyFlags requiredMemoryPropertyFlags = 0;
VkMemoryPropertyFlags ignoredMemoryPropertyFlags = 0;
VkMemoryRequirements2KHR memoryRequirements =
{
VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
NULL
};
if (requireHostVisible)
{
requiredMemoryPropertyFlags =
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
}
if (requireHostLocal)
{
ignoredMemoryPropertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
}
else if (preferDeviceLocal)
{
requiredMemoryPropertyFlags |=
VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
}
while (VULKAN_INTERNAL_FindBufferMemoryRequirements(
renderer,
buffer,
requiredMemoryPropertyFlags,
ignoredMemoryPropertyFlags,
&memoryRequirements,
&memoryTypeIndex
)) {
bindResult = VULKAN_INTERNAL_BindResourceMemory(
renderer,
memoryTypeIndex,
&memoryRequirements,
dedicatedAllocation,
size,
buffer,
VK_NULL_HANDLE,
usedRegion
);
if (bindResult == 1)
{
break;
}
else /* Bind failed, try the next memory type */
{
memoryTypeIndex += 1;
}
}
/* Bind failed, try again with fallback flags */
if (bindResult != 1)
{
memoryTypeIndex = 0;
requiredMemoryPropertyFlags = 0;
if (requireHostVisible)
{
requiredMemoryPropertyFlags =
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
}
if (requireHostLocal)
{
ignoredMemoryPropertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
}
/* Warn (once) that unified device-local + host-visible memory was not found */
if (!renderer->unifiedMemoryWarning)
{
Refresh_LogWarn("No unified memory found, falling back to host memory");
renderer->unifiedMemoryWarning = 1;
}
while (VULKAN_INTERNAL_FindBufferMemoryRequirements(
renderer,
buffer,
requiredMemoryPropertyFlags,
ignoredMemoryPropertyFlags,
&memoryRequirements,
&memoryTypeIndex
)) {
bindResult = VULKAN_INTERNAL_BindResourceMemory(
renderer,
memoryTypeIndex,
&memoryRequirements,
dedicatedAllocation,
size,
buffer,
VK_NULL_HANDLE,
usedRegion
);
if (bindResult == 1)
{
break;
}
else /* Bind failed, try the next memory type */
{
memoryTypeIndex += 1;
}
}
}
return bindResult;
}
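/* A defragmentation candidate is any allocation that is still available for
 * allocation but holds more than one free region, i.e. has become fragmented.
 */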
static uint8_t VULKAN_INTERNAL_FindAllocationToDefragment(
VulkanRenderer *renderer,
VulkanMemorySubAllocator *allocator,
uint32_t *allocationIndexToDefrag
) {
uint32_t i, j;
for (i = 0; i < VK_MAX_MEMORY_TYPES; i += 1)
{
*allocator = renderer->memoryAllocator->subAllocators[i];
for (j = 0; j < allocator->allocationCount; j += 1)
{
if (allocator->allocations[j]->availableForAllocation == 1 && allocator->allocations[j]->freeRegionCount > 1)
{
*allocationIndexToDefrag = j;
return 1;
}
}
}
return 0;
}
/* Memory Barriers */
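/* The barrier helpers below translate a VulkanResourceAccessType into Vulkan
 * stage/access masks (and, for images, a layout) via the AccessMap table.
 * Access types greater than RESOURCE_ACCESS_END_OF_READ are writes, so only
 * those contribute a srcAccessMask that must be made visible downstream.
 */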
static void VULKAN_INTERNAL_BufferMemoryBarrier(
VulkanRenderer *renderer,
VkCommandBuffer commandBuffer,
VulkanResourceAccessType nextResourceAccessType,
VulkanBuffer *buffer
) {
VkPipelineStageFlags srcStages = 0;
VkPipelineStageFlags dstStages = 0;
VkBufferMemoryBarrier memoryBarrier;
VulkanResourceAccessType prevAccess, nextAccess;
const VulkanResourceAccessInfo *prevAccessInfo, *nextAccessInfo;
memoryBarrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
memoryBarrier.pNext = NULL;
memoryBarrier.srcAccessMask = 0;
memoryBarrier.dstAccessMask = 0;
memoryBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
memoryBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
memoryBarrier.buffer = buffer->buffer;
memoryBarrier.offset = 0;
memoryBarrier.size = buffer->size;
prevAccess = buffer->resourceAccessType;
prevAccessInfo = &AccessMap[prevAccess];
srcStages |= prevAccessInfo->stageMask;
if (prevAccess > RESOURCE_ACCESS_END_OF_READ)
{
memoryBarrier.srcAccessMask |= prevAccessInfo->accessMask;
}
nextAccess = nextResourceAccessType;
nextAccessInfo = &AccessMap[nextAccess];
dstStages |= nextAccessInfo->stageMask;
if (memoryBarrier.srcAccessMask != 0)
{
memoryBarrier.dstAccessMask |= nextAccessInfo->accessMask;
}
if (srcStages == 0)
{
srcStages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
}
if (dstStages == 0)
{
dstStages = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
}
renderer->vkCmdPipelineBarrier(
commandBuffer,
srcStages,
dstStages,
0,
0,
NULL,
1,
&memoryBarrier,
0,
NULL
);
buffer->resourceAccessType = nextResourceAccessType;
}
static void VULKAN_INTERNAL_ImageMemoryBarrier(
VulkanRenderer *renderer,
VkCommandBuffer commandBuffer,
VulkanResourceAccessType nextAccess,
VkImageAspectFlags aspectMask,
uint32_t baseLayer,
uint32_t layerCount,
uint32_t baseLevel,
uint32_t levelCount,
uint8_t discardContents,
VkImage image,
VulkanResourceAccessType *resourceAccessType
) {
VkPipelineStageFlags srcStages = 0;
VkPipelineStageFlags dstStages = 0;
VkImageMemoryBarrier memoryBarrier;
VulkanResourceAccessType prevAccess;
const VulkanResourceAccessInfo *pPrevAccessInfo, *pNextAccessInfo;
memoryBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
memoryBarrier.pNext = NULL;
memoryBarrier.srcAccessMask = 0;
memoryBarrier.dstAccessMask = 0;
memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
memoryBarrier.newLayout = VK_IMAGE_LAYOUT_UNDEFINED;
memoryBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
memoryBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
memoryBarrier.image = image;
memoryBarrier.subresourceRange.aspectMask = aspectMask;
memoryBarrier.subresourceRange.baseArrayLayer = baseLayer;
memoryBarrier.subresourceRange.layerCount = layerCount;
memoryBarrier.subresourceRange.baseMipLevel = baseLevel;
memoryBarrier.subresourceRange.levelCount = levelCount;
prevAccess = *resourceAccessType;
pPrevAccessInfo = &AccessMap[prevAccess];
srcStages |= pPrevAccessInfo->stageMask;
if (prevAccess > RESOURCE_ACCESS_END_OF_READ)
{
memoryBarrier.srcAccessMask |= pPrevAccessInfo->accessMask;
}
if (discardContents)
{
memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
}
else
{
memoryBarrier.oldLayout = pPrevAccessInfo->imageLayout;
}
pNextAccessInfo = &AccessMap[nextAccess];
dstStages |= pNextAccessInfo->stageMask;
memoryBarrier.dstAccessMask |= pNextAccessInfo->accessMask;
memoryBarrier.newLayout = pNextAccessInfo->imageLayout;
if (srcStages == 0)
{
srcStages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
}
if (dstStages == 0)
{
dstStages = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
}
renderer->vkCmdPipelineBarrier(
commandBuffer,
srcStages,
dstStages,
0,
0,
NULL,
0,
NULL,
1,
&memoryBarrier
);
*resourceAccessType = nextAccess;
}
/* Resource tracking */
#define TRACK_RESOURCE(resource, type, array, count, capacity) \
uint32_t i; \
\
for (i = 0; i < commandBuffer->count; i += 1) \
{ \
if (commandBuffer->array[i] == resource) \
{ \
return; \
} \
} \
\
if (commandBuffer->count == commandBuffer->capacity) \
{ \
commandBuffer->capacity += 1; \
commandBuffer->array = SDL_realloc( \
commandBuffer->array, \
commandBuffer->capacity * sizeof(type) \
); \
} \
commandBuffer->array[commandBuffer->count] = resource; \
commandBuffer->count += 1; \
\
SDL_AtomicIncRef(&resource->referenceCount);
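/* TRACK_RESOURCE de-duplicates: a resource is appended to the command
 * buffer's tracking array (and its reference count incremented) only the
 * first time it is seen on that command buffer.
 */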
static void VULKAN_INTERNAL_TrackBuffer(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
VulkanBuffer *buffer
) {
TRACK_RESOURCE(
buffer,
VulkanBuffer*,
usedBuffers,
usedBufferCount,
usedBufferCapacity
)
}
static void VULKAN_INTERNAL_TrackTexture(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
VulkanTexture *texture
) {
TRACK_RESOURCE(
texture,
VulkanTexture*,
usedTextures,
usedTextureCount,
usedTextureCapacity
)
}
static void VULKAN_INTERNAL_TrackSampler(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
VulkanSampler *sampler
) {
TRACK_RESOURCE(
sampler,
VulkanSampler*,
usedSamplers,
usedSamplerCount,
usedSamplerCapacity
)
}
static void VULKAN_INTERNAL_TrackGraphicsPipeline(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
VulkanGraphicsPipeline *graphicsPipeline
) {
TRACK_RESOURCE(
graphicsPipeline,
VulkanGraphicsPipeline*,
usedGraphicsPipelines,
usedGraphicsPipelineCount,
usedGraphicsPipelineCapacity
)
}
static void VULKAN_INTERNAL_TrackComputePipeline(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
VulkanComputePipeline *computePipeline
) {
TRACK_RESOURCE(
computePipeline,
VulkanComputePipeline*,
usedComputePipelines,
usedComputePipelineCount,
usedComputePipelineCapacity
)
}
static void VULKAN_INTERNAL_TrackFramebuffer(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
VulkanFramebuffer *framebuffer
) {
TRACK_RESOURCE(
framebuffer,
VulkanFramebuffer*,
usedFramebuffers,
usedFramebufferCount,
usedFramebufferCapacity
);
}
static void VULKAN_INTERNAL_TrackComputeBuffer(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
VulkanBuffer *computeBuffer
) {
TRACK_RESOURCE(
computeBuffer,
VulkanBuffer*,
boundComputeBuffers,
boundComputeBufferCount,
boundComputeBufferCapacity
);
}
static void VULKAN_INTERNAL_TrackComputeTexture(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
VulkanTexture *computeTexture
) {
TRACK_RESOURCE(
computeTexture,
VulkanTexture*,
boundComputeTextures,
boundComputeTextureCount,
boundComputeTextureCapacity
);
}
/* For tracking Textures used in a copy pass. */
static void VULKAN_INTERNAL_TrackCopiedTexture(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
VulkanTexture *texture
) {
TRACK_RESOURCE(
texture,
VulkanTexture*,
copiedTextures,
copiedTextureCount,
copiedTextureCapacity
);
}
/* For tracking GpuBuffers used in a copy pass. */
static void VULKAN_INTERNAL_TrackCopiedBuffer(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
VulkanBuffer *buffer
) {
TRACK_RESOURCE(
buffer,
VulkanBuffer*,
copiedGpuBuffers,
copiedGpuBufferCount,
copiedGpuBufferCapacity
);
}
#undef TRACK_RESOURCE
/* Resource Disposal */
static void VULKAN_INTERNAL_QueueDestroyFramebuffer(
VulkanRenderer *renderer,
VulkanFramebuffer *framebuffer
) {
SDL_LockMutex(renderer->disposeLock);
EXPAND_ARRAY_IF_NEEDED(
renderer->framebuffersToDestroy,
VulkanFramebuffer*,
renderer->framebuffersToDestroyCount + 1,
renderer->framebuffersToDestroyCapacity,
renderer->framebuffersToDestroyCapacity * 2
)
renderer->framebuffersToDestroy[renderer->framebuffersToDestroyCount] = framebuffer;
renderer->framebuffersToDestroyCount += 1;
SDL_UnlockMutex(renderer->disposeLock);
}
static void VULKAN_INTERNAL_DestroyFramebuffer(
VulkanRenderer *renderer,
VulkanFramebuffer *framebuffer
) {
renderer->vkDestroyFramebuffer(
renderer->logicalDevice,
framebuffer->framebuffer,
NULL
);
SDL_free(framebuffer);
}
static void VULKAN_INTERNAL_RemoveFramebuffersContainingView(
VulkanRenderer *renderer,
VkImageView view
) {
FramebufferHash *hash;
int32_t i, j;
for (i = renderer->framebufferHashArray.count - 1; i >= 0; i -= 1)
{
hash = &renderer->framebufferHashArray.elements[i].key;
for (j = 0; j < hash->colorAttachmentCount; j += 1)
{
if (hash->colorAttachmentViews[j] == view)
{
VULKAN_INTERNAL_QueueDestroyFramebuffer(
renderer,
renderer->framebufferHashArray.elements[i].value
);
FramebufferHashArray_Remove(
&renderer->framebufferHashArray,
i
);
break;
}
}
}
}
static void VULKAN_INTERNAL_RemoveRenderTargetsContainingTexture(
VulkanRenderer *renderer,
VulkanTexture *texture
) {
RenderTargetHash *hash;
VkImageView *viewsToCheck;
int32_t viewsToCheckCount;
int32_t viewsToCheckCapacity;
int32_t i;
viewsToCheckCapacity = 16;
viewsToCheckCount = 0;
viewsToCheck = SDL_malloc(sizeof(VkImageView) * viewsToCheckCapacity);
SDL_LockMutex(renderer->renderTargetFetchLock);
for (i = renderer->renderTargetHashArray.count - 1; i >= 0; i -= 1)
{
hash = &renderer->renderTargetHashArray.elements[i].key;
if (hash->texture == texture)
{
EXPAND_ARRAY_IF_NEEDED(
viewsToCheck,
VkImageView,
viewsToCheckCount + 1,
viewsToCheckCapacity,
viewsToCheckCapacity * 2
);
viewsToCheck[viewsToCheckCount] = renderer->renderTargetHashArray.elements[i].value->view;
viewsToCheckCount += 1;
VULKAN_INTERNAL_DestroyRenderTarget(
renderer,
renderer->renderTargetHashArray.elements[i].value
);
RenderTargetHash_Remove(
&renderer->renderTargetHashArray,
i
);
}
}
SDL_UnlockMutex(renderer->renderTargetFetchLock);
SDL_LockMutex(renderer->framebufferFetchLock);
for (i = 0; i < viewsToCheckCount; i += 1)
{
VULKAN_INTERNAL_RemoveFramebuffersContainingView(
renderer,
viewsToCheck[i]
);
}
SDL_UnlockMutex(renderer->framebufferFetchLock);
SDL_free(viewsToCheck);
}
static void VULKAN_INTERNAL_DestroyTexture(
VulkanRenderer* renderer,
VulkanTexture* texture
) {
VULKAN_INTERNAL_RemoveRenderTargetsContainingTexture(
renderer,
texture
);
renderer->vkDestroyImageView(
renderer->logicalDevice,
texture->view,
NULL
);
renderer->vkDestroyImage(
renderer->logicalDevice,
texture->image,
NULL
);
VULKAN_INTERNAL_RemoveMemoryUsedRegion(
renderer,
texture->usedRegion
);
/* destroy the msaa texture, if there is one */
if (texture->msaaTex != NULL)
{
VULKAN_INTERNAL_DestroyTexture(
renderer,
texture->msaaTex
);
}
SDL_free(texture);
}
static void VULKAN_INTERNAL_DestroyRenderTarget(
VulkanRenderer *renderer,
VulkanRenderTarget *renderTarget
) {
renderer->vkDestroyImageView(
renderer->logicalDevice,
renderTarget->view,
NULL
);
SDL_free(renderTarget);
}
static void VULKAN_INTERNAL_DestroyBuffer(
VulkanRenderer* renderer,
VulkanBuffer* buffer
) {
renderer->vkDestroyBuffer(
renderer->logicalDevice,
buffer->buffer,
NULL
);
VULKAN_INTERNAL_RemoveMemoryUsedRegion(
renderer,
buffer->usedRegion
);
SDL_free(buffer);
}
static void VULKAN_INTERNAL_DestroyCommandPool(
VulkanRenderer *renderer,
VulkanCommandPool *commandPool
) {
uint32_t i;
VulkanCommandBuffer* commandBuffer;
renderer->vkDestroyCommandPool(
renderer->logicalDevice,
commandPool->commandPool,
NULL
);
for (i = 0; i < commandPool->inactiveCommandBufferCount; i += 1)
{
commandBuffer = commandPool->inactiveCommandBuffers[i];
SDL_free(commandBuffer->presentDatas);
SDL_free(commandBuffer->waitSemaphores);
SDL_free(commandBuffer->signalSemaphores);
SDL_free(commandBuffer->boundDescriptorSetDatas);
SDL_free(commandBuffer->boundComputeBuffers);
SDL_free(commandBuffer->boundComputeTextures);
SDL_free(commandBuffer->copiedGpuBuffers);
SDL_free(commandBuffer->copiedTextures);
SDL_free(commandBuffer->usedBuffers);
SDL_free(commandBuffer->usedTextures);
SDL_free(commandBuffer->usedSamplers);
SDL_free(commandBuffer->usedGraphicsPipelines);
SDL_free(commandBuffer->usedComputePipelines);
SDL_free(commandBuffer->usedFramebuffers);
SDL_free(commandBuffer);
}
SDL_free(commandPool->inactiveCommandBuffers);
SDL_free(commandPool);
}
static void VULKAN_INTERNAL_DestroyGraphicsPipeline(
VulkanRenderer *renderer,
VulkanGraphicsPipeline *graphicsPipeline
) {
renderer->vkDestroyPipeline(
renderer->logicalDevice,
graphicsPipeline->pipeline,
NULL
);
SDL_AtomicDecRef(&graphicsPipeline->vertexShaderModule->referenceCount);
SDL_AtomicDecRef(&graphicsPipeline->fragmentShaderModule->referenceCount);
SDL_free(graphicsPipeline);
}
static void VULKAN_INTERNAL_DestroyComputePipeline(
VulkanRenderer *renderer,
VulkanComputePipeline *computePipeline
) {
renderer->vkDestroyPipeline(
renderer->logicalDevice,
computePipeline->pipeline,
NULL
);
SDL_AtomicDecRef(&computePipeline->computeShaderModule->referenceCount);
SDL_free(computePipeline);
}
static void VULKAN_INTERNAL_DestroyShaderModule(
VulkanRenderer *renderer,
VulkanShaderModule *vulkanShaderModule
) {
renderer->vkDestroyShaderModule(
renderer->logicalDevice,
vulkanShaderModule->shaderModule,
NULL
);
SDL_free(vulkanShaderModule);
}
static void VULKAN_INTERNAL_DestroySampler(
VulkanRenderer *renderer,
VulkanSampler *vulkanSampler
) {
renderer->vkDestroySampler(
renderer->logicalDevice,
vulkanSampler->sampler,
NULL
);
SDL_free(vulkanSampler);
}
static void VULKAN_INTERNAL_DestroySwapchain(
VulkanRenderer* renderer,
WindowData *windowData
) {
uint32_t i;
VulkanSwapchainData *swapchainData;
if (windowData == NULL)
{
return;
}
swapchainData = windowData->swapchainData;
if (swapchainData == NULL)
{
return;
}
for (i = 0; i < swapchainData->imageCount; i += 1)
{
VULKAN_INTERNAL_RemoveRenderTargetsContainingTexture(
renderer,
swapchainData->textureContainers[i].vulkanTexture
);
renderer->vkDestroyImageView(
renderer->logicalDevice,
swapchainData->textureContainers[i].vulkanTexture->view,
NULL
);
SDL_free(swapchainData->textureContainers[i].vulkanTexture);
}
SDL_free(swapchainData->textureContainers);
renderer->vkDestroySwapchainKHR(
renderer->logicalDevice,
swapchainData->swapchain,
NULL
);
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
NULL
);
renderer->vkDestroySemaphore(
renderer->logicalDevice,
swapchainData->imageAvailableSemaphore,
NULL
);
renderer->vkDestroySemaphore(
renderer->logicalDevice,
swapchainData->renderFinishedSemaphore,
NULL
);
windowData->swapchainData = NULL;
SDL_free(swapchainData);
}
static void VULKAN_INTERNAL_DestroyDescriptorSetCache(
VulkanRenderer *renderer,
DescriptorSetCache *cache
) {
uint32_t i;
if (cache == NULL)
{
return;
}
for (i = 0; i < cache->descriptorPoolCount; i += 1)
{
renderer->vkDestroyDescriptorPool(
renderer->logicalDevice,
cache->descriptorPools[i],
NULL
);
}
SDL_free(cache->descriptorPools);
SDL_free(cache->inactiveDescriptorSets);
SDL_DestroyMutex(cache->lock);
SDL_free(cache);
}
/* Descriptor cache stuff */
static uint8_t VULKAN_INTERNAL_CreateDescriptorPool(
VulkanRenderer *renderer,
VkDescriptorType descriptorType,
uint32_t descriptorSetCount,
uint32_t descriptorCount,
VkDescriptorPool *pDescriptorPool
) {
VkResult vulkanResult;
VkDescriptorPoolSize descriptorPoolSize;
VkDescriptorPoolCreateInfo descriptorPoolInfo;
descriptorPoolSize.type = descriptorType;
descriptorPoolSize.descriptorCount = descriptorCount;
descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
descriptorPoolInfo.pNext = NULL;
descriptorPoolInfo.flags = 0;
descriptorPoolInfo.maxSets = descriptorSetCount;
descriptorPoolInfo.poolSizeCount = 1;
descriptorPoolInfo.pPoolSizes = &descriptorPoolSize;
vulkanResult = renderer->vkCreateDescriptorPool(
renderer->logicalDevice,
&descriptorPoolInfo,
NULL,
pDescriptorPool
);
if (vulkanResult != VK_SUCCESS)
{
LogVulkanResultAsError("vkCreateDescriptorPool", vulkanResult);
return 0;
}
return 1;
}
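/* Example: a sampler cache with bindingCount 2 initially requests
 * DESCRIPTOR_POOL_STARTING_SIZE sets and DESCRIPTOR_POOL_STARTING_SIZE * 2
 * descriptors from one pool; nextPoolSize in CreateDescriptorSetCache below
 * doubles the size of the next pool the cache creates.
 */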
static uint8_t VULKAN_INTERNAL_AllocateDescriptorSets(
VulkanRenderer *renderer,
VkDescriptorPool descriptorPool,
VkDescriptorSetLayout descriptorSetLayout,
uint32_t descriptorSetCount,
VkDescriptorSet *descriptorSetArray
) {
VkResult vulkanResult;
uint32_t i;
VkDescriptorSetAllocateInfo descriptorSetAllocateInfo;
VkDescriptorSetLayout *descriptorSetLayouts = SDL_stack_alloc(VkDescriptorSetLayout, descriptorSetCount);
for (i = 0; i < descriptorSetCount; i += 1)
{
descriptorSetLayouts[i] = descriptorSetLayout;
}
descriptorSetAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
descriptorSetAllocateInfo.pNext = NULL;
descriptorSetAllocateInfo.descriptorPool = descriptorPool;
descriptorSetAllocateInfo.descriptorSetCount = descriptorSetCount;
descriptorSetAllocateInfo.pSetLayouts = descriptorSetLayouts;
vulkanResult = renderer->vkAllocateDescriptorSets(
renderer->logicalDevice,
&descriptorSetAllocateInfo,
descriptorSetArray
);
if (vulkanResult != VK_SUCCESS)
{
LogVulkanResultAsError("vkAllocateDescriptorSets", vulkanResult);
SDL_stack_free(descriptorSetLayouts);
return 0;
}
SDL_stack_free(descriptorSetLayouts);
return 1;
}
static DescriptorSetCache* VULKAN_INTERNAL_CreateDescriptorSetCache(
VulkanRenderer *renderer,
VkDescriptorType descriptorType,
VkDescriptorSetLayout descriptorSetLayout,
uint32_t bindingCount
) {
DescriptorSetCache *descriptorSetCache = SDL_malloc(sizeof(DescriptorSetCache));
descriptorSetCache->lock = SDL_CreateMutex();
descriptorSetCache->descriptorSetLayout = descriptorSetLayout;
descriptorSetCache->bindingCount = bindingCount;
descriptorSetCache->descriptorType = descriptorType;
descriptorSetCache->descriptorPools = SDL_malloc(sizeof(VkDescriptorPool));
descriptorSetCache->descriptorPoolCount = 1;
descriptorSetCache->nextPoolSize = DESCRIPTOR_POOL_STARTING_SIZE * 2;
VULKAN_INTERNAL_CreateDescriptorPool(
renderer,
descriptorType,
DESCRIPTOR_POOL_STARTING_SIZE,
DESCRIPTOR_POOL_STARTING_SIZE * bindingCount,
&descriptorSetCache->descriptorPools[0]
);
descriptorSetCache->inactiveDescriptorSetCapacity = DESCRIPTOR_POOL_STARTING_SIZE;
descriptorSetCache->inactiveDescriptorSetCount = DESCRIPTOR_POOL_STARTING_SIZE;
descriptorSetCache->inactiveDescriptorSets = SDL_malloc(
sizeof(VkDescriptorSet) * DESCRIPTOR_POOL_STARTING_SIZE
);
VULKAN_INTERNAL_AllocateDescriptorSets(
renderer,
descriptorSetCache->descriptorPools[0],
descriptorSetCache->descriptorSetLayout,
DESCRIPTOR_POOL_STARTING_SIZE,
descriptorSetCache->inactiveDescriptorSets
);
return descriptorSetCache;
}
static VkDescriptorSetLayout VULKAN_INTERNAL_FetchDescriptorSetLayout(
VulkanRenderer *renderer,
VkDescriptorType descriptorType,
uint32_t bindingCount,
VkShaderStageFlagBits shaderStageFlagBit
) {
DescriptorSetLayoutHash descriptorSetLayoutHash;
VkDescriptorSetLayout descriptorSetLayout;
VkDescriptorSetLayoutBinding setLayoutBindings[MAX_TEXTURE_SAMPLERS];
VkDescriptorSetLayoutCreateInfo setLayoutCreateInfo;
VkResult vulkanResult;
uint32_t i;
if (bindingCount == 0)
{
if (shaderStageFlagBit == VK_SHADER_STAGE_VERTEX_BIT)
{
return renderer->emptyVertexSamplerLayout;
}
else if (shaderStageFlagBit == VK_SHADER_STAGE_FRAGMENT_BIT)
{
return renderer->emptyFragmentSamplerLayout;
}
else if (shaderStageFlagBit == VK_SHADER_STAGE_COMPUTE_BIT)
{
if (descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
{
return renderer->emptyComputeBufferDescriptorSetLayout;
}
else if (descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
{
return renderer->emptyComputeImageDescriptorSetLayout;
}
else
{
Refresh_LogError("Invalid descriptor type for compute shader: %d", descriptorType);
return NULL_DESC_LAYOUT;
}
}
else
{
Refresh_LogError("Invalid shader stage flag bit: %d", shaderStageFlagBit);
return NULL_DESC_LAYOUT;
}
}
descriptorSetLayoutHash.descriptorType = descriptorType;
descriptorSetLayoutHash.bindingCount = bindingCount;
descriptorSetLayoutHash.stageFlag = shaderStageFlagBit;
descriptorSetLayout = DescriptorSetLayoutHashTable_Fetch(
&renderer->descriptorSetLayoutHashTable,
descriptorSetLayoutHash
);
if (descriptorSetLayout != VK_NULL_HANDLE)
{
return descriptorSetLayout;
}
for (i = 0; i < bindingCount; i += 1)
{
setLayoutBindings[i].binding = i;
setLayoutBindings[i].descriptorCount = 1;
setLayoutBindings[i].descriptorType = descriptorType;
setLayoutBindings[i].stageFlags = shaderStageFlagBit;
setLayoutBindings[i].pImmutableSamplers = NULL;
}
setLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
setLayoutCreateInfo.pNext = NULL;
setLayoutCreateInfo.flags = 0;
setLayoutCreateInfo.bindingCount = bindingCount;
setLayoutCreateInfo.pBindings = setLayoutBindings;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&descriptorSetLayout
);
if (vulkanResult != VK_SUCCESS)
{
LogVulkanResultAsError("vkCreateDescriptorSetLayout", vulkanResult);
return NULL_DESC_LAYOUT;
}
DescriptorSetLayoutHashTable_Insert(
&renderer->descriptorSetLayoutHashTable,
descriptorSetLayoutHash,
descriptorSetLayout
);
return descriptorSetLayout;
}
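/* Layouts are cached by (descriptorType, bindingCount, stageFlag): a cache
 * miss creates the layout and inserts it into the hash table, so pipelines
 * with identical binding shapes share one VkDescriptorSetLayout.
 */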
static VulkanGraphicsPipelineLayout* VULKAN_INTERNAL_FetchGraphicsPipelineLayout(
VulkanRenderer *renderer,
uint32_t vertexSamplerBindingCount,
uint32_t fragmentSamplerBindingCount
) {
VkDescriptorSetLayout setLayouts[4];
GraphicsPipelineLayoutHash pipelineLayoutHash;
VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo;
VkResult vulkanResult;
VulkanGraphicsPipelineLayout *vulkanGraphicsPipelineLayout;
pipelineLayoutHash.vertexSamplerLayout = VULKAN_INTERNAL_FetchDescriptorSetLayout(
renderer,
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
vertexSamplerBindingCount,
VK_SHADER_STAGE_VERTEX_BIT
);
pipelineLayoutHash.fragmentSamplerLayout = VULKAN_INTERNAL_FetchDescriptorSetLayout(
renderer,
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
fragmentSamplerBindingCount,
VK_SHADER_STAGE_FRAGMENT_BIT
);
pipelineLayoutHash.vertexUniformLayout = renderer->vertexUniformDescriptorSetLayout;
pipelineLayoutHash.fragmentUniformLayout = renderer->fragmentUniformDescriptorSetLayout;
vulkanGraphicsPipelineLayout = GraphicsPipelineLayoutHashArray_Fetch(
&renderer->graphicsPipelineLayoutHashTable,
pipelineLayoutHash
);
if (vulkanGraphicsPipelineLayout != NULL)
{
return vulkanGraphicsPipelineLayout;
}
vulkanGraphicsPipelineLayout = SDL_malloc(sizeof(VulkanGraphicsPipelineLayout));
setLayouts[0] = pipelineLayoutHash.vertexSamplerLayout;
setLayouts[1] = pipelineLayoutHash.fragmentSamplerLayout;
setLayouts[2] = renderer->vertexUniformDescriptorSetLayout;
setLayouts[3] = renderer->fragmentUniformDescriptorSetLayout;
pipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
pipelineLayoutCreateInfo.pNext = NULL;
pipelineLayoutCreateInfo.flags = 0;
pipelineLayoutCreateInfo.setLayoutCount = 4;
pipelineLayoutCreateInfo.pSetLayouts = setLayouts;
pipelineLayoutCreateInfo.pushConstantRangeCount = 0;
pipelineLayoutCreateInfo.pPushConstantRanges = NULL;
vulkanResult = renderer->vkCreatePipelineLayout(
renderer->logicalDevice,
&pipelineLayoutCreateInfo,
NULL,
&vulkanGraphicsPipelineLayout->pipelineLayout
);
if (vulkanResult != VK_SUCCESS)
{
LogVulkanResultAsError("vkCreatePipelineLayout", vulkanResult);
		SDL_free(vulkanGraphicsPipelineLayout);
		return NULL;
}
GraphicsPipelineLayoutHashArray_Insert(
&renderer->graphicsPipelineLayoutHashTable,
pipelineLayoutHash,
vulkanGraphicsPipelineLayout
);
	/* If the binding count is 0, every pipeline can share the same empty
	 * descriptor set, so no cache is needed.
	 */
if (vertexSamplerBindingCount == 0)
{
vulkanGraphicsPipelineLayout->vertexSamplerDescriptorSetCache = NULL;
}
else
{
vulkanGraphicsPipelineLayout->vertexSamplerDescriptorSetCache =
VULKAN_INTERNAL_CreateDescriptorSetCache(
renderer,
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
pipelineLayoutHash.vertexSamplerLayout,
vertexSamplerBindingCount
);
}
if (fragmentSamplerBindingCount == 0)
{
vulkanGraphicsPipelineLayout->fragmentSamplerDescriptorSetCache = NULL;
}
else
{
vulkanGraphicsPipelineLayout->fragmentSamplerDescriptorSetCache =
VULKAN_INTERNAL_CreateDescriptorSetCache(
renderer,
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
pipelineLayoutHash.fragmentSamplerLayout,
fragmentSamplerBindingCount
);
}
return vulkanGraphicsPipelineLayout;
}
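
/*
 * Note: every graphics pipeline layout follows the same four-set convention:
 * set 0 = vertex samplers, set 1 = fragment samplers, set 2 = vertex uniforms
 * (dynamic), set 3 = fragment uniforms (dynamic). The draw calls below always
 * bind all four sets with exactly two dynamic offsets.
 */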
/* Data Buffer */
static VulkanBuffer* VULKAN_INTERNAL_CreateBuffer(
VulkanRenderer *renderer,
VkDeviceSize size,
VulkanResourceAccessType resourceAccessType,
VkBufferUsageFlags usage,
uint8_t requireHostVisible,
uint8_t requireHostLocal,
uint8_t preferDeviceLocal,
uint8_t dedicatedAllocation
) {
VulkanBuffer* buffer;
VkResult vulkanResult;
VkBufferCreateInfo bufferCreateInfo;
uint8_t bindResult;
buffer = SDL_malloc(sizeof(VulkanBuffer));
buffer->size = size;
buffer->resourceAccessType = resourceAccessType;
buffer->usage = usage;
buffer->requireHostVisible = requireHostVisible;
buffer->requireHostLocal = requireHostLocal;
buffer->preferDeviceLocal = preferDeviceLocal;
bufferCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
bufferCreateInfo.pNext = NULL;
bufferCreateInfo.flags = 0;
bufferCreateInfo.size = size;
bufferCreateInfo.usage = usage;
bufferCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
bufferCreateInfo.queueFamilyIndexCount = 1;
bufferCreateInfo.pQueueFamilyIndices = &renderer->queueFamilyIndex;
vulkanResult = renderer->vkCreateBuffer(
renderer->logicalDevice,
&bufferCreateInfo,
NULL,
&buffer->buffer
);
VULKAN_ERROR_CHECK(vulkanResult, vkCreateBuffer, 0)
bindResult = VULKAN_INTERNAL_BindMemoryForBuffer(
renderer,
buffer->buffer,
buffer->size,
buffer->requireHostVisible,
buffer->requireHostLocal,
buffer->preferDeviceLocal,
dedicatedAllocation,
&buffer->usedRegion
);
if (bindResult != 1)
{
renderer->vkDestroyBuffer(
renderer->logicalDevice,
buffer->buffer,
NULL);
return NULL;
}
	buffer->usedRegion->vulkanBuffer = buffer; /* back-reference so defrag can map a used region to its buffer */
	buffer->container = NULL;
SDL_AtomicSet(&buffer->referenceCount, 0);
return buffer;
}
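
/*
 * Usage sketch (illustrative): a host-visible buffer suitable for staging
 * uploads. The trailing flags map to (requireHostVisible, requireHostLocal,
 * preferDeviceLocal, dedicatedAllocation), in that order; the access type
 * here is just a placeholder starting state.
 *
 *     VulkanBuffer *staging = VULKAN_INTERNAL_CreateBuffer(
 *         renderer,
 *         TRANSFER_BUFFER_STARTING_SIZE,
 *         RESOURCE_ACCESS_NONE,
 *         VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
 *         1, 0, 0, 0
 *     );
 */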
/* Uniform buffer functions */
static uint8_t VULKAN_INTERNAL_AddUniformDescriptorPool(
VulkanRenderer *renderer,
VulkanUniformDescriptorPool *vulkanUniformDescriptorPool
) {
vulkanUniformDescriptorPool->descriptorPools = SDL_realloc(
vulkanUniformDescriptorPool->descriptorPools,
sizeof(VkDescriptorPool) * (vulkanUniformDescriptorPool->descriptorPoolCount + 1)
);
if (!VULKAN_INTERNAL_CreateDescriptorPool(
renderer,
VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
DESCRIPTOR_POOL_STARTING_SIZE,
DESCRIPTOR_POOL_STARTING_SIZE,
&vulkanUniformDescriptorPool->descriptorPools[vulkanUniformDescriptorPool->descriptorPoolCount]
)) {
Refresh_LogError("Failed to create descriptor pool!");
return 0;
}
vulkanUniformDescriptorPool->descriptorPoolCount += 1;
vulkanUniformDescriptorPool->availableDescriptorSetCount += DESCRIPTOR_POOL_STARTING_SIZE;
return 1;
}
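
/* Each added pool contributes DESCRIPTOR_POOL_STARTING_SIZE descriptor sets;
 * callers presumably grow the pool list whenever availableDescriptorSetCount
 * reaches zero.
 */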
static VulkanUniformBufferObject* VULKAN_INTERNAL_CreateUniformBufferObject(
VulkanRenderer *renderer,
VulkanUniformBufferType uniformBufferType
) {
VulkanUniformBufferObject* uniformBufferObject = SDL_malloc(sizeof(VulkanUniformBufferObject));
VulkanResourceAccessType resourceAccessType;
VkDescriptorSetLayout descriptorSetLayout;
VkWriteDescriptorSet writeDescriptorSet;
VkDescriptorBufferInfo descriptorBufferInfo;
if (uniformBufferType == UNIFORM_BUFFER_VERTEX)
{
resourceAccessType = RESOURCE_ACCESS_VERTEX_SHADER_READ_UNIFORM_BUFFER;
descriptorSetLayout = renderer->vertexUniformDescriptorSetLayout;
}
else if (uniformBufferType == UNIFORM_BUFFER_FRAGMENT)
{
resourceAccessType = RESOURCE_ACCESS_FRAGMENT_SHADER_READ_UNIFORM_BUFFER;
descriptorSetLayout = renderer->fragmentUniformDescriptorSetLayout;
}
else if (uniformBufferType == UNIFORM_BUFFER_COMPUTE)
{
resourceAccessType = RESOURCE_ACCESS_COMPUTE_SHADER_READ_UNIFORM_BUFFER;
descriptorSetLayout = renderer->computeUniformDescriptorSetLayout;
}
else
{
Refresh_LogError("Unrecognized uniform buffer type!");
		SDL_free(uniformBufferObject);
		return NULL;
}
/* Allocate backing buffer */
uniformBufferObject->buffer = VULKAN_INTERNAL_CreateBuffer(
renderer,
UBO_BUFFER_SIZE,
resourceAccessType,
VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
1,
0,
1,
1
	);

	if (uniformBufferObject->buffer == NULL)
	{
		Refresh_LogError("Failed to create uniform buffer!");
		SDL_free(uniformBufferObject);
		return NULL;
	}
uniformBufferObject->lock = SDL_CreateMutex();
uniformBufferObject->currentOffset = 0;
uniformBufferObject->type = uniformBufferType;
/* Allocate a descriptor set for the uniform buffer */
VULKAN_INTERNAL_AllocateDescriptorSets(
renderer,
renderer->defaultDescriptorPool,
descriptorSetLayout,
1,
&uniformBufferObject->descriptorSet
);
/* Update the descriptor set for the first and last time! */
descriptorBufferInfo.buffer = uniformBufferObject->buffer->buffer;
descriptorBufferInfo.offset = 0;
descriptorBufferInfo.range = UBO_SECTION_SIZE;
writeDescriptorSet.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
writeDescriptorSet.pNext = NULL;
writeDescriptorSet.descriptorCount = 1;
writeDescriptorSet.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
writeDescriptorSet.dstArrayElement = 0;
writeDescriptorSet.dstBinding = 0;
writeDescriptorSet.dstSet = uniformBufferObject->descriptorSet;
writeDescriptorSet.pBufferInfo = &descriptorBufferInfo;
writeDescriptorSet.pImageInfo = NULL;
writeDescriptorSet.pTexelBufferView = NULL;
renderer->vkUpdateDescriptorSets(
renderer->logicalDevice,
1,
&writeDescriptorSet,
0,
NULL
);
/* Permanently map the memory */
renderer->vkMapMemory(
renderer->logicalDevice,
uniformBufferObject->buffer->usedRegion->allocation->memory,
0,
VK_WHOLE_SIZE,
0,
(void**) &uniformBufferObject->mapPointer
);
return uniformBufferObject;
}
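
/*
 * The UBO is one long-lived buffer carved into UBO_SECTION_SIZE slices: the
 * descriptor set is written exactly once with range = UBO_SECTION_SIZE, then
 * re-bound on each draw/dispatch with a different dynamic offset, avoiding
 * per-frame descriptor writes. The backing memory stays mapped for the
 * lifetime of the object.
 */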
/* Buffer indirection so we can cleanly defrag GpuBuffers */
static VulkanBufferContainer* VULKAN_INTERNAL_CreateBufferContainer(
VulkanRenderer *renderer,
uint32_t sizeInBytes,
VulkanResourceAccessType resourceAccessType,
VkBufferUsageFlags usageFlags
) {
VulkanBufferContainer* bufferContainer;
VulkanBuffer* buffer;
/* always set transfer bits so we can defrag */
usageFlags |= VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
buffer = VULKAN_INTERNAL_CreateBuffer(
renderer,
sizeInBytes,
resourceAccessType,
usageFlags,
0,
0,
1,
0
);
if (buffer == NULL)
{
Refresh_LogError("Failed to create buffer!");
return NULL;
}
bufferContainer = SDL_malloc(sizeof(VulkanBufferContainer));
bufferContainer->vulkanBuffer = buffer;
buffer->container = bufferContainer;
	return bufferContainer;
}
static void VULKAN_INTERNAL_DestroyUniformBufferObject(
VulkanRenderer *renderer,
VulkanUniformBufferObject *uniformBufferObject
) {
renderer->vkUnmapMemory(
renderer->logicalDevice,
uniformBufferObject->buffer->usedRegion->allocation->memory
);
VULKAN_INTERNAL_DestroyBuffer(renderer, uniformBufferObject->buffer);
SDL_DestroyMutex(uniformBufferObject->lock);
SDL_free(uniformBufferObject);
}
/* Swapchain */
static uint8_t VULKAN_INTERNAL_QuerySwapChainSupport(
VulkanRenderer *renderer,
VkPhysicalDevice physicalDevice,
VkSurfaceKHR surface,
SwapChainSupportDetails *outputDetails
) {
VkResult result;
VkBool32 supportsPresent;
renderer->vkGetPhysicalDeviceSurfaceSupportKHR(
physicalDevice,
renderer->queueFamilyIndex,
surface,
&supportsPresent
);
if (!supportsPresent)
{
Refresh_LogWarn("This surface does not support presenting!");
return 0;
}
/* Initialize these in case anything fails */
outputDetails->formatsLength = 0;
outputDetails->presentModesLength = 0;
/* Run the device surface queries */
result = renderer->vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
physicalDevice,
surface,
&outputDetails->capabilities
);
VULKAN_ERROR_CHECK(result, vkGetPhysicalDeviceSurfaceCapabilitiesKHR, 0)
if (!(outputDetails->capabilities.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR))
{
Refresh_LogWarn("Opaque presentation unsupported! Expect weird transparency bugs!");
}
result = renderer->vkGetPhysicalDeviceSurfaceFormatsKHR(
physicalDevice,
surface,
&outputDetails->formatsLength,
NULL
);
VULKAN_ERROR_CHECK(result, vkGetPhysicalDeviceSurfaceFormatsKHR, 0)
result = renderer->vkGetPhysicalDeviceSurfacePresentModesKHR(
physicalDevice,
surface,
&outputDetails->presentModesLength,
NULL
);
VULKAN_ERROR_CHECK(result, vkGetPhysicalDeviceSurfacePresentModesKHR, 0)
/* Generate the arrays, if applicable */
if (outputDetails->formatsLength != 0)
{
outputDetails->formats = (VkSurfaceFormatKHR*) SDL_malloc(
sizeof(VkSurfaceFormatKHR) * outputDetails->formatsLength
);
if (!outputDetails->formats)
{
SDL_OutOfMemory();
return 0;
}
result = renderer->vkGetPhysicalDeviceSurfaceFormatsKHR(
physicalDevice,
surface,
&outputDetails->formatsLength,
outputDetails->formats
);
if (result != VK_SUCCESS)
{
Refresh_LogError(
"vkGetPhysicalDeviceSurfaceFormatsKHR: %s",
VkErrorMessages(result)
);
SDL_free(outputDetails->formats);
return 0;
}
}
if (outputDetails->presentModesLength != 0)
{
outputDetails->presentModes = (VkPresentModeKHR*) SDL_malloc(
sizeof(VkPresentModeKHR) * outputDetails->presentModesLength
);
if (!outputDetails->presentModes)
{
SDL_OutOfMemory();
return 0;
}
result = renderer->vkGetPhysicalDeviceSurfacePresentModesKHR(
physicalDevice,
surface,
&outputDetails->presentModesLength,
outputDetails->presentModes
);
if (result != VK_SUCCESS)
{
Refresh_LogError(
"vkGetPhysicalDeviceSurfacePresentModesKHR: %s",
VkErrorMessages(result)
);
SDL_free(outputDetails->formats);
SDL_free(outputDetails->presentModes);
return 0;
}
}
	/* If we made it here, all the queries were successful. This does NOT
* necessarily mean there are any supported formats or present modes!
*/
return 1;
}
static uint8_t VULKAN_INTERNAL_ChooseSwapSurfaceFormat(
VkFormat desiredFormat,
VkSurfaceFormatKHR *availableFormats,
uint32_t availableFormatsLength,
VkSurfaceFormatKHR *outputFormat
) {
uint32_t i;
for (i = 0; i < availableFormatsLength; i += 1)
{
if ( availableFormats[i].format == desiredFormat &&
availableFormats[i].colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR )
{
*outputFormat = availableFormats[i];
return 1;
}
}
return 0;
}
static uint8_t VULKAN_INTERNAL_ChooseSwapPresentMode(
Refresh_PresentMode desiredPresentInterval,
VkPresentModeKHR *availablePresentModes,
uint32_t availablePresentModesLength,
VkPresentModeKHR *outputPresentMode
) {
#define CHECK_MODE(m) \
for (i = 0; i < availablePresentModesLength; i += 1) \
{ \
if (availablePresentModes[i] == m) \
{ \
*outputPresentMode = m; \
return 1; \
} \
	}
uint32_t i;
if (desiredPresentInterval == REFRESH_PRESENTMODE_IMMEDIATE)
{
CHECK_MODE(VK_PRESENT_MODE_IMMEDIATE_KHR)
}
else if (desiredPresentInterval == REFRESH_PRESENTMODE_MAILBOX)
{
CHECK_MODE(VK_PRESENT_MODE_MAILBOX_KHR)
}
else if (desiredPresentInterval == REFRESH_PRESENTMODE_FIFO)
{
CHECK_MODE(VK_PRESENT_MODE_FIFO_KHR)
}
else if (desiredPresentInterval == REFRESH_PRESENTMODE_FIFO_RELAXED)
{
CHECK_MODE(VK_PRESENT_MODE_FIFO_RELAXED_KHR)
}
else
{
Refresh_LogError(
"Unrecognized PresentInterval: %d",
desiredPresentInterval
);
return 0;
}
#undef CHECK_MODE
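	/* FIFO support is required by the Vulkan spec, so it is the safe fallback. */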
*outputPresentMode = VK_PRESENT_MODE_FIFO_KHR;
return 1;
}
static uint8_t VULKAN_INTERNAL_CreateSwapchain(
VulkanRenderer *renderer,
WindowData *windowData
) {
VkResult vulkanResult;
VulkanSwapchainData *swapchainData;
VkSwapchainCreateInfoKHR swapchainCreateInfo;
VkImage *swapchainImages;
VkImageViewCreateInfo imageViewCreateInfo;
VkSemaphoreCreateInfo semaphoreCreateInfo;
SwapChainSupportDetails swapchainSupportDetails;
int32_t drawableWidth, drawableHeight;
uint32_t i;
swapchainData = SDL_malloc(sizeof(VulkanSwapchainData));
/* Each swapchain must have its own surface. */
if (!SDL_Vulkan_CreateSurface(
(SDL_Window*) windowData->windowHandle,
renderer->instance,
&swapchainData->surface
)) {
SDL_free(swapchainData);
Refresh_LogError(
"SDL_Vulkan_CreateSurface failed: %s",
SDL_GetError()
);
return 0;
}
if (!VULKAN_INTERNAL_QuerySwapChainSupport(
renderer,
renderer->physicalDevice,
swapchainData->surface,
&swapchainSupportDetails
)) {
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
NULL
);
if (swapchainSupportDetails.formatsLength > 0)
{
SDL_free(swapchainSupportDetails.formats);
}
if (swapchainSupportDetails.presentModesLength > 0)
{
SDL_free(swapchainSupportDetails.presentModes);
}
SDL_free(swapchainData);
Refresh_LogError("Device does not support swap chain creation");
return 0;
}
if ( swapchainSupportDetails.capabilities.currentExtent.width == 0 ||
swapchainSupportDetails.capabilities.currentExtent.height == 0)
{
		/* Not an error: the window is minimized, so there is nothing to present! */
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
NULL
);
if (swapchainSupportDetails.formatsLength > 0)
{
SDL_free(swapchainSupportDetails.formats);
}
if (swapchainSupportDetails.presentModesLength > 0)
{
SDL_free(swapchainSupportDetails.presentModes);
}
SDL_free(swapchainData);
return 0;
}
swapchainData->swapchainFormat = VK_FORMAT_R8G8B8A8_UNORM;
swapchainData->swapchainSwizzle.r = VK_COMPONENT_SWIZZLE_IDENTITY;
swapchainData->swapchainSwizzle.g = VK_COMPONENT_SWIZZLE_IDENTITY;
swapchainData->swapchainSwizzle.b = VK_COMPONENT_SWIZZLE_IDENTITY;
swapchainData->swapchainSwizzle.a = VK_COMPONENT_SWIZZLE_IDENTITY;
if (!VULKAN_INTERNAL_ChooseSwapSurfaceFormat(
swapchainData->swapchainFormat,
swapchainSupportDetails.formats,
swapchainSupportDetails.formatsLength,
&swapchainData->surfaceFormat
)) {
swapchainData->swapchainFormat = VK_FORMAT_B8G8R8A8_UNORM;
swapchainData->swapchainSwizzle.r = VK_COMPONENT_SWIZZLE_B;
swapchainData->swapchainSwizzle.g = VK_COMPONENT_SWIZZLE_G;
swapchainData->swapchainSwizzle.b = VK_COMPONENT_SWIZZLE_R;
swapchainData->swapchainSwizzle.a = VK_COMPONENT_SWIZZLE_A;
if (!VULKAN_INTERNAL_ChooseSwapSurfaceFormat(
swapchainData->swapchainFormat,
swapchainSupportDetails.formats,
swapchainSupportDetails.formatsLength,
&swapchainData->surfaceFormat
)) {
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
NULL
);
if (swapchainSupportDetails.formatsLength > 0)
{
SDL_free(swapchainSupportDetails.formats);
}
if (swapchainSupportDetails.presentModesLength > 0)
{
SDL_free(swapchainSupportDetails.presentModes);
}
SDL_free(swapchainData);
Refresh_LogError("Device does not support swap chain format");
return 0;
}
}
if (!VULKAN_INTERNAL_ChooseSwapPresentMode(
windowData->preferredPresentMode,
swapchainSupportDetails.presentModes,
swapchainSupportDetails.presentModesLength,
&swapchainData->presentMode
)) {
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
NULL
);
if (swapchainSupportDetails.formatsLength > 0)
{
SDL_free(swapchainSupportDetails.formats);
}
if (swapchainSupportDetails.presentModesLength > 0)
{
SDL_free(swapchainSupportDetails.presentModes);
}
SDL_free(swapchainData);
Refresh_LogError("Device does not support swap chain present mode");
return 0;
}
SDL_Vulkan_GetDrawableSize(
(SDL_Window*) windowData->windowHandle,
&drawableWidth,
&drawableHeight
);
if ( drawableWidth < swapchainSupportDetails.capabilities.minImageExtent.width ||
drawableWidth > swapchainSupportDetails.capabilities.maxImageExtent.width ||
drawableHeight < swapchainSupportDetails.capabilities.minImageExtent.height ||
drawableHeight > swapchainSupportDetails.capabilities.maxImageExtent.height )
{
if (swapchainSupportDetails.capabilities.currentExtent.width != UINT32_MAX)
{
drawableWidth = VULKAN_INTERNAL_clamp(
drawableWidth,
swapchainSupportDetails.capabilities.minImageExtent.width,
swapchainSupportDetails.capabilities.maxImageExtent.width
);
drawableHeight = VULKAN_INTERNAL_clamp(
drawableHeight,
swapchainSupportDetails.capabilities.minImageExtent.height,
swapchainSupportDetails.capabilities.maxImageExtent.height
);
}
else
{
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
NULL
);
if (swapchainSupportDetails.formatsLength > 0)
{
SDL_free(swapchainSupportDetails.formats);
}
if (swapchainSupportDetails.presentModesLength > 0)
{
SDL_free(swapchainSupportDetails.presentModes);
}
SDL_free(swapchainData);
Refresh_LogError("No fallback swapchain size available!");
return 0;
}
}
swapchainData->extent.width = drawableWidth;
swapchainData->extent.height = drawableHeight;
swapchainData->imageCount = swapchainSupportDetails.capabilities.minImageCount + 1;
if ( swapchainSupportDetails.capabilities.maxImageCount > 0 &&
swapchainData->imageCount > swapchainSupportDetails.capabilities.maxImageCount )
{
swapchainData->imageCount = swapchainSupportDetails.capabilities.maxImageCount;
}
if (swapchainData->presentMode == VK_PRESENT_MODE_MAILBOX_KHR)
{
/* Required for proper triple-buffering.
* Note that this is below the above maxImageCount check!
* If the driver advertises MAILBOX but does not support 3 swap
* images, it's not real mailbox support, so let it fail hard.
* -flibit
*/
swapchainData->imageCount = SDL_max(swapchainData->imageCount, 3);
}
swapchainCreateInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
swapchainCreateInfo.pNext = NULL;
swapchainCreateInfo.flags = 0;
swapchainCreateInfo.surface = swapchainData->surface;
swapchainCreateInfo.minImageCount = swapchainData->imageCount;
swapchainCreateInfo.imageFormat = swapchainData->surfaceFormat.format;
swapchainCreateInfo.imageColorSpace = swapchainData->surfaceFormat.colorSpace;
swapchainCreateInfo.imageExtent = swapchainData->extent;
swapchainCreateInfo.imageArrayLayers = 1;
swapchainCreateInfo.imageUsage =
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
VK_IMAGE_USAGE_TRANSFER_DST_BIT;
swapchainCreateInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
swapchainCreateInfo.queueFamilyIndexCount = 0;
swapchainCreateInfo.pQueueFamilyIndices = NULL;
swapchainCreateInfo.preTransform = swapchainSupportDetails.capabilities.currentTransform;
swapchainCreateInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
swapchainCreateInfo.presentMode = swapchainData->presentMode;
swapchainCreateInfo.clipped = VK_TRUE;
swapchainCreateInfo.oldSwapchain = VK_NULL_HANDLE;
vulkanResult = renderer->vkCreateSwapchainKHR(
renderer->logicalDevice,
&swapchainCreateInfo,
NULL,
&swapchainData->swapchain
);
if (swapchainSupportDetails.formatsLength > 0)
{
SDL_free(swapchainSupportDetails.formats);
}
if (swapchainSupportDetails.presentModesLength > 0)
{
SDL_free(swapchainSupportDetails.presentModes);
}
if (vulkanResult != VK_SUCCESS)
{
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
NULL
);
SDL_free(swapchainData);
LogVulkanResultAsError("vkCreateSwapchainKHR", vulkanResult);
return 0;
}
renderer->vkGetSwapchainImagesKHR(
renderer->logicalDevice,
swapchainData->swapchain,
&swapchainData->imageCount,
NULL
);
swapchainData->textureContainers = SDL_malloc(
sizeof(VulkanTextureContainer) * swapchainData->imageCount
);
if (!swapchainData->textureContainers)
{
SDL_OutOfMemory();
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
NULL
);
SDL_free(swapchainData);
return 0;
}
swapchainImages = SDL_stack_alloc(VkImage, swapchainData->imageCount);
renderer->vkGetSwapchainImagesKHR(
renderer->logicalDevice,
swapchainData->swapchain,
&swapchainData->imageCount,
swapchainImages
);
imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
imageViewCreateInfo.pNext = NULL;
imageViewCreateInfo.flags = 0;
imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
imageViewCreateInfo.format = swapchainData->surfaceFormat.format;
imageViewCreateInfo.components = swapchainData->swapchainSwizzle;
imageViewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
imageViewCreateInfo.subresourceRange.baseMipLevel = 0;
imageViewCreateInfo.subresourceRange.levelCount = 1;
imageViewCreateInfo.subresourceRange.baseArrayLayer = 0;
imageViewCreateInfo.subresourceRange.layerCount = 1;
for (i = 0; i < swapchainData->imageCount; i += 1)
{
swapchainData->textureContainers[i].vulkanTexture = SDL_malloc(sizeof(VulkanTexture));
swapchainData->textureContainers[i].vulkanTexture->image = swapchainImages[i];
imageViewCreateInfo.image = swapchainImages[i];
vulkanResult = renderer->vkCreateImageView(
renderer->logicalDevice,
&imageViewCreateInfo,
NULL,
&swapchainData->textureContainers[i].vulkanTexture->view
);
if (vulkanResult != VK_SUCCESS)
{
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
NULL
);
SDL_stack_free(swapchainImages);
SDL_free(swapchainData->textureContainers);
SDL_free(swapchainData);
LogVulkanResultAsError("vkCreateImageView", vulkanResult);
return 0;
}
swapchainData->textureContainers[i].vulkanTexture->resourceAccessType = RESOURCE_ACCESS_NONE;
/* Swapchain memory is managed by the driver */
swapchainData->textureContainers[i].vulkanTexture->usedRegion = NULL;
swapchainData->textureContainers[i].vulkanTexture->dimensions = swapchainData->extent;
swapchainData->textureContainers[i].vulkanTexture->format = swapchainData->swapchainFormat;
swapchainData->textureContainers[i].vulkanTexture->is3D = 0;
swapchainData->textureContainers[i].vulkanTexture->isCube = 0;
swapchainData->textureContainers[i].vulkanTexture->layerCount = 1;
swapchainData->textureContainers[i].vulkanTexture->levelCount = 1;
swapchainData->textureContainers[i].vulkanTexture->sampleCount = REFRESH_SAMPLECOUNT_1;
swapchainData->textureContainers[i].vulkanTexture->usageFlags =
VK_IMAGE_USAGE_TRANSFER_DST_BIT |
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
swapchainData->textureContainers[i].vulkanTexture->aspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
swapchainData->textureContainers[i].vulkanTexture->msaaTex = NULL;
}
SDL_stack_free(swapchainImages);
semaphoreCreateInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
semaphoreCreateInfo.pNext = NULL;
semaphoreCreateInfo.flags = 0;
renderer->vkCreateSemaphore(
renderer->logicalDevice,
&semaphoreCreateInfo,
NULL,
&swapchainData->imageAvailableSemaphore
);
renderer->vkCreateSemaphore(
renderer->logicalDevice,
&semaphoreCreateInfo,
NULL,
&swapchainData->renderFinishedSemaphore
);
windowData->swapchainData = swapchainData;
return 1;
}
static void VULKAN_INTERNAL_RecreateSwapchain(
VulkanRenderer* renderer,
WindowData *windowData
) {
VULKAN_Wait((Refresh_Renderer*) renderer);
VULKAN_INTERNAL_DestroySwapchain(renderer, windowData);
VULKAN_INTERNAL_CreateSwapchain(renderer, windowData);
}
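
/* A full device wait here is heavyweight but keeps recreation simple: no
 * in-flight frame can still be referencing the old swapchain images.
 */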
/* Command Buffers */
static void VULKAN_INTERNAL_BeginCommandBuffer(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer
) {
VkCommandBufferBeginInfo beginInfo;
VkResult result;
beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
beginInfo.pNext = NULL;
	beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
	beginInfo.pInheritanceInfo = NULL;
result = renderer->vkBeginCommandBuffer(
commandBuffer->commandBuffer,
&beginInfo
);
if (result != VK_SUCCESS)
{
LogVulkanResultAsError("vkBeginCommandBuffer", result);
}
}
static void VULKAN_INTERNAL_EndCommandBuffer(
VulkanRenderer* renderer,
VulkanCommandBuffer *commandBuffer
) {
VkResult result;
result = renderer->vkEndCommandBuffer(
commandBuffer->commandBuffer
);
if (result != VK_SUCCESS)
{
LogVulkanResultAsError("vkEndCommandBuffer", result);
}
}
static void VULKAN_DestroyDevice(
Refresh_Device *device
) {
VulkanRenderer* renderer = (VulkanRenderer*) device->driverData;
CommandPoolHashArray commandPoolHashArray;
GraphicsPipelineLayoutHashArray graphicsPipelineLayoutHashArray;
ComputePipelineLayoutHashArray computePipelineLayoutHashArray;
VulkanMemorySubAllocator *allocator;
int32_t i, j, k;
VULKAN_Wait(device->driverData);
for (i = renderer->claimedWindowCount - 1; i >= 0; i -= 1)
{
VULKAN_UnclaimWindow(device->driverData, renderer->claimedWindows[i]->windowHandle);
}
SDL_free(renderer->claimedWindows);
VULKAN_Wait(device->driverData);
SDL_free(renderer->submittedCommandBuffers);
for (i = 0; i < renderer->fencePool.availableFenceCount; i += 1)
{
renderer->vkDestroyFence(renderer->logicalDevice, renderer->fencePool.availableFences[i], NULL);
}
SDL_free(renderer->fencePool.availableFences);
SDL_DestroyMutex(renderer->fencePool.lock);
for (i = 0; i < NUM_COMMAND_POOL_BUCKETS; i += 1)
{
commandPoolHashArray = renderer->commandPoolHashTable.buckets[i];
for (j = 0; j < commandPoolHashArray.count; j += 1)
{
VULKAN_INTERNAL_DestroyCommandPool(
renderer,
commandPoolHashArray.elements[j].value
);
}
if (commandPoolHashArray.elements != NULL)
{
SDL_free(commandPoolHashArray.elements);
}
}
for (i = 0; i < NUM_PIPELINE_LAYOUT_BUCKETS; i += 1)
{
graphicsPipelineLayoutHashArray = renderer->graphicsPipelineLayoutHashTable.buckets[i];
for (j = 0; j < graphicsPipelineLayoutHashArray.count; j += 1)
{
VULKAN_INTERNAL_DestroyDescriptorSetCache(
renderer,
graphicsPipelineLayoutHashArray.elements[j].value->vertexSamplerDescriptorSetCache
);
VULKAN_INTERNAL_DestroyDescriptorSetCache(
renderer,
graphicsPipelineLayoutHashArray.elements[j].value->fragmentSamplerDescriptorSetCache
);
renderer->vkDestroyPipelineLayout(
renderer->logicalDevice,
graphicsPipelineLayoutHashArray.elements[j].value->pipelineLayout,
NULL
);
SDL_free(graphicsPipelineLayoutHashArray.elements[j].value);
}
if (graphicsPipelineLayoutHashArray.elements != NULL)
{
SDL_free(graphicsPipelineLayoutHashArray.elements);
}
computePipelineLayoutHashArray = renderer->computePipelineLayoutHashTable.buckets[i];
for (j = 0; j < computePipelineLayoutHashArray.count; j += 1)
{
VULKAN_INTERNAL_DestroyDescriptorSetCache(
renderer,
computePipelineLayoutHashArray.elements[j].value->bufferDescriptorSetCache
);
VULKAN_INTERNAL_DestroyDescriptorSetCache(
renderer,
computePipelineLayoutHashArray.elements[j].value->imageDescriptorSetCache
);
renderer->vkDestroyPipelineLayout(
renderer->logicalDevice,
computePipelineLayoutHashArray.elements[j].value->pipelineLayout,
NULL
);
SDL_free(computePipelineLayoutHashArray.elements[j].value);
}
if (computePipelineLayoutHashArray.elements != NULL)
{
SDL_free(computePipelineLayoutHashArray.elements);
}
}
renderer->vkDestroyDescriptorPool(
renderer->logicalDevice,
renderer->defaultDescriptorPool,
NULL
);
for (i = 0; i < NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS; i += 1)
{
for (j = 0; j < renderer->descriptorSetLayoutHashTable.buckets[i].count; j += 1)
{
renderer->vkDestroyDescriptorSetLayout(
renderer->logicalDevice,
renderer->descriptorSetLayoutHashTable.buckets[i].elements[j].value,
NULL
);
}
SDL_free(renderer->descriptorSetLayoutHashTable.buckets[i].elements);
}
renderer->vkDestroyDescriptorSetLayout(
renderer->logicalDevice,
renderer->emptyVertexSamplerLayout,
NULL
);
renderer->vkDestroyDescriptorSetLayout(
renderer->logicalDevice,
renderer->emptyFragmentSamplerLayout,
NULL
);
renderer->vkDestroyDescriptorSetLayout(
renderer->logicalDevice,
renderer->emptyComputeBufferDescriptorSetLayout,
NULL
);
renderer->vkDestroyDescriptorSetLayout(
renderer->logicalDevice,
renderer->emptyComputeImageDescriptorSetLayout,
NULL
);
renderer->vkDestroyDescriptorSetLayout(
renderer->logicalDevice,
renderer->vertexUniformDescriptorSetLayout,
NULL
);
renderer->vkDestroyDescriptorSetLayout(
renderer->logicalDevice,
renderer->fragmentUniformDescriptorSetLayout,
NULL
);
renderer->vkDestroyDescriptorSetLayout(
renderer->logicalDevice,
renderer->computeUniformDescriptorSetLayout,
NULL
);
VULKAN_INTERNAL_DestroyUniformBufferObject(renderer, renderer->vertexUniformBufferObject);
VULKAN_INTERNAL_DestroyUniformBufferObject(renderer, renderer->fragmentUniformBufferObject);
VULKAN_INTERNAL_DestroyUniformBufferObject(renderer, renderer->computeUniformBufferObject);
for (i = 0; i < renderer->framebufferHashArray.count; i += 1)
{
VULKAN_INTERNAL_DestroyFramebuffer(
renderer,
renderer->framebufferHashArray.elements[i].value
);
}
SDL_free(renderer->framebufferHashArray.elements);
for (i = 0; i < renderer->renderPassHashArray.count; i += 1)
{
renderer->vkDestroyRenderPass(
renderer->logicalDevice,
renderer->renderPassHashArray.elements[i].value,
NULL
);
}
SDL_free(renderer->renderPassHashArray.elements);
SDL_free(renderer->renderTargetHashArray.elements);
for (i = 0; i < VK_MAX_MEMORY_TYPES; i += 1)
{
allocator = &renderer->memoryAllocator->subAllocators[i];
for (j = allocator->allocationCount - 1; j >= 0; j -= 1)
{
for (k = allocator->allocations[j]->usedRegionCount - 1; k >= 0; k -= 1)
{
VULKAN_INTERNAL_RemoveMemoryUsedRegion(
renderer,
allocator->allocations[j]->usedRegions[k]
);
}
VULKAN_INTERNAL_DeallocateMemory(
renderer,
allocator,
j
);
}
if (renderer->memoryAllocator->subAllocators[i].allocations != NULL)
{
SDL_free(renderer->memoryAllocator->subAllocators[i].allocations);
}
SDL_free(renderer->memoryAllocator->subAllocators[i].sortedFreeRegions);
}
SDL_free(renderer->memoryAllocator);
SDL_free(renderer->texturesToDestroy);
SDL_free(renderer->buffersToDestroy);
SDL_free(renderer->graphicsPipelinesToDestroy);
SDL_free(renderer->computePipelinesToDestroy);
SDL_free(renderer->shaderModulesToDestroy);
SDL_free(renderer->samplersToDestroy);
SDL_free(renderer->framebuffersToDestroy);
SDL_DestroyMutex(renderer->allocatorLock);
SDL_DestroyMutex(renderer->disposeLock);
SDL_DestroyMutex(renderer->submitLock);
SDL_DestroyMutex(renderer->acquireCommandBufferLock);
SDL_DestroyMutex(renderer->renderPassFetchLock);
SDL_DestroyMutex(renderer->framebufferFetchLock);
SDL_DestroyMutex(renderer->renderTargetFetchLock);
renderer->vkDestroyDevice(renderer->logicalDevice, NULL);
renderer->vkDestroyInstance(renderer->instance, NULL);
SDL_free(renderer);
SDL_free(device);
}
static void VULKAN_DrawInstancedPrimitives(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
uint32_t baseVertex,
uint32_t startIndex,
uint32_t primitiveCount,
uint32_t instanceCount
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VkDescriptorSet descriptorSets[4];
uint32_t dynamicOffsets[2];
descriptorSets[0] = vulkanCommandBuffer->vertexSamplerDescriptorSet;
descriptorSets[1] = vulkanCommandBuffer->fragmentSamplerDescriptorSet;
descriptorSets[2] = renderer->vertexUniformBufferObject->descriptorSet;
descriptorSets[3] = renderer->fragmentUniformBufferObject->descriptorSet;
dynamicOffsets[0] = vulkanCommandBuffer->vertexUniformOffset;
dynamicOffsets[1] = vulkanCommandBuffer->fragmentUniformOffset;
renderer->vkCmdBindDescriptorSets(
vulkanCommandBuffer->commandBuffer,
VK_PIPELINE_BIND_POINT_GRAPHICS,
vulkanCommandBuffer->currentGraphicsPipeline->pipelineLayout->pipelineLayout,
0,
4,
descriptorSets,
2,
dynamicOffsets
);
renderer->vkCmdDrawIndexed(
vulkanCommandBuffer->commandBuffer,
PrimitiveVerts(
vulkanCommandBuffer->currentGraphicsPipeline->primitiveType,
primitiveCount
),
instanceCount,
startIndex,
baseVertex,
0
);
}
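
/* PrimitiveVerts converts a primitive count into the vertex/index count the
 * bound topology needs, e.g. a triangle list consumes primitiveCount * 3.
 */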
static void VULKAN_DrawIndexedPrimitives(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
uint32_t baseVertex,
uint32_t startIndex,
uint32_t primitiveCount
) {
VULKAN_DrawInstancedPrimitives(
driverData,
commandBuffer,
baseVertex,
startIndex,
primitiveCount,
1
);
}
static void VULKAN_DrawPrimitives(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
uint32_t vertexStart,
uint32_t primitiveCount
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VkDescriptorSet descriptorSets[4];
uint32_t dynamicOffsets[2];
descriptorSets[0] = vulkanCommandBuffer->vertexSamplerDescriptorSet;
descriptorSets[1] = vulkanCommandBuffer->fragmentSamplerDescriptorSet;
descriptorSets[2] = renderer->vertexUniformBufferObject->descriptorSet;
descriptorSets[3] = renderer->fragmentUniformBufferObject->descriptorSet;
dynamicOffsets[0] = vulkanCommandBuffer->vertexUniformOffset;
dynamicOffsets[1] = vulkanCommandBuffer->fragmentUniformOffset;
renderer->vkCmdBindDescriptorSets(
vulkanCommandBuffer->commandBuffer,
VK_PIPELINE_BIND_POINT_GRAPHICS,
vulkanCommandBuffer->currentGraphicsPipeline->pipelineLayout->pipelineLayout,
0,
4,
descriptorSets,
2,
dynamicOffsets
);
renderer->vkCmdDraw(
vulkanCommandBuffer->commandBuffer,
PrimitiveVerts(
vulkanCommandBuffer->currentGraphicsPipeline->primitiveType,
primitiveCount
),
1,
vertexStart,
0
);
}
static void VULKAN_DrawPrimitivesIndirect(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_GpuBuffer *buffer,
uint32_t offsetInBytes,
uint32_t drawCount,
uint32_t stride
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer*) buffer)->vulkanBuffer;
VkDescriptorSet descriptorSets[4];
uint32_t dynamicOffsets[2];
descriptorSets[0] = vulkanCommandBuffer->vertexSamplerDescriptorSet;
descriptorSets[1] = vulkanCommandBuffer->fragmentSamplerDescriptorSet;
descriptorSets[2] = renderer->vertexUniformBufferObject->descriptorSet;
descriptorSets[3] = renderer->fragmentUniformBufferObject->descriptorSet;
dynamicOffsets[0] = vulkanCommandBuffer->vertexUniformOffset;
dynamicOffsets[1] = vulkanCommandBuffer->fragmentUniformOffset;
renderer->vkCmdBindDescriptorSets(
vulkanCommandBuffer->commandBuffer,
VK_PIPELINE_BIND_POINT_GRAPHICS,
vulkanCommandBuffer->currentGraphicsPipeline->pipelineLayout->pipelineLayout,
0,
4,
descriptorSets,
2,
dynamicOffsets
);
renderer->vkCmdDrawIndirect(
vulkanCommandBuffer->commandBuffer,
vulkanBuffer->buffer,
offsetInBytes,
drawCount,
stride
);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, vulkanBuffer);
}
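
/* Per the Vulkan spec, the indirect buffer is read as VkDrawIndirectCommand
 * structs; when drawCount > 1, stride must be a multiple of 4 and at least
 * sizeof(VkDrawIndirectCommand).
 */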
static VulkanTexture* VULKAN_INTERNAL_CreateTexture(
VulkanRenderer *renderer,
uint32_t width,
uint32_t height,
uint32_t depth,
uint32_t isCube,
uint32_t levelCount,
Refresh_SampleCount sampleCount,
VkFormat format,
VkImageAspectFlags aspectMask,
VkImageUsageFlags imageUsageFlags
) {
VkResult vulkanResult;
VkImageCreateInfo imageCreateInfo;
VkImageCreateFlags imageCreateFlags = 0;
VkImageViewCreateInfo imageViewCreateInfo;
uint8_t bindResult;
uint8_t is3D = depth > 1 ? 1 : 0;
uint8_t layerCount = isCube ? 6 : 1;
uint8_t isRenderTarget =
((imageUsageFlags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) != 0) ||
((imageUsageFlags & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) != 0);
VkComponentMapping swizzle = IDENTITY_SWIZZLE;
VulkanTexture *texture = SDL_malloc(sizeof(VulkanTexture));
texture->isCube = 0;
texture->is3D = 0;
if (isCube)
{
imageCreateFlags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
texture->isCube = 1;
}
else if (is3D)
{
imageCreateFlags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT;
texture->is3D = 1;
}
imageCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
imageCreateInfo.pNext = NULL;
imageCreateInfo.flags = imageCreateFlags;
imageCreateInfo.imageType = is3D ? VK_IMAGE_TYPE_3D : VK_IMAGE_TYPE_2D;
imageCreateInfo.format = format;
imageCreateInfo.extent.width = width;
imageCreateInfo.extent.height = height;
imageCreateInfo.extent.depth = depth;
imageCreateInfo.mipLevels = levelCount;
imageCreateInfo.arrayLayers = layerCount;
imageCreateInfo.samples = RefreshToVK_SampleCount[sampleCount];
imageCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
imageCreateInfo.usage = imageUsageFlags;
imageCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
imageCreateInfo.queueFamilyIndexCount = 0;
imageCreateInfo.pQueueFamilyIndices = NULL;
imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
vulkanResult = renderer->vkCreateImage(
renderer->logicalDevice,
&imageCreateInfo,
NULL,
&texture->image
);
VULKAN_ERROR_CHECK(vulkanResult, vkCreateImage, 0)
bindResult = VULKAN_INTERNAL_BindMemoryForImage(
renderer,
texture->image,
isRenderTarget,
&texture->usedRegion
);
if (bindResult != 1)
{
renderer->vkDestroyImage(
renderer->logicalDevice,
texture->image,
NULL);
Refresh_LogError("Unable to bind memory for texture!");
return NULL;
}
	texture->usedRegion->vulkanTexture = texture; /* back-reference so defrag can map a used region to its texture */
imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
imageViewCreateInfo.pNext = NULL;
imageViewCreateInfo.flags = 0;
imageViewCreateInfo.image = texture->image;
imageViewCreateInfo.format = format;
imageViewCreateInfo.components = swizzle;
imageViewCreateInfo.subresourceRange.aspectMask = aspectMask;
imageViewCreateInfo.subresourceRange.baseMipLevel = 0;
imageViewCreateInfo.subresourceRange.levelCount = levelCount;
imageViewCreateInfo.subresourceRange.baseArrayLayer = 0;
imageViewCreateInfo.subresourceRange.layerCount = layerCount;
if (isCube)
{
imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_CUBE;
}
else if (is3D)
{
imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_3D;
}
else
{
imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
}
vulkanResult = renderer->vkCreateImageView(
renderer->logicalDevice,
&imageViewCreateInfo,
NULL,
&texture->view
);
if (vulkanResult != VK_SUCCESS)
{
LogVulkanResultAsError("vkCreateImageView", vulkanResult);
Refresh_LogError("Failed to create texture image view");
return NULL;
}
texture->dimensions.width = width;
texture->dimensions.height = height;
texture->depth = depth;
texture->format = format;
texture->levelCount = levelCount;
texture->layerCount = layerCount;
texture->sampleCount = sampleCount;
texture->resourceAccessType = RESOURCE_ACCESS_NONE;
texture->usageFlags = imageUsageFlags;
texture->aspectFlags = aspectMask;
texture->msaaTex = NULL;
SDL_AtomicSet(&texture->referenceCount, 0);
return texture;
}
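
/* 3D textures are created with VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT so
 * that individual depth slices can be bound as 2D render targets (see
 * VULKAN_INTERNAL_CreateRenderTarget below).
 */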
static VulkanRenderTarget* VULKAN_INTERNAL_CreateRenderTarget(
VulkanRenderer *renderer,
VulkanTexture *texture,
uint32_t depth,
uint32_t layer,
uint32_t level
) {
VkResult vulkanResult;
VulkanRenderTarget *renderTarget = (VulkanRenderTarget*) SDL_malloc(sizeof(VulkanRenderTarget));
VkImageViewCreateInfo imageViewCreateInfo;
VkComponentMapping swizzle = IDENTITY_SWIZZLE;
VkImageAspectFlags aspectFlags = 0;
if (IsDepthFormat(texture->format))
{
aspectFlags |= VK_IMAGE_ASPECT_DEPTH_BIT;
if (IsStencilFormat(texture->format))
{
aspectFlags |= VK_IMAGE_ASPECT_STENCIL_BIT;
}
}
else
{
aspectFlags |= VK_IMAGE_ASPECT_COLOR_BIT;
}
/* create framebuffer compatible views for RenderTarget */
imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
imageViewCreateInfo.pNext = NULL;
imageViewCreateInfo.flags = 0;
imageViewCreateInfo.image = texture->image;
imageViewCreateInfo.format = texture->format;
imageViewCreateInfo.components = swizzle;
imageViewCreateInfo.subresourceRange.aspectMask = aspectFlags;
imageViewCreateInfo.subresourceRange.baseMipLevel = level;
imageViewCreateInfo.subresourceRange.levelCount = 1;
imageViewCreateInfo.subresourceRange.baseArrayLayer = 0;
if (texture->is3D)
{
imageViewCreateInfo.subresourceRange.baseArrayLayer = depth;
}
else if (texture->isCube)
{
imageViewCreateInfo.subresourceRange.baseArrayLayer = layer;
}
imageViewCreateInfo.subresourceRange.layerCount = 1;
imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
vulkanResult = renderer->vkCreateImageView(
renderer->logicalDevice,
&imageViewCreateInfo,
NULL,
&renderTarget->view
);
if (vulkanResult != VK_SUCCESS)
{
LogVulkanResultAsError(
"vkCreateImageView",
vulkanResult
);
Refresh_LogError("Failed to create color attachment image view");
		SDL_free(renderTarget);
		return NULL;
}
return renderTarget;
}
static VulkanRenderTarget* VULKAN_INTERNAL_FetchRenderTarget(
VulkanRenderer *renderer,
VulkanTexture *texture,
uint32_t depth,
uint32_t layer,
uint32_t level
) {
RenderTargetHash hash;
VulkanRenderTarget *renderTarget;
hash.texture = texture;
hash.depth = depth;
hash.layer = layer;
hash.level = level;
SDL_LockMutex(renderer->renderTargetFetchLock);
renderTarget = RenderTargetHash_Fetch(
&renderer->renderTargetHashArray,
&hash
);
if (renderTarget == NULL)
{
renderTarget = VULKAN_INTERNAL_CreateRenderTarget(
renderer,
texture,
depth,
layer,
level
);
RenderTargetHash_Insert(
&renderer->renderTargetHashArray,
hash,
renderTarget
);
}
SDL_UnlockMutex(renderer->renderTargetFetchLock);
return renderTarget;
}
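
/* Render target views are created lazily on first use and cached; the fetch
 * lock makes concurrent lookups from multiple command buffer threads safe.
 */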
static VkRenderPass VULKAN_INTERNAL_CreateRenderPass(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
Refresh_ColorAttachmentInfo *colorAttachmentInfos,
uint32_t colorAttachmentCount,
Refresh_DepthStencilAttachmentInfo *depthStencilAttachmentInfo
) {
VkResult vulkanResult;
VkAttachmentDescription attachmentDescriptions[2 * MAX_COLOR_TARGET_BINDINGS + 1];
VkAttachmentReference colorAttachmentReferences[MAX_COLOR_TARGET_BINDINGS];
VkAttachmentReference resolveReferences[MAX_COLOR_TARGET_BINDINGS + 1];
VkAttachmentReference depthStencilAttachmentReference;
VkRenderPassCreateInfo renderPassCreateInfo;
VkSubpassDescription subpass;
VkRenderPass renderPass;
uint32_t i;
uint32_t attachmentDescriptionCount = 0;
uint32_t colorAttachmentReferenceCount = 0;
uint32_t resolveReferenceCount = 0;
VulkanTexture *texture;
VulkanTexture *msaaTexture = NULL;
for (i = 0; i < colorAttachmentCount; i += 1)
{
texture = ((VulkanTextureContainer*) colorAttachmentInfos[i].texture)->vulkanTexture;
if (texture->msaaTex != NULL)
{
msaaTexture = texture->msaaTex;
/* Transition the multisample attachment */
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
commandBuffer->commandBuffer,
RESOURCE_ACCESS_COLOR_ATTACHMENT_WRITE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
msaaTexture->layerCount,
0,
msaaTexture->levelCount,
0,
msaaTexture->image,
&msaaTexture->resourceAccessType
);
/* Resolve attachment and multisample attachment */
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
attachmentDescriptions[attachmentDescriptionCount].format = texture->format;
attachmentDescriptions[attachmentDescriptionCount].samples =
VK_SAMPLE_COUNT_1_BIT;
attachmentDescriptions[attachmentDescriptionCount].loadOp = RefreshToVK_LoadOp[
colorAttachmentInfos[i].loadOp
];
attachmentDescriptions[attachmentDescriptionCount].storeOp =
VK_ATTACHMENT_STORE_OP_STORE; /* Always store the resolve texture */
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp =
VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp =
VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].initialLayout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptions[attachmentDescriptionCount].finalLayout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
resolveReferences[resolveReferenceCount].attachment =
attachmentDescriptionCount;
resolveReferences[resolveReferenceCount].layout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptionCount += 1;
resolveReferenceCount += 1;
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
attachmentDescriptions[attachmentDescriptionCount].format = msaaTexture->format;
attachmentDescriptions[attachmentDescriptionCount].samples = RefreshToVK_SampleCount[
msaaTexture->sampleCount
];
attachmentDescriptions[attachmentDescriptionCount].loadOp = RefreshToVK_LoadOp[
colorAttachmentInfos[i].loadOp
];
attachmentDescriptions[attachmentDescriptionCount].storeOp = RefreshToVK_StoreOp[
colorAttachmentInfos[i].storeOp
];
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp =
VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp =
VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].initialLayout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptions[attachmentDescriptionCount].finalLayout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
colorAttachmentReferences[colorAttachmentReferenceCount].attachment =
attachmentDescriptionCount;
colorAttachmentReferences[colorAttachmentReferenceCount].layout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptionCount += 1;
colorAttachmentReferenceCount += 1;
}
else
{
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
attachmentDescriptions[attachmentDescriptionCount].format = texture->format;
attachmentDescriptions[attachmentDescriptionCount].samples =
VK_SAMPLE_COUNT_1_BIT;
attachmentDescriptions[attachmentDescriptionCount].loadOp = RefreshToVK_LoadOp[
colorAttachmentInfos[i].loadOp
];
attachmentDescriptions[attachmentDescriptionCount].storeOp =
VK_ATTACHMENT_STORE_OP_STORE; /* Always store non-MSAA textures */
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp =
VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp =
VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].initialLayout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptions[attachmentDescriptionCount].finalLayout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
colorAttachmentReferences[colorAttachmentReferenceCount].attachment = attachmentDescriptionCount;
colorAttachmentReferences[colorAttachmentReferenceCount].layout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptionCount += 1;
colorAttachmentReferenceCount += 1;
}
}
subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
subpass.flags = 0;
subpass.inputAttachmentCount = 0;
subpass.pInputAttachments = NULL;
subpass.colorAttachmentCount = colorAttachmentCount;
subpass.pColorAttachments = colorAttachmentReferences;
subpass.preserveAttachmentCount = 0;
subpass.pPreserveAttachments = NULL;
if (depthStencilAttachmentInfo == NULL)
{
subpass.pDepthStencilAttachment = NULL;
}
else
{
texture = ((VulkanTextureContainer*) depthStencilAttachmentInfo->texture)->vulkanTexture;
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
attachmentDescriptions[attachmentDescriptionCount].format = texture->format;
attachmentDescriptions[attachmentDescriptionCount].samples = RefreshToVK_SampleCount[
texture->sampleCount
];
attachmentDescriptions[attachmentDescriptionCount].loadOp = RefreshToVK_LoadOp[
depthStencilAttachmentInfo->loadOp
];
attachmentDescriptions[attachmentDescriptionCount].storeOp = RefreshToVK_StoreOp[
depthStencilAttachmentInfo->storeOp
];
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = RefreshToVK_LoadOp[
depthStencilAttachmentInfo->stencilLoadOp
];
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = RefreshToVK_StoreOp[
depthStencilAttachmentInfo->stencilStoreOp
];
attachmentDescriptions[attachmentDescriptionCount].initialLayout =
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
attachmentDescriptions[attachmentDescriptionCount].finalLayout =
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
depthStencilAttachmentReference.attachment =
attachmentDescriptionCount;
depthStencilAttachmentReference.layout =
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
subpass.pDepthStencilAttachment =
&depthStencilAttachmentReference;
attachmentDescriptionCount += 1;
}
	if (msaaTexture != NULL)
	{
		subpass.pResolveAttachments = resolveReferences;
	}
	else
	{
		subpass.pResolveAttachments = NULL;
	}
renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
renderPassCreateInfo.pNext = NULL;
renderPassCreateInfo.flags = 0;
renderPassCreateInfo.pAttachments = attachmentDescriptions;
renderPassCreateInfo.attachmentCount = attachmentDescriptionCount;
renderPassCreateInfo.subpassCount = 1;
renderPassCreateInfo.pSubpasses = &subpass;
renderPassCreateInfo.dependencyCount = 0;
renderPassCreateInfo.pDependencies = NULL;
vulkanResult = renderer->vkCreateRenderPass(
renderer->logicalDevice,
&renderPassCreateInfo,
NULL,
&renderPass
);
if (vulkanResult != VK_SUCCESS)
{
renderPass = VK_NULL_HANDLE;
LogVulkanResultAsError("vkCreateRenderPass", vulkanResult);
}
return renderPass;
}
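
/* Builds a minimal throwaway render pass for pipeline creation.
 *
 * Vulkan requires a VkRenderPass at vkCreateGraphicsPipelines time, but only
 * to establish compatibility: any pass whose attachment formats and sample
 * counts match will do. All load/store ops are DONT_CARE because this pass is
 * never actually executed, and the caller destroys it as soon as the pipeline
 * has been created.
 */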
static VkRenderPass VULKAN_INTERNAL_CreateTransientRenderPass(
VulkanRenderer *renderer,
Refresh_GraphicsPipelineAttachmentInfo attachmentInfo,
Refresh_SampleCount sampleCount
) {
VkAttachmentDescription attachmentDescriptions[2 * MAX_COLOR_TARGET_BINDINGS + 1];
VkAttachmentReference colorAttachmentReferences[MAX_COLOR_TARGET_BINDINGS];
VkAttachmentReference resolveReferences[MAX_COLOR_TARGET_BINDINGS + 1];
VkAttachmentReference depthStencilAttachmentReference;
Refresh_ColorAttachmentDescription attachmentDescription;
	VkSubpassDescription subpass;
	VkRenderPassCreateInfo renderPassCreateInfo;

	VkRenderPass renderPass;
	VkResult result;
	uint32_t multisampling = 0;

uint32_t attachmentDescriptionCount = 0;
uint32_t colorAttachmentReferenceCount = 0;
uint32_t resolveReferenceCount = 0;
uint32_t i;
for (i = 0; i < attachmentInfo.colorAttachmentCount; i += 1)
{
attachmentDescription = attachmentInfo.colorAttachmentDescriptions[i];
if (sampleCount > REFRESH_SAMPLECOUNT_1)
{
multisampling = 1;
/* Resolve attachment and multisample attachment */
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
attachmentDescriptions[attachmentDescriptionCount].format = RefreshToVK_SurfaceFormat[
attachmentDescription.format
];
attachmentDescriptions[attachmentDescriptionCount].samples = VK_SAMPLE_COUNT_1_BIT;
attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
resolveReferences[resolveReferenceCount].attachment = attachmentDescriptionCount;
resolveReferences[resolveReferenceCount].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptionCount += 1;
resolveReferenceCount += 1;
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
attachmentDescriptions[attachmentDescriptionCount].format = RefreshToVK_SurfaceFormat[
attachmentDescription.format
];
attachmentDescriptions[attachmentDescriptionCount].samples = RefreshToVK_SampleCount[
sampleCount
];
attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
			colorAttachmentReferences[colorAttachmentReferenceCount].attachment =
				attachmentDescriptionCount;
			colorAttachmentReferences[colorAttachmentReferenceCount].layout =
				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

			attachmentDescriptionCount += 1;
			colorAttachmentReferenceCount += 1;
		}
		else
		{
			attachmentDescriptions[attachmentDescriptionCount].flags = 0;
			attachmentDescriptions[attachmentDescriptionCount].format = RefreshToVK_SurfaceFormat[
				attachmentDescription.format
			];
			attachmentDescriptions[attachmentDescriptionCount].samples =
				VK_SAMPLE_COUNT_1_BIT;
			attachmentDescriptions[attachmentDescriptionCount].loadOp =
				VK_ATTACHMENT_LOAD_OP_DONT_CARE;
			attachmentDescriptions[attachmentDescriptionCount].storeOp =
				VK_ATTACHMENT_STORE_OP_DONT_CARE;
			attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp =
				VK_ATTACHMENT_LOAD_OP_DONT_CARE;
			attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp =
				VK_ATTACHMENT_STORE_OP_DONT_CARE;
			attachmentDescriptions[attachmentDescriptionCount].initialLayout =
				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
			attachmentDescriptions[attachmentDescriptionCount].finalLayout =
				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

			colorAttachmentReferences[colorAttachmentReferenceCount].attachment = attachmentDescriptionCount;
			colorAttachmentReferences[colorAttachmentReferenceCount].layout =
				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

			attachmentDescriptionCount += 1;
			colorAttachmentReferenceCount += 1;
		}
}
subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
subpass.flags = 0;
subpass.inputAttachmentCount = 0;
subpass.pInputAttachments = NULL;
subpass.colorAttachmentCount = attachmentInfo.colorAttachmentCount;
subpass.pColorAttachments = colorAttachmentReferences;
subpass.preserveAttachmentCount = 0;
subpass.pPreserveAttachments = NULL;
if (attachmentInfo.hasDepthStencilAttachment)
{
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
		attachmentDescriptions[attachmentDescriptionCount].format = RefreshToVK_DepthFormat(
			renderer,
			attachmentInfo.depthStencilFormat
		);
		attachmentDescriptions[attachmentDescriptionCount].samples = RefreshToVK_SampleCount[
			sampleCount
		];

attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].initialLayout =
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
attachmentDescriptions[attachmentDescriptionCount].finalLayout =
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
depthStencilAttachmentReference.attachment =
attachmentDescriptionCount;
depthStencilAttachmentReference.layout =
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
subpass.pDepthStencilAttachment =
&depthStencilAttachmentReference;
attachmentDescriptionCount += 1;
}
else
{
subpass.pDepthStencilAttachment = NULL;
}
if (multisampling)
{
subpass.pResolveAttachments = resolveReferences;
}
else
{
subpass.pResolveAttachments = NULL;
}
renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
renderPassCreateInfo.pNext = NULL;
renderPassCreateInfo.flags = 0;
renderPassCreateInfo.pAttachments = attachmentDescriptions;
renderPassCreateInfo.attachmentCount = attachmentDescriptionCount;
renderPassCreateInfo.subpassCount = 1;
renderPassCreateInfo.pSubpasses = &subpass;
renderPassCreateInfo.dependencyCount = 0;
renderPassCreateInfo.pDependencies = NULL;
result = renderer->vkCreateRenderPass(
renderer->logicalDevice,
&renderPassCreateInfo,
NULL,
&renderPass
);
if (result != VK_SUCCESS)
{
renderPass = VK_NULL_HANDLE;
LogVulkanResultAsError("vkCreateRenderPass", result);
}
return renderPass;
}
static Refresh_GraphicsPipeline* VULKAN_CreateGraphicsPipeline(
	Refresh_Renderer *driverData,
	Refresh_GraphicsPipelineCreateInfo *pipelineCreateInfo
) {
	VkResult vulkanResult;
	uint32_t i;
	Refresh_SampleCount actualSampleCount;

	VulkanGraphicsPipeline *graphicsPipeline = (VulkanGraphicsPipeline*) SDL_malloc(sizeof(VulkanGraphicsPipeline));
VkGraphicsPipelineCreateInfo vkPipelineCreateInfo;
VkPipelineShaderStageCreateInfo shaderStageCreateInfos[2];
VkPipelineVertexInputStateCreateInfo vertexInputStateCreateInfo;
VkVertexInputBindingDescription *vertexInputBindingDescriptions = SDL_stack_alloc(VkVertexInputBindingDescription, pipelineCreateInfo->vertexInputState.vertexBindingCount);
VkVertexInputAttributeDescription *vertexInputAttributeDescriptions = SDL_stack_alloc(VkVertexInputAttributeDescription, pipelineCreateInfo->vertexInputState.vertexAttributeCount);
VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateCreateInfo;
VkPipelineViewportStateCreateInfo viewportStateCreateInfo;
VkPipelineRasterizationStateCreateInfo rasterizationStateCreateInfo;
VkPipelineMultisampleStateCreateInfo multisampleStateCreateInfo;
VkPipelineDepthStencilStateCreateInfo depthStencilStateCreateInfo;
VkStencilOpState frontStencilState;
VkStencilOpState backStencilState;
VkPipelineColorBlendStateCreateInfo colorBlendStateCreateInfo;
VkPipelineColorBlendAttachmentState *colorBlendAttachmentStates = SDL_stack_alloc(
VkPipelineColorBlendAttachmentState,
pipelineCreateInfo->attachmentInfo.colorAttachmentCount
);
static const VkDynamicState dynamicStates[] =
{
VK_DYNAMIC_STATE_VIEWPORT,
VK_DYNAMIC_STATE_SCISSOR
};
VkPipelineDynamicStateCreateInfo dynamicStateCreateInfo;
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
/* Find a compatible sample count to use */
actualSampleCount = VULKAN_INTERNAL_GetMaxMultiSampleCount(
renderer,
pipelineCreateInfo->multisampleState.multisampleCount
);
/* Create a "compatible" render pass */
VkRenderPass transientRenderPass = VULKAN_INTERNAL_CreateTransientRenderPass(
renderer,
pipelineCreateInfo->attachmentInfo,
actualSampleCount
);
/* Dynamic state */
dynamicStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
dynamicStateCreateInfo.pNext = NULL;
dynamicStateCreateInfo.flags = 0;
dynamicStateCreateInfo.dynamicStateCount = SDL_arraysize(dynamicStates);
dynamicStateCreateInfo.pDynamicStates = dynamicStates;
/* Shader stages */
graphicsPipeline->vertexShaderModule = (VulkanShaderModule*) pipelineCreateInfo->vertexShaderInfo.shaderModule;
SDL_AtomicIncRef(&graphicsPipeline->vertexShaderModule->referenceCount);
shaderStageCreateInfos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
shaderStageCreateInfos[0].pNext = NULL;
shaderStageCreateInfos[0].flags = 0;
shaderStageCreateInfos[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
shaderStageCreateInfos[0].module = graphicsPipeline->vertexShaderModule->shaderModule;
shaderStageCreateInfos[0].pName = pipelineCreateInfo->vertexShaderInfo.entryPointName;
shaderStageCreateInfos[0].pSpecializationInfo = NULL;
	graphicsPipeline->vertexUniformBlockSize =
		VULKAN_INTERNAL_NextHighestAlignment32(
			pipelineCreateInfo->vertexShaderInfo.uniformBufferSize,
			renderer->minUBOAlignment
		);
graphicsPipeline->fragmentShaderModule = (VulkanShaderModule*) pipelineCreateInfo->fragmentShaderInfo.shaderModule;
SDL_AtomicIncRef(&graphicsPipeline->fragmentShaderModule->referenceCount);
shaderStageCreateInfos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
shaderStageCreateInfos[1].pNext = NULL;
shaderStageCreateInfos[1].flags = 0;
shaderStageCreateInfos[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
shaderStageCreateInfos[1].module = graphicsPipeline->fragmentShaderModule->shaderModule;
shaderStageCreateInfos[1].pName = pipelineCreateInfo->fragmentShaderInfo.entryPointName;
shaderStageCreateInfos[1].pSpecializationInfo = NULL;
	graphicsPipeline->fragmentUniformBlockSize =
		VULKAN_INTERNAL_NextHighestAlignment32(
			pipelineCreateInfo->fragmentShaderInfo.uniformBufferSize,
			renderer->minUBOAlignment
		);
/* Vertex input */
for (i = 0; i < pipelineCreateInfo->vertexInputState.vertexBindingCount; i += 1)
{
vertexInputBindingDescriptions[i].binding = pipelineCreateInfo->vertexInputState.vertexBindings[i].binding;
vertexInputBindingDescriptions[i].inputRate = RefreshToVK_VertexInputRate[
pipelineCreateInfo->vertexInputState.vertexBindings[i].inputRate
];
vertexInputBindingDescriptions[i].stride = pipelineCreateInfo->vertexInputState.vertexBindings[i].stride;
}
for (i = 0; i < pipelineCreateInfo->vertexInputState.vertexAttributeCount; i += 1)
{
vertexInputAttributeDescriptions[i].binding = pipelineCreateInfo->vertexInputState.vertexAttributes[i].binding;
vertexInputAttributeDescriptions[i].format = RefreshToVK_VertexFormat[
pipelineCreateInfo->vertexInputState.vertexAttributes[i].format
];
vertexInputAttributeDescriptions[i].location = pipelineCreateInfo->vertexInputState.vertexAttributes[i].location;
vertexInputAttributeDescriptions[i].offset = pipelineCreateInfo->vertexInputState.vertexAttributes[i].offset;
}
vertexInputStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
vertexInputStateCreateInfo.pNext = NULL;
vertexInputStateCreateInfo.flags = 0;
vertexInputStateCreateInfo.vertexBindingDescriptionCount = pipelineCreateInfo->vertexInputState.vertexBindingCount;
vertexInputStateCreateInfo.pVertexBindingDescriptions = vertexInputBindingDescriptions;
vertexInputStateCreateInfo.vertexAttributeDescriptionCount = pipelineCreateInfo->vertexInputState.vertexAttributeCount;
vertexInputStateCreateInfo.pVertexAttributeDescriptions = vertexInputAttributeDescriptions;
/* Topology */
inputAssemblyStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
inputAssemblyStateCreateInfo.pNext = NULL;
inputAssemblyStateCreateInfo.flags = 0;
inputAssemblyStateCreateInfo.primitiveRestartEnable = VK_FALSE;
	inputAssemblyStateCreateInfo.topology = RefreshToVK_PrimitiveType[
		pipelineCreateInfo->primitiveType
	];
graphicsPipeline->primitiveType = pipelineCreateInfo->primitiveType;
/* Viewport */
/* NOTE: viewport and scissor are dynamic, and must be set using the command buffer */
viewportStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
viewportStateCreateInfo.pNext = NULL;
viewportStateCreateInfo.flags = 0;
viewportStateCreateInfo.viewportCount = 1;
viewportStateCreateInfo.pViewports = NULL;
viewportStateCreateInfo.scissorCount = 1;
viewportStateCreateInfo.pScissors = NULL;
/* Rasterization */
rasterizationStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
rasterizationStateCreateInfo.pNext = NULL;
rasterizationStateCreateInfo.flags = 0;
rasterizationStateCreateInfo.depthClampEnable = VK_FALSE;
rasterizationStateCreateInfo.rasterizerDiscardEnable = VK_FALSE;
rasterizationStateCreateInfo.polygonMode = RefreshToVK_PolygonMode[
pipelineCreateInfo->rasterizerState.fillMode
];
rasterizationStateCreateInfo.cullMode = RefreshToVK_CullMode[
pipelineCreateInfo->rasterizerState.cullMode
];
rasterizationStateCreateInfo.frontFace = RefreshToVK_FrontFace[
pipelineCreateInfo->rasterizerState.frontFace
];
rasterizationStateCreateInfo.depthBiasEnable =
pipelineCreateInfo->rasterizerState.depthBiasEnable;
rasterizationStateCreateInfo.depthBiasConstantFactor =
pipelineCreateInfo->rasterizerState.depthBiasConstantFactor;
rasterizationStateCreateInfo.depthBiasClamp =
pipelineCreateInfo->rasterizerState.depthBiasClamp;
rasterizationStateCreateInfo.depthBiasSlopeFactor =
pipelineCreateInfo->rasterizerState.depthBiasSlopeFactor;
rasterizationStateCreateInfo.lineWidth = 1.0f;
/* Multisample */
multisampleStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
multisampleStateCreateInfo.pNext = NULL;
multisampleStateCreateInfo.flags = 0;
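	/* The pipeline's sample count must match the render pass it is used
	 * with, so this uses the same clamped actualSampleCount that the
	 * transient render pass above was built from.
	 */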
multisampleStateCreateInfo.rasterizationSamples = RefreshToVK_SampleCount[actualSampleCount];
multisampleStateCreateInfo.sampleShadingEnable = VK_FALSE;
multisampleStateCreateInfo.minSampleShading = 1.0f;
multisampleStateCreateInfo.pSampleMask =
&pipelineCreateInfo->multisampleState.sampleMask;
multisampleStateCreateInfo.alphaToCoverageEnable = VK_FALSE;
multisampleStateCreateInfo.alphaToOneEnable = VK_FALSE;
/* Depth Stencil State */
	frontStencilState.failOp = RefreshToVK_StencilOp[
		pipelineCreateInfo->depthStencilState.stencilState.failOp
	];
	frontStencilState.passOp = RefreshToVK_StencilOp[
		pipelineCreateInfo->depthStencilState.stencilState.passOp
	];
	frontStencilState.depthFailOp = RefreshToVK_StencilOp[
		pipelineCreateInfo->depthStencilState.stencilState.depthFailOp
	];
	frontStencilState.compareOp = RefreshToVK_CompareOp[
		pipelineCreateInfo->depthStencilState.stencilState.compareOp
	];
	frontStencilState.compareMask =
		pipelineCreateInfo->depthStencilState.stencilState.compareMask;
	frontStencilState.writeMask =
		pipelineCreateInfo->depthStencilState.stencilState.writeMask;
	frontStencilState.reference =
		pipelineCreateInfo->depthStencilState.stencilState.reference;

	backStencilState.failOp = RefreshToVK_StencilOp[
		pipelineCreateInfo->depthStencilState.stencilState.failOp
	];
	backStencilState.passOp = RefreshToVK_StencilOp[
		pipelineCreateInfo->depthStencilState.stencilState.passOp
	];
	backStencilState.depthFailOp = RefreshToVK_StencilOp[
		pipelineCreateInfo->depthStencilState.stencilState.depthFailOp
	];
	backStencilState.compareOp = RefreshToVK_CompareOp[
		pipelineCreateInfo->depthStencilState.stencilState.compareOp
	];
	backStencilState.compareMask =
		pipelineCreateInfo->depthStencilState.stencilState.compareMask;
	backStencilState.writeMask =
		pipelineCreateInfo->depthStencilState.stencilState.writeMask;
	backStencilState.reference =
		pipelineCreateInfo->depthStencilState.stencilState.reference;
depthStencilStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
depthStencilStateCreateInfo.pNext = NULL;
depthStencilStateCreateInfo.flags = 0;
depthStencilStateCreateInfo.depthTestEnable =
pipelineCreateInfo->depthStencilState.depthTestEnable;
depthStencilStateCreateInfo.depthWriteEnable =
pipelineCreateInfo->depthStencilState.depthWriteEnable;
depthStencilStateCreateInfo.depthCompareOp = RefreshToVK_CompareOp[
pipelineCreateInfo->depthStencilState.compareOp
];
depthStencilStateCreateInfo.depthBoundsTestEnable =
pipelineCreateInfo->depthStencilState.depthBoundsTestEnable;
depthStencilStateCreateInfo.stencilTestEnable =
pipelineCreateInfo->depthStencilState.stencilTestEnable;
depthStencilStateCreateInfo.front = frontStencilState;
depthStencilStateCreateInfo.back = backStencilState;
depthStencilStateCreateInfo.minDepthBounds =
pipelineCreateInfo->depthStencilState.minDepthBounds;
depthStencilStateCreateInfo.maxDepthBounds =
pipelineCreateInfo->depthStencilState.maxDepthBounds;
/* Color Blend */
for (i = 0; i < pipelineCreateInfo->attachmentInfo.colorAttachmentCount; i += 1)
	{
		Refresh_ColorAttachmentBlendState blendState = pipelineCreateInfo->attachmentInfo.colorAttachmentDescriptions[i].blendState;

		colorBlendAttachmentStates[i].blendEnable =
			blendState.blendEnable;
		colorBlendAttachmentStates[i].srcColorBlendFactor = RefreshToVK_BlendFactor[
			blendState.srcColorBlendFactor
		];
		colorBlendAttachmentStates[i].dstColorBlendFactor = RefreshToVK_BlendFactor[
			blendState.dstColorBlendFactor
		];
		colorBlendAttachmentStates[i].colorBlendOp = RefreshToVK_BlendOp[
			blendState.colorBlendOp
		];
		colorBlendAttachmentStates[i].srcAlphaBlendFactor = RefreshToVK_BlendFactor[
			blendState.srcAlphaBlendFactor
		];
		colorBlendAttachmentStates[i].dstAlphaBlendFactor = RefreshToVK_BlendFactor[
			blendState.dstAlphaBlendFactor
		];
		colorBlendAttachmentStates[i].alphaBlendOp = RefreshToVK_BlendOp[
			blendState.alphaBlendOp
		];
		colorBlendAttachmentStates[i].colorWriteMask =
			blendState.colorWriteMask;
	}
colorBlendStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
colorBlendStateCreateInfo.pNext = NULL;
colorBlendStateCreateInfo.flags = 0;
colorBlendStateCreateInfo.attachmentCount =
pipelineCreateInfo->attachmentInfo.colorAttachmentCount;
colorBlendStateCreateInfo.pAttachments =
colorBlendAttachmentStates;
	colorBlendStateCreateInfo.blendConstants[0] =
		pipelineCreateInfo->blendConstants[0];
	colorBlendStateCreateInfo.blendConstants[1] =
		pipelineCreateInfo->blendConstants[1];
	colorBlendStateCreateInfo.blendConstants[2] =
		pipelineCreateInfo->blendConstants[2];
	colorBlendStateCreateInfo.blendConstants[3] =
		pipelineCreateInfo->blendConstants[3];
/* We don't support LogicOp, so this is easy. */
colorBlendStateCreateInfo.logicOpEnable = VK_FALSE;
colorBlendStateCreateInfo.logicOp = 0;
	/* Pipeline Layout */

	graphicsPipeline->pipelineLayout = VULKAN_INTERNAL_FetchGraphicsPipelineLayout(
		renderer,
		pipelineCreateInfo->vertexShaderInfo.samplerBindingCount,
		pipelineCreateInfo->fragmentShaderInfo.samplerBindingCount
	);

	/* Pipeline */

	vkPipelineCreateInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
	vkPipelineCreateInfo.pNext = NULL;
vkPipelineCreateInfo.flags = 0;
vkPipelineCreateInfo.stageCount = 2;
vkPipelineCreateInfo.pStages = shaderStageCreateInfos;
vkPipelineCreateInfo.pVertexInputState = &vertexInputStateCreateInfo;
vkPipelineCreateInfo.pInputAssemblyState = &inputAssemblyStateCreateInfo;
	vkPipelineCreateInfo.pTessellationState = NULL;
vkPipelineCreateInfo.pViewportState = &viewportStateCreateInfo;
vkPipelineCreateInfo.pRasterizationState = &rasterizationStateCreateInfo;
vkPipelineCreateInfo.pMultisampleState = &multisampleStateCreateInfo;
vkPipelineCreateInfo.pDepthStencilState = &depthStencilStateCreateInfo;
vkPipelineCreateInfo.pColorBlendState = &colorBlendStateCreateInfo;
vkPipelineCreateInfo.pDynamicState = &dynamicStateCreateInfo;
vkPipelineCreateInfo.layout = graphicsPipeline->pipelineLayout->pipelineLayout;
vkPipelineCreateInfo.renderPass = transientRenderPass;
vkPipelineCreateInfo.subpass = 0;
vkPipelineCreateInfo.basePipelineHandle = VK_NULL_HANDLE;
vkPipelineCreateInfo.basePipelineIndex = 0;
/* TODO: enable pipeline caching */
vulkanResult = renderer->vkCreateGraphicsPipelines(
renderer->logicalDevice,
VK_NULL_HANDLE,
1,
&vkPipelineCreateInfo,
NULL,
		&graphicsPipeline->pipeline
	);
SDL_stack_free(vertexInputBindingDescriptions);
SDL_stack_free(vertexInputAttributeDescriptions);
SDL_stack_free(colorBlendAttachmentStates);
renderer->vkDestroyRenderPass(
renderer->logicalDevice,
transientRenderPass,
NULL
);
	if (vulkanResult != VK_SUCCESS)
	{
		SDL_free(graphicsPipeline);
		LogVulkanResultAsError("vkCreateGraphicsPipelines", vulkanResult);
		Refresh_LogError("Failed to create graphics pipeline!");
		return NULL;
	}

	SDL_AtomicSet(&graphicsPipeline->referenceCount, 0);

	return (Refresh_GraphicsPipeline*) graphicsPipeline;
}
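
/* Compute pipeline layouts are cached: the buffer/image/uniform descriptor
 * set layout triple acts as the hash key, and an existing
 * VulkanComputePipelineLayout is reused whenever it matches. A fresh layout
 * also gets one descriptor set cache per resource type, unless the
 * corresponding binding count is zero.
 */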
static VulkanComputePipelineLayout* VULKAN_INTERNAL_FetchComputePipelineLayout(
VulkanRenderer *renderer,
uint32_t bufferBindingCount,
uint32_t imageBindingCount
) {
	VkResult vulkanResult;
	VkDescriptorSetLayout setLayouts[3];

	VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo;
	ComputePipelineLayoutHash pipelineLayoutHash;
	VulkanComputePipelineLayout *vulkanComputePipelineLayout;

	pipelineLayoutHash.bufferLayout = VULKAN_INTERNAL_FetchDescriptorSetLayout(
		renderer,
		VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
		bufferBindingCount,
		VK_SHADER_STAGE_COMPUTE_BIT
	);

	pipelineLayoutHash.imageLayout = VULKAN_INTERNAL_FetchDescriptorSetLayout(
		renderer,
		VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
		imageBindingCount,
		VK_SHADER_STAGE_COMPUTE_BIT
	);

	pipelineLayoutHash.uniformLayout = renderer->computeUniformDescriptorSetLayout;
vulkanComputePipelineLayout = ComputePipelineLayoutHashArray_Fetch(
&renderer->computePipelineLayoutHashTable,
pipelineLayoutHash
);
if (vulkanComputePipelineLayout != NULL)
{
return vulkanComputePipelineLayout;
}
vulkanComputePipelineLayout = SDL_malloc(sizeof(VulkanComputePipelineLayout));
	setLayouts[0] = pipelineLayoutHash.bufferLayout;
	setLayouts[1] = pipelineLayoutHash.imageLayout;
	setLayouts[2] = pipelineLayoutHash.uniformLayout;
pipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
pipelineLayoutCreateInfo.pNext = NULL;
pipelineLayoutCreateInfo.flags = 0;
	pipelineLayoutCreateInfo.setLayoutCount = 3;
pipelineLayoutCreateInfo.pSetLayouts = setLayouts;
pipelineLayoutCreateInfo.pushConstantRangeCount = 0;
pipelineLayoutCreateInfo.pPushConstantRanges = NULL;
vulkanResult = renderer->vkCreatePipelineLayout(
renderer->logicalDevice,
&pipelineLayoutCreateInfo,
NULL,
&vulkanComputePipelineLayout->pipelineLayout
);
if (vulkanResult != VK_SUCCESS)
	{
		SDL_free(vulkanComputePipelineLayout);
		LogVulkanResultAsError("vkCreatePipelineLayout", vulkanResult);
		return NULL;
	}
ComputePipelineLayoutHashArray_Insert(
&renderer->computePipelineLayoutHashTable,
pipelineLayoutHash,
vulkanComputePipelineLayout
);
	/* If the binding count is 0,
	 * we can just bind the same descriptor set every time,
	 * so no cache is needed.
	 */
if (bufferBindingCount == 0)
{
vulkanComputePipelineLayout->bufferDescriptorSetCache = NULL;
}
else
{
		vulkanComputePipelineLayout->bufferDescriptorSetCache =
			VULKAN_INTERNAL_CreateDescriptorSetCache(
				renderer,
				VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
				pipelineLayoutHash.bufferLayout,
				bufferBindingCount
			);
}
if (imageBindingCount == 0)
{
vulkanComputePipelineLayout->imageDescriptorSetCache = NULL;
}
else
{
		vulkanComputePipelineLayout->imageDescriptorSetCache =
			VULKAN_INTERNAL_CreateDescriptorSetCache(
				renderer,
				VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
				pipelineLayoutHash.imageLayout,
				imageBindingCount
			);
}
return vulkanComputePipelineLayout;
}
static Refresh_ComputePipeline* VULKAN_CreateComputePipeline(
Refresh_Renderer *driverData,
Refresh_ComputeShaderInfo *computeShaderInfo
) {
	VkResult vulkanResult;
	VkComputePipelineCreateInfo computePipelineCreateInfo;
	VkPipelineShaderStageCreateInfo pipelineShaderStageCreateInfo;

VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanComputePipeline *vulkanComputePipeline = SDL_malloc(sizeof(VulkanComputePipeline));
vulkanComputePipeline->computeShaderModule = (VulkanShaderModule*) computeShaderInfo->shaderModule;
SDL_AtomicIncRef(&vulkanComputePipeline->computeShaderModule->referenceCount);
pipelineShaderStageCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
pipelineShaderStageCreateInfo.pNext = NULL;
pipelineShaderStageCreateInfo.flags = 0;
pipelineShaderStageCreateInfo.stage = VK_SHADER_STAGE_COMPUTE_BIT;
pipelineShaderStageCreateInfo.module = vulkanComputePipeline->computeShaderModule->shaderModule;
pipelineShaderStageCreateInfo.pName = computeShaderInfo->entryPointName;
pipelineShaderStageCreateInfo.pSpecializationInfo = NULL;
vulkanComputePipeline->pipelineLayout = VULKAN_INTERNAL_FetchComputePipelineLayout(
renderer,
computeShaderInfo->bufferBindingCount,
computeShaderInfo->imageBindingCount
);
computePipelineCreateInfo.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
computePipelineCreateInfo.pNext = NULL;
computePipelineCreateInfo.flags = 0;
computePipelineCreateInfo.stage = pipelineShaderStageCreateInfo;
computePipelineCreateInfo.layout =
vulkanComputePipeline->pipelineLayout->pipelineLayout;
	computePipelineCreateInfo.basePipelineHandle = VK_NULL_HANDLE;
	computePipelineCreateInfo.basePipelineIndex = 0;

	vulkanResult = renderer->vkCreateComputePipelines(
		renderer->logicalDevice,
		VK_NULL_HANDLE,
		1,
		&computePipelineCreateInfo,
		NULL,
		&vulkanComputePipeline->pipeline
	);
	if (vulkanResult != VK_SUCCESS)
	{
		SDL_free(vulkanComputePipeline);
		LogVulkanResultAsError("vkCreateComputePipelines", vulkanResult);
		return NULL;
	}
	vulkanComputePipeline->uniformBlockSize =
		VULKAN_INTERNAL_NextHighestAlignment32(
			computeShaderInfo->uniformBufferSize,
			renderer->minUBOAlignment
		);
SDL_AtomicSet(&vulkanComputePipeline->referenceCount, 0);
	return (Refresh_ComputePipeline*) vulkanComputePipeline;
}
static Refresh_Sampler* VULKAN_CreateSampler(
Refresh_Renderer *driverData,
Refresh_SamplerStateCreateInfo *samplerStateCreateInfo
) {
VulkanRenderer* renderer = (VulkanRenderer*)driverData;
VulkanSampler *vulkanSampler = SDL_malloc(sizeof(VulkanSampler));
VkResult vulkanResult;
VkSamplerCreateInfo vkSamplerCreateInfo;
vkSamplerCreateInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
vkSamplerCreateInfo.pNext = NULL;
vkSamplerCreateInfo.flags = 0;
	vkSamplerCreateInfo.magFilter = RefreshToVK_Filter[
		samplerStateCreateInfo->magFilter
	];
	vkSamplerCreateInfo.minFilter = RefreshToVK_Filter[
		samplerStateCreateInfo->minFilter
	];
vkSamplerCreateInfo.mipmapMode = RefreshToVK_SamplerMipmapMode[
samplerStateCreateInfo->mipmapMode
];
vkSamplerCreateInfo.addressModeU = RefreshToVK_SamplerAddressMode[
samplerStateCreateInfo->addressModeU
];
vkSamplerCreateInfo.addressModeV = RefreshToVK_SamplerAddressMode[
samplerStateCreateInfo->addressModeV
];
vkSamplerCreateInfo.addressModeW = RefreshToVK_SamplerAddressMode[
samplerStateCreateInfo->addressModeW
];
vkSamplerCreateInfo.mipLodBias = samplerStateCreateInfo->mipLodBias;
vkSamplerCreateInfo.anisotropyEnable = samplerStateCreateInfo->anisotropyEnable;
vkSamplerCreateInfo.maxAnisotropy = samplerStateCreateInfo->maxAnisotropy;
vkSamplerCreateInfo.compareEnable = samplerStateCreateInfo->compareEnable;
vkSamplerCreateInfo.compareOp = RefreshToVK_CompareOp[
samplerStateCreateInfo->compareOp
];
vkSamplerCreateInfo.minLod = samplerStateCreateInfo->minLod;
vkSamplerCreateInfo.maxLod = samplerStateCreateInfo->maxLod;
vkSamplerCreateInfo.borderColor = RefreshToVK_BorderColor[
samplerStateCreateInfo->borderColor
];
vkSamplerCreateInfo.unnormalizedCoordinates = VK_FALSE;
vulkanResult = renderer->vkCreateSampler(
renderer->logicalDevice,
&vkSamplerCreateInfo,
NULL,
&vulkanSampler->sampler
2020-12-18 01:48:26 +00:00
);
if (vulkanResult != VK_SUCCESS)
{
SDL_free(vulkanSampler);
2021-01-27 20:51:36 +00:00
LogVulkanResultAsError("vkCreateSampler", vulkanResult);
2020-12-18 01:48:26 +00:00
return NULL;
}
SDL_AtomicSet(&vulkanSampler->referenceCount, 0);
return (Refresh_Sampler*) vulkanSampler;
}
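
/* NOTE: codeSize is in bytes, but Vulkan requires it to be a multiple of 4;
 * the cast below also assumes the caller's byteCode blob satisfies the
 * uint32_t alignment that vkCreateShaderModule expects for SPIR-V.
 */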
static Refresh_ShaderModule* VULKAN_CreateShaderModule(
Refresh_Renderer *driverData,
Refresh_ShaderModuleCreateInfo *shaderModuleCreateInfo
) {
VulkanShaderModule *vulkanShaderModule = SDL_malloc(sizeof(VulkanShaderModule));
VkResult vulkanResult;
VkShaderModuleCreateInfo vkShaderModuleCreateInfo;
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
vkShaderModuleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
vkShaderModuleCreateInfo.pNext = NULL;
vkShaderModuleCreateInfo.flags = 0;
vkShaderModuleCreateInfo.codeSize = shaderModuleCreateInfo->codeSize;
vkShaderModuleCreateInfo.pCode = (uint32_t*) shaderModuleCreateInfo->byteCode;
vulkanResult = renderer->vkCreateShaderModule(
renderer->logicalDevice,
&vkShaderModuleCreateInfo,
NULL,
&vulkanShaderModule->shaderModule
);
if (vulkanResult != VK_SUCCESS)
{
SDL_free(vulkanShaderModule);
LogVulkanResultAsError("vkCreateShaderModule", vulkanResult);
Refresh_LogError("Failed to create shader module!");
return NULL;
}
SDL_AtomicSet(&vulkanShaderModule->referenceCount, 0);
return (Refresh_ShaderModule*) vulkanShaderModule;
}
static Refresh_Texture* VULKAN_CreateTexture(
Refresh_Renderer *driverData,
Refresh_TextureCreateInfo *textureCreateInfo
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VkImageUsageFlags imageUsageFlags = (
VK_IMAGE_USAGE_TRANSFER_DST_BIT |
VK_IMAGE_USAGE_TRANSFER_SRC_BIT
);
VkImageAspectFlags imageAspectFlags;
uint8_t isDepthFormat = IsRefreshDepthFormat(textureCreateInfo->format);
VkFormat format;
VulkanTextureContainer *container;
VulkanTexture *vulkanTexture;
if (isDepthFormat)
{
format = RefreshToVK_DepthFormat(renderer, textureCreateInfo->format);
}
else
{
format = RefreshToVK_SurfaceFormat[textureCreateInfo->format];
}
if (textureCreateInfo->usageFlags & REFRESH_TEXTUREUSAGE_SAMPLER_BIT)
{
imageUsageFlags |= VK_IMAGE_USAGE_SAMPLED_BIT;
}
if (textureCreateInfo->usageFlags & REFRESH_TEXTUREUSAGE_COLOR_TARGET_BIT)
{
imageUsageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
}
if (textureCreateInfo->usageFlags & REFRESH_TEXTUREUSAGE_DEPTH_STENCIL_TARGET_BIT)
{
imageUsageFlags |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
}
if (textureCreateInfo->usageFlags & REFRESH_TEXTUREUSAGE_COMPUTE_BIT)
{
imageUsageFlags |= VK_IMAGE_USAGE_STORAGE_BIT;
}
if (isDepthFormat)
{
imageAspectFlags = VK_IMAGE_ASPECT_DEPTH_BIT;
if (IsStencilFormat(format))
{
imageAspectFlags |= VK_IMAGE_ASPECT_STENCIL_BIT;
}
}
else
{
imageAspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
}
vulkanTexture = VULKAN_INTERNAL_CreateTexture(
renderer,
textureCreateInfo->width,
textureCreateInfo->height,
textureCreateInfo->depth,
textureCreateInfo->isCube,
textureCreateInfo->levelCount,
isDepthFormat ?
textureCreateInfo->sampleCount : /* depth textures do not have a separate msaaTex */
REFRESH_SAMPLECOUNT_1,
format,
imageAspectFlags,
imageUsageFlags
);
/* create the MSAA texture for color attachments, if needed */
if ( vulkanTexture != NULL &&
!isDepthFormat &&
textureCreateInfo->sampleCount > REFRESH_SAMPLECOUNT_1 )
{
vulkanTexture->msaaTex = VULKAN_INTERNAL_CreateTexture(
renderer,
textureCreateInfo->width,
textureCreateInfo->height,
textureCreateInfo->depth,
textureCreateInfo->isCube,
textureCreateInfo->levelCount,
textureCreateInfo->sampleCount,
format,
imageAspectFlags,
imageUsageFlags
);
}
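
	/* The container's main texture doubles as the resolve target: when a
	 * render pass ends, msaaTex is resolved into it, which is why msaaTex
	 * shares the main texture's format and usage flags.
	 */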
	if (vulkanTexture == NULL)
	{
		Refresh_LogError("Failed to create texture!");
		return NULL;
	}

	container = SDL_malloc(sizeof(VulkanTextureContainer));
	container->vulkanTexture = vulkanTexture;
	vulkanTexture->container = container;

	return (Refresh_Texture*) container;
}
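
/* Every GPU buffer is created with TRANSFER_SRC and TRANSFER_DST so its
 * contents can always be uploaded and downloaded; the usage flags below only
 * add bind points. When multiple usage bits are set, the last matching branch
 * wins for the initial resourceAccessType.
 */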
static Refresh_GpuBuffer* VULKAN_CreateGpuBuffer(
Refresh_Renderer *driverData,
Refresh_BufferUsageFlags usageFlags,
uint32_t sizeInBytes
) {
VulkanResourceAccessType resourceAccessType;
VkBufferUsageFlags vulkanUsageFlags =
VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
if (usageFlags == 0)
{
resourceAccessType = RESOURCE_ACCESS_TRANSFER_READ_WRITE;
}
	if (usageFlags & REFRESH_BUFFERUSAGE_VERTEX_BIT)
	{
		vulkanUsageFlags |= VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
		resourceAccessType = RESOURCE_ACCESS_VERTEX_BUFFER;
	}

	if (usageFlags & REFRESH_BUFFERUSAGE_INDEX_BIT)
	{
		vulkanUsageFlags |= VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
		resourceAccessType = RESOURCE_ACCESS_INDEX_BUFFER;
	}

	if (usageFlags & REFRESH_BUFFERUSAGE_COMPUTE_BIT)
	{
		vulkanUsageFlags |= VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
		resourceAccessType = RESOURCE_ACCESS_COMPUTE_SHADER_BUFFER_READ_WRITE;
	}

	if (usageFlags & REFRESH_BUFFERUSAGE_INDIRECT_BIT)
	{
		vulkanUsageFlags |= VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
		resourceAccessType = RESOURCE_ACCESS_INDIRECT_BUFFER;
	}

	return (Refresh_GpuBuffer*) VULKAN_INTERNAL_CreateBufferContainer(
		(VulkanRenderer*) driverData,
		sizeInBytes,
		resourceAccessType,
		vulkanUsageFlags
	);
}
static Refresh_CpuBuffer* VULKAN_CreateCpuBuffer(
Refresh_Renderer *driverData,
uint32_t sizeInBytes
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VkBufferUsageFlags vulkanUsageFlags =
VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
	VulkanBuffer *vulkanBuffer = VULKAN_INTERNAL_CreateBuffer(
		renderer,
		sizeInBytes,
		RESOURCE_ACCESS_NONE,
		vulkanUsageFlags,
		1,
		1,
		0,
		1
	);

return (Refresh_CpuBuffer*) vulkanBuffer;
}
/* Setters */
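
/* Uniform data is suballocated from a single large mapped buffer per shader
 * stage (UBO_BUFFER_SIZE bytes). Each push records currentOffset on the
 * command buffer, copies the data there, and then advances currentOffset by
 * the pipeline's aligned uniform block size, wrapping back to 0 whenever the
 * next UBO_SECTION_SIZE-padded write would overrun the buffer. The recorded
 * offset is presumably consumed later as a dynamic uniform buffer offset when
 * the uniform descriptor set is bound.
 */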
static void VULKAN_INTERNAL_SetUniformBufferData(
VulkanUniformBufferObject *uniformBufferObject,
void* data,
uint32_t dataLength
) {
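	/* NOTE: mapPointer refers to the start of the buffer's underlying memory
	 * allocation (presumably shared with other resources), so the buffer's
	 * resourceOffset is added on top of the ring-buffer offset to locate the
	 * write destination.
	 */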
uint8_t *dst =
uniformBufferObject->mapPointer +
uniformBufferObject->buffer->usedRegion->resourceOffset +
uniformBufferObject->currentOffset;
SDL_memcpy(
dst,
data,
dataLength
);
}
static void VULKAN_PushVertexShaderUniforms(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	void *data,
	uint32_t dataLengthInBytes
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer* vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanGraphicsPipeline* graphicsPipeline = vulkanCommandBuffer->currentGraphicsPipeline;
SDL_LockMutex(renderer->vertexUniformBufferObject->lock);
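	/* The UBO is a ring buffer: wrap back to the start if this push would
	 * run past the end (one UBO_SECTION_SIZE of slack is kept in reserve). */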
if (renderer->vertexUniformBufferObject->currentOffset + graphicsPipeline->vertexUniformBlockSize + UBO_SECTION_SIZE >= UBO_BUFFER_SIZE)
{
renderer->vertexUniformBufferObject->currentOffset = 0;
}
vulkanCommandBuffer->vertexUniformOffset = renderer->vertexUniformBufferObject->currentOffset;
VULKAN_INTERNAL_SetUniformBufferData(
renderer->vertexUniformBufferObject,
data,
dataLengthInBytes
);
renderer->vertexUniformBufferObject->currentOffset += graphicsPipeline->vertexUniformBlockSize;
SDL_UnlockMutex(renderer->vertexUniformBufferObject->lock);
}
static void VULKAN_PushFragmentShaderUniforms(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
void *data,
uint32_t dataLengthInBytes
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer* vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanGraphicsPipeline* graphicsPipeline = vulkanCommandBuffer->currentGraphicsPipeline;
SDL_LockMutex(renderer->fragmentUniformBufferObject->lock);
if (renderer->fragmentUniformBufferObject->currentOffset + graphicsPipeline->fragmentUniformBlockSize + UBO_SECTION_SIZE >= UBO_BUFFER_SIZE)
{
renderer->fragmentUniformBufferObject->currentOffset = 0;
}
vulkanCommandBuffer->fragmentUniformOffset = renderer->fragmentUniformBufferObject->currentOffset;
VULKAN_INTERNAL_SetUniformBufferData(
renderer->fragmentUniformBufferObject,
data,
dataLengthInBytes
);
renderer->fragmentUniformBufferObject->currentOffset += graphicsPipeline->fragmentUniformBlockSize;
SDL_UnlockMutex(renderer->fragmentUniformBufferObject->lock);
}
static void VULKAN_PushComputeShaderUniforms(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
void *data,
uint32_t dataLengthInBytes
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer* vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanComputePipeline* computePipeline = vulkanCommandBuffer->currentComputePipeline;
SDL_LockMutex(renderer->computeUniformBufferObject->lock);
if (renderer->computeUniformBufferObject->currentOffset + computePipeline->uniformBlockSize + UBO_SECTION_SIZE >= UBO_BUFFER_SIZE)
{
renderer->computeUniformBufferObject->currentOffset = 0;
}
vulkanCommandBuffer->computeUniformOffset = renderer->computeUniformBufferObject->currentOffset;
VULKAN_INTERNAL_SetUniformBufferData(
renderer->computeUniformBufferObject,
data,
dataLengthInBytes
);
renderer->computeUniformBufferObject->currentOffset += computePipeline->uniformBlockSize;
SDL_UnlockMutex(renderer->computeUniformBufferObject->lock);
}
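/* Illustrative client-side flow (a sketch only; the Refresh_* entry point
 * names are assumed to match the public API that dispatches into these
 * driver functions):
 *
 *   Refresh_BindGraphicsPipeline(device, cmdbuf, pipeline);
 *   Refresh_PushVertexShaderUniforms(device, cmdbuf, &viewProjection, sizeof(viewProjection));
 *   Refresh_PushFragmentShaderUniforms(device, cmdbuf, &material, sizeof(material));
 *   ...bind buffers and draw...
 */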
/* If fetching an image descriptor, descriptorImageInfos must not be NULL.
* If fetching a buffer descriptor, descriptorBufferInfos must not be NULL.
*/
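/* Sets are recycled through per-cache descriptor pools: when no inactive set
 * remains, a new pool is allocated at double the previous size, so pool
 * creation cost is amortized across many fetches.
 */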
static VkDescriptorSet VULKAN_INTERNAL_FetchDescriptorSet(
VulkanRenderer *renderer,
VulkanCommandBuffer *vulkanCommandBuffer,
DescriptorSetCache *descriptorSetCache,
VkDescriptorImageInfo *descriptorImageInfos, /* Can be NULL */
VkDescriptorBufferInfo *descriptorBufferInfos /* Can be NULL */
) {
uint32_t i;
VkDescriptorSet descriptorSet;
VkWriteDescriptorSet writeDescriptorSets[MAX_TEXTURE_SAMPLERS];
uint8_t isImage;
if (descriptorImageInfos == NULL && descriptorBufferInfos == NULL)
{
Refresh_LogError("descriptorImageInfos and descriptorBufferInfos cannot both be NULL!");
return VK_NULL_HANDLE;
}
else if (descriptorImageInfos != NULL && descriptorBufferInfos != NULL)
{
Refresh_LogError("descriptorImageInfos and descriptorBufferInfos cannot both be set!");
return VK_NULL_HANDLE;
}
isImage = descriptorImageInfos != NULL;
SDL_LockMutex(descriptorSetCache->lock);
/* If no inactive descriptor sets remain, create a new pool and allocate new inactive sets */
if (descriptorSetCache->inactiveDescriptorSetCount == 0)
{
descriptorSetCache->descriptorPoolCount += 1;
descriptorSetCache->descriptorPools = SDL_realloc(
descriptorSetCache->descriptorPools,
sizeof(VkDescriptorPool) * descriptorSetCache->descriptorPoolCount
);
if (!VULKAN_INTERNAL_CreateDescriptorPool(
renderer,
descriptorSetCache->descriptorType,
descriptorSetCache->nextPoolSize,
descriptorSetCache->nextPoolSize * descriptorSetCache->bindingCount,
&descriptorSetCache->descriptorPools[descriptorSetCache->descriptorPoolCount - 1]
)) {
SDL_UnlockMutex(descriptorSetCache->lock);
Refresh_LogError("Failed to create descriptor pool!");
return VK_NULL_HANDLE;
}
descriptorSetCache->inactiveDescriptorSetCapacity += descriptorSetCache->nextPoolSize;
descriptorSetCache->inactiveDescriptorSets = SDL_realloc(
descriptorSetCache->inactiveDescriptorSets,
sizeof(VkDescriptorSet) * descriptorSetCache->inactiveDescriptorSetCapacity
);
if (!VULKAN_INTERNAL_AllocateDescriptorSets(
renderer,
descriptorSetCache->descriptorPools[descriptorSetCache->descriptorPoolCount - 1],
descriptorSetCache->descriptorSetLayout,
descriptorSetCache->nextPoolSize,
descriptorSetCache->inactiveDescriptorSets
)) {
SDL_UnlockMutex(descriptorSetCache->lock);
Refresh_LogError("Failed to allocate descriptor sets!");
return VK_NULL_HANDLE;
}
descriptorSetCache->inactiveDescriptorSetCount = descriptorSetCache->nextPoolSize;
descriptorSetCache->nextPoolSize *= 2;
}
descriptorSet = descriptorSetCache->inactiveDescriptorSets[descriptorSetCache->inactiveDescriptorSetCount - 1];
descriptorSetCache->inactiveDescriptorSetCount -= 1;
for (i = 0; i < descriptorSetCache->bindingCount; i += 1)
{
writeDescriptorSets[i].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
writeDescriptorSets[i].pNext = NULL;
writeDescriptorSets[i].descriptorCount = 1;
writeDescriptorSets[i].descriptorType = descriptorSetCache->descriptorType;
writeDescriptorSets[i].dstArrayElement = 0;
writeDescriptorSets[i].dstBinding = i;
writeDescriptorSets[i].dstSet = descriptorSet;
writeDescriptorSets[i].pTexelBufferView = NULL;
if (isImage)
{
writeDescriptorSets[i].pImageInfo = &descriptorImageInfos[i];
writeDescriptorSets[i].pBufferInfo = NULL;
}
else
{
writeDescriptorSets[i].pBufferInfo = &descriptorBufferInfos[i];
writeDescriptorSets[i].pImageInfo = NULL;
}
}
renderer->vkUpdateDescriptorSets(
renderer->logicalDevice,
descriptorSetCache->bindingCount,
writeDescriptorSets,
0,
NULL
);
SDL_UnlockMutex(descriptorSetCache->lock);
if (vulkanCommandBuffer->boundDescriptorSetDataCount == vulkanCommandBuffer->boundDescriptorSetDataCapacity)
{
vulkanCommandBuffer->boundDescriptorSetDataCapacity *= 2;
vulkanCommandBuffer->boundDescriptorSetDatas = SDL_realloc(
vulkanCommandBuffer->boundDescriptorSetDatas,
vulkanCommandBuffer->boundDescriptorSetDataCapacity * sizeof(DescriptorSetData)
);
}
vulkanCommandBuffer->boundDescriptorSetDatas[vulkanCommandBuffer->boundDescriptorSetDataCount].descriptorSet = descriptorSet;
vulkanCommandBuffer->boundDescriptorSetDatas[vulkanCommandBuffer->boundDescriptorSetDataCount].descriptorSetCache = descriptorSetCache;
vulkanCommandBuffer->boundDescriptorSetDataCount += 1;
return descriptorSet;
}
static void VULKAN_BindVertexSamplers(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_Texture **pTextures,
Refresh_Sampler **pSamplers
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanGraphicsPipeline *graphicsPipeline = vulkanCommandBuffer->currentGraphicsPipeline;
VulkanTexture *currentTexture;
VulkanSampler *currentSampler;
uint32_t i, samplerCount;
VkDescriptorImageInfo descriptorImageInfos[MAX_TEXTURE_SAMPLERS];
if (graphicsPipeline->pipelineLayout->vertexSamplerDescriptorSetCache == NULL)
{
return;
}
samplerCount = graphicsPipeline->pipelineLayout->vertexSamplerDescriptorSetCache->bindingCount;
for (i = 0; i < samplerCount; i += 1)
{
currentTexture = ((VulkanTextureContainer*) pTextures[i])->vulkanTexture;
currentSampler = (VulkanSampler*) pSamplers[i];
descriptorImageInfos[i].imageView = currentTexture->view;
descriptorImageInfos[i].sampler = currentSampler->sampler;
descriptorImageInfos[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, currentTexture);
VULKAN_INTERNAL_TrackSampler(renderer, vulkanCommandBuffer, currentSampler);
}
vulkanCommandBuffer->vertexSamplerDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
renderer,
vulkanCommandBuffer,
graphicsPipeline->pipelineLayout->vertexSamplerDescriptorSetCache,
descriptorImageInfos,
NULL
);
}
static void VULKAN_BindFragmentSamplers(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_Texture **pTextures,
Refresh_Sampler **pSamplers
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanGraphicsPipeline *graphicsPipeline = vulkanCommandBuffer->currentGraphicsPipeline;
VulkanTexture *currentTexture;
VulkanSampler *currentSampler;
uint32_t i, samplerCount;
VkDescriptorImageInfo descriptorImageInfos[MAX_TEXTURE_SAMPLERS];
if (graphicsPipeline->pipelineLayout->fragmentSamplerDescriptorSetCache == NULL)
{
return;
}
samplerCount = graphicsPipeline->pipelineLayout->fragmentSamplerDescriptorSetCache->bindingCount;
for (i = 0; i < samplerCount; i += 1)
{
currentTexture = ((VulkanTextureContainer*) pTextures[i])->vulkanTexture;
currentSampler = (VulkanSampler*) pSamplers[i];
descriptorImageInfos[i].imageView = currentTexture->view;
descriptorImageInfos[i].sampler = currentSampler->sampler;
descriptorImageInfos[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, currentTexture);
VULKAN_INTERNAL_TrackSampler(renderer, vulkanCommandBuffer, currentSampler);
}
vulkanCommandBuffer->fragmentSamplerDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
renderer,
vulkanCommandBuffer,
graphicsPipeline->pipelineLayout->fragmentSamplerDescriptorSetCache,
descriptorImageInfos,
NULL
);
}
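/* Destruction is deferred: resources are queued under disposeLock and
 * released later, once the command buffers still referencing them have
 * finished executing on the GPU. */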
static void VULKAN_INTERNAL_QueueDestroyTexture(
VulkanRenderer *renderer,
VulkanTexture *vulkanTexture
) {
SDL_LockMutex(renderer->disposeLock);
EXPAND_ARRAY_IF_NEEDED(
renderer->texturesToDestroy,
VulkanTexture*,
renderer->texturesToDestroyCount + 1,
renderer->texturesToDestroyCapacity,
renderer->texturesToDestroyCapacity * 2
)
renderer->texturesToDestroy[
renderer->texturesToDestroyCount
] = vulkanTexture;
renderer->texturesToDestroyCount += 1;
SDL_UnlockMutex(renderer->disposeLock);
}
static void VULKAN_QueueDestroyTexture(
Refresh_Renderer *driverData,
Refresh_Texture *texture
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanTextureContainer *vulkanTextureContainer = (VulkanTextureContainer *)texture;
VulkanTexture *vulkanTexture = vulkanTextureContainer->vulkanTexture;
SDL_LockMutex(renderer->disposeLock);
VULKAN_INTERNAL_QueueDestroyTexture(renderer, vulkanTexture);
/* Containers are just client handles, so we can destroy immediately */
SDL_free(vulkanTextureContainer);
SDL_UnlockMutex(renderer->disposeLock);
}
static void VULKAN_QueueDestroySampler(
Refresh_Renderer *driverData,
Refresh_Sampler *sampler
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanSampler* vulkanSampler = (VulkanSampler*) sampler;
SDL_LockMutex(renderer->disposeLock);
EXPAND_ARRAY_IF_NEEDED(
renderer->samplersToDestroy,
VulkanSampler*,
renderer->samplersToDestroyCount + 1,
renderer->samplersToDestroyCapacity,
renderer->samplersToDestroyCapacity * 2
)
renderer->samplersToDestroy[renderer->samplersToDestroyCount] = vulkanSampler;
renderer->samplersToDestroyCount += 1;
SDL_UnlockMutex(renderer->disposeLock);
}
static void VULKAN_INTERNAL_QueueDestroyBuffer(
VulkanRenderer *renderer,
VulkanBuffer *vulkanBuffer
) {
SDL_LockMutex(renderer->disposeLock);
EXPAND_ARRAY_IF_NEEDED(
renderer->buffersToDestroy,
VulkanBuffer*,
renderer->buffersToDestroyCount + 1,
renderer->buffersToDestroyCapacity,
renderer->buffersToDestroyCapacity * 2
)
renderer->buffersToDestroy[
renderer->buffersToDestroyCount
] = vulkanBuffer;
renderer->buffersToDestroyCount += 1;
SDL_UnlockMutex(renderer->disposeLock);
}
static void VULKAN_QueueDestroyGpuBuffer(
Refresh_Renderer *driverData,
Refresh_GpuBuffer *buffer
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanBufferContainer *vulkanBufferContainer = (VulkanBufferContainer*) buffer;
VulkanBuffer *vulkanBuffer = vulkanBufferContainer->vulkanBuffer;
SDL_LockMutex(renderer->disposeLock);
VULKAN_INTERNAL_QueueDestroyBuffer(renderer, vulkanBuffer);
/* Containers are just client handles, so we can destroy immediately */
SDL_free(vulkanBufferContainer);
SDL_UnlockMutex(renderer->disposeLock);
}
static void VULKAN_QueueDestroyCpuBuffer(
Refresh_Renderer *driverData,
Refresh_CpuBuffer *buffer
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanBuffer *vulkanBuffer = (VulkanBuffer*) buffer;
SDL_LockMutex(renderer->disposeLock);
VULKAN_INTERNAL_QueueDestroyBuffer(renderer, vulkanBuffer);
SDL_UnlockMutex(renderer->disposeLock);
}
static void VULKAN_QueueDestroyShaderModule(
Refresh_Renderer *driverData,
Refresh_ShaderModule *shaderModule
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanShaderModule *vulkanShaderModule = (VulkanShaderModule*) shaderModule;
SDL_LockMutex(renderer->disposeLock);
EXPAND_ARRAY_IF_NEEDED(
renderer->shaderModulesToDestroy,
VulkanShaderModule*,
renderer->shaderModulesToDestroyCount + 1,
renderer->shaderModulesToDestroyCapacity,
renderer->shaderModulesToDestroyCapacity * 2
)
renderer->shaderModulesToDestroy[renderer->shaderModulesToDestroyCount] = vulkanShaderModule;
renderer->shaderModulesToDestroyCount += 1;
SDL_UnlockMutex(renderer->disposeLock);
}
static void VULKAN_QueueDestroyComputePipeline(
Refresh_Renderer *driverData,
Refresh_ComputePipeline *computePipeline
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanComputePipeline *vulkanComputePipeline = (VulkanComputePipeline*) computePipeline;
SDL_LockMutex(renderer->disposeLock);
EXPAND_ARRAY_IF_NEEDED(
renderer->computePipelinesToDestroy,
VulkanComputePipeline*,
renderer->computePipelinesToDestroyCount + 1,
renderer->computePipelinesToDestroyCapacity,
renderer->computePipelinesToDestroyCapacity * 2
)
renderer->computePipelinesToDestroy[renderer->computePipelinesToDestroyCount] = vulkanComputePipeline;
renderer->computePipelinesToDestroyCount += 1;
SDL_UnlockMutex(renderer->disposeLock);
}
static void VULKAN_QueueDestroyGraphicsPipeline(
Refresh_Renderer *driverData,
Refresh_GraphicsPipeline *graphicsPipeline
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanGraphicsPipeline *vulkanGraphicsPipeline = (VulkanGraphicsPipeline*) graphicsPipeline;
SDL_LockMutex(renderer->disposeLock);
EXPAND_ARRAY_IF_NEEDED(
renderer->graphicsPipelinesToDestroy,
VulkanGraphicsPipeline*,
renderer->graphicsPipelinesToDestroyCount + 1,
renderer->graphicsPipelinesToDestroyCapacity,
renderer->graphicsPipelinesToDestroyCapacity * 2
)
renderer->graphicsPipelinesToDestroy[renderer->graphicsPipelinesToDestroyCount] = vulkanGraphicsPipeline;
renderer->graphicsPipelinesToDestroyCount += 1;
SDL_UnlockMutex(renderer->disposeLock);
}
/* Command Buffer render state */
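/* Render passes and framebuffers are cached: a hash of the attachment
 * formats, load/store ops, and views is looked up first, and a new Vulkan
 * object is created and inserted only on a cache miss. */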
static VkRenderPass VULKAN_INTERNAL_FetchRenderPass(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
Refresh_ColorAttachmentInfo *colorAttachmentInfos,
uint32_t colorAttachmentCount,
Refresh_DepthStencilAttachmentInfo *depthStencilAttachmentInfo
) {
VkRenderPass renderPass;
RenderPassHash hash;
uint32_t i;
VulkanTexture *texture;
SDL_LockMutex(renderer->renderPassFetchLock);
for (i = 0; i < colorAttachmentCount; i += 1)
{
hash.colorTargetDescriptions[i].format = ((VulkanTextureContainer*) colorAttachmentInfos[i].texture)->vulkanTexture->format;
hash.colorTargetDescriptions[i].clearColor = colorAttachmentInfos[i].clearColor;
hash.colorTargetDescriptions[i].loadOp = colorAttachmentInfos[i].loadOp;
hash.colorTargetDescriptions[i].storeOp = colorAttachmentInfos[i].storeOp;
}
hash.colorAttachmentSampleCount = REFRESH_SAMPLECOUNT_1;
if (colorAttachmentCount > 0)
{
texture = ((VulkanTextureContainer*) colorAttachmentInfos[0].texture)->vulkanTexture;
if (texture->msaaTex != NULL)
{
hash.colorAttachmentSampleCount = texture->msaaTex->sampleCount;
}
}
hash.colorAttachmentCount = colorAttachmentCount;
if (depthStencilAttachmentInfo == NULL)
{
hash.depthStencilTargetDescription.format = 0;
hash.depthStencilTargetDescription.loadOp = REFRESH_LOADOP_DONT_CARE;
hash.depthStencilTargetDescription.storeOp = REFRESH_STOREOP_DONT_CARE;
hash.depthStencilTargetDescription.stencilLoadOp = REFRESH_LOADOP_DONT_CARE;
hash.depthStencilTargetDescription.stencilStoreOp = REFRESH_STOREOP_DONT_CARE;
}
else
{
hash.depthStencilTargetDescription.format = ((VulkanTextureContainer*) depthStencilAttachmentInfo->texture)->vulkanTexture->format;
hash.depthStencilTargetDescription.loadOp = depthStencilAttachmentInfo->loadOp;
hash.depthStencilTargetDescription.storeOp = depthStencilAttachmentInfo->storeOp;
hash.depthStencilTargetDescription.stencilLoadOp = depthStencilAttachmentInfo->stencilLoadOp;
hash.depthStencilTargetDescription.stencilStoreOp = depthStencilAttachmentInfo->stencilStoreOp;
}
renderPass = RenderPassHashArray_Fetch(
&renderer->renderPassHashArray,
&hash
);
if (renderPass != VK_NULL_HANDLE)
{
SDL_UnlockMutex(renderer->renderPassFetchLock);
return renderPass;
}
renderPass = VULKAN_INTERNAL_CreateRenderPass(
renderer,
commandBuffer,
colorAttachmentInfos,
colorAttachmentCount,
depthStencilAttachmentInfo
);
if (renderPass != VK_NULL_HANDLE)
{
RenderPassHashArray_Insert(
&renderer->renderPassHashArray,
hash,
renderPass
);
}
SDL_UnlockMutex(renderer->renderPassFetchLock);
return renderPass;
}
static VulkanFramebuffer* VULKAN_INTERNAL_FetchFramebuffer(
VulkanRenderer *renderer,
VkRenderPass renderPass,
Refresh_ColorAttachmentInfo *colorAttachmentInfos,
uint32_t colorAttachmentCount,
Refresh_DepthStencilAttachmentInfo *depthStencilAttachmentInfo,
uint32_t width,
uint32_t height
) {
VulkanFramebuffer *vulkanFramebuffer;
VkFramebufferCreateInfo framebufferInfo;
VkResult result;
VkImageView imageViewAttachments[2 * MAX_COLOR_TARGET_BINDINGS + 1];
FramebufferHash hash;
VulkanTexture *texture;
VulkanRenderTarget *renderTarget;
uint32_t attachmentCount = 0;
uint32_t i;
for (i = 0; i < MAX_COLOR_TARGET_BINDINGS; i += 1)
{
hash.colorAttachmentViews[i] = VK_NULL_HANDLE;
hash.colorMultiSampleAttachmentViews[i] = VK_NULL_HANDLE;
}
hash.colorAttachmentCount = colorAttachmentCount;
for (i = 0; i < colorAttachmentCount; i += 1)
{
texture = ((VulkanTextureContainer*) colorAttachmentInfos[i].texture)->vulkanTexture;
renderTarget = VULKAN_INTERNAL_FetchRenderTarget(
renderer,
texture,
colorAttachmentInfos[i].depth,
colorAttachmentInfos[i].layer,
colorAttachmentInfos[i].level
);
hash.colorAttachmentViews[i] = (
renderTarget->view
);
if (texture->msaaTex != NULL)
{
renderTarget = VULKAN_INTERNAL_FetchRenderTarget(
renderer,
texture->msaaTex,
colorAttachmentInfos[i].depth,
colorAttachmentInfos[i].layer,
colorAttachmentInfos[i].level
);
hash.colorMultiSampleAttachmentViews[i] = (
renderTarget->view
);
}
}
if (depthStencilAttachmentInfo == NULL)
{
hash.depthStencilAttachmentView = VK_NULL_HANDLE;
}
else
{
texture = ((VulkanTextureContainer*) depthStencilAttachmentInfo->texture)->vulkanTexture;
renderTarget = VULKAN_INTERNAL_FetchRenderTarget(
renderer,
texture,
depthStencilAttachmentInfo->depth,
depthStencilAttachmentInfo->layer,
depthStencilAttachmentInfo->level
);
hash.depthStencilAttachmentView = renderTarget->view;
}
hash.width = width;
hash.height = height;
SDL_LockMutex(renderer->framebufferFetchLock);
vulkanFramebuffer = FramebufferHashArray_Fetch(
&renderer->framebufferHashArray,
&hash
);
SDL_UnlockMutex(renderer->framebufferFetchLock);
if (vulkanFramebuffer != NULL)
{
return vulkanFramebuffer;
}
vulkanFramebuffer = SDL_malloc(sizeof(VulkanFramebuffer));
SDL_AtomicSet(&vulkanFramebuffer->referenceCount, 0);
/* Create a new framebuffer */
for (i = 0; i < colorAttachmentCount; i += 1)
{
texture = ((VulkanTextureContainer*) colorAttachmentInfos[i].texture)->vulkanTexture;
renderTarget = VULKAN_INTERNAL_FetchRenderTarget(
renderer,
texture,
colorAttachmentInfos[i].depth,
colorAttachmentInfos[i].layer,
colorAttachmentInfos[i].level
);
imageViewAttachments[attachmentCount] =
renderTarget->view;
attachmentCount += 1;
if (texture->msaaTex != NULL)
{
renderTarget = VULKAN_INTERNAL_FetchRenderTarget(
renderer,
texture->msaaTex,
colorAttachmentInfos[i].depth,
colorAttachmentInfos[i].layer,
colorAttachmentInfos[i].level
);
imageViewAttachments[attachmentCount] =
renderTarget->view;
attachmentCount += 1;
}
}
if (depthStencilAttachmentInfo != NULL)
{
texture = ((VulkanTextureContainer*) depthStencilAttachmentInfo->texture)->vulkanTexture;
renderTarget = VULKAN_INTERNAL_FetchRenderTarget(
renderer,
texture,
depthStencilAttachmentInfo->depth,
depthStencilAttachmentInfo->layer,
depthStencilAttachmentInfo->level
);
imageViewAttachments[attachmentCount] = renderTarget->view;
attachmentCount += 1;
}
framebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
framebufferInfo.pNext = NULL;
framebufferInfo.flags = 0;
framebufferInfo.renderPass = renderPass;
framebufferInfo.attachmentCount = attachmentCount;
framebufferInfo.pAttachments = imageViewAttachments;
framebufferInfo.width = hash.width;
framebufferInfo.height = hash.height;
framebufferInfo.layers = 1;
result = renderer->vkCreateFramebuffer(
renderer->logicalDevice,
&framebufferInfo,
NULL,
&vulkanFramebuffer->framebuffer
);
if (result == VK_SUCCESS)
{
SDL_LockMutex(renderer->framebufferFetchLock);
FramebufferHashArray_Insert(
&renderer->framebufferHashArray,
hash,
vulkanFramebuffer
);
SDL_UnlockMutex(renderer->framebufferFetchLock);
}
else
{
LogVulkanResultAsError("vkCreateFramebuffer", result);
SDL_free(vulkanFramebuffer);
vulkanFramebuffer = NULL;
}
return vulkanFramebuffer;
}
static void VULKAN_INTERNAL_SetCurrentViewport(
	VulkanCommandBuffer *vulkanCommandBuffer,
	Refresh_Viewport *viewport
) {
vulkanCommandBuffer->currentViewport.x = viewport->x;
vulkanCommandBuffer->currentViewport.y = viewport->y;
vulkanCommandBuffer->currentViewport.width = viewport->w;
vulkanCommandBuffer->currentViewport.height = viewport->h;
vulkanCommandBuffer->currentViewport.minDepth = viewport->minDepth;
vulkanCommandBuffer->currentViewport.maxDepth = viewport->maxDepth;
}
static void VULKAN_SetViewport(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_Viewport *viewport
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VULKAN_INTERNAL_SetCurrentViewport(
vulkanCommandBuffer,
viewport
);
renderer->vkCmdSetViewport(
vulkanCommandBuffer->commandBuffer,
0,
1,
&vulkanCommandBuffer->currentViewport
);
}
static void VULKAN_INTERNAL_SetCurrentScissor(
VulkanCommandBuffer *vulkanCommandBuffer,
Refresh_Rect *scissor
) {
vulkanCommandBuffer->currentScissor.offset.x = scissor->x;
vulkanCommandBuffer->currentScissor.offset.y = scissor->y;
vulkanCommandBuffer->currentScissor.extent.width = scissor->w;
vulkanCommandBuffer->currentScissor.extent.height = scissor->h;
}
static void VULKAN_SetScissor(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_Rect *scissor
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VULKAN_INTERNAL_SetCurrentScissor(
vulkanCommandBuffer,
scissor
);
renderer->vkCmdSetScissor(
vulkanCommandBuffer->commandBuffer,
0,
1,
&vulkanCommandBuffer->currentScissor
);
}
static void VULKAN_BeginRenderPass(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_ColorAttachmentInfo *colorAttachmentInfos,
uint32_t colorAttachmentCount,
Refresh_DepthStencilAttachmentInfo *depthStencilAttachmentInfo
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VkRenderPass renderPass;
VulkanFramebuffer *framebuffer;
VulkanTexture *texture;
uint32_t w, h;
VkClearValue *clearValues;
uint32_t clearCount = colorAttachmentCount;
uint32_t multisampleAttachmentCount = 0;
uint32_t totalColorAttachmentCount = 0;
	uint32_t i, j;
	VkImageAspectFlags depthAspectFlags;
	VkRenderPassBeginInfo renderPassBeginInfo;
Refresh_Viewport defaultViewport;
Refresh_Rect defaultScissor;
uint32_t framebufferWidth = UINT32_MAX;
uint32_t framebufferHeight = UINT32_MAX;
/* The framebuffer cannot be larger than the smallest attachment. */
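	/* Each mip level halves the dimensions, e.g. a 256x256 texture bound at
	 * level 2 contributes 64x64 to the framebuffer size. */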
for (i = 0; i < colorAttachmentCount; i += 1)
{
texture = ((VulkanTextureContainer*) colorAttachmentInfos[i].texture)->vulkanTexture;
w = texture->dimensions.width >> colorAttachmentInfos[i].level;
h = texture->dimensions.height >> colorAttachmentInfos[i].level;
if (w < framebufferWidth)
{
framebufferWidth = w;
}
if (h < framebufferHeight)
{
framebufferHeight = h;
}
}
if (depthStencilAttachmentInfo != NULL)
{
texture = ((VulkanTextureContainer*) depthStencilAttachmentInfo->texture)->vulkanTexture;
w = texture->dimensions.width >> depthStencilAttachmentInfo->level;
h = texture->dimensions.height >> depthStencilAttachmentInfo->level;
if (w < framebufferWidth)
{
framebufferWidth = w;
}
if (h < framebufferHeight)
{
framebufferHeight = h;
}
}
/* Fetch required render objects */
renderPass = VULKAN_INTERNAL_FetchRenderPass(
renderer,
vulkanCommandBuffer,
colorAttachmentInfos,
colorAttachmentCount,
depthStencilAttachmentInfo
);
framebuffer = VULKAN_INTERNAL_FetchFramebuffer(
renderer,
renderPass,
colorAttachmentInfos,
colorAttachmentCount,
depthStencilAttachmentInfo,
framebufferWidth,
framebufferHeight
);
VULKAN_INTERNAL_TrackFramebuffer(renderer, vulkanCommandBuffer, framebuffer);
/* Layout transitions */
for (i = 0; i < colorAttachmentCount; i += 1)
{
texture = ((VulkanTextureContainer*) colorAttachmentInfos[i].texture)->vulkanTexture;
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_COLOR_ATTACHMENT_READ_WRITE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
texture->layerCount,
0,
texture->levelCount,
0,
texture->image,
&texture->resourceAccessType
);
if (texture->msaaTex != NULL)
{
clearCount += 1;
multisampleAttachmentCount += 1;
}
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, texture);
}
if (depthStencilAttachmentInfo != NULL)
{
texture = ((VulkanTextureContainer*) depthStencilAttachmentInfo->texture)->vulkanTexture;
depthAspectFlags = VK_IMAGE_ASPECT_DEPTH_BIT;
if (IsStencilFormat(texture->format))
{
depthAspectFlags |= VK_IMAGE_ASPECT_STENCIL_BIT;
}
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_WRITE,
depthAspectFlags,
0,
texture->layerCount,
0,
texture->levelCount,
0,
texture->image,
&texture->resourceAccessType
);
clearCount += 1;
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, texture);
}
/* Set clear values */
clearValues = SDL_stack_alloc(VkClearValue, clearCount);
totalColorAttachmentCount = colorAttachmentCount + multisampleAttachmentCount;
	/* Clear values follow framebuffer attachment order: each color view is
	 * immediately followed by its multisample view, if one exists. */
	j = 0;
	for (i = 0; i < colorAttachmentCount; i += 1)
	{
		texture = ((VulkanTextureContainer*) colorAttachmentInfos[i].texture)->vulkanTexture;

		clearValues[j].color.float32[0] = colorAttachmentInfos[i].clearColor.x;
		clearValues[j].color.float32[1] = colorAttachmentInfos[i].clearColor.y;
		clearValues[j].color.float32[2] = colorAttachmentInfos[i].clearColor.z;
		clearValues[j].color.float32[3] = colorAttachmentInfos[i].clearColor.w;
		j += 1;

		if (texture->msaaTex != NULL)
		{
			clearValues[j].color.float32[0] = colorAttachmentInfos[i].clearColor.x;
			clearValues[j].color.float32[1] = colorAttachmentInfos[i].clearColor.y;
			clearValues[j].color.float32[2] = colorAttachmentInfos[i].clearColor.z;
			clearValues[j].color.float32[3] = colorAttachmentInfos[i].clearColor.w;
			j += 1;
		}
	}
if (depthStencilAttachmentInfo != NULL)
{
clearValues[totalColorAttachmentCount].depthStencil.depth =
depthStencilAttachmentInfo->depthStencilClearValue.depth;
clearValues[totalColorAttachmentCount].depthStencil.stencil =
depthStencilAttachmentInfo->depthStencilClearValue.stencil;
}
renderPassBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
renderPassBeginInfo.pNext = NULL;
renderPassBeginInfo.renderPass = renderPass;
renderPassBeginInfo.framebuffer = framebuffer->framebuffer;
renderPassBeginInfo.pClearValues = clearValues;
renderPassBeginInfo.clearValueCount = clearCount;
renderPassBeginInfo.renderArea.extent.width = framebufferWidth;
renderPassBeginInfo.renderArea.extent.height = framebufferHeight;
renderPassBeginInfo.renderArea.offset.x = 0;
renderPassBeginInfo.renderArea.offset.y = 0;
renderer->vkCmdBeginRenderPass(
vulkanCommandBuffer->commandBuffer,
&renderPassBeginInfo,
VK_SUBPASS_CONTENTS_INLINE
);
SDL_stack_free(clearValues);
for (i = 0; i < colorAttachmentCount; i += 1)
{
vulkanCommandBuffer->renderPassColorTargetTextures[i] =
((VulkanTextureContainer*) colorAttachmentInfos[i].texture)->vulkanTexture;
}
vulkanCommandBuffer->renderPassColorTargetCount = colorAttachmentCount;
if (depthStencilAttachmentInfo != NULL)
{
vulkanCommandBuffer->renderPassDepthTexture = ((VulkanTextureContainer*) depthStencilAttachmentInfo->texture)->vulkanTexture;
}
/* Set sensible default viewport state */
defaultViewport.x = 0;
defaultViewport.y = 0;
defaultViewport.w = framebufferWidth;
defaultViewport.h = framebufferHeight;
defaultViewport.minDepth = 0;
defaultViewport.maxDepth = 1;
VULKAN_INTERNAL_SetCurrentViewport(
vulkanCommandBuffer,
&defaultViewport
);
defaultScissor.x = 0;
defaultScissor.y = 0;
defaultScissor.w = framebufferWidth;
defaultScissor.h = framebufferHeight;
VULKAN_INTERNAL_SetCurrentScissor(
vulkanCommandBuffer,
&defaultScissor
);
}
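/* A typical render pass as driven from the client side (illustrative; the
 * Refresh_* entry point names are assumed from the public API):
 *
 *   Refresh_BeginRenderPass(device, cmdbuf, colorAttachmentInfos, 1, NULL);
 *   Refresh_BindGraphicsPipeline(device, cmdbuf, pipeline);
 *   ...push uniforms, bind vertex/index buffers, draw...
 *   Refresh_EndRenderPass(device, cmdbuf);
 */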
static void VULKAN_EndRenderPass(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanTexture *currentTexture;
uint32_t i;
renderer->vkCmdEndRenderPass(
vulkanCommandBuffer->commandBuffer
);
/* If the render targets can be sampled, transition them to sample layout */
for (i = 0; i < vulkanCommandBuffer->renderPassColorTargetCount; i += 1)
{
currentTexture = vulkanCommandBuffer->renderPassColorTargetTextures[i];
if (currentTexture->usageFlags & VK_IMAGE_USAGE_SAMPLED_BIT)
{
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE,
currentTexture->aspectFlags,
0,
currentTexture->layerCount,
0,
currentTexture->levelCount,
0,
currentTexture->image,
&currentTexture->resourceAccessType
);
}
else if (currentTexture->usageFlags & VK_IMAGE_USAGE_STORAGE_BIT)
{
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_COMPUTE_SHADER_STORAGE_IMAGE_READ_WRITE,
currentTexture->aspectFlags,
0,
currentTexture->layerCount,
0,
currentTexture->levelCount,
0,
currentTexture->image,
&currentTexture->resourceAccessType
);
}
}
vulkanCommandBuffer->renderPassColorTargetCount = 0;
if (vulkanCommandBuffer->renderPassDepthTexture != NULL)
{
currentTexture = vulkanCommandBuffer->renderPassDepthTexture;
if (currentTexture->usageFlags & VK_IMAGE_USAGE_SAMPLED_BIT)
{
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE,
currentTexture->aspectFlags,
0,
currentTexture->layerCount,
0,
currentTexture->levelCount,
0,
currentTexture->image,
&currentTexture->resourceAccessType
);
}
}
vulkanCommandBuffer->renderPassDepthTexture = NULL;
vulkanCommandBuffer->currentGraphicsPipeline = NULL;
}
static void VULKAN_BindGraphicsPipeline(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_GraphicsPipeline *graphicsPipeline
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanGraphicsPipeline* pipeline = (VulkanGraphicsPipeline*) graphicsPipeline;
/* bind dummy sets if necessary */
if (pipeline->pipelineLayout->vertexSamplerDescriptorSetCache == NULL)
{
vulkanCommandBuffer->vertexSamplerDescriptorSet = renderer->emptyVertexSamplerDescriptorSet;
}
if (pipeline->pipelineLayout->fragmentSamplerDescriptorSetCache == NULL)
{
vulkanCommandBuffer->fragmentSamplerDescriptorSet = renderer->emptyFragmentSamplerDescriptorSet;
}
renderer->vkCmdBindPipeline(
vulkanCommandBuffer->commandBuffer,
VK_PIPELINE_BIND_POINT_GRAPHICS,
pipeline->pipeline
);
vulkanCommandBuffer->currentGraphicsPipeline = pipeline;
VULKAN_INTERNAL_TrackGraphicsPipeline(renderer, vulkanCommandBuffer, pipeline);
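	/* Viewport and scissor are dynamic pipeline state, so they must be
	 * re-issued after every pipeline bind. */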
renderer->vkCmdSetViewport(
vulkanCommandBuffer->commandBuffer,
0,
1,
&vulkanCommandBuffer->currentViewport
);
renderer->vkCmdSetScissor(
vulkanCommandBuffer->commandBuffer,
0,
1,
&vulkanCommandBuffer->currentScissor
);
}
static void VULKAN_BindVertexBuffers(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
uint32_t firstBinding,
uint32_t bindingCount,
Refresh_GpuBuffer **pBuffers,
uint64_t *pOffsets
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBuffer *currentVulkanBuffer;
VkBuffer *buffers = SDL_stack_alloc(VkBuffer, bindingCount);
uint32_t i;
for (i = 0; i < bindingCount; i += 1)
{
currentVulkanBuffer = ((VulkanBufferContainer*) pBuffers[i])->vulkanBuffer;
buffers[i] = currentVulkanBuffer->buffer;
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, currentVulkanBuffer);
}
renderer->vkCmdBindVertexBuffers(
vulkanCommandBuffer->commandBuffer,
firstBinding,
bindingCount,
buffers,
pOffsets
);
SDL_stack_free(buffers);
}
static void VULKAN_BindIndexBuffer(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_GpuBuffer *buffer,
uint64_t offset,
Refresh_IndexElementSize indexElementSize
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBuffer* vulkanBuffer = ((VulkanBufferContainer*) buffer)->vulkanBuffer;
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, vulkanBuffer);
renderer->vkCmdBindIndexBuffer(
vulkanCommandBuffer->commandBuffer,
vulkanBuffer->buffer,
offset,
RefreshToVK_IndexType[indexElementSize]
);
}
static void VULKAN_BeginComputePass(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer
) {
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
vulkanCommandBuffer->boundComputeBufferCount = 0;
vulkanCommandBuffer->boundComputeTextureCount = 0;
}
static void VULKAN_BindComputePipeline(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_ComputePipeline *computePipeline
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanComputePipeline *vulkanComputePipeline = (VulkanComputePipeline*) computePipeline;
/* bind dummy sets if necessary */
if (vulkanComputePipeline->pipelineLayout->bufferDescriptorSetCache == NULL)
{
vulkanCommandBuffer->bufferDescriptorSet = renderer->emptyComputeBufferDescriptorSet;
}
if (vulkanComputePipeline->pipelineLayout->imageDescriptorSetCache == NULL)
{
vulkanCommandBuffer->imageDescriptorSet = renderer->emptyComputeImageDescriptorSet;
}
renderer->vkCmdBindPipeline(
vulkanCommandBuffer->commandBuffer,
VK_PIPELINE_BIND_POINT_COMPUTE,
vulkanComputePipeline->pipeline
);
vulkanCommandBuffer->currentComputePipeline = vulkanComputePipeline;
VULKAN_INTERNAL_TrackComputePipeline(renderer, vulkanCommandBuffer, vulkanComputePipeline);
}
static void VULKAN_BindComputeBuffers(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_GpuBuffer **pBuffers
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanComputePipeline *computePipeline = vulkanCommandBuffer->currentComputePipeline;
VulkanBuffer *currentVulkanBuffer;
VkDescriptorBufferInfo descriptorBufferInfos[MAX_BUFFER_BINDINGS];
uint32_t i;
if (computePipeline->pipelineLayout->bufferDescriptorSetCache == NULL)
{
return;
}
for (i = 0; i < computePipeline->pipelineLayout->bufferDescriptorSetCache->bindingCount; i += 1)
{
currentVulkanBuffer = ((VulkanBufferContainer*) pBuffers[i])->vulkanBuffer;
descriptorBufferInfos[i].buffer = currentVulkanBuffer->buffer;
descriptorBufferInfos[i].offset = 0;
descriptorBufferInfos[i].range = currentVulkanBuffer->size;
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_COMPUTE_SHADER_BUFFER_READ_WRITE,
currentVulkanBuffer
);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, currentVulkanBuffer);
VULKAN_INTERNAL_TrackComputeBuffer(renderer, vulkanCommandBuffer, currentVulkanBuffer);
}
vulkanCommandBuffer->bufferDescriptorSet =
VULKAN_INTERNAL_FetchDescriptorSet(
renderer,
vulkanCommandBuffer,
computePipeline->pipelineLayout->bufferDescriptorSetCache,
NULL,
descriptorBufferInfos
);
}
static void VULKAN_BindComputeTextures(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_Texture **pTextures,
uint32_t **pLevels
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanComputePipeline *computePipeline = vulkanCommandBuffer->currentComputePipeline;
VulkanTexture *currentTexture;
VkDescriptorImageInfo descriptorImageInfos[MAX_TEXTURE_SAMPLERS];
uint32_t i;
if (computePipeline->pipelineLayout->imageDescriptorSetCache == NULL)
{
return;
}
for (i = 0; i < computePipeline->pipelineLayout->imageDescriptorSetCache->bindingCount; i += 1)
{
currentTexture = ((VulkanTextureContainer*) pTextures[i])->vulkanTexture;
descriptorImageInfos[i].imageView = currentTexture->view;
descriptorImageInfos[i].sampler = VK_NULL_HANDLE;
descriptorImageInfos[i].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_COMPUTE_SHADER_STORAGE_IMAGE_READ_WRITE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
currentTexture->layerCount,
0,
currentTexture->levelCount,
0,
currentTexture->image,
&currentTexture->resourceAccessType
);
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, currentTexture);
VULKAN_INTERNAL_TrackComputeTexture(renderer, vulkanCommandBuffer, currentTexture);
}
vulkanCommandBuffer->imageDescriptorSet =
VULKAN_INTERNAL_FetchDescriptorSet(
renderer,
vulkanCommandBuffer,
computePipeline->pipelineLayout->imageDescriptorSetCache,
descriptorImageInfos,
NULL
);
}
static void VULKAN_DispatchCompute(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
uint32_t groupCountX,
uint32_t groupCountY,
uint32_t groupCountZ
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanComputePipeline *computePipeline = vulkanCommandBuffer->currentComputePipeline;
VkDescriptorSet descriptorSets[3];
descriptorSets[0] = vulkanCommandBuffer->bufferDescriptorSet;
descriptorSets[1] = vulkanCommandBuffer->imageDescriptorSet;
descriptorSets[2] = renderer->computeUniformBufferObject->descriptorSet;
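	/* Set order must match the compute pipeline layout: set 0 holds storage
	 * buffers, set 1 storage images, and set 2 the shared uniform buffer,
	 * addressed through the dynamic offset recorded at push time. */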
renderer->vkCmdBindDescriptorSets(
vulkanCommandBuffer->commandBuffer,
VK_PIPELINE_BIND_POINT_COMPUTE,
computePipeline->pipelineLayout->pipelineLayout,
0,
3,
descriptorSets,
1,
&vulkanCommandBuffer->computeUniformOffset
);
renderer->vkCmdDispatch(
vulkanCommandBuffer->commandBuffer,
groupCountX,
groupCountY,
groupCountZ
);
}
static void VULKAN_EndComputePass(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBuffer *currentComputeBuffer;
VulkanTexture *currentComputeTexture;
VulkanResourceAccessType resourceAccessType = RESOURCE_ACCESS_NONE;
uint32_t i;
/* Re-transition buffers */
for (i = 0; i < vulkanCommandBuffer->boundComputeBufferCount; i += 1)
{
currentComputeBuffer = vulkanCommandBuffer->boundComputeBuffers[i];
if (currentComputeBuffer->usage & VK_BUFFER_USAGE_VERTEX_BUFFER_BIT)
{
resourceAccessType = RESOURCE_ACCESS_VERTEX_BUFFER;
}
else if (currentComputeBuffer->usage & VK_BUFFER_USAGE_INDEX_BUFFER_BIT)
{
resourceAccessType = RESOURCE_ACCESS_INDEX_BUFFER;
}
else if (currentComputeBuffer->usage & VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT)
{
resourceAccessType = RESOURCE_ACCESS_INDIRECT_BUFFER;
}
if (resourceAccessType != RESOURCE_ACCESS_NONE)
{
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
resourceAccessType,
currentComputeBuffer
);
}
}
/* Re-transition sampler images */
for (i = 0; i < vulkanCommandBuffer->boundComputeTextureCount; i += 1)
{
currentComputeTexture = vulkanCommandBuffer->boundComputeTextures[i];
if (currentComputeTexture->usageFlags & VK_IMAGE_USAGE_SAMPLED_BIT)
{
resourceAccessType = RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE;
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
resourceAccessType,
currentComputeTexture->aspectFlags,
0,
currentComputeTexture->layerCount,
0,
currentComputeTexture->levelCount,
0,
currentComputeTexture->image,
&currentComputeTexture->resourceAccessType
);
}
}
vulkanCommandBuffer->currentComputePipeline = NULL;
}
static void* VULKAN_MapCpuBuffer(
Refresh_Renderer *driverData,
Refresh_CpuBuffer *buffer,
uint32_t offsetInBytes,
uint32_t sizeInBytes
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanBuffer *vulkanBuffer = (VulkanBuffer*) buffer;
VkResult result;
void* mapPointer;
result = renderer->vkMapMemory(
renderer->logicalDevice,
vulkanBuffer->usedRegion->allocation->memory,
offsetInBytes,
sizeInBytes,
0,
&mapPointer
);
VULKAN_ERROR_CHECK(result, vkMapMemory, NULL)
return mapPointer;
}
static void VULKAN_UnmapCpuBuffer(
Refresh_Renderer *driverData,
Refresh_CpuBuffer *buffer
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanBuffer *vulkanBuffer = (VulkanBuffer*) buffer;
renderer->vkUnmapMemory(
renderer->logicalDevice,
vulkanBuffer->usedRegion->allocation->memory
);
}
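/* Typical upload path (a sketch only; the Refresh_* entry point names and
 * the `size`/`vertexData` variables are assumed for illustration):
 *
 *   void *p = Refresh_MapCpuBuffer(device, cpuBuffer, 0, size);
 *   SDL_memcpy(p, vertexData, size);
 *   Refresh_UnmapCpuBuffer(device, cpuBuffer);
 *
 *   Refresh_BeginCopyPass(device, cmdbuf);
 *   Refresh_UploadToBuffer(device, cmdbuf, cpuBuffer, gpuBuffer, &copyParams);
 *   Refresh_EndCopyPass(device, cmdbuf);
 */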
static void VULKAN_BeginCopyPass(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
vulkanCommandBuffer->copiedGpuBufferCount = 0;
vulkanCommandBuffer->copiedTextureCount = 0;
}
static void VULKAN_UploadToTexture(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_CpuBuffer *cpuBuffer,
Refresh_TextureSlice *textureSlice,
Refresh_BufferImageCopy *copyParams
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBuffer *vulkanCpuBuffer = (VulkanBuffer*) cpuBuffer;
VulkanTexture *vulkanTexture = ((VulkanTextureContainer*) textureSlice->texture)->vulkanTexture;
VkBufferImageCopy imageCopy;
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
vulkanCpuBuffer
);
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
vulkanTexture->aspectFlags,
0,
vulkanTexture->layerCount,
0,
vulkanTexture->levelCount,
0,
vulkanTexture->image,
&vulkanTexture->resourceAccessType
);
imageCopy.imageExtent.width = textureSlice->w;
imageCopy.imageExtent.height = textureSlice->h;
imageCopy.imageExtent.depth = textureSlice->d;
imageCopy.imageOffset.x = textureSlice->x;
imageCopy.imageOffset.y = textureSlice->y;
imageCopy.imageOffset.z = textureSlice->z;
imageCopy.imageSubresource.aspectMask = vulkanTexture->aspectFlags;
imageCopy.imageSubresource.baseArrayLayer = textureSlice->baseLayer;
imageCopy.imageSubresource.layerCount = textureSlice->layerCount;
imageCopy.imageSubresource.mipLevel = textureSlice->mipLevel;
imageCopy.bufferOffset = copyParams->bufferOffset;
imageCopy.bufferRowLength = copyParams->bufferStride;
imageCopy.bufferImageHeight = copyParams->bufferImageHeight;
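	/* Per the Vulkan spec, a bufferRowLength/bufferImageHeight of zero means
	 * "tightly packed", i.e. derived from imageExtent. */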
renderer->vkCmdCopyBufferToImage(
vulkanCommandBuffer->commandBuffer,
vulkanCpuBuffer->buffer,
vulkanTexture->image,
AccessMap[vulkanTexture->resourceAccessType].imageLayout,
1,
&imageCopy
);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, vulkanCpuBuffer);
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, vulkanTexture);
VULKAN_INTERNAL_TrackCopiedTexture(renderer, vulkanCommandBuffer, vulkanTexture);
}
static void VULKAN_UploadToBuffer(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_CpuBuffer *cpuBuffer,
Refresh_GpuBuffer *gpuBuffer,
Refresh_BufferCopy *copyParams
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBuffer *vulkanCpuBuffer = (VulkanBuffer*) cpuBuffer;
VulkanBuffer *vulkanGpuBuffer = ((VulkanBufferContainer*) gpuBuffer)->vulkanBuffer;
VkBufferCopy bufferCopy;
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
vulkanCpuBuffer
);
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
vulkanGpuBuffer
);
bufferCopy.srcOffset = copyParams->srcOffset;
bufferCopy.dstOffset = copyParams->dstOffset;
bufferCopy.size = copyParams->size;
renderer->vkCmdCopyBuffer(
vulkanCommandBuffer->commandBuffer,
vulkanCpuBuffer->buffer,
vulkanGpuBuffer->buffer,
1,
&bufferCopy
);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, vulkanCpuBuffer);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, vulkanGpuBuffer);
VULKAN_INTERNAL_TrackCopiedBuffer(renderer, vulkanCommandBuffer, vulkanGpuBuffer);
}
static void VULKAN_DownloadFromTexture(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_TextureSlice *textureSlice,
Refresh_CpuBuffer *cpuBuffer,
Refresh_BufferImageCopy *copyParams
2020-12-17 01:23:49 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanTexture *vulkanTexture = ((VulkanTextureContainer*) textureSlice->texture)->vulkanTexture;
VulkanBuffer *vulkanCpuBuffer = (VulkanBuffer*) cpuBuffer;
VkBufferImageCopy imageCopy;
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
vulkanCpuBuffer
);
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
vulkanTexture->aspectFlags,
0,
vulkanTexture->layerCount,
0,
vulkanTexture->levelCount,
0,
vulkanTexture->image,
&vulkanTexture->resourceAccessType
);
imageCopy.imageExtent.width = textureSlice->w;
imageCopy.imageExtent.height = textureSlice->h;
imageCopy.imageExtent.depth = textureSlice->d;
imageCopy.imageOffset.x = textureSlice->x;
imageCopy.imageOffset.y = textureSlice->y;
imageCopy.imageOffset.z = textureSlice->z;
imageCopy.imageSubresource.aspectMask = vulkanTexture->aspectFlags;
imageCopy.imageSubresource.baseArrayLayer = textureSlice->baseLayer;
imageCopy.imageSubresource.layerCount = textureSlice->layerCount;
imageCopy.imageSubresource.mipLevel = textureSlice->mipLevel;
imageCopy.bufferOffset = copyParams->bufferOffset;
imageCopy.bufferRowLength = copyParams->bufferStride;
imageCopy.bufferImageHeight = copyParams->bufferImageHeight;
renderer->vkCmdCopyImageToBuffer(
vulkanCommandBuffer->commandBuffer,
vulkanTexture->image,
AccessMap[vulkanTexture->resourceAccessType].imageLayout,
vulkanCpuBuffer->buffer,
1,
&imageCopy
);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, vulkanCpuBuffer);
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, vulkanTexture);
VULKAN_INTERNAL_TrackCopiedTexture(renderer, vulkanCommandBuffer, vulkanTexture);
}
static void VULKAN_DownloadFromBuffer(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_GpuBuffer *gpuBuffer,
Refresh_CpuBuffer *cpuBuffer,
Refresh_BufferCopy *copyParams
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBuffer *vulkanCpuBuffer = (VulkanBuffer*) cpuBuffer;
VulkanBuffer *vulkanGpuBuffer = ((VulkanBufferContainer*) gpuBuffer)->vulkanBuffer;
VkBufferCopy bufferCopy;
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
vulkanCpuBuffer
);
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
vulkanGpuBuffer
);
bufferCopy.srcOffset = copyParams->srcOffset;
bufferCopy.dstOffset = copyParams->dstOffset;
bufferCopy.size = copyParams->size;
renderer->vkCmdCopyBuffer(
vulkanCommandBuffer->commandBuffer,
vulkanGpuBuffer->buffer,
vulkanCpuBuffer->buffer,
1,
&bufferCopy
);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, vulkanCpuBuffer);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, vulkanGpuBuffer);
	VULKAN_INTERNAL_TrackCopiedBuffer(renderer, vulkanCommandBuffer, vulkanGpuBuffer);
}
static void VULKAN_CopyTextureToTexture(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_TextureSlice *source,
Refresh_TextureSlice *destination
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanTexture *srcTexture = ((VulkanTextureContainer*) source->texture)->vulkanTexture;
VulkanTexture *dstTexture = ((VulkanTextureContainer*) destination->texture)->vulkanTexture;
VkImageCopy imageCopy;
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
srcTexture->layerCount,
0,
srcTexture->levelCount,
0,
srcTexture->image,
&srcTexture->resourceAccessType
);
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
dstTexture->layerCount,
0,
dstTexture->levelCount,
0,
dstTexture->image,
&dstTexture->resourceAccessType
);
imageCopy.srcOffset.x = source->x;
imageCopy.srcOffset.y = source->y;
imageCopy.srcOffset.z = source->z;
imageCopy.srcSubresource.aspectMask = srcTexture->aspectFlags;
imageCopy.srcSubresource.baseArrayLayer = source->baseLayer;
imageCopy.srcSubresource.layerCount = source->layerCount;
imageCopy.srcSubresource.mipLevel = source->mipLevel;
imageCopy.dstOffset.x = destination->x;
imageCopy.dstOffset.y = destination->y;
imageCopy.dstOffset.z = destination->z;
imageCopy.dstSubresource.aspectMask = dstTexture->aspectFlags;
imageCopy.dstSubresource.baseArrayLayer = destination->baseLayer;
imageCopy.dstSubresource.layerCount = destination->layerCount;
imageCopy.dstSubresource.mipLevel = destination->mipLevel;
imageCopy.extent.width = source->w;
imageCopy.extent.height = source->h;
imageCopy.extent.depth = source->d;
renderer->vkCmdCopyImage(
vulkanCommandBuffer->commandBuffer,
srcTexture->image,
AccessMap[srcTexture->resourceAccessType].imageLayout,
dstTexture->image,
AccessMap[dstTexture->resourceAccessType].imageLayout,
1,
&imageCopy
);
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, srcTexture);
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, dstTexture);
VULKAN_INTERNAL_TrackCopiedTexture(renderer, vulkanCommandBuffer, srcTexture);
VULKAN_INTERNAL_TrackCopiedTexture(renderer, vulkanCommandBuffer, dstTexture);
}
static void VULKAN_CopyTextureToBuffer(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_TextureSlice *textureSlice,
Refresh_GpuBuffer *buffer,
Refresh_BufferImageCopy *copyParams
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanTexture *vulkanTexture = ((VulkanTextureContainer*) textureSlice->texture)->vulkanTexture;
VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer*) buffer)->vulkanBuffer;
VkBufferImageCopy imageCopy;
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
vulkanTexture->aspectFlags,
0,
vulkanTexture->layerCount,
0,
vulkanTexture->levelCount,
0,
vulkanTexture->image,
&vulkanTexture->resourceAccessType
);
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
vulkanBuffer
);
imageCopy.imageExtent.width = textureSlice->w;
imageCopy.imageExtent.height = textureSlice->h;
imageCopy.imageExtent.depth = textureSlice->d;
imageCopy.imageOffset.x = textureSlice->x;
imageCopy.imageOffset.y = textureSlice->y;
imageCopy.imageOffset.z = textureSlice->z;
imageCopy.imageSubresource.aspectMask = vulkanTexture->aspectFlags;
imageCopy.imageSubresource.baseArrayLayer = textureSlice->baseLayer;
imageCopy.imageSubresource.layerCount = textureSlice->layerCount;
imageCopy.imageSubresource.mipLevel = textureSlice->mipLevel;
imageCopy.bufferOffset = copyParams->bufferOffset;
imageCopy.bufferRowLength = copyParams->bufferStride;
imageCopy.bufferImageHeight = copyParams->bufferImageHeight;
renderer->vkCmdCopyImageToBuffer(
vulkanCommandBuffer->commandBuffer,
vulkanTexture->image,
AccessMap[vulkanTexture->resourceAccessType].imageLayout,
vulkanBuffer->buffer,
1,
&imageCopy
);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, vulkanBuffer);
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, vulkanTexture);
VULKAN_INTERNAL_TrackCopiedBuffer(renderer, vulkanCommandBuffer, vulkanBuffer);
VULKAN_INTERNAL_TrackCopiedTexture(renderer, vulkanCommandBuffer, vulkanTexture);
}
static void VULKAN_CopyBufferToTexture(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_GpuBuffer *buffer,
Refresh_TextureSlice *textureSlice,
Refresh_BufferImageCopy *copyParams
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBuffer *vulkanBuffer = ((VulkanBufferContainer*) buffer)->vulkanBuffer;
VulkanTexture *vulkanTexture = ((VulkanTextureContainer*) textureSlice->texture)->vulkanTexture;
VkBufferImageCopy imageCopy;
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
vulkanBuffer
);
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
vulkanTexture->aspectFlags,
0,
vulkanTexture->layerCount,
0,
vulkanTexture->levelCount,
0,
vulkanTexture->image,
&vulkanTexture->resourceAccessType
);
imageCopy.imageExtent.width = textureSlice->w;
imageCopy.imageExtent.height = textureSlice->h;
imageCopy.imageExtent.depth = textureSlice->d;
imageCopy.imageOffset.x = textureSlice->x;
imageCopy.imageOffset.y = textureSlice->y;
imageCopy.imageOffset.z = textureSlice->z;
imageCopy.imageSubresource.aspectMask = vulkanTexture->aspectFlags;
imageCopy.imageSubresource.baseArrayLayer = textureSlice->baseLayer;
imageCopy.imageSubresource.layerCount = textureSlice->layerCount;
imageCopy.imageSubresource.mipLevel = textureSlice->mipLevel;
imageCopy.bufferOffset = copyParams->bufferOffset;
imageCopy.bufferRowLength = copyParams->bufferStride;
imageCopy.bufferImageHeight = copyParams->bufferImageHeight;
renderer->vkCmdCopyBufferToImage(
vulkanCommandBuffer->commandBuffer,
vulkanBuffer->buffer,
vulkanTexture->image,
AccessMap[vulkanTexture->resourceAccessType].imageLayout,
1,
&imageCopy
);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, vulkanBuffer);
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, vulkanTexture);
VULKAN_INTERNAL_TrackCopiedBuffer(renderer, vulkanCommandBuffer, vulkanBuffer);
VULKAN_INTERNAL_TrackCopiedTexture(renderer, vulkanCommandBuffer, vulkanTexture);
}
static void VULKAN_CopyBufferToBuffer(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_GpuBuffer *source,
Refresh_GpuBuffer *destination,
Refresh_BufferCopy *copyParams
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBuffer *vulkanSrcBuffer = ((VulkanBufferContainer*) source)->vulkanBuffer;
VulkanBuffer *vulkanDstBuffer = ((VulkanBufferContainer*) destination)->vulkanBuffer;
VkBufferCopy bufferCopy;
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
vulkanSrcBuffer
);
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
vulkanDstBuffer
);
bufferCopy.srcOffset = copyParams->srcOffset;
bufferCopy.dstOffset = copyParams->dstOffset;
bufferCopy.size = copyParams->size;
renderer->vkCmdCopyBuffer(
vulkanCommandBuffer->commandBuffer,
vulkanSrcBuffer->buffer,
vulkanDstBuffer->buffer,
1,
&bufferCopy
);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, vulkanSrcBuffer);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, vulkanDstBuffer);
VULKAN_INTERNAL_TrackCopiedBuffer(renderer, vulkanCommandBuffer, vulkanSrcBuffer);
VULKAN_INTERNAL_TrackCopiedBuffer(renderer, vulkanCommandBuffer, vulkanDstBuffer);
}
static void VULKAN_GenerateMipmaps(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_Texture *texture
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanTexture *vulkanTexture = ((VulkanTextureContainer*) texture)->vulkanTexture;
VulkanResourceAccessType *levelAccessType;
VkImageBlit blit;
uint32_t level;
if (vulkanTexture->levelCount <= 1) { return; }
/* Store the original image layout... */
levelAccessType = SDL_stack_alloc(
VulkanResourceAccessType,
vulkanTexture->levelCount
);
for (level = 0; level < vulkanTexture->levelCount; level += 1)
{
levelAccessType[level] = vulkanTexture->resourceAccessType;
}
/* Blit each mip sequentially. Barriers, barriers everywhere! */
for (level = 1; level < vulkanTexture->levelCount; level += 1)
{
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
vulkanTexture->layerCount,
level - 1,
1,
0,
vulkanTexture->image,
&levelAccessType[level - 1]
);
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
vulkanTexture->layerCount,
level,
1,
1,
vulkanTexture->image,
&levelAccessType[level]
);
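/* Each mip level is half the size of the previous one, hence the
 * right-shift by the level index
 */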
blit.srcOffsets[0].x = 0;
blit.srcOffsets[0].y = 0;
blit.srcOffsets[0].z = 0;
blit.srcOffsets[1].x = vulkanTexture->dimensions.width >> (level - 1);
blit.srcOffsets[1].y = vulkanTexture->dimensions.height >> (level - 1);
blit.srcOffsets[1].z = 1;
blit.dstOffsets[0].x = 0;
blit.dstOffsets[0].y = 0;
blit.dstOffsets[0].z = 0;
blit.dstOffsets[1].x = vulkanTexture->dimensions.width >> level;
blit.dstOffsets[1].y = vulkanTexture->dimensions.height >> level;
blit.dstOffsets[1].z = 1;
blit.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
blit.srcSubresource.baseArrayLayer = 0;
blit.srcSubresource.layerCount = vulkanTexture->layerCount;
blit.srcSubresource.mipLevel = level - 1;
blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
blit.dstSubresource.baseArrayLayer = 0;
blit.dstSubresource.layerCount = vulkanTexture->layerCount;
blit.dstSubresource.mipLevel = level;
renderer->vkCmdBlitImage(
vulkanCommandBuffer->commandBuffer,
vulkanTexture->image,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
vulkanTexture->image,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1,
&blit,
VK_FILTER_LINEAR
);
}
/* Transition final level to READ */
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
vulkanTexture->layerCount,
vulkanTexture->levelCount - 1,
1,
1,
vulkanTexture->image,
&levelAccessType[vulkanTexture->levelCount - 1]
);
/* The whole texture is in READ layout now, so set the access type on the texture */
vulkanTexture->resourceAccessType = RESOURCE_ACCESS_TRANSFER_READ;
SDL_stack_free(levelAccessType);
}
static void VULKAN_EndCopyPass(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBuffer *currentBuffer;
VulkanTexture *currentTexture;
VulkanResourceAccessType resourceAccessType = RESOURCE_ACCESS_NONE;
uint32_t i;
/* Re-transition GpuBuffers */
for (i = 0; i < vulkanCommandBuffer->copiedGpuBufferCount; i += 1)
{
currentBuffer = vulkanCommandBuffer->copiedGpuBuffers[i];
/* Reset each iteration so a previous buffer's access type is never reused */
resourceAccessType = RESOURCE_ACCESS_NONE;
if (currentBuffer->usage & VK_BUFFER_USAGE_VERTEX_BUFFER_BIT)
{
resourceAccessType = RESOURCE_ACCESS_VERTEX_BUFFER;
}
else if (currentBuffer->usage & VK_BUFFER_USAGE_INDEX_BUFFER_BIT)
{
resourceAccessType = RESOURCE_ACCESS_INDEX_BUFFER;
}
else if (currentBuffer->usage & VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT)
{
resourceAccessType = RESOURCE_ACCESS_INDIRECT_BUFFER;
}
if (resourceAccessType != RESOURCE_ACCESS_NONE)
{
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
resourceAccessType,
currentBuffer
);
}
}
/* Re-transition textures */
for (i = 0; i < vulkanCommandBuffer->copiedTextureCount; i += 1)
{
currentTexture = vulkanCommandBuffer->copiedTextures[i];
if (currentTexture->usageFlags & VK_IMAGE_USAGE_SAMPLED_BIT)
{
resourceAccessType = RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE;
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
resourceAccessType,
currentTexture->aspectFlags,
0,
currentTexture->layerCount,
0,
currentTexture->levelCount,
0,
currentTexture->image,
&currentTexture->resourceAccessType
);
}
}
}
static void VULKAN_INTERNAL_AllocateCommandBuffers(
VulkanRenderer *renderer,
VulkanCommandPool *vulkanCommandPool,
uint32_t allocateCount
) {
VkCommandBufferAllocateInfo allocateInfo;
VkResult vulkanResult;
uint32_t i;
VkCommandBuffer *commandBuffers = SDL_stack_alloc(VkCommandBuffer, allocateCount);
VulkanCommandBuffer *commandBuffer;
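/* Command buffers are allocated in one batch; each VkCommandBuffer is then
 * wrapped in a VulkanCommandBuffer that owns its resource-tracking arrays
 */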
vulkanCommandPool->inactiveCommandBufferCapacity += allocateCount;
vulkanCommandPool->inactiveCommandBuffers = SDL_realloc(
vulkanCommandPool->inactiveCommandBuffers,
sizeof(VulkanCommandBuffer*) *
vulkanCommandPool->inactiveCommandBufferCapacity
);
allocateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
allocateInfo.pNext = NULL;
allocateInfo.commandPool = vulkanCommandPool->commandPool;
allocateInfo.commandBufferCount = allocateCount;
allocateInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
vulkanResult = renderer->vkAllocateCommandBuffers(
renderer->logicalDevice,
&allocateInfo,
commandBuffers
);
if (vulkanResult != VK_SUCCESS)
{
LogVulkanResultAsError("vkAllocateCommandBuffers", vulkanResult);
SDL_stack_free(commandBuffers);
return;
}
for (i = 0; i < allocateCount; i += 1)
{
commandBuffer = SDL_malloc(sizeof(VulkanCommandBuffer));
commandBuffer->commandPool = vulkanCommandPool;
commandBuffer->commandBuffer = commandBuffers[i];
commandBuffer->inFlightFence = VK_NULL_HANDLE;
commandBuffer->renderPassDepthTexture = NULL;
/* Presentation tracking */
commandBuffer->presentDataCapacity = 1;
commandBuffer->presentDataCount = 0;
commandBuffer->presentDatas = SDL_malloc(
commandBuffer->presentDataCapacity * sizeof(VkPresentInfoKHR)
);
commandBuffer->waitSemaphoreCapacity = 1;
commandBuffer->waitSemaphoreCount = 0;
commandBuffer->waitSemaphores = SDL_malloc(
commandBuffer->waitSemaphoreCapacity * sizeof(VkSemaphore)
);
commandBuffer->signalSemaphoreCapacity = 1;
commandBuffer->signalSemaphoreCount = 0;
commandBuffer->signalSemaphores = SDL_malloc(
commandBuffer->signalSemaphoreCapacity * sizeof(VkSemaphore)
);
/* Descriptor set tracking */
commandBuffer->boundDescriptorSetDataCapacity = 16;
commandBuffer->boundDescriptorSetDataCount = 0;
commandBuffer->boundDescriptorSetDatas = SDL_malloc(
commandBuffer->boundDescriptorSetDataCapacity * sizeof(DescriptorSetData)
);
/* Bound compute resource tracking */
commandBuffer->boundComputeBufferCapacity = 16;
commandBuffer->boundComputeBufferCount = 0;
commandBuffer->boundComputeBuffers = SDL_malloc(
commandBuffer->boundComputeBufferCapacity * sizeof(VulkanBuffer*)
);
commandBuffer->boundComputeTextureCapacity = 16;
commandBuffer->boundComputeTextureCount = 0;
commandBuffer->boundComputeTextures = SDL_malloc(
commandBuffer->boundComputeTextureCapacity * sizeof(VulkanTexture*)
);
/* Copy resource tracking */
commandBuffer->copiedGpuBufferCapacity = 16;
commandBuffer->copiedGpuBufferCount = 0;
commandBuffer->copiedGpuBuffers = SDL_malloc(
commandBuffer->copiedGpuBufferCapacity * sizeof(VulkanBuffer*)
);
commandBuffer->copiedTextureCapacity = 16;
commandBuffer->copiedTextureCount = 0;
commandBuffer->copiedTextures = SDL_malloc(
commandBuffer->copiedTextureCapacity * sizeof(VulkanTexture*)
);
/* Resource tracking */
commandBuffer->usedBufferCapacity = 4;
commandBuffer->usedBufferCount = 0;
commandBuffer->usedBuffers = SDL_malloc(
commandBuffer->usedBufferCapacity * sizeof(VulkanBuffer*)
);
commandBuffer->usedTextureCapacity = 4;
commandBuffer->usedTextureCount = 0;
commandBuffer->usedTextures = SDL_malloc(
commandBuffer->usedTextureCapacity * sizeof(VulkanTexture*)
);
commandBuffer->usedSamplerCapacity = 4;
commandBuffer->usedSamplerCount = 0;
commandBuffer->usedSamplers = SDL_malloc(
commandBuffer->usedSamplerCapacity * sizeof(VulkanSampler*)
);
commandBuffer->usedGraphicsPipelineCapacity = 4;
commandBuffer->usedGraphicsPipelineCount = 0;
commandBuffer->usedGraphicsPipelines = SDL_malloc(
commandBuffer->usedGraphicsPipelineCapacity * sizeof(VulkanGraphicsPipeline*)
);
commandBuffer->usedComputePipelineCapacity = 4;
commandBuffer->usedComputePipelineCount = 0;
commandBuffer->usedComputePipelines = SDL_malloc(
commandBuffer->usedComputePipelineCapacity * sizeof(VulkanComputePipeline*)
);
commandBuffer->usedFramebufferCapacity = 4;
commandBuffer->usedFramebufferCount = 0;
commandBuffer->usedFramebuffers = SDL_malloc(
commandBuffer->usedFramebufferCapacity * sizeof(VulkanFramebuffer*)
);
vulkanCommandPool->inactiveCommandBuffers[
vulkanCommandPool->inactiveCommandBufferCount
] = commandBuffer;
vulkanCommandPool->inactiveCommandBufferCount += 1;
}
SDL_stack_free(commandBuffers);
}
static VulkanCommandPool* VULKAN_INTERNAL_FetchCommandPool(
VulkanRenderer *renderer,
SDL_threadID threadID
) {
VulkanCommandPool *vulkanCommandPool;
VkCommandPoolCreateInfo commandPoolCreateInfo;
VkResult vulkanResult;
CommandPoolHash commandPoolHash;
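/* Vulkan command pools must be externally synchronized, so each thread
 * gets its own pool, keyed by thread ID
 */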
commandPoolHash.threadID = threadID;
vulkanCommandPool = CommandPoolHashTable_Fetch(
&renderer->commandPoolHashTable,
commandPoolHash
);
if (vulkanCommandPool != NULL)
{
return vulkanCommandPool;
}
vulkanCommandPool = (VulkanCommandPool*) SDL_malloc(sizeof(VulkanCommandPool));
commandPoolCreateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
commandPoolCreateInfo.pNext = NULL;
commandPoolCreateInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
commandPoolCreateInfo.queueFamilyIndex = renderer->queueFamilyIndex;
vulkanResult = renderer->vkCreateCommandPool(
renderer->logicalDevice,
&commandPoolCreateInfo,
NULL,
&vulkanCommandPool->commandPool
);
if (vulkanResult != VK_SUCCESS)
{
Refresh_LogError("Failed to create command pool!");
LogVulkanResultAsError("vkCreateCommandPool", vulkanResult);
return NULL;
}
vulkanCommandPool->threadID = threadID;
vulkanCommandPool->inactiveCommandBufferCapacity = 0;
vulkanCommandPool->inactiveCommandBufferCount = 0;
vulkanCommandPool->inactiveCommandBuffers = NULL;
VULKAN_INTERNAL_AllocateCommandBuffers(
renderer,
vulkanCommandPool,
2
);
CommandPoolHashTable_Insert(
&renderer->commandPoolHashTable,
commandPoolHash,
vulkanCommandPool
);
return vulkanCommandPool;
}
static VulkanCommandBuffer* VULKAN_INTERNAL_GetInactiveCommandBufferFromPool(
VulkanRenderer *renderer,
SDL_threadID threadID
) {
VulkanCommandPool *commandPool =
VULKAN_INTERNAL_FetchCommandPool(renderer, threadID);
VulkanCommandBuffer *commandBuffer;
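/* If the pool has run dry, allocate another batch, doubling its capacity */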
if (commandPool->inactiveCommandBufferCount == 0)
{
VULKAN_INTERNAL_AllocateCommandBuffers(
renderer,
commandPool,
commandPool->inactiveCommandBufferCapacity
);
}
commandBuffer = commandPool->inactiveCommandBuffers[commandPool->inactiveCommandBufferCount - 1];
commandPool->inactiveCommandBufferCount -= 1;
return commandBuffer;
}
static Refresh_CommandBuffer* VULKAN_AcquireCommandBuffer(
Refresh_Renderer *driverData
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VkResult result;
SDL_threadID threadID = SDL_ThreadID();
SDL_LockMutex(renderer->acquireCommandBufferLock);
VulkanCommandBuffer *commandBuffer =
VULKAN_INTERNAL_GetInactiveCommandBufferFromPool(renderer, threadID);
SDL_UnlockMutex(renderer->acquireCommandBufferLock);
/* Reset state */
commandBuffer->currentComputePipeline = NULL;
commandBuffer->currentGraphicsPipeline = NULL;
commandBuffer->vertexUniformOffset = 0;
commandBuffer->fragmentUniformOffset = 0;
commandBuffer->computeUniformOffset = 0;
commandBuffer->renderPassColorTargetCount = 0;
commandBuffer->autoReleaseFence = 1;
/* Reset the command buffer here to avoid resets being called
* from a separate thread than where the command buffer was acquired
*/
result = renderer->vkResetCommandBuffer(
commandBuffer->commandBuffer,
VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
);
if (result != VK_SUCCESS)
{
LogVulkanResultAsError("vkResetCommandBuffer", result);
}
VULKAN_INTERNAL_BeginCommandBuffer(renderer, commandBuffer);
return (Refresh_CommandBuffer*) commandBuffer;
}
static WindowData* VULKAN_INTERNAL_FetchWindowData(
void *windowHandle
) {
return (WindowData*) SDL_GetWindowData(windowHandle, WINDOW_DATA);
}
static uint8_t VULKAN_ClaimWindow(
Refresh_Renderer *driverData,
void *windowHandle,
Refresh_PresentMode presentMode
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(windowHandle);
if (windowData == NULL)
{
windowData = SDL_malloc(sizeof(WindowData));
windowData->windowHandle = windowHandle;
windowData->preferredPresentMode = presentMode;
if (VULKAN_INTERNAL_CreateSwapchain(renderer, windowData))
{
SDL_SetWindowData((SDL_Window*) windowHandle, WINDOW_DATA, windowData);
if (renderer->claimedWindowCount >= renderer->claimedWindowCapacity)
{
renderer->claimedWindowCapacity *= 2;
renderer->claimedWindows = SDL_realloc(
renderer->claimedWindows,
renderer->claimedWindowCapacity * sizeof(WindowData*)
);
}
renderer->claimedWindows[renderer->claimedWindowCount] = windowData;
renderer->claimedWindowCount += 1;
return 1;
}
else
{
Refresh_LogError("Could not create swapchain, failed to claim window!");
SDL_free(windowData);
return 0;
}
}
else
{
Refresh_LogWarn("Window already claimed!");
return 0;
}
}
static void VULKAN_UnclaimWindow(
Refresh_Renderer *driverData,
void *windowHandle
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(windowHandle);
uint32_t i;
if (windowData == NULL)
{
return;
}
if (windowData->swapchainData != NULL)
{
VULKAN_Wait(driverData);
VULKAN_INTERNAL_DestroySwapchain(
(VulkanRenderer*) driverData,
windowData
);
}
for (i = 0; i < renderer->claimedWindowCount; i += 1)
{
if (renderer->claimedWindows[i]->windowHandle == windowHandle)
{
renderer->claimedWindows[i] = renderer->claimedWindows[renderer->claimedWindowCount - 1];
renderer->claimedWindowCount -= 1;
break;
}
}
SDL_free(windowData);
SDL_SetWindowData((SDL_Window*) windowHandle, WINDOW_DATA, NULL);
}
static Refresh_Texture* VULKAN_AcquireSwapchainTexture(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
void *windowHandle,
uint32_t *pWidth,
uint32_t *pHeight
2020-12-17 04:19:11 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
uint32_t swapchainImageIndex;
WindowData *windowData;
VulkanSwapchainData *swapchainData;
VkResult acquireResult = VK_SUCCESS;
VulkanTextureContainer *swapchainTextureContainer = NULL;
VulkanPresentData *presentData;
windowData = VULKAN_INTERNAL_FetchWindowData(windowHandle);
if (windowData == NULL)
{
return NULL;
}
swapchainData = windowData->swapchainData;
/* Window is claimed but swapchain is invalid! */
if (swapchainData == NULL)
{
if (SDL_GetWindowFlags(windowHandle) & SDL_WINDOW_MINIMIZED)
{
/* Window is minimized, don't bother */
return NULL;
}
/* Let's try to recreate */
VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
swapchainData = windowData->swapchainData;
if (swapchainData == NULL)
{
Refresh_LogWarn("Failed to recreate swapchain!");
return NULL;
}
}
acquireResult = renderer->vkAcquireNextImageKHR(
renderer->logicalDevice,
swapchainData->swapchain,
UINT64_MAX,
swapchainData->imageAvailableSemaphore,
VK_NULL_HANDLE,
&swapchainImageIndex
);
/* Acquisition is invalid, let's try to recreate */
if (acquireResult != VK_SUCCESS && acquireResult != VK_SUBOPTIMAL_KHR)
{
VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
swapchainData = windowData->swapchainData;
if (swapchainData == NULL)
{
Refresh_LogWarn("Failed to recreate swapchain!");
return NULL;
}
acquireResult = renderer->vkAcquireNextImageKHR(
renderer->logicalDevice,
swapchainData->swapchain,
UINT64_MAX,
swapchainData->imageAvailableSemaphore,
VK_NULL_HANDLE,
&swapchainImageIndex
);
if (acquireResult != VK_SUCCESS && acquireResult != VK_SUBOPTIMAL_KHR)
{
Refresh_LogWarn("Failed to acquire swapchain texture!");
return NULL;
}
}
swapchainTextureContainer = &swapchainData->textureContainers[swapchainImageIndex];
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_COLOR_ATTACHMENT_WRITE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
1,
0,
1,
0,
swapchainTextureContainer->vulkanTexture->image,
&swapchainTextureContainer->vulkanTexture->resourceAccessType
);
/* Set up present struct */
if (vulkanCommandBuffer->presentDataCount == vulkanCommandBuffer->presentDataCapacity)
{
vulkanCommandBuffer->presentDataCapacity += 1;
vulkanCommandBuffer->presentDatas = SDL_realloc(
vulkanCommandBuffer->presentDatas,
vulkanCommandBuffer->presentDataCapacity * sizeof(VkPresentInfoKHR)
);
}
presentData = &vulkanCommandBuffer->presentDatas[vulkanCommandBuffer->presentDataCount];
vulkanCommandBuffer->presentDataCount += 1;
presentData->windowData = windowData;
presentData->swapchainImageIndex = swapchainImageIndex;
/* Set up present semaphores */
if (vulkanCommandBuffer->waitSemaphoreCount == vulkanCommandBuffer->waitSemaphoreCapacity)
{
vulkanCommandBuffer->waitSemaphoreCapacity += 1;
vulkanCommandBuffer->waitSemaphores = SDL_realloc(
vulkanCommandBuffer->waitSemaphores,
vulkanCommandBuffer->waitSemaphoreCapacity * sizeof(VkSemaphore)
);
}
vulkanCommandBuffer->waitSemaphores[vulkanCommandBuffer->waitSemaphoreCount] = swapchainData->imageAvailableSemaphore;
vulkanCommandBuffer->waitSemaphoreCount += 1;
if (vulkanCommandBuffer->signalSemaphoreCount == vulkanCommandBuffer->signalSemaphoreCapacity)
{
vulkanCommandBuffer->signalSemaphoreCapacity += 1;
vulkanCommandBuffer->signalSemaphores = SDL_realloc(
vulkanCommandBuffer->signalSemaphores,
vulkanCommandBuffer->signalSemaphoreCapacity * sizeof(VkSemaphore)
);
}
vulkanCommandBuffer->signalSemaphores[vulkanCommandBuffer->signalSemaphoreCount] = swapchainData->renderFinishedSemaphore;
vulkanCommandBuffer->signalSemaphoreCount += 1;
*pWidth = swapchainData->extent.width;
*pHeight = swapchainData->extent.height;
return (Refresh_Texture*) swapchainTextureContainer;
}
static Refresh_TextureFormat VULKAN_GetSwapchainFormat(
Refresh_Renderer *driverData,
void *windowHandle
) {
WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(windowHandle);
if (windowData == NULL)
{
Refresh_LogWarn("Cannot get swapchain format, window has not been claimed!");
return 0;
}
if (windowData->swapchainData == NULL)
{
Refresh_LogWarn("Cannot get swapchain format, swapchain is currently invalid!");
return 0;
}
if (windowData->swapchainData->swapchainFormat == VK_FORMAT_R8G8B8A8_UNORM)
{
return REFRESH_TEXTUREFORMAT_R8G8B8A8;
}
else if (windowData->swapchainData->swapchainFormat == VK_FORMAT_B8G8R8A8_UNORM)
{
return REFRESH_TEXTUREFORMAT_B8G8R8A8;
}
else
{
Refresh_LogWarn("Unrecognized swapchain format!");
return 0;
}
}
static void VULKAN_SetSwapchainPresentMode(
Refresh_Renderer *driverData,
void *windowHandle,
Refresh_PresentMode presentMode
) {
WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(windowHandle);
if (windowData == NULL)
{
Refresh_LogWarn("Cannot set present mode, window has not been claimed!");
return;
}
VULKAN_INTERNAL_RecreateSwapchain(
(VulkanRenderer *)driverData,
windowData
);
}
/* Submission structure */
static VkFence VULKAN_INTERNAL_AcquireFenceFromPool(
VulkanRenderer *renderer
) {
VkFenceCreateInfo fenceCreateInfo;
VkFence fence;
VkResult vulkanResult;
if (renderer->fencePool.availableFenceCount == 0)
{
/* Create fence */
fenceCreateInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
fenceCreateInfo.pNext = NULL;
fenceCreateInfo.flags = 0;
vulkanResult = renderer->vkCreateFence(
renderer->logicalDevice,
&fenceCreateInfo,
NULL,
&fence
);
if (vulkanResult != VK_SUCCESS)
{
LogVulkanResultAsError("vkCreateFence", vulkanResult);
return VK_NULL_HANDLE;
}
return fence;
}
SDL_LockMutex(renderer->fencePool.lock);
fence = renderer->fencePool.availableFences[renderer->fencePool.availableFenceCount - 1];
renderer->fencePool.availableFenceCount -= 1;
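/* Pooled fences may still be signaled from their last submission,
 * so reset before handing one out
 */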
vulkanResult = renderer->vkResetFences(
renderer->logicalDevice,
1,
&fence
);
if (vulkanResult != VK_SUCCESS)
{
LogVulkanResultAsError("vkResetFences", vulkanResult);
}
SDL_UnlockMutex(renderer->fencePool.lock);
return fence;
}
static void VULKAN_INTERNAL_ReturnFenceToPool(
VulkanRenderer *renderer,
VkFence fence
) {
SDL_LockMutex(renderer->fencePool.lock);
EXPAND_ARRAY_IF_NEEDED(
renderer->fencePool.availableFences,
VkFence,
renderer->fencePool.availableFenceCount + 1,
renderer->fencePool.availableFenceCapacity,
renderer->fencePool.availableFenceCapacity * 2
);
renderer->fencePool.availableFences[renderer->fencePool.availableFenceCount] = fence;
renderer->fencePool.availableFenceCount += 1;
SDL_UnlockMutex(renderer->fencePool.lock);
}
static void VULKAN_INTERNAL_PerformPendingDestroys(
VulkanRenderer *renderer
) {
int32_t i;
SDL_LockMutex(renderer->disposeLock);
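/* Walk each deferred-destroy list backwards and destroy anything whose
 * reference count has dropped to zero, swap-removing it from the array
 */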
for (i = renderer->texturesToDestroyCount - 1; i >= 0; i -= 1)
{
if (SDL_AtomicGet(&renderer->texturesToDestroy[i]->referenceCount) == 0)
{
VULKAN_INTERNAL_DestroyTexture(
renderer,
renderer->texturesToDestroy[i]
);
renderer->texturesToDestroy[i] = renderer->texturesToDestroy[renderer->texturesToDestroyCount - 1];
renderer->texturesToDestroyCount -= 1;
}
}
for (i = renderer->buffersToDestroyCount - 1; i >= 0; i -= 1)
{
if (SDL_AtomicGet(&renderer->buffersToDestroy[i]->referenceCount) == 0)
{
VULKAN_INTERNAL_DestroyBuffer(
renderer,
renderer->buffersToDestroy[i]);
renderer->buffersToDestroy[i] = renderer->buffersToDestroy[renderer->buffersToDestroyCount - 1];
renderer->buffersToDestroyCount -= 1;
}
}
for (i = renderer->graphicsPipelinesToDestroyCount - 1; i >= 0; i -= 1)
{
if (SDL_AtomicGet(&renderer->graphicsPipelinesToDestroy[i]->referenceCount) == 0)
{
VULKAN_INTERNAL_DestroyGraphicsPipeline(
renderer,
renderer->graphicsPipelinesToDestroy[i]
);
renderer->graphicsPipelinesToDestroy[i] = renderer->graphicsPipelinesToDestroy[renderer->graphicsPipelinesToDestroyCount - 1];
renderer->graphicsPipelinesToDestroyCount -= 1;
}
}
for (i = renderer->computePipelinesToDestroyCount - 1; i >= 0; i -= 1)
{
if (SDL_AtomicGet(&renderer->computePipelinesToDestroy[i]->referenceCount) == 0)
{
VULKAN_INTERNAL_DestroyComputePipeline(
renderer,
renderer->computePipelinesToDestroy[i]
);
renderer->computePipelinesToDestroy[i] = renderer->computePipelinesToDestroy[renderer->computePipelinesToDestroyCount - 1];
renderer->computePipelinesToDestroyCount -= 1;
}
}
for (i = renderer->shaderModulesToDestroyCount - 1; i >= 0; i -= 1)
{
if (SDL_AtomicGet(&renderer->shaderModulesToDestroy[i]->referenceCount) == 0)
{
VULKAN_INTERNAL_DestroyShaderModule(
renderer,
renderer->shaderModulesToDestroy[i]
);
renderer->shaderModulesToDestroy[i] = renderer->shaderModulesToDestroy[renderer->shaderModulesToDestroyCount - 1];
renderer->shaderModulesToDestroyCount -= 1;
}
}
for (i = renderer->samplersToDestroyCount - 1; i >= 0; i -= 1)
{
if (SDL_AtomicGet(&renderer->samplersToDestroy[i]->referenceCount) == 0)
{
VULKAN_INTERNAL_DestroySampler(
renderer,
renderer->samplersToDestroy[i]
);
renderer->samplersToDestroy[i] = renderer->samplersToDestroy[renderer->samplersToDestroyCount - 1];
renderer->samplersToDestroyCount -= 1;
}
}
for (i = renderer->framebuffersToDestroyCount - 1; i >= 0; i -= 1)
{
if (SDL_AtomicGet(&renderer->framebuffersToDestroy[i]->referenceCount) == 0)
{
VULKAN_INTERNAL_DestroyFramebuffer(
renderer,
renderer->framebuffersToDestroy[i]
);
renderer->framebuffersToDestroy[i] = renderer->framebuffersToDestroy[renderer->framebuffersToDestroyCount - 1];
renderer->framebuffersToDestroyCount -= 1;
}
}
SDL_UnlockMutex(renderer->disposeLock);
}
static void VULKAN_INTERNAL_CleanCommandBuffer(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer
) {
uint32_t i;
DescriptorSetData *descriptorSetData;
if (commandBuffer->autoReleaseFence)
{
VULKAN_INTERNAL_ReturnFenceToPool(
renderer,
commandBuffer->inFlightFence
);
commandBuffer->inFlightFence = VK_NULL_HANDLE;
}
/* Bound descriptor sets are now available */
for (i = 0; i < commandBuffer->boundDescriptorSetDataCount; i += 1)
{
descriptorSetData = &commandBuffer->boundDescriptorSetDatas[i];
SDL_LockMutex(descriptorSetData->descriptorSetCache->lock);
if (descriptorSetData->descriptorSetCache->inactiveDescriptorSetCount == descriptorSetData->descriptorSetCache->inactiveDescriptorSetCapacity)
{
descriptorSetData->descriptorSetCache->inactiveDescriptorSetCapacity *= 2;
descriptorSetData->descriptorSetCache->inactiveDescriptorSets = SDL_realloc(
descriptorSetData->descriptorSetCache->inactiveDescriptorSets,
descriptorSetData->descriptorSetCache->inactiveDescriptorSetCapacity * sizeof(VkDescriptorSet)
);
}
descriptorSetData->descriptorSetCache->inactiveDescriptorSets[descriptorSetData->descriptorSetCache->inactiveDescriptorSetCount] = descriptorSetData->descriptorSet;
descriptorSetData->descriptorSetCache->inactiveDescriptorSetCount += 1;
SDL_UnlockMutex(descriptorSetData->descriptorSetCache->lock);
}
commandBuffer->boundDescriptorSetDataCount = 0;
/* Decrement reference counts */
for (i = 0; i < commandBuffer->usedBufferCount; i += 1)
{
SDL_AtomicDecRef(&commandBuffer->usedBuffers[i]->referenceCount);
}
commandBuffer->usedBufferCount = 0;
for (i = 0; i < commandBuffer->usedTextureCount; i += 1)
{
SDL_AtomicDecRef(&commandBuffer->usedTextures[i]->referenceCount);
}
commandBuffer->usedTextureCount = 0;
for (i = 0; i < commandBuffer->usedSamplerCount; i += 1)
{
SDL_AtomicDecRef(&commandBuffer->usedSamplers[i]->referenceCount);
}
commandBuffer->usedSamplerCount = 0;
for (i = 0; i < commandBuffer->usedGraphicsPipelineCount; i += 1)
{
SDL_AtomicDecRef(&commandBuffer->usedGraphicsPipelines[i]->referenceCount);
}
commandBuffer->usedGraphicsPipelineCount = 0;
for (i = 0; i < commandBuffer->usedComputePipelineCount; i += 1)
{
SDL_AtomicDecRef(&commandBuffer->usedComputePipelines[i]->referenceCount);
}
commandBuffer->usedComputePipelineCount = 0;
for (i = 0; i < commandBuffer->usedFramebufferCount; i += 1)
{
SDL_AtomicDecRef(&commandBuffer->usedFramebuffers[i]->referenceCount);
}
commandBuffer->usedFramebufferCount = 0;
/* Reset presentation data */
commandBuffer->presentDataCount = 0;
commandBuffer->waitSemaphoreCount = 0;
commandBuffer->signalSemaphoreCount = 0;
/* Return command buffer to pool */
SDL_LockMutex(renderer->acquireCommandBufferLock);
if (commandBuffer->commandPool->inactiveCommandBufferCount == commandBuffer->commandPool->inactiveCommandBufferCapacity)
{
commandBuffer->commandPool->inactiveCommandBufferCapacity += 1;
commandBuffer->commandPool->inactiveCommandBuffers = SDL_realloc(
commandBuffer->commandPool->inactiveCommandBuffers,
commandBuffer->commandPool->inactiveCommandBufferCapacity * sizeof(VulkanCommandBuffer*)
);
}
commandBuffer->commandPool->inactiveCommandBuffers[
commandBuffer->commandPool->inactiveCommandBufferCount
] = commandBuffer;
commandBuffer->commandPool->inactiveCommandBufferCount += 1;
SDL_UnlockMutex(renderer->acquireCommandBufferLock);
/* Remove this command buffer from the submitted list */
for (i = 0; i < renderer->submittedCommandBufferCount; i += 1)
{
if (renderer->submittedCommandBuffers[i] == commandBuffer)
{
renderer->submittedCommandBuffers[i] = renderer->submittedCommandBuffers[renderer->submittedCommandBufferCount - 1];
renderer->submittedCommandBufferCount -= 1;
}
}
}
static void VULKAN_Wait(
Refresh_Renderer *driverData
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *commandBuffer;
VkResult result;
int32_t i;
result = renderer->vkDeviceWaitIdle(renderer->logicalDevice);
if (result != VK_SUCCESS)
{
LogVulkanResultAsError("vkDeviceWaitIdle", result);
return;
}
SDL_LockMutex(renderer->submitLock);
for (i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1)
{
commandBuffer = renderer->submittedCommandBuffers[i];
VULKAN_INTERNAL_CleanCommandBuffer(renderer, commandBuffer);
}
VULKAN_INTERNAL_PerformPendingDestroys(renderer);
SDL_UnlockMutex(renderer->submitLock);
}
static Refresh_Fence* VULKAN_SubmitAndAcquireFence(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer
) {
VulkanCommandBuffer *vulkanCommandBuffer;
vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
vulkanCommandBuffer->autoReleaseFence = 0;
VULKAN_Submit(driverData, commandBuffer);
return (Refresh_Fence*) vulkanCommandBuffer->inFlightFence;
}
static void VULKAN_Submit(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer
) {
VulkanRenderer* renderer = (VulkanRenderer*)driverData;
VkSubmitInfo submitInfo;
VkPresentInfoKHR presentInfo;
VulkanPresentData *presentData;
VkResult vulkanResult, presentResult = VK_SUCCESS;
VulkanCommandBuffer *vulkanCommandBuffer;
VkPipelineStageFlags waitStages[MAX_PRESENT_COUNT];
uint32_t swapchainImageIndex;
uint8_t commandBufferCleaned = 0;
VulkanMemorySubAllocator *allocator;
int32_t i, j;
SDL_LockMutex(renderer->submitLock);
/* FIXME: Can this just be permanent? */
for (i = 0; i < MAX_PRESENT_COUNT; i += 1)
{
waitStages[i] = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
}
vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
for (j = 0; j < vulkanCommandBuffer->presentDataCount; j += 1)
{
swapchainImageIndex = vulkanCommandBuffer->presentDatas[j].swapchainImageIndex;
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_PRESENT,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
1,
0,
1,
0,
vulkanCommandBuffer->presentDatas[j].windowData->swapchainData->textureContainers[swapchainImageIndex].vulkanTexture->image,
&vulkanCommandBuffer->presentDatas[j].windowData->swapchainData->textureContainers[swapchainImageIndex].vulkanTexture->resourceAccessType
);
}
VULKAN_INTERNAL_EndCommandBuffer(renderer, vulkanCommandBuffer);
vulkanCommandBuffer->inFlightFence = VULKAN_INTERNAL_AcquireFenceFromPool(renderer);
submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
submitInfo.pNext = NULL;
submitInfo.commandBufferCount = 1;
submitInfo.pCommandBuffers = &vulkanCommandBuffer->commandBuffer;
submitInfo.pWaitDstStageMask = waitStages;
submitInfo.pWaitSemaphores = vulkanCommandBuffer->waitSemaphores;
submitInfo.waitSemaphoreCount = vulkanCommandBuffer->waitSemaphoreCount;
submitInfo.pSignalSemaphores = vulkanCommandBuffer->signalSemaphores;
submitInfo.signalSemaphoreCount = vulkanCommandBuffer->signalSemaphoreCount;
vulkanResult = renderer->vkQueueSubmit(
renderer->unifiedQueue,
1,
&submitInfo,
vulkanCommandBuffer->inFlightFence
);
if (vulkanResult != VK_SUCCESS)
{
LogVulkanResultAsError("vkQueueSubmit", vulkanResult);
}
/* Mark command buffers as submitted */
if (renderer->submittedCommandBufferCount + 1 >= renderer->submittedCommandBufferCapacity)
{
renderer->submittedCommandBufferCapacity = renderer->submittedCommandBufferCount + 1;
renderer->submittedCommandBuffers = SDL_realloc(
renderer->submittedCommandBuffers,
sizeof(VulkanCommandBuffer*) * renderer->submittedCommandBufferCapacity
);
}
renderer->submittedCommandBuffers[renderer->submittedCommandBufferCount] = vulkanCommandBuffer;
renderer->submittedCommandBufferCount += 1;
/* Present, if applicable */
for (j = 0; j < vulkanCommandBuffer->presentDataCount; j += 1)
{
presentData = &vulkanCommandBuffer->presentDatas[j];
presentInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
presentInfo.pNext = NULL;
presentInfo.pWaitSemaphores = &presentData->windowData->swapchainData->renderFinishedSemaphore;
presentInfo.waitSemaphoreCount = 1;
presentInfo.pSwapchains = &presentData->windowData->swapchainData->swapchain;
presentInfo.swapchainCount = 1;
presentInfo.pImageIndices = &presentData->swapchainImageIndex;
presentInfo.pResults = NULL;
presentResult = renderer->vkQueuePresentKHR(
renderer->unifiedQueue,
&presentInfo
);
if (presentResult != VK_SUCCESS)
{
VULKAN_INTERNAL_RecreateSwapchain(
renderer,
presentData->windowData
);
}
}
/* Check if we can perform any cleanups */
for (i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1)
{
vulkanResult = renderer->vkGetFenceStatus(
renderer->logicalDevice,
renderer->submittedCommandBuffers[i]->inFlightFence
);
if (vulkanResult == VK_SUCCESS)
{
VULKAN_INTERNAL_CleanCommandBuffer(
renderer,
renderer->submittedCommandBuffers[i]
);
commandBufferCleaned = 1;
}
}
if (commandBufferCleaned)
{
SDL_LockMutex(renderer->allocatorLock);
for (i = 0; i < VK_MAX_MEMORY_TYPES; i += 1)
{
allocator = &renderer->memoryAllocator->subAllocators[i];
for (j = allocator->allocationCount - 1; j >= 0; j -= 1)
{
if (allocator->allocations[j]->usedRegionCount == 0)
{
VULKAN_INTERNAL_DeallocateMemory(
renderer,
allocator,
j
);
}
}
}
SDL_UnlockMutex(renderer->allocatorLock);
}
/* Check pending destroys */
VULKAN_INTERNAL_PerformPendingDestroys(renderer);
/* Defrag! */
if (renderer->needDefrag && !renderer->defragInProgress)
{
if (SDL_GetTicks64() >= renderer->defragTimestamp)
{
VULKAN_INTERNAL_DefragmentMemory(renderer);
}
}
SDL_UnlockMutex(renderer->submitLock);
}
static uint8_t VULKAN_INTERNAL_DefragmentMemory(
VulkanRenderer *renderer
) {
VulkanMemorySubAllocator allocator;
VulkanMemoryAllocation *allocation;
uint32_t allocationIndexToDefrag;
VulkanMemoryUsedRegion *currentRegion;
VulkanBuffer* newBuffer;
VulkanTexture* newTexture;
VkBufferCopy bufferCopy;
VkImageCopy *imageCopyRegions;
VulkanCommandBuffer *commandBuffer;
uint32_t i, level;
VulkanResourceAccessType copyResourceAccessType = RESOURCE_ACCESS_NONE;
VulkanResourceAccessType originalResourceAccessType;
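/* Defrag strategy: pick one allocation to vacate, copy every live region
 * into a freshly created resource, re-point the public containers at the
 * new resources, then queue the old ones for destruction
 */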
SDL_LockMutex(renderer->allocatorLock);
renderer->needDefrag = 0;
renderer->defragInProgress = 1;
commandBuffer = (VulkanCommandBuffer*) VULKAN_AcquireCommandBuffer((Refresh_Renderer *) renderer);
if (VULKAN_INTERNAL_FindAllocationToDefragment(
renderer,
&allocator,
&allocationIndexToDefrag
)) {
allocation = allocator.allocations[allocationIndexToDefrag];
VULKAN_INTERNAL_MakeMemoryUnavailable(
renderer,
allocation
);
/* For each used region in the allocation
* create a new resource, copy the data
* and re-point the resource containers
*/
for (i = 0; i < allocation->usedRegionCount; i += 1)
{
currentRegion = allocation->usedRegions[i];
copyResourceAccessType = RESOURCE_ACCESS_NONE;
if (currentRegion->isBuffer)
{
currentRegion->vulkanBuffer->usage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;
newBuffer = VULKAN_INTERNAL_CreateBuffer(
renderer,
currentRegion->vulkanBuffer->size,
RESOURCE_ACCESS_NONE,
currentRegion->vulkanBuffer->usage,
currentRegion->vulkanBuffer->requireHostVisible,
currentRegion->vulkanBuffer->requireHostLocal,
currentRegion->vulkanBuffer->preferDeviceLocal,
0
);
if (newBuffer == NULL)
{
Refresh_LogError("Failed to create defrag buffer!");
renderer->defragInProgress = 0;
SDL_UnlockMutex(renderer->allocatorLock);
return 0;
}
originalResourceAccessType = currentRegion->vulkanBuffer->resourceAccessType;
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
commandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
currentRegion->vulkanBuffer
);
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
commandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
newBuffer
);
bufferCopy.srcOffset = 0;
bufferCopy.dstOffset = 0;
bufferCopy.size = currentRegion->resourceSize;
renderer->vkCmdCopyBuffer(
commandBuffer->commandBuffer,
currentRegion->vulkanBuffer->buffer,
newBuffer->buffer,
1,
&bufferCopy
);
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
commandBuffer->commandBuffer,
originalResourceAccessType,
newBuffer
);
VULKAN_INTERNAL_TrackBuffer(renderer, commandBuffer, currentRegion->vulkanBuffer);
VULKAN_INTERNAL_TrackBuffer(renderer, commandBuffer, newBuffer);
/* re-point original container to new buffer */
if (currentRegion->vulkanBuffer->container != NULL)
{
newBuffer->container = currentRegion->vulkanBuffer->container;
newBuffer->container->vulkanBuffer = newBuffer;
currentRegion->vulkanBuffer->container = NULL;
}
VULKAN_INTERNAL_QueueDestroyBuffer(renderer, currentRegion->vulkanBuffer);
renderer->needDefrag = 1;
}
else
{
newTexture = VULKAN_INTERNAL_CreateTexture(
renderer,
currentRegion->vulkanTexture->dimensions.width,
currentRegion->vulkanTexture->dimensions.height,
currentRegion->vulkanTexture->depth,
currentRegion->vulkanTexture->isCube,
currentRegion->vulkanTexture->levelCount,
currentRegion->vulkanTexture->sampleCount,
currentRegion->vulkanTexture->format,
currentRegion->vulkanTexture->aspectFlags,
currentRegion->vulkanTexture->usageFlags
);
if (newTexture == NULL)
{
Refresh_LogError("Failed to create defrag texture!");
renderer->defragInProgress = 0;
SDL_UnlockMutex(renderer->allocatorLock);
return 0;
}
originalResourceAccessType = currentRegion->vulkanTexture->resourceAccessType;
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
commandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
currentRegion->vulkanTexture->aspectFlags,
0,
currentRegion->vulkanTexture->layerCount,
0,
currentRegion->vulkanTexture->levelCount,
0,
currentRegion->vulkanTexture->image,
&currentRegion->vulkanTexture->resourceAccessType
);
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
commandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
currentRegion->vulkanTexture->aspectFlags,
0,
currentRegion->vulkanTexture->layerCount,
0,
currentRegion->vulkanTexture->levelCount,
0,
newTexture->image,
&copyResourceAccessType
);
imageCopyRegions = SDL_stack_alloc(VkImageCopy, currentRegion->vulkanTexture->levelCount);
for (level = 0; level < currentRegion->vulkanTexture->levelCount; level += 1)
{
imageCopyRegions[level].srcOffset.x = 0;
imageCopyRegions[level].srcOffset.y = 0;
imageCopyRegions[level].srcOffset.z = 0;
imageCopyRegions[level].srcSubresource.aspectMask = currentRegion->vulkanTexture->aspectFlags;
imageCopyRegions[level].srcSubresource.baseArrayLayer = 0;
imageCopyRegions[level].srcSubresource.layerCount = currentRegion->vulkanTexture->layerCount;
imageCopyRegions[level].srcSubresource.mipLevel = level;
imageCopyRegions[level].extent.width = SDL_max(1, currentRegion->vulkanTexture->dimensions.width >> level);
imageCopyRegions[level].extent.height = SDL_max(1, currentRegion->vulkanTexture->dimensions.height >> level);
imageCopyRegions[level].extent.depth = currentRegion->vulkanTexture->depth;
imageCopyRegions[level].dstOffset.x = 0;
imageCopyRegions[level].dstOffset.y = 0;
imageCopyRegions[level].dstOffset.z = 0;
imageCopyRegions[level].dstSubresource.aspectMask = currentRegion->vulkanTexture->aspectFlags;
imageCopyRegions[level].dstSubresource.baseArrayLayer = 0;
imageCopyRegions[level].dstSubresource.layerCount = currentRegion->vulkanTexture->layerCount;
imageCopyRegions[level].dstSubresource.mipLevel = level;
}
renderer->vkCmdCopyImage(
commandBuffer->commandBuffer,
currentRegion->vulkanTexture->image,
AccessMap[currentRegion->vulkanTexture->resourceAccessType].imageLayout,
newTexture->image,
AccessMap[copyResourceAccessType].imageLayout,
currentRegion->vulkanTexture->levelCount,
imageCopyRegions
);
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
commandBuffer->commandBuffer,
originalResourceAccessType,
currentRegion->vulkanTexture->aspectFlags,
0,
currentRegion->vulkanTexture->layerCount,
0,
currentRegion->vulkanTexture->levelCount,
0,
newTexture->image,
&copyResourceAccessType
);
SDL_stack_free(imageCopyRegions);
VULKAN_INTERNAL_TrackTexture(renderer, commandBuffer, currentRegion->vulkanTexture);
VULKAN_INTERNAL_TrackTexture(renderer, commandBuffer, newTexture);
/* re-point original container to new texture */
newTexture->container = currentRegion->vulkanTexture->container;
newTexture->container->vulkanTexture = newTexture;
currentRegion->vulkanTexture->container = NULL;
VULKAN_INTERNAL_QueueDestroyTexture(renderer, currentRegion->vulkanTexture);
renderer->needDefrag = 1;
}
}
}
SDL_UnlockMutex(renderer->allocatorLock);
renderer->defragTimestamp = SDL_GetTicks64() + DEFRAG_TIME;
VULKAN_Submit(
(Refresh_Renderer*) renderer,
(Refresh_CommandBuffer*) commandBuffer
);
renderer->defragInProgress = 0;
return 1;
}
static void VULKAN_WaitForFences(
Refresh_Renderer *driverData,
uint8_t waitAll,
uint32_t fenceCount,
Refresh_Fence **pFences
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VkResult result;
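/* Refresh_Fence handles are VkFences underneath, so the array can be
 * passed to Vulkan directly
 */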
result = renderer->vkWaitForFences(
renderer->logicalDevice,
fenceCount,
(VkFence*) pFences,
waitAll,
UINT64_MAX
);
if (result != VK_SUCCESS)
{
LogVulkanResultAsError("vkWaitForFences", result);
}
}
static int VULKAN_QueryFence(
Refresh_Renderer *driverData,
Refresh_Fence *fence
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VkResult result;
result = renderer->vkGetFenceStatus(
renderer->logicalDevice,
(VkFence) fence
);
if (result == VK_SUCCESS)
{
return 1;
}
else if (result == VK_NOT_READY)
{
return 0;
}
else
{
LogVulkanResultAsError("vkGetFenceStatus", result);
return -1;
}
}
static void VULKAN_ReleaseFence(
Refresh_Renderer *driverData,
Refresh_Fence *fence
) {
VULKAN_INTERNAL_ReturnFenceToPool((VulkanRenderer*) driverData, (VkFence) fence);
}
/* Device instantiation */
static inline uint8_t CheckDeviceExtensions(
VkExtensionProperties *extensions,
uint32_t numExtensions,
VulkanExtensions *supports
) {
uint32_t i;
SDL_memset(supports, '\0', sizeof(VulkanExtensions));
for (i = 0; i < numExtensions; i += 1)
{
const char *name = extensions[i].extensionName;
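/* Map each advertised extension name onto its flag in VulkanExtensions */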
#define CHECK(ext) \
if (SDL_strcmp(name, "VK_" #ext) == 0) \
{ \
supports->ext = 1; \
}
CHECK(KHR_swapchain)
else CHECK(KHR_maintenance1)
else CHECK(KHR_get_memory_requirements2)
else CHECK(KHR_driver_properties)
else CHECK(EXT_vertex_attribute_divisor)
else CHECK(KHR_portability_subset)
#undef CHECK
}
return ( supports->KHR_swapchain &&
supports->KHR_maintenance1 &&
supports->KHR_get_memory_requirements2 );
}
static inline uint32_t GetDeviceExtensionCount(VulkanExtensions *supports)
{
return (
supports->KHR_swapchain +
supports->KHR_maintenance1 +
supports->KHR_get_memory_requirements2 +
supports->KHR_driver_properties +
supports->EXT_vertex_attribute_divisor +
supports->KHR_portability_subset
);
}
static inline void CreateDeviceExtensionArray(
VulkanExtensions *supports,
const char **extensions
) {
uint8_t cur = 0;
#define CHECK(ext) \
if (supports->ext) \
{ \
extensions[cur++] = "VK_" #ext; \
}
CHECK(KHR_swapchain)
CHECK(KHR_maintenance1)
CHECK(KHR_get_memory_requirements2)
CHECK(KHR_driver_properties)
CHECK(EXT_vertex_attribute_divisor)
CHECK(KHR_portability_subset)
#undef CHECK
}
static inline uint8_t SupportsInstanceExtension(
const char *ext,
VkExtensionProperties *availableExtensions,
uint32_t numAvailableExtensions
) {
uint32_t i;
for (i = 0; i < numAvailableExtensions; i += 1)
{
if (SDL_strcmp(ext, availableExtensions[i].extensionName) == 0)
{
return 1;
}
}
return 0;
}
static uint8_t VULKAN_INTERNAL_CheckInstanceExtensions(
const char **requiredExtensions,
uint32_t requiredExtensionsLength,
uint8_t *supportsDebugUtils
) {
uint32_t extensionCount, i;
VkExtensionProperties *availableExtensions;
uint8_t allExtensionsSupported = 1;
vkEnumerateInstanceExtensionProperties(
NULL,
&extensionCount,
NULL
);
availableExtensions = SDL_malloc(
extensionCount * sizeof(VkExtensionProperties)
);
vkEnumerateInstanceExtensionProperties(
NULL,
&extensionCount,
availableExtensions
);
for (i = 0; i < requiredExtensionsLength; i += 1)
{
if (!SupportsInstanceExtension(
requiredExtensions[i],
availableExtensions,
extensionCount
)) {
allExtensionsSupported = 0;
break;
}
}
/* This is optional, but nice to have! */
*supportsDebugUtils = SupportsInstanceExtension(
VK_EXT_DEBUG_UTILS_EXTENSION_NAME,
availableExtensions,
extensionCount
);
SDL_free(availableExtensions);
return allExtensionsSupported;
}
static uint8_t VULKAN_INTERNAL_CheckDeviceExtensions(
VulkanRenderer *renderer,
VkPhysicalDevice physicalDevice,
VulkanExtensions *physicalDeviceExtensions
) {
uint32_t extensionCount;
VkExtensionProperties *availableExtensions;
uint8_t allExtensionsSupported;
renderer->vkEnumerateDeviceExtensionProperties(
physicalDevice,
NULL,
&extensionCount,
NULL
);
availableExtensions = (VkExtensionProperties*) SDL_malloc(
extensionCount * sizeof(VkExtensionProperties)
);
renderer->vkEnumerateDeviceExtensionProperties(
physicalDevice,
NULL,
&extensionCount,
availableExtensions
);
allExtensionsSupported = CheckDeviceExtensions(
availableExtensions,
extensionCount,
physicalDeviceExtensions
);
SDL_free(availableExtensions);
return allExtensionsSupported;
}
static uint8_t VULKAN_INTERNAL_CheckValidationLayers(
const char** validationLayers,
uint32_t validationLayersLength
) {
uint32_t layerCount;
VkLayerProperties *availableLayers;
uint32_t i, j;
uint8_t layerFound = 0;
vkEnumerateInstanceLayerProperties(&layerCount, NULL);
availableLayers = (VkLayerProperties*) SDL_malloc(
layerCount * sizeof(VkLayerProperties)
);
vkEnumerateInstanceLayerProperties(&layerCount, availableLayers);
for (i = 0; i < validationLayersLength; i += 1)
{
layerFound = 0;
for (j = 0; j < layerCount; j += 1)
{
if (SDL_strcmp(validationLayers[i], availableLayers[j].layerName) == 0)
{
layerFound = 1;
break;
}
}
if (!layerFound)
{
break;
}
}
SDL_free(availableLayers);
return layerFound;
}
static uint8_t VULKAN_INTERNAL_CreateInstance(
VulkanRenderer *renderer,
void *deviceWindowHandle
) {
VkResult vulkanResult;
VkApplicationInfo appInfo;
const char **instanceExtensionNames;
uint32_t instanceExtensionCount;
VkInstanceCreateInfo createInfo;
static const char *layerNames[] = { "VK_LAYER_KHRONOS_validation" };
appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
appInfo.pNext = NULL;
appInfo.pApplicationName = NULL;
appInfo.applicationVersion = 0;
appInfo.pEngineName = "REFRESH";
appInfo.engineVersion = REFRESH_COMPILED_VERSION;
appInfo.apiVersion = VK_MAKE_VERSION(1, 0, 0);
if (!SDL_Vulkan_GetInstanceExtensions(
(SDL_Window*) deviceWindowHandle,
&instanceExtensionCount,
NULL
)) {
Refresh_LogError(
"SDL_Vulkan_GetInstanceExtensions(): getExtensionCount: %s",
SDL_GetError()
);
return 0;
}
/* Extra space for the following extensions:
* VK_KHR_get_physical_device_properties2
* VK_EXT_debug_utils
*/
instanceExtensionNames = SDL_stack_alloc(
const char*,
instanceExtensionCount + 2
);
if (!SDL_Vulkan_GetInstanceExtensions(
(SDL_Window*) deviceWindowHandle,
&instanceExtensionCount,
instanceExtensionNames
)) {
Refresh_LogError(
"SDL_Vulkan_GetInstanceExtensions(): %s",
SDL_GetError()
);
SDL_stack_free((char*) instanceExtensionNames);
return 0;
}
/* Core since 1.1 */
instanceExtensionNames[instanceExtensionCount++] =
VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME;
if (!VULKAN_INTERNAL_CheckInstanceExtensions(
instanceExtensionNames,
instanceExtensionCount,
&renderer->supportsDebugUtils
)) {
Refresh_LogError(
"Required Vulkan instance extensions not supported"
);
SDL_stack_free((char*) instanceExtensionNames);
return 0;
}
if (renderer->supportsDebugUtils)
{
/* Append the debug extension to the end */
instanceExtensionNames[instanceExtensionCount++] =
VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
}
else
{
Refresh_LogWarn(
"%s is not supported!",
VK_EXT_DEBUG_UTILS_EXTENSION_NAME
);
}
createInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
createInfo.pNext = NULL;
createInfo.flags = 0;
createInfo.pApplicationInfo = &appInfo;
createInfo.ppEnabledLayerNames = layerNames;
createInfo.enabledExtensionCount = instanceExtensionCount;
createInfo.ppEnabledExtensionNames = instanceExtensionNames;
if (renderer->debugMode)
{
createInfo.enabledLayerCount = SDL_arraysize(layerNames);
if (!VULKAN_INTERNAL_CheckValidationLayers(
layerNames,
createInfo.enabledLayerCount
)) {
Refresh_LogWarn("Validation layers not found, continuing without validation");
createInfo.enabledLayerCount = 0;
}
else
{
Refresh_LogInfo("Validation layers enabled, expect debug level performance!");
}
}
else
{
createInfo.enabledLayerCount = 0;
}
vulkanResult = vkCreateInstance(&createInfo, NULL, &renderer->instance);
if (vulkanResult != VK_SUCCESS)
{
Refresh_LogError(
"vkCreateInstance failed: %s",
VkErrorMessages(vulkanResult)
);
SDL_stack_free((char*) instanceExtensionNames);
return 0;
}
SDL_stack_free((char*) instanceExtensionNames);
return 1;
}
static uint8_t VULKAN_INTERNAL_IsDeviceSuitable(
VulkanRenderer *renderer,
VkPhysicalDevice physicalDevice,
VulkanExtensions *physicalDeviceExtensions,
VkSurfaceKHR surface,
uint32_t *queueFamilyIndex,
uint8_t *deviceRank
) {
uint32_t queueFamilyCount, queueFamilyRank, queueFamilyBest;
SwapChainSupportDetails swapchainSupportDetails;
VkQueueFamilyProperties *queueProps;
VkBool32 supportsPresent;
uint8_t querySuccess;
VkPhysicalDeviceProperties deviceProperties;
uint32_t i;
/* Get the device rank before doing any checks, in case one fails.
* Note: If no dedicated device exists, one that supports our features
* would be fine
*/
renderer->vkGetPhysicalDeviceProperties(
physicalDevice,
&deviceProperties
);
if (*deviceRank < DEVICE_PRIORITY[deviceProperties.deviceType])
{
/* This device outranks the best device we've found so far!
* This includes a dedicated GPU that has less features than an
* integrated GPU, because this is a freak case that is almost
* never intentionally desired by the end user
*/
*deviceRank = DEVICE_PRIORITY[deviceProperties.deviceType];
}
else if (*deviceRank > DEVICE_PRIORITY[deviceProperties.deviceType])
{
/* Device is outranked by a previous device, don't even try to
* run a query and reset the rank to avoid overwrites
*/
*deviceRank = 0;
return 0;
}
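/* (DEVICE_PRIORITY, defined earlier in this file, is assumed here to rank
 * discrete GPUs above integrated ones, with virtual/CPU/other types below.)
 */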
if (!VULKAN_INTERNAL_CheckDeviceExtensions(
renderer,
physicalDevice,
physicalDeviceExtensions
)) {
return 0;
}
renderer->vkGetPhysicalDeviceQueueFamilyProperties(
physicalDevice,
&queueFamilyCount,
NULL
);
queueProps = (VkQueueFamilyProperties*) SDL_stack_alloc(
VkQueueFamilyProperties,
queueFamilyCount
);
renderer->vkGetPhysicalDeviceQueueFamilyProperties(
physicalDevice,
&queueFamilyCount,
queueProps
);
queueFamilyBest = 0;
*queueFamilyIndex = UINT32_MAX;
for (i = 0; i < queueFamilyCount; i += 1)
{
renderer->vkGetPhysicalDeviceSurfaceSupportKHR(
physicalDevice,
i,
surface,
&supportsPresent
);
if ( !supportsPresent ||
!(queueProps[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) )
{
/* Not a graphics family, ignore. */
continue;
}
/* The queue family bitflags are kind of annoying.
*
* We of course need a graphics family, but we ideally want the
* _primary_ graphics family. The spec states that at least one
* graphics family must also be a compute family, so generally
* drivers make that the first one. But hey, maybe something
* genuinely can't do compute or something, and Refresh doesn't
* need it, so we'll be open to a non-compute queue family.
*
* Additionally, it's common to see the primary queue family
* have the transfer bit set, which is great! But this is
* actually optional; it's impossible to NOT have transfers in
* graphics/compute but it _is_ possible for a graphics/compute
* family, even the primary one, to just decide not to set the
* bitflag. Admittedly, a driver may want to isolate transfer
* queues to a dedicated family so that queues made solely for
* transfers can have an optimized DMA queue.
*
* That, or the driver author got lazy and decided not to set
* the bit. Looking at you, Android.
*
* -flibit
*/
if (queueProps[i].queueFlags & VK_QUEUE_COMPUTE_BIT)
{
if (queueProps[i].queueFlags & VK_QUEUE_TRANSFER_BIT)
{
/* Has all attribs! */
queueFamilyRank = 3;
}
else
{
/* Probably has a DMA transfer queue family */
queueFamilyRank = 2;
}
}
else
{
/* Just a graphics family, probably has something better */
queueFamilyRank = 1;
}
if (queueFamilyRank > queueFamilyBest)
{
*queueFamilyIndex = i;
queueFamilyBest = queueFamilyRank;
}
}
SDL_stack_free(queueProps);
if (*queueFamilyIndex == UINT32_MAX)
{
/* Somehow no graphics queues existed. Compute-only device? */
return 0;
}
/* FIXME: Need better structure for checking vs storing support details */
querySuccess = VULKAN_INTERNAL_QuerySwapChainSupport(
renderer,
physicalDevice,
surface,
&swapchainSupportDetails
);
if (swapchainSupportDetails.formatsLength > 0)
{
SDL_free(swapchainSupportDetails.formats);
}
if (swapchainSupportDetails.presentModesLength > 0)
{
SDL_free(swapchainSupportDetails.presentModes);
}
return ( querySuccess &&
swapchainSupportDetails.formatsLength > 0 &&
swapchainSupportDetails.presentModesLength > 0 );
}
static uint8_t VULKAN_INTERNAL_DeterminePhysicalDevice(
VulkanRenderer *renderer,
VkSurfaceKHR surface
) {
VkResult vulkanResult;
VkPhysicalDevice *physicalDevices;
VulkanExtensions *physicalDeviceExtensions;
uint32_t physicalDeviceCount, i, suitableIndex;
uint32_t queueFamilyIndex, suitableQueueFamilyIndex;
uint8_t deviceRank, highestRank;
vulkanResult = renderer->vkEnumeratePhysicalDevices(
renderer->instance,
&physicalDeviceCount,
NULL
);
VULKAN_ERROR_CHECK(vulkanResult, vkEnumeratePhysicalDevices, 0)
if (physicalDeviceCount == 0)
{
Refresh_LogWarn("Failed to find any GPUs with Vulkan support");
return 0;
}
physicalDevices = SDL_stack_alloc(VkPhysicalDevice, physicalDeviceCount);
physicalDeviceExtensions = SDL_stack_alloc(VulkanExtensions, physicalDeviceCount);
vulkanResult = renderer->vkEnumeratePhysicalDevices(
renderer->instance,
&physicalDeviceCount,
physicalDevices
);
/* This should be impossible to hit, but from what I can tell this can
* be triggered not because the array is too small, but because there
* were drivers that turned out to be bogus, so this is the loader's way
* of telling us that the list is now smaller than expected :shrug:
*/
if (vulkanResult == VK_INCOMPLETE)
{
Refresh_LogWarn("vkEnumeratePhysicalDevices returned VK_INCOMPLETE, will keep trying anyway...");
vulkanResult = VK_SUCCESS;
}
if (vulkanResult != VK_SUCCESS)
{
Refresh_LogWarn(
"vkEnumeratePhysicalDevices failed: %s",
VkErrorMessages(vulkanResult)
);
SDL_stack_free(physicalDevices);
SDL_stack_free(physicalDeviceExtensions);
return 0;
}
/* Any suitable device will do, but we'd like the best */
suitableIndex = -1;
highestRank = 0;
for (i = 0; i < physicalDeviceCount; i += 1)
{
deviceRank = highestRank;
if (VULKAN_INTERNAL_IsDeviceSuitable(
renderer,
physicalDevices[i],
&physicalDeviceExtensions[i],
surface,
&queueFamilyIndex,
&deviceRank
)) {
/* Use this for rendering.
* Note that this may override a previous device that
* supports rendering, but shares the same device rank.
*/
suitableIndex = i;
suitableQueueFamilyIndex = queueFamilyIndex;
highestRank = deviceRank;
}
else if (deviceRank > highestRank)
{
/* In this case, we found a... "realer?" GPU,
* but it doesn't actually support our Vulkan.
* We should disqualify all devices below as a
* result, because if we don't we end up
* ignoring real hardware and risk using
* something like LLVMpipe instead!
* -flibit
*/
suitableIndex = -1;
highestRank = deviceRank;
}
}
if (suitableIndex != -1)
{
renderer->supports = physicalDeviceExtensions[suitableIndex];
renderer->physicalDevice = physicalDevices[suitableIndex];
renderer->queueFamilyIndex = suitableQueueFamilyIndex;
}
else
{
SDL_stack_free(physicalDevices);
SDL_stack_free(physicalDeviceExtensions);
return 0;
}
renderer->physicalDeviceProperties.sType =
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
if (renderer->supports.KHR_driver_properties)
{
renderer->physicalDeviceDriverProperties.sType =
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR;
renderer->physicalDeviceDriverProperties.pNext = NULL;
renderer->physicalDeviceProperties.pNext =
&renderer->physicalDeviceDriverProperties;
}
else
{
renderer->physicalDeviceProperties.pNext = NULL;
}
renderer->vkGetPhysicalDeviceProperties2KHR(
renderer->physicalDevice,
&renderer->physicalDeviceProperties
);
renderer->vkGetPhysicalDeviceMemoryProperties(
renderer->physicalDevice,
&renderer->memoryProperties
);
SDL_stack_free(physicalDevices);
SDL_stack_free(physicalDeviceExtensions);
return 1;
}
static uint8_t VULKAN_INTERNAL_CreateLogicalDevice(
VulkanRenderer *renderer
) {
VkResult vulkanResult;
VkDeviceCreateInfo deviceCreateInfo;
VkPhysicalDeviceFeatures deviceFeatures;
VkPhysicalDevicePortabilitySubsetFeaturesKHR portabilityFeatures;
const char **deviceExtensions;
VkDeviceQueueCreateInfo queueCreateInfo;
float queuePriority = 1.0f;
queueCreateInfo.sType =
VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
queueCreateInfo.pNext = NULL;
queueCreateInfo.flags = 0;
queueCreateInfo.queueFamilyIndex = renderer->queueFamilyIndex;
queueCreateInfo.queueCount = 1;
queueCreateInfo.pQueuePriorities = &queuePriority;
/* specifying used device features */
SDL_zero(deviceFeatures);
deviceFeatures.fillModeNonSolid = VK_TRUE;
deviceFeatures.samplerAnisotropy = VK_TRUE;
deviceFeatures.multiDrawIndirect = VK_TRUE;
deviceFeatures.independentBlend = VK_TRUE;
/* creating the logical device */
deviceCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
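/* If the implementation advertises VK_KHR_portability_subset (e.g.
 * MoltenVK), the spec requires us to enable it and to declare which
 * portability features we rely on via this pNext chain.
 */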
if (renderer->supports.KHR_portability_subset)
{
portabilityFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR;
portabilityFeatures.pNext = NULL;
portabilityFeatures.constantAlphaColorBlendFactors = VK_FALSE;
portabilityFeatures.events = VK_FALSE;
portabilityFeatures.imageViewFormatReinterpretation = VK_FALSE;
portabilityFeatures.imageViewFormatSwizzle = VK_TRUE;
portabilityFeatures.imageView2DOn3DImage = VK_FALSE;
portabilityFeatures.multisampleArrayImage = VK_FALSE;
portabilityFeatures.mutableComparisonSamplers = VK_FALSE;
portabilityFeatures.pointPolygons = VK_FALSE;
portabilityFeatures.samplerMipLodBias = VK_FALSE; /* Technically should be true, but eh */
portabilityFeatures.separateStencilMaskRef = VK_FALSE;
portabilityFeatures.shaderSampleRateInterpolationFunctions = VK_FALSE;
portabilityFeatures.tessellationIsolines = VK_FALSE;
portabilityFeatures.tessellationPointMode = VK_FALSE;
portabilityFeatures.triangleFans = VK_FALSE;
portabilityFeatures.vertexAttributeAccessBeyondStride = VK_FALSE;
deviceCreateInfo.pNext = &portabilityFeatures;
}
else
{
deviceCreateInfo.pNext = NULL;
}
deviceCreateInfo.flags = 0;
deviceCreateInfo.queueCreateInfoCount = 1;
deviceCreateInfo.pQueueCreateInfos = &queueCreateInfo;
deviceCreateInfo.enabledLayerCount = 0;
deviceCreateInfo.ppEnabledLayerNames = NULL;
deviceCreateInfo.enabledExtensionCount = GetDeviceExtensionCount(
&renderer->supports
);
deviceExtensions = SDL_stack_alloc(
const char*,
deviceCreateInfo.enabledExtensionCount
);
CreateDeviceExtensionArray(&renderer->supports, deviceExtensions);
deviceCreateInfo.ppEnabledExtensionNames = deviceExtensions;
deviceCreateInfo.pEnabledFeatures = &deviceFeatures;
vulkanResult = renderer->vkCreateDevice(
renderer->physicalDevice,
&deviceCreateInfo,
NULL,
&renderer->logicalDevice
);
SDL_stack_free(deviceExtensions);
VULKAN_ERROR_CHECK(vulkanResult, vkCreateDevice, 0)
/* Load vkDevice entry points */
#define VULKAN_DEVICE_FUNCTION(ext, ret, func, params) \
renderer->func = (vkfntype_##func) \
renderer->vkGetDeviceProcAddr( \
renderer->logicalDevice, \
#func \
);
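/* For example, the vkfuncs entry for vkQueueSubmit expands to (roughly):
 *
 *   renderer->vkQueueSubmit = (vkfntype_vkQueueSubmit)
 *       renderer->vkGetDeviceProcAddr(renderer->logicalDevice, "vkQueueSubmit");
 */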
#include "Refresh_Driver_Vulkan_vkfuncs.h"
renderer->vkGetDeviceQueue(
renderer->logicalDevice,
renderer->queueFamilyIndex,
0,
&renderer->unifiedQueue
);
return 1;
}
static void VULKAN_INTERNAL_LoadEntryPoints(void)
{
/* Required for MoltenVK support */
SDL_setenv("MVK_CONFIG_FULL_IMAGE_VIEW_SWIZZLE", "1", 1);
/* Load Vulkan entry points */
if (SDL_Vulkan_LoadLibrary(NULL) < 0)
{
Refresh_LogWarn("Vulkan: SDL_Vulkan_LoadLibrary failed!");
return;
}
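/* ISO C forbids converting an object pointer to a function pointer, which
 * is exactly what this cast does, hence the -Wpedantic suppression.
 */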
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpedantic"
vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)SDL_Vulkan_GetVkGetInstanceProcAddr();
#pragma GCC diagnostic pop
if (vkGetInstanceProcAddr == NULL)
{
Refresh_LogWarn(
"SDL_Vulkan_GetVkGetInstanceProcAddr(): %s",
SDL_GetError()
);
return;
}
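/* Pre-instance entry points: the loader only exposes a handful of functions
 * (vkCreateInstance and the vkEnumerateInstance* queries) when given
 * VK_NULL_HANDLE here.
 */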
#define VULKAN_GLOBAL_FUNCTION(name) \
name = (PFN_##name) vkGetInstanceProcAddr(VK_NULL_HANDLE, #name); \
if (name == NULL) \
{ \
Refresh_LogWarn("vkGetInstanceProcAddr(VK_NULL_HANDLE, \"" #name "\") failed"); \
return; \
}
#include "Refresh_Driver_Vulkan_vkfuncs.h"
}
static uint8_t VULKAN_INTERNAL_PrepareVulkan(
VulkanRenderer *renderer
) {
SDL_Window *dummyWindowHandle;
VkSurfaceKHR surface;
VULKAN_INTERNAL_LoadEntryPoints();
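/* SDL can only report the required instance extensions and create a probe
 * VkSurfaceKHR for a real window, so make a small hidden one.
 */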
dummyWindowHandle = SDL_CreateWindow(
"Refresh Vulkan",
0, 0,
128, 128,
SDL_WINDOW_VULKAN | SDL_WINDOW_HIDDEN
);
if (dummyWindowHandle == NULL)
{
Refresh_LogWarn("Vulkan: Could not create dummy window");
return 0;
}
if (!VULKAN_INTERNAL_CreateInstance(renderer, dummyWindowHandle))
{
SDL_DestroyWindow(dummyWindowHandle);
Refresh_LogWarn("Vulkan: Could not create Vulkan instance");
return 0;
}
if (!SDL_Vulkan_CreateSurface(
(SDL_Window*) dummyWindowHandle,
renderer->instance,
&surface
)) {
SDL_DestroyWindow(dummyWindowHandle);
Refresh_LogWarn(
"SDL_Vulkan_CreateSurface failed: %s",
SDL_GetError()
);
return 0;
}
#define VULKAN_INSTANCE_FUNCTION(ext, ret, func, params) \
renderer->func = (vkfntype_##func) vkGetInstanceProcAddr(renderer->instance, #func);
#include "Refresh_Driver_Vulkan_vkfuncs.h"
if (!VULKAN_INTERNAL_DeterminePhysicalDevice(renderer, surface))
{
/* Don't leak the probe surface/window on failure */
renderer->vkDestroySurfaceKHR(renderer->instance, surface, NULL);
SDL_DestroyWindow(dummyWindowHandle);
return 0;
}
renderer->vkDestroySurfaceKHR(
renderer->instance,
surface,
NULL
);
SDL_DestroyWindow(dummyWindowHandle);
return 1;
}
static uint8_t VULKAN_PrepareDriver(uint32_t *flags)
{
/* Set up dummy VulkanRenderer */
VulkanRenderer *renderer = (VulkanRenderer*) SDL_malloc(sizeof(VulkanRenderer));
uint8_t result;
SDL_memset(renderer, '\0', sizeof(VulkanRenderer));
result = VULKAN_INTERNAL_PrepareVulkan(renderer);
if (!result)
{
Refresh_LogWarn("Vulkan: Failed to determine a suitable physical device");
}
else
{
*flags = SDL_WINDOW_VULKAN;
}
/* The probe instance may not exist if PrepareVulkan failed early, so only
 * destroy it if it was actually created */
if (renderer->vkDestroyInstance != NULL && renderer->instance != VK_NULL_HANDLE)
{
renderer->vkDestroyInstance(renderer->instance, NULL);
}
SDL_free(renderer);
return result;
}
static Refresh_Device* VULKAN_CreateDevice(
uint8_t debugMode
) {
VulkanRenderer *renderer = (VulkanRenderer*) SDL_malloc(sizeof(VulkanRenderer));
Refresh_Device *result;
VkResult vulkanResult;
uint32_t i;
/* Variables: Descriptor set layouts */
VkDescriptorSetLayoutCreateInfo setLayoutCreateInfo;
VkDescriptorSetLayoutBinding vertexParamLayoutBinding;
VkDescriptorSetLayoutBinding fragmentParamLayoutBinding;
VkDescriptorSetLayoutBinding computeParamLayoutBinding;
VkDescriptorSetLayoutBinding emptyVertexSamplerLayoutBinding;
VkDescriptorSetLayoutBinding emptyFragmentSamplerLayoutBinding;
VkDescriptorSetLayoutBinding emptyComputeBufferDescriptorSetLayoutBinding;
VkDescriptorSetLayoutBinding emptyComputeImageDescriptorSetLayoutBinding;
/* Variables: UBO Creation */
VkDescriptorPoolCreateInfo defaultDescriptorPoolInfo;
VkDescriptorPoolSize poolSizes[4];
VkDescriptorSetAllocateInfo descriptorAllocateInfo;
/* Variables: Image Format Detection */
VkImageFormatProperties imageFormatProperties;
SDL_memset(renderer, '\0', sizeof(VulkanRenderer));
renderer->debugMode = debugMode;
if (!VULKAN_INTERNAL_PrepareVulkan(renderer))
{
Refresh_LogError("Failed to initialize Vulkan!");
SDL_free(renderer);
return NULL;
}
Refresh_LogInfo("Refresh Driver: Vulkan");
Refresh_LogInfo(
"Vulkan Device: %s",
renderer->physicalDeviceProperties.properties.deviceName
);
Refresh_LogInfo(
"Vulkan Driver: %s %s",
renderer->physicalDeviceDriverProperties.driverName,
renderer->physicalDeviceDriverProperties.driverInfo
);
Refresh_LogInfo(
"Vulkan Conformance: %u.%u.%u",
renderer->physicalDeviceDriverProperties.conformanceVersion.major,
renderer->physicalDeviceDriverProperties.conformanceVersion.minor,
renderer->physicalDeviceDriverProperties.conformanceVersion.patch
);
if (!VULKAN_INTERNAL_CreateLogicalDevice(
renderer
)) {
Refresh_LogError("Failed to create logical device");
renderer->vkDestroyInstance(renderer->instance, NULL);
SDL_free(renderer);
return NULL;
}
/* FIXME: just move this into this function */
result = (Refresh_Device*) SDL_malloc(sizeof(Refresh_Device));
ASSIGN_DRIVER(VULKAN)
result->driverData = (Refresh_Renderer*) renderer;
/*
 * Create initial claimed window list
 */
renderer->claimedWindowCapacity = 1;
renderer->claimedWindowCount = 0;
renderer->claimedWindows = SDL_malloc(
renderer->claimedWindowCapacity * sizeof(WindowData*)
);
/* Threading */
renderer->allocatorLock = SDL_CreateMutex();
renderer->disposeLock = SDL_CreateMutex();
renderer->submitLock = SDL_CreateMutex();
renderer->acquireCommandBufferLock = SDL_CreateMutex();
renderer->renderPassFetchLock = SDL_CreateMutex();
renderer->framebufferFetchLock = SDL_CreateMutex();
renderer->renderTargetFetchLock = SDL_CreateMutex();
/*
* Create submitted command buffer list
*/
renderer->submittedCommandBufferCapacity = 16;
renderer->submittedCommandBufferCount = 0;
renderer->submittedCommandBuffers = SDL_malloc(sizeof(VulkanCommandBuffer*) * renderer->submittedCommandBufferCapacity);
/* Memory Allocator */
renderer->memoryAllocator = (VulkanMemoryAllocator*) SDL_malloc(
sizeof(VulkanMemoryAllocator)
);
for (i = 0; i < VK_MAX_MEMORY_TYPES; i += 1)
{
renderer->memoryAllocator->subAllocators[i].memoryTypeIndex = i;
renderer->memoryAllocator->subAllocators[i].nextAllocationSize = STARTING_ALLOCATION_SIZE;
renderer->memoryAllocator->subAllocators[i].allocations = NULL;
renderer->memoryAllocator->subAllocators[i].allocationCount = 0;
renderer->memoryAllocator->subAllocators[i].sortedFreeRegions = SDL_malloc(
sizeof(VulkanMemoryFreeRegion*) * 4
);
renderer->memoryAllocator->subAllocators[i].sortedFreeRegionCount = 0;
renderer->memoryAllocator->subAllocators[i].sortedFreeRegionCapacity = 4;
}
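/* One sub-allocator per Vulkan memory type; each one sub-divides large
 * device allocations and keeps a sorted list of free regions.
 */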
/* Set up UBO layouts */
renderer->minUBOAlignment = (uint32_t) renderer->physicalDeviceProperties.properties.limits.minUniformBufferOffsetAlignment;
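/* Dynamic UBO offsets handed to vkCmdBindDescriptorSets must be multiples
 * of this device limit; it is cached here so uniform buffer offsets can be
 * rounded up to it.
 */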
emptyVertexSamplerLayoutBinding.binding = 0;
emptyVertexSamplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
emptyVertexSamplerLayoutBinding.descriptorCount = 0;
emptyVertexSamplerLayoutBinding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
emptyVertexSamplerLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
setLayoutCreateInfo.pNext = NULL;
setLayoutCreateInfo.flags = 0;
setLayoutCreateInfo.bindingCount = 1;
setLayoutCreateInfo.pBindings = &emptyVertexSamplerLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->emptyVertexSamplerLayout
);
emptyFragmentSamplerLayoutBinding.binding = 0;
emptyFragmentSamplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
emptyFragmentSamplerLayoutBinding.descriptorCount = 0;
emptyFragmentSamplerLayoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
emptyFragmentSamplerLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.pBindings = &emptyFragmentSamplerLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->emptyFragmentSamplerLayout
);
emptyComputeBufferDescriptorSetLayoutBinding.binding = 0;
emptyComputeBufferDescriptorSetLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
emptyComputeBufferDescriptorSetLayoutBinding.descriptorCount = 0;
emptyComputeBufferDescriptorSetLayoutBinding.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
emptyComputeBufferDescriptorSetLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.pBindings = &emptyComputeBufferDescriptorSetLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->emptyComputeBufferDescriptorSetLayout
);
emptyComputeImageDescriptorSetLayoutBinding.binding = 0;
emptyComputeImageDescriptorSetLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
emptyComputeImageDescriptorSetLayoutBinding.descriptorCount = 0;
emptyComputeImageDescriptorSetLayoutBinding.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
emptyComputeImageDescriptorSetLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.pBindings = &emptyComputeImageDescriptorSetLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->emptyComputeImageDescriptorSetLayout
);
vertexParamLayoutBinding.binding = 0;
vertexParamLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
vertexParamLayoutBinding.descriptorCount = 1;
vertexParamLayoutBinding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
vertexParamLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.bindingCount = 1;
setLayoutCreateInfo.pBindings = &vertexParamLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->vertexUniformDescriptorSetLayout
);
if (vulkanResult != VK_SUCCESS)
{
Refresh_LogError("Failed to create vertex UBO layout!");
return NULL;
}
fragmentParamLayoutBinding.binding = 0;
fragmentParamLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
fragmentParamLayoutBinding.descriptorCount = 1;
fragmentParamLayoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
fragmentParamLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.bindingCount = 1;
setLayoutCreateInfo.pBindings = &fragmentParamLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->fragmentUniformDescriptorSetLayout
);
if (vulkanResult != VK_SUCCESS)
{
Refresh_LogError("Failed to create fragment UBO layout!");
return NULL;
}
computeParamLayoutBinding.binding = 0;
computeParamLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
computeParamLayoutBinding.descriptorCount = 1;
computeParamLayoutBinding.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
computeParamLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.bindingCount = 1;
setLayoutCreateInfo.pBindings = &computeParamLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->computeUniformDescriptorSetLayout
);
/* Default Descriptors */
poolSizes[0].descriptorCount = 2;
poolSizes[0].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
poolSizes[1].descriptorCount = 1;
poolSizes[1].type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
poolSizes[2].descriptorCount = 1;
poolSizes[2].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
poolSizes[3].descriptorCount = 3;
poolSizes[3].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
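/* Pool budget for the sets allocated below: two empty sampler sets, one
 * empty compute buffer set, one empty compute image set, plus (presumably)
 * the three dynamic uniform buffer sets, hence maxSets = 2 + 1 + 1 + 3.
 */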
defaultDescriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
defaultDescriptorPoolInfo.pNext = NULL;
defaultDescriptorPoolInfo.flags = 0;
defaultDescriptorPoolInfo.maxSets = 2 + 1 + 1 + 3;
defaultDescriptorPoolInfo.poolSizeCount = 4;
defaultDescriptorPoolInfo.pPoolSizes = poolSizes;
renderer->vkCreateDescriptorPool(
renderer->logicalDevice,
&defaultDescriptorPoolInfo,
NULL,
&renderer->defaultDescriptorPool
);
descriptorAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
descriptorAllocateInfo.pNext = NULL;
descriptorAllocateInfo.descriptorPool = renderer->defaultDescriptorPool;
descriptorAllocateInfo.descriptorSetCount = 1;
descriptorAllocateInfo.pSetLayouts = &renderer->emptyVertexSamplerLayout;
renderer->vkAllocateDescriptorSets(
renderer->logicalDevice,
&descriptorAllocateInfo,
&renderer->emptyVertexSamplerDescriptorSet
);
descriptorAllocateInfo.pSetLayouts = &renderer->emptyFragmentSamplerLayout;
renderer->vkAllocateDescriptorSets(
renderer->logicalDevice,
&descriptorAllocateInfo,
&renderer->emptyFragmentSamplerDescriptorSet
);
descriptorAllocateInfo.pSetLayouts = &renderer->emptyComputeBufferDescriptorSetLayout;
renderer->vkAllocateDescriptorSets(
renderer->logicalDevice,
&descriptorAllocateInfo,
&renderer->emptyComputeBufferDescriptorSet
);
descriptorAllocateInfo.pSetLayouts = &renderer->emptyComputeImageDescriptorSetLayout;
renderer->vkAllocateDescriptorSets(
renderer->logicalDevice,
&descriptorAllocateInfo,
&renderer->emptyComputeImageDescriptorSet
);
/* Initialize uniform buffer objects */
renderer->vertexUniformBufferObject = VULKAN_INTERNAL_CreateUniformBufferObject(
renderer,
UNIFORM_BUFFER_VERTEX
);
renderer->fragmentUniformBufferObject = VULKAN_INTERNAL_CreateUniformBufferObject(
renderer,
UNIFORM_BUFFER_FRAGMENT
);
renderer->computeUniformBufferObject = VULKAN_INTERNAL_CreateUniformBufferObject(
renderer,
UNIFORM_BUFFER_COMPUTE
);
/* Initialize caches */
for (i = 0; i < NUM_COMMAND_POOL_BUCKETS; i += 1)
{
renderer->commandPoolHashTable.buckets[i].elements = NULL;
renderer->commandPoolHashTable.buckets[i].count = 0;
renderer->commandPoolHashTable.buckets[i].capacity = 0;
}
for (i = 0; i < NUM_PIPELINE_LAYOUT_BUCKETS; i += 1)
{
renderer->graphicsPipelineLayoutHashTable.buckets[i].elements = NULL;
renderer->graphicsPipelineLayoutHashTable.buckets[i].count = 0;
renderer->graphicsPipelineLayoutHashTable.buckets[i].capacity = 0;
}
for (i = 0; i < NUM_PIPELINE_LAYOUT_BUCKETS; i += 1)
{
renderer->computePipelineLayoutHashTable.buckets[i].elements = NULL;
renderer->computePipelineLayoutHashTable.buckets[i].count = 0;
renderer->computePipelineLayoutHashTable.buckets[i].capacity = 0;
}
for (i = 0; i < NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS; i += 1)
{
renderer->descriptorSetLayoutHashTable.buckets[i].elements = NULL;
renderer->descriptorSetLayoutHashTable.buckets[i].count = 0;
renderer->descriptorSetLayoutHashTable.buckets[i].capacity = 0;
}
renderer->renderPassHashArray.elements = NULL;
renderer->renderPassHashArray.count = 0;
renderer->renderPassHashArray.capacity = 0;
renderer->framebufferHashArray.elements = NULL;
renderer->framebufferHashArray.count = 0;
renderer->framebufferHashArray.capacity = 0;
renderer->renderTargetHashArray.elements = NULL;
renderer->renderTargetHashArray.count = 0;
renderer->renderTargetHashArray.capacity = 0;
/* Initialize fence pool */
renderer->fencePool.lock = SDL_CreateMutex();
renderer->fencePool.availableFenceCapacity = 4;
renderer->fencePool.availableFenceCount = 0;
renderer->fencePool.availableFences = SDL_malloc(
renderer->fencePool.availableFenceCapacity * sizeof(VkFence)
);
/* Some drivers don't support D16, so we have to fall back to D32. */
vulkanResult = renderer->vkGetPhysicalDeviceImageFormatProperties(
renderer->physicalDevice,
VK_FORMAT_D16_UNORM,
VK_IMAGE_TYPE_2D,
VK_IMAGE_TILING_OPTIMAL,
VK_IMAGE_ASPECT_DEPTH_BIT,
0,
&imageFormatProperties
);
if (vulkanResult == VK_ERROR_FORMAT_NOT_SUPPORTED)
{
renderer->D16Format = VK_FORMAT_D32_SFLOAT;
}
else
{
renderer->D16Format = VK_FORMAT_D16_UNORM;
}
vulkanResult = renderer->vkGetPhysicalDeviceImageFormatProperties(
renderer->physicalDevice,
VK_FORMAT_D16_UNORM_S8_UINT,
VK_IMAGE_TYPE_2D,
VK_IMAGE_TILING_OPTIMAL,
VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT,
0,
&imageFormatProperties
);
if (vulkanResult == VK_ERROR_FORMAT_NOT_SUPPORTED)
{
renderer->D16S8Format = VK_FORMAT_D32_SFLOAT_S8_UINT;
}
else
{
renderer->D16S8Format = VK_FORMAT_D16_UNORM_S8_UINT;
}
/* Deferred destroy storage */
renderer->texturesToDestroyCapacity = 16;
renderer->texturesToDestroyCount = 0;
renderer->texturesToDestroy = (VulkanTexture**)SDL_malloc(
sizeof(VulkanTexture*) *
renderer->texturesToDestroyCapacity
);
renderer->buffersToDestroyCapacity = 16;
renderer->buffersToDestroyCount = 0;
renderer->buffersToDestroy = SDL_malloc(
sizeof(VulkanBuffer*) *
renderer->buffersToDestroyCapacity
);
renderer->samplersToDestroyCapacity = 16;
renderer->samplersToDestroyCount = 0;
renderer->samplersToDestroy = SDL_malloc(
sizeof(VulkanSampler*) *
renderer->samplersToDestroyCapacity
);
renderer->graphicsPipelinesToDestroyCapacity = 16;
renderer->graphicsPipelinesToDestroyCount = 0;
renderer->graphicsPipelinesToDestroy = SDL_malloc(
sizeof(VulkanGraphicsPipeline*) *
renderer->graphicsPipelinesToDestroyCapacity
);
renderer->computePipelinesToDestroyCapacity = 16;
renderer->computePipelinesToDestroyCount = 0;
renderer->computePipelinesToDestroy = SDL_malloc(
sizeof(VulkanComputePipeline*) *
renderer->computePipelinesToDestroyCapacity
);
renderer->shaderModulesToDestroyCapacity = 16;
renderer->shaderModulesToDestroyCount = 0;
renderer->shaderModulesToDestroy = SDL_malloc(
sizeof(VulkanShaderModule*) *
renderer->shaderModulesToDestroyCapacity
);
renderer->framebuffersToDestroyCapacity = 16;
renderer->framebuffersToDestroyCount = 0;
renderer->framebuffersToDestroy = SDL_malloc(
sizeof(VulkanFramebuffer*) *
renderer->framebuffersToDestroyCapacity
);
renderer->needDefrag = 0;
renderer->defragTimestamp = 0;
renderer->defragInProgress = 0;
return result;
}
Refresh_Driver VulkanDriver = {
"Vulkan",
VULKAN_PrepareDriver,
VULKAN_CreateDevice
};
#endif /* REFRESH_DRIVER_VULKAN */