/* Refresh/src/Refresh_Driver_Vulkan.c */
/* Refresh - XNA-inspired 3D Graphics Library with modern capabilities
 *
*
* Copyright (c) 2020 Evan Hemsley
*
* This software is provided 'as-is', without any express or implied warranty.
* In no event will the authors be held liable for any damages arising from
* the use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you must not
* claim that you wrote the original software. If you use this software in a
* product, an acknowledgment in the product documentation would be
* appreciated but is not required.
*
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
*
* 3. This notice may not be removed or altered from any source distribution.
*
* Evan "cosmonaut" Hemsley <evan@moonside.games>
*
*/
#if REFRESH_DRIVER_VULKAN

/* Needed for VK_KHR_portability_subset */
#define VK_ENABLE_BETA_EXTENSIONS

/* We load every Vulkan entry point dynamically, so suppress the prototypes. */
#define VK_NO_PROTOTYPES
#include "vulkan/vulkan.h"

#include "Refresh_Driver.h"

#include <SDL.h>
#include <SDL_syswm.h>
#include <SDL_vulkan.h>

/* Clamp val into [min, max]. Arguments may be evaluated more than once. */
#define VULKAN_INTERNAL_clamp(val, min, max) SDL_max(min, SDL_min(val, max))

/* Global Vulkan Loader Entry Points */

static PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = NULL;

/* Declare a static function pointer for every global-scope Vulkan function. */
#define VULKAN_GLOBAL_FUNCTION(name) \
	static PFN_##name name = NULL;
#include "Refresh_Driver_Vulkan_vkfuncs.h"

/* vkInstance/vkDevice function typedefs */

#define VULKAN_INSTANCE_FUNCTION(ext, ret, func, params) \
	typedef ret (VKAPI_CALL *vkfntype_##func) params;
#define VULKAN_DEVICE_FUNCTION(ext, ret, func, params) \
	typedef ret (VKAPI_CALL *vkfntype_##func) params;
#include "Refresh_Driver_Vulkan_vkfuncs.h"
/* Tracks which optional device extensions were found at device creation.
 * Each field is 1 if the extension is supported, 0 otherwise.
 */
typedef struct VulkanExtensions
{
	/* Globally supported */
	uint8_t KHR_swapchain;

	/* Core since 1.1 */
	uint8_t KHR_maintenance1;
	uint8_t KHR_get_memory_requirements2;

	/* Core since 1.2 */
	uint8_t KHR_driver_properties;

	/* EXT, probably not going to be Core */
	uint8_t EXT_vertex_attribute_divisor;

	/* Only required for special implementations (i.e. MoltenVK) */
	uint8_t KHR_portability_subset;
} VulkanExtensions;
/* Defines */

/* Allocations at or below this size come from small sub-allocator blocks. */
#define SMALL_ALLOCATION_THRESHOLD 1048576  /* 1 MiB */
#define SMALL_ALLOCATION_SIZE 16777216      /* 16 MiB */
#define LARGE_ALLOCATION_INCREMENT 67108864 /* 64 MiB */
#define UBO_BUFFER_SIZE 16777216            /* 16 MiB */
#define MAX_UBO_SECTION_SIZE 4096           /* 4 KiB */

#define DESCRIPTOR_POOL_STARTING_SIZE 128
#define MAX_FRAMES_IN_FLIGHT 3
/* SDL window-property key under which the driver stores its WindowData. */
#define WINDOW_DATA "Refresh_VulkanWindowData"

/* Identity component mapping for image views. */
#define IDENTITY_SWIZZLE \
{ \
	VK_COMPONENT_SWIZZLE_IDENTITY, \
	VK_COMPONENT_SWIZZLE_IDENTITY, \
	VK_COMPONENT_SWIZZLE_IDENTITY, \
	VK_COMPONENT_SWIZZLE_IDENTITY \
}

#define NULL_DESC_LAYOUT (VkDescriptorSetLayout) 0
#define NULL_PIPELINE_LAYOUT (VkPipelineLayout) 0
#define NULL_RENDER_PASS (Refresh_RenderPass*) 0
/* Grow arr (a struct with elements/count/capacity) when it is full.
 * Capacity starts at initialValue and doubles thereafter.
 * NOTE: the SDL_realloc result is not checked; an OOM here would crash
 * on the next element write (consistent with the rest of this driver).
 */
#define EXPAND_ELEMENTS_IF_NEEDED(arr, initialValue, type) \
	if (arr->count == arr->capacity) \
	{ \
		if (arr->capacity == 0) \
		{ \
			arr->capacity = initialValue; \
		} \
		else \
		{ \
			arr->capacity *= 2; \
		} \
		arr->elements = (type*) SDL_realloc( \
			arr->elements, \
			arr->capacity * sizeof(type) \
		); \
	}
/* Grow a bare array pointer to newCapacity when newCount would not fit.
 * capacity is an lvalue that is updated in place.
 */
#define EXPAND_ARRAY_IF_NEEDED(arr, elementType, newCount, capacity, newCapacity) \
	if (newCount >= capacity) \
	{ \
		capacity = newCapacity; \
		arr = (elementType*) SDL_realloc( \
			arr, \
			sizeof(elementType) * capacity \
		); \
	}
/* Copy srcArr[0..srcCount) into dstArr, set dstCount, and empty the source.
 * i is a caller-provided loop variable. dstArr must have room for srcCount
 * elements; the source contents are left in place but its count is zeroed.
 */
#define MOVE_ARRAY_CONTENTS_AND_RESET(i, dstArr, dstCount, srcArr, srcCount) \
	for (i = 0; i < srcCount; i += 1) \
	{ \
		dstArr[i] = srcArr[i]; \
	} \
	dstCount = srcCount; \
	srcCount = 0;
/* Enums */

/* Abstract access classification used to derive barriers; every value must
 * have a matching entry (same index) in AccessMap below.
 * Ordered: reads, then writes, then read-writes — code relies on
 * RESOURCE_ACCESS_END_OF_READ separating reads from writes.
 */
typedef enum VulkanResourceAccessType
{
	/* Reads */
	RESOURCE_ACCESS_NONE, /* For initialization */
	RESOURCE_ACCESS_INDEX_BUFFER,
	RESOURCE_ACCESS_VERTEX_BUFFER,
	RESOURCE_ACCESS_INDIRECT_BUFFER,
	RESOURCE_ACCESS_VERTEX_SHADER_READ_UNIFORM_BUFFER,
	RESOURCE_ACCESS_VERTEX_SHADER_READ_SAMPLED_IMAGE,
	RESOURCE_ACCESS_FRAGMENT_SHADER_READ_UNIFORM_BUFFER,
	RESOURCE_ACCESS_FRAGMENT_SHADER_READ_SAMPLED_IMAGE,
	RESOURCE_ACCESS_FRAGMENT_SHADER_READ_COLOR_ATTACHMENT,
	RESOURCE_ACCESS_FRAGMENT_SHADER_READ_DEPTH_STENCIL_ATTACHMENT,
	RESOURCE_ACCESS_COMPUTE_SHADER_READ_UNIFORM_BUFFER,
	RESOURCE_ACCESS_COMPUTE_SHADER_READ_SAMPLED_IMAGE_OR_UNIFORM_TEXEL_BUFFER,
	RESOURCE_ACCESS_COMPUTE_SHADER_READ_OTHER,
	RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE,
	RESOURCE_ACCESS_COLOR_ATTACHMENT_READ,
	RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ,
	RESOURCE_ACCESS_TRANSFER_READ,
	RESOURCE_ACCESS_HOST_READ,
	RESOURCE_ACCESS_PRESENT,
	RESOURCE_ACCESS_END_OF_READ,

	/* Writes */
	RESOURCE_ACCESS_VERTEX_SHADER_WRITE,
	RESOURCE_ACCESS_FRAGMENT_SHADER_WRITE,
	RESOURCE_ACCESS_COLOR_ATTACHMENT_WRITE,
	RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE,
	RESOURCE_ACCESS_TRANSFER_WRITE,
	RESOURCE_ACCESS_HOST_WRITE,

	/* Read-Writes */
	RESOURCE_ACCESS_COLOR_ATTACHMENT_READ_WRITE,
	RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_WRITE,
	RESOURCE_ACCESS_COMPUTE_SHADER_STORAGE_IMAGE_READ_WRITE,
	RESOURCE_ACCESS_COMPUTE_SHADER_BUFFER_READ_WRITE,
	RESOURCE_ACCESS_TRANSFER_READ_WRITE,
	RESOURCE_ACCESS_GENERAL,

	/* Count */
	RESOURCE_ACCESS_TYPES_COUNT
} VulkanResourceAccessType;
/* Conversions */

/* Preference score per VkPhysicalDeviceType (indexed by the enum value);
 * higher wins device selection. Discrete > integrated > virtual > CPU > other.
 */
static const uint8_t DEVICE_PRIORITY[] =
{
	0, /* VK_PHYSICAL_DEVICE_TYPE_OTHER */
	3, /* VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU */
	4, /* VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU */
	2, /* VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU */
	1  /* VK_PHYSICAL_DEVICE_TYPE_CPU */
};
2020-12-17 08:19:02 +00:00
static VkFormat RefreshToVK_SurfaceFormat[] =
{
VK_FORMAT_R8G8B8A8_UNORM, /* R8G8B8A8_UNORM */
VK_FORMAT_B8G8R8A8_UNORM, /* B8G8R8A8_UNORM */
VK_FORMAT_R5G6B5_UNORM_PACK16, /* R5G6B5_UNORM */
VK_FORMAT_A1R5G5B5_UNORM_PACK16, /* A1R5G5B5_UNORM */
VK_FORMAT_B4G4R4A4_UNORM_PACK16, /* B4G4R4A4_UNORM */
VK_FORMAT_A2R10G10B10_UNORM_PACK32, /* A2R10G10B10_UNORM */
VK_FORMAT_R16G16_UNORM, /* R16G16_UNORM */
VK_FORMAT_R16G16B16A16_UNORM, /* R16G16B16A16_UNORM */
VK_FORMAT_R8_UNORM, /* R8_UNORM */
VK_FORMAT_BC1_RGBA_UNORM_BLOCK, /* BC1_UNORM */
VK_FORMAT_BC2_UNORM_BLOCK, /* BC2_UNORM */
VK_FORMAT_BC3_UNORM_BLOCK, /* BC3_UNORM */
VK_FORMAT_BC7_UNORM_BLOCK, /* BC7_UNORM */
VK_FORMAT_R8G8_SNORM, /* R8G8_SNORM */
VK_FORMAT_R8G8B8A8_SNORM, /* R8G8B8A8_SNORM */
VK_FORMAT_R16_SFLOAT, /* R16_SFLOAT */
VK_FORMAT_R16G16_SFLOAT, /* R16G16_SFLOAT */
2021-01-29 05:37:11 +00:00
VK_FORMAT_R16G16B16A16_SFLOAT, /* R16G16B16A16_SFLOAT */
VK_FORMAT_R32_SFLOAT, /* R32_SFLOAT */
VK_FORMAT_R32G32_SFLOAT, /* R32G32_SFLOAT */
VK_FORMAT_R32G32B32A32_SFLOAT, /* R32G32B32A32_SFLOAT */
VK_FORMAT_R8_UINT, /* R8_UINT */
VK_FORMAT_R8G8_UINT, /* R8G8_UINT */
VK_FORMAT_R8G8B8A8_UINT, /* R8G8B8A8_UINT */
VK_FORMAT_R16_UINT, /* R16_UINT */
VK_FORMAT_R16G16_UINT, /* R16G16_UINT */
VK_FORMAT_R16G16B16A16_UINT, /* R16G16B16A16_UINT */
VK_FORMAT_D16_UNORM, /* D16_UNORM */
VK_FORMAT_D32_SFLOAT, /* D32_SFLOAT */
VK_FORMAT_D16_UNORM_S8_UINT, /* D16_UNORM_S8_UINT */
VK_FORMAT_D32_SFLOAT_S8_UINT /* D32_SFLOAT_S8_UINT */
2020-12-17 08:19:02 +00:00
};
2020-12-17 19:40:49 +00:00
static VkFormat RefreshToVK_VertexFormat[] =
{
VK_FORMAT_R32_UINT, /* UINT */
VK_FORMAT_R32_SFLOAT, /* FLOAT */
VK_FORMAT_R32G32_SFLOAT, /* VECTOR2 */
VK_FORMAT_R32G32B32_SFLOAT, /* VECTOR3 */
2020-12-17 19:40:49 +00:00
VK_FORMAT_R32G32B32A32_SFLOAT, /* VECTOR4 */
VK_FORMAT_R8G8B8A8_UNORM, /* COLOR */
VK_FORMAT_R8G8B8A8_USCALED, /* BYTE4 */
VK_FORMAT_R16G16_SSCALED, /* SHORT2 */
2020-12-17 19:40:49 +00:00
VK_FORMAT_R16G16B16A16_SSCALED, /* SHORT4 */
VK_FORMAT_R16G16_SNORM, /* NORMALIZEDSHORT2 */
2020-12-17 19:40:49 +00:00
VK_FORMAT_R16G16B16A16_SNORM, /* NORMALIZEDSHORT4 */
VK_FORMAT_R16G16_SFLOAT, /* HALFVECTOR2 */
2020-12-17 19:40:49 +00:00
VK_FORMAT_R16G16B16A16_SFLOAT /* HALFVECTOR4 */
};
2020-12-20 07:31:55 +00:00
static VkIndexType RefreshToVK_IndexType[] =
{
VK_INDEX_TYPE_UINT16,
VK_INDEX_TYPE_UINT32
};
2020-12-17 19:40:49 +00:00
static VkPrimitiveTopology RefreshToVK_PrimitiveType[] =
{
VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP
};
/* Maps Refresh fill modes to VkPolygonMode; indexed by the Refresh enum. */
static VkPolygonMode RefreshToVK_PolygonMode[] =
{
VK_POLYGON_MODE_FILL,
VK_POLYGON_MODE_LINE,
VK_POLYGON_MODE_POINT
};
/* Maps Refresh cull modes to VkCullModeFlags; indexed by the Refresh enum. */
static VkCullModeFlags RefreshToVK_CullMode[] =
{
VK_CULL_MODE_NONE,
VK_CULL_MODE_FRONT_BIT,
VK_CULL_MODE_BACK_BIT,
VK_CULL_MODE_FRONT_AND_BACK
};
/* Maps Refresh front-face winding to VkFrontFace; indexed by the Refresh enum. */
static VkFrontFace RefreshToVK_FrontFace[] =
{
VK_FRONT_FACE_COUNTER_CLOCKWISE,
VK_FRONT_FACE_CLOCKWISE
};
/* Maps Refresh blend factors to VkBlendFactor; indexed by the Refresh enum. */
static VkBlendFactor RefreshToVK_BlendFactor[] =
{
	VK_BLEND_FACTOR_ZERO,
	VK_BLEND_FACTOR_ONE,
	VK_BLEND_FACTOR_SRC_COLOR,
	VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
	VK_BLEND_FACTOR_DST_COLOR,
	VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
	VK_BLEND_FACTOR_SRC_ALPHA,
	VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
	VK_BLEND_FACTOR_DST_ALPHA,
	VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
	VK_BLEND_FACTOR_CONSTANT_COLOR,
	VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
	VK_BLEND_FACTOR_CONSTANT_ALPHA,
	VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
	VK_BLEND_FACTOR_SRC_ALPHA_SATURATE
};
/* Maps Refresh blend operations to VkBlendOp; indexed by the Refresh enum. */
static VkBlendOp RefreshToVK_BlendOp[] =
{
VK_BLEND_OP_ADD,
VK_BLEND_OP_SUBTRACT,
VK_BLEND_OP_REVERSE_SUBTRACT,
VK_BLEND_OP_MIN,
VK_BLEND_OP_MAX
};
/* Maps Refresh compare operations to VkCompareOp; indexed by the Refresh enum. */
static VkCompareOp RefreshToVK_CompareOp[] =
{
VK_COMPARE_OP_NEVER,
VK_COMPARE_OP_LESS,
VK_COMPARE_OP_EQUAL,
VK_COMPARE_OP_LESS_OR_EQUAL,
VK_COMPARE_OP_GREATER,
VK_COMPARE_OP_NOT_EQUAL,
VK_COMPARE_OP_GREATER_OR_EQUAL,
VK_COMPARE_OP_ALWAYS
};
/* Maps Refresh stencil operations to VkStencilOp; indexed by the Refresh enum. */
static VkStencilOp RefreshToVK_StencilOp[] =
{
VK_STENCIL_OP_KEEP,
VK_STENCIL_OP_ZERO,
VK_STENCIL_OP_REPLACE,
VK_STENCIL_OP_INCREMENT_AND_CLAMP,
VK_STENCIL_OP_DECREMENT_AND_CLAMP,
VK_STENCIL_OP_INVERT,
VK_STENCIL_OP_INCREMENT_AND_WRAP,
VK_STENCIL_OP_DECREMENT_AND_WRAP
};
2020-12-17 08:19:02 +00:00
static VkAttachmentLoadOp RefreshToVK_LoadOp[] =
{
2022-02-25 21:42:11 +00:00
VK_ATTACHMENT_LOAD_OP_LOAD,
VK_ATTACHMENT_LOAD_OP_CLEAR,
VK_ATTACHMENT_LOAD_OP_DONT_CARE
2020-12-17 08:19:02 +00:00
};
static VkAttachmentStoreOp RefreshToVK_StoreOp[] =
{
2022-02-25 21:42:11 +00:00
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_STORE_OP_DONT_CARE
2020-12-17 08:19:02 +00:00
};
static VkSampleCountFlagBits RefreshToVK_SampleCount[] =
{
2022-02-25 21:42:11 +00:00
VK_SAMPLE_COUNT_1_BIT,
VK_SAMPLE_COUNT_2_BIT,
VK_SAMPLE_COUNT_4_BIT,
VK_SAMPLE_COUNT_8_BIT,
VK_SAMPLE_COUNT_16_BIT,
VK_SAMPLE_COUNT_32_BIT,
VK_SAMPLE_COUNT_64_BIT
2020-12-17 08:19:02 +00:00
};
2020-12-17 19:40:49 +00:00
static VkVertexInputRate RefreshToVK_VertexInputRate[] =
{
VK_VERTEX_INPUT_RATE_VERTEX,
VK_VERTEX_INPUT_RATE_INSTANCE
};
2021-01-03 21:01:29 +00:00
static VkFilter RefreshToVK_Filter[] =
2020-12-18 01:48:26 +00:00
{
VK_FILTER_NEAREST,
2021-01-03 21:01:29 +00:00
VK_FILTER_LINEAR,
VK_FILTER_CUBIC_EXT
2020-12-18 01:48:26 +00:00
};
/* Maps Refresh mipmap modes to VkSamplerMipmapMode; indexed by the Refresh enum. */
static VkSamplerMipmapMode RefreshToVK_SamplerMipmapMode[] =
{
VK_SAMPLER_MIPMAP_MODE_NEAREST,
VK_SAMPLER_MIPMAP_MODE_LINEAR
};
/* Maps Refresh sampler address modes to VkSamplerAddressMode; indexed by the Refresh enum. */
static VkSamplerAddressMode RefreshToVK_SamplerAddressMode[] =
{
VK_SAMPLER_ADDRESS_MODE_REPEAT,
VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER
};
2020-12-18 20:58:03 +00:00
static VkBorderColor RefreshToVK_BorderColor[] =
{
VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
VK_BORDER_COLOR_INT_OPAQUE_BLACK,
VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
VK_BORDER_COLOR_INT_OPAQUE_WHITE
};
/* Structures */

/* Memory Allocation */

/* Forward declarations: these structs reference each other. */
typedef struct VulkanMemoryAllocation VulkanMemoryAllocation;
typedef struct VulkanBuffer VulkanBuffer;
typedef struct VulkanTexture VulkanTexture;
/* A free span inside a VulkanMemoryAllocation. Tracked both in the owning
 * allocation's freeRegions list (via allocationIndex) and in the
 * sub-allocator's size-sorted list (via sortedIndex).
 */
typedef struct VulkanMemoryFreeRegion
{
VulkanMemoryAllocation *allocation;
VkDeviceSize offset;
VkDeviceSize size;
uint32_t allocationIndex;
uint32_t sortedIndex;
} VulkanMemoryFreeRegion;
/* An occupied span inside a VulkanMemoryAllocation, bound to exactly one
 * buffer or texture (selected by isBuffer).
 */
typedef struct VulkanMemoryUsedRegion
{
VulkanMemoryAllocation *allocation;
VkDeviceSize offset;
VkDeviceSize size;
VkDeviceSize resourceOffset; /* differs from offset based on alignment*/
VkDeviceSize resourceSize; /* differs from size based on alignment */
VkDeviceSize alignment;
uint8_t isBuffer;
REFRESHNAMELESS union
{
VulkanBuffer *vulkanBuffer;
VulkanTexture *vulkanTexture;
};
} VulkanMemoryUsedRegion;
2020-12-18 22:35:33 +00:00
typedef struct VulkanMemorySubAllocator
{
uint32_t memoryTypeIndex;
2020-12-18 22:35:33 +00:00
VulkanMemoryAllocation **allocations;
uint32_t allocationCount;
VulkanMemoryFreeRegion **sortedFreeRegions;
uint32_t sortedFreeRegionCount;
uint32_t sortedFreeRegionCapacity;
} VulkanMemorySubAllocator;
struct VulkanMemoryAllocation
{
VulkanMemorySubAllocator *allocator;
VkDeviceMemory memory;
VkDeviceSize size;
VulkanMemoryUsedRegion **usedRegions;
uint32_t usedRegionCount;
uint32_t usedRegionCapacity;
2020-12-18 22:35:33 +00:00
VulkanMemoryFreeRegion **freeRegions;
uint32_t freeRegionCount;
uint32_t freeRegionCapacity;
uint8_t dedicated;
uint8_t availableForAllocation;
VkDeviceSize freeSpace;
VkDeviceSize usedSpace;
2021-01-14 02:02:45 +00:00
uint8_t *mapPointer;
2021-01-03 21:12:12 +00:00
SDL_mutex *memoryLock;
2020-12-18 22:35:33 +00:00
};
/* Top-level allocator: one sub-allocator per Vulkan memory type index. */
typedef struct VulkanMemoryAllocator
{
VulkanMemorySubAllocator subAllocators[VK_MAX_MEMORY_TYPES];
} VulkanMemoryAllocator;
2020-12-19 00:39:03 +00:00
/* Memory Barriers */
typedef struct VulkanResourceAccessInfo
{
VkPipelineStageFlags stageMask;
VkAccessFlags accessMask;
VkImageLayout imageLayout;
} VulkanResourceAccessInfo;
static const VulkanResourceAccessInfo AccessMap[RESOURCE_ACCESS_TYPES_COUNT] =
{
/* RESOURCE_ACCESS_NONE */
{
0,
0,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_INDEX_BUFFER */
{
VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
VK_ACCESS_INDEX_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_VERTEX_BUFFER */
{
VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_INDIRECT_BUFFER */
{
VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
2020-12-19 00:39:03 +00:00
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_VERTEX_SHADER_READ_UNIFORM_BUFFER */
{
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_VERTEX_SHADER_READ_SAMPLED_IMAGE */
{
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
},
/* RESOURCE_ACCESS_FRAGMENT_SHADER_READ_UNIFORM_BUFFER */
{
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_ACCESS_UNIFORM_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_FRAGMENT_SHADER_READ_SAMPLED_IMAGE */
{
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
},
/* RESOURCE_ACCESS_FRAGMENT_SHADER_READ_COLOR_ATTACHMENT */
{
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
},
/* RESOURCE_ACCESS_FRAGMENT_SHADER_READ_DEPTH_STENCIL_ATTACHMENT */
{
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL
},
2020-12-31 04:39:47 +00:00
/* RESOURCE_ACCESS_COMPUTE_SHADER_READ_UNIFORM_BUFFER */
{
VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
VK_ACCESS_UNIFORM_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_COMPUTE_SHADER_READ_SAMPLED_IMAGE_OR_UNIFORM_TEXEL_BUFFER */
2022-02-25 21:42:11 +00:00
{ VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
},
2020-12-31 06:28:37 +00:00
/* RESOURCE_ACCESS_COMPUTE_SHADER_READ_OTHER */
{
VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE */
{
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
},
2020-12-19 00:39:03 +00:00
/* RESOURCE_ACCESS_COLOR_ATTACHMENT_READ */
{
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
},
/* RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ */
{
VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL
},
/* RESOURCE_ACCESS_TRANSFER_READ */
{
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_ACCESS_TRANSFER_READ_BIT,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
},
/* RESOURCE_ACCESS_HOST_READ */
{
VK_PIPELINE_STAGE_HOST_BIT,
VK_ACCESS_HOST_READ_BIT,
VK_IMAGE_LAYOUT_GENERAL
},
/* RESOURCE_ACCESS_PRESENT */
{
0,
0,
VK_IMAGE_LAYOUT_PRESENT_SRC_KHR
},
/* RESOURCE_ACCESS_END_OF_READ */
{
0,
0,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_VERTEX_SHADER_WRITE */
{
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
VK_ACCESS_SHADER_WRITE_BIT,
VK_IMAGE_LAYOUT_GENERAL
},
/* RESOURCE_ACCESS_FRAGMENT_SHADER_WRITE */
{
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_ACCESS_SHADER_WRITE_BIT,
VK_IMAGE_LAYOUT_GENERAL
},
/* RESOURCE_ACCESS_COLOR_ATTACHMENT_WRITE */
{
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
},
/* RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE */
{
VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
},
/* RESOURCE_ACCESS_TRANSFER_WRITE */
{
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_ACCESS_TRANSFER_WRITE_BIT,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
},
/* RESOURCE_ACCESS_HOST_WRITE */
{
VK_PIPELINE_STAGE_HOST_BIT,
VK_ACCESS_HOST_WRITE_BIT,
VK_IMAGE_LAYOUT_GENERAL
},
/* RESOURCE_ACCESS_COLOR_ATTACHMENT_READ_WRITE */
{
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
},
/* RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_WRITE */
{
VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
},
/* RESOURCE_ACCESS_COMPUTE_SHADER_STORAGE_IMAGE_READ_WRITE */
{
VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT,
VK_IMAGE_LAYOUT_GENERAL
},
/* RESOURCE_ACCESS_COMPUTE_SHADER_BUFFER_READ_WRITE */
{
VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_TRANSFER_READ_WRITE */
2020-12-19 00:39:03 +00:00
{
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_GENERAL */
{
VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
VK_IMAGE_LAYOUT_GENERAL
}
};
/* Memory structures */
/* We use pointer indirection so that defrag can occur without objects
* needing to be aware of the backing buffers changing.
*/
typedef struct VulkanBufferHandle
{
VulkanBuffer *vulkanBuffer;
} VulkanBufferHandle;
struct VulkanBuffer
2020-12-19 04:08:07 +00:00
{
VkBuffer buffer;
2020-12-19 04:08:07 +00:00
VkDeviceSize size;
VulkanMemoryUsedRegion *usedRegion;
2020-12-19 04:08:07 +00:00
VulkanResourceAccessType resourceAccessType;
VkBufferUsageFlags usage;
uint8_t requireHostVisible;
uint8_t preferDeviceLocal;
uint8_t preferHostLocal;
SDL_atomic_t referenceCount; /* Tracks command buffer usage */
VulkanBufferHandle *handle;
uint8_t markedForDestroy; /* so that defrag doesn't double-free */
};
/* Buffer resources consist of multiple backing buffer handles so that data transfers
* can occur without blocking or the client having to manage extra resources.
*
* Cast from Refresh_GpuBuffer or Refresh_TransferBuffer.
*/
typedef struct VulkanBufferContainer
{
VulkanBufferHandle *activeBufferHandle;
/* These are all the buffer handles that have been used by this container.
* If the resource is bound and then updated with SafeDiscard, a new resource
* will be added to this list.
* These can be reused after they are submitted and command processing is complete.
*/
uint32_t bufferCapacity;
uint32_t bufferCount;
VulkanBufferHandle **bufferHandles;
} VulkanBufferContainer;
/* Which shader stage a uniform buffer slice feeds. */
typedef enum VulkanUniformBufferType
{
UNIFORM_BUFFER_VERTEX,
UNIFORM_BUFFER_FRAGMENT,
UNIFORM_BUFFER_COMPUTE
} VulkanUniformBufferType;
/* Uniform buffers are just one buffer that we carve slices out of. */
typedef struct VulkanUniformBufferObject
{
VulkanUniformBufferType type;
VkDescriptorSet descriptorSet;
VulkanBuffer *buffer;
uint32_t currentOffset; /* next free offset within buffer */
SDL_mutex *lock;        /* guards currentOffset across threads */
} VulkanUniformBufferObject;
/* Renderer Structure */

/* Queue family indices selected at device creation for each queue role. */
typedef struct QueueFamilyIndices
{
	uint32_t graphicsFamily;
	uint32_t presentFamily;
	uint32_t computeFamily;
	uint32_t transferFamily;
} QueueFamilyIndices;
/* A VkSampler plus a reference count tracking command buffer usage. */
typedef struct VulkanSampler
{
VkSampler sampler;
SDL_atomic_t referenceCount;
} VulkanSampler;
/* A VkShaderModule plus a reference count tracking pipeline usage. */
typedef struct VulkanShaderModule
{
VkShaderModule shaderModule;
SDL_atomic_t referenceCount;
} VulkanShaderModule;
/* Indirection so defrag can swap the backing texture transparently. */
typedef struct VulkanTextureHandle
{
VulkanTexture *vulkanTexture;
} VulkanTextureHandle;
/* Textures are made up of individual slices.
* This helps us barrier the resource efficiently.
*/
typedef struct VulkanTextureSlice
{
VulkanTexture *parent;
uint32_t layer;
uint32_t level;
VulkanResourceAccessType resourceAccessType;
SDL_atomic_t referenceCount;
VkImageView view;
VulkanTexture *msaaTex; /* NULL if parent sample count is 1 or is depth target */
} VulkanTextureSlice;
/* Backing image plus per-slice state; owned by a VulkanTextureHandle. */
struct VulkanTexture
{
VulkanMemoryUsedRegion *usedRegion;
VkImage image;
VkImageView view;
VkExtent2D dimensions;
uint8_t is3D;
uint8_t isCube;
uint8_t isRenderTarget;
uint32_t depth;
uint32_t layerCount;
uint32_t levelCount;
VkSampleCountFlagBits sampleCount; /* NOTE: This refers to the sample count of a render target pass using this texture, not the actual sample count of the texture */
VkFormat format;
VkImageUsageFlags usageFlags;
VkImageAspectFlags aspectFlags;
uint32_t sliceCount;
VulkanTextureSlice *slices;
VulkanTextureHandle *handle;
uint8_t markedForDestroy; /* so that defrag doesn't double-free */
};
/* Texture resources consist of multiple backing texture handles so that data transfers
* can occur without blocking or the client having to manage extra resources.
*
* Cast from Refresh_Texture.
*/
typedef struct VulkanTextureContainer
{
VulkanTextureHandle *activeTextureHandle;
/* These are all the texture handles that have been used by this container.
* If the resource is bound and then updated with SafeDiscard, a new resource
* will be added to this list.
* These can be reused after they are submitted and command processing is complete.
*/
uint32_t textureCapacity;
uint32_t textureCount;
VulkanTextureHandle **textureHandles;
/* Swapchain images cannot be discarded */
uint8_t canBeDiscarded;
} VulkanTextureContainer;
/* A VkFramebuffer plus a reference count tracking command buffer usage. */
typedef struct VulkanFramebuffer
{
VkFramebuffer framebuffer;
SDL_atomic_t referenceCount;
} VulkanFramebuffer;
typedef struct VulkanSwapchainData
{
/* Window surface */
VkSurfaceKHR surface;
VkSurfaceFormatKHR surfaceFormat;
/* Swapchain for window surface */
VkSwapchainKHR swapchain;
VkFormat swapchainFormat;
VkComponentMapping swapchainSwizzle;
VkPresentModeKHR presentMode;
/* Swapchain images */
VkExtent2D extent;
VulkanTextureContainer *textureContainers; /* use containers so that swapchain textures can use the same API as other textures */
uint32_t imageCount;
2022-02-10 05:42:19 +00:00
/* Synchronization primitives */
VkSemaphore imageAvailableSemaphore;
VkSemaphore renderFinishedSemaphore;
uint32_t submissionsInFlight;
} VulkanSwapchainData;
/* Per-window driver data stored on the SDL window (see WINDOW_DATA key). */
typedef struct WindowData
{
void *windowHandle;
VkPresentModeKHR preferredPresentMode;
VulkanSwapchainData *swapchainData; /* NULL until a swapchain is created */
} WindowData;
2020-12-17 03:28:02 +00:00
typedef struct SwapChainSupportDetails
{
VkSurfaceCapabilitiesKHR capabilities;
VkSurfaceFormatKHR *formats;
uint32_t formatsLength;
VkPresentModeKHR *presentModes;
uint32_t presentModesLength;
} SwapChainSupportDetails;
/* A queued present request: which window and which swapchain image to show. */
typedef struct VulkanPresentData
{
WindowData *windowData;
uint32_t swapchainImageIndex;
} VulkanPresentData;
typedef struct DescriptorSetCache DescriptorSetCache;
2020-12-27 23:20:59 +00:00
typedef struct VulkanGraphicsPipelineLayout
2020-12-23 06:56:26 +00:00
{
VkPipelineLayout pipelineLayout;
DescriptorSetCache *vertexSamplerDescriptorSetCache;
DescriptorSetCache *fragmentSamplerDescriptorSetCache;
} VulkanGraphicsPipelineLayout;
typedef struct VulkanGraphicsPipeline
{
VkPipeline pipeline;
VulkanGraphicsPipelineLayout *pipelineLayout;
2021-01-05 23:00:51 +00:00
Refresh_PrimitiveType primitiveType;
uint32_t vertexUniformBlockSize;
uint32_t fragmentUniformBlockSize;
VulkanShaderModule *vertexShaderModule;
VulkanShaderModule *fragmentShaderModule;
SDL_atomic_t referenceCount;
2020-12-23 06:56:26 +00:00
} VulkanGraphicsPipeline;
2020-12-29 22:52:24 +00:00
typedef struct VulkanComputePipelineLayout
{
VkPipelineLayout pipelineLayout;
DescriptorSetCache *bufferDescriptorSetCache;
DescriptorSetCache *imageDescriptorSetCache;
2020-12-29 22:52:24 +00:00
} VulkanComputePipelineLayout;
typedef struct VulkanComputePipeline
{
VkPipeline pipeline;
VulkanComputePipelineLayout *pipelineLayout;
uint32_t uniformBlockSize; /* permanently set in Create function */
VulkanShaderModule *computeShaderModule;
SDL_atomic_t referenceCount;
2020-12-29 22:52:24 +00:00
} VulkanComputePipeline;
/* Cache structures */
2020-12-29 22:52:24 +00:00
/* Descriptor Set Layout Caches*/
#define NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS 1031
2020-12-29 23:05:26 +00:00
typedef struct DescriptorSetLayoutHash
{
2020-12-29 23:05:26 +00:00
VkDescriptorType descriptorType;
uint32_t bindingCount;
VkShaderStageFlagBits stageFlag;
2020-12-29 23:05:26 +00:00
} DescriptorSetLayoutHash;
2020-12-29 23:05:26 +00:00
typedef struct DescriptorSetLayoutHashMap
{
2020-12-29 23:05:26 +00:00
DescriptorSetLayoutHash key;
VkDescriptorSetLayout value;
2020-12-29 23:05:26 +00:00
} DescriptorSetLayoutHashMap;
2020-12-29 23:05:26 +00:00
typedef struct DescriptorSetLayoutHashArray
{
2020-12-29 23:05:26 +00:00
DescriptorSetLayoutHashMap *elements;
int32_t count;
int32_t capacity;
2020-12-29 23:05:26 +00:00
} DescriptorSetLayoutHashArray;
2020-12-29 23:05:26 +00:00
typedef struct DescriptorSetLayoutHashTable
{
2020-12-29 23:05:26 +00:00
DescriptorSetLayoutHashArray buckets[NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS];
} DescriptorSetLayoutHashTable;
2020-12-29 23:05:26 +00:00
static inline uint64_t DescriptorSetLayoutHashTable_GetHashCode(DescriptorSetLayoutHash key)
{
const uint64_t HASH_FACTOR = 97;
uint64_t result = 1;
result = result * HASH_FACTOR + key.descriptorType;
2020-12-29 23:05:26 +00:00
result = result * HASH_FACTOR + key.bindingCount;
result = result * HASH_FACTOR + key.stageFlag;
return result;
}
2020-12-29 23:05:26 +00:00
static inline VkDescriptorSetLayout DescriptorSetLayoutHashTable_Fetch(
DescriptorSetLayoutHashTable *table,
DescriptorSetLayoutHash key
) {
int32_t i;
2020-12-29 23:05:26 +00:00
uint64_t hashcode = DescriptorSetLayoutHashTable_GetHashCode(key);
DescriptorSetLayoutHashArray *arr = &table->buckets[hashcode % NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS];
for (i = 0; i < arr->count; i += 1)
{
2020-12-29 23:05:26 +00:00
const DescriptorSetLayoutHash *e = &arr->elements[i].key;
2022-02-25 21:42:11 +00:00
if ( key.descriptorType == e->descriptorType &&
2020-12-29 23:05:26 +00:00
key.bindingCount == e->bindingCount &&
2022-02-25 21:42:11 +00:00
key.stageFlag == e->stageFlag )
{
return arr->elements[i].value;
}
}
return VK_NULL_HANDLE;
}
2020-12-29 23:05:26 +00:00
static inline void DescriptorSetLayoutHashTable_Insert(
DescriptorSetLayoutHashTable *table,
DescriptorSetLayoutHash key,
VkDescriptorSetLayout value
) {
2020-12-29 23:05:26 +00:00
uint64_t hashcode = DescriptorSetLayoutHashTable_GetHashCode(key);
DescriptorSetLayoutHashArray *arr = &table->buckets[hashcode % NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS];
2020-12-29 23:05:26 +00:00
DescriptorSetLayoutHashMap map;
map.key = key;
map.value = value;
2020-12-29 23:05:26 +00:00
EXPAND_ELEMENTS_IF_NEEDED(arr, 4, DescriptorSetLayoutHashMap);
arr->elements[arr->count] = map;
arr->count += 1;
}
typedef struct RenderPassColorTargetDescription
{
2022-06-06 18:46:08 +00:00
VkFormat format;
Refresh_Vec4 clearColor;
Refresh_LoadOp loadOp;
Refresh_StoreOp storeOp;
} RenderPassColorTargetDescription;
typedef struct RenderPassDepthStencilTargetDescription
{
2022-06-06 18:46:08 +00:00
VkFormat format;
Refresh_LoadOp loadOp;
Refresh_StoreOp storeOp;
Refresh_LoadOp stencilLoadOp;
Refresh_StoreOp stencilStoreOp;
} RenderPassDepthStencilTargetDescription;
/* Key for the render pass cache: attachment set, count and sample count. */
typedef struct RenderPassHash
{
RenderPassColorTargetDescription colorTargetDescriptions[MAX_COLOR_TARGET_BINDINGS];
uint32_t colorAttachmentCount;
RenderPassDepthStencilTargetDescription depthStencilTargetDescription;
VkSampleCountFlagBits colorAttachmentSampleCount;
} RenderPassHash;
/* One key/value pair in the render pass cache. */
typedef struct RenderPassHashMap
{
RenderPassHash key;
VkRenderPass value;
} RenderPassHashMap;
/* Growable array of cached render passes (searched linearly). */
typedef struct RenderPassHashArray
{
RenderPassHashMap *elements;
int32_t count;
int32_t capacity;
} RenderPassHashArray;
/* Returns 1 when the two keys describe identical render passes, 0 otherwise.
 * Field-by-field comparison is required (not memcmp) because only the first
 * colorAttachmentCount color descriptions are meaningful and struct padding
 * is indeterminate.
 */
static inline uint8_t RenderPassHash_Compare(
	RenderPassHash *a,
	RenderPassHash *b
) {
	uint32_t attachmentIndex;

	if (	a->colorAttachmentCount != b->colorAttachmentCount ||
		a->colorAttachmentSampleCount != b->colorAttachmentSampleCount	)
	{
		return 0;
	}

	for (attachmentIndex = 0; attachmentIndex < a->colorAttachmentCount; attachmentIndex += 1)
	{
		const RenderPassColorTargetDescription *descA = &a->colorTargetDescriptions[attachmentIndex];
		const RenderPassColorTargetDescription *descB = &b->colorTargetDescriptions[attachmentIndex];

		if (	descA->format != descB->format ||
			descA->loadOp != descB->loadOp ||
			descA->storeOp != descB->storeOp	)
		{
			return 0;
		}

		if (	descA->clearColor.x != descB->clearColor.x ||
			descA->clearColor.y != descB->clearColor.y ||
			descA->clearColor.z != descB->clearColor.z ||
			descA->clearColor.w != descB->clearColor.w	)
		{
			return 0;
		}
	}

	if (	a->depthStencilTargetDescription.format != b->depthStencilTargetDescription.format ||
		a->depthStencilTargetDescription.loadOp != b->depthStencilTargetDescription.loadOp ||
		a->depthStencilTargetDescription.storeOp != b->depthStencilTargetDescription.storeOp ||
		a->depthStencilTargetDescription.stencilLoadOp != b->depthStencilTargetDescription.stencilLoadOp ||
		a->depthStencilTargetDescription.stencilStoreOp != b->depthStencilTargetDescription.stencilStoreOp	)
	{
		return 0;
	}

	return 1;
}
/* Linear-searches the cache for a render pass matching the key.
 * Returns VK_NULL_HANDLE when no cached pass matches.
 */
static inline VkRenderPass RenderPassHashArray_Fetch(
	RenderPassHashArray *arr,
	RenderPassHash *key
) {
	int32_t elementIndex;

	for (elementIndex = 0; elementIndex < arr->count; elementIndex += 1)
	{
		RenderPassHashMap *entry = &arr->elements[elementIndex];

		if (RenderPassHash_Compare(&entry->key, key))
		{
			return entry->value;
		}
	}

	return VK_NULL_HANDLE;
}
/* Appends a (key, render pass) pair to the cache, growing storage as needed.
 * No duplicate check is performed; callers are expected to Fetch first.
 */
static inline void RenderPassHashArray_Insert(
	RenderPassHashArray *arr,
	RenderPassHash key,
	VkRenderPass value
) {
	RenderPassHashMap entry;

	entry.key = key;
	entry.value = value;

	EXPAND_ELEMENTS_IF_NEEDED(arr, 4, RenderPassHashMap)

	arr->elements[arr->count] = entry;
	arr->count += 1;
}
/* Cache key: the exact set of attachment views and dimensions that uniquely
 * identify a framebuffer.
 */
typedef struct FramebufferHash
{
	VkImageView colorAttachmentViews[MAX_COLOR_TARGET_BINDINGS];
	VkImageView colorMultiSampleAttachmentViews[MAX_COLOR_TARGET_BINDINGS];
	uint32_t colorAttachmentCount;
	VkImageView depthStencilAttachmentView;
	uint32_t width;
	uint32_t height;
} FramebufferHash;

/* One cached framebuffer entry. */
typedef struct FramebufferHashMap
{
	FramebufferHash key;
	VulkanFramebuffer *value;
} FramebufferHashMap;

/* Growable array of cached framebuffers; looked up by linear search. */
typedef struct FramebufferHashArray
{
	FramebufferHashMap *elements;
	int32_t count;
	int32_t capacity;
} FramebufferHashArray;
/* Returns 1 when the two keys reference the same attachment views and
 * dimensions, 0 otherwise. Only the first colorAttachmentCount entries of
 * the view arrays are compared.
 */
static inline uint8_t FramebufferHash_Compare(
	FramebufferHash *a,
	FramebufferHash *b
) {
	uint32_t attachmentIndex;

	if (	a->colorAttachmentCount != b->colorAttachmentCount ||
		a->depthStencilAttachmentView != b->depthStencilAttachmentView ||
		a->width != b->width ||
		a->height != b->height	)
	{
		return 0;
	}

	for (attachmentIndex = 0; attachmentIndex < a->colorAttachmentCount; attachmentIndex += 1)
	{
		if (a->colorAttachmentViews[attachmentIndex] != b->colorAttachmentViews[attachmentIndex])
		{
			return 0;
		}

		if (a->colorMultiSampleAttachmentViews[attachmentIndex] != b->colorMultiSampleAttachmentViews[attachmentIndex])
		{
			return 0;
		}
	}

	return 1;
}
/* Linear-searches the cache for a framebuffer matching the key.
 * Returns NULL when no cached framebuffer matches.
 *
 * Fix: the original returned VK_NULL_HANDLE, but VulkanFramebuffer* is a
 * plain C struct pointer, not a Vulkan handle type; NULL is the correct
 * sentinel and matches the other *_Fetch helpers in this file that return
 * driver-struct pointers.
 */
static inline VulkanFramebuffer* FramebufferHashArray_Fetch(
	FramebufferHashArray *arr,
	FramebufferHash *key
) {
	int32_t i;

	for (i = 0; i < arr->count; i += 1)
	{
		FramebufferHash *e = &arr->elements[i].key;
		if (FramebufferHash_Compare(e, key))
		{
			return arr->elements[i].value;
		}
	}

	return NULL;
}
/* Appends a (key, framebuffer) pair to the cache, growing storage as needed.
 * No duplicate check is performed; callers are expected to Fetch first.
 */
static inline void FramebufferHashArray_Insert(
	FramebufferHashArray *arr,
	FramebufferHash key,
	VulkanFramebuffer *value
) {
	FramebufferHashMap entry;

	entry.key = key;
	entry.value = value;

	EXPAND_ELEMENTS_IF_NEEDED(arr, 4, FramebufferHashMap)

	arr->elements[arr->count] = entry;
	arr->count += 1;
}
/* Swap-remove: overwrites the removed slot with the last element and shrinks
 * the array. Order is not preserved. index must be < arr->count.
 */
static inline void FramebufferHashArray_Remove(
	FramebufferHashArray *arr,
	uint32_t index
) {
	uint32_t lastIndex = arr->count - 1;

	if (index != lastIndex)
	{
		arr->elements[index] = arr->elements[lastIndex];
	}

	arr->count -= 1;
}
/* Descriptor Set Caches */

/* A cache of descriptor sets that all share one layout. Sets are allocated
 * from the pool list and recycled through the inactive list.
 */
struct DescriptorSetCache
{
	SDL_mutex *lock; /* NOTE(review): presumably guards the pool/inactive state below — confirm at call sites */

	VkDescriptorSetLayout descriptorSetLayout;
	uint32_t bindingCount;
	VkDescriptorType descriptorType;

	VkDescriptorPool *descriptorPools;
	uint32_t descriptorPoolCount;
	uint32_t nextPoolSize; /* set count to request for the next pool allocation */

	VkDescriptorSet *inactiveDescriptorSets; /* sets available for reuse */
	uint32_t inactiveDescriptorSetCount;
	uint32_t inactiveDescriptorSetCapacity;
};

/* Pipeline Caches */

#define NUM_PIPELINE_LAYOUT_BUCKETS 1031
/* Cache key for a graphics pipeline layout: the four descriptor set layouts
 * it is built from.
 */
typedef struct GraphicsPipelineLayoutHash
{
	VkDescriptorSetLayout vertexSamplerLayout;
	VkDescriptorSetLayout fragmentSamplerLayout;
	VkDescriptorSetLayout vertexUniformLayout;
	VkDescriptorSetLayout fragmentUniformLayout;
} GraphicsPipelineLayoutHash;

/* One cached graphics pipeline layout entry. */
typedef struct GraphicsPipelineLayoutHashMap
{
	GraphicsPipelineLayoutHash key;
	VulkanGraphicsPipelineLayout *value;
} GraphicsPipelineLayoutHashMap;

/* One hash bucket: a growable array searched linearly. */
typedef struct GraphicsPipelineLayoutHashArray
{
	GraphicsPipelineLayoutHashMap *elements;
	int32_t count;
	int32_t capacity;
} GraphicsPipelineLayoutHashArray;

/* Fixed-bucket separate-chaining hash table of graphics pipeline layouts. */
typedef struct GraphicsPipelineLayoutHashTable
{
	GraphicsPipelineLayoutHashArray buckets[NUM_PIPELINE_LAYOUT_BUCKETS];
} GraphicsPipelineLayoutHashTable;
/* Polynomial (factor 97) hash over the four descriptor set layout handles. */
static inline uint64_t GraphicsPipelineLayoutHashTable_GetHashCode(GraphicsPipelineLayoutHash key)
{
	const uint64_t HASH_FACTOR = 97;
	const uint64_t handles[4] =
	{
		(uint64_t) key.vertexSamplerLayout,
		(uint64_t) key.fragmentSamplerLayout,
		(uint64_t) key.vertexUniformLayout,
		(uint64_t) key.fragmentUniformLayout
	};
	uint64_t result = 1;
	uint32_t i;

	for (i = 0; i < 4; i += 1)
	{
		result = result * HASH_FACTOR + handles[i];
	}

	return result;
}
/* Looks up a cached graphics pipeline layout by its descriptor set layouts.
 * Returns NULL when no matching entry exists.
 */
static inline VulkanGraphicsPipelineLayout* GraphicsPipelineLayoutHashArray_Fetch(
	GraphicsPipelineLayoutHashTable *table,
	GraphicsPipelineLayoutHash key
) {
	uint64_t bucketIndex = GraphicsPipelineLayoutHashTable_GetHashCode(key) % NUM_PIPELINE_LAYOUT_BUCKETS;
	GraphicsPipelineLayoutHashArray *bucket = &table->buckets[bucketIndex];
	int32_t elementIndex;

	for (elementIndex = 0; elementIndex < bucket->count; elementIndex += 1)
	{
		const GraphicsPipelineLayoutHash *entry = &bucket->elements[elementIndex].key;

		if (	entry->vertexSamplerLayout == key.vertexSamplerLayout &&
			entry->fragmentSamplerLayout == key.fragmentSamplerLayout &&
			entry->vertexUniformLayout == key.vertexUniformLayout &&
			entry->fragmentUniformLayout == key.fragmentUniformLayout	)
		{
			return bucket->elements[elementIndex].value;
		}
	}

	return NULL;
}
/* Appends a (key, layout) pair to the bucket selected by the key's hash.
 * No duplicate check is performed; callers are expected to Fetch first.
 */
static inline void GraphicsPipelineLayoutHashArray_Insert(
	GraphicsPipelineLayoutHashTable *table,
	GraphicsPipelineLayoutHash key,
	VulkanGraphicsPipelineLayout *value
) {
	uint64_t bucketIndex = GraphicsPipelineLayoutHashTable_GetHashCode(key) % NUM_PIPELINE_LAYOUT_BUCKETS;
	GraphicsPipelineLayoutHashArray *bucket = &table->buckets[bucketIndex];
	GraphicsPipelineLayoutHashMap entry;

	entry.key = key;
	entry.value = value;

	EXPAND_ELEMENTS_IF_NEEDED(bucket, 4, GraphicsPipelineLayoutHashMap)

	bucket->elements[bucket->count] = entry;
	bucket->count += 1;
}
/* Cache key for a compute pipeline layout: the three descriptor set layouts
 * it is built from.
 */
typedef struct ComputePipelineLayoutHash
{
	VkDescriptorSetLayout bufferLayout;
	VkDescriptorSetLayout imageLayout;
	VkDescriptorSetLayout uniformLayout;
} ComputePipelineLayoutHash;

/* One cached compute pipeline layout entry. */
typedef struct ComputePipelineLayoutHashMap
{
	ComputePipelineLayoutHash key;
	VulkanComputePipelineLayout *value;
} ComputePipelineLayoutHashMap;

/* One hash bucket: a growable array searched linearly. */
typedef struct ComputePipelineLayoutHashArray
{
	ComputePipelineLayoutHashMap *elements;
	int32_t count;
	int32_t capacity;
} ComputePipelineLayoutHashArray;

/* Fixed-bucket separate-chaining hash table of compute pipeline layouts. */
typedef struct ComputePipelineLayoutHashTable
{
	ComputePipelineLayoutHashArray buckets[NUM_PIPELINE_LAYOUT_BUCKETS];
} ComputePipelineLayoutHashTable;
/* Polynomial (factor 97) hash over the three descriptor set layout handles. */
static inline uint64_t ComputePipelineLayoutHashTable_GetHashCode(ComputePipelineLayoutHash key)
{
	const uint64_t HASH_FACTOR = 97;
	const uint64_t handles[3] =
	{
		(uint64_t) key.bufferLayout,
		(uint64_t) key.imageLayout,
		(uint64_t) key.uniformLayout
	};
	uint64_t result = 1;
	uint32_t i;

	for (i = 0; i < 3; i += 1)
	{
		result = result * HASH_FACTOR + handles[i];
	}

	return result;
}
/* Looks up a cached compute pipeline layout by its descriptor set layouts.
 * Returns NULL when no matching entry exists.
 */
static inline VulkanComputePipelineLayout* ComputePipelineLayoutHashArray_Fetch(
	ComputePipelineLayoutHashTable *table,
	ComputePipelineLayoutHash key
) {
	uint64_t bucketIndex = ComputePipelineLayoutHashTable_GetHashCode(key) % NUM_PIPELINE_LAYOUT_BUCKETS;
	ComputePipelineLayoutHashArray *bucket = &table->buckets[bucketIndex];
	int32_t elementIndex;

	for (elementIndex = 0; elementIndex < bucket->count; elementIndex += 1)
	{
		const ComputePipelineLayoutHash *entry = &bucket->elements[elementIndex].key;

		if (	entry->bufferLayout == key.bufferLayout &&
			entry->imageLayout == key.imageLayout &&
			entry->uniformLayout == key.uniformLayout	)
		{
			return bucket->elements[elementIndex].value;
		}
	}

	return NULL;
}
/* Appends a (key, layout) pair to the bucket selected by the key's hash.
 * No duplicate check is performed; callers are expected to Fetch first.
 */
static inline void ComputePipelineLayoutHashArray_Insert(
	ComputePipelineLayoutHashTable *table,
	ComputePipelineLayoutHash key,
	VulkanComputePipelineLayout *value
) {
	uint64_t bucketIndex = ComputePipelineLayoutHashTable_GetHashCode(key) % NUM_PIPELINE_LAYOUT_BUCKETS;
	ComputePipelineLayoutHashArray *bucket = &table->buckets[bucketIndex];
	ComputePipelineLayoutHashMap entry;

	entry.key = key;
	entry.value = value;

	EXPAND_ELEMENTS_IF_NEEDED(bucket, 4, ComputePipelineLayoutHashMap)

	bucket->elements[bucket->count] = entry;
	bucket->count += 1;
}
/* Command structures */

/* Pairs a descriptor set with the cache it was allocated from.
 * NOTE(review): presumably used to return the set to its cache when the
 * command buffer completes — confirm against the cleanup path.
 */
typedef struct DescriptorSetData
{
	DescriptorSetCache *descriptorSetCache;
	VkDescriptorSet descriptorSet;
} DescriptorSetData;

/* A lock-guarded pool of reusable VkFences. */
typedef struct VulkanFencePool
{
	SDL_mutex *lock;

	VkFence *availableFences;
	uint32_t availableFenceCount;
	uint32_t availableFenceCapacity;
} VulkanFencePool;
typedef struct VulkanCommandPool VulkanCommandPool;

/* Per-recording state for a single command buffer. Tracks current pipeline
 * bindings, render pass targets, and every resource referenced while
 * recording so barriers and lifetimes can be resolved at submit time.
 */
typedef struct VulkanCommandBuffer
{
	VkCommandBuffer commandBuffer;
	VulkanCommandPool *commandPool;

	/* Swapchain presents queued on this command buffer */
	VulkanPresentData *presentDatas;
	uint32_t presentDataCount;
	uint32_t presentDataCapacity;

	VkSemaphore *waitSemaphores;
	uint32_t waitSemaphoreCount;
	uint32_t waitSemaphoreCapacity;

	VkSemaphore *signalSemaphores;
	uint32_t signalSemaphoreCount;
	uint32_t signalSemaphoreCapacity;

	VulkanComputePipeline *currentComputePipeline;
	VulkanGraphicsPipeline *currentGraphicsPipeline;

	/* Per-stage uniform data offsets for the currently bound pipelines */
	uint32_t vertexUniformOffset;
	uint32_t fragmentUniformOffset;
	uint32_t computeUniformOffset;

	/* Targets of the render pass currently being recorded */
	VulkanTextureSlice *renderPassColorTargetTextureSlices[MAX_COLOR_TARGET_BINDINGS];
	uint32_t renderPassColorTargetTextureSliceCount;
	VulkanTextureSlice *renderPassDepthTextureSlice; /* can be NULL */

	VkDescriptorSet vertexSamplerDescriptorSet; /* updated by BindVertexSamplers */
	VkDescriptorSet fragmentSamplerDescriptorSet; /* updated by BindFragmentSamplers */
	VkDescriptorSet bufferDescriptorSet; /* updated by BindComputeBuffers */
	VkDescriptorSet imageDescriptorSet; /* updated by BindComputeTextures */

	/* FIXME: descriptor pools should be per-command-buffer */
	DescriptorSetData *boundDescriptorSetDatas;
	uint32_t boundDescriptorSetDataCount;
	uint32_t boundDescriptorSetDataCapacity;

	/* Keep track of compute resources for memory barriers */
	VulkanBuffer **boundComputeBuffers;
	uint32_t boundComputeBufferCount;
	uint32_t boundComputeBufferCapacity;

	VulkanTextureSlice **boundComputeTextureSlices;
	uint32_t boundComputeTextureSliceCount;
	uint32_t boundComputeTextureSliceCapacity;

	/* Keep track of copy resources for memory barriers */
	VulkanBuffer **copiedGpuBuffers;
	uint32_t copiedGpuBufferCount;
	uint32_t copiedGpuBufferCapacity;

	VulkanTextureSlice **copiedTextureSlices;
	uint32_t copiedTextureSliceCount;
	uint32_t copiedTextureSliceCapacity;

	/* Viewport/scissor state */
	VkViewport currentViewport;
	VkRect2D currentScissor;

	/* Track used resources */
	VulkanBuffer **usedBuffers;
	uint32_t usedBufferCount;
	uint32_t usedBufferCapacity;

	VulkanTextureSlice **usedTextureSlices;
	uint32_t usedTextureSliceCount;
	uint32_t usedTextureSliceCapacity;

	VulkanSampler **usedSamplers;
	uint32_t usedSamplerCount;
	uint32_t usedSamplerCapacity;

	VulkanGraphicsPipeline **usedGraphicsPipelines;
	uint32_t usedGraphicsPipelineCount;
	uint32_t usedGraphicsPipelineCapacity;

	VulkanComputePipeline **usedComputePipelines;
	uint32_t usedComputePipelineCount;
	uint32_t usedComputePipelineCapacity;

	VulkanFramebuffer **usedFramebuffers;
	uint32_t usedFramebufferCount;
	uint32_t usedFramebufferCapacity;

	VkFence inFlightFence;
	uint8_t autoReleaseFence; /* NOTE(review): presumably returns inFlightFence to the fence pool automatically — confirm */

	uint8_t isDefrag; /* Whether this CB was created for defragging */
} VulkanCommandBuffer;
/* A command pool owned by one thread, plus its reusable command buffers. */
struct VulkanCommandPool
{
	SDL_threadID threadID;
	VkCommandPool commandPool;

	VulkanCommandBuffer **inactiveCommandBuffers; /* completed buffers ready for reuse */
	uint32_t inactiveCommandBufferCapacity;
	uint32_t inactiveCommandBufferCount;
};
#define NUM_COMMAND_POOL_BUCKETS 1031

/* Cache key for command pool lookup: one pool per thread. */
typedef struct CommandPoolHash
{
	SDL_threadID threadID;
} CommandPoolHash;

/* One cached command pool entry. */
typedef struct CommandPoolHashMap
{
	CommandPoolHash key;
	VulkanCommandPool *value;
} CommandPoolHashMap;

/* One hash bucket: a growable array searched linearly. */
typedef struct CommandPoolHashArray
{
	CommandPoolHashMap *elements;
	uint32_t count;
	uint32_t capacity;
} CommandPoolHashArray;

/* Fixed-bucket separate-chaining hash table mapping threads to pools. */
typedef struct CommandPoolHashTable
{
	CommandPoolHashArray buckets[NUM_COMMAND_POOL_BUCKETS];
} CommandPoolHashTable;
/* Hash of a command pool key (its thread ID), using the same polynomial
 * scheme as the other hash tables in this file.
 */
static inline uint64_t CommandPoolHashTable_GetHashCode(CommandPoolHash key)
{
	const uint64_t HASH_FACTOR = 97;
	uint64_t hash = 1;

	hash = hash * HASH_FACTOR + (uint64_t) key.threadID;
	return hash;
}
/* Returns the command pool registered for the given thread, or NULL. */
static inline VulkanCommandPool* CommandPoolHashTable_Fetch(
	CommandPoolHashTable *table,
	CommandPoolHash key
) {
	uint64_t bucketIndex = CommandPoolHashTable_GetHashCode(key) % NUM_COMMAND_POOL_BUCKETS;
	CommandPoolHashArray *bucket = &table->buckets[bucketIndex];
	uint32_t elementIndex;

	for (elementIndex = 0; elementIndex < bucket->count; elementIndex += 1)
	{
		if (bucket->elements[elementIndex].key.threadID == key.threadID)
		{
			return bucket->elements[elementIndex].value;
		}
	}

	return NULL;
}
/* Registers a command pool for a thread. No duplicate check is performed;
 * callers are expected to Fetch first.
 */
static inline void CommandPoolHashTable_Insert(
	CommandPoolHashTable *table,
	CommandPoolHash key,
	VulkanCommandPool *value
) {
	uint64_t bucketIndex = CommandPoolHashTable_GetHashCode(key) % NUM_COMMAND_POOL_BUCKETS;
	CommandPoolHashArray *bucket = &table->buckets[bucketIndex];
	CommandPoolHashMap entry;

	entry.key = key;
	entry.value = value;

	EXPAND_ELEMENTS_IF_NEEDED(bucket, 4, CommandPoolHashMap)

	bucket->elements[bucket->count] = entry;
	bucket->count += 1;
}
/* Context */

/* The renderer context: device handles, allocator, lookup caches, deferred
 * destruction lists, locks, and the loaded Vulkan entry points.
 */
typedef struct VulkanRenderer
{
	VkInstance instance;
	VkPhysicalDevice physicalDevice;
	VkPhysicalDeviceProperties2 physicalDeviceProperties;
	VkPhysicalDeviceDriverPropertiesKHR physicalDeviceDriverProperties;
	VkDevice logicalDevice;
	uint8_t integratedMemoryNotification; /* one-shot warning flags */
	uint8_t outOfDeviceLocalMemoryWarning;

	uint8_t supportsDebugUtils;
	uint8_t debugMode;
	VulkanExtensions supports;

	VulkanMemoryAllocator *memoryAllocator;
	VkPhysicalDeviceMemoryProperties memoryProperties;

	WindowData **claimedWindows;
	uint32_t claimedWindowCount;
	uint32_t claimedWindowCapacity;

	uint32_t queueFamilyIndex;
	VkQueue unifiedQueue; /* single queue for graphics/compute/transfer work */

	VulkanCommandBuffer **submittedCommandBuffers;
	uint32_t submittedCommandBufferCount;
	uint32_t submittedCommandBufferCapacity;

	VulkanFencePool fencePool;

	/* Lookup caches for driver objects */
	CommandPoolHashTable commandPoolHashTable;
	DescriptorSetLayoutHashTable descriptorSetLayoutHashTable;
	GraphicsPipelineLayoutHashTable graphicsPipelineLayoutHashTable;
	ComputePipelineLayoutHashTable computePipelineLayoutHashTable;
	RenderPassHashArray renderPassHashArray;
	FramebufferHashArray framebufferHashArray;

	VkDescriptorPool defaultDescriptorPool;

	/* "Empty" layouts/sets; names suggest they stand in when a stage binds
	 * no resources — NOTE(review): confirm at bind sites */
	VkDescriptorSetLayout emptyVertexSamplerLayout;
	VkDescriptorSetLayout emptyFragmentSamplerLayout;
	VkDescriptorSetLayout emptyComputeBufferDescriptorSetLayout;
	VkDescriptorSetLayout emptyComputeImageDescriptorSetLayout;

	VkDescriptorSet emptyVertexSamplerDescriptorSet;
	VkDescriptorSet emptyFragmentSamplerDescriptorSet;
	VkDescriptorSet emptyComputeBufferDescriptorSet;
	VkDescriptorSet emptyComputeImageDescriptorSet;

	VulkanUniformBufferObject *vertexUniformBufferObject;
	VulkanUniformBufferObject *fragmentUniformBufferObject;
	VulkanUniformBufferObject *computeUniformBufferObject;

	VkDescriptorSetLayout vertexUniformDescriptorSetLayout;
	VkDescriptorSetLayout fragmentUniformDescriptorSetLayout;
	VkDescriptorSetLayout computeUniformDescriptorSetLayout;

	uint32_t minUBOAlignment;

	/* Some drivers don't support D16 for some reason. Fun! */
	VkFormat D16Format;
	VkFormat D16S8Format;

	/* Deferred destruction lists */
	VulkanTexture **texturesToDestroy;
	uint32_t texturesToDestroyCount;
	uint32_t texturesToDestroyCapacity;

	VulkanBuffer **buffersToDestroy;
	uint32_t buffersToDestroyCount;
	uint32_t buffersToDestroyCapacity;

	VulkanSampler **samplersToDestroy;
	uint32_t samplersToDestroyCount;
	uint32_t samplersToDestroyCapacity;

	VulkanGraphicsPipeline **graphicsPipelinesToDestroy;
	uint32_t graphicsPipelinesToDestroyCount;
	uint32_t graphicsPipelinesToDestroyCapacity;

	VulkanComputePipeline **computePipelinesToDestroy;
	uint32_t computePipelinesToDestroyCount;
	uint32_t computePipelinesToDestroyCapacity;

	VulkanShaderModule **shaderModulesToDestroy;
	uint32_t shaderModulesToDestroyCount;
	uint32_t shaderModulesToDestroyCapacity;

	VulkanFramebuffer **framebuffersToDestroy;
	uint32_t framebuffersToDestroyCount;
	uint32_t framebuffersToDestroyCapacity;

	SDL_mutex *allocatorLock;
	SDL_mutex *disposeLock;
	SDL_mutex *submitLock;
	SDL_mutex *acquireCommandBufferLock;
	SDL_mutex *renderPassFetchLock;
	SDL_mutex *framebufferFetchLock;

	uint8_t defragInProgress;

	VulkanMemoryAllocation **allocationsToDefrag;
	uint32_t allocationsToDefragCount;
	uint32_t allocationsToDefragCapacity;

	/* Per-instance/device Vulkan entry points, generated from the vkfuncs
	 * list as struct members */
	#define VULKAN_INSTANCE_FUNCTION(ext, ret, func, params) \
		vkfntype_##func func;
	#define VULKAN_DEVICE_FUNCTION(ext, ret, func, params) \
		vkfntype_##func func;
	#include "Refresh_Driver_Vulkan_vkfuncs.h"
} VulkanRenderer;
/* Forward declarations */

static uint8_t VULKAN_INTERNAL_DefragmentMemory(VulkanRenderer *renderer);
static void VULKAN_INTERNAL_BeginCommandBuffer(VulkanRenderer *renderer, VulkanCommandBuffer *commandBuffer);
static void VULKAN_UnclaimWindow(Refresh_Renderer *driverData, void *windowHandle);
static void VULKAN_Wait(Refresh_Renderer *driverData);
static void VULKAN_Submit(Refresh_Renderer *driverData, Refresh_CommandBuffer *commandBuffer);
static VulkanTextureSlice* VULKAN_INTERNAL_FetchTextureSlice(VulkanTexture* texture, uint32_t layer, uint32_t level);
/* Error Handling */

/* Maps a VkResult to its enum name for logging; codes not listed here fall
 * through to a generic message.
 */
static inline const char* VkErrorMessages(VkResult code)
{
	#define CASE_NAME(e) \
		case e: return #e;
	switch (code)
	{
		CASE_NAME(VK_ERROR_OUT_OF_HOST_MEMORY)
		CASE_NAME(VK_ERROR_OUT_OF_DEVICE_MEMORY)
		CASE_NAME(VK_ERROR_FRAGMENTED_POOL)
		CASE_NAME(VK_ERROR_OUT_OF_POOL_MEMORY)
		CASE_NAME(VK_ERROR_INITIALIZATION_FAILED)
		CASE_NAME(VK_ERROR_LAYER_NOT_PRESENT)
		CASE_NAME(VK_ERROR_EXTENSION_NOT_PRESENT)
		CASE_NAME(VK_ERROR_FEATURE_NOT_PRESENT)
		CASE_NAME(VK_ERROR_TOO_MANY_OBJECTS)
		CASE_NAME(VK_ERROR_DEVICE_LOST)
		CASE_NAME(VK_ERROR_INCOMPATIBLE_DRIVER)
		CASE_NAME(VK_ERROR_OUT_OF_DATE_KHR)
		CASE_NAME(VK_ERROR_SURFACE_LOST_KHR)
		CASE_NAME(VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT)
		CASE_NAME(VK_SUBOPTIMAL_KHR)
		default: return "Unhandled VkResult!";
	}
	#undef CASE_NAME
}
/* Logs "<function>: <error name>" at error level; no-op on VK_SUCCESS. */
static inline void LogVulkanResultAsError(
	const char* vulkanFunctionName,
	VkResult result
) {
	if (result == VK_SUCCESS)
	{
		return;
	}

	Refresh_LogError(
		"%s: %s",
		vulkanFunctionName,
		VkErrorMessages(result)
	);
}
/* Logs "<function>: <error name>" at warning level; no-op on VK_SUCCESS. */
static inline void LogVulkanResultAsWarn(
	const char* vulkanFunctionName,
	VkResult result
) {
	if (result == VK_SUCCESS)
	{
		return;
	}

	Refresh_LogWarn(
		"%s: %s",
		vulkanFunctionName,
		VkErrorMessages(result)
	);
}
/* Logs and returns `ret` from the *calling* function when a Vulkan call
 * fails. NOTE(review): expands to a bare if-statement rather than
 * do { } while (0), so avoid using it as the body of an unbraced if/else
 * (dangling-else hazard); changing the shape would affect existing call
 * sites, so it is documented instead.
 */
#define VULKAN_ERROR_CHECK(res, fn, ret) \
	if (res != VK_SUCCESS) \
	{ \
		Refresh_LogError("%s %s", #fn, VkErrorMessages(res)); \
		return ret; \
	}
/* Utility */
2022-06-17 07:41:27 +00:00
static inline VkFormat RefreshToVK_DepthFormat(
VulkanRenderer* renderer,
Refresh_TextureFormat format
) {
switch (format)
{
case REFRESH_TEXTUREFORMAT_D16_UNORM:
return renderer->D16Format;
case REFRESH_TEXTUREFORMAT_D16_UNORM_S8_UINT:
return renderer->D16S8Format;
case REFRESH_TEXTUREFORMAT_D32_SFLOAT:
return VK_FORMAT_D32_SFLOAT;
case REFRESH_TEXTUREFORMAT_D32_SFLOAT_S8_UINT:
return VK_FORMAT_D32_SFLOAT_S8_UINT;
default:
return VK_FORMAT_UNDEFINED;
}
}
/* Returns 1 when the Refresh format is a depth (or depth-stencil) format. */
static inline uint8_t IsRefreshDepthFormat(Refresh_TextureFormat format)
{
	return
		format == REFRESH_TEXTUREFORMAT_D16_UNORM ||
		format == REFRESH_TEXTUREFORMAT_D32_SFLOAT ||
		format == REFRESH_TEXTUREFORMAT_D16_UNORM_S8_UINT ||
		format == REFRESH_TEXTUREFORMAT_D32_SFLOAT_S8_UINT;
}
/* Returns 1 when the VkFormat is a depth (or depth-stencil) format. */
static inline uint8_t IsDepthFormat(VkFormat format)
{
	return
		format == VK_FORMAT_D16_UNORM ||
		format == VK_FORMAT_D32_SFLOAT ||
		format == VK_FORMAT_D16_UNORM_S8_UINT ||
		format == VK_FORMAT_D32_SFLOAT_S8_UINT;
}
/* Returns 1 when the VkFormat carries a stencil aspect. */
static inline uint8_t IsStencilFormat(VkFormat format)
{
	return
		format == VK_FORMAT_D16_UNORM_S8_UINT ||
		format == VK_FORMAT_D32_SFLOAT_S8_UINT;
}
static inline VkSampleCountFlagBits VULKAN_INTERNAL_GetMaxMultiSampleCount(
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. 
Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
VulkanRenderer *renderer,
VkSampleCountFlagBits multiSampleCount
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. 
Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
) {
VkSampleCountFlags flags = renderer->physicalDeviceProperties.properties.limits.framebufferColorSampleCounts;
VkSampleCountFlagBits maxSupported = VK_SAMPLE_COUNT_1_BIT;
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. 
Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
if (flags & VK_SAMPLE_COUNT_8_BIT)
{
maxSupported = VK_SAMPLE_COUNT_8_BIT;
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. 
Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
}
else if (flags & VK_SAMPLE_COUNT_4_BIT)
{
maxSupported = VK_SAMPLE_COUNT_4_BIT;
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. 
Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
}
else if (flags & VK_SAMPLE_COUNT_2_BIT)
{
maxSupported = VK_SAMPLE_COUNT_2_BIT;
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. 
Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
}
return SDL_min(multiSampleCount, maxSupported);
}
/* Memory Management */

/* Vulkan: Memory Allocation */
/* Rounds n up to the next multiple of align. align must be non-zero
 * (Vulkan alignment requirements are powers of two, never 0).
 */
static inline VkDeviceSize VULKAN_INTERNAL_NextHighestAlignment(
	VkDeviceSize n,
	VkDeviceSize align
) {
	return ((n + align - 1) / align) * align;
}
/* 32-bit variant of NextHighestAlignment: rounds n up to the next
 * multiple of align. align must be non-zero.
 */
static inline uint32_t VULKAN_INTERNAL_NextHighestAlignment32(
	uint32_t n,
	uint32_t align
) {
	return ((n + align - 1) / align) * align;
}
/* Removes an allocation from consideration for future suballocation by
 * clearing its available flag and unlinking every one of its free regions
 * from the allocator's size-sorted free-region list. The regions
 * themselves remain attached to the allocation.
 * NOTE(review): assumes each of this allocation's free regions currently
 * occupies a slot in the sorted list (sortedIndex valid) and that the
 * caller serializes access to the allocator — confirm at call sites.
 * `renderer` is unused here.
 */
static void VULKAN_INTERNAL_MakeMemoryUnavailable(
	VulkanRenderer* renderer,
	VulkanMemoryAllocation *allocation
) {
	uint32_t i, j;
	VulkanMemoryFreeRegion *freeRegion;
	/* flag checked by the suballocator when scanning for space */
	allocation->availableForAllocation = 0;
	for (i = 0; i < allocation->freeRegionCount; i += 1)
	{
		freeRegion = allocation->freeRegions[i];
		/* close the gap in the sorted list */
		if (allocation->allocator->sortedFreeRegionCount > 1)
		{
			/* shift everything after this region down one slot,
			 * keeping each entry's back-pointer (sortedIndex) in sync
			 */
			for (j = freeRegion->sortedIndex; j < allocation->allocator->sortedFreeRegionCount - 1; j += 1)
			{
				allocation->allocator->sortedFreeRegions[j] =
					allocation->allocator->sortedFreeRegions[j + 1];
				allocation->allocator->sortedFreeRegions[j]->sortedIndex = j;
			}
		}
		allocation->allocator->sortedFreeRegionCount -= 1;
	}
}
2024-02-27 08:16:06 +00:00
static void VULKAN_INTERNAL_MarkAllocationsForDefrag(
VulkanRenderer *renderer
) {
uint32_t memoryType, allocationIndex;
VulkanMemorySubAllocator *currentAllocator;
for (memoryType = 0; memoryType < VK_MAX_MEMORY_TYPES; memoryType += 1)
{
currentAllocator = &renderer->memoryAllocator->subAllocators[memoryType];
for (allocationIndex = 0; allocationIndex < currentAllocator->allocationCount; allocationIndex += 1)
{
if (currentAllocator->allocations[allocationIndex]->availableForAllocation == 1)
{
if (currentAllocator->allocations[allocationIndex]->freeRegionCount > 1)
{
EXPAND_ARRAY_IF_NEEDED(
renderer->allocationsToDefrag,
VulkanMemoryAllocation*,
renderer->allocationsToDefragCount + 1,
renderer->allocationsToDefragCapacity,
renderer->allocationsToDefragCapacity * 2
);
renderer->allocationsToDefrag[renderer->allocationsToDefragCount] =
currentAllocator->allocations[allocationIndex];
renderer->allocationsToDefragCount += 1;
VULKAN_INTERNAL_MakeMemoryUnavailable(
renderer,
currentAllocator->allocations[allocationIndex]
);
}
}
}
}
}
2021-01-30 00:03:00 +00:00
static void VULKAN_INTERNAL_RemoveMemoryFreeRegion(
VulkanRenderer *renderer,
2021-01-30 00:03:00 +00:00
VulkanMemoryFreeRegion *freeRegion
) {
uint32_t i;
SDL_LockMutex(renderer->allocatorLock);
if (freeRegion->allocation->availableForAllocation)
2021-01-30 00:03:00 +00:00
{
/* close the gap in the sorted list */
if (freeRegion->allocation->allocator->sortedFreeRegionCount > 1)
2021-01-30 00:03:00 +00:00
{
for (i = freeRegion->sortedIndex; i < freeRegion->allocation->allocator->sortedFreeRegionCount - 1; i += 1)
{
freeRegion->allocation->allocator->sortedFreeRegions[i] =
freeRegion->allocation->allocator->sortedFreeRegions[i + 1];
2021-01-30 00:03:00 +00:00
freeRegion->allocation->allocator->sortedFreeRegions[i]->sortedIndex = i;
}
2021-01-30 00:03:00 +00:00
}
freeRegion->allocation->allocator->sortedFreeRegionCount -= 1;
}
2021-01-30 00:03:00 +00:00
/* close the gap in the buffer list */
if (freeRegion->allocation->freeRegionCount > 1 && freeRegion->allocationIndex != freeRegion->allocation->freeRegionCount - 1)
{
freeRegion->allocation->freeRegions[freeRegion->allocationIndex] =
freeRegion->allocation->freeRegions[freeRegion->allocation->freeRegionCount - 1];
freeRegion->allocation->freeRegions[freeRegion->allocationIndex]->allocationIndex =
freeRegion->allocationIndex;
}
freeRegion->allocation->freeRegionCount -= 1;
freeRegion->allocation->freeSpace -= freeRegion->size;
2021-01-30 00:03:00 +00:00
SDL_free(freeRegion);
SDL_UnlockMutex(renderer->allocatorLock);
2021-01-30 00:03:00 +00:00
}
static void VULKAN_INTERNAL_NewMemoryFreeRegion(
VulkanRenderer *renderer,
2020-12-18 22:35:33 +00:00
VulkanMemoryAllocation *allocation,
VkDeviceSize offset,
VkDeviceSize size
) {
VulkanMemoryFreeRegion *newFreeRegion;
2021-01-30 00:03:00 +00:00
VkDeviceSize newOffset, newSize;
int32_t insertionIndex = 0;
int32_t i;
SDL_LockMutex(renderer->allocatorLock);
2021-01-30 00:03:00 +00:00
/* look for an adjacent region to merge */
for (i = allocation->freeRegionCount - 1; i >= 0; i -= 1)
{
/* check left side */
if (allocation->freeRegions[i]->offset + allocation->freeRegions[i]->size == offset)
{
newOffset = allocation->freeRegions[i]->offset;
newSize = allocation->freeRegions[i]->size + size;
VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, allocation->freeRegions[i]);
VULKAN_INTERNAL_NewMemoryFreeRegion(renderer, allocation, newOffset, newSize);
SDL_UnlockMutex(renderer->allocatorLock);
2021-02-01 00:29:38 +00:00
return;
2021-01-30 00:03:00 +00:00
}
/* check right side */
if (allocation->freeRegions[i]->offset == offset + size)
{
newOffset = offset;
newSize = allocation->freeRegions[i]->size + size;
VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, allocation->freeRegions[i]);
VULKAN_INTERNAL_NewMemoryFreeRegion(renderer, allocation, newOffset, newSize);
SDL_UnlockMutex(renderer->allocatorLock);
2021-01-30 00:03:00 +00:00
return;
}
}
/* region is not contiguous with another free region, make a new one */
2020-12-18 22:35:33 +00:00
allocation->freeRegionCount += 1;
if (allocation->freeRegionCount > allocation->freeRegionCapacity)
{
allocation->freeRegionCapacity *= 2;
allocation->freeRegions = SDL_realloc(
allocation->freeRegions,
sizeof(VulkanMemoryFreeRegion*) * allocation->freeRegionCapacity
);
}
newFreeRegion = SDL_malloc(sizeof(VulkanMemoryFreeRegion));
newFreeRegion->offset = offset;
newFreeRegion->size = size;
newFreeRegion->allocation = allocation;
allocation->freeSpace += size;
2020-12-18 22:35:33 +00:00
allocation->freeRegions[allocation->freeRegionCount - 1] = newFreeRegion;
newFreeRegion->allocationIndex = allocation->freeRegionCount - 1;
if (allocation->availableForAllocation)
2020-12-18 22:35:33 +00:00
{
for (i = 0; i < allocation->allocator->sortedFreeRegionCount; i += 1)
2020-12-18 22:35:33 +00:00
{
if (allocation->allocator->sortedFreeRegions[i]->size < size)
{
/* this is where the new region should go */
break;
}
insertionIndex += 1;
}
if (allocation->allocator->sortedFreeRegionCount + 1 > allocation->allocator->sortedFreeRegionCapacity)
{
allocation->allocator->sortedFreeRegionCapacity *= 2;
allocation->allocator->sortedFreeRegions = SDL_realloc(
allocation->allocator->sortedFreeRegions,
sizeof(VulkanMemoryFreeRegion*) * allocation->allocator->sortedFreeRegionCapacity
);
}
/* perform insertion sort */
if (allocation->allocator->sortedFreeRegionCount > 0 && insertionIndex != allocation->allocator->sortedFreeRegionCount)
{
for (i = allocation->allocator->sortedFreeRegionCount; i > insertionIndex && i > 0; i -= 1)
{
allocation->allocator->sortedFreeRegions[i] = allocation->allocator->sortedFreeRegions[i - 1];
allocation->allocator->sortedFreeRegions[i]->sortedIndex = i;
}
2020-12-18 22:35:33 +00:00
}
allocation->allocator->sortedFreeRegionCount += 1;
allocation->allocator->sortedFreeRegions[insertionIndex] = newFreeRegion;
newFreeRegion->sortedIndex = insertionIndex;
2020-12-18 22:35:33 +00:00
}
SDL_UnlockMutex(renderer->allocatorLock);
}
/* Records a suballocation of [offset, offset + size) within `allocation`.
 * resourceOffset/resourceSize describe the aligned sub-range the bound
 * resource actually occupies; alignment is stored for later rebinding.
 * Takes the allocator lock. Returns the new used region; ownership stays
 * with the allocation's usedRegions array.
 */
static VulkanMemoryUsedRegion* VULKAN_INTERNAL_NewMemoryUsedRegion(
	VulkanRenderer *renderer,
	VulkanMemoryAllocation *allocation,
	VkDeviceSize offset,
	VkDeviceSize size,
	VkDeviceSize resourceOffset,
	VkDeviceSize resourceSize,
	VkDeviceSize alignment
) {
	VulkanMemoryUsedRegion *region;

	SDL_LockMutex(renderer->allocatorLock);

	/* grow the used-region array when it is full */
	if (allocation->usedRegionCount == allocation->usedRegionCapacity)
	{
		allocation->usedRegionCapacity *= 2;
		allocation->usedRegions = SDL_realloc(
			allocation->usedRegions,
			allocation->usedRegionCapacity * sizeof(VulkanMemoryUsedRegion*)
		);
	}

	region = SDL_malloc(sizeof(VulkanMemoryUsedRegion));
	region->allocation = allocation;
	region->offset = offset;
	region->size = size;
	region->resourceOffset = resourceOffset;
	region->resourceSize = resourceSize;
	region->alignment = alignment;

	allocation->usedSpace += size;

	allocation->usedRegions[allocation->usedRegionCount] = region;
	allocation->usedRegionCount += 1;

	SDL_UnlockMutex(renderer->allocatorLock);

	return region;
}
/* Unlinks a used region from its allocation's array (swap-remove),
 * returns its memory range to the free pool via NewMemoryFreeRegion,
 * and frees the region struct. Takes the allocator lock.
 * (Rewritten to strip VCS-blame residue interleaved with the code.)
 */
static void VULKAN_INTERNAL_RemoveMemoryUsedRegion(
	VulkanRenderer *renderer,
	VulkanMemoryUsedRegion *usedRegion
) {
	uint32_t i;

	SDL_LockMutex(renderer->allocatorLock);

	for (i = 0; i < usedRegion->allocation->usedRegionCount; i += 1)
	{
		if (usedRegion->allocation->usedRegions[i] == usedRegion)
		{
			/* plug the hole */
			if (i != usedRegion->allocation->usedRegionCount - 1)
			{
				usedRegion->allocation->usedRegions[i] = usedRegion->allocation->usedRegions[usedRegion->allocation->usedRegionCount - 1];
			}

			break;
		}
	}

	usedRegion->allocation->usedSpace -= usedRegion->size;
	usedRegion->allocation->usedRegionCount -= 1;

	/* hand the range back to the free pool before freeing the struct */
	VULKAN_INTERNAL_NewMemoryFreeRegion(
		renderer,
		usedRegion->allocation,
		usedRegion->offset,
		usedRegion->size
	);

	SDL_free(usedRegion);

	SDL_UnlockMutex(renderer->allocatorLock);
}
/* Searches device memory types, starting at *memoryTypeIndex, for one
 * that is set in typeFilter, has all requiredProperties, and has none of
 * ignoredProperties. On success writes the found index to
 * *memoryTypeIndex and returns 1; returns 0 if nothing matches at or
 * after the starting index. The in/out index presumably lets callers
 * resume the search for fallback types — verify against call sites.
 * (Rewritten to strip VCS-blame residue interleaved with the code.)
 */
static uint8_t VULKAN_INTERNAL_FindMemoryType(
	VulkanRenderer *renderer,
	uint32_t typeFilter,
	VkMemoryPropertyFlags requiredProperties,
	VkMemoryPropertyFlags ignoredProperties,
	uint32_t *memoryTypeIndex
) {
	uint32_t i;

	for (i = *memoryTypeIndex; i < renderer->memoryProperties.memoryTypeCount; i += 1)
	{
		if (	(typeFilter & (1 << i)) &&
			(renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties &&
			(renderer->memoryProperties.memoryTypes[i].propertyFlags & ignoredProperties) == 0	)
		{
			*memoryTypeIndex = i;
			return 1;
		}
	}

	return 0;
}
static uint8_t VULKAN_INTERNAL_FindBufferMemoryRequirements(
VulkanRenderer *renderer,
VkBuffer buffer,
VkMemoryPropertyFlags requiredMemoryProperties,
VkMemoryPropertyFlags ignoredMemoryProperties,
2020-12-18 22:35:33 +00:00
VkMemoryRequirements2KHR *pMemoryRequirements,
uint32_t *pMemoryTypeIndex
) {
VkBufferMemoryRequirementsInfo2KHR bufferRequirementsInfo;
bufferRequirementsInfo.sType =
VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR;
bufferRequirementsInfo.pNext = NULL;
bufferRequirementsInfo.buffer = buffer;
renderer->vkGetBufferMemoryRequirements2KHR(
renderer->logicalDevice,
&bufferRequirementsInfo,
pMemoryRequirements
);
return VULKAN_INTERNAL_FindMemoryType(
2020-12-18 22:35:33 +00:00
renderer,
pMemoryRequirements->memoryRequirements.memoryTypeBits,
requiredMemoryProperties,
ignoredMemoryProperties,
2020-12-18 22:35:33 +00:00
pMemoryTypeIndex
);
2020-12-18 22:35:33 +00:00
}
static uint8_t VULKAN_INTERNAL_FindImageMemoryRequirements(
VulkanRenderer *renderer,
VkImage image,
VkMemoryPropertyFlags requiredMemoryPropertyFlags,
2020-12-18 22:35:33 +00:00
VkMemoryRequirements2KHR *pMemoryRequirements,
uint32_t *pMemoryTypeIndex
) {
VkImageMemoryRequirementsInfo2KHR imageRequirementsInfo;
imageRequirementsInfo.sType =
VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR;
imageRequirementsInfo.pNext = NULL;
imageRequirementsInfo.image = image;
renderer->vkGetImageMemoryRequirements2KHR(
renderer->logicalDevice,
&imageRequirementsInfo,
pMemoryRequirements
);
return VULKAN_INTERNAL_FindMemoryType(
2020-12-18 22:35:33 +00:00
renderer,
pMemoryRequirements->memoryRequirements.memoryTypeBits,
requiredMemoryPropertyFlags,
0,
2020-12-18 22:35:33 +00:00
pMemoryTypeIndex
);
}
/* Destroys an allocation with no remaining used regions: removes all of
 * its free regions, frees the VkDeviceMemory (which also unmaps any
 * persistent mapping), and swap-removes the allocation from the
 * allocator's array. Takes the allocator lock.
 *
 * FIX: the old loop indexed freeRegions forward while
 * RemoveMemoryFreeRegion swap-removes entries and shrinks freeRegionCount,
 * which skips (and leaks) regions whenever freeRegionCount > 1. Popping
 * index 0 until the list is empty handles every count correctly.
 * (Also stripped VCS-blame residue interleaved with the code.)
 */
static void VULKAN_INTERNAL_DeallocateMemory(
	VulkanRenderer *renderer,
	VulkanMemorySubAllocator *allocator,
	uint32_t allocationIndex
) {
	VulkanMemoryAllocation *allocation = allocator->allocations[allocationIndex];

	SDL_LockMutex(renderer->allocatorLock);

	/* RemoveMemoryFreeRegion decrements freeRegionCount each call,
	 * so this loop terminates once every region has been removed.
	 */
	while (allocation->freeRegionCount > 0)
	{
		VULKAN_INTERNAL_RemoveMemoryFreeRegion(
			renderer,
			allocation->freeRegions[0]
		);
	}
	SDL_free(allocation->freeRegions);

	/* no need to iterate used regions because deallocate
	 * only happens when there are 0 used regions
	 */
	SDL_free(allocation->usedRegions);

	renderer->vkFreeMemory(
		renderer->logicalDevice,
		allocation->memory,
		NULL
	);

	SDL_DestroyMutex(allocation->memoryLock);
	SDL_free(allocation);

	/* swap-remove the allocation from the allocator's array */
	if (allocationIndex != allocator->allocationCount - 1)
	{
		allocator->allocations[allocationIndex] = allocator->allocations[allocator->allocationCount - 1];
	}

	allocator->allocationCount -= 1;

	SDL_UnlockMutex(renderer->allocatorLock);
}
/* Allocates a new VkDeviceMemory block of allocationSize for the given
 * memory type, registers it with the matching sub-allocator, and seeds it
 * with one free region spanning the whole block. Host-visible memory is
 * persistently mapped (mapPointer). On success stores the allocation in
 * *pMemoryAllocation and returns 1; returns 0 on failure.
 *
 * buffer/image are currently unused here; presumably kept for
 * dedicated-allocation extension plumbing — confirm before removing.
 *
 * FIX: the vkAllocateMemory failure path previously leaked the
 * usedRegions array and the memoryLock mutex; both are now released.
 */
static uint8_t VULKAN_INTERNAL_AllocateMemory(
	VulkanRenderer *renderer,
	VkBuffer buffer,
	VkImage image,
	uint32_t memoryTypeIndex,
	VkDeviceSize allocationSize,
	uint8_t dedicated, /* indicates that one resource uses this memory and the memory shouldn't be moved */
	uint8_t isHostVisible,
	VulkanMemoryAllocation **pMemoryAllocation)
{
	VulkanMemoryAllocation *allocation;
	VulkanMemorySubAllocator *allocator = &renderer->memoryAllocator->subAllocators[memoryTypeIndex];
	VkMemoryAllocateInfo allocInfo;
	VkResult result;

	allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
	allocInfo.pNext = NULL;
	allocInfo.memoryTypeIndex = memoryTypeIndex;
	allocInfo.allocationSize = allocationSize;

	allocation = SDL_malloc(sizeof(VulkanMemoryAllocation));
	allocation->size = allocationSize;
	allocation->freeSpace = 0; /* added by FreeRegions */
	allocation->usedSpace = 0; /* added by UsedRegions */
	allocation->memoryLock = SDL_CreateMutex();

	allocator->allocationCount += 1;
	allocator->allocations = SDL_realloc(
		allocator->allocations,
		sizeof(VulkanMemoryAllocation*) * allocator->allocationCount
	);

	allocator->allocations[
		allocator->allocationCount - 1
	] = allocation;

	if (dedicated)
	{
		allocation->dedicated = 1;
		/* dedicated blocks never serve other suballocations */
		allocation->availableForAllocation = 0;
	}
	else
	{
		allocInfo.pNext = NULL;
		allocation->dedicated = 0;
		allocation->availableForAllocation = 1;
	}

	allocation->usedRegions = SDL_malloc(sizeof(VulkanMemoryUsedRegion*));
	allocation->usedRegionCount = 0;
	allocation->usedRegionCapacity = 1;

	allocation->freeRegions = SDL_malloc(sizeof(VulkanMemoryFreeRegion*));
	allocation->freeRegionCount = 0;
	allocation->freeRegionCapacity = 1;

	allocation->allocator = allocator;

	result = renderer->vkAllocateMemory(
		renderer->logicalDevice,
		&allocInfo,
		NULL,
		&allocation->memory
	);

	if (result != VK_SUCCESS)
	{
		/* Uh oh, we couldn't allocate, time to clean up */
		SDL_free(allocation->freeRegions);
		SDL_free(allocation->usedRegions);
		SDL_DestroyMutex(allocation->memoryLock);

		allocator->allocationCount -= 1;
		allocator->allocations = SDL_realloc(
			allocator->allocations,
			sizeof(VulkanMemoryAllocation*) * allocator->allocationCount
		);

		SDL_free(allocation);

		return 0;
	}

	/* Persistent mapping for host-visible memory */
	if (isHostVisible)
	{
		result = renderer->vkMapMemory(
			renderer->logicalDevice,
			allocation->memory,
			0,
			VK_WHOLE_SIZE,
			0,
			(void**) &allocation->mapPointer
		);
		/* NOTE(review): on map failure this returns with the allocation
		 * still registered in the allocator — consider full teardown.
		 */
		VULKAN_ERROR_CHECK(result, vkMapMemory, 0)
	}
	else
	{
		allocation->mapPointer = NULL;
	}

	VULKAN_INTERNAL_NewMemoryFreeRegion(
		renderer,
		allocation,
		0,
		allocation->size
	);

	*pMemoryAllocation = allocation;
	return 1;
}
/* Binds `buffer` to its suballocated range within the allocation's device
 * memory, serialized by the allocation's memory lock. Returns 1 on
 * success; logs and returns 0 on failure.
 */
static uint8_t VULKAN_INTERNAL_BindBufferMemory(
	VulkanRenderer *renderer,
	VulkanMemoryUsedRegion *usedRegion,
	VkDeviceSize alignedOffset,
	VkBuffer buffer
) {
	VkResult bindResult;
	VulkanMemoryAllocation *allocation = usedRegion->allocation;

	SDL_LockMutex(allocation->memoryLock);
	bindResult = renderer->vkBindBufferMemory(
		renderer->logicalDevice,
		buffer,
		allocation->memory,
		alignedOffset
	);
	SDL_UnlockMutex(allocation->memoryLock);

	VULKAN_ERROR_CHECK(bindResult, vkBindBufferMemory, 0)

	return 1;
}
static uint8_t VULKAN_INTERNAL_BindImageMemory(
2020-12-18 22:35:33 +00:00
VulkanRenderer *renderer,
VulkanMemoryUsedRegion *usedRegion,
VkDeviceSize alignedOffset,
VkImage image
) {
VkResult vulkanResult;
SDL_LockMutex(usedRegion->allocation->memoryLock);
vulkanResult = renderer->vkBindImageMemory(
renderer->logicalDevice,
image,
usedRegion->allocation->memory,
alignedOffset
);
SDL_UnlockMutex(usedRegion->allocation->memoryLock);
VULKAN_ERROR_CHECK(vulkanResult, vkBindBufferMemory, 0)
return 1;
}
/* Sub-allocates device memory for exactly one of |buffer| or |image|
 * (the other must be VK_NULL_HANDLE) and binds the resource to it.
 *
 * First scans the memory type's sorted free regions for a fit (small
 * requests only match regions inside SMALL_ALLOCATION_SIZE allocations,
 * large requests only match regions outside them). If no region fits,
 * a new VkDeviceMemory allocation is made: exact-size when dedicated,
 * SMALL_ALLOCATION_SIZE for small requests, otherwise the required size
 * rounded up to LARGE_ALLOCATION_INCREMENT.
 *
 * Returns:
 *   1 - success, *pMemoryUsedRegion is set
 *   0 - invalid arguments or vkBind*Memory failure
 *   2 - vkAllocateMemory failed for this memory type; the caller is
 *       responsible for retrying with a different memory type
 */
static uint8_t VULKAN_INTERNAL_BindResourceMemory(
	VulkanRenderer* renderer,
	uint32_t memoryTypeIndex,
	VkMemoryRequirements2KHR* memoryRequirements,
	uint8_t forceDedicated,
	VkDeviceSize resourceSize, /* may be different from requirements size! */
	VkBuffer buffer, /* may be VK_NULL_HANDLE */
	VkImage image, /* may be VK_NULL_HANDLE */
	VulkanMemoryUsedRegion** pMemoryUsedRegion
) {
	VulkanMemoryAllocation *allocation;
	VulkanMemorySubAllocator *allocator;
	VulkanMemoryFreeRegion *region;
	VulkanMemoryFreeRegion *selectedRegion;
	VulkanMemoryUsedRegion *usedRegion;

	VkDeviceSize requiredSize, allocationSize;
	VkDeviceSize alignedOffset;
	/* NOTE(review): 32-bit while offsets/sizes are VkDeviceSize; a region
	 * at/above 4GiB would truncate here - confirm allocation sizes stay
	 * below that bound. */
	uint32_t newRegionSize, newRegionOffset;
	uint8_t shouldAllocDedicated = forceDedicated;
	uint8_t isHostVisible, smallAllocation, allocationResult;
	int32_t i;

	isHostVisible =
		(renderer->memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags &
		VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;

	allocator = &renderer->memoryAllocator->subAllocators[memoryTypeIndex];

	requiredSize = memoryRequirements->memoryRequirements.size;
	smallAllocation = requiredSize < SMALL_ALLOCATION_THRESHOLD;

	/* Exactly one resource handle must be provided */
	if (	(buffer == VK_NULL_HANDLE && image == VK_NULL_HANDLE) ||
		(buffer != VK_NULL_HANDLE && image != VK_NULL_HANDLE)		)
	{
		Refresh_LogError("BindResourceMemory must be given either a VulkanBuffer or a VulkanTexture");
		return 0;
	}

	SDL_LockMutex(renderer->allocatorLock);

	selectedRegion = NULL;

	/* Try to find an existing free region that can hold the request
	 * (iterating largest-first through the sorted free regions) */
	if (!shouldAllocDedicated)
	{
		for (i = allocator->sortedFreeRegionCount - 1; i >= 0; i -= 1)
		{
			region = allocator->sortedFreeRegions[i];

			if (smallAllocation && region->allocation->size != SMALL_ALLOCATION_SIZE)
			{
				/* region is not in a small allocation */
				continue;
			}

			if (!smallAllocation && region->allocation->size == SMALL_ALLOCATION_SIZE)
			{
				/* allocation is not small and current region is in a small allocation */
				continue;
			}

			alignedOffset = VULKAN_INTERNAL_NextHighestAlignment(
				region->offset,
				memoryRequirements->memoryRequirements.alignment
			);

			if (alignedOffset + requiredSize <= region->offset + region->size)
			{
				selectedRegion = region;
				break;
			}
		}
	}

	if (selectedRegion != NULL)
	{
		/* Carve the used region out of the free region; the alignment
		 * padding (alignedOffset - region->offset) is charged to the
		 * used region so no bytes are lost track of */
		region = selectedRegion;
		allocation = region->allocation;

		usedRegion = VULKAN_INTERNAL_NewMemoryUsedRegion(
			renderer,
			allocation,
			region->offset,
			requiredSize + (alignedOffset - region->offset),
			alignedOffset,
			resourceSize,
			memoryRequirements->memoryRequirements.alignment
		);

		usedRegion->isBuffer = buffer != VK_NULL_HANDLE;

		newRegionSize = region->size - ((alignedOffset - region->offset) + requiredSize);
		newRegionOffset = alignedOffset + requiredSize;

		/* remove and add modified region to re-sort */
		VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, region);

		/* if size is 0, no need to re-insert */
		if (newRegionSize != 0)
		{
			VULKAN_INTERNAL_NewMemoryFreeRegion(
				renderer,
				allocation,
				newRegionOffset,
				newRegionSize
			);
		}

		SDL_UnlockMutex(renderer->allocatorLock);

		if (buffer != VK_NULL_HANDLE)
		{
			if (!VULKAN_INTERNAL_BindBufferMemory(
				renderer,
				usedRegion,
				alignedOffset,
				buffer
			)) {
				VULKAN_INTERNAL_RemoveMemoryUsedRegion(
					renderer,
					usedRegion
				);

				return 0;
			}
		}
		else if (image != VK_NULL_HANDLE)
		{
			if (!VULKAN_INTERNAL_BindImageMemory(
				renderer,
				usedRegion,
				alignedOffset,
				image
			)) {
				VULKAN_INTERNAL_RemoveMemoryUsedRegion(
					renderer,
					usedRegion
				);

				return 0;
			}
		}

		*pMemoryUsedRegion = usedRegion;
		return 1;
	}

	/* No suitable free regions exist, allocate a new memory region */

	if (
		!shouldAllocDedicated &&
		renderer->allocationsToDefragCount == 0 &&
		!renderer->defragInProgress
	) {
		/* Mark currently fragmented allocations for defrag */
		VULKAN_INTERNAL_MarkAllocationsForDefrag(renderer);
	}

	if (shouldAllocDedicated)
	{
		allocationSize = requiredSize;
	}
	else if (requiredSize > SMALL_ALLOCATION_THRESHOLD)
	{
		/* allocate a page of required size aligned to LARGE_ALLOCATION_INCREMENT increments */
		allocationSize =
			VULKAN_INTERNAL_NextHighestAlignment(requiredSize, LARGE_ALLOCATION_INCREMENT);
	}
	else
	{
		allocationSize = SMALL_ALLOCATION_SIZE;
	}

	allocationResult = VULKAN_INTERNAL_AllocateMemory(
		renderer,
		buffer,
		image,
		memoryTypeIndex,
		allocationSize,
		shouldAllocDedicated,
		isHostVisible,
		&allocation
	);

	/* Uh oh, we're out of memory */
	if (allocationResult == 0)
	{
		SDL_UnlockMutex(renderer->allocatorLock);

		/* Responsibility of the caller to handle being out of memory */
		return 2;
	}

	/* A fresh allocation starts with one free region spanning it; claim
	 * the front of it (offset 0, so no alignment padding is needed) */
	usedRegion = VULKAN_INTERNAL_NewMemoryUsedRegion(
		renderer,
		allocation,
		0,
		requiredSize,
		0,
		resourceSize,
		memoryRequirements->memoryRequirements.alignment
	);

	usedRegion->isBuffer = buffer != VK_NULL_HANDLE;

	region = allocation->freeRegions[0];

	newRegionOffset = region->offset + requiredSize;
	newRegionSize = region->size - requiredSize;

	VULKAN_INTERNAL_RemoveMemoryFreeRegion(renderer, region);

	if (newRegionSize != 0)
	{
		VULKAN_INTERNAL_NewMemoryFreeRegion(
			renderer,
			allocation,
			newRegionOffset,
			newRegionSize
		);
	}

	SDL_UnlockMutex(renderer->allocatorLock);

	if (buffer != VK_NULL_HANDLE)
	{
		if (!VULKAN_INTERNAL_BindBufferMemory(
			renderer,
			usedRegion,
			0,
			buffer
		)) {
			VULKAN_INTERNAL_RemoveMemoryUsedRegion(
				renderer,
				usedRegion
			);

			return 0;
		}
	}
	else if (image != VK_NULL_HANDLE)
	{
		if (!VULKAN_INTERNAL_BindImageMemory(
			renderer,
			usedRegion,
			0,
			image
		)) {
			VULKAN_INTERNAL_RemoveMemoryUsedRegion(
				renderer,
				usedRegion
			);

			return 0;
		}
	}

	*pMemoryUsedRegion = usedRegion;
	return 1;
}
/* Selects a memory type for |image| and binds memory for it.
 * Pass 1 requires device-local memory; if every device-local memory type
 * fails, pass 2 retries with no property requirements (host-local) and
 * logs a warning. Returns the result of the last bind attempt
 * (1 success, 0 bind failure, 2 out of memory).
 */
static uint8_t VULKAN_INTERNAL_BindMemoryForImage(
	VulkanRenderer* renderer,
	VkImage image,
	uint8_t dedicated,
	VulkanMemoryUsedRegion** usedRegion
) {
	uint8_t result = 0;
	uint32_t typeIndex = 0;
	VkMemoryPropertyFlags requiredFlags;

	VkMemoryRequirements2KHR memoryRequirements =
	{
		VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
		NULL
	};

	/* Pass 1: prefer GPU allocation for textures */
	requiredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;

	while (VULKAN_INTERNAL_FindImageMemoryRequirements(
		renderer,
		image,
		requiredFlags,
		&memoryRequirements,
		&typeIndex
	)) {
		result = VULKAN_INTERNAL_BindResourceMemory(
			renderer,
			typeIndex,
			&memoryRequirements,
			dedicated,
			memoryRequirements.memoryRequirements.size,
			VK_NULL_HANDLE,
			image,
			usedRegion
		);

		if (result == 1)
		{
			break;
		}

		/* Bind failed, try the next device-local heap */
		typeIndex += 1;
	}

	if (result != 1)
	{
		/* Pass 2: bind _still_ failed, try again without device local */
		typeIndex = 0;
		requiredFlags = 0;

		Refresh_LogWarn("Out of device-local memory, allocating textures on host-local memory!");

		while (VULKAN_INTERNAL_FindImageMemoryRequirements(
			renderer,
			image,
			requiredFlags,
			&memoryRequirements,
			&typeIndex
		)) {
			result = VULKAN_INTERNAL_BindResourceMemory(
				renderer,
				typeIndex,
				&memoryRequirements,
				0,
				memoryRequirements.memoryRequirements.size,
				VK_NULL_HANDLE,
				image,
				usedRegion
			);

			if (result == 1)
			{
				break;
			}

			/* Bind failed, try the next heap */
			typeIndex += 1;
		}
	}

	return result;
}
/* Selects a memory type for |buffer| and binds memory for it.
 *
 * |requireHostVisible| demands HOST_VISIBLE|HOST_COHERENT memory in both
 * passes; |preferHostLocal| / |preferDeviceLocal| steer the first pass
 * only and are dropped (with a one-time log message) if it fails.
 *
 * Returns the result of the last bind attempt
 * (1 success, 0 bind failure, 2 out of memory).
 */
static uint8_t VULKAN_INTERNAL_BindMemoryForBuffer(
	VulkanRenderer* renderer,
	VkBuffer buffer,
	VkDeviceSize size,
	uint8_t requireHostVisible,
	uint8_t preferHostLocal,
	uint8_t preferDeviceLocal,
	uint8_t dedicatedAllocation,
	VulkanMemoryUsedRegion** usedRegion
) {
	uint8_t bindResult = 0;
	uint32_t memoryTypeIndex = 0;
	VkMemoryPropertyFlags requiredMemoryPropertyFlags = 0;
	VkMemoryPropertyFlags ignoredMemoryPropertyFlags = 0;

	VkMemoryRequirements2KHR memoryRequirements =
	{
		VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
		NULL
	};

	if (requireHostVisible)
	{
		requiredMemoryPropertyFlags =
			VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
			VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
	}

	/* preferHostLocal excludes device-local types; preferDeviceLocal
	 * requires them. The two are treated as mutually exclusive here. */
	if (preferHostLocal)
	{
		ignoredMemoryPropertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
	}
	else if (preferDeviceLocal)
	{
		requiredMemoryPropertyFlags |=
			VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
	}

	/* Pass 1: with preference flags applied */
	while (VULKAN_INTERNAL_FindBufferMemoryRequirements(
		renderer,
		buffer,
		requiredMemoryPropertyFlags,
		ignoredMemoryPropertyFlags,
		&memoryRequirements,
		&memoryTypeIndex
	)) {
		bindResult = VULKAN_INTERNAL_BindResourceMemory(
			renderer,
			memoryTypeIndex,
			&memoryRequirements,
			dedicatedAllocation,
			size,
			buffer,
			VK_NULL_HANDLE,
			usedRegion
		);

		if (bindResult == 1)
		{
			break;
		}
		else /* Bind failed, try the next device-local heap */
		{
			memoryTypeIndex += 1;
		}
	}

	/* Bind failed, try again without preferred flags */
	if (bindResult != 1)
	{
		memoryTypeIndex = 0;
		requiredMemoryPropertyFlags = 0;
		ignoredMemoryPropertyFlags = 0;

		/* Host visibility is a hard requirement, keep it */
		if (requireHostVisible)
		{
			requiredMemoryPropertyFlags =
				VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
				VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
		}

		/* One-time notifications so logs aren't spammed per-buffer */
		if (preferHostLocal && !renderer->integratedMemoryNotification)
		{
			Refresh_LogInfo("Integrated memory detected, allocating TransferBuffers on device-local memory!");
			renderer->integratedMemoryNotification = 1;
		}

		if (preferDeviceLocal && !renderer->outOfDeviceLocalMemoryWarning)
		{
			Refresh_LogWarn("Out of device-local memory, allocating GpuBuffers on host-local memory, expect degraded performance!");
			renderer->outOfDeviceLocalMemoryWarning = 1;
		}

		while (VULKAN_INTERNAL_FindBufferMemoryRequirements(
			renderer,
			buffer,
			requiredMemoryPropertyFlags,
			ignoredMemoryPropertyFlags,
			&memoryRequirements,
			&memoryTypeIndex
		)) {
			bindResult = VULKAN_INTERNAL_BindResourceMemory(
				renderer,
				memoryTypeIndex,
				&memoryRequirements,
				dedicatedAllocation,
				size,
				buffer,
				VK_NULL_HANDLE,
				usedRegion
			);

			if (bindResult == 1)
			{
				break;
			}
			else /* Bind failed, try the next heap */
			{
				memoryTypeIndex += 1;
			}
		}
	}

	return bindResult;
}
2020-12-19 00:39:03 +00:00
/* Memory Barriers */
2020-12-19 01:03:26 +00:00
/* Records a pipeline barrier transitioning |buffer| (its full range) from
 * its currently-tracked access type to |nextResourceAccessType|, then
 * updates the tracked state. Stage/access masks come from AccessMap.
 */
static void VULKAN_INTERNAL_BufferMemoryBarrier(
	VulkanRenderer *renderer,
	VkCommandBuffer commandBuffer,
	VulkanResourceAccessType nextResourceAccessType,
	VulkanBuffer *buffer
) {
	VkPipelineStageFlags srcStages = 0;
	VkPipelineStageFlags dstStages = 0;
	VkBufferMemoryBarrier memoryBarrier;
	VulkanResourceAccessType prevAccess, nextAccess;
	const VulkanResourceAccessInfo *prevAccessInfo, *nextAccessInfo;

	memoryBarrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
	memoryBarrier.pNext = NULL;
	memoryBarrier.srcAccessMask = 0;
	memoryBarrier.dstAccessMask = 0;
	memoryBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	memoryBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	memoryBarrier.buffer = buffer->buffer;
	memoryBarrier.offset = 0;
	memoryBarrier.size = buffer->size;

	prevAccess = buffer->resourceAccessType;
	prevAccessInfo = &AccessMap[prevAccess];

	srcStages |= prevAccessInfo->stageMask;

	/* Access types above RESOURCE_ACCESS_END_OF_READ involve writes;
	 * only writes need a source access mask to be made available */
	if (prevAccess > RESOURCE_ACCESS_END_OF_READ)
	{
		memoryBarrier.srcAccessMask |= prevAccessInfo->accessMask;
	}

	nextAccess = nextResourceAccessType;
	nextAccessInfo = &AccessMap[nextAccess];

	dstStages |= nextAccessInfo->stageMask;

	/* Destination access mask is only needed when there was a prior
	 * write to make visible (read-after-read needs no memory barrier) */
	if (memoryBarrier.srcAccessMask != 0)
	{
		memoryBarrier.dstAccessMask |= nextAccessInfo->accessMask;
	}

	/* Empty stage masks are invalid; fall back to TOP/BOTTOM of pipe */
	if (srcStages == 0)
	{
		srcStages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
	}

	if (dstStages == 0)
	{
		dstStages = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
	}

	renderer->vkCmdPipelineBarrier(
		commandBuffer,
		srcStages,
		dstStages,
		0,
		0,
		NULL,
		1,
		&memoryBarrier,
		0,
		NULL
	);

	buffer->resourceAccessType = nextResourceAccessType;
}
2020-12-19 00:39:03 +00:00
/* Records a pipeline barrier transitioning a single texture slice
 * (one layer, one mip level) from its currently-tracked access type to
 * |nextAccess|, including the image layout transition, then updates the
 * tracked state. Stage/access masks and layouts come from AccessMap.
 */
static void VULKAN_INTERNAL_ImageMemoryBarrier(
	VulkanRenderer *renderer,
	VkCommandBuffer commandBuffer,
	VulkanResourceAccessType nextAccess,
	VulkanTextureSlice *textureSlice
) {
	VkPipelineStageFlags srcStages = 0;
	VkPipelineStageFlags dstStages = 0;
	VkImageMemoryBarrier memoryBarrier;
	VulkanResourceAccessType prevAccess;
	const VulkanResourceAccessInfo *pPrevAccessInfo, *pNextAccessInfo;

	memoryBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
	memoryBarrier.pNext = NULL;
	memoryBarrier.srcAccessMask = 0;
	memoryBarrier.dstAccessMask = 0;
	/* Layouts are filled in from AccessMap below */
	memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
	memoryBarrier.newLayout = VK_IMAGE_LAYOUT_UNDEFINED;
	memoryBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	memoryBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
	memoryBarrier.image = textureSlice->parent->image;
	memoryBarrier.subresourceRange.aspectMask = textureSlice->parent->aspectFlags;
	memoryBarrier.subresourceRange.baseArrayLayer = textureSlice->layer;
	memoryBarrier.subresourceRange.layerCount = 1;
	memoryBarrier.subresourceRange.baseMipLevel = textureSlice->level;
	memoryBarrier.subresourceRange.levelCount = 1;

	prevAccess = textureSlice->resourceAccessType;
	pPrevAccessInfo = &AccessMap[prevAccess];

	srcStages |= pPrevAccessInfo->stageMask;

	/* Access types above RESOURCE_ACCESS_END_OF_READ involve writes;
	 * only writes need a source access mask to be made available */
	if (prevAccess > RESOURCE_ACCESS_END_OF_READ)
	{
		memoryBarrier.srcAccessMask |= pPrevAccessInfo->accessMask;
	}

	memoryBarrier.oldLayout = pPrevAccessInfo->imageLayout;

	pNextAccessInfo = &AccessMap[nextAccess];

	dstStages |= pNextAccessInfo->stageMask;

	memoryBarrier.dstAccessMask |= pNextAccessInfo->accessMask;
	memoryBarrier.newLayout = pNextAccessInfo->imageLayout;

	/* Empty stage masks are invalid; fall back to TOP/BOTTOM of pipe */
	if (srcStages == 0)
	{
		srcStages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
	}

	if (dstStages == 0)
	{
		dstStages = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
	}

	renderer->vkCmdPipelineBarrier(
		commandBuffer,
		srcStages,
		dstStages,
		0,
		0,
		NULL,
		0,
		NULL,
		1,
		&memoryBarrier
	);

	textureSlice->resourceAccessType = nextAccess;
}
/* Resource tracking */
/* Appends |resource| to commandBuffer->|array| if not already present,
 * growing the array by one slot when full. Unlike TRACK_RESOURCE this
 * does NOT take a reference count. Expands to statements that `return`
 * on duplicates, so it must be the body of a void function. */
#define ADD_TO_ARRAY_UNIQUE(resource, type, array, count, capacity) \
	uint32_t i; \
	\
	for (i = 0; i < commandBuffer->count; i += 1) \
	{ \
		if (commandBuffer->array[i] == resource) \
		{ \
			return; \
		} \
	} \
	\
	if (commandBuffer->count == commandBuffer->capacity) \
	{ \
		commandBuffer->capacity += 1; \
		commandBuffer->array = SDL_realloc( \
			commandBuffer->array, \
			commandBuffer->capacity * sizeof(type) \
		); \
	} \
	commandBuffer->array[commandBuffer->count] = resource; \
	commandBuffer->count += 1;
/* Same as ADD_TO_ARRAY_UNIQUE, but additionally increments the
 * resource's atomic referenceCount on first insertion (the command
 * buffer keeps the resource alive until it finishes). #undef'd after
 * the tracker functions below. */
#define TRACK_RESOURCE(resource, type, array, count, capacity) \
	uint32_t i; \
	\
	for (i = 0; i < commandBuffer->count; i += 1) \
	{ \
		if (commandBuffer->array[i] == resource) \
		{ \
			return; \
		} \
	} \
	\
	if (commandBuffer->count == commandBuffer->capacity) \
	{ \
		commandBuffer->capacity += 1; \
		commandBuffer->array = SDL_realloc( \
			commandBuffer->array, \
			commandBuffer->capacity * sizeof(type) \
		); \
	} \
	commandBuffer->array[commandBuffer->count] = resource; \
	commandBuffer->count += 1; \
	SDL_AtomicIncRef(&resource->referenceCount);
/* Tracks |buffer| on the command buffer and takes a reference (no-op if
 * already tracked). */
static void VULKAN_INTERNAL_TrackBuffer(
	VulkanRenderer *renderer,
	VulkanCommandBuffer *commandBuffer,
	VulkanBuffer *buffer
) {
	TRACK_RESOURCE(
		buffer,
		VulkanBuffer*,
		usedBuffers,
		usedBufferCount,
		usedBufferCapacity
	)
}
/* Tracks |textureSlice| on the command buffer and takes a reference
 * (no-op if already tracked). */
static void VULKAN_INTERNAL_TrackTextureSlice(
	VulkanRenderer *renderer,
	VulkanCommandBuffer *commandBuffer,
	VulkanTextureSlice *textureSlice
) {
	TRACK_RESOURCE(
		textureSlice,
		VulkanTextureSlice*,
		usedTextureSlices,
		usedTextureSliceCount,
		usedTextureSliceCapacity
	)
}
/* Tracks |sampler| on the command buffer and takes a reference (no-op if
 * already tracked). */
static void VULKAN_INTERNAL_TrackSampler(
	VulkanRenderer *renderer,
	VulkanCommandBuffer *commandBuffer,
	VulkanSampler *sampler
) {
	TRACK_RESOURCE(
		sampler,
		VulkanSampler*,
		usedSamplers,
		usedSamplerCount,
		usedSamplerCapacity
	)
}
/* Tracks |graphicsPipeline| on the command buffer and takes a reference
 * (no-op if already tracked). */
static void VULKAN_INTERNAL_TrackGraphicsPipeline(
	VulkanRenderer *renderer,
	VulkanCommandBuffer *commandBuffer,
	VulkanGraphicsPipeline *graphicsPipeline
) {
	TRACK_RESOURCE(
		graphicsPipeline,
		VulkanGraphicsPipeline*,
		usedGraphicsPipelines,
		usedGraphicsPipelineCount,
		usedGraphicsPipelineCapacity
	)
}
/* Tracks |computePipeline| on the command buffer and takes a reference
 * (no-op if already tracked). */
static void VULKAN_INTERNAL_TrackComputePipeline(
	VulkanRenderer *renderer,
	VulkanCommandBuffer *commandBuffer,
	VulkanComputePipeline *computePipeline
) {
	TRACK_RESOURCE(
		computePipeline,
		VulkanComputePipeline*,
		usedComputePipelines,
		usedComputePipelineCount,
		usedComputePipelineCapacity
	)
}
/* Tracks |framebuffer| on the command buffer and takes a reference
 * (no-op if already tracked). */
static void VULKAN_INTERNAL_TrackFramebuffer(
	VulkanRenderer *renderer,
	VulkanCommandBuffer *commandBuffer,
	VulkanFramebuffer *framebuffer
) {
	TRACK_RESOURCE(
		framebuffer,
		VulkanFramebuffer*,
		usedFramebuffers,
		usedFramebufferCount,
		usedFramebufferCapacity
	);
}
/* Records |computeBuffer| as bound for the current compute dispatch.
 * Uses ADD_TO_ARRAY_UNIQUE (no reference count taken). */
static void VULKAN_INTERNAL_TrackComputeBuffer(
	VulkanRenderer *renderer,
	VulkanCommandBuffer *commandBuffer,
	VulkanBuffer *computeBuffer
) {
	ADD_TO_ARRAY_UNIQUE(
		computeBuffer,
		VulkanBuffer*,
		boundComputeBuffers,
		boundComputeBufferCount,
		boundComputeBufferCapacity
	);
}
/* Records |textureSlice| as bound for the current compute dispatch.
 * Uses ADD_TO_ARRAY_UNIQUE (no reference count taken). */
static void VULKAN_INTERNAL_TrackComputeTextureSlice(
	VulkanRenderer *renderer,
	VulkanCommandBuffer *commandBuffer,
	VulkanTextureSlice *textureSlice
) {
	ADD_TO_ARRAY_UNIQUE(
		textureSlice,
		VulkanTextureSlice*,
		boundComputeTextureSlices,
		boundComputeTextureSliceCount,
		boundComputeTextureSliceCapacity
	);
}
/* For tracking Textures used in a copy pass.
 * Uses ADD_TO_ARRAY_UNIQUE (no reference count taken). */
static void VULKAN_INTERNAL_TrackCopiedTextureSlice(
	VulkanRenderer *renderer,
	VulkanCommandBuffer *commandBuffer,
	VulkanTextureSlice *textureSlice
) {
	ADD_TO_ARRAY_UNIQUE(
		textureSlice,
		VulkanTextureSlice*,
		copiedTextureSlices,
		copiedTextureSliceCount,
		copiedTextureSliceCapacity
	);
}
/* For tracking GpuBuffers used in a copy pass.
 * Uses ADD_TO_ARRAY_UNIQUE (no reference count taken). */
static void VULKAN_INTERNAL_TrackCopiedBuffer(
	VulkanRenderer *renderer,
	VulkanCommandBuffer *commandBuffer,
	VulkanBuffer *buffer
) {
	ADD_TO_ARRAY_UNIQUE(
		buffer,
		VulkanBuffer*,
		copiedGpuBuffers,
		copiedGpuBufferCount,
		copiedGpuBufferCapacity
	);
}
#undef TRACK_RESOURCE
2020-12-21 23:44:43 +00:00
/* Resource Disposal */
/* Queues |framebuffer| for deferred destruction (processed later under
 * the dispose list), growing the pending array as needed. Thread-safe
 * via disposeLock. */
static void VULKAN_INTERNAL_QueueDestroyFramebuffer(
	VulkanRenderer *renderer,
	VulkanFramebuffer *framebuffer
) {
	SDL_LockMutex(renderer->disposeLock);

	EXPAND_ARRAY_IF_NEEDED(
		renderer->framebuffersToDestroy,
		VulkanFramebuffer*,
		renderer->framebuffersToDestroyCount + 1,
		renderer->framebuffersToDestroyCapacity,
		renderer->framebuffersToDestroyCapacity * 2
	)

	renderer->framebuffersToDestroy[renderer->framebuffersToDestroyCount] = framebuffer;
	renderer->framebuffersToDestroyCount += 1;

	SDL_UnlockMutex(renderer->disposeLock);
}
/* Immediately destroys the VkFramebuffer and frees the wrapper struct.
 * Must only be called when the framebuffer is no longer in use. */
static void VULKAN_INTERNAL_DestroyFramebuffer(
	VulkanRenderer *renderer,
	VulkanFramebuffer *framebuffer
) {
	renderer->vkDestroyFramebuffer(
		renderer->logicalDevice,
		framebuffer->framebuffer,
		NULL
	);

	SDL_free(framebuffer);
}
/* Evicts from the framebuffer cache every entry whose color attachments
 * include |view|, queueing each evicted framebuffer for destruction.
 * Called when a render-target image view is about to be destroyed.
 * Iterates backwards because removal compacts the hash array. */
static void VULKAN_INTERNAL_RemoveFramebuffersContainingView(
	VulkanRenderer *renderer,
	VkImageView view
) {
	FramebufferHash *hash;
	int32_t i, j;

	SDL_LockMutex(renderer->framebufferFetchLock);

	for (i = renderer->framebufferHashArray.count - 1; i >= 0; i -= 1)
	{
		hash = &renderer->framebufferHashArray.elements[i].key;

		for (j = 0; j < hash->colorAttachmentCount; j += 1)
		{
			if (hash->colorAttachmentViews[j] == view)
			{
				/* FIXME: do we actually need to queue this?
				 * The framebuffer should not be in use once the associated texture is being destroyed
				 */
				VULKAN_INTERNAL_QueueDestroyFramebuffer(
					renderer,
					renderer->framebufferHashArray.elements[i].value
				);

				FramebufferHashArray_Remove(
					&renderer->framebufferHashArray,
					i
				);

				break;
			}
		}
	}

	SDL_UnlockMutex(renderer->framebufferFetchLock);
}
/* Immediately destroys a texture: all slice image views (plus, for
 * render targets, cached framebuffers referencing them and any per-slice
 * MSAA textures, destroyed recursively), the main view, the image, and
 * its memory region. Must only be called when the texture is idle. */
static void VULKAN_INTERNAL_DestroyTexture(
	VulkanRenderer* renderer,
	VulkanTexture* texture
) {
	uint32_t sliceIndex;

	/* Clean up slices */
	for (sliceIndex = 0; sliceIndex < texture->sliceCount; sliceIndex += 1)
	{
		if (texture->isRenderTarget)
		{
			VULKAN_INTERNAL_RemoveFramebuffersContainingView(
				renderer,
				texture->slices[sliceIndex].view
			);

			if (texture->slices[sliceIndex].msaaTex != NULL)
			{
				/* Recursive: MSAA resolve textures are full textures */
				VULKAN_INTERNAL_DestroyTexture(
					renderer,
					texture->slices[sliceIndex].msaaTex
				);
			}
		}

		renderer->vkDestroyImageView(
			renderer->logicalDevice,
			texture->slices[sliceIndex].view,
			NULL
		);
	}

	SDL_free(texture->slices);

	renderer->vkDestroyImageView(
		renderer->logicalDevice,
		texture->view,
		NULL
	);

	renderer->vkDestroyImage(
		renderer->logicalDevice,
		texture->image,
		NULL
	);

	VULKAN_INTERNAL_RemoveMemoryUsedRegion(
		renderer,
		texture->usedRegion
	);

	SDL_free(texture);
}
2020-12-21 23:44:43 +00:00
/* Immediately destroys the VkBuffer, returns its memory region to the
 * allocator, and frees the wrapper struct. Must only be called when the
 * buffer is no longer in use. */
static void VULKAN_INTERNAL_DestroyBuffer(
	VulkanRenderer* renderer,
	VulkanBuffer* buffer
) {
	renderer->vkDestroyBuffer(
		renderer->logicalDevice,
		buffer->buffer,
		NULL
	);

	VULKAN_INTERNAL_RemoveMemoryUsedRegion(
		renderer,
		buffer->usedRegion
	);

	SDL_free(buffer);
}
2021-01-03 02:02:20 +00:00
/* Destroys a command pool and frees all of its inactive command buffer
 * wrappers and their tracking arrays. Destroying the VkCommandPool also
 * frees the VkCommandBuffers allocated from it, so only the host-side
 * structs need explicit cleanup. */
static void VULKAN_INTERNAL_DestroyCommandPool(
	VulkanRenderer *renderer,
	VulkanCommandPool *commandPool
) {
	uint32_t i;
	VulkanCommandBuffer* commandBuffer;

	renderer->vkDestroyCommandPool(
		renderer->logicalDevice,
		commandPool->commandPool,
		NULL
	);

	for (i = 0; i < commandPool->inactiveCommandBufferCount; i += 1)
	{
		commandBuffer = commandPool->inactiveCommandBuffers[i];

		SDL_free(commandBuffer->presentDatas);
		SDL_free(commandBuffer->waitSemaphores);
		SDL_free(commandBuffer->signalSemaphores);
		SDL_free(commandBuffer->boundDescriptorSetDatas);
		SDL_free(commandBuffer->boundComputeBuffers);
		SDL_free(commandBuffer->boundComputeTextureSlices);
		SDL_free(commandBuffer->copiedGpuBuffers);
		SDL_free(commandBuffer->copiedTextureSlices);
		SDL_free(commandBuffer->usedBuffers);
		SDL_free(commandBuffer->usedTextureSlices);
		SDL_free(commandBuffer->usedSamplers);
		SDL_free(commandBuffer->usedGraphicsPipelines);
		SDL_free(commandBuffer->usedComputePipelines);
		SDL_free(commandBuffer->usedFramebuffers);

		SDL_free(commandBuffer);
	}

	SDL_free(commandPool->inactiveCommandBuffers);
	SDL_free(commandPool);
}
2020-12-29 00:28:14 +00:00
/* Destroys the VkPipeline, releases the pipeline's references on its
 * vertex and fragment shader modules, and frees the wrapper struct. */
static void VULKAN_INTERNAL_DestroyGraphicsPipeline(
	VulkanRenderer *renderer,
	VulkanGraphicsPipeline *graphicsPipeline
) {
	renderer->vkDestroyPipeline(
		renderer->logicalDevice,
		graphicsPipeline->pipeline,
		NULL
	);

	SDL_AtomicDecRef(&graphicsPipeline->vertexShaderModule->referenceCount);
	SDL_AtomicDecRef(&graphicsPipeline->fragmentShaderModule->referenceCount);

	SDL_free(graphicsPipeline);
}
2020-12-31 07:02:12 +00:00
/* Destroys the VkPipeline, releases the pipeline's reference on its
 * compute shader module, and frees the wrapper struct. */
static void VULKAN_INTERNAL_DestroyComputePipeline(
	VulkanRenderer *renderer,
	VulkanComputePipeline *computePipeline
) {
	renderer->vkDestroyPipeline(
		renderer->logicalDevice,
		computePipeline->pipeline,
		NULL
	);

	SDL_AtomicDecRef(&computePipeline->computeShaderModule->referenceCount);

	SDL_free(computePipeline);
}
2020-12-29 00:42:51 +00:00
/* Destroys the VkShaderModule and frees the wrapper struct. */
static void VULKAN_INTERNAL_DestroyShaderModule(
	VulkanRenderer *renderer,
	VulkanShaderModule *vulkanShaderModule
) {
	renderer->vkDestroyShaderModule(
		renderer->logicalDevice,
		vulkanShaderModule->shaderModule,
		NULL
	);

	SDL_free(vulkanShaderModule);
}
2020-12-29 00:56:49 +00:00
/* Destroys the VkSampler and frees the wrapper struct. */
static void VULKAN_INTERNAL_DestroySampler(
	VulkanRenderer *renderer,
	VulkanSampler *vulkanSampler
) {
	renderer->vkDestroySampler(
		renderer->logicalDevice,
		vulkanSampler->sampler,
		NULL
	);

	SDL_free(vulkanSampler);
}
/* Tears down a window's swapchain: the per-image views and host-side
 * texture wrappers (the VkImages themselves are owned by the swapchain),
 * then the swapchain, surface, and synchronization semaphores. Safe to
 * call with NULL windowData or with no swapchain present. */
static void VULKAN_INTERNAL_DestroySwapchain(
	VulkanRenderer* renderer,
	WindowData *windowData
) {
	uint32_t i;
	VulkanSwapchainData *swapchainData;

	if (windowData == NULL)
	{
		return;
	}

	swapchainData = windowData->swapchainData;

	if (swapchainData == NULL)
	{
		return;
	}

	for (i = 0; i < swapchainData->imageCount; i += 1)
	{
		/* Swapchain textures have exactly one slice; its view and the
		 * parent view are destroyed, but not the image (swapchain-owned) */
		renderer->vkDestroyImageView(
			renderer->logicalDevice,
			swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->slices[0].view,
			NULL
		);

		SDL_free(swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->slices);

		renderer->vkDestroyImageView(
			renderer->logicalDevice,
			swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture->view,
			NULL
		);

		SDL_free(swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture);
		SDL_free(swapchainData->textureContainers[i].activeTextureHandle);
	}

	SDL_free(swapchainData->textureContainers);

	renderer->vkDestroySwapchainKHR(
		renderer->logicalDevice,
		swapchainData->swapchain,
		NULL
	);

	renderer->vkDestroySurfaceKHR(
		renderer->instance,
		swapchainData->surface,
		NULL
	);

	renderer->vkDestroySemaphore(
		renderer->logicalDevice,
		swapchainData->imageAvailableSemaphore,
		NULL
	);

	renderer->vkDestroySemaphore(
		renderer->logicalDevice,
		swapchainData->renderFinishedSemaphore,
		NULL
	);

	windowData->swapchainData = NULL;
	SDL_free(swapchainData);
}
/* Destroys a descriptor set cache: all of its descriptor pools (which
 * frees the sets allocated from them), the tracking arrays, and the
 * cache's mutex. NULL-safe. */
static void VULKAN_INTERNAL_DestroyDescriptorSetCache(
	VulkanRenderer *renderer,
	DescriptorSetCache *cache
) {
	uint32_t i;

	if (cache == NULL)
	{
		return;
	}

	for (i = 0; i < cache->descriptorPoolCount; i += 1)
	{
		renderer->vkDestroyDescriptorPool(
			renderer->logicalDevice,
			cache->descriptorPools[i],
			NULL
		);
	}

	SDL_free(cache->descriptorPools);
	SDL_free(cache->inactiveDescriptorSets);
	SDL_DestroyMutex(cache->lock);
	SDL_free(cache);
}
/* Descriptor cache stuff */
2020-12-28 23:11:05 +00:00
/* Creates a descriptor pool that can hold |descriptorSetCount| sets
 * totalling |descriptorCount| descriptors of |descriptorType|.
 * Returns 1 on success, 0 on failure (error is logged). */
static uint8_t VULKAN_INTERNAL_CreateDescriptorPool(
	VulkanRenderer *renderer,
	VkDescriptorType descriptorType,
	uint32_t descriptorSetCount,
	uint32_t descriptorCount,
	VkDescriptorPool *pDescriptorPool
) {
	VkResult result;
	VkDescriptorPoolSize poolSize;
	VkDescriptorPoolCreateInfo poolInfo;

	poolSize.type = descriptorType;
	poolSize.descriptorCount = descriptorCount;

	poolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
	poolInfo.pNext = NULL;
	poolInfo.flags = 0;
	poolInfo.maxSets = descriptorSetCount;
	poolInfo.poolSizeCount = 1;
	poolInfo.pPoolSizes = &poolSize;

	result = renderer->vkCreateDescriptorPool(
		renderer->logicalDevice,
		&poolInfo,
		NULL,
		pDescriptorPool
	);

	if (result != VK_SUCCESS)
	{
		LogVulkanResultAsError("vkCreateDescriptorPool", result);
		return 0;
	}

	return 1;
}
2020-12-29 00:56:49 +00:00
/* Allocates |descriptorSetCount| descriptor sets, all with the same
 * layout, from |descriptorPool| into |descriptorSetArray|.
 * Returns 1 on success, 0 on failure (error is logged). */
static uint8_t VULKAN_INTERNAL_AllocateDescriptorSets(
	VulkanRenderer *renderer,
	VkDescriptorPool descriptorPool,
	VkDescriptorSetLayout descriptorSetLayout,
	uint32_t descriptorSetCount,
	VkDescriptorSet *descriptorSetArray
) {
	uint8_t success = 1;
	VkResult result;
	uint32_t i;
	VkDescriptorSetAllocateInfo allocateInfo;

	/* vkAllocateDescriptorSets wants one layout handle per set */
	VkDescriptorSetLayout *layouts = SDL_stack_alloc(VkDescriptorSetLayout, descriptorSetCount);

	for (i = 0; i < descriptorSetCount; i += 1)
	{
		layouts[i] = descriptorSetLayout;
	}

	allocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
	allocateInfo.pNext = NULL;
	allocateInfo.descriptorPool = descriptorPool;
	allocateInfo.descriptorSetCount = descriptorSetCount;
	allocateInfo.pSetLayouts = layouts;

	result = renderer->vkAllocateDescriptorSets(
		renderer->logicalDevice,
		&allocateInfo,
		descriptorSetArray
	);

	if (result != VK_SUCCESS)
	{
		LogVulkanResultAsError("vkAllocateDescriptorSets", result);
		success = 0;
	}

	SDL_stack_free(layouts);
	return success;
}
2020-12-29 00:28:14 +00:00
/* Creates a descriptor set cache seeded with one pool of
 * DESCRIPTOR_POOL_STARTING_SIZE sets (all pre-allocated as inactive).
 * The next pool, if ever needed, will be twice as large.
 * NOTE(review): pool/set creation results are not checked here -
 * presumably failures surface later when sets are fetched. */
static DescriptorSetCache* VULKAN_INTERNAL_CreateDescriptorSetCache(
	VulkanRenderer *renderer,
	VkDescriptorType descriptorType,
	VkDescriptorSetLayout descriptorSetLayout,
	uint32_t bindingCount
) {
	DescriptorSetCache *descriptorSetCache = SDL_malloc(sizeof(DescriptorSetCache));

	descriptorSetCache->lock = SDL_CreateMutex();

	descriptorSetCache->descriptorSetLayout = descriptorSetLayout;
	descriptorSetCache->bindingCount = bindingCount;
	descriptorSetCache->descriptorType = descriptorType;

	descriptorSetCache->descriptorPools = SDL_malloc(sizeof(VkDescriptorPool));
	descriptorSetCache->descriptorPoolCount = 1;
	descriptorSetCache->nextPoolSize = DESCRIPTOR_POOL_STARTING_SIZE * 2;

	VULKAN_INTERNAL_CreateDescriptorPool(
		renderer,
		descriptorType,
		DESCRIPTOR_POOL_STARTING_SIZE,
		DESCRIPTOR_POOL_STARTING_SIZE * bindingCount,
		&descriptorSetCache->descriptorPools[0]
	);

	descriptorSetCache->inactiveDescriptorSetCapacity = DESCRIPTOR_POOL_STARTING_SIZE;
	descriptorSetCache->inactiveDescriptorSetCount = DESCRIPTOR_POOL_STARTING_SIZE;
	descriptorSetCache->inactiveDescriptorSets = SDL_malloc(
		sizeof(VkDescriptorSet) * DESCRIPTOR_POOL_STARTING_SIZE
	);

	VULKAN_INTERNAL_AllocateDescriptorSets(
		renderer,
		descriptorSetCache->descriptorPools[0],
		descriptorSetCache->descriptorSetLayout,
		DESCRIPTOR_POOL_STARTING_SIZE,
		descriptorSetCache->inactiveDescriptorSets
	);

	return descriptorSetCache;
}
2020-12-29 01:35:18 +00:00
/* Returns a descriptor set layout matching (type, bindingCount, stage),
 * creating and caching it on first use. Zero-binding requests resolve to
 * the shared "empty" layouts created at device init. Returns
 * NULL_DESC_LAYOUT on invalid input or Vulkan failure.
 */
static VkDescriptorSetLayout VULKAN_INTERNAL_FetchDescriptorSetLayout(
	VulkanRenderer *renderer,
	VkDescriptorType descriptorType,
	uint32_t bindingCount,
	VkShaderStageFlagBits shaderStageFlagBit
) {
	DescriptorSetLayoutHash descriptorSetLayoutHash;
	VkDescriptorSetLayout descriptorSetLayout;

	VkDescriptorSetLayoutBinding setLayoutBindings[MAX_TEXTURE_SAMPLERS];
	VkDescriptorSetLayoutCreateInfo setLayoutCreateInfo;

	VkResult vulkanResult;
	uint32_t i;

	/* Zero-binding layouts are pre-created singletons, no cache lookup needed. */
	if (bindingCount == 0)
	{
		if (shaderStageFlagBit == VK_SHADER_STAGE_VERTEX_BIT)
		{
			return renderer->emptyVertexSamplerLayout;
		}
		else if (shaderStageFlagBit == VK_SHADER_STAGE_FRAGMENT_BIT)
		{
			return renderer->emptyFragmentSamplerLayout;
		}
		else if (shaderStageFlagBit == VK_SHADER_STAGE_COMPUTE_BIT)
		{
			if (descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
			{
				return renderer->emptyComputeBufferDescriptorSetLayout;
			}
			else if (descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
			{
				return renderer->emptyComputeImageDescriptorSetLayout;
			}
			else
			{
				/* Format string previously lacked the %d specifier,
				 * so the argument was never printed. */
				Refresh_LogError("Invalid descriptor type for compute shader: %d", descriptorType);
				return NULL_DESC_LAYOUT;
			}
		}
		else
		{
			/* Same missing-%d fix as above. */
			Refresh_LogError("Invalid shader stage flag bit: %d", shaderStageFlagBit);
			return NULL_DESC_LAYOUT;
		}
	}

	descriptorSetLayoutHash.descriptorType = descriptorType;
	descriptorSetLayoutHash.bindingCount = bindingCount;
	descriptorSetLayoutHash.stageFlag = shaderStageFlagBit;

	descriptorSetLayout = DescriptorSetLayoutHashTable_Fetch(
		&renderer->descriptorSetLayoutHashTable,
		descriptorSetLayoutHash
	);

	if (descriptorSetLayout != VK_NULL_HANDLE)
	{
		return descriptorSetLayout;
	}

	/* Cache miss: build the layout. Bindings 0..bindingCount-1 all share
	 * the same type and stage. */
	for (i = 0; i < bindingCount; i += 1)
	{
		setLayoutBindings[i].binding = i;
		setLayoutBindings[i].descriptorCount = 1;
		setLayoutBindings[i].descriptorType = descriptorType;
		setLayoutBindings[i].stageFlags = shaderStageFlagBit;
		setLayoutBindings[i].pImmutableSamplers = NULL;
	}

	setLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
	setLayoutCreateInfo.pNext = NULL;
	setLayoutCreateInfo.flags = 0;
	setLayoutCreateInfo.bindingCount = bindingCount;
	setLayoutCreateInfo.pBindings = setLayoutBindings;

	vulkanResult = renderer->vkCreateDescriptorSetLayout(
		renderer->logicalDevice,
		&setLayoutCreateInfo,
		NULL,
		&descriptorSetLayout
	);

	if (vulkanResult != VK_SUCCESS)
	{
		LogVulkanResultAsError("vkCreateDescriptorSetLayout", vulkanResult);
		return NULL_DESC_LAYOUT;
	}

	DescriptorSetLayoutHashTable_Insert(
		&renderer->descriptorSetLayoutHashTable,
		descriptorSetLayoutHash,
		descriptorSetLayout
	);

	return descriptorSetLayout;
}
static VulkanGraphicsPipelineLayout* VULKAN_INTERNAL_FetchGraphicsPipelineLayout(
2020-12-21 23:44:43 +00:00
VulkanRenderer *renderer,
uint32_t vertexSamplerBindingCount,
uint32_t fragmentSamplerBindingCount
2020-12-21 23:44:43 +00:00
) {
VkDescriptorSetLayout setLayouts[4];
2020-12-21 23:44:43 +00:00
GraphicsPipelineLayoutHash pipelineLayoutHash;
VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo;
VkResult vulkanResult;
VulkanGraphicsPipelineLayout *vulkanGraphicsPipelineLayout;
pipelineLayoutHash.vertexSamplerLayout = VULKAN_INTERNAL_FetchDescriptorSetLayout(
renderer,
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
vertexSamplerBindingCount,
VK_SHADER_STAGE_VERTEX_BIT
2020-12-21 23:44:43 +00:00
);
pipelineLayoutHash.fragmentSamplerLayout = VULKAN_INTERNAL_FetchDescriptorSetLayout(
renderer,
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
fragmentSamplerBindingCount,
VK_SHADER_STAGE_FRAGMENT_BIT
);
2020-12-21 23:44:43 +00:00
pipelineLayoutHash.vertexUniformLayout = renderer->vertexUniformDescriptorSetLayout;
pipelineLayoutHash.fragmentUniformLayout = renderer->fragmentUniformDescriptorSetLayout;
vulkanGraphicsPipelineLayout = GraphicsPipelineLayoutHashArray_Fetch(
&renderer->graphicsPipelineLayoutHashTable,
pipelineLayoutHash
2020-12-21 23:44:43 +00:00
);
if (vulkanGraphicsPipelineLayout != NULL)
2020-12-21 23:44:43 +00:00
{
return vulkanGraphicsPipelineLayout;
}
2020-12-21 23:44:43 +00:00
vulkanGraphicsPipelineLayout = SDL_malloc(sizeof(VulkanGraphicsPipelineLayout));
2020-12-21 23:44:43 +00:00
setLayouts[0] = pipelineLayoutHash.vertexSamplerLayout;
setLayouts[1] = pipelineLayoutHash.fragmentSamplerLayout;
setLayouts[2] = renderer->vertexUniformDescriptorSetLayout;
setLayouts[3] = renderer->fragmentUniformDescriptorSetLayout;
2020-12-21 23:44:43 +00:00
pipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
pipelineLayoutCreateInfo.pNext = NULL;
pipelineLayoutCreateInfo.flags = 0;
pipelineLayoutCreateInfo.setLayoutCount = 4;
pipelineLayoutCreateInfo.pSetLayouts = setLayouts;
pipelineLayoutCreateInfo.pushConstantRangeCount = 0;
pipelineLayoutCreateInfo.pPushConstantRanges = NULL;
2020-12-21 23:44:43 +00:00
vulkanResult = renderer->vkCreatePipelineLayout(
renderer->logicalDevice,
&pipelineLayoutCreateInfo,
NULL,
&vulkanGraphicsPipelineLayout->pipelineLayout
2020-12-21 23:44:43 +00:00
);
if (vulkanResult != VK_SUCCESS)
2020-12-21 23:44:43 +00:00
{
LogVulkanResultAsError("vkCreatePipelineLayout", vulkanResult);
return NULL;
}
2020-12-21 23:44:43 +00:00
GraphicsPipelineLayoutHashArray_Insert(
&renderer->graphicsPipelineLayoutHashTable,
pipelineLayoutHash,
vulkanGraphicsPipelineLayout
);
2020-12-21 23:44:43 +00:00
/* If the binding count is 0
* we can just bind the same descriptor set
* so no cache is needed
*/
2020-12-21 23:44:43 +00:00
if (vertexSamplerBindingCount == 0)
{
vulkanGraphicsPipelineLayout->vertexSamplerDescriptorSetCache = NULL;
}
else
{
vulkanGraphicsPipelineLayout->vertexSamplerDescriptorSetCache =
VULKAN_INTERNAL_CreateDescriptorSetCache(
renderer,
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
pipelineLayoutHash.vertexSamplerLayout,
vertexSamplerBindingCount
);
2020-12-21 23:44:43 +00:00
}
if (fragmentSamplerBindingCount == 0)
2020-12-21 23:44:43 +00:00
{
vulkanGraphicsPipelineLayout->fragmentSamplerDescriptorSetCache = NULL;
}
else
{
vulkanGraphicsPipelineLayout->fragmentSamplerDescriptorSetCache =
VULKAN_INTERNAL_CreateDescriptorSetCache(
renderer,
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
pipelineLayoutHash.fragmentSamplerLayout,
fragmentSamplerBindingCount
);
2020-12-21 23:44:43 +00:00
}
return vulkanGraphicsPipelineLayout;
2020-12-21 23:44:43 +00:00
}
/* Data Buffer */
2020-12-21 23:44:43 +00:00
/* Creates a VkBuffer plus backing memory and wraps them in a VulkanBuffer.
 * Returns NULL on failure (nothing is leaked). Ownership: caller frees via
 * the renderer's buffer-destroy path.
 */
static VulkanBuffer* VULKAN_INTERNAL_CreateBuffer(
	VulkanRenderer *renderer,
	VkDeviceSize size,
	VulkanResourceAccessType resourceAccessType,
	VkBufferUsageFlags usage,
	uint8_t requireHostVisible,
	uint8_t preferHostLocal,
	uint8_t preferDeviceLocal,
	uint8_t dedicatedAllocation
) {
	VulkanBuffer* buffer;
	VkResult vulkanResult;
	VkBufferCreateInfo bufferCreateInfo;
	uint8_t bindResult;

	buffer = SDL_malloc(sizeof(VulkanBuffer));

	buffer->size = size;
	buffer->resourceAccessType = resourceAccessType;
	buffer->usage = usage;
	buffer->requireHostVisible = requireHostVisible;
	buffer->preferHostLocal = preferHostLocal;
	buffer->preferDeviceLocal = preferDeviceLocal;
	buffer->markedForDestroy = 0;

	bufferCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
	bufferCreateInfo.pNext = NULL;
	bufferCreateInfo.flags = 0;
	bufferCreateInfo.size = size;
	bufferCreateInfo.usage = usage;
	bufferCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
	bufferCreateInfo.queueFamilyIndexCount = 1;
	bufferCreateInfo.pQueueFamilyIndices = &renderer->queueFamilyIndex;

	vulkanResult = renderer->vkCreateBuffer(
		renderer->logicalDevice,
		&bufferCreateInfo,
		NULL,
		&buffer->buffer
	);

	if (vulkanResult != VK_SUCCESS)
	{
		/* Expanded from VULKAN_ERROR_CHECK so the struct is not leaked
		 * on failure. */
		LogVulkanResultAsError("vkCreateBuffer", vulkanResult);
		SDL_free(buffer);
		return NULL;
	}

	bindResult = VULKAN_INTERNAL_BindMemoryForBuffer(
		renderer,
		buffer->buffer,
		buffer->size,
		buffer->requireHostVisible,
		buffer->preferHostLocal,
		buffer->preferDeviceLocal,
		dedicatedAllocation,
		&buffer->usedRegion
	);

	if (bindResult != 1)
	{
		renderer->vkDestroyBuffer(
			renderer->logicalDevice,
			buffer->buffer,
			NULL);

		/* Previously leaked the VulkanBuffer struct here. */
		SDL_free(buffer);
		return NULL;
	}

	buffer->usedRegion->vulkanBuffer = buffer; /* lol */
	buffer->handle = NULL;

	SDL_AtomicSet(&buffer->referenceCount, 0);

	return buffer;
}
/* Uniform buffer functions */
/* Creates a ring-style uniform buffer object for the given shader stage:
 * one backing buffer plus a single dynamic-offset descriptor set that is
 * written once here and never updated again (only the dynamic offset moves
 * at bind time). Returns NULL on bad type or buffer-creation failure.
 */
static VulkanUniformBufferObject* VULKAN_INTERNAL_CreateUniformBufferObject(
	VulkanRenderer *renderer,
	VulkanUniformBufferType uniformBufferType
) {
	VulkanUniformBufferObject* uniformBufferObject;
	VulkanResourceAccessType resourceAccessType;
	VkDescriptorSetLayout descriptorSetLayout;
	VkWriteDescriptorSet writeDescriptorSet;
	VkDescriptorBufferInfo descriptorBufferInfo;

	/* Validate the type before allocating anything (previously the
	 * object was malloc'd first and leaked on the error path, which
	 * also returned 0 instead of NULL). */
	if (uniformBufferType == UNIFORM_BUFFER_VERTEX)
	{
		resourceAccessType = RESOURCE_ACCESS_VERTEX_SHADER_READ_UNIFORM_BUFFER;
		descriptorSetLayout = renderer->vertexUniformDescriptorSetLayout;
	}
	else if (uniformBufferType == UNIFORM_BUFFER_FRAGMENT)
	{
		resourceAccessType = RESOURCE_ACCESS_FRAGMENT_SHADER_READ_UNIFORM_BUFFER;
		descriptorSetLayout = renderer->fragmentUniformDescriptorSetLayout;
	}
	else if (uniformBufferType == UNIFORM_BUFFER_COMPUTE)
	{
		resourceAccessType = RESOURCE_ACCESS_COMPUTE_SHADER_READ_UNIFORM_BUFFER;
		descriptorSetLayout = renderer->computeUniformDescriptorSetLayout;
	}
	else
	{
		Refresh_LogError("Unrecognized uniform buffer type!");
		return NULL;
	}

	uniformBufferObject = SDL_malloc(sizeof(VulkanUniformBufferObject));

	/* Allocate backing buffer */
	uniformBufferObject->buffer = VULKAN_INTERNAL_CreateBuffer(
		renderer,
		UBO_BUFFER_SIZE,
		resourceAccessType,
		VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT,
		1,
		0,
		1,
		1
	);

	if (uniformBufferObject->buffer == NULL)
	{
		/* Previously unchecked; a failure crashed below when the
		 * descriptor write dereferenced the NULL buffer. */
		Refresh_LogError("Failed to create uniform buffer!");
		SDL_free(uniformBufferObject);
		return NULL;
	}

	uniformBufferObject->lock = SDL_CreateMutex();
	uniformBufferObject->currentOffset = 0;
	uniformBufferObject->type = uniformBufferType;

	/* Allocate a descriptor set for the uniform buffer */
	VULKAN_INTERNAL_AllocateDescriptorSets(
		renderer,
		renderer->defaultDescriptorPool,
		descriptorSetLayout,
		1,
		&uniformBufferObject->descriptorSet
	);

	/* Update the descriptor set for the first and last time! */
	descriptorBufferInfo.buffer = uniformBufferObject->buffer->buffer;
	descriptorBufferInfo.offset = 0;
	descriptorBufferInfo.range = MAX_UBO_SECTION_SIZE;

	writeDescriptorSet.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
	writeDescriptorSet.pNext = NULL;
	writeDescriptorSet.descriptorCount = 1;
	writeDescriptorSet.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
	writeDescriptorSet.dstArrayElement = 0;
	writeDescriptorSet.dstBinding = 0;
	writeDescriptorSet.dstSet = uniformBufferObject->descriptorSet;
	writeDescriptorSet.pBufferInfo = &descriptorBufferInfo;
	writeDescriptorSet.pImageInfo = NULL;
	writeDescriptorSet.pTexelBufferView = NULL;

	renderer->vkUpdateDescriptorSets(
		renderer->logicalDevice,
		1,
		&writeDescriptorSet,
		0,
		NULL
	);

	return uniformBufferObject;
}
/* Indirection so we can cleanly defrag buffers */
/* Wraps a freshly created VulkanBuffer in a handle. The handle indirection
 * lets the defragmenter swap the underlying buffer without invalidating
 * references held elsewhere. Returns NULL if buffer creation fails.
 */
static VulkanBufferHandle* VULKAN_INTERNAL_CreateBufferHandle(
	VulkanRenderer *renderer,
	uint32_t sizeInBytes,
	VulkanResourceAccessType resourceAccessType,
	VkBufferUsageFlags usageFlags,
	uint8_t requireHostVisible,
	uint8_t preferHostLocal,
	uint8_t preferDeviceLocal
) {
	VulkanBufferHandle *handle;
	VulkanBuffer *underlyingBuffer;

	/* always set transfer bits so we can defrag */
	underlyingBuffer = VULKAN_INTERNAL_CreateBuffer(
		renderer,
		sizeInBytes,
		resourceAccessType,
		usageFlags | VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT,
		requireHostVisible,
		preferHostLocal,
		preferDeviceLocal,
		0
	);

	if (underlyingBuffer == NULL)
	{
		Refresh_LogError("Failed to create buffer!");
		return NULL;
	}

	/* Link handle and buffer both ways. */
	handle = SDL_malloc(sizeof(VulkanBufferHandle));
	handle->vulkanBuffer = underlyingBuffer;
	underlyingBuffer->handle = handle;

	return handle;
}
/* Creates a buffer container: the client-facing object that tracks one
 * active buffer handle plus any discarded handles awaiting reuse.
 * Returns NULL if the initial buffer handle cannot be created.
 */
static VulkanBufferContainer* VULKAN_INTERNAL_CreateBufferContainer(
	VulkanRenderer *renderer,
	uint32_t sizeInBytes,
	VulkanResourceAccessType resourceAccessType,
	VkBufferUsageFlags usageFlags,
	uint8_t requireHostVisible,
	uint8_t preferHostLocal,
	uint8_t preferDeviceLocal
) {
	VulkanBufferContainer *container;
	VulkanBufferHandle *initialHandle = VULKAN_INTERNAL_CreateBufferHandle(
		renderer,
		sizeInBytes,
		resourceAccessType,
		usageFlags,
		requireHostVisible,
		preferHostLocal,
		preferDeviceLocal
	);

	if (initialHandle == NULL)
	{
		Refresh_LogError("Failed to create buffer container!");
		return NULL;
	}

	/* The container starts out tracking exactly one handle. */
	container = SDL_malloc(sizeof(VulkanBufferContainer));
	container->activeBufferHandle = initialHandle;
	container->bufferCapacity = 1;
	container->bufferCount = 1;
	container->bufferHandles = SDL_malloc(
		container->bufferCapacity * sizeof(VulkanBufferHandle*)
	);
	container->bufferHandles[0] = initialHandle;

	return container;
}
/* Tears down a uniform buffer object: backing buffer, lock, then the
 * object itself. */
static void VULKAN_INTERNAL_DestroyUniformBufferObject(
	VulkanRenderer *renderer,
	VulkanUniformBufferObject *uniformBufferObject
) {
	VULKAN_INTERNAL_DestroyBuffer(renderer, uniformBufferObject->buffer);
	SDL_DestroyMutex(uniformBufferObject->lock);
	SDL_free(uniformBufferObject);
}
/* Texture Slice Utilities */
/* Maps (layer, level) to a flat slice index; slices are stored
 * layer-major (all mip levels of layer 0 first, then layer 1, ...). */
static uint32_t VULKAN_INTERNAL_GetTextureSliceIndex(
	VulkanTexture *texture,
	uint32_t layer,
	uint32_t level
) {
	return level + (layer * texture->levelCount);
}
/* Returns a pointer to the slice for the given layer and mip level. */
static VulkanTextureSlice* VULKAN_INTERNAL_FetchTextureSlice(
	VulkanTexture *texture,
	uint32_t layer,
	uint32_t level
) {
	uint32_t sliceIndex = VULKAN_INTERNAL_GetTextureSliceIndex(
		texture,
		layer,
		level
	);

	return &texture->slices[sliceIndex];
}
/* Resolves a Refresh-facing texture slice to the backend slice of the
 * container's currently active texture. */
static VulkanTextureSlice* VULKAN_INTERNAL_RefreshToVulkanTextureSlice(
	Refresh_TextureSlice *refreshTextureSlice
) {
	VulkanTextureContainer *container =
		(VulkanTextureContainer*) refreshTextureSlice->texture;

	return VULKAN_INTERNAL_FetchTextureSlice(
		container->activeTextureHandle->vulkanTexture,
		refreshTextureSlice->layer,
		refreshTextureSlice->mipLevel
	);
}
/* Creates a single-layer, single-level image view for one texture slice,
 * suitable for use as a framebuffer attachment. On failure, logs and
 * writes VK_NULL_HANDLE to *pView.
 */
static void VULKAN_INTERNAL_CreateSliceView(
	VulkanRenderer *renderer,
	VulkanTexture *texture,
	uint32_t layer,
	uint32_t level,
	VkImageView *pView
) {
	VkResult vulkanResult;
	VkImageViewCreateInfo imageViewCreateInfo;
	VkComponentMapping swizzle = IDENTITY_SWIZZLE;

	/* create framebuffer compatible views for RenderTarget */
	imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
	imageViewCreateInfo.pNext = NULL;
	imageViewCreateInfo.flags = 0;
	imageViewCreateInfo.image = texture->image;
	imageViewCreateInfo.format = texture->format;
	imageViewCreateInfo.components = swizzle;
	imageViewCreateInfo.subresourceRange.aspectMask = texture->aspectFlags;
	imageViewCreateInfo.subresourceRange.baseMipLevel = level;
	imageViewCreateInfo.subresourceRange.levelCount = 1;
	imageViewCreateInfo.subresourceRange.baseArrayLayer = layer;
	imageViewCreateInfo.subresourceRange.layerCount = 1;
	imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;

	vulkanResult = renderer->vkCreateImageView(
		renderer->logicalDevice,
		&imageViewCreateInfo,
		NULL,
		pView
	);

	if (vulkanResult != VK_SUCCESS)
	{
		LogVulkanResultAsError(
			"vkCreateImageView",
			vulkanResult
		);
		Refresh_LogError("Failed to create color attachment image view");

		/* VK_NULL_HANDLE, not NULL: VkImageView is a non-dispatchable
		 * handle (a 64-bit integer on 32-bit targets), so assigning a
		 * pointer constant does not compile there. */
		*pView = VK_NULL_HANDLE;
		return;
	}
}
/* Swapchain */
2020-12-19 04:08:07 +00:00
/* Queries surface capabilities, formats, and present modes for a
 * physical device / surface pair. Returns 1 on success. On ANY failure
 * return, outputDetails->formatsLength and ->presentModesLength are
 * guaranteed to be 0 with no arrays left allocated, so callers can
 * unconditionally inspect/free based on the lengths.
 */
static uint8_t VULKAN_INTERNAL_QuerySwapChainSupport(
	VulkanRenderer *renderer,
	VkPhysicalDevice physicalDevice,
	VkSurfaceKHR surface,
	SwapChainSupportDetails *outputDetails
) {
	VkResult result;
	VkBool32 supportsPresent;

	/* Initialize these FIRST: previously they were left uninitialized on
	 * the !supportsPresent path and callers read them after failure. */
	outputDetails->formatsLength = 0;
	outputDetails->presentModesLength = 0;

	renderer->vkGetPhysicalDeviceSurfaceSupportKHR(
		physicalDevice,
		renderer->queueFamilyIndex,
		surface,
		&supportsPresent
	);

	if (!supportsPresent)
	{
		Refresh_LogWarn("This surface does not support presenting!");
		return 0;
	}

	/* Run the device surface queries */
	result = renderer->vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
		physicalDevice,
		surface,
		&outputDetails->capabilities
	);
	VULKAN_ERROR_CHECK(result, vkGetPhysicalDeviceSurfaceCapabilitiesKHR, 0)

	if (!(outputDetails->capabilities.supportedCompositeAlpha & VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR))
	{
		Refresh_LogWarn("Opaque presentation unsupported! Expect weird transparency bugs!");
	}

	result = renderer->vkGetPhysicalDeviceSurfaceFormatsKHR(
		physicalDevice,
		surface,
		&outputDetails->formatsLength,
		NULL
	);
	VULKAN_ERROR_CHECK(result, vkGetPhysicalDeviceSurfaceFormatsKHR, 0)

	result = renderer->vkGetPhysicalDeviceSurfacePresentModesKHR(
		physicalDevice,
		surface,
		&outputDetails->presentModesLength,
		NULL
	);
	VULKAN_ERROR_CHECK(result, vkGetPhysicalDeviceSurfacePresentModesKHR, 0)

	/* Generate the arrays, if applicable */
	if (outputDetails->formatsLength != 0)
	{
		outputDetails->formats = (VkSurfaceFormatKHR*) SDL_malloc(
			sizeof(VkSurfaceFormatKHR) * outputDetails->formatsLength
		);

		if (!outputDetails->formats)
		{
			SDL_OutOfMemory();
			outputDetails->formatsLength = 0;
			return 0;
		}

		result = renderer->vkGetPhysicalDeviceSurfaceFormatsKHR(
			physicalDevice,
			surface,
			&outputDetails->formatsLength,
			outputDetails->formats
		);
		if (result != VK_SUCCESS)
		{
			Refresh_LogError(
				"vkGetPhysicalDeviceSurfaceFormatsKHR: %s",
				VkErrorMessages(result)
			);

			/* Zero the length so the caller does not double-free. */
			SDL_free(outputDetails->formats);
			outputDetails->formatsLength = 0;
			return 0;
		}
	}

	if (outputDetails->presentModesLength != 0)
	{
		outputDetails->presentModes = (VkPresentModeKHR*) SDL_malloc(
			sizeof(VkPresentModeKHR) * outputDetails->presentModesLength
		);

		if (!outputDetails->presentModes)
		{
			SDL_OutOfMemory();

			/* Previously leaked the formats array on this path. */
			if (outputDetails->formatsLength > 0)
			{
				SDL_free(outputDetails->formats);
				outputDetails->formatsLength = 0;
			}
			outputDetails->presentModesLength = 0;
			return 0;
		}

		result = renderer->vkGetPhysicalDeviceSurfacePresentModesKHR(
			physicalDevice,
			surface,
			&outputDetails->presentModesLength,
			outputDetails->presentModes
		);
		if (result != VK_SUCCESS)
		{
			Refresh_LogError(
				"vkGetPhysicalDeviceSurfacePresentModesKHR: %s",
				VkErrorMessages(result)
			);

			if (outputDetails->formatsLength > 0)
			{
				SDL_free(outputDetails->formats);
				outputDetails->formatsLength = 0;
			}
			SDL_free(outputDetails->presentModes);
			outputDetails->presentModesLength = 0;
			return 0;
		}
	}

	/* If we made it here, all the queries were successfull. This does NOT
	 * necessarily mean there are any supported formats or present modes!
	 */
	return 1;
}
/* Scans the available surface formats for desiredFormat paired with the
 * sRGB-nonlinear color space. Writes the match to *outputFormat and
 * returns 1, or returns 0 if no such pair exists. */
static uint8_t VULKAN_INTERNAL_ChooseSwapSurfaceFormat(
	VkFormat desiredFormat,
	VkSurfaceFormatKHR *availableFormats,
	uint32_t availableFormatsLength,
	VkSurfaceFormatKHR *outputFormat
) {
	uint32_t idx;

	for (idx = 0; idx < availableFormatsLength; idx += 1)
	{
		VkSurfaceFormatKHR candidate = availableFormats[idx];

		if (candidate.format != desiredFormat)
		{
			continue;
		}
		if (candidate.colorSpace != VK_COLOR_SPACE_SRGB_NONLINEAR_KHR)
		{
			continue;
		}

		*outputFormat = candidate;
		return 1;
	}

	return 0;
}
2020-12-29 06:19:46 +00:00
/* Translates a Refresh present mode to the matching Vulkan present mode
 * if the device advertises it; otherwise falls back to FIFO, which the
 * spec guarantees is always available. Returns 0 only on an unrecognized
 * Refresh mode. */
static uint8_t VULKAN_INTERNAL_ChooseSwapPresentMode(
	Refresh_PresentMode desiredPresentInterval,
	VkPresentModeKHR *availablePresentModes,
	uint32_t availablePresentModesLength,
	VkPresentModeKHR *outputPresentMode
) {
	uint32_t i;
	VkPresentModeKHR desiredMode;

	if (desiredPresentInterval == REFRESH_PRESENTMODE_IMMEDIATE)
	{
		desiredMode = VK_PRESENT_MODE_IMMEDIATE_KHR;
	}
	else if (desiredPresentInterval == REFRESH_PRESENTMODE_MAILBOX)
	{
		desiredMode = VK_PRESENT_MODE_MAILBOX_KHR;
	}
	else if (desiredPresentInterval == REFRESH_PRESENTMODE_FIFO)
	{
		desiredMode = VK_PRESENT_MODE_FIFO_KHR;
	}
	else if (desiredPresentInterval == REFRESH_PRESENTMODE_FIFO_RELAXED)
	{
		desiredMode = VK_PRESENT_MODE_FIFO_RELAXED_KHR;
	}
	else
	{
		Refresh_LogError(
			"Unrecognized PresentInterval: %d",
			desiredPresentInterval
		);
		return 0;
	}

	for (i = 0; i < availablePresentModesLength; i += 1)
	{
		if (availablePresentModes[i] == desiredMode)
		{
			*outputPresentMode = desiredMode;
			return 1;
		}
	}

	/* Requested mode unavailable; FIFO is mandatory in Vulkan. */
	*outputPresentMode = VK_PRESENT_MODE_FIFO_KHR;
	return 1;
}
/* Creates a surface, swapchain, per-image texture wrappers, and the
 * acquire/present semaphores for a window, storing the result in
 * windowData->swapchainData. Returns 1 on success, 0 on failure or
 * minimized window (zero-sized extent, which is not an error).
 *
 * Cleanup is consolidated with goto labels; this also fixes two leaks in
 * the previous version: the VkSwapchainKHR was not destroyed when
 * textureContainers allocation or image view creation failed, and
 * per-image allocations made before the failing image view were leaked.
 */
static uint8_t VULKAN_INTERNAL_CreateSwapchain(
	VulkanRenderer *renderer,
	WindowData *windowData
) {
	VkResult vulkanResult;
	VulkanSwapchainData *swapchainData;
	VkSwapchainCreateInfoKHR swapchainCreateInfo;
	VkImage *swapchainImages;
	VkImageViewCreateInfo imageViewCreateInfo;
	VkSemaphoreCreateInfo semaphoreCreateInfo;
	SwapChainSupportDetails swapchainSupportDetails;
	int32_t drawableWidth, drawableHeight;
	uint32_t i, j;
	VulkanTexture *texture;

	swapchainData = SDL_malloc(sizeof(VulkanSwapchainData));
	swapchainData->submissionsInFlight = 0;

	/* Each swapchain must have its own surface. */
	if (!SDL_Vulkan_CreateSurface(
		(SDL_Window*) windowData->windowHandle,
		renderer->instance,
		&swapchainData->surface
	)) {
		SDL_free(swapchainData);
		Refresh_LogError(
			"SDL_Vulkan_CreateSurface failed: %s",
			SDL_GetError()
		);
		return 0;
	}

	if (!VULKAN_INTERNAL_QuerySwapChainSupport(
		renderer,
		renderer->physicalDevice,
		swapchainData->surface,
		&swapchainSupportDetails
	)) {
		Refresh_LogError("Device does not support swap chain creation");
		goto error_destroy_surface;
	}

	if (	swapchainSupportDetails.capabilities.currentExtent.width == 0 ||
		swapchainSupportDetails.capabilities.currentExtent.height == 0)
	{
		/* Not an error, just minimize behavior! */
		goto error_destroy_surface;
	}

	/* Prefer RGBA8; fall back to BGRA8 via a component swizzle. */
	swapchainData->swapchainFormat = VK_FORMAT_R8G8B8A8_UNORM;
	swapchainData->swapchainSwizzle.r = VK_COMPONENT_SWIZZLE_IDENTITY;
	swapchainData->swapchainSwizzle.g = VK_COMPONENT_SWIZZLE_IDENTITY;
	swapchainData->swapchainSwizzle.b = VK_COMPONENT_SWIZZLE_IDENTITY;
	swapchainData->swapchainSwizzle.a = VK_COMPONENT_SWIZZLE_IDENTITY;

	if (!VULKAN_INTERNAL_ChooseSwapSurfaceFormat(
		swapchainData->swapchainFormat,
		swapchainSupportDetails.formats,
		swapchainSupportDetails.formatsLength,
		&swapchainData->surfaceFormat
	)) {
		swapchainData->swapchainFormat = VK_FORMAT_B8G8R8A8_UNORM;
		swapchainData->swapchainSwizzle.r = VK_COMPONENT_SWIZZLE_B;
		swapchainData->swapchainSwizzle.g = VK_COMPONENT_SWIZZLE_G;
		swapchainData->swapchainSwizzle.b = VK_COMPONENT_SWIZZLE_R;
		swapchainData->swapchainSwizzle.a = VK_COMPONENT_SWIZZLE_A;

		if (!VULKAN_INTERNAL_ChooseSwapSurfaceFormat(
			swapchainData->swapchainFormat,
			swapchainSupportDetails.formats,
			swapchainSupportDetails.formatsLength,
			&swapchainData->surfaceFormat
		)) {
			Refresh_LogError("Device does not support swap chain format");
			goto error_destroy_surface;
		}
	}

	if (!VULKAN_INTERNAL_ChooseSwapPresentMode(
		windowData->preferredPresentMode,
		swapchainSupportDetails.presentModes,
		swapchainSupportDetails.presentModesLength,
		&swapchainData->presentMode
	)) {
		Refresh_LogError("Device does not support swap chain present mode");
		goto error_destroy_surface;
	}

	SDL_Vulkan_GetDrawableSize(
		(SDL_Window*) windowData->windowHandle,
		&drawableWidth,
		&drawableHeight
	);

	if (	drawableWidth < swapchainSupportDetails.capabilities.minImageExtent.width ||
		drawableWidth > swapchainSupportDetails.capabilities.maxImageExtent.width ||
		drawableHeight < swapchainSupportDetails.capabilities.minImageExtent.height ||
		drawableHeight > swapchainSupportDetails.capabilities.maxImageExtent.height )
	{
		if (swapchainSupportDetails.capabilities.currentExtent.width != UINT32_MAX)
		{
			drawableWidth = VULKAN_INTERNAL_clamp(
				drawableWidth,
				swapchainSupportDetails.capabilities.minImageExtent.width,
				swapchainSupportDetails.capabilities.maxImageExtent.width
			);
			drawableHeight = VULKAN_INTERNAL_clamp(
				drawableHeight,
				swapchainSupportDetails.capabilities.minImageExtent.height,
				swapchainSupportDetails.capabilities.maxImageExtent.height
			);
		}
		else
		{
			Refresh_LogError("No fallback swapchain size available!");
			goto error_destroy_surface;
		}
	}

	swapchainData->extent.width = drawableWidth;
	swapchainData->extent.height = drawableHeight;

	swapchainData->imageCount = swapchainSupportDetails.capabilities.minImageCount + 1;

	if (	swapchainSupportDetails.capabilities.maxImageCount > 0 &&
		swapchainData->imageCount > swapchainSupportDetails.capabilities.maxImageCount )
	{
		swapchainData->imageCount = swapchainSupportDetails.capabilities.maxImageCount;
	}

	if (swapchainData->presentMode == VK_PRESENT_MODE_MAILBOX_KHR)
	{
		/* Required for proper triple-buffering.
		 * Note that this is below the above maxImageCount check!
		 * If the driver advertises MAILBOX but does not support 3 swap
		 * images, it's not real mailbox support, so let it fail hard.
		 * -flibit
		 */
		swapchainData->imageCount = SDL_max(swapchainData->imageCount, 3);
	}

	swapchainCreateInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
	swapchainCreateInfo.pNext = NULL;
	swapchainCreateInfo.flags = 0;
	swapchainCreateInfo.surface = swapchainData->surface;
	swapchainCreateInfo.minImageCount = swapchainData->imageCount;
	swapchainCreateInfo.imageFormat = swapchainData->surfaceFormat.format;
	swapchainCreateInfo.imageColorSpace = swapchainData->surfaceFormat.colorSpace;
	swapchainCreateInfo.imageExtent = swapchainData->extent;
	swapchainCreateInfo.imageArrayLayers = 1;
	swapchainCreateInfo.imageUsage =
		VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
		VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
		VK_IMAGE_USAGE_TRANSFER_DST_BIT;
	swapchainCreateInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
	swapchainCreateInfo.queueFamilyIndexCount = 0;
	swapchainCreateInfo.pQueueFamilyIndices = NULL;
	swapchainCreateInfo.preTransform = swapchainSupportDetails.capabilities.currentTransform;
	swapchainCreateInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
	swapchainCreateInfo.presentMode = swapchainData->presentMode;
	swapchainCreateInfo.clipped = VK_TRUE;
	swapchainCreateInfo.oldSwapchain = VK_NULL_HANDLE;

	vulkanResult = renderer->vkCreateSwapchainKHR(
		renderer->logicalDevice,
		&swapchainCreateInfo,
		NULL,
		&swapchainData->swapchain
	);

	/* The support query arrays are no longer needed either way.
	 * Zero the lengths so the shared error path does not double-free. */
	if (swapchainSupportDetails.formatsLength > 0)
	{
		SDL_free(swapchainSupportDetails.formats);
		swapchainSupportDetails.formatsLength = 0;
	}
	if (swapchainSupportDetails.presentModesLength > 0)
	{
		SDL_free(swapchainSupportDetails.presentModes);
		swapchainSupportDetails.presentModesLength = 0;
	}

	if (vulkanResult != VK_SUCCESS)
	{
		LogVulkanResultAsError("vkCreateSwapchainKHR", vulkanResult);
		goto error_destroy_surface;
	}

	renderer->vkGetSwapchainImagesKHR(
		renderer->logicalDevice,
		swapchainData->swapchain,
		&swapchainData->imageCount,
		NULL
	);

	swapchainData->textureContainers = SDL_malloc(
		sizeof(VulkanTextureContainer) * swapchainData->imageCount
	);

	if (!swapchainData->textureContainers)
	{
		SDL_OutOfMemory();
		goto error_destroy_swapchain;
	}

	swapchainImages = SDL_stack_alloc(VkImage, swapchainData->imageCount);

	renderer->vkGetSwapchainImagesKHR(
		renderer->logicalDevice,
		swapchainData->swapchain,
		&swapchainData->imageCount,
		swapchainImages
	);

	imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
	imageViewCreateInfo.pNext = NULL;
	imageViewCreateInfo.flags = 0;
	imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
	imageViewCreateInfo.format = swapchainData->surfaceFormat.format;
	imageViewCreateInfo.components = swapchainData->swapchainSwizzle;
	imageViewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
	imageViewCreateInfo.subresourceRange.baseMipLevel = 0;
	imageViewCreateInfo.subresourceRange.levelCount = 1;
	imageViewCreateInfo.subresourceRange.baseArrayLayer = 0;
	imageViewCreateInfo.subresourceRange.layerCount = 1;

	for (i = 0; i < swapchainData->imageCount; i += 1)
	{
		texture = SDL_malloc(sizeof(VulkanTexture));

		swapchainData->textureContainers[i].canBeDiscarded = 0;
		swapchainData->textureContainers[i].textureCapacity = 0;
		swapchainData->textureContainers[i].textureCount = 0;
		swapchainData->textureContainers[i].textureHandles = NULL;
		swapchainData->textureContainers[i].activeTextureHandle = SDL_malloc(sizeof(VulkanTextureHandle));
		swapchainData->textureContainers[i].activeTextureHandle->vulkanTexture = texture;

		texture->image = swapchainImages[i];

		imageViewCreateInfo.image = swapchainImages[i];

		vulkanResult = renderer->vkCreateImageView(
			renderer->logicalDevice,
			&imageViewCreateInfo,
			NULL,
			&texture->view
		);

		if (vulkanResult != VK_SUCCESS)
		{
			LogVulkanResultAsError("vkCreateImageView", vulkanResult);

			/* Unwind the partial entry, then every complete one. */
			SDL_free(texture);
			SDL_free(swapchainData->textureContainers[i].activeTextureHandle);

			for (j = 0; j < i; j += 1)
			{
				texture = swapchainData->textureContainers[j].activeTextureHandle->vulkanTexture;
				renderer->vkDestroyImageView(
					renderer->logicalDevice,
					texture->slices[0].view,
					NULL
				);
				renderer->vkDestroyImageView(
					renderer->logicalDevice,
					texture->view,
					NULL
				);
				SDL_free(texture->slices);
				SDL_free(texture);
				SDL_free(swapchainData->textureContainers[j].activeTextureHandle);
			}

			SDL_stack_free(swapchainImages);
			SDL_free(swapchainData->textureContainers);
			goto error_destroy_swapchain;
		}

		/* Swapchain memory is managed by the driver */
		texture->usedRegion = NULL;

		texture->dimensions = swapchainData->extent;
		texture->format = swapchainData->swapchainFormat;
		texture->is3D = 0;
		texture->isCube = 0;
		texture->isRenderTarget = 1;
		texture->depth = 1;
		texture->layerCount = 1;
		texture->levelCount = 1;
		texture->sampleCount = VK_SAMPLE_COUNT_1_BIT;
		texture->usageFlags =
			VK_IMAGE_USAGE_TRANSFER_DST_BIT |
			VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
		texture->aspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;

		/* Create slice */
		texture->sliceCount = 1;
		texture->slices = SDL_malloc(sizeof(VulkanTextureSlice));
		texture->slices[0].parent = texture;
		texture->slices[0].layer = 0;
		texture->slices[0].level = 0;
		texture->slices[0].resourceAccessType = RESOURCE_ACCESS_NONE;
		texture->slices[0].msaaTex = NULL;
		VULKAN_INTERNAL_CreateSliceView(
			renderer,
			texture,
			0,
			0,
			&texture->slices[0].view
		);
		SDL_AtomicSet(&texture->slices[0].referenceCount, 0);
	}

	SDL_stack_free(swapchainImages);

	semaphoreCreateInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
	semaphoreCreateInfo.pNext = NULL;
	semaphoreCreateInfo.flags = 0;

	renderer->vkCreateSemaphore(
		renderer->logicalDevice,
		&semaphoreCreateInfo,
		NULL,
		&swapchainData->imageAvailableSemaphore
	);

	renderer->vkCreateSemaphore(
		renderer->logicalDevice,
		&semaphoreCreateInfo,
		NULL,
		&swapchainData->renderFinishedSemaphore
	);

	windowData->swapchainData = swapchainData;
	return 1;

error_destroy_swapchain:
	/* Previously leaked: the swapchain was never destroyed on the
	 * post-creation failure paths. */
	renderer->vkDestroySwapchainKHR(
		renderer->logicalDevice,
		swapchainData->swapchain,
		NULL
	);

error_destroy_surface:
	renderer->vkDestroySurfaceKHR(
		renderer->instance,
		swapchainData->surface,
		NULL
	);
	if (swapchainSupportDetails.formatsLength > 0)
	{
		SDL_free(swapchainSupportDetails.formats);
	}
	if (swapchainSupportDetails.presentModesLength > 0)
	{
		SDL_free(swapchainSupportDetails.presentModes);
	}
	SDL_free(swapchainData);
	return 0;
}
/* Tears down and rebuilds a window's swapchain (e.g. after a resize or
 * surface property change). The device is drained first so no in-flight
 * command buffer still references the swapchain being destroyed.
 */
static void VULKAN_INTERNAL_RecreateSwapchain(
	VulkanRenderer* renderer,
	WindowData *windowData
) {
	Refresh_Renderer *driverData = (Refresh_Renderer*) renderer;

	VULKAN_Wait(driverData);

	VULKAN_INTERNAL_DestroySwapchain(renderer, windowData);
	VULKAN_INTERNAL_CreateSwapchain(renderer, windowData);
}
/* Command Buffers */
/* Puts a command buffer into the recording state.
 *
 * Command buffers are recycled through a pool and fully re-recorded each
 * submission, so they are flagged ONE_TIME_SUBMIT to let the driver skip
 * re-submission bookkeeping. Failure is logged but not propagated.
 *
 * FIX: removed a dead store -- `beginInfo.flags` was assigned 0 and then
 * immediately overwritten with VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT.
 */
static void VULKAN_INTERNAL_BeginCommandBuffer(
	VulkanRenderer *renderer,
	VulkanCommandBuffer *commandBuffer
) {
	VkCommandBufferBeginInfo beginInfo;
	VkResult result;

	beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
	beginInfo.pNext = NULL;
	beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
	beginInfo.pInheritanceInfo = NULL; /* primary command buffers only */

	result = renderer->vkBeginCommandBuffer(
		commandBuffer->commandBuffer,
		&beginInfo
	);

	if (result != VK_SUCCESS)
	{
		LogVulkanResultAsError("vkBeginCommandBuffer", result);
	}
}
2020-12-20 09:29:15 +00:00
/* Finishes recording on a command buffer, logging any error.
 * Counterpart to VULKAN_INTERNAL_BeginCommandBuffer.
 */
static void VULKAN_INTERNAL_EndCommandBuffer(
	VulkanRenderer* renderer,
	VulkanCommandBuffer *commandBuffer
) {
	VkResult endResult = renderer->vkEndCommandBuffer(
		commandBuffer->commandBuffer
	);

	if (endResult != VK_SUCCESS)
	{
		LogVulkanResultAsError("vkEndCommandBuffer", endResult);
	}
}
/* Destroys the entire Vulkan device and every resource the renderer owns.
 *
 * Teardown order matters: all GPU work is drained first, then objects are
 * destroyed roughly in reverse order of creation -- swapchains/windows,
 * command machinery, pipeline layouts and descriptor caches, descriptor set
 * layouts, uniform buffers, framebuffers, render passes, and finally the
 * memory allocator, device, and instance.
 */
static void VULKAN_DestroyDevice(
	Refresh_Device *device
) {
	VulkanRenderer* renderer = (VulkanRenderer*) device->driverData;
	CommandPoolHashArray commandPoolHashArray;
	GraphicsPipelineLayoutHashArray graphicsPipelineLayoutHashArray;
	ComputePipelineLayoutHashArray computePipelineLayoutHashArray;
	VulkanMemorySubAllocator *allocator;
	int32_t i, j, k;

	/* Nothing may be in flight while we tear down. */
	VULKAN_Wait(device->driverData);

	/* Unclaim in reverse so array compaction inside UnclaimWindow is safe. */
	for (i = renderer->claimedWindowCount - 1; i >= 0; i -= 1)
	{
		VULKAN_UnclaimWindow(device->driverData, renderer->claimedWindows[i]->windowHandle);
	}

	SDL_free(renderer->claimedWindows);

	/* Unclaiming may have submitted work (e.g. swapchain teardown); drain again. */
	VULKAN_Wait(device->driverData);

	SDL_free(renderer->submittedCommandBuffers);

	/* Fence pool: destroy each pooled fence, then the pool's storage/lock. */
	for (i = 0; i < renderer->fencePool.availableFenceCount; i += 1)
	{
		renderer->vkDestroyFence(renderer->logicalDevice, renderer->fencePool.availableFences[i], NULL);
	}

	SDL_free(renderer->fencePool.availableFences);
	SDL_DestroyMutex(renderer->fencePool.lock);

	/* Command pools (and the command buffers they own), bucket by bucket. */
	for (i = 0; i < NUM_COMMAND_POOL_BUCKETS; i += 1)
	{
		commandPoolHashArray = renderer->commandPoolHashTable.buckets[i];
		for (j = 0; j < commandPoolHashArray.count; j += 1)
		{
			VULKAN_INTERNAL_DestroyCommandPool(
				renderer,
				commandPoolHashArray.elements[j].value
			);
		}

		if (commandPoolHashArray.elements != NULL)
		{
			SDL_free(commandPoolHashArray.elements);
		}
	}

	/* Graphics and compute pipeline layouts share the same bucket count, so
	 * both tables are walked in one loop. Each layout owns descriptor set
	 * caches that must be destroyed before the layout itself. */
	for (i = 0; i < NUM_PIPELINE_LAYOUT_BUCKETS; i += 1)
	{
		graphicsPipelineLayoutHashArray = renderer->graphicsPipelineLayoutHashTable.buckets[i];
		for (j = 0; j < graphicsPipelineLayoutHashArray.count; j += 1)
		{
			VULKAN_INTERNAL_DestroyDescriptorSetCache(
				renderer,
				graphicsPipelineLayoutHashArray.elements[j].value->vertexSamplerDescriptorSetCache
			);

			VULKAN_INTERNAL_DestroyDescriptorSetCache(
				renderer,
				graphicsPipelineLayoutHashArray.elements[j].value->fragmentSamplerDescriptorSetCache
			);

			renderer->vkDestroyPipelineLayout(
				renderer->logicalDevice,
				graphicsPipelineLayoutHashArray.elements[j].value->pipelineLayout,
				NULL
			);

			SDL_free(graphicsPipelineLayoutHashArray.elements[j].value);
		}

		if (graphicsPipelineLayoutHashArray.elements != NULL)
		{
			SDL_free(graphicsPipelineLayoutHashArray.elements);
		}

		computePipelineLayoutHashArray = renderer->computePipelineLayoutHashTable.buckets[i];
		for (j = 0; j < computePipelineLayoutHashArray.count; j += 1)
		{
			VULKAN_INTERNAL_DestroyDescriptorSetCache(
				renderer,
				computePipelineLayoutHashArray.elements[j].value->bufferDescriptorSetCache
			);

			VULKAN_INTERNAL_DestroyDescriptorSetCache(
				renderer,
				computePipelineLayoutHashArray.elements[j].value->imageDescriptorSetCache
			);

			renderer->vkDestroyPipelineLayout(
				renderer->logicalDevice,
				computePipelineLayoutHashArray.elements[j].value->pipelineLayout,
				NULL
			);

			SDL_free(computePipelineLayoutHashArray.elements[j].value);
		}

		if (computePipelineLayoutHashArray.elements != NULL)
		{
			SDL_free(computePipelineLayoutHashArray.elements);
		}
	}

	renderer->vkDestroyDescriptorPool(
		renderer->logicalDevice,
		renderer->defaultDescriptorPool,
		NULL
	);

	/* Cached descriptor set layouts. */
	for (i = 0; i < NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS; i += 1)
	{
		for (j = 0; j < renderer->descriptorSetLayoutHashTable.buckets[i].count; j += 1)
		{
			renderer->vkDestroyDescriptorSetLayout(
				renderer->logicalDevice,
				renderer->descriptorSetLayoutHashTable.buckets[i].elements[j].value,
				NULL
			);
		}

		SDL_free(renderer->descriptorSetLayoutHashTable.buckets[i].elements);
	}

	/* Built-in layouts created at device init (empty placeholders + uniforms). */
	renderer->vkDestroyDescriptorSetLayout(
		renderer->logicalDevice,
		renderer->emptyVertexSamplerLayout,
		NULL
	);

	renderer->vkDestroyDescriptorSetLayout(
		renderer->logicalDevice,
		renderer->emptyFragmentSamplerLayout,
		NULL
	);

	renderer->vkDestroyDescriptorSetLayout(
		renderer->logicalDevice,
		renderer->emptyComputeBufferDescriptorSetLayout,
		NULL
	);

	renderer->vkDestroyDescriptorSetLayout(
		renderer->logicalDevice,
		renderer->emptyComputeImageDescriptorSetLayout,
		NULL
	);

	renderer->vkDestroyDescriptorSetLayout(
		renderer->logicalDevice,
		renderer->vertexUniformDescriptorSetLayout,
		NULL
	);

	renderer->vkDestroyDescriptorSetLayout(
		renderer->logicalDevice,
		renderer->fragmentUniformDescriptorSetLayout,
		NULL
	);

	renderer->vkDestroyDescriptorSetLayout(
		renderer->logicalDevice,
		renderer->computeUniformDescriptorSetLayout,
		NULL
	);

	VULKAN_INTERNAL_DestroyUniformBufferObject(renderer, renderer->vertexUniformBufferObject);
	VULKAN_INTERNAL_DestroyUniformBufferObject(renderer, renderer->fragmentUniformBufferObject);
	VULKAN_INTERNAL_DestroyUniformBufferObject(renderer, renderer->computeUniformBufferObject);

	/* Cached framebuffers, then cached render passes. */
	for (i = 0; i < renderer->framebufferHashArray.count; i += 1)
	{
		VULKAN_INTERNAL_DestroyFramebuffer(
			renderer,
			renderer->framebufferHashArray.elements[i].value
		);
	}

	SDL_free(renderer->framebufferHashArray.elements);

	for (i = 0; i < renderer->renderPassHashArray.count; i += 1)
	{
		renderer->vkDestroyRenderPass(
			renderer->logicalDevice,
			renderer->renderPassHashArray.elements[i].value,
			NULL
		);
	}

	SDL_free(renderer->renderPassHashArray.elements);

	/* Memory allocator: release every used region, then every allocation,
	 * walking backwards because removal compacts the arrays. */
	for (i = 0; i < VK_MAX_MEMORY_TYPES; i += 1)
	{
		allocator = &renderer->memoryAllocator->subAllocators[i];

		for (j = allocator->allocationCount - 1; j >= 0; j -= 1)
		{
			for (k = allocator->allocations[j]->usedRegionCount - 1; k >= 0; k -= 1)
			{
				VULKAN_INTERNAL_RemoveMemoryUsedRegion(
					renderer,
					allocator->allocations[j]->usedRegions[k]
				);
			}

			VULKAN_INTERNAL_DeallocateMemory(
				renderer,
				allocator,
				j
			);
		}

		if (renderer->memoryAllocator->subAllocators[i].allocations != NULL)
		{
			SDL_free(renderer->memoryAllocator->subAllocators[i].allocations);
		}

		SDL_free(renderer->memoryAllocator->subAllocators[i].sortedFreeRegions);
	}

	SDL_free(renderer->memoryAllocator);

	/* Deferred-destroy queues (should already be drained by the waits above). */
	SDL_free(renderer->texturesToDestroy);
	SDL_free(renderer->buffersToDestroy);
	SDL_free(renderer->graphicsPipelinesToDestroy);
	SDL_free(renderer->computePipelinesToDestroy);
	SDL_free(renderer->shaderModulesToDestroy);
	SDL_free(renderer->samplersToDestroy);
	SDL_free(renderer->framebuffersToDestroy);

	SDL_DestroyMutex(renderer->allocatorLock);
	SDL_DestroyMutex(renderer->disposeLock);
	SDL_DestroyMutex(renderer->submitLock);
	SDL_DestroyMutex(renderer->acquireCommandBufferLock);
	SDL_DestroyMutex(renderer->renderPassFetchLock);
	SDL_DestroyMutex(renderer->framebufferFetchLock);

	renderer->vkDestroyDevice(renderer->logicalDevice, NULL);
	renderer->vkDestroyInstance(renderer->instance, NULL);

	SDL_free(renderer);
	SDL_free(device);
}
/* Records an indexed, instanced draw into the given command buffer.
 *
 * primitiveCount is converted to an index count using the bound pipeline's
 * primitive topology. The four descriptor sets required by the graphics
 * pipeline layout are rebound on every draw, with the command buffer's
 * current uniform offsets supplied as dynamic offsets.
 */
static void VULKAN_DrawInstancedPrimitives(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	uint32_t baseVertex,
	uint32_t startIndex,
	uint32_t primitiveCount,
	uint32_t instanceCount
) {
	VulkanRenderer *vulkanRenderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *cmdbuf = (VulkanCommandBuffer*) commandBuffer;
	VkDescriptorSet boundSets[4];
	uint32_t uniformOffsets[2];
	uint32_t indexCount;

	/* Set order is fixed by the pipeline layout:
	 * vertex samplers, fragment samplers, vertex uniforms, fragment uniforms.
	 */
	boundSets[0] = cmdbuf->vertexSamplerDescriptorSet;
	boundSets[1] = cmdbuf->fragmentSamplerDescriptorSet;
	boundSets[2] = vulkanRenderer->vertexUniformBufferObject->descriptorSet;
	boundSets[3] = vulkanRenderer->fragmentUniformBufferObject->descriptorSet;

	/* Dynamic offsets select the live region of the shared uniform buffers. */
	uniformOffsets[0] = cmdbuf->vertexUniformOffset;
	uniformOffsets[1] = cmdbuf->fragmentUniformOffset;

	vulkanRenderer->vkCmdBindDescriptorSets(
		cmdbuf->commandBuffer,
		VK_PIPELINE_BIND_POINT_GRAPHICS,
		cmdbuf->currentGraphicsPipeline->pipelineLayout->pipelineLayout,
		0,
		4,
		boundSets,
		2,
		uniformOffsets
	);

	indexCount = PrimitiveVerts(
		cmdbuf->currentGraphicsPipeline->primitiveType,
		primitiveCount
	);

	vulkanRenderer->vkCmdDrawIndexed(
		cmdbuf->commandBuffer,
		indexCount,
		instanceCount,
		startIndex,
		baseVertex,
		0
	);
}
2020-12-27 23:20:59 +00:00
/* Records a non-instanced indexed draw.
 * Implemented as an instanced draw with exactly one instance.
 */
static void VULKAN_DrawIndexedPrimitives(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	uint32_t baseVertex,
	uint32_t startIndex,
	uint32_t primitiveCount
) {
	const uint32_t singleInstance = 1;

	VULKAN_DrawInstancedPrimitives(
		driverData,
		commandBuffer,
		baseVertex,
		startIndex,
		primitiveCount,
		singleInstance
	);
}
/* Records a non-indexed, non-instanced draw into the given command buffer.
 *
 * primitiveCount is converted to a vertex count using the bound pipeline's
 * primitive topology. All four pipeline-layout descriptor sets are rebound
 * with the command buffer's current dynamic uniform offsets.
 */
static void VULKAN_DrawPrimitives(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	uint32_t vertexStart,
	uint32_t primitiveCount
) {
	VulkanRenderer *vulkanRenderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *cmdbuf = (VulkanCommandBuffer*) commandBuffer;
	VkDescriptorSet boundSets[4];
	uint32_t uniformOffsets[2];
	uint32_t vertexCount;

	/* Set order is fixed by the pipeline layout:
	 * vertex samplers, fragment samplers, vertex uniforms, fragment uniforms.
	 */
	boundSets[0] = cmdbuf->vertexSamplerDescriptorSet;
	boundSets[1] = cmdbuf->fragmentSamplerDescriptorSet;
	boundSets[2] = vulkanRenderer->vertexUniformBufferObject->descriptorSet;
	boundSets[3] = vulkanRenderer->fragmentUniformBufferObject->descriptorSet;

	/* Dynamic offsets select the live region of the shared uniform buffers. */
	uniformOffsets[0] = cmdbuf->vertexUniformOffset;
	uniformOffsets[1] = cmdbuf->fragmentUniformOffset;

	vulkanRenderer->vkCmdBindDescriptorSets(
		cmdbuf->commandBuffer,
		VK_PIPELINE_BIND_POINT_GRAPHICS,
		cmdbuf->currentGraphicsPipeline->pipelineLayout->pipelineLayout,
		0,
		4,
		boundSets,
		2,
		uniformOffsets
	);

	vertexCount = PrimitiveVerts(
		cmdbuf->currentGraphicsPipeline->primitiveType,
		primitiveCount
	);

	vulkanRenderer->vkCmdDraw(
		cmdbuf->commandBuffer,
		vertexCount,
		1,
		vertexStart,
		0
	);
}
/* Records an indirect (GPU-driven) non-indexed draw.
 *
 * Draw parameters are read by the GPU from gpuBuffer starting at
 * offsetInBytes: drawCount argument structures, `stride` bytes apart.
 * The argument buffer is tracked so it stays alive until this command
 * buffer finishes executing.
 */
static void VULKAN_DrawPrimitivesIndirect(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	Refresh_GpuBuffer *gpuBuffer,
	uint32_t offsetInBytes,
	uint32_t drawCount,
	uint32_t stride
) {
	VulkanRenderer *vulkanRenderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *cmdbuf = (VulkanCommandBuffer*) commandBuffer;
	VulkanBufferContainer *container = (VulkanBufferContainer*) gpuBuffer;
	VulkanBuffer *argumentBuffer = container->activeBufferHandle->vulkanBuffer;
	VkDescriptorSet boundSets[4];
	uint32_t uniformOffsets[2];

	/* Set order is fixed by the pipeline layout:
	 * vertex samplers, fragment samplers, vertex uniforms, fragment uniforms.
	 */
	boundSets[0] = cmdbuf->vertexSamplerDescriptorSet;
	boundSets[1] = cmdbuf->fragmentSamplerDescriptorSet;
	boundSets[2] = vulkanRenderer->vertexUniformBufferObject->descriptorSet;
	boundSets[3] = vulkanRenderer->fragmentUniformBufferObject->descriptorSet;

	uniformOffsets[0] = cmdbuf->vertexUniformOffset;
	uniformOffsets[1] = cmdbuf->fragmentUniformOffset;

	vulkanRenderer->vkCmdBindDescriptorSets(
		cmdbuf->commandBuffer,
		VK_PIPELINE_BIND_POINT_GRAPHICS,
		cmdbuf->currentGraphicsPipeline->pipelineLayout->pipelineLayout,
		0,
		4,
		boundSets,
		2,
		uniformOffsets
	);

	vulkanRenderer->vkCmdDrawIndirect(
		cmdbuf->commandBuffer,
		argumentBuffer->buffer,
		offsetInBytes,
		drawCount,
		stride
	);

	/* Keep the argument buffer referenced until GPU execution completes. */
	VULKAN_INTERNAL_TrackBuffer(vulkanRenderer, cmdbuf, argumentBuffer);
}
/* Allocates a VulkanTexture: VkImage + bound memory + full-resource view,
 * plus one view (and state) per (layer, level) slice.
 *
 * depth > 1 selects a 3D image; isCube creates a cube-compatible image.
 * sampleCount only becomes the image's real sample count when isMsaaTexture
 * is set or the format is a depth format; otherwise the image is created
 * single-sampled and MSAA color rendering goes through per-slice dedicated
 * msaaTex textures created below (recursively, with isMsaaTexture = 1).
 *
 * Returns the new texture, or NULL on failure.
 *
 * FIX: all three failure paths previously leaked the SDL_malloc'd texture
 * struct; the image-view failure path additionally leaked the VkImage and
 * its bound memory region. Each path now unwinds what it created.
 */
static VulkanTexture* VULKAN_INTERNAL_CreateTexture(
	VulkanRenderer *renderer,
	uint32_t width,
	uint32_t height,
	uint32_t depth,
	uint32_t isCube,
	uint32_t layerCount,
	uint32_t levelCount,
	VkSampleCountFlagBits sampleCount,
	VkFormat format,
	VkImageAspectFlags aspectMask,
	VkImageUsageFlags imageUsageFlags,
	uint8_t isMsaaTexture
) {
	VkResult vulkanResult;
	VkImageCreateInfo imageCreateInfo;
	VkImageCreateFlags imageCreateFlags = 0;
	VkImageViewCreateInfo imageViewCreateInfo;
	uint8_t bindResult;
	uint8_t is3D = depth > 1 ? 1 : 0;
	uint8_t isRenderTarget =
		((imageUsageFlags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) != 0) ||
		((imageUsageFlags & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) != 0);
	VkComponentMapping swizzle = IDENTITY_SWIZZLE;
	uint32_t i, j, sliceIndex;
	VulkanTexture *texture = SDL_malloc(sizeof(VulkanTexture));

	texture->isCube = 0;
	texture->is3D = 0;
	texture->isRenderTarget = isRenderTarget;
	texture->markedForDestroy = 0;

	if (isCube)
	{
		imageCreateFlags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
		texture->isCube = 1;
	}
	else if (is3D)
	{
		/* Allows 2D-array-style views of individual depth slices. */
		imageCreateFlags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT;
		texture->is3D = 1;
	}

	imageCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
	imageCreateInfo.pNext = NULL;
	imageCreateInfo.flags = imageCreateFlags;
	imageCreateInfo.imageType = is3D ? VK_IMAGE_TYPE_3D : VK_IMAGE_TYPE_2D;
	imageCreateInfo.format = format;
	imageCreateInfo.extent.width = width;
	imageCreateInfo.extent.height = height;
	imageCreateInfo.extent.depth = depth;
	imageCreateInfo.mipLevels = levelCount;
	imageCreateInfo.arrayLayers = layerCount;
	/* Color MSAA is implemented via dedicated per-slice textures, so only
	 * those (and depth textures) get a real multisample count here. */
	imageCreateInfo.samples = isMsaaTexture || IsDepthFormat(format) ? sampleCount : VK_SAMPLE_COUNT_1_BIT;
	imageCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
	imageCreateInfo.usage = imageUsageFlags;
	imageCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
	imageCreateInfo.queueFamilyIndexCount = 0;
	imageCreateInfo.pQueueFamilyIndices = NULL;
	imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;

	vulkanResult = renderer->vkCreateImage(
		renderer->logicalDevice,
		&imageCreateInfo,
		NULL,
		&texture->image
	);

	if (vulkanResult != VK_SUCCESS)
	{
		/* FIX: free the struct instead of leaking it. */
		SDL_free(texture);
		LogVulkanResultAsError("vkCreateImage", vulkanResult);
		return NULL;
	}

	bindResult = VULKAN_INTERNAL_BindMemoryForImage(
		renderer,
		texture->image,
		isMsaaTexture, /* bind MSAA texture as dedicated alloc so we don't have to track it in defrag */
		&texture->usedRegion
	);

	if (bindResult != 1)
	{
		renderer->vkDestroyImage(
			renderer->logicalDevice,
			texture->image,
			NULL);

		/* FIX: free the struct instead of leaking it. */
		SDL_free(texture);
		Refresh_LogError("Unable to bind memory for texture!");
		return NULL;
	}

	texture->usedRegion->vulkanTexture = texture; /* lol */

	imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
	imageViewCreateInfo.pNext = NULL;
	imageViewCreateInfo.flags = 0;
	imageViewCreateInfo.image = texture->image;
	imageViewCreateInfo.format = format;
	imageViewCreateInfo.components = swizzle;
	imageViewCreateInfo.subresourceRange.aspectMask = aspectMask;
	imageViewCreateInfo.subresourceRange.baseMipLevel = 0;
	imageViewCreateInfo.subresourceRange.levelCount = levelCount;
	imageViewCreateInfo.subresourceRange.baseArrayLayer = 0;
	imageViewCreateInfo.subresourceRange.layerCount = layerCount;

	if (isCube)
	{
		imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_CUBE;
	}
	else if (is3D)
	{
		imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_3D;
	}
	else if (layerCount > 1)
	{
		imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D_ARRAY;
	}
	else
	{
		imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
	}

	vulkanResult = renderer->vkCreateImageView(
		renderer->logicalDevice,
		&imageViewCreateInfo,
		NULL,
		&texture->view
	);

	if (vulkanResult != VK_SUCCESS)
	{
		LogVulkanResultAsError("vkCreateImageView", vulkanResult);
		Refresh_LogError("Failed to create texture image view");

		/* FIX: unwind the image, its memory region, and the struct, all of
		 * which previously leaked. NOTE(review): assumes RemoveMemoryUsedRegion
		 * is callable outside device teardown, as in VULKAN_DestroyDevice --
		 * confirm its locking requirements. */
		renderer->vkDestroyImage(
			renderer->logicalDevice,
			texture->image,
			NULL
		);
		VULKAN_INTERNAL_RemoveMemoryUsedRegion(
			renderer,
			texture->usedRegion
		);
		SDL_free(texture);
		return NULL;
	}

	texture->dimensions.width = width;
	texture->dimensions.height = height;
	texture->depth = depth;
	texture->format = format;
	texture->levelCount = levelCount;
	texture->layerCount = layerCount;
	texture->sampleCount = sampleCount;
	texture->usageFlags = imageUsageFlags;
	texture->aspectFlags = aspectMask;

	/* Define slices: one per (layer, level) pair. */
	texture->sliceCount =
		texture->layerCount *
		texture->levelCount;

	texture->slices = SDL_malloc(
		texture->sliceCount * sizeof(VulkanTextureSlice)
	);

	for (i = 0; i < texture->layerCount; i += 1)
	{
		for (j = 0; j < texture->levelCount; j += 1)
		{
			sliceIndex = VULKAN_INTERNAL_GetTextureSliceIndex(
				texture,
				i,
				j
			);

			VULKAN_INTERNAL_CreateSliceView(
				renderer,
				texture,
				i,
				j,
				&texture->slices[sliceIndex].view
			);

			texture->slices[sliceIndex].parent = texture;
			texture->slices[sliceIndex].layer = i;
			texture->slices[sliceIndex].level = j;
			texture->slices[sliceIndex].resourceAccessType = RESOURCE_ACCESS_NONE;
			texture->slices[sliceIndex].msaaTex = NULL;
			SDL_AtomicSet(&texture->slices[sliceIndex].referenceCount, 0);

			if (
				sampleCount > VK_SAMPLE_COUNT_1_BIT &&
				isRenderTarget &&
				!IsDepthFormat(texture->format) &&
				!isMsaaTexture
			) {
				/* Dedicated multisample texture for this slice; the main
				 * image acts as the resolve target. */
				texture->slices[sliceIndex].msaaTex = VULKAN_INTERNAL_CreateTexture(
					renderer,
					texture->dimensions.width,
					texture->dimensions.height,
					1,
					0,
					1,
					1,
					sampleCount,
					texture->format,
					aspectMask,
					imageUsageFlags,
					1
				);
			}
		}
	}

	return texture;
}
/* Creates a texture and wraps it in a handle so containers can swap the
 * underlying texture (discard-and-replace) without invalidating references.
 * Parameters are forwarded verbatim to VULKAN_INTERNAL_CreateTexture.
 * Returns NULL if texture creation fails.
 */
static VulkanTextureHandle* VULKAN_INTERNAL_CreateTextureHandle(
	VulkanRenderer *renderer,
	uint32_t width,
	uint32_t height,
	uint32_t depth,
	uint32_t isCube,
	uint32_t layerCount,
	uint32_t levelCount,
	VkSampleCountFlagBits sampleCount,
	VkFormat format,
	VkImageAspectFlags aspectMask,
	VkImageUsageFlags imageUsageFlags,
	uint8_t isMsaaTexture
) {
	VulkanTexture *newTexture = VULKAN_INTERNAL_CreateTexture(
		renderer,
		width,
		height,
		depth,
		isCube,
		layerCount,
		levelCount,
		sampleCount,
		format,
		aspectMask,
		imageUsageFlags,
		isMsaaTexture
	);
	VulkanTextureHandle *handle;

	if (newTexture == NULL)
	{
		Refresh_LogError("Failed to create texture!");
		return NULL;
	}

	/* Link handle and texture both ways. */
	handle = SDL_malloc(sizeof(VulkanTextureHandle));
	handle->vulkanTexture = newTexture;
	newTexture->handle = handle;

	return handle;
}
/* Retires the container's active buffer and points it at one that is safe
 * to write: either a previously-discarded buffer the GPU no longer
 * references, or a freshly created one with identical properties.
 */
static void VULKAN_INTERNAL_DiscardActiveBuffer(
	VulkanRenderer *renderer,
	VulkanBufferContainer *bufferContainer
) {
	VulkanBuffer *retiredBuffer;
	uint32_t slot;

	/* Prefer recycling: any handle with a zero reference count is idle. */
	for (slot = 0; slot < bufferContainer->bufferCount; slot += 1)
	{
		VulkanBufferHandle *candidate = bufferContainer->bufferHandles[slot];

		if (SDL_AtomicGet(&candidate->vulkanBuffer->referenceCount) == 0)
		{
			bufferContainer->activeBufferHandle = candidate;
			return;
		}
	}

	/* Nothing idle; clone the retiring buffer's properties into a new one. */
	retiredBuffer = bufferContainer->activeBufferHandle->vulkanBuffer;
	bufferContainer->activeBufferHandle = VULKAN_INTERNAL_CreateBufferHandle(
		renderer,
		retiredBuffer->size,
		RESOURCE_ACCESS_NONE,
		retiredBuffer->usage,
		retiredBuffer->requireHostVisible,
		retiredBuffer->preferHostLocal,
		retiredBuffer->preferDeviceLocal
	);

	EXPAND_ARRAY_IF_NEEDED(
		bufferContainer->bufferHandles,
		VulkanBufferHandle*,
		bufferContainer->bufferCount + 1,
		bufferContainer->bufferCapacity,
		bufferContainer->bufferCapacity * 2
	);

	bufferContainer->bufferHandles[bufferContainer->bufferCount] =
		bufferContainer->activeBufferHandle;
	bufferContainer->bufferCount += 1;
}
/* Retires the container's active texture and points it at one that is safe
 * to write: either a previously-discarded texture whose slices are all
 * unreferenced, or a freshly created one with identical properties.
 */
static void VULKAN_INTERNAL_DiscardActiveTexture(
	VulkanRenderer *renderer,
	VulkanTextureContainer *textureContainer
) {
	VulkanTexture *retiredTexture;
	uint32_t handleIndex, sliceIndex;

	/* Prefer recycling: a texture is idle only when every slice's
	 * reference count is zero. */
	for (handleIndex = 0; handleIndex < textureContainer->textureCount; handleIndex += 1)
	{
		VulkanTextureHandle *candidate = textureContainer->textureHandles[handleIndex];
		int32_t liveReferences = 0;

		for (sliceIndex = 0; sliceIndex < candidate->vulkanTexture->sliceCount; sliceIndex += 1)
		{
			liveReferences += SDL_AtomicGet(&candidate->vulkanTexture->slices[sliceIndex].referenceCount);
		}

		if (liveReferences == 0)
		{
			textureContainer->activeTextureHandle = candidate;
			return;
		}
	}

	/* Nothing idle; clone the retiring texture's properties into a new one. */
	retiredTexture = textureContainer->activeTextureHandle->vulkanTexture;
	textureContainer->activeTextureHandle = VULKAN_INTERNAL_CreateTextureHandle(
		renderer,
		retiredTexture->dimensions.width,
		retiredTexture->dimensions.height,
		retiredTexture->depth,
		retiredTexture->isCube,
		retiredTexture->layerCount,
		retiredTexture->levelCount,
		retiredTexture->sampleCount,
		retiredTexture->format,
		retiredTexture->aspectFlags,
		retiredTexture->usageFlags,
		0
	);

	EXPAND_ARRAY_IF_NEEDED(
		textureContainer->textureHandles,
		VulkanTextureHandle*,
		textureContainer->textureCount + 1,
		textureContainer->textureCapacity,
		textureContainer->textureCapacity * 2
	);

	textureContainer->textureHandles[textureContainer->textureCount] =
		textureContainer->activeTextureHandle;
	textureContainer->textureCount += 1;
}
/* Builds a VkRenderPass matching the given attachment set.
 *
 * Each MSAA color attachment contributes two descriptions (a single-sample
 * resolve attachment followed by the multisample attachment); non-MSAA color
 * attachments and the optional depth-stencil attachment contribute one each.
 * Resolve textures are always stored; other store ops follow the caller.
 * commandBuffer is currently unused but kept for interface compatibility.
 *
 * Returns VK_NULL_HANDLE on failure.
 *
 * FIXES:
 * - `texture` was read uninitialized when called with zero color attachments
 *   and no depth-stencil attachment; it is now initialized to NULL and no
 *   longer consulted after the loops.
 * - pResolveAttachments was previously enabled based on the *last-seen*
 *   texture's sample count -- which is the depth texture when one is present
 *   (depth is not resolvable here) -- instead of whether any color resolve
 *   references were actually emitted. It is now driven by
 *   resolveReferenceCount.
 * NOTE(review): mixing MSAA and non-MSAA color attachments would still
 * misalign the resolve-reference array; pipelines require a uniform sample
 * count across color targets, so this should be unreachable -- confirm.
 */
static VkRenderPass VULKAN_INTERNAL_CreateRenderPass(
	VulkanRenderer *renderer,
	VulkanCommandBuffer *commandBuffer,
	Refresh_ColorAttachmentInfo *colorAttachmentInfos,
	uint32_t colorAttachmentCount,
	Refresh_DepthStencilAttachmentInfo *depthStencilAttachmentInfo
) {
	VkResult vulkanResult;
	VkAttachmentDescription attachmentDescriptions[2 * MAX_COLOR_TARGET_BINDINGS + 1];
	VkAttachmentReference colorAttachmentReferences[MAX_COLOR_TARGET_BINDINGS];
	VkAttachmentReference resolveReferences[MAX_COLOR_TARGET_BINDINGS + 1];
	VkAttachmentReference depthStencilAttachmentReference;
	VkRenderPassCreateInfo renderPassCreateInfo;
	VkSubpassDescription subpass;
	VkRenderPass renderPass;
	uint32_t i;

	uint32_t attachmentDescriptionCount = 0;
	uint32_t colorAttachmentReferenceCount = 0;
	uint32_t resolveReferenceCount = 0;
	VulkanTexture *texture = NULL;

	for (i = 0; i < colorAttachmentCount; i += 1)
	{
		texture = ((VulkanTextureContainer*) colorAttachmentInfos[i].textureSlice.texture)->activeTextureHandle->vulkanTexture;

		if (texture->sampleCount > VK_SAMPLE_COUNT_1_BIT)
		{
			/* Resolve attachment (single-sample) first... */
			attachmentDescriptions[attachmentDescriptionCount].flags = 0;
			attachmentDescriptions[attachmentDescriptionCount].format = texture->format;
			attachmentDescriptions[attachmentDescriptionCount].samples =
				VK_SAMPLE_COUNT_1_BIT;
			attachmentDescriptions[attachmentDescriptionCount].loadOp = RefreshToVK_LoadOp[
				colorAttachmentInfos[i].loadOp
			];
			attachmentDescriptions[attachmentDescriptionCount].storeOp =
				VK_ATTACHMENT_STORE_OP_STORE; /* Always store the resolve texture */
			attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp =
				VK_ATTACHMENT_LOAD_OP_DONT_CARE;
			attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp =
				VK_ATTACHMENT_STORE_OP_DONT_CARE;
			attachmentDescriptions[attachmentDescriptionCount].initialLayout =
				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
			attachmentDescriptions[attachmentDescriptionCount].finalLayout =
				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

			resolveReferences[resolveReferenceCount].attachment =
				attachmentDescriptionCount;
			resolveReferences[resolveReferenceCount].layout =
				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

			attachmentDescriptionCount += 1;
			resolveReferenceCount += 1;

			/* ...then the multisample attachment itself. */
			attachmentDescriptions[attachmentDescriptionCount].flags = 0;
			attachmentDescriptions[attachmentDescriptionCount].format = texture->format;
			attachmentDescriptions[attachmentDescriptionCount].samples = texture->sampleCount;
			attachmentDescriptions[attachmentDescriptionCount].loadOp = RefreshToVK_LoadOp[
				colorAttachmentInfos[i].loadOp
			];
			attachmentDescriptions[attachmentDescriptionCount].storeOp = RefreshToVK_StoreOp[
				colorAttachmentInfos[i].storeOp
			];
			attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp =
				VK_ATTACHMENT_LOAD_OP_DONT_CARE;
			attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp =
				VK_ATTACHMENT_STORE_OP_DONT_CARE;
			attachmentDescriptions[attachmentDescriptionCount].initialLayout =
				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
			attachmentDescriptions[attachmentDescriptionCount].finalLayout =
				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

			colorAttachmentReferences[colorAttachmentReferenceCount].attachment =
				attachmentDescriptionCount;
			colorAttachmentReferences[colorAttachmentReferenceCount].layout =
				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

			attachmentDescriptionCount += 1;
			colorAttachmentReferenceCount += 1;
		}
		else
		{
			attachmentDescriptions[attachmentDescriptionCount].flags = 0;
			attachmentDescriptions[attachmentDescriptionCount].format = texture->format;
			attachmentDescriptions[attachmentDescriptionCount].samples =
				VK_SAMPLE_COUNT_1_BIT;
			attachmentDescriptions[attachmentDescriptionCount].loadOp = RefreshToVK_LoadOp[
				colorAttachmentInfos[i].loadOp
			];
			attachmentDescriptions[attachmentDescriptionCount].storeOp =
				VK_ATTACHMENT_STORE_OP_STORE; /* Always store non-MSAA textures */
			attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp =
				VK_ATTACHMENT_LOAD_OP_DONT_CARE;
			attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp =
				VK_ATTACHMENT_STORE_OP_DONT_CARE;
			attachmentDescriptions[attachmentDescriptionCount].initialLayout =
				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
			attachmentDescriptions[attachmentDescriptionCount].finalLayout =
				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

			colorAttachmentReferences[colorAttachmentReferenceCount].attachment = attachmentDescriptionCount;
			colorAttachmentReferences[colorAttachmentReferenceCount].layout =
				VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;

			attachmentDescriptionCount += 1;
			colorAttachmentReferenceCount += 1;
		}
	}

	subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
	subpass.flags = 0;
	subpass.inputAttachmentCount = 0;
	subpass.pInputAttachments = NULL;
	subpass.colorAttachmentCount = colorAttachmentCount;
	subpass.pColorAttachments = colorAttachmentReferences;
	subpass.preserveAttachmentCount = 0;
	subpass.pPreserveAttachments = NULL;

	if (depthStencilAttachmentInfo == NULL)
	{
		subpass.pDepthStencilAttachment = NULL;
	}
	else
	{
		texture = ((VulkanTextureContainer*) depthStencilAttachmentInfo->textureSlice.texture)->activeTextureHandle->vulkanTexture;

		attachmentDescriptions[attachmentDescriptionCount].flags = 0;
		attachmentDescriptions[attachmentDescriptionCount].format = texture->format;
		attachmentDescriptions[attachmentDescriptionCount].samples = texture->sampleCount;
		attachmentDescriptions[attachmentDescriptionCount].loadOp = RefreshToVK_LoadOp[
			depthStencilAttachmentInfo->loadOp
		];
		attachmentDescriptions[attachmentDescriptionCount].storeOp = RefreshToVK_StoreOp[
			depthStencilAttachmentInfo->storeOp
		];
		attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = RefreshToVK_LoadOp[
			depthStencilAttachmentInfo->stencilLoadOp
		];
		attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = RefreshToVK_StoreOp[
			depthStencilAttachmentInfo->stencilStoreOp
		];
		attachmentDescriptions[attachmentDescriptionCount].initialLayout =
			VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
		attachmentDescriptions[attachmentDescriptionCount].finalLayout =
			VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

		depthStencilAttachmentReference.attachment =
			attachmentDescriptionCount;
		depthStencilAttachmentReference.layout =
			VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

		subpass.pDepthStencilAttachment =
			&depthStencilAttachmentReference;

		attachmentDescriptionCount += 1;
	}

	/* FIX: gate the resolve array on whether resolve references were emitted,
	 * not on the sample count of the last texture examined above. */
	if (resolveReferenceCount > 0)
	{
		subpass.pResolveAttachments = resolveReferences;
	}
	else
	{
		subpass.pResolveAttachments = NULL;
	}

	renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
	renderPassCreateInfo.pNext = NULL;
	renderPassCreateInfo.flags = 0;
	renderPassCreateInfo.pAttachments = attachmentDescriptions;
	renderPassCreateInfo.attachmentCount = attachmentDescriptionCount;
	renderPassCreateInfo.subpassCount = 1;
	renderPassCreateInfo.pSubpasses = &subpass;
	renderPassCreateInfo.dependencyCount = 0;
	renderPassCreateInfo.pDependencies = NULL;

	vulkanResult = renderer->vkCreateRenderPass(
		renderer->logicalDevice,
		&renderPassCreateInfo,
		NULL,
		&renderPass
	);

	if (vulkanResult != VK_SUCCESS)
	{
		renderPass = VK_NULL_HANDLE;
		LogVulkanResultAsError("vkCreateRenderPass", vulkanResult);
	}

	return renderPass;
}
static VkRenderPass VULKAN_INTERNAL_CreateTransientRenderPass(
VulkanRenderer *renderer,
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. 
Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
Refresh_GraphicsPipelineAttachmentInfo attachmentInfo,
VkSampleCountFlagBits sampleCount
) {
VkAttachmentDescription attachmentDescriptions[2 * MAX_COLOR_TARGET_BINDINGS + 1];
VkAttachmentReference colorAttachmentReferences[MAX_COLOR_TARGET_BINDINGS];
VkAttachmentReference resolveReferences[MAX_COLOR_TARGET_BINDINGS + 1];
VkAttachmentReference depthStencilAttachmentReference;
Refresh_ColorAttachmentDescription attachmentDescription;
2022-02-25 21:42:11 +00:00
VkSubpassDescription subpass;
VkRenderPassCreateInfo renderPassCreateInfo;
2022-02-25 21:42:11 +00:00
VkRenderPass renderPass;
VkResult result;
uint32_t multisampling = 0;
2022-02-25 21:42:11 +00:00
uint32_t attachmentDescriptionCount = 0;
uint32_t colorAttachmentReferenceCount = 0;
uint32_t resolveReferenceCount = 0;
uint32_t i;
for (i = 0; i < attachmentInfo.colorAttachmentCount; i += 1)
{
attachmentDescription = attachmentInfo.colorAttachmentDescriptions[i];
if (sampleCount > VK_SAMPLE_COUNT_1_BIT)
{
multisampling = 1;
/* Resolve attachment and multisample attachment */
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
attachmentDescriptions[attachmentDescriptionCount].format = RefreshToVK_SurfaceFormat[
attachmentDescription.format
];
attachmentDescriptions[attachmentDescriptionCount].samples = VK_SAMPLE_COUNT_1_BIT;
attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
resolveReferences[resolveReferenceCount].attachment = attachmentDescriptionCount;
resolveReferences[resolveReferenceCount].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptionCount += 1;
resolveReferenceCount += 1;
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
attachmentDescriptions[attachmentDescriptionCount].format = RefreshToVK_SurfaceFormat[
attachmentDescription.format
];
attachmentDescriptions[attachmentDescriptionCount].samples = sampleCount;
attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
colorAttachmentReferences[colorAttachmentReferenceCount].attachment =
2022-02-25 21:42:11 +00:00
attachmentDescriptionCount;
colorAttachmentReferences[colorAttachmentReferenceCount].layout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2022-02-25 21:42:11 +00:00
attachmentDescriptionCount += 1;
colorAttachmentReferenceCount += 1;
}
else
2022-02-25 21:42:11 +00:00
{
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
attachmentDescriptions[attachmentDescriptionCount].format = RefreshToVK_SurfaceFormat[
attachmentDescription.format
];
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].samples =
VK_SAMPLE_COUNT_1_BIT;
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].loadOp =
VK_ATTACHMENT_LOAD_OP_DONT_CARE;
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].storeOp =
VK_ATTACHMENT_STORE_OP_DONT_CARE;
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp =
VK_ATTACHMENT_LOAD_OP_DONT_CARE;
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp =
VK_ATTACHMENT_STORE_OP_DONT_CARE;
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].initialLayout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].finalLayout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2022-02-25 21:42:11 +00:00
colorAttachmentReferences[colorAttachmentReferenceCount].attachment = attachmentDescriptionCount;
colorAttachmentReferences[colorAttachmentReferenceCount].layout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2022-02-25 21:42:11 +00:00
attachmentDescriptionCount += 1;
colorAttachmentReferenceCount += 1;
}
}
subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
subpass.flags = 0;
subpass.inputAttachmentCount = 0;
subpass.pInputAttachments = NULL;
subpass.colorAttachmentCount = attachmentInfo.colorAttachmentCount;
subpass.pColorAttachments = colorAttachmentReferences;
subpass.preserveAttachmentCount = 0;
subpass.pPreserveAttachments = NULL;
if (attachmentInfo.hasDepthStencilAttachment)
{
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
2022-06-17 07:41:27 +00:00
attachmentDescriptions[attachmentDescriptionCount].format = RefreshToVK_DepthFormat(
renderer,
attachmentInfo.depthStencilFormat
2022-06-17 07:41:27 +00:00
);
attachmentDescriptions[attachmentDescriptionCount].samples = sampleCount;
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].initialLayout =
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
attachmentDescriptions[attachmentDescriptionCount].finalLayout =
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
depthStencilAttachmentReference.attachment =
attachmentDescriptionCount;
depthStencilAttachmentReference.layout =
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
subpass.pDepthStencilAttachment =
&depthStencilAttachmentReference;
attachmentDescriptionCount += 1;
}
else
{
subpass.pDepthStencilAttachment = NULL;
}
if (multisampling)
{
subpass.pResolveAttachments = resolveReferences;
}
else
{
subpass.pResolveAttachments = NULL;
}
2022-02-25 21:42:11 +00:00
renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
renderPassCreateInfo.pNext = NULL;
renderPassCreateInfo.flags = 0;
renderPassCreateInfo.pAttachments = attachmentDescriptions;
renderPassCreateInfo.attachmentCount = attachmentDescriptionCount;
renderPassCreateInfo.subpassCount = 1;
renderPassCreateInfo.pSubpasses = &subpass;
renderPassCreateInfo.dependencyCount = 0;
renderPassCreateInfo.pDependencies = NULL;
result = renderer->vkCreateRenderPass(
renderer->logicalDevice,
&renderPassCreateInfo,
NULL,
&renderPass
);
if (result != VK_SUCCESS)
{
renderPass = VK_NULL_HANDLE;
LogVulkanResultAsError("vkCreateRenderPass", result);
}
return renderPass;
}
2021-01-05 23:00:51 +00:00
/* Creates a Vulkan graphics pipeline from a Refresh pipeline description.
 *
 * driverData: the VulkanRenderer.
 * pipelineCreateInfo: Refresh-level description of shaders, vertex input,
 *	rasterizer/depth/blend state, and attachment formats.
 *
 * Returns the new pipeline, or NULL on failure. On success the vertex and
 * fragment shader modules each gain a reference; on failure no references
 * are retained and nothing is leaked.
 */
static Refresh_GraphicsPipeline* VULKAN_CreateGraphicsPipeline(
	Refresh_Renderer *driverData,
	Refresh_GraphicsPipelineCreateInfo *pipelineCreateInfo
) {
	VkResult vulkanResult;
	uint32_t i;
	VkSampleCountFlagBits actualSampleCount;
	VkRenderPass transientRenderPass;

	VulkanGraphicsPipeline *graphicsPipeline = (VulkanGraphicsPipeline*) SDL_malloc(sizeof(VulkanGraphicsPipeline));

	VkGraphicsPipelineCreateInfo vkPipelineCreateInfo;

	VkPipelineShaderStageCreateInfo shaderStageCreateInfos[2];

	VkPipelineVertexInputStateCreateInfo vertexInputStateCreateInfo;
	VkVertexInputBindingDescription *vertexInputBindingDescriptions = SDL_stack_alloc(VkVertexInputBindingDescription, pipelineCreateInfo->vertexInputState.vertexBindingCount);
	VkVertexInputAttributeDescription *vertexInputAttributeDescriptions = SDL_stack_alloc(VkVertexInputAttributeDescription, pipelineCreateInfo->vertexInputState.vertexAttributeCount);

	VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateCreateInfo;
	VkPipelineViewportStateCreateInfo viewportStateCreateInfo;
	VkPipelineRasterizationStateCreateInfo rasterizationStateCreateInfo;
	VkPipelineMultisampleStateCreateInfo multisampleStateCreateInfo;
	VkPipelineDepthStencilStateCreateInfo depthStencilStateCreateInfo;
	VkStencilOpState frontStencilState;
	VkStencilOpState backStencilState;
	VkPipelineColorBlendStateCreateInfo colorBlendStateCreateInfo;
	VkPipelineColorBlendAttachmentState *colorBlendAttachmentStates = SDL_stack_alloc(
		VkPipelineColorBlendAttachmentState,
		pipelineCreateInfo->attachmentInfo.colorAttachmentCount
	);

	static const VkDynamicState dynamicStates[] =
	{
		VK_DYNAMIC_STATE_VIEWPORT,
		VK_DYNAMIC_STATE_SCISSOR
	};
	VkPipelineDynamicStateCreateInfo dynamicStateCreateInfo;

	VulkanRenderer *renderer = (VulkanRenderer*) driverData;

	/* Find a compatible sample count to use
	 * (the requested count is clamped to what the device supports)
	 */
	actualSampleCount = VULKAN_INTERNAL_GetMaxMultiSampleCount(
		renderer,
		RefreshToVK_SampleCount[pipelineCreateInfo->multisampleState.multisampleCount]
	);

	/* Create a "compatible" render pass.
	 * vkCreateGraphicsPipelines only needs a pass that is render-pass-compatible
	 * with the ones the pipeline will be used in, so we make a transient one
	 * and destroy it once the pipeline exists.
	 */
	transientRenderPass = VULKAN_INTERNAL_CreateTransientRenderPass(
		renderer,
		pipelineCreateInfo->attachmentInfo,
		actualSampleCount
	);

	/* FIX: bail out early instead of creating a pipeline against a null render pass */
	if (transientRenderPass == VK_NULL_HANDLE)
	{
		SDL_stack_free(vertexInputBindingDescriptions);
		SDL_stack_free(vertexInputAttributeDescriptions);
		SDL_stack_free(colorBlendAttachmentStates);
		SDL_free(graphicsPipeline);
		Refresh_LogError("Failed to create graphics pipeline!");
		return NULL;
	}

	/* Dynamic state: viewport and scissor are set via the command buffer */
	dynamicStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
	dynamicStateCreateInfo.pNext = NULL;
	dynamicStateCreateInfo.flags = 0;
	dynamicStateCreateInfo.dynamicStateCount = SDL_arraysize(dynamicStates);
	dynamicStateCreateInfo.pDynamicStates = dynamicStates;

	/* Shader stages */

	graphicsPipeline->vertexShaderModule = (VulkanShaderModule*) pipelineCreateInfo->vertexShaderInfo.shaderModule;
	SDL_AtomicIncRef(&graphicsPipeline->vertexShaderModule->referenceCount);

	shaderStageCreateInfos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
	shaderStageCreateInfos[0].pNext = NULL;
	shaderStageCreateInfos[0].flags = 0;
	shaderStageCreateInfos[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
	shaderStageCreateInfos[0].module = graphicsPipeline->vertexShaderModule->shaderModule;
	shaderStageCreateInfos[0].pName = pipelineCreateInfo->vertexShaderInfo.entryPointName;
	shaderStageCreateInfos[0].pSpecializationInfo = NULL;

	/* Uniform block sizes are rounded up to the device's min UBO alignment */
	graphicsPipeline->vertexUniformBlockSize =
		VULKAN_INTERNAL_NextHighestAlignment32(
			pipelineCreateInfo->vertexShaderInfo.uniformBufferSize,
			renderer->minUBOAlignment
		);

	graphicsPipeline->fragmentShaderModule = (VulkanShaderModule*) pipelineCreateInfo->fragmentShaderInfo.shaderModule;
	SDL_AtomicIncRef(&graphicsPipeline->fragmentShaderModule->referenceCount);

	shaderStageCreateInfos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
	shaderStageCreateInfos[1].pNext = NULL;
	shaderStageCreateInfos[1].flags = 0;
	shaderStageCreateInfos[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
	shaderStageCreateInfos[1].module = graphicsPipeline->fragmentShaderModule->shaderModule;
	shaderStageCreateInfos[1].pName = pipelineCreateInfo->fragmentShaderInfo.entryPointName;
	shaderStageCreateInfos[1].pSpecializationInfo = NULL;

	graphicsPipeline->fragmentUniformBlockSize =
		VULKAN_INTERNAL_NextHighestAlignment32(
			pipelineCreateInfo->fragmentShaderInfo.uniformBufferSize,
			renderer->minUBOAlignment
		);

	/* Vertex input */

	for (i = 0; i < pipelineCreateInfo->vertexInputState.vertexBindingCount; i += 1)
	{
		vertexInputBindingDescriptions[i].binding = pipelineCreateInfo->vertexInputState.vertexBindings[i].binding;
		vertexInputBindingDescriptions[i].inputRate = RefreshToVK_VertexInputRate[
			pipelineCreateInfo->vertexInputState.vertexBindings[i].inputRate
		];
		vertexInputBindingDescriptions[i].stride = pipelineCreateInfo->vertexInputState.vertexBindings[i].stride;
	}

	for (i = 0; i < pipelineCreateInfo->vertexInputState.vertexAttributeCount; i += 1)
	{
		vertexInputAttributeDescriptions[i].binding = pipelineCreateInfo->vertexInputState.vertexAttributes[i].binding;
		vertexInputAttributeDescriptions[i].format = RefreshToVK_VertexFormat[
			pipelineCreateInfo->vertexInputState.vertexAttributes[i].format
		];
		vertexInputAttributeDescriptions[i].location = pipelineCreateInfo->vertexInputState.vertexAttributes[i].location;
		vertexInputAttributeDescriptions[i].offset = pipelineCreateInfo->vertexInputState.vertexAttributes[i].offset;
	}

	vertexInputStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
	vertexInputStateCreateInfo.pNext = NULL;
	vertexInputStateCreateInfo.flags = 0;
	vertexInputStateCreateInfo.vertexBindingDescriptionCount = pipelineCreateInfo->vertexInputState.vertexBindingCount;
	vertexInputStateCreateInfo.pVertexBindingDescriptions = vertexInputBindingDescriptions;
	vertexInputStateCreateInfo.vertexAttributeDescriptionCount = pipelineCreateInfo->vertexInputState.vertexAttributeCount;
	vertexInputStateCreateInfo.pVertexAttributeDescriptions = vertexInputAttributeDescriptions;

	/* Topology */

	inputAssemblyStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
	inputAssemblyStateCreateInfo.pNext = NULL;
	inputAssemblyStateCreateInfo.flags = 0;
	inputAssemblyStateCreateInfo.primitiveRestartEnable = VK_FALSE;
	inputAssemblyStateCreateInfo.topology = RefreshToVK_PrimitiveType[
		pipelineCreateInfo->primitiveType
	];

	graphicsPipeline->primitiveType = pipelineCreateInfo->primitiveType;

	/* Viewport */

	/* NOTE: viewport and scissor are dynamic, and must be set using the command buffer */
	viewportStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
	viewportStateCreateInfo.pNext = NULL;
	viewportStateCreateInfo.flags = 0;
	viewportStateCreateInfo.viewportCount = 1;
	viewportStateCreateInfo.pViewports = NULL;
	viewportStateCreateInfo.scissorCount = 1;
	viewportStateCreateInfo.pScissors = NULL;

	/* Rasterization */

	rasterizationStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
	rasterizationStateCreateInfo.pNext = NULL;
	rasterizationStateCreateInfo.flags = 0;
	rasterizationStateCreateInfo.depthClampEnable = VK_FALSE;
	rasterizationStateCreateInfo.rasterizerDiscardEnable = VK_FALSE;
	rasterizationStateCreateInfo.polygonMode = RefreshToVK_PolygonMode[
		pipelineCreateInfo->rasterizerState.fillMode
	];
	rasterizationStateCreateInfo.cullMode = RefreshToVK_CullMode[
		pipelineCreateInfo->rasterizerState.cullMode
	];
	rasterizationStateCreateInfo.frontFace = RefreshToVK_FrontFace[
		pipelineCreateInfo->rasterizerState.frontFace
	];
	rasterizationStateCreateInfo.depthBiasEnable =
		pipelineCreateInfo->rasterizerState.depthBiasEnable;
	rasterizationStateCreateInfo.depthBiasConstantFactor =
		pipelineCreateInfo->rasterizerState.depthBiasConstantFactor;
	rasterizationStateCreateInfo.depthBiasClamp =
		pipelineCreateInfo->rasterizerState.depthBiasClamp;
	rasterizationStateCreateInfo.depthBiasSlopeFactor =
		pipelineCreateInfo->rasterizerState.depthBiasSlopeFactor;
	rasterizationStateCreateInfo.lineWidth = 1.0f;

	/* Multisample */

	multisampleStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
	multisampleStateCreateInfo.pNext = NULL;
	multisampleStateCreateInfo.flags = 0;
	multisampleStateCreateInfo.rasterizationSamples = actualSampleCount;
	multisampleStateCreateInfo.sampleShadingEnable = VK_FALSE;
	multisampleStateCreateInfo.minSampleShading = 1.0f;
	multisampleStateCreateInfo.pSampleMask =
		&pipelineCreateInfo->multisampleState.sampleMask;
	multisampleStateCreateInfo.alphaToCoverageEnable = VK_FALSE;
	multisampleStateCreateInfo.alphaToOneEnable = VK_FALSE;

	/* Depth Stencil State */

	frontStencilState.failOp = RefreshToVK_StencilOp[
		pipelineCreateInfo->depthStencilState.frontStencilState.failOp
	];
	frontStencilState.passOp = RefreshToVK_StencilOp[
		pipelineCreateInfo->depthStencilState.frontStencilState.passOp
	];
	frontStencilState.depthFailOp = RefreshToVK_StencilOp[
		pipelineCreateInfo->depthStencilState.frontStencilState.depthFailOp
	];
	frontStencilState.compareOp = RefreshToVK_CompareOp[
		pipelineCreateInfo->depthStencilState.frontStencilState.compareOp
	];
	/* compareMask/writeMask/reference are shared between front and back */
	frontStencilState.compareMask =
		pipelineCreateInfo->depthStencilState.compareMask;
	frontStencilState.writeMask =
		pipelineCreateInfo->depthStencilState.writeMask;
	frontStencilState.reference =
		pipelineCreateInfo->depthStencilState.reference;

	backStencilState.failOp = RefreshToVK_StencilOp[
		pipelineCreateInfo->depthStencilState.backStencilState.failOp
	];
	backStencilState.passOp = RefreshToVK_StencilOp[
		pipelineCreateInfo->depthStencilState.backStencilState.passOp
	];
	backStencilState.depthFailOp = RefreshToVK_StencilOp[
		pipelineCreateInfo->depthStencilState.backStencilState.depthFailOp
	];
	backStencilState.compareOp = RefreshToVK_CompareOp[
		pipelineCreateInfo->depthStencilState.backStencilState.compareOp
	];
	backStencilState.compareMask =
		pipelineCreateInfo->depthStencilState.compareMask;
	backStencilState.writeMask =
		pipelineCreateInfo->depthStencilState.writeMask;
	backStencilState.reference =
		pipelineCreateInfo->depthStencilState.reference;

	depthStencilStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
	depthStencilStateCreateInfo.pNext = NULL;
	depthStencilStateCreateInfo.flags = 0;
	depthStencilStateCreateInfo.depthTestEnable =
		pipelineCreateInfo->depthStencilState.depthTestEnable;
	depthStencilStateCreateInfo.depthWriteEnable =
		pipelineCreateInfo->depthStencilState.depthWriteEnable;
	depthStencilStateCreateInfo.depthCompareOp = RefreshToVK_CompareOp[
		pipelineCreateInfo->depthStencilState.compareOp
	];
	depthStencilStateCreateInfo.depthBoundsTestEnable =
		pipelineCreateInfo->depthStencilState.depthBoundsTestEnable;
	depthStencilStateCreateInfo.stencilTestEnable =
		pipelineCreateInfo->depthStencilState.stencilTestEnable;
	depthStencilStateCreateInfo.front = frontStencilState;
	depthStencilStateCreateInfo.back = backStencilState;
	depthStencilStateCreateInfo.minDepthBounds =
		pipelineCreateInfo->depthStencilState.minDepthBounds;
	depthStencilStateCreateInfo.maxDepthBounds =
		pipelineCreateInfo->depthStencilState.maxDepthBounds;

	/* Color Blend */

	for (i = 0; i < pipelineCreateInfo->attachmentInfo.colorAttachmentCount; i += 1)
	{
		Refresh_ColorAttachmentBlendState blendState = pipelineCreateInfo->attachmentInfo.colorAttachmentDescriptions[i].blendState;

		colorBlendAttachmentStates[i].blendEnable =
			blendState.blendEnable;
		colorBlendAttachmentStates[i].srcColorBlendFactor = RefreshToVK_BlendFactor[
			blendState.srcColorBlendFactor
		];
		colorBlendAttachmentStates[i].dstColorBlendFactor = RefreshToVK_BlendFactor[
			blendState.dstColorBlendFactor
		];
		colorBlendAttachmentStates[i].colorBlendOp = RefreshToVK_BlendOp[
			blendState.colorBlendOp
		];
		colorBlendAttachmentStates[i].srcAlphaBlendFactor = RefreshToVK_BlendFactor[
			blendState.srcAlphaBlendFactor
		];
		colorBlendAttachmentStates[i].dstAlphaBlendFactor = RefreshToVK_BlendFactor[
			blendState.dstAlphaBlendFactor
		];
		colorBlendAttachmentStates[i].alphaBlendOp = RefreshToVK_BlendOp[
			blendState.alphaBlendOp
		];
		colorBlendAttachmentStates[i].colorWriteMask =
			blendState.colorWriteMask;
	}

	colorBlendStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
	colorBlendStateCreateInfo.pNext = NULL;
	colorBlendStateCreateInfo.flags = 0;
	colorBlendStateCreateInfo.attachmentCount =
		pipelineCreateInfo->attachmentInfo.colorAttachmentCount;
	colorBlendStateCreateInfo.pAttachments =
		colorBlendAttachmentStates;
	colorBlendStateCreateInfo.blendConstants[0] =
		pipelineCreateInfo->blendConstants[0];
	colorBlendStateCreateInfo.blendConstants[1] =
		pipelineCreateInfo->blendConstants[1];
	colorBlendStateCreateInfo.blendConstants[2] =
		pipelineCreateInfo->blendConstants[2];
	colorBlendStateCreateInfo.blendConstants[3] =
		pipelineCreateInfo->blendConstants[3];

	/* We don't support LogicOp, so this is easy. */
	colorBlendStateCreateInfo.logicOpEnable = VK_FALSE;
	colorBlendStateCreateInfo.logicOp = 0;

	/* Pipeline Layout */

	graphicsPipeline->pipelineLayout = VULKAN_INTERNAL_FetchGraphicsPipelineLayout(
		renderer,
		pipelineCreateInfo->vertexShaderInfo.samplerBindingCount,
		pipelineCreateInfo->fragmentShaderInfo.samplerBindingCount
	);

	/* Pipeline */

	vkPipelineCreateInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
	vkPipelineCreateInfo.pNext = NULL;
	vkPipelineCreateInfo.flags = 0;
	vkPipelineCreateInfo.stageCount = 2;
	vkPipelineCreateInfo.pStages = shaderStageCreateInfos;
	vkPipelineCreateInfo.pVertexInputState = &vertexInputStateCreateInfo;
	vkPipelineCreateInfo.pInputAssemblyState = &inputAssemblyStateCreateInfo;
	vkPipelineCreateInfo.pTessellationState = NULL; /* FIX: pointer member, not a handle */
	vkPipelineCreateInfo.pViewportState = &viewportStateCreateInfo;
	vkPipelineCreateInfo.pRasterizationState = &rasterizationStateCreateInfo;
	vkPipelineCreateInfo.pMultisampleState = &multisampleStateCreateInfo;
	vkPipelineCreateInfo.pDepthStencilState = &depthStencilStateCreateInfo;
	vkPipelineCreateInfo.pColorBlendState = &colorBlendStateCreateInfo;
	vkPipelineCreateInfo.pDynamicState = &dynamicStateCreateInfo;
	vkPipelineCreateInfo.layout = graphicsPipeline->pipelineLayout->pipelineLayout;
	vkPipelineCreateInfo.renderPass = transientRenderPass;
	vkPipelineCreateInfo.subpass = 0;
	vkPipelineCreateInfo.basePipelineHandle = VK_NULL_HANDLE;
	vkPipelineCreateInfo.basePipelineIndex = 0;

	/* TODO: enable pipeline caching */
	vulkanResult = renderer->vkCreateGraphicsPipelines(
		renderer->logicalDevice,
		VK_NULL_HANDLE,
		1,
		&vkPipelineCreateInfo,
		NULL,
		&graphicsPipeline->pipeline
	);

	SDL_stack_free(vertexInputBindingDescriptions);
	SDL_stack_free(vertexInputAttributeDescriptions);
	SDL_stack_free(colorBlendAttachmentStates);

	/* The transient pass was only needed for pipeline creation */
	renderer->vkDestroyRenderPass(
		renderer->logicalDevice,
		transientRenderPass,
		NULL
	);

	if (vulkanResult != VK_SUCCESS)
	{
		/* FIX: release the shader module references taken above so a
		 * failed pipeline does not leak them
		 */
		SDL_AtomicDecRef(&graphicsPipeline->vertexShaderModule->referenceCount);
		SDL_AtomicDecRef(&graphicsPipeline->fragmentShaderModule->referenceCount);
		SDL_free(graphicsPipeline);
		LogVulkanResultAsError("vkCreateGraphicsPipelines", vulkanResult);
		Refresh_LogError("Failed to create graphics pipeline!");
		return NULL;
	}

	SDL_AtomicSet(&graphicsPipeline->referenceCount, 0);

	return (Refresh_GraphicsPipeline*) graphicsPipeline;
}
2020-12-29 22:52:24 +00:00
/* Fetches (or lazily creates and caches) the pipeline layout for a compute
 * pipeline using the given storage-buffer and storage-image binding counts.
 * The layout always carries three descriptor sets: buffers, images, and the
 * shared compute uniform set.
 * Returns NULL (after logging) if Vulkan pipeline layout creation fails.
 */
static VulkanComputePipelineLayout* VULKAN_INTERNAL_FetchComputePipelineLayout(
	VulkanRenderer *renderer,
	uint32_t bufferBindingCount,
	uint32_t imageBindingCount
) {
	VkResult vulkanResult;
	VkDescriptorSetLayout setLayouts[3];

	VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo;
	ComputePipelineLayoutHash pipelineLayoutHash;
	VulkanComputePipelineLayout *vulkanComputePipelineLayout;

	pipelineLayoutHash.bufferLayout = VULKAN_INTERNAL_FetchDescriptorSetLayout(
		renderer,
		VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
		bufferBindingCount,
		VK_SHADER_STAGE_COMPUTE_BIT
	);

	pipelineLayoutHash.imageLayout = VULKAN_INTERNAL_FetchDescriptorSetLayout(
		renderer,
		VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
		imageBindingCount,
		VK_SHADER_STAGE_COMPUTE_BIT
	);

	pipelineLayoutHash.uniformLayout = renderer->computeUniformDescriptorSetLayout;

	/* Layouts are cached by their hash; reuse an existing one if possible */
	vulkanComputePipelineLayout = ComputePipelineLayoutHashArray_Fetch(
		&renderer->computePipelineLayoutHashTable,
		pipelineLayoutHash
	);

	if (vulkanComputePipelineLayout != NULL)
	{
		return vulkanComputePipelineLayout;
	}

	vulkanComputePipelineLayout = SDL_malloc(sizeof(VulkanComputePipelineLayout));

	setLayouts[0] = pipelineLayoutHash.bufferLayout;
	setLayouts[1] = pipelineLayoutHash.imageLayout;
	setLayouts[2] = pipelineLayoutHash.uniformLayout;

	pipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
	pipelineLayoutCreateInfo.pNext = NULL;
	pipelineLayoutCreateInfo.flags = 0;
	pipelineLayoutCreateInfo.setLayoutCount = 3;
	pipelineLayoutCreateInfo.pSetLayouts = setLayouts;
	pipelineLayoutCreateInfo.pushConstantRangeCount = 0;
	pipelineLayoutCreateInfo.pPushConstantRanges = NULL;

	vulkanResult = renderer->vkCreatePipelineLayout(
		renderer->logicalDevice,
		&pipelineLayoutCreateInfo,
		NULL,
		&vulkanComputePipelineLayout->pipelineLayout
	);

	if (vulkanResult != VK_SUCCESS)
	{
		/* FIX: free the freshly allocated wrapper instead of leaking it */
		SDL_free(vulkanComputePipelineLayout);
		LogVulkanResultAsError("vkCreatePipelineLayout", vulkanResult);
		return NULL;
	}

	ComputePipelineLayoutHashArray_Insert(
		&renderer->computePipelineLayoutHashTable,
		pipelineLayoutHash,
		vulkanComputePipelineLayout
	);

	/* If the binding count is 0
	 * we can just bind the same descriptor set
	 * so no cache is needed
	 */
	if (bufferBindingCount == 0)
	{
		vulkanComputePipelineLayout->bufferDescriptorSetCache = NULL;
	}
	else
	{
		vulkanComputePipelineLayout->bufferDescriptorSetCache =
			VULKAN_INTERNAL_CreateDescriptorSetCache(
				renderer,
				VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
				pipelineLayoutHash.bufferLayout,
				bufferBindingCount
			);
	}

	if (imageBindingCount == 0)
	{
		vulkanComputePipelineLayout->imageDescriptorSetCache = NULL;
	}
	else
	{
		vulkanComputePipelineLayout->imageDescriptorSetCache =
			VULKAN_INTERNAL_CreateDescriptorSetCache(
				renderer,
				VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
				pipelineLayoutHash.imageLayout,
				imageBindingCount
			);
	}

	return vulkanComputePipelineLayout;
}
2021-01-05 23:00:51 +00:00
static Refresh_ComputePipeline* VULKAN_CreateComputePipeline(
Refresh_Renderer *driverData,
2022-03-02 19:22:52 +00:00
Refresh_ComputeShaderInfo *computeShaderInfo
2020-12-29 22:52:24 +00:00
) {
VkComputePipelineCreateInfo computePipelineCreateInfo;
VkPipelineShaderStageCreateInfo pipelineShaderStageCreateInfo;
2020-12-31 04:39:47 +00:00
2020-12-29 22:52:24 +00:00
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanComputePipeline *vulkanComputePipeline = SDL_malloc(sizeof(VulkanComputePipeline));
vulkanComputePipeline->computeShaderModule = (VulkanShaderModule*) computeShaderInfo->shaderModule;
SDL_AtomicIncRef(&vulkanComputePipeline->computeShaderModule->referenceCount);
2020-12-29 22:52:24 +00:00
pipelineShaderStageCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
pipelineShaderStageCreateInfo.pNext = NULL;
pipelineShaderStageCreateInfo.flags = 0;
pipelineShaderStageCreateInfo.stage = VK_SHADER_STAGE_COMPUTE_BIT;
2022-03-04 21:37:45 +00:00
pipelineShaderStageCreateInfo.module = vulkanComputePipeline->computeShaderModule->shaderModule;
2022-03-02 19:22:52 +00:00
pipelineShaderStageCreateInfo.pName = computeShaderInfo->entryPointName;
2020-12-29 22:52:24 +00:00
pipelineShaderStageCreateInfo.pSpecializationInfo = NULL;
2020-12-31 04:39:47 +00:00
vulkanComputePipeline->pipelineLayout = VULKAN_INTERNAL_FetchComputePipelineLayout(
2020-12-29 22:52:24 +00:00
renderer,
2022-03-02 19:22:52 +00:00
computeShaderInfo->bufferBindingCount,
computeShaderInfo->imageBindingCount
2020-12-29 22:52:24 +00:00
);
computePipelineCreateInfo.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
computePipelineCreateInfo.pNext = NULL;
computePipelineCreateInfo.flags = 0;
computePipelineCreateInfo.stage = pipelineShaderStageCreateInfo;
2020-12-31 04:39:47 +00:00
computePipelineCreateInfo.layout =
vulkanComputePipeline->pipelineLayout->pipelineLayout;
2020-12-29 22:52:24 +00:00
computePipelineCreateInfo.basePipelineHandle = NULL;
computePipelineCreateInfo.basePipelineIndex = 0;
renderer->vkCreateComputePipelines(
renderer->logicalDevice,
NULL,
1,
&computePipelineCreateInfo,
NULL,
&vulkanComputePipeline->pipeline
);
vulkanComputePipeline->uniformBlockSize =
VULKAN_INTERNAL_NextHighestAlignment32(
2022-03-02 19:22:52 +00:00
computeShaderInfo->uniformBufferSize,
2020-12-31 04:39:47 +00:00
renderer->minUBOAlignment
);
SDL_AtomicSet(&vulkanComputePipeline->referenceCount, 0);
2021-01-05 23:00:51 +00:00
return (Refresh_ComputePipeline*) vulkanComputePipeline;
2020-12-29 22:52:24 +00:00
}
2021-01-05 23:00:51 +00:00
/* Creates a VkSampler from a Refresh sampler state description.
 * Returns NULL (after logging) if sampler creation fails.
 */
static Refresh_Sampler* VULKAN_CreateSampler(
	Refresh_Renderer *driverData,
	Refresh_SamplerStateCreateInfo *samplerStateCreateInfo
) {
	VulkanRenderer *renderer = (VulkanRenderer*) driverData;
	VulkanSampler *newSampler = SDL_malloc(sizeof(VulkanSampler));
	VkSamplerCreateInfo createInfo;
	VkResult result;

	createInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
	createInfo.pNext = NULL;
	createInfo.flags = 0;

	/* Filtering */
	createInfo.magFilter = RefreshToVK_Filter[samplerStateCreateInfo->magFilter];
	createInfo.minFilter = RefreshToVK_Filter[samplerStateCreateInfo->minFilter];
	createInfo.mipmapMode = RefreshToVK_SamplerMipmapMode[samplerStateCreateInfo->mipmapMode];

	/* Addressing */
	createInfo.addressModeU = RefreshToVK_SamplerAddressMode[samplerStateCreateInfo->addressModeU];
	createInfo.addressModeV = RefreshToVK_SamplerAddressMode[samplerStateCreateInfo->addressModeV];
	createInfo.addressModeW = RefreshToVK_SamplerAddressMode[samplerStateCreateInfo->addressModeW];

	/* LOD, anisotropy, comparison */
	createInfo.mipLodBias = samplerStateCreateInfo->mipLodBias;
	createInfo.anisotropyEnable = samplerStateCreateInfo->anisotropyEnable;
	createInfo.maxAnisotropy = samplerStateCreateInfo->maxAnisotropy;
	createInfo.compareEnable = samplerStateCreateInfo->compareEnable;
	createInfo.compareOp = RefreshToVK_CompareOp[samplerStateCreateInfo->compareOp];
	createInfo.minLod = samplerStateCreateInfo->minLod;
	createInfo.maxLod = samplerStateCreateInfo->maxLod;
	createInfo.borderColor = RefreshToVK_BorderColor[samplerStateCreateInfo->borderColor];
	createInfo.unnormalizedCoordinates = VK_FALSE;

	result = renderer->vkCreateSampler(
		renderer->logicalDevice,
		&createInfo,
		NULL,
		&newSampler->sampler
	);

	if (result != VK_SUCCESS)
	{
		SDL_free(newSampler);
		LogVulkanResultAsError("vkCreateSampler", result);
		return NULL;
	}

	SDL_AtomicSet(&newSampler->referenceCount, 0);

	return (Refresh_Sampler*) newSampler;
}
2021-01-05 23:00:51 +00:00
/* Wraps the caller-supplied shader bytecode in a VkShaderModule.
 * Returns NULL (after logging) if module creation fails.
 */
static Refresh_ShaderModule* VULKAN_CreateShaderModule(
	Refresh_Renderer *driverData,
	Refresh_ShaderModuleCreateInfo *shaderModuleCreateInfo
) {
	VulkanRenderer *renderer = (VulkanRenderer*) driverData;
	VulkanShaderModule *moduleWrapper = SDL_malloc(sizeof(VulkanShaderModule));
	VkShaderModuleCreateInfo createInfo;
	VkResult result;

	createInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
	createInfo.pNext = NULL;
	createInfo.flags = 0;
	createInfo.codeSize = shaderModuleCreateInfo->codeSize;
	createInfo.pCode = (uint32_t*) shaderModuleCreateInfo->byteCode;

	result = renderer->vkCreateShaderModule(
		renderer->logicalDevice,
		&createInfo,
		NULL,
		&moduleWrapper->shaderModule
	);

	if (result != VK_SUCCESS)
	{
		SDL_free(moduleWrapper);
		LogVulkanResultAsError("vkCreateShaderModule", result);
		Refresh_LogError("Failed to create shader module!");
		return NULL;
	}

	SDL_AtomicSet(&moduleWrapper->referenceCount, 0);

	return (Refresh_ShaderModule*) moduleWrapper;
}
static Refresh_Texture* VULKAN_CreateTexture(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_TextureCreateInfo *textureCreateInfo
2020-12-17 01:23:49 +00:00
) {
2020-12-18 22:35:33 +00:00
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VkImageUsageFlags imageUsageFlags = (
2020-12-18 22:35:33 +00:00
VK_IMAGE_USAGE_TRANSFER_DST_BIT |
VK_IMAGE_USAGE_TRANSFER_SRC_BIT
);
2021-01-29 05:37:11 +00:00
VkImageAspectFlags imageAspectFlags;
uint8_t isDepthFormat = IsRefreshDepthFormat(textureCreateInfo->format);
2022-06-17 07:41:27 +00:00
VkFormat format;
VulkanTextureContainer *container;
VulkanTextureHandle *textureHandle;
2022-06-17 07:41:27 +00:00
if (isDepthFormat)
2022-06-17 07:41:27 +00:00
{
format = RefreshToVK_DepthFormat(renderer, textureCreateInfo->format);
}
else
{
format = RefreshToVK_SurfaceFormat[textureCreateInfo->format];
}
2020-12-18 22:35:33 +00:00
if (textureCreateInfo->usageFlags & REFRESH_TEXTUREUSAGE_SAMPLER_BIT)
{
imageUsageFlags |= VK_IMAGE_USAGE_SAMPLED_BIT;
}
if (textureCreateInfo->usageFlags & REFRESH_TEXTUREUSAGE_COLOR_TARGET_BIT)
2020-12-18 22:35:33 +00:00
{
imageUsageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2020-12-18 22:35:33 +00:00
}
if (textureCreateInfo->usageFlags & REFRESH_TEXTUREUSAGE_DEPTH_STENCIL_TARGET_BIT)
{
imageUsageFlags |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
}
if (textureCreateInfo->usageFlags & REFRESH_TEXTUREUSAGE_COMPUTE_BIT)
{
imageUsageFlags |= VK_IMAGE_USAGE_STORAGE_BIT;
}
if (isDepthFormat)
2021-01-29 05:37:11 +00:00
{
imageAspectFlags = VK_IMAGE_ASPECT_DEPTH_BIT;
if (IsStencilFormat(format))
{
imageAspectFlags |= VK_IMAGE_ASPECT_STENCIL_BIT;
}
}
else
{
imageAspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
}
textureHandle = VULKAN_INTERNAL_CreateTextureHandle(
2020-12-18 22:35:33 +00:00
renderer,
textureCreateInfo->width,
textureCreateInfo->height,
textureCreateInfo->depth,
textureCreateInfo->isCube,
textureCreateInfo->layerCount,
textureCreateInfo->levelCount,
RefreshToVK_SampleCount[textureCreateInfo->sampleCount],
2021-01-29 05:37:11 +00:00
format,
imageAspectFlags,
imageUsageFlags,
0
2020-12-18 22:35:33 +00:00
);
if (textureHandle == NULL)
{
Refresh_LogInfo("Failed to create texture container!");
return NULL;
}
container = SDL_malloc(sizeof(VulkanTextureContainer));
container->canBeDiscarded = 1;
container->activeTextureHandle = textureHandle;
container->textureCapacity = 1;
container->textureCount = 1 ;
container->textureHandles = SDL_malloc(
container->textureCapacity * sizeof(VulkanTextureHandle*)
);
container->textureHandles[0] = container->activeTextureHandle;
return (Refresh_Texture*) container;
2020-12-18 22:35:33 +00:00
}
static Refresh_GpuBuffer* VULKAN_CreateGpuBuffer(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_BufferUsageFlags usageFlags,
2020-12-17 01:23:49 +00:00
uint32_t sizeInBytes
) {
VulkanResourceAccessType resourceAccessType;
2021-01-03 03:03:25 +00:00
VkBufferUsageFlags vulkanUsageFlags =
VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
2020-12-31 04:39:47 +00:00
if (usageFlags == 0)
{
resourceAccessType = RESOURCE_ACCESS_TRANSFER_READ_WRITE;
}
2020-12-31 04:39:47 +00:00
if (usageFlags & REFRESH_BUFFERUSAGE_VERTEX_BIT)
{
vulkanUsageFlags |= VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
resourceAccessType = RESOURCE_ACCESS_VERTEX_BUFFER;
2020-12-19 01:32:27 +00:00
}
2020-12-31 04:39:47 +00:00
if (usageFlags & REFRESH_BUFFERUSAGE_INDEX_BIT)
{
vulkanUsageFlags |= VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
resourceAccessType = RESOURCE_ACCESS_INDEX_BUFFER;
2020-12-31 04:39:47 +00:00
}
2020-12-17 01:23:49 +00:00
2020-12-31 06:28:37 +00:00
if (usageFlags & REFRESH_BUFFERUSAGE_COMPUTE_BIT)
2020-12-31 04:39:47 +00:00
{
vulkanUsageFlags |= VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
resourceAccessType = RESOURCE_ACCESS_COMPUTE_SHADER_BUFFER_READ_WRITE;
}
if (usageFlags & REFRESH_BUFFERUSAGE_INDIRECT_BIT)
{
vulkanUsageFlags |= VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
resourceAccessType = RESOURCE_ACCESS_INDIRECT_BUFFER;
2020-12-31 04:39:47 +00:00
}
2020-12-19 01:32:27 +00:00
return (Refresh_GpuBuffer*) VULKAN_INTERNAL_CreateBufferContainer(
(VulkanRenderer*) driverData,
2020-12-19 01:32:27 +00:00
sizeInBytes,
resourceAccessType,
vulkanUsageFlags,
0,
0,
1
2022-01-02 22:35:57 +00:00
);
2020-12-17 01:23:49 +00:00
}
/* Creates a transfer (staging) buffer usable as both a copy source and a
 * copy destination.
 * NOTE(review): the trailing 1, 1, 0 flags differ from VULKAN_CreateGpuBuffer's
 * 0, 0, 1 — presumably host-visibility related; confirm against
 * VULKAN_INTERNAL_CreateBufferContainer's parameter list.
 */
static Refresh_TransferBuffer* VULKAN_CreateTransferBuffer(
	Refresh_Renderer *driverData,
	uint32_t sizeInBytes
) {
	VulkanRenderer *renderer = (VulkanRenderer*) driverData;
	VkBufferUsageFlags transferUsage =
		VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;

	return (Refresh_TransferBuffer*) VULKAN_INTERNAL_CreateBufferContainer(
		renderer,
		sizeInBytes,
		RESOURCE_ACCESS_NONE,
		transferUsage,
		1,
		1,
		0
	);
}
/* Setters */
/* Copies uniform data into the UBO's mapped memory at its current offset.
 * The caller is responsible for holding the UBO's lock and for advancing
 * currentOffset afterwards. */
static void VULKAN_INTERNAL_SetUniformBufferData(
	VulkanUniformBufferObject *uniformBufferObject,
	void* data,
	uint32_t dataLength
) {
	uint8_t *mapBase = uniformBufferObject->buffer->usedRegion->allocation->mapPointer;
	uint8_t *writePtr =
		mapBase +
		uniformBufferObject->buffer->usedRegion->resourceOffset +
		uniformBufferObject->currentOffset;

	SDL_memcpy(writePtr, data, dataLength);
}
/* Writes vertex-stage uniform data into the shared vertex UBO ring and
 * records the offset on the command buffer for binding at draw time. */
static void VULKAN_PushVertexShaderUniforms(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	void *data,
	uint32_t dataLengthInBytes
) {
	VulkanRenderer *renderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *cmdbuf = (VulkanCommandBuffer*) commandBuffer;
	VulkanUniformBufferObject *ubo = renderer->vertexUniformBufferObject;
	uint32_t blockSize = cmdbuf->currentGraphicsPipeline->vertexUniformBlockSize;

	SDL_LockMutex(ubo->lock);

	/* Wrap back to the start of the ring when space runs out */
	if (ubo->currentOffset + blockSize + MAX_UBO_SECTION_SIZE >= UBO_BUFFER_SIZE)
	{
		ubo->currentOffset = 0;
	}

	cmdbuf->vertexUniformOffset = ubo->currentOffset;

	VULKAN_INTERNAL_SetUniformBufferData(ubo, data, dataLengthInBytes);

	ubo->currentOffset += blockSize;

	SDL_UnlockMutex(ubo->lock);
}
static void VULKAN_PushFragmentShaderUniforms(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
2020-12-17 01:23:49 +00:00
void *data,
uint32_t dataLengthInBytes
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer* vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanGraphicsPipeline* graphicsPipeline = vulkanCommandBuffer->currentGraphicsPipeline;
2020-12-19 01:03:26 +00:00
SDL_LockMutex(renderer->fragmentUniformBufferObject->lock);
2020-12-19 04:08:07 +00:00
if (renderer->fragmentUniformBufferObject->currentOffset + graphicsPipeline->fragmentUniformBlockSize + MAX_UBO_SECTION_SIZE >= UBO_BUFFER_SIZE)
{
renderer->fragmentUniformBufferObject->currentOffset = 0;
}
vulkanCommandBuffer->fragmentUniformOffset = renderer->fragmentUniformBufferObject->currentOffset;
2020-12-19 01:03:26 +00:00
VULKAN_INTERNAL_SetUniformBufferData(
renderer->fragmentUniformBufferObject,
2021-01-21 01:16:43 +00:00
data,
2021-01-14 02:02:45 +00:00
dataLengthInBytes
2020-12-19 01:03:26 +00:00
);
renderer->fragmentUniformBufferObject->currentOffset += graphicsPipeline->fragmentUniformBlockSize;
2022-05-12 04:16:24 +00:00
SDL_UnlockMutex(renderer->fragmentUniformBufferObject->lock);
}
2020-12-19 01:03:26 +00:00
/* Writes compute-stage uniform data into the shared compute UBO ring and
 * records the offset on the command buffer for binding at dispatch time. */
static void VULKAN_PushComputeShaderUniforms(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	void *data,
	uint32_t dataLengthInBytes
) {
	VulkanRenderer *renderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *cmdbuf = (VulkanCommandBuffer*) commandBuffer;
	VulkanUniformBufferObject *ubo = renderer->computeUniformBufferObject;
	uint32_t blockSize = cmdbuf->currentComputePipeline->uniformBlockSize;

	SDL_LockMutex(ubo->lock);

	/* Wrap back to the start of the ring when space runs out */
	if (ubo->currentOffset + blockSize + MAX_UBO_SECTION_SIZE >= UBO_BUFFER_SIZE)
	{
		ubo->currentOffset = 0;
	}

	cmdbuf->computeUniformOffset = ubo->currentOffset;

	VULKAN_INTERNAL_SetUniformBufferData(ubo, data, dataLengthInBytes);

	ubo->currentOffset += blockSize;

	SDL_UnlockMutex(ubo->lock);
}
/* Pops an inactive descriptor set from the cache (growing the backing pool
 * geometrically when the cache is exhausted), writes the given image OR
 * buffer bindings into it, and records it on the command buffer
 * (presumably so it can be returned to the cache when the command buffer
 * completes — see boundDescriptorSetDatas handling elsewhere).
 *
 * Exactly one of descriptorImageInfos / descriptorBufferInfos must be
 * non-NULL. Returns VK_NULL_HANDLE on invalid arguments or on pool/set
 * allocation failure.
 */
static VkDescriptorSet VULKAN_INTERNAL_FetchDescriptorSet(
	VulkanRenderer *renderer,
	VulkanCommandBuffer *vulkanCommandBuffer,
	DescriptorSetCache *descriptorSetCache,
	VkDescriptorImageInfo *descriptorImageInfos, /* Can be NULL */
	VkDescriptorBufferInfo *descriptorBufferInfos /* Can be NULL */
) {
	VkDescriptorSet fetchedSet;
	VkWriteDescriptorSet setWrites[MAX_TEXTURE_SAMPLERS];
	uint8_t writingImages;
	uint32_t bindingIndex;
	uint32_t newPoolIndex;
	DescriptorSetData *boundSetData;

	/* The two binding-info arrays are mutually exclusive */
	if (descriptorImageInfos == NULL && descriptorBufferInfos == NULL)
	{
		Refresh_LogError("descriptorImageInfos and descriptorBufferInfos cannot both be NULL!");
		return VK_NULL_HANDLE;
	}
	else if (descriptorImageInfos != NULL && descriptorBufferInfos != NULL)
	{
		Refresh_LogError("descriptorImageInfos and descriptorBufferInfos cannot both be set!");
		return VK_NULL_HANDLE;
	}

	writingImages = (descriptorImageInfos != NULL);

	SDL_LockMutex(descriptorSetCache->lock);

	if (descriptorSetCache->inactiveDescriptorSetCount == 0)
	{
		/* Cache exhausted: add a new pool of nextPoolSize sets and
		 * allocate that many fresh inactive sets from it. */
		descriptorSetCache->descriptorPoolCount += 1;
		newPoolIndex = descriptorSetCache->descriptorPoolCount - 1;

		descriptorSetCache->descriptorPools = SDL_realloc(
			descriptorSetCache->descriptorPools,
			sizeof(VkDescriptorPool) * descriptorSetCache->descriptorPoolCount
		);

		if (!VULKAN_INTERNAL_CreateDescriptorPool(
			renderer,
			descriptorSetCache->descriptorType,
			descriptorSetCache->nextPoolSize,
			descriptorSetCache->nextPoolSize * descriptorSetCache->bindingCount,
			&descriptorSetCache->descriptorPools[newPoolIndex]
		)) {
			SDL_UnlockMutex(descriptorSetCache->lock);
			Refresh_LogError("Failed to create descriptor pool!");
			return VK_NULL_HANDLE;
		}

		descriptorSetCache->inactiveDescriptorSetCapacity += descriptorSetCache->nextPoolSize;

		descriptorSetCache->inactiveDescriptorSets = SDL_realloc(
			descriptorSetCache->inactiveDescriptorSets,
			sizeof(VkDescriptorSet) * descriptorSetCache->inactiveDescriptorSetCapacity
		);

		if (!VULKAN_INTERNAL_AllocateDescriptorSets(
			renderer,
			descriptorSetCache->descriptorPools[newPoolIndex],
			descriptorSetCache->descriptorSetLayout,
			descriptorSetCache->nextPoolSize,
			descriptorSetCache->inactiveDescriptorSets
		)) {
			SDL_UnlockMutex(descriptorSetCache->lock);
			Refresh_LogError("Failed to allocate descriptor sets!");
			return VK_NULL_HANDLE;
		}

		descriptorSetCache->inactiveDescriptorSetCount = descriptorSetCache->nextPoolSize;

		descriptorSetCache->nextPoolSize *= 2; /* grow geometrically */
	}

	/* Pop the last inactive set */
	descriptorSetCache->inactiveDescriptorSetCount -= 1;
	fetchedSet = descriptorSetCache->inactiveDescriptorSets[descriptorSetCache->inactiveDescriptorSetCount];

	/* One write per binding, all of the cache's descriptor type */
	for (bindingIndex = 0; bindingIndex < descriptorSetCache->bindingCount; bindingIndex += 1)
	{
		VkWriteDescriptorSet *write = &setWrites[bindingIndex];

		write->sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
		write->pNext = NULL;
		write->descriptorCount = 1;
		write->descriptorType = descriptorSetCache->descriptorType;
		write->dstArrayElement = 0;
		write->dstBinding = bindingIndex;
		write->dstSet = fetchedSet;
		write->pTexelBufferView = NULL;
		write->pImageInfo = writingImages ? &descriptorImageInfos[bindingIndex] : NULL;
		write->pBufferInfo = writingImages ? NULL : &descriptorBufferInfos[bindingIndex];
	}

	renderer->vkUpdateDescriptorSets(
		renderer->logicalDevice,
		descriptorSetCache->bindingCount,
		setWrites,
		0,
		NULL
	);

	SDL_UnlockMutex(descriptorSetCache->lock);

	/* Track the set on the command buffer */
	if (vulkanCommandBuffer->boundDescriptorSetDataCount == vulkanCommandBuffer->boundDescriptorSetDataCapacity)
	{
		vulkanCommandBuffer->boundDescriptorSetDataCapacity *= 2;
		vulkanCommandBuffer->boundDescriptorSetDatas = SDL_realloc(
			vulkanCommandBuffer->boundDescriptorSetDatas,
			vulkanCommandBuffer->boundDescriptorSetDataCapacity * sizeof(DescriptorSetData)
		);
	}

	boundSetData = &vulkanCommandBuffer->boundDescriptorSetDatas[vulkanCommandBuffer->boundDescriptorSetDataCount];
	boundSetData->descriptorSet = fetchedSet;
	boundSetData->descriptorSetCache = descriptorSetCache;
	vulkanCommandBuffer->boundDescriptorSetDataCount += 1;

	return fetchedSet;
}
/* Adds a texture to the renderer's deferred-destroy list, at most once.
 * Safe to call while already holding disposeLock (SDL mutexes are recursive).
 */
static void VULKAN_INTERNAL_QueueDestroyTexture(
	VulkanRenderer *renderer,
	VulkanTexture *vulkanTexture
) {
	SDL_LockMutex(renderer->disposeLock);

	/* FIX: test markedForDestroy under disposeLock — the previous unlocked
	 * check allowed two threads to enqueue the same texture twice. */
	if (vulkanTexture->markedForDestroy)
	{
		SDL_UnlockMutex(renderer->disposeLock);
		return;
	}

	EXPAND_ARRAY_IF_NEEDED(
		renderer->texturesToDestroy,
		VulkanTexture*,
		renderer->texturesToDestroyCount + 1,
		renderer->texturesToDestroyCapacity,
		renderer->texturesToDestroyCapacity * 2
	)

	renderer->texturesToDestroy[
		renderer->texturesToDestroyCount
	] = vulkanTexture;
	renderer->texturesToDestroyCount += 1;

	vulkanTexture->markedForDestroy = 1;

	SDL_UnlockMutex(renderer->disposeLock);
}
static void VULKAN_QueueDestroyTexture(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_Texture *texture
2020-12-21 20:37:54 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanTextureContainer *vulkanTextureContainer = (VulkanTextureContainer*) texture;
uint32_t i;
2020-12-21 20:37:54 +00:00
SDL_LockMutex(renderer->disposeLock);
for (i = 0; i < vulkanTextureContainer->textureCount; i += 1)
{
VULKAN_INTERNAL_QueueDestroyTexture(renderer, vulkanTextureContainer->textureHandles[i]->vulkanTexture);
SDL_free(vulkanTextureContainer->textureHandles[i]);
}
/* Containers are just client handles, so we can destroy immediately */
SDL_free(vulkanTextureContainer->textureHandles);
SDL_free(vulkanTextureContainer);
2020-12-21 20:37:54 +00:00
SDL_UnlockMutex(renderer->disposeLock);
}
/* Adds a sampler to the renderer's deferred-destroy list. */
static void VULKAN_QueueDestroySampler(
	Refresh_Renderer *driverData,
	Refresh_Sampler *sampler
) {
	VulkanRenderer *renderer = (VulkanRenderer*) driverData;
	VulkanSampler *samplerToDestroy = (VulkanSampler*) sampler;

	SDL_LockMutex(renderer->disposeLock);

	EXPAND_ARRAY_IF_NEEDED(
		renderer->samplersToDestroy,
		VulkanSampler*,
		renderer->samplersToDestroyCount + 1,
		renderer->samplersToDestroyCapacity,
		renderer->samplersToDestroyCapacity * 2
	)

	renderer->samplersToDestroy[renderer->samplersToDestroyCount] = samplerToDestroy;
	renderer->samplersToDestroyCount += 1;

	SDL_UnlockMutex(renderer->disposeLock);
}
/* Adds a buffer to the renderer's deferred-destroy list, at most once.
 * Safe to call while already holding disposeLock (SDL mutexes are recursive).
 */
static void VULKAN_INTERNAL_QueueDestroyBuffer(
	VulkanRenderer *renderer,
	VulkanBuffer *vulkanBuffer
) {
	SDL_LockMutex(renderer->disposeLock);

	/* FIX: test markedForDestroy under disposeLock — the previous unlocked
	 * check allowed two threads to enqueue the same buffer twice. */
	if (vulkanBuffer->markedForDestroy)
	{
		SDL_UnlockMutex(renderer->disposeLock);
		return;
	}

	EXPAND_ARRAY_IF_NEEDED(
		renderer->buffersToDestroy,
		VulkanBuffer*,
		renderer->buffersToDestroyCount + 1,
		renderer->buffersToDestroyCapacity,
		renderer->buffersToDestroyCapacity * 2
	)

	renderer->buffersToDestroy[
		renderer->buffersToDestroyCount
	] = vulkanBuffer;
	renderer->buffersToDestroyCount += 1;

	vulkanBuffer->markedForDestroy = 1;

	SDL_UnlockMutex(renderer->disposeLock);
}
static void VULKAN_QueueDestroyGpuBuffer(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_GpuBuffer *gpuBuffer
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanBufferContainer *vulkanBufferContainer = (VulkanBufferContainer*) gpuBuffer;
uint32_t i;
2021-01-02 06:07:15 +00:00
SDL_LockMutex(renderer->disposeLock);
2020-12-21 21:02:07 +00:00
for (i = 0; i < vulkanBufferContainer->bufferCount; i += 1)
{
VULKAN_INTERNAL_QueueDestroyBuffer(renderer, vulkanBufferContainer->bufferHandles[i]->vulkanBuffer);
SDL_free(vulkanBufferContainer->bufferHandles[i]);
}
/* Containers are just client handles, so we can free immediately */
SDL_free(vulkanBufferContainer->bufferHandles);
SDL_free(vulkanBufferContainer);
SDL_UnlockMutex(renderer->disposeLock);
}
/* Queues every buffer owned by the transfer-buffer container for deferred
 * destruction, then frees the container itself immediately. */
static void VULKAN_QueueDestroyTransferBuffer(
	Refresh_Renderer *driverData,
	Refresh_TransferBuffer *transferBuffer
) {
	VulkanRenderer *renderer = (VulkanRenderer*) driverData;
	VulkanBufferContainer *container = (VulkanBufferContainer*) transferBuffer;
	uint32_t handleIndex;

	SDL_LockMutex(renderer->disposeLock);

	for (handleIndex = 0; handleIndex < container->bufferCount; handleIndex += 1)
	{
		VULKAN_INTERNAL_QueueDestroyBuffer(
			renderer,
			container->bufferHandles[handleIndex]->vulkanBuffer
		);
		SDL_free(container->bufferHandles[handleIndex]);
	}

	/* Containers are just client handles, so we can free immediately */
	SDL_free(container->bufferHandles);
	SDL_free(container);

	SDL_UnlockMutex(renderer->disposeLock);
}
/* Adds a shader module to the renderer's deferred-destroy list. */
static void VULKAN_QueueDestroyShaderModule(
	Refresh_Renderer *driverData,
	Refresh_ShaderModule *shaderModule
) {
	VulkanRenderer *renderer = (VulkanRenderer*) driverData;
	VulkanShaderModule *moduleToDestroy = (VulkanShaderModule*) shaderModule;

	SDL_LockMutex(renderer->disposeLock);

	EXPAND_ARRAY_IF_NEEDED(
		renderer->shaderModulesToDestroy,
		VulkanShaderModule*,
		renderer->shaderModulesToDestroyCount + 1,
		renderer->shaderModulesToDestroyCapacity,
		renderer->shaderModulesToDestroyCapacity * 2
	)

	renderer->shaderModulesToDestroy[renderer->shaderModulesToDestroyCount] = moduleToDestroy;
	renderer->shaderModulesToDestroyCount += 1;

	SDL_UnlockMutex(renderer->disposeLock);
}
static void VULKAN_QueueDestroyComputePipeline(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_ComputePipeline *computePipeline
2020-12-17 01:23:49 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanComputePipeline *vulkanComputePipeline = (VulkanComputePipeline*) computePipeline;
2020-12-21 21:02:07 +00:00
SDL_LockMutex(renderer->disposeLock);
EXPAND_ARRAY_IF_NEEDED(
renderer->computePipelinesToDestroy,
VulkanComputePipeline*,
renderer->computePipelinesToDestroyCount + 1,
renderer->computePipelinesToDestroyCapacity,
renderer->computePipelinesToDestroyCapacity * 2
)
renderer->computePipelinesToDestroy[renderer->computePipelinesToDestroyCount] = vulkanComputePipeline;
renderer->computePipelinesToDestroyCount += 1;
2021-01-02 06:07:15 +00:00
SDL_UnlockMutex(renderer->disposeLock);
2020-12-17 01:23:49 +00:00
}
static void VULKAN_QueueDestroyGraphicsPipeline(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_GraphicsPipeline *graphicsPipeline
2020-12-31 04:39:47 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanGraphicsPipeline *vulkanGraphicsPipeline = (VulkanGraphicsPipeline*) graphicsPipeline;
2020-12-31 04:39:47 +00:00
SDL_LockMutex(renderer->disposeLock);
EXPAND_ARRAY_IF_NEEDED(
renderer->graphicsPipelinesToDestroy,
VulkanGraphicsPipeline*,
renderer->graphicsPipelinesToDestroyCount + 1,
renderer->graphicsPipelinesToDestroyCapacity,
renderer->graphicsPipelinesToDestroyCapacity * 2
)
2020-12-31 04:39:47 +00:00
renderer->graphicsPipelinesToDestroy[renderer->graphicsPipelinesToDestroyCount] = vulkanGraphicsPipeline;
renderer->graphicsPipelinesToDestroyCount += 1;
2021-01-02 06:07:15 +00:00
SDL_UnlockMutex(renderer->disposeLock);
2020-12-31 04:39:47 +00:00
}
/* Command Buffer render state */
/* Looks up a cached VkRenderPass compatible with the given attachment set,
 * creating and caching a new one on a miss. The whole lookup/create/insert
 * sequence runs under renderPassFetchLock.
 * Returns VK_NULL_HANDLE if render pass creation fails.
 */
static VkRenderPass VULKAN_INTERNAL_FetchRenderPass(
	VulkanRenderer *renderer,
	VulkanCommandBuffer *commandBuffer,
	Refresh_ColorAttachmentInfo *colorAttachmentInfos,
	uint32_t colorAttachmentCount,
	Refresh_DepthStencilAttachmentInfo *depthStencilAttachmentInfo
) {
	VkRenderPass fetchedRenderPass;
	RenderPassHash renderPassHash;
	VulkanTexture *attachmentTexture;
	uint32_t attachmentIndex;

	SDL_LockMutex(renderer->renderPassFetchLock);

	/* Build the hash key from attachment formats and load/store ops */
	for (attachmentIndex = 0; attachmentIndex < colorAttachmentCount; attachmentIndex += 1)
	{
		attachmentTexture = ((VulkanTextureContainer*) colorAttachmentInfos[attachmentIndex].textureSlice.texture)->activeTextureHandle->vulkanTexture;

		renderPassHash.colorTargetDescriptions[attachmentIndex].format = attachmentTexture->format;
		renderPassHash.colorTargetDescriptions[attachmentIndex].clearColor = colorAttachmentInfos[attachmentIndex].clearColor;
		renderPassHash.colorTargetDescriptions[attachmentIndex].loadOp = colorAttachmentInfos[attachmentIndex].loadOp;
		renderPassHash.colorTargetDescriptions[attachmentIndex].storeOp = colorAttachmentInfos[attachmentIndex].storeOp;
	}

	/* Sample count comes from the first color attachment, if any */
	renderPassHash.colorAttachmentSampleCount = VK_SAMPLE_COUNT_1_BIT;
	if (colorAttachmentCount > 0)
	{
		renderPassHash.colorAttachmentSampleCount = ((VulkanTextureContainer*) colorAttachmentInfos[0].textureSlice.texture)->activeTextureHandle->vulkanTexture->sampleCount;
	}

	renderPassHash.colorAttachmentCount = colorAttachmentCount;

	if (depthStencilAttachmentInfo != NULL)
	{
		attachmentTexture = ((VulkanTextureContainer*) depthStencilAttachmentInfo->textureSlice.texture)->activeTextureHandle->vulkanTexture;

		renderPassHash.depthStencilTargetDescription.format = attachmentTexture->format;
		renderPassHash.depthStencilTargetDescription.loadOp = depthStencilAttachmentInfo->loadOp;
		renderPassHash.depthStencilTargetDescription.storeOp = depthStencilAttachmentInfo->storeOp;
		renderPassHash.depthStencilTargetDescription.stencilLoadOp = depthStencilAttachmentInfo->stencilLoadOp;
		renderPassHash.depthStencilTargetDescription.stencilStoreOp = depthStencilAttachmentInfo->stencilStoreOp;
	}
	else
	{
		/* No depth-stencil attachment: use sentinel values */
		renderPassHash.depthStencilTargetDescription.format = 0;
		renderPassHash.depthStencilTargetDescription.loadOp = REFRESH_LOADOP_DONT_CARE;
		renderPassHash.depthStencilTargetDescription.storeOp = REFRESH_STOREOP_DONT_CARE;
		renderPassHash.depthStencilTargetDescription.stencilLoadOp = REFRESH_LOADOP_DONT_CARE;
		renderPassHash.depthStencilTargetDescription.stencilStoreOp = REFRESH_STOREOP_DONT_CARE;
	}

	fetchedRenderPass = RenderPassHashArray_Fetch(
		&renderer->renderPassHashArray,
		&renderPassHash
	);

	if (fetchedRenderPass == VK_NULL_HANDLE)
	{
		/* Cache miss: build a new render pass and remember it */
		fetchedRenderPass = VULKAN_INTERNAL_CreateRenderPass(
			renderer,
			commandBuffer,
			colorAttachmentInfos,
			colorAttachmentCount,
			depthStencilAttachmentInfo
		);

		if (fetchedRenderPass != VK_NULL_HANDLE)
		{
			RenderPassHashArray_Insert(
				&renderer->renderPassHashArray,
				renderPassHash,
				fetchedRenderPass
			);
		}
	}

	SDL_UnlockMutex(renderer->renderPassFetchLock);

	return fetchedRenderPass;
}
2020-12-31 00:47:13 +00:00
/* Returns a VkFramebuffer compatible with the given render pass and
 * attachment set, creating and caching a new one on a cache miss.
 *
 * Returns NULL if framebuffer creation (or allocation) fails.
 */
static VulkanFramebuffer* VULKAN_INTERNAL_FetchFramebuffer(
	VulkanRenderer *renderer,
	VkRenderPass renderPass,
	Refresh_ColorAttachmentInfo *colorAttachmentInfos,
	uint32_t colorAttachmentCount,
	Refresh_DepthStencilAttachmentInfo *depthStencilAttachmentInfo,
	uint32_t width,
	uint32_t height
) {
	VulkanFramebuffer *vulkanFramebuffer;
	VkFramebufferCreateInfo framebufferInfo;
	VkResult result;
	VkImageView imageViewAttachments[2 * MAX_COLOR_TARGET_BINDINGS + 1];
	FramebufferHash hash;
	VulkanTextureSlice *textureSlice;
	uint32_t attachmentCount = 0;
	uint32_t i;

	/* Build the cache key from the attachment image views and dimensions */
	for (i = 0; i < MAX_COLOR_TARGET_BINDINGS; i += 1)
	{
		hash.colorAttachmentViews[i] = VK_NULL_HANDLE;
		hash.colorMultiSampleAttachmentViews[i] = VK_NULL_HANDLE;
	}

	hash.colorAttachmentCount = colorAttachmentCount;

	for (i = 0; i < colorAttachmentCount; i += 1)
	{
		textureSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&colorAttachmentInfos[i].textureSlice);

		hash.colorAttachmentViews[i] = textureSlice->view;

		if (textureSlice->msaaTex != NULL)
		{
			hash.colorMultiSampleAttachmentViews[i] = textureSlice->msaaTex->view;
		}
	}

	if (depthStencilAttachmentInfo == NULL)
	{
		hash.depthStencilAttachmentView = VK_NULL_HANDLE;
	}
	else
	{
		textureSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&depthStencilAttachmentInfo->textureSlice);
		hash.depthStencilAttachmentView = textureSlice->view;
	}

	hash.width = width;
	hash.height = height;

	SDL_LockMutex(renderer->framebufferFetchLock);

	vulkanFramebuffer = FramebufferHashArray_Fetch(
		&renderer->framebufferHashArray,
		&hash
	);

	SDL_UnlockMutex(renderer->framebufferFetchLock);

	if (vulkanFramebuffer != NULL)
	{
		return vulkanFramebuffer;
	}

	/* Cache miss: create a new framebuffer.
	 * NOTE(review): two threads can race past the fetch above and each
	 * create a framebuffer for the same key; both get inserted and both
	 * remain valid, so this is wasteful but not incorrect.
	 */
	vulkanFramebuffer = SDL_malloc(sizeof(VulkanFramebuffer));

	/* FIX: the allocation result was previously dereferenced unchecked */
	if (vulkanFramebuffer == NULL)
	{
		Refresh_LogError("Failed to allocate VulkanFramebuffer!");
		return NULL;
	}

	SDL_AtomicSet(&vulkanFramebuffer->referenceCount, 0);

	/* Attachment order must match the render pass: each color view,
	 * immediately followed by its multisample view (if any), then the
	 * depth-stencil view last.
	 */
	for (i = 0; i < colorAttachmentCount; i += 1)
	{
		textureSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&colorAttachmentInfos[i].textureSlice);

		imageViewAttachments[attachmentCount] = textureSlice->view;
		attachmentCount += 1;

		if (textureSlice->msaaTex != NULL)
		{
			imageViewAttachments[attachmentCount] = textureSlice->msaaTex->view;
			attachmentCount += 1;
		}
	}

	if (depthStencilAttachmentInfo != NULL)
	{
		textureSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&depthStencilAttachmentInfo->textureSlice);
		imageViewAttachments[attachmentCount] = textureSlice->view;
		attachmentCount += 1;
	}

	framebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
	framebufferInfo.pNext = NULL;
	framebufferInfo.flags = 0;
	framebufferInfo.renderPass = renderPass;
	framebufferInfo.attachmentCount = attachmentCount;
	framebufferInfo.pAttachments = imageViewAttachments;
	framebufferInfo.width = hash.width;
	framebufferInfo.height = hash.height;
	framebufferInfo.layers = 1;

	result = renderer->vkCreateFramebuffer(
		renderer->logicalDevice,
		&framebufferInfo,
		NULL,
		&vulkanFramebuffer->framebuffer
	);

	if (result == VK_SUCCESS)
	{
		SDL_LockMutex(renderer->framebufferFetchLock);
		FramebufferHashArray_Insert(
			&renderer->framebufferHashArray,
			hash,
			vulkanFramebuffer
		);
		SDL_UnlockMutex(renderer->framebufferFetchLock);
	}
	else
	{
		LogVulkanResultAsError("vkCreateFramebuffer", result);
		SDL_free(vulkanFramebuffer);
		vulkanFramebuffer = NULL;
	}

	return vulkanFramebuffer;
}
/* Records the viewport on the command buffer's cached dynamic state.
 * No Vulkan command is issued here; VULKAN_SetViewport and
 * VULKAN_BindGraphicsPipeline apply the cached value via vkCmdSetViewport.
 */
static void VULKAN_INTERNAL_SetCurrentViewport(
	VulkanCommandBuffer *commandBuffer,
	Refresh_Viewport *viewport
) {
	/* FIX: removed the redundant cast of commandBuffer to its own type
	 * (leftover from an earlier signature change). */
	commandBuffer->currentViewport.x = viewport->x;
	commandBuffer->currentViewport.y = viewport->y;
	commandBuffer->currentViewport.width = viewport->w;
	commandBuffer->currentViewport.height = viewport->h;
	commandBuffer->currentViewport.minDepth = viewport->minDepth;
	commandBuffer->currentViewport.maxDepth = viewport->maxDepth;
}
static void VULKAN_SetViewport(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_Viewport *viewport
2020-12-29 07:41:59 +00:00
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
2021-01-03 01:00:52 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
2020-12-29 07:41:59 +00:00
VULKAN_INTERNAL_SetCurrentViewport(
vulkanCommandBuffer,
viewport
);
2020-12-29 07:41:59 +00:00
renderer->vkCmdSetViewport(
2021-01-03 01:00:52 +00:00
vulkanCommandBuffer->commandBuffer,
2020-12-29 07:41:59 +00:00
0,
1,
&vulkanCommandBuffer->currentViewport
2020-12-29 07:41:59 +00:00
);
}
2020-12-29 07:41:59 +00:00
/* Records the scissor rectangle on the command buffer's cached dynamic
 * state; the Vulkan command is issued elsewhere (VULKAN_SetScissor,
 * VULKAN_BindGraphicsPipeline).
 */
static void VULKAN_INTERNAL_SetCurrentScissor(
	VulkanCommandBuffer *vulkanCommandBuffer,
	Refresh_Rect *scissor
) {
	VkRect2D *current = &vulkanCommandBuffer->currentScissor;

	current->offset.x = scissor->x;
	current->offset.y = scissor->y;
	current->extent.width = scissor->w;
	current->extent.height = scissor->h;
}
2020-12-29 07:41:59 +00:00
/* Public driver entry: sets the scissor rectangle for subsequent draws.
 * The value is cached on the command buffer and recorded into the command
 * stream immediately.
 */
static void VULKAN_SetScissor(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	Refresh_Rect *scissor
) {
	VulkanRenderer *vulkanRenderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *cmdbuf = (VulkanCommandBuffer*) commandBuffer;

	/* Cache the scissor on the command buffer... */
	VULKAN_INTERNAL_SetCurrentScissor(cmdbuf, scissor);

	/* ...then record it as dynamic state right away */
	vulkanRenderer->vkCmdSetScissor(
		cmdbuf->commandBuffer,
		0,
		1,
		&cmdbuf->currentScissor
	);
}
/* Begins a render pass on the command buffer.
 *
 * Handles SAFEDISCARD cycling of still-in-flight attachment textures,
 * fetches (or creates) the compatible VkRenderPass and VkFramebuffer,
 * performs the required image layout transitions, and records
 * vkCmdBeginRenderPass with the requested clear values. Finally resets
 * viewport and scissor to cover the full framebuffer.
 */
static void VULKAN_BeginRenderPass(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	Refresh_ColorAttachmentInfo *colorAttachmentInfos,
	uint32_t colorAttachmentCount,
	Refresh_DepthStencilAttachmentInfo *depthStencilAttachmentInfo
) {
	VulkanRenderer* renderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
	VkRenderPass renderPass;
	VulkanFramebuffer *framebuffer;
	VulkanTextureContainer *textureContainer;
	VulkanTextureSlice *textureSlice;
	uint32_t w, h;
	VkClearValue *clearValues;
	uint32_t clearCount = colorAttachmentCount;
	uint32_t clearValueIndex = 0;
	VkRenderPassBeginInfo renderPassBeginInfo;
	uint32_t i;
	Refresh_Viewport defaultViewport;
	Refresh_Rect defaultScissor;
	uint32_t framebufferWidth = UINT32_MAX;
	uint32_t framebufferHeight = UINT32_MAX;

	for (i = 0; i < colorAttachmentCount; i += 1)
	{
		textureContainer = (VulkanTextureContainer*) colorAttachmentInfos[i].textureSlice.texture;
		textureSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&colorAttachmentInfos[i].textureSlice);

		/* SAFEDISCARD: cycle the texture if the GPU still references it and
		 * we are not going to load its contents anyway. */
		if (
			colorAttachmentInfos[i].writeOption == REFRESH_WRITEOPTIONS_SAFEDISCARD &&
			colorAttachmentInfos[i].loadOp != REFRESH_LOADOP_LOAD &&
			textureContainer->canBeDiscarded &&
			SDL_AtomicGet(&textureSlice->referenceCount) > 0
		) {
			VULKAN_INTERNAL_DiscardActiveTexture(
				renderer,
				textureContainer
			);
			textureSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&colorAttachmentInfos[i].textureSlice);
		}

		w = textureSlice->parent->dimensions.width >> colorAttachmentInfos[i].textureSlice.mipLevel;
		h = textureSlice->parent->dimensions.height >> colorAttachmentInfos[i].textureSlice.mipLevel;

		/* The framebuffer cannot be larger than the smallest attachment. */
		if (w < framebufferWidth)
		{
			framebufferWidth = w;
		}

		if (h < framebufferHeight)
		{
			framebufferHeight = h;
		}

		if (!textureSlice->parent->isRenderTarget)
		{
			Refresh_LogError("Color attachment texture was not designated as a target!");
			return;
		}
	}

	if (depthStencilAttachmentInfo != NULL)
	{
		textureContainer = (VulkanTextureContainer*) depthStencilAttachmentInfo->textureSlice.texture;
		textureSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&depthStencilAttachmentInfo->textureSlice);

		/* SAFEDISCARD: only legal if neither depth nor stencil is loaded */
		if (
			depthStencilAttachmentInfo->writeOption == REFRESH_WRITEOPTIONS_SAFEDISCARD &&
			depthStencilAttachmentInfo->loadOp != REFRESH_LOADOP_LOAD &&
			depthStencilAttachmentInfo->stencilLoadOp != REFRESH_LOADOP_LOAD &&
			textureContainer->canBeDiscarded &&
			SDL_AtomicGet(&textureSlice->referenceCount) > 0
		) {
			VULKAN_INTERNAL_DiscardActiveTexture(
				renderer,
				textureContainer
			);
			textureSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&depthStencilAttachmentInfo->textureSlice);
		}

		w = textureSlice->parent->dimensions.width >> depthStencilAttachmentInfo->textureSlice.mipLevel;
		h = textureSlice->parent->dimensions.height >> depthStencilAttachmentInfo->textureSlice.mipLevel;

		/* The framebuffer cannot be larger than the smallest attachment. */
		if (w < framebufferWidth)
		{
			framebufferWidth = w;
		}

		if (h < framebufferHeight)
		{
			framebufferHeight = h;
		}

		if (!textureSlice->parent->isRenderTarget)
		{
			Refresh_LogError("Depth stencil attachment texture was not designated as a target!");
			return;
		}
	}

	/* Fetch required render objects */

	renderPass = VULKAN_INTERNAL_FetchRenderPass(
		renderer,
		vulkanCommandBuffer,
		colorAttachmentInfos,
		colorAttachmentCount,
		depthStencilAttachmentInfo
	);

	framebuffer = VULKAN_INTERNAL_FetchFramebuffer(
		renderer,
		renderPass,
		colorAttachmentInfos,
		colorAttachmentCount,
		depthStencilAttachmentInfo,
		framebufferWidth,
		framebufferHeight
	);

	VULKAN_INTERNAL_TrackFramebuffer(renderer, vulkanCommandBuffer, framebuffer);

	/* Layout transitions */

	for (i = 0; i < colorAttachmentCount; i += 1)
	{
		textureSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&colorAttachmentInfos[i].textureSlice);
		vulkanCommandBuffer->renderPassColorTargetTextureSlices[i] = textureSlice;

		/* Transition the attachment */
		VULKAN_INTERNAL_ImageMemoryBarrier(
			renderer,
			vulkanCommandBuffer->commandBuffer,
			RESOURCE_ACCESS_COLOR_ATTACHMENT_READ_WRITE,
			textureSlice
		);

		if (textureSlice->msaaTex != NULL)
		{
			/* Transition the multisample attachment */
			VULKAN_INTERNAL_ImageMemoryBarrier(
				renderer,
				vulkanCommandBuffer->commandBuffer,
				RESOURCE_ACCESS_COLOR_ATTACHMENT_WRITE,
				&textureSlice->msaaTex->slices[0]
			);

			/* Multisample attachments need their own clear value */
			clearCount += 1;
		}

		VULKAN_INTERNAL_TrackTextureSlice(renderer, vulkanCommandBuffer, textureSlice);
		/* TODO: do we need to track the msaa texture? or is it implicitly only used when the regular texture is used? */
	}

	vulkanCommandBuffer->renderPassColorTargetTextureSliceCount = colorAttachmentCount;
	vulkanCommandBuffer->renderPassDepthTextureSlice = NULL;

	if (depthStencilAttachmentInfo != NULL)
	{
		textureSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&depthStencilAttachmentInfo->textureSlice);
		vulkanCommandBuffer->renderPassDepthTextureSlice = textureSlice;

		VULKAN_INTERNAL_ImageMemoryBarrier(
			renderer,
			vulkanCommandBuffer->commandBuffer,
			RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_WRITE,
			textureSlice
		);

		clearCount += 1;

		VULKAN_INTERNAL_TrackTextureSlice(renderer, vulkanCommandBuffer, textureSlice);
	}

	/* Set clear values. The order must match the framebuffer attachment
	 * order: each color view immediately followed by its multisample view
	 * (if any), then the depth-stencil view.
	 *
	 * FIX: the previous code indexed colorAttachmentInfos with the clear
	 * value index, which read out of bounds as soon as more than one
	 * multisampled color attachment was bound. A separate clearValueIndex
	 * now tracks the position in clearValues.
	 */
	clearValues = SDL_stack_alloc(VkClearValue, clearCount);

	for (i = 0; i < colorAttachmentCount; i += 1)
	{
		clearValues[clearValueIndex].color.float32[0] = colorAttachmentInfos[i].clearColor.x;
		clearValues[clearValueIndex].color.float32[1] = colorAttachmentInfos[i].clearColor.y;
		clearValues[clearValueIndex].color.float32[2] = colorAttachmentInfos[i].clearColor.z;
		clearValues[clearValueIndex].color.float32[3] = colorAttachmentInfos[i].clearColor.w;
		clearValueIndex += 1;

		textureSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&colorAttachmentInfos[i].textureSlice);

		if (textureSlice->parent->sampleCount > VK_SAMPLE_COUNT_1_BIT)
		{
			/* The multisample attachment clears to the same color */
			clearValues[clearValueIndex].color.float32[0] = colorAttachmentInfos[i].clearColor.x;
			clearValues[clearValueIndex].color.float32[1] = colorAttachmentInfos[i].clearColor.y;
			clearValues[clearValueIndex].color.float32[2] = colorAttachmentInfos[i].clearColor.z;
			clearValues[clearValueIndex].color.float32[3] = colorAttachmentInfos[i].clearColor.w;
			clearValueIndex += 1;
		}
	}

	if (depthStencilAttachmentInfo != NULL)
	{
		clearValues[clearValueIndex].depthStencil.depth =
			depthStencilAttachmentInfo->depthStencilClearValue.depth;
		clearValues[clearValueIndex].depthStencil.stencil =
			depthStencilAttachmentInfo->depthStencilClearValue.stencil;
	}

	renderPassBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
	renderPassBeginInfo.pNext = NULL;
	renderPassBeginInfo.renderPass = renderPass;
	renderPassBeginInfo.framebuffer = framebuffer->framebuffer;
	renderPassBeginInfo.pClearValues = clearValues;
	renderPassBeginInfo.clearValueCount = clearCount;
	renderPassBeginInfo.renderArea.extent.width = framebufferWidth;
	renderPassBeginInfo.renderArea.extent.height = framebufferHeight;
	renderPassBeginInfo.renderArea.offset.x = 0;
	renderPassBeginInfo.renderArea.offset.y = 0;

	renderer->vkCmdBeginRenderPass(
		vulkanCommandBuffer->commandBuffer,
		&renderPassBeginInfo,
		VK_SUBPASS_CONTENTS_INLINE
	);

	SDL_stack_free(clearValues);

	/* Set sensible default viewport state */

	defaultViewport.x = 0;
	defaultViewport.y = 0;
	defaultViewport.w = framebufferWidth;
	defaultViewport.h = framebufferHeight;
	defaultViewport.minDepth = 0;
	defaultViewport.maxDepth = 1;

	VULKAN_INTERNAL_SetCurrentViewport(
		vulkanCommandBuffer,
		&defaultViewport
	);

	defaultScissor.x = 0;
	defaultScissor.y = 0;
	defaultScissor.w = framebufferWidth;
	defaultScissor.h = framebufferHeight;

	VULKAN_INTERNAL_SetCurrentScissor(
		vulkanCommandBuffer,
		&defaultScissor
	);
}
/* Binds a graphics pipeline and re-applies the cached dynamic state
 * (viewport, scissor). Pipelines without vertex/fragment samplers get the
 * shared empty descriptor sets so a valid set is always bound at draw time.
 */
static void VULKAN_BindGraphicsPipeline(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	Refresh_GraphicsPipeline *graphicsPipeline
) {
	VulkanRenderer *vulkanRenderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *cmdbuf = (VulkanCommandBuffer*) commandBuffer;
	VulkanGraphicsPipeline *pipeline = (VulkanGraphicsPipeline*) graphicsPipeline;

	/* Install dummy sets for stages that declare no samplers */
	if (pipeline->pipelineLayout->vertexSamplerDescriptorSetCache == NULL)
	{
		cmdbuf->vertexSamplerDescriptorSet = vulkanRenderer->emptyVertexSamplerDescriptorSet;
	}

	if (pipeline->pipelineLayout->fragmentSamplerDescriptorSetCache == NULL)
	{
		cmdbuf->fragmentSamplerDescriptorSet = vulkanRenderer->emptyFragmentSamplerDescriptorSet;
	}

	vulkanRenderer->vkCmdBindPipeline(
		cmdbuf->commandBuffer,
		VK_PIPELINE_BIND_POINT_GRAPHICS,
		pipeline->pipeline
	);

	cmdbuf->currentGraphicsPipeline = pipeline;

	VULKAN_INTERNAL_TrackGraphicsPipeline(vulkanRenderer, cmdbuf, pipeline);

	/* Viewport and scissor are dynamic state: re-record the cached values */
	vulkanRenderer->vkCmdSetViewport(
		cmdbuf->commandBuffer,
		0,
		1,
		&cmdbuf->currentViewport
	);

	vulkanRenderer->vkCmdSetScissor(
		cmdbuf->commandBuffer,
		0,
		1,
		&cmdbuf->currentScissor
	);
}
/* Binds a range of vertex buffers and tracks each bound buffer on the
 * command buffer for lifetime management.
 */
static void VULKAN_BindVertexBuffers(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	uint32_t firstBinding,
	uint32_t bindingCount,
	Refresh_BufferBinding *pBindings
) {
	VulkanRenderer *vulkanRenderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *cmdbuf = (VulkanCommandBuffer*) commandBuffer;
	VulkanBuffer *boundBuffer;
	VkBuffer *bufferHandles = SDL_stack_alloc(VkBuffer, bindingCount);
	VkDeviceSize *bufferOffsets = SDL_stack_alloc(VkDeviceSize, bindingCount);
	uint32_t slot;

	/* Gather the native handles and offsets for the bind call */
	for (slot = 0; slot < bindingCount; slot += 1)
	{
		boundBuffer = ((VulkanBufferContainer*) pBindings[slot].gpuBuffer)->activeBufferHandle->vulkanBuffer;
		bufferHandles[slot] = boundBuffer->buffer;
		bufferOffsets[slot] = (VkDeviceSize) pBindings[slot].offset;

		VULKAN_INTERNAL_TrackBuffer(vulkanRenderer, cmdbuf, boundBuffer);
	}

	vulkanRenderer->vkCmdBindVertexBuffers(
		cmdbuf->commandBuffer,
		firstBinding,
		bindingCount,
		bufferHandles,
		bufferOffsets
	);

	SDL_stack_free(bufferHandles);
	SDL_stack_free(bufferOffsets);
}
/* Binds an index buffer with the given element size and tracks it on the
 * command buffer for lifetime management.
 */
static void VULKAN_BindIndexBuffer(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	Refresh_BufferBinding *pBinding,
	Refresh_IndexElementSize indexElementSize
) {
	VulkanRenderer *vulkanRenderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *cmdbuf = (VulkanCommandBuffer*) commandBuffer;
	VulkanBuffer *indexBuffer = ((VulkanBufferContainer*) pBinding->gpuBuffer)->activeBufferHandle->vulkanBuffer;

	VULKAN_INTERNAL_TrackBuffer(vulkanRenderer, cmdbuf, indexBuffer);

	vulkanRenderer->vkCmdBindIndexBuffer(
		cmdbuf->commandBuffer,
		indexBuffer->buffer,
		(VkDeviceSize) pBinding->offset,
		RefreshToVK_IndexType[indexElementSize]
	);
}
/* Binds texture/sampler pairs for the vertex stage of the current graphics
 * pipeline by fetching a matching descriptor set. No-op when the pipeline
 * declares no vertex samplers (the dummy set was installed at bind time).
 */
static void VULKAN_BindVertexSamplers(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	Refresh_TextureSamplerBinding *pBindings
) {
	VulkanRenderer *vulkanRenderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *cmdbuf = (VulkanCommandBuffer*) commandBuffer;
	VulkanGraphicsPipeline *pipeline = cmdbuf->currentGraphicsPipeline;
	VulkanTexture *boundTexture;
	VulkanSampler *boundSampler;
	uint32_t bindingIndex, bindingTotal, slice;
	VkDescriptorImageInfo imageInfos[MAX_TEXTURE_SAMPLERS];

	if (pipeline->pipelineLayout->vertexSamplerDescriptorSetCache == NULL)
	{
		return;
	}

	bindingTotal = pipeline->pipelineLayout->vertexSamplerDescriptorSetCache->bindingCount;

	for (bindingIndex = 0; bindingIndex < bindingTotal; bindingIndex += 1)
	{
		boundTexture = ((VulkanTextureContainer*) pBindings[bindingIndex].texture)->activeTextureHandle->vulkanTexture;
		boundSampler = (VulkanSampler*) pBindings[bindingIndex].sampler;

		imageInfos[bindingIndex].sampler = boundSampler->sampler;
		imageInfos[bindingIndex].imageView = boundTexture->view;
		imageInfos[bindingIndex].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

		/* Keep the sampler and every slice of the texture alive */
		VULKAN_INTERNAL_TrackSampler(vulkanRenderer, cmdbuf, boundSampler);

		for (slice = 0; slice < boundTexture->sliceCount; slice += 1)
		{
			VULKAN_INTERNAL_TrackTextureSlice(vulkanRenderer, cmdbuf, &boundTexture->slices[slice]);
		}
	}

	cmdbuf->vertexSamplerDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
		vulkanRenderer,
		cmdbuf,
		pipeline->pipelineLayout->vertexSamplerDescriptorSetCache,
		imageInfos,
		NULL
	);
}
/* Binds texture/sampler pairs for the fragment stage of the current
 * graphics pipeline by fetching a matching descriptor set. No-op when the
 * pipeline declares no fragment samplers (the dummy set was installed at
 * bind time).
 */
static void VULKAN_BindFragmentSamplers(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	Refresh_TextureSamplerBinding *pBindings
) {
	VulkanRenderer *vulkanRenderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *cmdbuf = (VulkanCommandBuffer*) commandBuffer;
	VulkanGraphicsPipeline *pipeline = cmdbuf->currentGraphicsPipeline;
	VulkanTexture *boundTexture;
	VulkanSampler *boundSampler;
	uint32_t bindingIndex, bindingTotal, slice;
	VkDescriptorImageInfo imageInfos[MAX_TEXTURE_SAMPLERS];

	if (pipeline->pipelineLayout->fragmentSamplerDescriptorSetCache == NULL)
	{
		return;
	}

	bindingTotal = pipeline->pipelineLayout->fragmentSamplerDescriptorSetCache->bindingCount;

	for (bindingIndex = 0; bindingIndex < bindingTotal; bindingIndex += 1)
	{
		boundTexture = ((VulkanTextureContainer*) pBindings[bindingIndex].texture)->activeTextureHandle->vulkanTexture;
		boundSampler = (VulkanSampler*) pBindings[bindingIndex].sampler;

		imageInfos[bindingIndex].sampler = boundSampler->sampler;
		imageInfos[bindingIndex].imageView = boundTexture->view;
		imageInfos[bindingIndex].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;

		/* Keep the sampler and every slice of the texture alive */
		VULKAN_INTERNAL_TrackSampler(vulkanRenderer, cmdbuf, boundSampler);

		for (slice = 0; slice < boundTexture->sliceCount; slice += 1)
		{
			VULKAN_INTERNAL_TrackTextureSlice(vulkanRenderer, cmdbuf, &boundTexture->slices[slice]);
		}
	}

	cmdbuf->fragmentSamplerDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
		vulkanRenderer,
		cmdbuf,
		pipeline->pipelineLayout->fragmentSamplerDescriptorSetCache,
		imageInfos,
		NULL
	);
}
/* Ends the current render pass and transitions render targets back to a
 * shader-readable state when their usage flags allow it, then clears the
 * command buffer's render pass bookkeeping.
 */
static void VULKAN_EndRenderPass(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer
) {
	VulkanRenderer *vulkanRenderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *cmdbuf = (VulkanCommandBuffer*) commandBuffer;
	VulkanTextureSlice *targetSlice;
	uint32_t i;

	vulkanRenderer->vkCmdEndRenderPass(cmdbuf->commandBuffer);

	/* Color targets: prefer the sampled layout, fall back to storage */
	for (i = 0; i < cmdbuf->renderPassColorTargetTextureSliceCount; i += 1)
	{
		targetSlice = cmdbuf->renderPassColorTargetTextureSlices[i];

		if (targetSlice->parent->usageFlags & VK_IMAGE_USAGE_SAMPLED_BIT)
		{
			VULKAN_INTERNAL_ImageMemoryBarrier(
				vulkanRenderer,
				cmdbuf->commandBuffer,
				RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE,
				targetSlice
			);
		}
		else if (targetSlice->parent->usageFlags & VK_IMAGE_USAGE_STORAGE_BIT)
		{
			VULKAN_INTERNAL_ImageMemoryBarrier(
				vulkanRenderer,
				cmdbuf->commandBuffer,
				RESOURCE_ACCESS_COMPUTE_SHADER_STORAGE_IMAGE_READ_WRITE,
				targetSlice
			);
		}
	}
	cmdbuf->renderPassColorTargetTextureSliceCount = 0;

	/* Depth-stencil target: sampled layout only */
	targetSlice = cmdbuf->renderPassDepthTextureSlice;
	if (
		targetSlice != NULL &&
		(targetSlice->parent->usageFlags & VK_IMAGE_USAGE_SAMPLED_BIT)
	) {
		VULKAN_INTERNAL_ImageMemoryBarrier(
			vulkanRenderer,
			cmdbuf->commandBuffer,
			RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE,
			targetSlice
		);
	}
	cmdbuf->renderPassDepthTextureSlice = NULL;

	cmdbuf->currentGraphicsPipeline = NULL;
}
/* Begins a compute pass: resets the per-pass compute binding bookkeeping
 * used by VULKAN_EndComputePass to re-transition resources.
 */
static void VULKAN_BeginComputePass(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer
) {
	VulkanCommandBuffer *cmdbuf = (VulkanCommandBuffer*) commandBuffer;

	cmdbuf->boundComputeBufferCount = 0;
	cmdbuf->boundComputeTextureSliceCount = 0;
}
/* Binds a compute pipeline. Pipelines that declare no buffer or image
 * bindings get the shared empty descriptor sets so a valid set is always
 * bound at dispatch time.
 */
static void VULKAN_BindComputePipeline(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	Refresh_ComputePipeline *computePipeline
) {
	VulkanRenderer *vulkanRenderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *cmdbuf = (VulkanCommandBuffer*) commandBuffer;
	VulkanComputePipeline *pipeline = (VulkanComputePipeline*) computePipeline;

	/* Install dummy sets for unused binding categories */
	if (pipeline->pipelineLayout->bufferDescriptorSetCache == NULL)
	{
		cmdbuf->bufferDescriptorSet = vulkanRenderer->emptyComputeBufferDescriptorSet;
	}

	if (pipeline->pipelineLayout->imageDescriptorSetCache == NULL)
	{
		cmdbuf->imageDescriptorSet = vulkanRenderer->emptyComputeImageDescriptorSet;
	}

	vulkanRenderer->vkCmdBindPipeline(
		cmdbuf->commandBuffer,
		VK_PIPELINE_BIND_POINT_COMPUTE,
		pipeline->pipeline
	);

	cmdbuf->currentComputePipeline = pipeline;

	VULKAN_INTERNAL_TrackComputePipeline(vulkanRenderer, cmdbuf, pipeline);
}
/* Binds storage buffers for the current compute pipeline: cycles
 * still-in-flight buffers on SAFEDISCARD, issues compute read/write
 * barriers, and fetches a matching descriptor set. No-op when the pipeline
 * declares no storage buffers.
 */
static void VULKAN_BindComputeBuffers(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	Refresh_ComputeBufferBinding *pBindings
) {
	VulkanRenderer *vulkanRenderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *cmdbuf = (VulkanCommandBuffer*) commandBuffer;
	VulkanComputePipeline *pipeline = cmdbuf->currentComputePipeline;
	VulkanBufferContainer *container;
	VulkanBuffer *boundBuffer;
	VkDescriptorBufferInfo bufferInfos[MAX_BUFFER_BINDINGS];
	uint32_t bindingIndex;

	if (pipeline->pipelineLayout->bufferDescriptorSetCache == NULL)
	{
		return;
	}

	for (bindingIndex = 0; bindingIndex < pipeline->pipelineLayout->bufferDescriptorSetCache->bindingCount; bindingIndex += 1)
	{
		container = (VulkanBufferContainer*) pBindings[bindingIndex].gpuBuffer;
		boundBuffer = container->activeBufferHandle->vulkanBuffer;

		/* SAFEDISCARD: cycle the buffer if the GPU still references it */
		if (
			pBindings[bindingIndex].writeOption == REFRESH_WRITEOPTIONS_SAFEDISCARD &&
			SDL_AtomicGet(&container->activeBufferHandle->vulkanBuffer->referenceCount) > 0
		) {
			VULKAN_INTERNAL_DiscardActiveBuffer(
				vulkanRenderer,
				container
			);
			boundBuffer = container->activeBufferHandle->vulkanBuffer;
		}

		VULKAN_INTERNAL_BufferMemoryBarrier(
			vulkanRenderer,
			cmdbuf->commandBuffer,
			RESOURCE_ACCESS_COMPUTE_SHADER_BUFFER_READ_WRITE,
			boundBuffer
		);

		bufferInfos[bindingIndex].buffer = boundBuffer->buffer;
		bufferInfos[bindingIndex].offset = 0;
		bufferInfos[bindingIndex].range = boundBuffer->size;

		VULKAN_INTERNAL_TrackBuffer(vulkanRenderer, cmdbuf, boundBuffer);
		VULKAN_INTERNAL_TrackComputeBuffer(vulkanRenderer, cmdbuf, boundBuffer);
	}

	cmdbuf->bufferDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
		vulkanRenderer,
		cmdbuf,
		pipeline->pipelineLayout->bufferDescriptorSetCache,
		NULL,
		bufferInfos
	);
}
static void VULKAN_BindComputeTextures(
2022-03-04 20:30:33 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_ComputeTextureBinding *pBindings
2022-03-04 20:30:33 +00:00
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanComputePipeline *computePipeline = vulkanCommandBuffer->currentComputePipeline;
VulkanTextureContainer *currentTextureContainer;
VulkanTextureSlice *currentTextureSlice;
VkDescriptorImageInfo descriptorImageInfos[MAX_TEXTURE_SAMPLERS];
uint32_t i;
2022-03-04 20:30:33 +00:00
if (computePipeline->pipelineLayout->imageDescriptorSetCache == NULL)
{
return;
}
2022-03-04 20:30:33 +00:00
for (i = 0; i < computePipeline->pipelineLayout->imageDescriptorSetCache->bindingCount; i += 1)
{
currentTextureContainer = (VulkanTextureContainer*) pBindings[i].textureSlice.texture;
currentTextureSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&pBindings[i].textureSlice);
if (
pBindings[i].writeOption == REFRESH_WRITEOPTIONS_SAFEDISCARD &&
SDL_AtomicGet(&currentTextureSlice->referenceCount) > 0
) {
VULKAN_INTERNAL_DiscardActiveTexture(
renderer,
currentTextureContainer
);
currentTextureSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&pBindings[i].textureSlice);
}
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_COMPUTE_SHADER_STORAGE_IMAGE_READ_WRITE,
currentTextureSlice
);
descriptorImageInfos[i].imageView = currentTextureSlice->view;
descriptorImageInfos[i].sampler = VK_NULL_HANDLE;
descriptorImageInfos[i].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
VULKAN_INTERNAL_TrackTextureSlice(renderer, vulkanCommandBuffer, currentTextureSlice);
VULKAN_INTERNAL_TrackComputeTextureSlice(renderer, vulkanCommandBuffer, currentTextureSlice);
}
vulkanCommandBuffer->imageDescriptorSet =
VULKAN_INTERNAL_FetchDescriptorSet(
renderer,
vulkanCommandBuffer,
computePipeline->pipelineLayout->imageDescriptorSetCache,
descriptorImageInfos,
NULL
);
2022-03-04 20:30:33 +00:00
}
static void VULKAN_DispatchCompute(
2022-03-04 20:30:33 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
uint32_t groupCountX,
uint32_t groupCountY,
uint32_t groupCountZ
2022-03-04 20:30:33 +00:00
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanComputePipeline *computePipeline = vulkanCommandBuffer->currentComputePipeline;
VkDescriptorSet descriptorSets[3];
2022-03-04 20:30:33 +00:00
descriptorSets[0] = vulkanCommandBuffer->bufferDescriptorSet;
descriptorSets[1] = vulkanCommandBuffer->imageDescriptorSet;
descriptorSets[2] = renderer->computeUniformBufferObject->descriptorSet;
2022-03-04 20:30:33 +00:00
renderer->vkCmdBindDescriptorSets(
2022-03-04 20:30:33 +00:00
vulkanCommandBuffer->commandBuffer,
VK_PIPELINE_BIND_POINT_COMPUTE,
computePipeline->pipelineLayout->pipelineLayout,
2022-03-04 20:30:33 +00:00
0,
3,
descriptorSets,
2022-03-04 20:30:33 +00:00
1,
&vulkanCommandBuffer->computeUniformOffset
);
renderer->vkCmdDispatch(
vulkanCommandBuffer->commandBuffer,
groupCountX,
groupCountY,
groupCountZ
2022-03-04 20:30:33 +00:00
);
}
static void VULKAN_EndComputePass(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer
2020-12-17 01:23:49 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBuffer *currentComputeBuffer;
VulkanTextureSlice *currentComputeTextureSlice;
VulkanResourceAccessType resourceAccessType = RESOURCE_ACCESS_NONE;
2020-12-20 08:05:12 +00:00
uint32_t i;
2022-03-04 20:30:33 +00:00
/* Re-transition buffers */
for (i = 0; i < vulkanCommandBuffer->boundComputeBufferCount; i += 1)
2022-03-04 20:30:33 +00:00
{
currentComputeBuffer = vulkanCommandBuffer->boundComputeBuffers[i];
2022-03-04 20:30:33 +00:00
if (currentComputeBuffer->usage & VK_BUFFER_USAGE_VERTEX_BUFFER_BIT)
2022-03-04 20:30:33 +00:00
{
resourceAccessType = RESOURCE_ACCESS_VERTEX_BUFFER;
2022-03-04 20:30:33 +00:00
}
else if (currentComputeBuffer->usage & VK_BUFFER_USAGE_INDEX_BUFFER_BIT)
2022-03-04 20:30:33 +00:00
{
resourceAccessType = RESOURCE_ACCESS_INDEX_BUFFER;
2022-03-04 20:30:33 +00:00
}
else if (currentComputeBuffer->usage & VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT)
2022-03-04 20:30:33 +00:00
{
resourceAccessType = RESOURCE_ACCESS_INDIRECT_BUFFER;
2022-03-04 20:30:33 +00:00
}
if (resourceAccessType != RESOURCE_ACCESS_NONE)
{
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
resourceAccessType,
currentComputeBuffer
);
}
2020-12-23 07:17:09 +00:00
}
2020-12-20 08:05:12 +00:00
/* Re-transition sampler images */
for (i = 0; i < vulkanCommandBuffer->boundComputeTextureSliceCount; i += 1)
2020-12-20 08:05:12 +00:00
{
currentComputeTextureSlice = vulkanCommandBuffer->boundComputeTextureSlices[i];
2020-12-23 07:17:09 +00:00
if (currentComputeTextureSlice->parent->usageFlags & VK_IMAGE_USAGE_SAMPLED_BIT)
{
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE,
currentComputeTextureSlice
);
}
2020-12-20 08:05:12 +00:00
}
vulkanCommandBuffer->currentComputePipeline = NULL;
}
2020-12-23 07:17:09 +00:00
/* Copies CPU data into a mapped transfer buffer.
 * On SAFEDISCARD, cycles the active buffer first if the GPU still
 * references it, so in-flight data is never overwritten.
 */
static void VULKAN_SetTransferData(
	Refresh_Renderer *driverData,
	void* data,
	Refresh_TransferBuffer *transferBuffer,
	Refresh_BufferCopy *copyParams,
	Refresh_TransferOptions transferOption
) {
	VulkanRenderer *renderer = (VulkanRenderer*) driverData;
	VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer*) transferBuffer;
	VulkanBuffer *vulkanBuffer;
	uint8_t *bufferPointer;

	if (
		transferOption == REFRESH_TRANSFEROPTIONS_SAFEDISCARD &&
		SDL_AtomicGet(&transferBufferContainer->activeBufferHandle->vulkanBuffer->referenceCount) > 0
	) {
		VULKAN_INTERNAL_DiscardActiveBuffer(
			renderer,
			transferBufferContainer
		);
	}

	/* Re-read the active buffer: the discard above may have replaced it.
	 * (Declaration moved to the top of the block to match the file's C89
	 * declaration style; previously it was a mid-function declaration.) */
	vulkanBuffer = transferBufferContainer->activeBufferHandle->vulkanBuffer;

	bufferPointer =
		vulkanBuffer->usedRegion->allocation->mapPointer +
		vulkanBuffer->usedRegion->resourceOffset +
		copyParams->dstOffset;

	SDL_memcpy(
		bufferPointer,
		((uint8_t*) data) + copyParams->srcOffset,
		copyParams->size
	);
}
2020-12-20 08:05:12 +00:00
/* Copies data out of a mapped transfer buffer into CPU memory. */
static void VULKAN_GetTransferData(
	Refresh_Renderer *driverData,
	Refresh_TransferBuffer *transferBuffer,
	void* data,
	Refresh_BufferCopy *copyParams
) {
	VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer*) transferBuffer;
	VulkanBuffer *vulkanBuffer = transferBufferContainer->activeBufferHandle->vulkanBuffer;
	uint8_t *bufferPointer =
		vulkanBuffer->usedRegion->allocation->mapPointer +
		vulkanBuffer->usedRegion->resourceOffset +
		copyParams->srcOffset;

	/* FIX: removed an unused `renderer` local (set-but-unused warning);
	 * the renderer handle is not needed for a host-side copy. */
	(void) driverData;

	SDL_memcpy(
		((uint8_t*) data) + copyParams->dstOffset,
		bufferPointer,
		copyParams->size
	);
}
2022-03-04 20:30:33 +00:00
/* Begins a copy pass by clearing the per-pass lists of resources
 * that will need re-transitioning in VULKAN_EndCopyPass.
 *
 * Fix: removed the unused local `renderer` (dead code that triggered
 * an unused-variable warning).
 */
static void VULKAN_BeginCopyPass(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer
) {
	VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;

	(void) driverData; /* unused; kept for the driver function table signature */

	vulkanCommandBuffer->copiedGpuBufferCount = 0;
	vulkanCommandBuffer->copiedTextureSliceCount = 0;
}
/* Records a transfer-buffer -> texture copy into the command buffer.
 * With SAFEDISCARD, the destination texture is cycled first if it is
 * discardable and still referenced by in-flight GPU work.
 */
static void VULKAN_UploadToTexture(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	Refresh_TransferBuffer *transferBuffer,
	Refresh_TextureRegion *textureRegion,
	Refresh_BufferImageCopy *copyParams,
	Refresh_WriteOptions writeOption
) {
	VulkanRenderer *renderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *cmdbuf = (VulkanCommandBuffer*) commandBuffer;
	VulkanBufferContainer *srcContainer = (VulkanBufferContainer*) transferBuffer;
	VulkanTextureContainer *dstContainer = (VulkanTextureContainer*) textureRegion->textureSlice.texture;
	VulkanTextureSlice *dstSlice;
	VkBufferImageCopy region;

	dstSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&textureRegion->textureSlice);

	/* Cycle the destination if it's discardable and still in use */
	if (
		writeOption == REFRESH_WRITEOPTIONS_SAFEDISCARD &&
		dstContainer->canBeDiscarded &&
		SDL_AtomicGet(&dstSlice->referenceCount) > 0
	) {
		VULKAN_INTERNAL_DiscardActiveTexture(renderer, dstContainer);

		/* Discarding swapped in a new active texture */
		dstSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&textureRegion->textureSlice);
	}

	/* Source buffer must be readable, destination image writable */
	VULKAN_INTERNAL_BufferMemoryBarrier(
		renderer,
		cmdbuf->commandBuffer,
		RESOURCE_ACCESS_TRANSFER_READ,
		srcContainer->activeBufferHandle->vulkanBuffer
	);

	VULKAN_INTERNAL_ImageMemoryBarrier(
		renderer,
		cmdbuf->commandBuffer,
		RESOURCE_ACCESS_TRANSFER_WRITE,
		dstSlice
	);

	region.bufferOffset = copyParams->bufferOffset;
	region.bufferRowLength = copyParams->bufferStride;
	region.bufferImageHeight = copyParams->bufferImageHeight;
	region.imageOffset.x = textureRegion->x;
	region.imageOffset.y = textureRegion->y;
	region.imageOffset.z = textureRegion->z;
	region.imageExtent.width = textureRegion->w;
	region.imageExtent.height = textureRegion->h;
	region.imageExtent.depth = textureRegion->d;
	region.imageSubresource.aspectMask = dstSlice->parent->aspectFlags;
	region.imageSubresource.baseArrayLayer = textureRegion->textureSlice.layer;
	region.imageSubresource.layerCount = 1;
	region.imageSubresource.mipLevel = textureRegion->textureSlice.mipLevel;

	renderer->vkCmdCopyBufferToImage(
		cmdbuf->commandBuffer,
		srcContainer->activeBufferHandle->vulkanBuffer->buffer,
		dstSlice->parent->image,
		AccessMap[dstSlice->resourceAccessType].imageLayout,
		1,
		&region
	);

	VULKAN_INTERNAL_TrackBuffer(renderer, cmdbuf, srcContainer->activeBufferHandle->vulkanBuffer);
	VULKAN_INTERNAL_TrackTextureSlice(renderer, cmdbuf, dstSlice);
	VULKAN_INTERNAL_TrackCopiedTextureSlice(renderer, cmdbuf, dstSlice);
}
static void VULKAN_UploadToBuffer(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_TransferBuffer *transferBuffer,
Refresh_GpuBuffer *gpuBuffer,
Refresh_BufferCopy *copyParams,
Refresh_WriteOptions writeOption
2020-12-17 01:23:49 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer*) transferBuffer;
VulkanBufferContainer *gpuBufferContainer = (VulkanBufferContainer*) gpuBuffer;
VkBufferCopy bufferCopy;
2020-12-20 08:05:53 +00:00
if (
writeOption == REFRESH_WRITEOPTIONS_SAFEDISCARD &&
SDL_AtomicGet(&gpuBufferContainer->activeBufferHandle->vulkanBuffer->referenceCount) > 0
) {
VULKAN_INTERNAL_DiscardActiveBuffer(
renderer,
gpuBufferContainer
);
}
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
transferBufferContainer->activeBufferHandle->vulkanBuffer
2021-01-02 06:07:15 +00:00
);
2020-12-23 07:17:09 +00:00
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
gpuBufferContainer->activeBufferHandle->vulkanBuffer
);
bufferCopy.srcOffset = copyParams->srcOffset;
bufferCopy.dstOffset = copyParams->dstOffset;
bufferCopy.size = copyParams->size;
renderer->vkCmdCopyBuffer(
vulkanCommandBuffer->commandBuffer,
transferBufferContainer->activeBufferHandle->vulkanBuffer->buffer,
gpuBufferContainer->activeBufferHandle->vulkanBuffer->buffer,
1,
&bufferCopy
);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, transferBufferContainer->activeBufferHandle->vulkanBuffer);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, gpuBufferContainer->activeBufferHandle->vulkanBuffer);
VULKAN_INTERNAL_TrackCopiedBuffer(renderer, vulkanCommandBuffer, gpuBufferContainer->activeBufferHandle->vulkanBuffer);
}
/* Records a texture -> transfer-buffer copy so the CPU can read the
 * texel data back after the command buffer completes. With SAFEDISCARD,
 * the transfer buffer is cycled first if it is still referenced by
 * in-flight GPU work.
 *
 * Fix: removed the redundant re-fetch of the texture slice after the
 * transfer-buffer discard — discarding the *buffer* container cannot
 * change which texture slice is active, so the second fetch always
 * returned the same pointer.
 */
static void VULKAN_DownloadFromTexture(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	Refresh_TextureRegion *textureRegion,
	Refresh_TransferBuffer *transferBuffer,
	Refresh_BufferImageCopy *copyParams,
	Refresh_TransferOptions transferOption
) {
	VulkanRenderer *renderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
	VulkanTextureSlice *vulkanTextureSlice;
	VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer*) transferBuffer;
	VkBufferImageCopy imageCopy;

	vulkanTextureSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&textureRegion->textureSlice);

	/* Cycle the transfer buffer if it's still in use */
	if (
		transferOption == REFRESH_TRANSFEROPTIONS_SAFEDISCARD &&
		SDL_AtomicGet(&transferBufferContainer->activeBufferHandle->vulkanBuffer->referenceCount) > 0
	) {
		VULKAN_INTERNAL_DiscardActiveBuffer(
			renderer,
			transferBufferContainer
		);
	}

	/* Destination buffer must be writable, source image readable */
	VULKAN_INTERNAL_BufferMemoryBarrier(
		renderer,
		vulkanCommandBuffer->commandBuffer,
		RESOURCE_ACCESS_TRANSFER_WRITE,
		transferBufferContainer->activeBufferHandle->vulkanBuffer
	);

	VULKAN_INTERNAL_ImageMemoryBarrier(
		renderer,
		vulkanCommandBuffer->commandBuffer,
		RESOURCE_ACCESS_TRANSFER_READ,
		vulkanTextureSlice
	);

	imageCopy.imageExtent.width = textureRegion->w;
	imageCopy.imageExtent.height = textureRegion->h;
	imageCopy.imageExtent.depth = textureRegion->d;
	imageCopy.imageOffset.x = textureRegion->x;
	imageCopy.imageOffset.y = textureRegion->y;
	imageCopy.imageOffset.z = textureRegion->z;
	imageCopy.imageSubresource.aspectMask = vulkanTextureSlice->parent->aspectFlags;
	imageCopy.imageSubresource.baseArrayLayer = textureRegion->textureSlice.layer;
	imageCopy.imageSubresource.layerCount = 1;
	imageCopy.imageSubresource.mipLevel = textureRegion->textureSlice.mipLevel;
	imageCopy.bufferOffset = copyParams->bufferOffset;
	imageCopy.bufferRowLength = copyParams->bufferStride;
	imageCopy.bufferImageHeight = copyParams->bufferImageHeight;

	renderer->vkCmdCopyImageToBuffer(
		vulkanCommandBuffer->commandBuffer,
		vulkanTextureSlice->parent->image,
		AccessMap[vulkanTextureSlice->resourceAccessType].imageLayout,
		transferBufferContainer->activeBufferHandle->vulkanBuffer->buffer,
		1,
		&imageCopy
	);

	VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, transferBufferContainer->activeBufferHandle->vulkanBuffer);
	VULKAN_INTERNAL_TrackTextureSlice(renderer, vulkanCommandBuffer, vulkanTextureSlice);
	VULKAN_INTERNAL_TrackCopiedTextureSlice(renderer, vulkanCommandBuffer, vulkanTextureSlice);
}
static void VULKAN_DownloadFromBuffer(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_GpuBuffer *gpuBuffer,
Refresh_TransferBuffer *transferBuffer,
Refresh_BufferCopy *copyParams,
Refresh_TransferOptions transferOption
2020-12-17 01:23:49 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBufferContainer *gpuBufferContainer = (VulkanBufferContainer*) gpuBuffer;
VulkanBufferContainer *transferBufferContainer = (VulkanBufferContainer*) transferBuffer;
VkBufferCopy bufferCopy;
if (
transferOption == REFRESH_TRANSFEROPTIONS_SAFEDISCARD &&
SDL_AtomicGet(&transferBufferContainer->activeBufferHandle->vulkanBuffer->referenceCount) > 0
) {
VULKAN_INTERNAL_DiscardActiveBuffer(
renderer,
transferBufferContainer
);
}
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
transferBufferContainer->activeBufferHandle->vulkanBuffer
);
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
gpuBufferContainer->activeBufferHandle->vulkanBuffer
);
bufferCopy.srcOffset = copyParams->srcOffset;
bufferCopy.dstOffset = copyParams->dstOffset;
bufferCopy.size = copyParams->size;
renderer->vkCmdCopyBuffer(
2021-01-02 06:07:15 +00:00
vulkanCommandBuffer->commandBuffer,
gpuBufferContainer->activeBufferHandle->vulkanBuffer->buffer,
transferBufferContainer->activeBufferHandle->vulkanBuffer->buffer,
1,
&bufferCopy
2021-01-02 06:07:15 +00:00
);
2020-12-23 06:56:26 +00:00
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, transferBufferContainer->activeBufferHandle->vulkanBuffer);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, gpuBufferContainer->activeBufferHandle->vulkanBuffer);
VULKAN_INTERNAL_TrackCopiedBuffer(renderer, vulkanCommandBuffer, gpuBufferContainer->activeBufferHandle->vulkanBuffer);
}
2022-03-04 20:30:33 +00:00
/* Records a texture-region -> texture-region copy. With SAFEDISCARD,
 * the destination texture is cycled first if it is discardable and
 * still referenced by in-flight GPU work.
 */
static void VULKAN_CopyTextureToTexture(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer,
	Refresh_TextureRegion *source,
	Refresh_TextureRegion *destination,
	Refresh_WriteOptions writeOption
) {
	VulkanRenderer *renderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *cmdbuf = (VulkanCommandBuffer*) commandBuffer;
	VulkanTextureContainer *destContainer = (VulkanTextureContainer*) destination->textureSlice.texture;
	VulkanTextureSlice *sourceSlice;
	VulkanTextureSlice *destSlice;
	VkImageCopy region;

	sourceSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&source->textureSlice);
	destSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&destination->textureSlice);

	/* Cycle the destination if it's discardable and still in use */
	if (
		writeOption == REFRESH_WRITEOPTIONS_SAFEDISCARD &&
		destContainer->canBeDiscarded &&
		SDL_AtomicGet(&destSlice->referenceCount) > 0
	) {
		VULKAN_INTERNAL_DiscardActiveTexture(renderer, destContainer);

		/* Discarding swapped in a new active texture */
		destSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&destination->textureSlice);
	}

	/* Source must be readable, destination writable */
	VULKAN_INTERNAL_ImageMemoryBarrier(
		renderer,
		cmdbuf->commandBuffer,
		RESOURCE_ACCESS_TRANSFER_READ,
		sourceSlice
	);

	VULKAN_INTERNAL_ImageMemoryBarrier(
		renderer,
		cmdbuf->commandBuffer,
		RESOURCE_ACCESS_TRANSFER_WRITE,
		destSlice
	);

	region.srcOffset.x = source->x;
	region.srcOffset.y = source->y;
	region.srcOffset.z = source->z;
	region.srcSubresource.aspectMask = sourceSlice->parent->aspectFlags;
	region.srcSubresource.baseArrayLayer = source->textureSlice.layer;
	region.srcSubresource.layerCount = 1;
	region.srcSubresource.mipLevel = source->textureSlice.mipLevel;
	region.dstOffset.x = destination->x;
	region.dstOffset.y = destination->y;
	region.dstOffset.z = destination->z;
	region.dstSubresource.aspectMask = destSlice->parent->aspectFlags;
	region.dstSubresource.baseArrayLayer = destination->textureSlice.layer;
	region.dstSubresource.layerCount = 1;
	region.dstSubresource.mipLevel = destination->textureSlice.mipLevel;

	/* Extent comes from the source region */
	region.extent.width = source->w;
	region.extent.height = source->h;
	region.extent.depth = source->d;

	renderer->vkCmdCopyImage(
		cmdbuf->commandBuffer,
		sourceSlice->parent->image,
		AccessMap[sourceSlice->resourceAccessType].imageLayout,
		destSlice->parent->image,
		AccessMap[destSlice->resourceAccessType].imageLayout,
		1,
		&region
	);

	VULKAN_INTERNAL_TrackTextureSlice(renderer, cmdbuf, sourceSlice);
	VULKAN_INTERNAL_TrackTextureSlice(renderer, cmdbuf, destSlice);
	VULKAN_INTERNAL_TrackCopiedTextureSlice(renderer, cmdbuf, sourceSlice);
	VULKAN_INTERNAL_TrackCopiedTextureSlice(renderer, cmdbuf, destSlice);
}
static void VULKAN_CopyTextureToBuffer(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_TextureRegion *textureRegion,
Refresh_GpuBuffer *gpuBuffer,
Refresh_BufferImageCopy *copyParams,
Refresh_WriteOptions writeOption
2020-12-20 07:31:55 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanTextureSlice *vulkanTextureSlice;
VulkanBufferContainer *bufferContainer = (VulkanBufferContainer*) gpuBuffer;
VkBufferImageCopy imageCopy;
2020-12-20 07:31:55 +00:00
vulkanTextureSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&textureRegion->textureSlice);
if (
writeOption == REFRESH_WRITEOPTIONS_SAFEDISCARD &&
SDL_AtomicGet(&bufferContainer->activeBufferHandle->vulkanBuffer->referenceCount) > 0
) {
VULKAN_INTERNAL_DiscardActiveBuffer(
renderer,
bufferContainer
);
}
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
vulkanTextureSlice
);
2020-12-20 07:31:55 +00:00
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
2021-01-02 06:07:15 +00:00
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
bufferContainer->activeBufferHandle->vulkanBuffer
);
imageCopy.imageExtent.width = textureRegion->w;
imageCopy.imageExtent.height = textureRegion->h;
imageCopy.imageExtent.depth = textureRegion->d;
imageCopy.imageOffset.x = textureRegion->x;
imageCopy.imageOffset.y = textureRegion->y;
imageCopy.imageOffset.z = textureRegion->z;
imageCopy.imageSubresource.aspectMask = vulkanTextureSlice->parent->aspectFlags;
imageCopy.imageSubresource.baseArrayLayer = textureRegion->textureSlice.layer;
imageCopy.imageSubresource.layerCount = 1;
imageCopy.imageSubresource.mipLevel = textureRegion->textureSlice.mipLevel;
imageCopy.bufferOffset = copyParams->bufferOffset;
imageCopy.bufferRowLength = copyParams->bufferStride;
imageCopy.bufferImageHeight = copyParams->bufferImageHeight;
renderer->vkCmdCopyImageToBuffer(
vulkanCommandBuffer->commandBuffer,
vulkanTextureSlice->parent->image,
AccessMap[vulkanTextureSlice->resourceAccessType].imageLayout,
bufferContainer->activeBufferHandle->vulkanBuffer->buffer,
1,
&imageCopy
);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, bufferContainer->activeBufferHandle->vulkanBuffer);
VULKAN_INTERNAL_TrackTextureSlice(renderer, vulkanCommandBuffer, vulkanTextureSlice);
VULKAN_INTERNAL_TrackCopiedBuffer(renderer, vulkanCommandBuffer, bufferContainer->activeBufferHandle->vulkanBuffer);
VULKAN_INTERNAL_TrackCopiedTextureSlice(renderer, vulkanCommandBuffer, vulkanTextureSlice);
2020-12-20 07:31:55 +00:00
}
static void VULKAN_CopyBufferToTexture(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_GpuBuffer *gpuBuffer,
Refresh_TextureRegion *textureRegion,
Refresh_BufferImageCopy *copyParams,
Refresh_WriteOptions writeOption
2020-12-20 07:31:55 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBufferContainer *bufferContainer = (VulkanBufferContainer*) gpuBuffer;
VulkanTextureContainer *textureContainer = (VulkanTextureContainer*) textureRegion->textureSlice.texture;
VulkanTextureSlice *vulkanTextureSlice;
VkBufferImageCopy imageCopy;
2020-12-21 23:44:43 +00:00
vulkanTextureSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&textureRegion->textureSlice);
if (
writeOption == REFRESH_WRITEOPTIONS_SAFEDISCARD &&
textureContainer->canBeDiscarded &&
SDL_AtomicGet(&vulkanTextureSlice->referenceCount) > 0
) {
VULKAN_INTERNAL_DiscardActiveTexture(
renderer,
textureContainer
);
vulkanTextureSlice = VULKAN_INTERNAL_RefreshToVulkanTextureSlice(&textureRegion->textureSlice);
}
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
bufferContainer->activeBufferHandle->vulkanBuffer
);
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
vulkanTextureSlice
);
imageCopy.imageExtent.width = textureRegion->w;
imageCopy.imageExtent.height = textureRegion->h;
imageCopy.imageExtent.depth = textureRegion->d;
imageCopy.imageOffset.x = textureRegion->x;
imageCopy.imageOffset.y = textureRegion->y;
imageCopy.imageOffset.z = textureRegion->z;
imageCopy.imageSubresource.aspectMask = vulkanTextureSlice->parent->aspectFlags;
imageCopy.imageSubresource.baseArrayLayer = textureRegion->textureSlice.layer;
imageCopy.imageSubresource.layerCount = 1;
imageCopy.imageSubresource.mipLevel = textureRegion->textureSlice.mipLevel;
imageCopy.bufferOffset = copyParams->bufferOffset;
imageCopy.bufferRowLength = copyParams->bufferStride;
imageCopy.bufferImageHeight = copyParams->bufferImageHeight;
renderer->vkCmdCopyBufferToImage(
2021-01-02 06:07:15 +00:00
vulkanCommandBuffer->commandBuffer,
bufferContainer->activeBufferHandle->vulkanBuffer->buffer,
vulkanTextureSlice->parent->image,
AccessMap[vulkanTextureSlice->resourceAccessType].imageLayout,
1,
&imageCopy
2021-01-02 06:07:15 +00:00
);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, bufferContainer->activeBufferHandle->vulkanBuffer);
VULKAN_INTERNAL_TrackTextureSlice(renderer, vulkanCommandBuffer, vulkanTextureSlice);
VULKAN_INTERNAL_TrackCopiedBuffer(renderer, vulkanCommandBuffer, bufferContainer->activeBufferHandle->vulkanBuffer);
VULKAN_INTERNAL_TrackCopiedTextureSlice(renderer, vulkanCommandBuffer, vulkanTextureSlice);
2020-12-20 07:31:55 +00:00
}
static void VULKAN_CopyBufferToBuffer(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_GpuBuffer *source,
Refresh_GpuBuffer *destination,
Refresh_BufferCopy *copyParams,
Refresh_WriteOptions writeOption
2020-12-30 01:31:39 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBufferContainer *srcContainer = (VulkanBufferContainer*) source;
VulkanBufferContainer *dstContainer = (VulkanBufferContainer*) destination;
VkBufferCopy bufferCopy;
2020-12-31 04:39:47 +00:00
if (
writeOption == REFRESH_WRITEOPTIONS_SAFEDISCARD &&
SDL_AtomicGet(&dstContainer->activeBufferHandle->vulkanBuffer->referenceCount) > 0
) {
VULKAN_INTERNAL_DiscardActiveBuffer(
renderer,
dstContainer
);
}
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
srcContainer->activeBufferHandle->vulkanBuffer
);
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
2021-01-02 06:07:15 +00:00
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
dstContainer->activeBufferHandle->vulkanBuffer
2020-12-31 00:47:13 +00:00
);
bufferCopy.srcOffset = copyParams->srcOffset;
bufferCopy.dstOffset = copyParams->dstOffset;
bufferCopy.size = copyParams->size;
renderer->vkCmdCopyBuffer(
vulkanCommandBuffer->commandBuffer,
srcContainer->activeBufferHandle->vulkanBuffer->buffer,
dstContainer->activeBufferHandle->vulkanBuffer->buffer,
1,
&bufferCopy
);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, srcContainer->activeBufferHandle->vulkanBuffer);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, dstContainer->activeBufferHandle->vulkanBuffer);
VULKAN_INTERNAL_TrackCopiedBuffer(renderer, vulkanCommandBuffer, srcContainer->activeBufferHandle->vulkanBuffer);
VULKAN_INTERNAL_TrackCopiedBuffer(renderer, vulkanCommandBuffer, dstContainer->activeBufferHandle->vulkanBuffer);
2020-12-30 01:31:39 +00:00
}
static void VULKAN_GenerateMipmaps(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_Texture *texture
2020-12-30 01:31:39 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanTexture *vulkanTexture = ((VulkanTextureContainer*) texture)->activeTextureHandle->vulkanTexture;
VulkanTextureSlice *srcTextureSlice;
VulkanTextureSlice *dstTextureSlice;
VkImageBlit blit;
uint32_t layer, level;
2021-01-02 21:31:17 +00:00
if (vulkanTexture->levelCount <= 1) { return; }
2020-12-31 00:47:13 +00:00
/* Blit each slice sequentially. Barriers, barriers everywhere! */
for (layer = 0; layer < vulkanTexture->layerCount; layer += 1)
for (level = 1; level < vulkanTexture->levelCount; level += 1)
2020-12-31 00:47:13 +00:00
{
srcTextureSlice = VULKAN_INTERNAL_FetchTextureSlice(
vulkanTexture,
layer,
level - 1
);
dstTextureSlice = VULKAN_INTERNAL_FetchTextureSlice(
vulkanTexture,
layer,
level
);
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
srcTextureSlice
);
VULKAN_INTERNAL_ImageMemoryBarrier(
2020-12-31 00:47:13 +00:00
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
dstTextureSlice
2020-12-31 00:47:13 +00:00
);
blit.srcOffsets[0].x = 0;
blit.srcOffsets[0].y = 0;
blit.srcOffsets[0].z = 0;
blit.srcOffsets[1].x = vulkanTexture->dimensions.width >> (level - 1);
blit.srcOffsets[1].y = vulkanTexture->dimensions.height >> (level - 1);
blit.srcOffsets[1].z = 1;
blit.dstOffsets[0].x = 0;
blit.dstOffsets[0].y = 0;
blit.dstOffsets[0].z = 0;
blit.dstOffsets[1].x = vulkanTexture->dimensions.width >> level;
blit.dstOffsets[1].y = vulkanTexture->dimensions.height >> level;
blit.dstOffsets[1].z = 1;
blit.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
blit.srcSubresource.baseArrayLayer = layer;
blit.srcSubresource.layerCount = 1;
blit.srcSubresource.mipLevel = level - 1;
blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
blit.dstSubresource.baseArrayLayer = layer;
blit.dstSubresource.layerCount = 1;
blit.dstSubresource.mipLevel = level;
renderer->vkCmdBlitImage(
vulkanCommandBuffer->commandBuffer,
vulkanTexture->image,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
vulkanTexture->image,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1,
&blit,
VK_FILTER_LINEAR
);
}
2020-12-30 01:31:39 +00:00
}
static void VULKAN_EndCopyPass(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer
2020-12-30 01:31:39 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBuffer *currentBuffer;
VulkanTextureSlice *currentTextureSlice;
VulkanResourceAccessType resourceAccessType = RESOURCE_ACCESS_NONE;
2020-12-31 00:47:13 +00:00
uint32_t i;
/* Re-transition GpuBuffers */
for (i = 0; i < vulkanCommandBuffer->copiedGpuBufferCount; i += 1)
2020-12-31 00:47:13 +00:00
{
currentBuffer = vulkanCommandBuffer->copiedGpuBuffers[i];
2020-12-31 00:47:13 +00:00
if (currentBuffer->usage & VK_BUFFER_USAGE_VERTEX_BUFFER_BIT)
{
resourceAccessType = RESOURCE_ACCESS_VERTEX_BUFFER;
}
else if (currentBuffer->usage & VK_BUFFER_USAGE_INDEX_BUFFER_BIT)
{
resourceAccessType = RESOURCE_ACCESS_INDEX_BUFFER;
}
else if (currentBuffer->usage & VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT)
{
resourceAccessType = RESOURCE_ACCESS_INDIRECT_BUFFER;
}
if (resourceAccessType != RESOURCE_ACCESS_NONE)
{
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
resourceAccessType,
currentBuffer
);
}
}
/* Re-transition textures */
for (i = 0; i < vulkanCommandBuffer->copiedTextureSliceCount; i += 1)
{
currentTextureSlice = vulkanCommandBuffer->copiedTextureSlices[i];
if (currentTextureSlice->parent->usageFlags & VK_IMAGE_USAGE_SAMPLED_BIT)
{
resourceAccessType = RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE;
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE,
currentTextureSlice
);
}
2020-12-31 00:47:13 +00:00
}
vulkanCommandBuffer->copiedGpuBufferCount = 0;
vulkanCommandBuffer->copiedTextureSliceCount = 0;
2020-12-30 01:31:39 +00:00
}
2021-01-03 02:02:20 +00:00
static void VULKAN_INTERNAL_AllocateCommandBuffers(
2021-01-02 06:07:15 +00:00
VulkanRenderer *renderer,
2021-01-03 02:02:20 +00:00
VulkanCommandPool *vulkanCommandPool,
uint32_t allocateCount
2021-01-02 06:07:15 +00:00
) {
VkCommandBufferAllocateInfo allocateInfo;
VkResult vulkanResult;
2021-01-03 02:02:20 +00:00
uint32_t i;
VkCommandBuffer *commandBuffers = SDL_stack_alloc(VkCommandBuffer, allocateCount);
VulkanCommandBuffer *commandBuffer;
2021-01-02 06:07:15 +00:00
2021-01-03 02:02:20 +00:00
vulkanCommandPool->inactiveCommandBufferCapacity += allocateCount;
vulkanCommandPool->inactiveCommandBuffers = SDL_realloc(
vulkanCommandPool->inactiveCommandBuffers,
sizeof(VulkanCommandBuffer*) *
vulkanCommandPool->inactiveCommandBufferCapacity
);
allocateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
allocateInfo.pNext = NULL;
allocateInfo.commandPool = vulkanCommandPool->commandPool;
allocateInfo.commandBufferCount = allocateCount;
allocateInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
vulkanResult = renderer->vkAllocateCommandBuffers(
renderer->logicalDevice,
&allocateInfo,
commandBuffers
);
if (vulkanResult != VK_SUCCESS)
2021-01-02 06:07:15 +00:00
{
2021-01-27 20:51:36 +00:00
LogVulkanResultAsError("vkAllocateCommandBuffers", vulkanResult);
2021-01-03 02:02:20 +00:00
SDL_stack_free(commandBuffers);
return;
}
2021-01-02 06:07:15 +00:00
2021-01-03 02:02:20 +00:00
for (i = 0; i < allocateCount; i += 1)
{
commandBuffer = SDL_malloc(sizeof(VulkanCommandBuffer));
commandBuffer->commandPool = vulkanCommandPool;
commandBuffer->commandBuffer = commandBuffers[i];
commandBuffer->inFlightFence = VK_NULL_HANDLE;
commandBuffer->renderPassDepthTextureSlice = NULL;
/* Presentation tracking */
commandBuffer->presentDataCapacity = 1;
commandBuffer->presentDataCount = 0;
commandBuffer->presentDatas = SDL_malloc(
commandBuffer->presentDataCapacity * sizeof(VkPresentInfoKHR)
);
commandBuffer->waitSemaphoreCapacity = 1;
commandBuffer->waitSemaphoreCount = 0;
commandBuffer->waitSemaphores = SDL_malloc(
commandBuffer->waitSemaphoreCapacity * sizeof(VkSemaphore)
);
commandBuffer->signalSemaphoreCapacity = 1;
commandBuffer->signalSemaphoreCount = 0;
commandBuffer->signalSemaphores = SDL_malloc(
commandBuffer->signalSemaphoreCapacity * sizeof(VkSemaphore)
);
/* Descriptor set tracking */
commandBuffer->boundDescriptorSetDataCapacity = 16;
commandBuffer->boundDescriptorSetDataCount = 0;
commandBuffer->boundDescriptorSetDatas = SDL_malloc(
commandBuffer->boundDescriptorSetDataCapacity * sizeof(DescriptorSetData)
);
/* Bound compute resource tracking */
commandBuffer->boundComputeBufferCapacity = 16;
commandBuffer->boundComputeBufferCount = 0;
commandBuffer->boundComputeBuffers = SDL_malloc(
commandBuffer->boundComputeBufferCapacity * sizeof(VulkanBuffer*)
);
commandBuffer->boundComputeTextureSliceCapacity = 16;
commandBuffer->boundComputeTextureSliceCount = 0;
commandBuffer->boundComputeTextureSlices = SDL_malloc(
commandBuffer->boundComputeTextureSliceCapacity * sizeof(VulkanTextureSlice*)
);
/* Copy resource tracking */
commandBuffer->copiedGpuBufferCapacity = 16;
commandBuffer->copiedGpuBufferCount = 0;
commandBuffer->copiedGpuBuffers = SDL_malloc(
commandBuffer->copiedGpuBufferCapacity * sizeof(VulkanBuffer*)
);
commandBuffer->copiedTextureSliceCapacity = 16;
commandBuffer->copiedTextureSliceCount = 0;
commandBuffer->copiedTextureSlices = SDL_malloc(
commandBuffer->copiedTextureSliceCapacity * sizeof(VulkanTextureSlice*)
);
/* Resource tracking */
commandBuffer->usedBufferCapacity = 4;
commandBuffer->usedBufferCount = 0;
commandBuffer->usedBuffers = SDL_malloc(
commandBuffer->usedBufferCapacity * sizeof(VulkanBuffer*)
);
commandBuffer->usedTextureSliceCapacity = 4;
commandBuffer->usedTextureSliceCount = 0;
commandBuffer->usedTextureSlices = SDL_malloc(
commandBuffer->usedTextureSliceCapacity * sizeof(VulkanTextureSlice*)
);
commandBuffer->usedSamplerCapacity = 4;
commandBuffer->usedSamplerCount = 0;
commandBuffer->usedSamplers = SDL_malloc(
commandBuffer->usedSamplerCapacity * sizeof(VulkanSampler*)
);
commandBuffer->usedGraphicsPipelineCapacity = 4;
commandBuffer->usedGraphicsPipelineCount = 0;
commandBuffer->usedGraphicsPipelines = SDL_malloc(
commandBuffer->usedGraphicsPipelineCapacity * sizeof(VulkanGraphicsPipeline*)
);
commandBuffer->usedComputePipelineCapacity = 4;
commandBuffer->usedComputePipelineCount = 0;
commandBuffer->usedComputePipelines = SDL_malloc(
commandBuffer->usedComputePipelineCapacity * sizeof(VulkanComputePipeline*)
);
commandBuffer->usedFramebufferCapacity = 4;
commandBuffer->usedFramebufferCount = 0;
commandBuffer->usedFramebuffers = SDL_malloc(
commandBuffer->usedFramebufferCapacity * sizeof(VulkanFramebuffer*)
);
2021-01-03 02:02:20 +00:00
vulkanCommandPool->inactiveCommandBuffers[
vulkanCommandPool->inactiveCommandBufferCount
] = commandBuffer;
2021-01-03 02:02:20 +00:00
vulkanCommandPool->inactiveCommandBufferCount += 1;
}
2021-01-02 06:07:15 +00:00
2021-01-03 02:02:20 +00:00
SDL_stack_free(commandBuffers);
}
2021-01-02 06:07:15 +00:00
2021-01-03 02:02:20 +00:00
/* Returns the per-thread command pool for `threadID`, creating it (and
 * seeding it with two command buffers) on first use. Returns NULL if
 * pool creation fails.
 *
 * Fix: the VulkanCommandPool allocation is freed on the
 * vkCreateCommandPool failure path; previously it leaked.
 */
static VulkanCommandPool* VULKAN_INTERNAL_FetchCommandPool(
	VulkanRenderer *renderer,
	SDL_threadID threadID
) {
	VulkanCommandPool *vulkanCommandPool;
	VkCommandPoolCreateInfo commandPoolCreateInfo;
	VkResult vulkanResult;
	CommandPoolHash commandPoolHash;

	commandPoolHash.threadID = threadID;

	vulkanCommandPool = CommandPoolHashTable_Fetch(
		&renderer->commandPoolHashTable,
		commandPoolHash
	);

	/* Fast path: this thread already has a pool */
	if (vulkanCommandPool != NULL)
	{
		return vulkanCommandPool;
	}

	vulkanCommandPool = (VulkanCommandPool*) SDL_malloc(sizeof(VulkanCommandPool));

	commandPoolCreateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
	commandPoolCreateInfo.pNext = NULL;
	commandPoolCreateInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
	commandPoolCreateInfo.queueFamilyIndex = renderer->queueFamilyIndex;

	vulkanResult = renderer->vkCreateCommandPool(
		renderer->logicalDevice,
		&commandPoolCreateInfo,
		NULL,
		&vulkanCommandPool->commandPool
	);

	if (vulkanResult != VK_SUCCESS)
	{
		Refresh_LogError("Failed to create command pool!");
		LogVulkanResultAsError("vkCreateCommandPool", vulkanResult);
		SDL_free(vulkanCommandPool); /* don't leak the wrapper on failure */
		return NULL;
	}

	vulkanCommandPool->threadID = threadID;

	vulkanCommandPool->inactiveCommandBufferCapacity = 0;
	vulkanCommandPool->inactiveCommandBufferCount = 0;
	vulkanCommandPool->inactiveCommandBuffers = NULL;

	VULKAN_INTERNAL_AllocateCommandBuffers(
		renderer,
		vulkanCommandPool,
		2
	);

	CommandPoolHashTable_Insert(
		&renderer->commandPoolHashTable,
		commandPoolHash,
		vulkanCommandPool
	);

	return vulkanCommandPool;
}
/* Pops an inactive command buffer from this thread's pool, growing the
 * pool (doubling its capacity) when it runs dry. Returns NULL if the
 * pool itself could not be created.
 *
 * Fix: guards against VULKAN_INTERNAL_FetchCommandPool returning NULL
 * (it does so when vkCreateCommandPool fails); previously that NULL
 * was dereferenced immediately.
 */
static VulkanCommandBuffer* VULKAN_INTERNAL_GetInactiveCommandBufferFromPool(
	VulkanRenderer *renderer,
	SDL_threadID threadID
) {
	VulkanCommandPool *commandPool =
		VULKAN_INTERNAL_FetchCommandPool(renderer, threadID);
	VulkanCommandBuffer *commandBuffer;

	if (commandPool == NULL)
	{
		return NULL;
	}

	/* Pool exhausted: allocate another batch, doubling capacity */
	if (commandPool->inactiveCommandBufferCount == 0)
	{
		VULKAN_INTERNAL_AllocateCommandBuffers(
			renderer,
			commandPool,
			commandPool->inactiveCommandBufferCapacity
		);
	}

	commandBuffer = commandPool->inactiveCommandBuffers[commandPool->inactiveCommandBufferCount - 1];

	commandPool->inactiveCommandBufferCount -= 1;

	return commandBuffer;
}
2021-01-05 23:00:51 +00:00
/* Acquires a command buffer from the calling thread's pool, clears its
 * per-submission state, resets the underlying VkCommandBuffer, and begins
 * recording. The reset happens here (rather than at clean-up time) so it
 * always runs on the same thread that acquired the command buffer.
 */
static Refresh_CommandBuffer* VULKAN_AcquireCommandBuffer(
	Refresh_Renderer *driverData
) {
	VulkanRenderer *renderer = (VulkanRenderer*) driverData;
	VulkanCommandBuffer *cmdbuf;
	SDL_threadID currentThread = SDL_ThreadID();
	VkResult resetResult;

	SDL_LockMutex(renderer->acquireCommandBufferLock);
	cmdbuf = VULKAN_INTERNAL_GetInactiveCommandBufferFromPool(renderer, currentThread);
	SDL_UnlockMutex(renderer->acquireCommandBufferLock);

	/* Wipe all state carried over from the previous submission */
	cmdbuf->currentComputePipeline = NULL;
	cmdbuf->currentGraphicsPipeline = NULL;

	cmdbuf->vertexUniformOffset = 0;
	cmdbuf->fragmentUniformOffset = 0;
	cmdbuf->computeUniformOffset = 0;

	cmdbuf->renderPassColorTargetTextureSliceCount = 0;

	/* By default the in-flight fence goes back to the pool on clean-up;
	 * VULKAN_SubmitAndAcquireFence flips this off. */
	cmdbuf->autoReleaseFence = 1;

	cmdbuf->isDefrag = 0;

	/* Reset the command buffer here to avoid resets being called
	 * from a separate thread than where the command buffer was acquired
	 */
	resetResult = renderer->vkResetCommandBuffer(
		cmdbuf->commandBuffer,
		VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
	);

	if (resetResult != VK_SUCCESS)
	{
		LogVulkanResultAsError("vkResetCommandBuffer", resetResult);
	}

	VULKAN_INTERNAL_BeginCommandBuffer(renderer, cmdbuf);

	return (Refresh_CommandBuffer*) cmdbuf;
}
static WindowData* VULKAN_INTERNAL_FetchWindowData(
void *windowHandle
) {
return (WindowData*) SDL_GetWindowData(windowHandle, WINDOW_DATA);
}
/* Claims an SDL window for presentation: allocates its WindowData, creates
 * a swapchain for it, attaches the data to the window, and registers it in
 * the renderer's claimed-window list (growing the list as needed).
 *
 * Returns 1 on success, 0 if the window was already claimed or the
 * swapchain could not be created.
 */
static uint8_t VULKAN_ClaimWindow(
	Refresh_Renderer *driverData,
	void *windowHandle,
	Refresh_PresentMode presentMode
) {
	VulkanRenderer *renderer = (VulkanRenderer*) driverData;
	WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(windowHandle);

	/* Guard: a window may only be claimed once */
	if (windowData != NULL)
	{
		Refresh_LogWarn("Window already claimed!");
		return 0;
	}

	windowData = SDL_malloc(sizeof(WindowData));
	windowData->windowHandle = windowHandle;
	windowData->preferredPresentMode = presentMode;

	if (!VULKAN_INTERNAL_CreateSwapchain(renderer, windowData))
	{
		Refresh_LogError("Could not create swapchain, failed to claim window!");
		SDL_free(windowData);
		return 0;
	}

	SDL_SetWindowData((SDL_Window*) windowHandle, WINDOW_DATA, windowData);

	/* Register in the claimed-window list, doubling capacity when full */
	if (renderer->claimedWindowCount >= renderer->claimedWindowCapacity)
	{
		renderer->claimedWindowCapacity *= 2;
		renderer->claimedWindows = SDL_realloc(
			renderer->claimedWindows,
			renderer->claimedWindowCapacity * sizeof(WindowData*)
		);
	}

	renderer->claimedWindows[renderer->claimedWindowCount] = windowData;
	renderer->claimedWindowCount += 1;

	return 1;
}
/* Releases a previously claimed window: tears down its swapchain (after
 * waiting for the device to go idle), removes it from the claimed-window
 * list, frees its WindowData and clears the SDL window association.
 * No-op when the window was never claimed.
 */
static void VULKAN_UnclaimWindow(
	Refresh_Renderer *driverData,
	void *windowHandle
) {
	VulkanRenderer *renderer = (VulkanRenderer*) driverData;
	WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(windowHandle);
	uint32_t slot;

	if (windowData == NULL)
	{
		return;
	}

	if (windowData->swapchainData != NULL)
	{
		/* Make sure the GPU is finished with the swapchain images first */
		VULKAN_Wait(driverData);

		VULKAN_INTERNAL_DestroySwapchain(
			(VulkanRenderer*) driverData,
			windowData
		);
	}

	/* Swap-remove the window from the claimed list */
	for (slot = 0; slot < renderer->claimedWindowCount; slot += 1)
	{
		if (renderer->claimedWindows[slot]->windowHandle == windowHandle)
		{
			renderer->claimedWindowCount -= 1;
			renderer->claimedWindows[slot] = renderer->claimedWindows[renderer->claimedWindowCount];
			break;
		}
	}

	SDL_free(windowData);
	SDL_SetWindowData((SDL_Window*) windowHandle, WINDOW_DATA, NULL);
}
static Refresh_Texture* VULKAN_AcquireSwapchainTexture(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
void *windowHandle,
uint32_t *pWidth,
uint32_t *pHeight
2020-12-17 04:19:11 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
uint32_t swapchainImageIndex;
WindowData *windowData;
VulkanSwapchainData *swapchainData;
VkResult acquireResult = VK_SUCCESS;
VulkanTextureContainer *swapchainTextureContainer = NULL;
VulkanTextureSlice *swapchainTextureSlice;
VulkanPresentData *presentData;
2020-12-31 07:02:12 +00:00
windowData = VULKAN_INTERNAL_FetchWindowData(windowHandle);
if (windowData == NULL)
{
return NULL;
}
swapchainData = windowData->swapchainData;
/* Drop frames if too many submissions in flight */
if (swapchainData->submissionsInFlight >= MAX_FRAMES_IN_FLIGHT)
{
return NULL;
}
/* Window is claimed but swapchain is invalid! */
if (swapchainData == NULL)
{
if (SDL_GetWindowFlags(windowHandle) & SDL_WINDOW_MINIMIZED)
{
/* Window is minimized, don't bother */
return NULL;
}
/* Let's try to recreate */
VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
swapchainData = windowData->swapchainData;
if (swapchainData == NULL)
{
Refresh_LogWarn("Failed to recreate swapchain!");
return NULL;
}
}
acquireResult = renderer->vkAcquireNextImageKHR(
renderer->logicalDevice,
swapchainData->swapchain,
UINT64_MAX,
swapchainData->imageAvailableSemaphore,
VK_NULL_HANDLE,
&swapchainImageIndex
);
/* Acquisition is invalid, let's try to recreate */
if (acquireResult != VK_SUCCESS && acquireResult != VK_SUBOPTIMAL_KHR)
{
VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
swapchainData = windowData->swapchainData;
if (swapchainData == NULL)
{
Refresh_LogWarn("Failed to recreate swapchain!");
return NULL;
}
2020-12-27 23:34:15 +00:00
acquireResult = renderer->vkAcquireNextImageKHR(
renderer->logicalDevice,
swapchainData->swapchain,
UINT64_MAX,
2022-02-10 05:42:19 +00:00
swapchainData->imageAvailableSemaphore,
VK_NULL_HANDLE,
&swapchainImageIndex
);
if (acquireResult != VK_SUCCESS && acquireResult != VK_SUBOPTIMAL_KHR)
{
Refresh_LogWarn("Failed to acquire swapchain texture!");
return NULL;
}
}
swapchainTextureContainer = &swapchainData->textureContainers[swapchainImageIndex];
swapchainTextureSlice = VULKAN_INTERNAL_FetchTextureSlice(
swapchainTextureContainer->activeTextureHandle->vulkanTexture,
0,
0
);
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_COLOR_ATTACHMENT_WRITE,
swapchainTextureSlice
);
/* Set up present struct */
if (vulkanCommandBuffer->presentDataCount == vulkanCommandBuffer->presentDataCapacity)
{
vulkanCommandBuffer->presentDataCapacity += 1;
vulkanCommandBuffer->presentDatas = SDL_realloc(
vulkanCommandBuffer->presentDatas,
vulkanCommandBuffer->presentDataCapacity * sizeof(VkPresentInfoKHR)
);
}
presentData = &vulkanCommandBuffer->presentDatas[vulkanCommandBuffer->presentDataCount];
vulkanCommandBuffer->presentDataCount += 1;
presentData->windowData = windowData;
presentData->swapchainImageIndex = swapchainImageIndex;
/* Set up present semaphores */
if (vulkanCommandBuffer->waitSemaphoreCount == vulkanCommandBuffer->waitSemaphoreCapacity)
{
vulkanCommandBuffer->waitSemaphoreCapacity += 1;
vulkanCommandBuffer->waitSemaphores = SDL_realloc(
vulkanCommandBuffer->waitSemaphores,
vulkanCommandBuffer->waitSemaphoreCapacity * sizeof(VkSemaphore)
);
}
vulkanCommandBuffer->waitSemaphores[vulkanCommandBuffer->waitSemaphoreCount] = swapchainData->imageAvailableSemaphore;
vulkanCommandBuffer->waitSemaphoreCount += 1;
if (vulkanCommandBuffer->signalSemaphoreCount == vulkanCommandBuffer->signalSemaphoreCapacity)
{
vulkanCommandBuffer->signalSemaphoreCapacity += 1;
vulkanCommandBuffer->signalSemaphores = SDL_realloc(
vulkanCommandBuffer->signalSemaphores,
vulkanCommandBuffer->signalSemaphoreCapacity * sizeof(VkSemaphore)
);
2020-12-27 23:34:15 +00:00
}
vulkanCommandBuffer->signalSemaphores[vulkanCommandBuffer->signalSemaphoreCount] = swapchainData->renderFinishedSemaphore;
vulkanCommandBuffer->signalSemaphoreCount += 1;
*pWidth = swapchainData->extent.width;
*pHeight = swapchainData->extent.height;
return (Refresh_Texture*) swapchainTextureContainer;
}
/* Reports the Refresh texture format backing a claimed window's swapchain.
 * Returns 0 (with a warning) when the window is unclaimed, the swapchain is
 * currently invalid, or the VkFormat has no Refresh equivalent.
 */
static Refresh_TextureFormat VULKAN_GetSwapchainFormat(
	Refresh_Renderer *driverData,
	void *windowHandle
) {
	WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(windowHandle);

	if (windowData == NULL)
	{
		Refresh_LogWarn("Cannot get swapchain format, window has not been claimed!");
		return 0;
	}

	if (windowData->swapchainData == NULL)
	{
		Refresh_LogWarn("Cannot get swapchain format, swapchain is currently invalid!");
		return 0;
	}

	/* Only the two UNORM formats the swapchain is created with are mapped */
	switch (windowData->swapchainData->swapchainFormat)
	{
		case VK_FORMAT_R8G8B8A8_UNORM:
			return REFRESH_TEXTUREFORMAT_R8G8B8A8;

		case VK_FORMAT_B8G8R8A8_UNORM:
			return REFRESH_TEXTUREFORMAT_B8G8R8A8;

		default:
			Refresh_LogWarn("Unrecognized swapchain format!");
			return 0;
	}
}
/* Changes the preferred present mode of a claimed window and rebuilds its
 * swapchain so the new mode takes effect. No-op (with a warning) when the
 * window has not been claimed.
 */
static void VULKAN_SetSwapchainPresentMode(
	Refresh_Renderer *driverData,
	void *windowHandle,
	Refresh_PresentMode presentMode
) {
	WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(windowHandle);

	if (windowData == NULL)
	{
		Refresh_LogWarn("Cannot set present mode, window has not been claimed!");
		return;
	}

	/* FIX: store the requested mode before recreating. Previously the
	 * presentMode parameter was ignored entirely, so the swapchain was
	 * rebuilt with the mode set at claim time (see VULKAN_ClaimWindow,
	 * which initializes preferredPresentMode). */
	windowData->preferredPresentMode = presentMode;

	VULKAN_INTERNAL_RecreateSwapchain(
		(VulkanRenderer *)driverData,
		windowData
	);
}
/* Submission structure */
/* Hands out an unsignaled fence: pops one from the pool and resets it, or
 * creates a brand-new fence when the pool is empty. Returns NULL if fence
 * creation fails (logged).
 *
 * Fences are returned to the pool via VULKAN_INTERNAL_ReturnFenceToPool;
 * they are reset here, on re-acquisition, not when returned.
 */
static VkFence VULKAN_INTERNAL_AcquireFenceFromPool(
	VulkanRenderer *renderer
) {
	VkFenceCreateInfo fenceCreateInfo;
	VkFence fence;
	VkResult vulkanResult;

	/* FIX: the empty-pool check must happen under fencePool.lock.
	 * Previously it was performed unlocked, so two threads could both
	 * observe availableFenceCount == 1 and both pop, underflowing the
	 * count and reading out of bounds. */
	SDL_LockMutex(renderer->fencePool.lock);

	if (renderer->fencePool.availableFenceCount == 0)
	{
		/* Pool is empty: creating a fresh fence touches no shared pool
		 * state, so the lock can be released first. */
		SDL_UnlockMutex(renderer->fencePool.lock);

		fenceCreateInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
		fenceCreateInfo.pNext = NULL;
		fenceCreateInfo.flags = 0;

		vulkanResult = renderer->vkCreateFence(
			renderer->logicalDevice,
			&fenceCreateInfo,
			NULL,
			&fence
		);

		if (vulkanResult != VK_SUCCESS)
		{
			LogVulkanResultAsError("vkCreateFence", vulkanResult);
			return NULL;
		}

		return fence;
	}

	/* Pop the most recently returned fence and reset it for reuse */
	fence = renderer->fencePool.availableFences[renderer->fencePool.availableFenceCount - 1];
	renderer->fencePool.availableFenceCount -= 1;

	vulkanResult = renderer->vkResetFences(
		renderer->logicalDevice,
		1,
		&fence
	);

	if (vulkanResult != VK_SUCCESS)
	{
		LogVulkanResultAsError("vkResetFences", vulkanResult);
	}

	SDL_UnlockMutex(renderer->fencePool.lock);

	return fence;
}
/* Returns a fence to the renderer's fence pool for reuse. The fence is NOT
 * reset here; resetting happens on re-acquisition in
 * VULKAN_INTERNAL_AcquireFenceFromPool. Thread-safe via fencePool.lock.
 */
static void VULKAN_INTERNAL_ReturnFenceToPool(
	VulkanRenderer *renderer,
	VkFence fence
) {
	SDL_LockMutex(renderer->fencePool.lock);

	/* Grow the available-fence array if it is full (capacity doubles) */
	EXPAND_ARRAY_IF_NEEDED(
		renderer->fencePool.availableFences,
		VkFence,
		renderer->fencePool.availableFenceCount + 1,
		renderer->fencePool.availableFenceCapacity,
		renderer->fencePool.availableFenceCapacity * 2
	);

	renderer->fencePool.availableFences[renderer->fencePool.availableFenceCount] = fence;
	renderer->fencePool.availableFenceCount += 1;

	SDL_UnlockMutex(renderer->fencePool.lock);
}
/* Destroys queued-for-destroy resources whose command-buffer reference
 * counts have dropped to zero; resources still referenced by in-flight
 * command buffers remain queued for a later pass.
 *
 * Each queue uses swap-remove: the destroyed entry is overwritten with the
 * last entry and the count shrinks. Iterating from the tail downward keeps
 * this safe — the element swapped into slot i was already visited at a
 * higher index. Thread-safe via disposeLock.
 */
static void VULKAN_INTERNAL_PerformPendingDestroys(
	VulkanRenderer *renderer
) {
	int32_t i, sliceIndex;
	int32_t refCountTotal;

	SDL_LockMutex(renderer->disposeLock);

	/* Textures: destroyable only when every slice is unreferenced */
	for (i = renderer->texturesToDestroyCount - 1; i >= 0; i -= 1)
	{
		refCountTotal = 0;
		for (sliceIndex = 0; sliceIndex < renderer->texturesToDestroy[i]->sliceCount; sliceIndex += 1)
		{
			refCountTotal += SDL_AtomicGet(&renderer->texturesToDestroy[i]->slices[sliceIndex].referenceCount);
		}

		if (refCountTotal == 0)
		{
			VULKAN_INTERNAL_DestroyTexture(
				renderer,
				renderer->texturesToDestroy[i]
			);

			renderer->texturesToDestroy[i] = renderer->texturesToDestroy[renderer->texturesToDestroyCount - 1];
			renderer->texturesToDestroyCount -= 1;
		}
	}

	/* Buffers */
	for (i = renderer->buffersToDestroyCount - 1; i >= 0; i -= 1)
	{
		if (SDL_AtomicGet(&renderer->buffersToDestroy[i]->referenceCount) == 0)
		{
			VULKAN_INTERNAL_DestroyBuffer(
				renderer,
				renderer->buffersToDestroy[i]);

			renderer->buffersToDestroy[i] = renderer->buffersToDestroy[renderer->buffersToDestroyCount - 1];
			renderer->buffersToDestroyCount -= 1;
		}
	}

	/* Graphics pipelines */
	for (i = renderer->graphicsPipelinesToDestroyCount - 1; i >= 0; i -= 1)
	{
		if (SDL_AtomicGet(&renderer->graphicsPipelinesToDestroy[i]->referenceCount) == 0)
		{
			VULKAN_INTERNAL_DestroyGraphicsPipeline(
				renderer,
				renderer->graphicsPipelinesToDestroy[i]
			);

			renderer->graphicsPipelinesToDestroy[i] = renderer->graphicsPipelinesToDestroy[renderer->graphicsPipelinesToDestroyCount - 1];
			renderer->graphicsPipelinesToDestroyCount -= 1;
		}
	}

	/* Compute pipelines */
	for (i = renderer->computePipelinesToDestroyCount - 1; i >= 0; i -= 1)
	{
		if (SDL_AtomicGet(&renderer->computePipelinesToDestroy[i]->referenceCount) == 0)
		{
			VULKAN_INTERNAL_DestroyComputePipeline(
				renderer,
				renderer->computePipelinesToDestroy[i]
			);

			renderer->computePipelinesToDestroy[i] = renderer->computePipelinesToDestroy[renderer->computePipelinesToDestroyCount - 1];
			renderer->computePipelinesToDestroyCount -= 1 ;
		}
	}

	/* Shader modules */
	for (i = renderer->shaderModulesToDestroyCount - 1; i >= 0; i -= 1)
	{
		if (SDL_AtomicGet(&renderer->shaderModulesToDestroy[i]->referenceCount) == 0)
		{
			VULKAN_INTERNAL_DestroyShaderModule(
				renderer,
				renderer->shaderModulesToDestroy[i]
			);

			renderer->shaderModulesToDestroy[i] = renderer->shaderModulesToDestroy[renderer->shaderModulesToDestroyCount - 1];
			renderer->shaderModulesToDestroyCount -= 1;
		}
	}

	/* Samplers */
	for (i = renderer->samplersToDestroyCount - 1; i >= 0; i -= 1)
	{
		if (SDL_AtomicGet(&renderer->samplersToDestroy[i]->referenceCount) == 0)
		{
			VULKAN_INTERNAL_DestroySampler(
				renderer,
				renderer->samplersToDestroy[i]
			);

			renderer->samplersToDestroy[i] = renderer->samplersToDestroy[renderer->samplersToDestroyCount - 1];
			renderer->samplersToDestroyCount -= 1;
		}
	}

	/* Framebuffers */
	for (i = renderer->framebuffersToDestroyCount - 1; i >= 0; i -= 1)
	{
		if (SDL_AtomicGet(&renderer->framebuffersToDestroy[i]->referenceCount) == 0)
		{
			VULKAN_INTERNAL_DestroyFramebuffer(
				renderer,
				renderer->framebuffersToDestroy[i]
			);

			renderer->framebuffersToDestroy[i] = renderer->framebuffersToDestroy[renderer->framebuffersToDestroyCount - 1];
			renderer->framebuffersToDestroyCount -= 1;
		}
	}

	SDL_UnlockMutex(renderer->disposeLock);
}
/* Recycles a command buffer whose GPU work has completed: returns its fence
 * to the pool (unless the caller kept it via SubmitAndAcquireFence),
 * releases bound descriptor sets back to their caches, decrements all
 * tracked resource reference counts, resets present/semaphore bookkeeping,
 * clears defrag state, returns the command buffer to its thread's pool, and
 * removes it from the renderer's submitted list.
 *
 * Caller must guarantee the GPU is done with this command buffer (fence
 * signaled or device idle) and should hold submitLock — callers here are
 * VULKAN_Wait and VULKAN_Submit, both of which do.
 */
static void VULKAN_INTERNAL_CleanCommandBuffer(
	VulkanRenderer *renderer,
	VulkanCommandBuffer *commandBuffer
) {
	uint32_t i;
	DescriptorSetData *descriptorSetData;

	/* autoReleaseFence is cleared by VULKAN_SubmitAndAcquireFence; in that
	 * case the caller owns the fence and returns it via VULKAN_ReleaseFence. */
	if (commandBuffer->autoReleaseFence)
	{
		VULKAN_INTERNAL_ReturnFenceToPool(
			renderer,
			commandBuffer->inFlightFence
		);

		commandBuffer->inFlightFence = VK_NULL_HANDLE;
	}

	/* Bound descriptor sets are now available */
	for (i = 0; i < commandBuffer->boundDescriptorSetDataCount; i += 1)
	{
		descriptorSetData = &commandBuffer->boundDescriptorSetDatas[i];

		SDL_LockMutex(descriptorSetData->descriptorSetCache->lock);

		/* Grow the cache's inactive list if full (capacity doubles) */
		if (descriptorSetData->descriptorSetCache->inactiveDescriptorSetCount == descriptorSetData->descriptorSetCache->inactiveDescriptorSetCapacity)
		{
			descriptorSetData->descriptorSetCache->inactiveDescriptorSetCapacity *= 2;
			descriptorSetData->descriptorSetCache->inactiveDescriptorSets = SDL_realloc(
				descriptorSetData->descriptorSetCache->inactiveDescriptorSets,
				descriptorSetData->descriptorSetCache->inactiveDescriptorSetCapacity * sizeof(VkDescriptorSet)
			);
		}

		descriptorSetData->descriptorSetCache->inactiveDescriptorSets[descriptorSetData->descriptorSetCache->inactiveDescriptorSetCount] = descriptorSetData->descriptorSet;
		descriptorSetData->descriptorSetCache->inactiveDescriptorSetCount += 1;

		SDL_UnlockMutex(descriptorSetData->descriptorSetCache->lock);
	}

	commandBuffer->boundDescriptorSetDataCount = 0;

	/* Decrement reference counts on every resource this submission used,
	 * so PerformPendingDestroys can free anything queued for destruction. */
	for (i = 0; i < commandBuffer->usedBufferCount; i += 1)
	{
		SDL_AtomicDecRef(&commandBuffer->usedBuffers[i]->referenceCount);
	}
	commandBuffer->usedBufferCount = 0;

	for (i = 0; i < commandBuffer->usedTextureSliceCount; i += 1)
	{
		SDL_AtomicDecRef(&commandBuffer->usedTextureSlices[i]->referenceCount);
	}
	commandBuffer->usedTextureSliceCount = 0;

	for (i = 0; i < commandBuffer->usedSamplerCount; i += 1)
	{
		SDL_AtomicDecRef(&commandBuffer->usedSamplers[i]->referenceCount);
	}
	commandBuffer->usedSamplerCount = 0;

	for (i = 0; i < commandBuffer->usedGraphicsPipelineCount; i += 1)
	{
		SDL_AtomicDecRef(&commandBuffer->usedGraphicsPipelines[i]->referenceCount);
	}
	commandBuffer->usedGraphicsPipelineCount = 0;

	for (i = 0; i < commandBuffer->usedComputePipelineCount; i += 1)
	{
		SDL_AtomicDecRef(&commandBuffer->usedComputePipelines[i]->referenceCount);
	}
	commandBuffer->usedComputePipelineCount = 0;

	for (i = 0; i < commandBuffer->usedFramebufferCount; i += 1)
	{
		SDL_AtomicDecRef(&commandBuffer->usedFramebuffers[i]->referenceCount);
	}
	commandBuffer->usedFramebufferCount = 0;

	/* Reset presentation data: these submissions are no longer in flight.
	 * NOTE(review): assumes swapchainData is still non-NULL here — a
	 * swapchain recreated mid-flight would need confirming. */
	for (i = 0; i < commandBuffer->presentDataCount; i += 1)
	{
		commandBuffer->presentDatas[i].windowData->swapchainData->submissionsInFlight -= 1;
	}
	commandBuffer->presentDataCount = 0;

	commandBuffer->waitSemaphoreCount = 0;
	commandBuffer->signalSemaphoreCount = 0;

	/* Reset defrag state: the defrag copy submission has completed */
	if (commandBuffer->isDefrag)
	{
		renderer->defragInProgress = 0;
	}

	/* Return command buffer to pool (grow-by-one matches pool usage) */
	SDL_LockMutex(renderer->acquireCommandBufferLock);

	if (commandBuffer->commandPool->inactiveCommandBufferCount == commandBuffer->commandPool->inactiveCommandBufferCapacity)
	{
		commandBuffer->commandPool->inactiveCommandBufferCapacity += 1;
		commandBuffer->commandPool->inactiveCommandBuffers = SDL_realloc(
			commandBuffer->commandPool->inactiveCommandBuffers,
			commandBuffer->commandPool->inactiveCommandBufferCapacity * sizeof(VulkanCommandBuffer*)
		);
	}

	commandBuffer->commandPool->inactiveCommandBuffers[
		commandBuffer->commandPool->inactiveCommandBufferCount
	] = commandBuffer;
	commandBuffer->commandPool->inactiveCommandBufferCount += 1;

	SDL_UnlockMutex(renderer->acquireCommandBufferLock);

	/* Remove this command buffer from the submitted list.
	 * NOTE(review): forward swap-remove without re-examining the element
	 * swapped into slot i; correct only because a command buffer appears
	 * at most once in the submitted list. */
	for (i = 0; i < renderer->submittedCommandBufferCount; i += 1)
	{
		if (renderer->submittedCommandBuffers[i] == commandBuffer)
		{
			renderer->submittedCommandBuffers[i] = renderer->submittedCommandBuffers[renderer->submittedCommandBufferCount - 1];
			renderer->submittedCommandBufferCount -= 1;
		}
	}
}
static void VULKAN_Wait(
2022-02-25 21:42:11 +00:00
Refresh_Renderer *driverData
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *commandBuffer;
VkResult result;
int32_t i;
result = renderer->vkDeviceWaitIdle(renderer->logicalDevice);
if (result != VK_SUCCESS)
2022-01-18 05:09:27 +00:00
{
LogVulkanResultAsError("vkDeviceWaitIdle", result);
return;
}
SDL_LockMutex(renderer->submitLock);
for (i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1)
{
commandBuffer = renderer->submittedCommandBuffers[i];
VULKAN_INTERNAL_CleanCommandBuffer(renderer, commandBuffer);
2022-01-18 05:09:27 +00:00
}
VULKAN_INTERNAL_PerformPendingDestroys(renderer);
SDL_UnlockMutex(renderer->submitLock);
}
/* Submits the command buffer and hands its in-flight fence to the caller.
 * autoReleaseFence is disabled so clean-up will not return the fence to
 * the pool; the caller gives it back via VULKAN_ReleaseFence.
 */
static Refresh_Fence* VULKAN_SubmitAndAcquireFence(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer
) {
	VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;

	/* Keep the fence alive past command buffer clean-up */
	vulkanCommandBuffer->autoReleaseFence = 0;

	VULKAN_Submit(driverData, commandBuffer);

	return (Refresh_Fence*) vulkanCommandBuffer->inFlightFence;
}
2020-12-21 23:44:43 +00:00
/* Submits a recorded command buffer to the unified queue, then performs the
 * per-frame housekeeping that piggybacks on submission:
 *   1. transition any acquired swapchain images to present layout,
 *   2. end the command buffer and submit with a pool fence plus the
 *      wait/signal semaphores registered by AcquireSwapchainTexture,
 *   3. present each pending swapchain image,
 *   4. recycle any previously submitted command buffers whose fences have
 *      signaled, free empty memory allocations, flush pending destroys,
 *   5. kick off a defrag pass if one is queued and none is running.
 * Thread-safe via submitLock.
 */
static void VULKAN_Submit(
	Refresh_Renderer *driverData,
	Refresh_CommandBuffer *commandBuffer
) {
	VulkanRenderer* renderer = (VulkanRenderer*)driverData;
	VkSubmitInfo submitInfo;
	VkPresentInfoKHR presentInfo;
	VulkanPresentData *presentData;
	VkResult vulkanResult, presentResult = VK_SUCCESS;
	VulkanCommandBuffer *vulkanCommandBuffer;
	VkPipelineStageFlags waitStages[MAX_PRESENT_COUNT];
	uint32_t swapchainImageIndex;
	VulkanTextureSlice *swapchainTextureSlice;
	uint8_t commandBufferCleaned = 0;
	VulkanMemorySubAllocator *allocator;
	int32_t i, j;

	SDL_LockMutex(renderer->submitLock);

	/* FIXME: Can this just be permanent? */
	/* Every wait semaphore (one per presented swapchain) waits at the
	 * color-attachment-output stage. */
	for (i = 0; i < MAX_PRESENT_COUNT; i += 1)
	{
		waitStages[i] = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
	}

	vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;

	/* Transition each acquired swapchain image to present layout */
	for (j = 0; j < vulkanCommandBuffer->presentDataCount; j += 1)
	{
		swapchainImageIndex = vulkanCommandBuffer->presentDatas[j].swapchainImageIndex;
		swapchainTextureSlice = VULKAN_INTERNAL_FetchTextureSlice(
			vulkanCommandBuffer->presentDatas[j].windowData->swapchainData->textureContainers[swapchainImageIndex].activeTextureHandle->vulkanTexture,
			0,
			0
		);

		VULKAN_INTERNAL_ImageMemoryBarrier(
			renderer,
			vulkanCommandBuffer->commandBuffer,
			RESOURCE_ACCESS_PRESENT,
			swapchainTextureSlice
		);
	}

	VULKAN_INTERNAL_EndCommandBuffer(renderer, vulkanCommandBuffer);

	/* This fence signals when the GPU finishes the submission; it drives
	 * the cleanup pass below on later submits. */
	vulkanCommandBuffer->inFlightFence = VULKAN_INTERNAL_AcquireFenceFromPool(renderer);

	submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
	submitInfo.pNext = NULL;
	submitInfo.commandBufferCount = 1;
	submitInfo.pCommandBuffers = &vulkanCommandBuffer->commandBuffer;
	submitInfo.pWaitDstStageMask = waitStages;
	submitInfo.pWaitSemaphores = vulkanCommandBuffer->waitSemaphores;
	submitInfo.waitSemaphoreCount = vulkanCommandBuffer->waitSemaphoreCount;
	submitInfo.pSignalSemaphores = vulkanCommandBuffer->signalSemaphores;
	submitInfo.signalSemaphoreCount = vulkanCommandBuffer->signalSemaphoreCount;

	vulkanResult = renderer->vkQueueSubmit(
		renderer->unifiedQueue,
		1,
		&submitInfo,
		vulkanCommandBuffer->inFlightFence
	);

	if (vulkanResult != VK_SUCCESS)
	{
		LogVulkanResultAsError("vkQueueSubmit", vulkanResult);
	}

	/* Mark command buffers as submitted */
	/* NOTE(review): capacity grows by exactly one each time, so repeated
	 * growth reallocs O(n) times; harmless at these sizes. */
	if (renderer->submittedCommandBufferCount + 1 >= renderer->submittedCommandBufferCapacity)
	{
		renderer->submittedCommandBufferCapacity = renderer->submittedCommandBufferCount + 1;

		renderer->submittedCommandBuffers = SDL_realloc(
			renderer->submittedCommandBuffers,
			sizeof(VulkanCommandBuffer*) * renderer->submittedCommandBufferCapacity
		);
	}

	renderer->submittedCommandBuffers[renderer->submittedCommandBufferCount] = vulkanCommandBuffer;
	renderer->submittedCommandBufferCount += 1;

	/* Present, if applicable */
	for (j = 0; j < vulkanCommandBuffer->presentDataCount; j += 1)
	{
		presentData = &vulkanCommandBuffer->presentDatas[j];

		presentInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
		presentInfo.pNext = NULL;
		presentInfo.pWaitSemaphores = &presentData->windowData->swapchainData->renderFinishedSemaphore;
		presentInfo.waitSemaphoreCount = 1;
		presentInfo.pSwapchains = &presentData->windowData->swapchainData->swapchain;
		presentInfo.swapchainCount = 1;
		presentInfo.pImageIndices = &presentData->swapchainImageIndex;
		presentInfo.pResults = NULL;

		presentResult = renderer->vkQueuePresentKHR(
			renderer->unifiedQueue,
			&presentInfo
		);

		if (presentResult != VK_SUCCESS)
		{
			/* Present failed (e.g. out-of-date swapchain): rebuild it */
			VULKAN_INTERNAL_RecreateSwapchain(
				renderer,
				presentData->windowData
			);
		}
		else
		{
			/* Counted back down in VULKAN_INTERNAL_CleanCommandBuffer */
			presentData->windowData->swapchainData->submissionsInFlight += 1;
		}
	}

	/* Check if we can perform any cleanups */
	for (i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1)
	{
		vulkanResult = renderer->vkGetFenceStatus(
			renderer->logicalDevice,
			renderer->submittedCommandBuffers[i]->inFlightFence
		);

		if (vulkanResult == VK_SUCCESS)
		{
			VULKAN_INTERNAL_CleanCommandBuffer(
				renderer,
				renderer->submittedCommandBuffers[i]
			);

			commandBufferCleaned = 1;
		}
	}

	if (commandBufferCleaned)
	{
		/* Some resources were released: free any allocations that no
		 * longer contain used regions. */
		SDL_LockMutex(renderer->allocatorLock);

		for (i = 0; i < VK_MAX_MEMORY_TYPES; i += 1)
		{
			allocator = &renderer->memoryAllocator->subAllocators[i];

			for (j = allocator->allocationCount - 1; j >= 0; j -= 1)
			{
				if (allocator->allocations[j]->usedRegionCount == 0)
				{
					VULKAN_INTERNAL_DeallocateMemory(
						renderer,
						allocator,
						j
					);
				}
			}
		}

		SDL_UnlockMutex(renderer->allocatorLock);
	}

	/* Check pending destroys */
	VULKAN_INTERNAL_PerformPendingDestroys(renderer);

	/* Defrag! At most one defrag pass is in flight at a time. */
	if (renderer->allocationsToDefragCount > 0 && !renderer->defragInProgress)
	{
		VULKAN_INTERNAL_DefragmentMemory(renderer);
	}

	SDL_UnlockMutex(renderer->submitLock);
}
/* Defragments one queued memory allocation: for every live used region,
 * creates a replacement buffer/texture, records GPU copies of the old
 * contents on a dedicated command buffer, re-points the resource container
 * handle at the replacement, and queues the old resource for destruction.
 * The recorded work is submitted at the end; defragInProgress is cleared
 * when that command buffer completes (see VULKAN_INTERNAL_CleanCommandBuffer).
 *
 * Returns 1 on success, 0 on failure (logged).
 */
static uint8_t VULKAN_INTERNAL_DefragmentMemory(
	VulkanRenderer *renderer
) {
	VulkanMemoryAllocation *allocation;
	VulkanMemoryUsedRegion *currentRegion;
	VulkanBuffer* newBuffer;
	VulkanTexture* newTexture;
	VkBufferCopy bufferCopy;
	VkImageCopy imageCopy;
	VulkanCommandBuffer *commandBuffer;
	VulkanTextureSlice *srcSlice;
	VulkanTextureSlice *dstSlice;
	uint32_t i, sliceIndex;

	SDL_LockMutex(renderer->allocatorLock);

	renderer->defragInProgress = 1;

	/* NOTE(review): on the error paths below this command buffer is begun
	 * but never submitted; it is not returned to its pool until process
	 * teardown — confirm whether that leak matters in practice. */
	commandBuffer = (VulkanCommandBuffer*) VULKAN_AcquireCommandBuffer((Refresh_Renderer *) renderer);
	commandBuffer->isDefrag = 1;

	allocation = renderer->allocationsToDefrag[renderer->allocationsToDefragCount - 1];
	renderer->allocationsToDefragCount -= 1;

	/* For each used region in the allocation
	 * create a new resource, copy the data
	 * and re-point the resource containers
	 */
	for (i = 0; i < allocation->usedRegionCount; i += 1)
	{
		currentRegion = allocation->usedRegions[i];

		if (currentRegion->isBuffer && !currentRegion->vulkanBuffer->markedForDestroy)
		{
			/* The replacement must be a valid copy destination */
			currentRegion->vulkanBuffer->usage |= VK_BUFFER_USAGE_TRANSFER_DST_BIT;

			newBuffer = VULKAN_INTERNAL_CreateBuffer(
				renderer,
				currentRegion->vulkanBuffer->size,
				RESOURCE_ACCESS_NONE,
				currentRegion->vulkanBuffer->usage,
				currentRegion->vulkanBuffer->requireHostVisible,
				currentRegion->vulkanBuffer->preferHostLocal,
				currentRegion->vulkanBuffer->preferDeviceLocal,
				0
			);

			if (newBuffer == NULL)
			{
				Refresh_LogError("Failed to create defrag buffer!");

				/* FIX: previously returned with allocatorLock still held
				 * (deadlocking the next allocator user) and with
				 * defragInProgress stuck at 1 (permanently disabling
				 * defrag). Unwind both before bailing out. */
				renderer->defragInProgress = 0;
				SDL_UnlockMutex(renderer->allocatorLock);
				return 0;
			}

			/* Copy buffer contents if necessary */
			if (currentRegion->vulkanBuffer->resourceAccessType != RESOURCE_ACCESS_NONE)
			{
				VULKAN_INTERNAL_BufferMemoryBarrier(
					renderer,
					commandBuffer->commandBuffer,
					RESOURCE_ACCESS_TRANSFER_READ,
					currentRegion->vulkanBuffer
				);

				VULKAN_INTERNAL_BufferMemoryBarrier(
					renderer,
					commandBuffer->commandBuffer,
					RESOURCE_ACCESS_TRANSFER_WRITE,
					newBuffer
				);

				bufferCopy.srcOffset = 0;
				bufferCopy.dstOffset = 0;
				bufferCopy.size = currentRegion->resourceSize;

				renderer->vkCmdCopyBuffer(
					commandBuffer->commandBuffer,
					currentRegion->vulkanBuffer->buffer,
					newBuffer->buffer,
					1,
					&bufferCopy
				);

				/*
				VULKAN_INTERNAL_BufferMemoryBarrier(
					renderer,
					commandBuffer->commandBuffer,
					originalResourceAccessType,
					newBuffer
				);
				*/

				VULKAN_INTERNAL_TrackBuffer(renderer, commandBuffer, currentRegion->vulkanBuffer);
				VULKAN_INTERNAL_TrackBuffer(renderer, commandBuffer, newBuffer);
			}

			/* re-point original container to new buffer */
			if (currentRegion->vulkanBuffer->handle != NULL)
			{
				newBuffer->handle = currentRegion->vulkanBuffer->handle;
				newBuffer->handle->vulkanBuffer = newBuffer;
				currentRegion->vulkanBuffer->handle = NULL;
			}

			VULKAN_INTERNAL_QueueDestroyBuffer(renderer, currentRegion->vulkanBuffer);
		}
		else if (!currentRegion->vulkanTexture->markedForDestroy)
		{
			newTexture = VULKAN_INTERNAL_CreateTexture(
				renderer,
				currentRegion->vulkanTexture->dimensions.width,
				currentRegion->vulkanTexture->dimensions.height,
				currentRegion->vulkanTexture->depth,
				currentRegion->vulkanTexture->isCube,
				currentRegion->vulkanTexture->layerCount,
				currentRegion->vulkanTexture->levelCount,
				currentRegion->vulkanTexture->sampleCount,
				currentRegion->vulkanTexture->format,
				currentRegion->vulkanTexture->aspectFlags,
				currentRegion->vulkanTexture->usageFlags,
				0 /* MSAA is dedicated so never defrags */
			);

			if (newTexture == NULL)
			{
				Refresh_LogError("Failed to create defrag texture!");

				/* FIX: same unwind as the buffer failure path above —
				 * release the lock and re-enable defrag before returning. */
				renderer->defragInProgress = 0;
				SDL_UnlockMutex(renderer->allocatorLock);
				return 0;
			}

			for (sliceIndex = 0; sliceIndex < currentRegion->vulkanTexture->sliceCount; sliceIndex += 1)
			{
				/* copy slice if necessary */
				srcSlice = &currentRegion->vulkanTexture->slices[sliceIndex];
				dstSlice = &newTexture->slices[sliceIndex];

				if (srcSlice->resourceAccessType != RESOURCE_ACCESS_NONE)
				{
					VULKAN_INTERNAL_ImageMemoryBarrier(
						renderer,
						commandBuffer->commandBuffer,
						RESOURCE_ACCESS_TRANSFER_READ,
						srcSlice
					);

					VULKAN_INTERNAL_ImageMemoryBarrier(
						renderer,
						commandBuffer->commandBuffer,
						RESOURCE_ACCESS_TRANSFER_WRITE,
						dstSlice
					);

					imageCopy.srcOffset.x = 0;
					imageCopy.srcOffset.y = 0;
					imageCopy.srcOffset.z = 0;
					imageCopy.srcSubresource.aspectMask = srcSlice->parent->aspectFlags;
					imageCopy.srcSubresource.baseArrayLayer = srcSlice->layer;
					imageCopy.srcSubresource.layerCount = 1;
					imageCopy.srcSubresource.mipLevel = srcSlice->level;
					imageCopy.extent.width = SDL_max(1, srcSlice->parent->dimensions.width >> srcSlice->level);
					imageCopy.extent.height = SDL_max(1, srcSlice->parent->dimensions.height >> srcSlice->level);
					imageCopy.extent.depth = srcSlice->parent->depth;
					imageCopy.dstOffset.x = 0;
					imageCopy.dstOffset.y = 0;
					imageCopy.dstOffset.z = 0;
					imageCopy.dstSubresource.aspectMask = dstSlice->parent->aspectFlags;
					imageCopy.dstSubresource.baseArrayLayer = dstSlice->layer;
					imageCopy.dstSubresource.layerCount = 1;
					imageCopy.dstSubresource.mipLevel = dstSlice->level;

					renderer->vkCmdCopyImage(
						commandBuffer->commandBuffer,
						currentRegion->vulkanTexture->image,
						AccessMap[srcSlice->resourceAccessType].imageLayout,
						newTexture->image,
						AccessMap[dstSlice->resourceAccessType].imageLayout,
						1,
						&imageCopy
					);

					VULKAN_INTERNAL_TrackTextureSlice(renderer, commandBuffer, srcSlice);
					VULKAN_INTERNAL_TrackTextureSlice(renderer, commandBuffer, dstSlice);
				}
			}

			/* re-point original container to new texture */
			newTexture->handle = currentRegion->vulkanTexture->handle;
			newTexture->handle->vulkanTexture = newTexture;
			currentRegion->vulkanTexture->handle = NULL;

			VULKAN_INTERNAL_QueueDestroyTexture(renderer, currentRegion->vulkanTexture);
		}
	}

	SDL_UnlockMutex(renderer->allocatorLock);

	VULKAN_Submit(
		(Refresh_Renderer*) renderer,
		(Refresh_CommandBuffer*) commandBuffer
	);

	return 1;
}
/* Blocks until the given fences signal. waitAll selects all-of (nonzero)
 * versus any-of (zero) semantics; the timeout is effectively infinite.
 * Failures are logged, not propagated.
 */
static void VULKAN_WaitForFences(
	Refresh_Renderer *driverData,
	uint8_t waitAll,
	uint32_t fenceCount,
	Refresh_Fence **pFences
) {
	VulkanRenderer* renderer = (VulkanRenderer*) driverData;

	/* Refresh_Fence is an opaque alias of VkFence, so the array casts directly */
	VkResult waitResult = renderer->vkWaitForFences(
		renderer->logicalDevice,
		fenceCount,
		(VkFence*) pFences,
		waitAll,
		UINT64_MAX
	);

	if (waitResult != VK_SUCCESS)
	{
		LogVulkanResultAsError("vkWaitForFences", waitResult);
	}
}
/* Polls a fence without blocking.
 * Returns 1 when signaled, 0 when not yet signaled, -1 on error (logged).
 */
static int VULKAN_QueryFence(
	Refresh_Renderer *driverData,
	Refresh_Fence *fence
) {
	VulkanRenderer* renderer = (VulkanRenderer*) driverData;

	VkResult status = renderer->vkGetFenceStatus(
		renderer->logicalDevice,
		(VkFence) fence
	);

	switch (status)
	{
		case VK_SUCCESS:
			return 1;

		case VK_NOT_READY:
			return 0;

		default:
			LogVulkanResultAsError("vkGetFenceStatus", status);
			return -1;
	}
}
/* Returns a caller-held fence (obtained from VULKAN_SubmitAndAcquireFence)
 * back to the renderer's fence pool for reuse.
 */
static void VULKAN_ReleaseFence(
	Refresh_Renderer *driverData,
	Refresh_Fence *fence
) {
	VulkanRenderer *renderer = (VulkanRenderer*) driverData;

	VULKAN_INTERNAL_ReturnFenceToPool(renderer, (VkFence) fence);
}
2020-12-17 03:28:02 +00:00
/* Device instantiation */
/* Scans a device's enumerated extension list, filling *supports with a flag
 * per extension this driver cares about. Returns nonzero only when all of
 * the hard-required extensions (swapchain, maintenance1,
 * get_memory_requirements2) are present; the rest are optional.
 */
static inline uint8_t CheckDeviceExtensions(
	VkExtensionProperties *extensions,
	uint32_t numExtensions,
	VulkanExtensions *supports
) {
	uint32_t idx;
	const char *name;

	SDL_memset(supports, '\0', sizeof(VulkanExtensions));

	for (idx = 0; idx < numExtensions; idx += 1)
	{
		name = extensions[idx].extensionName;

		if (SDL_strcmp(name, "VK_KHR_swapchain") == 0)
		{
			supports->KHR_swapchain = 1;
		}
		else if (SDL_strcmp(name, "VK_KHR_maintenance1") == 0)
		{
			supports->KHR_maintenance1 = 1;
		}
		else if (SDL_strcmp(name, "VK_KHR_get_memory_requirements2") == 0)
		{
			supports->KHR_get_memory_requirements2 = 1;
		}
		else if (SDL_strcmp(name, "VK_KHR_driver_properties") == 0)
		{
			supports->KHR_driver_properties = 1;
		}
		else if (SDL_strcmp(name, "VK_EXT_vertex_attribute_divisor") == 0)
		{
			supports->EXT_vertex_attribute_divisor = 1;
		}
		else if (SDL_strcmp(name, "VK_KHR_portability_subset") == 0)
		{
			supports->KHR_portability_subset = 1;
		}
	}

	return (	supports->KHR_swapchain &&
			supports->KHR_maintenance1 &&
			supports->KHR_get_memory_requirements2	);
}
/* Counts how many device extensions will be requested at device creation —
 * i.e. how many entries CreateDeviceExtensionArray will fill. Each flag in
 * *supports is 0 or 1, so a plain sum suffices.
 */
static inline uint32_t GetDeviceExtensionCount(VulkanExtensions *supports)
{
	uint32_t total = 0;

	total += supports->KHR_swapchain;
	total += supports->KHR_maintenance1;
	total += supports->KHR_get_memory_requirements2;
	total += supports->KHR_driver_properties;
	total += supports->EXT_vertex_attribute_divisor;
	total += supports->KHR_portability_subset;

	return total;
}
static inline void CreateDeviceExtensionArray(
	VulkanExtensions *supports,
	const char **extensions
) {
	/* Fills `extensions` with the names of every supported extension.
	 * The caller must size the array via GetDeviceExtensionCount.
	 */
	uint8_t cur = 0;

	if (supports->KHR_swapchain)
	{
		extensions[cur++] = "VK_KHR_swapchain";
	}
	if (supports->KHR_maintenance1)
	{
		extensions[cur++] = "VK_KHR_maintenance1";
	}
	if (supports->KHR_get_memory_requirements2)
	{
		extensions[cur++] = "VK_KHR_get_memory_requirements2";
	}
	if (supports->KHR_driver_properties)
	{
		extensions[cur++] = "VK_KHR_driver_properties";
	}
	if (supports->EXT_vertex_attribute_divisor)
	{
		extensions[cur++] = "VK_EXT_vertex_attribute_divisor";
	}
	if (supports->KHR_portability_subset)
	{
		extensions[cur++] = "VK_KHR_portability_subset";
	}
}
static inline uint8_t SupportsInstanceExtension(
	const char *ext,
	VkExtensionProperties *availableExtensions,
	uint32_t numAvailableExtensions
) {
	/* Linear search for `ext` in the instance's available extension list.
	 * Returns 1 when found, 0 otherwise.
	 * (Fix: removed a stray VCS blame line embedded in the parameter list
	 * that made the block non-compilable.)
	 */
	uint32_t i;
	for (i = 0; i < numAvailableExtensions; i += 1)
	{
		if (SDL_strcmp(ext, availableExtensions[i].extensionName) == 0)
		{
			return 1;
		}
	}
	return 0;
}
static uint8_t VULKAN_INTERNAL_CheckInstanceExtensions(
	const char **requiredExtensions,
	uint32_t requiredExtensionsLength,
	uint8_t *supportsDebugUtils
) {
	/* Verifies that every required instance extension is available.
	 * Also reports (via *supportsDebugUtils) whether the optional
	 * VK_EXT_debug_utils extension is present.
	 * Returns 1 when all required extensions are supported, 0 otherwise.
	 * (Fix: check the SDL_malloc result before use; previously a failed
	 * allocation would be passed straight to the enumeration call.)
	 */
	uint32_t extensionCount, i;
	VkExtensionProperties *availableExtensions;
	uint8_t allExtensionsSupported = 1;

	vkEnumerateInstanceExtensionProperties(
		NULL,
		&extensionCount,
		NULL
	);
	availableExtensions = SDL_malloc(
		extensionCount * sizeof(VkExtensionProperties)
	);
	if (availableExtensions == NULL)
	{
		/* Out of memory; report "nothing supported" defensively */
		*supportsDebugUtils = 0;
		return 0;
	}
	vkEnumerateInstanceExtensionProperties(
		NULL,
		&extensionCount,
		availableExtensions
	);

	for (i = 0; i < requiredExtensionsLength; i += 1)
	{
		if (!SupportsInstanceExtension(
			requiredExtensions[i],
			availableExtensions,
			extensionCount
		)) {
			allExtensionsSupported = 0;
			break;
		}
	}

	/* This is optional, but nice to have! */
	*supportsDebugUtils = SupportsInstanceExtension(
		VK_EXT_DEBUG_UTILS_EXTENSION_NAME,
		availableExtensions,
		extensionCount
	);

	SDL_free(availableExtensions);
	return allExtensionsSupported;
}
static uint8_t VULKAN_INTERNAL_CheckDeviceExtensions(
	VulkanRenderer *renderer,
	VkPhysicalDevice physicalDevice,
	VulkanExtensions *physicalDeviceExtensions
) {
	/* Enumerates the physical device's extensions and fills
	 * *physicalDeviceExtensions with the subset this driver knows about.
	 * Returns 1 when all required device extensions are present.
	 * (Fix: check the SDL_malloc result before use.)
	 */
	uint32_t extensionCount;
	VkExtensionProperties *availableExtensions;
	uint8_t allExtensionsSupported;

	renderer->vkEnumerateDeviceExtensionProperties(
		physicalDevice,
		NULL,
		&extensionCount,
		NULL
	);
	availableExtensions = (VkExtensionProperties*) SDL_malloc(
		extensionCount * sizeof(VkExtensionProperties)
	);
	if (availableExtensions == NULL)
	{
		return 0;
	}
	renderer->vkEnumerateDeviceExtensionProperties(
		physicalDevice,
		NULL,
		&extensionCount,
		availableExtensions
	);

	allExtensionsSupported = CheckDeviceExtensions(
		availableExtensions,
		extensionCount,
		physicalDeviceExtensions
	);

	SDL_free(availableExtensions);
	return allExtensionsSupported;
}
static uint8_t VULKAN_INTERNAL_CheckValidationLayers(
	const char** validationLayers,
	uint32_t validationLayersLength
) {
	/* Returns 1 when every layer name in `validationLayers` is available
	 * from the instance layer list, 0 when any is missing.
	 *
	 * Fix: `layerFound` was previously uninitialized; with an empty
	 * `validationLayers` list the loop never ran and the function
	 * returned an indeterminate value (undefined behavior). It now
	 * starts at 1, so an empty list is vacuously satisfied and the
	 * non-empty behavior is unchanged.
	 */
	uint32_t layerCount;
	VkLayerProperties *availableLayers;
	uint32_t i, j;
	uint8_t layerFound = 1;

	vkEnumerateInstanceLayerProperties(&layerCount, NULL);
	availableLayers = (VkLayerProperties*) SDL_malloc(
		layerCount * sizeof(VkLayerProperties)
	);
	if (availableLayers == NULL)
	{
		return 0;
	}
	vkEnumerateInstanceLayerProperties(&layerCount, availableLayers);

	for (i = 0; i < validationLayersLength; i += 1)
	{
		layerFound = 0;
		for (j = 0; j < layerCount; j += 1)
		{
			if (SDL_strcmp(validationLayers[i], availableLayers[j].layerName) == 0)
			{
				layerFound = 1;
				break;
			}
		}
		if (!layerFound)
		{
			break;
		}
	}

	SDL_free(availableLayers);
	return layerFound;
}
static uint8_t VULKAN_INTERNAL_CreateInstance(
2022-02-25 21:42:11 +00:00
VulkanRenderer *renderer,
void *deviceWindowHandle
2020-12-17 03:28:02 +00:00
) {
VkResult vulkanResult;
VkApplicationInfo appInfo;
const char **instanceExtensionNames;
uint32_t instanceExtensionCount;
VkInstanceCreateInfo createInfo;
static const char *layerNames[] = { "VK_LAYER_KHRONOS_validation" };
appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
appInfo.pNext = NULL;
appInfo.pApplicationName = NULL;
appInfo.applicationVersion = 0;
appInfo.pEngineName = "REFRESH";
appInfo.engineVersion = REFRESH_COMPILED_VERSION;
appInfo.apiVersion = VK_MAKE_VERSION(1, 0, 0);
2022-02-25 21:42:11 +00:00
if (!SDL_Vulkan_GetInstanceExtensions(
(SDL_Window*) deviceWindowHandle,
&instanceExtensionCount,
NULL
)) {
Refresh_LogError(
"SDL_Vulkan_GetInstanceExtensions(): getExtensionCount: %s",
SDL_GetError()
);
2020-12-17 03:28:02 +00:00
2022-02-25 21:42:11 +00:00
return 0;
}
2020-12-17 03:28:02 +00:00
/* Extra space for the following extensions:
* VK_KHR_get_physical_device_properties2
* VK_EXT_debug_utils
*/
instanceExtensionNames = SDL_stack_alloc(
const char*,
instanceExtensionCount + 2
);
if (!SDL_Vulkan_GetInstanceExtensions(
(SDL_Window*) deviceWindowHandle,
&instanceExtensionCount,
instanceExtensionNames
)) {
2021-01-05 23:00:51 +00:00
Refresh_LogError(
2020-12-17 03:28:02 +00:00
"SDL_Vulkan_GetInstanceExtensions(): %s",
SDL_GetError()
);
2022-02-25 21:42:11 +00:00
SDL_stack_free((char*) instanceExtensionNames);
return 0;
2020-12-17 03:28:02 +00:00
}
/* Core since 1.1 */
instanceExtensionNames[instanceExtensionCount++] =
VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME;
if (!VULKAN_INTERNAL_CheckInstanceExtensions(
instanceExtensionNames,
instanceExtensionCount,
&renderer->supportsDebugUtils
)) {
2021-01-05 23:00:51 +00:00
Refresh_LogError(
2020-12-17 03:28:02 +00:00
"Required Vulkan instance extensions not supported"
);
2022-02-25 21:42:11 +00:00
SDL_stack_free((char*) instanceExtensionNames);
return 0;
2020-12-17 03:28:02 +00:00
}
if (renderer->supportsDebugUtils)
{
/* Append the debug extension to the end */
instanceExtensionNames[instanceExtensionCount++] =
VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
}
else
{
2021-01-05 23:00:51 +00:00
Refresh_LogWarn(
2020-12-17 03:28:02 +00:00
"%s is not supported!",
VK_EXT_DEBUG_UTILS_EXTENSION_NAME
);
}
2022-02-25 21:42:11 +00:00
createInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
2020-12-17 03:28:02 +00:00
createInfo.pNext = NULL;
createInfo.flags = 0;
createInfo.pApplicationInfo = &appInfo;
createInfo.ppEnabledLayerNames = layerNames;
createInfo.enabledExtensionCount = instanceExtensionCount;
createInfo.ppEnabledExtensionNames = instanceExtensionNames;
if (renderer->debugMode)
{
createInfo.enabledLayerCount = SDL_arraysize(layerNames);
if (!VULKAN_INTERNAL_CheckValidationLayers(
layerNames,
createInfo.enabledLayerCount
)) {
2021-01-05 23:00:51 +00:00
Refresh_LogWarn("Validation layers not found, continuing without validation");
2020-12-17 03:28:02 +00:00
createInfo.enabledLayerCount = 0;
}
else
{
Refresh_LogInfo("Validation layers enabled, expect debug level performance!");
}
2020-12-17 03:28:02 +00:00
}
else
{
createInfo.enabledLayerCount = 0;
}
2022-02-25 21:42:11 +00:00
vulkanResult = vkCreateInstance(&createInfo, NULL, &renderer->instance);
2020-12-17 03:28:02 +00:00
if (vulkanResult != VK_SUCCESS)
{
2021-01-05 23:00:51 +00:00
Refresh_LogError(
2020-12-17 03:28:02 +00:00
"vkCreateInstance failed: %s",
VkErrorMessages(vulkanResult)
);
2022-02-25 21:42:11 +00:00
SDL_stack_free((char*) instanceExtensionNames);
return 0;
2020-12-17 03:28:02 +00:00
}
SDL_stack_free((char*) instanceExtensionNames);
return 1;
}
static uint8_t VULKAN_INTERNAL_IsDeviceSuitable(
VulkanRenderer *renderer,
VkPhysicalDevice physicalDevice,
VulkanExtensions *physicalDeviceExtensions,
2020-12-17 03:28:02 +00:00
VkSurfaceKHR surface,
uint32_t *queueFamilyIndex,
uint8_t *deviceRank
2020-12-17 03:28:02 +00:00
) {
uint32_t queueFamilyCount, queueFamilyRank, queueFamilyBest;
SwapChainSupportDetails swapchainSupportDetails;
2020-12-17 03:28:02 +00:00
VkQueueFamilyProperties *queueProps;
VkBool32 supportsPresent;
uint8_t querySuccess;
2020-12-17 03:28:02 +00:00
VkPhysicalDeviceProperties deviceProperties;
uint32_t i;
2020-12-17 03:28:02 +00:00
/* Get the device rank before doing any checks, in case one fails.
* Note: If no dedicated device exists, one that supports our features
* would be fine
2020-12-17 03:28:02 +00:00
*/
renderer->vkGetPhysicalDeviceProperties(
physicalDevice,
&deviceProperties
);
if (*deviceRank < DEVICE_PRIORITY[deviceProperties.deviceType])
{
/* This device outranks the best device we've found so far!
* This includes a dedicated GPU that has less features than an
* integrated GPU, because this is a freak case that is almost
* never intentionally desired by the end user
*/
*deviceRank = DEVICE_PRIORITY[deviceProperties.deviceType];
}
else if (*deviceRank > DEVICE_PRIORITY[deviceProperties.deviceType])
{
/* Device is outranked by a previous device, don't even try to
* run a query and reset the rank to avoid overwrites
*/
*deviceRank = 0;
return 0;
}
2020-12-17 03:28:02 +00:00
if (!VULKAN_INTERNAL_CheckDeviceExtensions(
renderer,
physicalDevice,
physicalDeviceExtensions
2020-12-17 03:28:02 +00:00
)) {
return 0;
}
renderer->vkGetPhysicalDeviceQueueFamilyProperties(
physicalDevice,
&queueFamilyCount,
NULL
);
queueProps = (VkQueueFamilyProperties*) SDL_stack_alloc(
VkQueueFamilyProperties,
queueFamilyCount
);
renderer->vkGetPhysicalDeviceQueueFamilyProperties(
physicalDevice,
&queueFamilyCount,
queueProps
);
queueFamilyBest = 0;
*queueFamilyIndex = UINT32_MAX;
2020-12-17 03:28:02 +00:00
for (i = 0; i < queueFamilyCount; i += 1)
{
renderer->vkGetPhysicalDeviceSurfaceSupportKHR(
physicalDevice,
i,
surface,
&supportsPresent
);
if ( !supportsPresent ||
!(queueProps[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) )
2020-12-17 03:28:02 +00:00
{
/* Not a graphics family, ignore. */
continue;
}
/* The queue family bitflags are kind of annoying.
*
* We of course need a graphics family, but we ideally want the
* _primary_ graphics family. The spec states that at least one
* graphics family must also be a compute family, so generally
* drivers make that the first one. But hey, maybe something
* genuinely can't do compute or something, and FNA doesn't
* need it, so we'll be open to a non-compute queue family.
*
* Additionally, it's common to see the primary queue family
* have the transfer bit set, which is great! But this is
* actually optional; it's impossible to NOT have transfers in
* graphics/compute but it _is_ possible for a graphics/compute
* family, even the primary one, to just decide not to set the
* bitflag. Admittedly, a driver may want to isolate transfer
* queues to a dedicated family so that queues made solely for
* transfers can have an optimized DMA queue.
*
* That, or the driver author got lazy and decided not to set
* the bit. Looking at you, Android.
*
* -flibit
*/
if (queueProps[i].queueFlags & VK_QUEUE_COMPUTE_BIT)
{
if (queueProps[i].queueFlags & VK_QUEUE_TRANSFER_BIT)
{
/* Has all attribs! */
queueFamilyRank = 3;
}
else
{
/* Probably has a DMA transfer queue family */
queueFamilyRank = 2;
}
}
else
{
/* Just a graphics family, probably has something better */
queueFamilyRank = 1;
}
if (queueFamilyRank > queueFamilyBest)
{
*queueFamilyIndex = i;
queueFamilyBest = queueFamilyRank;
2020-12-17 03:28:02 +00:00
}
}
SDL_stack_free(queueProps);
if (*queueFamilyIndex == UINT32_MAX)
2020-12-17 03:28:02 +00:00
{
/* Somehow no graphics queues existed. Compute-only device? */
return 0;
2020-12-17 03:28:02 +00:00
}
/* FIXME: Need better structure for checking vs storing support details */
querySuccess = VULKAN_INTERNAL_QuerySwapChainSupport(
renderer,
physicalDevice,
surface,
&swapchainSupportDetails
);
if (swapchainSupportDetails.formatsLength > 0)
{
SDL_free(swapchainSupportDetails.formats);
}
if (swapchainSupportDetails.presentModesLength > 0)
{
SDL_free(swapchainSupportDetails.presentModes);
}
return ( querySuccess &&
swapchainSupportDetails.formatsLength > 0 &&
swapchainSupportDetails.presentModesLength > 0 );
2020-12-17 03:28:02 +00:00
}
static uint8_t VULKAN_INTERNAL_DeterminePhysicalDevice(
	VulkanRenderer *renderer,
	VkSurfaceKHR surface
) {
	/* Enumerates all physical devices and selects the highest-ranked one
	 * that is suitable for `surface`, storing the device, its extension
	 * support, and its queue family index on `renderer`, then queries its
	 * properties. Returns 1 on success, 0 when no suitable device exists.
	 * (Fixes: removed stray VCS blame lines embedded in the body; the
	 * "no device" sentinel is now spelled UINT32_MAX instead of assigning
	 * -1 to a uint32_t — behavior is identical, intent is explicit.)
	 */
	VkResult vulkanResult;
	VkPhysicalDevice *physicalDevices;
	VulkanExtensions *physicalDeviceExtensions;
	uint32_t physicalDeviceCount, i, suitableIndex;
	uint32_t queueFamilyIndex, suitableQueueFamilyIndex;
	uint8_t deviceRank, highestRank;

	vulkanResult = renderer->vkEnumeratePhysicalDevices(
		renderer->instance,
		&physicalDeviceCount,
		NULL
	);
	VULKAN_ERROR_CHECK(vulkanResult, vkEnumeratePhysicalDevices, 0)

	if (physicalDeviceCount == 0)
	{
		Refresh_LogWarn("Failed to find any GPUs with Vulkan support");
		return 0;
	}

	physicalDevices = SDL_stack_alloc(VkPhysicalDevice, physicalDeviceCount);
	physicalDeviceExtensions = SDL_stack_alloc(VulkanExtensions, physicalDeviceCount);

	vulkanResult = renderer->vkEnumeratePhysicalDevices(
		renderer->instance,
		&physicalDeviceCount,
		physicalDevices
	);

	/* This should be impossible to hit, but from what I can tell this can
	 * be triggered not because the array is too small, but because there
	 * were drivers that turned out to be bogus, so this is the loader's way
	 * of telling us that the list is now smaller than expected :shrug:
	 */
	if (vulkanResult == VK_INCOMPLETE)
	{
		Refresh_LogWarn("vkEnumeratePhysicalDevices returned VK_INCOMPLETE, will keep trying anyway...");
		vulkanResult = VK_SUCCESS;
	}

	if (vulkanResult != VK_SUCCESS)
	{
		Refresh_LogWarn(
			"vkEnumeratePhysicalDevices failed: %s",
			VkErrorMessages(vulkanResult)
		);
		SDL_stack_free(physicalDevices);
		SDL_stack_free(physicalDeviceExtensions);
		return 0;
	}

	/* Any suitable device will do, but we'd like the best */
	suitableIndex = UINT32_MAX;
	highestRank = 0;
	for (i = 0; i < physicalDeviceCount; i += 1)
	{
		deviceRank = highestRank;
		if (VULKAN_INTERNAL_IsDeviceSuitable(
			renderer,
			physicalDevices[i],
			&physicalDeviceExtensions[i],
			surface,
			&queueFamilyIndex,
			&deviceRank
		)) {
			/* Use this for rendering.
			 * Note that this may override a previous device that
			 * supports rendering, but shares the same device rank.
			 */
			suitableIndex = i;
			suitableQueueFamilyIndex = queueFamilyIndex;
			highestRank = deviceRank;
		}
		else if (deviceRank > highestRank)
		{
			/* In this case, we found a... "realer?" GPU,
			 * but it doesn't actually support our Vulkan.
			 * We should disqualify all devices below as a
			 * result, because if we don't we end up
			 * ignoring real hardware and risk using
			 * something like LLVMpipe instead!
			 * -flibit
			 */
			suitableIndex = UINT32_MAX;
			highestRank = deviceRank;
		}
	}

	if (suitableIndex != UINT32_MAX)
	{
		renderer->supports = physicalDeviceExtensions[suitableIndex];
		renderer->physicalDevice = physicalDevices[suitableIndex];
		renderer->queueFamilyIndex = suitableQueueFamilyIndex;
	}
	else
	{
		SDL_stack_free(physicalDevices);
		SDL_stack_free(physicalDeviceExtensions);
		return 0;
	}

	renderer->physicalDeviceProperties.sType =
		VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
	if (renderer->supports.KHR_driver_properties)
	{
		renderer->physicalDeviceDriverProperties.sType =
			VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR;
		renderer->physicalDeviceDriverProperties.pNext = NULL;
		renderer->physicalDeviceProperties.pNext =
			&renderer->physicalDeviceDriverProperties;
	}
	else
	{
		renderer->physicalDeviceProperties.pNext = NULL;
	}

	renderer->vkGetPhysicalDeviceProperties2KHR(
		renderer->physicalDevice,
		&renderer->physicalDeviceProperties
	);

	renderer->vkGetPhysicalDeviceMemoryProperties(
		renderer->physicalDevice,
		&renderer->memoryProperties
	);

	SDL_stack_free(physicalDevices);
	SDL_stack_free(physicalDeviceExtensions);
	return 1;
}
static uint8_t VULKAN_INTERNAL_CreateLogicalDevice(
	VulkanRenderer *renderer
) {
	/* Creates the VkDevice with a single unified graphics queue, the
	 * feature set Refresh requires, and the extensions discovered during
	 * physical-device selection. Loads the device-level entry points and
	 * fetches the queue handle. Returns 1 on success, 0 on failure.
	 * (Fix: removed stray VCS blame lines embedded in the body that made
	 * the block non-compilable; logic is otherwise unchanged.)
	 */
	VkResult vulkanResult;
	VkDeviceCreateInfo deviceCreateInfo;
	VkPhysicalDeviceFeatures deviceFeatures;
	VkPhysicalDevicePortabilitySubsetFeaturesKHR portabilityFeatures;
	const char **deviceExtensions;
	VkDeviceQueueCreateInfo queueCreateInfo;
	float queuePriority = 1.0f;

	queueCreateInfo.sType =
		VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
	queueCreateInfo.pNext = NULL;
	queueCreateInfo.flags = 0;
	queueCreateInfo.queueFamilyIndex = renderer->queueFamilyIndex;
	queueCreateInfo.queueCount = 1;
	queueCreateInfo.pQueuePriorities = &queuePriority;

	/* specifying used device features */
	SDL_zero(deviceFeatures);
	deviceFeatures.fillModeNonSolid = VK_TRUE;
	deviceFeatures.samplerAnisotropy = VK_TRUE;
	deviceFeatures.multiDrawIndirect = VK_TRUE;
	deviceFeatures.independentBlend = VK_TRUE;

	/* creating the logical device */
	deviceCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
	if (renderer->supports.KHR_portability_subset)
	{
		/* MoltenVK et al: declare exactly which portability features
		 * we rely on (only image view swizzling)
		 */
		portabilityFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PORTABILITY_SUBSET_FEATURES_KHR;
		portabilityFeatures.pNext = NULL;
		portabilityFeatures.constantAlphaColorBlendFactors = VK_FALSE;
		portabilityFeatures.events = VK_FALSE;
		portabilityFeatures.imageViewFormatReinterpretation = VK_FALSE;
		portabilityFeatures.imageViewFormatSwizzle = VK_TRUE;
		portabilityFeatures.imageView2DOn3DImage = VK_FALSE;
		portabilityFeatures.multisampleArrayImage = VK_FALSE;
		portabilityFeatures.mutableComparisonSamplers = VK_FALSE;
		portabilityFeatures.pointPolygons = VK_FALSE;
		portabilityFeatures.samplerMipLodBias = VK_FALSE; /* Technically should be true, but eh */
		portabilityFeatures.separateStencilMaskRef = VK_FALSE;
		portabilityFeatures.shaderSampleRateInterpolationFunctions = VK_FALSE;
		portabilityFeatures.tessellationIsolines = VK_FALSE;
		portabilityFeatures.tessellationPointMode = VK_FALSE;
		portabilityFeatures.triangleFans = VK_FALSE;
		portabilityFeatures.vertexAttributeAccessBeyondStride = VK_FALSE;
		deviceCreateInfo.pNext = &portabilityFeatures;
	}
	else
	{
		deviceCreateInfo.pNext = NULL;
	}
	deviceCreateInfo.flags = 0;
	deviceCreateInfo.queueCreateInfoCount = 1;
	deviceCreateInfo.pQueueCreateInfos = &queueCreateInfo;
	deviceCreateInfo.enabledLayerCount = 0;
	deviceCreateInfo.ppEnabledLayerNames = NULL;
	deviceCreateInfo.enabledExtensionCount = GetDeviceExtensionCount(
		&renderer->supports
	);
	deviceExtensions = SDL_stack_alloc(
		const char*,
		deviceCreateInfo.enabledExtensionCount
	);
	CreateDeviceExtensionArray(&renderer->supports, deviceExtensions);
	deviceCreateInfo.ppEnabledExtensionNames = deviceExtensions;
	deviceCreateInfo.pEnabledFeatures = &deviceFeatures;

	vulkanResult = renderer->vkCreateDevice(
		renderer->physicalDevice,
		&deviceCreateInfo,
		NULL,
		&renderer->logicalDevice
	);
	SDL_stack_free(deviceExtensions);
	VULKAN_ERROR_CHECK(vulkanResult, vkCreateDevice, 0)

	/* Load vkDevice entry points */

	#define VULKAN_DEVICE_FUNCTION(ext, ret, func, params) \
		renderer->func = (vkfntype_##func) \
			renderer->vkGetDeviceProcAddr( \
				renderer->logicalDevice, \
				#func \
			);
	#include "Refresh_Driver_Vulkan_vkfuncs.h"

	renderer->vkGetDeviceQueue(
		renderer->logicalDevice,
		renderer->queueFamilyIndex,
		0,
		&renderer->unifiedQueue
	);

	return 1;
}
static void VULKAN_INTERNAL_LoadEntryPoints(void)
{
2023-10-15 05:14:00 +00:00
/* Required for MoltenVK support */
SDL_setenv("MVK_CONFIG_FULL_IMAGE_VIEW_SWIZZLE", "1", 1);
2021-01-14 01:37:54 +00:00
/* Load Vulkan entry points */
if (SDL_Vulkan_LoadLibrary(NULL) < 0)
{
Refresh_LogWarn("Vulkan: SDL_Vulkan_LoadLibrary failed!");
return;
2021-01-14 01:37:54 +00:00
}
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpedantic"
vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)SDL_Vulkan_GetVkGetInstanceProcAddr();
#pragma GCC diagnostic pop
if (vkGetInstanceProcAddr == NULL)
{
Refresh_LogWarn(
"SDL_Vulkan_GetVkGetInstanceProcAddr(): %s",
SDL_GetError()
);
return;
2021-01-14 01:37:54 +00:00
}
#define VULKAN_GLOBAL_FUNCTION(name) \
name = (PFN_##name) vkGetInstanceProcAddr(VK_NULL_HANDLE, #name); \
if (name == NULL) \
{ \
Refresh_LogWarn("vkGetInstanceProcAddr(VK_NULL_HANDLE, \"" #name "\") failed"); \
return; \
2021-01-14 01:37:54 +00:00
}
#include "Refresh_Driver_Vulkan_vkfuncs.h"
}
static uint8_t VULKAN_INTERNAL_PrepareVulkan(
	VulkanRenderer *renderer
) {
	/* Bootstraps Vulkan far enough to pick a physical device: loads entry
	 * points, creates the instance against a hidden dummy window, and runs
	 * device selection. Returns 1 on success, 0 on failure.
	 *
	 * Ownership note: `renderer` is owned by the caller and is NOT freed
	 * here on failure. Previously this function called SDL_free(renderer)
	 * on some failure paths while VULKAN_PrepareDriver also freed it,
	 * causing a double free.
	 *
	 * Fix: the DeterminePhysicalDevice failure path previously leaked the
	 * surface and the dummy window; both are now cleaned up.
	 */
	SDL_Window *dummyWindowHandle;
	VkSurfaceKHR surface;

	VULKAN_INTERNAL_LoadEntryPoints();

	dummyWindowHandle = SDL_CreateWindow(
		"Refresh Vulkan",
		0, 0,
		128, 128,
		SDL_WINDOW_VULKAN | SDL_WINDOW_HIDDEN
	);

	if (dummyWindowHandle == NULL)
	{
		Refresh_LogWarn("Vulkan: Could not create dummy window");
		return 0;
	}

	if (!VULKAN_INTERNAL_CreateInstance(renderer, dummyWindowHandle))
	{
		SDL_DestroyWindow(dummyWindowHandle);
		Refresh_LogWarn("Vulkan: Could not create Vulkan instance");
		return 0;
	}

	if (!SDL_Vulkan_CreateSurface(
		(SDL_Window*) dummyWindowHandle,
		renderer->instance,
		&surface
	)) {
		SDL_DestroyWindow(dummyWindowHandle);
		Refresh_LogWarn(
			"SDL_Vulkan_CreateSurface failed: %s",
			SDL_GetError()
		);
		return 0;
	}

	/* Resolve instance-level entry points now that we have an instance */
	#define VULKAN_INSTANCE_FUNCTION(ext, ret, func, params) \
		renderer->func = (vkfntype_##func) vkGetInstanceProcAddr(renderer->instance, #func);
	#include "Refresh_Driver_Vulkan_vkfuncs.h"

	if (!VULKAN_INTERNAL_DeterminePhysicalDevice(renderer, surface))
	{
		renderer->vkDestroySurfaceKHR(
			renderer->instance,
			surface,
			NULL
		);
		SDL_DestroyWindow(dummyWindowHandle);
		return 0;
	}

	renderer->vkDestroySurfaceKHR(
		renderer->instance,
		surface,
		NULL
	);
	SDL_DestroyWindow(dummyWindowHandle);
	return 1;
}
static uint8_t VULKAN_PrepareDriver(uint32_t *flags)
{
	/* Probes whether Vulkan is usable at all by spinning up a throwaway
	 * renderer. On success, sets *flags to SDL_WINDOW_VULKAN so windows
	 * get created with Vulkan support. Returns 1 on success, 0 otherwise.
	 *
	 * Fix: vkDestroyInstance is an instance-level pointer resolved only
	 * after instance creation succeeds; the renderer is zero-initialized,
	 * so calling it unconditionally crashed (NULL call) whenever
	 * VULKAN_INTERNAL_PrepareVulkan failed before creating the instance.
	 * It is now guarded.
	 */

	/* Set up dummy VulkanRenderer */
	VulkanRenderer *renderer = (VulkanRenderer*) SDL_malloc(sizeof(VulkanRenderer));
	uint8_t result;

	SDL_memset(renderer, '\0', sizeof(VulkanRenderer));

	result = VULKAN_INTERNAL_PrepareVulkan(renderer);

	if (!result)
	{
		Refresh_LogWarn("Vulkan: Failed to determine a suitable physical device");
	}
	else
	{
		*flags = SDL_WINDOW_VULKAN;
	}

	if (renderer->vkDestroyInstance != NULL && renderer->instance != VK_NULL_HANDLE)
	{
		renderer->vkDestroyInstance(renderer->instance, NULL);
	}
	SDL_free(renderer);
	return result;
}
static Refresh_Device* VULKAN_CreateDevice(
uint8_t debugMode
2020-12-17 01:23:49 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) SDL_malloc(sizeof(VulkanRenderer));
2020-12-17 01:23:49 +00:00
2022-02-25 21:42:11 +00:00
Refresh_Device *result;
VkResult vulkanResult;
2020-12-18 22:35:33 +00:00
uint32_t i;
2020-12-17 04:04:47 +00:00
2020-12-29 22:52:24 +00:00
/* Variables: Descriptor set layouts */
2020-12-17 19:40:49 +00:00
VkDescriptorSetLayoutCreateInfo setLayoutCreateInfo;
VkDescriptorSetLayoutBinding vertexParamLayoutBinding;
VkDescriptorSetLayoutBinding fragmentParamLayoutBinding;
2020-12-31 04:39:47 +00:00
VkDescriptorSetLayoutBinding computeParamLayoutBinding;
2020-12-17 19:40:49 +00:00
2020-12-31 04:39:47 +00:00
VkDescriptorSetLayoutBinding emptyVertexSamplerLayoutBinding;
VkDescriptorSetLayoutBinding emptyFragmentSamplerLayoutBinding;
2020-12-29 22:52:24 +00:00
VkDescriptorSetLayoutBinding emptyComputeBufferDescriptorSetLayoutBinding;
2020-12-31 04:39:47 +00:00
VkDescriptorSetLayoutBinding emptyComputeImageDescriptorSetLayoutBinding;
2020-12-29 22:52:24 +00:00
2020-12-20 09:29:15 +00:00
/* Variables: UBO Creation */
VkDescriptorPoolCreateInfo defaultDescriptorPoolInfo;
2020-12-31 04:39:47 +00:00
VkDescriptorPoolSize poolSizes[4];
2020-12-28 22:07:13 +00:00
VkDescriptorSetAllocateInfo descriptorAllocateInfo;
2020-12-20 09:29:15 +00:00
2022-06-17 07:41:27 +00:00
/* Variables: Image Format Detection */
VkImageFormatProperties imageFormatProperties;
SDL_memset(renderer, '\0', sizeof(VulkanRenderer));
renderer->debugMode = debugMode;
if (!VULKAN_INTERNAL_PrepareVulkan(renderer))
{
Refresh_LogError("Failed to initialize Vulkan!");
return NULL;
}
Refresh_LogInfo("Refresh Driver: Vulkan");
Refresh_LogInfo(
"Vulkan Device: %s",
renderer->physicalDeviceProperties.properties.deviceName
);
Refresh_LogInfo(
"Vulkan Driver: %s %s",
renderer->physicalDeviceDriverProperties.driverName,
renderer->physicalDeviceDriverProperties.driverInfo
);
Refresh_LogInfo(
"Vulkan Conformance: %u.%u.%u",
renderer->physicalDeviceDriverProperties.conformanceVersion.major,
renderer->physicalDeviceDriverProperties.conformanceVersion.minor,
renderer->physicalDeviceDriverProperties.conformanceVersion.patch
);
if (!VULKAN_INTERNAL_CreateLogicalDevice(
renderer
)) {
Refresh_LogError("Failed to create logical device");
return NULL;
}
/* FIXME: just move this into this function */
result = (Refresh_Device*) SDL_malloc(sizeof(Refresh_Device));
2022-02-25 21:42:11 +00:00
ASSIGN_DRIVER(VULKAN)
2020-12-17 01:23:49 +00:00
2022-02-25 21:42:11 +00:00
result->driverData = (Refresh_Renderer*) renderer;
2020-12-17 01:23:49 +00:00
2020-12-17 03:50:31 +00:00
/*
* Create initial swapchain array
2020-12-17 03:50:31 +00:00
*/
renderer->claimedWindowCapacity = 1;
renderer->claimedWindowCount = 0;
renderer->claimedWindows = SDL_malloc(
renderer->claimedWindowCapacity * sizeof(WindowData*)
);
2020-12-17 03:50:31 +00:00
2020-12-22 01:59:08 +00:00
/* Threading */
renderer->allocatorLock = SDL_CreateMutex();
renderer->disposeLock = SDL_CreateMutex();
renderer->submitLock = SDL_CreateMutex();
renderer->acquireCommandBufferLock = SDL_CreateMutex();
renderer->renderPassFetchLock = SDL_CreateMutex();
renderer->framebufferFetchLock = SDL_CreateMutex();
2020-12-17 04:04:47 +00:00
/*
2021-01-02 06:07:15 +00:00
* Create submitted command buffer list
2020-12-17 04:04:47 +00:00
*/
2021-01-02 06:07:15 +00:00
renderer->submittedCommandBufferCapacity = 16;
2020-12-17 04:04:47 +00:00
renderer->submittedCommandBufferCount = 0;
2021-01-02 06:07:15 +00:00
renderer->submittedCommandBuffers = SDL_malloc(sizeof(VulkanCommandBuffer*) * renderer->submittedCommandBufferCapacity);
2020-12-17 04:04:47 +00:00
2020-12-22 01:59:08 +00:00
/* Memory Allocator */
renderer->memoryAllocator = (VulkanMemoryAllocator*) SDL_malloc(
2020-12-22 01:59:08 +00:00
sizeof(VulkanMemoryAllocator)
);
for (i = 0; i < VK_MAX_MEMORY_TYPES; i += 1)
{
renderer->memoryAllocator->subAllocators[i].memoryTypeIndex = i;
2020-12-22 01:59:08 +00:00
renderer->memoryAllocator->subAllocators[i].allocations = NULL;
renderer->memoryAllocator->subAllocators[i].allocationCount = 0;
renderer->memoryAllocator->subAllocators[i].sortedFreeRegions = SDL_malloc(
sizeof(VulkanMemoryFreeRegion*) * 4
);
renderer->memoryAllocator->subAllocators[i].sortedFreeRegionCount = 0;
renderer->memoryAllocator->subAllocators[i].sortedFreeRegionCapacity = 4;
}
/* Set up UBO layouts */
2020-12-31 04:39:47 +00:00
renderer->minUBOAlignment = (uint32_t) renderer->physicalDeviceProperties.properties.limits.minUniformBufferOffsetAlignment;
2020-12-17 19:40:49 +00:00
emptyVertexSamplerLayoutBinding.binding = 0;
emptyVertexSamplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
emptyVertexSamplerLayoutBinding.descriptorCount = 0;
emptyVertexSamplerLayoutBinding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
emptyVertexSamplerLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
setLayoutCreateInfo.pNext = NULL;
setLayoutCreateInfo.flags = 0;
setLayoutCreateInfo.bindingCount = 1;
setLayoutCreateInfo.pBindings = &emptyVertexSamplerLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->emptyVertexSamplerLayout
);
emptyFragmentSamplerLayoutBinding.binding = 0;
emptyFragmentSamplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
emptyFragmentSamplerLayoutBinding.descriptorCount = 0;
emptyFragmentSamplerLayoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
emptyFragmentSamplerLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.pBindings = &emptyFragmentSamplerLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->emptyFragmentSamplerLayout
);
2020-12-29 22:52:24 +00:00
emptyComputeBufferDescriptorSetLayoutBinding.binding = 0;
emptyComputeBufferDescriptorSetLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
emptyComputeBufferDescriptorSetLayoutBinding.descriptorCount = 0;
emptyComputeBufferDescriptorSetLayoutBinding.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
emptyComputeBufferDescriptorSetLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.pBindings = &emptyComputeBufferDescriptorSetLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->emptyComputeBufferDescriptorSetLayout
);
2020-12-31 04:39:47 +00:00
emptyComputeImageDescriptorSetLayoutBinding.binding = 0;
emptyComputeImageDescriptorSetLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
2020-12-31 04:39:47 +00:00
emptyComputeImageDescriptorSetLayoutBinding.descriptorCount = 0;
emptyComputeImageDescriptorSetLayoutBinding.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
emptyComputeImageDescriptorSetLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.pBindings = &emptyComputeImageDescriptorSetLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->emptyComputeImageDescriptorSetLayout
);
2020-12-17 19:40:49 +00:00
vertexParamLayoutBinding.binding = 0;
vertexParamLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
vertexParamLayoutBinding.descriptorCount = 1;
vertexParamLayoutBinding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
vertexParamLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.bindingCount = 1;
setLayoutCreateInfo.pBindings = &vertexParamLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->vertexUniformDescriptorSetLayout
2020-12-17 19:40:49 +00:00
);
if (vulkanResult != VK_SUCCESS)
{
2021-01-05 23:00:51 +00:00
Refresh_LogError("Failed to create vertex UBO layout!");
2020-12-17 19:40:49 +00:00
return NULL;
}
fragmentParamLayoutBinding.binding = 0;
fragmentParamLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
fragmentParamLayoutBinding.descriptorCount = 1;
fragmentParamLayoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
fragmentParamLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.bindingCount = 1;
setLayoutCreateInfo.pBindings = &fragmentParamLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->fragmentUniformDescriptorSetLayout
2020-12-17 19:40:49 +00:00
);
if (vulkanResult != VK_SUCCESS)
{
2021-01-05 23:00:51 +00:00
Refresh_LogError("Failed to create fragment UBO layout!");
2020-12-17 19:40:49 +00:00
return NULL;
}
2020-12-31 04:39:47 +00:00
computeParamLayoutBinding.binding = 0;
computeParamLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
computeParamLayoutBinding.descriptorCount = 1;
computeParamLayoutBinding.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
computeParamLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.bindingCount = 1;
setLayoutCreateInfo.pBindings = &computeParamLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->computeUniformDescriptorSetLayout
2020-12-31 04:39:47 +00:00
);
/* Default Descriptors */
2020-12-20 09:29:15 +00:00
poolSizes[0].descriptorCount = 2;
poolSizes[0].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
poolSizes[1].descriptorCount = 1;
poolSizes[1].type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
2020-12-20 09:29:15 +00:00
2020-12-29 22:52:24 +00:00
poolSizes[2].descriptorCount = 1;
poolSizes[2].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
2020-12-29 22:52:24 +00:00
poolSizes[3].descriptorCount = 3;
poolSizes[3].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
2020-12-31 04:39:47 +00:00
defaultDescriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
defaultDescriptorPoolInfo.pNext = NULL;
defaultDescriptorPoolInfo.flags = 0;
defaultDescriptorPoolInfo.maxSets = 2 + 1 + 1 + 3;
2020-12-31 04:39:47 +00:00
defaultDescriptorPoolInfo.poolSizeCount = 4;
defaultDescriptorPoolInfo.pPoolSizes = poolSizes;
2020-12-20 09:29:15 +00:00
renderer->vkCreateDescriptorPool(
renderer->logicalDevice,
&defaultDescriptorPoolInfo,
2020-12-20 09:29:15 +00:00
NULL,
&renderer->defaultDescriptorPool
);
descriptorAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
descriptorAllocateInfo.pNext = NULL;
descriptorAllocateInfo.descriptorPool = renderer->defaultDescriptorPool;
descriptorAllocateInfo.descriptorSetCount = 1;
descriptorAllocateInfo.pSetLayouts = &renderer->emptyVertexSamplerLayout;
renderer->vkAllocateDescriptorSets(
renderer->logicalDevice,
&descriptorAllocateInfo,
&renderer->emptyVertexSamplerDescriptorSet
);
descriptorAllocateInfo.pSetLayouts = &renderer->emptyFragmentSamplerLayout;
renderer->vkAllocateDescriptorSets(
renderer->logicalDevice,
&descriptorAllocateInfo,
&renderer->emptyFragmentSamplerDescriptorSet
);
descriptorAllocateInfo.pSetLayouts = &renderer->emptyComputeBufferDescriptorSetLayout;
renderer->vkAllocateDescriptorSets(
renderer->logicalDevice,
&descriptorAllocateInfo,
&renderer->emptyComputeBufferDescriptorSet
);
descriptorAllocateInfo.pSetLayouts = &renderer->emptyComputeImageDescriptorSetLayout;
renderer->vkAllocateDescriptorSets(
renderer->logicalDevice,
&descriptorAllocateInfo,
&renderer->emptyComputeImageDescriptorSet
);
/* Initialize uniform buffer objects */
renderer->vertexUniformBufferObject = VULKAN_INTERNAL_CreateUniformBufferObject(
renderer,
UNIFORM_BUFFER_VERTEX
);
renderer->fragmentUniformBufferObject = VULKAN_INTERNAL_CreateUniformBufferObject(
renderer,
UNIFORM_BUFFER_FRAGMENT
);
renderer->computeUniformBufferObject = VULKAN_INTERNAL_CreateUniformBufferObject(
renderer,
UNIFORM_BUFFER_COMPUTE
);
/* Initialize caches */
for (i = 0; i < NUM_COMMAND_POOL_BUCKETS; i += 1)
{
renderer->commandPoolHashTable.buckets[i].elements = NULL;
renderer->commandPoolHashTable.buckets[i].count = 0;
renderer->commandPoolHashTable.buckets[i].capacity = 0;
}
for (i = 0; i < NUM_PIPELINE_LAYOUT_BUCKETS; i += 1)
{
renderer->graphicsPipelineLayoutHashTable.buckets[i].elements = NULL;
renderer->graphicsPipelineLayoutHashTable.buckets[i].count = 0;
renderer->graphicsPipelineLayoutHashTable.buckets[i].capacity = 0;
}
for (i = 0; i < NUM_PIPELINE_LAYOUT_BUCKETS; i += 1)
{
renderer->computePipelineLayoutHashTable.buckets[i].elements = NULL;
renderer->computePipelineLayoutHashTable.buckets[i].count = 0;
renderer->computePipelineLayoutHashTable.buckets[i].capacity = 0;
}
for (i = 0; i < NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS; i += 1)
{
renderer->descriptorSetLayoutHashTable.buckets[i].elements = NULL;
renderer->descriptorSetLayoutHashTable.buckets[i].count = 0;
renderer->descriptorSetLayoutHashTable.buckets[i].capacity = 0;
}
renderer->renderPassHashArray.elements = NULL;
renderer->renderPassHashArray.count = 0;
renderer->renderPassHashArray.capacity = 0;
renderer->framebufferHashArray.elements = NULL;
renderer->framebufferHashArray.count = 0;
renderer->framebufferHashArray.capacity = 0;
/* Initialize fence pool */
renderer->fencePool.lock = SDL_CreateMutex();
renderer->fencePool.availableFenceCapacity = 4;
renderer->fencePool.availableFenceCount = 0;
renderer->fencePool.availableFences = SDL_malloc(
renderer->fencePool.availableFenceCapacity * sizeof(VkFence)
);
/* Some drivers don't support D16, so we have to fall back to D32. */
vulkanResult = renderer->vkGetPhysicalDeviceImageFormatProperties(
renderer->physicalDevice,
VK_FORMAT_D16_UNORM,
VK_IMAGE_TYPE_2D,
VK_IMAGE_TILING_OPTIMAL,
VK_IMAGE_ASPECT_DEPTH_BIT,
0,
&imageFormatProperties
);
if (vulkanResult == VK_ERROR_FORMAT_NOT_SUPPORTED)
{
renderer->D16Format = VK_FORMAT_D32_SFLOAT;
}
else
{
renderer->D16Format = VK_FORMAT_D16_UNORM;
}
vulkanResult = renderer->vkGetPhysicalDeviceImageFormatProperties(
renderer->physicalDevice,
VK_FORMAT_D16_UNORM_S8_UINT,
VK_IMAGE_TYPE_2D,
VK_IMAGE_TILING_OPTIMAL,
VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT,
0,
&imageFormatProperties
);
if (vulkanResult == VK_ERROR_FORMAT_NOT_SUPPORTED)
{
renderer->D16S8Format = VK_FORMAT_D32_SFLOAT_S8_UINT;
}
else
{
renderer->D16S8Format = VK_FORMAT_D16_UNORM_S8_UINT;
}
/* Deferred destroy storage */
renderer->texturesToDestroyCapacity = 16;
renderer->texturesToDestroyCount = 0;
renderer->texturesToDestroy = (VulkanTexture**)SDL_malloc(
sizeof(VulkanTexture*) *
renderer->texturesToDestroyCapacity
);
renderer->buffersToDestroyCapacity = 16;
renderer->buffersToDestroyCount = 0;
renderer->buffersToDestroy = SDL_malloc(
sizeof(VulkanBuffer*) *
renderer->buffersToDestroyCapacity
);
renderer->samplersToDestroyCapacity = 16;
renderer->samplersToDestroyCount = 0;
renderer->samplersToDestroy = SDL_malloc(
sizeof(VulkanSampler*) *
renderer->samplersToDestroyCapacity
);
renderer->graphicsPipelinesToDestroyCapacity = 16;
renderer->graphicsPipelinesToDestroyCount = 0;
renderer->graphicsPipelinesToDestroy = SDL_malloc(
sizeof(VulkanGraphicsPipeline*) *
renderer->graphicsPipelinesToDestroyCapacity
);
renderer->computePipelinesToDestroyCapacity = 16;
renderer->computePipelinesToDestroyCount = 0;
renderer->computePipelinesToDestroy = SDL_malloc(
sizeof(VulkanComputePipeline*) *
renderer->computePipelinesToDestroyCapacity
);
renderer->shaderModulesToDestroyCapacity = 16;
renderer->shaderModulesToDestroyCount = 0;
renderer->shaderModulesToDestroy = SDL_malloc(
sizeof(VulkanShaderModule*) *
renderer->shaderModulesToDestroyCapacity
);
renderer->framebuffersToDestroyCapacity = 16;
renderer->framebuffersToDestroyCount = 0;
renderer->framebuffersToDestroy = SDL_malloc(
sizeof(VulkanFramebuffer*) *
renderer->framebuffersToDestroyCapacity
);
/* Defrag state */
renderer->defragInProgress = 0;
renderer->allocationsToDefragCount = 0;
renderer->allocationsToDefragCapacity = 4;
renderer->allocationsToDefrag = SDL_malloc(
renderer->allocationsToDefragCapacity * sizeof(VulkanMemoryAllocation*)
);
return result;
}
Refresh_Driver VulkanDriver = {
2022-02-25 21:42:11 +00:00
"Vulkan",
VULKAN_PrepareDriver,
2022-02-25 21:42:11 +00:00
VULKAN_CreateDevice
2020-12-17 00:27:14 +00:00
};
#endif //REFRESH_DRIVER_VULKAN