Refresh/src/Refresh_Driver_Vulkan.c

11243 lines
304 KiB
C
Raw Normal View History

/* Refresh - XNA-inspired 3D Graphics Library with modern capabilities
2020-12-17 00:27:14 +00:00
*
* Copyright (c) 2020 Evan Hemsley
*
* This software is provided 'as-is', without any express or implied warranty.
* In no event will the authors be held liable for any damages arising from
* the use of this software.
*
* Permission is granted to anyone to use this software for any purpose,
* including commercial applications, and to alter it and redistribute it
* freely, subject to the following restrictions:
*
* 1. The origin of this software must not be misrepresented; you must not
* claim that you wrote the original software. If you use this software in a
* product, an acknowledgment in the product documentation would be
* appreciated but is not required.
*
* 2. Altered source versions must be plainly marked as such, and must not be
* misrepresented as being the original software.
*
* 3. This notice may not be removed or altered from any source distribution.
*
* Evan "cosmonaut" Hemsley <evan@moonside.games>
*
*/
#if REFRESH_DRIVER_VULKAN
#define VK_NO_PROTOTYPES
#include "vulkan/vulkan.h"
#include "Refresh_Driver.h"
#include <SDL.h>
#include <SDL_syswm.h>
#include <SDL_vulkan.h>
#define VULKAN_INTERNAL_clamp(val, min, max) SDL_max(min, SDL_min(val, max))
2020-12-17 02:38:22 +00:00
/* Global Vulkan Loader Entry Points */
static PFN_vkGetInstanceProcAddr vkGetInstanceProcAddr = NULL;
#define VULKAN_GLOBAL_FUNCTION(name) \
static PFN_##name name = NULL;
#include "Refresh_Driver_Vulkan_vkfuncs.h"
/* vkInstance/vkDevice function typedefs */
#define VULKAN_INSTANCE_FUNCTION(ext, ret, func, params) \
typedef ret (VKAPI_CALL *vkfntype_##func) params;
#define VULKAN_DEVICE_FUNCTION(ext, ret, func, params) \
typedef ret (VKAPI_CALL *vkfntype_##func) params;
#include "Refresh_Driver_Vulkan_vkfuncs.h"
/* Required extensions */
static const char* deviceExtensionNames[] =
{
/* Globally supported */
VK_KHR_SWAPCHAIN_EXTENSION_NAME,
/* Core since 1.1 */
VK_KHR_MAINTENANCE1_EXTENSION_NAME,
VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME,
VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME,
/* Core since 1.2 */
VK_KHR_DRIVER_PROPERTIES_EXTENSION_NAME,
/* EXT, probably not going to be Core */
VK_EXT_VERTEX_ATTRIBUTE_DIVISOR_EXTENSION_NAME,
};
static uint32_t deviceExtensionCount = SDL_arraysize(deviceExtensionNames);
2020-12-17 08:19:02 +00:00
/* Defines */
2022-02-25 21:51:29 +00:00
#define STARTING_ALLOCATION_SIZE 64000000 /* 64MB */
#define MAX_ALLOCATION_SIZE 256000000 /* 256MB */
#define TRANSFER_BUFFER_STARTING_SIZE 8000000 /* 8MB */
2022-02-25 21:51:29 +00:00
#define UBO_BUFFER_SIZE 16000 /* 16KB */
2020-12-30 00:53:10 +00:00
#define DESCRIPTOR_POOL_STARTING_SIZE 128
2020-12-27 23:34:15 +00:00
#define DESCRIPTOR_SET_DEACTIVATE_FRAMES 10
#define WINDOW_DATA "Refresh_VulkanWindowData"
2020-12-18 22:35:33 +00:00
2022-02-25 21:51:29 +00:00
#define IDENTITY_SWIZZLE \
{ \
VK_COMPONENT_SWIZZLE_IDENTITY, \
VK_COMPONENT_SWIZZLE_IDENTITY, \
VK_COMPONENT_SWIZZLE_IDENTITY, \
VK_COMPONENT_SWIZZLE_IDENTITY \
2020-12-18 22:35:33 +00:00
}
#define NULL_DESC_LAYOUT (VkDescriptorSetLayout) 0
#define NULL_PIPELINE_LAYOUT (VkPipelineLayout) 0
2021-01-05 23:00:51 +00:00
#define NULL_RENDER_PASS (Refresh_RenderPass*) 0
2020-12-17 08:19:02 +00:00
2020-12-29 00:28:14 +00:00
#define EXPAND_ELEMENTS_IF_NEEDED(arr, initialValue, type) \
2022-02-25 21:51:29 +00:00
if (arr->count == arr->capacity) \
{ \
if (arr->capacity == 0) \
{ \
arr->capacity = initialValue; \
} \
else \
{ \
arr->capacity *= 2; \
} \
arr->elements = (type*) SDL_realloc( \
arr->elements, \
arr->capacity * sizeof(type) \
); \
}
2020-12-29 00:42:51 +00:00
#define EXPAND_ARRAY_IF_NEEDED(arr, elementType, newCount, capacity, newCapacity) \
2022-02-25 21:51:29 +00:00
if (newCount >= capacity) \
{ \
capacity = newCapacity; \
arr = (elementType*) SDL_realloc( \
arr, \
sizeof(elementType) * capacity \
); \
2020-12-29 00:28:14 +00:00
}
2020-12-29 00:56:49 +00:00
#define MOVE_ARRAY_CONTENTS_AND_RESET(i, dstArr, dstCount, srcArr, srcCount) \
2022-02-25 21:51:29 +00:00
for (i = 0; i < srcCount; i += 1) \
{ \
dstArr[i] = srcArr[i]; \
} \
dstCount = srcCount; \
2020-12-29 00:56:49 +00:00
srcCount = 0;
2020-12-17 03:28:02 +00:00
/* Enums */
typedef enum VulkanResourceAccessType
{
/* Reads */
RESOURCE_ACCESS_NONE, /* For initialization */
RESOURCE_ACCESS_INDEX_BUFFER,
RESOURCE_ACCESS_VERTEX_BUFFER,
RESOURCE_ACCESS_INDIRECT_BUFFER,
2020-12-17 03:28:02 +00:00
RESOURCE_ACCESS_VERTEX_SHADER_READ_UNIFORM_BUFFER,
RESOURCE_ACCESS_VERTEX_SHADER_READ_SAMPLED_IMAGE,
RESOURCE_ACCESS_FRAGMENT_SHADER_READ_UNIFORM_BUFFER,
RESOURCE_ACCESS_FRAGMENT_SHADER_READ_SAMPLED_IMAGE,
RESOURCE_ACCESS_FRAGMENT_SHADER_READ_COLOR_ATTACHMENT,
RESOURCE_ACCESS_FRAGMENT_SHADER_READ_DEPTH_STENCIL_ATTACHMENT,
2020-12-31 04:39:47 +00:00
RESOURCE_ACCESS_COMPUTE_SHADER_READ_UNIFORM_BUFFER,
RESOURCE_ACCESS_COMPUTE_SHADER_READ_SAMPLED_IMAGE_OR_UNIFORM_TEXEL_BUFFER,
2020-12-31 06:28:37 +00:00
RESOURCE_ACCESS_COMPUTE_SHADER_READ_OTHER,
RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE,
2020-12-17 03:28:02 +00:00
RESOURCE_ACCESS_COLOR_ATTACHMENT_READ,
RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ,
RESOURCE_ACCESS_TRANSFER_READ,
RESOURCE_ACCESS_HOST_READ,
RESOURCE_ACCESS_PRESENT,
RESOURCE_ACCESS_END_OF_READ,
/* Writes */
RESOURCE_ACCESS_VERTEX_SHADER_WRITE,
RESOURCE_ACCESS_FRAGMENT_SHADER_WRITE,
RESOURCE_ACCESS_COLOR_ATTACHMENT_WRITE,
RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE,
RESOURCE_ACCESS_TRANSFER_WRITE,
RESOURCE_ACCESS_HOST_WRITE,
/* Read-Writes */
RESOURCE_ACCESS_COLOR_ATTACHMENT_READ_WRITE,
RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_WRITE,
RESOURCE_ACCESS_COMPUTE_SHADER_STORAGE_IMAGE_READ_WRITE,
RESOURCE_ACCESS_COMPUTE_SHADER_BUFFER_READ_WRITE,
RESOURCE_ACCESS_TRANSFER_READ_WRITE,
2020-12-17 03:28:02 +00:00
RESOURCE_ACCESS_GENERAL,
/* Count */
RESOURCE_ACCESS_TYPES_COUNT
} VulkanResourceAccessType;
2020-12-17 08:19:02 +00:00
/* Conversions */
static const uint8_t DEVICE_PRIORITY[] =
{
0, /* VK_PHYSICAL_DEVICE_TYPE_OTHER */
3, /* VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU */
4, /* VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU */
2, /* VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU */
1 /* VK_PHYSICAL_DEVICE_TYPE_CPU */
};
2020-12-17 08:19:02 +00:00
static VkFormat RefreshToVK_SurfaceFormat[] =
{
VK_FORMAT_R8G8B8A8_UNORM, /* R8G8B8A8_UNORM */
VK_FORMAT_B8G8R8A8_UNORM, /* B8G8R8A8_UNORM */
VK_FORMAT_R5G6B5_UNORM_PACK16, /* R5G6B5_UNORM */
VK_FORMAT_A1R5G5B5_UNORM_PACK16, /* A1R5G5B5_UNORM */
VK_FORMAT_B4G4R4A4_UNORM_PACK16, /* B4G4R4A4_UNORM */
VK_FORMAT_A2R10G10B10_UNORM_PACK32, /* A2R10G10B10_UNORM */
VK_FORMAT_R16G16_UNORM, /* R16G16_UNORM */
VK_FORMAT_R16G16B16A16_UNORM, /* R16G16B16A16_UNORM */
VK_FORMAT_R8_UNORM, /* R8_UNORM */
VK_FORMAT_BC1_RGBA_UNORM_BLOCK, /* BC1_UNORM */
VK_FORMAT_BC2_UNORM_BLOCK, /* BC2_UNORM */
VK_FORMAT_BC3_UNORM_BLOCK, /* BC3_UNORM */
VK_FORMAT_BC7_UNORM_BLOCK, /* BC7_UNORM */
VK_FORMAT_R8G8_SNORM, /* R8G8_SNORM */
VK_FORMAT_R8G8B8A8_SNORM, /* R8G8B8A8_SNORM */
VK_FORMAT_R16_SFLOAT, /* R16_SFLOAT */
VK_FORMAT_R16G16_SFLOAT, /* R16G16_SFLOAT */
2021-01-29 05:37:11 +00:00
VK_FORMAT_R16G16B16A16_SFLOAT, /* R16G16B16A16_SFLOAT */
VK_FORMAT_R32_SFLOAT, /* R32_SFLOAT */
VK_FORMAT_R32G32_SFLOAT, /* R32G32_SFLOAT */
VK_FORMAT_R32G32B32A32_SFLOAT, /* R32G32B32A32_SFLOAT */
VK_FORMAT_R8_UINT, /* R8_UINT */
VK_FORMAT_R8G8_UINT, /* R8G8_UINT */
VK_FORMAT_R8G8B8A8_UINT, /* R8G8B8A8_UINT */
VK_FORMAT_R16_UINT, /* R16_UINT */
VK_FORMAT_R16G16_UINT, /* R16G16_UINT */
VK_FORMAT_R16G16B16A16_UINT, /* R16G16B16A16_UINT */
VK_FORMAT_D16_UNORM, /* D16_UNORM */
VK_FORMAT_D32_SFLOAT, /* D32_SFLOAT */
VK_FORMAT_D16_UNORM_S8_UINT, /* D16_UNORM_S8_UINT */
VK_FORMAT_D32_SFLOAT_S8_UINT /* D32_SFLOAT_S8_UINT */
2020-12-17 08:19:02 +00:00
};
2020-12-17 19:40:49 +00:00
static VkFormat RefreshToVK_VertexFormat[] =
{
VK_FORMAT_R32_UINT, /* UINT */
VK_FORMAT_R32_SFLOAT, /* FLOAT */
VK_FORMAT_R32G32_SFLOAT, /* VECTOR2 */
VK_FORMAT_R32G32B32_SFLOAT, /* VECTOR3 */
2020-12-17 19:40:49 +00:00
VK_FORMAT_R32G32B32A32_SFLOAT, /* VECTOR4 */
VK_FORMAT_R8G8B8A8_UNORM, /* COLOR */
VK_FORMAT_R8G8B8A8_USCALED, /* BYTE4 */
VK_FORMAT_R16G16_SSCALED, /* SHORT2 */
2020-12-17 19:40:49 +00:00
VK_FORMAT_R16G16B16A16_SSCALED, /* SHORT4 */
VK_FORMAT_R16G16_SNORM, /* NORMALIZEDSHORT2 */
2020-12-17 19:40:49 +00:00
VK_FORMAT_R16G16B16A16_SNORM, /* NORMALIZEDSHORT4 */
VK_FORMAT_R16G16_SFLOAT, /* HALFVECTOR2 */
2020-12-17 19:40:49 +00:00
VK_FORMAT_R16G16B16A16_SFLOAT /* HALFVECTOR4 */
};
2020-12-20 07:31:55 +00:00
static VkIndexType RefreshToVK_IndexType[] =
{
VK_INDEX_TYPE_UINT16,
VK_INDEX_TYPE_UINT32
};
2020-12-17 19:40:49 +00:00
static VkPrimitiveTopology RefreshToVK_PrimitiveType[] =
{
VK_PRIMITIVE_TOPOLOGY_POINT_LIST,
VK_PRIMITIVE_TOPOLOGY_LINE_LIST,
VK_PRIMITIVE_TOPOLOGY_LINE_STRIP,
VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP
};
static VkPolygonMode RefreshToVK_PolygonMode[] =
{
VK_POLYGON_MODE_FILL,
VK_POLYGON_MODE_LINE,
VK_POLYGON_MODE_POINT
};
static VkCullModeFlags RefreshToVK_CullMode[] =
{
VK_CULL_MODE_NONE,
VK_CULL_MODE_FRONT_BIT,
VK_CULL_MODE_BACK_BIT,
VK_CULL_MODE_FRONT_AND_BACK
};
static VkFrontFace RefreshToVK_FrontFace[] =
{
VK_FRONT_FACE_COUNTER_CLOCKWISE,
VK_FRONT_FACE_CLOCKWISE
};
static VkBlendFactor RefreshToVK_BlendFactor[] =
{
VK_BLEND_FACTOR_ZERO,
VK_BLEND_FACTOR_ONE,
VK_BLEND_FACTOR_SRC_COLOR,
VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
VK_BLEND_FACTOR_DST_COLOR,
VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
VK_BLEND_FACTOR_SRC_ALPHA,
VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
VK_BLEND_FACTOR_DST_ALPHA,
VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
VK_BLEND_FACTOR_CONSTANT_COLOR,
VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_COLOR,
VK_BLEND_FACTOR_CONSTANT_ALPHA,
VK_BLEND_FACTOR_ONE_MINUS_CONSTANT_ALPHA,
VK_BLEND_FACTOR_SRC_ALPHA_SATURATE
2020-12-17 19:40:49 +00:00
};
static VkBlendOp RefreshToVK_BlendOp[] =
{
VK_BLEND_OP_ADD,
VK_BLEND_OP_SUBTRACT,
VK_BLEND_OP_REVERSE_SUBTRACT,
VK_BLEND_OP_MIN,
VK_BLEND_OP_MAX
};
static VkCompareOp RefreshToVK_CompareOp[] =
{
VK_COMPARE_OP_NEVER,
VK_COMPARE_OP_LESS,
VK_COMPARE_OP_EQUAL,
VK_COMPARE_OP_LESS_OR_EQUAL,
VK_COMPARE_OP_GREATER,
VK_COMPARE_OP_NOT_EQUAL,
VK_COMPARE_OP_GREATER_OR_EQUAL,
VK_COMPARE_OP_ALWAYS
};
static VkStencilOp RefreshToVK_StencilOp[] =
{
VK_STENCIL_OP_KEEP,
VK_STENCIL_OP_ZERO,
VK_STENCIL_OP_REPLACE,
VK_STENCIL_OP_INCREMENT_AND_CLAMP,
VK_STENCIL_OP_DECREMENT_AND_CLAMP,
VK_STENCIL_OP_INVERT,
VK_STENCIL_OP_INCREMENT_AND_WRAP,
VK_STENCIL_OP_DECREMENT_AND_WRAP
};
2020-12-17 08:19:02 +00:00
static VkAttachmentLoadOp RefreshToVK_LoadOp[] =
{
2022-02-25 21:42:11 +00:00
VK_ATTACHMENT_LOAD_OP_LOAD,
VK_ATTACHMENT_LOAD_OP_CLEAR,
VK_ATTACHMENT_LOAD_OP_DONT_CARE
2020-12-17 08:19:02 +00:00
};
static VkAttachmentStoreOp RefreshToVK_StoreOp[] =
{
2022-02-25 21:42:11 +00:00
VK_ATTACHMENT_STORE_OP_STORE,
VK_ATTACHMENT_STORE_OP_DONT_CARE
2020-12-17 08:19:02 +00:00
};
static VkSampleCountFlagBits RefreshToVK_SampleCount[] =
{
2022-02-25 21:42:11 +00:00
VK_SAMPLE_COUNT_1_BIT,
VK_SAMPLE_COUNT_2_BIT,
VK_SAMPLE_COUNT_4_BIT,
VK_SAMPLE_COUNT_8_BIT,
VK_SAMPLE_COUNT_16_BIT,
VK_SAMPLE_COUNT_32_BIT,
VK_SAMPLE_COUNT_64_BIT
2020-12-17 08:19:02 +00:00
};
2020-12-17 19:40:49 +00:00
static VkVertexInputRate RefreshToVK_VertexInputRate[] =
{
VK_VERTEX_INPUT_RATE_VERTEX,
VK_VERTEX_INPUT_RATE_INSTANCE
};
2021-01-03 21:01:29 +00:00
static VkFilter RefreshToVK_Filter[] =
2020-12-18 01:48:26 +00:00
{
VK_FILTER_NEAREST,
2021-01-03 21:01:29 +00:00
VK_FILTER_LINEAR,
VK_FILTER_CUBIC_EXT
2020-12-18 01:48:26 +00:00
};
static VkSamplerMipmapMode RefreshToVK_SamplerMipmapMode[] =
{
VK_SAMPLER_MIPMAP_MODE_NEAREST,
VK_SAMPLER_MIPMAP_MODE_LINEAR
};
static VkSamplerAddressMode RefreshToVK_SamplerAddressMode[] =
{
VK_SAMPLER_ADDRESS_MODE_REPEAT,
VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT,
VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE,
VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER
};
2020-12-18 20:58:03 +00:00
static VkBorderColor RefreshToVK_BorderColor[] =
{
VK_BORDER_COLOR_FLOAT_TRANSPARENT_BLACK,
VK_BORDER_COLOR_INT_TRANSPARENT_BLACK,
VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK,
VK_BORDER_COLOR_INT_OPAQUE_BLACK,
VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
VK_BORDER_COLOR_INT_OPAQUE_WHITE
};
2020-12-17 03:28:02 +00:00
/* Structures */
2020-12-19 00:39:03 +00:00
/* Memory Allocation */
2020-12-18 22:35:33 +00:00
typedef struct VulkanMemoryAllocation VulkanMemoryAllocation;
typedef struct VulkanMemoryFreeRegion
{
VulkanMemoryAllocation *allocation;
VkDeviceSize offset;
VkDeviceSize size;
uint32_t allocationIndex;
uint32_t sortedIndex;
} VulkanMemoryFreeRegion;
typedef struct VulkanMemorySubAllocator
{
VkDeviceSize nextAllocationSize;
VulkanMemoryAllocation **allocations;
uint32_t allocationCount;
VulkanMemoryFreeRegion **sortedFreeRegions;
uint32_t sortedFreeRegionCount;
uint32_t sortedFreeRegionCapacity;
} VulkanMemorySubAllocator;
struct VulkanMemoryAllocation
{
VulkanMemorySubAllocator *allocator;
VkDeviceMemory memory;
VkDeviceSize size;
VulkanMemoryFreeRegion **freeRegions;
uint32_t freeRegionCount;
uint32_t freeRegionCapacity;
uint8_t dedicated;
2021-01-14 02:02:45 +00:00
uint8_t *mapPointer;
2021-01-03 21:12:12 +00:00
SDL_mutex *memoryLock;
2020-12-18 22:35:33 +00:00
};
typedef struct VulkanMemoryAllocator
{
VulkanMemorySubAllocator subAllocators[VK_MAX_MEMORY_TYPES];
} VulkanMemoryAllocator;
2020-12-19 00:39:03 +00:00
/* Memory Barriers */
typedef struct VulkanResourceAccessInfo
{
VkPipelineStageFlags stageMask;
VkAccessFlags accessMask;
VkImageLayout imageLayout;
} VulkanResourceAccessInfo;
static const VulkanResourceAccessInfo AccessMap[RESOURCE_ACCESS_TYPES_COUNT] =
{
/* RESOURCE_ACCESS_NONE */
{
0,
0,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_INDEX_BUFFER */
{
VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
VK_ACCESS_INDEX_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_VERTEX_BUFFER */
{
VK_PIPELINE_STAGE_VERTEX_INPUT_BIT,
VK_ACCESS_VERTEX_ATTRIBUTE_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_INDIRECT_BUFFER */
{
VK_PIPELINE_STAGE_DRAW_INDIRECT_BIT,
VK_ACCESS_INDIRECT_COMMAND_READ_BIT,
2020-12-19 00:39:03 +00:00
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_VERTEX_SHADER_READ_UNIFORM_BUFFER */
{
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_VERTEX_SHADER_READ_SAMPLED_IMAGE */
{
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
},
/* RESOURCE_ACCESS_FRAGMENT_SHADER_READ_UNIFORM_BUFFER */
{
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_ACCESS_UNIFORM_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_FRAGMENT_SHADER_READ_SAMPLED_IMAGE */
{
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
},
/* RESOURCE_ACCESS_FRAGMENT_SHADER_READ_COLOR_ATTACHMENT */
{
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
},
/* RESOURCE_ACCESS_FRAGMENT_SHADER_READ_DEPTH_STENCIL_ATTACHMENT */
{
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_ACCESS_INPUT_ATTACHMENT_READ_BIT,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL
},
2020-12-31 04:39:47 +00:00
/* RESOURCE_ACCESS_COMPUTE_SHADER_READ_UNIFORM_BUFFER */
{
VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
VK_ACCESS_UNIFORM_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_COMPUTE_SHADER_READ_SAMPLED_IMAGE_OR_UNIFORM_TEXEL_BUFFER */
2022-02-25 21:42:11 +00:00
{ VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
},
2020-12-31 06:28:37 +00:00
/* RESOURCE_ACCESS_COMPUTE_SHADER_READ_OTHER */
{
VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE */
{
VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
VK_ACCESS_SHADER_READ_BIT,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
},
2020-12-19 00:39:03 +00:00
/* RESOURCE_ACCESS_COLOR_ATTACHMENT_READ */
{
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_ACCESS_COLOR_ATTACHMENT_READ_BIT,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
},
/* RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ */
{
VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_READ_ONLY_OPTIMAL
},
/* RESOURCE_ACCESS_TRANSFER_READ */
{
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_ACCESS_TRANSFER_READ_BIT,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL
},
/* RESOURCE_ACCESS_HOST_READ */
{
VK_PIPELINE_STAGE_HOST_BIT,
VK_ACCESS_HOST_READ_BIT,
VK_IMAGE_LAYOUT_GENERAL
},
/* RESOURCE_ACCESS_PRESENT */
{
0,
0,
VK_IMAGE_LAYOUT_PRESENT_SRC_KHR
},
/* RESOURCE_ACCESS_END_OF_READ */
{
0,
0,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_VERTEX_SHADER_WRITE */
{
VK_PIPELINE_STAGE_VERTEX_SHADER_BIT,
VK_ACCESS_SHADER_WRITE_BIT,
VK_IMAGE_LAYOUT_GENERAL
},
/* RESOURCE_ACCESS_FRAGMENT_SHADER_WRITE */
{
VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT,
VK_ACCESS_SHADER_WRITE_BIT,
VK_IMAGE_LAYOUT_GENERAL
},
/* RESOURCE_ACCESS_COLOR_ATTACHMENT_WRITE */
{
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
},
/* RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE */
{
VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
},
/* RESOURCE_ACCESS_TRANSFER_WRITE */
{
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_ACCESS_TRANSFER_WRITE_BIT,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL
},
/* RESOURCE_ACCESS_HOST_WRITE */
{
VK_PIPELINE_STAGE_HOST_BIT,
VK_ACCESS_HOST_WRITE_BIT,
VK_IMAGE_LAYOUT_GENERAL
},
/* RESOURCE_ACCESS_COLOR_ATTACHMENT_READ_WRITE */
{
VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL
},
/* RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_WRITE */
{
VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT,
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT,
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL
},
/* RESOURCE_ACCESS_COMPUTE_SHADER_STORAGE_IMAGE_READ_WRITE */
{
VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT,
VK_IMAGE_LAYOUT_GENERAL
},
/* RESOURCE_ACCESS_COMPUTE_SHADER_BUFFER_READ_WRITE */
{
VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_TRANSFER_READ_WRITE */
2020-12-19 00:39:03 +00:00
{
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT,
VK_IMAGE_LAYOUT_UNDEFINED
},
/* RESOURCE_ACCESS_GENERAL */
{
VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT,
VK_IMAGE_LAYOUT_GENERAL
}
};
/* Memory structures */
typedef struct VulkanBuffer /* cast from Refresh_Buffer */
2020-12-19 04:08:07 +00:00
{
VkBuffer buffer;
2020-12-19 04:08:07 +00:00
VkDeviceSize size;
VkDeviceSize offset; /* move this to UsedMemoryRegion system */
VkDeviceSize memorySize; /* move this to UsedMemoryRegion system */
VulkanMemoryAllocation *allocation; /* see above */
2020-12-19 04:08:07 +00:00
VulkanResourceAccessType resourceAccessType;
VkBufferUsageFlags usage;
SDL_atomic_t referenceCount; /* Tracks command buffer usage */
} VulkanBuffer;
typedef struct VulkanUniformBufferPool VulkanUniformBufferPool;
typedef struct VulkanUniformBuffer
{
VulkanUniformBufferPool *pool;
VulkanBuffer *vulkanBuffer;
VkDeviceSize offset;
VkDescriptorSet descriptorSet;
} VulkanUniformBuffer;
typedef enum VulkanUniformBufferType
{
UNIFORM_BUFFER_VERTEX,
UNIFORM_BUFFER_FRAGMENT,
UNIFORM_BUFFER_COMPUTE
} VulkanUniformBufferType;
/* Yes, the pool is made of multiple pools.
* For some reason it was considered a good idea to make VkDescriptorPool fixed-size.
*/
typedef struct VulkanUniformDescriptorPool
{
VkDescriptorPool* descriptorPools;
uint32_t descriptorPoolCount;
/* Decremented whenever a descriptor set is allocated and
* incremented whenever a descriptor pool is allocated.
* This lets us keep track of when we need a new pool.
*/
uint32_t availableDescriptorSetCount;
} VulkanUniformDescriptorPool;
struct VulkanUniformBufferPool
{
VulkanUniformBufferType type;
VulkanUniformDescriptorPool descriptorPool;
SDL_mutex *lock;
VulkanUniformBuffer **availableBuffers;
uint32_t availableBufferCount;
uint32_t availableBufferCapacity;
};
2020-12-19 01:03:26 +00:00
2020-12-19 00:39:03 +00:00
/* Renderer Structure */
2020-12-17 02:38:22 +00:00
typedef struct QueueFamilyIndices
{
uint32_t graphicsFamily;
uint32_t presentFamily;
2021-01-03 03:03:25 +00:00
uint32_t computeFamily;
2021-01-02 21:31:17 +00:00
uint32_t transferFamily;
2020-12-17 02:38:22 +00:00
} QueueFamilyIndices;
typedef struct VulkanSampler
{
VkSampler sampler;
SDL_atomic_t referenceCount;
} VulkanSampler;
typedef struct VulkanShaderModule
{
VkShaderModule shaderModule;
SDL_atomic_t referenceCount;
} VulkanShaderModule;
typedef struct VulkanTexture
{
VulkanMemoryAllocation *allocation;
VkDeviceSize offset;
VkDeviceSize memorySize;
VkImage image;
VkImageView view;
VkExtent2D dimensions;
uint8_t is3D;
uint8_t isCube;
uint32_t depth;
uint32_t layerCount;
uint32_t levelCount;
Refresh_SampleCount sampleCount;
VkFormat format;
VulkanResourceAccessType resourceAccessType;
VkImageUsageFlags usageFlags;
VkImageAspectFlags aspectFlags;
struct VulkanTexture *msaaTex;
SDL_atomic_t referenceCount;
} VulkanTexture;
typedef struct VulkanRenderTarget
{
VkImageView view;
} VulkanRenderTarget;
typedef struct VulkanFramebuffer
{
VkFramebuffer framebuffer;
SDL_atomic_t referenceCount;
} VulkanFramebuffer;
typedef struct VulkanSwapchainData
{
/* Window surface */
VkSurfaceKHR surface;
VkSurfaceFormatKHR surfaceFormat;
/* Swapchain for window surface */
VkSwapchainKHR swapchain;
VkFormat swapchainFormat;
VkComponentMapping swapchainSwizzle;
VkPresentModeKHR presentMode;
/* Swapchain images */
VkExtent2D extent;
VulkanTexture *textures;
uint32_t imageCount;
2022-02-10 05:42:19 +00:00
/* Synchronization primitives */
VkSemaphore imageAvailableSemaphore;
VkSemaphore renderFinishedSemaphore;
} VulkanSwapchainData;
typedef struct WindowData
{
void *windowHandle;
VkPresentModeKHR preferredPresentMode;
VulkanSwapchainData *swapchainData;
} WindowData;
2020-12-17 03:28:02 +00:00
typedef struct SwapChainSupportDetails
{
VkSurfaceCapabilitiesKHR capabilities;
VkSurfaceFormatKHR *formats;
uint32_t formatsLength;
VkPresentModeKHR *presentModes;
uint32_t presentModesLength;
} SwapChainSupportDetails;
typedef struct VulkanPresentData
{
WindowData *windowData;
uint32_t swapchainImageIndex;
} VulkanPresentData;
typedef struct DescriptorSetCache DescriptorSetCache;
2020-12-27 23:20:59 +00:00
typedef struct VulkanGraphicsPipelineLayout
2020-12-23 06:56:26 +00:00
{
VkPipelineLayout pipelineLayout;
DescriptorSetCache *vertexSamplerDescriptorSetCache;
DescriptorSetCache *fragmentSamplerDescriptorSetCache;
} VulkanGraphicsPipelineLayout;
typedef struct VulkanGraphicsPipeline
{
VkPipeline pipeline;
VulkanGraphicsPipelineLayout *pipelineLayout;
2021-01-05 23:00:51 +00:00
Refresh_PrimitiveType primitiveType;
VkDeviceSize vertexUniformBlockSize;
VkDeviceSize fragmentUniformBlockSize;
VulkanShaderModule *vertexShaderModule;
VulkanShaderModule *fragmentShaderModule;
SDL_atomic_t referenceCount;
2020-12-23 06:56:26 +00:00
} VulkanGraphicsPipeline;
2020-12-29 22:52:24 +00:00
typedef struct VulkanComputePipelineLayout
{
VkPipelineLayout pipelineLayout;
DescriptorSetCache *bufferDescriptorSetCache;
DescriptorSetCache *imageDescriptorSetCache;
2020-12-29 22:52:24 +00:00
} VulkanComputePipelineLayout;
typedef struct VulkanComputePipeline
{
VkPipeline pipeline;
VulkanComputePipelineLayout *pipelineLayout;
VkDeviceSize uniformBlockSize; /* permanently set in Create function */
VulkanShaderModule *computeShaderModule;
SDL_atomic_t referenceCount;
2020-12-29 22:52:24 +00:00
} VulkanComputePipeline;
/* Cache structures */
2020-12-29 22:52:24 +00:00
/* Descriptor Set Layout Caches*/
#define NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS 1031
2020-12-29 23:05:26 +00:00
typedef struct DescriptorSetLayoutHash
{
2020-12-29 23:05:26 +00:00
VkDescriptorType descriptorType;
uint32_t bindingCount;
VkShaderStageFlagBits stageFlag;
2020-12-29 23:05:26 +00:00
} DescriptorSetLayoutHash;
2020-12-29 23:05:26 +00:00
typedef struct DescriptorSetLayoutHashMap
{
2020-12-29 23:05:26 +00:00
DescriptorSetLayoutHash key;
VkDescriptorSetLayout value;
2020-12-29 23:05:26 +00:00
} DescriptorSetLayoutHashMap;
2020-12-29 23:05:26 +00:00
typedef struct DescriptorSetLayoutHashArray
{
2020-12-29 23:05:26 +00:00
DescriptorSetLayoutHashMap *elements;
int32_t count;
int32_t capacity;
2020-12-29 23:05:26 +00:00
} DescriptorSetLayoutHashArray;
2020-12-29 23:05:26 +00:00
typedef struct DescriptorSetLayoutHashTable
{
2020-12-29 23:05:26 +00:00
DescriptorSetLayoutHashArray buckets[NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS];
} DescriptorSetLayoutHashTable;
2020-12-29 23:05:26 +00:00
static inline uint64_t DescriptorSetLayoutHashTable_GetHashCode(DescriptorSetLayoutHash key)
{
const uint64_t HASH_FACTOR = 97;
uint64_t result = 1;
result = result * HASH_FACTOR + key.descriptorType;
2020-12-29 23:05:26 +00:00
result = result * HASH_FACTOR + key.bindingCount;
result = result * HASH_FACTOR + key.stageFlag;
return result;
}
2020-12-29 23:05:26 +00:00
static inline VkDescriptorSetLayout DescriptorSetLayoutHashTable_Fetch(
DescriptorSetLayoutHashTable *table,
DescriptorSetLayoutHash key
) {
int32_t i;
2020-12-29 23:05:26 +00:00
uint64_t hashcode = DescriptorSetLayoutHashTable_GetHashCode(key);
DescriptorSetLayoutHashArray *arr = &table->buckets[hashcode % NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS];
for (i = 0; i < arr->count; i += 1)
{
2020-12-29 23:05:26 +00:00
const DescriptorSetLayoutHash *e = &arr->elements[i].key;
2022-02-25 21:42:11 +00:00
if ( key.descriptorType == e->descriptorType &&
2020-12-29 23:05:26 +00:00
key.bindingCount == e->bindingCount &&
2022-02-25 21:42:11 +00:00
key.stageFlag == e->stageFlag )
{
return arr->elements[i].value;
}
}
return VK_NULL_HANDLE;
}
2020-12-29 23:05:26 +00:00
static inline void DescriptorSetLayoutHashTable_Insert(
DescriptorSetLayoutHashTable *table,
DescriptorSetLayoutHash key,
VkDescriptorSetLayout value
) {
2020-12-29 23:05:26 +00:00
uint64_t hashcode = DescriptorSetLayoutHashTable_GetHashCode(key);
DescriptorSetLayoutHashArray *arr = &table->buckets[hashcode % NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS];
2020-12-29 23:05:26 +00:00
DescriptorSetLayoutHashMap map;
map.key = key;
map.value = value;
2020-12-29 23:05:26 +00:00
EXPAND_ELEMENTS_IF_NEEDED(arr, 4, DescriptorSetLayoutHashMap);
arr->elements[arr->count] = map;
arr->count += 1;
}
typedef struct RenderPassColorTargetDescription
{
2022-06-06 18:46:08 +00:00
VkFormat format;
Refresh_Vec4 clearColor;
Refresh_LoadOp loadOp;
Refresh_StoreOp storeOp;
} RenderPassColorTargetDescription;
typedef struct RenderPassDepthStencilTargetDescription
{
2022-06-06 18:46:08 +00:00
VkFormat format;
Refresh_LoadOp loadOp;
Refresh_StoreOp storeOp;
Refresh_LoadOp stencilLoadOp;
Refresh_StoreOp stencilStoreOp;
} RenderPassDepthStencilTargetDescription;
typedef struct RenderPassHash
{
RenderPassColorTargetDescription colorTargetDescriptions[MAX_COLOR_TARGET_BINDINGS];
uint32_t colorAttachmentCount;
RenderPassDepthStencilTargetDescription depthStencilTargetDescription;
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
Refresh_SampleCount colorAttachmentSampleCount;
} RenderPassHash;
typedef struct RenderPassHashMap
{
RenderPassHash key;
VkRenderPass value;
} RenderPassHashMap;
typedef struct RenderPassHashArray
{
RenderPassHashMap *elements;
int32_t count;
int32_t capacity;
} RenderPassHashArray;
static inline uint8_t RenderPassHash_Compare(
RenderPassHash *a,
RenderPassHash *b
) {
uint32_t i;
if (a->colorAttachmentCount != b->colorAttachmentCount)
{
return 0;
}
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
if (a->colorAttachmentSampleCount != b->colorAttachmentSampleCount)
{
return 0;
}
for (i = 0; i < a->colorAttachmentCount; i += 1)
{
2022-06-06 18:46:08 +00:00
if (a->colorTargetDescriptions[i].format != b->colorTargetDescriptions[i].format)
{
return 0;
}
if ( a->colorTargetDescriptions[i].clearColor.x != b->colorTargetDescriptions[i].clearColor.x ||
a->colorTargetDescriptions[i].clearColor.y != b->colorTargetDescriptions[i].clearColor.y ||
a->colorTargetDescriptions[i].clearColor.z != b->colorTargetDescriptions[i].clearColor.z ||
a->colorTargetDescriptions[i].clearColor.w != b->colorTargetDescriptions[i].clearColor.w )
{
return 0;
}
if (a->colorTargetDescriptions[i].loadOp != b->colorTargetDescriptions[i].loadOp)
{
return 0;
}
if (a->colorTargetDescriptions[i].storeOp != b->colorTargetDescriptions[i].storeOp)
{
return 0;
}
}
2022-06-06 18:46:08 +00:00
if (a->depthStencilTargetDescription.format != b->depthStencilTargetDescription.format)
{
return 0;
}
if (a->depthStencilTargetDescription.loadOp != b->depthStencilTargetDescription.loadOp)
{
return 0;
}
if (a->depthStencilTargetDescription.storeOp != b->depthStencilTargetDescription.storeOp)
{
return 0;
}
if (a->depthStencilTargetDescription.stencilLoadOp != b->depthStencilTargetDescription.stencilLoadOp)
{
return 0;
}
if (a->depthStencilTargetDescription.stencilStoreOp != b->depthStencilTargetDescription.stencilStoreOp)
{
return 0;
}
return 1;
}
static inline VkRenderPass RenderPassHashArray_Fetch(
RenderPassHashArray *arr,
RenderPassHash *key
) {
int32_t i;
for (i = 0; i < arr->count; i += 1)
{
RenderPassHash *e = &arr->elements[i].key;
if (RenderPassHash_Compare(e, key))
{
return arr->elements[i].value;
}
}
return VK_NULL_HANDLE;
}
static inline void RenderPassHashArray_Insert(
RenderPassHashArray *arr,
RenderPassHash key,
VkRenderPass value
) {
RenderPassHashMap map;
map.key = key;
map.value = value;
EXPAND_ELEMENTS_IF_NEEDED(arr, 4, RenderPassHashMap)
arr->elements[arr->count] = map;
arr->count += 1;
}
typedef struct FramebufferHash
{
VkImageView colorAttachmentViews[MAX_COLOR_TARGET_BINDINGS];
VkImageView colorMultiSampleAttachmentViews[MAX_COLOR_TARGET_BINDINGS];
uint32_t colorAttachmentCount;
VkImageView depthStencilAttachmentView;
uint32_t width;
uint32_t height;
} FramebufferHash;
typedef struct FramebufferHashMap
{
FramebufferHash key;
VulkanFramebuffer *value;
} FramebufferHashMap;
typedef struct FramebufferHashArray
{
FramebufferHashMap *elements;
int32_t count;
int32_t capacity;
} FramebufferHashArray;
static inline uint8_t FramebufferHash_Compare(
FramebufferHash *a,
FramebufferHash *b
) {
uint32_t i;
if (a->colorAttachmentCount != b->colorAttachmentCount)
{
return 0;
}
for (i = 0; i < a->colorAttachmentCount; i += 1)
{
if (a->colorAttachmentViews[i] != b->colorAttachmentViews[i])
{
return 0;
}
if (a->colorMultiSampleAttachmentViews[i] != b->colorMultiSampleAttachmentViews[i])
{
return 0;
}
}
if (a->depthStencilAttachmentView != b->depthStencilAttachmentView)
{
return 0;
}
if (a->width != b->width)
{
return 0;
}
if (a->height != b->height)
{
return 0;
}
return 1;
}
static inline VulkanFramebuffer* FramebufferHashArray_Fetch(
FramebufferHashArray *arr,
FramebufferHash *key
) {
int32_t i;
for (i = 0; i < arr->count; i += 1)
{
FramebufferHash *e = &arr->elements[i].key;
if (FramebufferHash_Compare(e, key))
{
return arr->elements[i].value;
}
}
return VK_NULL_HANDLE;
}
static inline void FramebufferHashArray_Insert(
FramebufferHashArray *arr,
FramebufferHash key,
VulkanFramebuffer *value
) {
FramebufferHashMap map;
map.key = key;
map.value = value;
EXPAND_ELEMENTS_IF_NEEDED(arr, 4, FramebufferHashMap)
arr->elements[arr->count] = map;
arr->count += 1;
}
static inline void FramebufferHashArray_Remove(
FramebufferHashArray *arr,
uint32_t index
) {
if (index != arr->count - 1)
{
arr->elements[index] = arr->elements[arr->count - 1];
}
arr->count -= 1;
}
typedef struct RenderTargetHash
{
VulkanTexture *texture;
uint32_t depth;
uint32_t layer;
uint32_t level;
} RenderTargetHash;
typedef struct RenderTargetHashMap
{
RenderTargetHash key;
VulkanRenderTarget *value;
} RenderTargetHashMap;
typedef struct RenderTargetHashArray
{
RenderTargetHashMap *elements;
int32_t count;
int32_t capacity;
} RenderTargetHashArray;
static inline uint8_t RenderTargetHash_Compare(
RenderTargetHash *a,
RenderTargetHash *b
) {
if (a->texture != b->texture)
{
return 0;
}
if (a->layer != b->layer)
{
return 0;
}
if (a->level != b->level)
{
return 0;
}
if (a->depth != b->depth)
{
return 0;
}
return 1;
}
static inline VulkanRenderTarget* RenderTargetHash_Fetch(
RenderTargetHashArray *arr,
RenderTargetHash *key
) {
int32_t i;
for (i = 0; i < arr->count; i += 1)
{
RenderTargetHash *e = &arr->elements[i].key;
if (RenderTargetHash_Compare(e, key))
{
return arr->elements[i].value;
}
}
return NULL;
}
static inline void RenderTargetHash_Insert(
RenderTargetHashArray *arr,
RenderTargetHash key,
VulkanRenderTarget *value
) {
RenderTargetHashMap map;
map.key = key;
map.value = value;
EXPAND_ELEMENTS_IF_NEEDED(arr, 4, RenderTargetHashMap)
arr->elements[arr->count] = map;
arr->count += 1;
}
static inline void RenderTargetHash_Remove(
RenderTargetHashArray *arr,
uint32_t index
) {
if (index != arr->count - 1)
{
arr->elements[index] = arr->elements[arr->count - 1];
}
arr->count -= 1;
}
2020-12-29 22:52:24 +00:00
/* Descriptor Set Caches */
struct DescriptorSetCache
2020-12-30 00:53:10 +00:00
{
SDL_mutex *lock;
2020-12-30 00:53:10 +00:00
VkDescriptorSetLayout descriptorSetLayout;
uint32_t bindingCount;
2020-12-31 04:39:47 +00:00
VkDescriptorType descriptorType;
2020-12-30 00:53:10 +00:00
VkDescriptorPool *descriptorPools;
uint32_t descriptorPoolCount;
2020-12-30 00:53:10 +00:00
uint32_t nextPoolSize;
VkDescriptorSet *inactiveDescriptorSets;
uint32_t inactiveDescriptorSetCount;
uint32_t inactiveDescriptorSetCapacity;
};
2020-12-29 22:52:24 +00:00
/* Pipeline Caches */
2021-01-03 02:02:20 +00:00
#define NUM_PIPELINE_LAYOUT_BUCKETS 1031
2020-12-29 22:52:24 +00:00
typedef struct GraphicsPipelineLayoutHash
{
VkDescriptorSetLayout vertexSamplerLayout;
VkDescriptorSetLayout fragmentSamplerLayout;
VkDescriptorSetLayout vertexUniformLayout;
VkDescriptorSetLayout fragmentUniformLayout;
} GraphicsPipelineLayoutHash;
typedef struct GraphicsPipelineLayoutHashMap
{
GraphicsPipelineLayoutHash key;
VulkanGraphicsPipelineLayout *value;
} GraphicsPipelineLayoutHashMap;
typedef struct GraphicsPipelineLayoutHashArray
{
GraphicsPipelineLayoutHashMap *elements;
int32_t count;
int32_t capacity;
} GraphicsPipelineLayoutHashArray;
typedef struct GraphicsPipelineLayoutHashTable
{
GraphicsPipelineLayoutHashArray buckets[NUM_PIPELINE_LAYOUT_BUCKETS];
} GraphicsPipelineLayoutHashTable;
static inline uint64_t GraphicsPipelineLayoutHashTable_GetHashCode(GraphicsPipelineLayoutHash key)
{
const uint64_t HASH_FACTOR = 97;
uint64_t result = 1;
result = result * HASH_FACTOR + (uint64_t) key.vertexSamplerLayout;
result = result * HASH_FACTOR + (uint64_t) key.fragmentSamplerLayout;
result = result * HASH_FACTOR + (uint64_t) key.vertexUniformLayout;
result = result * HASH_FACTOR + (uint64_t) key.fragmentUniformLayout;
return result;
}
static inline VulkanGraphicsPipelineLayout* GraphicsPipelineLayoutHashArray_Fetch(
GraphicsPipelineLayoutHashTable *table,
GraphicsPipelineLayoutHash key
) {
int32_t i;
uint64_t hashcode = GraphicsPipelineLayoutHashTable_GetHashCode(key);
GraphicsPipelineLayoutHashArray *arr = &table->buckets[hashcode % NUM_PIPELINE_LAYOUT_BUCKETS];
for (i = 0; i < arr->count; i += 1)
{
const GraphicsPipelineLayoutHash *e = &arr->elements[i].key;
if ( key.vertexSamplerLayout == e->vertexSamplerLayout &&
key.fragmentSamplerLayout == e->fragmentSamplerLayout &&
key.vertexUniformLayout == e->vertexUniformLayout &&
key.fragmentUniformLayout == e->fragmentUniformLayout )
{
return arr->elements[i].value;
}
}
return NULL;
}
static inline void GraphicsPipelineLayoutHashArray_Insert(
GraphicsPipelineLayoutHashTable *table,
GraphicsPipelineLayoutHash key,
VulkanGraphicsPipelineLayout *value
) {
uint64_t hashcode = GraphicsPipelineLayoutHashTable_GetHashCode(key);
GraphicsPipelineLayoutHashArray *arr = &table->buckets[hashcode % NUM_PIPELINE_LAYOUT_BUCKETS];
GraphicsPipelineLayoutHashMap map;
map.key = key;
map.value = value;
EXPAND_ELEMENTS_IF_NEEDED(arr, 4, GraphicsPipelineLayoutHashMap)
arr->elements[arr->count] = map;
arr->count += 1;
}
typedef struct ComputePipelineLayoutHash
{
VkDescriptorSetLayout bufferLayout;
VkDescriptorSetLayout imageLayout;
2020-12-31 04:39:47 +00:00
VkDescriptorSetLayout uniformLayout;
2020-12-29 22:52:24 +00:00
} ComputePipelineLayoutHash;
typedef struct ComputePipelineLayoutHashMap
{
ComputePipelineLayoutHash key;
VulkanComputePipelineLayout *value;
} ComputePipelineLayoutHashMap;
typedef struct ComputePipelineLayoutHashArray
{
ComputePipelineLayoutHashMap *elements;
int32_t count;
int32_t capacity;
} ComputePipelineLayoutHashArray;
typedef struct ComputePipelineLayoutHashTable
{
ComputePipelineLayoutHashArray buckets[NUM_PIPELINE_LAYOUT_BUCKETS];
} ComputePipelineLayoutHashTable;
static inline uint64_t ComputePipelineLayoutHashTable_GetHashCode(ComputePipelineLayoutHash key)
{
const uint64_t HASH_FACTOR = 97;
uint64_t result = 1;
result = result * HASH_FACTOR + (uint64_t) key.bufferLayout;
result = result * HASH_FACTOR + (uint64_t) key.imageLayout;
2020-12-31 04:39:47 +00:00
result = result * HASH_FACTOR + (uint64_t) key.uniformLayout;
2020-12-29 22:52:24 +00:00
return result;
}
static inline VulkanComputePipelineLayout* ComputePipelineLayoutHashArray_Fetch(
ComputePipelineLayoutHashTable *table,
ComputePipelineLayoutHash key
) {
int32_t i;
uint64_t hashcode = ComputePipelineLayoutHashTable_GetHashCode(key);
ComputePipelineLayoutHashArray *arr = &table->buckets[hashcode % NUM_PIPELINE_LAYOUT_BUCKETS];
for (i = 0; i < arr->count; i += 1)
{
const ComputePipelineLayoutHash *e = &arr->elements[i].key;
if ( key.bufferLayout == e->bufferLayout &&
2020-12-31 04:39:47 +00:00
key.imageLayout == e->imageLayout &&
key.uniformLayout == e->uniformLayout )
2020-12-29 22:52:24 +00:00
{
return arr->elements[i].value;
}
}
return NULL;
}
static inline void ComputePipelineLayoutHashArray_Insert(
ComputePipelineLayoutHashTable *table,
ComputePipelineLayoutHash key,
VulkanComputePipelineLayout *value
) {
uint64_t hashcode = ComputePipelineLayoutHashTable_GetHashCode(key);
ComputePipelineLayoutHashArray *arr = &table->buckets[hashcode % NUM_PIPELINE_LAYOUT_BUCKETS];
ComputePipelineLayoutHashMap map;
map.key = key;
map.value = value;
EXPAND_ELEMENTS_IF_NEEDED(arr, 4, ComputePipelineLayoutHashMap)
arr->elements[arr->count] = map;
arr->count += 1;
}
2021-01-03 02:02:20 +00:00
/* Command structures */
typedef struct DescriptorSetData
{
DescriptorSetCache *descriptorSetCache;
VkDescriptorSet descriptorSet;
} DescriptorSetData;
2022-01-02 22:35:57 +00:00
typedef struct VulkanTransferBuffer
{
VulkanBuffer* buffer;
VkDeviceSize offset;
} VulkanTransferBuffer;
typedef struct VulkanTransferBufferPool
{
SDL_mutex *lock;
VulkanTransferBuffer **availableBuffers;
uint32_t availableBufferCount;
uint32_t availableBufferCapacity;
} VulkanTransferBufferPool;
2021-01-02 06:07:15 +00:00
typedef struct VulkanCommandPool VulkanCommandPool;
typedef struct VulkanCommandBuffer
{
VkCommandBuffer commandBuffer;
VulkanCommandPool *commandPool;
VulkanPresentData *presentDatas;
uint32_t presentDataCount;
uint32_t presentDataCapacity;
VkSemaphore *waitSemaphores;
uint32_t waitSemaphoreCount;
uint32_t waitSemaphoreCapacity;
VkSemaphore *signalSemaphores;
uint32_t signalSemaphoreCount;
uint32_t signalSemaphoreCapacity;
2021-01-02 06:07:15 +00:00
VulkanComputePipeline *currentComputePipeline;
VulkanGraphicsPipeline *currentGraphicsPipeline;
VulkanTexture *renderPassColorTargetTextures[MAX_COLOR_TARGET_BINDINGS];
uint32_t renderPassColorTargetCount;
VulkanTexture *renderPassDepthTexture; /* can be NULL */
2021-01-02 06:07:15 +00:00
VulkanUniformBuffer *vertexUniformBuffer;
VulkanUniformBuffer *fragmentUniformBuffer;
VulkanUniformBuffer *computeUniformBuffer;
VkDescriptorSet vertexSamplerDescriptorSet; /* updated by BindVertexSamplers */
VkDescriptorSet fragmentSamplerDescriptorSet; /* updated by BindFragmentSamplers */
VkDescriptorSet bufferDescriptorSet; /* updated by BindComputeBuffers */
VkDescriptorSet imageDescriptorSet; /* updated by BindComputeTextures */
2022-01-02 22:35:57 +00:00
VulkanTransferBuffer** transferBuffers;
uint32_t transferBufferCount;
uint32_t transferBufferCapacity;
VulkanUniformBuffer **boundUniformBuffers;
uint32_t boundUniformBufferCount;
uint32_t boundUniformBufferCapacity;
DescriptorSetData *boundDescriptorSetDatas;
uint32_t boundDescriptorSetDataCount;
uint32_t boundDescriptorSetDataCapacity;
/* Keep track of compute resources for memory barriers */
VulkanBuffer **boundComputeBuffers;
uint32_t boundComputeBufferCount;
uint32_t boundComputeBufferCapacity;
VulkanTexture **boundComputeTextures;
uint32_t boundComputeTextureCount;
uint32_t boundComputeTextureCapacity;
/* Viewport/scissor state */
2022-03-04 20:30:33 +00:00
VkViewport currentViewport;
VkRect2D currentScissor;
/* Track used resources */
VulkanBuffer **usedBuffers;
uint32_t usedBufferCount;
uint32_t usedBufferCapacity;
VulkanTexture **usedTextures;
uint32_t usedTextureCount;
uint32_t usedTextureCapacity;
VulkanSampler **usedSamplers;
uint32_t usedSamplerCount;
uint32_t usedSamplerCapacity;
VulkanGraphicsPipeline **usedGraphicsPipelines;
uint32_t usedGraphicsPipelineCount;
uint32_t usedGraphicsPipelineCapacity;
VulkanComputePipeline **usedComputePipelines;
uint32_t usedComputePipelineCount;
uint32_t usedComputePipelineCapacity;
VulkanFramebuffer **usedFramebuffers;
uint32_t usedFramebufferCount;
uint32_t usedFramebufferCapacity;
/* Shader modules have references tracked by pipelines */
VkFence inFlightFence;
2021-01-02 06:07:15 +00:00
} VulkanCommandBuffer;
struct VulkanCommandPool
{
SDL_threadID threadID;
VkCommandPool commandPool;
VulkanCommandBuffer **inactiveCommandBuffers;
uint32_t inactiveCommandBufferCapacity;
uint32_t inactiveCommandBufferCount;
};
2021-01-03 02:02:20 +00:00
#define NUM_COMMAND_POOL_BUCKETS 1031
typedef struct CommandPoolHash
{
SDL_threadID threadID;
} CommandPoolHash;
typedef struct CommandPoolHashMap
{
CommandPoolHash key;
VulkanCommandPool *value;
} CommandPoolHashMap;
typedef struct CommandPoolHashArray
{
CommandPoolHashMap *elements;
uint32_t count;
uint32_t capacity;
} CommandPoolHashArray;
typedef struct CommandPoolHashTable
{
CommandPoolHashArray buckets[NUM_COMMAND_POOL_BUCKETS];
} CommandPoolHashTable;
static inline uint64_t CommandPoolHashTable_GetHashCode(CommandPoolHash key)
{
const uint64_t HASH_FACTOR = 97;
uint64_t result = 1;
result = result * HASH_FACTOR + (uint64_t) key.threadID;
return result;
}
static inline VulkanCommandPool* CommandPoolHashTable_Fetch(
CommandPoolHashTable *table,
CommandPoolHash key
) {
uint32_t i;
uint64_t hashcode = CommandPoolHashTable_GetHashCode(key);
CommandPoolHashArray *arr = &table->buckets[hashcode % NUM_COMMAND_POOL_BUCKETS];
for (i = 0; i < arr->count; i += 1)
{
const CommandPoolHash *e = &arr->elements[i].key;
if (key.threadID == e->threadID)
{
return arr->elements[i].value;
}
}
return NULL;
}
static inline void CommandPoolHashTable_Insert(
CommandPoolHashTable *table,
CommandPoolHash key,
VulkanCommandPool *value
) {
uint64_t hashcode = CommandPoolHashTable_GetHashCode(key);
CommandPoolHashArray *arr = &table->buckets[hashcode % NUM_COMMAND_POOL_BUCKETS];
CommandPoolHashMap map;
map.key = key;
map.value = value;
EXPAND_ELEMENTS_IF_NEEDED(arr, 4, CommandPoolHashMap)
arr->elements[arr->count] = map;
arr->count += 1;
}
/* Context */
2020-12-17 03:28:02 +00:00
typedef struct VulkanRenderer
2020-12-17 01:23:49 +00:00
{
2022-02-25 21:42:11 +00:00
VkInstance instance;
VkPhysicalDevice physicalDevice;
VkPhysicalDeviceProperties2 physicalDeviceProperties;
VkPhysicalDeviceDriverPropertiesKHR physicalDeviceDriverProperties;
VkDevice logicalDevice;
2020-12-17 02:38:22 +00:00
2022-02-25 21:42:11 +00:00
uint8_t supportsDebugUtils;
uint8_t debugMode;
2020-12-17 03:28:02 +00:00
2020-12-18 22:35:33 +00:00
VulkanMemoryAllocator *memoryAllocator;
VkPhysicalDeviceMemoryProperties memoryProperties;
2020-12-18 22:35:33 +00:00
WindowData **claimedWindows;
uint32_t claimedWindowCount;
uint32_t claimedWindowCapacity;
2020-12-17 03:28:02 +00:00
2022-02-25 21:42:11 +00:00
QueueFamilyIndices queueFamilyIndices;
2020-12-17 02:38:22 +00:00
VkQueue graphicsQueue;
VkQueue presentQueue;
2021-01-03 03:03:25 +00:00
VkQueue computeQueue;
2021-01-02 21:31:17 +00:00
VkQueue transferQueue;
2020-12-17 02:38:22 +00:00
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer **submittedCommandBuffers;
2020-12-17 02:38:22 +00:00
uint32_t submittedCommandBufferCount;
2021-01-02 06:07:15 +00:00
uint32_t submittedCommandBufferCapacity;
2020-12-31 06:28:37 +00:00
VulkanTransferBufferPool transferBufferPool;
2021-01-03 02:02:20 +00:00
CommandPoolHashTable commandPoolHashTable;
2020-12-29 23:05:26 +00:00
DescriptorSetLayoutHashTable descriptorSetLayoutHashTable;
2020-12-29 22:52:24 +00:00
GraphicsPipelineLayoutHashTable graphicsPipelineLayoutHashTable;
ComputePipelineLayoutHashTable computePipelineLayoutHashTable;
RenderPassHashArray renderPassHashArray;
FramebufferHashArray framebufferHashArray;
RenderTargetHashArray renderTargetHashArray;
2020-12-19 05:35:21 +00:00
VkDescriptorPool defaultDescriptorPool;
2020-12-29 22:52:24 +00:00
VkDescriptorSetLayout emptyVertexSamplerLayout;
VkDescriptorSetLayout emptyFragmentSamplerLayout;
2020-12-29 22:52:24 +00:00
VkDescriptorSetLayout emptyComputeBufferDescriptorSetLayout;
2020-12-31 04:39:47 +00:00
VkDescriptorSetLayout emptyComputeImageDescriptorSetLayout;
2020-12-29 22:52:24 +00:00
VkDescriptorSet emptyVertexSamplerDescriptorSet;
VkDescriptorSet emptyFragmentSamplerDescriptorSet;
2020-12-29 22:52:24 +00:00
VkDescriptorSet emptyComputeBufferDescriptorSet;
2020-12-31 04:39:47 +00:00
VkDescriptorSet emptyComputeImageDescriptorSet;
VulkanUniformBufferPool *vertexUniformBufferPool;
VulkanUniformBufferPool *fragmentUniformBufferPool;
VulkanUniformBufferPool *computeUniformBufferPool;
2020-12-17 19:40:49 +00:00
VkDescriptorSetLayout vertexUniformDescriptorSetLayout;
VkDescriptorSetLayout fragmentUniformDescriptorSetLayout;
VkDescriptorSetLayout computeUniformDescriptorSetLayout;
VulkanUniformBuffer *dummyVertexUniformBuffer;
VulkanUniformBuffer *dummyFragmentUniformBuffer;
VulkanUniformBuffer *dummyComputeUniformBuffer;
VkDeviceSize minUBOAlignment;
2020-12-19 04:08:07 +00:00
2022-06-17 07:41:27 +00:00
/* Some drivers don't support D16 for some reason. Fun! */
VkFormat D16Format;
VkFormat D16S8Format;
VulkanTexture **texturesToDestroy;
uint32_t texturesToDestroyCount;
uint32_t texturesToDestroyCapacity;
VulkanBuffer **buffersToDestroy;
uint32_t buffersToDestroyCount;
uint32_t buffersToDestroyCapacity;
VulkanSampler **samplersToDestroy;
uint32_t samplersToDestroyCount;
uint32_t samplersToDestroyCapacity;
VulkanGraphicsPipeline **graphicsPipelinesToDestroy;
uint32_t graphicsPipelinesToDestroyCount;
uint32_t graphicsPipelinesToDestroyCapacity;
VulkanComputePipeline **computePipelinesToDestroy;
uint32_t computePipelinesToDestroyCount;
uint32_t computePipelinesToDestroyCapacity;
VulkanShaderModule **shaderModulesToDestroy;
uint32_t shaderModulesToDestroyCount;
uint32_t shaderModulesToDestroyCapacity;
VulkanFramebuffer **framebuffersToDestroy;
uint32_t framebuffersToDestroyCount;
uint32_t framebuffersToDestroyCapacity;
2020-12-18 22:35:33 +00:00
SDL_mutex *allocatorLock;
SDL_mutex *disposeLock;
SDL_mutex *submitLock;
SDL_mutex *acquireCommandBufferLock;
SDL_mutex *renderPassFetchLock;
SDL_mutex *framebufferFetchLock;
SDL_mutex *renderTargetFetchLock;
2020-12-29 03:44:34 +00:00
2022-02-25 21:42:11 +00:00
#define VULKAN_INSTANCE_FUNCTION(ext, ret, func, params) \
2020-12-17 02:38:22 +00:00
vkfntype_##func func;
#define VULKAN_DEVICE_FUNCTION(ext, ret, func, params) \
vkfntype_##func func;
#include "Refresh_Driver_Vulkan_vkfuncs.h"
2020-12-17 03:28:02 +00:00
} VulkanRenderer;
2020-12-19 00:39:03 +00:00
/* Forward declarations */
2021-01-02 06:07:15 +00:00
static void VULKAN_INTERNAL_BeginCommandBuffer(VulkanRenderer *renderer, VulkanCommandBuffer *commandBuffer);
static void VULKAN_UnclaimWindow(Refresh_Renderer *driverData, void *windowHandle);
static void VULKAN_Wait(Refresh_Renderer *driverData);
2021-01-05 23:00:51 +00:00
static void VULKAN_Submit(Refresh_Renderer *driverData, uint32_t commandBufferCount, Refresh_CommandBuffer **pCommandBuffers);
static void VULKAN_INTERNAL_DestroyRenderTarget(VulkanRenderer *renderer, VulkanRenderTarget *renderTarget);
2020-12-19 00:39:03 +00:00
2020-12-17 03:28:02 +00:00
/* Error Handling */
static inline const char* VkErrorMessages(VkResult code)
{
#define ERR_TO_STR(e) \
case e: return #e;
switch (code)
{
ERR_TO_STR(VK_ERROR_OUT_OF_HOST_MEMORY)
ERR_TO_STR(VK_ERROR_OUT_OF_DEVICE_MEMORY)
ERR_TO_STR(VK_ERROR_FRAGMENTED_POOL)
ERR_TO_STR(VK_ERROR_OUT_OF_POOL_MEMORY)
ERR_TO_STR(VK_ERROR_INITIALIZATION_FAILED)
ERR_TO_STR(VK_ERROR_LAYER_NOT_PRESENT)
ERR_TO_STR(VK_ERROR_EXTENSION_NOT_PRESENT)
ERR_TO_STR(VK_ERROR_FEATURE_NOT_PRESENT)
ERR_TO_STR(VK_ERROR_TOO_MANY_OBJECTS)
ERR_TO_STR(VK_ERROR_DEVICE_LOST)
ERR_TO_STR(VK_ERROR_INCOMPATIBLE_DRIVER)
ERR_TO_STR(VK_ERROR_OUT_OF_DATE_KHR)
ERR_TO_STR(VK_ERROR_SURFACE_LOST_KHR)
ERR_TO_STR(VK_ERROR_FULL_SCREEN_EXCLUSIVE_MODE_LOST_EXT)
ERR_TO_STR(VK_SUBOPTIMAL_KHR)
default: return "Unhandled VkResult!";
}
#undef ERR_TO_STR
}
2021-01-27 20:51:36 +00:00
static inline void LogVulkanResultAsError(
2020-12-17 03:28:02 +00:00
const char* vulkanFunctionName,
VkResult result
) {
if (result != VK_SUCCESS)
{
2021-01-05 23:00:51 +00:00
Refresh_LogError(
2020-12-17 03:28:02 +00:00
"%s: %s",
vulkanFunctionName,
VkErrorMessages(result)
);
}
}
2020-12-17 01:23:49 +00:00
2021-01-27 20:51:36 +00:00
static inline void LogVulkanResultAsWarn(
const char* vulkanFunctionName,
VkResult result
) {
if (result != VK_SUCCESS)
{
Refresh_LogWarn(
"%s: %s",
vulkanFunctionName,
VkErrorMessages(result)
);
}
}
2020-12-18 22:35:33 +00:00
/* Utility */
2022-06-17 07:41:27 +00:00
static inline VkFormat RefreshToVK_DepthFormat(
VulkanRenderer* renderer,
Refresh_TextureFormat format
) {
switch (format)
{
case REFRESH_TEXTUREFORMAT_D16_UNORM:
return renderer->D16Format;
case REFRESH_TEXTUREFORMAT_D16_UNORM_S8_UINT:
return renderer->D16S8Format;
case REFRESH_TEXTUREFORMAT_D32_SFLOAT:
return VK_FORMAT_D32_SFLOAT;
case REFRESH_TEXTUREFORMAT_D32_SFLOAT_S8_UINT:
return VK_FORMAT_D32_SFLOAT_S8_UINT;
default:
return VK_FORMAT_UNDEFINED;
}
}
static inline uint8_t IsRefreshDepthFormat(Refresh_TextureFormat format)
{
switch (format)
{
case REFRESH_TEXTUREFORMAT_D16_UNORM:
case REFRESH_TEXTUREFORMAT_D32_SFLOAT:
case REFRESH_TEXTUREFORMAT_D16_UNORM_S8_UINT:
case REFRESH_TEXTUREFORMAT_D32_SFLOAT_S8_UINT:
return 1;
default:
return 0;
}
}
2021-01-27 20:51:36 +00:00
static inline uint8_t IsDepthFormat(VkFormat format)
2020-12-18 22:35:33 +00:00
{
switch(format)
{
2021-01-27 20:51:36 +00:00
case VK_FORMAT_D16_UNORM:
case VK_FORMAT_D32_SFLOAT:
case VK_FORMAT_D16_UNORM_S8_UINT:
case VK_FORMAT_D32_SFLOAT_S8_UINT:
return 1;
default:
2020-12-18 22:35:33 +00:00
return 0;
}
}
2021-01-27 20:51:36 +00:00
static inline uint8_t IsStencilFormat(VkFormat format)
{
switch(format)
{
2021-01-27 20:51:36 +00:00
case VK_FORMAT_D16_UNORM_S8_UINT:
case VK_FORMAT_D32_SFLOAT_S8_UINT:
2020-12-18 22:35:33 +00:00
return 1;
2020-12-18 22:35:33 +00:00
default:
return 0;
}
}
static inline uint32_t VULKAN_INTERNAL_BytesPerPixel(VkFormat format)
{
switch (format)
{
case VK_FORMAT_R32G32B32A32_SFLOAT:
case VK_FORMAT_BC2_UNORM_BLOCK:
case VK_FORMAT_BC3_UNORM_BLOCK:
2022-05-12 04:16:24 +00:00
case VK_FORMAT_BC7_UNORM_BLOCK:
case VK_FORMAT_R16G16B16A16_UINT:
return 16;
2021-02-03 00:37:01 +00:00
case VK_FORMAT_R8G8B8A8_UNORM:
case VK_FORMAT_R8G8B8A8_SNORM:
case VK_FORMAT_B8G8R8A8_UNORM:
case VK_FORMAT_B8G8R8A8_SNORM:
case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
case VK_FORMAT_R16G16_UNORM:
case VK_FORMAT_R16G16_SFLOAT:
case VK_FORMAT_R32_SFLOAT:
case VK_FORMAT_D32_SFLOAT:
case VK_FORMAT_R8G8B8A8_UINT:
case VK_FORMAT_R16G16_UINT:
return 4;
case VK_FORMAT_R5G6B5_UNORM_PACK16:
case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
case VK_FORMAT_R8G8_SNORM:
case VK_FORMAT_R16_SFLOAT:
case VK_FORMAT_D16_UNORM:
case VK_FORMAT_R8G8_UINT:
case VK_FORMAT_R16_UINT:
return 2;
case VK_FORMAT_R16G16B16A16_UNORM:
case VK_FORMAT_R32G32_SFLOAT:
case VK_FORMAT_R16G16B16A16_SFLOAT:
case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
case VK_FORMAT_R8_UINT:
return 8;
2021-02-03 00:37:01 +00:00
case VK_FORMAT_R8_UNORM:
return 1;
case VK_FORMAT_D16_UNORM_S8_UINT:
return 3;
2021-02-03 00:37:01 +00:00
case VK_FORMAT_D32_SFLOAT_S8_UINT:
return 5;
default:
Refresh_LogError("Invalid texture format!");
return 0;
}
}
2022-05-12 04:16:24 +00:00
static inline uint32_t VULKAN_INTERNAL_GetTextureBlockSize(
VkFormat format
) {
switch (format)
{
case VK_FORMAT_BC1_RGBA_UNORM_BLOCK:
case VK_FORMAT_BC2_UNORM_BLOCK:
case VK_FORMAT_BC3_UNORM_BLOCK:
case VK_FORMAT_BC7_UNORM_BLOCK:
return 4;
case VK_FORMAT_R8G8B8A8_UNORM:
case VK_FORMAT_B8G8R8A8_UNORM:
case VK_FORMAT_R5G6B5_UNORM_PACK16:
case VK_FORMAT_A1R5G5B5_UNORM_PACK16:
case VK_FORMAT_B4G4R4A4_UNORM_PACK16:
case VK_FORMAT_R8G8_SNORM:
case VK_FORMAT_R8G8B8A8_SNORM:
case VK_FORMAT_A2R10G10B10_UNORM_PACK32:
case VK_FORMAT_R16G16_UNORM:
case VK_FORMAT_R16G16B16A16_UNORM:
case VK_FORMAT_R8_UNORM:
case VK_FORMAT_R32_SFLOAT:
case VK_FORMAT_R32G32_SFLOAT:
case VK_FORMAT_R32G32B32A32_SFLOAT:
case VK_FORMAT_R16_SFLOAT:
case VK_FORMAT_R16G16_SFLOAT:
case VK_FORMAT_R16G16B16A16_SFLOAT:
case VK_FORMAT_R8_UINT:
case VK_FORMAT_R8G8_UINT:
case VK_FORMAT_R8G8B8A8_UINT:
case VK_FORMAT_R16_UINT:
case VK_FORMAT_R16G16_UINT:
case VK_FORMAT_R16G16B16A16_UINT:
2022-05-12 04:16:24 +00:00
return 1;
default:
Refresh_LogError("Unrecognized texture format!");
return 0;
}
}
static inline VkDeviceSize VULKAN_INTERNAL_BytesPerImage(
uint32_t width,
uint32_t height,
VkFormat format
) {
uint32_t blocksPerRow = width;
uint32_t blocksPerColumn = height;
2022-05-12 04:16:24 +00:00
uint32_t blockSize = VULKAN_INTERNAL_GetTextureBlockSize(format);
2022-05-12 04:16:24 +00:00
if (blockSize > 1)
{
2022-05-12 04:16:24 +00:00
blocksPerRow = (width + blockSize - 1) / blockSize;
blocksPerColumn = (height + blockSize - 1) / blockSize;
}
return blocksPerRow * blocksPerColumn * VULKAN_INTERNAL_BytesPerPixel(format);
}
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
static inline Refresh_SampleCount VULKAN_INTERNAL_GetMaxMultiSampleCount(
VulkanRenderer *renderer,
Refresh_SampleCount multiSampleCount
) {
VkSampleCountFlags flags = renderer->physicalDeviceProperties.properties.limits.framebufferColorSampleCounts;
Refresh_SampleCount maxSupported = REFRESH_SAMPLECOUNT_1;
if (flags & VK_SAMPLE_COUNT_8_BIT)
{
maxSupported = REFRESH_SAMPLECOUNT_8;
}
else if (flags & VK_SAMPLE_COUNT_4_BIT)
{
maxSupported = REFRESH_SAMPLECOUNT_4;
}
else if (flags & VK_SAMPLE_COUNT_2_BIT)
{
maxSupported = REFRESH_SAMPLECOUNT_2;
}
return SDL_min(multiSampleCount, maxSupported);
}
2020-12-18 22:35:33 +00:00
/* Memory Management */
static inline VkDeviceSize VULKAN_INTERNAL_NextHighestAlignment(
VkDeviceSize n,
VkDeviceSize align
) {
return align * ((n + align - 1) / align);
}
2021-01-30 00:03:00 +00:00
static void VULKAN_INTERNAL_RemoveMemoryFreeRegion(
VulkanMemoryFreeRegion *freeRegion
) {
uint32_t i;
/* close the gap in the sorted list */
if (freeRegion->allocation->allocator->sortedFreeRegionCount > 1)
{
for (i = freeRegion->sortedIndex; i < freeRegion->allocation->allocator->sortedFreeRegionCount - 1; i += 1)
{
freeRegion->allocation->allocator->sortedFreeRegions[i] =
freeRegion->allocation->allocator->sortedFreeRegions[i + 1];
freeRegion->allocation->allocator->sortedFreeRegions[i]->sortedIndex = i;
}
}
freeRegion->allocation->allocator->sortedFreeRegionCount -= 1;
/* close the gap in the buffer list */
if (freeRegion->allocation->freeRegionCount > 1 && freeRegion->allocationIndex != freeRegion->allocation->freeRegionCount - 1)
{
freeRegion->allocation->freeRegions[freeRegion->allocationIndex] =
freeRegion->allocation->freeRegions[freeRegion->allocation->freeRegionCount - 1];
freeRegion->allocation->freeRegions[freeRegion->allocationIndex]->allocationIndex =
freeRegion->allocationIndex;
}
freeRegion->allocation->freeRegionCount -= 1;
SDL_free(freeRegion);
}
static void VULKAN_INTERNAL_NewMemoryFreeRegion(
2020-12-18 22:35:33 +00:00
VulkanMemoryAllocation *allocation,
VkDeviceSize offset,
VkDeviceSize size
) {
VulkanMemoryFreeRegion *newFreeRegion;
2021-01-30 00:03:00 +00:00
VkDeviceSize newOffset, newSize;
int32_t insertionIndex = 0;
int32_t i;
/* look for an adjacent region to merge */
for (i = allocation->freeRegionCount - 1; i >= 0; i -= 1)
{
/* check left side */
if (allocation->freeRegions[i]->offset + allocation->freeRegions[i]->size == offset)
{
newOffset = allocation->freeRegions[i]->offset;
newSize = allocation->freeRegions[i]->size + size;
VULKAN_INTERNAL_RemoveMemoryFreeRegion(allocation->freeRegions[i]);
VULKAN_INTERNAL_NewMemoryFreeRegion(allocation, newOffset, newSize);
2021-02-01 00:29:38 +00:00
return;
2021-01-30 00:03:00 +00:00
}
/* check right side */
if (allocation->freeRegions[i]->offset == offset + size)
{
newOffset = offset;
newSize = allocation->freeRegions[i]->size + size;
VULKAN_INTERNAL_RemoveMemoryFreeRegion(allocation->freeRegions[i]);
VULKAN_INTERNAL_NewMemoryFreeRegion(allocation, newOffset, newSize);
return;
}
}
/* region is not contiguous with another free region, make a new one */
2020-12-18 22:35:33 +00:00
allocation->freeRegionCount += 1;
if (allocation->freeRegionCount > allocation->freeRegionCapacity)
{
allocation->freeRegionCapacity *= 2;
allocation->freeRegions = SDL_realloc(
allocation->freeRegions,
sizeof(VulkanMemoryFreeRegion*) * allocation->freeRegionCapacity
);
}
newFreeRegion = SDL_malloc(sizeof(VulkanMemoryFreeRegion));
newFreeRegion->offset = offset;
newFreeRegion->size = size;
newFreeRegion->allocation = allocation;
allocation->freeRegions[allocation->freeRegionCount - 1] = newFreeRegion;
newFreeRegion->allocationIndex = allocation->freeRegionCount - 1;
for (i = 0; i < allocation->allocator->sortedFreeRegionCount; i += 1)
{
if (allocation->allocator->sortedFreeRegions[i]->size < size)
{
/* this is where the new region should go */
break;
}
insertionIndex += 1;
}
if (allocation->allocator->sortedFreeRegionCount + 1 > allocation->allocator->sortedFreeRegionCapacity)
{
allocation->allocator->sortedFreeRegionCapacity *= 2;
allocation->allocator->sortedFreeRegions = SDL_realloc(
allocation->allocator->sortedFreeRegions,
sizeof(VulkanMemoryFreeRegion*) * allocation->allocator->sortedFreeRegionCapacity
);
}
/* perform insertion sort */
if (allocation->allocator->sortedFreeRegionCount > 0 && insertionIndex != allocation->allocator->sortedFreeRegionCount)
{
for (i = allocation->allocator->sortedFreeRegionCount; i > insertionIndex && i > 0; i -= 1)
{
allocation->allocator->sortedFreeRegions[i] = allocation->allocator->sortedFreeRegions[i - 1];
allocation->allocator->sortedFreeRegions[i]->sortedIndex = i;
}
}
allocation->allocator->sortedFreeRegionCount += 1;
allocation->allocator->sortedFreeRegions[insertionIndex] = newFreeRegion;
newFreeRegion->sortedIndex = insertionIndex;
}
static uint8_t VULKAN_INTERNAL_FindMemoryType(
VulkanRenderer *renderer,
uint32_t typeFilter,
VkMemoryPropertyFlags requiredProperties,
VkMemoryPropertyFlags ignoredProperties,
uint32_t *memoryTypeIndex
2020-12-18 22:35:33 +00:00
) {
uint32_t i;
for (i = *memoryTypeIndex; i < renderer->memoryProperties.memoryTypeCount; i += 1)
2020-12-18 22:35:33 +00:00
{
if ( (typeFilter & (1 << i)) &&
(renderer->memoryProperties.memoryTypes[i].propertyFlags & requiredProperties) == requiredProperties &&
(renderer->memoryProperties.memoryTypes[i].propertyFlags & ignoredProperties) == 0 )
2020-12-18 22:35:33 +00:00
{
*memoryTypeIndex = i;
2020-12-18 22:35:33 +00:00
return 1;
}
}
Refresh_LogError("Failed to find memory properties %X, required %X, ignored %X", typeFilter, requiredProperties, ignoredProperties);
2020-12-18 22:35:33 +00:00
return 0;
}
static uint8_t VULKAN_INTERNAL_FindBufferMemoryRequirements(
VulkanRenderer *renderer,
VkBuffer buffer,
VkMemoryRequirements2KHR *pMemoryRequirements,
uint32_t *pMemoryTypeIndex
) {
VkBufferMemoryRequirementsInfo2KHR bufferRequirementsInfo;
bufferRequirementsInfo.sType =
VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR;
bufferRequirementsInfo.pNext = NULL;
bufferRequirementsInfo.buffer = buffer;
renderer->vkGetBufferMemoryRequirements2KHR(
renderer->logicalDevice,
&bufferRequirementsInfo,
pMemoryRequirements
);
if (!VULKAN_INTERNAL_FindMemoryType(
renderer,
pMemoryRequirements->memoryRequirements.memoryTypeBits,
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
0,
2020-12-18 22:35:33 +00:00
pMemoryTypeIndex
)) {
2021-01-05 23:00:51 +00:00
Refresh_LogError(
2020-12-18 22:35:33 +00:00
"Could not find valid memory type for buffer creation"
);
return 0;
}
return 1;
}
static uint8_t VULKAN_INTERNAL_FindImageMemoryRequirements(
VulkanRenderer *renderer,
VkImage image,
VkMemoryPropertyFlags requiredMemoryPropertyFlags,
VkMemoryPropertyFlags ignoredMemoryPropertyFlags,
2020-12-18 22:35:33 +00:00
VkMemoryRequirements2KHR *pMemoryRequirements,
uint32_t *pMemoryTypeIndex
) {
VkImageMemoryRequirementsInfo2KHR imageRequirementsInfo;
imageRequirementsInfo.sType =
VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR;
imageRequirementsInfo.pNext = NULL;
imageRequirementsInfo.image = image;
renderer->vkGetImageMemoryRequirements2KHR(
renderer->logicalDevice,
&imageRequirementsInfo,
pMemoryRequirements
);
if (!VULKAN_INTERNAL_FindMemoryType(
renderer,
pMemoryRequirements->memoryRequirements.memoryTypeBits,
requiredMemoryPropertyFlags,
ignoredMemoryPropertyFlags,
2020-12-18 22:35:33 +00:00
pMemoryTypeIndex
)) {
2021-01-05 23:00:51 +00:00
Refresh_LogError(
2020-12-18 22:35:33 +00:00
"Could not find valid memory type for image creation"
);
return 0;
}
return 1;
}
static uint8_t VULKAN_INTERNAL_AllocateMemory(
VulkanRenderer *renderer,
VkBuffer buffer,
VkImage image,
uint32_t memoryTypeIndex,
VkDeviceSize allocationSize,
uint8_t dedicated,
uint8_t isHostVisible,
2020-12-18 22:35:33 +00:00
VulkanMemoryAllocation **pMemoryAllocation
) {
VulkanMemoryAllocation *allocation;
VulkanMemorySubAllocator *allocator = &renderer->memoryAllocator->subAllocators[memoryTypeIndex];
VkMemoryAllocateInfo allocInfo;
VkMemoryDedicatedAllocateInfoKHR dedicatedInfo;
VkResult result;
allocInfo.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
allocInfo.memoryTypeIndex = memoryTypeIndex;
allocInfo.allocationSize = allocationSize;
allocation = SDL_malloc(sizeof(VulkanMemoryAllocation));
allocation->size = allocationSize;
2021-01-03 21:12:12 +00:00
allocation->memoryLock = SDL_CreateMutex();
2020-12-18 22:35:33 +00:00
if (dedicated)
{
dedicatedInfo.sType =
VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR;
dedicatedInfo.pNext = NULL;
dedicatedInfo.buffer = buffer;
dedicatedInfo.image = image;
allocInfo.pNext = &dedicatedInfo;
allocation->dedicated = 1;
}
else
{
allocInfo.pNext = NULL;
allocator->allocationCount += 1;
allocator->allocations = SDL_realloc(
allocator->allocations,
sizeof(VulkanMemoryAllocation*) * allocator->allocationCount
);
allocator->allocations[
allocator->allocationCount - 1
] = allocation;
allocation->dedicated = 0;
}
allocation->freeRegions = SDL_malloc(sizeof(VulkanMemoryFreeRegion*));
allocation->freeRegionCount = 0;
allocation->freeRegionCapacity = 1;
allocation->allocator = allocator;
result = renderer->vkAllocateMemory(
renderer->logicalDevice,
&allocInfo,
NULL,
&allocation->memory
);
if (result != VK_SUCCESS)
{
2021-01-27 20:51:36 +00:00
/* Uh oh, we couldn't allocate, time to clean up */
SDL_free(allocation->freeRegions);
allocator->allocationCount -= 1;
allocator->allocations = SDL_realloc(
allocator->allocations,
sizeof(VulkanMemoryAllocation*) * allocator->allocationCount
);
SDL_free(allocation);
LogVulkanResultAsWarn("vkAllocateMemory", result);
2020-12-18 22:35:33 +00:00
return 0;
}
2021-01-27 20:51:36 +00:00
/* persistent mapping for host memory */
if (isHostVisible)
2021-01-14 02:02:45 +00:00
{
result = renderer->vkMapMemory(
renderer->logicalDevice,
allocation->memory,
0,
allocation->size,
0,
(void**) &allocation->mapPointer
);
if (result != VK_SUCCESS)
{
2021-01-27 20:51:36 +00:00
LogVulkanResultAsError("vkMapMemory", result);
2021-01-14 02:02:45 +00:00
return 0;
}
}
else
{
allocation->mapPointer = NULL;
}
2020-12-18 22:35:33 +00:00
VULKAN_INTERNAL_NewMemoryFreeRegion(
allocation,
0,
allocation->size
);
*pMemoryAllocation = allocation;
return 1;
}
static uint8_t VULKAN_INTERNAL_FindAvailableMemory(
VulkanRenderer *renderer,
2021-01-27 20:51:36 +00:00
uint32_t memoryTypeIndex,
VkMemoryRequirements2KHR *memoryRequirements,
VkMemoryDedicatedRequirementsKHR *dedicatedRequirements,
VkBuffer buffer, /* may be VK_NULL_HANDLE */
VkImage image, /* may be VK_NULL_HANDLE */
2020-12-18 22:35:33 +00:00
VulkanMemoryAllocation **pMemoryAllocation,
VkDeviceSize *pOffset,
VkDeviceSize *pSize
) {
VulkanMemoryAllocation *allocation;
VulkanMemorySubAllocator *allocator;
VulkanMemoryFreeRegion *region;
VkDeviceSize requiredSize, allocationSize;
VkDeviceSize alignedOffset;
2021-01-27 20:51:36 +00:00
uint32_t newRegionSize, newRegionOffset;
uint8_t shouldAllocDedicated =
dedicatedRequirements->prefersDedicatedAllocation ||
dedicatedRequirements->requiresDedicatedAllocation;
uint8_t isHostVisible, allocationResult;
isHostVisible =
(renderer->memoryProperties.memoryTypes[memoryTypeIndex].propertyFlags &
VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
2020-12-18 22:35:33 +00:00
allocator = &renderer->memoryAllocator->subAllocators[memoryTypeIndex];
2021-01-27 20:51:36 +00:00
requiredSize = memoryRequirements->memoryRequirements.size;
2020-12-18 22:35:33 +00:00
SDL_LockMutex(renderer->allocatorLock);
/* find the largest free region and use it */
if (allocator->sortedFreeRegionCount > 0)
{
region = allocator->sortedFreeRegions[0];
allocation = region->allocation;
alignedOffset = VULKAN_INTERNAL_NextHighestAlignment(
region->offset,
2021-01-27 20:51:36 +00:00
memoryRequirements->memoryRequirements.alignment
2020-12-18 22:35:33 +00:00
);
if (alignedOffset + requiredSize <= region->offset + region->size)
{
*pMemoryAllocation = allocation;
/* not aligned - create a new free region */
if (region->offset != alignedOffset)
{
VULKAN_INTERNAL_NewMemoryFreeRegion(
allocation,
region->offset,
alignedOffset - region->offset
);
}
*pOffset = alignedOffset;
*pSize = requiredSize;
newRegionSize = region->size - ((alignedOffset - region->offset) + requiredSize);
newRegionOffset = alignedOffset + requiredSize;
/* remove and add modified region to re-sort */
VULKAN_INTERNAL_RemoveMemoryFreeRegion(region);
/* if size is 0, no need to re-insert */
if (newRegionSize != 0)
{
VULKAN_INTERNAL_NewMemoryFreeRegion(
allocation,
newRegionOffset,
newRegionSize
);
}
SDL_UnlockMutex(renderer->allocatorLock);
return 1;
}
}
/* No suitable free regions exist, allocate a new memory region */
2021-01-27 20:51:36 +00:00
if (shouldAllocDedicated)
2020-12-18 22:35:33 +00:00
{
allocationSize = requiredSize;
}
else if (requiredSize > allocator->nextAllocationSize)
{
/* allocate a page of required size aligned to STARTING_ALLOCATION_SIZE increments */
allocationSize =
VULKAN_INTERNAL_NextHighestAlignment(requiredSize, STARTING_ALLOCATION_SIZE);
}
else
{
allocationSize = allocator->nextAllocationSize;
allocator->nextAllocationSize = SDL_min(allocator->nextAllocationSize * 2, MAX_ALLOCATION_SIZE);
}
allocationResult = VULKAN_INTERNAL_AllocateMemory(
renderer,
buffer,
image,
memoryTypeIndex,
allocationSize,
2021-01-27 20:51:36 +00:00
shouldAllocDedicated,
isHostVisible,
2020-12-18 22:35:33 +00:00
&allocation
);
2020-12-17 04:04:47 +00:00
2020-12-18 22:35:33 +00:00
/* Uh oh, we're out of memory */
if (allocationResult == 0)
{
SDL_UnlockMutex(renderer->allocatorLock);
2021-01-27 20:51:36 +00:00
/* Responsibility of the caller to handle being out of memory */
Refresh_LogWarn("Failed to allocate memory!");
2020-12-18 22:35:33 +00:00
return 2;
}
*pMemoryAllocation = allocation;
*pOffset = 0;
*pSize = requiredSize;
region = allocation->freeRegions[0];
newRegionOffset = region->offset + requiredSize;
newRegionSize = region->size - requiredSize;
VULKAN_INTERNAL_RemoveMemoryFreeRegion(region);
if (newRegionSize != 0)
{
VULKAN_INTERNAL_NewMemoryFreeRegion(
allocation,
newRegionOffset,
newRegionSize
);
}
SDL_UnlockMutex(renderer->allocatorLock);
return 1;
}
2021-01-27 20:51:36 +00:00
static uint8_t VULKAN_INTERNAL_FindAvailableBufferMemory(
VulkanRenderer *renderer,
VkBuffer buffer,
VulkanMemoryAllocation **pMemoryAllocation,
VkDeviceSize *pOffset,
VkDeviceSize *pSize
) {
uint32_t memoryTypeIndex = 0;
2021-01-27 20:51:36 +00:00
VkMemoryDedicatedRequirementsKHR dedicatedRequirements =
{
VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
NULL
};
VkMemoryRequirements2KHR memoryRequirements =
{
VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
&dedicatedRequirements
};
uint8_t findResult = 0;
2021-01-27 20:51:36 +00:00
while (VULKAN_INTERNAL_FindBufferMemoryRequirements(
2021-01-27 20:51:36 +00:00
renderer,
buffer,
&memoryRequirements,
&memoryTypeIndex
)) {
findResult = VULKAN_INTERNAL_FindAvailableMemory(
renderer,
memoryTypeIndex,
&memoryRequirements,
&dedicatedRequirements,
buffer,
VK_NULL_HANDLE,
pMemoryAllocation,
pOffset,
pSize
);
if (findResult == 1)
{
break;
}
else
{
memoryTypeIndex += 1;
}
2021-01-27 20:51:36 +00:00
}
return findResult;
2021-01-27 20:51:36 +00:00
}
static uint8_t VULKAN_INTERNAL_FindAvailableTextureMemory(
VulkanRenderer *renderer,
VkImage image,
uint8_t cpuAllocation,
VulkanMemoryAllocation **pMemoryAllocation,
VkDeviceSize *pOffset,
VkDeviceSize *pSize
) {
uint32_t memoryTypeIndex = 0;
VkMemoryPropertyFlags requiredMemoryPropertyFlags;
VkMemoryPropertyFlags ignoredMemoryPropertyFlags;
2021-01-27 20:51:36 +00:00
VkMemoryDedicatedRequirementsKHR dedicatedRequirements =
{
VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR,
NULL
};
VkMemoryRequirements2KHR memoryRequirements =
{
VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR,
&dedicatedRequirements
};
uint8_t findResult = 0;
2021-01-27 20:51:36 +00:00
if (cpuAllocation)
{
requiredMemoryPropertyFlags = 0;
ignoredMemoryPropertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
2021-01-27 20:51:36 +00:00
}
else
{
requiredMemoryPropertyFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
ignoredMemoryPropertyFlags = 0;
2021-01-27 20:51:36 +00:00
}
while (VULKAN_INTERNAL_FindImageMemoryRequirements(
2021-01-27 20:51:36 +00:00
renderer,
image,
requiredMemoryPropertyFlags,
ignoredMemoryPropertyFlags,
2021-01-27 20:51:36 +00:00
&memoryRequirements,
&memoryTypeIndex
)) {
findResult = VULKAN_INTERNAL_FindAvailableMemory(
renderer,
memoryTypeIndex,
&memoryRequirements,
&dedicatedRequirements,
VK_NULL_HANDLE,
image,
pMemoryAllocation,
pOffset,
pSize
);
if (findResult == 1)
{
break;
}
else
{
memoryTypeIndex += 1;
}
2021-01-27 20:51:36 +00:00
}
return findResult;
2021-01-27 20:51:36 +00:00
}
2020-12-19 00:39:03 +00:00
/* Memory Barriers */
2020-12-19 01:03:26 +00:00
static void VULKAN_INTERNAL_BufferMemoryBarrier(
VulkanRenderer *renderer,
2021-01-02 21:31:17 +00:00
VkCommandBuffer commandBuffer,
2020-12-19 01:03:26 +00:00
VulkanResourceAccessType nextResourceAccessType,
VulkanBuffer *buffer
2020-12-19 01:03:26 +00:00
) {
VkPipelineStageFlags srcStages = 0;
VkPipelineStageFlags dstStages = 0;
VkBufferMemoryBarrier memoryBarrier;
VulkanResourceAccessType prevAccess, nextAccess;
const VulkanResourceAccessInfo *prevAccessInfo, *nextAccessInfo;
memoryBarrier.sType = VK_STRUCTURE_TYPE_BUFFER_MEMORY_BARRIER;
memoryBarrier.pNext = NULL;
memoryBarrier.srcAccessMask = 0;
memoryBarrier.dstAccessMask = 0;
memoryBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
memoryBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
memoryBarrier.buffer = buffer->buffer;
2020-12-19 01:03:26 +00:00
memoryBarrier.offset = 0;
memoryBarrier.size = buffer->size;
prevAccess = buffer->resourceAccessType;
prevAccessInfo = &AccessMap[prevAccess];
srcStages |= prevAccessInfo->stageMask;
if (prevAccess > RESOURCE_ACCESS_END_OF_READ)
{
memoryBarrier.srcAccessMask |= prevAccessInfo->accessMask;
}
nextAccess = nextResourceAccessType;
nextAccessInfo = &AccessMap[nextAccess];
dstStages |= nextAccessInfo->stageMask;
if (memoryBarrier.srcAccessMask != 0)
{
memoryBarrier.dstAccessMask |= nextAccessInfo->accessMask;
}
if (srcStages == 0)
{
srcStages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
}
if (dstStages == 0)
{
dstStages = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
}
2021-01-02 06:07:15 +00:00
renderer->vkCmdPipelineBarrier(
2021-01-02 21:31:17 +00:00
commandBuffer,
2020-12-19 01:03:26 +00:00
srcStages,
dstStages,
0,
0,
NULL,
1,
&memoryBarrier,
0,
NULL
2021-01-02 06:07:15 +00:00
);
2020-12-19 01:03:26 +00:00
buffer->resourceAccessType = nextResourceAccessType;
}
2020-12-19 00:39:03 +00:00
static void VULKAN_INTERNAL_ImageMemoryBarrier(
VulkanRenderer *renderer,
2021-01-02 21:31:17 +00:00
VkCommandBuffer commandBuffer,
2020-12-19 00:39:03 +00:00
VulkanResourceAccessType nextAccess,
VkImageAspectFlags aspectMask,
uint32_t baseLayer,
uint32_t layerCount,
uint32_t baseLevel,
uint32_t levelCount,
uint8_t discardContents,
VkImage image,
VulkanResourceAccessType *resourceAccessType
) {
VkPipelineStageFlags srcStages = 0;
VkPipelineStageFlags dstStages = 0;
VkImageMemoryBarrier memoryBarrier;
VulkanResourceAccessType prevAccess;
const VulkanResourceAccessInfo *pPrevAccessInfo, *pNextAccessInfo;
memoryBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
memoryBarrier.pNext = NULL;
memoryBarrier.srcAccessMask = 0;
memoryBarrier.dstAccessMask = 0;
memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
memoryBarrier.newLayout = VK_IMAGE_LAYOUT_UNDEFINED;
2021-01-03 04:03:07 +00:00
memoryBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
memoryBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
2020-12-19 00:39:03 +00:00
memoryBarrier.image = image;
memoryBarrier.subresourceRange.aspectMask = aspectMask;
memoryBarrier.subresourceRange.baseArrayLayer = baseLayer;
memoryBarrier.subresourceRange.layerCount = layerCount;
memoryBarrier.subresourceRange.baseMipLevel = baseLevel;
memoryBarrier.subresourceRange.levelCount = levelCount;
prevAccess = *resourceAccessType;
pPrevAccessInfo = &AccessMap[prevAccess];
srcStages |= pPrevAccessInfo->stageMask;
if (prevAccess > RESOURCE_ACCESS_END_OF_READ)
{
memoryBarrier.srcAccessMask |= pPrevAccessInfo->accessMask;
}
if (discardContents)
{
memoryBarrier.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
}
else
{
memoryBarrier.oldLayout = pPrevAccessInfo->imageLayout;
}
pNextAccessInfo = &AccessMap[nextAccess];
dstStages |= pNextAccessInfo->stageMask;
memoryBarrier.dstAccessMask |= pNextAccessInfo->accessMask;
memoryBarrier.newLayout = pNextAccessInfo->imageLayout;
if (srcStages == 0)
{
srcStages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
}
if (dstStages == 0)
{
dstStages = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
}
2021-01-02 06:07:15 +00:00
renderer->vkCmdPipelineBarrier(
2021-01-02 21:31:17 +00:00
commandBuffer,
2020-12-19 00:39:03 +00:00
srcStages,
dstStages,
0,
0,
NULL,
0,
NULL,
1,
&memoryBarrier
2021-01-02 06:07:15 +00:00
);
2021-01-02 21:31:17 +00:00
2020-12-21 23:44:43 +00:00
*resourceAccessType = nextAccess;
}
/* Resource tracking */
#define TRACK_RESOURCE(resource, type, array, count, capacity) \
uint32_t i; \
\
for (i = 0; i < commandBuffer->count; i += 1) \
{ \
if (commandBuffer->array[i] == resource) \
{ \
return; \
} \
} \
\
if (commandBuffer->count == commandBuffer->capacity) \
{ \
commandBuffer->capacity += 1; \
commandBuffer->array = SDL_realloc( \
commandBuffer->array, \
commandBuffer->capacity * sizeof(type) \
); \
} \
commandBuffer->array[commandBuffer->count] = resource; \
commandBuffer->count += 1; \
\
SDL_AtomicIncRef(&resource->referenceCount);
static void VULKAN_INTERNAL_TrackBuffer(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
VulkanBuffer *buffer
) {
TRACK_RESOURCE(
buffer,
VulkanBuffer*,
usedBuffers,
usedBufferCount,
usedBufferCapacity
)
}
static void VULKAN_INTERNAL_TrackTexture(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
VulkanTexture *texture
) {
TRACK_RESOURCE(
texture,
VulkanTexture*,
usedTextures,
usedTextureCount,
usedTextureCapacity
)
}
static void VULKAN_INTERNAL_TrackSampler(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
VulkanSampler *sampler
) {
TRACK_RESOURCE(
sampler,
VulkanSampler*,
usedSamplers,
usedSamplerCount,
usedSamplerCapacity
)
}
static void VULKAN_INTERNAL_TrackGraphicsPipeline(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
VulkanGraphicsPipeline *graphicsPipeline
) {
TRACK_RESOURCE(
graphicsPipeline,
VulkanGraphicsPipeline*,
usedGraphicsPipelines,
usedGraphicsPipelineCount,
usedGraphicsPipelineCapacity
)
}
static void VULKAN_INTERNAL_TrackComputePipeline(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
VulkanComputePipeline *computePipeline
) {
TRACK_RESOURCE(
computePipeline,
VulkanComputePipeline*,
usedComputePipelines,
usedComputePipelineCount,
usedComputePipelineCapacity
)
}
static void VULKAN_INTERNAL_TrackFramebuffer(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
VulkanFramebuffer *framebuffer
) {
TRACK_RESOURCE(
framebuffer,
VulkanFramebuffer*,
usedFramebuffers,
usedFramebufferCount,
usedFramebufferCapacity
);
}
#undef TRACK_RESOURCE
2020-12-21 23:44:43 +00:00
/* Resource Disposal */
static void VULKAN_INTERNAL_QueueDestroyFramebuffer(
VulkanRenderer *renderer,
VulkanFramebuffer *framebuffer
) {
SDL_LockMutex(renderer->disposeLock);
EXPAND_ARRAY_IF_NEEDED(
renderer->framebuffersToDestroy,
VulkanFramebuffer*,
renderer->framebuffersToDestroyCount + 1,
renderer->framebuffersToDestroyCapacity,
renderer->framebuffersToDestroyCapacity * 2
)
renderer->framebuffersToDestroy[renderer->framebuffersToDestroyCount] = framebuffer;
renderer->framebuffersToDestroyCount += 1;
SDL_UnlockMutex(renderer->disposeLock);
}
static void VULKAN_INTERNAL_DestroyFramebuffer(
VulkanRenderer *renderer,
VulkanFramebuffer *framebuffer
) {
renderer->vkDestroyFramebuffer(
renderer->logicalDevice,
framebuffer->framebuffer,
NULL
);
SDL_free(framebuffer);
}
static void VULKAN_INTERNAL_RemoveFramebuffersContainingView(
VulkanRenderer *renderer,
VkImageView view
) {
FramebufferHash *hash;
int32_t i, j;
SDL_LockMutex(renderer->framebufferFetchLock);
for (i = renderer->framebufferHashArray.count - 1; i >= 0; i -= 1)
{
hash = &renderer->framebufferHashArray.elements[i].key;
for (j = 0; j < hash->colorAttachmentCount; j += 1)
{
2022-12-23 01:23:11 +00:00
if (hash->colorAttachmentViews[j] == view)
{
VULKAN_INTERNAL_QueueDestroyFramebuffer(
renderer,
renderer->framebufferHashArray.elements[i].value
);
FramebufferHashArray_Remove(
&renderer->framebufferHashArray,
i
);
break;
}
}
}
SDL_UnlockMutex(renderer->framebufferFetchLock);
}
static void VULKAN_INTERNAL_RemoveRenderTargetsContainingTexture(
VulkanRenderer *renderer,
VulkanTexture *texture
) {
RenderTargetHash *hash;
int32_t i;
SDL_LockMutex(renderer->renderTargetFetchLock);
for (i = renderer->renderTargetHashArray.count - 1; i >= 0; i -= 1)
{
hash = &renderer->renderTargetHashArray.elements[i].key;
if ((VulkanTexture*) hash->texture == texture)
{
VULKAN_INTERNAL_RemoveFramebuffersContainingView(
renderer,
renderer->renderTargetHashArray.elements[i].value->view
);
VULKAN_INTERNAL_DestroyRenderTarget(
renderer,
renderer->renderTargetHashArray.elements[i].value
);
RenderTargetHash_Remove(
&renderer->renderTargetHashArray,
i
);
}
}
SDL_UnlockMutex(renderer->renderTargetFetchLock);
}
2020-12-29 00:28:14 +00:00
static void VULKAN_INTERNAL_DestroyTexture(
VulkanRenderer* renderer,
VulkanTexture* texture
) {
if (texture->allocation->dedicated)
{
renderer->vkFreeMemory(
renderer->logicalDevice,
texture->allocation->memory,
NULL
);
2021-01-03 21:12:12 +00:00
SDL_DestroyMutex(texture->allocation->memoryLock);
2020-12-29 00:28:14 +00:00
SDL_free(texture->allocation->freeRegions);
SDL_free(texture->allocation);
}
else
{
SDL_LockMutex(renderer->allocatorLock);
VULKAN_INTERNAL_NewMemoryFreeRegion(
texture->allocation,
texture->offset,
texture->memorySize
);
SDL_UnlockMutex(renderer->allocatorLock);
}
VULKAN_INTERNAL_RemoveRenderTargetsContainingTexture(
renderer,
texture
);
2020-12-29 00:28:14 +00:00
renderer->vkDestroyImageView(
renderer->logicalDevice,
texture->view,
NULL
);
renderer->vkDestroyImage(
renderer->logicalDevice,
texture->image,
NULL
);
/* destroy the msaa texture, if there is one */
if (texture->msaaTex != NULL)
{
VULKAN_INTERNAL_DestroyTexture(
renderer,
texture->msaaTex
);
}
2020-12-29 00:28:14 +00:00
SDL_free(texture);
}
static void VULKAN_INTERNAL_DestroyRenderTarget(
2020-12-29 00:28:14 +00:00
VulkanRenderer *renderer,
VulkanRenderTarget *renderTarget
2020-12-29 00:28:14 +00:00
) {
VULKAN_INTERNAL_RemoveFramebuffersContainingView(
renderer,
renderTarget->view
);
2020-12-29 00:28:14 +00:00
renderer->vkDestroyImageView(
renderer->logicalDevice,
renderTarget->view,
2020-12-29 00:28:14 +00:00
NULL
);
SDL_free(renderTarget);
2020-12-29 00:28:14 +00:00
}
2020-12-21 23:44:43 +00:00
static void VULKAN_INTERNAL_DestroyBuffer(
2020-12-28 22:57:14 +00:00
VulkanRenderer* renderer,
VulkanBuffer* buffer
2020-12-21 23:44:43 +00:00
) {
if (buffer->allocation->dedicated)
2020-12-21 23:44:43 +00:00
{
renderer->vkFreeMemory(
2020-12-21 23:44:43 +00:00
renderer->logicalDevice,
buffer->allocation->memory,
2020-12-21 23:44:43 +00:00
NULL
);
SDL_DestroyMutex(buffer->allocation->memoryLock);
SDL_free(buffer->allocation->freeRegions);
SDL_free(buffer->allocation);
}
else
{
SDL_LockMutex(renderer->allocatorLock);
VULKAN_INTERNAL_NewMemoryFreeRegion(
buffer->allocation,
buffer->offset,
buffer->memorySize
);
SDL_UnlockMutex(renderer->allocatorLock);
2020-12-21 23:44:43 +00:00
}
renderer->vkDestroyBuffer(
renderer->logicalDevice,
buffer->buffer,
NULL
);
2020-12-21 23:44:43 +00:00
SDL_free(buffer);
}
2021-01-03 02:02:20 +00:00
static void VULKAN_INTERNAL_DestroyCommandPool(
VulkanRenderer *renderer,
VulkanCommandPool *commandPool
) {
uint32_t i;
VulkanCommandBuffer* commandBuffer;
renderer->vkDestroyCommandPool(
2021-01-03 02:02:20 +00:00
renderer->logicalDevice,
commandPool->commandPool,
NULL
2021-01-03 02:02:20 +00:00
);
for (i = 0; i < commandPool->inactiveCommandBufferCount; i += 1)
{
commandBuffer = commandPool->inactiveCommandBuffers[i];
renderer->vkDestroyFence(
renderer->logicalDevice,
commandBuffer->inFlightFence,
NULL
);
SDL_free(commandBuffer->presentDatas);
SDL_free(commandBuffer->waitSemaphores);
SDL_free(commandBuffer->signalSemaphores);
SDL_free(commandBuffer->transferBuffers);
SDL_free(commandBuffer->boundUniformBuffers);
SDL_free(commandBuffer->boundDescriptorSetDatas);
SDL_free(commandBuffer->usedBuffers);
SDL_free(commandBuffer->usedTextures);
2022-03-04 01:30:26 +00:00
SDL_free(commandBuffer->usedSamplers);
SDL_free(commandBuffer->usedGraphicsPipelines);
SDL_free(commandBuffer->usedComputePipelines);
SDL_free(commandBuffer->usedFramebuffers);
SDL_free(commandBuffer);
}
2021-01-03 02:02:20 +00:00
SDL_free(commandPool->inactiveCommandBuffers);
SDL_free(commandPool);
}
2020-12-29 00:28:14 +00:00
static void VULKAN_INTERNAL_DestroyGraphicsPipeline(
VulkanRenderer *renderer,
VulkanGraphicsPipeline *graphicsPipeline
) {
renderer->vkDestroyPipeline(
renderer->logicalDevice,
graphicsPipeline->pipeline,
NULL
);
SDL_AtomicDecRef(&graphicsPipeline->vertexShaderModule->referenceCount);
SDL_AtomicDecRef(&graphicsPipeline->fragmentShaderModule->referenceCount);
2020-12-29 00:28:14 +00:00
SDL_free(graphicsPipeline);
}
2020-12-31 07:02:12 +00:00
static void VULKAN_INTERNAL_DestroyComputePipeline(
VulkanRenderer *renderer,
VulkanComputePipeline *computePipeline
) {
renderer->vkDestroyPipeline(
renderer->logicalDevice,
computePipeline->pipeline,
NULL
);
SDL_AtomicDecRef(&computePipeline->computeShaderModule->referenceCount);
2020-12-31 07:02:12 +00:00
SDL_free(computePipeline);
}
2020-12-29 00:42:51 +00:00
static void VULKAN_INTERNAL_DestroyShaderModule(
VulkanRenderer *renderer,
VulkanShaderModule *vulkanShaderModule
2020-12-29 00:42:51 +00:00
) {
renderer->vkDestroyShaderModule(
renderer->logicalDevice,
vulkanShaderModule->shaderModule,
2020-12-29 00:42:51 +00:00
NULL
);
SDL_free(vulkanShaderModule);
2020-12-29 00:42:51 +00:00
}
2020-12-29 00:56:49 +00:00
static void VULKAN_INTERNAL_DestroySampler(
VulkanRenderer *renderer,
VulkanSampler *vulkanSampler
2020-12-29 00:56:49 +00:00
) {
renderer->vkDestroySampler(
renderer->logicalDevice,
vulkanSampler->sampler,
2020-12-29 00:56:49 +00:00
NULL
);
SDL_free(vulkanSampler);
2020-12-29 00:56:49 +00:00
}
static void VULKAN_INTERNAL_DestroySwapchain(
VulkanRenderer* renderer,
WindowData *windowData
) {
2020-12-21 23:44:43 +00:00
uint32_t i;
VulkanSwapchainData *swapchainData;
if (windowData == NULL)
{
return;
}
swapchainData = windowData->swapchainData;
if (swapchainData == NULL)
{
return;
}
2020-12-21 23:44:43 +00:00
for (i = 0; i < swapchainData->imageCount; i += 1)
2020-12-21 23:44:43 +00:00
{
VULKAN_INTERNAL_RemoveRenderTargetsContainingTexture(
renderer,
&swapchainData->textures[i]
);
2020-12-21 23:44:43 +00:00
renderer->vkDestroyImageView(
renderer->logicalDevice,
swapchainData->textures[i].view,
2020-12-21 23:44:43 +00:00
NULL
);
}
SDL_free(swapchainData->textures);
2020-12-21 23:44:43 +00:00
renderer->vkDestroySwapchainKHR(
renderer->logicalDevice,
swapchainData->swapchain,
NULL
);
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
2020-12-21 23:44:43 +00:00
NULL
);
2022-02-10 05:42:19 +00:00
renderer->vkDestroySemaphore(
renderer->logicalDevice,
swapchainData->imageAvailableSemaphore,
NULL
);
renderer->vkDestroySemaphore(
renderer->logicalDevice,
swapchainData->renderFinishedSemaphore,
NULL
);
windowData->swapchainData = NULL;
SDL_free(swapchainData);
2020-12-21 23:44:43 +00:00
}
static void VULKAN_INTERNAL_DestroyDescriptorSetCache(
2020-12-31 07:02:12 +00:00
VulkanRenderer *renderer,
DescriptorSetCache *cache
2020-12-28 22:57:14 +00:00
) {
uint32_t i;
2020-12-28 23:11:05 +00:00
if (cache == NULL)
{
return;
}
for (i = 0; i < cache->descriptorPoolCount; i += 1)
2020-12-28 22:57:14 +00:00
{
renderer->vkDestroyDescriptorPool(
renderer->logicalDevice,
cache->descriptorPools[i],
2020-12-28 22:57:14 +00:00
NULL
);
}
SDL_free(cache->descriptorPools);
2020-12-28 22:57:14 +00:00
SDL_free(cache->inactiveDescriptorSets);
SDL_DestroyMutex(cache->lock);
2020-12-28 22:57:14 +00:00
SDL_free(cache);
}
/* Descriptor cache stuff */
2020-12-28 23:11:05 +00:00
static uint8_t VULKAN_INTERNAL_CreateDescriptorPool(
VulkanRenderer *renderer,
VkDescriptorType descriptorType,
uint32_t descriptorSetCount,
uint32_t descriptorCount,
VkDescriptorPool *pDescriptorPool
) {
VkResult vulkanResult;
2020-12-29 04:09:31 +00:00
VkDescriptorPoolSize descriptorPoolSize;
VkDescriptorPoolCreateInfo descriptorPoolInfo;
2020-12-28 23:11:05 +00:00
descriptorPoolSize.type = descriptorType;
descriptorPoolSize.descriptorCount = descriptorCount;
2020-12-28 23:11:05 +00:00
descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
descriptorPoolInfo.pNext = NULL;
descriptorPoolInfo.flags = 0;
descriptorPoolInfo.maxSets = descriptorSetCount;
descriptorPoolInfo.poolSizeCount = 1;
descriptorPoolInfo.pPoolSizes = &descriptorPoolSize;
2020-12-29 00:28:14 +00:00
vulkanResult = renderer->vkCreateDescriptorPool(
renderer->logicalDevice,
&descriptorPoolInfo,
NULL,
pDescriptorPool
);
2020-12-31 07:02:12 +00:00
if (vulkanResult != VK_SUCCESS)
2020-12-29 00:42:51 +00:00
{
LogVulkanResultAsError("vkCreateDescriptorPool", vulkanResult);
return 0;
2020-12-29 00:42:51 +00:00
}
return 1;
}
2020-12-29 00:56:49 +00:00
static uint8_t VULKAN_INTERNAL_AllocateDescriptorSets(
VulkanRenderer *renderer,
VkDescriptorPool descriptorPool,
VkDescriptorSetLayout descriptorSetLayout,
uint32_t descriptorSetCount,
VkDescriptorSet *descriptorSetArray
) {
VkResult vulkanResult;
uint32_t i;
VkDescriptorSetAllocateInfo descriptorSetAllocateInfo;
VkDescriptorSetLayout *descriptorSetLayouts = SDL_stack_alloc(VkDescriptorSetLayout, descriptorSetCount);
for (i = 0; i < descriptorSetCount; i += 1)
2020-12-29 01:35:18 +00:00
{
descriptorSetLayouts[i] = descriptorSetLayout;
2020-12-29 01:35:18 +00:00
}
descriptorSetAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
descriptorSetAllocateInfo.pNext = NULL;
descriptorSetAllocateInfo.descriptorPool = descriptorPool;
descriptorSetAllocateInfo.descriptorSetCount = descriptorSetCount;
descriptorSetAllocateInfo.pSetLayouts = descriptorSetLayouts;
vulkanResult = renderer->vkAllocateDescriptorSets(
renderer->logicalDevice,
&descriptorSetAllocateInfo,
descriptorSetArray
);
if (vulkanResult != VK_SUCCESS)
2020-12-29 03:44:34 +00:00
{
LogVulkanResultAsError("vkAllocateDescriptorSets", vulkanResult);
SDL_stack_free(descriptorSetLayouts);
return 0;
2020-12-29 03:44:34 +00:00
}
SDL_stack_free(descriptorSetLayouts);
return 1;
}
2020-12-29 00:28:14 +00:00
static DescriptorSetCache* VULKAN_INTERNAL_CreateDescriptorSetCache(
VulkanRenderer *renderer,
VkDescriptorType descriptorType,
VkDescriptorSetLayout descriptorSetLayout,
uint32_t bindingCount
) {
DescriptorSetCache *descriptorSetCache = SDL_malloc(sizeof(DescriptorSetCache));
2020-12-29 04:09:31 +00:00
descriptorSetCache->lock = SDL_CreateMutex();
2020-12-29 00:28:14 +00:00
descriptorSetCache->descriptorSetLayout = descriptorSetLayout;
descriptorSetCache->bindingCount = bindingCount;
descriptorSetCache->descriptorType = descriptorType;
2020-12-29 00:28:14 +00:00
descriptorSetCache->descriptorPools = SDL_malloc(sizeof(VkDescriptorPool));
descriptorSetCache->descriptorPoolCount = 1;
descriptorSetCache->nextPoolSize = DESCRIPTOR_POOL_STARTING_SIZE * 2;
2020-12-31 07:02:12 +00:00
VULKAN_INTERNAL_CreateDescriptorPool(
renderer,
descriptorType,
DESCRIPTOR_POOL_STARTING_SIZE,
DESCRIPTOR_POOL_STARTING_SIZE * bindingCount,
&descriptorSetCache->descriptorPools[0]
);
2020-12-31 07:02:12 +00:00
descriptorSetCache->inactiveDescriptorSetCapacity = DESCRIPTOR_POOL_STARTING_SIZE;
descriptorSetCache->inactiveDescriptorSetCount = DESCRIPTOR_POOL_STARTING_SIZE;
descriptorSetCache->inactiveDescriptorSets = SDL_malloc(
sizeof(VkDescriptorSet) * DESCRIPTOR_POOL_STARTING_SIZE
);
2020-12-29 00:56:49 +00:00
VULKAN_INTERNAL_AllocateDescriptorSets(
renderer,
descriptorSetCache->descriptorPools[0],
descriptorSetCache->descriptorSetLayout,
DESCRIPTOR_POOL_STARTING_SIZE,
descriptorSetCache->inactiveDescriptorSets
);
2020-12-29 00:42:51 +00:00
return descriptorSetCache;
}
2020-12-29 01:35:18 +00:00
static VkDescriptorSetLayout VULKAN_INTERNAL_FetchDescriptorSetLayout(
VulkanRenderer *renderer,
VkDescriptorType descriptorType,
uint32_t bindingCount,
VkShaderStageFlagBits shaderStageFlagBit
) {
DescriptorSetLayoutHash descriptorSetLayoutHash;
VkDescriptorSetLayout descriptorSetLayout;
2020-12-29 03:44:34 +00:00
VkDescriptorSetLayoutBinding setLayoutBindings[MAX_TEXTURE_SAMPLERS];
VkDescriptorSetLayoutCreateInfo setLayoutCreateInfo;
2020-12-29 00:28:14 +00:00
VkResult vulkanResult;
uint32_t i;
2020-12-29 00:28:14 +00:00
if (bindingCount == 0)
2020-12-29 00:28:14 +00:00
{
if (shaderStageFlagBit == VK_SHADER_STAGE_VERTEX_BIT)
2020-12-29 00:28:14 +00:00
{
return renderer->emptyVertexSamplerLayout;
}
else if (shaderStageFlagBit == VK_SHADER_STAGE_FRAGMENT_BIT)
{
return renderer->emptyFragmentSamplerLayout;
}
else if (shaderStageFlagBit == VK_SHADER_STAGE_COMPUTE_BIT)
{
if (descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER)
2020-12-29 00:28:14 +00:00
{
return renderer->emptyComputeBufferDescriptorSetLayout;
2020-12-29 00:28:14 +00:00
}
else if (descriptorType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE)
{
return renderer->emptyComputeImageDescriptorSetLayout;
}
else
{
Refresh_LogError("Invalid descriptor type for compute shader: ", descriptorType);
return NULL_DESC_LAYOUT;
}
}
else
{
Refresh_LogError("Invalid shader stage flag bit: ", shaderStageFlagBit);
return NULL_DESC_LAYOUT;
2020-12-29 00:28:14 +00:00
}
}
descriptorSetLayoutHash.descriptorType = descriptorType;
descriptorSetLayoutHash.bindingCount = bindingCount;
descriptorSetLayoutHash.stageFlag = shaderStageFlagBit;
2020-12-29 00:28:14 +00:00
descriptorSetLayout = DescriptorSetLayoutHashTable_Fetch(
&renderer->descriptorSetLayoutHashTable,
descriptorSetLayoutHash
);
2020-12-29 00:28:14 +00:00
if (descriptorSetLayout != VK_NULL_HANDLE)
{
return descriptorSetLayout;
2020-12-29 00:28:14 +00:00
}
for (i = 0; i < bindingCount; i += 1)
2020-12-29 00:28:14 +00:00
{
setLayoutBindings[i].binding = i;
setLayoutBindings[i].descriptorCount = 1;
setLayoutBindings[i].descriptorType = descriptorType;
setLayoutBindings[i].stageFlags = shaderStageFlagBit;
setLayoutBindings[i].pImmutableSamplers = NULL;
2020-12-29 00:28:14 +00:00
}
setLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
setLayoutCreateInfo.pNext = NULL;
setLayoutCreateInfo.flags = 0;
setLayoutCreateInfo.bindingCount = bindingCount;
setLayoutCreateInfo.pBindings = setLayoutBindings;
2020-12-21 23:44:43 +00:00
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&descriptorSetLayout
);
2020-12-21 23:44:43 +00:00
if (vulkanResult != VK_SUCCESS)
2020-12-21 23:44:43 +00:00
{
LogVulkanResultAsError("vkCreateDescriptorSetLayout", vulkanResult);
return NULL_DESC_LAYOUT;
2020-12-21 23:44:43 +00:00
}
DescriptorSetLayoutHashTable_Insert(
&renderer->descriptorSetLayoutHashTable,
descriptorSetLayoutHash,
descriptorSetLayout
);
2020-12-21 23:44:43 +00:00
return descriptorSetLayout;
2020-12-21 23:44:43 +00:00
}
static VulkanGraphicsPipelineLayout* VULKAN_INTERNAL_FetchGraphicsPipelineLayout(
2020-12-21 23:44:43 +00:00
VulkanRenderer *renderer,
uint32_t vertexSamplerBindingCount,
uint32_t fragmentSamplerBindingCount
2020-12-21 23:44:43 +00:00
) {
VkDescriptorSetLayout setLayouts[4];
2020-12-21 23:44:43 +00:00
GraphicsPipelineLayoutHash pipelineLayoutHash;
VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo;
VkResult vulkanResult;
VulkanGraphicsPipelineLayout *vulkanGraphicsPipelineLayout;
pipelineLayoutHash.vertexSamplerLayout = VULKAN_INTERNAL_FetchDescriptorSetLayout(
renderer,
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
vertexSamplerBindingCount,
VK_SHADER_STAGE_VERTEX_BIT
2020-12-21 23:44:43 +00:00
);
pipelineLayoutHash.fragmentSamplerLayout = VULKAN_INTERNAL_FetchDescriptorSetLayout(
renderer,
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
fragmentSamplerBindingCount,
VK_SHADER_STAGE_FRAGMENT_BIT
);
2020-12-21 23:44:43 +00:00
pipelineLayoutHash.vertexUniformLayout = renderer->vertexUniformDescriptorSetLayout;
pipelineLayoutHash.fragmentUniformLayout = renderer->fragmentUniformDescriptorSetLayout;
vulkanGraphicsPipelineLayout = GraphicsPipelineLayoutHashArray_Fetch(
&renderer->graphicsPipelineLayoutHashTable,
pipelineLayoutHash
2020-12-21 23:44:43 +00:00
);
if (vulkanGraphicsPipelineLayout != NULL)
2020-12-21 23:44:43 +00:00
{
return vulkanGraphicsPipelineLayout;
}
2020-12-21 23:44:43 +00:00
vulkanGraphicsPipelineLayout = SDL_malloc(sizeof(VulkanGraphicsPipelineLayout));
2020-12-21 23:44:43 +00:00
setLayouts[0] = pipelineLayoutHash.vertexSamplerLayout;
setLayouts[1] = pipelineLayoutHash.fragmentSamplerLayout;
setLayouts[2] = renderer->vertexUniformDescriptorSetLayout;
setLayouts[3] = renderer->fragmentUniformDescriptorSetLayout;
2020-12-21 23:44:43 +00:00
pipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
pipelineLayoutCreateInfo.pNext = NULL;
pipelineLayoutCreateInfo.flags = 0;
pipelineLayoutCreateInfo.setLayoutCount = 4;
pipelineLayoutCreateInfo.pSetLayouts = setLayouts;
pipelineLayoutCreateInfo.pushConstantRangeCount = 0;
pipelineLayoutCreateInfo.pPushConstantRanges = NULL;
2020-12-21 23:44:43 +00:00
vulkanResult = renderer->vkCreatePipelineLayout(
renderer->logicalDevice,
&pipelineLayoutCreateInfo,
NULL,
&vulkanGraphicsPipelineLayout->pipelineLayout
2020-12-21 23:44:43 +00:00
);
if (vulkanResult != VK_SUCCESS)
2020-12-21 23:44:43 +00:00
{
LogVulkanResultAsError("vkCreatePipelineLayout", vulkanResult);
return NULL;
}
2020-12-21 23:44:43 +00:00
GraphicsPipelineLayoutHashArray_Insert(
&renderer->graphicsPipelineLayoutHashTable,
pipelineLayoutHash,
vulkanGraphicsPipelineLayout
);
2020-12-21 23:44:43 +00:00
/* If the binding count is 0
* we can just bind the same descriptor set
* so no cache is needed
*/
2020-12-21 23:44:43 +00:00
if (vertexSamplerBindingCount == 0)
{
vulkanGraphicsPipelineLayout->vertexSamplerDescriptorSetCache = NULL;
}
else
{
vulkanGraphicsPipelineLayout->vertexSamplerDescriptorSetCache =
VULKAN_INTERNAL_CreateDescriptorSetCache(
renderer,
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
pipelineLayoutHash.vertexSamplerLayout,
vertexSamplerBindingCount
);
2020-12-21 23:44:43 +00:00
}
if (fragmentSamplerBindingCount == 0)
2020-12-21 23:44:43 +00:00
{
vulkanGraphicsPipelineLayout->fragmentSamplerDescriptorSetCache = NULL;
}
else
{
vulkanGraphicsPipelineLayout->fragmentSamplerDescriptorSetCache =
VULKAN_INTERNAL_CreateDescriptorSetCache(
renderer,
VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
pipelineLayoutHash.fragmentSamplerLayout,
fragmentSamplerBindingCount
);
2020-12-21 23:44:43 +00:00
}
return vulkanGraphicsPipelineLayout;
2020-12-21 23:44:43 +00:00
}
/* Data Buffer */
2020-12-21 23:44:43 +00:00
static VulkanBuffer* VULKAN_INTERNAL_CreateBuffer(
VulkanRenderer *renderer,
VkDeviceSize size,
VulkanResourceAccessType resourceAccessType,
VkBufferUsageFlags usage
2020-12-21 23:44:43 +00:00
) {
VulkanBuffer* buffer;
2020-12-21 23:44:43 +00:00
VkResult vulkanResult;
VkBufferCreateInfo bufferCreateInfo;
uint8_t findMemoryResult;
2020-12-21 23:44:43 +00:00
buffer = SDL_malloc(sizeof(VulkanBuffer));
2020-12-21 23:44:43 +00:00
buffer->size = size;
buffer->resourceAccessType = resourceAccessType;
buffer->usage = usage;
2020-12-21 23:44:43 +00:00
bufferCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
bufferCreateInfo.pNext = NULL;
bufferCreateInfo.flags = 0;
bufferCreateInfo.size = size;
bufferCreateInfo.usage = usage;
bufferCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
bufferCreateInfo.queueFamilyIndexCount = 1;
bufferCreateInfo.pQueueFamilyIndices = &renderer->queueFamilyIndices.graphicsFamily;
2020-12-21 23:44:43 +00:00
vulkanResult = renderer->vkCreateBuffer(
renderer->logicalDevice,
&bufferCreateInfo,
NULL,
&buffer->buffer
2020-12-21 23:44:43 +00:00
);
if (vulkanResult != VK_SUCCESS)
2020-12-21 23:44:43 +00:00
{
SDL_free(buffer);
LogVulkanResultAsError("vkCreateBuffer", vulkanResult);
Refresh_LogError("Failed to create VkBuffer");
return NULL;
2020-12-21 23:44:43 +00:00
}
findMemoryResult = VULKAN_INTERNAL_FindAvailableBufferMemory(
renderer,
buffer->buffer,
&buffer->allocation,
&buffer->offset,
&buffer->memorySize
);
2020-12-21 23:44:43 +00:00
/* We're out of available memory */
if (findMemoryResult == 2)
2020-12-21 23:44:43 +00:00
{
Refresh_LogWarn("Out of buffer memory!");
return NULL;
2020-12-21 23:44:43 +00:00
}
else if (findMemoryResult == 0)
2020-12-21 23:44:43 +00:00
{
Refresh_LogError("Failed to find buffer memory!");
return NULL;
2020-12-21 23:44:43 +00:00
}
SDL_LockMutex(buffer->allocation->memoryLock);
2020-12-21 23:44:43 +00:00
vulkanResult = renderer->vkBindBufferMemory(
2020-12-21 23:44:43 +00:00
renderer->logicalDevice,
buffer->buffer,
buffer->allocation->memory,
buffer->offset
2020-12-21 23:44:43 +00:00
);
SDL_UnlockMutex(buffer->allocation->memoryLock);
2020-12-21 23:44:43 +00:00
if (vulkanResult != VK_SUCCESS)
{
Refresh_LogError("Failed to bind buffer memory!");
return NULL;
2020-12-21 23:44:43 +00:00
}
buffer->resourceAccessType = resourceAccessType;
SDL_AtomicSet(&buffer->referenceCount, 0);
return buffer;
}
/* Uniform buffer functions */
2022-01-13 07:09:06 +00:00
static uint8_t VULKAN_INTERNAL_AddUniformDescriptorPool(
VulkanRenderer *renderer,
VulkanUniformDescriptorPool *vulkanUniformDescriptorPool
) {
vulkanUniformDescriptorPool->descriptorPools = SDL_realloc(
vulkanUniformDescriptorPool->descriptorPools,
sizeof(VkDescriptorPool) * (vulkanUniformDescriptorPool->descriptorPoolCount + 1)
2020-12-21 23:44:43 +00:00
);
2022-01-13 07:09:06 +00:00
if (!VULKAN_INTERNAL_CreateDescriptorPool(
renderer,
VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC,
DESCRIPTOR_POOL_STARTING_SIZE,
DESCRIPTOR_POOL_STARTING_SIZE,
&vulkanUniformDescriptorPool->descriptorPools[vulkanUniformDescriptorPool->descriptorPoolCount]
2022-01-13 07:09:06 +00:00
)) {
Refresh_LogError("Failed to create descriptor pool!");
return 0;
}
vulkanUniformDescriptorPool->descriptorPoolCount += 1;
vulkanUniformDescriptorPool->availableDescriptorSetCount += DESCRIPTOR_POOL_STARTING_SIZE;
2022-01-13 07:09:06 +00:00
return 1;
}
static VulkanUniformBufferPool* VULKAN_INTERNAL_CreateUniformBufferPool(
VulkanRenderer *renderer,
VulkanUniformBufferType uniformBufferType
) {
VulkanUniformBufferPool* uniformBufferPool = SDL_malloc(sizeof(VulkanUniformBufferPool));
uniformBufferPool->type = uniformBufferType;
uniformBufferPool->lock = SDL_CreateMutex();
uniformBufferPool->availableBufferCapacity = 16;
uniformBufferPool->availableBufferCount = 0;
uniformBufferPool->availableBuffers = SDL_malloc(uniformBufferPool->availableBufferCapacity * sizeof(VulkanUniformBuffer*));
uniformBufferPool->descriptorPool.availableDescriptorSetCount = 0;
uniformBufferPool->descriptorPool.descriptorPoolCount = 0;
uniformBufferPool->descriptorPool.descriptorPools = NULL;
VULKAN_INTERNAL_AddUniformDescriptorPool(renderer, &uniformBufferPool->descriptorPool);
return uniformBufferPool;
}
static void VULKAN_INTERNAL_BindUniformBuffer(
VulkanCommandBuffer *commandBuffer,
VulkanUniformBuffer *uniformBuffer
) {
if (commandBuffer->boundUniformBufferCount >= commandBuffer->boundUniformBufferCapacity)
2020-12-21 23:44:43 +00:00
{
commandBuffer->boundUniformBufferCapacity *= 2;
commandBuffer->boundUniformBuffers = SDL_realloc(
commandBuffer->boundUniformBuffers,
sizeof(VulkanUniformBuffer*) * commandBuffer->boundUniformBufferCapacity
2020-12-21 23:44:43 +00:00
);
}
2020-12-21 23:44:43 +00:00
commandBuffer->boundUniformBuffers[commandBuffer->boundUniformBufferCount] = uniformBuffer;
commandBuffer->boundUniformBufferCount += 1;
2020-12-19 00:39:03 +00:00
}
2022-01-13 07:09:06 +00:00
static uint8_t VULKAN_INTERNAL_CreateUniformBuffer(
VulkanRenderer *renderer,
VulkanUniformBufferPool *bufferPool
) {
VulkanResourceAccessType resourceAccessType;
VkDescriptorSetLayout descriptorSetLayout;
2020-12-19 04:08:07 +00:00
if (bufferPool->type == UNIFORM_BUFFER_VERTEX)
{
resourceAccessType = RESOURCE_ACCESS_VERTEX_SHADER_READ_UNIFORM_BUFFER;
descriptorSetLayout = renderer->vertexUniformDescriptorSetLayout;
}
else if (bufferPool->type == UNIFORM_BUFFER_FRAGMENT)
{
resourceAccessType = RESOURCE_ACCESS_FRAGMENT_SHADER_READ_UNIFORM_BUFFER;
descriptorSetLayout = renderer->fragmentUniformDescriptorSetLayout;
}
else if (bufferPool->type == UNIFORM_BUFFER_COMPUTE)
{
resourceAccessType = RESOURCE_ACCESS_COMPUTE_SHADER_READ_UNIFORM_BUFFER;
descriptorSetLayout = renderer->computeUniformDescriptorSetLayout;
}
else
{
Refresh_LogError("Unrecognized uniform buffer type!");
2022-01-13 07:09:06 +00:00
return 0;
}
2020-12-19 04:08:07 +00:00
VulkanUniformBuffer *buffer = SDL_malloc(sizeof(VulkanUniformBuffer));
buffer->pool = bufferPool;
buffer->vulkanBuffer = VULKAN_INTERNAL_CreateBuffer(
2020-12-21 23:44:43 +00:00
renderer,
UBO_BUFFER_SIZE,
resourceAccessType,
VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT
2020-12-21 23:44:43 +00:00
);
2022-01-13 07:09:06 +00:00
if (buffer->vulkanBuffer == NULL)
{
Refresh_LogError("Failed to create buffer for uniform buffer!");
return 0;
}
buffer->offset = 0;
2020-12-21 23:44:43 +00:00
/* Allocate a descriptor set for the uniform buffer */
2020-12-21 23:44:43 +00:00
if (bufferPool->descriptorPool.availableDescriptorSetCount == 0)
2020-12-19 04:08:07 +00:00
{
2022-01-13 07:09:06 +00:00
if (!VULKAN_INTERNAL_AddUniformDescriptorPool(
renderer,
&bufferPool->descriptorPool
2022-01-13 07:09:06 +00:00
)) {
Refresh_LogError("Failed to add uniform descriptor pool!");
return 0;
}
2020-12-19 04:08:07 +00:00
}
2022-01-13 07:09:06 +00:00
if (!VULKAN_INTERNAL_AllocateDescriptorSets(
renderer,
bufferPool->descriptorPool.descriptorPools[bufferPool->descriptorPool.descriptorPoolCount - 1],
descriptorSetLayout,
1,
&buffer->descriptorSet
2022-01-13 07:09:06 +00:00
)) {
Refresh_LogError("Failed to allocate uniform descriptor set!");
return 0;
}
2020-12-19 04:08:07 +00:00
bufferPool->descriptorPool.availableDescriptorSetCount -= 1;
if (bufferPool->availableBufferCount >= bufferPool->availableBufferCapacity)
2020-12-21 23:44:43 +00:00
{
bufferPool->availableBufferCapacity *= 2;
bufferPool->availableBuffers = SDL_realloc(
bufferPool->availableBuffers,
sizeof(VulkanUniformBuffer*) * bufferPool->availableBufferCapacity
);
2020-12-19 04:08:07 +00:00
}
bufferPool->availableBuffers[bufferPool->availableBufferCount] = buffer;
bufferPool->availableBufferCount += 1;
2022-01-13 07:09:06 +00:00
return 1;
2020-12-19 04:08:07 +00:00
}
2020-12-19 01:03:26 +00:00
static VulkanUniformBuffer* VULKAN_INTERNAL_CreateDummyUniformBuffer(
2020-12-19 01:03:26 +00:00
VulkanRenderer *renderer,
VulkanUniformBufferType uniformBufferType
2020-12-19 01:03:26 +00:00
) {
VulkanResourceAccessType resourceAccessType;
VkDescriptorSetLayout descriptorSetLayout;
VkWriteDescriptorSet writeDescriptorSet;
VkDescriptorBufferInfo descriptorBufferInfo;
2020-12-19 01:03:26 +00:00
if (uniformBufferType == UNIFORM_BUFFER_VERTEX)
{
resourceAccessType = RESOURCE_ACCESS_VERTEX_SHADER_READ_UNIFORM_BUFFER;
descriptorSetLayout = renderer->vertexUniformDescriptorSetLayout;
}
else if (uniformBufferType == UNIFORM_BUFFER_FRAGMENT)
{
resourceAccessType = RESOURCE_ACCESS_FRAGMENT_SHADER_READ_UNIFORM_BUFFER;
descriptorSetLayout = renderer->fragmentUniformDescriptorSetLayout;
}
else if (uniformBufferType == UNIFORM_BUFFER_COMPUTE)
{
resourceAccessType = RESOURCE_ACCESS_COMPUTE_SHADER_READ_UNIFORM_BUFFER;
descriptorSetLayout = renderer->computeUniformDescriptorSetLayout;
}
else
2020-12-19 01:03:26 +00:00
{
Refresh_LogError("Unrecognized uniform buffer type!");
return NULL;
}
2020-12-22 01:59:08 +00:00
VulkanUniformBuffer *buffer = SDL_malloc(sizeof(VulkanUniformBuffer));
2020-12-19 01:03:26 +00:00
buffer->vulkanBuffer = VULKAN_INTERNAL_CreateBuffer(
renderer,
UBO_BUFFER_SIZE,
resourceAccessType,
VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT
);
buffer->offset = 0;
2020-12-19 01:03:26 +00:00
/* Allocate a descriptor set for the uniform buffer */
2020-12-19 01:03:26 +00:00
VULKAN_INTERNAL_AllocateDescriptorSets(
renderer,
renderer->defaultDescriptorPool,
descriptorSetLayout,
1,
&buffer->descriptorSet
);
2020-12-19 01:03:26 +00:00
/* Update the descriptor set for the first and last time! */
2021-01-03 21:12:12 +00:00
descriptorBufferInfo.buffer = buffer->vulkanBuffer->buffer;
descriptorBufferInfo.offset = 0;
descriptorBufferInfo.range = VK_WHOLE_SIZE;
2020-12-19 01:03:26 +00:00
writeDescriptorSet.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
writeDescriptorSet.pNext = NULL;
writeDescriptorSet.descriptorCount = 1;
writeDescriptorSet.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
writeDescriptorSet.dstArrayElement = 0;
writeDescriptorSet.dstBinding = 0;
writeDescriptorSet.dstSet = buffer->descriptorSet;
writeDescriptorSet.pBufferInfo = &descriptorBufferInfo;
writeDescriptorSet.pImageInfo = NULL;
writeDescriptorSet.pTexelBufferView = NULL;
2021-01-03 21:12:12 +00:00
renderer->vkUpdateDescriptorSets(
renderer->logicalDevice,
1,
&writeDescriptorSet,
0,
NULL
);
2020-12-19 01:03:26 +00:00
buffer->pool = NULL; /* No pool because this is a dummy */
2020-12-19 01:03:26 +00:00
2022-01-02 22:35:57 +00:00
return buffer;
2020-12-19 01:03:26 +00:00
}
static void VULKAN_INTERNAL_DestroyUniformBufferPool(
2021-01-02 06:07:15 +00:00
VulkanRenderer *renderer,
VulkanUniformBufferPool *uniformBufferPool
2021-01-02 06:07:15 +00:00
) {
uint32_t i;
2020-12-17 04:04:47 +00:00
for (i = 0; i < uniformBufferPool->descriptorPool.descriptorPoolCount; i += 1)
2020-12-17 04:04:47 +00:00
{
renderer->vkDestroyDescriptorPool(
renderer->logicalDevice,
uniformBufferPool->descriptorPool.descriptorPools[i],
NULL
);
2020-12-17 04:04:47 +00:00
}
SDL_free(uniformBufferPool->descriptorPool.descriptorPools);
2020-12-17 04:04:47 +00:00
/* This is always destroyed after submissions, so all buffers are available */
for (i = 0; i < uniformBufferPool->availableBufferCount; i += 1)
2020-12-17 04:04:47 +00:00
{
VULKAN_INTERNAL_DestroyBuffer(renderer, uniformBufferPool->availableBuffers[i]->vulkanBuffer);
SDL_free(uniformBufferPool->availableBuffers[i]);
2020-12-17 04:04:47 +00:00
}
SDL_DestroyMutex(uniformBufferPool->lock);
SDL_free(uniformBufferPool->availableBuffers);
SDL_free(uniformBufferPool);
2020-12-17 04:04:47 +00:00
}
static VulkanUniformBuffer* VULKAN_INTERNAL_AcquireUniformBufferFromPool(
VulkanRenderer *renderer,
VulkanUniformBufferPool *bufferPool,
VkDeviceSize blockSize
2020-12-22 02:34:57 +00:00
) {
VkWriteDescriptorSet writeDescriptorSet;
VkDescriptorBufferInfo descriptorBufferInfo;
SDL_LockMutex(bufferPool->lock);
2020-12-22 02:34:57 +00:00
if (bufferPool->availableBufferCount == 0)
2020-12-22 02:34:57 +00:00
{
if (!VULKAN_INTERNAL_CreateUniformBuffer(renderer, bufferPool))
2022-01-13 07:09:06 +00:00
{
SDL_UnlockMutex(bufferPool->lock);
Refresh_LogError("Failed to create uniform buffer!");
return NULL;
}
2020-12-22 02:34:57 +00:00
}
VulkanUniformBuffer *uniformBuffer = bufferPool->availableBuffers[bufferPool->availableBufferCount - 1];
bufferPool->availableBufferCount -= 1;
2020-12-22 02:34:57 +00:00
SDL_UnlockMutex(bufferPool->lock);
uniformBuffer->offset = 0;
/* Update the descriptor set with the correct range */
descriptorBufferInfo.buffer = uniformBuffer->vulkanBuffer->buffer;
descriptorBufferInfo.offset = 0;
descriptorBufferInfo.range = blockSize;
writeDescriptorSet.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
writeDescriptorSet.pNext = NULL;
writeDescriptorSet.descriptorCount = 1;
writeDescriptorSet.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
writeDescriptorSet.dstArrayElement = 0;
writeDescriptorSet.dstBinding = 0;
writeDescriptorSet.dstSet = uniformBuffer->descriptorSet;
writeDescriptorSet.pBufferInfo = &descriptorBufferInfo;
writeDescriptorSet.pImageInfo = NULL;
writeDescriptorSet.pTexelBufferView = NULL;
renderer->vkUpdateDescriptorSets(
renderer->logicalDevice,
1,
&writeDescriptorSet,
0,
NULL
);
return uniformBuffer;
}
2020-12-17 04:04:47 +00:00
/* Swapchain */
static uint8_t VULKAN_INTERNAL_QuerySwapChainSupport(
VulkanRenderer *renderer,
VkPhysicalDevice physicalDevice,
VkSurfaceKHR surface,
uint32_t graphicsFamilyIndex,
SwapChainSupportDetails *outputDetails
) {
VkResult result;
uint32_t formatCount;
uint32_t presentModeCount;
VkBool32 supportsPresent;
if (graphicsFamilyIndex != UINT32_MAX)
{
renderer->vkGetPhysicalDeviceSurfaceSupportKHR(
physicalDevice,
graphicsFamilyIndex,
surface,
&supportsPresent
);
if (!supportsPresent)
{
Refresh_LogWarn("This surface does not support presenting!");
return 0;
}
}
result = renderer->vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
physicalDevice,
surface,
&outputDetails->capabilities
);
if (result != VK_SUCCESS)
{
Refresh_LogError(
"vkGetPhysicalDeviceSurfaceCapabilitiesKHR: %s",
VkErrorMessages(result)
);
return 0;
}
renderer->vkGetPhysicalDeviceSurfaceFormatsKHR(
physicalDevice,
surface,
&formatCount,
NULL
);
if (formatCount != 0)
{
outputDetails->formats = (VkSurfaceFormatKHR*) SDL_malloc(
sizeof(VkSurfaceFormatKHR) * formatCount
);
outputDetails->formatsLength = formatCount;
if (!outputDetails->formats)
{
SDL_OutOfMemory();
return 0;
}
result = renderer->vkGetPhysicalDeviceSurfaceFormatsKHR(
physicalDevice,
surface,
&formatCount,
outputDetails->formats
);
if (result != VK_SUCCESS)
{
Refresh_LogError(
"vkGetPhysicalDeviceSurfaceFormatsKHR: %s",
VkErrorMessages(result)
);
SDL_free(outputDetails->formats);
return 0;
}
}
renderer->vkGetPhysicalDeviceSurfacePresentModesKHR(
physicalDevice,
surface,
&presentModeCount,
NULL
);
if (presentModeCount != 0)
{
outputDetails->presentModes = (VkPresentModeKHR*) SDL_malloc(
sizeof(VkPresentModeKHR) * presentModeCount
);
outputDetails->presentModesLength = presentModeCount;
if (!outputDetails->presentModes)
{
SDL_OutOfMemory();
return 0;
}
result = renderer->vkGetPhysicalDeviceSurfacePresentModesKHR(
physicalDevice,
surface,
&presentModeCount,
outputDetails->presentModes
);
if (result != VK_SUCCESS)
{
Refresh_LogError(
"vkGetPhysicalDeviceSurfacePresentModesKHR: %s",
VkErrorMessages(result)
);
SDL_free(outputDetails->formats);
SDL_free(outputDetails->presentModes);
return 0;
}
}
return 1;
2020-12-17 01:23:49 +00:00
}
static uint8_t VULKAN_INTERNAL_ChooseSwapSurfaceFormat(
VkFormat desiredFormat,
VkSurfaceFormatKHR *availableFormats,
uint32_t availableFormatsLength,
VkSurfaceFormatKHR *outputFormat
2020-12-17 01:23:49 +00:00
) {
uint32_t i;
for (i = 0; i < availableFormatsLength; i += 1)
{
if ( availableFormats[i].format == desiredFormat &&
availableFormats[i].colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR )
{
*outputFormat = availableFormats[i];
return 1;
}
}
2021-01-02 06:07:15 +00:00
return 0;
}
2020-12-29 06:19:46 +00:00
static uint8_t VULKAN_INTERNAL_ChooseSwapPresentMode(
Refresh_PresentMode desiredPresentInterval,
VkPresentModeKHR *availablePresentModes,
uint32_t availablePresentModesLength,
VkPresentModeKHR *outputPresentMode
) {
#define CHECK_MODE(m) \
for (i = 0; i < availablePresentModesLength; i += 1) \
{ \
if (availablePresentModes[i] == m) \
{ \
*outputPresentMode = m; \
return 1; \
} \
} \
2020-12-29 06:19:46 +00:00
uint32_t i;
2022-02-25 21:42:11 +00:00
if (desiredPresentInterval == REFRESH_PRESENTMODE_IMMEDIATE)
{
CHECK_MODE(VK_PRESENT_MODE_IMMEDIATE_KHR)
}
2022-02-25 21:42:11 +00:00
else if (desiredPresentInterval == REFRESH_PRESENTMODE_MAILBOX)
{
CHECK_MODE(VK_PRESENT_MODE_MAILBOX_KHR)
}
else if (desiredPresentInterval == REFRESH_PRESENTMODE_FIFO)
{
CHECK_MODE(VK_PRESENT_MODE_FIFO_KHR)
}
else if (desiredPresentInterval == REFRESH_PRESENTMODE_FIFO_RELAXED)
{
CHECK_MODE(VK_PRESENT_MODE_FIFO_RELAXED_KHR)
}
else
{
Refresh_LogError(
"Unrecognized PresentInterval: %d",
desiredPresentInterval
);
return 0;
}
#undef CHECK_MODE
*outputPresentMode = VK_PRESENT_MODE_FIFO_KHR;
return 1;
}
static uint8_t VULKAN_INTERNAL_CreateSwapchain(
VulkanRenderer *renderer,
WindowData *windowData
) {
VkResult vulkanResult;
VulkanSwapchainData *swapchainData;
VkSwapchainCreateInfoKHR swapchainCreateInfo;
VkImage *swapchainImages;
2022-02-10 05:42:19 +00:00
VkImageViewCreateInfo imageViewCreateInfo;
VkSemaphoreCreateInfo semaphoreCreateInfo;
SwapChainSupportDetails swapchainSupportDetails;
int32_t drawableWidth, drawableHeight;
uint32_t i;
swapchainData = SDL_malloc(sizeof(VulkanSwapchainData));
/* Each swapchain must have its own surface. */
if (!SDL_Vulkan_CreateSurface(
(SDL_Window*) windowData->windowHandle,
renderer->instance,
&swapchainData->surface
)) {
SDL_free(swapchainData);
Refresh_LogError(
"SDL_Vulkan_CreateSurface failed: %s",
SDL_GetError()
);
return 0;
}
if (!VULKAN_INTERNAL_QuerySwapChainSupport(
renderer,
renderer->physicalDevice,
swapchainData->surface,
renderer->queueFamilyIndices.graphicsFamily,
&swapchainSupportDetails
)) {
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
NULL
);
if (swapchainSupportDetails.formatsLength > 0)
{
SDL_free(swapchainSupportDetails.formats);
}
if (swapchainSupportDetails.presentModesLength > 0)
{
SDL_free(swapchainSupportDetails.presentModes);
}
SDL_free(swapchainData);
Refresh_LogError("Device does not support swap chain creation");
return 0;
}
if ( swapchainSupportDetails.capabilities.currentExtent.width == 0 ||
swapchainSupportDetails.capabilities.currentExtent.height == 0)
{
/* Not an error, just minimize behavior! */
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
NULL
);
if (swapchainSupportDetails.formatsLength > 0)
{
SDL_free(swapchainSupportDetails.formats);
}
if (swapchainSupportDetails.presentModesLength > 0)
{
SDL_free(swapchainSupportDetails.presentModes);
}
SDL_free(swapchainData);
return 0;
}
swapchainData->swapchainFormat = VK_FORMAT_R8G8B8A8_UNORM;
swapchainData->swapchainSwizzle.r = VK_COMPONENT_SWIZZLE_IDENTITY;
swapchainData->swapchainSwizzle.g = VK_COMPONENT_SWIZZLE_IDENTITY;
swapchainData->swapchainSwizzle.b = VK_COMPONENT_SWIZZLE_IDENTITY;
swapchainData->swapchainSwizzle.a = VK_COMPONENT_SWIZZLE_IDENTITY;
if (!VULKAN_INTERNAL_ChooseSwapSurfaceFormat(
swapchainData->swapchainFormat,
swapchainSupportDetails.formats,
swapchainSupportDetails.formatsLength,
&swapchainData->surfaceFormat
)) {
swapchainData->swapchainFormat = VK_FORMAT_B8G8R8A8_UNORM;
swapchainData->swapchainSwizzle.r = VK_COMPONENT_SWIZZLE_B;
swapchainData->swapchainSwizzle.g = VK_COMPONENT_SWIZZLE_G;
swapchainData->swapchainSwizzle.b = VK_COMPONENT_SWIZZLE_R;
swapchainData->swapchainSwizzle.a = VK_COMPONENT_SWIZZLE_A;
if (!VULKAN_INTERNAL_ChooseSwapSurfaceFormat(
swapchainData->swapchainFormat,
swapchainSupportDetails.formats,
swapchainSupportDetails.formatsLength,
&swapchainData->surfaceFormat
)) {
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
NULL
);
if (swapchainSupportDetails.formatsLength > 0)
{
SDL_free(swapchainSupportDetails.formats);
}
if (swapchainSupportDetails.presentModesLength > 0)
{
SDL_free(swapchainSupportDetails.presentModes);
}
SDL_free(swapchainData);
Refresh_LogError("Device does not support swap chain format");
return 0;
}
}
if (!VULKAN_INTERNAL_ChooseSwapPresentMode(
windowData->preferredPresentMode,
swapchainSupportDetails.presentModes,
swapchainSupportDetails.presentModesLength,
&swapchainData->presentMode
)) {
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
NULL
);
if (swapchainSupportDetails.formatsLength > 0)
{
SDL_free(swapchainSupportDetails.formats);
}
if (swapchainSupportDetails.presentModesLength > 0)
{
SDL_free(swapchainSupportDetails.presentModes);
}
SDL_free(swapchainData);
Refresh_LogError("Device does not support swap chain present mode");
return 0;
}
SDL_Vulkan_GetDrawableSize(
(SDL_Window*) windowData->windowHandle,
&drawableWidth,
&drawableHeight
);
if ( drawableWidth < swapchainSupportDetails.capabilities.minImageExtent.width ||
drawableWidth > swapchainSupportDetails.capabilities.maxImageExtent.width ||
drawableHeight < swapchainSupportDetails.capabilities.minImageExtent.height ||
drawableHeight > swapchainSupportDetails.capabilities.maxImageExtent.height )
{
if (swapchainSupportDetails.capabilities.currentExtent.width != UINT32_MAX)
{
drawableWidth = VULKAN_INTERNAL_clamp(
drawableWidth,
swapchainSupportDetails.capabilities.minImageExtent.width,
swapchainSupportDetails.capabilities.maxImageExtent.width
);
drawableHeight = VULKAN_INTERNAL_clamp(
drawableHeight,
swapchainSupportDetails.capabilities.minImageExtent.height,
swapchainSupportDetails.capabilities.maxImageExtent.height
);
}
else
{
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
NULL
);
if (swapchainSupportDetails.formatsLength > 0)
{
SDL_free(swapchainSupportDetails.formats);
}
if (swapchainSupportDetails.presentModesLength > 0)
{
SDL_free(swapchainSupportDetails.presentModes);
}
SDL_free(swapchainData);
Refresh_LogError("No fallback swapchain size available!");
return 0;
}
}
swapchainData->extent.width = drawableWidth;
swapchainData->extent.height = drawableHeight;
swapchainData->imageCount = swapchainSupportDetails.capabilities.minImageCount + 1;
if ( swapchainSupportDetails.capabilities.maxImageCount > 0 &&
swapchainData->imageCount > swapchainSupportDetails.capabilities.maxImageCount )
{
swapchainData->imageCount = swapchainSupportDetails.capabilities.maxImageCount;
}
if (swapchainData->presentMode == VK_PRESENT_MODE_MAILBOX_KHR)
{
/* Required for proper triple-buffering.
* Note that this is below the above maxImageCount check!
* If the driver advertises MAILBOX but does not support 3 swap
* images, it's not real mailbox support, so let it fail hard.
* -flibit
*/
swapchainData->imageCount = SDL_max(swapchainData->imageCount, 3);
}
swapchainCreateInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
swapchainCreateInfo.pNext = NULL;
swapchainCreateInfo.flags = 0;
swapchainCreateInfo.surface = swapchainData->surface;
swapchainCreateInfo.minImageCount = swapchainData->imageCount;
swapchainCreateInfo.imageFormat = swapchainData->surfaceFormat.format;
swapchainCreateInfo.imageColorSpace = swapchainData->surfaceFormat.colorSpace;
swapchainCreateInfo.imageExtent = swapchainData->extent;
swapchainCreateInfo.imageArrayLayers = 1;
swapchainCreateInfo.imageUsage =
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT |
VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
VK_IMAGE_USAGE_TRANSFER_DST_BIT;
swapchainCreateInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
swapchainCreateInfo.queueFamilyIndexCount = 0;
swapchainCreateInfo.pQueueFamilyIndices = NULL;
swapchainCreateInfo.preTransform = swapchainSupportDetails.capabilities.currentTransform;
swapchainCreateInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
swapchainCreateInfo.presentMode = swapchainData->presentMode;
swapchainCreateInfo.clipped = VK_TRUE;
swapchainCreateInfo.oldSwapchain = VK_NULL_HANDLE;
vulkanResult = renderer->vkCreateSwapchainKHR(
renderer->logicalDevice,
&swapchainCreateInfo,
NULL,
&swapchainData->swapchain
);
if (swapchainSupportDetails.formatsLength > 0)
{
SDL_free(swapchainSupportDetails.formats);
}
if (swapchainSupportDetails.presentModesLength > 0)
{
SDL_free(swapchainSupportDetails.presentModes);
}
if (vulkanResult != VK_SUCCESS)
{
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
NULL
);
SDL_free(swapchainData);
LogVulkanResultAsError("vkCreateSwapchainKHR", vulkanResult);
return 0;
}
renderer->vkGetSwapchainImagesKHR(
renderer->logicalDevice,
swapchainData->swapchain,
&swapchainData->imageCount,
NULL
2020-12-29 06:19:46 +00:00
);
swapchainData->textures = SDL_malloc(
sizeof(VulkanTexture) * swapchainData->imageCount
);
2020-12-29 06:19:46 +00:00
if (!swapchainData->textures)
{
SDL_OutOfMemory();
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
NULL
);
SDL_free(swapchainData);
return 0;
}
2020-12-29 06:19:46 +00:00
swapchainImages = SDL_stack_alloc(VkImage, swapchainData->imageCount);
2020-12-29 06:19:46 +00:00
renderer->vkGetSwapchainImagesKHR(
renderer->logicalDevice,
swapchainData->swapchain,
&swapchainData->imageCount,
swapchainImages
);
2022-02-10 05:42:19 +00:00
imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
imageViewCreateInfo.pNext = NULL;
imageViewCreateInfo.flags = 0;
imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
imageViewCreateInfo.format = swapchainData->surfaceFormat.format;
imageViewCreateInfo.components = swapchainData->swapchainSwizzle;
imageViewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
imageViewCreateInfo.subresourceRange.baseMipLevel = 0;
imageViewCreateInfo.subresourceRange.levelCount = 1;
imageViewCreateInfo.subresourceRange.baseArrayLayer = 0;
imageViewCreateInfo.subresourceRange.layerCount = 1;
for (i = 0; i < swapchainData->imageCount; i += 1)
2020-12-29 06:19:46 +00:00
{
swapchainData->textures[i].image = swapchainImages[i];
imageViewCreateInfo.image = swapchainImages[i];
2020-12-29 06:19:46 +00:00
vulkanResult = renderer->vkCreateImageView(
renderer->logicalDevice,
2022-02-10 05:42:19 +00:00
&imageViewCreateInfo,
NULL,
&swapchainData->textures[i].view
);
2020-12-29 06:19:46 +00:00
if (vulkanResult != VK_SUCCESS)
2020-12-29 06:19:46 +00:00
{
renderer->vkDestroySurfaceKHR(
renderer->instance,
swapchainData->surface,
NULL
);
SDL_stack_free(swapchainImages);
SDL_free(swapchainData->textures);
SDL_free(swapchainData);
LogVulkanResultAsError("vkCreateImageView", vulkanResult);
return 0;
2020-12-29 06:19:46 +00:00
}
swapchainData->textures[i].resourceAccessType = RESOURCE_ACCESS_NONE;
/* Swapchain memory is managed by the driver */
swapchainData->textures[i].allocation = NULL;
swapchainData->textures[i].offset = 0;
swapchainData->textures[i].memorySize = 0;
swapchainData->textures[i].dimensions = swapchainData->extent;
swapchainData->textures[i].format = swapchainData->swapchainFormat;
swapchainData->textures[i].is3D = 0;
swapchainData->textures[i].isCube = 0;
swapchainData->textures[i].layerCount = 1;
swapchainData->textures[i].levelCount = 1;
swapchainData->textures[i].sampleCount = REFRESH_SAMPLECOUNT_1;
swapchainData->textures[i].usageFlags =
VK_IMAGE_USAGE_TRANSFER_DST_BIT |
VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
swapchainData->textures[i].aspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
swapchainData->textures[i].resourceAccessType = RESOURCE_ACCESS_NONE;
swapchainData->textures[i].msaaTex = NULL;
2020-12-29 06:19:46 +00:00
}
SDL_stack_free(swapchainImages);
2022-02-10 05:42:19 +00:00
semaphoreCreateInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
semaphoreCreateInfo.pNext = NULL;
semaphoreCreateInfo.flags = 0;
renderer->vkCreateSemaphore(
renderer->logicalDevice,
&semaphoreCreateInfo,
NULL,
&swapchainData->imageAvailableSemaphore
);
renderer->vkCreateSemaphore(
renderer->logicalDevice,
&semaphoreCreateInfo,
NULL,
&swapchainData->renderFinishedSemaphore
);
windowData->swapchainData = swapchainData;
return 1;
2020-12-17 01:23:49 +00:00
}
static void VULKAN_INTERNAL_RecreateSwapchain(
VulkanRenderer* renderer,
WindowData *windowData
) {
VULKAN_Wait((Refresh_Renderer*) renderer);
VULKAN_INTERNAL_DestroySwapchain(renderer, windowData);
VULKAN_INTERNAL_CreateSwapchain(renderer, windowData);
2020-12-17 01:23:49 +00:00
}
/* Command Buffers */
static void VULKAN_INTERNAL_BeginCommandBuffer(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer
2020-12-17 01:23:49 +00:00
) {
VkCommandBufferBeginInfo beginInfo;
VkResult result;
2021-01-02 21:31:17 +00:00
beginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
beginInfo.pNext = NULL;
beginInfo.flags = 0;
beginInfo.pInheritanceInfo = NULL;
beginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
result = renderer->vkBeginCommandBuffer(
commandBuffer->commandBuffer,
&beginInfo
);
if (result != VK_SUCCESS)
{
LogVulkanResultAsError("vkBeginCommandBuffer", result);
}
}
2020-12-20 09:29:15 +00:00
static void VULKAN_INTERNAL_EndCommandBuffer(
VulkanRenderer* renderer,
VulkanCommandBuffer *commandBuffer
) {
VkResult result;
2020-12-20 09:29:15 +00:00
/* Compute pipelines are not explicitly unbound so we have to clean up here */
if ( commandBuffer->computeUniformBuffer != renderer->dummyComputeUniformBuffer &&
commandBuffer->computeUniformBuffer != NULL
) {
VULKAN_INTERNAL_BindUniformBuffer(
commandBuffer,
commandBuffer->computeUniformBuffer
);
}
commandBuffer->computeUniformBuffer = NULL;
commandBuffer->currentComputePipeline = NULL;
2020-12-20 09:29:15 +00:00
result = renderer->vkEndCommandBuffer(
commandBuffer->commandBuffer
2021-01-02 06:07:15 +00:00
);
2020-12-20 09:29:15 +00:00
if (result != VK_SUCCESS)
{
LogVulkanResultAsError("vkEndCommandBuffer", result);
}
2020-12-17 01:23:49 +00:00
}
static void VULKAN_DestroyDevice(
2022-02-25 21:42:11 +00:00
Refresh_Device *device
2020-12-30 01:31:39 +00:00
) {
VulkanRenderer* renderer = (VulkanRenderer*) device->driverData;
CommandPoolHashArray commandPoolHashArray;
GraphicsPipelineLayoutHashArray graphicsPipelineLayoutHashArray;
ComputePipelineLayoutHashArray computePipelineLayoutHashArray;
VulkanMemorySubAllocator *allocator;
int32_t i, j, k;
2020-12-31 06:28:37 +00:00
VULKAN_Wait(device->driverData);
2020-12-31 00:47:13 +00:00
for (i = renderer->claimedWindowCount - 1; i >= 0; i -= 1)
{
VULKAN_UnclaimWindow(device->driverData, renderer->claimedWindows[i]->windowHandle);
}
SDL_free(renderer->claimedWindows);
VULKAN_Wait(device->driverData);
SDL_free(renderer->submittedCommandBuffers);
2022-01-13 20:03:44 +00:00
VULKAN_INTERNAL_DestroyBuffer(renderer, renderer->dummyVertexUniformBuffer->vulkanBuffer);
VULKAN_INTERNAL_DestroyBuffer(renderer, renderer->dummyFragmentUniformBuffer->vulkanBuffer);
VULKAN_INTERNAL_DestroyBuffer(renderer, renderer->dummyComputeUniformBuffer->vulkanBuffer);
2020-12-31 00:47:13 +00:00
SDL_free(renderer->dummyVertexUniformBuffer);
SDL_free(renderer->dummyFragmentUniformBuffer);
SDL_free(renderer->dummyComputeUniformBuffer);
for (i = 0; i < renderer->transferBufferPool.availableBufferCount; i += 1)
2020-12-31 06:28:37 +00:00
{
VULKAN_INTERNAL_DestroyBuffer(renderer, renderer->transferBufferPool.availableBuffers[i]->buffer);
SDL_free(renderer->transferBufferPool.availableBuffers[i]);
}
SDL_free(renderer->transferBufferPool.availableBuffers);
SDL_DestroyMutex(renderer->transferBufferPool.lock);
for (i = 0; i < NUM_COMMAND_POOL_BUCKETS; i += 1)
{
commandPoolHashArray = renderer->commandPoolHashTable.buckets[i];
for (j = 0; j < commandPoolHashArray.count; j += 1)
2020-12-31 06:28:37 +00:00
{
VULKAN_INTERNAL_DestroyCommandPool(
2020-12-31 06:28:37 +00:00
renderer,
commandPoolHashArray.elements[j].value
2020-12-31 06:28:37 +00:00
);
}
if (commandPoolHashArray.elements != NULL)
2020-12-31 06:28:37 +00:00
{
SDL_free(commandPoolHashArray.elements);
2020-12-31 06:28:37 +00:00
}
}
2020-12-30 01:31:39 +00:00
for (i = 0; i < NUM_PIPELINE_LAYOUT_BUCKETS; i += 1)
{
graphicsPipelineLayoutHashArray = renderer->graphicsPipelineLayoutHashTable.buckets[i];
for (j = 0; j < graphicsPipelineLayoutHashArray.count; j += 1)
{
VULKAN_INTERNAL_DestroyDescriptorSetCache(
renderer,
graphicsPipelineLayoutHashArray.elements[j].value->vertexSamplerDescriptorSetCache
);
2020-12-17 08:19:02 +00:00
VULKAN_INTERNAL_DestroyDescriptorSetCache(
renderer,
graphicsPipelineLayoutHashArray.elements[j].value->fragmentSamplerDescriptorSetCache
);
2020-12-17 08:19:02 +00:00
renderer->vkDestroyPipelineLayout(
renderer->logicalDevice,
graphicsPipelineLayoutHashArray.elements[j].value->pipelineLayout,
NULL
);
SDL_free(graphicsPipelineLayoutHashArray.elements[j].value);
}
2020-12-17 08:19:02 +00:00
if (graphicsPipelineLayoutHashArray.elements != NULL)
{
SDL_free(graphicsPipelineLayoutHashArray.elements);
}
2020-12-22 02:34:57 +00:00
computePipelineLayoutHashArray = renderer->computePipelineLayoutHashTable.buckets[i];
for (j = 0; j < computePipelineLayoutHashArray.count; j += 1)
{
VULKAN_INTERNAL_DestroyDescriptorSetCache(
renderer,
computePipelineLayoutHashArray.elements[j].value->bufferDescriptorSetCache
);
2020-12-17 08:19:02 +00:00
VULKAN_INTERNAL_DestroyDescriptorSetCache(
renderer,
computePipelineLayoutHashArray.elements[j].value->imageDescriptorSetCache
);
2020-12-17 08:19:02 +00:00
renderer->vkDestroyPipelineLayout(
renderer->logicalDevice,
computePipelineLayoutHashArray.elements[j].value->pipelineLayout,
NULL
);
SDL_free(computePipelineLayoutHashArray.elements[j].value);
}
2020-12-17 08:19:02 +00:00
if (computePipelineLayoutHashArray.elements != NULL)
{
SDL_free(computePipelineLayoutHashArray.elements);
}
}
2020-12-17 08:19:02 +00:00
renderer->vkDestroyDescriptorPool(
renderer->logicalDevice,
renderer->defaultDescriptorPool,
NULL
);
2020-12-17 08:19:02 +00:00
for (i = 0; i < NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS; i += 1)
{
for (j = 0; j < renderer->descriptorSetLayoutHashTable.buckets[i].count; j += 1)
{
renderer->vkDestroyDescriptorSetLayout(
renderer->logicalDevice,
renderer->descriptorSetLayoutHashTable.buckets[i].elements[j].value,
NULL
);
}
2020-12-17 08:19:02 +00:00
SDL_free(renderer->descriptorSetLayoutHashTable.buckets[i].elements);
}
2020-12-17 08:19:02 +00:00
renderer->vkDestroyDescriptorSetLayout(
renderer->logicalDevice,
renderer->emptyVertexSamplerLayout,
NULL
);
2020-12-17 08:19:02 +00:00
renderer->vkDestroyDescriptorSetLayout(
renderer->logicalDevice,
renderer->emptyFragmentSamplerLayout,
NULL
);
2020-12-17 08:19:02 +00:00
renderer->vkDestroyDescriptorSetLayout(
renderer->logicalDevice,
renderer->emptyComputeBufferDescriptorSetLayout,
NULL
);
2020-12-17 08:19:02 +00:00
renderer->vkDestroyDescriptorSetLayout(
renderer->logicalDevice,
renderer->emptyComputeImageDescriptorSetLayout,
NULL
);
2020-12-17 08:19:02 +00:00
renderer->vkDestroyDescriptorSetLayout(
renderer->logicalDevice,
renderer->vertexUniformDescriptorSetLayout,
NULL
);
2020-12-17 08:19:02 +00:00
renderer->vkDestroyDescriptorSetLayout(
renderer->logicalDevice,
renderer->fragmentUniformDescriptorSetLayout,
NULL
);
2020-12-17 08:19:02 +00:00
renderer->vkDestroyDescriptorSetLayout(
renderer->logicalDevice,
renderer->computeUniformDescriptorSetLayout,
NULL
);
2020-12-17 08:19:02 +00:00
VULKAN_INTERNAL_DestroyUniformBufferPool(renderer, renderer->vertexUniformBufferPool);
VULKAN_INTERNAL_DestroyUniformBufferPool(renderer, renderer->fragmentUniformBufferPool);
VULKAN_INTERNAL_DestroyUniformBufferPool(renderer, renderer->computeUniformBufferPool);
for (i = 0; i < renderer->framebufferHashArray.count; i += 1)
{
VULKAN_INTERNAL_DestroyFramebuffer(
renderer,
renderer->framebufferHashArray.elements[i].value
);
}
SDL_free(renderer->framebufferHashArray.elements);
for (i = 0; i < renderer->renderPassHashArray.count; i += 1)
{
renderer->vkDestroyRenderPass(
renderer->logicalDevice,
renderer->renderPassHashArray.elements[i].value,
NULL
);
}
SDL_free(renderer->renderPassHashArray.elements);
SDL_free(renderer->renderTargetHashArray.elements);
for (i = 0; i < VK_MAX_MEMORY_TYPES; i += 1)
{
allocator = &renderer->memoryAllocator->subAllocators[i];
2020-12-17 08:19:02 +00:00
for (j = 0; j < allocator->allocationCount; j += 1)
{
for (k = 0; k < allocator->allocations[j]->freeRegionCount; k += 1)
{
SDL_free(allocator->allocations[j]->freeRegions[k]);
}
2020-12-17 08:19:02 +00:00
SDL_free(allocator->allocations[j]->freeRegions);
2020-12-17 08:19:02 +00:00
renderer->vkFreeMemory(
renderer->logicalDevice,
allocator->allocations[j]->memory,
NULL
);
2020-12-17 01:23:49 +00:00
SDL_DestroyMutex(allocator->allocations[j]->memoryLock);
SDL_free(allocator->allocations[j]);
}
2020-12-19 04:08:07 +00:00
SDL_free(allocator->allocations);
SDL_free(allocator->sortedFreeRegions);
}
2020-12-19 04:08:07 +00:00
SDL_free(renderer->memoryAllocator);
2020-12-19 04:08:07 +00:00
SDL_free(renderer->texturesToDestroy);
SDL_free(renderer->buffersToDestroy);
SDL_free(renderer->graphicsPipelinesToDestroy);
SDL_free(renderer->computePipelinesToDestroy);
SDL_free(renderer->shaderModulesToDestroy);
SDL_free(renderer->samplersToDestroy);
SDL_DestroyMutex(renderer->allocatorLock);
SDL_DestroyMutex(renderer->disposeLock);
SDL_DestroyMutex(renderer->submitLock);
SDL_DestroyMutex(renderer->acquireCommandBufferLock);
SDL_DestroyMutex(renderer->renderPassFetchLock);
SDL_DestroyMutex(renderer->framebufferFetchLock);
SDL_DestroyMutex(renderer->renderTargetFetchLock);
2020-12-19 04:08:07 +00:00
renderer->vkDestroyDevice(renderer->logicalDevice, NULL);
renderer->vkDestroyInstance(renderer->instance, NULL);
2020-12-19 04:08:07 +00:00
SDL_free(renderer);
SDL_free(device);
2020-12-19 04:08:07 +00:00
}
static void VULKAN_DrawInstancedPrimitives(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
uint32_t baseVertex,
uint32_t startIndex,
uint32_t primitiveCount,
uint32_t instanceCount,
uint32_t vertexParamOffset,
uint32_t fragmentParamOffset
2020-12-27 23:20:59 +00:00
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
2020-12-28 06:45:12 +00:00
VkDescriptorSet descriptorSets[4];
uint32_t dynamicOffsets[2];
2020-12-27 23:20:59 +00:00
descriptorSets[0] = vulkanCommandBuffer->vertexSamplerDescriptorSet;
descriptorSets[1] = vulkanCommandBuffer->fragmentSamplerDescriptorSet;
descriptorSets[2] = vulkanCommandBuffer->vertexUniformBuffer->descriptorSet;
descriptorSets[3] = vulkanCommandBuffer->fragmentUniformBuffer->descriptorSet;
2020-12-27 23:20:59 +00:00
dynamicOffsets[0] = vertexParamOffset;
dynamicOffsets[1] = fragmentParamOffset;
2020-12-28 06:45:12 +00:00
renderer->vkCmdBindDescriptorSets(
vulkanCommandBuffer->commandBuffer,
VK_PIPELINE_BIND_POINT_GRAPHICS,
vulkanCommandBuffer->currentGraphicsPipeline->pipelineLayout->pipelineLayout,
0,
4,
descriptorSets,
2,
dynamicOffsets
2020-12-27 23:20:59 +00:00
);
renderer->vkCmdDrawIndexed(
vulkanCommandBuffer->commandBuffer,
PrimitiveVerts(
vulkanCommandBuffer->currentGraphicsPipeline->primitiveType,
primitiveCount
),
instanceCount,
startIndex,
baseVertex,
0
2020-12-27 23:20:59 +00:00
);
}
2020-12-27 23:20:59 +00:00
static void VULKAN_DrawIndexedPrimitives(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
uint32_t baseVertex,
uint32_t startIndex,
uint32_t primitiveCount,
uint32_t vertexParamOffset,
uint32_t fragmentParamOffset
) {
VULKAN_DrawInstancedPrimitives(
driverData,
commandBuffer,
baseVertex,
startIndex,
primitiveCount,
1,
vertexParamOffset,
fragmentParamOffset
2020-12-27 23:20:59 +00:00
);
}
static void VULKAN_DrawPrimitives(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
uint32_t vertexStart,
uint32_t primitiveCount,
uint32_t vertexParamOffset,
uint32_t fragmentParamOffset
2020-12-30 00:53:10 +00:00
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
2020-12-30 00:53:10 +00:00
VkDescriptorSet descriptorSets[4];
uint32_t dynamicOffsets[2];
2020-12-31 04:39:47 +00:00
descriptorSets[0] = vulkanCommandBuffer->vertexSamplerDescriptorSet;
descriptorSets[1] = vulkanCommandBuffer->fragmentSamplerDescriptorSet;
descriptorSets[2] = vulkanCommandBuffer->vertexUniformBuffer->descriptorSet;
descriptorSets[3] = vulkanCommandBuffer->fragmentUniformBuffer->descriptorSet;
2020-12-30 00:53:10 +00:00
dynamicOffsets[0] = vertexParamOffset;
dynamicOffsets[1] = fragmentParamOffset;
2020-12-30 00:53:10 +00:00
renderer->vkCmdBindDescriptorSets(
vulkanCommandBuffer->commandBuffer,
VK_PIPELINE_BIND_POINT_GRAPHICS,
vulkanCommandBuffer->currentGraphicsPipeline->pipelineLayout->pipelineLayout,
0,
4,
descriptorSets,
2,
dynamicOffsets
2020-12-30 00:53:10 +00:00
);
renderer->vkCmdDraw(
vulkanCommandBuffer->commandBuffer,
PrimitiveVerts(
vulkanCommandBuffer->currentGraphicsPipeline->primitiveType,
primitiveCount
),
1,
vertexStart,
0
2020-12-30 00:53:10 +00:00
);
}
static void VULKAN_DrawPrimitivesIndirect(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_Buffer *buffer,
uint32_t offsetInBytes,
uint32_t drawCount,
uint32_t stride,
uint32_t vertexParamOffset,
uint32_t fragmentParamOffset
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBuffer *vulkanBuffer = (VulkanBuffer*) buffer;
VkDescriptorSet descriptorSets[4];
uint32_t dynamicOffsets[2];
descriptorSets[0] = vulkanCommandBuffer->vertexSamplerDescriptorSet;
descriptorSets[1] = vulkanCommandBuffer->fragmentSamplerDescriptorSet;
descriptorSets[2] = vulkanCommandBuffer->vertexUniformBuffer->descriptorSet;
descriptorSets[3] = vulkanCommandBuffer->fragmentUniformBuffer->descriptorSet;
dynamicOffsets[0] = vertexParamOffset;
dynamicOffsets[1] = fragmentParamOffset;
renderer->vkCmdBindDescriptorSets(
vulkanCommandBuffer->commandBuffer,
VK_PIPELINE_BIND_POINT_GRAPHICS,
vulkanCommandBuffer->currentGraphicsPipeline->pipelineLayout->pipelineLayout,
0,
4,
descriptorSets,
2,
dynamicOffsets
);
renderer->vkCmdDrawIndirect(
vulkanCommandBuffer->commandBuffer,
vulkanBuffer->buffer,
offsetInBytes,
drawCount,
stride
);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, vulkanBuffer);
}
static void VULKAN_DispatchCompute(
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
uint32_t groupCountX,
uint32_t groupCountY,
uint32_t groupCountZ,
uint32_t computeParamOffset
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanComputePipeline *computePipeline = vulkanCommandBuffer->currentComputePipeline;
VkDescriptorSet descriptorSets[3];
VulkanResourceAccessType resourceAccessType = RESOURCE_ACCESS_NONE;
VulkanBuffer *currentComputeBuffer;
VulkanTexture *currentComputeTexture;
uint32_t i;
descriptorSets[0] = vulkanCommandBuffer->bufferDescriptorSet;
descriptorSets[1] = vulkanCommandBuffer->imageDescriptorSet;
descriptorSets[2] = vulkanCommandBuffer->computeUniformBuffer->descriptorSet;
renderer->vkCmdBindDescriptorSets(
vulkanCommandBuffer->commandBuffer,
VK_PIPELINE_BIND_POINT_COMPUTE,
computePipeline->pipelineLayout->pipelineLayout,
0,
3,
descriptorSets,
1,
&computeParamOffset
);
renderer->vkCmdDispatch(
vulkanCommandBuffer->commandBuffer,
groupCountX,
groupCountY,
groupCountZ
);
/* Re-transition buffers after dispatch */
for (i = 0; i < vulkanCommandBuffer->boundComputeBufferCount; i += 1)
{
currentComputeBuffer = vulkanCommandBuffer->boundComputeBuffers[i];
if (currentComputeBuffer->usage & VK_BUFFER_USAGE_VERTEX_BUFFER_BIT)
{
resourceAccessType = RESOURCE_ACCESS_VERTEX_BUFFER;
}
else if (currentComputeBuffer->usage & VK_BUFFER_USAGE_INDEX_BUFFER_BIT)
{
resourceAccessType = RESOURCE_ACCESS_INDEX_BUFFER;
}
else if (currentComputeBuffer->usage & VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT)
{
resourceAccessType = RESOURCE_ACCESS_INDIRECT_BUFFER;
}
if (resourceAccessType != RESOURCE_ACCESS_NONE)
{
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
resourceAccessType,
currentComputeBuffer
);
}
}
vulkanCommandBuffer->boundComputeBufferCount = 0;
/* Re-transition sampler images after dispatch */
for (i = 0; i < vulkanCommandBuffer->boundComputeTextureCount; i += 1)
{
currentComputeTexture = vulkanCommandBuffer->boundComputeTextures[i];
if (currentComputeTexture->usageFlags & VK_IMAGE_USAGE_SAMPLED_BIT)
{
resourceAccessType = RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE;
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
resourceAccessType,
currentComputeTexture->aspectFlags,
0,
currentComputeTexture->layerCount,
0,
currentComputeTexture->levelCount,
0,
currentComputeTexture->image,
&currentComputeTexture->resourceAccessType
);
}
}
vulkanCommandBuffer->boundComputeTextureCount = 0;
}
static VulkanTexture* VULKAN_INTERNAL_CreateTexture(
VulkanRenderer *renderer,
uint32_t width,
uint32_t height,
uint32_t depth,
uint32_t isCube,
uint32_t levelCount,
Refresh_SampleCount sampleCount,
VkFormat format,
VkImageAspectFlags aspectMask,
VkImageUsageFlags imageUsageFlags
) {
2022-02-25 21:42:11 +00:00
VkResult vulkanResult;
VkImageCreateInfo imageCreateInfo;
VkImageCreateFlags imageCreateFlags = 0;
VkImageViewCreateInfo imageViewCreateInfo;
uint8_t findMemoryResult;
uint8_t is3D = depth > 1 ? 1 : 0;
uint8_t layerCount = isCube ? 6 : 1;
uint8_t isRenderTarget =
((imageUsageFlags & VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT) != 0) ||
((imageUsageFlags & VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT) != 0);
VkComponentMapping swizzle = IDENTITY_SWIZZLE;
VulkanTexture *texture = SDL_malloc(sizeof(VulkanTexture));
texture->isCube = 0;
texture->is3D = 0;
if (isCube)
{
imageCreateFlags |= VK_IMAGE_CREATE_CUBE_COMPATIBLE_BIT;
texture->isCube = 1;
}
else if (is3D)
{
imageCreateFlags |= VK_IMAGE_CREATE_2D_ARRAY_COMPATIBLE_BIT;
texture->is3D = 1;
}
imageCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
imageCreateInfo.pNext = NULL;
imageCreateInfo.flags = imageCreateFlags;
imageCreateInfo.imageType = is3D ? VK_IMAGE_TYPE_3D : VK_IMAGE_TYPE_2D;
imageCreateInfo.format = format;
imageCreateInfo.extent.width = width;
imageCreateInfo.extent.height = height;
imageCreateInfo.extent.depth = depth;
imageCreateInfo.mipLevels = levelCount;
imageCreateInfo.arrayLayers = layerCount;
imageCreateInfo.samples = RefreshToVK_SampleCount[sampleCount];
imageCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
imageCreateInfo.usage = imageUsageFlags;
imageCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
imageCreateInfo.queueFamilyIndexCount = 0;
imageCreateInfo.pQueueFamilyIndices = NULL;
imageCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
vulkanResult = renderer->vkCreateImage(
renderer->logicalDevice,
&imageCreateInfo,
NULL,
&texture->image
);
if (vulkanResult != VK_SUCCESS)
{
LogVulkanResultAsError("vkCreateImage", vulkanResult);
Refresh_LogError("Failed to create texture!");
}
/* Prefer GPU allocation */
findMemoryResult = VULKAN_INTERNAL_FindAvailableTextureMemory(
renderer,
texture->image,
0,
&texture->allocation,
&texture->offset,
&texture->memorySize
);
/* No device local memory available */
if (findMemoryResult == 2)
{
if (isRenderTarget)
{
Refresh_LogWarn("RenderTarget is allocated in host memory, pre-allocate your targets!");
}
Refresh_LogWarn("Out of device local memory, falling back to host memory");
/* Attempt CPU allocation */
findMemoryResult = VULKAN_INTERNAL_FindAvailableTextureMemory(
renderer,
texture->image,
1,
&texture->allocation,
&texture->offset,
&texture->memorySize
);
/* Memory alloc completely failed, time to die */
if (findMemoryResult == 0)
{
Refresh_LogError("Something went very wrong allocating memory!");
return 0;
}
else if (findMemoryResult == 2)
{
Refresh_LogError("Out of memory!");
return 0;
}
}
SDL_LockMutex(texture->allocation->memoryLock);
vulkanResult = renderer->vkBindImageMemory(
renderer->logicalDevice,
texture->image,
texture->allocation->memory,
texture->offset
);
SDL_UnlockMutex(texture->allocation->memoryLock);
if (vulkanResult != VK_SUCCESS)
{
LogVulkanResultAsError("vkBindImageMemory", vulkanResult);
Refresh_LogError("Failed to bind texture memory!");
return 0;
}
imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
imageViewCreateInfo.pNext = NULL;
imageViewCreateInfo.flags = 0;
imageViewCreateInfo.image = texture->image;
imageViewCreateInfo.format = format;
imageViewCreateInfo.components = swizzle;
imageViewCreateInfo.subresourceRange.aspectMask = aspectMask;
imageViewCreateInfo.subresourceRange.baseMipLevel = 0;
imageViewCreateInfo.subresourceRange.levelCount = levelCount;
imageViewCreateInfo.subresourceRange.baseArrayLayer = 0;
imageViewCreateInfo.subresourceRange.layerCount = layerCount;
if (isCube)
{
imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_CUBE;
}
else if (is3D)
{
imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_3D;
}
else
{
imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
}
vulkanResult = renderer->vkCreateImageView(
renderer->logicalDevice,
&imageViewCreateInfo,
NULL,
&texture->view
);
if (vulkanResult != VK_SUCCESS)
{
LogVulkanResultAsError("vkCreateImageView", vulkanResult);
Refresh_LogError("Failed to create texture image view");
return 0;
}
texture->dimensions.width = width;
texture->dimensions.height = height;
texture->depth = depth;
texture->format = format;
texture->levelCount = levelCount;
texture->layerCount = layerCount;
texture->sampleCount = sampleCount;
texture->resourceAccessType = RESOURCE_ACCESS_NONE;
texture->usageFlags = imageUsageFlags;
texture->aspectFlags = aspectMask;
texture->msaaTex = NULL;
SDL_AtomicSet(&texture->referenceCount, 0);
return texture;
}
static VulkanRenderTarget* VULKAN_INTERNAL_CreateRenderTarget(
VulkanRenderer *renderer,
VulkanTexture *texture,
uint32_t depth,
uint32_t layer,
uint32_t level
) {
VkResult vulkanResult;
VulkanRenderTarget *renderTarget = (VulkanRenderTarget*) SDL_malloc(sizeof(VulkanRenderTarget));
VkImageViewCreateInfo imageViewCreateInfo;
VkComponentMapping swizzle = IDENTITY_SWIZZLE;
VkImageAspectFlags aspectFlags = 0;
if (IsDepthFormat(texture->format))
{
aspectFlags |= VK_IMAGE_ASPECT_DEPTH_BIT;
if (IsStencilFormat(texture->format))
{
aspectFlags |= VK_IMAGE_ASPECT_STENCIL_BIT;
}
}
else
{
aspectFlags |= VK_IMAGE_ASPECT_COLOR_BIT;
}
/* create framebuffer compatible views for RenderTarget */
imageViewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
imageViewCreateInfo.pNext = NULL;
imageViewCreateInfo.flags = 0;
imageViewCreateInfo.image = texture->image;
imageViewCreateInfo.format = texture->format;
imageViewCreateInfo.components = swizzle;
imageViewCreateInfo.subresourceRange.aspectMask = aspectFlags;
imageViewCreateInfo.subresourceRange.baseMipLevel = level;
imageViewCreateInfo.subresourceRange.levelCount = 1;
imageViewCreateInfo.subresourceRange.baseArrayLayer = 0;
if (texture->is3D)
{
imageViewCreateInfo.subresourceRange.baseArrayLayer = depth;
}
else if (texture->isCube)
{
imageViewCreateInfo.subresourceRange.baseArrayLayer = layer;
}
imageViewCreateInfo.subresourceRange.layerCount = 1;
imageViewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
vulkanResult = renderer->vkCreateImageView(
renderer->logicalDevice,
&imageViewCreateInfo,
NULL,
&renderTarget->view
);
if (vulkanResult != VK_SUCCESS)
{
LogVulkanResultAsError(
"vkCreateImageView",
vulkanResult
);
Refresh_LogError("Failed to create color attachment image view");
return NULL;
}
return renderTarget;
}
static VulkanRenderTarget* VULKAN_INTERNAL_FetchRenderTarget(
VulkanRenderer *renderer,
VulkanTexture *texture,
uint32_t depth,
uint32_t layer,
uint32_t level
) {
RenderTargetHash hash;
VulkanRenderTarget *renderTarget;
hash.texture = texture;
hash.depth = depth;
hash.layer = layer;
hash.level = level;
SDL_LockMutex(renderer->renderTargetFetchLock);
renderTarget = RenderTargetHash_Fetch(
&renderer->renderTargetHashArray,
&hash
);
if (renderTarget == NULL)
{
renderTarget = VULKAN_INTERNAL_CreateRenderTarget(
renderer,
texture,
depth,
layer,
level
);
RenderTargetHash_Insert(
&renderer->renderTargetHashArray,
hash,
renderTarget
);
}
SDL_UnlockMutex(renderer->renderTargetFetchLock);
return renderTarget;
}
static VkRenderPass VULKAN_INTERNAL_CreateRenderPass(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
Refresh_ColorAttachmentInfo *colorAttachmentInfos,
uint32_t colorAttachmentCount,
Refresh_DepthStencilAttachmentInfo *depthStencilAttachmentInfo
) {
VkResult vulkanResult;
VkAttachmentDescription attachmentDescriptions[2 * MAX_COLOR_TARGET_BINDINGS + 1];
VkAttachmentReference colorAttachmentReferences[MAX_COLOR_TARGET_BINDINGS];
VkAttachmentReference resolveReferences[MAX_COLOR_TARGET_BINDINGS + 1];
VkAttachmentReference depthStencilAttachmentReference;
VkRenderPassCreateInfo renderPassCreateInfo;
2022-02-25 21:42:11 +00:00
VkSubpassDescription subpass;
VkRenderPass renderPass;
uint32_t i;
2022-02-25 21:42:11 +00:00
uint32_t attachmentDescriptionCount = 0;
uint32_t colorAttachmentReferenceCount = 0;
uint32_t resolveReferenceCount = 0;
VulkanTexture *texture;
VulkanTexture *msaaTexture = NULL;
2022-02-25 21:42:11 +00:00
for (i = 0; i < colorAttachmentCount; i += 1)
{
texture = (VulkanTexture*) colorAttachmentInfos[i].texture;
if (texture->msaaTex != NULL)
{
msaaTexture = texture->msaaTex;
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
/* Transition the multisample attachment */
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
commandBuffer->commandBuffer,
RESOURCE_ACCESS_COLOR_ATTACHMENT_WRITE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
msaaTexture->layerCount,
0,
msaaTexture->levelCount,
0,
msaaTexture->image,
&msaaTexture->resourceAccessType
);
2022-02-25 21:42:11 +00:00
/* Resolve attachment and multisample attachment */
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
attachmentDescriptions[attachmentDescriptionCount].format = texture->format;
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].samples =
VK_SAMPLE_COUNT_1_BIT;
attachmentDescriptions[attachmentDescriptionCount].loadOp = RefreshToVK_LoadOp[
colorAttachmentInfos[i].loadOp
];
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
attachmentDescriptions[attachmentDescriptionCount].storeOp =
VK_ATTACHMENT_STORE_OP_STORE; /* Always store the resolve texture */
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp =
VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp =
VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].initialLayout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptions[attachmentDescriptionCount].finalLayout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
resolveReferences[resolveReferenceCount].attachment =
attachmentDescriptionCount;
resolveReferences[resolveReferenceCount].layout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptionCount += 1;
resolveReferenceCount += 1;
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
attachmentDescriptions[attachmentDescriptionCount].format = msaaTexture->format;
attachmentDescriptions[attachmentDescriptionCount].samples = RefreshToVK_SampleCount[
msaaTexture->sampleCount
];
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].loadOp = RefreshToVK_LoadOp[
colorAttachmentInfos[i].loadOp
];
attachmentDescriptions[attachmentDescriptionCount].storeOp = RefreshToVK_StoreOp[
colorAttachmentInfos[i].storeOp
];
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp =
VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp =
VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].initialLayout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptions[attachmentDescriptionCount].finalLayout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
colorAttachmentReferences[colorAttachmentReferenceCount].attachment =
attachmentDescriptionCount;
colorAttachmentReferences[colorAttachmentReferenceCount].layout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptionCount += 1;
colorAttachmentReferenceCount += 1;
}
else
{
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
attachmentDescriptions[attachmentDescriptionCount].format = texture->format;
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].samples =
VK_SAMPLE_COUNT_1_BIT;
attachmentDescriptions[attachmentDescriptionCount].loadOp = RefreshToVK_LoadOp[
colorAttachmentInfos[i].loadOp
];
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
attachmentDescriptions[attachmentDescriptionCount].storeOp =
VK_ATTACHMENT_STORE_OP_STORE; /* Always store non-MSAA textures */
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp =
VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp =
VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].initialLayout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptions[attachmentDescriptionCount].finalLayout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
colorAttachmentReferences[colorAttachmentReferenceCount].attachment = attachmentDescriptionCount;
colorAttachmentReferences[colorAttachmentReferenceCount].layout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptionCount += 1;
colorAttachmentReferenceCount += 1;
}
}
subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
subpass.flags = 0;
subpass.inputAttachmentCount = 0;
subpass.pInputAttachments = NULL;
subpass.colorAttachmentCount = colorAttachmentCount;
subpass.pColorAttachments = colorAttachmentReferences;
subpass.preserveAttachmentCount = 0;
subpass.pPreserveAttachments = NULL;
if (depthStencilAttachmentInfo == NULL)
{
subpass.pDepthStencilAttachment = NULL;
}
else
{
texture = (VulkanTexture*) depthStencilAttachmentInfo->texture;
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
attachmentDescriptions[attachmentDescriptionCount].format = texture->format;
attachmentDescriptions[attachmentDescriptionCount].samples = RefreshToVK_SampleCount[
texture->sampleCount
];
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].loadOp = RefreshToVK_LoadOp[
depthStencilAttachmentInfo->loadOp
];
attachmentDescriptions[attachmentDescriptionCount].storeOp = RefreshToVK_StoreOp[
depthStencilAttachmentInfo->storeOp
];
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = RefreshToVK_LoadOp[
depthStencilAttachmentInfo->stencilLoadOp
];
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = RefreshToVK_StoreOp[
depthStencilAttachmentInfo->stencilStoreOp
];
attachmentDescriptions[attachmentDescriptionCount].initialLayout =
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
attachmentDescriptions[attachmentDescriptionCount].finalLayout =
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
depthStencilAttachmentReference.attachment =
attachmentDescriptionCount;
depthStencilAttachmentReference.layout =
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
subpass.pDepthStencilAttachment =
&depthStencilAttachmentReference;
attachmentDescriptionCount += 1;
}
2020-12-27 23:34:15 +00:00
if (msaaTexture != NULL)
2020-12-27 23:20:59 +00:00
{
subpass.pResolveAttachments = resolveReferences;
2020-12-27 23:20:59 +00:00
}
else
{
subpass.pResolveAttachments = NULL;
2020-12-27 23:20:59 +00:00
}
2022-02-25 21:42:11 +00:00
renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
renderPassCreateInfo.pNext = NULL;
renderPassCreateInfo.flags = 0;
renderPassCreateInfo.pAttachments = attachmentDescriptions;
renderPassCreateInfo.attachmentCount = attachmentDescriptionCount;
renderPassCreateInfo.subpassCount = 1;
renderPassCreateInfo.pSubpasses = &subpass;
renderPassCreateInfo.dependencyCount = 0;
renderPassCreateInfo.pDependencies = NULL;
2022-02-25 21:42:11 +00:00
vulkanResult = renderer->vkCreateRenderPass(
renderer->logicalDevice,
&renderPassCreateInfo,
NULL,
&renderPass
);
if (vulkanResult != VK_SUCCESS)
2020-12-27 23:20:59 +00:00
{
renderPass = VK_NULL_HANDLE;
LogVulkanResultAsError("vkCreateRenderPass", vulkanResult);
}
2022-02-25 21:42:11 +00:00
return renderPass;
}
static VkRenderPass VULKAN_INTERNAL_CreateTransientRenderPass(
VulkanRenderer *renderer,
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
Refresh_GraphicsPipelineAttachmentInfo attachmentInfo,
Refresh_SampleCount sampleCount
) {
VkAttachmentDescription attachmentDescriptions[2 * MAX_COLOR_TARGET_BINDINGS + 1];
VkAttachmentReference colorAttachmentReferences[MAX_COLOR_TARGET_BINDINGS];
VkAttachmentReference resolveReferences[MAX_COLOR_TARGET_BINDINGS + 1];
VkAttachmentReference depthStencilAttachmentReference;
Refresh_ColorAttachmentDescription attachmentDescription;
2022-02-25 21:42:11 +00:00
VkSubpassDescription subpass;
VkRenderPassCreateInfo renderPassCreateInfo;
2022-02-25 21:42:11 +00:00
VkRenderPass renderPass;
VkResult result;
uint32_t multisampling = 0;
2022-02-25 21:42:11 +00:00
uint32_t attachmentDescriptionCount = 0;
uint32_t colorAttachmentReferenceCount = 0;
uint32_t resolveReferenceCount = 0;
uint32_t i;
for (i = 0; i < attachmentInfo.colorAttachmentCount; i += 1)
{
attachmentDescription = attachmentInfo.colorAttachmentDescriptions[i];
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
if (sampleCount > REFRESH_SAMPLECOUNT_1)
{
multisampling = 1;
/* Resolve attachment and multisample attachment */
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
attachmentDescriptions[attachmentDescriptionCount].format = RefreshToVK_SurfaceFormat[
attachmentDescription.format
];
attachmentDescriptions[attachmentDescriptionCount].samples = VK_SAMPLE_COUNT_1_BIT;
attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
resolveReferences[resolveReferenceCount].attachment = attachmentDescriptionCount;
resolveReferences[resolveReferenceCount].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptionCount += 1;
resolveReferenceCount += 1;
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
attachmentDescriptions[attachmentDescriptionCount].format = RefreshToVK_SurfaceFormat[
attachmentDescription.format
];
attachmentDescriptions[attachmentDescriptionCount].samples = RefreshToVK_SampleCount[
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
sampleCount
];
attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachmentDescriptions[attachmentDescriptionCount].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
colorAttachmentReferences[colorAttachmentReferenceCount].attachment =
2022-02-25 21:42:11 +00:00
attachmentDescriptionCount;
colorAttachmentReferences[colorAttachmentReferenceCount].layout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2022-02-25 21:42:11 +00:00
attachmentDescriptionCount += 1;
colorAttachmentReferenceCount += 1;
}
else
2022-02-25 21:42:11 +00:00
{
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
attachmentDescriptions[attachmentDescriptionCount].format = RefreshToVK_SurfaceFormat[
attachmentDescription.format
];
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].samples =
VK_SAMPLE_COUNT_1_BIT;
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].loadOp =
VK_ATTACHMENT_LOAD_OP_DONT_CARE;
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].storeOp =
VK_ATTACHMENT_STORE_OP_DONT_CARE;
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp =
VK_ATTACHMENT_LOAD_OP_DONT_CARE;
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp =
VK_ATTACHMENT_STORE_OP_DONT_CARE;
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].initialLayout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].finalLayout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2022-02-25 21:42:11 +00:00
colorAttachmentReferences[colorAttachmentReferenceCount].attachment = attachmentDescriptionCount;
colorAttachmentReferences[colorAttachmentReferenceCount].layout =
VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
2022-02-25 21:42:11 +00:00
attachmentDescriptionCount += 1;
colorAttachmentReferenceCount += 1;
}
}
subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
subpass.flags = 0;
subpass.inputAttachmentCount = 0;
subpass.pInputAttachments = NULL;
subpass.colorAttachmentCount = attachmentInfo.colorAttachmentCount;
subpass.pColorAttachments = colorAttachmentReferences;
subpass.preserveAttachmentCount = 0;
subpass.pPreserveAttachments = NULL;
if (attachmentInfo.hasDepthStencilAttachment)
{
attachmentDescriptions[attachmentDescriptionCount].flags = 0;
2022-06-17 07:41:27 +00:00
attachmentDescriptions[attachmentDescriptionCount].format = RefreshToVK_DepthFormat(
renderer,
attachmentInfo.depthStencilFormat
2022-06-17 07:41:27 +00:00
);
attachmentDescriptions[attachmentDescriptionCount].samples = RefreshToVK_SampleCount[
sampleCount
];
2022-02-25 21:42:11 +00:00
attachmentDescriptions[attachmentDescriptionCount].loadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachmentDescriptions[attachmentDescriptionCount].initialLayout =
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
attachmentDescriptions[attachmentDescriptionCount].finalLayout =
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
depthStencilAttachmentReference.attachment =
attachmentDescriptionCount;
depthStencilAttachmentReference.layout =
VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
subpass.pDepthStencilAttachment =
&depthStencilAttachmentReference;
attachmentDescriptionCount += 1;
}
else
{
subpass.pDepthStencilAttachment = NULL;
}
if (multisampling)
{
subpass.pResolveAttachments = resolveReferences;
}
else
{
subpass.pResolveAttachments = NULL;
}
2022-02-25 21:42:11 +00:00
renderPassCreateInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
renderPassCreateInfo.pNext = NULL;
renderPassCreateInfo.flags = 0;
renderPassCreateInfo.pAttachments = attachmentDescriptions;
renderPassCreateInfo.attachmentCount = attachmentDescriptionCount;
renderPassCreateInfo.subpassCount = 1;
renderPassCreateInfo.pSubpasses = &subpass;
renderPassCreateInfo.dependencyCount = 0;
renderPassCreateInfo.pDependencies = NULL;
result = renderer->vkCreateRenderPass(
renderer->logicalDevice,
&renderPassCreateInfo,
NULL,
&renderPass
);
if (result != VK_SUCCESS)
{
renderPass = VK_NULL_HANDLE;
LogVulkanResultAsError("vkCreateRenderPass", result);
}
return renderPass;
}
2021-01-05 23:00:51 +00:00
static Refresh_GraphicsPipeline* VULKAN_CreateGraphicsPipeline(
Refresh_Renderer *driverData,
Refresh_GraphicsPipelineCreateInfo *pipelineCreateInfo
2020-12-17 01:23:49 +00:00
) {
2020-12-17 19:40:49 +00:00
VkResult vulkanResult;
uint32_t i;
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
Refresh_SampleCount actualSampleCount;
2020-12-17 19:40:49 +00:00
2020-12-19 04:08:07 +00:00
VulkanGraphicsPipeline *graphicsPipeline = (VulkanGraphicsPipeline*) SDL_malloc(sizeof(VulkanGraphicsPipeline));
2020-12-17 19:40:49 +00:00
VkGraphicsPipelineCreateInfo vkPipelineCreateInfo;
VkPipelineShaderStageCreateInfo shaderStageCreateInfos[2];
VkPipelineVertexInputStateCreateInfo vertexInputStateCreateInfo;
VkVertexInputBindingDescription *vertexInputBindingDescriptions = SDL_stack_alloc(VkVertexInputBindingDescription, pipelineCreateInfo->vertexInputState.vertexBindingCount);
VkVertexInputAttributeDescription *vertexInputAttributeDescriptions = SDL_stack_alloc(VkVertexInputAttributeDescription, pipelineCreateInfo->vertexInputState.vertexAttributeCount);
2020-12-17 19:40:49 +00:00
VkPipelineInputAssemblyStateCreateInfo inputAssemblyStateCreateInfo;
VkPipelineViewportStateCreateInfo viewportStateCreateInfo;
VkPipelineRasterizationStateCreateInfo rasterizationStateCreateInfo;
VkPipelineMultisampleStateCreateInfo multisampleStateCreateInfo;
VkPipelineDepthStencilStateCreateInfo depthStencilStateCreateInfo;
VkStencilOpState frontStencilState;
VkStencilOpState backStencilState;
VkPipelineColorBlendStateCreateInfo colorBlendStateCreateInfo;
VkPipelineColorBlendAttachmentState *colorBlendAttachmentStates = SDL_stack_alloc(
VkPipelineColorBlendAttachmentState,
pipelineCreateInfo->attachmentInfo.colorAttachmentCount
);
2020-12-17 19:40:49 +00:00
2022-03-04 20:30:33 +00:00
static const VkDynamicState dynamicStates[] =
{
VK_DYNAMIC_STATE_VIEWPORT,
VK_DYNAMIC_STATE_SCISSOR
};
VkPipelineDynamicStateCreateInfo dynamicStateCreateInfo;
2020-12-17 19:40:49 +00:00
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
/* Find a compatible sample count to use */
actualSampleCount = VULKAN_INTERNAL_GetMaxMultiSampleCount(
renderer,
pipelineCreateInfo->multisampleState.multisampleCount
);
/* Create a "compatible" render pass */
VkRenderPass transientRenderPass = VULKAN_INTERNAL_CreateTransientRenderPass(
renderer,
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
pipelineCreateInfo->attachmentInfo,
actualSampleCount
);
2022-03-04 20:30:33 +00:00
/* Dynamic state */
dynamicStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
dynamicStateCreateInfo.pNext = NULL;
dynamicStateCreateInfo.flags = 0;
dynamicStateCreateInfo.dynamicStateCount = SDL_arraysize(dynamicStates);
dynamicStateCreateInfo.pDynamicStates = dynamicStates;
2020-12-17 19:40:49 +00:00
/* Shader stages */
graphicsPipeline->vertexShaderModule = (VulkanShaderModule*) pipelineCreateInfo->vertexShaderInfo.shaderModule;
SDL_AtomicIncRef(&graphicsPipeline->vertexShaderModule->referenceCount);
2020-12-17 19:40:49 +00:00
shaderStageCreateInfos[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
shaderStageCreateInfos[0].pNext = NULL;
shaderStageCreateInfos[0].flags = 0;
shaderStageCreateInfos[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
shaderStageCreateInfos[0].module = graphicsPipeline->vertexShaderModule->shaderModule;
2022-03-02 19:22:52 +00:00
shaderStageCreateInfos[0].pName = pipelineCreateInfo->vertexShaderInfo.entryPointName;
2020-12-17 19:40:49 +00:00
shaderStageCreateInfos[0].pSpecializationInfo = NULL;
graphicsPipeline->vertexUniformBlockSize =
2020-12-23 21:11:09 +00:00
VULKAN_INTERNAL_NextHighestAlignment(
2022-03-02 19:22:52 +00:00
pipelineCreateInfo->vertexShaderInfo.uniformBufferSize,
2020-12-23 21:11:09 +00:00
renderer->minUBOAlignment
);
graphicsPipeline->fragmentShaderModule = (VulkanShaderModule*) pipelineCreateInfo->fragmentShaderInfo.shaderModule;
SDL_AtomicIncRef(&graphicsPipeline->fragmentShaderModule->referenceCount);
2020-12-17 19:40:49 +00:00
shaderStageCreateInfos[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
shaderStageCreateInfos[1].pNext = NULL;
shaderStageCreateInfos[1].flags = 0;
shaderStageCreateInfos[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
shaderStageCreateInfos[1].module = graphicsPipeline->fragmentShaderModule->shaderModule;
2022-03-02 19:22:52 +00:00
shaderStageCreateInfos[1].pName = pipelineCreateInfo->fragmentShaderInfo.entryPointName;
2020-12-17 19:40:49 +00:00
shaderStageCreateInfos[1].pSpecializationInfo = NULL;
graphicsPipeline->fragmentUniformBlockSize =
2020-12-23 21:11:09 +00:00
VULKAN_INTERNAL_NextHighestAlignment(
2022-03-02 19:22:52 +00:00
pipelineCreateInfo->fragmentShaderInfo.uniformBufferSize,
2020-12-23 21:11:09 +00:00
renderer->minUBOAlignment
);
2020-12-17 19:40:49 +00:00
/* Vertex input */
for (i = 0; i < pipelineCreateInfo->vertexInputState.vertexBindingCount; i += 1)
{
vertexInputBindingDescriptions[i].binding = pipelineCreateInfo->vertexInputState.vertexBindings[i].binding;
vertexInputBindingDescriptions[i].inputRate = RefreshToVK_VertexInputRate[
pipelineCreateInfo->vertexInputState.vertexBindings[i].inputRate
];
vertexInputBindingDescriptions[i].stride = pipelineCreateInfo->vertexInputState.vertexBindings[i].stride;
}
for (i = 0; i < pipelineCreateInfo->vertexInputState.vertexAttributeCount; i += 1)
{
vertexInputAttributeDescriptions[i].binding = pipelineCreateInfo->vertexInputState.vertexAttributes[i].binding;
vertexInputAttributeDescriptions[i].format = RefreshToVK_VertexFormat[
pipelineCreateInfo->vertexInputState.vertexAttributes[i].format
];
vertexInputAttributeDescriptions[i].location = pipelineCreateInfo->vertexInputState.vertexAttributes[i].location;
vertexInputAttributeDescriptions[i].offset = pipelineCreateInfo->vertexInputState.vertexAttributes[i].offset;
}
vertexInputStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
vertexInputStateCreateInfo.pNext = NULL;
vertexInputStateCreateInfo.flags = 0;
vertexInputStateCreateInfo.vertexBindingDescriptionCount = pipelineCreateInfo->vertexInputState.vertexBindingCount;
vertexInputStateCreateInfo.pVertexBindingDescriptions = vertexInputBindingDescriptions;
vertexInputStateCreateInfo.vertexAttributeDescriptionCount = pipelineCreateInfo->vertexInputState.vertexAttributeCount;
vertexInputStateCreateInfo.pVertexAttributeDescriptions = vertexInputAttributeDescriptions;
/* Topology */
inputAssemblyStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
inputAssemblyStateCreateInfo.pNext = NULL;
inputAssemblyStateCreateInfo.flags = 0;
inputAssemblyStateCreateInfo.primitiveRestartEnable = VK_FALSE;
inputAssemblyStateCreateInfo.topology = RefreshToVK_PrimitiveType[
pipelineCreateInfo->primitiveType
2020-12-17 19:40:49 +00:00
];
graphicsPipeline->primitiveType = pipelineCreateInfo->primitiveType;
2020-12-17 19:40:49 +00:00
/* Viewport */
2022-03-04 20:30:33 +00:00
/* NOTE: viewport and scissor are dynamic, and must be set using the command buffer */
2020-12-17 19:40:49 +00:00
viewportStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
viewportStateCreateInfo.pNext = NULL;
viewportStateCreateInfo.flags = 0;
2022-03-04 20:30:33 +00:00
viewportStateCreateInfo.viewportCount = 1;
viewportStateCreateInfo.pViewports = NULL;
viewportStateCreateInfo.scissorCount = 1;
viewportStateCreateInfo.pScissors = NULL;
2020-12-17 19:40:49 +00:00
/* Rasterization */
rasterizationStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
rasterizationStateCreateInfo.pNext = NULL;
rasterizationStateCreateInfo.flags = 0;
rasterizationStateCreateInfo.depthClampEnable = VK_FALSE;
2020-12-17 19:40:49 +00:00
rasterizationStateCreateInfo.rasterizerDiscardEnable = VK_FALSE;
rasterizationStateCreateInfo.polygonMode = RefreshToVK_PolygonMode[
pipelineCreateInfo->rasterizerState.fillMode
];
rasterizationStateCreateInfo.cullMode = RefreshToVK_CullMode[
pipelineCreateInfo->rasterizerState.cullMode
];
rasterizationStateCreateInfo.frontFace = RefreshToVK_FrontFace[
pipelineCreateInfo->rasterizerState.frontFace
];
rasterizationStateCreateInfo.depthBiasEnable =
pipelineCreateInfo->rasterizerState.depthBiasEnable;
rasterizationStateCreateInfo.depthBiasConstantFactor =
pipelineCreateInfo->rasterizerState.depthBiasConstantFactor;
rasterizationStateCreateInfo.depthBiasClamp =
pipelineCreateInfo->rasterizerState.depthBiasClamp;
rasterizationStateCreateInfo.depthBiasSlopeFactor =
pipelineCreateInfo->rasterizerState.depthBiasSlopeFactor;
2022-03-14 17:43:01 +00:00
rasterizationStateCreateInfo.lineWidth = 1.0f;
2020-12-17 19:40:49 +00:00
/* Multisample */
multisampleStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
multisampleStateCreateInfo.pNext = NULL;
multisampleStateCreateInfo.flags = 0;
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
multisampleStateCreateInfo.rasterizationSamples = RefreshToVK_SampleCount[actualSampleCount];
2020-12-17 19:40:49 +00:00
multisampleStateCreateInfo.sampleShadingEnable = VK_FALSE;
multisampleStateCreateInfo.minSampleShading = 1.0f;
multisampleStateCreateInfo.pSampleMask =
2021-01-05 23:53:16 +00:00
&pipelineCreateInfo->multisampleState.sampleMask;
2020-12-17 19:40:49 +00:00
multisampleStateCreateInfo.alphaToCoverageEnable = VK_FALSE;
multisampleStateCreateInfo.alphaToOneEnable = VK_FALSE;
/* Depth Stencil State */
frontStencilState.failOp = RefreshToVK_StencilOp[
pipelineCreateInfo->depthStencilState.frontStencilState.failOp
];
frontStencilState.passOp = RefreshToVK_StencilOp[
pipelineCreateInfo->depthStencilState.frontStencilState.passOp
];
frontStencilState.depthFailOp = RefreshToVK_StencilOp[
pipelineCreateInfo->depthStencilState.frontStencilState.depthFailOp
];
frontStencilState.compareOp = RefreshToVK_CompareOp[
pipelineCreateInfo->depthStencilState.frontStencilState.compareOp
];
frontStencilState.compareMask =
pipelineCreateInfo->depthStencilState.frontStencilState.compareMask;
frontStencilState.writeMask =
pipelineCreateInfo->depthStencilState.frontStencilState.writeMask;
frontStencilState.reference =
pipelineCreateInfo->depthStencilState.frontStencilState.reference;
backStencilState.failOp = RefreshToVK_StencilOp[
pipelineCreateInfo->depthStencilState.backStencilState.failOp
];
backStencilState.passOp = RefreshToVK_StencilOp[
pipelineCreateInfo->depthStencilState.backStencilState.passOp
];
backStencilState.depthFailOp = RefreshToVK_StencilOp[
pipelineCreateInfo->depthStencilState.backStencilState.depthFailOp
];
backStencilState.compareOp = RefreshToVK_CompareOp[
pipelineCreateInfo->depthStencilState.backStencilState.compareOp
];
backStencilState.compareMask =
pipelineCreateInfo->depthStencilState.backStencilState.compareMask;
backStencilState.writeMask =
pipelineCreateInfo->depthStencilState.backStencilState.writeMask;
backStencilState.reference =
pipelineCreateInfo->depthStencilState.backStencilState.reference;
depthStencilStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
depthStencilStateCreateInfo.pNext = NULL;
depthStencilStateCreateInfo.flags = 0;
depthStencilStateCreateInfo.depthTestEnable =
pipelineCreateInfo->depthStencilState.depthTestEnable;
depthStencilStateCreateInfo.depthWriteEnable =
pipelineCreateInfo->depthStencilState.depthWriteEnable;
depthStencilStateCreateInfo.depthCompareOp = RefreshToVK_CompareOp[
pipelineCreateInfo->depthStencilState.compareOp
];
depthStencilStateCreateInfo.depthBoundsTestEnable =
pipelineCreateInfo->depthStencilState.depthBoundsTestEnable;
depthStencilStateCreateInfo.stencilTestEnable =
pipelineCreateInfo->depthStencilState.stencilTestEnable;
depthStencilStateCreateInfo.front = frontStencilState;
depthStencilStateCreateInfo.back = backStencilState;
depthStencilStateCreateInfo.minDepthBounds =
pipelineCreateInfo->depthStencilState.minDepthBounds;
depthStencilStateCreateInfo.maxDepthBounds =
pipelineCreateInfo->depthStencilState.maxDepthBounds;
/* Color Blend */
for (i = 0; i < pipelineCreateInfo->attachmentInfo.colorAttachmentCount; i += 1)
2020-12-17 19:40:49 +00:00
{
Refresh_ColorAttachmentBlendState blendState = pipelineCreateInfo->attachmentInfo.colorAttachmentDescriptions[i].blendState;
2020-12-17 19:40:49 +00:00
colorBlendAttachmentStates[i].blendEnable =
blendState.blendEnable;
2020-12-17 19:40:49 +00:00
colorBlendAttachmentStates[i].srcColorBlendFactor = RefreshToVK_BlendFactor[
blendState.srcColorBlendFactor
2020-12-17 19:40:49 +00:00
];
colorBlendAttachmentStates[i].dstColorBlendFactor = RefreshToVK_BlendFactor[
blendState.dstColorBlendFactor
2020-12-17 19:40:49 +00:00
];
colorBlendAttachmentStates[i].colorBlendOp = RefreshToVK_BlendOp[
blendState.colorBlendOp
2020-12-17 19:40:49 +00:00
];
colorBlendAttachmentStates[i].srcAlphaBlendFactor = RefreshToVK_BlendFactor[
blendState.srcAlphaBlendFactor
2020-12-17 19:40:49 +00:00
];
colorBlendAttachmentStates[i].dstAlphaBlendFactor = RefreshToVK_BlendFactor[
blendState.dstAlphaBlendFactor
2020-12-17 19:40:49 +00:00
];
colorBlendAttachmentStates[i].alphaBlendOp = RefreshToVK_BlendOp[
blendState.alphaBlendOp
2020-12-17 19:40:49 +00:00
];
colorBlendAttachmentStates[i].colorWriteMask =
blendState.colorWriteMask;
2020-12-17 19:40:49 +00:00
}
colorBlendStateCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
colorBlendStateCreateInfo.pNext = NULL;
colorBlendStateCreateInfo.flags = 0;
colorBlendStateCreateInfo.attachmentCount =
pipelineCreateInfo->attachmentInfo.colorAttachmentCount;
2020-12-17 19:40:49 +00:00
colorBlendStateCreateInfo.pAttachments =
colorBlendAttachmentStates;
colorBlendStateCreateInfo.blendConstants[0] =
2022-03-02 19:10:28 +00:00
pipelineCreateInfo->blendConstants[0];
2020-12-17 19:40:49 +00:00
colorBlendStateCreateInfo.blendConstants[1] =
2022-03-02 19:10:28 +00:00
pipelineCreateInfo->blendConstants[1];
2020-12-17 19:40:49 +00:00
colorBlendStateCreateInfo.blendConstants[2] =
2022-03-02 19:10:28 +00:00
pipelineCreateInfo->blendConstants[2];
2020-12-17 19:40:49 +00:00
colorBlendStateCreateInfo.blendConstants[3] =
2022-03-02 19:10:28 +00:00
pipelineCreateInfo->blendConstants[3];
/* We don't support LogicOp, so this is easy. */
colorBlendStateCreateInfo.logicOpEnable = VK_FALSE;
colorBlendStateCreateInfo.logicOp = 0;
2020-12-17 19:40:49 +00:00
/* Pipeline Layout */
2020-12-23 21:11:09 +00:00
graphicsPipeline->pipelineLayout = VULKAN_INTERNAL_FetchGraphicsPipelineLayout(
renderer,
2022-03-02 19:22:52 +00:00
pipelineCreateInfo->vertexShaderInfo.samplerBindingCount,
pipelineCreateInfo->fragmentShaderInfo.samplerBindingCount
2020-12-17 19:40:49 +00:00
);
/* Pipeline */
vkPipelineCreateInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
2020-12-23 04:22:17 +00:00
vkPipelineCreateInfo.pNext = NULL;
2020-12-17 19:40:49 +00:00
vkPipelineCreateInfo.flags = 0;
vkPipelineCreateInfo.stageCount = 2;
vkPipelineCreateInfo.pStages = shaderStageCreateInfos;
vkPipelineCreateInfo.pVertexInputState = &vertexInputStateCreateInfo;
vkPipelineCreateInfo.pInputAssemblyState = &inputAssemblyStateCreateInfo;
vkPipelineCreateInfo.pTessellationState = VK_NULL_HANDLE;
vkPipelineCreateInfo.pViewportState = &viewportStateCreateInfo;
vkPipelineCreateInfo.pRasterizationState = &rasterizationStateCreateInfo;
vkPipelineCreateInfo.pMultisampleState = &multisampleStateCreateInfo;
vkPipelineCreateInfo.pDepthStencilState = &depthStencilStateCreateInfo;
vkPipelineCreateInfo.pColorBlendState = &colorBlendStateCreateInfo;
2022-03-04 20:30:33 +00:00
vkPipelineCreateInfo.pDynamicState = &dynamicStateCreateInfo;
vkPipelineCreateInfo.layout = graphicsPipeline->pipelineLayout->pipelineLayout;
vkPipelineCreateInfo.renderPass = transientRenderPass;
2020-12-17 19:40:49 +00:00
vkPipelineCreateInfo.subpass = 0;
vkPipelineCreateInfo.basePipelineHandle = VK_NULL_HANDLE;
vkPipelineCreateInfo.basePipelineIndex = 0;
/* TODO: enable pipeline caching */
vulkanResult = renderer->vkCreateGraphicsPipelines(
renderer->logicalDevice,
VK_NULL_HANDLE,
1,
&vkPipelineCreateInfo,
NULL,
2020-12-19 04:08:07 +00:00
&graphicsPipeline->pipeline
2020-12-17 19:40:49 +00:00
);
SDL_stack_free(vertexInputBindingDescriptions);
SDL_stack_free(vertexInputAttributeDescriptions);
SDL_stack_free(colorBlendAttachmentStates);
2020-12-19 04:08:07 +00:00
renderer->vkDestroyRenderPass(
renderer->logicalDevice,
transientRenderPass,
NULL
);
if (vulkanResult != VK_SUCCESS)
2020-12-28 22:07:13 +00:00
{
SDL_free(graphicsPipeline);
LogVulkanResultAsError("vkCreateGraphicsPipelines", vulkanResult);
Refresh_LogError("Failed to create graphics pipeline!");
return NULL;
2020-12-28 22:07:13 +00:00
}
2020-12-23 21:11:09 +00:00
SDL_AtomicSet(&graphicsPipeline->referenceCount, 0);
2021-01-05 23:00:51 +00:00
return (Refresh_GraphicsPipeline*) graphicsPipeline;
2020-12-17 01:23:49 +00:00
}
2020-12-29 22:52:24 +00:00
static VulkanComputePipelineLayout* VULKAN_INTERNAL_FetchComputePipelineLayout(
VulkanRenderer *renderer,
uint32_t bufferBindingCount,
uint32_t imageBindingCount
) {
VkResult vulkanResult;
2020-12-31 04:39:47 +00:00
VkDescriptorSetLayout setLayouts[3];
2020-12-29 22:52:24 +00:00
VkPipelineLayoutCreateInfo pipelineLayoutCreateInfo;
ComputePipelineLayoutHash pipelineLayoutHash;
VulkanComputePipelineLayout *vulkanComputePipelineLayout;
2020-12-29 23:05:26 +00:00
pipelineLayoutHash.bufferLayout = VULKAN_INTERNAL_FetchDescriptorSetLayout(
2020-12-29 22:52:24 +00:00
renderer,
2020-12-29 23:05:26 +00:00
VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
bufferBindingCount,
VK_SHADER_STAGE_COMPUTE_BIT
2020-12-29 22:52:24 +00:00
);
2020-12-29 23:05:26 +00:00
pipelineLayoutHash.imageLayout = VULKAN_INTERNAL_FetchDescriptorSetLayout(
2020-12-29 22:52:24 +00:00
renderer,
2020-12-30 00:19:19 +00:00
VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
2020-12-29 23:05:26 +00:00
imageBindingCount,
VK_SHADER_STAGE_COMPUTE_BIT
2020-12-29 22:52:24 +00:00
);
pipelineLayoutHash.uniformLayout = renderer->computeUniformDescriptorSetLayout;
2020-12-31 04:39:47 +00:00
2020-12-29 22:52:24 +00:00
vulkanComputePipelineLayout = ComputePipelineLayoutHashArray_Fetch(
&renderer->computePipelineLayoutHashTable,
pipelineLayoutHash
);
if (vulkanComputePipelineLayout != NULL)
{
return vulkanComputePipelineLayout;
}
vulkanComputePipelineLayout = SDL_malloc(sizeof(VulkanComputePipelineLayout));
setLayouts[0] = pipelineLayoutHash.bufferLayout;
setLayouts[1] = pipelineLayoutHash.imageLayout;
2020-12-31 04:39:47 +00:00
setLayouts[2] = pipelineLayoutHash.uniformLayout;
2020-12-29 22:52:24 +00:00
pipelineLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
pipelineLayoutCreateInfo.pNext = NULL;
pipelineLayoutCreateInfo.flags = 0;
2020-12-31 04:39:47 +00:00
pipelineLayoutCreateInfo.setLayoutCount = 3;
2020-12-29 22:52:24 +00:00
pipelineLayoutCreateInfo.pSetLayouts = setLayouts;
pipelineLayoutCreateInfo.pushConstantRangeCount = 0;
pipelineLayoutCreateInfo.pPushConstantRanges = NULL;
vulkanResult = renderer->vkCreatePipelineLayout(
renderer->logicalDevice,
&pipelineLayoutCreateInfo,
NULL,
&vulkanComputePipelineLayout->pipelineLayout
);
if (vulkanResult != VK_SUCCESS)
{
2021-01-27 20:51:36 +00:00
LogVulkanResultAsError("vkCreatePipelineLayout", vulkanResult);
2020-12-29 22:52:24 +00:00
return NULL;
}
ComputePipelineLayoutHashArray_Insert(
&renderer->computePipelineLayoutHashTable,
pipelineLayoutHash,
vulkanComputePipelineLayout
);
/* If the binding count is 0
* we can just bind the same descriptor set
* so no cache is needed
*/
if (bufferBindingCount == 0)
{
vulkanComputePipelineLayout->bufferDescriptorSetCache = NULL;
}
else
{
vulkanComputePipelineLayout->bufferDescriptorSetCache =
VULKAN_INTERNAL_CreateDescriptorSetCache(
2020-12-29 22:52:24 +00:00
renderer,
2020-12-31 04:39:47 +00:00
VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,
2020-12-29 22:52:24 +00:00
pipelineLayoutHash.bufferLayout,
bufferBindingCount
);
}
if (imageBindingCount == 0)
{
vulkanComputePipelineLayout->imageDescriptorSetCache = NULL;
}
else
{
vulkanComputePipelineLayout->imageDescriptorSetCache =
VULKAN_INTERNAL_CreateDescriptorSetCache(
2020-12-29 22:52:24 +00:00
renderer,
2020-12-30 00:53:10 +00:00
VK_DESCRIPTOR_TYPE_STORAGE_IMAGE,
2020-12-29 22:52:24 +00:00
pipelineLayoutHash.imageLayout,
imageBindingCount
);
}
return vulkanComputePipelineLayout;
}
2021-01-05 23:00:51 +00:00
static Refresh_ComputePipeline* VULKAN_CreateComputePipeline(
Refresh_Renderer *driverData,
2022-03-02 19:22:52 +00:00
Refresh_ComputeShaderInfo *computeShaderInfo
2020-12-29 22:52:24 +00:00
) {
VkComputePipelineCreateInfo computePipelineCreateInfo;
VkPipelineShaderStageCreateInfo pipelineShaderStageCreateInfo;
2020-12-31 04:39:47 +00:00
2020-12-29 22:52:24 +00:00
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanComputePipeline *vulkanComputePipeline = SDL_malloc(sizeof(VulkanComputePipeline));
vulkanComputePipeline->computeShaderModule = (VulkanShaderModule*) computeShaderInfo->shaderModule;
SDL_AtomicIncRef(&vulkanComputePipeline->computeShaderModule->referenceCount);
2020-12-29 22:52:24 +00:00
pipelineShaderStageCreateInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
pipelineShaderStageCreateInfo.pNext = NULL;
pipelineShaderStageCreateInfo.flags = 0;
pipelineShaderStageCreateInfo.stage = VK_SHADER_STAGE_COMPUTE_BIT;
2022-03-04 21:37:45 +00:00
pipelineShaderStageCreateInfo.module = vulkanComputePipeline->computeShaderModule->shaderModule;
2022-03-02 19:22:52 +00:00
pipelineShaderStageCreateInfo.pName = computeShaderInfo->entryPointName;
2020-12-29 22:52:24 +00:00
pipelineShaderStageCreateInfo.pSpecializationInfo = NULL;
2020-12-31 04:39:47 +00:00
vulkanComputePipeline->pipelineLayout = VULKAN_INTERNAL_FetchComputePipelineLayout(
2020-12-29 22:52:24 +00:00
renderer,
2022-03-02 19:22:52 +00:00
computeShaderInfo->bufferBindingCount,
computeShaderInfo->imageBindingCount
2020-12-29 22:52:24 +00:00
);
computePipelineCreateInfo.sType = VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO;
computePipelineCreateInfo.pNext = NULL;
computePipelineCreateInfo.flags = 0;
computePipelineCreateInfo.stage = pipelineShaderStageCreateInfo;
2020-12-31 04:39:47 +00:00
computePipelineCreateInfo.layout =
vulkanComputePipeline->pipelineLayout->pipelineLayout;
2020-12-29 22:52:24 +00:00
computePipelineCreateInfo.basePipelineHandle = NULL;
computePipelineCreateInfo.basePipelineIndex = 0;
renderer->vkCreateComputePipelines(
renderer->logicalDevice,
NULL,
1,
&computePipelineCreateInfo,
NULL,
&vulkanComputePipeline->pipeline
);
vulkanComputePipeline->uniformBlockSize =
2020-12-31 04:39:47 +00:00
VULKAN_INTERNAL_NextHighestAlignment(
2022-03-02 19:22:52 +00:00
computeShaderInfo->uniformBufferSize,
2020-12-31 04:39:47 +00:00
renderer->minUBOAlignment
);
SDL_AtomicSet(&vulkanComputePipeline->referenceCount, 0);
2021-01-05 23:00:51 +00:00
return (Refresh_ComputePipeline*) vulkanComputePipeline;
2020-12-29 22:52:24 +00:00
}
2021-01-05 23:00:51 +00:00
static Refresh_Sampler* VULKAN_CreateSampler(
Refresh_Renderer *driverData,
Refresh_SamplerStateCreateInfo *samplerStateCreateInfo
2020-12-17 01:23:49 +00:00
) {
2020-12-18 01:48:26 +00:00
VulkanRenderer* renderer = (VulkanRenderer*)driverData;
VulkanSampler *vulkanSampler = SDL_malloc(sizeof(VulkanSampler));
VkResult vulkanResult;
2020-12-18 01:48:26 +00:00
VkSamplerCreateInfo vkSamplerCreateInfo;
vkSamplerCreateInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
vkSamplerCreateInfo.pNext = NULL;
vkSamplerCreateInfo.flags = 0;
2021-01-03 21:01:29 +00:00
vkSamplerCreateInfo.magFilter = RefreshToVK_Filter[
2020-12-18 01:48:26 +00:00
samplerStateCreateInfo->magFilter
];
2021-01-03 21:01:29 +00:00
vkSamplerCreateInfo.minFilter = RefreshToVK_Filter[
2020-12-18 01:48:26 +00:00
samplerStateCreateInfo->minFilter
];
vkSamplerCreateInfo.mipmapMode = RefreshToVK_SamplerMipmapMode[
samplerStateCreateInfo->mipmapMode
];
vkSamplerCreateInfo.addressModeU = RefreshToVK_SamplerAddressMode[
samplerStateCreateInfo->addressModeU
];
vkSamplerCreateInfo.addressModeV = RefreshToVK_SamplerAddressMode[
samplerStateCreateInfo->addressModeV
];
vkSamplerCreateInfo.addressModeW = RefreshToVK_SamplerAddressMode[
samplerStateCreateInfo->addressModeW
];
vkSamplerCreateInfo.mipLodBias = samplerStateCreateInfo->mipLodBias;
vkSamplerCreateInfo.anisotropyEnable = samplerStateCreateInfo->anisotropyEnable;
vkSamplerCreateInfo.maxAnisotropy = samplerStateCreateInfo->maxAnisotropy;
vkSamplerCreateInfo.compareEnable = samplerStateCreateInfo->compareEnable;
vkSamplerCreateInfo.compareOp = RefreshToVK_CompareOp[
samplerStateCreateInfo->compareOp
];
vkSamplerCreateInfo.minLod = samplerStateCreateInfo->minLod;
vkSamplerCreateInfo.maxLod = samplerStateCreateInfo->maxLod;
2020-12-18 20:58:03 +00:00
vkSamplerCreateInfo.borderColor = RefreshToVK_BorderColor[
samplerStateCreateInfo->borderColor
];
2020-12-18 01:48:26 +00:00
vkSamplerCreateInfo.unnormalizedCoordinates = VK_FALSE;
vulkanResult = renderer->vkCreateSampler(
renderer->logicalDevice,
&vkSamplerCreateInfo,
NULL,
&vulkanSampler->sampler
2020-12-18 01:48:26 +00:00
);
if (vulkanResult != VK_SUCCESS)
{
SDL_free(vulkanSampler);
2021-01-27 20:51:36 +00:00
LogVulkanResultAsError("vkCreateSampler", vulkanResult);
2020-12-18 01:48:26 +00:00
return NULL;
}
SDL_AtomicSet(&vulkanSampler->referenceCount, 0);
return (Refresh_Sampler*) vulkanSampler;
2020-12-17 01:23:49 +00:00
}
2021-01-05 23:00:51 +00:00
static Refresh_ShaderModule* VULKAN_CreateShaderModule(
Refresh_Renderer *driverData,
Refresh_ShaderModuleCreateInfo *shaderModuleCreateInfo
2020-12-17 01:23:49 +00:00
) {
VulkanShaderModule *vulkanShaderModule = SDL_malloc(sizeof(VulkanShaderModule));
2020-12-19 01:25:22 +00:00
VkResult vulkanResult;
VkShaderModuleCreateInfo vkShaderModuleCreateInfo;
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
vkShaderModuleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
vkShaderModuleCreateInfo.pNext = NULL;
vkShaderModuleCreateInfo.flags = 0;
vkShaderModuleCreateInfo.codeSize = shaderModuleCreateInfo->codeSize;
vkShaderModuleCreateInfo.pCode = (uint32_t*) shaderModuleCreateInfo->byteCode;
vulkanResult = renderer->vkCreateShaderModule(
renderer->logicalDevice,
&vkShaderModuleCreateInfo,
2020-12-18 22:35:33 +00:00
NULL,
&vulkanShaderModule->shaderModule
2020-12-18 22:35:33 +00:00
);
if (vulkanResult != VK_SUCCESS)
{
SDL_free(vulkanShaderModule);
LogVulkanResultAsError("vkCreateShaderModule", vulkanResult);
Refresh_LogError("Failed to create shader module!");
return NULL;
2020-12-18 22:35:33 +00:00
}
SDL_AtomicSet(&vulkanShaderModule->referenceCount, 0);
return (Refresh_ShaderModule*) vulkanShaderModule;
2020-12-18 22:35:33 +00:00
}
static Refresh_Texture* VULKAN_CreateTexture(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_TextureCreateInfo *textureCreateInfo
2020-12-17 01:23:49 +00:00
) {
2020-12-18 22:35:33 +00:00
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VkImageUsageFlags imageUsageFlags = (
2020-12-18 22:35:33 +00:00
VK_IMAGE_USAGE_TRANSFER_DST_BIT |
VK_IMAGE_USAGE_TRANSFER_SRC_BIT
);
2021-01-29 05:37:11 +00:00
VkImageAspectFlags imageAspectFlags;
uint8_t isDepthFormat = IsRefreshDepthFormat(textureCreateInfo->format);
2022-06-17 07:41:27 +00:00
VkFormat format;
VulkanTexture *result;
2022-06-17 07:41:27 +00:00
if (isDepthFormat)
2022-06-17 07:41:27 +00:00
{
format = RefreshToVK_DepthFormat(renderer, textureCreateInfo->format);
}
else
{
format = RefreshToVK_SurfaceFormat[textureCreateInfo->format];
}
2020-12-18 22:35:33 +00:00
if (textureCreateInfo->usageFlags & REFRESH_TEXTUREUSAGE_SAMPLER_BIT)
{
imageUsageFlags |= VK_IMAGE_USAGE_SAMPLED_BIT;
}
if (textureCreateInfo->usageFlags & REFRESH_TEXTUREUSAGE_COLOR_TARGET_BIT)
2020-12-18 22:35:33 +00:00
{
imageUsageFlags |= VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
2020-12-18 22:35:33 +00:00
}
if (textureCreateInfo->usageFlags & REFRESH_TEXTUREUSAGE_DEPTH_STENCIL_TARGET_BIT)
{
imageUsageFlags |= VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
}
if (textureCreateInfo->usageFlags & REFRESH_TEXTUREUSAGE_COMPUTE_BIT)
{
imageUsageFlags |= VK_IMAGE_USAGE_STORAGE_BIT;
}
if (isDepthFormat)
2021-01-29 05:37:11 +00:00
{
imageAspectFlags = VK_IMAGE_ASPECT_DEPTH_BIT;
if (IsStencilFormat(format))
{
imageAspectFlags |= VK_IMAGE_ASPECT_STENCIL_BIT;
}
}
else
{
imageAspectFlags = VK_IMAGE_ASPECT_COLOR_BIT;
}
result = VULKAN_INTERNAL_CreateTexture(
2020-12-18 22:35:33 +00:00
renderer,
textureCreateInfo->width,
textureCreateInfo->height,
textureCreateInfo->depth,
textureCreateInfo->isCube,
textureCreateInfo->levelCount,
isDepthFormat ?
textureCreateInfo->sampleCount : /* depth textures do not have a separate msaaTex */
REFRESH_SAMPLECOUNT_1,
2021-01-29 05:37:11 +00:00
format,
imageAspectFlags,
imageUsageFlags
2020-12-18 22:35:33 +00:00
);
/* create the MSAA texture for color attachments, if needed */
if ( result != NULL &&
!isDepthFormat &&
textureCreateInfo->sampleCount > REFRESH_SAMPLECOUNT_1 )
{
result->msaaTex = VULKAN_INTERNAL_CreateTexture(
renderer,
textureCreateInfo->width,
textureCreateInfo->height,
textureCreateInfo->depth,
textureCreateInfo->isCube,
textureCreateInfo->levelCount,
textureCreateInfo->sampleCount,
format,
imageAspectFlags,
imageUsageFlags
);
}
return (Refresh_Texture*) result;
2020-12-18 22:35:33 +00:00
}
2021-01-05 23:00:51 +00:00
static Refresh_Buffer* VULKAN_CreateBuffer(
Refresh_Renderer *driverData,
Refresh_BufferUsageFlags usageFlags,
2020-12-17 01:23:49 +00:00
uint32_t sizeInBytes
) {
VulkanBuffer* buffer;
VulkanResourceAccessType resourceAccessType;
2021-01-03 03:03:25 +00:00
VkBufferUsageFlags vulkanUsageFlags =
VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
2020-12-31 04:39:47 +00:00
if (usageFlags == 0)
{
resourceAccessType = RESOURCE_ACCESS_TRANSFER_READ_WRITE;
}
2020-12-31 04:39:47 +00:00
if (usageFlags & REFRESH_BUFFERUSAGE_VERTEX_BIT)
{
vulkanUsageFlags |= VK_BUFFER_USAGE_VERTEX_BUFFER_BIT;
resourceAccessType = RESOURCE_ACCESS_VERTEX_BUFFER;
2020-12-19 01:32:27 +00:00
}
2020-12-31 04:39:47 +00:00
if (usageFlags & REFRESH_BUFFERUSAGE_INDEX_BIT)
{
vulkanUsageFlags |= VK_BUFFER_USAGE_INDEX_BUFFER_BIT;
resourceAccessType = RESOURCE_ACCESS_INDEX_BUFFER;
2020-12-31 04:39:47 +00:00
}
2020-12-17 01:23:49 +00:00
2020-12-31 06:28:37 +00:00
if (usageFlags & REFRESH_BUFFERUSAGE_COMPUTE_BIT)
2020-12-31 04:39:47 +00:00
{
vulkanUsageFlags |= VK_BUFFER_USAGE_STORAGE_BUFFER_BIT;
resourceAccessType = RESOURCE_ACCESS_COMPUTE_SHADER_BUFFER_READ_WRITE;
}
if (usageFlags & REFRESH_BUFFERUSAGE_INDIRECT_BIT)
{
vulkanUsageFlags |= VK_BUFFER_USAGE_INDIRECT_BUFFER_BIT;
resourceAccessType = RESOURCE_ACCESS_INDIRECT_BUFFER;
2020-12-31 04:39:47 +00:00
}
2020-12-19 01:32:27 +00:00
2022-01-02 22:35:57 +00:00
buffer = VULKAN_INTERNAL_CreateBuffer(
(VulkanRenderer*)driverData,
2020-12-19 01:32:27 +00:00
sizeInBytes,
resourceAccessType,
vulkanUsageFlags
2022-01-02 22:35:57 +00:00
);
if (buffer == NULL)
{
Refresh_LogError("Failed to create buffer!");
2020-12-19 01:32:27 +00:00
return NULL;
}
return (Refresh_Buffer*) buffer;
2020-12-17 01:23:49 +00:00
}
2020-12-19 04:08:07 +00:00
/* Setters */
static VulkanTransferBuffer* VULKAN_INTERNAL_AcquireTransferBuffer(
2020-12-19 04:08:07 +00:00
VulkanRenderer *renderer,
2022-01-02 22:35:57 +00:00
VulkanCommandBuffer *commandBuffer,
VkDeviceSize requiredSize,
VkDeviceSize alignment
2020-12-19 04:08:07 +00:00
) {
VkDeviceSize size;
VkDeviceSize offset;
uint32_t i;
VulkanTransferBuffer *transferBuffer;
/* Search the command buffer's current transfer buffers */
for (i = 0; i < commandBuffer->transferBufferCount; i += 1)
2020-12-19 04:08:07 +00:00
{
transferBuffer = commandBuffer->transferBuffers[i];
offset = transferBuffer->offset + alignment - (transferBuffer->offset % alignment);
if (offset + requiredSize <= transferBuffer->buffer->size)
2022-01-02 22:35:57 +00:00
{
transferBuffer->offset = offset;
return transferBuffer;
2022-01-02 22:35:57 +00:00
}
}
2020-12-19 04:08:07 +00:00
/* Nothing fits, so let's get a transfer buffer from the pool */
2022-01-02 22:35:57 +00:00
SDL_LockMutex(renderer->transferBufferPool.lock);
for (i = 0; i < renderer->transferBufferPool.availableBufferCount; i += 1)
2022-01-02 22:35:57 +00:00
{
transferBuffer = renderer->transferBufferPool.availableBuffers[i];
offset = transferBuffer->offset + alignment - (transferBuffer->offset % alignment);
2020-12-19 04:08:07 +00:00
if (offset + requiredSize <= transferBuffer->buffer->size)
2022-01-02 22:35:57 +00:00
{
if (commandBuffer->transferBufferCount == commandBuffer->transferBufferCapacity)
{
commandBuffer->transferBufferCapacity *= 2;
commandBuffer->transferBuffers = SDL_realloc(
commandBuffer->transferBuffers,
commandBuffer->transferBufferCapacity * sizeof(VulkanTransferBuffer*)
);
}
commandBuffer->transferBuffers[commandBuffer->transferBufferCount] = transferBuffer;
commandBuffer->transferBufferCount += 1;
renderer->transferBufferPool.availableBuffers[i] = renderer->transferBufferPool.availableBuffers[renderer->transferBufferPool.availableBufferCount - 1];
renderer->transferBufferPool.availableBufferCount -= 1;
SDL_UnlockMutex(renderer->transferBufferPool.lock);
transferBuffer->offset = offset;
return transferBuffer;
2022-01-02 22:35:57 +00:00
}
}
SDL_UnlockMutex(renderer->transferBufferPool.lock);
/* Nothing fits still, so let's create a new transfer buffer */
2022-01-18 05:28:25 +00:00
size = TRANSFER_BUFFER_STARTING_SIZE;
2022-01-02 22:35:57 +00:00
while (size < requiredSize)
{
size *= 2;
}
2022-01-02 22:35:57 +00:00
transferBuffer = SDL_malloc(sizeof(VulkanTransferBuffer));
transferBuffer->offset = 0;
transferBuffer->buffer = VULKAN_INTERNAL_CreateBuffer(
renderer,
size,
RESOURCE_ACCESS_TRANSFER_READ_WRITE,
VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT
);
2022-01-02 22:35:57 +00:00
if (transferBuffer == NULL)
{
Refresh_LogError("Failed to allocate transfer buffer!");
return NULL;
2020-12-19 04:08:07 +00:00
}
if (commandBuffer->transferBufferCount == commandBuffer->transferBufferCapacity)
{
commandBuffer->transferBufferCapacity *= 2;
commandBuffer->transferBuffers = SDL_realloc(
commandBuffer->transferBuffers,
commandBuffer->transferBufferCapacity * sizeof(VulkanTransferBuffer*)
);
}
commandBuffer->transferBuffers[commandBuffer->transferBufferCount] = transferBuffer;
commandBuffer->transferBufferCount += 1;
return transferBuffer;
}
2021-01-03 22:37:02 +00:00
static void VULKAN_SetTextureData(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
2021-01-05 23:00:51 +00:00
Refresh_TextureSlice *textureSlice,
2020-12-17 01:23:49 +00:00
void *data,
uint32_t dataLengthInBytes
) {
2020-12-19 01:03:26 +00:00
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
2021-01-03 22:37:02 +00:00
VulkanTexture *vulkanTexture = (VulkanTexture*) textureSlice->texture;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanTransferBuffer *transferBuffer;
2020-12-19 01:03:26 +00:00
VkBufferImageCopy imageCopy;
2021-01-14 02:02:45 +00:00
uint8_t *stagingBufferPointer;
2022-05-12 04:16:24 +00:00
uint32_t blockSize = VULKAN_INTERNAL_GetTextureBlockSize(vulkanTexture->format);
uint32_t bufferRowLength;
uint32_t bufferImageHeight;
2020-12-19 01:03:26 +00:00
transferBuffer = VULKAN_INTERNAL_AcquireTransferBuffer(
renderer,
2022-01-02 22:35:57 +00:00
vulkanCommandBuffer,
VULKAN_INTERNAL_BytesPerImage(
textureSlice->rectangle.w,
textureSlice->rectangle.h,
vulkanTexture->format
),
VULKAN_INTERNAL_BytesPerPixel(vulkanTexture->format)
);
2020-12-19 04:08:07 +00:00
if (transferBuffer == NULL)
{
return;
}
2021-01-14 02:02:45 +00:00
stagingBufferPointer =
transferBuffer->buffer->allocation->mapPointer +
transferBuffer->buffer->offset +
transferBuffer->offset;
2020-12-19 01:03:26 +00:00
2021-01-14 02:02:45 +00:00
SDL_memcpy(
stagingBufferPointer,
2021-01-21 01:16:43 +00:00
data,
2021-01-14 02:02:45 +00:00
dataLengthInBytes
2020-12-19 01:03:26 +00:00
);
/* TODO: is it worth it to only transition the specific subresource? */
2020-12-19 01:03:26 +00:00
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
2020-12-19 01:03:26 +00:00
RESOURCE_ACCESS_TRANSFER_WRITE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
vulkanTexture->layerCount,
0,
vulkanTexture->levelCount,
2020-12-19 01:03:26 +00:00
0,
vulkanTexture->image,
&vulkanTexture->resourceAccessType
);
2022-05-12 04:16:24 +00:00
bufferRowLength = SDL_max(blockSize, textureSlice->rectangle.w);
bufferImageHeight = SDL_max(blockSize, textureSlice->rectangle.h);
2021-01-03 22:37:02 +00:00
imageCopy.imageExtent.width = textureSlice->rectangle.w;
imageCopy.imageExtent.height = textureSlice->rectangle.h;
2020-12-19 01:03:26 +00:00
imageCopy.imageExtent.depth = 1;
2021-01-03 22:37:02 +00:00
imageCopy.imageOffset.x = textureSlice->rectangle.x;
imageCopy.imageOffset.y = textureSlice->rectangle.y;
imageCopy.imageOffset.z = textureSlice->depth;
2020-12-19 01:03:26 +00:00
imageCopy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
2021-01-03 22:37:02 +00:00
imageCopy.imageSubresource.baseArrayLayer = textureSlice->layer;
2020-12-19 01:03:26 +00:00
imageCopy.imageSubresource.layerCount = 1;
2021-01-03 22:37:02 +00:00
imageCopy.imageSubresource.mipLevel = textureSlice->level;
imageCopy.bufferOffset = transferBuffer->offset;
2022-05-12 04:16:24 +00:00
imageCopy.bufferRowLength = bufferRowLength;
imageCopy.bufferImageHeight = bufferImageHeight;
2020-12-19 01:03:26 +00:00
2021-01-02 06:07:15 +00:00
renderer->vkCmdCopyBufferToImage(
vulkanCommandBuffer->commandBuffer,
transferBuffer->buffer->buffer,
2020-12-19 01:03:26 +00:00
vulkanTexture->image,
AccessMap[vulkanTexture->resourceAccessType].imageLayout,
1,
&imageCopy
2021-01-02 06:07:15 +00:00
);
transferBuffer->offset += dataLengthInBytes;
if (vulkanTexture->usageFlags & VK_IMAGE_USAGE_SAMPLED_BIT)
{
/* TODO: is it worth it to only transition the specific subresource? */
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
vulkanTexture->layerCount,
0,
vulkanTexture->levelCount,
0,
vulkanTexture->image,
&vulkanTexture->resourceAccessType
);
}
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, vulkanTexture);
2020-12-17 01:23:49 +00:00
}
static void VULKAN_SetTextureDataYUV(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer* commandBuffer,
2021-01-05 23:00:51 +00:00
Refresh_Texture *y,
Refresh_Texture *u,
Refresh_Texture *v,
2020-12-17 01:23:49 +00:00
uint32_t yWidth,
uint32_t yHeight,
uint32_t uvWidth,
uint32_t uvHeight,
void* data,
uint32_t dataLength
) {
2020-12-19 02:38:15 +00:00
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanTexture *tex = (VulkanTexture*) y;
2021-01-02 21:31:17 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*)commandBuffer;
VulkanTransferBuffer *transferBuffer;
2020-12-19 02:38:15 +00:00
uint8_t *dataPtr = (uint8_t*) data;
int32_t yDataLength = BytesPerImage(yWidth, yHeight, REFRESH_TEXTUREFORMAT_R8);
int32_t uvDataLength = BytesPerImage(uvWidth, uvHeight, REFRESH_TEXTUREFORMAT_R8);
2020-12-19 02:38:15 +00:00
VkBufferImageCopy imageCopy;
2021-01-14 02:02:45 +00:00
uint8_t * stagingBufferPointer;
2020-12-19 02:38:15 +00:00
transferBuffer = VULKAN_INTERNAL_AcquireTransferBuffer(
2021-02-03 00:37:01 +00:00
renderer,
2022-01-02 22:35:57 +00:00
vulkanCommandBuffer,
yDataLength + uvDataLength,
VULKAN_INTERNAL_BytesPerPixel(tex->format)
);
2020-12-19 04:08:07 +00:00
if (transferBuffer == NULL)
{
return;
}
2021-01-14 02:02:45 +00:00
stagingBufferPointer =
transferBuffer->buffer->allocation->mapPointer +
transferBuffer->buffer->offset +
transferBuffer->offset;
2021-01-03 21:12:12 +00:00
2020-12-19 02:38:15 +00:00
/* Initialize values that are the same for Y, U, and V */
imageCopy.imageExtent.depth = 1;
imageCopy.imageOffset.x = 0;
imageCopy.imageOffset.y = 0;
imageCopy.imageOffset.z = 0;
imageCopy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
imageCopy.imageSubresource.baseArrayLayer = 0;
imageCopy.imageSubresource.layerCount = 1;
imageCopy.imageSubresource.mipLevel = 0;
/* Y */
SDL_memcpy(
2021-01-14 02:02:45 +00:00
stagingBufferPointer,
2020-12-19 02:38:15 +00:00
dataPtr,
yDataLength
);
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
2020-12-19 02:38:15 +00:00
RESOURCE_ACCESS_TRANSFER_WRITE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
tex->layerCount,
0,
tex->levelCount,
0,
tex->image,
&tex->resourceAccessType
);
2020-12-19 02:38:15 +00:00
imageCopy.imageExtent.width = yWidth;
imageCopy.imageExtent.height = yHeight;
imageCopy.bufferOffset = transferBuffer->offset;
2020-12-19 02:38:15 +00:00
imageCopy.bufferRowLength = yWidth;
imageCopy.bufferImageHeight = yHeight;
2021-01-02 06:07:15 +00:00
renderer->vkCmdCopyBufferToImage(
vulkanCommandBuffer->commandBuffer,
transferBuffer->buffer->buffer,
2020-12-19 02:38:15 +00:00
tex->image,
AccessMap[tex->resourceAccessType].imageLayout,
1,
&imageCopy
2021-01-02 06:07:15 +00:00
);
2020-12-19 02:38:15 +00:00
if (tex->usageFlags & VK_IMAGE_USAGE_SAMPLED_BIT)
{
/* TODO: is it worth it to only transition the specific subresource? */
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
tex->layerCount,
0,
tex->levelCount,
0,
tex->image,
&tex->resourceAccessType
);
}
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, tex);
2020-12-19 02:38:15 +00:00
/* These apply to both U and V */
imageCopy.imageExtent.width = uvWidth;
imageCopy.imageExtent.height = uvHeight;
imageCopy.bufferRowLength = uvWidth;
imageCopy.bufferImageHeight = uvHeight;
/* U */
imageCopy.bufferOffset = transferBuffer->offset + yDataLength;
2020-12-19 02:38:15 +00:00
tex = (VulkanTexture*) u;
SDL_memcpy(
2021-01-14 02:02:45 +00:00
stagingBufferPointer + yDataLength,
2020-12-19 02:38:15 +00:00
dataPtr + yDataLength,
uvDataLength
);
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
2020-12-19 02:38:15 +00:00
RESOURCE_ACCESS_TRANSFER_WRITE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
tex->layerCount,
0,
tex->levelCount,
0,
tex->image,
&tex->resourceAccessType
);
2021-01-02 06:07:15 +00:00
renderer->vkCmdCopyBufferToImage(
vulkanCommandBuffer->commandBuffer,
transferBuffer->buffer->buffer,
2020-12-19 02:38:15 +00:00
tex->image,
AccessMap[tex->resourceAccessType].imageLayout,
1,
&imageCopy
2021-01-02 06:07:15 +00:00
);
2020-12-19 02:38:15 +00:00
if (tex->usageFlags & VK_IMAGE_USAGE_SAMPLED_BIT)
{
/* TODO: is it worth it to only transition the specific subresource? */
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
tex->layerCount,
0,
tex->levelCount,
0,
tex->image,
&tex->resourceAccessType
);
}
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, tex);
2020-12-19 02:38:15 +00:00
/* V */
imageCopy.bufferOffset = transferBuffer->offset + yDataLength + uvDataLength;
2020-12-19 02:38:15 +00:00
tex = (VulkanTexture*) v;
SDL_memcpy(
2021-01-14 02:02:45 +00:00
stagingBufferPointer + yDataLength + uvDataLength,
2020-12-19 02:38:15 +00:00
dataPtr + yDataLength + uvDataLength,
uvDataLength
);
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
2020-12-19 02:38:15 +00:00
RESOURCE_ACCESS_TRANSFER_WRITE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
tex->layerCount,
0,
tex->levelCount,
0,
tex->image,
&tex->resourceAccessType
);
2021-01-02 06:07:15 +00:00
renderer->vkCmdCopyBufferToImage(
vulkanCommandBuffer->commandBuffer,
transferBuffer->buffer->buffer,
2020-12-19 02:38:15 +00:00
tex->image,
AccessMap[tex->resourceAccessType].imageLayout,
1,
&imageCopy
2021-01-02 06:07:15 +00:00
);
transferBuffer->offset += yDataLength + uvDataLength;
if (tex->usageFlags & VK_IMAGE_USAGE_SAMPLED_BIT)
{
/* TODO: is it worth it to only transition the specific subresource? */
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
tex->layerCount,
0,
tex->levelCount,
0,
tex->image,
&tex->resourceAccessType
);
}
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, tex);
2020-12-17 01:23:49 +00:00
}
2021-01-03 21:01:29 +00:00
static void VULKAN_INTERNAL_BlitImage(
VulkanRenderer *renderer,
VkCommandBuffer commandBuffer,
Refresh_TextureSlice *sourceTextureSlice,
Refresh_TextureSlice *destinationTextureSlice,
VulkanResourceAccessType newDestinationAccessType,
2021-01-03 21:01:29 +00:00
VkFilter filter
) {
VkImageBlit blit;
VulkanTexture *sourceTexture = (VulkanTexture*) sourceTextureSlice->texture;
VulkanTexture *destinationTexture = (VulkanTexture*) destinationTextureSlice->texture;
VulkanResourceAccessType originalSourceAccessType = sourceTexture->resourceAccessType;
2021-01-03 21:01:29 +00:00
/* TODO: is it worth it to only transition the specific subresource? */
2021-01-03 21:01:29 +00:00
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
sourceTexture->layerCount,
0,
sourceTexture->levelCount,
0,
sourceTexture->image,
&sourceTexture->resourceAccessType
2021-01-03 21:01:29 +00:00
);
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
destinationTexture->layerCount,
0,
destinationTexture->levelCount,
0,
destinationTexture->image,
&destinationTexture->resourceAccessType
2021-01-03 21:01:29 +00:00
);
blit.srcOffsets[0].x = sourceTextureSlice->rectangle.x;
blit.srcOffsets[0].y = sourceTextureSlice->rectangle.y;
blit.srcOffsets[0].z = sourceTextureSlice->depth;
blit.srcOffsets[1].x = sourceTextureSlice->rectangle.x + sourceTextureSlice->rectangle.w;
blit.srcOffsets[1].y = sourceTextureSlice->rectangle.y + sourceTextureSlice->rectangle.h;
2021-01-03 21:01:29 +00:00
blit.srcOffsets[1].z = 1;
blit.srcSubresource.mipLevel = sourceTextureSlice->level;
blit.srcSubresource.baseArrayLayer = sourceTextureSlice->layer;
2021-01-03 21:01:29 +00:00
blit.srcSubresource.layerCount = 1;
blit.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
blit.dstOffsets[0].x = destinationTextureSlice->rectangle.x;
blit.dstOffsets[0].y = destinationTextureSlice->rectangle.y;
blit.dstOffsets[0].z = destinationTextureSlice->depth;
blit.dstOffsets[1].x = destinationTextureSlice->rectangle.x + destinationTextureSlice->rectangle.w;
blit.dstOffsets[1].y = destinationTextureSlice->rectangle.y + destinationTextureSlice->rectangle.h;
2021-01-03 21:01:29 +00:00
blit.dstOffsets[1].z = 1;
2022-07-23 03:59:01 +00:00
blit.dstSubresource.mipLevel = destinationTextureSlice->level;
blit.dstSubresource.baseArrayLayer = destinationTextureSlice->layer;
2021-01-03 21:01:29 +00:00
blit.dstSubresource.layerCount = 1;
blit.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
renderer->vkCmdBlitImage(
commandBuffer,
sourceTexture->image,
2021-01-03 21:01:29 +00:00
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
destinationTexture->image,
2021-01-03 21:01:29 +00:00
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1,
&blit,
filter
);
/* TODO: is it worth it to only transition the specific subresource? */
2021-01-03 21:01:29 +00:00
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
commandBuffer,
originalSourceAccessType,
2021-01-03 21:01:29 +00:00
VK_IMAGE_ASPECT_COLOR_BIT,
0,
sourceTexture->layerCount,
0,
sourceTexture->levelCount,
0,
sourceTexture->image,
&sourceTexture->resourceAccessType
2021-01-03 21:01:29 +00:00
);
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
commandBuffer,
newDestinationAccessType,
2021-01-03 21:01:29 +00:00
VK_IMAGE_ASPECT_COLOR_BIT,
0,
destinationTexture->layerCount,
0,
destinationTexture->levelCount,
0,
destinationTexture->image,
&destinationTexture->resourceAccessType
2021-01-03 21:01:29 +00:00
);
}
REFRESHAPI void VULKAN_CopyTextureToTexture(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_TextureSlice *sourceTextureSlice,
Refresh_TextureSlice *destinationTextureSlice,
Refresh_Filter filter
2021-01-03 21:01:29 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*)driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanTexture *sourceTexture = (VulkanTexture*) sourceTextureSlice->texture;
2021-01-03 21:01:29 +00:00
VulkanTexture *destinationTexture = (VulkanTexture*) destinationTextureSlice->texture;
VulkanResourceAccessType destinationAccessType = destinationTexture->resourceAccessType;
if (destinationTexture->usageFlags & VK_IMAGE_USAGE_SAMPLED_BIT)
{
destinationAccessType = RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE;
}
else if (destinationTexture->usageFlags & VK_IMAGE_USAGE_STORAGE_BIT)
{
destinationAccessType = RESOURCE_ACCESS_COMPUTE_SHADER_STORAGE_IMAGE_READ_WRITE;
}
2021-01-03 21:01:29 +00:00
VULKAN_INTERNAL_BlitImage(
renderer,
vulkanCommandBuffer->commandBuffer,
sourceTextureSlice,
destinationTextureSlice,
destinationAccessType,
2021-01-03 21:01:29 +00:00
RefreshToVK_Filter[filter]
);
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, sourceTexture);
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, destinationTexture);
2021-01-03 21:01:29 +00:00
}
static void VULKAN_INTERNAL_SetBufferData(
VulkanBuffer* vulkanBuffer,
VkDeviceSize offsetInBytes,
void* data,
uint32_t dataLength
) {
SDL_memcpy(
vulkanBuffer->allocation->mapPointer + vulkanBuffer->offset + offsetInBytes,
data,
dataLength
);
}
2020-12-31 04:39:47 +00:00
static void VULKAN_SetBufferData(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
2021-01-05 23:00:51 +00:00
Refresh_Buffer *buffer,
2020-12-21 20:37:54 +00:00
uint32_t offsetInBytes,
void* data,
uint32_t dataLength
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer* vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBuffer* vulkanBuffer = (VulkanBuffer*) buffer;
VulkanTransferBuffer* transferBuffer;
uint8_t* transferBufferPointer;
VkBufferCopy bufferCopy;
VulkanResourceAccessType accessType = vulkanBuffer->resourceAccessType;
2020-12-21 20:37:54 +00:00
transferBuffer = VULKAN_INTERNAL_AcquireTransferBuffer(
renderer,
vulkanCommandBuffer,
dataLength,
renderer->physicalDeviceProperties.properties.limits.optimalBufferCopyOffsetAlignment
);
if (transferBuffer == NULL)
{
return;
}
transferBufferPointer =
transferBuffer->buffer->allocation->mapPointer +
transferBuffer->buffer->offset +
transferBuffer->offset;
2020-12-21 20:37:54 +00:00
SDL_memcpy(
transferBufferPointer,
2020-12-21 20:37:54 +00:00
data,
dataLength
);
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_READ,
transferBuffer->buffer
);
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_TRANSFER_WRITE,
vulkanBuffer
);
bufferCopy.srcOffset = transferBuffer->offset;
bufferCopy.dstOffset = offsetInBytes;
bufferCopy.size = (VkDeviceSize) dataLength;
renderer->vkCmdCopyBuffer(
vulkanCommandBuffer->commandBuffer,
transferBuffer->buffer->buffer,
vulkanBuffer->buffer,
1,
&bufferCopy
);
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
accessType,
vulkanBuffer
);
transferBuffer->offset += dataLength;
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, vulkanBuffer);
2020-12-21 20:37:54 +00:00
}
/* FIXME: this should return uint64_t */
static uint32_t VULKAN_PushVertexShaderUniforms(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
void *data,
2021-02-03 00:37:01 +00:00
uint32_t dataLengthInBytes
) {
2021-01-02 21:31:17 +00:00
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer* vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanGraphicsPipeline* graphicsPipeline = vulkanCommandBuffer->currentGraphicsPipeline;
uint32_t offset;
2021-01-02 06:07:15 +00:00
if (graphicsPipeline == NULL)
{
Refresh_LogError("Cannot push uniforms if a pipeline is not bound!");
return 0;
}
2020-12-21 21:02:07 +00:00
if (graphicsPipeline->vertexUniformBlockSize == 0)
{
Refresh_LogError("Bound pipeline's vertex stage does not declare uniforms!");
return 0;
}
2020-12-21 21:02:07 +00:00
2020-12-23 21:11:09 +00:00
if (
vulkanCommandBuffer->vertexUniformBuffer->offset +
graphicsPipeline->vertexUniformBlockSize >=
UBO_BUFFER_SIZE
2020-12-23 21:11:09 +00:00
) {
/* We're out of space in this buffer, bind the old one and acquire a new one */
VULKAN_INTERNAL_BindUniformBuffer(
vulkanCommandBuffer,
vulkanCommandBuffer->vertexUniformBuffer
);
vulkanCommandBuffer->vertexUniformBuffer = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
renderer,
renderer->vertexUniformBufferPool,
graphicsPipeline->vertexUniformBlockSize
);
2020-12-21 21:02:07 +00:00
}
offset = vulkanCommandBuffer->vertexUniformBuffer->offset;
VULKAN_INTERNAL_SetBufferData(
vulkanCommandBuffer->vertexUniformBuffer->vulkanBuffer,
vulkanCommandBuffer->vertexUniformBuffer->offset,
2020-12-21 21:02:07 +00:00
data,
2021-02-03 00:37:01 +00:00
dataLengthInBytes
2020-12-21 21:02:07 +00:00
);
vulkanCommandBuffer->vertexUniformBuffer->offset += graphicsPipeline->vertexUniformBlockSize;
2021-01-02 06:07:15 +00:00
return offset;
}
/* FIXME: this should return uint64_t */
static uint32_t VULKAN_PushFragmentShaderUniforms(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
2020-12-17 01:23:49 +00:00
void *data,
2021-02-03 00:37:01 +00:00
uint32_t dataLengthInBytes
2020-12-17 01:23:49 +00:00
) {
2021-01-02 21:31:17 +00:00
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer* vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanGraphicsPipeline* graphicsPipeline = vulkanCommandBuffer->currentGraphicsPipeline;
uint32_t offset;
2021-01-02 06:07:15 +00:00
2020-12-23 21:11:09 +00:00
if (
vulkanCommandBuffer->fragmentUniformBuffer->offset +
graphicsPipeline->fragmentUniformBlockSize >=
UBO_BUFFER_SIZE
2020-12-23 21:11:09 +00:00
) {
/* We're out of space in this buffer, bind the old one and acquire a new one */
VULKAN_INTERNAL_BindUniformBuffer(
vulkanCommandBuffer,
vulkanCommandBuffer->fragmentUniformBuffer
);
vulkanCommandBuffer->fragmentUniformBuffer = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
renderer,
renderer->fragmentUniformBufferPool,
graphicsPipeline->fragmentUniformBlockSize
);
2020-12-21 21:02:07 +00:00
}
offset = vulkanCommandBuffer->fragmentUniformBuffer->offset;
VULKAN_INTERNAL_SetBufferData(
vulkanCommandBuffer->fragmentUniformBuffer->vulkanBuffer,
vulkanCommandBuffer->fragmentUniformBuffer->offset,
2020-12-21 21:02:07 +00:00
data,
2021-02-03 00:37:01 +00:00
dataLengthInBytes
2020-12-21 21:02:07 +00:00
);
vulkanCommandBuffer->fragmentUniformBuffer->offset += graphicsPipeline->fragmentUniformBlockSize;
2021-01-02 06:07:15 +00:00
return offset;
2020-12-17 01:23:49 +00:00
}
static uint32_t VULKAN_PushComputeShaderUniforms(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
2020-12-31 04:39:47 +00:00
void *data,
2021-02-03 00:37:01 +00:00
uint32_t dataLengthInBytes
2020-12-31 04:39:47 +00:00
) {
2021-01-02 21:31:17 +00:00
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer* vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanComputePipeline* computePipeline = vulkanCommandBuffer->currentComputePipeline;
uint32_t offset;
2021-01-02 06:07:15 +00:00
2020-12-31 04:39:47 +00:00
if (
vulkanCommandBuffer->computeUniformBuffer->offset +
computePipeline->uniformBlockSize >=
UBO_BUFFER_SIZE
2020-12-31 04:39:47 +00:00
) {
/* We're out of space in this buffer, bind the old one and acquire a new one */
VULKAN_INTERNAL_BindUniformBuffer(
vulkanCommandBuffer,
vulkanCommandBuffer->computeUniformBuffer
);
vulkanCommandBuffer->computeUniformBuffer = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
renderer,
renderer->computeUniformBufferPool,
computePipeline->uniformBlockSize
);
2020-12-31 04:39:47 +00:00
}
offset = vulkanCommandBuffer->computeUniformBuffer->offset;
VULKAN_INTERNAL_SetBufferData(
vulkanCommandBuffer->computeUniformBuffer->vulkanBuffer,
vulkanCommandBuffer->computeUniformBuffer->offset,
2020-12-31 04:39:47 +00:00
data,
2021-02-03 00:37:01 +00:00
dataLengthInBytes
2020-12-31 04:39:47 +00:00
);
vulkanCommandBuffer->computeUniformBuffer->offset += computePipeline->uniformBlockSize;
2021-01-02 06:07:15 +00:00
return offset;
2020-12-31 04:39:47 +00:00
}
/* If fetching an image descriptor, descriptorImageInfos must not be NULL.
* If fetching a buffer descriptor, descriptorBufferInfos must not be NULL.
*/
static VkDescriptorSet VULKAN_INTERNAL_FetchDescriptorSet(
VulkanRenderer *renderer,
VulkanCommandBuffer *vulkanCommandBuffer,
DescriptorSetCache *descriptorSetCache,
VkDescriptorImageInfo *descriptorImageInfos, /* Can be NULL */
VkDescriptorBufferInfo *descriptorBufferInfos /* Can be NULL */
2020-12-31 00:47:13 +00:00
) {
uint32_t i;
VkDescriptorSet descriptorSet;
VkWriteDescriptorSet writeDescriptorSets[MAX_TEXTURE_SAMPLERS];
uint8_t isImage;
2020-12-31 00:47:13 +00:00
if (descriptorImageInfos == NULL && descriptorBufferInfos == NULL)
2020-12-31 00:47:13 +00:00
{
Refresh_LogError("descriptorImageInfos and descriptorBufferInfos cannot both be NULL!");
return VK_NULL_HANDLE;
2020-12-31 00:47:13 +00:00
}
else if (descriptorImageInfos != NULL && descriptorBufferInfos != NULL)
2020-12-31 00:47:13 +00:00
{
Refresh_LogError("descriptorImageInfos and descriptorBufferInfos cannot both be set!");
return VK_NULL_HANDLE;
2020-12-31 00:47:13 +00:00
}
isImage = descriptorImageInfos != NULL;
SDL_LockMutex(descriptorSetCache->lock);
2020-12-31 00:47:13 +00:00
/* If no inactive descriptor sets remain, create a new pool and allocate new inactive sets */
if (descriptorSetCache->inactiveDescriptorSetCount == 0)
2020-12-31 00:47:13 +00:00
{
descriptorSetCache->descriptorPoolCount += 1;
descriptorSetCache->descriptorPools = SDL_realloc(
descriptorSetCache->descriptorPools,
sizeof(VkDescriptorPool) * descriptorSetCache->descriptorPoolCount
2020-12-31 00:47:13 +00:00
);
2022-01-13 07:09:06 +00:00
if (!VULKAN_INTERNAL_CreateDescriptorPool(
2020-12-31 00:47:13 +00:00
renderer,
descriptorSetCache->descriptorType,
descriptorSetCache->nextPoolSize,
descriptorSetCache->nextPoolSize * descriptorSetCache->bindingCount,
&descriptorSetCache->descriptorPools[descriptorSetCache->descriptorPoolCount - 1]
2022-01-13 07:09:06 +00:00
)) {
SDL_UnlockMutex(descriptorSetCache->lock);
2022-01-13 07:09:06 +00:00
Refresh_LogError("Failed to create descriptor pool!");
return VK_NULL_HANDLE;
}
2020-12-31 00:47:13 +00:00
descriptorSetCache->inactiveDescriptorSetCapacity += descriptorSetCache->nextPoolSize;
2020-12-31 00:47:13 +00:00
descriptorSetCache->inactiveDescriptorSets = SDL_realloc(
descriptorSetCache->inactiveDescriptorSets,
sizeof(VkDescriptorSet) * descriptorSetCache->inactiveDescriptorSetCapacity
2020-12-31 00:47:13 +00:00
);
2022-01-13 07:09:06 +00:00
if (!VULKAN_INTERNAL_AllocateDescriptorSets(
2020-12-31 00:47:13 +00:00
renderer,
descriptorSetCache->descriptorPools[descriptorSetCache->descriptorPoolCount - 1],
descriptorSetCache->descriptorSetLayout,
descriptorSetCache->nextPoolSize,
descriptorSetCache->inactiveDescriptorSets
2022-01-13 07:09:06 +00:00
)) {
SDL_UnlockMutex(descriptorSetCache->lock);
2022-01-13 07:09:06 +00:00
Refresh_LogError("Failed to allocate descriptor sets!");
return VK_NULL_HANDLE;
}
2020-12-31 00:47:13 +00:00
descriptorSetCache->inactiveDescriptorSetCount = descriptorSetCache->nextPoolSize;
2020-12-31 00:47:13 +00:00
descriptorSetCache->nextPoolSize *= 2;
2020-12-31 00:47:13 +00:00
}
descriptorSet = descriptorSetCache->inactiveDescriptorSets[descriptorSetCache->inactiveDescriptorSetCount - 1];
descriptorSetCache->inactiveDescriptorSetCount -= 1;
2020-12-31 00:47:13 +00:00
for (i = 0; i < descriptorSetCache->bindingCount; i += 1)
2020-12-31 00:47:13 +00:00
{
writeDescriptorSets[i].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
writeDescriptorSets[i].pNext = NULL;
writeDescriptorSets[i].descriptorCount = 1;
writeDescriptorSets[i].descriptorType = descriptorSetCache->descriptorType;
2020-12-31 00:47:13 +00:00
writeDescriptorSets[i].dstArrayElement = 0;
writeDescriptorSets[i].dstBinding = i;
writeDescriptorSets[i].dstSet = descriptorSet;
2020-12-31 00:47:13 +00:00
writeDescriptorSets[i].pTexelBufferView = NULL;
if (isImage)
2020-12-27 23:20:59 +00:00
{
writeDescriptorSets[i].pImageInfo = &descriptorImageInfos[i];
writeDescriptorSets[i].pBufferInfo = NULL;
2020-12-19 05:35:21 +00:00
2022-01-13 07:09:06 +00:00
}
else
{
writeDescriptorSets[i].pBufferInfo = &descriptorBufferInfos[i];
writeDescriptorSets[i].pImageInfo = NULL;
2022-01-13 07:09:06 +00:00
}
2020-12-19 05:35:21 +00:00
}
renderer->vkUpdateDescriptorSets(
renderer->logicalDevice,
descriptorSetCache->bindingCount,
2020-12-19 05:35:21 +00:00
writeDescriptorSets,
0,
NULL
);
SDL_UnlockMutex(descriptorSetCache->lock);
2020-12-20 09:29:15 +00:00
if (vulkanCommandBuffer->boundDescriptorSetDataCount == vulkanCommandBuffer->boundDescriptorSetDataCapacity)
2020-12-27 23:20:59 +00:00
{
vulkanCommandBuffer->boundDescriptorSetDataCapacity *= 2;
vulkanCommandBuffer->boundDescriptorSetDatas = SDL_realloc(
vulkanCommandBuffer->boundDescriptorSetDatas,
vulkanCommandBuffer->boundDescriptorSetDataCapacity * sizeof(DescriptorSetData)
2020-12-27 23:20:59 +00:00
);
}
vulkanCommandBuffer->boundDescriptorSetDatas[vulkanCommandBuffer->boundDescriptorSetDataCount].descriptorSet = descriptorSet;
vulkanCommandBuffer->boundDescriptorSetDatas[vulkanCommandBuffer->boundDescriptorSetDataCount].descriptorSetCache = descriptorSetCache;
vulkanCommandBuffer->boundDescriptorSetDataCount += 1;
2020-12-27 23:20:59 +00:00
return descriptorSet;
2020-12-17 01:23:49 +00:00
}
2021-01-06 01:00:06 +00:00
static void VULKAN_BindVertexSamplers(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_Texture **pTextures,
Refresh_Sampler **pSamplers
2020-12-17 01:23:49 +00:00
) {
2020-12-19 05:35:21 +00:00
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanGraphicsPipeline *graphicsPipeline = vulkanCommandBuffer->currentGraphicsPipeline;
2021-01-02 21:31:17 +00:00
VulkanTexture *currentTexture;
VulkanSampler *currentSampler;
2021-01-02 21:31:17 +00:00
uint32_t i, samplerCount;
VkDescriptorImageInfo descriptorImageInfos[MAX_TEXTURE_SAMPLERS];
2020-12-19 05:35:21 +00:00
2020-12-27 23:38:58 +00:00
if (graphicsPipeline->pipelineLayout->vertexSamplerDescriptorSetCache == NULL)
2020-12-27 23:20:59 +00:00
{
return;
}
2020-12-19 05:35:21 +00:00
2020-12-30 00:19:19 +00:00
samplerCount = graphicsPipeline->pipelineLayout->vertexSamplerDescriptorSetCache->bindingCount;
2020-12-27 23:38:58 +00:00
2020-12-27 23:20:59 +00:00
for (i = 0; i < samplerCount; i += 1)
2020-12-19 05:35:21 +00:00
{
currentTexture = (VulkanTexture*) pTextures[i];
currentSampler = (VulkanSampler*) pSamplers[i];
descriptorImageInfos[i].imageView = currentTexture->view;
descriptorImageInfos[i].sampler = currentSampler->sampler;
descriptorImageInfos[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, currentTexture);
VULKAN_INTERNAL_TrackSampler(renderer, vulkanCommandBuffer, currentSampler);
2020-12-19 05:35:21 +00:00
}
vulkanCommandBuffer->vertexSamplerDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
2020-12-27 23:20:59 +00:00
renderer,
vulkanCommandBuffer,
2020-12-27 23:20:59 +00:00
graphicsPipeline->pipelineLayout->vertexSamplerDescriptorSetCache,
descriptorImageInfos,
NULL
2020-12-19 05:35:21 +00:00
);
2020-12-27 23:20:59 +00:00
}
2020-12-19 05:35:21 +00:00
2021-01-06 01:00:06 +00:00
static void VULKAN_BindFragmentSamplers(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_Texture **pTextures,
Refresh_Sampler **pSamplers
2020-12-27 23:20:59 +00:00
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanGraphicsPipeline *graphicsPipeline = vulkanCommandBuffer->currentGraphicsPipeline;
2021-01-02 21:31:17 +00:00
VulkanTexture *currentTexture;
VulkanSampler *currentSampler;
2021-01-02 21:31:17 +00:00
uint32_t i, samplerCount;
VkDescriptorImageInfo descriptorImageInfos[MAX_TEXTURE_SAMPLERS];
2020-12-27 23:20:59 +00:00
2020-12-28 06:45:12 +00:00
if (graphicsPipeline->pipelineLayout->fragmentSamplerDescriptorSetCache == NULL)
2020-12-27 23:20:59 +00:00
{
return;
}
2020-12-30 00:19:19 +00:00
samplerCount = graphicsPipeline->pipelineLayout->fragmentSamplerDescriptorSetCache->bindingCount;
2020-12-27 23:38:58 +00:00
2020-12-27 23:20:59 +00:00
for (i = 0; i < samplerCount; i += 1)
{
currentTexture = (VulkanTexture*) pTextures[i];
currentSampler = (VulkanSampler*) pSamplers[i];
descriptorImageInfos[i].imageView = currentTexture->view;
descriptorImageInfos[i].sampler = currentSampler->sampler;
descriptorImageInfos[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, currentTexture);
VULKAN_INTERNAL_TrackSampler(renderer, vulkanCommandBuffer, currentSampler);
2020-12-27 23:20:59 +00:00
}
vulkanCommandBuffer->fragmentSamplerDescriptorSet = VULKAN_INTERNAL_FetchDescriptorSet(
2020-12-27 23:20:59 +00:00
renderer,
vulkanCommandBuffer,
2020-12-27 23:20:59 +00:00
graphicsPipeline->pipelineLayout->fragmentSamplerDescriptorSetCache,
descriptorImageInfos,
NULL
2020-12-27 23:20:59 +00:00
);
2020-12-17 01:23:49 +00:00
}
2021-01-03 01:00:52 +00:00
static void VULKAN_GetBufferData(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_Buffer *buffer,
2021-01-03 01:00:52 +00:00
void *data,
uint32_t dataLengthInBytes
) {
VulkanBuffer* vulkanBuffer = (VulkanBuffer*) buffer;
2021-01-03 01:00:52 +00:00
uint8_t *dataPtr = (uint8_t*) data;
uint8_t *mapPointer;
2021-01-14 02:02:45 +00:00
mapPointer =
vulkanBuffer->allocation->mapPointer +
vulkanBuffer->offset;
2021-01-03 01:00:52 +00:00
SDL_memcpy(
dataPtr,
mapPointer,
dataLengthInBytes
);
}
2021-01-03 22:37:02 +00:00
static void VULKAN_CopyTextureToBuffer(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_TextureSlice *textureSlice,
Refresh_Buffer *buffer
2020-12-29 07:41:59 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
2021-01-03 01:00:52 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
2021-01-03 22:37:02 +00:00
VulkanTexture *vulkanTexture = (VulkanTexture*) textureSlice->texture;
VulkanBuffer* vulkanBuffer = (VulkanBuffer*) buffer;
2021-01-02 21:31:17 +00:00
2020-12-29 07:41:59 +00:00
VulkanResourceAccessType prevResourceAccess;
VkBufferImageCopy imageCopy;
/* Cache this so we can restore it later */
prevResourceAccess = vulkanTexture->resourceAccessType;
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
2021-01-03 01:00:52 +00:00
vulkanCommandBuffer->commandBuffer,
2020-12-29 07:41:59 +00:00
RESOURCE_ACCESS_TRANSFER_READ,
VK_IMAGE_ASPECT_COLOR_BIT,
2021-01-03 22:37:02 +00:00
textureSlice->layer,
1,
textureSlice->level,
1,
2020-12-29 07:41:59 +00:00
0,
vulkanTexture->image,
&vulkanTexture->resourceAccessType
);
2021-01-03 01:00:52 +00:00
/* Save texture data to buffer */
2020-12-29 07:41:59 +00:00
2021-01-03 22:37:02 +00:00
imageCopy.imageExtent.width = textureSlice->rectangle.w;
imageCopy.imageExtent.height = textureSlice->rectangle.h;
2020-12-29 07:41:59 +00:00
imageCopy.imageExtent.depth = 1;
2021-01-03 22:37:02 +00:00
imageCopy.bufferRowLength = textureSlice->rectangle.w;
imageCopy.bufferImageHeight = textureSlice->rectangle.h;
imageCopy.imageOffset.x = textureSlice->rectangle.x;
imageCopy.imageOffset.y = textureSlice->rectangle.y;
imageCopy.imageOffset.z = textureSlice->depth;
2020-12-29 07:41:59 +00:00
imageCopy.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
2021-01-03 22:37:02 +00:00
imageCopy.imageSubresource.baseArrayLayer = textureSlice->layer;
2020-12-29 07:41:59 +00:00
imageCopy.imageSubresource.layerCount = 1;
2021-01-03 22:37:02 +00:00
imageCopy.imageSubresource.mipLevel = textureSlice->level;
2020-12-29 07:41:59 +00:00
imageCopy.bufferOffset = 0;
2021-01-02 06:07:15 +00:00
renderer->vkCmdCopyImageToBuffer(
2021-01-03 01:00:52 +00:00
vulkanCommandBuffer->commandBuffer,
2020-12-29 07:41:59 +00:00
vulkanTexture->image,
AccessMap[vulkanTexture->resourceAccessType].imageLayout,
vulkanBuffer->buffer,
2020-12-29 07:41:59 +00:00
1,
&imageCopy
2021-01-02 06:07:15 +00:00
);
2020-12-29 07:41:59 +00:00
2021-01-03 03:03:25 +00:00
/* Restore the image layout */
2020-12-29 07:41:59 +00:00
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
2021-01-03 01:00:52 +00:00
vulkanCommandBuffer->commandBuffer,
2020-12-29 07:41:59 +00:00
prevResourceAccess,
VK_IMAGE_ASPECT_COLOR_BIT,
2021-01-03 22:37:02 +00:00
textureSlice->layer,
1,
textureSlice->level,
1,
2020-12-29 07:41:59 +00:00
0,
vulkanTexture->image,
&vulkanTexture->resourceAccessType
);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, vulkanBuffer);
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, vulkanTexture);
2020-12-29 07:41:59 +00:00
}
2021-01-06 01:02:36 +00:00
static void VULKAN_QueueDestroyTexture(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_Texture *texture
2020-12-17 01:23:49 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanTexture* vulkanTexture = (VulkanTexture*) texture;
2020-12-28 23:11:05 +00:00
SDL_LockMutex(renderer->disposeLock);
2020-12-29 00:28:14 +00:00
EXPAND_ARRAY_IF_NEEDED(
renderer->texturesToDestroy,
2020-12-29 00:28:14 +00:00
VulkanTexture*,
renderer->texturesToDestroyCount + 1,
renderer->texturesToDestroyCapacity,
renderer->texturesToDestroyCapacity * 2
2020-12-29 00:28:14 +00:00
)
2020-12-28 23:11:05 +00:00
renderer->texturesToDestroy[renderer->texturesToDestroyCount] = vulkanTexture;
renderer->texturesToDestroyCount += 1;
SDL_UnlockMutex(renderer->disposeLock);
2020-12-17 01:23:49 +00:00
}
2021-01-06 01:02:36 +00:00
static void VULKAN_QueueDestroySampler(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_Sampler *sampler
2020-12-17 01:23:49 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanSampler* vulkanSampler = (VulkanSampler*) sampler;
SDL_LockMutex(renderer->disposeLock);
2020-12-29 00:56:49 +00:00
EXPAND_ARRAY_IF_NEEDED(
renderer->samplersToDestroy,
VulkanSampler*,
renderer->samplersToDestroyCount + 1,
renderer->samplersToDestroyCapacity,
renderer->samplersToDestroyCapacity * 2
2020-12-29 00:56:49 +00:00
)
renderer->samplersToDestroy[renderer->samplersToDestroyCount] = vulkanSampler;
renderer->samplersToDestroyCount += 1;
SDL_UnlockMutex(renderer->disposeLock);
2020-12-17 01:23:49 +00:00
}
2021-01-06 01:02:36 +00:00
static void VULKAN_QueueDestroyBuffer(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_Buffer *buffer
2020-12-17 01:23:49 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanBuffer *vulkanBuffer = (VulkanBuffer*) buffer;
2020-12-17 01:23:49 +00:00
SDL_LockMutex(renderer->disposeLock);
EXPAND_ARRAY_IF_NEEDED(
renderer->buffersToDestroy,
VulkanBuffer*,
renderer->buffersToDestroyCount + 1,
renderer->buffersToDestroyCapacity,
renderer->buffersToDestroyCapacity * 2
)
renderer->buffersToDestroy[
renderer->buffersToDestroyCount
] = vulkanBuffer;
renderer->buffersToDestroyCount += 1;
SDL_UnlockMutex(renderer->disposeLock);
2020-12-17 01:23:49 +00:00
}
2021-01-06 01:02:36 +00:00
static void VULKAN_QueueDestroyShaderModule(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_ShaderModule *shaderModule
2020-12-17 01:23:49 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanShaderModule *vulkanShaderModule = (VulkanShaderModule*) shaderModule;
SDL_LockMutex(renderer->disposeLock);
2020-12-29 00:42:51 +00:00
EXPAND_ARRAY_IF_NEEDED(
renderer->shaderModulesToDestroy,
VulkanShaderModule*,
renderer->shaderModulesToDestroyCount + 1,
renderer->shaderModulesToDestroyCapacity,
renderer->shaderModulesToDestroyCapacity * 2
2020-12-29 00:42:51 +00:00
)
renderer->shaderModulesToDestroy[renderer->shaderModulesToDestroyCount] = vulkanShaderModule;
renderer->shaderModulesToDestroyCount += 1;
SDL_UnlockMutex(renderer->disposeLock);
2020-12-17 01:23:49 +00:00
}
2021-01-06 01:02:36 +00:00
static void VULKAN_QueueDestroyComputePipeline(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_ComputePipeline *computePipeline
2020-12-29 22:52:24 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
2020-12-31 07:02:12 +00:00
VulkanComputePipeline *vulkanComputePipeline = (VulkanComputePipeline*) computePipeline;
SDL_LockMutex(renderer->disposeLock);
2020-12-31 07:02:12 +00:00
EXPAND_ARRAY_IF_NEEDED(
renderer->computePipelinesToDestroy,
2020-12-31 07:02:12 +00:00
VulkanComputePipeline*,
renderer->computePipelinesToDestroyCount + 1,
renderer->computePipelinesToDestroyCapacity,
renderer->computePipelinesToDestroyCapacity * 2
2020-12-31 07:02:12 +00:00
)
renderer->computePipelinesToDestroy[renderer->computePipelinesToDestroyCount] = vulkanComputePipeline;
renderer->computePipelinesToDestroyCount += 1;
SDL_UnlockMutex(renderer->disposeLock);
2020-12-29 22:52:24 +00:00
}
2021-01-06 01:02:36 +00:00
static void VULKAN_QueueDestroyGraphicsPipeline(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_GraphicsPipeline *graphicsPipeline
2020-12-17 01:23:49 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
2020-12-29 00:28:14 +00:00
VulkanGraphicsPipeline *vulkanGraphicsPipeline = (VulkanGraphicsPipeline*) graphicsPipeline;
SDL_LockMutex(renderer->disposeLock);
2020-12-29 00:28:14 +00:00
EXPAND_ARRAY_IF_NEEDED(
renderer->graphicsPipelinesToDestroy,
2020-12-29 00:28:14 +00:00
VulkanGraphicsPipeline*,
renderer->graphicsPipelinesToDestroyCount + 1,
renderer->graphicsPipelinesToDestroyCapacity,
renderer->graphicsPipelinesToDestroyCapacity * 2
2020-12-29 00:28:14 +00:00
)
renderer->graphicsPipelinesToDestroy[renderer->graphicsPipelinesToDestroyCount] = vulkanGraphicsPipeline;
renderer->graphicsPipelinesToDestroyCount += 1;
SDL_UnlockMutex(renderer->disposeLock);
2020-12-17 01:23:49 +00:00
}
/* Command Buffer render state */
static VkRenderPass VULKAN_INTERNAL_FetchRenderPass(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer,
Refresh_ColorAttachmentInfo *colorAttachmentInfos,
uint32_t colorAttachmentCount,
Refresh_DepthStencilAttachmentInfo *depthStencilAttachmentInfo
) {
VkRenderPass renderPass;
RenderPassHash hash;
uint32_t i;
VulkanTexture *texture;
SDL_LockMutex(renderer->renderPassFetchLock);
for (i = 0; i < colorAttachmentCount; i += 1)
{
2022-06-06 18:46:08 +00:00
hash.colorTargetDescriptions[i].format = ((VulkanTexture*) colorAttachmentInfos[i].texture)->format;
hash.colorTargetDescriptions[i].clearColor = colorAttachmentInfos[i].clearColor;
hash.colorTargetDescriptions[i].loadOp = colorAttachmentInfos[i].loadOp;
hash.colorTargetDescriptions[i].storeOp = colorAttachmentInfos[i].storeOp;
}
hash.colorAttachmentSampleCount = REFRESH_SAMPLECOUNT_1;
if (colorAttachmentCount > 0)
{
texture = (VulkanTexture*) colorAttachmentInfos[0].texture;
if (texture->msaaTex != NULL)
{
hash.colorAttachmentSampleCount = texture->msaaTex->sampleCount;
}
}
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
2022-02-25 05:29:52 +00:00
hash.colorAttachmentCount = colorAttachmentCount;
if (depthStencilAttachmentInfo == NULL)
{
2022-06-06 18:46:08 +00:00
hash.depthStencilTargetDescription.format = 0;
hash.depthStencilTargetDescription.loadOp = REFRESH_LOADOP_DONT_CARE;
hash.depthStencilTargetDescription.storeOp = REFRESH_STOREOP_DONT_CARE;
hash.depthStencilTargetDescription.stencilLoadOp = REFRESH_LOADOP_DONT_CARE;
hash.depthStencilTargetDescription.stencilStoreOp = REFRESH_STOREOP_DONT_CARE;
}
else
{
2022-06-06 18:46:08 +00:00
hash.depthStencilTargetDescription.format = ((VulkanTexture*) depthStencilAttachmentInfo->texture)->format;
hash.depthStencilTargetDescription.loadOp = depthStencilAttachmentInfo->loadOp;
hash.depthStencilTargetDescription.storeOp = depthStencilAttachmentInfo->storeOp;
hash.depthStencilTargetDescription.stencilLoadOp = depthStencilAttachmentInfo->stencilLoadOp;
hash.depthStencilTargetDescription.stencilStoreOp = depthStencilAttachmentInfo->stencilStoreOp;
}
renderPass = RenderPassHashArray_Fetch(
&renderer->renderPassHashArray,
&hash
);
if (renderPass != VK_NULL_HANDLE)
{
SDL_UnlockMutex(renderer->renderPassFetchLock);
return renderPass;
}
renderPass = VULKAN_INTERNAL_CreateRenderPass(
renderer,
commandBuffer,
colorAttachmentInfos,
colorAttachmentCount,
depthStencilAttachmentInfo
);
if (renderPass != VK_NULL_HANDLE)
{
RenderPassHashArray_Insert(
&renderer->renderPassHashArray,
hash,
renderPass
);
}
SDL_UnlockMutex(renderer->renderPassFetchLock);
return renderPass;
}
static VulkanFramebuffer* VULKAN_INTERNAL_FetchFramebuffer(
VulkanRenderer *renderer,
VkRenderPass renderPass,
Refresh_ColorAttachmentInfo *colorAttachmentInfos,
uint32_t colorAttachmentCount,
2022-03-04 20:30:33 +00:00
Refresh_DepthStencilAttachmentInfo *depthStencilAttachmentInfo,
uint32_t width,
uint32_t height
) {
VulkanFramebuffer *vulkanFramebuffer;
VkFramebufferCreateInfo framebufferInfo;
VkResult result;
VkImageView imageViewAttachments[2 * MAX_COLOR_TARGET_BINDINGS + 1];
FramebufferHash hash;
VulkanTexture *texture;
VulkanRenderTarget *renderTarget;
uint32_t attachmentCount = 0;
uint32_t i;
SDL_LockMutex(renderer->framebufferFetchLock);
for (i = 0; i < MAX_COLOR_TARGET_BINDINGS; i += 1)
{
hash.colorAttachmentViews[i] = VK_NULL_HANDLE;
hash.colorMultiSampleAttachmentViews[i] = VK_NULL_HANDLE;
}
hash.colorAttachmentCount = colorAttachmentCount;
for (i = 0; i < colorAttachmentCount; i += 1)
{
texture = (VulkanTexture*) colorAttachmentInfos[i].texture;
renderTarget = VULKAN_INTERNAL_FetchRenderTarget(
renderer,
texture,
colorAttachmentInfos[i].depth,
colorAttachmentInfos[i].layer,
colorAttachmentInfos[i].level
);
hash.colorAttachmentViews[i] = (
renderTarget->view
);
if (texture->msaaTex != NULL)
{
renderTarget = VULKAN_INTERNAL_FetchRenderTarget(
renderer,
texture->msaaTex,
colorAttachmentInfos[i].depth,
colorAttachmentInfos[i].layer,
colorAttachmentInfos[i].level
);
hash.colorMultiSampleAttachmentViews[i] = (
renderTarget->view
);
}
}
if (depthStencilAttachmentInfo == NULL)
{
hash.depthStencilAttachmentView = VK_NULL_HANDLE;
}
else
{
texture = (VulkanTexture*) depthStencilAttachmentInfo->texture;
renderTarget = VULKAN_INTERNAL_FetchRenderTarget(
renderer,
texture,
depthStencilAttachmentInfo->depth,
depthStencilAttachmentInfo->layer,
depthStencilAttachmentInfo->level
);
hash.depthStencilAttachmentView = renderTarget->view;
}
2022-03-04 20:30:33 +00:00
hash.width = width;
hash.height = height;
vulkanFramebuffer = FramebufferHashArray_Fetch(
&renderer->framebufferHashArray,
&hash
);
if (vulkanFramebuffer != NULL)
{
SDL_UnlockMutex(renderer->framebufferFetchLock);
return vulkanFramebuffer;
}
vulkanFramebuffer = SDL_malloc(sizeof(VulkanFramebuffer));
SDL_AtomicSet(&vulkanFramebuffer->referenceCount, 0);
/* Create a new framebuffer */
for (i = 0; i < colorAttachmentCount; i += 1)
{
texture = (VulkanTexture*) colorAttachmentInfos[i].texture;
renderTarget = VULKAN_INTERNAL_FetchRenderTarget(
renderer,
texture,
colorAttachmentInfos[i].depth,
colorAttachmentInfos[i].layer,
colorAttachmentInfos[i].level
);
imageViewAttachments[attachmentCount] =
renderTarget->view;
attachmentCount += 1;
if (texture->msaaTex != NULL)
{
renderTarget = VULKAN_INTERNAL_FetchRenderTarget(
renderer,
texture->msaaTex,
colorAttachmentInfos[i].depth,
colorAttachmentInfos[i].layer,
colorAttachmentInfos[i].level
);
imageViewAttachments[attachmentCount] =
renderTarget->view;
attachmentCount += 1;
}
}
if (depthStencilAttachmentInfo != NULL)
{
texture = (VulkanTexture*) depthStencilAttachmentInfo->texture;
renderTarget = VULKAN_INTERNAL_FetchRenderTarget(
renderer,
texture,
depthStencilAttachmentInfo->depth,
depthStencilAttachmentInfo->layer,
depthStencilAttachmentInfo->level
);
imageViewAttachments[attachmentCount] = renderTarget->view;
attachmentCount += 1;
}
framebufferInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
framebufferInfo.pNext = NULL;
framebufferInfo.flags = 0;
framebufferInfo.renderPass = renderPass;
framebufferInfo.attachmentCount = attachmentCount;
framebufferInfo.pAttachments = imageViewAttachments;
framebufferInfo.width = hash.width;
framebufferInfo.height = hash.height;
framebufferInfo.layers = 1;
result = renderer->vkCreateFramebuffer(
renderer->logicalDevice,
&framebufferInfo,
NULL,
&vulkanFramebuffer->framebuffer
);
if (result == VK_SUCCESS)
{
FramebufferHashArray_Insert(
&renderer->framebufferHashArray,
hash,
vulkanFramebuffer
);
}
else
{
LogVulkanResultAsError("vkCreateFramebuffer", result);
SDL_free(vulkanFramebuffer);
vulkanFramebuffer = NULL;
}
SDL_UnlockMutex(renderer->framebufferFetchLock);
return vulkanFramebuffer;
}
2022-03-04 20:30:33 +00:00
static void VULKAN_INTERNAL_SetCurrentViewport(
VulkanCommandBuffer *commandBuffer,
Refresh_Viewport *viewport
) {
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
vulkanCommandBuffer->currentViewport.x = viewport->x;
vulkanCommandBuffer->currentViewport.y = viewport->y;
vulkanCommandBuffer->currentViewport.width = viewport->w;
vulkanCommandBuffer->currentViewport.height = viewport->h;
vulkanCommandBuffer->currentViewport.minDepth = viewport->minDepth;
vulkanCommandBuffer->currentViewport.maxDepth = viewport->maxDepth;
}
static void VULKAN_SetViewport(
2022-03-04 20:30:33 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_Viewport *viewport
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VULKAN_INTERNAL_SetCurrentViewport(
vulkanCommandBuffer,
viewport
);
renderer->vkCmdSetViewport(
vulkanCommandBuffer->commandBuffer,
0,
1,
&vulkanCommandBuffer->currentViewport
);
}
static void VULKAN_INTERNAL_SetCurrentScissor(
VulkanCommandBuffer *vulkanCommandBuffer,
Refresh_Rect *scissor
) {
vulkanCommandBuffer->currentScissor.offset.x = scissor->x;
vulkanCommandBuffer->currentScissor.offset.y = scissor->y;
vulkanCommandBuffer->currentScissor.extent.width = scissor->w;
vulkanCommandBuffer->currentScissor.extent.height = scissor->h;
}
static void VULKAN_SetScissor(
2022-03-04 20:30:33 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_Rect *scissor
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VULKAN_INTERNAL_SetCurrentScissor(
vulkanCommandBuffer,
scissor
);
renderer->vkCmdSetScissor(
vulkanCommandBuffer->commandBuffer,
0,
1,
&vulkanCommandBuffer->currentScissor
);
}
2020-12-17 01:23:49 +00:00
static void VULKAN_BeginRenderPass(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_ColorAttachmentInfo *colorAttachmentInfos,
uint32_t colorAttachmentCount,
Refresh_DepthStencilAttachmentInfo *depthStencilAttachmentInfo
2020-12-17 01:23:49 +00:00
) {
2021-01-02 21:31:17 +00:00
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VkRenderPass renderPass;
VulkanFramebuffer *framebuffer;
2021-01-02 21:31:17 +00:00
VulkanTexture *texture;
uint32_t w, h;
2020-12-20 08:05:12 +00:00
VkClearValue *clearValues;
uint32_t clearCount = colorAttachmentCount;
uint32_t multisampleAttachmentCount = 0;
uint32_t totalColorAttachmentCount = 0;
2020-12-20 08:05:12 +00:00
uint32_t i;
2020-12-23 07:17:09 +00:00
VkImageAspectFlags depthAspectFlags;
2022-03-04 20:30:33 +00:00
Refresh_Viewport defaultViewport;
Refresh_Rect defaultScissor;
uint32_t framebufferWidth = UINT32_MAX;
uint32_t framebufferHeight = UINT32_MAX;
2020-12-23 07:17:09 +00:00
2022-03-04 20:30:33 +00:00
/* The framebuffer cannot be larger than the smallest attachment. */
for (i = 0; i < colorAttachmentCount; i += 1)
{
texture = (VulkanTexture*) colorAttachmentInfos[i].texture;
w = texture->dimensions.width >> colorAttachmentInfos[i].level;
h = texture->dimensions.height >> colorAttachmentInfos[i].level;
2022-03-04 20:30:33 +00:00
if (w < framebufferWidth)
2022-03-04 20:30:33 +00:00
{
framebufferWidth = w;
2022-03-04 20:30:33 +00:00
}
if (h < framebufferHeight)
2022-03-04 20:30:33 +00:00
{
framebufferHeight = h;
2022-03-04 20:30:33 +00:00
}
}
if (depthStencilAttachmentInfo != NULL)
{
texture = (VulkanTexture*) depthStencilAttachmentInfo->texture;
w = texture->dimensions.width >> depthStencilAttachmentInfo->level;
h = texture->dimensions.height >> depthStencilAttachmentInfo->level;
if (w < framebufferWidth)
2022-03-04 20:30:33 +00:00
{
framebufferWidth = w;
2022-03-04 20:30:33 +00:00
}
if (h < framebufferHeight)
2022-03-04 20:30:33 +00:00
{
framebufferHeight = h;
2022-03-04 20:30:33 +00:00
}
}
/* Fetch required render objects */
renderPass = VULKAN_INTERNAL_FetchRenderPass(
renderer,
vulkanCommandBuffer,
colorAttachmentInfos,
colorAttachmentCount,
depthStencilAttachmentInfo
);
framebuffer = VULKAN_INTERNAL_FetchFramebuffer(
renderer,
renderPass,
colorAttachmentInfos,
colorAttachmentCount,
2022-03-04 20:30:33 +00:00
depthStencilAttachmentInfo,
framebufferWidth,
framebufferHeight
);
VULKAN_INTERNAL_TrackFramebuffer(renderer, vulkanCommandBuffer, framebuffer);
2020-12-23 07:17:09 +00:00
/* Layout transitions */
for (i = 0; i < colorAttachmentCount; i += 1)
2020-12-23 07:17:09 +00:00
{
texture = (VulkanTexture*) colorAttachmentInfos[i].texture;
2020-12-23 07:17:09 +00:00
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
2021-01-02 21:31:17 +00:00
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_COLOR_ATTACHMENT_WRITE,
2020-12-23 07:17:09 +00:00
VK_IMAGE_ASPECT_COLOR_BIT,
0,
texture->layerCount,
2021-02-10 01:52:26 +00:00
0,
texture->levelCount,
2020-12-23 07:17:09 +00:00
0,
texture->image,
&texture->resourceAccessType
2020-12-23 07:17:09 +00:00
);
if (texture->msaaTex != NULL)
{
clearCount += 1;
multisampleAttachmentCount += 1;
}
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, texture);
2020-12-23 07:17:09 +00:00
}
2020-12-20 08:05:12 +00:00
if (depthStencilAttachmentInfo != NULL)
2020-12-20 08:05:12 +00:00
{
texture = (VulkanTexture*) depthStencilAttachmentInfo->texture;
2020-12-23 07:17:09 +00:00
depthAspectFlags = VK_IMAGE_ASPECT_DEPTH_BIT;
2021-02-10 01:52:26 +00:00
if (IsStencilFormat(texture->format))
{
2020-12-23 07:17:09 +00:00
depthAspectFlags |= VK_IMAGE_ASPECT_STENCIL_BIT;
}
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
2021-01-02 21:31:17 +00:00
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE,
2020-12-23 07:17:09 +00:00
depthAspectFlags,
0,
texture->layerCount,
2020-12-23 07:17:09 +00:00
0,
texture->levelCount,
2020-12-23 07:17:09 +00:00
0,
texture->image,
&texture->resourceAccessType
2020-12-23 07:17:09 +00:00
);
clearCount += 1;
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, texture);
2020-12-20 08:05:12 +00:00
}
2020-12-23 07:17:09 +00:00
/* Set clear values */
clearValues = SDL_stack_alloc(VkClearValue, clearCount);
2020-12-20 08:05:12 +00:00
totalColorAttachmentCount = colorAttachmentCount + multisampleAttachmentCount;
for (i = 0; i < totalColorAttachmentCount; i += 1)
2020-12-20 08:05:12 +00:00
{
clearValues[i].color.float32[0] = colorAttachmentInfos[i].clearColor.x;
clearValues[i].color.float32[1] = colorAttachmentInfos[i].clearColor.y;
clearValues[i].color.float32[2] = colorAttachmentInfos[i].clearColor.z;
clearValues[i].color.float32[3] = colorAttachmentInfos[i].clearColor.w;
texture = (VulkanTexture*) colorAttachmentInfos[i].texture;
if (texture->msaaTex != NULL)
{
clearValues[i+1].color.float32[0] = colorAttachmentInfos[i].clearColor.x;
clearValues[i+1].color.float32[1] = colorAttachmentInfos[i].clearColor.y;
clearValues[i+1].color.float32[2] = colorAttachmentInfos[i].clearColor.z;
clearValues[i+1].color.float32[3] = colorAttachmentInfos[i].clearColor.w;
i += 1;
}
2020-12-20 08:05:12 +00:00
}
if (depthStencilAttachmentInfo != NULL)
2020-12-20 08:05:12 +00:00
{
clearValues[totalColorAttachmentCount].depthStencil.depth =
depthStencilAttachmentInfo->depthStencilClearValue.depth;
clearValues[totalColorAttachmentCount].depthStencil.stencil =
depthStencilAttachmentInfo->depthStencilClearValue.stencil;
2020-12-20 08:05:12 +00:00
}
VkRenderPassBeginInfo renderPassBeginInfo;
renderPassBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
renderPassBeginInfo.pNext = NULL;
renderPassBeginInfo.renderPass = renderPass;
renderPassBeginInfo.framebuffer = framebuffer->framebuffer;
2020-12-20 08:05:12 +00:00
renderPassBeginInfo.pClearValues = clearValues;
2020-12-23 07:17:09 +00:00
renderPassBeginInfo.clearValueCount = clearCount;
Miscellaneous API changes + more MSAA fixes (#26) **Breaking API Changes** * Removed `REFRESH_SAMPLECOUNT_16/32/64`, since hardware support for these sample counts is generally poor (and completely non-existent with MoltenVK and certain consoles). * Removed unused `sampleCount` parameter from `Refresh_TextureCreateInfo`. * Removed `sampleCount` parameter from `Refresh_ColorAttachmentDescription`. The existence of this parameter meant you had to sync up three different sample count values (render pass, pipeline, and color attachment description) whenever you wanted to use multisampling. However, Vulkan requires that all color attachments in a given pipeline _must_ match the pipeline's sample count anyway, so we can assume all color attachments will use the pipeline's sample count. * Removed the `renderArea` parameter from `Refresh_BeginRenderPass()` since it didn't serve much practical purpose and slightly complicated things on the MoonWorks managed side. **Behavior Changes** * When creating a render pass or graphics pipeline, the requested multisample count will be converted into a sample count that's actually supported by the GPU. For example, if you request 8x MSAA on a device that only supports up to 4x MSAA, it will silently fall back to 4x MSAA. * All color attachments are now forced to have an internal store op of `STORE`, even if `REFRESH_STORE_OP_DONTCARE` is specified. The one exception is internal multisample textures -- if `DONTCARE` is used, those textures will be discarded to save on bandwidth. (Their resolve textures will still be stored.) * The RenderPass hashing logic was updated so that it would still work correctly with the removal of `Refresh_ColorAttachmentDescription.sampleCount`. **Bug Fixes** * Fixed bugs where multisampling logic wasn't kicking in for certain sample counts due to incorrect enum comparisons. Co-authored-by: Caleb Cornett <caleb.cornett@outlook.com> Reviewed-on: https://gitea.moonside.games/MoonsideGames/Refresh/pulls/26 Co-authored-by: TheSpydog <thespydog@noreply.example.org> Co-committed-by: TheSpydog <thespydog@noreply.example.org>
2022-11-08 19:09:21 +00:00
renderPassBeginInfo.renderArea.extent.width = framebufferWidth;
renderPassBeginInfo.renderArea.extent.height = framebufferHeight;
renderPassBeginInfo.renderArea.offset.x = 0;
renderPassBeginInfo.renderArea.offset.y = 0;
2021-01-02 06:07:15 +00:00
renderer->vkCmdBeginRenderPass(
vulkanCommandBuffer->commandBuffer,
2020-12-20 08:05:12 +00:00
&renderPassBeginInfo,
VK_SUBPASS_CONTENTS_INLINE
2021-01-02 06:07:15 +00:00
);
2020-12-20 08:05:12 +00:00
SDL_stack_free(clearValues);
for (i = 0; i < colorAttachmentCount; i += 1)
{
vulkanCommandBuffer->renderPassColorTargetTextures[i] =
(VulkanTexture*) colorAttachmentInfos[i].texture;
}
vulkanCommandBuffer->renderPassColorTargetCount = colorAttachmentCount;
2022-03-04 20:30:33 +00:00
if (depthStencilAttachmentInfo != NULL)
{
vulkanCommandBuffer->renderPassDepthTexture = (VulkanTexture*) depthStencilAttachmentInfo->texture;
}
2022-03-04 20:30:33 +00:00
/* Set sensible default viewport state */
defaultViewport.x = 0;
defaultViewport.y = 0;
defaultViewport.w = framebufferWidth;
defaultViewport.h = framebufferHeight;
defaultViewport.minDepth = 0;
defaultViewport.maxDepth = 1;
VULKAN_INTERNAL_SetCurrentViewport(
vulkanCommandBuffer,
&defaultViewport
);
defaultScissor.x = 0;
defaultScissor.y = 0;
defaultScissor.w = framebufferWidth;
defaultScissor.h = framebufferHeight;
VULKAN_INTERNAL_SetCurrentScissor(
vulkanCommandBuffer,
&defaultScissor
);
2020-12-17 01:23:49 +00:00
}
static void VULKAN_EndRenderPass(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer
2020-12-17 01:23:49 +00:00
) {
2021-01-02 21:31:17 +00:00
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanTexture *currentTexture;
2021-01-02 21:31:17 +00:00
uint32_t i;
2020-12-20 08:05:53 +00:00
2021-01-02 06:07:15 +00:00
renderer->vkCmdEndRenderPass(
vulkanCommandBuffer->commandBuffer
);
2020-12-23 07:17:09 +00:00
if ( vulkanCommandBuffer->vertexUniformBuffer != renderer->dummyVertexUniformBuffer &&
vulkanCommandBuffer->vertexUniformBuffer != NULL
) {
VULKAN_INTERNAL_BindUniformBuffer(
vulkanCommandBuffer,
vulkanCommandBuffer->vertexUniformBuffer
);
}
vulkanCommandBuffer->vertexUniformBuffer = NULL;
if ( vulkanCommandBuffer->fragmentUniformBuffer != renderer->dummyFragmentUniformBuffer &&
vulkanCommandBuffer->fragmentUniformBuffer != NULL
) {
VULKAN_INTERNAL_BindUniformBuffer(
vulkanCommandBuffer,
vulkanCommandBuffer->fragmentUniformBuffer
);
}
vulkanCommandBuffer->fragmentUniformBuffer = NULL;
/* If the render targets can be sampled, transition them to sample layout */
for (i = 0; i < vulkanCommandBuffer->renderPassColorTargetCount; i += 1)
{
currentTexture = vulkanCommandBuffer->renderPassColorTargetTextures[i];
if (currentTexture->usageFlags & VK_IMAGE_USAGE_SAMPLED_BIT)
{
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
2021-01-02 21:31:17 +00:00
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE,
currentTexture->aspectFlags,
0,
currentTexture->layerCount,
0,
currentTexture->levelCount,
0,
currentTexture->image,
&currentTexture->resourceAccessType
);
}
else if (currentTexture->usageFlags & VK_IMAGE_USAGE_STORAGE_BIT)
{
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_COMPUTE_SHADER_STORAGE_IMAGE_READ_WRITE,
currentTexture->aspectFlags,
0,
currentTexture->layerCount,
0,
currentTexture->levelCount,
0,
currentTexture->image,
&currentTexture->resourceAccessType
);
}
}
vulkanCommandBuffer->renderPassColorTargetCount = 0;
2021-02-10 01:52:26 +00:00
if (vulkanCommandBuffer->renderPassDepthTexture != NULL)
{
currentTexture = vulkanCommandBuffer->renderPassDepthTexture;
if (currentTexture->usageFlags & VK_IMAGE_USAGE_SAMPLED_BIT)
{
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_ANY_SHADER_READ_SAMPLED_IMAGE,
currentTexture->aspectFlags,
0,
currentTexture->layerCount,
0,
currentTexture->levelCount,
0,
currentTexture->image,
&currentTexture->resourceAccessType
);
}
}
vulkanCommandBuffer->renderPassDepthTexture = NULL;
2021-01-02 06:07:15 +00:00
vulkanCommandBuffer->currentGraphicsPipeline = NULL;
2020-12-17 01:23:49 +00:00
}
static void VULKAN_BindGraphicsPipeline(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_GraphicsPipeline *graphicsPipeline
2020-12-17 01:23:49 +00:00
) {
2020-12-20 07:17:55 +00:00
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
2020-12-20 07:17:55 +00:00
VulkanGraphicsPipeline* pipeline = (VulkanGraphicsPipeline*) graphicsPipeline;
if ( vulkanCommandBuffer->vertexUniformBuffer != renderer->dummyVertexUniformBuffer &&
vulkanCommandBuffer->vertexUniformBuffer != NULL
) {
VULKAN_INTERNAL_BindUniformBuffer(
vulkanCommandBuffer,
vulkanCommandBuffer->vertexUniformBuffer
);
}
if (pipeline->vertexUniformBlockSize == 0)
{
vulkanCommandBuffer->vertexUniformBuffer = renderer->dummyVertexUniformBuffer;
}
else
{
vulkanCommandBuffer->vertexUniformBuffer = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
renderer,
renderer->vertexUniformBufferPool,
pipeline->vertexUniformBlockSize
);
}
if ( vulkanCommandBuffer->fragmentUniformBuffer != renderer->dummyFragmentUniformBuffer &&
vulkanCommandBuffer->fragmentUniformBuffer != NULL
) {
VULKAN_INTERNAL_BindUniformBuffer(
vulkanCommandBuffer,
vulkanCommandBuffer->fragmentUniformBuffer
);
}
if (pipeline->fragmentUniformBlockSize == 0)
{
vulkanCommandBuffer->fragmentUniformBuffer = renderer->dummyFragmentUniformBuffer;
}
else
{
vulkanCommandBuffer->fragmentUniformBuffer = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
renderer,
renderer->fragmentUniformBufferPool,
pipeline->fragmentUniformBlockSize
);
}
/* bind dummy sets if necessary */
2020-12-27 23:20:59 +00:00
if (pipeline->pipelineLayout->vertexSamplerDescriptorSetCache == NULL)
{
vulkanCommandBuffer->vertexSamplerDescriptorSet = renderer->emptyVertexSamplerDescriptorSet;
}
2020-12-27 23:20:59 +00:00
if (pipeline->pipelineLayout->fragmentSamplerDescriptorSetCache == NULL)
{
vulkanCommandBuffer->fragmentSamplerDescriptorSet = renderer->emptyFragmentSamplerDescriptorSet;
}
2021-01-02 06:07:15 +00:00
renderer->vkCmdBindPipeline(
vulkanCommandBuffer->commandBuffer,
2020-12-20 07:17:55 +00:00
VK_PIPELINE_BIND_POINT_GRAPHICS,
pipeline->pipeline
2021-01-02 06:07:15 +00:00
);
2020-12-23 06:56:26 +00:00
2021-01-02 06:07:15 +00:00
vulkanCommandBuffer->currentGraphicsPipeline = pipeline;
VULKAN_INTERNAL_TrackGraphicsPipeline(renderer, vulkanCommandBuffer, pipeline);
2022-03-04 20:30:33 +00:00
renderer->vkCmdSetViewport(
vulkanCommandBuffer->commandBuffer,
0,
1,
&vulkanCommandBuffer->currentViewport
);
renderer->vkCmdSetScissor(
vulkanCommandBuffer->commandBuffer,
0,
1,
&vulkanCommandBuffer->currentScissor
);
2020-12-17 01:23:49 +00:00
}
2020-12-20 07:31:55 +00:00
static void VULKAN_BindVertexBuffers(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
2020-12-20 07:31:55 +00:00
uint32_t firstBinding,
uint32_t bindingCount,
2021-01-05 23:00:51 +00:00
Refresh_Buffer **pBuffers,
2020-12-20 07:31:55 +00:00
uint64_t *pOffsets
) {
2021-01-02 21:31:17 +00:00
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBuffer *currentVulkanBuffer;
2021-01-02 21:31:17 +00:00
VkBuffer *buffers = SDL_stack_alloc(VkBuffer, bindingCount);
2020-12-21 20:37:54 +00:00
uint32_t i;
2020-12-20 07:31:55 +00:00
2020-12-21 20:37:54 +00:00
for (i = 0; i < bindingCount; i += 1)
2020-12-20 07:31:55 +00:00
{
currentVulkanBuffer = (VulkanBuffer*) pBuffers[i];
buffers[i] = currentVulkanBuffer->buffer;
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, currentVulkanBuffer);
2020-12-20 07:31:55 +00:00
}
2021-01-02 06:07:15 +00:00
renderer->vkCmdBindVertexBuffers(
vulkanCommandBuffer->commandBuffer,
2020-12-20 07:31:55 +00:00
firstBinding,
bindingCount,
buffers,
pOffsets
2021-01-02 06:07:15 +00:00
);
2020-12-20 07:31:55 +00:00
SDL_stack_free(buffers);
}
static void VULKAN_BindIndexBuffer(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_Buffer *buffer,
2020-12-20 07:31:55 +00:00
uint64_t offset,
2021-01-05 23:00:51 +00:00
Refresh_IndexElementSize indexElementSize
2020-12-20 07:31:55 +00:00
) {
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanBuffer* vulkanBuffer = (VulkanBuffer*) buffer;
2020-12-21 23:44:43 +00:00
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, vulkanBuffer);
2021-01-02 06:07:15 +00:00
renderer->vkCmdBindIndexBuffer(
vulkanCommandBuffer->commandBuffer,
vulkanBuffer->buffer,
2020-12-20 07:31:55 +00:00
offset,
RefreshToVK_IndexType[indexElementSize]
2021-01-02 06:07:15 +00:00
);
2020-12-20 07:31:55 +00:00
}
2020-12-30 01:31:39 +00:00
static void VULKAN_BindComputePipeline(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_ComputePipeline *computePipeline
2020-12-30 01:31:39 +00:00
) {
2021-01-02 21:31:17 +00:00
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
2020-12-31 00:47:13 +00:00
VulkanComputePipeline *vulkanComputePipeline = (VulkanComputePipeline*) computePipeline;
2020-12-31 04:39:47 +00:00
/* bind dummy sets */
if (vulkanComputePipeline->pipelineLayout->bufferDescriptorSetCache == NULL)
{
vulkanCommandBuffer->bufferDescriptorSet = renderer->emptyComputeBufferDescriptorSet;
2020-12-31 04:39:47 +00:00
}
if (vulkanComputePipeline->pipelineLayout->imageDescriptorSetCache == NULL)
{
vulkanCommandBuffer->imageDescriptorSet = renderer->emptyComputeImageDescriptorSet;
}
if ( vulkanCommandBuffer->computeUniformBuffer != renderer->dummyComputeUniformBuffer &&
vulkanCommandBuffer->computeUniformBuffer != NULL
) {
VULKAN_INTERNAL_BindUniformBuffer(
vulkanCommandBuffer,
vulkanCommandBuffer->computeUniformBuffer
);
2020-12-31 04:39:47 +00:00
}
2020-12-31 00:47:13 +00:00
renderer->vkCmdBindPipeline(
2021-01-02 06:07:15 +00:00
vulkanCommandBuffer->commandBuffer,
2020-12-31 00:47:13 +00:00
VK_PIPELINE_BIND_POINT_COMPUTE,
vulkanComputePipeline->pipeline
);
2021-01-02 06:07:15 +00:00
vulkanCommandBuffer->currentComputePipeline = vulkanComputePipeline;
if (vulkanComputePipeline->uniformBlockSize == 0)
{
vulkanCommandBuffer->computeUniformBuffer = renderer->dummyComputeUniformBuffer;
}
else
{
vulkanCommandBuffer->computeUniformBuffer = VULKAN_INTERNAL_AcquireUniformBufferFromPool(
renderer,
renderer->computeUniformBufferPool,
vulkanComputePipeline->uniformBlockSize
);
}
VULKAN_INTERNAL_TrackComputePipeline(renderer, vulkanCommandBuffer, vulkanComputePipeline);
2020-12-30 01:31:39 +00:00
}
static void VULKAN_BindComputeBuffers(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_Buffer **pBuffers
2020-12-30 01:31:39 +00:00
) {
2021-01-02 21:31:17 +00:00
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanComputePipeline *computePipeline = vulkanCommandBuffer->currentComputePipeline;
2021-01-02 21:31:17 +00:00
VulkanBuffer *currentVulkanBuffer;
VkDescriptorBufferInfo descriptorBufferInfos[MAX_BUFFER_BINDINGS];
2020-12-31 00:47:13 +00:00
uint32_t i;
if (computePipeline->pipelineLayout->bufferDescriptorSetCache == NULL)
{
return;
}
for (i = 0; i < computePipeline->pipelineLayout->bufferDescriptorSetCache->bindingCount; i += 1)
{
currentVulkanBuffer = (VulkanBuffer*) pBuffers[i];
2020-12-31 06:28:37 +00:00
descriptorBufferInfos[i].buffer = currentVulkanBuffer->buffer;
descriptorBufferInfos[i].offset = 0;
descriptorBufferInfos[i].range = currentVulkanBuffer->size;
2020-12-31 06:28:37 +00:00
VULKAN_INTERNAL_BufferMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_COMPUTE_SHADER_BUFFER_READ_WRITE,
currentVulkanBuffer
);
VULKAN_INTERNAL_TrackBuffer(renderer, vulkanCommandBuffer, currentVulkanBuffer);
2020-12-31 00:47:13 +00:00
}
vulkanCommandBuffer->bufferDescriptorSet =
VULKAN_INTERNAL_FetchDescriptorSet(
2020-12-31 00:47:13 +00:00
renderer,
vulkanCommandBuffer,
2020-12-31 00:47:13 +00:00
computePipeline->pipelineLayout->bufferDescriptorSetCache,
NULL,
descriptorBufferInfos
2020-12-31 00:47:13 +00:00
);
if (vulkanCommandBuffer->boundComputeBufferCount == vulkanCommandBuffer->boundComputeBufferCapacity)
{
vulkanCommandBuffer->boundComputeBufferCapacity *= 2;
vulkanCommandBuffer->boundComputeBuffers = SDL_realloc(
vulkanCommandBuffer->boundComputeBuffers,
vulkanCommandBuffer->boundComputeBufferCapacity * sizeof(VulkanBuffer*)
);
}
vulkanCommandBuffer->boundComputeBuffers[vulkanCommandBuffer->boundComputeBufferCount] = currentVulkanBuffer;
vulkanCommandBuffer->boundComputeBufferCount += 1;
2020-12-30 01:31:39 +00:00
}
static void VULKAN_BindComputeTextures(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
Refresh_Texture **pTextures
2020-12-30 01:31:39 +00:00
) {
2021-01-02 21:31:17 +00:00
VulkanRenderer* renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
VulkanComputePipeline *computePipeline = vulkanCommandBuffer->currentComputePipeline;
2021-01-02 21:31:17 +00:00
2020-12-31 00:47:13 +00:00
VulkanTexture *currentTexture;
VkDescriptorImageInfo descriptorImageInfos[MAX_TEXTURE_SAMPLERS];
2020-12-31 00:47:13 +00:00
uint32_t i;
if (computePipeline->pipelineLayout->imageDescriptorSetCache == NULL)
{
return;
}
for (i = 0; i < computePipeline->pipelineLayout->imageDescriptorSetCache->bindingCount; i += 1)
{
currentTexture = (VulkanTexture*) pTextures[i];
descriptorImageInfos[i].imageView = currentTexture->view;
descriptorImageInfos[i].sampler = VK_NULL_HANDLE;
descriptorImageInfos[i].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_COMPUTE_SHADER_STORAGE_IMAGE_READ_WRITE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
currentTexture->layerCount,
0,
currentTexture->levelCount,
0,
currentTexture->image,
&currentTexture->resourceAccessType
);
VULKAN_INTERNAL_TrackTexture(renderer, vulkanCommandBuffer, currentTexture);
if (vulkanCommandBuffer->boundComputeTextureCount == vulkanCommandBuffer->boundComputeTextureCapacity)
{
vulkanCommandBuffer->boundComputeTextureCapacity *= 2;
vulkanCommandBuffer->boundComputeTextures = SDL_realloc(
vulkanCommandBuffer->boundComputeTextures,
vulkanCommandBuffer->boundComputeTextureCapacity * sizeof(VulkanTexture *)
);
}
vulkanCommandBuffer->boundComputeTextures[i] = currentTexture;
vulkanCommandBuffer->boundComputeTextureCount += 1;
2020-12-31 00:47:13 +00:00
}
vulkanCommandBuffer->imageDescriptorSet =
VULKAN_INTERNAL_FetchDescriptorSet(
2020-12-31 00:47:13 +00:00
renderer,
vulkanCommandBuffer,
2020-12-31 00:47:13 +00:00
computePipeline->pipelineLayout->imageDescriptorSetCache,
descriptorImageInfos,
NULL
2020-12-31 00:47:13 +00:00
);
2020-12-30 01:31:39 +00:00
}
2021-01-03 02:02:20 +00:00
static void VULKAN_INTERNAL_AllocateCommandBuffers(
2021-01-02 06:07:15 +00:00
VulkanRenderer *renderer,
2021-01-03 02:02:20 +00:00
VulkanCommandPool *vulkanCommandPool,
uint32_t allocateCount
2021-01-02 06:07:15 +00:00
) {
VkCommandBufferAllocateInfo allocateInfo;
VkFenceCreateInfo fenceCreateInfo;
2021-01-02 06:07:15 +00:00
VkResult vulkanResult;
2021-01-03 02:02:20 +00:00
uint32_t i;
VkCommandBuffer *commandBuffers = SDL_stack_alloc(VkCommandBuffer, allocateCount);
VulkanCommandBuffer *commandBuffer;
2021-01-02 06:07:15 +00:00
2021-01-03 02:02:20 +00:00
vulkanCommandPool->inactiveCommandBufferCapacity += allocateCount;
vulkanCommandPool->inactiveCommandBuffers = SDL_realloc(
vulkanCommandPool->inactiveCommandBuffers,
sizeof(VulkanCommandBuffer*) *
vulkanCommandPool->inactiveCommandBufferCapacity
);
allocateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
allocateInfo.pNext = NULL;
allocateInfo.commandPool = vulkanCommandPool->commandPool;
allocateInfo.commandBufferCount = allocateCount;
allocateInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
vulkanResult = renderer->vkAllocateCommandBuffers(
renderer->logicalDevice,
&allocateInfo,
commandBuffers
);
if (vulkanResult != VK_SUCCESS)
2021-01-02 06:07:15 +00:00
{
2021-01-27 20:51:36 +00:00
LogVulkanResultAsError("vkAllocateCommandBuffers", vulkanResult);
2021-01-03 02:02:20 +00:00
SDL_stack_free(commandBuffers);
return;
}
2021-01-02 06:07:15 +00:00
2021-01-03 02:02:20 +00:00
for (i = 0; i < allocateCount; i += 1)
{
commandBuffer = SDL_malloc(sizeof(VulkanCommandBuffer));
commandBuffer->commandPool = vulkanCommandPool;
commandBuffer->commandBuffer = commandBuffers[i];
/* Create fence */
fenceCreateInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
fenceCreateInfo.pNext = NULL;
fenceCreateInfo.flags = 0;
vulkanResult = renderer->vkCreateFence(
renderer->logicalDevice,
&fenceCreateInfo,
NULL,
&commandBuffer->inFlightFence
);
if (vulkanResult != VK_SUCCESS)
{
LogVulkanResultAsError("vkCreateFence", vulkanResult);
}
commandBuffer->renderPassDepthTexture = NULL;
/* Presentation tracking */
commandBuffer->presentDataCapacity = 1;
commandBuffer->presentDataCount = 0;
commandBuffer->presentDatas = SDL_malloc(
commandBuffer->presentDataCapacity * sizeof(VkPresentInfoKHR)
);
commandBuffer->waitSemaphoreCapacity = 1;
commandBuffer->waitSemaphoreCount = 0;
commandBuffer->waitSemaphores = SDL_malloc(
commandBuffer->waitSemaphoreCapacity * sizeof(VkSemaphore)
);
commandBuffer->signalSemaphoreCapacity = 1;
commandBuffer->signalSemaphoreCount = 0;
commandBuffer->signalSemaphores = SDL_malloc(
commandBuffer->signalSemaphoreCapacity * sizeof(VkSemaphore)
);
/* Transfer buffer tracking */
commandBuffer->transferBufferCapacity = 4;
commandBuffer->transferBufferCount = 0;
commandBuffer->transferBuffers = SDL_malloc(
commandBuffer->transferBufferCapacity * sizeof(VulkanTransferBuffer*)
);
/* Bound buffer tracking */
commandBuffer->boundUniformBufferCapacity = 16;
commandBuffer->boundUniformBufferCount = 0;
commandBuffer->boundUniformBuffers = SDL_malloc(
commandBuffer->boundUniformBufferCapacity * sizeof(VulkanUniformBuffer*)
);
/* Descriptor set tracking */
commandBuffer->boundDescriptorSetDataCapacity = 16;
commandBuffer->boundDescriptorSetDataCount = 0;
commandBuffer->boundDescriptorSetDatas = SDL_malloc(
commandBuffer->boundDescriptorSetDataCapacity * sizeof(DescriptorSetData)
);
/* Bound compute resource tracking */
commandBuffer->boundComputeBufferCapacity = 16;
commandBuffer->boundComputeBufferCount = 0;
commandBuffer->boundComputeBuffers = SDL_malloc(
commandBuffer->boundComputeBufferCapacity * sizeof(VulkanBuffer*)
);
commandBuffer->boundComputeTextureCapacity = 16;
commandBuffer->boundComputeTextureCount = 0;
commandBuffer->boundComputeTextures = SDL_malloc(
commandBuffer->boundComputeTextureCapacity * sizeof(VulkanTexture*)
);
/* Resource tracking */
commandBuffer->usedBufferCapacity = 4;
commandBuffer->usedBufferCount = 0;
commandBuffer->usedBuffers = SDL_malloc(
commandBuffer->usedBufferCapacity * sizeof(VulkanBuffer*)
);
commandBuffer->usedTextureCapacity = 4;
commandBuffer->usedTextureCount = 0;
commandBuffer->usedTextures = SDL_malloc(
commandBuffer->usedTextureCapacity * sizeof(VulkanTexture*)
);
commandBuffer->usedSamplerCapacity = 4;
commandBuffer->usedSamplerCount = 0;
commandBuffer->usedSamplers = SDL_malloc(
commandBuffer->usedSamplerCapacity * sizeof(VulkanSampler*)
);
commandBuffer->usedGraphicsPipelineCapacity = 4;
commandBuffer->usedGraphicsPipelineCount = 0;
commandBuffer->usedGraphicsPipelines = SDL_malloc(
commandBuffer->usedGraphicsPipelineCapacity * sizeof(VulkanGraphicsPipeline*)
);
commandBuffer->usedComputePipelineCapacity = 4;
commandBuffer->usedComputePipelineCount = 0;
commandBuffer->usedComputePipelines = SDL_malloc(
commandBuffer->usedComputePipelineCapacity * sizeof(VulkanComputePipeline*)
);
commandBuffer->usedFramebufferCapacity = 4;
commandBuffer->usedFramebufferCount = 0;
commandBuffer->usedFramebuffers = SDL_malloc(
commandBuffer->usedFramebufferCapacity * sizeof(VulkanFramebuffer*)
);
2021-01-03 02:02:20 +00:00
vulkanCommandPool->inactiveCommandBuffers[
vulkanCommandPool->inactiveCommandBufferCount
] = commandBuffer;
2021-01-03 02:02:20 +00:00
vulkanCommandPool->inactiveCommandBufferCount += 1;
}
2021-01-02 06:07:15 +00:00
2021-01-03 02:02:20 +00:00
SDL_stack_free(commandBuffers);
}
2021-01-02 06:07:15 +00:00
2021-01-03 02:02:20 +00:00
static VulkanCommandPool* VULKAN_INTERNAL_FetchCommandPool(
VulkanRenderer *renderer,
SDL_threadID threadID
) {
VulkanCommandPool *vulkanCommandPool;
VkCommandPoolCreateInfo commandPoolCreateInfo;
VkResult vulkanResult;
CommandPoolHash commandPoolHash;
2021-01-02 06:07:15 +00:00
2021-01-03 02:02:20 +00:00
commandPoolHash.threadID = threadID;
vulkanCommandPool = CommandPoolHashTable_Fetch(
&renderer->commandPoolHashTable,
commandPoolHash
);
if (vulkanCommandPool != NULL)
{
return vulkanCommandPool;
}
2021-01-03 03:03:25 +00:00
vulkanCommandPool = (VulkanCommandPool*) SDL_malloc(sizeof(VulkanCommandPool));
2021-01-03 02:02:20 +00:00
commandPoolCreateInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
commandPoolCreateInfo.pNext = NULL;
commandPoolCreateInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
commandPoolCreateInfo.queueFamilyIndex = renderer->queueFamilyIndices.graphicsFamily;
vulkanResult = renderer->vkCreateCommandPool(
renderer->logicalDevice,
&commandPoolCreateInfo,
NULL,
&vulkanCommandPool->commandPool
);
2021-01-05 07:31:56 +00:00
if (vulkanResult != VK_SUCCESS)
{
2021-01-05 23:00:51 +00:00
Refresh_LogError("Failed to create command pool!");
2021-01-27 20:51:36 +00:00
LogVulkanResultAsError("vkCreateCommandPool", vulkanResult);
2021-01-05 07:31:56 +00:00
return NULL;
}
2021-01-03 02:02:20 +00:00
vulkanCommandPool->threadID = threadID;
vulkanCommandPool->inactiveCommandBufferCapacity = 0;
vulkanCommandPool->inactiveCommandBufferCount = 0;
vulkanCommandPool->inactiveCommandBuffers = NULL;
VULKAN_INTERNAL_AllocateCommandBuffers(
renderer,
vulkanCommandPool,
2
);
CommandPoolHashTable_Insert(
&renderer->commandPoolHashTable,
commandPoolHash,
vulkanCommandPool
);
return vulkanCommandPool;
}
static VulkanCommandBuffer* VULKAN_INTERNAL_GetInactiveCommandBufferFromPool(
VulkanRenderer *renderer,
SDL_threadID threadID
) {
VulkanCommandPool *commandPool =
VULKAN_INTERNAL_FetchCommandPool(renderer, threadID);
VulkanCommandBuffer *commandBuffer;
if (commandPool->inactiveCommandBufferCount == 0)
{
VULKAN_INTERNAL_AllocateCommandBuffers(
renderer,
commandPool,
commandPool->inactiveCommandBufferCapacity
);
2021-01-02 06:07:15 +00:00
}
2021-01-03 03:03:25 +00:00
commandBuffer = commandPool->inactiveCommandBuffers[commandPool->inactiveCommandBufferCount - 1];
2021-01-02 06:07:15 +00:00
commandPool->inactiveCommandBufferCount -= 1;
return commandBuffer;
}
2021-01-05 23:00:51 +00:00
static Refresh_CommandBuffer* VULKAN_AcquireCommandBuffer(
Refresh_Renderer *driverData
2021-01-02 06:07:15 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VkResult result;
2021-01-02 06:07:15 +00:00
SDL_threadID threadID = SDL_ThreadID();
SDL_LockMutex(renderer->acquireCommandBufferLock);
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *commandBuffer =
VULKAN_INTERNAL_GetInactiveCommandBufferFromPool(renderer, threadID);
SDL_UnlockMutex(renderer->acquireCommandBufferLock);
/* Reset state */
2021-01-02 06:07:15 +00:00
commandBuffer->currentComputePipeline = NULL;
commandBuffer->currentGraphicsPipeline = NULL;
commandBuffer->vertexUniformBuffer = NULL;
commandBuffer->fragmentUniformBuffer = NULL;
commandBuffer->computeUniformBuffer = NULL;
2021-01-02 06:07:15 +00:00
commandBuffer->renderPassColorTargetCount = 0;
2021-01-02 06:07:15 +00:00
/* Reset the command buffer here to avoid resets being called
* from a separate thread than where the command buffer was acquired
*/
result = renderer->vkResetCommandBuffer(
commandBuffer->commandBuffer,
VK_COMMAND_BUFFER_RESET_RELEASE_RESOURCES_BIT
);
if (result != VK_SUCCESS)
{
LogVulkanResultAsError("vkResetCommandBuffer", result);
}
result = renderer->vkResetFences(
renderer->logicalDevice,
1,
&commandBuffer->inFlightFence
);
if (result != VK_SUCCESS)
{
LogVulkanResultAsError("vkResetFences", result);
}
2021-01-02 06:07:15 +00:00
VULKAN_INTERNAL_BeginCommandBuffer(renderer, commandBuffer);
2021-01-05 23:00:51 +00:00
return (Refresh_CommandBuffer*) commandBuffer;
2021-01-02 06:07:15 +00:00
}
static WindowData* VULKAN_INTERNAL_FetchWindowData(
void *windowHandle
) {
return (WindowData*) SDL_GetWindowData(windowHandle, WINDOW_DATA);
}
static uint8_t VULKAN_ClaimWindow(
Refresh_Renderer *driverData,
void *windowHandle,
Refresh_PresentMode presentMode
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(windowHandle);
if (windowData == NULL)
{
windowData = SDL_malloc(sizeof(WindowData));
windowData->windowHandle = windowHandle;
windowData->preferredPresentMode = presentMode;
if (VULKAN_INTERNAL_CreateSwapchain(renderer, windowData))
{
SDL_SetWindowData((SDL_Window*) windowHandle, WINDOW_DATA, windowData);
if (renderer->claimedWindowCount >= renderer->claimedWindowCapacity)
{
renderer->claimedWindowCapacity *= 2;
renderer->claimedWindows = SDL_realloc(
renderer->claimedWindows,
renderer->claimedWindowCapacity * sizeof(WindowData*)
);
}
renderer->claimedWindows[renderer->claimedWindowCount] = windowData;
renderer->claimedWindowCount += 1;
return 1;
}
else
{
Refresh_LogError("Could not create swapchain, failed to claim window!");
SDL_free(windowData);
return 0;
}
}
else
{
Refresh_LogWarn("Window already claimed!");
return 0;
}
}
static void VULKAN_UnclaimWindow(
Refresh_Renderer *driverData,
void *windowHandle
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(windowHandle);
uint32_t i;
if (windowData == NULL)
{
return;
}
if (windowData->swapchainData != NULL)
{
VULKAN_Wait(driverData);
VULKAN_INTERNAL_DestroySwapchain(
(VulkanRenderer*) driverData,
windowData
);
}
for (i = 0; i < renderer->claimedWindowCount; i += 1)
{
if (renderer->claimedWindows[i]->windowHandle == windowHandle)
{
renderer->claimedWindows[i] = renderer->claimedWindows[renderer->claimedWindowCount - 1];
renderer->claimedWindowCount -= 1;
break;
}
}
SDL_free(windowData);
SDL_SetWindowData((SDL_Window*) windowHandle, WINDOW_DATA, NULL);
}
static Refresh_Texture* VULKAN_AcquireSwapchainTexture(
2021-01-05 23:00:51 +00:00
Refresh_Renderer *driverData,
Refresh_CommandBuffer *commandBuffer,
void *windowHandle,
uint32_t *pWidth,
uint32_t *pHeight
2020-12-17 04:19:11 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *vulkanCommandBuffer = (VulkanCommandBuffer*) commandBuffer;
uint32_t swapchainImageIndex;
WindowData *windowData;
VulkanSwapchainData *swapchainData;
VkResult acquireResult = VK_SUCCESS;
VulkanTexture *swapchainTexture = NULL;
VulkanPresentData *presentData;
2020-12-31 07:02:12 +00:00
windowData = VULKAN_INTERNAL_FetchWindowData(windowHandle);
swapchainData = windowData->swapchainData;
/* Window is claimed but swapchain is invalid! */
if (swapchainData == NULL)
{
if (SDL_GetWindowFlags(windowHandle) & SDL_WINDOW_MINIMIZED)
{
/* Window is minimized, don't bother */
return NULL;
}
/* Let's try to recreate */
VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
swapchainData = windowData->swapchainData;
if (swapchainData == NULL)
{
Refresh_LogWarn("Failed to recreate swapchain!");
return NULL;
}
}
acquireResult = renderer->vkAcquireNextImageKHR(
renderer->logicalDevice,
swapchainData->swapchain,
UINT64_MAX,
swapchainData->imageAvailableSemaphore,
VK_NULL_HANDLE,
&swapchainImageIndex
);
/* Acquisition is invalid, let's try to recreate */
if (acquireResult != VK_SUCCESS && acquireResult != VK_SUBOPTIMAL_KHR)
{
VULKAN_INTERNAL_RecreateSwapchain(renderer, windowData);
swapchainData = windowData->swapchainData;
if (swapchainData == NULL)
{
Refresh_LogWarn("Failed to recreate swapchain!");
return NULL;
}
2020-12-27 23:34:15 +00:00
acquireResult = renderer->vkAcquireNextImageKHR(
renderer->logicalDevice,
swapchainData->swapchain,
UINT64_MAX,
2022-02-10 05:42:19 +00:00
swapchainData->imageAvailableSemaphore,
VK_NULL_HANDLE,
&swapchainImageIndex
);
if (acquireResult != VK_SUCCESS && acquireResult != VK_SUBOPTIMAL_KHR)
{
Refresh_LogWarn("Failed to acquire swapchain texture!");
return NULL;
}
}
swapchainTexture = &swapchainData->textures[swapchainImageIndex];
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
vulkanCommandBuffer->commandBuffer,
RESOURCE_ACCESS_COLOR_ATTACHMENT_WRITE,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
1,
0,
1,
0,
swapchainTexture->image,
&swapchainTexture->resourceAccessType
);
/* Set up present struct */
if (vulkanCommandBuffer->presentDataCount == vulkanCommandBuffer->presentDataCapacity)
{
vulkanCommandBuffer->presentDataCapacity += 1;
vulkanCommandBuffer->presentDatas = SDL_realloc(
vulkanCommandBuffer->presentDatas,
vulkanCommandBuffer->presentDataCapacity * sizeof(VkPresentInfoKHR)
);
}
presentData = &vulkanCommandBuffer->presentDatas[vulkanCommandBuffer->presentDataCount];
vulkanCommandBuffer->presentDataCount += 1;
presentData->windowData = windowData;
presentData->swapchainImageIndex = swapchainImageIndex;
/* Set up present semaphores */
if (vulkanCommandBuffer->waitSemaphoreCount == vulkanCommandBuffer->waitSemaphoreCapacity)
{
vulkanCommandBuffer->waitSemaphoreCapacity += 1;
vulkanCommandBuffer->waitSemaphores = SDL_realloc(
vulkanCommandBuffer->waitSemaphores,
vulkanCommandBuffer->waitSemaphoreCapacity * sizeof(VkSemaphore)
);
}
vulkanCommandBuffer->waitSemaphores[vulkanCommandBuffer->waitSemaphoreCount] = swapchainData->imageAvailableSemaphore;
vulkanCommandBuffer->waitSemaphoreCount += 1;
if (vulkanCommandBuffer->signalSemaphoreCount == vulkanCommandBuffer->signalSemaphoreCapacity)
{
vulkanCommandBuffer->signalSemaphoreCapacity += 1;
vulkanCommandBuffer->signalSemaphores = SDL_realloc(
vulkanCommandBuffer->signalSemaphores,
vulkanCommandBuffer->signalSemaphoreCapacity * sizeof(VkSemaphore)
);
2020-12-27 23:34:15 +00:00
}
vulkanCommandBuffer->signalSemaphores[vulkanCommandBuffer->signalSemaphoreCount] = swapchainData->renderFinishedSemaphore;
vulkanCommandBuffer->signalSemaphoreCount += 1;
*pWidth = swapchainData->extent.width;
*pHeight = swapchainData->extent.height;
return (Refresh_Texture*) swapchainTexture;
}
static Refresh_TextureFormat VULKAN_GetSwapchainFormat(
Refresh_Renderer *driverData,
void *windowHandle
) {
WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(windowHandle);
if (windowData == NULL)
{
Refresh_LogWarn("Cannot get swapchain format, window has not been claimed!");
return 0;
}
if (windowData->swapchainData == NULL)
{
Refresh_LogWarn("Cannot get swapchain format, swapchain is currently invalid!");
return 0;
}
if (windowData->swapchainData->swapchainFormat == VK_FORMAT_R8G8B8A8_UNORM)
{
return REFRESH_TEXTUREFORMAT_R8G8B8A8;
}
else if (windowData->swapchainData->swapchainFormat == VK_FORMAT_B8G8R8A8_UNORM)
{
return REFRESH_TEXTUREFORMAT_B8G8R8A8;
}
else
{
Refresh_LogWarn("Unrecognized swapchain format!");
return 0;
}
2020-12-27 23:34:15 +00:00
}
static void VULKAN_SetSwapchainPresentMode(
Refresh_Renderer *driverData,
void *windowHandle,
Refresh_PresentMode presentMode
) {
WindowData *windowData = VULKAN_INTERNAL_FetchWindowData(windowHandle);
if (windowData == NULL)
{
Refresh_LogWarn("Cannot set present mode, window has not been claimed!");
return;
}
VULKAN_INTERNAL_RecreateSwapchain(
(VulkanRenderer *)driverData,
windowData
);
}
/* Submission structure */
static void VULKAN_INTERNAL_PerformPendingDestroys(
VulkanRenderer *renderer
) {
int32_t i;
SDL_LockMutex(renderer->disposeLock);
for (i = renderer->texturesToDestroyCount - 1; i >= 0; i -= 1)
{
if (SDL_AtomicGet(&renderer->texturesToDestroy[i]->referenceCount) == 0)
{
VULKAN_INTERNAL_DestroyTexture(
renderer,
renderer->texturesToDestroy[i]
);
renderer->texturesToDestroy[i] = renderer->texturesToDestroy[renderer->texturesToDestroyCount - 1];
renderer->texturesToDestroyCount -= 1;
}
}
for (i = renderer->buffersToDestroyCount - 1; i >= 0; i -= 1)
{
if (SDL_AtomicGet(&renderer->buffersToDestroy[i]->referenceCount) == 0)
{
VULKAN_INTERNAL_DestroyBuffer(
renderer,
renderer->buffersToDestroy[i]
);
renderer->buffersToDestroy[i] = renderer->buffersToDestroy[renderer->buffersToDestroyCount - 1];
renderer->buffersToDestroyCount -= 1;
}
}
for (i = renderer->graphicsPipelinesToDestroyCount - 1; i >= 0; i -= 1)
{
if (SDL_AtomicGet(&renderer->graphicsPipelinesToDestroy[i]->referenceCount) == 0)
{
VULKAN_INTERNAL_DestroyGraphicsPipeline(
renderer,
renderer->graphicsPipelinesToDestroy[i]
);
renderer->graphicsPipelinesToDestroy[i] = renderer->graphicsPipelinesToDestroy[renderer->graphicsPipelinesToDestroyCount - 1];
renderer->graphicsPipelinesToDestroyCount -= 1;
}
}
for (i = renderer->computePipelinesToDestroyCount - 1; i >= 0; i -= 1)
{
if (SDL_AtomicGet(&renderer->computePipelinesToDestroy[i]->referenceCount) == 0)
{
VULKAN_INTERNAL_DestroyComputePipeline(
renderer,
renderer->computePipelinesToDestroy[i]
);
renderer->computePipelinesToDestroy[i] = renderer->computePipelinesToDestroy[renderer->computePipelinesToDestroyCount - 1];
renderer->computePipelinesToDestroyCount -= 1 ;
}
}
2022-02-23 05:54:32 +00:00
for (i = renderer->shaderModulesToDestroyCount - 1; i >= 0; i -= 1)
{
if (SDL_AtomicGet(&renderer->shaderModulesToDestroy[i]->referenceCount) == 0)
{
VULKAN_INTERNAL_DestroyShaderModule(
renderer,
renderer->shaderModulesToDestroy[i]
);
renderer->shaderModulesToDestroy[i] = renderer->shaderModulesToDestroy[renderer->shaderModulesToDestroyCount - 1];
renderer->shaderModulesToDestroyCount -= 1;
}
}
for (i = renderer->samplersToDestroyCount - 1; i >= 0; i -= 1)
{
if (SDL_AtomicGet(&renderer->samplersToDestroy[i]->referenceCount) == 0)
{
VULKAN_INTERNAL_DestroySampler(
renderer,
renderer->samplersToDestroy[i]
);
renderer->samplersToDestroy[i] = renderer->samplersToDestroy[renderer->samplersToDestroyCount - 1];
renderer->samplersToDestroyCount -= 1;
}
}
for (i = renderer->framebuffersToDestroyCount - 1; i >= 0; i -= 1)
{
if (SDL_AtomicGet(&renderer->framebuffersToDestroy[i]->referenceCount) == 0)
{
VULKAN_INTERNAL_DestroyFramebuffer(
renderer,
renderer->framebuffersToDestroy[i]
);
renderer->framebuffersToDestroy[i] = renderer->framebuffersToDestroy[renderer->framebuffersToDestroyCount - 1];
renderer->framebuffersToDestroyCount -= 1;
}
}
SDL_UnlockMutex(renderer->disposeLock);
}
static void VULKAN_INTERNAL_CleanCommandBuffer(
VulkanRenderer *renderer,
VulkanCommandBuffer *commandBuffer
) {
uint32_t i;
VulkanUniformBuffer *uniformBuffer;
DescriptorSetData *descriptorSetData;
VkResult result;
/* Bound uniform buffers are now available */
for (i = 0; i < commandBuffer->boundUniformBufferCount; i += 1)
{
uniformBuffer = commandBuffer->boundUniformBuffers[i];
SDL_LockMutex(uniformBuffer->pool->lock);
if (uniformBuffer->pool->availableBufferCount == uniformBuffer->pool->availableBufferCapacity)
{
uniformBuffer->pool->availableBufferCapacity *= 2;
uniformBuffer->pool->availableBuffers = SDL_realloc(
uniformBuffer->pool->availableBuffers,
uniformBuffer->pool->availableBufferCapacity * sizeof(VulkanUniformBuffer*)
);
}
uniformBuffer->pool->availableBuffers[uniformBuffer->pool->availableBufferCount] = uniformBuffer;
uniformBuffer->pool->availableBufferCount += 1;
SDL_UnlockMutex(uniformBuffer->pool->lock);
}
commandBuffer->boundUniformBufferCount = 0;
SDL_LockMutex(renderer->transferBufferPool.lock);
if (renderer->transferBufferPool.availableBufferCount + commandBuffer->transferBufferCount >= renderer->transferBufferPool.availableBufferCapacity)
{
renderer->transferBufferPool.availableBufferCapacity = renderer->transferBufferPool.availableBufferCount + commandBuffer->transferBufferCount;
renderer->transferBufferPool.availableBuffers = SDL_realloc(
renderer->transferBufferPool.availableBuffers,
renderer->transferBufferPool.availableBufferCapacity * sizeof(VulkanTransferBuffer*)
);
}
for (i = 0; i < commandBuffer->transferBufferCount; i += 1)
{
commandBuffer->transferBuffers[i]->offset = 0;
renderer->transferBufferPool.availableBuffers[renderer->transferBufferPool.availableBufferCount] = commandBuffer->transferBuffers[i];
renderer->transferBufferPool.availableBufferCount += 1;
}
SDL_UnlockMutex(renderer->transferBufferPool.lock);
commandBuffer->transferBufferCount = 0;
/* Bound descriptor sets are now available */
for (i = 0; i < commandBuffer->boundDescriptorSetDataCount; i += 1)
{
descriptorSetData = &commandBuffer->boundDescriptorSetDatas[i];
SDL_LockMutex(descriptorSetData->descriptorSetCache->lock);
if (descriptorSetData->descriptorSetCache->inactiveDescriptorSetCount == descriptorSetData->descriptorSetCache->inactiveDescriptorSetCapacity)
2022-01-18 05:09:27 +00:00
{
descriptorSetData->descriptorSetCache->inactiveDescriptorSetCapacity *= 2;
descriptorSetData->descriptorSetCache->inactiveDescriptorSets = SDL_realloc(
descriptorSetData->descriptorSetCache->inactiveDescriptorSets,
descriptorSetData->descriptorSetCache->inactiveDescriptorSetCapacity * sizeof(VkDescriptorSet)
2022-01-18 05:09:27 +00:00
);
}
descriptorSetData->descriptorSetCache->inactiveDescriptorSets[descriptorSetData->descriptorSetCache->inactiveDescriptorSetCount] = descriptorSetData->descriptorSet;
descriptorSetData->descriptorSetCache->inactiveDescriptorSetCount += 1;
SDL_UnlockMutex(descriptorSetData->descriptorSetCache->lock);
}
commandBuffer->boundDescriptorSetDataCount = 0;
/* Decrement reference counts */
for (i = 0; i < commandBuffer->usedBufferCount; i += 1)
{
SDL_AtomicDecRef(&commandBuffer->usedBuffers[i]->referenceCount);
}
commandBuffer->usedBufferCount = 0;
for (i = 0; i < commandBuffer->usedTextureCount; i += 1)
{
SDL_AtomicDecRef(&commandBuffer->usedTextures[i]->referenceCount);
}
commandBuffer->usedTextureCount = 0;
for (i = 0; i < commandBuffer->usedSamplerCount; i += 1)
{
SDL_AtomicDecRef(&commandBuffer->usedSamplers[i]->referenceCount);
}
commandBuffer->usedSamplerCount = 0;
for (i = 0; i < commandBuffer->usedGraphicsPipelineCount; i += 1)
{
SDL_AtomicDecRef(&commandBuffer->usedGraphicsPipelines[i]->referenceCount);
}
commandBuffer->usedGraphicsPipelineCount = 0;
for (i = 0; i < commandBuffer->usedComputePipelineCount; i += 1)
{
SDL_AtomicDecRef(&commandBuffer->usedComputePipelines[i]->referenceCount);
}
commandBuffer->usedComputePipelineCount = 0;
for (i = 0; i < commandBuffer->usedFramebufferCount; i += 1)
{
SDL_AtomicDecRef(&commandBuffer->usedFramebuffers[i]->referenceCount);
}
commandBuffer->usedFramebufferCount = 0;
/* Reset presentation data */
commandBuffer->presentDataCount = 0;
commandBuffer->waitSemaphoreCount = 0;
commandBuffer->signalSemaphoreCount = 0;
/* Return command buffer to pool */
SDL_LockMutex(renderer->acquireCommandBufferLock);
if (commandBuffer->commandPool->inactiveCommandBufferCount == commandBuffer->commandPool->inactiveCommandBufferCapacity)
{
commandBuffer->commandPool->inactiveCommandBufferCapacity += 1;
commandBuffer->commandPool->inactiveCommandBuffers = SDL_realloc(
commandBuffer->commandPool->inactiveCommandBuffers,
commandBuffer->commandPool->inactiveCommandBufferCapacity * sizeof(VulkanCommandBuffer*)
);
}
commandBuffer->commandPool->inactiveCommandBuffers[
commandBuffer->commandPool->inactiveCommandBufferCount
] = commandBuffer;
commandBuffer->commandPool->inactiveCommandBufferCount += 1;
SDL_UnlockMutex(renderer->acquireCommandBufferLock);
/* Remove this command buffer from the submitted list */
for (i = 0; i < renderer->submittedCommandBufferCount; i += 1)
{
if (renderer->submittedCommandBuffers[i] == commandBuffer)
2022-01-18 05:09:27 +00:00
{
renderer->submittedCommandBuffers[i] = renderer->submittedCommandBuffers[renderer->submittedCommandBufferCount - 1];
renderer->submittedCommandBufferCount -= 1;
2022-01-18 05:09:27 +00:00
}
}
}
static void VULKAN_Wait(
2022-02-25 21:42:11 +00:00
Refresh_Renderer *driverData
) {
VulkanRenderer *renderer = (VulkanRenderer*) driverData;
VulkanCommandBuffer *commandBuffer;
VkResult result;
int32_t i;
SDL_LockMutex(renderer->submitLock);
for (i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1)
2022-01-18 05:09:27 +00:00
{
commandBuffer = renderer->submittedCommandBuffers[i];
result = renderer->vkWaitForFences(
2022-01-18 05:09:27 +00:00
renderer->logicalDevice,
1,
&commandBuffer->inFlightFence,
2022-01-18 05:09:27 +00:00
VK_TRUE,
UINT64_MAX
);
if (result != VK_SUCCESS)
{
LogVulkanResultAsError("vkWaitForFences", result);
}
VULKAN_INTERNAL_CleanCommandBuffer(renderer, commandBuffer);
2022-01-18 05:09:27 +00:00
}
VULKAN_INTERNAL_PerformPendingDestroys(renderer);
SDL_UnlockMutex(renderer->submitLock);
}
2020-12-21 23:44:43 +00:00
static void VULKAN_Submit(
2022-02-25 21:42:11 +00:00
Refresh_Renderer *driverData,
2021-01-03 22:57:46 +00:00
uint32_t commandBufferCount,
2021-01-05 23:00:51 +00:00
Refresh_CommandBuffer **pCommandBuffers
2020-12-17 03:50:31 +00:00
) {
2020-12-21 23:44:43 +00:00
VulkanRenderer* renderer = (VulkanRenderer*)driverData;
VkSubmitInfo submitInfo;
VkPresentInfoKHR presentInfo;
VulkanPresentData *presentData;
2020-12-21 23:44:43 +00:00
VkResult vulkanResult, presentResult = VK_SUCCESS;
2021-01-02 06:07:15 +00:00
VulkanCommandBuffer *currentCommandBuffer;
VkPipelineStageFlags waitStages[MAX_PRESENT_COUNT];
uint32_t swapchainImageIndex;
int32_t i, j;
2020-12-21 23:44:43 +00:00
SDL_LockMutex(renderer->submitLock);
2020-12-21 23:44:43 +00:00
/* FIXME: Can this just be permanent? */
for (i = 0; i < MAX_PRESENT_COUNT; i += 1)
{
waitStages[i] = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
}
/* Submit the commands finally */
2022-02-10 05:42:19 +00:00
2021-01-02 06:07:15 +00:00
for (i = 0; i < commandBufferCount; i += 1)
2020-12-22 02:34:57 +00:00
{
2021-01-02 06:07:15 +00:00
currentCommandBuffer = (VulkanCommandBuffer*)pCommandBuffers[i];
for (j = 0; j < currentCommandBuffer->presentDataCount; j += 1)
{
swapchainImageIndex = currentCommandBuffer->presentDatas[j].swapchainImageIndex;
VULKAN_INTERNAL_ImageMemoryBarrier(
renderer,
currentCommandBuffer->commandBuffer,
RESOURCE_ACCESS_PRESENT,
VK_IMAGE_ASPECT_COLOR_BIT,
0,
1,
0,
1,
0,
currentCommandBuffer->presentDatas[j].windowData->swapchainData->textures[swapchainImageIndex].image,
&currentCommandBuffer->presentDatas[j].windowData->swapchainData->textures[swapchainImageIndex].resourceAccessType
);
}
2021-01-02 06:07:15 +00:00
VULKAN_INTERNAL_EndCommandBuffer(renderer, currentCommandBuffer);
2020-12-22 02:34:57 +00:00
submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
submitInfo.pNext = NULL;
submitInfo.commandBufferCount = 1;
submitInfo.pCommandBuffers = &currentCommandBuffer->commandBuffer;
2020-12-21 23:44:43 +00:00
submitInfo.pWaitDstStageMask = waitStages;
submitInfo.pWaitSemaphores = currentCommandBuffer->waitSemaphores;
submitInfo.waitSemaphoreCount = currentCommandBuffer->waitSemaphoreCount;
submitInfo.pSignalSemaphores = currentCommandBuffer->signalSemaphores;
submitInfo.signalSemaphoreCount = currentCommandBuffer->signalSemaphoreCount;
vulkanResult = renderer->vkQueueSubmit(
renderer->graphicsQueue,
1,
&submitInfo,
currentCommandBuffer->inFlightFence
);
if (vulkanResult != VK_SUCCESS)
2020-12-21 23:44:43 +00:00
{
LogVulkanResultAsError("vkQueueSubmit", vulkanResult);
2020-12-21 23:44:43 +00:00
}
/* Mark command buffers as submitted */
if (renderer->submittedCommandBufferCount + 1 >= renderer->submittedCommandBufferCapacity)
{
renderer->submittedCommandBufferCapacity = renderer->submittedCommandBufferCount + 1;
renderer->submittedCommandBuffers = SDL_realloc(
renderer->submittedCommandBuffers,
sizeof(VulkanCommandBuffer*) * renderer->submittedCommandBufferCapacity
);
}
renderer->submittedCommandBuffers[renderer->submittedCommandBufferCount] = (VulkanCommandBuffer*) pCommandBuffers[i];
renderer->submittedCommandBufferCount += 1;
/* Present, if applicable */
for (j = 0; j < currentCommandBuffer->presentDataCount; j += 1)
{
presentData = &currentCommandBuffer->presentDatas[j];
2020-12-17 03:50:31 +00:00
presentInfo.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
presentInfo.pNext = NULL;
presentInfo.pWaitSemaphores = &presentData->windowData->swapchainData->renderFinishedSemaphore;
presentInfo.waitSemaphoreCount = 1;
presentInfo.pSwapchains = &presentData->windowData->swapchainData->swapchain;
presentInfo.swapchainCount = 1;
presentInfo.pImageIndices = &presentData->swapchainImageIndex;
presentInfo.pResults = NULL;
2020-12-17 03:50:31 +00:00
presentResult = renderer->vkQueuePresentKHR(
renderer->presentQueue,
&presentInfo
);
if (presentResult != VK_SUCCESS)
{
VULKAN_INTERNAL_RecreateSwapchain(
renderer,
presentData->windowData
);
}
}
}
/* Check if we can perform any cleanups */
2020-12-21 23:44:43 +00:00
for (i = renderer->submittedCommandBufferCount - 1; i >= 0; i -= 1)
2020-12-17 03:50:31 +00:00
{
vulkanResult = renderer->vkGetFenceStatus(
renderer->logicalDevice,
renderer->submittedCommandBuffers[i]->inFlightFence
);
2020-12-17 03:50:31 +00:00
if (vulkanResult == VK_SUCCESS)
2020-12-17 03:50:31 +00:00
{
VULKAN_INTERNAL_CleanCommandBuffer(
renderer,
renderer->submittedCommandBuffers[i]
);
2020-12-17 03:50:31 +00:00
}
}
/* Check pending destroys */
VULKAN_INTERNAL_PerformPendingDestroys(renderer);
SDL_UnlockMutex(renderer->submitLock);
2021-01-03 05:07:51 +00:00
}
2020-12-17 03:28:02 +00:00
/* Device instantiation */
static inline uint8_t VULKAN_INTERNAL_SupportsExtension(
const char *ext,
VkExtensionProperties *availableExtensions,
uint32_t numAvailableExtensions
) {
uint32_t i;
for (i = 0; i < numAvailableExtensions; i += 1)
{
if (SDL_strcmp(ext, availableExtensions[i].extensionName) == 0)
{
return 1;
}
}
return 0;
}
static uint8_t VULKAN_INTERNAL_CheckInstanceExtensions(
const char **requiredExtensions,
uint32_t requiredExtensionsLength,
uint8_t *supportsDebugUtils
) {
uint32_t extensionCount, i;
VkExtensionProperties *availableExtensions;
uint8_t allExtensionsSupported = 1;
vkEnumerateInstanceExtensionProperties(
NULL,
&extensionCount,
NULL
);
availableExtensions = SDL_stack_alloc(
VkExtensionProperties,
extensionCount
);
vkEnumerateInstanceExtensionProperties(
NULL,
&extensionCount,
availableExtensions
);
for (i = 0; i < requiredExtensionsLength; i += 1)
{
if (!VULKAN_INTERNAL_SupportsExtension(
requiredExtensions[i],
availableExtensions,
extensionCount
)) {
allExtensionsSupported = 0;
break;
}
}
/* This is optional, but nice to have! */
*supportsDebugUtils = VULKAN_INTERNAL_SupportsExtension(
VK_EXT_DEBUG_UTILS_EXTENSION_NAME,
availableExtensions,
extensionCount
);
SDL_stack_free(availableExtensions);
return allExtensionsSupported;
}
static uint8_t VULKAN_INTERNAL_CheckValidationLayers(
const char** validationLayers,
uint32_t validationLayersLength
) {
uint32_t layerCount;
VkLayerProperties *availableLayers;
uint32_t i, j;
uint8_t layerFound = 0;
2020-12-17 03:28:02 +00:00
vkEnumerateInstanceLayerProperties(&layerCount, NULL);
availableLayers = SDL_stack_alloc(VkLayerProperties, layerCount);
vkEnumerateInstanceLayerProperties(&layerCount, availableLayers);
for (i = 0; i < validationLayersLength; i += 1)
{
layerFound = 0;
for (j = 0; j < layerCount; j += 1)
{
if (SDL_strcmp(validationLayers[i], availableLayers[j].layerName) == 0)
{
layerFound = 1;
break;
}
}
if (!layerFound)
{
break;
}
}
SDL_stack_free(availableLayers);
return layerFound;
}
static uint8_t VULKAN_INTERNAL_CreateInstance(
2022-02-25 21:42:11 +00:00
VulkanRenderer *renderer,
void *deviceWindowHandle
2020-12-17 03:28:02 +00:00
) {
VkResult vulkanResult;
VkApplicationInfo appInfo;
const char **instanceExtensionNames;
uint32_t instanceExtensionCount;
VkInstanceCreateInfo createInfo;
static const char *layerNames[] = { "VK_LAYER_KHRONOS_validation" };
appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
appInfo.pNext = NULL;
appInfo.pApplicationName = NULL;
appInfo.applicationVersion = 0;
appInfo.pEngineName = "REFRESH";
appInfo.engineVersion = REFRESH_COMPILED_VERSION;
appInfo.apiVersion = VK_MAKE_VERSION(1, 0, 0);
2022-02-25 21:42:11 +00:00
if (!SDL_Vulkan_GetInstanceExtensions(
(SDL_Window*) deviceWindowHandle,
&instanceExtensionCount,
NULL
)) {
Refresh_LogError(
"SDL_Vulkan_GetInstanceExtensions(): getExtensionCount: %s",
SDL_GetError()
);
2020-12-17 03:28:02 +00:00
2022-02-25 21:42:11 +00:00
return 0;
}
2020-12-17 03:28:02 +00:00
/* Extra space for the following extensions:
* VK_KHR_get_physical_device_properties2
* VK_EXT_debug_utils
*/
instanceExtensionNames = SDL_stack_alloc(
const char*,
instanceExtensionCount + 2
);
if (!SDL_Vulkan_GetInstanceExtensions(
(SDL_Window*) deviceWindowHandle,
&instanceExtensionCount,
instanceExtensionNames
)) {
2021-01-05 23:00:51 +00:00
Refresh_LogError(
2020-12-17 03:28:02 +00:00
"SDL_Vulkan_GetInstanceExtensions(): %s",
SDL_GetError()
);
2022-02-25 21:42:11 +00:00
SDL_stack_free((char*) instanceExtensionNames);
return 0;
2020-12-17 03:28:02 +00:00
}
/* Core since 1.1 */
instanceExtensionNames[instanceExtensionCount++] =
VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME;
if (!VULKAN_INTERNAL_CheckInstanceExtensions(
instanceExtensionNames,
instanceExtensionCount,
&renderer->supportsDebugUtils
)) {
2021-01-05 23:00:51 +00:00
Refresh_LogError(
2020-12-17 03:28:02 +00:00
"Required Vulkan instance extensions not supported"
);
2022-02-25 21:42:11 +00:00
SDL_stack_free((char*) instanceExtensionNames);
return 0;
2020-12-17 03:28:02 +00:00
}
if (renderer->supportsDebugUtils)
{
/* Append the debug extension to the end */
instanceExtensionNames[instanceExtensionCount++] =
VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
}
else
{
2021-01-05 23:00:51 +00:00
Refresh_LogWarn(
2020-12-17 03:28:02 +00:00
"%s is not supported!",
VK_EXT_DEBUG_UTILS_EXTENSION_NAME
);
}
2022-02-25 21:42:11 +00:00
createInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
2020-12-17 03:28:02 +00:00
createInfo.pNext = NULL;
createInfo.flags = 0;
createInfo.pApplicationInfo = &appInfo;
createInfo.ppEnabledLayerNames = layerNames;
createInfo.enabledExtensionCount = instanceExtensionCount;
createInfo.ppEnabledExtensionNames = instanceExtensionNames;
if (renderer->debugMode)
{
createInfo.enabledLayerCount = SDL_arraysize(layerNames);
if (!VULKAN_INTERNAL_CheckValidationLayers(
layerNames,
createInfo.enabledLayerCount
)) {
2021-01-05 23:00:51 +00:00
Refresh_LogWarn("Validation layers not found, continuing without validation");
2020-12-17 03:28:02 +00:00
createInfo.enabledLayerCount = 0;
}
else
{
Refresh_LogInfo("Validation layers enabled, expect debug level performance!");
}
2020-12-17 03:28:02 +00:00
}
else
{
createInfo.enabledLayerCount = 0;
}
2022-02-25 21:42:11 +00:00
vulkanResult = vkCreateInstance(&createInfo, NULL, &renderer->instance);
2020-12-17 03:28:02 +00:00
if (vulkanResult != VK_SUCCESS)
{
2021-01-05 23:00:51 +00:00
Refresh_LogError(
2020-12-17 03:28:02 +00:00
"vkCreateInstance failed: %s",
VkErrorMessages(vulkanResult)
);
2022-02-25 21:42:11 +00:00
SDL_stack_free((char*) instanceExtensionNames);
return 0;
2020-12-17 03:28:02 +00:00
}
SDL_stack_free((char*) instanceExtensionNames);
return 1;
}
static uint8_t VULKAN_INTERNAL_CheckDeviceExtensions(
VulkanRenderer *renderer,
VkPhysicalDevice physicalDevice,
const char** requiredExtensions,
uint32_t requiredExtensionsLength
) {
uint32_t extensionCount, i;
VkExtensionProperties *availableExtensions;
uint8_t allExtensionsSupported = 1;
renderer->vkEnumerateDeviceExtensionProperties(
physicalDevice,
NULL,
&extensionCount,
NULL
);
availableExtensions = SDL_stack_alloc(
VkExtensionProperties,
extensionCount
);
renderer->vkEnumerateDeviceExtensionProperties(
physicalDevice,
NULL,
&extensionCount,
availableExtensions
);
for (i = 0; i < requiredExtensionsLength; i += 1)
{
if (!VULKAN_INTERNAL_SupportsExtension(
requiredExtensions[i],
availableExtensions,
extensionCount
)) {
allExtensionsSupported = 0;
break;
}
}
SDL_stack_free(availableExtensions);
return allExtensionsSupported;
}
static uint8_t VULKAN_INTERNAL_IsDeviceSuitable(
VulkanRenderer *renderer,
VkPhysicalDevice physicalDevice,
const char** requiredExtensionNames,
uint32_t requiredExtensionNamesLength,
VkSurfaceKHR surface,
QueueFamilyIndices *queueFamilyIndices,
uint8_t *deviceRank
2020-12-17 03:28:02 +00:00
) {
uint32_t queueFamilyCount, i;
SwapChainSupportDetails swapChainSupportDetails;
VkQueueFamilyProperties *queueProps;
VkBool32 supportsPresent;
2021-01-03 03:03:25 +00:00
uint8_t querySuccess = 0;
uint8_t foundSuitableDevice = 0;
2020-12-17 03:28:02 +00:00
VkPhysicalDeviceProperties deviceProperties;
queueFamilyIndices->graphicsFamily = UINT32_MAX;
queueFamilyIndices->presentFamily = UINT32_MAX;
2021-01-03 03:03:25 +00:00
queueFamilyIndices->computeFamily = UINT32_MAX;
2021-01-02 21:31:17 +00:00
queueFamilyIndices->transferFamily = UINT32_MAX;
*deviceRank = 0;
2020-12-17 03:28:02 +00:00
/* Note: If no dedicated device exists,
* one that supports our features would be fine
*/
if (!VULKAN_INTERNAL_CheckDeviceExtensions(
renderer,
physicalDevice,
requiredExtensionNames,
requiredExtensionNamesLength
)) {
return 0;
}
renderer->vkGetPhysicalDeviceQueueFamilyProperties(
physicalDevice,
&queueFamilyCount,
NULL
);
/* FIXME: Need better structure for checking vs storing support details */
querySuccess = VULKAN_INTERNAL_QuerySwapChainSupport(
renderer,
physicalDevice,
surface,
UINT32_MAX,
2020-12-17 03:28:02 +00:00
&swapChainSupportDetails
);
2020-12-17 03:28:02 +00:00
SDL_free(swapChainSupportDetails.formats);
SDL_free(swapChainSupportDetails.presentModes);
2020-12-17 03:28:02 +00:00
if ( querySuccess == 0 ||
swapChainSupportDetails.formatsLength == 0 ||
swapChainSupportDetails.presentModesLength == 0 )
{
return 0;
}
queueProps = (VkQueueFamilyProperties*) SDL_stack_alloc(
VkQueueFamilyProperties,
queueFamilyCount
);
renderer->vkGetPhysicalDeviceQueueFamilyProperties(
physicalDevice,
&queueFamilyCount,
queueProps
);
for (i = 0; i < queueFamilyCount; i += 1)
{
renderer->vkGetPhysicalDeviceSurfaceSupportKHR(
physicalDevice,
i,
surface,
&supportsPresent
);
2021-01-02 21:31:17 +00:00
if ( supportsPresent &&
(queueProps[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) &&
(queueProps[i].queueFlags & VK_QUEUE_COMPUTE_BIT) &&
(queueProps[i].queueFlags & VK_QUEUE_TRANSFER_BIT) )
2020-12-17 03:28:02 +00:00
{
queueFamilyIndices->graphicsFamily = i;
queueFamilyIndices->presentFamily = i;
queueFamilyIndices->computeFamily = i;
queueFamilyIndices->transferFamily = i;
2020-12-17 03:28:02 +00:00
foundSuitableDevice = 1;
break;
}
}
SDL_stack_free(queueProps);
if (foundSuitableDevice)
{
/* Try to make sure we pick the best device available */
2020-12-17 03:28:02 +00:00
renderer->vkGetPhysicalDeviceProperties(
physicalDevice,
&deviceProperties
);
*deviceRank = DEVICE_PRIORITY[deviceProperties.deviceType];
2020-12-17 03:28:02 +00:00
return 1;
}
/* This device is useless for us, next! */
return 0;
}
static void VULKAN_INTERNAL_GetPhysicalDeviceProperties(
2021-01-14 01:37:54 +00:00
VulkanRenderer *renderer
) {
renderer->physicalDeviceDriverProperties.sType =
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES_KHR;
renderer->physicalDeviceDriverProperties.pNext = NULL;
renderer->physicalDeviceProperties.sType =
VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
renderer->physicalDeviceProperties.pNext =
&renderer->physicalDeviceDriverProperties;
renderer->vkGetPhysicalDeviceProperties2KHR(
renderer->physicalDevice,
&renderer->physicalDeviceProperties
);
renderer->vkGetPhysicalDeviceMemoryProperties(
renderer->physicalDevice,
&renderer->memoryProperties
);
2021-01-14 01:37:54 +00:00
}
2020-12-17 03:28:02 +00:00
static uint8_t VULKAN_INTERNAL_DeterminePhysicalDevice(
VulkanRenderer *renderer,
VkSurfaceKHR surface
2020-12-17 03:28:02 +00:00
) {
VkResult vulkanResult;
VkPhysicalDevice *physicalDevices;
uint32_t physicalDeviceCount, i, suitableIndex;
QueueFamilyIndices queueFamilyIndices, suitableQueueFamilyIndices;
uint8_t deviceRank, highestRank;
2020-12-17 03:28:02 +00:00
vulkanResult = renderer->vkEnumeratePhysicalDevices(
renderer->instance,
&physicalDeviceCount,
NULL
);
if (vulkanResult != VK_SUCCESS)
{
2021-01-05 23:00:51 +00:00
Refresh_LogError(
2020-12-17 03:28:02 +00:00
"vkEnumeratePhysicalDevices failed: %s",
VkErrorMessages(vulkanResult)
);
return 0;
}
if (physicalDeviceCount == 0)
{
2021-01-05 23:00:51 +00:00
Refresh_LogError("Failed to find any GPUs with Vulkan support");
2020-12-17 03:28:02 +00:00
return 0;
}
physicalDevices = SDL_stack_alloc(VkPhysicalDevice, physicalDeviceCount);
vulkanResult = renderer->vkEnumeratePhysicalDevices(
renderer->instance,
&physicalDeviceCount,
physicalDevices
);
if (vulkanResult != VK_SUCCESS)
{
2021-01-05 23:00:51 +00:00
Refresh_LogError(
2020-12-17 03:28:02 +00:00
"vkEnumeratePhysicalDevices failed: %s",
VkErrorMessages(vulkanResult)
);
SDL_stack_free(physicalDevices);
return 0;
}
/* Any suitable device will do, but we'd like the best */
suitableIndex = -1;
deviceRank = 0;
highestRank = 0;
2020-12-17 03:28:02 +00:00
for (i = 0; i < physicalDeviceCount; i += 1)
{
const uint8_t suitable = VULKAN_INTERNAL_IsDeviceSuitable(
2020-12-17 03:28:02 +00:00
renderer,
physicalDevices[i],
deviceExtensionNames,
deviceExtensionCount,
surface,
2020-12-17 03:28:02 +00:00
&queueFamilyIndices,
&deviceRank
);
if (deviceRank >= highestRank)
{
if (suitable)
2020-12-17 03:28:02 +00:00
{
suitableIndex = i;
suitableQueueFamilyIndices.computeFamily = queueFamilyIndices.computeFamily;
suitableQueueFamilyIndices.graphicsFamily = queueFamilyIndices.graphicsFamily;
suitableQueueFamilyIndices.presentFamily = queueFamilyIndices.presentFamily;
suitableQueueFamilyIndices.transferFamily = queueFamilyIndices.transferFamily;
}
else if (deviceRank > highestRank)
{
/* In this case, we found a... "realer?" GPU,
* but it doesn't actually support our Vulkan.
* We should disqualify all devices below as a
* result, because if we don't we end up
* ignoring real hardware and risk using
* something like LLVMpipe instead!
* -flibit
*/
suitableIndex = -1;
2020-12-17 03:28:02 +00:00
}
highestRank = deviceRank;
2020-12-17 03:28:02 +00:00
}
}
if (suitableIndex != -1)
{
renderer->physicalDevice = physicalDevices[suitableIndex];
renderer->queueFamilyIndices = suitableQueueFamilyIndices;
2020-12-17 03:28:02 +00:00
}
else
{
2021-01-05 23:00:51 +00:00
Refresh_LogError("No suitable physical devices found");
2020-12-17 03:28:02 +00:00
SDL_stack_free(physicalDevices);
return 0;
}
2021-01-14 01:37:54 +00:00
VULKAN_INTERNAL_GetPhysicalDeviceProperties(renderer);
2020-12-17 03:28:02 +00:00
SDL_stack_free(physicalDevices);
return 1;
}
static uint8_t VULKAN_INTERNAL_CreateLogicalDevice(
VulkanRenderer *renderer,
const char **deviceExtensionNames,
uint32_t deviceExtensionCount
) {
VkResult vulkanResult;
2020-12-17 04:04:47 +00:00
2020-12-17 03:28:02 +00:00
VkDeviceCreateInfo deviceCreateInfo;
VkPhysicalDeviceFeatures deviceFeatures;
VkDeviceQueueCreateInfo queueCreateInfos[2];
2020-12-17 03:28:02 +00:00
VkDeviceQueueCreateInfo queueCreateInfoGraphics;
VkDeviceQueueCreateInfo queueCreateInfoPresent;
int32_t queueInfoCount = 1;
2020-12-17 03:28:02 +00:00
float queuePriority = 1.0f;
queueCreateInfoGraphics.sType =
VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
queueCreateInfoGraphics.pNext = NULL;
queueCreateInfoGraphics.flags = 0;
queueCreateInfoGraphics.queueFamilyIndex =
renderer->queueFamilyIndices.graphicsFamily;
queueCreateInfoGraphics.queueCount = 1;
queueCreateInfoGraphics.pQueuePriorities = &queuePriority;
queueCreateInfos[0] = queueCreateInfoGraphics;
if (renderer->queueFamilyIndices.presentFamily != renderer->queueFamilyIndices.graphicsFamily)
{
queueCreateInfoPresent.sType =
VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
queueCreateInfoPresent.pNext = NULL;
queueCreateInfoPresent.flags = 0;
queueCreateInfoPresent.queueFamilyIndex =
renderer->queueFamilyIndices.presentFamily;
queueCreateInfoPresent.queueCount = 1;
queueCreateInfoPresent.pQueuePriorities = &queuePriority;
2021-01-02 21:31:17 +00:00
queueCreateInfos[queueInfoCount] = queueCreateInfoPresent;
2020-12-17 03:28:02 +00:00
queueInfoCount += 1;
}
/* specifying used device features */
SDL_zero(deviceFeatures);
deviceFeatures.fillModeNonSolid = VK_TRUE;
deviceFeatures.samplerAnisotropy = VK_TRUE;
deviceFeatures.multiDrawIndirect = VK_TRUE;
2020-12-17 03:28:02 +00:00
/* creating the logical device */
deviceCreateInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
deviceCreateInfo.pNext = NULL;
deviceCreateInfo.flags = 0;
deviceCreateInfo.queueCreateInfoCount = queueInfoCount;
deviceCreateInfo.pQueueCreateInfos = queueCreateInfos;
deviceCreateInfo.enabledLayerCount = 0;
deviceCreateInfo.ppEnabledLayerNames = NULL;
deviceCreateInfo.enabledExtensionCount = deviceExtensionCount;
deviceCreateInfo.ppEnabledExtensionNames = deviceExtensionNames;
deviceCreateInfo.pEnabledFeatures = &deviceFeatures;
vulkanResult = renderer->vkCreateDevice(
renderer->physicalDevice,
&deviceCreateInfo,
NULL,
&renderer->logicalDevice
);
if (vulkanResult != VK_SUCCESS)
{
2021-01-05 23:00:51 +00:00
Refresh_LogError(
2020-12-17 03:28:02 +00:00
"vkCreateDevice failed: %s",
VkErrorMessages(vulkanResult)
);
return 0;
}
/* Load vkDevice entry points */
#define VULKAN_DEVICE_FUNCTION(ext, ret, func, params) \
renderer->func = (vkfntype_##func) \
renderer->vkGetDeviceProcAddr( \
renderer->logicalDevice, \
#func \
);
#include "Refresh_Driver_Vulkan_vkfuncs.h"
renderer->vkGetDeviceQueue(
renderer->logicalDevice,
renderer->queueFamilyIndices.graphicsFamily,
0,
&renderer->graphicsQueue
);
renderer->vkGetDeviceQueue(
renderer->logicalDevice,
renderer->queueFamilyIndices.presentFamily,
0,
&renderer->presentQueue
);
2021-01-03 03:03:25 +00:00
renderer->vkGetDeviceQueue(
renderer->logicalDevice,
renderer->queueFamilyIndices.computeFamily,
0,
&renderer->computeQueue
);
2021-01-02 21:31:17 +00:00
renderer->vkGetDeviceQueue(
renderer->logicalDevice,
renderer->queueFamilyIndices.transferFamily,
0,
&renderer->transferQueue
);
2020-12-17 03:28:02 +00:00
return 1;
}
static void VULKAN_INTERNAL_LoadEntryPoints()
{
2021-01-14 01:37:54 +00:00
/* Load Vulkan entry points */
if (SDL_Vulkan_LoadLibrary(NULL) < 0)
{
Refresh_LogWarn("Vulkan: SDL_Vulkan_LoadLibrary failed!");
return;
2021-01-14 01:37:54 +00:00
}
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wpedantic"
vkGetInstanceProcAddr = (PFN_vkGetInstanceProcAddr)SDL_Vulkan_GetVkGetInstanceProcAddr();
#pragma GCC diagnostic pop
if (vkGetInstanceProcAddr == NULL)
{
Refresh_LogWarn(
"SDL_Vulkan_GetVkGetInstanceProcAddr(): %s",
SDL_GetError()
);
return;
2021-01-14 01:37:54 +00:00
}
#define VULKAN_GLOBAL_FUNCTION(name) \
name = (PFN_##name) vkGetInstanceProcAddr(VK_NULL_HANDLE, #name); \
if (name == NULL) \
{ \
Refresh_LogWarn("vkGetInstanceProcAddr(VK_NULL_HANDLE, \"" #name "\") failed"); \
return; \
2021-01-14 01:37:54 +00:00
}
#include "Refresh_Driver_Vulkan_vkfuncs.h"
}
static uint8_t VULKAN_INTERNAL_PrepareVulkan(
VulkanRenderer *renderer
) {
SDL_Window *dummyWindowHandle;
VkSurfaceKHR surface;
VULKAN_INTERNAL_LoadEntryPoints();
dummyWindowHandle = SDL_CreateWindow(
"Refresh Vulkan",
0, 0,
128, 128,
SDL_WINDOW_VULKAN | SDL_WINDOW_HIDDEN
);
if (dummyWindowHandle == NULL)
{
Refresh_LogWarn("Vulkan: Could not create dummy window");
return 0;
}
if (!VULKAN_INTERNAL_CreateInstance(renderer, dummyWindowHandle))
{
SDL_DestroyWindow(dummyWindowHandle);
SDL_free(renderer);
Refresh_LogWarn("Vulkan: Could not create Vulkan instance");
return 0;
}
if (!SDL_Vulkan_CreateSurface(
(SDL_Window*) dummyWindowHandle,
renderer->instance,
&surface
)) {
SDL_DestroyWindow(dummyWindowHandle);
SDL_free(renderer);
Refresh_LogWarn(
"SDL_Vulkan_CreateSurface failed: %s",
SDL_GetError()
);
return 0;
}
#define VULKAN_INSTANCE_FUNCTION(ext, ret, func, params) \
renderer->func = (vkfntype_##func) vkGetInstanceProcAddr(renderer->instance, #func);
#include "Refresh_Driver_Vulkan_vkfuncs.h"
if (!VULKAN_INTERNAL_DeterminePhysicalDevice(renderer, surface))
{
return 0;
}
renderer->vkDestroySurfaceKHR(
renderer->instance,
surface,
NULL
);
SDL_DestroyWindow(dummyWindowHandle);
return 1;
}
static uint8_t VULKAN_PrepareDriver(uint32_t *flags)
{
/* Set up dummy VulkanRenderer */
VulkanRenderer *renderer = (VulkanRenderer*) SDL_malloc(sizeof(VulkanRenderer));
uint8_t result;
SDL_memset(renderer, '\0', sizeof(VulkanRenderer));
result = VULKAN_INTERNAL_PrepareVulkan(renderer);
if (!result)
{
Refresh_LogWarn("Vulkan: Failed to determine a suitable physical device");
}
else
{
*flags = SDL_WINDOW_VULKAN;
}
renderer->vkDestroyInstance(renderer->instance, NULL);
SDL_free(renderer);
return result;
}
static Refresh_Device* VULKAN_CreateDevice(
uint8_t debugMode
2020-12-17 01:23:49 +00:00
) {
VulkanRenderer *renderer = (VulkanRenderer*) SDL_malloc(sizeof(VulkanRenderer));
2020-12-17 01:23:49 +00:00
2022-02-25 21:42:11 +00:00
Refresh_Device *result;
VkResult vulkanResult;
2020-12-18 22:35:33 +00:00
uint32_t i;
2020-12-17 04:04:47 +00:00
2020-12-29 22:52:24 +00:00
/* Variables: Descriptor set layouts */
2020-12-17 19:40:49 +00:00
VkDescriptorSetLayoutCreateInfo setLayoutCreateInfo;
VkDescriptorSetLayoutBinding vertexParamLayoutBinding;
VkDescriptorSetLayoutBinding fragmentParamLayoutBinding;
2020-12-31 04:39:47 +00:00
VkDescriptorSetLayoutBinding computeParamLayoutBinding;
2020-12-17 19:40:49 +00:00
2020-12-31 04:39:47 +00:00
VkDescriptorSetLayoutBinding emptyVertexSamplerLayoutBinding;
VkDescriptorSetLayoutBinding emptyFragmentSamplerLayoutBinding;
2020-12-29 22:52:24 +00:00
VkDescriptorSetLayoutBinding emptyComputeBufferDescriptorSetLayoutBinding;
2020-12-31 04:39:47 +00:00
VkDescriptorSetLayoutBinding emptyComputeImageDescriptorSetLayoutBinding;
2020-12-29 22:52:24 +00:00
2020-12-20 09:29:15 +00:00
/* Variables: UBO Creation */
VkDescriptorPoolCreateInfo defaultDescriptorPoolInfo;
2020-12-31 04:39:47 +00:00
VkDescriptorPoolSize poolSizes[4];
2020-12-28 22:07:13 +00:00
VkDescriptorSetAllocateInfo descriptorAllocateInfo;
2020-12-20 09:29:15 +00:00
2022-06-17 07:41:27 +00:00
/* Variables: Image Format Detection */
VkImageFormatProperties imageFormatProperties;
renderer->debugMode = debugMode;
if (!VULKAN_INTERNAL_PrepareVulkan(renderer))
{
Refresh_LogError("Failed to initialize Vulkan!");
return NULL;
}
Refresh_LogInfo("Refresh Driver: Vulkan");
Refresh_LogInfo(
"Vulkan Device: %s",
renderer->physicalDeviceProperties.properties.deviceName
);
Refresh_LogInfo(
"Vulkan Driver: %s %s",
renderer->physicalDeviceDriverProperties.driverName,
renderer->physicalDeviceDriverProperties.driverInfo
);
Refresh_LogInfo(
"Vulkan Conformance: %u.%u.%u",
renderer->physicalDeviceDriverProperties.conformanceVersion.major,
renderer->physicalDeviceDriverProperties.conformanceVersion.minor,
renderer->physicalDeviceDriverProperties.conformanceVersion.patch
);
Refresh_LogWarn(
2022-02-25 21:42:11 +00:00
"\n"
2022-02-26 07:13:17 +00:00
"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
2022-02-25 21:42:11 +00:00
"! Refresh Vulkan is still in development! !\n"
2022-02-26 07:13:17 +00:00
"! The API is unstable and subject to change !\n"
"! You have been warned! !\n"
"!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
);
if (!VULKAN_INTERNAL_CreateLogicalDevice(
renderer,
deviceExtensionNames,
deviceExtensionCount
)) {
Refresh_LogError("Failed to create logical device");
return NULL;
}
/* FIXME: just move this into this function */
result = (Refresh_Device*) SDL_malloc(sizeof(Refresh_Device));
2022-02-25 21:42:11 +00:00
ASSIGN_DRIVER(VULKAN)
2020-12-17 01:23:49 +00:00
2022-02-25 21:42:11 +00:00
result->driverData = (Refresh_Renderer*) renderer;
2020-12-17 01:23:49 +00:00
2020-12-17 03:50:31 +00:00
/*
* Create initial swapchain array
2020-12-17 03:50:31 +00:00
*/
renderer->claimedWindowCapacity = 1;
renderer->claimedWindowCount = 0;
renderer->claimedWindows = SDL_malloc(
renderer->claimedWindowCapacity * sizeof(WindowData*)
);
2020-12-17 03:50:31 +00:00
2020-12-22 01:59:08 +00:00
/* Threading */
renderer->allocatorLock = SDL_CreateMutex();
renderer->disposeLock = SDL_CreateMutex();
renderer->submitLock = SDL_CreateMutex();
renderer->acquireCommandBufferLock = SDL_CreateMutex();
renderer->renderPassFetchLock = SDL_CreateMutex();
renderer->framebufferFetchLock = SDL_CreateMutex();
renderer->renderTargetFetchLock = SDL_CreateMutex();
2020-12-17 04:04:47 +00:00
/*
2021-01-02 06:07:15 +00:00
* Create submitted command buffer list
2020-12-17 04:04:47 +00:00
*/
2021-01-02 06:07:15 +00:00
renderer->submittedCommandBufferCapacity = 16;
2020-12-17 04:04:47 +00:00
renderer->submittedCommandBufferCount = 0;
2021-01-02 06:07:15 +00:00
renderer->submittedCommandBuffers = SDL_malloc(sizeof(VulkanCommandBuffer*) * renderer->submittedCommandBufferCapacity);
2020-12-17 04:04:47 +00:00
2020-12-22 01:59:08 +00:00
/* Memory Allocator */
renderer->memoryAllocator = (VulkanMemoryAllocator*) SDL_malloc(
2020-12-22 01:59:08 +00:00
sizeof(VulkanMemoryAllocator)
);
for (i = 0; i < VK_MAX_MEMORY_TYPES; i += 1)
{
renderer->memoryAllocator->subAllocators[i].nextAllocationSize = STARTING_ALLOCATION_SIZE;
renderer->memoryAllocator->subAllocators[i].allocations = NULL;
renderer->memoryAllocator->subAllocators[i].allocationCount = 0;
renderer->memoryAllocator->subAllocators[i].sortedFreeRegions = SDL_malloc(
sizeof(VulkanMemoryFreeRegion*) * 4
);
renderer->memoryAllocator->subAllocators[i].sortedFreeRegionCount = 0;
renderer->memoryAllocator->subAllocators[i].sortedFreeRegionCapacity = 4;
}
/* Set up UBO layouts */
2020-12-31 04:39:47 +00:00
2020-12-23 21:11:09 +00:00
renderer->minUBOAlignment = renderer->physicalDeviceProperties.properties.limits.minUniformBufferOffsetAlignment;
2020-12-17 19:40:49 +00:00
emptyVertexSamplerLayoutBinding.binding = 0;
emptyVertexSamplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
emptyVertexSamplerLayoutBinding.descriptorCount = 0;
emptyVertexSamplerLayoutBinding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
emptyVertexSamplerLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
setLayoutCreateInfo.pNext = NULL;
setLayoutCreateInfo.flags = 0;
setLayoutCreateInfo.bindingCount = 1;
setLayoutCreateInfo.pBindings = &emptyVertexSamplerLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->emptyVertexSamplerLayout
);
emptyFragmentSamplerLayoutBinding.binding = 0;
emptyFragmentSamplerLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
emptyFragmentSamplerLayoutBinding.descriptorCount = 0;
emptyFragmentSamplerLayoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
emptyFragmentSamplerLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.pBindings = &emptyFragmentSamplerLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->emptyFragmentSamplerLayout
);
2020-12-29 22:52:24 +00:00
emptyComputeBufferDescriptorSetLayoutBinding.binding = 0;
emptyComputeBufferDescriptorSetLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
emptyComputeBufferDescriptorSetLayoutBinding.descriptorCount = 0;
emptyComputeBufferDescriptorSetLayoutBinding.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
emptyComputeBufferDescriptorSetLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.pBindings = &emptyComputeBufferDescriptorSetLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->emptyComputeBufferDescriptorSetLayout
);
2020-12-31 04:39:47 +00:00
emptyComputeImageDescriptorSetLayoutBinding.binding = 0;
emptyComputeImageDescriptorSetLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
2020-12-31 04:39:47 +00:00
emptyComputeImageDescriptorSetLayoutBinding.descriptorCount = 0;
emptyComputeImageDescriptorSetLayoutBinding.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
emptyComputeImageDescriptorSetLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.pBindings = &emptyComputeImageDescriptorSetLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->emptyComputeImageDescriptorSetLayout
);
2020-12-17 19:40:49 +00:00
vertexParamLayoutBinding.binding = 0;
vertexParamLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
vertexParamLayoutBinding.descriptorCount = 1;
vertexParamLayoutBinding.stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
vertexParamLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.bindingCount = 1;
setLayoutCreateInfo.pBindings = &vertexParamLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->vertexUniformDescriptorSetLayout
2020-12-17 19:40:49 +00:00
);
if (vulkanResult != VK_SUCCESS)
{
2021-01-05 23:00:51 +00:00
Refresh_LogError("Failed to create vertex UBO layout!");
2020-12-17 19:40:49 +00:00
return NULL;
}
fragmentParamLayoutBinding.binding = 0;
fragmentParamLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
fragmentParamLayoutBinding.descriptorCount = 1;
fragmentParamLayoutBinding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
fragmentParamLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.bindingCount = 1;
setLayoutCreateInfo.pBindings = &fragmentParamLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->fragmentUniformDescriptorSetLayout
2020-12-17 19:40:49 +00:00
);
if (vulkanResult != VK_SUCCESS)
{
2021-01-05 23:00:51 +00:00
Refresh_LogError("Failed to create fragment UBO layout!");
2020-12-17 19:40:49 +00:00
return NULL;
}
2020-12-31 04:39:47 +00:00
computeParamLayoutBinding.binding = 0;
computeParamLayoutBinding.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
computeParamLayoutBinding.descriptorCount = 1;
computeParamLayoutBinding.stageFlags = VK_SHADER_STAGE_COMPUTE_BIT;
computeParamLayoutBinding.pImmutableSamplers = NULL;
setLayoutCreateInfo.bindingCount = 1;
setLayoutCreateInfo.pBindings = &computeParamLayoutBinding;
vulkanResult = renderer->vkCreateDescriptorSetLayout(
renderer->logicalDevice,
&setLayoutCreateInfo,
NULL,
&renderer->computeUniformDescriptorSetLayout
2020-12-31 04:39:47 +00:00
);
/* Default Descriptors */
2020-12-20 09:29:15 +00:00
poolSizes[0].descriptorCount = 2;
poolSizes[0].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
poolSizes[1].descriptorCount = 1;
poolSizes[1].type = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
2020-12-20 09:29:15 +00:00
2020-12-29 22:52:24 +00:00
poolSizes[2].descriptorCount = 1;
poolSizes[2].type = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
2020-12-29 22:52:24 +00:00
poolSizes[3].descriptorCount = 3;
poolSizes[3].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC;
2020-12-31 04:39:47 +00:00
defaultDescriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
defaultDescriptorPoolInfo.pNext = NULL;
defaultDescriptorPoolInfo.flags = 0;
defaultDescriptorPoolInfo.maxSets = 2 + 1 + 1 + 3;
2020-12-31 04:39:47 +00:00
defaultDescriptorPoolInfo.poolSizeCount = 4;
defaultDescriptorPoolInfo.pPoolSizes = poolSizes;
2020-12-20 09:29:15 +00:00
renderer->vkCreateDescriptorPool(
renderer->logicalDevice,
&defaultDescriptorPoolInfo,
2020-12-20 09:29:15 +00:00
NULL,
&renderer->defaultDescriptorPool
);
2020-12-28 22:07:13 +00:00
descriptorAllocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
descriptorAllocateInfo.pNext = NULL;
descriptorAllocateInfo.descriptorPool = renderer->defaultDescriptorPool;
descriptorAllocateInfo.descriptorSetCount = 1;
descriptorAllocateInfo.pSetLayouts = &renderer->emptyVertexSamplerLayout;
renderer->vkAllocateDescriptorSets(
renderer->logicalDevice,
2020-12-28 22:07:13 +00:00
&descriptorAllocateInfo,
&renderer->emptyVertexSamplerDescriptorSet
);
2020-12-28 22:07:13 +00:00
descriptorAllocateInfo.pSetLayouts = &renderer->emptyFragmentSamplerLayout;
renderer->vkAllocateDescriptorSets(
renderer->logicalDevice,
2020-12-28 22:07:13 +00:00
&descriptorAllocateInfo,
&renderer->emptyFragmentSamplerDescriptorSet
2020-12-20 09:29:15 +00:00
);
2020-12-29 22:52:24 +00:00
descriptorAllocateInfo.pSetLayouts = &renderer->emptyComputeBufferDescriptorSetLayout;
renderer->vkAllocateDescriptorSets(
renderer->logicalDevice,
&descriptorAllocateInfo,
&renderer->emptyComputeBufferDescriptorSet
);
2020-12-31 04:39:47 +00:00
descriptorAllocateInfo.pSetLayouts = &renderer->emptyComputeImageDescriptorSetLayout;
renderer->vkAllocateDescriptorSets(
renderer->logicalDevice,
&descriptorAllocateInfo,
&renderer->emptyComputeImageDescriptorSet
);
2020-12-28 22:07:13 +00:00
/* Dummy Uniform Buffers */
renderer->dummyVertexUniformBuffer = VULKAN_INTERNAL_CreateDummyUniformBuffer(
2020-12-28 22:07:13 +00:00
renderer,
UNIFORM_BUFFER_VERTEX
2022-01-02 22:35:57 +00:00
);
if (renderer->dummyVertexUniformBuffer == NULL)
{
2021-01-05 23:00:51 +00:00
Refresh_LogError("Failed to create dummy vertex uniform buffer!");
2020-12-28 22:07:13 +00:00
return NULL;
}
renderer->dummyFragmentUniformBuffer = VULKAN_INTERNAL_CreateDummyUniformBuffer(
2020-12-28 22:07:13 +00:00
renderer,
UNIFORM_BUFFER_FRAGMENT
2022-01-02 22:35:57 +00:00
);
if (renderer->dummyFragmentUniformBuffer == NULL)
{
2021-01-05 23:00:51 +00:00
Refresh_LogError("Failed to create dummy fragment uniform buffer!");
2020-12-28 22:07:13 +00:00
return NULL;
}
renderer->dummyComputeUniformBuffer = VULKAN_INTERNAL_CreateDummyUniformBuffer(
2020-12-31 04:39:47 +00:00
renderer,
UNIFORM_BUFFER_COMPUTE
2022-01-02 22:35:57 +00:00
);
2022-01-03 18:36:47 +00:00
if (renderer->dummyComputeUniformBuffer == NULL)
2022-01-02 22:35:57 +00:00
{
Refresh_LogError("Failed to create dummy compute uniform buffer!");
2020-12-31 04:39:47 +00:00
return NULL;
}
/* Initialize uniform buffer pools */
renderer->vertexUniformBufferPool = VULKAN_INTERNAL_CreateUniformBufferPool(
renderer,
UNIFORM_BUFFER_VERTEX
);
renderer->fragmentUniformBufferPool = VULKAN_INTERNAL_CreateUniformBufferPool(
renderer,
UNIFORM_BUFFER_FRAGMENT
);
renderer->computeUniformBufferPool = VULKAN_INTERNAL_CreateUniformBufferPool(
renderer,
UNIFORM_BUFFER_COMPUTE
);
/* Initialize caches */
2021-01-03 02:02:20 +00:00
for (i = 0; i < NUM_COMMAND_POOL_BUCKETS; i += 1)
{
renderer->commandPoolHashTable.buckets[i].elements = NULL;
renderer->commandPoolHashTable.buckets[i].count = 0;
renderer->commandPoolHashTable.buckets[i].capacity = 0;
}
for (i = 0; i < NUM_PIPELINE_LAYOUT_BUCKETS; i += 1)
{
2020-12-29 22:52:24 +00:00
renderer->graphicsPipelineLayoutHashTable.buckets[i].elements = NULL;
renderer->graphicsPipelineLayoutHashTable.buckets[i].count = 0;
renderer->graphicsPipelineLayoutHashTable.buckets[i].capacity = 0;
}
for (i = 0; i < NUM_PIPELINE_LAYOUT_BUCKETS; i += 1)
{
renderer->computePipelineLayoutHashTable.buckets[i].elements = NULL;
renderer->computePipelineLayoutHashTable.buckets[i].count = 0;
renderer->computePipelineLayoutHashTable.buckets[i].capacity = 0;
}
for (i = 0; i < NUM_DESCRIPTOR_SET_LAYOUT_BUCKETS; i += 1)
{
2020-12-29 23:05:26 +00:00
renderer->descriptorSetLayoutHashTable.buckets[i].elements = NULL;
renderer->descriptorSetLayoutHashTable.buckets[i].count = 0;
renderer->descriptorSetLayoutHashTable.buckets[i].capacity = 0;
2020-12-29 22:52:24 +00:00
}
renderer->renderPassHashArray.elements = NULL;
renderer->renderPassHashArray.count = 0;
renderer->renderPassHashArray.capacity = 0;
renderer->framebufferHashArray.elements = NULL;
renderer->framebufferHashArray.count = 0;
renderer->framebufferHashArray.capacity = 0;
renderer->renderTargetHashArray.elements = NULL;
renderer->renderTargetHashArray.count = 0;
renderer->renderTargetHashArray.capacity = 0;
/* Initialize transfer buffer pool */
2022-06-17 07:41:27 +00:00
renderer->transferBufferPool.lock = SDL_CreateMutex();
renderer->transferBufferPool.availableBufferCapacity = 4;
renderer->transferBufferPool.availableBufferCount = 0;
renderer->transferBufferPool.availableBuffers = SDL_malloc(renderer->transferBufferPool.availableBufferCapacity * sizeof(VulkanTransferBuffer*));
2022-06-17 07:41:27 +00:00
/* Some drivers don't support D16, so we have to fall back to D32. */
vulkanResult = renderer->vkGetPhysicalDeviceImageFormatProperties(
renderer->physicalDevice,
VK_FORMAT_D16_UNORM,
VK_IMAGE_TYPE_2D,
VK_IMAGE_TILING_OPTIMAL,
VK_IMAGE_ASPECT_DEPTH_BIT,
0,
&imageFormatProperties
);
if (vulkanResult == VK_ERROR_FORMAT_NOT_SUPPORTED)
{
renderer->D16Format = VK_FORMAT_D32_SFLOAT;
}
else
{
renderer->D16Format = VK_FORMAT_D16_UNORM;
}
vulkanResult = renderer->vkGetPhysicalDeviceImageFormatProperties(
renderer->physicalDevice,
VK_FORMAT_D16_UNORM_S8_UINT,
VK_IMAGE_TYPE_2D,
VK_IMAGE_TILING_OPTIMAL,
VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT,
0,
&imageFormatProperties
);
if (vulkanResult == VK_ERROR_FORMAT_NOT_SUPPORTED)
{
renderer->D16S8Format = VK_FORMAT_D32_SFLOAT_S8_UINT;
}
else
{
renderer->D16S8Format = VK_FORMAT_D16_UNORM_S8_UINT;
}
/* Deferred destroy storage */
renderer->texturesToDestroyCapacity = 16;
renderer->texturesToDestroyCount = 0;
renderer->texturesToDestroy = (VulkanTexture**)SDL_malloc(
sizeof(VulkanTexture*) *
renderer->texturesToDestroyCapacity
);
renderer->buffersToDestroyCapacity = 16;
renderer->buffersToDestroyCount = 0;
renderer->buffersToDestroy = SDL_malloc(
sizeof(VulkanBuffer*) *
renderer->buffersToDestroyCapacity
);
renderer->samplersToDestroyCapacity = 16;
renderer->samplersToDestroyCount = 0;
renderer->samplersToDestroy = SDL_malloc(
sizeof(VulkanSampler*) *
renderer->samplersToDestroyCapacity
);
renderer->graphicsPipelinesToDestroyCapacity = 16;
renderer->graphicsPipelinesToDestroyCount = 0;
renderer->graphicsPipelinesToDestroy = SDL_malloc(
sizeof(VulkanGraphicsPipeline*) *
renderer->graphicsPipelinesToDestroyCapacity
);
renderer->computePipelinesToDestroyCapacity = 16;
renderer->computePipelinesToDestroyCount = 0;
renderer->computePipelinesToDestroy = SDL_malloc(
sizeof(VulkanComputePipeline*) *
renderer->computePipelinesToDestroyCapacity
);
renderer->shaderModulesToDestroyCapacity = 16;
renderer->shaderModulesToDestroyCount = 0;
renderer->shaderModulesToDestroy = SDL_malloc(
sizeof(VulkanShaderModule*) *
renderer->shaderModulesToDestroyCapacity
);
renderer->framebuffersToDestroyCapacity = 16;
renderer->framebuffersToDestroyCount = 0;
renderer->framebuffersToDestroy = SDL_malloc(
sizeof(VulkanFramebuffer*) *
renderer->framebuffersToDestroyCapacity
);
2022-02-25 21:42:11 +00:00
return result;
2020-12-17 01:23:49 +00:00
}
2021-01-05 23:00:51 +00:00
Refresh_Driver VulkanDriver = {
2022-02-25 21:42:11 +00:00
"Vulkan",
VULKAN_PrepareDriver,
2022-02-25 21:42:11 +00:00
VULKAN_CreateDevice
2020-12-17 00:27:14 +00:00
};
#endif //REFRESH_DRIVER_VULKAN