From 3636d64100d77c6f167d17a9ee990f6ddd1aafc3 Mon Sep 17 00:00:00 2001 From: Peter Harris Date: Wed, 13 Aug 2025 22:53:13 +0100 Subject: [PATCH 1/4] Pass through direct to driver for no-op intercepts --- generator/generate_vulkan_common.py | 233 +- .../vk_codegen/device_dispatch_table.txt | 10 +- .../vk_codegen/instance_dispatch_table.txt | 11 +- generator/vk_codegen/root_CMakeLists.txt | 6 +- generator/vk_layer/source/instance.hpp | 2 +- layer_example/CMakeLists.txt | 7 +- layer_example/source/instance.hpp | 2 +- .../source/layer_device_functions.cpp | 2 +- layer_gpu_profile/CMakeLists.txt | 6 +- layer_gpu_profile/source/device.hpp | 3 +- layer_gpu_profile/source/instance.hpp | 2 +- layer_gpu_support/CMakeLists.txt | 6 +- layer_gpu_support/source/instance.hpp | 2 +- layer_gpu_timeline/CMakeLists.txt | 6 +- layer_gpu_timeline/source/instance.hpp | 2 +- source_common/compiler_helper.cmake | 6 +- .../framework/device_dispatch_table.hpp | 10 +- source_common/framework/device_functions.cpp | 1452 ++- source_common/framework/device_functions.hpp | 8242 ++++++------ .../framework/device_functions_query.hpp | 10748 ++++++++++++++++ source_common/framework/entry.cpp | 16 +- .../framework/instance_dispatch_table.hpp | 11 +- .../framework/instance_functions.cpp | 204 +- .../framework/instance_functions.hpp | 1298 +- .../framework/instance_functions_query.hpp | 1708 +++ source_common/framework/manual_functions.cpp | 142 +- source_common/framework/manual_functions.hpp | 31 +- source_common/framework/utils.hpp | 19 +- 28 files changed, 18182 insertions(+), 6005 deletions(-) create mode 100644 source_common/framework/device_functions_query.hpp create mode 100644 source_common/framework/instance_functions_query.hpp diff --git a/generator/generate_vulkan_common.py b/generator/generate_vulkan_common.py index 8260c62..42473bf 100755 --- a/generator/generate_vulkan_common.py +++ b/generator/generate_vulkan_common.py @@ -508,40 +508,112 @@ def generate_instance_decls( if plat_define: 
lines.append(f'#if defined({plat_define})\n') - # Declare the default implementation + # Explicitly delete the generic primary template lines.append('/* See Vulkan API for documentation. */') - lines.append('/* Default common code pass-through implementation. */') - decl = f'VKAPI_ATTR {command.rtype} ' \ - f'VKAPI_CALL layer_{command.name}_default(' + lines.append('/* Delete the generic match-all */') + decl = f'template \n' \ + f'VKAPI_ATTR {command.rtype} ' \ + f'VKAPI_CALL layer_{command.name}(' lines.append(decl) for i, (ptype, pname, array) in enumerate(command.params): ending = ',' if i == len(command.params) - 1: - ending = ');' + ending = ') = delete;' parl = f' {ptype} {pname}{array}{ending}' lines.append(parl) lines.append('') - # Define the default tag dispatch handler - lines.append('/* Match-all template to use default implementation. */') - decl = 'template ' - lines.append(decl) - decl = f'VKAPI_ATTR {command.rtype} VKAPI_CALL layer_{command.name}(' + # Define the default_tag template + lines.append('/* Default common code implementation. */') + decl = f'template <>\n' \ + f'VKAPI_ATTR {command.rtype} ' \ + f'VKAPI_CALL layer_{command.name}(' lines.append(decl) for i, (ptype, pname, array) in enumerate(command.params): ending = ',' if i == len(command.params) - 1: - ending = '' + ending = ');' parl = f' {ptype} {pname}{array}{ending}' lines.append(parl) + lines.append('') - parmfwd = ', '.join([x[1] for x in command.params]) - retfwd = 'return ' if command.rtype != 'void' else '' - lines.append(') {') - lines.append(f' {retfwd}layer_{command.name}_default({parmfwd});') - lines.append('}\n') + if plat_define: + lines.append('#endif\n') + + file.write('\n'.join(lines)) + file.write('\n') + + file.write('// clang-format on\n') + + +def generate_instance_queries( + file: TextIO, mapping: VersionInfo, commands: list[Command]) -> None: + ''' + Generate the instance intercept declarations header. + + Args: + file: The file to write. 
+ mapping: The version mapping information for the commands. + commands: The list of commands read from the spec. + ''' + # Write the copyright header to the file + write_copyright_header(file) + + file.write('#pragma once\n') + file.write('\n') + + file.write('// clang-format off\n') + file.write('\n') + + file.write('#include \n') + file.write('\n') + + # Create a listing of API versions and API extensions + for command in commands: + if command.dispatch_type != 'instance': + continue + + lines = [] + assert command.name + + plat_define = mapping.get_platform_define(command.name) + if plat_define: + lines.append(f'#if defined({plat_define})\n') + + # Define the concept to test if user_tag specialization exists + plist = [] + nlist = [] + for i, (ptype, pname, array) in enumerate(command.params): + plist.append(f'{ptype} {pname}{array}') + nlist.append(pname) + plistStr = ', '.join(plist) + nlistStr = ', '.join(nlist) + + lines.append('/* Test for user_tag availability. */') + decl = f'template \n' \ + f'concept hasLayerPtr_{command.name} = ' \ + f'requires(\n {plistStr}\n) {{\n layer_{command.name}({nlistStr});\n}};' + lines.append(decl) + lines.append('') + + # Define the function pointer resolution + lines.append('/* Function pointer resolution. */') + decl = f'constexpr PFN_{command.name} getLayerPtr_{command.name}()\n' \ + f'{{\n' \ + f' return [] \n' \ + f' {{\n' \ + f' if constexpr(hasLayerPtr_{command.name})\n' \ + f' {{\n' \ + f' return layer_{command.name};\n' \ + f' }}\n' \ + f'\n' \ + f' return layer_{command.name};\n' \ + f' }}.operator()();\n' \ + f'}}' + lines.append(decl) + lines.append('') if plat_define: lines.append('#endif\n') @@ -582,8 +654,9 @@ def generate_instance_defs( lines.append(f'#if defined({plat_define})\n') lines.append('/* See Vulkan API for documentation. 
*/') - decl = f'VKAPI_ATTR {command.rtype} ' \ - f'VKAPI_CALL layer_{command.name}_default(' + decl = f'template <>\n' \ + f'VKAPI_ATTR {command.rtype} ' \ + f'VKAPI_CALL layer_{command.name}(' lines.append(decl) for i, (ptype, pname, array) in enumerate(command.params): @@ -691,7 +764,8 @@ def generate_device_decls( file.write('// clang-format off\n') file.write('\n') - file.write('#include \n') + file.write('#include \n\n') + file.write('#include "framework/utils.hpp"\n') file.write('\n') # Create a listing of API versions and API extensions @@ -706,39 +780,36 @@ def generate_device_decls( if plat_define: lines.append(f'#if defined({plat_define})\n') + # Explicitly delete the generic primary template lines.append('/* See Vulkan API for documentation. */') - lines.append('/* Default common code pass-through implementation. */') - decl = f'VKAPI_ATTR {command.rtype} ' \ - f'VKAPI_CALL layer_{command.name}_default(' + lines.append('/* Delete the generic match-all */') + decl = f'template \n' \ + f'VKAPI_ATTR {command.rtype} ' \ + f'VKAPI_CALL layer_{command.name}(' lines.append(decl) for i, (ptype, pname, array) in enumerate(command.params): ending = ',' if i == len(command.params) - 1: - ending = ');' + ending = ') = delete;' parl = f' {ptype} {pname}{array}{ending}' lines.append(parl) lines.append('') - # Define the default tag dispatch handler - lines.append('/* Match-all template to use default implementation. */') - decl = 'template ' - lines.append(decl) - decl = f'VKAPI_ATTR {command.rtype} VKAPI_CALL layer_{command.name}(' + # Define the default_tag template + lines.append('/* Default common code implementation. 
*/') + decl = f'template <>\n' \ + f'VKAPI_ATTR {command.rtype} ' \ + f'VKAPI_CALL layer_{command.name}(' lines.append(decl) for i, (ptype, pname, array) in enumerate(command.params): ending = ',' if i == len(command.params) - 1: - ending = '' + ending = ');' parl = f' {ptype} {pname}{array}{ending}' lines.append(parl) - - parmfwd = ', '.join([x[1] for x in command.params]) - retfwd = 'return ' if command.rtype != 'void' else '' - lines.append(') {') - lines.append(f' {retfwd}layer_{command.name}_default({parmfwd});') - lines.append('}\n') + lines.append('') if plat_define: lines.append('#endif\n') @@ -779,9 +850,9 @@ def generate_device_defs( lines.append(f'#if defined({plat_define})\n') lines.append('/* See Vulkan API for documentation. */') - - decl = f'VKAPI_ATTR {command.rtype} ' \ - f'VKAPI_CALL layer_{command.name}_default(' + decl = f'template <>\n' \ + f'VKAPI_ATTR {command.rtype} ' \ + f'VKAPI_CALL layer_{command.name}(' lines.append(decl) for i, (ptype, pname, array) in enumerate(command.params): @@ -815,6 +886,84 @@ def generate_device_defs( file.write(data) +def generate_device_queries( + file: TextIO, mapping: VersionInfo, commands: list[Command]) -> None: + ''' + Generate the device intercept queries header. + + Args: + file: The file to write. + mapping: The version mapping information for the commands. + commands: The list of commands read from the spec. 
+ ''' + + # Write the copyright header to the file + write_copyright_header(file) + + file.write('#pragma once\n') + file.write('\n') + + file.write('// clang-format off\n') + file.write('\n') + + file.write('#include \n\n') + file.write('#include "framework/utils.hpp"\n') + file.write('\n') + + # Create a listing of API versions and API extensions + for command in commands: + if command.dispatch_type != 'device': + continue + + assert command.name + + lines = [] + plat_define = mapping.get_platform_define(command.name) + if plat_define: + lines.append(f'#if defined({plat_define})\n') + + # Define the concept to test if user_tag specialization exists + plist = [] + nlist = [] + for i, (ptype, pname, array) in enumerate(command.params): + plist.append(f'{ptype} {pname}{array}') + nlist.append(pname) + plistStr = ', '.join(plist) + nlistStr = ', '.join(nlist) + + lines.append('/* Test for user_tag availability. */') + decl = f'template \n' \ + f'concept hasLayerPtr_{command.name} = ' \ + f'requires(\n {plistStr}\n) {{\n layer_{command.name}({nlistStr});\n}};' + lines.append(decl) + lines.append('') + + # Define the function pointer resolution + lines.append('/* Function pointer resolution. */') + decl = f'constexpr PFN_{command.name} getLayerPtr_{command.name}()\n' \ + f'{{\n' \ + f' return [] \n' \ + f' {{\n' \ + f' if constexpr(hasLayerPtr_{command.name})\n' \ + f' {{\n' \ + f' return layer_{command.name};\n' \ + f' }}\n' \ + f'\n' \ + f' return layer_{command.name};\n' \ + f' }}.operator()();\n' \ + f'}}' + lines.append(decl) + lines.append('') + + if plat_define: + lines.append('#endif\n') + + file.write('\n'.join(lines)) + file.write('\n') + + file.write('// clang-format on\n') + + def main() -> int: ''' Tool main function. 
@@ -867,6 +1016,10 @@ def main() -> int: with open(outfile, 'w', encoding='utf-8', newline='\n') as handle: generate_instance_decls(handle, mapping, commands) + outfile = os.path.join(outdir, 'instance_functions_query.hpp') + with open(outfile, 'w', encoding='utf-8', newline='\n') as handle: + generate_instance_queries(handle, mapping, commands) + outfile = os.path.join(outdir, 'instance_functions.cpp') with open(outfile, 'w', encoding='utf-8', newline='\n') as handle: generate_instance_defs(handle, mapping, commands) @@ -879,6 +1032,10 @@ def main() -> int: with open(outfile, 'w', encoding='utf-8', newline='\n') as handle: generate_device_decls(handle, mapping, commands) + outfile = os.path.join(outdir, 'device_functions_query.hpp') + with open(outfile, 'w', encoding='utf-8', newline='\n') as handle: + generate_device_queries(handle, mapping, commands) + outfile = os.path.join(outdir, 'device_functions.cpp') with open(outfile, 'w', encoding='utf-8', newline='\n') as handle: generate_device_defs(handle, mapping, commands) diff --git a/generator/vk_codegen/device_dispatch_table.txt b/generator/vk_codegen/device_dispatch_table.txt index 8550328..f589dba 100644 --- a/generator/vk_codegen/device_dispatch_table.txt +++ b/generator/vk_codegen/device_dispatch_table.txt @@ -12,6 +12,9 @@ #include "layer_device_functions.hpp" #endif +// These must be after the layer_*_functions.hpp includes +#include "framework/device_functions_query.hpp" + /** * @brief Interception table lookup entry. */ @@ -26,9 +29,14 @@ struct DeviceInterceptTableEntry * @brief The layer function pointer. */ PFN_vkVoidFunction function; + + /** + * @brief Did the layer provide a specialization? + */ + bool hasLayerSpecialization; }; -#define ENTRY(fnc) { STR(fnc), reinterpret_cast(layer_##fnc) } +#define ENTRY(fnc) { STR(fnc), reinterpret_cast(getLayerPtr_##fnc()), hasLayerPtr_##fnc } /** * @brief The device dispatch table used to call the driver. 
diff --git a/generator/vk_codegen/instance_dispatch_table.txt b/generator/vk_codegen/instance_dispatch_table.txt index 96c3b82..1380102 100644 --- a/generator/vk_codegen/instance_dispatch_table.txt +++ b/generator/vk_codegen/instance_dispatch_table.txt @@ -13,6 +13,10 @@ #include "layer_instance_functions.hpp" #endif +// These must be after the layer_*_functions.hpp includes +#include "framework/device_functions_query.hpp" +#include "framework/instance_functions_query.hpp" + /** * @brief Interception table lookup entry. */ @@ -27,9 +31,14 @@ struct InstanceInterceptTableEntry * @brief The layer function pointer. */ PFN_vkVoidFunction function; + + /** + * @brief Did the layer provide a specialization? + */ + bool hasLayerSpecialization; }; -#define ENTRY(fnc) { STR(fnc), reinterpret_cast(layer_##fnc) } +#define ENTRY(fnc) { STR(fnc), reinterpret_cast(getLayerPtr_##fnc()), hasLayerPtr_##fnc } /** * @brief The instance dispatch table used to call the driver. diff --git a/generator/vk_codegen/root_CMakeLists.txt b/generator/vk_codegen/root_CMakeLists.txt index edadf4d..4430ed6 100644 --- a/generator/vk_codegen/root_CMakeLists.txt +++ b/generator/vk_codegen/root_CMakeLists.txt @@ -30,8 +30,10 @@ project({PROJECT_NAME} VERSION 1.0.0) # Common configuration set(LGL_LOG_TAG "{LAYER_NAME}") -set(LGL_CONFIG_TRACE 0) -set(LGL_CONFIG_LOG 1) + +option(LGL_CONFIG_TRACE "Enable Vulkan entrypoint logging") +option(LGL_CONFIG_OPTIMIZE_DISPATCH "Enable Vulkan entrypoint dispatch optimization" ON) +option(LGL_CONFIG_LOG "Enable general layer logging" ON) include(../source_common/compiler_helper.cmake) include(../cmake/clang-tools.cmake) diff --git a/generator/vk_layer/source/instance.hpp b/generator/vk_layer/source/instance.hpp index d53d999..8660e3c 100644 --- a/generator/vk_layer/source/instance.hpp +++ b/generator/vk_layer/source/instance.hpp @@ -128,7 +128,7 @@ class Instance VkInstance instance; /** - * @brief The next layer's \c vkGetInstanceProcAddr() function pointer. 
+ * @brief The next layer's @c vkGetInstanceProcAddr() function pointer. */ PFN_vkGetInstanceProcAddr nlayerGetProcAddress; diff --git a/layer_example/CMakeLists.txt b/layer_example/CMakeLists.txt index 4707970..41ce304 100644 --- a/layer_example/CMakeLists.txt +++ b/layer_example/CMakeLists.txt @@ -30,8 +30,11 @@ project(VkLayerExample VERSION 1.0.0) # Common configuration set(LGL_LOG_TAG "VkLayerExample") -set(LGL_CONFIG_TRACE 0) -set(LGL_CONFIG_LOG 1) + +option(LGL_CONFIG_TRACE "Enable Vulkan entrypoint logging") +option(LGL_CONFIG_OPTIMIZE_DISPATCH "Enable Vulkan entrypoint dispatch optimization" ON) +option(LGL_CONFIG_LOG "Enable general layer logging" ON) + include(../source_common/compiler_helper.cmake) include(../cmake/clang-tools.cmake) diff --git a/layer_example/source/instance.hpp b/layer_example/source/instance.hpp index 4c989b2..cc05dcb 100644 --- a/layer_example/source/instance.hpp +++ b/layer_example/source/instance.hpp @@ -122,7 +122,7 @@ class Instance VkInstance instance; /** - * @brief The next layer's \c vkGetInstanceProcAddr() function pointer. + * @brief The next layer's @c vkGetInstanceProcAddr() function pointer. 
*/ PFN_vkGetInstanceProcAddr nlayerGetProcAddress; diff --git a/layer_example/source/layer_device_functions.cpp b/layer_example/source/layer_device_functions.cpp index 9073fb2..b2fbfe7 100644 --- a/layer_example/source/layer_device_functions.cpp +++ b/layer_example/source/layer_device_functions.cpp @@ -23,8 +23,8 @@ * ---------------------------------------------------------------------------- */ -#include "device.hpp" #include "framework/device_dispatch_table.hpp" +#include "device.hpp" #include #include diff --git a/layer_gpu_profile/CMakeLists.txt b/layer_gpu_profile/CMakeLists.txt index e2d2bed..0d7124d 100644 --- a/layer_gpu_profile/CMakeLists.txt +++ b/layer_gpu_profile/CMakeLists.txt @@ -30,8 +30,10 @@ project(VkLayerGPUProfile VERSION 1.0.0) # Common configuration set(LGL_LOG_TAG "VkLayerGPUProfile") -set(LGL_CONFIG_TRACE 0) -set(LGL_CONFIG_LOG 1) + +option(LGL_CONFIG_TRACE "Enable Vulkan entrypoint logging") +option(LGL_CONFIG_OPTIMIZE_DISPATCH "Enable Vulkan entrypoint dispatch optimization" ON) +option(LGL_CONFIG_LOG "Enable general layer logging" ON) include(../source_common/compiler_helper.cmake) include(../cmake/clang-tools.cmake) diff --git a/layer_gpu_profile/source/device.hpp b/layer_gpu_profile/source/device.hpp index f746e51..c900258 100644 --- a/layer_gpu_profile/source/device.hpp +++ b/layer_gpu_profile/source/device.hpp @@ -62,9 +62,10 @@ #include #include +#include "framework/device_dispatch_table.hpp" + #include "layer_comms.hpp" #include "comms/comms_module.hpp" -#include "framework/device_dispatch_table.hpp" #include "instance.hpp" #include "trackers/device.hpp" diff --git a/layer_gpu_profile/source/instance.hpp b/layer_gpu_profile/source/instance.hpp index 606ad7f..854745f 100644 --- a/layer_gpu_profile/source/instance.hpp +++ b/layer_gpu_profile/source/instance.hpp @@ -123,7 +123,7 @@ class Instance VkInstance instance; /** - * @brief The next layer's \c vkGetInstanceProcAddr() function pointer. 
+ * @brief The next layer's @c vkGetInstanceProcAddr() function pointer. */ PFN_vkGetInstanceProcAddr nlayerGetProcAddress; diff --git a/layer_gpu_support/CMakeLists.txt b/layer_gpu_support/CMakeLists.txt index 9c21abe..ecf9abb 100644 --- a/layer_gpu_support/CMakeLists.txt +++ b/layer_gpu_support/CMakeLists.txt @@ -30,8 +30,10 @@ project(VkLayerGPUSupport VERSION 1.0.0) # Common configuration set(LGL_LOG_TAG "VkLayerGPUSupport") -set(LGL_CONFIG_TRACE 0) -set(LGL_CONFIG_LOG 1) + +option(LGL_CONFIG_TRACE "Enable Vulkan entrypoint logging") +option(LGL_CONFIG_OPTIMIZE_DISPATCH "Enable Vulkan entrypoint dispatch optimization" ON) +option(LGL_CONFIG_LOG "Enable general layer logging" ON) include(../source_common/compiler_helper.cmake) include(../cmake/clang-tools.cmake) diff --git a/layer_gpu_support/source/instance.hpp b/layer_gpu_support/source/instance.hpp index 8142d5b..df78da7 100644 --- a/layer_gpu_support/source/instance.hpp +++ b/layer_gpu_support/source/instance.hpp @@ -122,7 +122,7 @@ class Instance VkInstance instance; /** - * @brief The next layer's \c vkGetInstanceProcAddr() function pointer. + * @brief The next layer's @c vkGetInstanceProcAddr() function pointer. 
*/ PFN_vkGetInstanceProcAddr nlayerGetProcAddress; diff --git a/layer_gpu_timeline/CMakeLists.txt b/layer_gpu_timeline/CMakeLists.txt index 472c47d..0f1ad76 100644 --- a/layer_gpu_timeline/CMakeLists.txt +++ b/layer_gpu_timeline/CMakeLists.txt @@ -30,8 +30,10 @@ project(VkLayerGPUTimeline VERSION 1.0.0) # Common configuration set(LGL_LOG_TAG "VkLayerGPUTimeline") -set(LGL_CONFIG_TRACE 0) -set(LGL_CONFIG_LOG 1) + +option(LGL_CONFIG_TRACE "Enable Vulkan entrypoint logging" ON) +option(LGL_CONFIG_OPTIMIZE_DISPATCH "Enable Vulkan entrypoint dispatch optimization" ON) +option(LGL_CONFIG_LOG "Enable general layer logging" ON) include(../source_common/compiler_helper.cmake) include(../cmake/clang-tools.cmake) diff --git a/layer_gpu_timeline/source/instance.hpp b/layer_gpu_timeline/source/instance.hpp index 4c989b2..cc05dcb 100644 --- a/layer_gpu_timeline/source/instance.hpp +++ b/layer_gpu_timeline/source/instance.hpp @@ -122,7 +122,7 @@ class Instance VkInstance instance; /** - * @brief The next layer's \c vkGetInstanceProcAddr() function pointer. + * @brief The next layer's @c vkGetInstanceProcAddr() function pointer. 
*/ PFN_vkGetInstanceProcAddr nlayerGetProcAddress; diff --git a/source_common/compiler_helper.cmake b/source_common/compiler_helper.cmake index a83d036..054231d 100644 --- a/source_common/compiler_helper.cmake +++ b/source_common/compiler_helper.cmake @@ -72,6 +72,8 @@ macro(lgl_set_build_options BUILD_TARGET_NAME) ${BUILD_TARGET_NAME} PRIVATE $<$:VK_USE_PLATFORM_ANDROID_KHR=1> $<$:LGL_LOG_TAG="${LGL_LOG_TAG}"> - CONFIG_TRACE=${LGL_CONFIG_TRACE} - CONFIG_LOG=${LGL_CONFIG_LOG}) + CONFIG_TRACE=$ + CONFIG_LOG=$ + CONFIG_OPTIMIZE_DISPATCH=$) + endmacro() diff --git a/source_common/framework/device_dispatch_table.hpp b/source_common/framework/device_dispatch_table.hpp index 0f8d7fb..fd405b0 100644 --- a/source_common/framework/device_dispatch_table.hpp +++ b/source_common/framework/device_dispatch_table.hpp @@ -37,6 +37,9 @@ #include "layer_device_functions.hpp" #endif +// These must be after the layer_*_functions.hpp includes +#include "framework/device_functions_query.hpp" + /** * @brief Interception table lookup entry. */ @@ -51,9 +54,14 @@ struct DeviceInterceptTableEntry * @brief The layer function pointer. */ PFN_vkVoidFunction function; + + /** + * @brief Did the layer provide a specialization? + */ + bool hasLayerSpecialization; }; -#define ENTRY(fnc) { STR(fnc), reinterpret_cast(layer_##fnc) } +#define ENTRY(fnc) { STR(fnc), reinterpret_cast(getLayerPtr_##fnc()), hasLayerPtr_##fnc } /** * @brief The device dispatch table used to call the driver. diff --git a/source_common/framework/device_functions.cpp b/source_common/framework/device_functions.cpp index a26c66c..bbfa3f5 100644 --- a/source_common/framework/device_functions.cpp +++ b/source_common/framework/device_functions.cpp @@ -37,7 +37,8 @@ extern std::mutex g_vulkanLock; /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireNextImage2KHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex @@ -54,7 +55,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireNextImage2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireNextImageKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, @@ -74,7 +76,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireNextImageKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireProfilingLockKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireProfilingLockKHR( VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo ) { @@ -90,7 +93,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireProfilingLockKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateCommandBuffers_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers @@ -107,7 +111,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateCommandBuffers_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateDescriptorSets_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets @@ -124,7 +129,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateDescriptorSets_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateMemory_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, @@ -142,7 +148,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateMemory_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBeginCommandBuffer_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBeginCommandBuffer( VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo ) { @@ -158,7 +165,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkBeginCommandBuffer_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, @@ -176,7 +184,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory2_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos @@ -193,7 +202,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory2KHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos @@ -210,7 +220,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory2KHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, @@ -228,7 +239,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory2_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos @@ -245,7 +257,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory2KHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos @@ -262,7 +275,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindTensorMemoryARM_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindTensorMemoryARM( VkDevice device, uint32_t bindInfoCount, const VkBindTensorMemoryInfoARM* pBindInfos @@ -279,7 +293,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindTensorMemoryARM_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBuildAccelerationStructuresKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBuildAccelerationStructuresKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, @@ -298,7 +313,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkBuildAccelerationStructuresKHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBuildMicromapsEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBuildMicromapsEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, @@ -316,7 +332,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkBuildMicromapsEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginConditionalRenderingEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin ) { @@ -332,7 +349,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginConditionalRenderingEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginDebugUtilsLabelEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) { @@ -348,7 +366,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginDebugUtilsLabelEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginQuery_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, @@ -366,7 +385,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginQuery_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginQueryIndexedEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, @@ -385,7 +405,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginQueryIndexedEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents @@ -402,7 +423,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo @@ -419,7 +441,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo @@ -436,7 +459,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRendering_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRendering( VkCommandBuffer commandBuffer, const VkRenderingInfo* pRenderingInfo ) { @@ -452,7 +476,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRendering_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderingKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer, const VkRenderingInfo* pRenderingInfo ) { @@ -468,7 +493,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderingKHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginTransformFeedbackEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, @@ -487,7 +513,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginTransformFeedbackEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( VkCommandBuffer commandBuffer, const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo ) { @@ -503,7 +530,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT_d } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBufferEmbeddedSamplersEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, @@ -521,7 +549,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBufferEmbeddedSamplersEXT_de } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBuffersEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBuffersEXT( VkCommandBuffer commandBuffer, uint32_t bufferCount, const VkDescriptorBufferBindingInfoEXT* pBindingInfos @@ -538,7 +567,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBuffersEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, @@ -560,7 +590,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets2( VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo ) { @@ -576,7 +607,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets2KHR( VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo ) { @@ -592,7 +624,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -610,7 +643,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer2( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -629,7 +663,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer2_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer2KHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -648,7 +683,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindPipeline_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline @@ -665,7 +701,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindPipeline_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindShadersEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindShadersEXT( VkCommandBuffer commandBuffer, uint32_t stageCount, const VkShaderStageFlagBits* pStages, @@ -683,7 +720,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindShadersEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindTransformFeedbackBuffersEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, @@ -703,7 +741,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindTransformFeedbackBuffersEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, @@ -722,7 +761,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, @@ -743,7 +783,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers2EXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, @@ -764,7 +805,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers2EXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, @@ -786,7 +828,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage2( VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo ) { @@ -802,7 +845,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo ) { @@ -818,7 +862,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage2KHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildAccelerationStructuresIndirectKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, @@ -838,7 +883,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildAccelerationStructuresIndirectKHR_def } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildAccelerationStructuresKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, @@ -856,7 +902,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildAccelerationStructuresKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildMicromapsEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildMicromapsEXT( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos @@ -873,7 +920,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildMicromapsEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearAttachments_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, @@ -892,7 +940,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearAttachments_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearColorImage_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, @@ -912,7 +961,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearColorImage_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearDepthStencilImage_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, @@ -932,7 +982,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearDepthStencilImage_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyAccelerationStructureKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo ) { @@ -948,7 +999,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyAccelerationStructureKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyAccelerationStructureToMemoryKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo ) { @@ -964,7 +1016,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyAccelerationStructureToMemoryKHR_defau } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, @@ -983,7 +1036,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo ) { @@ -999,7 +1053,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer2_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo ) { @@ -1015,7 +1070,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, @@ -1035,7 +1091,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo ) { @@ -1051,7 +1108,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo ) { @@ -1067,7 +1125,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, @@ -1088,7 +1147,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage2( VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo ) { @@ -1104,7 +1164,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo ) { @@ -1120,7 +1181,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, @@ -1140,7 +1202,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo ) { @@ -1156,7 +1219,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo ) { @@ -1172,7 +1236,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer2KHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMemoryToAccelerationStructureKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo ) { @@ -1188,7 +1253,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMemoryToAccelerationStructureKHR_defau } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMemoryToMicromapEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMemoryToMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT* pInfo ) { @@ -1204,7 +1270,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMemoryToMicromapEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMicromapEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMicromapEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT* pInfo ) { @@ -1220,7 +1287,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMicromapEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMicromapToMemoryEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMicromapToMemoryEXT( VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT* pInfo ) { @@ -1236,7 +1304,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMicromapToMemoryEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyQueryPoolResults_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, @@ -1258,7 +1327,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyQueryPoolResults_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyTensorARM_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyTensorARM( VkCommandBuffer commandBuffer, const VkCopyTensorInfoARM* pCopyTensorInfo ) { @@ -1274,7 +1344,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyTensorARM_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerBeginEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) { @@ -1290,7 +1361,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerBeginEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerEndEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerEndEXT( VkCommandBuffer commandBuffer ) { LAYER_TRACE(__func__); @@ -1305,7 +1377,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerEndEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerInsertEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo ) { @@ -1321,7 +1394,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerInsertEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatch_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, @@ -1339,7 +1413,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatch_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchBase_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchBase( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, @@ -1360,7 +1435,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchBase_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchBaseKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, @@ -1381,7 +1457,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchBaseKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchIndirect_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset @@ -1398,7 +1475,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchIndirect_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDraw_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, @@ -1417,7 +1495,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDraw_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexed_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexed( VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, @@ -1437,7 +1516,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexed_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirect_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -1456,7 +1536,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirect_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirectCount_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -1477,7 +1558,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirectCount_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirectCountKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -1498,7 +1580,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirectCountKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirect_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -1517,7 +1600,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirect_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectByteCountEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, @@ -1538,7 +1622,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectByteCountEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectCount_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -1559,7 +1644,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectCount_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectCountKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -1580,7 +1666,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectCountKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksEXT( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, @@ -1598,7 +1685,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksIndirectCountEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksIndirectCountEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -1619,7 +1707,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksIndirectCountEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksIndirectEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksIndirectEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, @@ -1638,7 +1727,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksIndirectEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMultiEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMultiEXT( VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawInfoEXT* pVertexInfo, @@ -1658,7 +1748,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMultiEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMultiIndexedEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMultiIndexedEXT( VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawIndexedInfoEXT* pIndexInfo, @@ -1679,7 +1770,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMultiIndexedEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndConditionalRenderingEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer ) { LAYER_TRACE(__func__); @@ -1694,7 +1786,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndConditionalRenderingEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndDebugUtilsLabelEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndDebugUtilsLabelEXT( VkCommandBuffer commandBuffer ) { LAYER_TRACE(__func__); @@ -1709,7 +1802,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndDebugUtilsLabelEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndQuery_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query @@ -1726,7 +1820,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndQuery_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndQueryIndexedEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, @@ -1744,7 +1839,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndQueryIndexedEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass( VkCommandBuffer commandBuffer ) { LAYER_TRACE(__func__); @@ -1759,7 +1855,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo ) { @@ -1775,7 +1872,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo ) { @@ -1791,7 +1889,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRendering_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRendering( VkCommandBuffer commandBuffer ) { LAYER_TRACE(__func__); @@ -1806,7 +1905,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRendering_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRendering2EXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRendering2EXT( VkCommandBuffer commandBuffer, const VkRenderingEndInfoEXT* pRenderingEndInfo ) { @@ -1822,7 +1922,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRendering2EXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderingKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderingKHR( VkCommandBuffer commandBuffer ) { LAYER_TRACE(__func__); @@ -1837,7 +1938,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderingKHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndTransformFeedbackEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, @@ -1856,7 +1958,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndTransformFeedbackEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdExecuteCommands_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers @@ -1873,7 +1976,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdExecuteCommands_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdExecuteGeneratedCommandsEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdExecuteGeneratedCommandsEXT( VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo @@ -1890,7 +1994,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdExecuteGeneratedCommandsEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdFillBuffer_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, @@ -1909,7 +2014,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdFillBuffer_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdInsertDebugUtilsLabelEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo ) { @@ -1925,7 +2031,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdInsertDebugUtilsLabelEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass( VkCommandBuffer commandBuffer, VkSubpassContents contents ) { @@ -1941,7 +2048,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass2( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo @@ -1958,7 +2066,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo @@ -1975,7 +2084,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, @@ -1999,7 +2109,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo ) { @@ -2015,7 +2126,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier2_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo ) { @@ -2031,7 +2143,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPreprocessGeneratedCommandsEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPreprocessGeneratedCommandsEXT( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo, VkCommandBuffer stateCommandBuffer @@ -2048,7 +2161,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdPreprocessGeneratedCommandsEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, @@ -2068,7 +2182,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants2( VkCommandBuffer commandBuffer, const VkPushConstantsInfo* pPushConstantsInfo ) { @@ -2084,7 +2199,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants2KHR( VkCommandBuffer commandBuffer, const VkPushConstantsInfo* pPushConstantsInfo ) { @@ -2100,7 +2216,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants2KHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, @@ -2120,7 +2237,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet2( VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfo* pPushDescriptorSetInfo ) { @@ -2136,7 +2254,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet2KHR( VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfo* pPushDescriptorSetInfo ) { @@ -2152,7 +2271,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, @@ -2172,7 +2292,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, @@ -2191,7 +2312,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate2( VkCommandBuffer commandBuffer, const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo ) { @@ -2207,7 +2329,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate2KHR( VkCommandBuffer commandBuffer, const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo ) { @@ -2223,7 +2346,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplateKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, @@ -2242,7 +2366,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplateKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask @@ -2259,7 +2384,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask @@ -2276,7 +2402,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent2_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask @@ -2293,7 +2420,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetQueryPool_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, @@ -2311,7 +2439,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetQueryPool_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, @@ -2332,7 +2461,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage2( VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo ) { @@ -2348,7 +2478,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo ) { @@ -2364,7 +2495,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage2KHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAlphaToCoverageEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAlphaToCoverageEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable ) { @@ -2380,7 +2512,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAlphaToCoverageEnableEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAlphaToOneEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAlphaToOneEnableEXT( VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable ) { @@ -2396,7 +2529,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAlphaToOneEnableEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAttachmentFeedbackLoopEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAttachmentFeedbackLoopEnableEXT( VkCommandBuffer commandBuffer, VkImageAspectFlags aspectMask ) { @@ -2412,7 +2546,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAttachmentFeedbackLoopEnableEXT_default } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetBlendConstants_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, const float blendConstants[4] ) { @@ -2428,7 +2563,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetBlendConstants_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendAdvancedEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendAdvancedEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, @@ -2446,7 +2582,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendAdvancedEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendEnableEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, @@ -2464,7 +2601,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendEnableEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendEquationEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendEquationEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, @@ -2482,7 +2620,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendEquationEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorWriteEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32* pColorWriteEnables @@ -2499,7 +2638,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorWriteEnableEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorWriteMaskEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorWriteMaskEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, @@ -2517,7 +2657,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorWriteMaskEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetConservativeRasterizationModeEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetConservativeRasterizationModeEXT( VkCommandBuffer commandBuffer, VkConservativeRasterizationModeEXT conservativeRasterizationMode ) { @@ -2533,7 +2674,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetConservativeRasterizationModeEXT_defaul } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationModeNV_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationModeNV( VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode ) { @@ -2549,7 +2691,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationModeNV_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationTableEnableNV_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationTableEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable ) { @@ -2565,7 +2708,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationTableEnableNV_default } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationTableNV_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationTableNV( VkCommandBuffer commandBuffer, uint32_t coverageModulationTableCount, const float* pCoverageModulationTable @@ -2582,7 +2726,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationTableNV_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageReductionModeNV_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageReductionModeNV( VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode ) { @@ -2598,7 +2743,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageReductionModeNV_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageToColorEnableNV_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageToColorEnableNV( VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable ) { @@ -2614,7 +2760,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageToColorEnableNV_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageToColorLocationNV_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageToColorLocationNV( VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation ) { @@ -2630,7 +2777,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageToColorLocationNV_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCullMode_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCullMode( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) { @@ -2646,7 +2794,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCullMode_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCullModeEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode ) { @@ -2662,7 +2811,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCullModeEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBias_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, @@ -2680,7 +2830,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBias_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBias2EXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBias2EXT( VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT* pDepthBiasInfo ) { @@ -2696,7 +2847,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBias2EXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBiasEnable_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) { @@ -2712,7 +2864,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBiasEnable_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBiasEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable ) { @@ -2728,7 +2881,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBiasEnableEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBounds_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds @@ -2745,7 +2899,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBounds_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBoundsTestEnable_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) { @@ -2761,7 +2916,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBoundsTestEnable_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBoundsTestEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable ) { @@ -2777,7 +2933,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBoundsTestEnableEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClampEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClampEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClampEnable ) { @@ -2793,7 +2950,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClampEnableEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClampRangeEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClampRangeEXT( VkCommandBuffer commandBuffer, VkDepthClampModeEXT depthClampMode, const VkDepthClampRangeEXT* pDepthClampRange @@ -2810,7 +2968,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClampRangeEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClipEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClipEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthClipEnable ) { @@ -2826,7 +2985,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClipEnableEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClipNegativeOneToOneEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClipNegativeOneToOneEXT( VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne ) { @@ -2842,7 +3002,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClipNegativeOneToOneEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthCompareOp_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthCompareOp( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) { @@ -2858,7 +3019,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthCompareOp_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthCompareOpEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp ) { @@ -2874,7 +3036,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthCompareOpEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthTestEnable_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) { @@ -2890,7 +3053,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthTestEnable_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthTestEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable ) { @@ -2906,7 +3070,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthTestEnableEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthWriteEnable_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) { @@ -2922,7 +3087,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthWriteEnable_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthWriteEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable ) { @@ -2938,7 +3104,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthWriteEnableEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDescriptorBufferOffsets2EXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDescriptorBufferOffsets2EXT( VkCommandBuffer commandBuffer, const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo ) { @@ -2954,7 +3121,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDescriptorBufferOffsets2EXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDescriptorBufferOffsetsEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDescriptorBufferOffsetsEXT( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, @@ -2975,7 +3143,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDescriptorBufferOffsetsEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDeviceMask_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask ) { @@ -2991,7 +3160,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDeviceMask_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDeviceMaskKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, uint32_t deviceMask ) { @@ -3007,7 +3177,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDeviceMaskKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, @@ -3025,7 +3196,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 discardRectangleEnable ) { @@ -3041,7 +3213,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleEnableEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleModeEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleModeEXT( VkCommandBuffer commandBuffer, VkDiscardRectangleModeEXT discardRectangleMode ) { @@ -3057,7 +3230,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleModeEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask @@ -3074,7 +3248,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo* pDependencyInfo @@ -3091,7 +3266,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo* pDependencyInfo @@ -3108,7 +3284,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetExtraPrimitiveOverestimationSizeEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetExtraPrimitiveOverestimationSizeEXT( VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize ) { @@ -3124,7 +3301,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetExtraPrimitiveOverestimationSizeEXT_def } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFragmentShadingRateKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2] @@ -3141,7 +3319,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFragmentShadingRateKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFrontFace_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFrontFace( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) { @@ -3157,7 +3336,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFrontFace_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFrontFaceEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, VkFrontFace frontFace ) { @@ -3173,7 +3353,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFrontFaceEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineRasterizationModeEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineRasterizationModeEXT( VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode ) { @@ -3189,7 +3370,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineRasterizationModeEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStipple_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStipple( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern @@ -3206,7 +3388,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStipple_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern @@ -3223,7 +3406,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable ) { @@ -3239,7 +3423,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleEnableEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleKHR( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern @@ -3256,7 +3441,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineWidth_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineWidth( VkCommandBuffer commandBuffer, float lineWidth ) { @@ -3272,7 +3458,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineWidth_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLogicOpEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, VkLogicOp logicOp ) { @@ -3288,7 +3475,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLogicOpEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLogicOpEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLogicOpEnableEXT( VkCommandBuffer commandBuffer, VkBool32 logicOpEnable ) { @@ -3304,7 +3492,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLogicOpEnableEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPatchControlPointsEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, uint32_t patchControlPoints ) { @@ -3320,7 +3509,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPatchControlPointsEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPolygonModeEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPolygonModeEXT( VkCommandBuffer commandBuffer, VkPolygonMode polygonMode ) { @@ -3336,7 +3526,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPolygonModeEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveRestartEnable_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) { @@ -3352,7 +3543,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveRestartEnable_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveRestartEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable ) { @@ -3368,7 +3560,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveRestartEnableEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveTopology_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) { @@ -3384,7 +3577,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveTopology_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveTopologyEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology ) { @@ -3400,7 +3594,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveTopologyEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetProvokingVertexModeEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetProvokingVertexModeEXT( VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode ) { @@ -3416,7 +3611,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetProvokingVertexModeEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizationSamplesEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizationSamplesEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples ) { @@ -3432,7 +3628,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizationSamplesEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizationStreamEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizationStreamEXT( VkCommandBuffer commandBuffer, uint32_t rasterizationStream ) { @@ -3448,7 +3645,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizationStreamEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizerDiscardEnable_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) { @@ -3464,7 +3662,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizerDiscardEnable_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizerDiscardEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable ) { @@ -3480,7 +3679,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizerDiscardEnableEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRayTracingPipelineStackSizeKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, uint32_t pipelineStackSize ) { @@ -3496,7 +3696,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRayTracingPipelineStackSizeKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingAttachmentLocations_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingAttachmentLocations( VkCommandBuffer commandBuffer, const VkRenderingAttachmentLocationInfo* pLocationInfo ) { @@ -3512,7 +3713,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingAttachmentLocations_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingAttachmentLocationsKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingAttachmentLocationsKHR( VkCommandBuffer commandBuffer, const VkRenderingAttachmentLocationInfo* pLocationInfo ) { @@ -3528,7 +3730,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingAttachmentLocationsKHR_default } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingInputAttachmentIndices_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingInputAttachmentIndices( VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo ) { @@ -3544,7 +3747,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingInputAttachmentIndices_default } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingInputAttachmentIndicesKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingInputAttachmentIndicesKHR( VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo ) { @@ -3560,7 +3764,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingInputAttachmentIndicesKHR_defa } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRepresentativeFragmentTestEnableNV_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRepresentativeFragmentTestEnableNV( VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable ) { @@ -3576,7 +3781,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRepresentativeFragmentTestEnableNV_defa } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleLocationsEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo ) { @@ -3592,7 +3798,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleLocationsEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleLocationsEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleLocationsEnableEXT( VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable ) { @@ -3608,7 +3815,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleLocationsEnableEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleMaskEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleMaskEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask* pSampleMask @@ -3625,7 +3833,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleMaskEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissor_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, @@ -3643,7 +3852,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissor_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissorWithCount_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors @@ -3660,7 +3870,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissorWithCount_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissorWithCountEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors @@ -3677,7 +3888,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissorWithCountEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetShadingRateImageEnableNV_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetShadingRateImageEnableNV( VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable ) { @@ -3693,7 +3905,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetShadingRateImageEnableNV_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilCompareMask_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask @@ -3710,7 +3923,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilCompareMask_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilOp_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilOp( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, @@ -3730,7 +3944,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilOp_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilOpEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, @@ -3750,7 +3965,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilOpEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilReference_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference @@ -3767,7 +3983,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilReference_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilTestEnable_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) { @@ -3783,7 +4000,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilTestEnable_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilTestEnableEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable ) { @@ -3799,7 +4017,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilTestEnableEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilWriteMask_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask @@ -3816,7 +4035,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilWriteMask_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetTessellationDomainOriginEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin ) { @@ -3832,7 +4052,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetTessellationDomainOriginEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetVertexInputEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, @@ -3851,7 +4072,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetVertexInputEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewport_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, @@ -3869,7 +4091,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewport_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportSwizzleNV_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportSwizzleNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, @@ -3887,7 +4110,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportSwizzleNV_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWScalingEnableNV_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWScalingEnableNV( VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable ) { @@ -3903,7 +4127,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWScalingEnableNV_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWithCount_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports @@ -3920,7 +4145,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWithCount_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWithCountEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports @@ -3937,7 +4163,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWithCountEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysIndirect2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysIndirect2KHR( VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress ) { @@ -3953,7 +4180,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysIndirect2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysIndirectKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, @@ -3973,7 +4201,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysIndirectKHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, @@ -3995,7 +4224,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdUpdateBuffer_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, @@ -4014,7 +4244,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdUpdateBuffer_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, @@ -4039,7 +4270,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents2( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, @@ -4057,7 +4289,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, @@ -4075,7 +4308,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents2KHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteAccelerationStructuresPropertiesKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, @@ -4095,7 +4329,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteAccelerationStructuresPropertiesKHR_d } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteMicromapsPropertiesEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteMicromapsPropertiesEXT( VkCommandBuffer commandBuffer, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, @@ -4115,7 +4350,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteMicromapsPropertiesEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, @@ -4133,7 +4369,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, @@ -4151,7 +4388,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, @@ -4169,7 +4407,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp2KHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyAccelerationStructureKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyAccelerationStructureKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureInfoKHR* pInfo @@ -4186,7 +4425,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyAccelerationStructureKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyAccelerationStructureToMemoryKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyAccelerationStructureToMemoryKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo @@ -4203,7 +4443,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyAccelerationStructureToMemoryKHR_defa } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToImage_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToImage( VkDevice device, const VkCopyImageToImageInfo* pCopyImageToImageInfo ) { @@ -4219,7 +4460,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToImage_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToImageEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToImageEXT( VkDevice device, const VkCopyImageToImageInfo* pCopyImageToImageInfo ) { @@ -4235,7 +4477,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToImageEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToMemory_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToMemory( VkDevice device, const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo ) { @@ -4251,7 +4494,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToMemory_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToMemoryEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToMemoryEXT( VkDevice device, const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo ) { @@ -4267,7 +4511,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToMemoryEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToAccelerationStructureKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToAccelerationStructureKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo @@ -4284,7 +4529,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToAccelerationStructureKHR_defa } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToImage_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToImage( VkDevice device, const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo ) { @@ -4300,7 +4546,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToImage_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToImageEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToImageEXT( VkDevice device, const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo ) { @@ -4316,7 +4563,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToImageEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToMicromapEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToMicromapInfoEXT* pInfo @@ -4333,7 +4581,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToMicromapEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMicromapEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT* pInfo @@ -4350,7 +4599,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMicromapEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMicromapToMemoryEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMicromapToMemoryEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapToMemoryInfoEXT* pInfo @@ -4367,7 +4617,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMicromapToMemoryEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateAccelerationStructureKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateAccelerationStructureKHR( VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4385,7 +4636,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateAccelerationStructureKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateBuffer_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateBuffer( VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4403,7 +4655,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateBuffer_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateBufferView_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4421,7 +4674,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateBufferView_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateCommandPool_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4439,7 +4693,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateCommandPool_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateComputePipelines_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, @@ -4459,7 +4714,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateComputePipelines_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDeferredOperationKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDeferredOperationKHR( VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation @@ -4476,7 +4732,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDeferredOperationKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorPool_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4494,7 +4751,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorPool_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorSetLayout_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4512,7 +4770,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorSetLayout_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorUpdateTemplate_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4530,7 +4789,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorUpdateTemplate_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorUpdateTemplateKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4548,7 +4808,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorUpdateTemplateKHR_default } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateEvent_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateEvent( VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4566,7 +4827,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateEvent_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateFence_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateFence( VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4584,7 +4846,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateFence_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateFramebuffer_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4602,7 +4865,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateFramebuffer_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateGraphicsPipelines_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateGraphicsPipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, @@ -4622,7 +4886,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateGraphicsPipelines_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateImage_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateImage( VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4640,7 +4905,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateImage_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateImageView_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateImageView( VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4658,7 +4924,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateImageView_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateIndirectCommandsLayoutEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateIndirectCommandsLayoutEXT( VkDevice device, const VkIndirectCommandsLayoutCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4676,7 +4943,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateIndirectCommandsLayoutEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateIndirectExecutionSetEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateIndirectExecutionSetEXT( VkDevice device, const VkIndirectExecutionSetCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4694,7 +4962,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateIndirectExecutionSetEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateMicromapEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateMicromapEXT( VkDevice device, const VkMicromapCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4712,7 +4981,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateMicromapEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineBinariesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineBinariesKHR( VkDevice device, const VkPipelineBinaryCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4730,7 +5000,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineBinariesKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineCache_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4748,7 +5019,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineCache_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineLayout_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineLayout( VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4766,7 +5038,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineLayout_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePrivateDataSlot_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePrivateDataSlot( VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4784,7 +5057,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePrivateDataSlot_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePrivateDataSlotEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePrivateDataSlotEXT( VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4802,7 +5076,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePrivateDataSlotEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateQueryPool_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateQueryPool( VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4820,7 +5095,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateQueryPool_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRayTracingPipelinesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRayTracingPipelinesKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, VkPipelineCache pipelineCache, @@ -4841,7 +5117,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRayTracingPipelinesKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass( VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4859,7 +5136,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass2_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass2( VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4877,7 +5155,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass2_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass2KHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass2KHR( VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4895,7 +5174,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSampler_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSampler( VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4913,7 +5193,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSampler_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSamplerYcbcrConversion_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4931,7 +5212,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSamplerYcbcrConversion_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSamplerYcbcrConversionKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSamplerYcbcrConversionKHR( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4949,7 +5231,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSamplerYcbcrConversionKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSemaphore_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSemaphore( VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4967,7 +5250,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSemaphore_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateShaderModule_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateShaderModule( VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -4985,7 +5269,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateShaderModule_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateShadersEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateShadersEXT( VkDevice device, uint32_t createInfoCount, const VkShaderCreateInfoEXT* pCreateInfos, @@ -5004,7 +5289,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateShadersEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSharedSwapchainsKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSharedSwapchainsKHR( VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, @@ -5023,7 +5309,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSharedSwapchainsKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSwapchainKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -5041,7 +5328,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSwapchainKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateTensorARM_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateTensorARM( VkDevice device, const VkTensorCreateInfoARM* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -5059,7 +5347,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateTensorARM_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateTensorViewARM_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateTensorViewARM( VkDevice device, const VkTensorViewCreateInfoARM* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -5077,7 +5366,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateTensorViewARM_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateValidationCacheEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateValidationCacheEXT( VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -5095,7 +5385,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateValidationCacheEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkDebugMarkerSetObjectNameEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkDebugMarkerSetObjectNameEXT( VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo ) { @@ -5111,7 +5402,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkDebugMarkerSetObjectNameEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkDebugMarkerSetObjectTagEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkDebugMarkerSetObjectTagEXT( VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo ) { @@ -5127,7 +5419,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkDebugMarkerSetObjectTagEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkDeferredOperationJoinKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkDeferredOperationJoinKHR( VkDevice device, VkDeferredOperationKHR operation ) { @@ -5143,7 +5436,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkDeferredOperationJoinKHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyAccelerationStructureKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyAccelerationStructureKHR( VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator @@ -5160,7 +5454,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyAccelerationStructureKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyBuffer_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyBuffer( VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator @@ -5177,7 +5472,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyBuffer_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyBufferView_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyBufferView( VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator @@ -5194,7 +5490,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyBufferView_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyCommandPool_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator @@ -5211,7 +5508,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyCommandPool_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDeferredOperationKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator @@ -5228,7 +5526,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDeferredOperationKHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorPool_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator @@ -5245,7 +5544,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorPool_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorSetLayout_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorSetLayout( VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator @@ -5262,7 +5562,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorSetLayout_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorUpdateTemplate_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator @@ -5279,7 +5580,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorUpdateTemplate_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorUpdateTemplateKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator @@ -5296,7 +5598,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorUpdateTemplateKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyEvent_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyEvent( VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator @@ -5313,7 +5616,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyEvent_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyFence_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyFence( VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator @@ -5330,7 +5634,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyFence_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyFramebuffer_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator @@ -5347,7 +5652,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyFramebuffer_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyImage_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyImage( VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator @@ -5364,7 +5670,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyImage_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyImageView_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyImageView( VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator @@ -5381,7 +5688,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyImageView_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyIndirectCommandsLayoutEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyIndirectCommandsLayoutEXT( VkDevice device, VkIndirectCommandsLayoutEXT indirectCommandsLayout, const VkAllocationCallbacks* pAllocator @@ -5398,7 +5706,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyIndirectCommandsLayoutEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyIndirectExecutionSetEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyIndirectExecutionSetEXT( VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, const VkAllocationCallbacks* pAllocator @@ -5415,7 +5724,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyIndirectExecutionSetEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyMicromapEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyMicromapEXT( VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks* pAllocator @@ -5432,7 +5742,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyMicromapEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipeline_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipeline( VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator @@ -5449,7 +5760,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipeline_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineBinaryKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineBinaryKHR( VkDevice device, VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks* pAllocator @@ -5466,7 +5778,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineBinaryKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineCache_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator @@ -5483,7 +5796,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineCache_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineLayout_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator @@ -5500,7 +5814,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineLayout_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPrivateDataSlot_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator @@ -5517,7 +5832,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPrivateDataSlot_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPrivateDataSlotEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator @@ -5534,7 +5850,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPrivateDataSlotEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyQueryPool_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator @@ -5551,7 +5868,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyQueryPool_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyRenderPass_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator @@ -5568,7 +5886,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyRenderPass_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroySampler_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroySampler( VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator @@ -5585,7 +5904,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroySampler_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroySamplerYcbcrConversion_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroySamplerYcbcrConversion( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator @@ -5602,7 +5922,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroySamplerYcbcrConversion_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroySamplerYcbcrConversionKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroySamplerYcbcrConversionKHR( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator @@ -5619,7 +5940,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroySamplerYcbcrConversionKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroySemaphore_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator @@ -5636,7 +5958,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroySemaphore_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyShaderEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyShaderEXT( VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks* pAllocator @@ -5653,7 +5976,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyShaderEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyShaderModule_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator @@ -5670,7 +5994,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyShaderModule_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroySwapchainKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator @@ -5687,7 +6012,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroySwapchainKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyTensorARM_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyTensorARM( VkDevice device, VkTensorARM tensor, const VkAllocationCallbacks* pAllocator @@ -5704,7 +6030,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyTensorARM_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyTensorViewARM_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyTensorViewARM( VkDevice device, VkTensorViewARM tensorView, const VkAllocationCallbacks* pAllocator @@ -5721,7 +6048,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyTensorViewARM_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyValidationCacheEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator @@ -5738,7 +6066,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyValidationCacheEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkDeviceWaitIdle_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkDeviceWaitIdle( VkDevice device ) { LAYER_TRACE(__func__); @@ -5753,7 +6082,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkDeviceWaitIdle_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkDisplayPowerControlEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo @@ -5770,7 +6100,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkDisplayPowerControlEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkEndCommandBuffer_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkEndCommandBuffer( VkCommandBuffer commandBuffer ) { LAYER_TRACE(__func__); @@ -5785,7 +6116,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkEndCommandBuffer_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkFlushMappedMemoryRanges_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges @@ -5802,7 +6134,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkFlushMappedMemoryRanges_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkFreeCommandBuffers_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkFreeCommandBuffers( VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, @@ -5820,7 +6153,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkFreeCommandBuffers_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkFreeDescriptorSets_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkFreeDescriptorSets( VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, @@ -5838,7 +6172,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkFreeDescriptorSets_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkFreeMemory_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkFreeMemory( VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator @@ -5855,7 +6190,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkFreeMemory_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetAccelerationStructureBuildSizesKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetAccelerationStructureBuildSizesKHR( VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, @@ -5874,7 +6210,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetAccelerationStructureBuildSizesKHR_default } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetAccelerationStructureDeviceAddressKHR_default( +template <> +VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo ) { @@ -5890,7 +6227,8 @@ VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetAccelerationStructureDeviceAddr } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, void* pData @@ -5907,7 +6245,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetAccelerationStructureOpaqueCaptureDesc } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddress_default( +template <> +VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) { @@ -5923,7 +6262,8 @@ VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddress_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddressEXT_default( +template <> +VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) { @@ -5939,7 +6279,8 @@ VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddressEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddressKHR_default( +template <> +VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) { @@ -5955,7 +6296,8 @@ VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddressKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements @@ -5972,7 +6314,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements @@ -5989,7 +6332,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements2_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements2KHR( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements @@ -6006,7 +6350,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetBufferOpaqueCaptureAddress_default( +template <> +VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) { @@ -6022,7 +6367,8 @@ VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetBufferOpaqueCaptureAddress_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetBufferOpaqueCaptureAddressKHR_default( +template <> +VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, const VkBufferDeviceAddressInfo* pInfo ) { @@ -6038,7 +6384,8 @@ VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetBufferOpaqueCaptureAddressKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetBufferOpaqueCaptureDescriptorDataEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetBufferOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT* pInfo, void* pData @@ -6055,7 +6402,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetBufferOpaqueCaptureDescriptorDataEXT_d } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetCalibratedTimestampsEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetCalibratedTimestampsEXT( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoKHR* pTimestampInfos, @@ -6074,7 +6422,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetCalibratedTimestampsEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetCalibratedTimestampsKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetCalibratedTimestampsKHR( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoKHR* pTimestampInfos, @@ -6093,7 +6442,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetCalibratedTimestampsKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR uint32_t VKAPI_CALL layer_vkGetDeferredOperationMaxConcurrencyKHR_default( +template <> +VKAPI_ATTR uint32_t VKAPI_CALL layer_vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation ) { @@ -6109,7 +6459,8 @@ VKAPI_ATTR uint32_t VKAPI_CALL layer_vkGetDeferredOperationMaxConcurrencyKHR_def } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeferredOperationResultKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeferredOperationResultKHR( VkDevice device, VkDeferredOperationKHR operation ) { @@ -6125,7 +6476,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeferredOperationResultKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorEXT( VkDevice device, const VkDescriptorGetInfoEXT* pDescriptorInfo, size_t dataSize, @@ -6143,7 +6495,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutBindingOffsetEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutBindingOffsetEXT( VkDevice device, VkDescriptorSetLayout layout, uint32_t binding, @@ -6161,7 +6514,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutBindingOffsetEXT_defaul } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSizeEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSizeEXT( VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize* pLayoutSizeInBytes @@ -6178,7 +6532,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSizeEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSupport_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport @@ -6195,7 +6550,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSupport_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSupportKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSupportKHR( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport @@ -6212,7 +6568,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSupportKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceAccelerationStructureCompatibilityKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, const VkAccelerationStructureVersionInfoKHR* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility @@ -6229,7 +6586,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceAccelerationStructureCompatibilityKH } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceBufferMemoryRequirements_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceBufferMemoryRequirements( VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements @@ -6246,7 +6604,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceBufferMemoryRequirements_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceBufferMemoryRequirementsKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceBufferMemoryRequirementsKHR( VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements @@ -6263,7 +6622,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceBufferMemoryRequirementsKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceFaultInfoEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceFaultInfoEXT( VkDevice device, VkDeviceFaultCountsEXT* pFaultCounts, VkDeviceFaultInfoEXT* pFaultInfo @@ -6280,7 +6640,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceFaultInfoEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceGroupPeerMemoryFeatures_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, @@ -6299,7 +6660,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceGroupPeerMemoryFeatures_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceGroupPeerMemoryFeaturesKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, @@ -6318,7 +6680,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceGroupPeerMemoryFeaturesKHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceGroupPresentCapabilitiesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities ) { @@ -6334,7 +6697,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceGroupPresentCapabilitiesKHR_defa } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceGroupSurfacePresentModesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes @@ -6351,7 +6715,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceGroupSurfacePresentModesKHR_defa } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageMemoryRequirements_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageMemoryRequirements( VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements @@ -6368,7 +6733,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageMemoryRequirements_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSparseMemoryRequirements_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSparseMemoryRequirements( VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, @@ -6386,7 +6752,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSparseMemoryRequirements_defaul } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSparseMemoryRequirementsKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSparseMemoryRequirementsKHR( VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, @@ -6404,7 +6771,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSparseMemoryRequirementsKHR_def } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSubresourceLayout_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSubresourceLayout( VkDevice device, const VkDeviceImageSubresourceInfo* pInfo, VkSubresourceLayout2* pLayout @@ -6421,7 +6789,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSubresourceLayout_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSubresourceLayoutKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSubresourceLayoutKHR( VkDevice device, const VkDeviceImageSubresourceInfo* pInfo, VkSubresourceLayout2* pLayout @@ -6438,7 +6807,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSubresourceLayoutKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceMemoryCommitment_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes @@ -6455,7 +6825,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceMemoryCommitment_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetDeviceMemoryOpaqueCaptureAddress_default( +template <> +VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo ) { @@ -6471,7 +6842,8 @@ VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetDeviceMemoryOpaqueCaptureAddress_defau } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetDeviceMemoryOpaqueCaptureAddressKHR_default( +template <> +VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo ) { @@ -6487,7 +6859,8 @@ VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetDeviceMemoryOpaqueCaptureAddressKHR_de } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceMicromapCompatibilityEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceMicromapCompatibilityEXT( VkDevice device, const VkMicromapVersionInfoEXT* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility @@ -6504,7 +6877,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceMicromapCompatibilityEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceQueue_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, @@ -6522,7 +6896,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceQueue_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceQueue2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue @@ -6539,7 +6914,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceQueue2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceTensorMemoryRequirementsARM_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceTensorMemoryRequirementsARM( VkDevice device, const VkDeviceTensorMemoryRequirementsARM* pInfo, VkMemoryRequirements2* pMemoryRequirements @@ -6556,7 +6932,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceTensorMemoryRequirementsARM_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetEventStatus_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetEventStatus( VkDevice device, VkEvent event ) { @@ -6572,7 +6949,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetEventStatus_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetFenceFdKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd @@ -6589,7 +6967,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetFenceFdKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetFenceStatus_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetFenceStatus( VkDevice device, VkFence fence ) { @@ -6605,7 +6984,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetFenceStatus_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetGeneratedCommandsMemoryRequirementsEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetGeneratedCommandsMemoryRequirementsEXT( VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoEXT* pInfo, VkMemoryRequirements2* pMemoryRequirements @@ -6622,7 +7002,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetGeneratedCommandsMemoryRequirementsEXT_def } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageDrmFormatModifierPropertiesEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties @@ -6639,7 +7020,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageDrmFormatModifierPropertiesEXT_de } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements( VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements @@ -6656,7 +7038,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements @@ -6673,7 +7056,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements2KHR( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements @@ -6690,7 +7074,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageOpaqueCaptureDescriptorDataEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageCaptureDescriptorDataInfoEXT* pInfo, void* pData @@ -6707,7 +7092,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageOpaqueCaptureDescriptorDataEXT_de } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements( VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, @@ -6725,7 +7111,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, @@ -6743,7 +7130,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements2KHR( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, @@ -6761,7 +7149,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout( VkDevice device, VkImage image, const VkImageSubresource* pSubresource, @@ -6779,7 +7168,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2( VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, @@ -6797,7 +7187,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2EXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2EXT( VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, @@ -6815,7 +7206,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2EXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2KHR( VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, @@ -6833,7 +7225,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageViewOpaqueCaptureDescriptorDataEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageViewOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, void* pData @@ -6850,7 +7243,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageViewOpaqueCaptureDescriptorDataEX } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryFdKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd @@ -6867,7 +7261,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryFdKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryFdPropertiesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryFdPropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, @@ -6885,7 +7280,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryFdPropertiesKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryHostPointerPropertiesEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryHostPointerPropertiesEXT( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, @@ -6903,7 +7299,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryHostPointerPropertiesEXT_default } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetMicromapBuildSizesEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetMicromapBuildSizesEXT( VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkMicromapBuildInfoEXT* pBuildInfo, @@ -6921,7 +7318,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetMicromapBuildSizesEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineBinaryDataKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineBinaryDataKHR( VkDevice device, const VkPipelineBinaryDataInfoKHR* pInfo, VkPipelineBinaryKeyKHR* pPipelineBinaryKey, @@ -6940,7 +7338,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineBinaryDataKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineCacheData_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, @@ -6958,7 +7357,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineCacheData_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutableInternalRepresentationsKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, @@ -6976,7 +7376,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutableInternalRepresentati } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutablePropertiesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutablePropertiesKHR( VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, @@ -6994,7 +7395,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutablePropertiesKHR_defaul } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutableStatisticsKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutableStatisticsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, @@ -7012,7 +7414,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutableStatisticsKHR_defaul } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineKeyKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineKeyKHR( VkDevice device, const VkPipelineCreateInfoKHR* pPipelineCreateInfo, VkPipelineBinaryKeyKHR* pPipelineKey @@ -7029,7 +7432,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineKeyKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelinePropertiesEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelinePropertiesEXT( VkDevice device, const VkPipelineInfoEXT* pPipelineInfo, VkBaseOutStructure* pPipelineProperties @@ -7046,7 +7450,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelinePropertiesEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPrivateData_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, @@ -7065,7 +7470,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPrivateData_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPrivateDataEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, @@ -7084,7 +7490,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPrivateDataEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetQueryPoolResults_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, @@ -7106,7 +7513,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetQueryPoolResults_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, @@ -7126,7 +7534,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetRayTracingCaptureReplayShaderGroupHand } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetRayTracingShaderGroupHandlesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetRayTracingShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, @@ -7146,7 +7555,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetRayTracingShaderGroupHandlesKHR_defaul } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkDeviceSize VKAPI_CALL layer_vkGetRayTracingShaderGroupStackSizeKHR_default( +template <> +VKAPI_ATTR VkDeviceSize VKAPI_CALL layer_vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, VkPipeline pipeline, uint32_t group, @@ -7164,7 +7574,8 @@ VKAPI_ATTR VkDeviceSize VKAPI_CALL layer_vkGetRayTracingShaderGroupStackSizeKHR_ } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderAreaGranularity_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity @@ -7181,7 +7592,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderAreaGranularity_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderingAreaGranularity_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderingAreaGranularity( VkDevice device, const VkRenderingAreaInfo* pRenderingAreaInfo, VkExtent2D* pGranularity @@ -7198,7 +7610,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderingAreaGranularity_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderingAreaGranularityKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderingAreaGranularityKHR( VkDevice device, const VkRenderingAreaInfo* pRenderingAreaInfo, VkExtent2D* pGranularity @@ -7215,7 +7628,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderingAreaGranularityKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSamplerOpaqueCaptureDescriptorDataEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSamplerOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, void* pData @@ -7232,7 +7646,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSamplerOpaqueCaptureDescriptorDataEXT_ } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreCounterValue_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t* pValue @@ -7249,7 +7664,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreCounterValue_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreCounterValueKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, uint64_t* pValue @@ -7266,7 +7682,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreCounterValueKHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreFdKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd @@ -7283,7 +7700,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreFdKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetShaderBinaryDataEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetShaderBinaryDataEXT( VkDevice device, VkShaderEXT shader, size_t* pDataSize, @@ -7301,7 +7719,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetShaderBinaryDataEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetShaderModuleCreateInfoIdentifierEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetShaderModuleCreateInfoIdentifierEXT( VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, VkShaderModuleIdentifierEXT* pIdentifier @@ -7318,7 +7737,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetShaderModuleCreateInfoIdentifierEXT_defaul } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetShaderModuleIdentifierEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetShaderModuleIdentifierEXT( VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT* pIdentifier @@ -7335,7 +7755,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetShaderModuleIdentifierEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainCounterEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, @@ -7353,7 +7774,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainCounterEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainImagesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, @@ -7371,7 +7793,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainImagesKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainStatusKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainStatusKHR( VkDevice device, VkSwapchainKHR swapchain ) { @@ -7387,7 +7810,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainStatusKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetTensorMemoryRequirementsARM_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetTensorMemoryRequirementsARM( VkDevice device, const VkTensorMemoryRequirementsInfoARM* pInfo, VkMemoryRequirements2* pMemoryRequirements @@ -7404,7 +7828,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetTensorMemoryRequirementsARM_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetTensorOpaqueCaptureDescriptorDataARM_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetTensorOpaqueCaptureDescriptorDataARM( VkDevice device, const VkTensorCaptureDescriptorDataInfoARM* pInfo, void* pData @@ -7421,7 +7846,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetTensorOpaqueCaptureDescriptorDataARM_d } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetTensorViewOpaqueCaptureDescriptorDataARM_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetTensorViewOpaqueCaptureDescriptorDataARM( VkDevice device, const VkTensorViewCaptureDescriptorDataInfoARM* pInfo, void* pData @@ -7438,7 +7864,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetTensorViewOpaqueCaptureDescriptorDataA } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetValidationCacheDataEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, @@ -7456,7 +7883,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetValidationCacheDataEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkImportFenceFdKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkImportFenceFdKHR( VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo ) { @@ -7472,7 +7900,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkImportFenceFdKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkImportSemaphoreFdKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkImportSemaphoreFdKHR( VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo ) { @@ -7488,7 +7917,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkImportSemaphoreFdKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkInvalidateMappedMemoryRanges_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges @@ -7505,7 +7935,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkInvalidateMappedMemoryRanges_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, @@ -7525,7 +7956,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory2_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory2( VkDevice device, const VkMemoryMapInfo* pMemoryMapInfo, void** ppData @@ -7542,7 +7974,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory2KHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory2KHR( VkDevice device, const VkMemoryMapInfo* pMemoryMapInfo, void** ppData @@ -7559,7 +7992,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkMergePipelineCaches_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, @@ -7577,7 +8011,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkMergePipelineCaches_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkMergeValidationCachesEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, @@ -7595,7 +8030,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkMergeValidationCachesEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkQueueBeginDebugUtilsLabelEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) { @@ -7611,7 +8047,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkQueueBeginDebugUtilsLabelEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueBindSparse_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, @@ -7629,7 +8066,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueBindSparse_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkQueueEndDebugUtilsLabelEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkQueueEndDebugUtilsLabelEXT( VkQueue queue ) { LAYER_TRACE(__func__); @@ -7644,7 +8082,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkQueueEndDebugUtilsLabelEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkQueueInsertDebugUtilsLabelEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo ) { @@ -7660,7 +8099,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkQueueInsertDebugUtilsLabelEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueuePresentKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueuePresentKHR( VkQueue queue, const VkPresentInfoKHR* pPresentInfo ) { @@ -7676,7 +8116,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueuePresentKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, @@ -7694,7 +8135,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit2_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, @@ -7712,7 +8154,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit2_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit2KHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit2KHR( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, @@ -7730,7 +8173,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueWaitIdle_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueWaitIdle( VkQueue queue ) { LAYER_TRACE(__func__); @@ -7745,7 +8189,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueWaitIdle_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkRegisterDeviceEventEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, @@ -7763,7 +8208,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkRegisterDeviceEventEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkRegisterDisplayEventEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, @@ -7782,7 +8228,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkRegisterDisplayEventEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseCapturedPipelineDataKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseCapturedPipelineDataKHR( VkDevice device, const VkReleaseCapturedPipelineDataInfoKHR* pInfo, const VkAllocationCallbacks* pAllocator @@ -7799,7 +8246,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseCapturedPipelineDataKHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkReleaseProfilingLockKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkReleaseProfilingLockKHR( VkDevice device ) { LAYER_TRACE(__func__); @@ -7814,7 +8262,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkReleaseProfilingLockKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseSwapchainImagesEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseSwapchainImagesEXT( VkDevice device, const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo ) { @@ -7830,7 +8279,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseSwapchainImagesEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetCommandBuffer_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetCommandBuffer( VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags ) { @@ -7846,7 +8296,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetCommandBuffer_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetCommandPool_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags @@ -7863,7 +8314,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetCommandPool_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetDescriptorPool_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags @@ -7880,7 +8332,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetDescriptorPool_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetEvent_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetEvent( VkDevice device, VkEvent event ) { @@ -7896,7 +8349,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetEvent_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetFences_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences @@ -7913,7 +8367,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetFences_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkResetQueryPool_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, @@ -7931,7 +8386,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkResetQueryPool_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkResetQueryPoolEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, @@ -7949,7 +8405,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkResetQueryPoolEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetDebugUtilsObjectNameEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetDebugUtilsObjectNameEXT( VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo ) { @@ -7965,7 +8422,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetDebugUtilsObjectNameEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetDebugUtilsObjectTagEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetDebugUtilsObjectTagEXT( VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo ) { @@ -7981,7 +8439,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetDebugUtilsObjectTagEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkSetDeviceMemoryPriorityEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, float priority @@ -7998,7 +8457,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkSetDeviceMemoryPriorityEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetEvent_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetEvent( VkDevice device, VkEvent event ) { @@ -8014,7 +8474,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetEvent_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkSetHdrMetadataEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, @@ -8032,7 +8493,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkSetHdrMetadataEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetPrivateData_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, @@ -8051,7 +8513,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetPrivateData_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetPrivateDataEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, @@ -8070,7 +8533,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetPrivateDataEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkSignalSemaphore_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) { @@ -8086,7 +8550,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkSignalSemaphore_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkSignalSemaphoreKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkSignalSemaphoreKHR( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo ) { @@ -8102,7 +8567,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkSignalSemaphoreKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkTransitionImageLayout_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkTransitionImageLayout( VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfo* pTransitions @@ -8119,7 +8585,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkTransitionImageLayout_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkTransitionImageLayoutEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkTransitionImageLayoutEXT( VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfo* pTransitions @@ -8136,7 +8603,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkTransitionImageLayoutEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkTrimCommandPool_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags @@ -8153,7 +8621,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkTrimCommandPool_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkTrimCommandPoolKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags @@ -8170,7 +8639,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkTrimCommandPoolKHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkUnmapMemory_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkUnmapMemory( VkDevice device, VkDeviceMemory memory ) { @@ -8186,7 +8656,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkUnmapMemory_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkUnmapMemory2_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkUnmapMemory2( VkDevice device, const VkMemoryUnmapInfo* pMemoryUnmapInfo ) { @@ -8202,7 +8673,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkUnmapMemory2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkUnmapMemory2KHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkUnmapMemory2KHR( VkDevice device, const VkMemoryUnmapInfo* pMemoryUnmapInfo ) { @@ -8218,7 +8690,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkUnmapMemory2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSetWithTemplate_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, @@ -8236,7 +8709,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSetWithTemplate_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSetWithTemplateKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, @@ -8254,7 +8728,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSetWithTemplateKHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSets_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, @@ -8273,7 +8748,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSets_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkUpdateIndirectExecutionSetPipelineEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkUpdateIndirectExecutionSetPipelineEXT( VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, @@ -8291,7 +8767,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkUpdateIndirectExecutionSetPipelineEXT_default } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkUpdateIndirectExecutionSetShaderEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkUpdateIndirectExecutionSetShaderEXT( VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, @@ -8309,7 +8786,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkUpdateIndirectExecutionSetShaderEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForFences_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, @@ -8328,7 +8806,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForFences_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForPresent2KHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForPresent2KHR( VkDevice device, VkSwapchainKHR swapchain, const VkPresentWait2InfoKHR* pPresentWait2Info @@ -8345,7 +8824,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForPresent2KHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForPresentKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForPresentKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, @@ -8363,7 +8843,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForPresentKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitSemaphores_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout @@ -8380,7 +8861,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitSemaphores_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitSemaphoresKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout @@ -8397,7 +8879,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitSemaphoresKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkWriteAccelerationStructuresPropertiesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, @@ -8418,7 +8901,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkWriteAccelerationStructuresPropertiesKHR_ } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkWriteMicromapsPropertiesEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkWriteMicromapsPropertiesEXT( VkDevice device, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, diff --git a/source_common/framework/device_functions.hpp b/source_common/framework/device_functions.hpp index 2e7398c..700ff0b 100644 --- a/source_common/framework/device_functions.hpp +++ b/source_common/framework/device_functions.hpp @@ -29,518 +29,463 @@ #include +#include "framework/utils.hpp" + /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireNextImage2KHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, - uint32_t* pImageIndex); + uint32_t* pImageIndex) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireNextImage2KHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireNextImage2KHR( VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, - uint32_t* pImageIndex -) { - return layer_vkAcquireNextImage2KHR_default(device, pAcquireInfo, pImageIndex); -} + uint32_t* pImageIndex); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireNextImageKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, - uint32_t* pImageIndex); + uint32_t* pImageIndex) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireNextImageKHR( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireNextImageKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, - uint32_t* pImageIndex -) { - return layer_vkAcquireNextImageKHR_default(device, swapchain, timeout, semaphore, fence, pImageIndex); -} + uint32_t* pImageIndex); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireProfilingLockKHR_default( - VkDevice device, - const VkAcquireProfilingLockInfoKHR* pInfo); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireProfilingLockKHR( VkDevice device, - const VkAcquireProfilingLockInfoKHR* pInfo -) { - return layer_vkAcquireProfilingLockKHR_default(device, pInfo); -} + const VkAcquireProfilingLockInfoKHR* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateCommandBuffers_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkAcquireProfilingLockKHR( VkDevice device, - const VkCommandBufferAllocateInfo* pAllocateInfo, - VkCommandBuffer* pCommandBuffers); + const VkAcquireProfilingLockInfoKHR* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateCommandBuffers( VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, - VkCommandBuffer* pCommandBuffers -) { - return layer_vkAllocateCommandBuffers_default(device, pAllocateInfo, pCommandBuffers); -} + VkCommandBuffer* pCommandBuffers) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateDescriptorSets_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateCommandBuffers( VkDevice device, - const VkDescriptorSetAllocateInfo* pAllocateInfo, - VkDescriptorSet* pDescriptorSets); + const VkCommandBufferAllocateInfo* pAllocateInfo, + VkCommandBuffer* pCommandBuffers); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateDescriptorSets( VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, - VkDescriptorSet* pDescriptorSets -) { - return layer_vkAllocateDescriptorSets_default(device, pAllocateInfo, pDescriptorSets); -} + VkDescriptorSet* pDescriptorSets) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateDescriptorSets( + VkDevice device, + const VkDescriptorSetAllocateInfo* pAllocateInfo, + VkDescriptorSet* pDescriptorSets); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateMemory_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, - VkDeviceMemory* pMemory); + VkDeviceMemory* pMemory) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateMemory( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkAllocateMemory( VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, - VkDeviceMemory* pMemory -) { - return layer_vkAllocateMemory_default(device, pAllocateInfo, pAllocator, pMemory); -} + VkDeviceMemory* pMemory); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBeginCommandBuffer_default( - VkCommandBuffer commandBuffer, - const VkCommandBufferBeginInfo* pBeginInfo); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkBeginCommandBuffer( VkCommandBuffer commandBuffer, - const VkCommandBufferBeginInfo* pBeginInfo -) { - return layer_vkBeginCommandBuffer_default(commandBuffer, pBeginInfo); -} + const VkCommandBufferBeginInfo* pBeginInfo) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBeginCommandBuffer( + VkCommandBuffer commandBuffer, + const VkCommandBufferBeginInfo* pBeginInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, - VkDeviceSize memoryOffset); + VkDeviceSize memoryOffset) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory( VkDevice device, VkBuffer buffer, VkDeviceMemory memory, - VkDeviceSize memoryOffset -) { - return layer_vkBindBufferMemory_default(device, buffer, memory, memoryOffset); -} + VkDeviceSize memoryOffset); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory2_default( - VkDevice device, - uint32_t bindInfoCount, - const VkBindBufferMemoryInfo* pBindInfos); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, - const VkBindBufferMemoryInfo* pBindInfos -) { - return layer_vkBindBufferMemory2_default(device, bindInfoCount, pBindInfos); -} + const VkBindBufferMemoryInfo* pBindInfos) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory2KHR( VkDevice device, uint32_t bindInfoCount, - const VkBindBufferMemoryInfo* pBindInfos -) { - return layer_vkBindBufferMemory2KHR_default(device, bindInfoCount, pBindInfos); -} + const VkBindBufferMemoryInfo* pBindInfos) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindBufferMemory2KHR( VkDevice device, - VkImage image, - VkDeviceMemory memory, - VkDeviceSize memoryOffset); + uint32_t bindInfoCount, + const VkBindBufferMemoryInfo* pBindInfos); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory( VkDevice device, VkImage image, VkDeviceMemory memory, - VkDeviceSize memoryOffset -) { - return layer_vkBindImageMemory_default(device, image, memory, memoryOffset); -} + VkDeviceSize memoryOffset) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory2_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory( VkDevice device, - uint32_t bindInfoCount, - const VkBindImageMemoryInfo* pBindInfos); + VkImage image, + VkDeviceMemory memory, + VkDeviceSize memoryOffset); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, - const VkBindImageMemoryInfo* pBindInfos -) { - return layer_vkBindImageMemory2_default(device, bindInfoCount, pBindInfos); -} + const VkBindImageMemoryInfo* pBindInfos) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory2( VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, - const VkBindImageMemoryInfo* pBindInfos -) { - return layer_vkBindImageMemory2KHR_default(device, bindInfoCount, pBindInfos); -} + const VkBindImageMemoryInfo* pBindInfos) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindTensorMemoryARM_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindImageMemory2KHR( VkDevice device, uint32_t bindInfoCount, - const VkBindTensorMemoryInfoARM* pBindInfos); + const VkBindImageMemoryInfo* pBindInfos); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindTensorMemoryARM( VkDevice device, uint32_t bindInfoCount, - const VkBindTensorMemoryInfoARM* pBindInfos -) { - return layer_vkBindTensorMemoryARM_default(device, bindInfoCount, pBindInfos); -} + const VkBindTensorMemoryInfoARM* pBindInfos) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBindTensorMemoryARM( + VkDevice device, + uint32_t bindInfoCount, + const VkBindTensorMemoryInfoARM* pBindInfos); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBuildAccelerationStructuresKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBuildAccelerationStructuresKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, - const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBuildAccelerationStructuresKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBuildAccelerationStructuresKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, - const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos -) { - return layer_vkBuildAccelerationStructuresKHR_default(device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos); -} + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBuildMicromapsEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBuildMicromapsEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, - const VkMicromapBuildInfoEXT* pInfos); + const VkMicromapBuildInfoEXT* pInfos) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkBuildMicromapsEXT( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkBuildMicromapsEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, - const VkMicromapBuildInfoEXT* pInfos -) { - return layer_vkBuildMicromapsEXT_default(device, deferredOperation, infoCount, pInfos); -} + const VkMicromapBuildInfoEXT* pInfos); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginConditionalRenderingEXT_default( - VkCommandBuffer commandBuffer, - const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, - const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin -) { - layer_vkCmdBeginConditionalRenderingEXT_default(commandBuffer, pConditionalRenderingBegin); -} + const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginDebugUtilsLabelEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginConditionalRenderingEXT( VkCommandBuffer commandBuffer, - const VkDebugUtilsLabelEXT* pLabelInfo); + const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, - const VkDebugUtilsLabelEXT* pLabelInfo -) { - layer_vkCmdBeginDebugUtilsLabelEXT_default(commandBuffer, pLabelInfo); -} + const VkDebugUtilsLabelEXT* pLabelInfo) = delete; -/* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginQuery_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, - VkQueryPool queryPool, - uint32_t query, - VkQueryControlFlags flags); + const VkDebugUtilsLabelEXT* pLabelInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, - VkQueryControlFlags flags -) { - layer_vkCmdBeginQuery_default(commandBuffer, queryPool, query, flags); -} + VkQueryControlFlags flags) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginQueryIndexedEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, - VkQueryControlFlags flags, - uint32_t index); + VkQueryControlFlags flags); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, - uint32_t index -) { - layer_vkCmdBeginQueryIndexedEXT_default(commandBuffer, queryPool, query, flags, index); -} + uint32_t index) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginQueryIndexedEXT( VkCommandBuffer commandBuffer, - const VkRenderPassBeginInfo* pRenderPassBegin, - VkSubpassContents contents); + VkQueryPool queryPool, + uint32_t query, + VkQueryControlFlags flags, + uint32_t index); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, - VkSubpassContents contents -) { - layer_vkCmdBeginRenderPass_default(commandBuffer, pRenderPassBegin, contents); -} + VkSubpassContents contents) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass2_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, - const VkSubpassBeginInfo* pSubpassBeginInfo); + VkSubpassContents contents); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, - const VkSubpassBeginInfo* pSubpassBeginInfo -) { - layer_vkCmdBeginRenderPass2_default(commandBuffer, pRenderPassBegin, pSubpassBeginInfo); -} + const VkSubpassBeginInfo* pSubpassBeginInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass2KHR_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass2( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, - const VkSubpassBeginInfo* pSubpassBeginInfo -) { - layer_vkCmdBeginRenderPass2KHR_default(commandBuffer, pRenderPassBegin, pSubpassBeginInfo); -} + const VkSubpassBeginInfo* pSubpassBeginInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRendering_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderPass2KHR( VkCommandBuffer commandBuffer, - const VkRenderingInfo* pRenderingInfo); + const VkRenderPassBeginInfo* pRenderPassBegin, + const VkSubpassBeginInfo* pSubpassBeginInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRendering( VkCommandBuffer commandBuffer, - const VkRenderingInfo* pRenderingInfo -) { - layer_vkCmdBeginRendering_default(commandBuffer, pRenderingInfo); -} + const VkRenderingInfo* pRenderingInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderingKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRendering( VkCommandBuffer commandBuffer, const VkRenderingInfo* pRenderingInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderingKHR( VkCommandBuffer commandBuffer, - const VkRenderingInfo* pRenderingInfo -) { - layer_vkCmdBeginRenderingKHR_default(commandBuffer, pRenderingInfo); -} + const VkRenderingInfo* pRenderingInfo) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginRenderingKHR( + VkCommandBuffer commandBuffer, + const VkRenderingInfo* pRenderingInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginTransformFeedbackEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, - const VkDeviceSize* pCounterBufferOffsets); + const VkDeviceSize* pCounterBufferOffsets) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginTransformFeedbackEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBeginTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, - const VkDeviceSize* pCounterBufferOffsets -) { - layer_vkCmdBeginTransformFeedbackEXT_default(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets); -} + const VkDeviceSize* pCounterBufferOffsets); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT_default( - VkCommandBuffer commandBuffer, - const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo); - -/* Match-all template to use default implementation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( VkCommandBuffer commandBuffer, - const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo -) { - layer_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT_default(commandBuffer, pBindDescriptorBufferEmbeddedSamplersInfo); -} + const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT( + VkCommandBuffer commandBuffer, + const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBufferEmbeddedSamplersEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, - uint32_t set); + uint32_t set) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBufferEmbeddedSamplersEXT( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, - uint32_t set -) { - layer_vkCmdBindDescriptorBufferEmbeddedSamplersEXT_default(commandBuffer, pipelineBindPoint, layout, set); -} + uint32_t set); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBuffersEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBuffersEXT( VkCommandBuffer commandBuffer, uint32_t bufferCount, - const VkDescriptorBufferBindingInfoEXT* pBindingInfos); + const VkDescriptorBufferBindingInfoEXT* pBindingInfos) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBuffersEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorBuffersEXT( VkCommandBuffer commandBuffer, uint32_t bufferCount, - const VkDescriptorBufferBindingInfoEXT* pBindingInfos -) { - layer_vkCmdBindDescriptorBuffersEXT_default(commandBuffer, bufferCount, pBindingInfos); -} + const VkDescriptorBufferBindingInfoEXT* pBindingInfos); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, @@ -548,11 +493,11 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets_default( uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, - const uint32_t* pDynamicOffsets); + const uint32_t* pDynamicOffsets) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, @@ -560,235 +505,211 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets( uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, - const uint32_t* pDynamicOffsets -) { - layer_vkCmdBindDescriptorSets_default(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets); -} + const uint32_t* pDynamicOffsets); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets2_default( - VkCommandBuffer commandBuffer, - const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets2( VkCommandBuffer commandBuffer, - const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo -) { - layer_vkCmdBindDescriptorSets2_default(commandBuffer, pBindDescriptorSetsInfo); -} + const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets2( VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets2KHR( VkCommandBuffer commandBuffer, - const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo -) { - layer_vkCmdBindDescriptorSets2KHR_default(commandBuffer, pBindDescriptorSetsInfo); -} + const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindDescriptorSets2KHR( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset, - VkIndexType indexType); + const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, - VkIndexType indexType -) { - layer_vkCmdBindIndexBuffer_default(commandBuffer, buffer, offset, indexType); -} + VkIndexType indexType) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer2_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, - VkDeviceSize size, VkIndexType indexType); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer2( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, - VkIndexType indexType -) { - layer_vkCmdBindIndexBuffer2_default(commandBuffer, buffer, offset, size, indexType); -} + VkIndexType indexType) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer2( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer2KHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, - VkIndexType indexType -) { - layer_vkCmdBindIndexBuffer2KHR_default(commandBuffer, buffer, offset, size, indexType); -} + VkIndexType indexType) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindPipeline_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindIndexBuffer2KHR( VkCommandBuffer commandBuffer, - VkPipelineBindPoint pipelineBindPoint, - VkPipeline pipeline); + VkBuffer buffer, + VkDeviceSize offset, + VkDeviceSize size, + VkIndexType indexType); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindPipeline( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, - VkPipeline pipeline -) { - layer_vkCmdBindPipeline_default(commandBuffer, pipelineBindPoint, pipeline); -} + VkPipeline pipeline) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindPipeline( + VkCommandBuffer commandBuffer, + VkPipelineBindPoint pipelineBindPoint, + VkPipeline pipeline); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindShadersEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindShadersEXT( VkCommandBuffer commandBuffer, uint32_t stageCount, const VkShaderStageFlagBits* pStages, - const VkShaderEXT* pShaders); + const VkShaderEXT* pShaders) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindShadersEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindShadersEXT( VkCommandBuffer commandBuffer, uint32_t stageCount, const VkShaderStageFlagBits* pStages, - const VkShaderEXT* pShaders -) { - layer_vkCmdBindShadersEXT_default(commandBuffer, stageCount, pStages, pShaders); -} + const VkShaderEXT* pShaders); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindTransformFeedbackBuffersEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, - const VkDeviceSize* pSizes); + const VkDeviceSize* pSizes) = delete; -/* Match-all template to use default implementation. 
*/ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindTransformFeedbackBuffersEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindTransformFeedbackBuffersEXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, - const VkDeviceSize* pSizes -) { - layer_vkCmdBindTransformFeedbackBuffersEXT_default(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes); -} + const VkDeviceSize* pSizes); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, - const VkDeviceSize* pOffsets); + const VkDeviceSize* pOffsets) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, - const VkDeviceSize* pOffsets -) { - layer_vkCmdBindVertexBuffers_default(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets); -} + const VkDeviceSize* pOffsets); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers2_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, - const VkDeviceSize* pStrides); + const VkDeviceSize* pStrides) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers2( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers2( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, - const VkDeviceSize* pStrides -) { - layer_vkCmdBindVertexBuffers2_default(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides); -} + const VkDeviceSize* pStrides); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers2EXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, - const VkDeviceSize* pStrides); + const VkDeviceSize* pStrides) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers2EXT( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBindVertexBuffers2EXT( VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, - const VkDeviceSize* pStrides -) { - layer_vkCmdBindVertexBuffers2EXT_default(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides); -} + const VkDeviceSize* pStrides); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, @@ -796,11 +717,11 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage_default( VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, - VkFilter filter); + VkFilter filter) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, @@ -808,472 +729,418 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage( VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, - VkFilter filter -) { - layer_vkCmdBlitImage_default(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter); -} + VkFilter filter); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage2_default( - VkCommandBuffer commandBuffer, - const VkBlitImageInfo2* pBlitImageInfo); - -/* Match-all template to use default implementation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage2( VkCommandBuffer commandBuffer, - const VkBlitImageInfo2* pBlitImageInfo -) { - layer_vkCmdBlitImage2_default(commandBuffer, pBlitImageInfo); -} + const VkBlitImageInfo2* pBlitImageInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage2( VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage2KHR( VkCommandBuffer commandBuffer, - const VkBlitImageInfo2* pBlitImageInfo -) { - layer_vkCmdBlitImage2KHR_default(commandBuffer, pBlitImageInfo); -} + const VkBlitImageInfo2* pBlitImageInfo) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBlitImage2KHR( + VkCommandBuffer commandBuffer, + const VkBlitImageInfo2* pBlitImageInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildAccelerationStructuresIndirectKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkDeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, - const uint32_t* const* ppMaxPrimitiveCounts); + const uint32_t* const* ppMaxPrimitiveCounts) = delete; -/* Match-all template to use default implementation. 
*/ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildAccelerationStructuresIndirectKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildAccelerationStructuresIndirectKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkDeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, - const uint32_t* const* ppMaxPrimitiveCounts -) { - layer_vkCmdBuildAccelerationStructuresIndirectKHR_default(commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts); -} + const uint32_t* const* ppMaxPrimitiveCounts); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildAccelerationStructuresKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, - const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildAccelerationStructuresKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildAccelerationStructuresKHR( VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, - const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos -) { - layer_vkCmdBuildAccelerationStructuresKHR_default(commandBuffer, infoCount, pInfos, ppBuildRangeInfos); -} + const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildMicromapsEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildMicromapsEXT( VkCommandBuffer commandBuffer, uint32_t infoCount, - const VkMicromapBuildInfoEXT* pInfos); + const VkMicromapBuildInfoEXT* pInfos) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildMicromapsEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdBuildMicromapsEXT( VkCommandBuffer commandBuffer, uint32_t infoCount, - const VkMicromapBuildInfoEXT* pInfos -) { - layer_vkCmdBuildMicromapsEXT_default(commandBuffer, infoCount, pInfos); -} + const VkMicromapBuildInfoEXT* pInfos); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearAttachments_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, - const VkClearRect* pRects); + const VkClearRect* pRects) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearAttachments( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearAttachments( VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, - const VkClearRect* pRects -) { - layer_vkCmdClearAttachments_default(commandBuffer, attachmentCount, pAttachments, rectCount, pRects); -} + const VkClearRect* pRects); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearColorImage_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, - const VkImageSubresourceRange* pRanges); + const VkImageSubresourceRange* pRanges) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearColorImage( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearColorImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, - const VkImageSubresourceRange* pRanges -) { - layer_vkCmdClearColorImage_default(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges); -} + const VkImageSubresourceRange* pRanges); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearDepthStencilImage_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, - const VkImageSubresourceRange* pRanges); + const VkImageSubresourceRange* pRanges) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearDepthStencilImage( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdClearDepthStencilImage( VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, - const VkImageSubresourceRange* pRanges -) { - layer_vkCmdClearDepthStencilImage_default(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges); -} + const VkImageSubresourceRange* pRanges); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyAccelerationStructureKHR_default( - VkCommandBuffer commandBuffer, - const VkCopyAccelerationStructureInfoKHR* pInfo); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, - const VkCopyAccelerationStructureInfoKHR* pInfo -) { - layer_vkCmdCopyAccelerationStructureKHR_default(commandBuffer, pInfo); -} + const VkCopyAccelerationStructureInfoKHR* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyAccelerationStructureToMemoryKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyAccelerationStructureKHR( VkCommandBuffer commandBuffer, - const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); + const VkCopyAccelerationStructureInfoKHR* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyAccelerationStructureToMemoryKHR( VkCommandBuffer commandBuffer, - const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo -) { - layer_vkCmdCopyAccelerationStructureToMemoryKHR_default(commandBuffer, pInfo); -} + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyAccelerationStructureToMemoryKHR( + VkCommandBuffer commandBuffer, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, - const VkBufferCopy* pRegions); + const VkBufferCopy* pRegions) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, - const VkBufferCopy* pRegions -) { - layer_vkCmdCopyBuffer_default(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions); -} + const VkBufferCopy* pRegions); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer2_default( - VkCommandBuffer commandBuffer, - const VkCopyBufferInfo2* pCopyBufferInfo); - -/* Match-all template to use default implementation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, - const VkCopyBufferInfo2* pCopyBufferInfo -) { - layer_vkCmdCopyBuffer2_default(commandBuffer, pCopyBufferInfo); -} + const VkCopyBufferInfo2* pCopyBufferInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer2( VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer2KHR( VkCommandBuffer commandBuffer, - const VkCopyBufferInfo2* pCopyBufferInfo -) { - layer_vkCmdCopyBuffer2KHR_default(commandBuffer, pCopyBufferInfo); -} + const VkCopyBufferInfo2* pCopyBufferInfo) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBuffer2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferInfo2* pCopyBufferInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, - const VkBufferImageCopy* pRegions); + const VkBufferImageCopy* pRegions) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage( VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, - const VkBufferImageCopy* pRegions -) { - layer_vkCmdCopyBufferToImage_default(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions); -} + const VkBufferImageCopy* pRegions); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage2_default( - VkCommandBuffer commandBuffer, - const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, - const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo -) { - layer_vkCmdCopyBufferToImage2_default(commandBuffer, pCopyBufferToImageInfo); -} + const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage2( VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage2KHR( VkCommandBuffer commandBuffer, - const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo -) { - layer_vkCmdCopyBufferToImage2KHR_default(commandBuffer, pCopyBufferToImageInfo); -} + const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo) = delete; + +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyBufferToImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, - const VkImageCopy* pRegions); + const VkImageCopy* pRegions) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, - const VkImageCopy* pRegions -) { - layer_vkCmdCopyImage_default(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); -} + const VkImageCopy* pRegions); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage2_default( - VkCommandBuffer commandBuffer, - const VkCopyImageInfo2* pCopyImageInfo); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage2( VkCommandBuffer commandBuffer, - const VkCopyImageInfo2* pCopyImageInfo -) { - layer_vkCmdCopyImage2_default(commandBuffer, pCopyImageInfo); -} + const VkCopyImageInfo2* pCopyImageInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage2KHR_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage2( VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage2KHR( VkCommandBuffer commandBuffer, - const VkCopyImageInfo2* pCopyImageInfo -) { - layer_vkCmdCopyImage2KHR_default(commandBuffer, pCopyImageInfo); -} + const VkCopyImageInfo2* pCopyImageInfo) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImage2KHR( + VkCommandBuffer commandBuffer, + const VkCopyImageInfo2* pCopyImageInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, - const VkBufferImageCopy* pRegions); + const VkBufferImageCopy* pRegions) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, - const VkBufferImageCopy* pRegions -) { - layer_vkCmdCopyImageToBuffer_default(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions); -} + const VkBufferImageCopy* pRegions); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer2_default( - VkCommandBuffer commandBuffer, - const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, - const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo -) { - layer_vkCmdCopyImageToBuffer2_default(commandBuffer, pCopyImageToBufferInfo); -} + const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer2( VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, - const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo -) { - layer_vkCmdCopyImageToBuffer2KHR_default(commandBuffer, pCopyImageToBufferInfo); -} + const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMemoryToAccelerationStructureKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyImageToBuffer2KHR( VkCommandBuffer commandBuffer, - const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); + const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, - const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo -) { - layer_vkCmdCopyMemoryToAccelerationStructureKHR_default(commandBuffer, pInfo); -} + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMemoryToMicromapEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMemoryToAccelerationStructureKHR( VkCommandBuffer commandBuffer, - const VkCopyMemoryToMicromapInfoEXT* pInfo); + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMemoryToMicromapEXT( VkCommandBuffer commandBuffer, - const VkCopyMemoryToMicromapInfoEXT* pInfo -) { - layer_vkCmdCopyMemoryToMicromapEXT_default(commandBuffer, pInfo); -} + const VkCopyMemoryToMicromapInfoEXT* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMicromapEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMemoryToMicromapEXT( VkCommandBuffer commandBuffer, - const VkCopyMicromapInfoEXT* pInfo); + const VkCopyMemoryToMicromapInfoEXT* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMicromapEXT( VkCommandBuffer commandBuffer, - const VkCopyMicromapInfoEXT* pInfo -) { - layer_vkCmdCopyMicromapEXT_default(commandBuffer, pInfo); -} + const VkCopyMicromapInfoEXT* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMicromapToMemoryEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMicromapEXT( VkCommandBuffer commandBuffer, - const VkCopyMicromapToMemoryInfoEXT* pInfo); + const VkCopyMicromapInfoEXT* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMicromapToMemoryEXT( VkCommandBuffer commandBuffer, - const VkCopyMicromapToMemoryInfoEXT* pInfo -) { - layer_vkCmdCopyMicromapToMemoryEXT_default(commandBuffer, pInfo); -} + const VkCopyMicromapToMemoryInfoEXT* pInfo) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyMicromapToMemoryEXT( + VkCommandBuffer commandBuffer, + const VkCopyMicromapToMemoryInfoEXT* pInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyQueryPoolResults_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, @@ -1281,11 +1148,11 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyQueryPoolResults_default( VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, - VkQueryResultFlags flags); + VkQueryResultFlags flags) = delete; -/* Match-all template to use default implementation. 
*/ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyQueryPoolResults( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyQueryPoolResults( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, @@ -1293,100 +1160,77 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyQueryPoolResults( VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, - VkQueryResultFlags flags -) { - layer_vkCmdCopyQueryPoolResults_default(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags); -} + VkQueryResultFlags flags); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyTensorARM_default( - VkCommandBuffer commandBuffer, - const VkCopyTensorInfoARM* pCopyTensorInfo); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyTensorARM( VkCommandBuffer commandBuffer, - const VkCopyTensorInfoARM* pCopyTensorInfo -) { - layer_vkCmdCopyTensorARM_default(commandBuffer, pCopyTensorInfo); -} + const VkCopyTensorInfoARM* pCopyTensorInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerBeginEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdCopyTensorARM( VkCommandBuffer commandBuffer, - const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); + const VkCopyTensorInfoARM* pCopyTensorInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerBeginEXT( VkCommandBuffer commandBuffer, - const VkDebugMarkerMarkerInfoEXT* pMarkerInfo -) { - layer_vkCmdDebugMarkerBeginEXT_default(commandBuffer, pMarkerInfo); -} + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerEndEXT_default( - VkCommandBuffer commandBuffer); +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerBeginEXT( + VkCommandBuffer commandBuffer, + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerEndEXT( - VkCommandBuffer commandBuffer -) { - layer_vkCmdDebugMarkerEndEXT_default(commandBuffer); -} + VkCommandBuffer commandBuffer) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerInsertEXT_default( - VkCommandBuffer commandBuffer, - const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerEndEXT( + VkCommandBuffer commandBuffer); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, - const VkDebugMarkerMarkerInfoEXT* pMarkerInfo -) { - layer_vkCmdDebugMarkerInsertEXT_default(commandBuffer, pMarkerInfo); -} + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatch_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDebugMarkerInsertEXT( VkCommandBuffer commandBuffer, - uint32_t groupCountX, - uint32_t groupCountY, - uint32_t groupCountZ); + const VkDebugMarkerMarkerInfoEXT* pMarkerInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatch( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, - uint32_t groupCountZ -) { - layer_vkCmdDispatch_default(commandBuffer, groupCountX, groupCountY, groupCountZ); -} + uint32_t groupCountZ) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchBase_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatch( VkCommandBuffer commandBuffer, - uint32_t baseGroupX, - uint32_t baseGroupY, - uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchBase( VkCommandBuffer commandBuffer, @@ -1395,14 +1239,11 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchBase( uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, - uint32_t groupCountZ -) { - layer_vkCmdDispatchBase_default(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ); -} + uint32_t groupCountZ) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchBaseKHR_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchBase( VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, @@ -1411,7 +1252,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchBaseKHR_default( uint32_t groupCountY, uint32_t groupCountZ); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, @@ -1420,641 +1262,584 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchBaseKHR( uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, - uint32_t groupCountZ -) { - layer_vkCmdDispatchBaseKHR_default(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ); -} + uint32_t groupCountZ) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchIndirect_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchBaseKHR( VkCommandBuffer commandBuffer, - VkBuffer buffer, - VkDeviceSize offset); + uint32_t baseGroupX, + uint32_t baseGroupY, + uint32_t baseGroupZ, + uint32_t groupCountX, + uint32_t groupCountY, + uint32_t groupCountZ); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, - VkDeviceSize offset -) { - layer_vkCmdDispatchIndirect_default(commandBuffer, buffer, offset); -} + VkDeviceSize offset) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDispatchIndirect( + VkCommandBuffer commandBuffer, + VkBuffer buffer, + VkDeviceSize offset); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDraw_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, - uint32_t firstInstance); + uint32_t firstInstance) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDraw( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDraw( VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, - uint32_t firstInstance -) { - layer_vkCmdDraw_default(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance); -} + uint32_t firstInstance); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexed_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexed( VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, - uint32_t firstInstance); + uint32_t firstInstance) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexed( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexed( VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, - uint32_t firstInstance -) { - layer_vkCmdDrawIndexed_default(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance); -} + uint32_t firstInstance); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirect_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, - uint32_t stride); + uint32_t stride) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirect( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, - uint32_t stride -) { - layer_vkCmdDrawIndexedIndirect_default(commandBuffer, buffer, offset, drawCount, stride); -} + uint32_t stride); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirectCount_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, - uint32_t stride); + uint32_t stride) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirectCount( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, - uint32_t stride -) { - layer_vkCmdDrawIndexedIndirectCount_default(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); -} + uint32_t stride); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirectCountKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, - uint32_t stride); + uint32_t stride) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirectCountKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndexedIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, - uint32_t stride -) { - layer_vkCmdDrawIndexedIndirectCountKHR_default(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); -} + uint32_t stride); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirect_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, - uint32_t stride); + uint32_t stride) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirect( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirect( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, - uint32_t stride -) { - layer_vkCmdDrawIndirect_default(commandBuffer, buffer, offset, drawCount, stride); -} + uint32_t stride); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectByteCountEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, - uint32_t vertexStride); + uint32_t vertexStride) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectByteCountEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectByteCountEXT( VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, - uint32_t vertexStride -) { - layer_vkCmdDrawIndirectByteCountEXT_default(commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride); -} + uint32_t vertexStride); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectCount_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, - uint32_t stride); + uint32_t stride) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectCount( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectCount( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, - uint32_t stride -) { - layer_vkCmdDrawIndirectCount_default(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); -} + uint32_t stride); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectCountKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, - uint32_t stride); + uint32_t stride) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectCountKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawIndirectCountKHR( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, - uint32_t stride -) { - layer_vkCmdDrawIndirectCountKHR_default(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); -} + uint32_t stride); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksEXT( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, - uint32_t groupCountZ); + uint32_t groupCountZ) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksEXT( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksEXT( VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, - uint32_t groupCountZ -) { - layer_vkCmdDrawMeshTasksEXT_default(commandBuffer, groupCountX, groupCountY, groupCountZ); -} + uint32_t groupCountZ); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksIndirectCountEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksIndirectCountEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, - uint32_t stride); + uint32_t stride) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksIndirectCountEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksIndirectCountEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, - uint32_t stride -) { - layer_vkCmdDrawMeshTasksIndirectCountEXT_default(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); -} + uint32_t stride); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksIndirectEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksIndirectEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, - uint32_t stride); + uint32_t stride) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksIndirectEXT( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMeshTasksIndirectEXT( VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, - uint32_t stride -) { - layer_vkCmdDrawMeshTasksIndirectEXT_default(commandBuffer, buffer, offset, drawCount, stride); -} + uint32_t stride); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMultiEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMultiEXT( VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawInfoEXT* pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, - uint32_t stride); + uint32_t stride) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMultiEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMultiEXT( VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawInfoEXT* pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, - uint32_t stride -) { - layer_vkCmdDrawMultiEXT_default(commandBuffer, drawCount, pVertexInfo, instanceCount, firstInstance, stride); -} + uint32_t stride); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMultiIndexedEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMultiIndexedEXT( VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawIndexedInfoEXT* pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, - const int32_t* pVertexOffset); + const int32_t* pVertexOffset) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMultiIndexedEXT( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdDrawMultiIndexedEXT( VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawIndexedInfoEXT* pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, - const int32_t* pVertexOffset -) { - layer_vkCmdDrawMultiIndexedEXT_default(commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset); -} + const int32_t* pVertexOffset); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndConditionalRenderingEXT_default( - VkCommandBuffer commandBuffer); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndConditionalRenderingEXT( - VkCommandBuffer commandBuffer -) { - layer_vkCmdEndConditionalRenderingEXT_default(commandBuffer); -} + VkCommandBuffer commandBuffer) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndDebugUtilsLabelEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndConditionalRenderingEXT( VkCommandBuffer commandBuffer); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndDebugUtilsLabelEXT( - VkCommandBuffer commandBuffer -) { - layer_vkCmdEndDebugUtilsLabelEXT_default(commandBuffer); -} + VkCommandBuffer commandBuffer) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndDebugUtilsLabelEXT( + VkCommandBuffer commandBuffer); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndQuery_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, - uint32_t query); + uint32_t query) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndQuery( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndQuery( VkCommandBuffer commandBuffer, VkQueryPool queryPool, - uint32_t query -) { - layer_vkCmdEndQuery_default(commandBuffer, queryPool, query); -} + uint32_t query); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndQueryIndexedEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, - uint32_t index); + uint32_t index) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndQueryIndexedEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndQueryIndexedEXT( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, - uint32_t index -) { - layer_vkCmdEndQueryIndexedEXT_default(commandBuffer, queryPool, query, index); -} + uint32_t index); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass_default( - VkCommandBuffer commandBuffer); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass( - VkCommandBuffer commandBuffer -) { - layer_vkCmdEndRenderPass_default(commandBuffer); -} + VkCommandBuffer commandBuffer) = delete; -/* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass2_default( - VkCommandBuffer commandBuffer, - const VkSubpassEndInfo* pSubpassEndInfo); +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass( + VkCommandBuffer commandBuffer); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, - const VkSubpassEndInfo* pSubpassEndInfo -) { - layer_vkCmdEndRenderPass2_default(commandBuffer, pSubpassEndInfo); -} + const VkSubpassEndInfo* pSubpassEndInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass2( VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass2KHR( VkCommandBuffer commandBuffer, - const VkSubpassEndInfo* pSubpassEndInfo -) { - layer_vkCmdEndRenderPass2KHR_default(commandBuffer, pSubpassEndInfo); -} + const VkSubpassEndInfo* pSubpassEndInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRendering_default( - VkCommandBuffer commandBuffer); +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderPass2KHR( + VkCommandBuffer commandBuffer, + const VkSubpassEndInfo* pSubpassEndInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRendering( - VkCommandBuffer commandBuffer -) { - layer_vkCmdEndRendering_default(commandBuffer); -} + VkCommandBuffer commandBuffer) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRendering2EXT_default( - VkCommandBuffer commandBuffer, - const VkRenderingEndInfoEXT* pRenderingEndInfo); +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRendering( + VkCommandBuffer commandBuffer); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRendering2EXT( VkCommandBuffer commandBuffer, - const VkRenderingEndInfoEXT* pRenderingEndInfo -) { - layer_vkCmdEndRendering2EXT_default(commandBuffer, pRenderingEndInfo); -} + const VkRenderingEndInfoEXT* pRenderingEndInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderingKHR_default( - VkCommandBuffer commandBuffer); +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRendering2EXT( + VkCommandBuffer commandBuffer, + const VkRenderingEndInfoEXT* pRenderingEndInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderingKHR( - VkCommandBuffer commandBuffer -) { - layer_vkCmdEndRenderingKHR_default(commandBuffer); -} + VkCommandBuffer commandBuffer) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndRenderingKHR( + VkCommandBuffer commandBuffer); /* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndTransformFeedbackEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, - const VkDeviceSize* pCounterBufferOffsets); + const VkDeviceSize* pCounterBufferOffsets) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndTransformFeedbackEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdEndTransformFeedbackEXT( VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, - const VkDeviceSize* pCounterBufferOffsets -) { - layer_vkCmdEndTransformFeedbackEXT_default(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets); -} + const VkDeviceSize* pCounterBufferOffsets); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdExecuteCommands_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, - const VkCommandBuffer* pCommandBuffers); + const VkCommandBuffer* pCommandBuffers) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdExecuteCommands( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdExecuteCommands( VkCommandBuffer commandBuffer, uint32_t commandBufferCount, - const VkCommandBuffer* pCommandBuffers -) { - layer_vkCmdExecuteCommands_default(commandBuffer, commandBufferCount, pCommandBuffers); -} + const VkCommandBuffer* pCommandBuffers); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdExecuteGeneratedCommandsEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdExecuteGeneratedCommandsEXT( VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, - const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo); + const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdExecuteGeneratedCommandsEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdExecuteGeneratedCommandsEXT( VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, - const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo -) { - layer_vkCmdExecuteGeneratedCommandsEXT_default(commandBuffer, isPreprocessed, pGeneratedCommandsInfo); -} + const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdFillBuffer_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, - uint32_t data); + uint32_t data) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdFillBuffer( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdFillBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, - uint32_t data -) { - layer_vkCmdFillBuffer_default(commandBuffer, dstBuffer, dstOffset, size, data); -} + uint32_t data); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdInsertDebugUtilsLabelEXT_default( - VkCommandBuffer commandBuffer, - const VkDebugUtilsLabelEXT* pLabelInfo); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, - const VkDebugUtilsLabelEXT* pLabelInfo -) { - layer_vkCmdInsertDebugUtilsLabelEXT_default(commandBuffer, pLabelInfo); -} + const VkDebugUtilsLabelEXT* pLabelInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdInsertDebugUtilsLabelEXT( VkCommandBuffer commandBuffer, - VkSubpassContents contents); + const VkDebugUtilsLabelEXT* pLabelInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass( VkCommandBuffer commandBuffer, - VkSubpassContents contents -) { - layer_vkCmdNextSubpass_default(commandBuffer, contents); -} + VkSubpassContents contents) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass2_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass( VkCommandBuffer commandBuffer, - const VkSubpassBeginInfo* pSubpassBeginInfo, - const VkSubpassEndInfo* pSubpassEndInfo); + VkSubpassContents contents); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass2( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, - const VkSubpassEndInfo* pSubpassEndInfo -) { - layer_vkCmdNextSubpass2_default(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo); -} + const VkSubpassEndInfo* pSubpassEndInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass2( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass2KHR( VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, - const VkSubpassEndInfo* pSubpassEndInfo -) { - layer_vkCmdNextSubpass2KHR_default(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo); -} + const VkSubpassEndInfo* pSubpassEndInfo) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdNextSubpass2KHR( + VkCommandBuffer commandBuffer, + const VkSubpassBeginInfo* pSubpassBeginInfo, + const VkSubpassEndInfo* pSubpassEndInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, @@ -2064,11 +1849,11 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier_default( uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier* pImageMemoryBarriers); + const VkImageMemoryBarrier* pImageMemoryBarriers) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier( VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, @@ -2078,1693 +1863,1475 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier( uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier* pImageMemoryBarriers -) { - layer_vkCmdPipelineBarrier_default(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); -} + const VkImageMemoryBarrier* pImageMemoryBarriers); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier2_default( - VkCommandBuffer commandBuffer, - const VkDependencyInfo* pDependencyInfo); - -/* Match-all template to use default implementation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, - const VkDependencyInfo* pDependencyInfo -) { - layer_vkCmdPipelineBarrier2_default(commandBuffer, pDependencyInfo); -} + const VkDependencyInfo* pDependencyInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier2( VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, - const VkDependencyInfo* pDependencyInfo -) { - layer_vkCmdPipelineBarrier2KHR_default(commandBuffer, pDependencyInfo); -} + const VkDependencyInfo* pDependencyInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPreprocessGeneratedCommandsEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPipelineBarrier2KHR( VkCommandBuffer commandBuffer, - const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo, - VkCommandBuffer stateCommandBuffer); + const VkDependencyInfo* pDependencyInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdPreprocessGeneratedCommandsEXT( VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo, - VkCommandBuffer stateCommandBuffer -) { - layer_vkCmdPreprocessGeneratedCommandsEXT_default(commandBuffer, pGeneratedCommandsInfo, stateCommandBuffer); -} + VkCommandBuffer stateCommandBuffer) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPreprocessGeneratedCommandsEXT( + VkCommandBuffer commandBuffer, + const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo, + VkCommandBuffer stateCommandBuffer); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, - const void* pValues); + const void* pValues) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants( VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, - const void* pValues -) { - layer_vkCmdPushConstants_default(commandBuffer, layout, stageFlags, offset, size, pValues); -} + const void* pValues); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants2_default( - VkCommandBuffer commandBuffer, - const VkPushConstantsInfo* pPushConstantsInfo); - -/* Match-all template to use default implementation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants2( VkCommandBuffer commandBuffer, - const VkPushConstantsInfo* pPushConstantsInfo -) { - layer_vkCmdPushConstants2_default(commandBuffer, pPushConstantsInfo); -} + const VkPushConstantsInfo* pPushConstantsInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants2( VkCommandBuffer commandBuffer, const VkPushConstantsInfo* pPushConstantsInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants2KHR( VkCommandBuffer commandBuffer, - const VkPushConstantsInfo* pPushConstantsInfo -) { - layer_vkCmdPushConstants2KHR_default(commandBuffer, pPushConstantsInfo); -} + const VkPushConstantsInfo* pPushConstantsInfo) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushConstants2KHR( + VkCommandBuffer commandBuffer, + const VkPushConstantsInfo* pPushConstantsInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, - const VkWriteDescriptorSet* pDescriptorWrites); + const VkWriteDescriptorSet* pDescriptorWrites) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, - const VkWriteDescriptorSet* pDescriptorWrites -) { - layer_vkCmdPushDescriptorSet_default(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites); -} + const VkWriteDescriptorSet* pDescriptorWrites); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet2_default( - VkCommandBuffer commandBuffer, - const VkPushDescriptorSetInfo* pPushDescriptorSetInfo); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet2( VkCommandBuffer commandBuffer, - const VkPushDescriptorSetInfo* pPushDescriptorSetInfo -) { - layer_vkCmdPushDescriptorSet2_default(commandBuffer, pPushDescriptorSetInfo); -} + const VkPushDescriptorSetInfo* pPushDescriptorSetInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet2( VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfo* pPushDescriptorSetInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet2KHR( VkCommandBuffer commandBuffer, - const VkPushDescriptorSetInfo* pPushDescriptorSetInfo -) { - layer_vkCmdPushDescriptorSet2KHR_default(commandBuffer, pPushDescriptorSetInfo); -} + const VkPushDescriptorSetInfo* pPushDescriptorSetInfo) = delete; + +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSet2KHR( + VkCommandBuffer commandBuffer, + const VkPushDescriptorSetInfo* pPushDescriptorSetInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, - const VkWriteDescriptorSet* pDescriptorWrites); + const VkWriteDescriptorSet* pDescriptorWrites) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetKHR( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, - const VkWriteDescriptorSet* pDescriptorWrites -) { - layer_vkCmdPushDescriptorSetKHR_default(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites); -} + const VkWriteDescriptorSet* pDescriptorWrites); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, - const void* pData); + const void* pData) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, - const void* pData -) { - layer_vkCmdPushDescriptorSetWithTemplate_default(commandBuffer, descriptorUpdateTemplate, layout, set, pData); -} + const void* pData); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate2_default( - VkCommandBuffer commandBuffer, - const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate2( VkCommandBuffer commandBuffer, - const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo -) { - layer_vkCmdPushDescriptorSetWithTemplate2_default(commandBuffer, pPushDescriptorSetWithTemplateInfo); -} + const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate2( VkCommandBuffer commandBuffer, const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate2KHR( VkCommandBuffer commandBuffer, - const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo -) { - layer_vkCmdPushDescriptorSetWithTemplate2KHR_default(commandBuffer, pPushDescriptorSetWithTemplateInfo); -} + const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplate2KHR( + VkCommandBuffer commandBuffer, + const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplateKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, - const void* pData); + const void* pData) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplateKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdPushDescriptorSetWithTemplateKHR( VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, - const void* pData -) { - layer_vkCmdPushDescriptorSetWithTemplateKHR_default(commandBuffer, descriptorUpdateTemplate, layout, set, pData); -} + const void* pData); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent_default( - VkCommandBuffer commandBuffer, - VkEvent event, - VkPipelineStageFlags stageMask); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, - VkPipelineStageFlags stageMask -) { - layer_vkCmdResetEvent_default(commandBuffer, event, stageMask); -} + VkPipelineStageFlags stageMask) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent2_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent( VkCommandBuffer commandBuffer, VkEvent event, - VkPipelineStageFlags2 stageMask); + VkPipelineStageFlags stageMask); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, - VkPipelineStageFlags2 stageMask -) { - layer_vkCmdResetEvent2_default(commandBuffer, event, stageMask); -} + VkPipelineStageFlags2 stageMask) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent2( VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, - VkPipelineStageFlags2 stageMask -) { - layer_vkCmdResetEvent2KHR_default(commandBuffer, event, stageMask); -} + VkPipelineStageFlags2 stageMask) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetEvent2KHR( + VkCommandBuffer commandBuffer, + VkEvent event, + VkPipelineStageFlags2 stageMask); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetQueryPool_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, - uint32_t queryCount); + uint32_t queryCount) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetQueryPool( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdResetQueryPool( VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, - uint32_t queryCount -) { - layer_vkCmdResetQueryPool_default(commandBuffer, queryPool, firstQuery, queryCount); -} + uint32_t queryCount); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, - const VkImageResolve* pRegions); + const VkImageResolve* pRegions) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage( VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, - const VkImageResolve* pRegions -) { - layer_vkCmdResolveImage_default(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); -} + const VkImageResolve* pRegions); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage2_default( - VkCommandBuffer commandBuffer, - const VkResolveImageInfo2* pResolveImageInfo); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage2( VkCommandBuffer commandBuffer, - const VkResolveImageInfo2* pResolveImageInfo -) { - layer_vkCmdResolveImage2_default(commandBuffer, pResolveImageInfo); -} + const VkResolveImageInfo2* pResolveImageInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage2( VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, - const VkResolveImageInfo2* pResolveImageInfo -) { - layer_vkCmdResolveImage2KHR_default(commandBuffer, pResolveImageInfo); -} + const VkResolveImageInfo2* pResolveImageInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAlphaToCoverageEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdResolveImage2KHR( VkCommandBuffer commandBuffer, - VkBool32 alphaToCoverageEnable); + const VkResolveImageInfo2* pResolveImageInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAlphaToCoverageEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 alphaToCoverageEnable -) { - layer_vkCmdSetAlphaToCoverageEnableEXT_default(commandBuffer, alphaToCoverageEnable); -} + VkBool32 alphaToCoverageEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAlphaToOneEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAlphaToCoverageEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 alphaToOneEnable); + VkBool32 alphaToCoverageEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAlphaToOneEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 alphaToOneEnable -) { - layer_vkCmdSetAlphaToOneEnableEXT_default(commandBuffer, alphaToOneEnable); -} + VkBool32 alphaToOneEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAttachmentFeedbackLoopEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAlphaToOneEnableEXT( VkCommandBuffer commandBuffer, - VkImageAspectFlags aspectMask); + VkBool32 alphaToOneEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAttachmentFeedbackLoopEnableEXT( VkCommandBuffer commandBuffer, - VkImageAspectFlags aspectMask -) { - layer_vkCmdSetAttachmentFeedbackLoopEnableEXT_default(commandBuffer, aspectMask); -} + VkImageAspectFlags aspectMask) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetBlendConstants_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetAttachmentFeedbackLoopEnableEXT( VkCommandBuffer commandBuffer, - const float blendConstants[4]); + VkImageAspectFlags aspectMask); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, - const float blendConstants[4] -) { - layer_vkCmdSetBlendConstants_default(commandBuffer, blendConstants); -} + const float blendConstants[4]) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendAdvancedEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetBlendConstants( VkCommandBuffer commandBuffer, - uint32_t firstAttachment, - uint32_t attachmentCount, - const VkColorBlendAdvancedEXT* pColorBlendAdvanced); + const float blendConstants[4]); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendAdvancedEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, - const VkColorBlendAdvancedEXT* pColorBlendAdvanced -) { - layer_vkCmdSetColorBlendAdvancedEXT_default(commandBuffer, firstAttachment, attachmentCount, pColorBlendAdvanced); -} + const VkColorBlendAdvancedEXT* pColorBlendAdvanced) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendAdvancedEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, - const VkBool32* pColorBlendEnables); + const VkColorBlendAdvancedEXT* pColorBlendAdvanced); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendEnableEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, - const VkBool32* pColorBlendEnables -) { - layer_vkCmdSetColorBlendEnableEXT_default(commandBuffer, firstAttachment, attachmentCount, pColorBlendEnables); -} + const VkBool32* pColorBlendEnables) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendEquationEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendEnableEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, - const VkColorBlendEquationEXT* pColorBlendEquations); + const VkBool32* pColorBlendEnables); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendEquationEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, - const VkColorBlendEquationEXT* pColorBlendEquations -) { - layer_vkCmdSetColorBlendEquationEXT_default(commandBuffer, firstAttachment, attachmentCount, pColorBlendEquations); -} + const VkColorBlendEquationEXT* pColorBlendEquations) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorWriteEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorBlendEquationEXT( VkCommandBuffer commandBuffer, + uint32_t firstAttachment, uint32_t attachmentCount, - const VkBool32* pColorWriteEnables); + const VkColorBlendEquationEXT* pColorBlendEquations); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, uint32_t attachmentCount, - const VkBool32* pColorWriteEnables -) { - layer_vkCmdSetColorWriteEnableEXT_default(commandBuffer, attachmentCount, pColorWriteEnables); -} + const VkBool32* pColorWriteEnables) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorWriteMaskEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorWriteEnableEXT( VkCommandBuffer commandBuffer, - uint32_t firstAttachment, uint32_t attachmentCount, - const VkColorComponentFlags* pColorWriteMasks); + const VkBool32* pColorWriteEnables); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorWriteMaskEXT( VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, - const VkColorComponentFlags* pColorWriteMasks -) { - layer_vkCmdSetColorWriteMaskEXT_default(commandBuffer, firstAttachment, attachmentCount, pColorWriteMasks); -} + const VkColorComponentFlags* pColorWriteMasks) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetConservativeRasterizationModeEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetColorWriteMaskEXT( VkCommandBuffer commandBuffer, - VkConservativeRasterizationModeEXT conservativeRasterizationMode); + uint32_t firstAttachment, + uint32_t attachmentCount, + const VkColorComponentFlags* pColorWriteMasks); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetConservativeRasterizationModeEXT( VkCommandBuffer commandBuffer, - VkConservativeRasterizationModeEXT conservativeRasterizationMode -) { - layer_vkCmdSetConservativeRasterizationModeEXT_default(commandBuffer, conservativeRasterizationMode); -} + VkConservativeRasterizationModeEXT conservativeRasterizationMode) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationModeNV_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetConservativeRasterizationModeEXT( VkCommandBuffer commandBuffer, - VkCoverageModulationModeNV coverageModulationMode); + VkConservativeRasterizationModeEXT conservativeRasterizationMode); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationModeNV( VkCommandBuffer commandBuffer, - VkCoverageModulationModeNV coverageModulationMode -) { - layer_vkCmdSetCoverageModulationModeNV_default(commandBuffer, coverageModulationMode); -} + VkCoverageModulationModeNV coverageModulationMode) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationTableEnableNV_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationModeNV( VkCommandBuffer commandBuffer, - VkBool32 coverageModulationTableEnable); + VkCoverageModulationModeNV coverageModulationMode); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationTableEnableNV( VkCommandBuffer commandBuffer, - VkBool32 coverageModulationTableEnable -) { - layer_vkCmdSetCoverageModulationTableEnableNV_default(commandBuffer, coverageModulationTableEnable); -} + VkBool32 coverageModulationTableEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationTableNV_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationTableEnableNV( VkCommandBuffer commandBuffer, - uint32_t coverageModulationTableCount, - const float* pCoverageModulationTable); + VkBool32 coverageModulationTableEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationTableNV( VkCommandBuffer commandBuffer, uint32_t coverageModulationTableCount, - const float* pCoverageModulationTable -) { - layer_vkCmdSetCoverageModulationTableNV_default(commandBuffer, coverageModulationTableCount, pCoverageModulationTable); -} + const float* pCoverageModulationTable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageReductionModeNV_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageModulationTableNV( VkCommandBuffer commandBuffer, - VkCoverageReductionModeNV coverageReductionMode); + uint32_t coverageModulationTableCount, + const float* pCoverageModulationTable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageReductionModeNV( VkCommandBuffer commandBuffer, - VkCoverageReductionModeNV coverageReductionMode -) { - layer_vkCmdSetCoverageReductionModeNV_default(commandBuffer, coverageReductionMode); -} + VkCoverageReductionModeNV coverageReductionMode) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageToColorEnableNV_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageReductionModeNV( VkCommandBuffer commandBuffer, - VkBool32 coverageToColorEnable); + VkCoverageReductionModeNV coverageReductionMode); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageToColorEnableNV( VkCommandBuffer commandBuffer, - VkBool32 coverageToColorEnable -) { - layer_vkCmdSetCoverageToColorEnableNV_default(commandBuffer, coverageToColorEnable); -} + VkBool32 coverageToColorEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageToColorLocationNV_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageToColorEnableNV( VkCommandBuffer commandBuffer, - uint32_t coverageToColorLocation); + VkBool32 coverageToColorEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageToColorLocationNV( VkCommandBuffer commandBuffer, - uint32_t coverageToColorLocation -) { - layer_vkCmdSetCoverageToColorLocationNV_default(commandBuffer, coverageToColorLocation); -} + uint32_t coverageToColorLocation) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCullMode_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCoverageToColorLocationNV( VkCommandBuffer commandBuffer, - VkCullModeFlags cullMode); + uint32_t coverageToColorLocation); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCullMode( VkCommandBuffer commandBuffer, - VkCullModeFlags cullMode -) { - layer_vkCmdSetCullMode_default(commandBuffer, cullMode); -} + VkCullModeFlags cullMode) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCullModeEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCullMode( VkCommandBuffer commandBuffer, VkCullModeFlags cullMode); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, - VkCullModeFlags cullMode -) { - layer_vkCmdSetCullModeEXT_default(commandBuffer, cullMode); -} + VkCullModeFlags cullMode) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBias_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetCullModeEXT( VkCommandBuffer commandBuffer, - float depthBiasConstantFactor, - float depthBiasClamp, - float depthBiasSlopeFactor); + VkCullModeFlags cullMode); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBias( VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, - float depthBiasSlopeFactor -) { - layer_vkCmdSetDepthBias_default(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor); -} + float depthBiasSlopeFactor) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBias2EXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBias( VkCommandBuffer commandBuffer, - const VkDepthBiasInfoEXT* pDepthBiasInfo); + float depthBiasConstantFactor, + float depthBiasClamp, + float depthBiasSlopeFactor); -/* Match-all template to use default implementation. 
*/ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBias2EXT( VkCommandBuffer commandBuffer, - const VkDepthBiasInfoEXT* pDepthBiasInfo -) { - layer_vkCmdSetDepthBias2EXT_default(commandBuffer, pDepthBiasInfo); -} + const VkDepthBiasInfoEXT* pDepthBiasInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBiasEnable_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBias2EXT( VkCommandBuffer commandBuffer, - VkBool32 depthBiasEnable); + const VkDepthBiasInfoEXT* pDepthBiasInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, - VkBool32 depthBiasEnable -) { - layer_vkCmdSetDepthBiasEnable_default(commandBuffer, depthBiasEnable); -} + VkBool32 depthBiasEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBiasEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBiasEnable( VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 depthBiasEnable -) { - layer_vkCmdSetDepthBiasEnableEXT_default(commandBuffer, depthBiasEnable); -} + VkBool32 depthBiasEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBounds_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBiasEnableEXT( VkCommandBuffer commandBuffer, - float minDepthBounds, - float maxDepthBounds); + VkBool32 depthBiasEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, float minDepthBounds, - float maxDepthBounds -) { - layer_vkCmdSetDepthBounds_default(commandBuffer, minDepthBounds, maxDepthBounds); -} + float maxDepthBounds) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBoundsTestEnable_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBounds( VkCommandBuffer commandBuffer, - VkBool32 depthBoundsTestEnable); + float minDepthBounds, + float maxDepthBounds); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, - VkBool32 depthBoundsTestEnable -) { - layer_vkCmdSetDepthBoundsTestEnable_default(commandBuffer, depthBoundsTestEnable); -} + VkBool32 depthBoundsTestEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBoundsTestEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBoundsTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 depthBoundsTestEnable -) { - layer_vkCmdSetDepthBoundsTestEnableEXT_default(commandBuffer, depthBoundsTestEnable); -} + VkBool32 depthBoundsTestEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClampEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthBoundsTestEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 depthClampEnable); + VkBool32 depthBoundsTestEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClampEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 depthClampEnable -) { - layer_vkCmdSetDepthClampEnableEXT_default(commandBuffer, depthClampEnable); -} + VkBool32 depthClampEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClampRangeEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClampEnableEXT( VkCommandBuffer commandBuffer, - VkDepthClampModeEXT depthClampMode, - const VkDepthClampRangeEXT* pDepthClampRange); + VkBool32 depthClampEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClampRangeEXT( VkCommandBuffer commandBuffer, VkDepthClampModeEXT depthClampMode, - const VkDepthClampRangeEXT* pDepthClampRange -) { - layer_vkCmdSetDepthClampRangeEXT_default(commandBuffer, depthClampMode, pDepthClampRange); -} + const VkDepthClampRangeEXT* pDepthClampRange) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClipEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClampRangeEXT( VkCommandBuffer commandBuffer, - VkBool32 depthClipEnable); + VkDepthClampModeEXT depthClampMode, + const VkDepthClampRangeEXT* pDepthClampRange); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClipEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 depthClipEnable -) { - layer_vkCmdSetDepthClipEnableEXT_default(commandBuffer, depthClipEnable); -} + VkBool32 depthClipEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClipNegativeOneToOneEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClipEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 negativeOneToOne); + VkBool32 depthClipEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClipNegativeOneToOneEXT( VkCommandBuffer commandBuffer, - VkBool32 negativeOneToOne -) { - layer_vkCmdSetDepthClipNegativeOneToOneEXT_default(commandBuffer, negativeOneToOne); -} + VkBool32 negativeOneToOne) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthCompareOp_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthClipNegativeOneToOneEXT( VkCommandBuffer commandBuffer, - VkCompareOp depthCompareOp); + VkBool32 negativeOneToOne); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthCompareOp( VkCommandBuffer commandBuffer, - VkCompareOp depthCompareOp -) { - layer_vkCmdSetDepthCompareOp_default(commandBuffer, depthCompareOp); -} + VkCompareOp depthCompareOp) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthCompareOpEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthCompareOp( VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, - VkCompareOp depthCompareOp -) { - layer_vkCmdSetDepthCompareOpEXT_default(commandBuffer, depthCompareOp); -} + VkCompareOp depthCompareOp) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthTestEnable_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthCompareOpEXT( VkCommandBuffer commandBuffer, - VkBool32 depthTestEnable); + VkCompareOp depthCompareOp); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, - VkBool32 depthTestEnable -) { - layer_vkCmdSetDepthTestEnable_default(commandBuffer, depthTestEnable); -} + VkBool32 depthTestEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthTestEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthTestEnable( VkCommandBuffer commandBuffer, VkBool32 depthTestEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 depthTestEnable -) { - layer_vkCmdSetDepthTestEnableEXT_default(commandBuffer, depthTestEnable); -} + VkBool32 depthTestEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthWriteEnable_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthTestEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 depthWriteEnable); + VkBool32 depthTestEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, - VkBool32 depthWriteEnable -) { - layer_vkCmdSetDepthWriteEnable_default(commandBuffer, depthWriteEnable); -} + VkBool32 depthWriteEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthWriteEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthWriteEnable( VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 depthWriteEnable -) { - layer_vkCmdSetDepthWriteEnableEXT_default(commandBuffer, depthWriteEnable); -} + VkBool32 depthWriteEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDescriptorBufferOffsets2EXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDepthWriteEnableEXT( VkCommandBuffer commandBuffer, - const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo); + VkBool32 depthWriteEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDescriptorBufferOffsets2EXT( VkCommandBuffer commandBuffer, - const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo -) { - layer_vkCmdSetDescriptorBufferOffsets2EXT_default(commandBuffer, pSetDescriptorBufferOffsetsInfo); -} + const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDescriptorBufferOffsets2EXT( + VkCommandBuffer commandBuffer, + const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDescriptorBufferOffsetsEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDescriptorBufferOffsetsEXT( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const uint32_t* pBufferIndices, - const VkDeviceSize* pOffsets); + const VkDeviceSize* pOffsets) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDescriptorBufferOffsetsEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDescriptorBufferOffsetsEXT( VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const uint32_t* pBufferIndices, - const VkDeviceSize* pOffsets -) { - layer_vkCmdSetDescriptorBufferOffsetsEXT_default(commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pBufferIndices, pOffsets); -} + const VkDeviceSize* pOffsets); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDeviceMask_default( - VkCommandBuffer commandBuffer, - uint32_t deviceMask); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, - uint32_t deviceMask -) { - layer_vkCmdSetDeviceMask_default(commandBuffer, deviceMask); -} + uint32_t deviceMask) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDeviceMaskKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDeviceMask( VkCommandBuffer commandBuffer, uint32_t deviceMask); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, - uint32_t deviceMask -) { - layer_vkCmdSetDeviceMaskKHR_default(commandBuffer, deviceMask); -} + uint32_t deviceMask) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDeviceMaskKHR( VkCommandBuffer commandBuffer, - uint32_t firstDiscardRectangle, - uint32_t discardRectangleCount, - const VkRect2D* pDiscardRectangles); + uint32_t deviceMask); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, - const VkRect2D* pDiscardRectangles -) { - layer_vkCmdSetDiscardRectangleEXT_default(commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles); -} + const VkRect2D* pDiscardRectangles) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleEXT( VkCommandBuffer commandBuffer, - VkBool32 discardRectangleEnable); + uint32_t firstDiscardRectangle, + uint32_t discardRectangleCount, + const VkRect2D* pDiscardRectangles); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 discardRectangleEnable -) { - layer_vkCmdSetDiscardRectangleEnableEXT_default(commandBuffer, discardRectangleEnable); -} + VkBool32 discardRectangleEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleModeEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleEnableEXT( VkCommandBuffer commandBuffer, - VkDiscardRectangleModeEXT discardRectangleMode); + VkBool32 discardRectangleEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleModeEXT( VkCommandBuffer commandBuffer, - VkDiscardRectangleModeEXT discardRectangleMode -) { - layer_vkCmdSetDiscardRectangleModeEXT_default(commandBuffer, discardRectangleMode); -} + VkDiscardRectangleModeEXT discardRectangleMode) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetDiscardRectangleModeEXT( VkCommandBuffer commandBuffer, - VkEvent event, - VkPipelineStageFlags stageMask); + VkDiscardRectangleModeEXT discardRectangleMode); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, - VkPipelineStageFlags stageMask -) { - layer_vkCmdSetEvent_default(commandBuffer, event, stageMask); -} + VkPipelineStageFlags stageMask) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent2_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent( VkCommandBuffer commandBuffer, VkEvent event, - const VkDependencyInfo* pDependencyInfo); + VkPipelineStageFlags stageMask); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, - const VkDependencyInfo* pDependencyInfo -) { - layer_vkCmdSetEvent2_default(commandBuffer, event, pDependencyInfo); -} + const VkDependencyInfo* pDependencyInfo) = delete; -/* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent2( VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo* pDependencyInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, VkEvent event, - const VkDependencyInfo* pDependencyInfo -) { - layer_vkCmdSetEvent2KHR_default(commandBuffer, event, pDependencyInfo); -} + const VkDependencyInfo* pDependencyInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetExtraPrimitiveOverestimationSizeEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetEvent2KHR( VkCommandBuffer commandBuffer, - float extraPrimitiveOverestimationSize); + VkEvent event, + const VkDependencyInfo* pDependencyInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetExtraPrimitiveOverestimationSizeEXT( VkCommandBuffer commandBuffer, - float extraPrimitiveOverestimationSize -) { - layer_vkCmdSetExtraPrimitiveOverestimationSizeEXT_default(commandBuffer, extraPrimitiveOverestimationSize); -} + float extraPrimitiveOverestimationSize) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFragmentShadingRateKHR_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetExtraPrimitiveOverestimationSizeEXT( VkCommandBuffer commandBuffer, - const VkExtent2D* pFragmentSize, - const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); + float extraPrimitiveOverestimationSize); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, - const VkFragmentShadingRateCombinerOpKHR combinerOps[2] -) { - layer_vkCmdSetFragmentShadingRateKHR_default(commandBuffer, pFragmentSize, combinerOps); -} + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFrontFace_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFragmentShadingRateKHR( VkCommandBuffer commandBuffer, - VkFrontFace frontFace); + const VkExtent2D* pFragmentSize, + const VkFragmentShadingRateCombinerOpKHR combinerOps[2]); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFrontFace( VkCommandBuffer commandBuffer, - VkFrontFace frontFace -) { - layer_vkCmdSetFrontFace_default(commandBuffer, frontFace); -} + VkFrontFace frontFace) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFrontFaceEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFrontFace( VkCommandBuffer commandBuffer, VkFrontFace frontFace); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, - VkFrontFace frontFace -) { - layer_vkCmdSetFrontFaceEXT_default(commandBuffer, frontFace); -} + VkFrontFace frontFace) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineRasterizationModeEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetFrontFaceEXT( VkCommandBuffer commandBuffer, - VkLineRasterizationModeEXT lineRasterizationMode); + VkFrontFace frontFace); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineRasterizationModeEXT( VkCommandBuffer commandBuffer, - VkLineRasterizationModeEXT lineRasterizationMode -) { - layer_vkCmdSetLineRasterizationModeEXT_default(commandBuffer, lineRasterizationMode); -} + VkLineRasterizationModeEXT lineRasterizationMode) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStipple_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineRasterizationModeEXT( VkCommandBuffer commandBuffer, - uint32_t lineStippleFactor, - uint16_t lineStipplePattern); + VkLineRasterizationModeEXT lineRasterizationMode); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStipple( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, - uint16_t lineStipplePattern -) { - layer_vkCmdSetLineStipple_default(commandBuffer, lineStippleFactor, lineStipplePattern); -} + uint16_t lineStipplePattern) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStipple( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, - uint16_t lineStipplePattern -) { - layer_vkCmdSetLineStippleEXT_default(commandBuffer, lineStippleFactor, lineStipplePattern); -} + uint16_t lineStipplePattern) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleEXT( VkCommandBuffer commandBuffer, - VkBool32 stippledLineEnable); + uint32_t lineStippleFactor, + uint16_t lineStipplePattern); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 stippledLineEnable -) { - layer_vkCmdSetLineStippleEnableEXT_default(commandBuffer, stippledLineEnable); -} + VkBool32 stippledLineEnable) = delete; -/* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleEnableEXT( VkCommandBuffer commandBuffer, - uint32_t lineStippleFactor, - uint16_t lineStipplePattern); + VkBool32 stippledLineEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleKHR( VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, - uint16_t lineStipplePattern -) { - layer_vkCmdSetLineStippleKHR_default(commandBuffer, lineStippleFactor, lineStipplePattern); -} + uint16_t lineStipplePattern) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineWidth_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineStippleKHR( VkCommandBuffer commandBuffer, - float lineWidth); + uint32_t lineStippleFactor, + uint16_t lineStipplePattern); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineWidth( VkCommandBuffer commandBuffer, - float lineWidth -) { - layer_vkCmdSetLineWidth_default(commandBuffer, lineWidth); -} + float lineWidth) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLogicOpEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLineWidth( VkCommandBuffer commandBuffer, - VkLogicOp logicOp); + float lineWidth); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, - VkLogicOp logicOp -) { - layer_vkCmdSetLogicOpEXT_default(commandBuffer, logicOp); -} + VkLogicOp logicOp) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLogicOpEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLogicOpEXT( VkCommandBuffer commandBuffer, - VkBool32 logicOpEnable); + VkLogicOp logicOp); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLogicOpEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 logicOpEnable -) { - layer_vkCmdSetLogicOpEnableEXT_default(commandBuffer, logicOpEnable); -} + VkBool32 logicOpEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPatchControlPointsEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetLogicOpEnableEXT( VkCommandBuffer commandBuffer, - uint32_t patchControlPoints); + VkBool32 logicOpEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, - uint32_t patchControlPoints -) { - layer_vkCmdSetPatchControlPointsEXT_default(commandBuffer, patchControlPoints); -} + uint32_t patchControlPoints) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPolygonModeEXT_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPatchControlPointsEXT( VkCommandBuffer commandBuffer, - VkPolygonMode polygonMode); + uint32_t patchControlPoints); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPolygonModeEXT( VkCommandBuffer commandBuffer, - VkPolygonMode polygonMode -) { - layer_vkCmdSetPolygonModeEXT_default(commandBuffer, polygonMode); -} + VkPolygonMode polygonMode) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveRestartEnable_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPolygonModeEXT( VkCommandBuffer commandBuffer, - VkBool32 primitiveRestartEnable); + VkPolygonMode polygonMode); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, - VkBool32 primitiveRestartEnable -) { - layer_vkCmdSetPrimitiveRestartEnable_default(commandBuffer, primitiveRestartEnable); -} + VkBool32 primitiveRestartEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveRestartEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveRestartEnable( VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 primitiveRestartEnable -) { - layer_vkCmdSetPrimitiveRestartEnableEXT_default(commandBuffer, primitiveRestartEnable); -} + VkBool32 primitiveRestartEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveTopology_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveRestartEnableEXT( VkCommandBuffer commandBuffer, - VkPrimitiveTopology primitiveTopology); + VkBool32 primitiveRestartEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, - VkPrimitiveTopology primitiveTopology -) { - layer_vkCmdSetPrimitiveTopology_default(commandBuffer, primitiveTopology); -} + VkPrimitiveTopology primitiveTopology) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveTopologyEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveTopology( VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, - VkPrimitiveTopology primitiveTopology -) { - layer_vkCmdSetPrimitiveTopologyEXT_default(commandBuffer, primitiveTopology); -} + VkPrimitiveTopology primitiveTopology) = delete; -/* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetProvokingVertexModeEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetPrimitiveTopologyEXT( VkCommandBuffer commandBuffer, - VkProvokingVertexModeEXT provokingVertexMode); + VkPrimitiveTopology primitiveTopology); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetProvokingVertexModeEXT( VkCommandBuffer commandBuffer, - VkProvokingVertexModeEXT provokingVertexMode -) { - layer_vkCmdSetProvokingVertexModeEXT_default(commandBuffer, provokingVertexMode); -} + VkProvokingVertexModeEXT provokingVertexMode) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizationSamplesEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetProvokingVertexModeEXT( VkCommandBuffer commandBuffer, - VkSampleCountFlagBits rasterizationSamples); + VkProvokingVertexModeEXT provokingVertexMode); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizationSamplesEXT( VkCommandBuffer commandBuffer, - VkSampleCountFlagBits rasterizationSamples -) { - layer_vkCmdSetRasterizationSamplesEXT_default(commandBuffer, rasterizationSamples); -} + VkSampleCountFlagBits rasterizationSamples) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizationStreamEXT_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizationSamplesEXT( VkCommandBuffer commandBuffer, - uint32_t rasterizationStream); + VkSampleCountFlagBits rasterizationSamples); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizationStreamEXT( VkCommandBuffer commandBuffer, - uint32_t rasterizationStream -) { - layer_vkCmdSetRasterizationStreamEXT_default(commandBuffer, rasterizationStream); -} + uint32_t rasterizationStream) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizerDiscardEnable_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizationStreamEXT( VkCommandBuffer commandBuffer, - VkBool32 rasterizerDiscardEnable); + uint32_t rasterizationStream); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, - VkBool32 rasterizerDiscardEnable -) { - layer_vkCmdSetRasterizerDiscardEnable_default(commandBuffer, rasterizerDiscardEnable); -} + VkBool32 rasterizerDiscardEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizerDiscardEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizerDiscardEnable( VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 rasterizerDiscardEnable -) { - layer_vkCmdSetRasterizerDiscardEnableEXT_default(commandBuffer, rasterizerDiscardEnable); -} + VkBool32 rasterizerDiscardEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRayTracingPipelineStackSizeKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRasterizerDiscardEnableEXT( VkCommandBuffer commandBuffer, - uint32_t pipelineStackSize); + VkBool32 rasterizerDiscardEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, - uint32_t pipelineStackSize -) { - layer_vkCmdSetRayTracingPipelineStackSizeKHR_default(commandBuffer, pipelineStackSize); -} + uint32_t pipelineStackSize) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingAttachmentLocations_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRayTracingPipelineStackSizeKHR( VkCommandBuffer commandBuffer, - const VkRenderingAttachmentLocationInfo* pLocationInfo); + uint32_t pipelineStackSize); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingAttachmentLocations( VkCommandBuffer commandBuffer, - const VkRenderingAttachmentLocationInfo* pLocationInfo -) { - layer_vkCmdSetRenderingAttachmentLocations_default(commandBuffer, pLocationInfo); -} + const VkRenderingAttachmentLocationInfo* pLocationInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingAttachmentLocationsKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingAttachmentLocations( VkCommandBuffer commandBuffer, const VkRenderingAttachmentLocationInfo* pLocationInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingAttachmentLocationsKHR( VkCommandBuffer commandBuffer, - const VkRenderingAttachmentLocationInfo* pLocationInfo -) { - layer_vkCmdSetRenderingAttachmentLocationsKHR_default(commandBuffer, pLocationInfo); -} + const VkRenderingAttachmentLocationInfo* pLocationInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingInputAttachmentIndices_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingAttachmentLocationsKHR( VkCommandBuffer commandBuffer, - const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo); + const VkRenderingAttachmentLocationInfo* pLocationInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingInputAttachmentIndices( VkCommandBuffer commandBuffer, - const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo -) { - layer_vkCmdSetRenderingInputAttachmentIndices_default(commandBuffer, pInputAttachmentIndexInfo); -} + const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingInputAttachmentIndicesKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingInputAttachmentIndices( VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingInputAttachmentIndicesKHR( VkCommandBuffer commandBuffer, - const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo -) { - layer_vkCmdSetRenderingInputAttachmentIndicesKHR_default(commandBuffer, pInputAttachmentIndexInfo); -} + const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRepresentativeFragmentTestEnableNV_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRenderingInputAttachmentIndicesKHR( VkCommandBuffer commandBuffer, - VkBool32 representativeFragmentTestEnable); + const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRepresentativeFragmentTestEnableNV( VkCommandBuffer commandBuffer, - VkBool32 representativeFragmentTestEnable -) { - layer_vkCmdSetRepresentativeFragmentTestEnableNV_default(commandBuffer, representativeFragmentTestEnable); -} + VkBool32 representativeFragmentTestEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleLocationsEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetRepresentativeFragmentTestEnableNV( VkCommandBuffer commandBuffer, - const VkSampleLocationsInfoEXT* pSampleLocationsInfo); + VkBool32 representativeFragmentTestEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, - const VkSampleLocationsInfoEXT* pSampleLocationsInfo -) { - layer_vkCmdSetSampleLocationsEXT_default(commandBuffer, pSampleLocationsInfo); -} + const VkSampleLocationsInfoEXT* pSampleLocationsInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleLocationsEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleLocationsEXT( VkCommandBuffer commandBuffer, - VkBool32 sampleLocationsEnable); + const VkSampleLocationsInfoEXT* pSampleLocationsInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleLocationsEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 sampleLocationsEnable -) { - layer_vkCmdSetSampleLocationsEnableEXT_default(commandBuffer, sampleLocationsEnable); -} + VkBool32 sampleLocationsEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleMaskEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleLocationsEnableEXT( VkCommandBuffer commandBuffer, - VkSampleCountFlagBits samples, - const VkSampleMask* pSampleMask); + VkBool32 sampleLocationsEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleMaskEXT( VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, - const VkSampleMask* pSampleMask -) { - layer_vkCmdSetSampleMaskEXT_default(commandBuffer, samples, pSampleMask); -} + const VkSampleMask* pSampleMask) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissor_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetSampleMaskEXT( VkCommandBuffer commandBuffer, - uint32_t firstScissor, - uint32_t scissorCount, - const VkRect2D* pScissors); + VkSampleCountFlagBits samples, + const VkSampleMask* pSampleMask); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissor( VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, - const VkRect2D* pScissors -) { - layer_vkCmdSetScissor_default(commandBuffer, firstScissor, scissorCount, pScissors); -} + const VkRect2D* pScissors) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissorWithCount_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissor( VkCommandBuffer commandBuffer, + uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, - const VkRect2D* pScissors -) { - layer_vkCmdSetScissorWithCount_default(commandBuffer, scissorCount, pScissors); -} + const VkRect2D* pScissors) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissorWithCountEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissorWithCount( VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, uint32_t scissorCount, - const VkRect2D* pScissors -) { - layer_vkCmdSetScissorWithCountEXT_default(commandBuffer, scissorCount, pScissors); -} + const VkRect2D* pScissors) = delete; -/* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetShadingRateImageEnableNV_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetScissorWithCountEXT( VkCommandBuffer commandBuffer, - VkBool32 shadingRateImageEnable); + uint32_t scissorCount, + const VkRect2D* pScissors); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetShadingRateImageEnableNV( VkCommandBuffer commandBuffer, - VkBool32 shadingRateImageEnable -) { - layer_vkCmdSetShadingRateImageEnableNV_default(commandBuffer, shadingRateImageEnable); -} + VkBool32 shadingRateImageEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilCompareMask_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetShadingRateImageEnableNV( VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - uint32_t compareMask); + VkBool32 shadingRateImageEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, - uint32_t compareMask -) { - layer_vkCmdSetStencilCompareMask_default(commandBuffer, faceMask, compareMask); -} + uint32_t compareMask) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilOp_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilCompareMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, - VkStencilOp failOp, - VkStencilOp passOp, - VkStencilOp depthFailOp, - VkCompareOp compareOp); + uint32_t compareMask); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilOp( VkCommandBuffer commandBuffer, @@ -3772,14 +3339,11 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilOp( VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, - VkCompareOp compareOp -) { - layer_vkCmdSetStencilOp_default(commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp); -} + VkCompareOp compareOp) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilOpEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilOp( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, @@ -3787,7 +3351,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilOpEXT_default( VkStencilOp depthFailOp, VkCompareOp compareOp); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, @@ -3795,239 +3360,221 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilOpEXT( VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, - VkCompareOp compareOp -) { - layer_vkCmdSetStencilOpEXT_default(commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp); -} + VkCompareOp compareOp) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilReference_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilOpEXT( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, - uint32_t reference); - -/* Match-all template to use default implementation. */ -template + VkStencilOp failOp, + VkStencilOp passOp, + VkStencilOp depthFailOp, + VkCompareOp compareOp); + +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ +template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilReference( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, - uint32_t reference -) { - layer_vkCmdSetStencilReference_default(commandBuffer, faceMask, reference); -} + uint32_t reference) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilTestEnable_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilReference( VkCommandBuffer commandBuffer, - VkBool32 stencilTestEnable); + VkStencilFaceFlags faceMask, + uint32_t reference); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, - VkBool32 stencilTestEnable -) { - layer_vkCmdSetStencilTestEnable_default(commandBuffer, stencilTestEnable); -} + VkBool32 stencilTestEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilTestEnableEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilTestEnable( VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable); -/* Match-all template to use default implementation. 
*/ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, - VkBool32 stencilTestEnable -) { - layer_vkCmdSetStencilTestEnableEXT_default(commandBuffer, stencilTestEnable); -} + VkBool32 stencilTestEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilWriteMask_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilTestEnableEXT( VkCommandBuffer commandBuffer, - VkStencilFaceFlags faceMask, - uint32_t writeMask); + VkBool32 stencilTestEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, - uint32_t writeMask -) { - layer_vkCmdSetStencilWriteMask_default(commandBuffer, faceMask, writeMask); -} + uint32_t writeMask) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetTessellationDomainOriginEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetStencilWriteMask( VkCommandBuffer commandBuffer, - VkTessellationDomainOrigin domainOrigin); + VkStencilFaceFlags faceMask, + uint32_t writeMask); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetTessellationDomainOriginEXT( VkCommandBuffer commandBuffer, - VkTessellationDomainOrigin domainOrigin -) { - layer_vkCmdSetTessellationDomainOriginEXT_default(commandBuffer, domainOrigin); -} + VkTessellationDomainOrigin domainOrigin) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetTessellationDomainOriginEXT( + VkCommandBuffer commandBuffer, + VkTessellationDomainOrigin domainOrigin); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetVertexInputEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, - const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); + const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetVertexInputEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetVertexInputEXT( VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, - const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions -) { - layer_vkCmdSetVertexInputEXT_default(commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions); -} + const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions); /* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewport_default( - VkCommandBuffer commandBuffer, - uint32_t firstViewport, - uint32_t viewportCount, - const VkViewport* pViewports); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, - const VkViewport* pViewports -) { - layer_vkCmdSetViewport_default(commandBuffer, firstViewport, viewportCount, pViewports); -} + const VkViewport* pViewports) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportSwizzleNV_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewport( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, - const VkViewportSwizzleNV* pViewportSwizzles); + const VkViewport* pViewports); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportSwizzleNV( VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, - const VkViewportSwizzleNV* pViewportSwizzles -) { - layer_vkCmdSetViewportSwizzleNV_default(commandBuffer, firstViewport, viewportCount, pViewportSwizzles); -} + const VkViewportSwizzleNV* pViewportSwizzles) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWScalingEnableNV_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportSwizzleNV( VkCommandBuffer commandBuffer, - VkBool32 viewportWScalingEnable); + uint32_t firstViewport, + uint32_t viewportCount, + const VkViewportSwizzleNV* pViewportSwizzles); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWScalingEnableNV( VkCommandBuffer commandBuffer, - VkBool32 viewportWScalingEnable -) { - layer_vkCmdSetViewportWScalingEnableNV_default(commandBuffer, viewportWScalingEnable); -} + VkBool32 viewportWScalingEnable) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWithCount_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWScalingEnableNV( VkCommandBuffer commandBuffer, - uint32_t viewportCount, - const VkViewport* pViewports); + VkBool32 viewportWScalingEnable); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, - const VkViewport* pViewports -) { - layer_vkCmdSetViewportWithCount_default(commandBuffer, viewportCount, pViewports); -} + const VkViewport* pViewports) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWithCountEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWithCount( VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, uint32_t viewportCount, - const VkViewport* pViewports -) { - layer_vkCmdSetViewportWithCountEXT_default(commandBuffer, viewportCount, pViewports); -} + const VkViewport* pViewports) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysIndirect2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdSetViewportWithCountEXT( VkCommandBuffer commandBuffer, - VkDeviceAddress indirectDeviceAddress); + uint32_t viewportCount, + const VkViewport* pViewports); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysIndirect2KHR( VkCommandBuffer commandBuffer, - VkDeviceAddress indirectDeviceAddress -) { - layer_vkCmdTraceRaysIndirect2KHR_default(commandBuffer, indirectDeviceAddress); -} + VkDeviceAddress indirectDeviceAddress) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysIndirect2KHR( + VkCommandBuffer commandBuffer, + VkDeviceAddress indirectDeviceAddress); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysIndirectKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, - VkDeviceAddress indirectDeviceAddress); + VkDeviceAddress indirectDeviceAddress) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysIndirectKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysIndirectKHR( VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, - VkDeviceAddress indirectDeviceAddress -) { - layer_vkCmdTraceRaysIndirectKHR_default(commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress); -} + VkDeviceAddress indirectDeviceAddress); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, @@ -4035,11 +3582,11 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysKHR_default( const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, - uint32_t depth); + uint32_t depth) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysKHR( VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, @@ -4047,35 +3594,31 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdTraceRaysKHR( const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, - uint32_t depth -) { - layer_vkCmdTraceRaysKHR_default(commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth); -} + uint32_t depth); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdUpdateBuffer_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, - const void* pData); + const void* pData) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdUpdateBuffer( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdUpdateBuffer( VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, - const void* pData -) { - layer_vkCmdUpdateBuffer_default(commandBuffer, dstBuffer, dstOffset, dataSize, pData); -} + const void* pData); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, @@ -4086,11 +3629,11 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents_default( uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier* pImageMemoryBarriers); + const VkImageMemoryBarrier* pImageMemoryBarriers) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, @@ -4101,604 +3644,528 @@ VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents( uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, - const VkImageMemoryBarrier* pImageMemoryBarriers -) { - layer_vkCmdWaitEvents_default(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); -} + const VkImageMemoryBarrier* pImageMemoryBarriers); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents2_default( - VkCommandBuffer commandBuffer, - uint32_t eventCount, - const VkEvent* pEvents, - const VkDependencyInfo* pDependencyInfos); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents2( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, - const VkDependencyInfo* pDependencyInfos -) { - layer_vkCmdWaitEvents2_default(commandBuffer, eventCount, pEvents, pDependencyInfos); -} + const VkDependencyInfo* pDependencyInfos) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents2( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfo* pDependencyInfos); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents2KHR( VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, - const VkDependencyInfo* pDependencyInfos -) { - layer_vkCmdWaitEvents2KHR_default(commandBuffer, eventCount, pEvents, pDependencyInfos); -} + const VkDependencyInfo* pDependencyInfos) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWaitEvents2KHR( + VkCommandBuffer commandBuffer, + uint32_t eventCount, + const VkEvent* pEvents, + const VkDependencyInfo* pDependencyInfos); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteAccelerationStructuresPropertiesKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, - uint32_t firstQuery); + uint32_t firstQuery) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteAccelerationStructuresPropertiesKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteAccelerationStructuresPropertiesKHR( VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, - uint32_t firstQuery -) { - layer_vkCmdWriteAccelerationStructuresPropertiesKHR_default(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery); -} + uint32_t firstQuery); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteMicromapsPropertiesEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteMicromapsPropertiesEXT( VkCommandBuffer commandBuffer, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, VkQueryPool queryPool, - uint32_t firstQuery); + uint32_t firstQuery) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteMicromapsPropertiesEXT( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteMicromapsPropertiesEXT( VkCommandBuffer commandBuffer, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, VkQueryPool queryPool, - uint32_t firstQuery -) { - layer_vkCmdWriteMicromapsPropertiesEXT_default(commandBuffer, micromapCount, pMicromaps, queryType, queryPool, firstQuery); -} + uint32_t firstQuery); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp_default( - VkCommandBuffer commandBuffer, - VkPipelineStageFlagBits pipelineStage, - VkQueryPool queryPool, - uint32_t query); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, - uint32_t query -) { - layer_vkCmdWriteTimestamp_default(commandBuffer, pipelineStage, queryPool, query); -} + uint32_t query) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp2_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp( VkCommandBuffer commandBuffer, - VkPipelineStageFlags2 stage, + VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, - uint32_t query -) { - layer_vkCmdWriteTimestamp2_default(commandBuffer, stage, queryPool, query); -} + uint32_t query) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp2( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp2KHR( VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, - uint32_t query -) { - layer_vkCmdWriteTimestamp2KHR_default(commandBuffer, stage, queryPool, query); -} + uint32_t query) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyAccelerationStructureKHR_default( - VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyAccelerationStructureInfoKHR* pInfo); +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkCmdWriteTimestamp2KHR( + VkCommandBuffer commandBuffer, + VkPipelineStageFlags2 stage, + VkQueryPool queryPool, + uint32_t query); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyAccelerationStructureKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, - const VkCopyAccelerationStructureInfoKHR* pInfo -) { - return layer_vkCopyAccelerationStructureKHR_default(device, deferredOperation, pInfo); -} + const VkCopyAccelerationStructureInfoKHR* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyAccelerationStructureToMemoryKHR_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyAccelerationStructureKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, - const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); + const VkCopyAccelerationStructureInfoKHR* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyAccelerationStructureToMemoryKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, - const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo -) { - return layer_vkCopyAccelerationStructureToMemoryKHR_default(device, deferredOperation, pInfo); -} + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToImage_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyAccelerationStructureToMemoryKHR( VkDevice device, - const VkCopyImageToImageInfo* pCopyImageToImageInfo); + VkDeferredOperationKHR deferredOperation, + const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToImage( VkDevice device, - const VkCopyImageToImageInfo* pCopyImageToImageInfo -) { - return layer_vkCopyImageToImage_default(device, pCopyImageToImageInfo); -} + const VkCopyImageToImageInfo* pCopyImageToImageInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToImageEXT_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToImage( VkDevice device, const VkCopyImageToImageInfo* pCopyImageToImageInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToImageEXT( VkDevice device, - const VkCopyImageToImageInfo* pCopyImageToImageInfo -) { - return layer_vkCopyImageToImageEXT_default(device, pCopyImageToImageInfo); -} + const VkCopyImageToImageInfo* pCopyImageToImageInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToMemory_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToImageEXT( VkDevice device, - const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo); + const VkCopyImageToImageInfo* pCopyImageToImageInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToMemory( VkDevice device, - const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo -) { - return layer_vkCopyImageToMemory_default(device, pCopyImageToMemoryInfo); -} + const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToMemoryEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToMemory( VkDevice device, const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToMemoryEXT( VkDevice device, - const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo -) { - return layer_vkCopyImageToMemoryEXT_default(device, pCopyImageToMemoryInfo); -} + const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToAccelerationStructureKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyImageToMemoryEXT( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); + const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToAccelerationStructureKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, - const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo -) { - return layer_vkCopyMemoryToAccelerationStructureKHR_default(device, deferredOperation, pInfo); -} + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToImage_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToAccelerationStructureKHR( VkDevice device, - const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo); + VkDeferredOperationKHR deferredOperation, + const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToImage( VkDevice device, - const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo -) { - return layer_vkCopyMemoryToImage_default(device, pCopyMemoryToImageInfo); -} + const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToImageEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToImage( VkDevice device, const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToImageEXT( VkDevice device, - const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo -) { - return layer_vkCopyMemoryToImageEXT_default(device, pCopyMemoryToImageInfo); -} + const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToMicromapEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToImageEXT( VkDevice device, - VkDeferredOperationKHR deferredOperation, - const VkCopyMemoryToMicromapInfoEXT* pInfo); + const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, - const VkCopyMemoryToMicromapInfoEXT* pInfo -) { - return layer_vkCopyMemoryToMicromapEXT_default(device, deferredOperation, pInfo); -} + const VkCopyMemoryToMicromapInfoEXT* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMicromapEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMemoryToMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, - const VkCopyMicromapInfoEXT* pInfo); + const VkCopyMemoryToMicromapInfoEXT* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, - const VkCopyMicromapInfoEXT* pInfo -) { - return layer_vkCopyMicromapEXT_default(device, deferredOperation, pInfo); -} + const VkCopyMicromapInfoEXT* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMicromapToMemoryEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMicromapEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, - const VkCopyMicromapToMemoryInfoEXT* pInfo); + const VkCopyMicromapInfoEXT* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMicromapToMemoryEXT( VkDevice device, VkDeferredOperationKHR deferredOperation, - const VkCopyMicromapToMemoryInfoEXT* pInfo -) { - return layer_vkCopyMicromapToMemoryEXT_default(device, deferredOperation, pInfo); -} + const VkCopyMicromapToMemoryInfoEXT* pInfo) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCopyMicromapToMemoryEXT( + VkDevice device, + VkDeferredOperationKHR deferredOperation, + const VkCopyMicromapToMemoryInfoEXT* pInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateAccelerationStructureKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateAccelerationStructureKHR( VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkAccelerationStructureKHR* pAccelerationStructure); + VkAccelerationStructureKHR* pAccelerationStructure) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateAccelerationStructureKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateAccelerationStructureKHR( VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkAccelerationStructureKHR* pAccelerationStructure -) { - return layer_vkCreateAccelerationStructureKHR_default(device, pCreateInfo, pAllocator, pAccelerationStructure); -} + VkAccelerationStructureKHR* pAccelerationStructure); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateBuffer_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateBuffer( VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkBuffer* pBuffer); + VkBuffer* pBuffer) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateBuffer( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateBuffer( VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkBuffer* pBuffer -) { - return layer_vkCreateBuffer_default(device, pCreateInfo, pAllocator, pBuffer); -} + VkBuffer* pBuffer); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateBufferView_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkBufferView* pView); + VkBufferView* pView) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateBufferView( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateBufferView( VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkBufferView* pView -) { - return layer_vkCreateBufferView_default(device, pCreateInfo, pAllocator, pView); -} + VkBufferView* pView); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateCommandPool_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkCommandPool* pCommandPool); + VkCommandPool* pCommandPool) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateCommandPool( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateCommandPool( VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkCommandPool* pCommandPool -) { - return layer_vkCreateCommandPool_default(device, pCreateInfo, pAllocator, pCommandPool); -} + VkCommandPool* pCommandPool); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateComputePipelines_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, - VkPipeline* pPipelines); + VkPipeline* pPipelines) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateComputePipelines( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateComputePipelines( VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, - VkPipeline* pPipelines -) { - return layer_vkCreateComputePipelines_default(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); -} + VkPipeline* pPipelines); /* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDeferredOperationKHR_default( - VkDevice device, - const VkAllocationCallbacks* pAllocator, - VkDeferredOperationKHR* pDeferredOperation); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDeferredOperationKHR( VkDevice device, const VkAllocationCallbacks* pAllocator, - VkDeferredOperationKHR* pDeferredOperation -) { - return layer_vkCreateDeferredOperationKHR_default(device, pAllocator, pDeferredOperation); -} + VkDeferredOperationKHR* pDeferredOperation) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorPool_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDeferredOperationKHR( VkDevice device, - const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkDescriptorPool* pDescriptorPool); + VkDeferredOperationKHR* pDeferredOperation); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorPool( VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkDescriptorPool* pDescriptorPool -) { - return layer_vkCreateDescriptorPool_default(device, pCreateInfo, pAllocator, pDescriptorPool); -} + VkDescriptorPool* pDescriptorPool) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorSetLayout_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorPool( VkDevice device, - const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkDescriptorSetLayout* pSetLayout); + VkDescriptorPool* pDescriptorPool); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorSetLayout( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkDescriptorSetLayout* pSetLayout -) { - return layer_vkCreateDescriptorSetLayout_default(device, pCreateInfo, pAllocator, pSetLayout); -} + VkDescriptorSetLayout* pSetLayout) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorUpdateTemplate_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorSetLayout( VkDevice device, - const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); + VkDescriptorSetLayout* pSetLayout); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate -) { - return layer_vkCreateDescriptorUpdateTemplate_default(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate); -} + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorUpdateTemplateKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorUpdateTemplate( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorUpdateTemplateKHR( VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate -) { - return layer_vkCreateDescriptorUpdateTemplateKHR_default(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate); -} + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateEvent_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDescriptorUpdateTemplateKHR( VkDevice device, - const VkEventCreateInfo* pCreateInfo, + const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkEvent* pEvent); + VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateEvent( VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkEvent* pEvent -) { - return layer_vkCreateEvent_default(device, pCreateInfo, pAllocator, pEvent); -} + VkEvent* pEvent) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateFence_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateEvent( VkDevice device, - const VkFenceCreateInfo* pCreateInfo, + const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkFence* pFence); + VkEvent* pEvent); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateFence( VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkFence* pFence -) { - return layer_vkCreateFence_default(device, pCreateInfo, pAllocator, pFence); -} + VkFence* pFence) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateFramebuffer_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateFence( VkDevice device, - const VkFramebufferCreateInfo* pCreateInfo, + const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkFramebuffer* pFramebuffer); + VkFence* pFence); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateFramebuffer( VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkFramebuffer* pFramebuffer -) { - return layer_vkCreateFramebuffer_default(device, pCreateInfo, pAllocator, pFramebuffer); -} + VkFramebuffer* pFramebuffer) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateGraphicsPipelines_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateFramebuffer( VkDevice device, - VkPipelineCache pipelineCache, - uint32_t createInfoCount, - const VkGraphicsPipelineCreateInfo* pCreateInfos, + const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkPipeline* pPipelines); + VkFramebuffer* pFramebuffer); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateGraphicsPipelines( VkDevice device, @@ -4706,2571 +4173,2289 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateGraphicsPipelines( uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, - VkPipeline* pPipelines -) { - return layer_vkCreateGraphicsPipelines_default(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); -} + VkPipeline* pPipelines) = delete; -/* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateImage_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateGraphicsPipelines( VkDevice device, - const VkImageCreateInfo* pCreateInfo, + VkPipelineCache pipelineCache, + uint32_t createInfoCount, + const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, - VkImage* pImage); + VkPipeline* pPipelines); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateImage( VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkImage* pImage -) { - return layer_vkCreateImage_default(device, pCreateInfo, pAllocator, pImage); -} + VkImage* pImage) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateImageView_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateImage( VkDevice device, - const VkImageViewCreateInfo* pCreateInfo, + const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkImageView* pView); + VkImage* pImage); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateImageView( VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkImageView* pView -) { - return layer_vkCreateImageView_default(device, pCreateInfo, pAllocator, pView); -} + VkImageView* pView) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateIndirectCommandsLayoutEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateImageView( VkDevice device, - const VkIndirectCommandsLayoutCreateInfoEXT* pCreateInfo, + const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkIndirectCommandsLayoutEXT* pIndirectCommandsLayout); + VkImageView* pView); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateIndirectCommandsLayoutEXT( VkDevice device, const VkIndirectCommandsLayoutCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkIndirectCommandsLayoutEXT* pIndirectCommandsLayout -) { - return layer_vkCreateIndirectCommandsLayoutEXT_default(device, pCreateInfo, pAllocator, pIndirectCommandsLayout); -} + VkIndirectCommandsLayoutEXT* pIndirectCommandsLayout) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateIndirectExecutionSetEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateIndirectCommandsLayoutEXT( VkDevice device, - const VkIndirectExecutionSetCreateInfoEXT* pCreateInfo, + const VkIndirectCommandsLayoutCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkIndirectExecutionSetEXT* pIndirectExecutionSet); + VkIndirectCommandsLayoutEXT* pIndirectCommandsLayout); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateIndirectExecutionSetEXT( VkDevice device, const VkIndirectExecutionSetCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkIndirectExecutionSetEXT* pIndirectExecutionSet -) { - return layer_vkCreateIndirectExecutionSetEXT_default(device, pCreateInfo, pAllocator, pIndirectExecutionSet); -} + VkIndirectExecutionSetEXT* pIndirectExecutionSet) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateMicromapEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateIndirectExecutionSetEXT( VkDevice device, - const VkMicromapCreateInfoEXT* pCreateInfo, + const VkIndirectExecutionSetCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkMicromapEXT* pMicromap); + VkIndirectExecutionSetEXT* pIndirectExecutionSet); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateMicromapEXT( VkDevice device, const VkMicromapCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkMicromapEXT* pMicromap -) { - return layer_vkCreateMicromapEXT_default(device, pCreateInfo, pAllocator, pMicromap); -} + VkMicromapEXT* pMicromap) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineBinariesKHR_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateMicromapEXT( VkDevice device, - const VkPipelineBinaryCreateInfoKHR* pCreateInfo, + const VkMicromapCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkPipelineBinaryHandlesInfoKHR* pBinaries); + VkMicromapEXT* pMicromap); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineBinariesKHR( VkDevice device, const VkPipelineBinaryCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkPipelineBinaryHandlesInfoKHR* pBinaries -) { - return layer_vkCreatePipelineBinariesKHR_default(device, pCreateInfo, pAllocator, pBinaries); -} + VkPipelineBinaryHandlesInfoKHR* pBinaries) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineCache_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineBinariesKHR( VkDevice device, - const VkPipelineCacheCreateInfo* pCreateInfo, + const VkPipelineBinaryCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkPipelineCache* pPipelineCache); + VkPipelineBinaryHandlesInfoKHR* pBinaries); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineCache( VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkPipelineCache* pPipelineCache -) { - return layer_vkCreatePipelineCache_default(device, pCreateInfo, pAllocator, pPipelineCache); -} + VkPipelineCache* pPipelineCache) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineLayout_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineCache( VkDevice device, - const VkPipelineLayoutCreateInfo* pCreateInfo, + const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkPipelineLayout* pPipelineLayout); + VkPipelineCache* pPipelineCache); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineLayout( VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkPipelineLayout* pPipelineLayout -) { - return layer_vkCreatePipelineLayout_default(device, pCreateInfo, pAllocator, pPipelineLayout); -} + VkPipelineLayout* pPipelineLayout) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePrivateDataSlot_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePipelineLayout( VkDevice device, - const VkPrivateDataSlotCreateInfo* pCreateInfo, + const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkPrivateDataSlot* pPrivateDataSlot); + VkPipelineLayout* pPipelineLayout); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePrivateDataSlot( VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkPrivateDataSlot* pPrivateDataSlot -) { - return layer_vkCreatePrivateDataSlot_default(device, pCreateInfo, pAllocator, pPrivateDataSlot); -} + VkPrivateDataSlot* pPrivateDataSlot) = delete; -/* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePrivateDataSlotEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePrivateDataSlot( VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlot* pPrivateDataSlot); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePrivateDataSlotEXT( VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkPrivateDataSlot* pPrivateDataSlot -) { - return layer_vkCreatePrivateDataSlotEXT_default(device, pCreateInfo, pAllocator, pPrivateDataSlot); -} + VkPrivateDataSlot* pPrivateDataSlot) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateQueryPool_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreatePrivateDataSlotEXT( VkDevice device, - const VkQueryPoolCreateInfo* pCreateInfo, + const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkQueryPool* pQueryPool); + VkPrivateDataSlot* pPrivateDataSlot); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateQueryPool( VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkQueryPool* pQueryPool -) { - return layer_vkCreateQueryPool_default(device, pCreateInfo, pAllocator, pQueryPool); -} + VkQueryPool* pQueryPool) = delete; + +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateQueryPool( + VkDevice device, + const VkQueryPoolCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkQueryPool* pQueryPool); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRayTracingPipelinesKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRayTracingPipelinesKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, - VkPipeline* pPipelines); + VkPipeline* pPipelines) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRayTracingPipelinesKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRayTracingPipelinesKHR( VkDevice device, VkDeferredOperationKHR deferredOperation, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, - VkPipeline* pPipelines -) { - return layer_vkCreateRayTracingPipelinesKHR_default(device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); -} + VkPipeline* pPipelines); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass_default( - VkDevice device, - const VkRenderPassCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkRenderPass* pRenderPass); - -/* Match-all template to use default implementation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass( VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkRenderPass* pRenderPass -) { - return layer_vkCreateRenderPass_default(device, pCreateInfo, pAllocator, pRenderPass); -} + VkRenderPass* pRenderPass) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass2_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass( VkDevice device, - const VkRenderPassCreateInfo2* pCreateInfo, + const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass2( VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkRenderPass* pRenderPass -) { - return layer_vkCreateRenderPass2_default(device, pCreateInfo, pAllocator, pRenderPass); -} + VkRenderPass* pRenderPass) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass2( VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass2KHR( VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkRenderPass* pRenderPass -) { - return layer_vkCreateRenderPass2KHR_default(device, pCreateInfo, pAllocator, pRenderPass); -} + VkRenderPass* pRenderPass) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSampler_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateRenderPass2KHR( VkDevice device, - const VkSamplerCreateInfo* pCreateInfo, + const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkSampler* pSampler); + VkRenderPass* pRenderPass); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSampler( VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkSampler* pSampler -) { - return layer_vkCreateSampler_default(device, pCreateInfo, pAllocator, pSampler); -} + VkSampler* pSampler) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSamplerYcbcrConversion_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSampler( VkDevice device, - const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, + const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkSamplerYcbcrConversion* pYcbcrConversion); + VkSampler* pSampler); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkSamplerYcbcrConversion* pYcbcrConversion -) { - return layer_vkCreateSamplerYcbcrConversion_default(device, pCreateInfo, pAllocator, pYcbcrConversion); -} + VkSamplerYcbcrConversion* pYcbcrConversion) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSamplerYcbcrConversionKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSamplerYcbcrConversion( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSamplerYcbcrConversionKHR( VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkSamplerYcbcrConversion* pYcbcrConversion -) { - return layer_vkCreateSamplerYcbcrConversionKHR_default(device, pCreateInfo, pAllocator, pYcbcrConversion); -} + VkSamplerYcbcrConversion* pYcbcrConversion) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSemaphore_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSamplerYcbcrConversionKHR( VkDevice device, - const VkSemaphoreCreateInfo* pCreateInfo, + const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkSemaphore* pSemaphore); + VkSamplerYcbcrConversion* pYcbcrConversion); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSemaphore( VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkSemaphore* pSemaphore -) { - return layer_vkCreateSemaphore_default(device, pCreateInfo, pAllocator, pSemaphore); -} + VkSemaphore* pSemaphore) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateShaderModule_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSemaphore( VkDevice device, - const VkShaderModuleCreateInfo* pCreateInfo, + const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkShaderModule* pShaderModule); + VkSemaphore* pSemaphore); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateShaderModule( VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkShaderModule* pShaderModule -) { - return layer_vkCreateShaderModule_default(device, pCreateInfo, pAllocator, pShaderModule); -} + VkShaderModule* pShaderModule) = delete; + +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateShaderModule( + VkDevice device, + const VkShaderModuleCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkShaderModule* pShaderModule); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateShadersEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateShadersEXT( VkDevice device, uint32_t createInfoCount, const VkShaderCreateInfoEXT* pCreateInfos, const VkAllocationCallbacks* pAllocator, - VkShaderEXT* pShaders); + VkShaderEXT* pShaders) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateShadersEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateShadersEXT( VkDevice device, uint32_t createInfoCount, const VkShaderCreateInfoEXT* pCreateInfos, const VkAllocationCallbacks* pAllocator, - VkShaderEXT* pShaders -) { - return layer_vkCreateShadersEXT_default(device, createInfoCount, pCreateInfos, pAllocator, pShaders); -} + VkShaderEXT* pShaders); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSharedSwapchainsKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSharedSwapchainsKHR( VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, - VkSwapchainKHR* pSwapchains); + VkSwapchainKHR* pSwapchains) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSharedSwapchainsKHR( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSharedSwapchainsKHR( VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, - VkSwapchainKHR* pSwapchains -) { - return layer_vkCreateSharedSwapchainsKHR_default(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains); -} + VkSwapchainKHR* pSwapchains); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSwapchainKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkSwapchainKHR* pSwapchain); + VkSwapchainKHR* pSwapchain) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSwapchainKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateSwapchainKHR( VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkSwapchainKHR* pSwapchain -) { - return layer_vkCreateSwapchainKHR_default(device, pCreateInfo, pAllocator, pSwapchain); -} + VkSwapchainKHR* pSwapchain); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateTensorARM_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateTensorARM( VkDevice device, const VkTensorCreateInfoARM* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkTensorARM* pTensor); + VkTensorARM* pTensor) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateTensorARM( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateTensorARM( VkDevice device, const VkTensorCreateInfoARM* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkTensorARM* pTensor -) { - return layer_vkCreateTensorARM_default(device, pCreateInfo, pAllocator, pTensor); -} + VkTensorARM* pTensor); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateTensorViewARM_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateTensorViewARM( VkDevice device, const VkTensorViewCreateInfoARM* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkTensorViewARM* pView); + VkTensorViewARM* pView) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateTensorViewARM( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateTensorViewARM( VkDevice device, const VkTensorViewCreateInfoARM* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkTensorViewARM* pView -) { - return layer_vkCreateTensorViewARM_default(device, pCreateInfo, pAllocator, pView); -} + VkTensorViewARM* pView); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateValidationCacheEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateValidationCacheEXT( VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkValidationCacheEXT* pValidationCache); + VkValidationCacheEXT* pValidationCache) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateValidationCacheEXT( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateValidationCacheEXT( VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkValidationCacheEXT* pValidationCache -) { - return layer_vkCreateValidationCacheEXT_default(device, pCreateInfo, pAllocator, pValidationCache); -} + VkValidationCacheEXT* pValidationCache); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkDebugMarkerSetObjectNameEXT_default( - VkDevice device, - const VkDebugMarkerObjectNameInfoEXT* pNameInfo); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkDebugMarkerSetObjectNameEXT( VkDevice device, - const VkDebugMarkerObjectNameInfoEXT* pNameInfo -) { - return layer_vkDebugMarkerSetObjectNameEXT_default(device, pNameInfo); -} + const VkDebugMarkerObjectNameInfoEXT* pNameInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkDebugMarkerSetObjectTagEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkDebugMarkerSetObjectNameEXT( VkDevice device, - const VkDebugMarkerObjectTagInfoEXT* pTagInfo); + const VkDebugMarkerObjectNameInfoEXT* pNameInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkDebugMarkerSetObjectTagEXT( VkDevice device, - const VkDebugMarkerObjectTagInfoEXT* pTagInfo -) { - return layer_vkDebugMarkerSetObjectTagEXT_default(device, pTagInfo); -} + const VkDebugMarkerObjectTagInfoEXT* pTagInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkDeferredOperationJoinKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkDebugMarkerSetObjectTagEXT( VkDevice device, - VkDeferredOperationKHR operation); + const VkDebugMarkerObjectTagInfoEXT* pTagInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkDeferredOperationJoinKHR( VkDevice device, - VkDeferredOperationKHR operation -) { - return layer_vkDeferredOperationJoinKHR_default(device, operation); -} + VkDeferredOperationKHR operation) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyAccelerationStructureKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkDeferredOperationJoinKHR( VkDevice device, - VkAccelerationStructureKHR accelerationStructure, - const VkAllocationCallbacks* pAllocator); + VkDeferredOperationKHR operation); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyAccelerationStructureKHR( VkDevice device, VkAccelerationStructureKHR accelerationStructure, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyAccelerationStructureKHR_default(device, accelerationStructure, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyBuffer_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyAccelerationStructureKHR( VkDevice device, - VkBuffer buffer, + VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyBuffer( VkDevice device, VkBuffer buffer, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyBuffer_default(device, buffer, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyBufferView_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyBuffer( VkDevice device, - VkBufferView bufferView, + VkBuffer buffer, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyBufferView( VkDevice device, VkBufferView bufferView, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyBufferView_default(device, bufferView, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyCommandPool_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyBufferView( VkDevice device, - VkCommandPool commandPool, + VkBufferView bufferView, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyCommandPool( VkDevice device, VkCommandPool commandPool, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyCommandPool_default(device, commandPool, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDeferredOperationKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyCommandPool( VkDevice device, - VkDeferredOperationKHR operation, + VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDeferredOperationKHR( VkDevice device, VkDeferredOperationKHR operation, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyDeferredOperationKHR_default(device, operation, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorPool_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDeferredOperationKHR( VkDevice device, - VkDescriptorPool descriptorPool, + VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyDescriptorPool_default(device, descriptorPool, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorSetLayout_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorPool( VkDevice device, - VkDescriptorSetLayout descriptorSetLayout, + VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorSetLayout( VkDevice device, VkDescriptorSetLayout descriptorSetLayout, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyDescriptorSetLayout_default(device, descriptorSetLayout, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorUpdateTemplate_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorSetLayout( VkDevice device, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, + VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyDescriptorUpdateTemplate_default(device, descriptorUpdateTemplate, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorUpdateTemplateKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorUpdateTemplate( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyDescriptorUpdateTemplateKHR_default(device, descriptorUpdateTemplate, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDevice_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDescriptorUpdateTemplateKHR( VkDevice device, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDevice( VkDevice device, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyDevice_default(device, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyEvent_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDevice( VkDevice device, - VkEvent event, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyEvent( VkDevice device, VkEvent event, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyEvent_default(device, event, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyFence_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyEvent( VkDevice device, - VkFence fence, + VkEvent event, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyFence( VkDevice device, VkFence fence, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyFence_default(device, fence, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyFramebuffer_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyFence( VkDevice device, - VkFramebuffer framebuffer, + VkFence fence, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyFramebuffer( VkDevice device, VkFramebuffer framebuffer, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyFramebuffer_default(device, framebuffer, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyImage_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyFramebuffer( VkDevice device, - VkImage image, + VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyImage( VkDevice device, VkImage image, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyImage_default(device, image, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyImageView_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyImage( VkDevice device, - VkImageView imageView, + VkImage image, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyImageView( VkDevice device, VkImageView imageView, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyImageView_default(device, imageView, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyIndirectCommandsLayoutEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyImageView( VkDevice device, - VkIndirectCommandsLayoutEXT indirectCommandsLayout, + VkImageView imageView, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyIndirectCommandsLayoutEXT( VkDevice device, VkIndirectCommandsLayoutEXT indirectCommandsLayout, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyIndirectCommandsLayoutEXT_default(device, indirectCommandsLayout, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyIndirectExecutionSetEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyIndirectCommandsLayoutEXT( VkDevice device, - VkIndirectExecutionSetEXT indirectExecutionSet, + VkIndirectCommandsLayoutEXT indirectCommandsLayout, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyIndirectExecutionSetEXT( VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyIndirectExecutionSetEXT_default(device, indirectExecutionSet, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyMicromapEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyIndirectExecutionSetEXT( VkDevice device, - VkMicromapEXT micromap, + VkIndirectExecutionSetEXT indirectExecutionSet, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyMicromapEXT( VkDevice device, VkMicromapEXT micromap, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyMicromapEXT_default(device, micromap, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipeline_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyMicromapEXT( VkDevice device, - VkPipeline pipeline, + VkMicromapEXT micromap, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipeline( VkDevice device, VkPipeline pipeline, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyPipeline_default(device, pipeline, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineBinaryKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipeline( VkDevice device, - VkPipelineBinaryKHR pipelineBinary, + VkPipeline pipeline, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineBinaryKHR( VkDevice device, VkPipelineBinaryKHR pipelineBinary, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyPipelineBinaryKHR_default(device, pipelineBinary, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineCache_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineBinaryKHR( VkDevice device, - VkPipelineCache pipelineCache, + VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineCache( VkDevice device, VkPipelineCache pipelineCache, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyPipelineCache_default(device, pipelineCache, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineLayout_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineCache( VkDevice device, - VkPipelineLayout pipelineLayout, + VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineLayout( VkDevice device, VkPipelineLayout pipelineLayout, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyPipelineLayout_default(device, pipelineLayout, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPrivateDataSlot_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPipelineLayout( VkDevice device, - VkPrivateDataSlot privateDataSlot, + VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyPrivateDataSlot_default(device, privateDataSlot, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPrivateDataSlotEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPrivateDataSlot( VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPrivateDataSlotEXT( VkDevice device, VkPrivateDataSlot privateDataSlot, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyPrivateDataSlotEXT_default(device, privateDataSlot, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyQueryPool_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyPrivateDataSlotEXT( VkDevice device, - VkQueryPool queryPool, + VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyQueryPool( VkDevice device, VkQueryPool queryPool, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyQueryPool_default(device, queryPool, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyRenderPass_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyQueryPool( VkDevice device, - VkRenderPass renderPass, + VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyRenderPass( VkDevice device, VkRenderPass renderPass, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyRenderPass_default(device, renderPass, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroySampler_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyRenderPass( VkDevice device, - VkSampler sampler, + VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroySampler( VkDevice device, VkSampler sampler, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroySampler_default(device, sampler, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroySamplerYcbcrConversion_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroySampler( VkDevice device, - VkSamplerYcbcrConversion ycbcrConversion, + VkSampler sampler, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroySamplerYcbcrConversion( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroySamplerYcbcrConversion_default(device, ycbcrConversion, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroySamplerYcbcrConversionKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroySamplerYcbcrConversion( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroySamplerYcbcrConversionKHR( VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroySamplerYcbcrConversionKHR_default(device, ycbcrConversion, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroySemaphore_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroySamplerYcbcrConversionKHR( VkDevice device, - VkSemaphore semaphore, + VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroySemaphore( VkDevice device, VkSemaphore semaphore, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroySemaphore_default(device, semaphore, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyShaderEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroySemaphore( VkDevice device, - VkShaderEXT shader, + VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyShaderEXT( VkDevice device, VkShaderEXT shader, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyShaderEXT_default(device, shader, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyShaderModule_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyShaderEXT( VkDevice device, - VkShaderModule shaderModule, + VkShaderEXT shader, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyShaderModule( VkDevice device, VkShaderModule shaderModule, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyShaderModule_default(device, shaderModule, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroySwapchainKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyShaderModule( VkDevice device, - VkSwapchainKHR swapchain, + VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroySwapchainKHR( VkDevice device, VkSwapchainKHR swapchain, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroySwapchainKHR_default(device, swapchain, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyTensorARM_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroySwapchainKHR( VkDevice device, - VkTensorARM tensor, + VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyTensorARM( VkDevice device, VkTensorARM tensor, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyTensorARM_default(device, tensor, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyTensorViewARM_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyTensorARM( VkDevice device, - VkTensorViewARM tensorView, + VkTensorARM tensor, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyTensorViewARM( VkDevice device, VkTensorViewARM tensorView, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyTensorViewARM_default(device, tensorView, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyValidationCacheEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyTensorViewARM( VkDevice device, - VkValidationCacheEXT validationCache, + VkTensorViewARM tensorView, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyValidationCacheEXT( VkDevice device, VkValidationCacheEXT validationCache, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyValidationCacheEXT_default(device, validationCache, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkDeviceWaitIdle_default( - VkDevice device); +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyValidationCacheEXT( + VkDevice device, + VkValidationCacheEXT validationCache, + const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkDeviceWaitIdle( - VkDevice device -) { - return layer_vkDeviceWaitIdle_default(device); -} + VkDevice device) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkDeviceWaitIdle( + VkDevice device); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkDisplayPowerControlEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, - const VkDisplayPowerInfoEXT* pDisplayPowerInfo); + const VkDisplayPowerInfoEXT* pDisplayPowerInfo) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkDisplayPowerControlEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkDisplayPowerControlEXT( VkDevice device, VkDisplayKHR display, - const VkDisplayPowerInfoEXT* pDisplayPowerInfo -) { - return layer_vkDisplayPowerControlEXT_default(device, display, pDisplayPowerInfo); -} + const VkDisplayPowerInfoEXT* pDisplayPowerInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkEndCommandBuffer_default( - VkCommandBuffer commandBuffer); - -/* Match-all template to use default implementation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkEndCommandBuffer( - VkCommandBuffer commandBuffer -) { - return layer_vkEndCommandBuffer_default(commandBuffer); -} + VkCommandBuffer commandBuffer) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkEndCommandBuffer( + VkCommandBuffer commandBuffer); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkFlushMappedMemoryRanges_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, - const VkMappedMemoryRange* pMemoryRanges); + const VkMappedMemoryRange* pMemoryRanges) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkFlushMappedMemoryRanges( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkFlushMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, - const VkMappedMemoryRange* pMemoryRanges -) { - return layer_vkFlushMappedMemoryRanges_default(device, memoryRangeCount, pMemoryRanges); -} + const VkMappedMemoryRange* pMemoryRanges); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkFreeCommandBuffers_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkFreeCommandBuffers( VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, - const VkCommandBuffer* pCommandBuffers); + const VkCommandBuffer* pCommandBuffers) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkFreeCommandBuffers( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkFreeCommandBuffers( VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, - const VkCommandBuffer* pCommandBuffers -) { - layer_vkFreeCommandBuffers_default(device, commandPool, commandBufferCount, pCommandBuffers); -} + const VkCommandBuffer* pCommandBuffers); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkFreeDescriptorSets_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkFreeDescriptorSets( VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, - const VkDescriptorSet* pDescriptorSets); + const VkDescriptorSet* pDescriptorSets) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkFreeDescriptorSets( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkFreeDescriptorSets( VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, - const VkDescriptorSet* pDescriptorSets -) { - return layer_vkFreeDescriptorSets_default(device, descriptorPool, descriptorSetCount, pDescriptorSets); -} + const VkDescriptorSet* pDescriptorSets); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkFreeMemory_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkFreeMemory( VkDevice device, VkDeviceMemory memory, - const VkAllocationCallbacks* pAllocator); + const VkAllocationCallbacks* pAllocator) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkFreeMemory( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkFreeMemory( VkDevice device, VkDeviceMemory memory, - const VkAllocationCallbacks* pAllocator -) { - layer_vkFreeMemory_default(device, memory, pAllocator); -} + const VkAllocationCallbacks* pAllocator); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetAccelerationStructureBuildSizesKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkGetAccelerationStructureBuildSizesKHR( VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, const uint32_t* pMaxPrimitiveCounts, - VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); + VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkGetAccelerationStructureBuildSizesKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetAccelerationStructureBuildSizesKHR( VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, const uint32_t* pMaxPrimitiveCounts, - VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo -) { - layer_vkGetAccelerationStructureBuildSizesKHR_default(device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo); -} + VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetAccelerationStructureDeviceAddressKHR_default( - VkDevice device, - const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); - -/* Match-all template to use default implementation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, - const VkAccelerationStructureDeviceAddressInfoKHR* pInfo -) { - return layer_vkGetAccelerationStructureDeviceAddressKHR_default(device, pInfo); -} + const VkAccelerationStructureDeviceAddressInfoKHR* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetAccelerationStructureDeviceAddressKHR( VkDevice device, - const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, - void* pData); + const VkAccelerationStructureDeviceAddressInfoKHR* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, - void* pData -) { - return layer_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT_default(device, pInfo, pData); -} + void* pData) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddress_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT( VkDevice device, - const VkBufferDeviceAddressInfo* pInfo); + const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, + void* pData); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddress( VkDevice device, - const VkBufferDeviceAddressInfo* pInfo -) { - return layer_vkGetBufferDeviceAddress_default(device, pInfo); -} + const VkBufferDeviceAddressInfo* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddressEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddress( VkDevice device, const VkBufferDeviceAddressInfo* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddressEXT( VkDevice device, - const VkBufferDeviceAddressInfo* pInfo -) { - return layer_vkGetBufferDeviceAddressEXT_default(device, pInfo); -} + const VkBufferDeviceAddressInfo* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddressKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddressEXT( VkDevice device, const VkBufferDeviceAddressInfo* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddressKHR( VkDevice device, - const VkBufferDeviceAddressInfo* pInfo -) { - return layer_vkGetBufferDeviceAddressKHR_default(device, pInfo); -} + const VkBufferDeviceAddressInfo* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkDeviceAddress VKAPI_CALL layer_vkGetBufferDeviceAddressKHR( VkDevice device, - VkBuffer buffer, - VkMemoryRequirements* pMemoryRequirements); + const VkBufferDeviceAddressInfo* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements( VkDevice device, VkBuffer buffer, - VkMemoryRequirements* pMemoryRequirements -) { - layer_vkGetBufferMemoryRequirements_default(device, buffer, pMemoryRequirements); -} + VkMemoryRequirements* pMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements2_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements( VkDevice device, - const VkBufferMemoryRequirementsInfo2* pInfo, - VkMemoryRequirements2* pMemoryRequirements); + VkBuffer buffer, + VkMemoryRequirements* pMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, - VkMemoryRequirements2* pMemoryRequirements -) { - layer_vkGetBufferMemoryRequirements2_default(device, pInfo, pMemoryRequirements); -} + VkMemoryRequirements2* pMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements2KHR_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements2( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements2KHR( VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, - VkMemoryRequirements2* pMemoryRequirements -) { - layer_vkGetBufferMemoryRequirements2KHR_default(device, pInfo, pMemoryRequirements); -} + VkMemoryRequirements2* pMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetBufferOpaqueCaptureAddress_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetBufferMemoryRequirements2KHR( VkDevice device, - const VkBufferDeviceAddressInfo* pInfo); + const VkBufferMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetBufferOpaqueCaptureAddress( VkDevice device, - const VkBufferDeviceAddressInfo* pInfo -) { - return layer_vkGetBufferOpaqueCaptureAddress_default(device, pInfo); -} + const VkBufferDeviceAddressInfo* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetBufferOpaqueCaptureAddressKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetBufferOpaqueCaptureAddress( VkDevice device, const VkBufferDeviceAddressInfo* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, - const VkBufferDeviceAddressInfo* pInfo -) { - return layer_vkGetBufferOpaqueCaptureAddressKHR_default(device, pInfo); -} + const VkBufferDeviceAddressInfo* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetBufferOpaqueCaptureDescriptorDataEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetBufferOpaqueCaptureAddressKHR( VkDevice device, - const VkBufferCaptureDescriptorDataInfoEXT* pInfo, - void* pData); + const VkBufferDeviceAddressInfo* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetBufferOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT* pInfo, - void* pData -) { - return layer_vkGetBufferOpaqueCaptureDescriptorDataEXT_default(device, pInfo, pData); -} + void* pData) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetCalibratedTimestampsEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetBufferOpaqueCaptureDescriptorDataEXT( VkDevice device, - uint32_t timestampCount, - const VkCalibratedTimestampInfoKHR* pTimestampInfos, - uint64_t* pTimestamps, - uint64_t* pMaxDeviation); + const VkBufferCaptureDescriptorDataInfoEXT* pInfo, + void* pData); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetCalibratedTimestampsEXT( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoKHR* pTimestampInfos, uint64_t* pTimestamps, - uint64_t* pMaxDeviation -) { - return layer_vkGetCalibratedTimestampsEXT_default(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation); -} + uint64_t* pMaxDeviation) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetCalibratedTimestampsKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetCalibratedTimestampsEXT( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoKHR* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetCalibratedTimestampsKHR( VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoKHR* pTimestampInfos, uint64_t* pTimestamps, - uint64_t* pMaxDeviation -) { - return layer_vkGetCalibratedTimestampsKHR_default(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation); -} + uint64_t* pMaxDeviation) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR uint32_t VKAPI_CALL layer_vkGetDeferredOperationMaxConcurrencyKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetCalibratedTimestampsKHR( VkDevice device, - VkDeferredOperationKHR operation); + uint32_t timestampCount, + const VkCalibratedTimestampInfoKHR* pTimestampInfos, + uint64_t* pTimestamps, + uint64_t* pMaxDeviation); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR uint32_t VKAPI_CALL layer_vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, - VkDeferredOperationKHR operation -) { - return layer_vkGetDeferredOperationMaxConcurrencyKHR_default(device, operation); -} + VkDeferredOperationKHR operation) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeferredOperationResultKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR uint32_t VKAPI_CALL layer_vkGetDeferredOperationMaxConcurrencyKHR( VkDevice device, VkDeferredOperationKHR operation); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeferredOperationResultKHR( VkDevice device, - VkDeferredOperationKHR operation -) { - return layer_vkGetDeferredOperationResultKHR_default(device, operation); -} + VkDeferredOperationKHR operation) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeferredOperationResultKHR( VkDevice device, - const VkDescriptorGetInfoEXT* pDescriptorInfo, - size_t dataSize, - void* pDescriptor); + VkDeferredOperationKHR operation); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorEXT( VkDevice device, const VkDescriptorGetInfoEXT* pDescriptorInfo, size_t dataSize, - void* pDescriptor -) { - layer_vkGetDescriptorEXT_default(device, pDescriptorInfo, dataSize, pDescriptor); -} + void* pDescriptor) = delete; -/* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutBindingOffsetEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorEXT( VkDevice device, - VkDescriptorSetLayout layout, - uint32_t binding, - VkDeviceSize* pOffset); + const VkDescriptorGetInfoEXT* pDescriptorInfo, + size_t dataSize, + void* pDescriptor); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutBindingOffsetEXT( VkDevice device, VkDescriptorSetLayout layout, uint32_t binding, - VkDeviceSize* pOffset -) { - layer_vkGetDescriptorSetLayoutBindingOffsetEXT_default(device, layout, binding, pOffset); -} + VkDeviceSize* pOffset) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSizeEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutBindingOffsetEXT( VkDevice device, VkDescriptorSetLayout layout, - VkDeviceSize* pLayoutSizeInBytes); + uint32_t binding, + VkDeviceSize* pOffset); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSizeEXT( VkDevice device, VkDescriptorSetLayout layout, - VkDeviceSize* pLayoutSizeInBytes -) { - layer_vkGetDescriptorSetLayoutSizeEXT_default(device, layout, pLayoutSizeInBytes); -} + VkDeviceSize* pLayoutSizeInBytes) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSupport_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSizeEXT( VkDevice device, - const VkDescriptorSetLayoutCreateInfo* pCreateInfo, - VkDescriptorSetLayoutSupport* pSupport); + VkDescriptorSetLayout layout, + VkDeviceSize* pLayoutSizeInBytes); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, - VkDescriptorSetLayoutSupport* pSupport -) { - layer_vkGetDescriptorSetLayoutSupport_default(device, pCreateInfo, pSupport); -} + VkDescriptorSetLayoutSupport* pSupport) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSupportKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSupport( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSupportKHR( VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, - VkDescriptorSetLayoutSupport* pSupport -) { - layer_vkGetDescriptorSetLayoutSupportKHR_default(device, pCreateInfo, pSupport); -} + VkDescriptorSetLayoutSupport* pSupport) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceAccelerationStructureCompatibilityKHR_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDescriptorSetLayoutSupportKHR( VkDevice device, - const VkAccelerationStructureVersionInfoKHR* pVersionInfo, - VkAccelerationStructureCompatibilityKHR* pCompatibility); + const VkDescriptorSetLayoutCreateInfo* pCreateInfo, + VkDescriptorSetLayoutSupport* pSupport); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, const VkAccelerationStructureVersionInfoKHR* pVersionInfo, - VkAccelerationStructureCompatibilityKHR* pCompatibility -) { - layer_vkGetDeviceAccelerationStructureCompatibilityKHR_default(device, pVersionInfo, pCompatibility); -} + VkAccelerationStructureCompatibilityKHR* pCompatibility) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceBufferMemoryRequirements_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceAccelerationStructureCompatibilityKHR( VkDevice device, - const VkDeviceBufferMemoryRequirements* pInfo, - VkMemoryRequirements2* pMemoryRequirements); + const VkAccelerationStructureVersionInfoKHR* pVersionInfo, + VkAccelerationStructureCompatibilityKHR* pCompatibility); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceBufferMemoryRequirements( VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, - VkMemoryRequirements2* pMemoryRequirements -) { - layer_vkGetDeviceBufferMemoryRequirements_default(device, pInfo, pMemoryRequirements); -} + VkMemoryRequirements2* pMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceBufferMemoryRequirementsKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceBufferMemoryRequirements( VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceBufferMemoryRequirementsKHR( VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, - VkMemoryRequirements2* pMemoryRequirements -) { - layer_vkGetDeviceBufferMemoryRequirementsKHR_default(device, pInfo, pMemoryRequirements); -} + VkMemoryRequirements2* pMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceFaultInfoEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceBufferMemoryRequirementsKHR( VkDevice device, - VkDeviceFaultCountsEXT* pFaultCounts, - VkDeviceFaultInfoEXT* pFaultInfo); + const VkDeviceBufferMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceFaultInfoEXT( VkDevice device, VkDeviceFaultCountsEXT* pFaultCounts, - VkDeviceFaultInfoEXT* pFaultInfo -) { - return layer_vkGetDeviceFaultInfoEXT_default(device, pFaultCounts, pFaultInfo); -} + VkDeviceFaultInfoEXT* pFaultInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceGroupPeerMemoryFeatures_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceFaultInfoEXT( VkDevice device, - uint32_t heapIndex, - uint32_t localDeviceIndex, - uint32_t remoteDeviceIndex, - VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); + VkDeviceFaultCountsEXT* pFaultCounts, + VkDeviceFaultInfoEXT* pFaultInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, - VkPeerMemoryFeatureFlags* pPeerMemoryFeatures -) { - layer_vkGetDeviceGroupPeerMemoryFeatures_default(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures); -} + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceGroupPeerMemoryFeaturesKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceGroupPeerMemoryFeatures( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, - VkPeerMemoryFeatureFlags* pPeerMemoryFeatures -) { - layer_vkGetDeviceGroupPeerMemoryFeaturesKHR_default(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures); -} + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceGroupPresentCapabilitiesKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceGroupPeerMemoryFeaturesKHR( VkDevice device, - VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); + uint32_t heapIndex, + uint32_t localDeviceIndex, + uint32_t remoteDeviceIndex, + VkPeerMemoryFeatureFlags* pPeerMemoryFeatures); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, - VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities -) { - return layer_vkGetDeviceGroupPresentCapabilitiesKHR_default(device, pDeviceGroupPresentCapabilities); -} + VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceGroupSurfacePresentModesKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceGroupPresentCapabilitiesKHR( VkDevice device, - VkSurfaceKHR surface, - VkDeviceGroupPresentModeFlagsKHR* pModes); + VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, VkSurfaceKHR surface, - VkDeviceGroupPresentModeFlagsKHR* pModes -) { - return layer_vkGetDeviceGroupSurfacePresentModesKHR_default(device, surface, pModes); -} + VkDeviceGroupPresentModeFlagsKHR* pModes) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageMemoryRequirements_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDeviceGroupSurfacePresentModesKHR( VkDevice device, - const VkDeviceImageMemoryRequirements* pInfo, - VkMemoryRequirements2* pMemoryRequirements); + VkSurfaceKHR surface, + VkDeviceGroupPresentModeFlagsKHR* pModes); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageMemoryRequirements( VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, - VkMemoryRequirements2* pMemoryRequirements -) { - layer_vkGetDeviceImageMemoryRequirements_default(device, pInfo, pMemoryRequirements); -} + VkMemoryRequirements2* pMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageMemoryRequirementsKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageMemoryRequirements( VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageMemoryRequirementsKHR( VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, - VkMemoryRequirements2* pMemoryRequirements -) { - layer_vkGetDeviceImageMemoryRequirementsKHR_default(device, pInfo, pMemoryRequirements); -} + VkMemoryRequirements2* pMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSparseMemoryRequirements_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageMemoryRequirementsKHR( VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, - uint32_t* pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + VkMemoryRequirements2* pMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSparseMemoryRequirements( VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2* pSparseMemoryRequirements -) { - layer_vkGetDeviceImageSparseMemoryRequirements_default(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); -} + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSparseMemoryRequirementsKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSparseMemoryRequirements( VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSparseMemoryRequirementsKHR( VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2* pSparseMemoryRequirements -) { - layer_vkGetDeviceImageSparseMemoryRequirementsKHR_default(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); -} + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSubresourceLayout_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSparseMemoryRequirementsKHR( VkDevice device, - const VkDeviceImageSubresourceInfo* pInfo, - VkSubresourceLayout2* pLayout); + const VkDeviceImageMemoryRequirements* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSubresourceLayout( VkDevice device, const VkDeviceImageSubresourceInfo* pInfo, - VkSubresourceLayout2* pLayout -) { - layer_vkGetDeviceImageSubresourceLayout_default(device, pInfo, pLayout); -} + VkSubresourceLayout2* pLayout) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSubresourceLayoutKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSubresourceLayout( VkDevice device, const VkDeviceImageSubresourceInfo* pInfo, VkSubresourceLayout2* pLayout); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSubresourceLayoutKHR( VkDevice device, const VkDeviceImageSubresourceInfo* pInfo, - VkSubresourceLayout2* pLayout -) { - layer_vkGetDeviceImageSubresourceLayoutKHR_default(device, pInfo, pLayout); -} + VkSubresourceLayout2* pLayout) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceMemoryCommitment_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageSubresourceLayoutKHR( VkDevice device, - VkDeviceMemory memory, - VkDeviceSize* pCommittedMemoryInBytes); + const VkDeviceImageSubresourceInfo* pInfo, + VkSubresourceLayout2* pLayout); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceMemoryCommitment( VkDevice device, VkDeviceMemory memory, - VkDeviceSize* pCommittedMemoryInBytes -) { - layer_vkGetDeviceMemoryCommitment_default(device, memory, pCommittedMemoryInBytes); -} + VkDeviceSize* pCommittedMemoryInBytes) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetDeviceMemoryOpaqueCaptureAddress_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceMemoryCommitment( VkDevice device, - const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); - -/* Match-all template to use default implementation. */ + VkDeviceMemory memory, + VkDeviceSize* pCommittedMemoryInBytes); + +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, - const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo -) { - return layer_vkGetDeviceMemoryOpaqueCaptureAddress_default(device, pInfo); -} + const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetDeviceMemoryOpaqueCaptureAddressKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetDeviceMemoryOpaqueCaptureAddress( VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, - const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo -) { - return layer_vkGetDeviceMemoryOpaqueCaptureAddressKHR_default(device, pInfo); -} + const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceMicromapCompatibilityEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR uint64_t VKAPI_CALL layer_vkGetDeviceMemoryOpaqueCaptureAddressKHR( VkDevice device, - const VkMicromapVersionInfoEXT* pVersionInfo, - VkAccelerationStructureCompatibilityKHR* pCompatibility); + const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceMicromapCompatibilityEXT( VkDevice device, const VkMicromapVersionInfoEXT* pVersionInfo, - VkAccelerationStructureCompatibilityKHR* pCompatibility -) { - layer_vkGetDeviceMicromapCompatibilityEXT_default(device, pVersionInfo, pCompatibility); -} + VkAccelerationStructureCompatibilityKHR* pCompatibility) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL layer_vkGetDeviceProcAddr_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceMicromapCompatibilityEXT( VkDevice device, - const char* pName); + const VkMicromapVersionInfoEXT* pVersionInfo, + VkAccelerationStructureCompatibilityKHR* pCompatibility); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL layer_vkGetDeviceProcAddr( VkDevice device, - const char* pName -) { - return layer_vkGetDeviceProcAddr_default(device, pName); -} + const char* pName) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceQueue_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL layer_vkGetDeviceProcAddr( VkDevice device, - uint32_t queueFamilyIndex, - uint32_t queueIndex, - VkQueue* pQueue); + const char* pName); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceQueue( VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, - VkQueue* pQueue -) { - layer_vkGetDeviceQueue_default(device, queueFamilyIndex, queueIndex, pQueue); -} + VkQueue* pQueue) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceQueue2_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceQueue( VkDevice device, - const VkDeviceQueueInfo2* pQueueInfo, + uint32_t queueFamilyIndex, + uint32_t queueIndex, VkQueue* pQueue); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceQueue2( VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, - VkQueue* pQueue -) { - layer_vkGetDeviceQueue2_default(device, pQueueInfo, pQueue); -} + VkQueue* pQueue) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceTensorMemoryRequirementsARM_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceQueue2( VkDevice device, - const VkDeviceTensorMemoryRequirementsARM* pInfo, - VkMemoryRequirements2* pMemoryRequirements); + const VkDeviceQueueInfo2* pQueueInfo, + VkQueue* pQueue); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceTensorMemoryRequirementsARM( VkDevice device, const VkDeviceTensorMemoryRequirementsARM* pInfo, - VkMemoryRequirements2* pMemoryRequirements -) { - layer_vkGetDeviceTensorMemoryRequirementsARM_default(device, pInfo, pMemoryRequirements); -} + VkMemoryRequirements2* pMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetEventStatus_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceTensorMemoryRequirementsARM( VkDevice device, - VkEvent event); + const VkDeviceTensorMemoryRequirementsARM* pInfo, + VkMemoryRequirements2* pMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetEventStatus( VkDevice device, - VkEvent event -) { - return layer_vkGetEventStatus_default(device, event); -} + VkEvent event) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetFenceFdKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetEventStatus( VkDevice device, - const VkFenceGetFdInfoKHR* pGetFdInfo, - int* pFd); + VkEvent event); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetFenceFdKHR( VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, - int* pFd -) { - return layer_vkGetFenceFdKHR_default(device, pGetFdInfo, pFd); -} + int* pFd) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetFenceStatus_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetFenceFdKHR( VkDevice device, - VkFence fence); + const VkFenceGetFdInfoKHR* pGetFdInfo, + int* pFd); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetFenceStatus( VkDevice device, - VkFence fence -) { - return layer_vkGetFenceStatus_default(device, fence); -} + VkFence fence) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetGeneratedCommandsMemoryRequirementsEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetFenceStatus( VkDevice device, - const VkGeneratedCommandsMemoryRequirementsInfoEXT* pInfo, - VkMemoryRequirements2* pMemoryRequirements); + VkFence fence); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetGeneratedCommandsMemoryRequirementsEXT( VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoEXT* pInfo, - VkMemoryRequirements2* pMemoryRequirements -) { - layer_vkGetGeneratedCommandsMemoryRequirementsEXT_default(device, pInfo, pMemoryRequirements); -} + VkMemoryRequirements2* pMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageDrmFormatModifierPropertiesEXT_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetGeneratedCommandsMemoryRequirementsEXT( VkDevice device, - VkImage image, - VkImageDrmFormatModifierPropertiesEXT* pProperties); + const VkGeneratedCommandsMemoryRequirementsInfoEXT* pInfo, + VkMemoryRequirements2* pMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, - VkImageDrmFormatModifierPropertiesEXT* pProperties -) { - return layer_vkGetImageDrmFormatModifierPropertiesEXT_default(device, image, pProperties); -} + VkImageDrmFormatModifierPropertiesEXT* pProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageDrmFormatModifierPropertiesEXT( VkDevice device, VkImage image, - VkMemoryRequirements* pMemoryRequirements); + VkImageDrmFormatModifierPropertiesEXT* pProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements( VkDevice device, VkImage image, - VkMemoryRequirements* pMemoryRequirements -) { - layer_vkGetImageMemoryRequirements_default(device, image, pMemoryRequirements); -} + VkMemoryRequirements* pMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements2_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements( VkDevice device, - const VkImageMemoryRequirementsInfo2* pInfo, - VkMemoryRequirements2* pMemoryRequirements); + VkImage image, + VkMemoryRequirements* pMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, - VkMemoryRequirements2* pMemoryRequirements -) { - layer_vkGetImageMemoryRequirements2_default(device, pInfo, pMemoryRequirements); -} + VkMemoryRequirements2* pMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements2( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements2KHR( VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, - VkMemoryRequirements2* pMemoryRequirements -) { - layer_vkGetImageMemoryRequirements2KHR_default(device, pInfo, pMemoryRequirements); -} + VkMemoryRequirements2* pMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageOpaqueCaptureDescriptorDataEXT_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageMemoryRequirements2KHR( VkDevice device, - const VkImageCaptureDescriptorDataInfoEXT* pInfo, - void* pData); + const VkImageMemoryRequirementsInfo2* pInfo, + VkMemoryRequirements2* pMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageCaptureDescriptorDataInfoEXT* pInfo, - void* pData -) { - return layer_vkGetImageOpaqueCaptureDescriptorDataEXT_default(device, pInfo, pData); -} + void* pData) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageOpaqueCaptureDescriptorDataEXT( VkDevice device, - VkImage image, - uint32_t* pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements* pSparseMemoryRequirements); + const VkImageCaptureDescriptorDataInfoEXT* pInfo, + void* pData); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements( VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements* pSparseMemoryRequirements -) { - layer_vkGetImageSparseMemoryRequirements_default(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements); -} + VkSparseImageMemoryRequirements* pSparseMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements2_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements( VkDevice device, - const VkImageSparseMemoryRequirementsInfo2* pInfo, + VkImage image, uint32_t* pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); + VkSparseImageMemoryRequirements* pSparseMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2* pSparseMemoryRequirements -) { - layer_vkGetImageSparseMemoryRequirements2_default(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); -} + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements2( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements2KHR( VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, - VkSparseImageMemoryRequirements2* pSparseMemoryRequirements -) { - layer_vkGetImageSparseMemoryRequirements2KHR_default(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); -} + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSparseMemoryRequirements2KHR( VkDevice device, - VkImage image, - const VkImageSubresource* pSubresource, - VkSubresourceLayout* pLayout); + const VkImageSparseMemoryRequirementsInfo2* pInfo, + uint32_t* pSparseMemoryRequirementCount, + VkSparseImageMemoryRequirements2* pSparseMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout( VkDevice device, VkImage image, const VkImageSubresource* pSubresource, - VkSubresourceLayout* pLayout -) { - layer_vkGetImageSubresourceLayout_default(device, image, pSubresource, pLayout); -} + VkSubresourceLayout* pLayout) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout( VkDevice device, VkImage image, - const VkImageSubresource2* pSubresource, - VkSubresourceLayout2* pLayout); + const VkImageSubresource* pSubresource, + VkSubresourceLayout* pLayout); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2( VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, - VkSubresourceLayout2* pLayout -) { - layer_vkGetImageSubresourceLayout2_default(device, image, pSubresource, pLayout); -} + VkSubresourceLayout2* pLayout) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2EXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2( VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, VkSubresourceLayout2* pLayout); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2EXT( VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, - VkSubresourceLayout2* pLayout -) { - layer_vkGetImageSubresourceLayout2EXT_default(device, image, pSubresource, pLayout); -} + VkSubresourceLayout2* pLayout) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2KHR_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2EXT( VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, VkSubresourceLayout2* pLayout); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2KHR( VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, - VkSubresourceLayout2* pLayout -) { - layer_vkGetImageSubresourceLayout2KHR_default(device, image, pSubresource, pLayout); -} + VkSubresourceLayout2* pLayout) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageViewOpaqueCaptureDescriptorDataEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetImageSubresourceLayout2KHR( VkDevice device, - const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, - void* pData); + VkImage image, + const VkImageSubresource2* pSubresource, + VkSubresourceLayout2* pLayout); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageViewOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, - void* pData -) { - return layer_vkGetImageViewOpaqueCaptureDescriptorDataEXT_default(device, pInfo, pData); -} + void* pData) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryFdKHR_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetImageViewOpaqueCaptureDescriptorDataEXT( VkDevice device, - const VkMemoryGetFdInfoKHR* pGetFdInfo, - int* pFd); + const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, + void* pData); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryFdKHR( VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, - int* pFd -) { - return layer_vkGetMemoryFdKHR_default(device, pGetFdInfo, pFd); -} + int* pFd) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryFdKHR( + VkDevice device, + const VkMemoryGetFdInfoKHR* pGetFdInfo, + int* pFd); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryFdPropertiesKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryFdPropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, - VkMemoryFdPropertiesKHR* pMemoryFdProperties); + VkMemoryFdPropertiesKHR* pMemoryFdProperties) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryFdPropertiesKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryFdPropertiesKHR( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, - VkMemoryFdPropertiesKHR* pMemoryFdProperties -) { - return layer_vkGetMemoryFdPropertiesKHR_default(device, handleType, fd, pMemoryFdProperties); -} + VkMemoryFdPropertiesKHR* pMemoryFdProperties); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryHostPointerPropertiesEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryHostPointerPropertiesEXT( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, - VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); + VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryHostPointerPropertiesEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetMemoryHostPointerPropertiesEXT( VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, - VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties -) { - return layer_vkGetMemoryHostPointerPropertiesEXT_default(device, handleType, pHostPointer, pMemoryHostPointerProperties); -} + VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetMicromapBuildSizesEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkGetMicromapBuildSizesEXT( VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkMicromapBuildInfoEXT* pBuildInfo, - VkMicromapBuildSizesInfoEXT* pSizeInfo); + VkMicromapBuildSizesInfoEXT* pSizeInfo) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkGetMicromapBuildSizesEXT( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetMicromapBuildSizesEXT( VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkMicromapBuildInfoEXT* pBuildInfo, - VkMicromapBuildSizesInfoEXT* pSizeInfo -) { - layer_vkGetMicromapBuildSizesEXT_default(device, buildType, pBuildInfo, pSizeInfo); -} + VkMicromapBuildSizesInfoEXT* pSizeInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineBinaryDataKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineBinaryDataKHR( VkDevice device, const VkPipelineBinaryDataInfoKHR* pInfo, VkPipelineBinaryKeyKHR* pPipelineBinaryKey, size_t* pPipelineBinaryDataSize, - void* pPipelineBinaryData); + void* pPipelineBinaryData) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineBinaryDataKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineBinaryDataKHR( VkDevice device, const VkPipelineBinaryDataInfoKHR* pInfo, VkPipelineBinaryKeyKHR* pPipelineBinaryKey, size_t* pPipelineBinaryDataSize, - void* pPipelineBinaryData -) { - return layer_vkGetPipelineBinaryDataKHR_default(device, pInfo, pPipelineBinaryKey, pPipelineBinaryDataSize, pPipelineBinaryData); -} + void* pPipelineBinaryData); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineCacheData_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, - void* pData); + void* pData) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineCacheData( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineCacheData( VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, - void* pData -) { - return layer_vkGetPipelineCacheData_default(device, pipelineCache, pDataSize, pData); -} + void* pData); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutableInternalRepresentationsKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, - VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); + VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutableInternalRepresentationsKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutableInternalRepresentationsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, - VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations -) { - return layer_vkGetPipelineExecutableInternalRepresentationsKHR_default(device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations); -} + VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutablePropertiesKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutablePropertiesKHR( VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, - VkPipelineExecutablePropertiesKHR* pProperties); + VkPipelineExecutablePropertiesKHR* pProperties) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutablePropertiesKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutablePropertiesKHR( VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, - VkPipelineExecutablePropertiesKHR* pProperties -) { - return layer_vkGetPipelineExecutablePropertiesKHR_default(device, pPipelineInfo, pExecutableCount, pProperties); -} + VkPipelineExecutablePropertiesKHR* pProperties); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutableStatisticsKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutableStatisticsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, - VkPipelineExecutableStatisticKHR* pStatistics); + VkPipelineExecutableStatisticKHR* pStatistics) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutableStatisticsKHR( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineExecutableStatisticsKHR( VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, - VkPipelineExecutableStatisticKHR* pStatistics -) { - return layer_vkGetPipelineExecutableStatisticsKHR_default(device, pExecutableInfo, pStatisticCount, pStatistics); -} + VkPipelineExecutableStatisticKHR* pStatistics); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineKeyKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineKeyKHR( VkDevice device, const VkPipelineCreateInfoKHR* pPipelineCreateInfo, - VkPipelineBinaryKeyKHR* pPipelineKey); + VkPipelineBinaryKeyKHR* pPipelineKey) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineKeyKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelineKeyKHR( VkDevice device, const VkPipelineCreateInfoKHR* pPipelineCreateInfo, - VkPipelineBinaryKeyKHR* pPipelineKey -) { - return layer_vkGetPipelineKeyKHR_default(device, pPipelineCreateInfo, pPipelineKey); -} + VkPipelineBinaryKeyKHR* pPipelineKey); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelinePropertiesEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelinePropertiesEXT( VkDevice device, const VkPipelineInfoEXT* pPipelineInfo, - VkBaseOutStructure* pPipelineProperties); + VkBaseOutStructure* pPipelineProperties) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelinePropertiesEXT( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPipelinePropertiesEXT( VkDevice device, const VkPipelineInfoEXT* pPipelineInfo, - VkBaseOutStructure* pPipelineProperties -) { - return layer_vkGetPipelinePropertiesEXT_default(device, pPipelineInfo, pPipelineProperties); -} + VkBaseOutStructure* pPipelineProperties); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPrivateData_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, - uint64_t* pData); + uint64_t* pData) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkGetPrivateData( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, - uint64_t* pData -) { - layer_vkGetPrivateData_default(device, objectType, objectHandle, privateDataSlot, pData); -} + uint64_t* pData); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPrivateDataEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, - uint64_t* pData); + uint64_t* pData) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkGetPrivateDataEXT( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, - uint64_t* pData -) { - layer_vkGetPrivateDataEXT_default(device, objectType, objectHandle, privateDataSlot, pData); -} + uint64_t* pData); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetQueryPoolResults_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, @@ -7278,11 +6463,11 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetQueryPoolResults_default( size_t dataSize, void* pData, VkDeviceSize stride, - VkQueryResultFlags flags); + VkQueryResultFlags flags) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetQueryPoolResults( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetQueryPoolResults( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, @@ -7290,1366 +6475,1209 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetQueryPoolResults( size_t dataSize, void* pData, VkDeviceSize stride, - VkQueryResultFlags flags -) { - return layer_vkGetQueryPoolResults_default(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags); -} + VkQueryResultFlags flags); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, - void* pData); + void* pData) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, - void* pData -) { - return layer_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR_default(device, pipeline, firstGroup, groupCount, dataSize, pData); -} + void* pData); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetRayTracingShaderGroupHandlesKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetRayTracingShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, - void* pData); + void* pData) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetRayTracingShaderGroupHandlesKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetRayTracingShaderGroupHandlesKHR( VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, - void* pData -) { - return layer_vkGetRayTracingShaderGroupHandlesKHR_default(device, pipeline, firstGroup, groupCount, dataSize, pData); -} + void* pData); /* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkDeviceSize VKAPI_CALL layer_vkGetRayTracingShaderGroupStackSizeKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkDeviceSize VKAPI_CALL layer_vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, VkPipeline pipeline, uint32_t group, - VkShaderGroupShaderKHR groupShader); + VkShaderGroupShaderKHR groupShader) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkDeviceSize VKAPI_CALL layer_vkGetRayTracingShaderGroupStackSizeKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkDeviceSize VKAPI_CALL layer_vkGetRayTracingShaderGroupStackSizeKHR( VkDevice device, VkPipeline pipeline, uint32_t group, - VkShaderGroupShaderKHR groupShader -) { - return layer_vkGetRayTracingShaderGroupStackSizeKHR_default(device, pipeline, group, groupShader); -} + VkShaderGroupShaderKHR groupShader); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderAreaGranularity_default( - VkDevice device, - VkRenderPass renderPass, - VkExtent2D* pGranularity); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderAreaGranularity( VkDevice device, VkRenderPass renderPass, - VkExtent2D* pGranularity -) { - layer_vkGetRenderAreaGranularity_default(device, renderPass, pGranularity); -} + VkExtent2D* pGranularity) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderingAreaGranularity_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderAreaGranularity( VkDevice device, - const VkRenderingAreaInfo* pRenderingAreaInfo, + VkRenderPass renderPass, VkExtent2D* pGranularity); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderingAreaGranularity( VkDevice device, const VkRenderingAreaInfo* pRenderingAreaInfo, - VkExtent2D* pGranularity -) { - layer_vkGetRenderingAreaGranularity_default(device, pRenderingAreaInfo, pGranularity); -} + VkExtent2D* pGranularity) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderingAreaGranularityKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderingAreaGranularity( VkDevice device, const VkRenderingAreaInfo* pRenderingAreaInfo, VkExtent2D* pGranularity); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderingAreaGranularityKHR( VkDevice device, const VkRenderingAreaInfo* pRenderingAreaInfo, - VkExtent2D* pGranularity -) { - layer_vkGetRenderingAreaGranularityKHR_default(device, pRenderingAreaInfo, pGranularity); -} + VkExtent2D* pGranularity) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSamplerOpaqueCaptureDescriptorDataEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetRenderingAreaGranularityKHR( VkDevice device, - const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, - void* pData); + const VkRenderingAreaInfo* pRenderingAreaInfo, + VkExtent2D* pGranularity); -/* Match-all template to use default implementation. 
*/ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSamplerOpaqueCaptureDescriptorDataEXT( VkDevice device, const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, - void* pData -) { - return layer_vkGetSamplerOpaqueCaptureDescriptorDataEXT_default(device, pInfo, pData); -} + void* pData) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreCounterValue_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSamplerOpaqueCaptureDescriptorDataEXT( VkDevice device, - VkSemaphore semaphore, - uint64_t* pValue); + const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, + void* pData); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, - uint64_t* pValue -) { - return layer_vkGetSemaphoreCounterValue_default(device, semaphore, pValue); -} + uint64_t* pValue) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreCounterValueKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreCounterValue( VkDevice device, VkSemaphore semaphore, uint64_t* pValue); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreCounterValueKHR( VkDevice device, VkSemaphore semaphore, - uint64_t* pValue -) { - return layer_vkGetSemaphoreCounterValueKHR_default(device, semaphore, pValue); -} + uint64_t* pValue) = delete; -/* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreFdKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreCounterValueKHR( VkDevice device, - const VkSemaphoreGetFdInfoKHR* pGetFdInfo, - int* pFd); + VkSemaphore semaphore, + uint64_t* pValue); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreFdKHR( VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, - int* pFd -) { - return layer_vkGetSemaphoreFdKHR_default(device, pGetFdInfo, pFd); -} + int* pFd) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetShaderBinaryDataEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSemaphoreFdKHR( VkDevice device, - VkShaderEXT shader, - size_t* pDataSize, - void* pData); + const VkSemaphoreGetFdInfoKHR* pGetFdInfo, + int* pFd); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetShaderBinaryDataEXT( VkDevice device, VkShaderEXT shader, size_t* pDataSize, - void* pData -) { - return layer_vkGetShaderBinaryDataEXT_default(device, shader, pDataSize, pData); -} + void* pData) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetShaderModuleCreateInfoIdentifierEXT_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetShaderBinaryDataEXT( VkDevice device, - const VkShaderModuleCreateInfo* pCreateInfo, - VkShaderModuleIdentifierEXT* pIdentifier); + VkShaderEXT shader, + size_t* pDataSize, + void* pData); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetShaderModuleCreateInfoIdentifierEXT( VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, - VkShaderModuleIdentifierEXT* pIdentifier -) { - layer_vkGetShaderModuleCreateInfoIdentifierEXT_default(device, pCreateInfo, pIdentifier); -} + VkShaderModuleIdentifierEXT* pIdentifier) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetShaderModuleIdentifierEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetShaderModuleCreateInfoIdentifierEXT( VkDevice device, - VkShaderModule shaderModule, + const VkShaderModuleCreateInfo* pCreateInfo, VkShaderModuleIdentifierEXT* pIdentifier); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetShaderModuleIdentifierEXT( VkDevice device, VkShaderModule shaderModule, - VkShaderModuleIdentifierEXT* pIdentifier -) { - layer_vkGetShaderModuleIdentifierEXT_default(device, shaderModule, pIdentifier); -} + VkShaderModuleIdentifierEXT* pIdentifier) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainCounterEXT_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetShaderModuleIdentifierEXT( VkDevice device, - VkSwapchainKHR swapchain, - VkSurfaceCounterFlagBitsEXT counter, - uint64_t* pCounterValue); + VkShaderModule shaderModule, + VkShaderModuleIdentifierEXT* pIdentifier); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, - uint64_t* pCounterValue -) { - return layer_vkGetSwapchainCounterEXT_default(device, swapchain, counter, pCounterValue); -} + uint64_t* pCounterValue) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainImagesKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainCounterEXT( VkDevice device, VkSwapchainKHR swapchain, - uint32_t* pSwapchainImageCount, - VkImage* pSwapchainImages); + VkSurfaceCounterFlagBitsEXT counter, + uint64_t* pCounterValue); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainImagesKHR( VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, - VkImage* pSwapchainImages -) { - return layer_vkGetSwapchainImagesKHR_default(device, swapchain, pSwapchainImageCount, pSwapchainImages); -} + VkImage* pSwapchainImages) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainStatusKHR_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainImagesKHR( VkDevice device, - VkSwapchainKHR swapchain); + VkSwapchainKHR swapchain, + uint32_t* pSwapchainImageCount, + VkImage* pSwapchainImages); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainStatusKHR( VkDevice device, - VkSwapchainKHR swapchain -) { - return layer_vkGetSwapchainStatusKHR_default(device, swapchain); -} + VkSwapchainKHR swapchain) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetTensorMemoryRequirementsARM_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetSwapchainStatusKHR( VkDevice device, - const VkTensorMemoryRequirementsInfoARM* pInfo, - VkMemoryRequirements2* pMemoryRequirements); + VkSwapchainKHR swapchain); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetTensorMemoryRequirementsARM( VkDevice device, const VkTensorMemoryRequirementsInfoARM* pInfo, - VkMemoryRequirements2* pMemoryRequirements -) { - layer_vkGetTensorMemoryRequirementsARM_default(device, pInfo, pMemoryRequirements); -} + VkMemoryRequirements2* pMemoryRequirements) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetTensorOpaqueCaptureDescriptorDataARM_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetTensorMemoryRequirementsARM( VkDevice device, - const VkTensorCaptureDescriptorDataInfoARM* pInfo, - void* pData); + const VkTensorMemoryRequirementsInfoARM* pInfo, + VkMemoryRequirements2* pMemoryRequirements); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetTensorOpaqueCaptureDescriptorDataARM( VkDevice device, const VkTensorCaptureDescriptorDataInfoARM* pInfo, - void* pData -) { - return layer_vkGetTensorOpaqueCaptureDescriptorDataARM_default(device, pInfo, pData); -} + void* pData) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetTensorViewOpaqueCaptureDescriptorDataARM_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetTensorOpaqueCaptureDescriptorDataARM( VkDevice device, - const VkTensorViewCaptureDescriptorDataInfoARM* pInfo, + const VkTensorCaptureDescriptorDataInfoARM* pInfo, void* pData); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetTensorViewOpaqueCaptureDescriptorDataARM( VkDevice device, const VkTensorViewCaptureDescriptorDataInfoARM* pInfo, - void* pData -) { - return layer_vkGetTensorViewOpaqueCaptureDescriptorDataARM_default(device, pInfo, pData); -} + void* pData) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetValidationCacheDataEXT_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetTensorViewOpaqueCaptureDescriptorDataARM( VkDevice device, - VkValidationCacheEXT validationCache, - size_t* pDataSize, + const VkTensorViewCaptureDescriptorDataInfoARM* pInfo, void* pData); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetValidationCacheDataEXT( VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, - void* pData -) { - return layer_vkGetValidationCacheDataEXT_default(device, validationCache, pDataSize, pData); -} + void* pData) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkImportFenceFdKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetValidationCacheDataEXT( VkDevice device, - const VkImportFenceFdInfoKHR* pImportFenceFdInfo); + VkValidationCacheEXT validationCache, + size_t* pDataSize, + void* pData); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkImportFenceFdKHR( VkDevice device, - const VkImportFenceFdInfoKHR* pImportFenceFdInfo -) { - return layer_vkImportFenceFdKHR_default(device, pImportFenceFdInfo); -} + const VkImportFenceFdInfoKHR* pImportFenceFdInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkImportSemaphoreFdKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkImportFenceFdKHR( VkDevice device, - const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); + const VkImportFenceFdInfoKHR* pImportFenceFdInfo); -/* Match-all template to use default implementation. 
*/ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkImportSemaphoreFdKHR( VkDevice device, - const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo -) { - return layer_vkImportSemaphoreFdKHR_default(device, pImportSemaphoreFdInfo); -} + const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkInvalidateMappedMemoryRanges_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkImportSemaphoreFdKHR( VkDevice device, - uint32_t memoryRangeCount, - const VkMappedMemoryRange* pMemoryRanges); + const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkInvalidateMappedMemoryRanges( VkDevice device, uint32_t memoryRangeCount, - const VkMappedMemoryRange* pMemoryRanges -) { - return layer_vkInvalidateMappedMemoryRanges_default(device, memoryRangeCount, pMemoryRanges); -} + const VkMappedMemoryRange* pMemoryRanges) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkInvalidateMappedMemoryRanges( + VkDevice device, + uint32_t memoryRangeCount, + const VkMappedMemoryRange* pMemoryRanges); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, - void** ppData); + void** ppData) = delete; -/* Match-all template to use default implementation. 
*/ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory( VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, - void** ppData -) { - return layer_vkMapMemory_default(device, memory, offset, size, flags, ppData); -} - -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory2_default( - VkDevice device, - const VkMemoryMapInfo* pMemoryMapInfo, void** ppData); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory2( VkDevice device, const VkMemoryMapInfo* pMemoryMapInfo, - void** ppData -) { - return layer_vkMapMemory2_default(device, pMemoryMapInfo, ppData); -} + void** ppData) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory2( VkDevice device, const VkMemoryMapInfo* pMemoryMapInfo, void** ppData); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory2KHR( VkDevice device, const VkMemoryMapInfo* pMemoryMapInfo, - void** ppData -) { - return layer_vkMapMemory2KHR_default(device, pMemoryMapInfo, ppData); -} + void** ppData) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkMapMemory2KHR( + VkDevice device, + const VkMemoryMapInfo* pMemoryMapInfo, + void** ppData); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkMergePipelineCaches_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, - const VkPipelineCache* pSrcCaches); + const VkPipelineCache* pSrcCaches) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkMergePipelineCaches( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkMergePipelineCaches( VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, - const VkPipelineCache* pSrcCaches -) { - return layer_vkMergePipelineCaches_default(device, dstCache, srcCacheCount, pSrcCaches); -} + const VkPipelineCache* pSrcCaches); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkMergeValidationCachesEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, - const VkValidationCacheEXT* pSrcCaches); + const VkValidationCacheEXT* pSrcCaches) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkMergeValidationCachesEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkMergeValidationCachesEXT( VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, - const VkValidationCacheEXT* pSrcCaches -) { - return layer_vkMergeValidationCachesEXT_default(device, dstCache, srcCacheCount, pSrcCaches); -} + const VkValidationCacheEXT* pSrcCaches); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkQueueBeginDebugUtilsLabelEXT_default( - VkQueue queue, - const VkDebugUtilsLabelEXT* pLabelInfo); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkQueueBeginDebugUtilsLabelEXT( VkQueue queue, - const VkDebugUtilsLabelEXT* pLabelInfo -) { - layer_vkQueueBeginDebugUtilsLabelEXT_default(queue, pLabelInfo); -} + const VkDebugUtilsLabelEXT* pLabelInfo) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkQueueBeginDebugUtilsLabelEXT( + VkQueue queue, + const VkDebugUtilsLabelEXT* pLabelInfo); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueBindSparse_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, - VkFence fence); + VkFence fence) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueBindSparse( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueBindSparse( VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, - VkFence fence -) { - return layer_vkQueueBindSparse_default(queue, bindInfoCount, pBindInfo, fence); -} + VkFence fence); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkQueueEndDebugUtilsLabelEXT_default( - VkQueue queue); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkQueueEndDebugUtilsLabelEXT( - VkQueue queue -) { - layer_vkQueueEndDebugUtilsLabelEXT_default(queue); -} + VkQueue queue) = delete; -/* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkQueueInsertDebugUtilsLabelEXT_default( - VkQueue queue, - const VkDebugUtilsLabelEXT* pLabelInfo); +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkQueueEndDebugUtilsLabelEXT( + VkQueue queue); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, - const VkDebugUtilsLabelEXT* pLabelInfo -) { - layer_vkQueueInsertDebugUtilsLabelEXT_default(queue, pLabelInfo); -} + const VkDebugUtilsLabelEXT* pLabelInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueuePresentKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkQueueInsertDebugUtilsLabelEXT( VkQueue queue, - const VkPresentInfoKHR* pPresentInfo); + const VkDebugUtilsLabelEXT* pLabelInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueuePresentKHR( VkQueue queue, - const VkPresentInfoKHR* pPresentInfo -) { - return layer_vkQueuePresentKHR_default(queue, pPresentInfo); -} + const VkPresentInfoKHR* pPresentInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueuePresentKHR( VkQueue queue, - uint32_t submitCount, - const VkSubmitInfo* pSubmits, - VkFence fence); + const VkPresentInfoKHR* pPresentInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit( VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, - VkFence fence -) { - return layer_vkQueueSubmit_default(queue, submitCount, pSubmits, fence); -} + VkFence fence) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit2_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit( VkQueue queue, uint32_t submitCount, - const VkSubmitInfo2* pSubmits, + const VkSubmitInfo* pSubmits, VkFence fence); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, - VkFence fence -) { - return layer_vkQueueSubmit2_default(queue, submitCount, pSubmits, fence); -} + VkFence fence) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit2( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit2KHR( VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, - VkFence fence -) { - return layer_vkQueueSubmit2KHR_default(queue, submitCount, pSubmits, fence); -} + VkFence fence) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueWaitIdle_default( - VkQueue queue); +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueSubmit2KHR( + VkQueue queue, + uint32_t submitCount, + const VkSubmitInfo2* pSubmits, + VkFence fence); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueWaitIdle( - VkQueue queue -) { - return layer_vkQueueWaitIdle_default(queue); -} + VkQueue queue) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkQueueWaitIdle( + VkQueue queue); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkRegisterDeviceEventEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, - VkFence* pFence); + VkFence* pFence) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkRegisterDeviceEventEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkRegisterDeviceEventEXT( VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, - VkFence* pFence -) { - return layer_vkRegisterDeviceEventEXT_default(device, pDeviceEventInfo, pAllocator, pFence); -} + VkFence* pFence); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkRegisterDisplayEventEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, - VkFence* pFence); + VkFence* pFence) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkRegisterDisplayEventEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkRegisterDisplayEventEXT( VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, - VkFence* pFence -) { - return layer_vkRegisterDisplayEventEXT_default(device, display, pDisplayEventInfo, pAllocator, pFence); -} + VkFence* pFence); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseCapturedPipelineDataKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseCapturedPipelineDataKHR( VkDevice device, const VkReleaseCapturedPipelineDataInfoKHR* pInfo, - const VkAllocationCallbacks* pAllocator); + const VkAllocationCallbacks* pAllocator) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseCapturedPipelineDataKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseCapturedPipelineDataKHR( VkDevice device, const VkReleaseCapturedPipelineDataInfoKHR* pInfo, - const VkAllocationCallbacks* pAllocator -) { - return layer_vkReleaseCapturedPipelineDataKHR_default(device, pInfo, pAllocator); -} + const VkAllocationCallbacks* pAllocator); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkReleaseProfilingLockKHR_default( - VkDevice device); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkReleaseProfilingLockKHR( - VkDevice device -) { - layer_vkReleaseProfilingLockKHR_default(device); -} + VkDevice device) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseSwapchainImagesEXT_default( - VkDevice device, - const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo); +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkReleaseProfilingLockKHR( + VkDevice device); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseSwapchainImagesEXT( VkDevice device, - const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo -) { - return layer_vkReleaseSwapchainImagesEXT_default(device, pReleaseInfo); -} + const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetCommandBuffer_default( - VkCommandBuffer commandBuffer, - VkCommandBufferResetFlags flags); +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseSwapchainImagesEXT( + VkDevice device, + const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetCommandBuffer( VkCommandBuffer commandBuffer, - VkCommandBufferResetFlags flags -) { - return layer_vkResetCommandBuffer_default(commandBuffer, flags); -} + VkCommandBufferResetFlags flags) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetCommandPool_default( - VkDevice device, - VkCommandPool commandPool, - VkCommandPoolResetFlags flags); +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetCommandBuffer( + VkCommandBuffer commandBuffer, + VkCommandBufferResetFlags flags); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetCommandPool( VkDevice device, VkCommandPool commandPool, - VkCommandPoolResetFlags flags -) { - return layer_vkResetCommandPool_default(device, commandPool, flags); -} + VkCommandPoolResetFlags flags) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetDescriptorPool_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetCommandPool( VkDevice device, - VkDescriptorPool descriptorPool, - VkDescriptorPoolResetFlags flags); + VkCommandPool commandPool, + VkCommandPoolResetFlags flags); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetDescriptorPool( VkDevice device, VkDescriptorPool descriptorPool, - VkDescriptorPoolResetFlags flags -) { - return layer_vkResetDescriptorPool_default(device, descriptorPool, flags); -} + VkDescriptorPoolResetFlags flags) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetEvent_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetDescriptorPool( VkDevice device, - VkEvent event); + VkDescriptorPool descriptorPool, + VkDescriptorPoolResetFlags flags); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetEvent( VkDevice device, - VkEvent event -) { - return layer_vkResetEvent_default(device, event); -} + VkEvent event) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetFences_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetEvent( VkDevice device, - uint32_t fenceCount, - const VkFence* pFences); + VkEvent event); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetFences( VkDevice device, uint32_t fenceCount, - const VkFence* pFences -) { - return layer_vkResetFences_default(device, fenceCount, pFences); -} + const VkFence* pFences) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkResetQueryPool_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkResetFences( VkDevice device, - VkQueryPool queryPool, - uint32_t firstQuery, - uint32_t queryCount); + uint32_t fenceCount, + const VkFence* pFences); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, - uint32_t queryCount -) { - layer_vkResetQueryPool_default(device, queryPool, firstQuery, queryCount); -} + uint32_t queryCount) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkResetQueryPoolEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkResetQueryPool( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkResetQueryPoolEXT( VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, - uint32_t queryCount -) { - layer_vkResetQueryPoolEXT_default(device, queryPool, firstQuery, queryCount); -} + uint32_t queryCount) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetDebugUtilsObjectNameEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkResetQueryPoolEXT( VkDevice device, - const VkDebugUtilsObjectNameInfoEXT* pNameInfo); + VkQueryPool queryPool, + uint32_t firstQuery, + uint32_t queryCount); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetDebugUtilsObjectNameEXT( VkDevice device, - const VkDebugUtilsObjectNameInfoEXT* pNameInfo -) { - return layer_vkSetDebugUtilsObjectNameEXT_default(device, pNameInfo); -} + const VkDebugUtilsObjectNameInfoEXT* pNameInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetDebugUtilsObjectTagEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetDebugUtilsObjectNameEXT( VkDevice device, - const VkDebugUtilsObjectTagInfoEXT* pTagInfo); + const VkDebugUtilsObjectNameInfoEXT* pNameInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetDebugUtilsObjectTagEXT( VkDevice device, - const VkDebugUtilsObjectTagInfoEXT* pTagInfo -) { - return layer_vkSetDebugUtilsObjectTagEXT_default(device, pTagInfo); -} + const VkDebugUtilsObjectTagInfoEXT* pTagInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkSetDeviceMemoryPriorityEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetDebugUtilsObjectTagEXT( VkDevice device, - VkDeviceMemory memory, - float priority); + const VkDebugUtilsObjectTagInfoEXT* pTagInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkSetDeviceMemoryPriorityEXT( VkDevice device, VkDeviceMemory memory, - float priority -) { - layer_vkSetDeviceMemoryPriorityEXT_default(device, memory, priority); -} + float priority) = delete; -/* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetEvent_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkSetDeviceMemoryPriorityEXT( VkDevice device, - VkEvent event); + VkDeviceMemory memory, + float priority); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetEvent( VkDevice device, - VkEvent event -) { - return layer_vkSetEvent_default(device, event); -} + VkEvent event) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkSetHdrMetadataEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetEvent( VkDevice device, - uint32_t swapchainCount, - const VkSwapchainKHR* pSwapchains, - const VkHdrMetadataEXT* pMetadata); + VkEvent event); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkSetHdrMetadataEXT( VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, - const VkHdrMetadataEXT* pMetadata -) { - layer_vkSetHdrMetadataEXT_default(device, swapchainCount, pSwapchains, pMetadata); -} + const VkHdrMetadataEXT* pMetadata) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetPrivateData_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkSetHdrMetadataEXT( VkDevice device, - VkObjectType objectType, - uint64_t objectHandle, - VkPrivateDataSlot privateDataSlot, - uint64_t data); + uint32_t swapchainCount, + const VkSwapchainKHR* pSwapchains, + const VkHdrMetadataEXT* pMetadata); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, - uint64_t data -) { - return layer_vkSetPrivateData_default(device, objectType, objectHandle, privateDataSlot, data); -} + uint64_t data) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetPrivateDataEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetPrivateData( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetPrivateDataEXT( VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, - uint64_t data -) { - return layer_vkSetPrivateDataEXT_default(device, objectType, objectHandle, privateDataSlot, data); -} + uint64_t data) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkSignalSemaphore_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkSetPrivateDataEXT( VkDevice device, - const VkSemaphoreSignalInfo* pSignalInfo); + VkObjectType objectType, + uint64_t objectHandle, + VkPrivateDataSlot privateDataSlot, + uint64_t data); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkSignalSemaphore( VkDevice device, - const VkSemaphoreSignalInfo* pSignalInfo -) { - return layer_vkSignalSemaphore_default(device, pSignalInfo); -} + const VkSemaphoreSignalInfo* pSignalInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkSignalSemaphoreKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkSignalSemaphore( VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkSignalSemaphoreKHR( VkDevice device, - const VkSemaphoreSignalInfo* pSignalInfo -) { - return layer_vkSignalSemaphoreKHR_default(device, pSignalInfo); -} + const VkSemaphoreSignalInfo* pSignalInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkTransitionImageLayout_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkSignalSemaphoreKHR( VkDevice device, - uint32_t transitionCount, - const VkHostImageLayoutTransitionInfo* pTransitions); + const VkSemaphoreSignalInfo* pSignalInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkTransitionImageLayout( VkDevice device, uint32_t transitionCount, - const VkHostImageLayoutTransitionInfo* pTransitions -) { - return layer_vkTransitionImageLayout_default(device, transitionCount, pTransitions); -} + const VkHostImageLayoutTransitionInfo* pTransitions) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkTransitionImageLayoutEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkTransitionImageLayout( VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfo* pTransitions); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkTransitionImageLayoutEXT( VkDevice device, uint32_t transitionCount, - const VkHostImageLayoutTransitionInfo* pTransitions -) { - return layer_vkTransitionImageLayoutEXT_default(device, transitionCount, pTransitions); -} + const VkHostImageLayoutTransitionInfo* pTransitions) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkTrimCommandPool_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkTransitionImageLayoutEXT( VkDevice device, - VkCommandPool commandPool, - VkCommandPoolTrimFlags flags); + uint32_t transitionCount, + const VkHostImageLayoutTransitionInfo* pTransitions); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, - VkCommandPoolTrimFlags flags -) { - layer_vkTrimCommandPool_default(device, commandPool, flags); -} + VkCommandPoolTrimFlags flags) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkTrimCommandPoolKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkTrimCommandPool( VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkTrimCommandPoolKHR( VkDevice device, VkCommandPool commandPool, - VkCommandPoolTrimFlags flags -) { - layer_vkTrimCommandPoolKHR_default(device, commandPool, flags); -} + VkCommandPoolTrimFlags flags) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkUnmapMemory_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkTrimCommandPoolKHR( VkDevice device, - VkDeviceMemory memory); + VkCommandPool commandPool, + VkCommandPoolTrimFlags flags); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkUnmapMemory( VkDevice device, - VkDeviceMemory memory -) { - layer_vkUnmapMemory_default(device, memory); -} + VkDeviceMemory memory) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkUnmapMemory2_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkUnmapMemory( VkDevice device, - const VkMemoryUnmapInfo* pMemoryUnmapInfo); + VkDeviceMemory memory); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkUnmapMemory2( VkDevice device, - const VkMemoryUnmapInfo* pMemoryUnmapInfo -) { - return layer_vkUnmapMemory2_default(device, pMemoryUnmapInfo); -} + const VkMemoryUnmapInfo* pMemoryUnmapInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkUnmapMemory2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkUnmapMemory2( VkDevice device, const VkMemoryUnmapInfo* pMemoryUnmapInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkUnmapMemory2KHR( VkDevice device, - const VkMemoryUnmapInfo* pMemoryUnmapInfo -) { - return layer_vkUnmapMemory2KHR_default(device, pMemoryUnmapInfo); -} + const VkMemoryUnmapInfo* pMemoryUnmapInfo) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSetWithTemplate_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkUnmapMemory2KHR( VkDevice device, - VkDescriptorSet descriptorSet, - VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const void* pData); + const VkMemoryUnmapInfo* pMemoryUnmapInfo); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const void* pData -) { - layer_vkUpdateDescriptorSetWithTemplate_default(device, descriptorSet, descriptorUpdateTemplate, pData); -} + const void* pData) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSetWithTemplateKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSetWithTemplate( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSetWithTemplateKHR( VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, - const void* pData -) { - layer_vkUpdateDescriptorSetWithTemplateKHR_default(device, descriptorSet, descriptorUpdateTemplate, pData); -} + const void* pData) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSetWithTemplateKHR( + VkDevice device, + VkDescriptorSet descriptorSet, + VkDescriptorUpdateTemplate descriptorUpdateTemplate, + const void* pData); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSets_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, - const VkCopyDescriptorSet* pDescriptorCopies); + const VkCopyDescriptorSet* pDescriptorCopies) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSets( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkUpdateDescriptorSets( VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, - const VkCopyDescriptorSet* pDescriptorCopies -) { - layer_vkUpdateDescriptorSets_default(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies); -} + const VkCopyDescriptorSet* pDescriptorCopies); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkUpdateIndirectExecutionSetPipelineEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkUpdateIndirectExecutionSetPipelineEXT( VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, - const VkWriteIndirectExecutionSetPipelineEXT* pExecutionSetWrites); + const VkWriteIndirectExecutionSetPipelineEXT* pExecutionSetWrites) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkUpdateIndirectExecutionSetPipelineEXT( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkUpdateIndirectExecutionSetPipelineEXT( VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, - const VkWriteIndirectExecutionSetPipelineEXT* pExecutionSetWrites -) { - layer_vkUpdateIndirectExecutionSetPipelineEXT_default(device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites); -} + const VkWriteIndirectExecutionSetPipelineEXT* pExecutionSetWrites); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkUpdateIndirectExecutionSetShaderEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkUpdateIndirectExecutionSetShaderEXT( VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, - const VkWriteIndirectExecutionSetShaderEXT* pExecutionSetWrites); + const VkWriteIndirectExecutionSetShaderEXT* pExecutionSetWrites) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkUpdateIndirectExecutionSetShaderEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkUpdateIndirectExecutionSetShaderEXT( VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, - const VkWriteIndirectExecutionSetShaderEXT* pExecutionSetWrites -) { - layer_vkUpdateIndirectExecutionSetShaderEXT_default(device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites); -} + const VkWriteIndirectExecutionSetShaderEXT* pExecutionSetWrites); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForFences_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, - uint64_t timeout); + uint64_t timeout) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForFences( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForFences( VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, - uint64_t timeout -) { - return layer_vkWaitForFences_default(device, fenceCount, pFences, waitAll, timeout); -} + uint64_t timeout); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForPresent2KHR_default( - VkDevice device, - VkSwapchainKHR swapchain, - const VkPresentWait2InfoKHR* pPresentWait2Info); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForPresent2KHR( VkDevice device, VkSwapchainKHR swapchain, - const VkPresentWait2InfoKHR* pPresentWait2Info -) { - return layer_vkWaitForPresent2KHR_default(device, swapchain, pPresentWait2Info); -} + const VkPresentWait2InfoKHR* pPresentWait2Info) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForPresentKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForPresent2KHR( VkDevice device, VkSwapchainKHR swapchain, - uint64_t presentId, - uint64_t timeout); + const VkPresentWait2InfoKHR* pPresentWait2Info); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForPresentKHR( VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, - uint64_t timeout -) { - return layer_vkWaitForPresentKHR_default(device, swapchain, presentId, timeout); -} + uint64_t timeout) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitSemaphores_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitForPresentKHR( VkDevice device, - const VkSemaphoreWaitInfo* pWaitInfo, + VkSwapchainKHR swapchain, + uint64_t presentId, uint64_t timeout); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, - uint64_t timeout -) { - return layer_vkWaitSemaphores_default(device, pWaitInfo, timeout); -} + uint64_t timeout) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitSemaphoresKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitSemaphores( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitSemaphoresKHR( VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, - uint64_t timeout -) { - return layer_vkWaitSemaphoresKHR_default(device, pWaitInfo, timeout); -} + uint64_t timeout) = delete; + +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkWaitSemaphoresKHR( + VkDevice device, + const VkSemaphoreWaitInfo* pWaitInfo, + uint64_t timeout); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkWriteAccelerationStructuresPropertiesKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, - size_t stride); + size_t stride) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkWriteAccelerationStructuresPropertiesKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkWriteAccelerationStructuresPropertiesKHR( VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, - size_t stride -) { - return layer_vkWriteAccelerationStructuresPropertiesKHR_default(device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride); -} + size_t stride); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkWriteMicromapsPropertiesEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkWriteMicromapsPropertiesEXT( VkDevice device, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, size_t dataSize, void* pData, - size_t stride); + size_t stride) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkWriteMicromapsPropertiesEXT( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkWriteMicromapsPropertiesEXT( VkDevice device, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, size_t dataSize, void* pData, - size_t stride -) { - return layer_vkWriteMicromapsPropertiesEXT_default(device, micromapCount, pMicromaps, queryType, dataSize, pData, stride); -} + size_t stride); // clang-format on diff --git a/source_common/framework/device_functions_query.hpp b/source_common/framework/device_functions_query.hpp new file mode 100644 index 0000000..a5752ba --- /dev/null +++ b/source_common/framework/device_functions_query.hpp @@ -0,0 +1,10748 @@ +/* + * SPDX-License-Identifier: MIT + * ---------------------------------------------------------------------------- + * Copyright (c) 2024-2025 Arm Limited + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. 
+ * ---------------------------------------------------------------------------- + */ + +#pragma once + +// clang-format off + +#include + +#include "framework/utils.hpp" + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkAcquireNextImage2KHR = requires( + VkDevice device, const VkAcquireNextImageInfoKHR* pAcquireInfo, uint32_t* pImageIndex +) { + layer_vkAcquireNextImage2KHR(device, pAcquireInfo, pImageIndex); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkAcquireNextImage2KHR getLayerPtr_vkAcquireNextImage2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkAcquireNextImage2KHR) + { + return layer_vkAcquireNextImage2KHR; + } + + return layer_vkAcquireNextImage2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkAcquireNextImageKHR = requires( + VkDevice device, VkSwapchainKHR swapchain, uint64_t timeout, VkSemaphore semaphore, VkFence fence, uint32_t* pImageIndex +) { + layer_vkAcquireNextImageKHR(device, swapchain, timeout, semaphore, fence, pImageIndex); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkAcquireNextImageKHR getLayerPtr_vkAcquireNextImageKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkAcquireNextImageKHR) + { + return layer_vkAcquireNextImageKHR; + } + + return layer_vkAcquireNextImageKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkAcquireProfilingLockKHR = requires( + VkDevice device, const VkAcquireProfilingLockInfoKHR* pInfo +) { + layer_vkAcquireProfilingLockKHR(device, pInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkAcquireProfilingLockKHR getLayerPtr_vkAcquireProfilingLockKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkAcquireProfilingLockKHR) + { + return layer_vkAcquireProfilingLockKHR; + } + + return layer_vkAcquireProfilingLockKHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkAllocateCommandBuffers = requires( + VkDevice device, const VkCommandBufferAllocateInfo* pAllocateInfo, VkCommandBuffer* pCommandBuffers +) { + layer_vkAllocateCommandBuffers(device, pAllocateInfo, pCommandBuffers); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkAllocateCommandBuffers getLayerPtr_vkAllocateCommandBuffers() +{ + return [] + { + if constexpr(hasLayerPtr_vkAllocateCommandBuffers) + { + return layer_vkAllocateCommandBuffers; + } + + return layer_vkAllocateCommandBuffers; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkAllocateDescriptorSets = requires( + VkDevice device, const VkDescriptorSetAllocateInfo* pAllocateInfo, VkDescriptorSet* pDescriptorSets +) { + layer_vkAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkAllocateDescriptorSets getLayerPtr_vkAllocateDescriptorSets() +{ + return [] + { + if constexpr(hasLayerPtr_vkAllocateDescriptorSets) + { + return layer_vkAllocateDescriptorSets; + } + + return layer_vkAllocateDescriptorSets; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkAllocateMemory = requires( + VkDevice device, const VkMemoryAllocateInfo* pAllocateInfo, const VkAllocationCallbacks* pAllocator, VkDeviceMemory* pMemory +) { + layer_vkAllocateMemory(device, pAllocateInfo, pAllocator, pMemory); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkAllocateMemory getLayerPtr_vkAllocateMemory() +{ + return [] + { + if constexpr(hasLayerPtr_vkAllocateMemory) + { + return layer_vkAllocateMemory; + } + + return layer_vkAllocateMemory; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkBeginCommandBuffer = requires( + VkCommandBuffer commandBuffer, const VkCommandBufferBeginInfo* pBeginInfo +) { + layer_vkBeginCommandBuffer(commandBuffer, pBeginInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkBeginCommandBuffer getLayerPtr_vkBeginCommandBuffer() +{ + return [] + { + if constexpr(hasLayerPtr_vkBeginCommandBuffer) + { + return layer_vkBeginCommandBuffer; + } + + return layer_vkBeginCommandBuffer; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkBindBufferMemory = requires( + VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset +) { + layer_vkBindBufferMemory(device, buffer, memory, memoryOffset); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkBindBufferMemory getLayerPtr_vkBindBufferMemory() +{ + return [] + { + if constexpr(hasLayerPtr_vkBindBufferMemory) + { + return layer_vkBindBufferMemory; + } + + return layer_vkBindBufferMemory; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkBindBufferMemory2 = requires( + VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos +) { + layer_vkBindBufferMemory2(device, bindInfoCount, pBindInfos); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkBindBufferMemory2 getLayerPtr_vkBindBufferMemory2() +{ + return [] + { + if constexpr(hasLayerPtr_vkBindBufferMemory2) + { + return layer_vkBindBufferMemory2; + } + + return layer_vkBindBufferMemory2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkBindBufferMemory2KHR = requires( + VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos +) { + layer_vkBindBufferMemory2KHR(device, bindInfoCount, pBindInfos); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkBindBufferMemory2KHR getLayerPtr_vkBindBufferMemory2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkBindBufferMemory2KHR) + { + return layer_vkBindBufferMemory2KHR; + } + + return layer_vkBindBufferMemory2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkBindImageMemory = requires( + VkDevice device, VkImage image, VkDeviceMemory memory, VkDeviceSize memoryOffset +) { + layer_vkBindImageMemory(device, image, memory, memoryOffset); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkBindImageMemory getLayerPtr_vkBindImageMemory() +{ + return [] + { + if constexpr(hasLayerPtr_vkBindImageMemory) + { + return layer_vkBindImageMemory; + } + + return layer_vkBindImageMemory; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkBindImageMemory2 = requires( + VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos +) { + layer_vkBindImageMemory2(device, bindInfoCount, pBindInfos); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkBindImageMemory2 getLayerPtr_vkBindImageMemory2() +{ + return [] + { + if constexpr(hasLayerPtr_vkBindImageMemory2) + { + return layer_vkBindImageMemory2; + } + + return layer_vkBindImageMemory2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkBindImageMemory2KHR = requires( + VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos +) { + layer_vkBindImageMemory2KHR(device, bindInfoCount, pBindInfos); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkBindImageMemory2KHR getLayerPtr_vkBindImageMemory2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkBindImageMemory2KHR) + { + return layer_vkBindImageMemory2KHR; + } + + return layer_vkBindImageMemory2KHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkBindTensorMemoryARM = requires( + VkDevice device, uint32_t bindInfoCount, const VkBindTensorMemoryInfoARM* pBindInfos +) { + layer_vkBindTensorMemoryARM(device, bindInfoCount, pBindInfos); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkBindTensorMemoryARM getLayerPtr_vkBindTensorMemoryARM() +{ + return [] + { + if constexpr(hasLayerPtr_vkBindTensorMemoryARM) + { + return layer_vkBindTensorMemoryARM; + } + + return layer_vkBindTensorMemoryARM; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkBuildAccelerationStructuresKHR = requires( + VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos +) { + layer_vkBuildAccelerationStructuresKHR(device, deferredOperation, infoCount, pInfos, ppBuildRangeInfos); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkBuildAccelerationStructuresKHR getLayerPtr_vkBuildAccelerationStructuresKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkBuildAccelerationStructuresKHR) + { + return layer_vkBuildAccelerationStructuresKHR; + } + + return layer_vkBuildAccelerationStructuresKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkBuildMicromapsEXT = requires( + VkDevice device, VkDeferredOperationKHR deferredOperation, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos +) { + layer_vkBuildMicromapsEXT(device, deferredOperation, infoCount, pInfos); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkBuildMicromapsEXT getLayerPtr_vkBuildMicromapsEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkBuildMicromapsEXT) + { + return layer_vkBuildMicromapsEXT; + } + + return layer_vkBuildMicromapsEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdBeginConditionalRenderingEXT = requires( + VkCommandBuffer commandBuffer, const VkConditionalRenderingBeginInfoEXT* pConditionalRenderingBegin +) { + layer_vkCmdBeginConditionalRenderingEXT(commandBuffer, pConditionalRenderingBegin); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBeginConditionalRenderingEXT getLayerPtr_vkCmdBeginConditionalRenderingEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBeginConditionalRenderingEXT) + { + return layer_vkCmdBeginConditionalRenderingEXT; + } + + return layer_vkCmdBeginConditionalRenderingEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBeginDebugUtilsLabelEXT = requires( + VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo +) { + layer_vkCmdBeginDebugUtilsLabelEXT(commandBuffer, pLabelInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBeginDebugUtilsLabelEXT getLayerPtr_vkCmdBeginDebugUtilsLabelEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBeginDebugUtilsLabelEXT) + { + return layer_vkCmdBeginDebugUtilsLabelEXT; + } + + return layer_vkCmdBeginDebugUtilsLabelEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBeginQuery = requires( + VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags +) { + layer_vkCmdBeginQuery(commandBuffer, queryPool, query, flags); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBeginQuery getLayerPtr_vkCmdBeginQuery() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBeginQuery) + { + return layer_vkCmdBeginQuery; + } + + return layer_vkCmdBeginQuery; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdBeginQueryIndexedEXT = requires( + VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, VkQueryControlFlags flags, uint32_t index +) { + layer_vkCmdBeginQueryIndexedEXT(commandBuffer, queryPool, query, flags, index); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBeginQueryIndexedEXT getLayerPtr_vkCmdBeginQueryIndexedEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBeginQueryIndexedEXT) + { + return layer_vkCmdBeginQueryIndexedEXT; + } + + return layer_vkCmdBeginQueryIndexedEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBeginRenderPass = requires( + VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents +) { + layer_vkCmdBeginRenderPass(commandBuffer, pRenderPassBegin, contents); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBeginRenderPass getLayerPtr_vkCmdBeginRenderPass() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBeginRenderPass) + { + return layer_vkCmdBeginRenderPass; + } + + return layer_vkCmdBeginRenderPass; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBeginRenderPass2 = requires( + VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo +) { + layer_vkCmdBeginRenderPass2(commandBuffer, pRenderPassBegin, pSubpassBeginInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBeginRenderPass2 getLayerPtr_vkCmdBeginRenderPass2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBeginRenderPass2) + { + return layer_vkCmdBeginRenderPass2; + } + + return layer_vkCmdBeginRenderPass2; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdBeginRenderPass2KHR = requires( + VkCommandBuffer commandBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, const VkSubpassBeginInfo* pSubpassBeginInfo +) { + layer_vkCmdBeginRenderPass2KHR(commandBuffer, pRenderPassBegin, pSubpassBeginInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBeginRenderPass2KHR getLayerPtr_vkCmdBeginRenderPass2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBeginRenderPass2KHR) + { + return layer_vkCmdBeginRenderPass2KHR; + } + + return layer_vkCmdBeginRenderPass2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBeginRendering = requires( + VkCommandBuffer commandBuffer, const VkRenderingInfo* pRenderingInfo +) { + layer_vkCmdBeginRendering(commandBuffer, pRenderingInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBeginRendering getLayerPtr_vkCmdBeginRendering() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBeginRendering) + { + return layer_vkCmdBeginRendering; + } + + return layer_vkCmdBeginRendering; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBeginRenderingKHR = requires( + VkCommandBuffer commandBuffer, const VkRenderingInfo* pRenderingInfo +) { + layer_vkCmdBeginRenderingKHR(commandBuffer, pRenderingInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBeginRenderingKHR getLayerPtr_vkCmdBeginRenderingKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBeginRenderingKHR) + { + return layer_vkCmdBeginRenderingKHR; + } + + return layer_vkCmdBeginRenderingKHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdBeginTransformFeedbackEXT = requires( + VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets +) { + layer_vkCmdBeginTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBeginTransformFeedbackEXT getLayerPtr_vkCmdBeginTransformFeedbackEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBeginTransformFeedbackEXT) + { + return layer_vkCmdBeginTransformFeedbackEXT; + } + + return layer_vkCmdBeginTransformFeedbackEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT = requires( + VkCommandBuffer commandBuffer, const VkBindDescriptorBufferEmbeddedSamplersInfoEXT* pBindDescriptorBufferEmbeddedSamplersInfo +) { + layer_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT(commandBuffer, pBindDescriptorBufferEmbeddedSamplersInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT getLayerPtr_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT) + { + return layer_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT; + } + + return layer_vkCmdBindDescriptorBufferEmbeddedSamplers2EXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBindDescriptorBufferEmbeddedSamplersEXT = requires( + VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set +) { + layer_vkCmdBindDescriptorBufferEmbeddedSamplersEXT(commandBuffer, pipelineBindPoint, layout, set); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT getLayerPtr_vkCmdBindDescriptorBufferEmbeddedSamplersEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBindDescriptorBufferEmbeddedSamplersEXT) + { + return layer_vkCmdBindDescriptorBufferEmbeddedSamplersEXT; + } + + return layer_vkCmdBindDescriptorBufferEmbeddedSamplersEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBindDescriptorBuffersEXT = requires( + VkCommandBuffer commandBuffer, uint32_t bufferCount, const VkDescriptorBufferBindingInfoEXT* pBindingInfos +) { + layer_vkCmdBindDescriptorBuffersEXT(commandBuffer, bufferCount, pBindingInfos); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBindDescriptorBuffersEXT getLayerPtr_vkCmdBindDescriptorBuffersEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBindDescriptorBuffersEXT) + { + return layer_vkCmdBindDescriptorBuffersEXT; + } + + return layer_vkCmdBindDescriptorBuffersEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBindDescriptorSets = requires( + VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets +) { + layer_vkCmdBindDescriptorSets(commandBuffer, pipelineBindPoint, layout, firstSet, descriptorSetCount, pDescriptorSets, dynamicOffsetCount, pDynamicOffsets); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBindDescriptorSets getLayerPtr_vkCmdBindDescriptorSets() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBindDescriptorSets) + { + return layer_vkCmdBindDescriptorSets; + } + + return layer_vkCmdBindDescriptorSets; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdBindDescriptorSets2 = requires( + VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo +) { + layer_vkCmdBindDescriptorSets2(commandBuffer, pBindDescriptorSetsInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBindDescriptorSets2 getLayerPtr_vkCmdBindDescriptorSets2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBindDescriptorSets2) + { + return layer_vkCmdBindDescriptorSets2; + } + + return layer_vkCmdBindDescriptorSets2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBindDescriptorSets2KHR = requires( + VkCommandBuffer commandBuffer, const VkBindDescriptorSetsInfo* pBindDescriptorSetsInfo +) { + layer_vkCmdBindDescriptorSets2KHR(commandBuffer, pBindDescriptorSetsInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBindDescriptorSets2KHR getLayerPtr_vkCmdBindDescriptorSets2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBindDescriptorSets2KHR) + { + return layer_vkCmdBindDescriptorSets2KHR; + } + + return layer_vkCmdBindDescriptorSets2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBindIndexBuffer = requires( + VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType +) { + layer_vkCmdBindIndexBuffer(commandBuffer, buffer, offset, indexType); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBindIndexBuffer getLayerPtr_vkCmdBindIndexBuffer() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBindIndexBuffer) + { + return layer_vkCmdBindIndexBuffer; + } + + return layer_vkCmdBindIndexBuffer; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdBindIndexBuffer2 = requires( + VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType +) { + layer_vkCmdBindIndexBuffer2(commandBuffer, buffer, offset, size, indexType); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBindIndexBuffer2 getLayerPtr_vkCmdBindIndexBuffer2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBindIndexBuffer2) + { + return layer_vkCmdBindIndexBuffer2; + } + + return layer_vkCmdBindIndexBuffer2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBindIndexBuffer2KHR = requires( + VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, VkIndexType indexType +) { + layer_vkCmdBindIndexBuffer2KHR(commandBuffer, buffer, offset, size, indexType); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBindIndexBuffer2KHR getLayerPtr_vkCmdBindIndexBuffer2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBindIndexBuffer2KHR) + { + return layer_vkCmdBindIndexBuffer2KHR; + } + + return layer_vkCmdBindIndexBuffer2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBindPipeline = requires( + VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline +) { + layer_vkCmdBindPipeline(commandBuffer, pipelineBindPoint, pipeline); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBindPipeline getLayerPtr_vkCmdBindPipeline() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBindPipeline) + { + return layer_vkCmdBindPipeline; + } + + return layer_vkCmdBindPipeline; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdBindShadersEXT = requires( + VkCommandBuffer commandBuffer, uint32_t stageCount, const VkShaderStageFlagBits* pStages, const VkShaderEXT* pShaders +) { + layer_vkCmdBindShadersEXT(commandBuffer, stageCount, pStages, pShaders); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBindShadersEXT getLayerPtr_vkCmdBindShadersEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBindShadersEXT) + { + return layer_vkCmdBindShadersEXT; + } + + return layer_vkCmdBindShadersEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBindTransformFeedbackBuffersEXT = requires( + VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes +) { + layer_vkCmdBindTransformFeedbackBuffersEXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBindTransformFeedbackBuffersEXT getLayerPtr_vkCmdBindTransformFeedbackBuffersEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBindTransformFeedbackBuffersEXT) + { + return layer_vkCmdBindTransformFeedbackBuffersEXT; + } + + return layer_vkCmdBindTransformFeedbackBuffersEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBindVertexBuffers = requires( + VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets +) { + layer_vkCmdBindVertexBuffers(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBindVertexBuffers getLayerPtr_vkCmdBindVertexBuffers() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBindVertexBuffers) + { + return layer_vkCmdBindVertexBuffers; + } + + return layer_vkCmdBindVertexBuffers; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdBindVertexBuffers2 = requires( + VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides +) { + layer_vkCmdBindVertexBuffers2(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBindVertexBuffers2 getLayerPtr_vkCmdBindVertexBuffers2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBindVertexBuffers2) + { + return layer_vkCmdBindVertexBuffers2; + } + + return layer_vkCmdBindVertexBuffers2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBindVertexBuffers2EXT = requires( + VkCommandBuffer commandBuffer, uint32_t firstBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets, const VkDeviceSize* pSizes, const VkDeviceSize* pStrides +) { + layer_vkCmdBindVertexBuffers2EXT(commandBuffer, firstBinding, bindingCount, pBuffers, pOffsets, pSizes, pStrides); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBindVertexBuffers2EXT getLayerPtr_vkCmdBindVertexBuffers2EXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBindVertexBuffers2EXT) + { + return layer_vkCmdBindVertexBuffers2EXT; + } + + return layer_vkCmdBindVertexBuffers2EXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBlitImage = requires( + VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter +) { + layer_vkCmdBlitImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions, filter); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdBlitImage getLayerPtr_vkCmdBlitImage() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBlitImage) + { + return layer_vkCmdBlitImage; + } + + return layer_vkCmdBlitImage; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBlitImage2 = requires( + VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo +) { + layer_vkCmdBlitImage2(commandBuffer, pBlitImageInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBlitImage2 getLayerPtr_vkCmdBlitImage2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBlitImage2) + { + return layer_vkCmdBlitImage2; + } + + return layer_vkCmdBlitImage2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBlitImage2KHR = requires( + VkCommandBuffer commandBuffer, const VkBlitImageInfo2* pBlitImageInfo +) { + layer_vkCmdBlitImage2KHR(commandBuffer, pBlitImageInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBlitImage2KHR getLayerPtr_vkCmdBlitImage2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBlitImage2KHR) + { + return layer_vkCmdBlitImage2KHR; + } + + return layer_vkCmdBlitImage2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBuildAccelerationStructuresIndirectKHR = requires( + VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkDeviceAddress* pIndirectDeviceAddresses, const uint32_t* pIndirectStrides, const uint32_t* const* ppMaxPrimitiveCounts +) { + layer_vkCmdBuildAccelerationStructuresIndirectKHR(commandBuffer, infoCount, pInfos, pIndirectDeviceAddresses, pIndirectStrides, ppMaxPrimitiveCounts); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdBuildAccelerationStructuresIndirectKHR getLayerPtr_vkCmdBuildAccelerationStructuresIndirectKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBuildAccelerationStructuresIndirectKHR) + { + return layer_vkCmdBuildAccelerationStructuresIndirectKHR; + } + + return layer_vkCmdBuildAccelerationStructuresIndirectKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBuildAccelerationStructuresKHR = requires( + VkCommandBuffer commandBuffer, uint32_t infoCount, const VkAccelerationStructureBuildGeometryInfoKHR* pInfos, const VkAccelerationStructureBuildRangeInfoKHR* const* ppBuildRangeInfos +) { + layer_vkCmdBuildAccelerationStructuresKHR(commandBuffer, infoCount, pInfos, ppBuildRangeInfos); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBuildAccelerationStructuresKHR getLayerPtr_vkCmdBuildAccelerationStructuresKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBuildAccelerationStructuresKHR) + { + return layer_vkCmdBuildAccelerationStructuresKHR; + } + + return layer_vkCmdBuildAccelerationStructuresKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdBuildMicromapsEXT = requires( + VkCommandBuffer commandBuffer, uint32_t infoCount, const VkMicromapBuildInfoEXT* pInfos +) { + layer_vkCmdBuildMicromapsEXT(commandBuffer, infoCount, pInfos); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdBuildMicromapsEXT getLayerPtr_vkCmdBuildMicromapsEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdBuildMicromapsEXT) + { + return layer_vkCmdBuildMicromapsEXT; + } + + return layer_vkCmdBuildMicromapsEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdClearAttachments = requires( + VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects +) { + layer_vkCmdClearAttachments(commandBuffer, attachmentCount, pAttachments, rectCount, pRects); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdClearAttachments getLayerPtr_vkCmdClearAttachments() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdClearAttachments) + { + return layer_vkCmdClearAttachments; + } + + return layer_vkCmdClearAttachments; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdClearColorImage = requires( + VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges +) { + layer_vkCmdClearColorImage(commandBuffer, image, imageLayout, pColor, rangeCount, pRanges); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdClearColorImage getLayerPtr_vkCmdClearColorImage() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdClearColorImage) + { + return layer_vkCmdClearColorImage; + } + + return layer_vkCmdClearColorImage; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdClearDepthStencilImage = requires( + VkCommandBuffer commandBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges +) { + layer_vkCmdClearDepthStencilImage(commandBuffer, image, imageLayout, pDepthStencil, rangeCount, pRanges); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdClearDepthStencilImage getLayerPtr_vkCmdClearDepthStencilImage() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdClearDepthStencilImage) + { + return layer_vkCmdClearDepthStencilImage; + } + + return layer_vkCmdClearDepthStencilImage; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyAccelerationStructureKHR = requires( + VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureInfoKHR* pInfo +) { + layer_vkCmdCopyAccelerationStructureKHR(commandBuffer, pInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdCopyAccelerationStructureKHR getLayerPtr_vkCmdCopyAccelerationStructureKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyAccelerationStructureKHR) + { + return layer_vkCmdCopyAccelerationStructureKHR; + } + + return layer_vkCmdCopyAccelerationStructureKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyAccelerationStructureToMemoryKHR = requires( + VkCommandBuffer commandBuffer, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo +) { + layer_vkCmdCopyAccelerationStructureToMemoryKHR(commandBuffer, pInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdCopyAccelerationStructureToMemoryKHR getLayerPtr_vkCmdCopyAccelerationStructureToMemoryKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyAccelerationStructureToMemoryKHR) + { + return layer_vkCmdCopyAccelerationStructureToMemoryKHR; + } + + return layer_vkCmdCopyAccelerationStructureToMemoryKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyBuffer = requires( + VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferCopy* pRegions +) { + layer_vkCmdCopyBuffer(commandBuffer, srcBuffer, dstBuffer, regionCount, pRegions); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdCopyBuffer getLayerPtr_vkCmdCopyBuffer() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyBuffer) + { + return layer_vkCmdCopyBuffer; + } + + return layer_vkCmdCopyBuffer; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyBuffer2 = requires( + VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo +) { + layer_vkCmdCopyBuffer2(commandBuffer, pCopyBufferInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdCopyBuffer2 getLayerPtr_vkCmdCopyBuffer2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyBuffer2) + { + return layer_vkCmdCopyBuffer2; + } + + return layer_vkCmdCopyBuffer2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyBuffer2KHR = requires( + VkCommandBuffer commandBuffer, const VkCopyBufferInfo2* pCopyBufferInfo +) { + layer_vkCmdCopyBuffer2KHR(commandBuffer, pCopyBufferInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdCopyBuffer2KHR getLayerPtr_vkCmdCopyBuffer2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyBuffer2KHR) + { + return layer_vkCmdCopyBuffer2KHR; + } + + return layer_vkCmdCopyBuffer2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyBufferToImage = requires( + VkCommandBuffer commandBuffer, VkBuffer srcBuffer, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions +) { + layer_vkCmdCopyBufferToImage(commandBuffer, srcBuffer, dstImage, dstImageLayout, regionCount, pRegions); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdCopyBufferToImage getLayerPtr_vkCmdCopyBufferToImage() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyBufferToImage) + { + return layer_vkCmdCopyBufferToImage; + } + + return layer_vkCmdCopyBufferToImage; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdCopyBufferToImage2 = requires( + VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo +) { + layer_vkCmdCopyBufferToImage2(commandBuffer, pCopyBufferToImageInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdCopyBufferToImage2 getLayerPtr_vkCmdCopyBufferToImage2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyBufferToImage2) + { + return layer_vkCmdCopyBufferToImage2; + } + + return layer_vkCmdCopyBufferToImage2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyBufferToImage2KHR = requires( + VkCommandBuffer commandBuffer, const VkCopyBufferToImageInfo2* pCopyBufferToImageInfo +) { + layer_vkCmdCopyBufferToImage2KHR(commandBuffer, pCopyBufferToImageInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdCopyBufferToImage2KHR getLayerPtr_vkCmdCopyBufferToImage2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyBufferToImage2KHR) + { + return layer_vkCmdCopyBufferToImage2KHR; + } + + return layer_vkCmdCopyBufferToImage2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyImage = requires( + VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageCopy* pRegions +) { + layer_vkCmdCopyImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdCopyImage getLayerPtr_vkCmdCopyImage() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyImage) + { + return layer_vkCmdCopyImage; + } + + return layer_vkCmdCopyImage; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdCopyImage2 = requires( + VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo +) { + layer_vkCmdCopyImage2(commandBuffer, pCopyImageInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdCopyImage2 getLayerPtr_vkCmdCopyImage2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyImage2) + { + return layer_vkCmdCopyImage2; + } + + return layer_vkCmdCopyImage2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyImage2KHR = requires( + VkCommandBuffer commandBuffer, const VkCopyImageInfo2* pCopyImageInfo +) { + layer_vkCmdCopyImage2KHR(commandBuffer, pCopyImageInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdCopyImage2KHR getLayerPtr_vkCmdCopyImage2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyImage2KHR) + { + return layer_vkCmdCopyImage2KHR; + } + + return layer_vkCmdCopyImage2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyImageToBuffer = requires( + VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer dstBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions +) { + layer_vkCmdCopyImageToBuffer(commandBuffer, srcImage, srcImageLayout, dstBuffer, regionCount, pRegions); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdCopyImageToBuffer getLayerPtr_vkCmdCopyImageToBuffer() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyImageToBuffer) + { + return layer_vkCmdCopyImageToBuffer; + } + + return layer_vkCmdCopyImageToBuffer; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyImageToBuffer2 = requires( + VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo +) { + layer_vkCmdCopyImageToBuffer2(commandBuffer, pCopyImageToBufferInfo); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdCopyImageToBuffer2 getLayerPtr_vkCmdCopyImageToBuffer2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyImageToBuffer2) + { + return layer_vkCmdCopyImageToBuffer2; + } + + return layer_vkCmdCopyImageToBuffer2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyImageToBuffer2KHR = requires( + VkCommandBuffer commandBuffer, const VkCopyImageToBufferInfo2* pCopyImageToBufferInfo +) { + layer_vkCmdCopyImageToBuffer2KHR(commandBuffer, pCopyImageToBufferInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdCopyImageToBuffer2KHR getLayerPtr_vkCmdCopyImageToBuffer2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyImageToBuffer2KHR) + { + return layer_vkCmdCopyImageToBuffer2KHR; + } + + return layer_vkCmdCopyImageToBuffer2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyMemoryToAccelerationStructureKHR = requires( + VkCommandBuffer commandBuffer, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo +) { + layer_vkCmdCopyMemoryToAccelerationStructureKHR(commandBuffer, pInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdCopyMemoryToAccelerationStructureKHR getLayerPtr_vkCmdCopyMemoryToAccelerationStructureKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyMemoryToAccelerationStructureKHR) + { + return layer_vkCmdCopyMemoryToAccelerationStructureKHR; + } + + return layer_vkCmdCopyMemoryToAccelerationStructureKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyMemoryToMicromapEXT = requires( + VkCommandBuffer commandBuffer, const VkCopyMemoryToMicromapInfoEXT* pInfo +) { + layer_vkCmdCopyMemoryToMicromapEXT(commandBuffer, pInfo); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdCopyMemoryToMicromapEXT getLayerPtr_vkCmdCopyMemoryToMicromapEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyMemoryToMicromapEXT) + { + return layer_vkCmdCopyMemoryToMicromapEXT; + } + + return layer_vkCmdCopyMemoryToMicromapEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyMicromapEXT = requires( + VkCommandBuffer commandBuffer, const VkCopyMicromapInfoEXT* pInfo +) { + layer_vkCmdCopyMicromapEXT(commandBuffer, pInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdCopyMicromapEXT getLayerPtr_vkCmdCopyMicromapEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyMicromapEXT) + { + return layer_vkCmdCopyMicromapEXT; + } + + return layer_vkCmdCopyMicromapEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyMicromapToMemoryEXT = requires( + VkCommandBuffer commandBuffer, const VkCopyMicromapToMemoryInfoEXT* pInfo +) { + layer_vkCmdCopyMicromapToMemoryEXT(commandBuffer, pInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdCopyMicromapToMemoryEXT getLayerPtr_vkCmdCopyMicromapToMemoryEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyMicromapToMemoryEXT) + { + return layer_vkCmdCopyMicromapToMemoryEXT; + } + + return layer_vkCmdCopyMicromapToMemoryEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyQueryPoolResults = requires( + VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize stride, VkQueryResultFlags flags +) { + layer_vkCmdCopyQueryPoolResults(commandBuffer, queryPool, firstQuery, queryCount, dstBuffer, dstOffset, stride, flags); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdCopyQueryPoolResults getLayerPtr_vkCmdCopyQueryPoolResults() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyQueryPoolResults) + { + return layer_vkCmdCopyQueryPoolResults; + } + + return layer_vkCmdCopyQueryPoolResults; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdCopyTensorARM = requires( + VkCommandBuffer commandBuffer, const VkCopyTensorInfoARM* pCopyTensorInfo +) { + layer_vkCmdCopyTensorARM(commandBuffer, pCopyTensorInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdCopyTensorARM getLayerPtr_vkCmdCopyTensorARM() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdCopyTensorARM) + { + return layer_vkCmdCopyTensorARM; + } + + return layer_vkCmdCopyTensorARM; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdDebugMarkerBeginEXT = requires( + VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo +) { + layer_vkCmdDebugMarkerBeginEXT(commandBuffer, pMarkerInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDebugMarkerBeginEXT getLayerPtr_vkCmdDebugMarkerBeginEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDebugMarkerBeginEXT) + { + return layer_vkCmdDebugMarkerBeginEXT; + } + + return layer_vkCmdDebugMarkerBeginEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdDebugMarkerEndEXT = requires( + VkCommandBuffer commandBuffer +) { + layer_vkCmdDebugMarkerEndEXT(commandBuffer); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDebugMarkerEndEXT getLayerPtr_vkCmdDebugMarkerEndEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDebugMarkerEndEXT) + { + return layer_vkCmdDebugMarkerEndEXT; + } + + return layer_vkCmdDebugMarkerEndEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdDebugMarkerInsertEXT = requires( + VkCommandBuffer commandBuffer, const VkDebugMarkerMarkerInfoEXT* pMarkerInfo +) { + layer_vkCmdDebugMarkerInsertEXT(commandBuffer, pMarkerInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDebugMarkerInsertEXT getLayerPtr_vkCmdDebugMarkerInsertEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDebugMarkerInsertEXT) + { + return layer_vkCmdDebugMarkerInsertEXT; + } + + return layer_vkCmdDebugMarkerInsertEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdDispatch = requires( + VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ +) { + layer_vkCmdDispatch(commandBuffer, groupCountX, groupCountY, groupCountZ); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDispatch getLayerPtr_vkCmdDispatch() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDispatch) + { + return layer_vkCmdDispatch; + } + + return layer_vkCmdDispatch; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdDispatchBase = requires( + VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ +) { + layer_vkCmdDispatchBase(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDispatchBase getLayerPtr_vkCmdDispatchBase() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDispatchBase) + { + return layer_vkCmdDispatchBase; + } + + return layer_vkCmdDispatchBase; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdDispatchBaseKHR = requires( + VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ +) { + layer_vkCmdDispatchBaseKHR(commandBuffer, baseGroupX, baseGroupY, baseGroupZ, groupCountX, groupCountY, groupCountZ); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDispatchBaseKHR getLayerPtr_vkCmdDispatchBaseKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDispatchBaseKHR) + { + return layer_vkCmdDispatchBaseKHR; + } + + return layer_vkCmdDispatchBaseKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdDispatchIndirect = requires( + VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset +) { + layer_vkCmdDispatchIndirect(commandBuffer, buffer, offset); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDispatchIndirect getLayerPtr_vkCmdDispatchIndirect() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDispatchIndirect) + { + return layer_vkCmdDispatchIndirect; + } + + return layer_vkCmdDispatchIndirect; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdDraw = requires( + VkCommandBuffer commandBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance +) { + layer_vkCmdDraw(commandBuffer, vertexCount, instanceCount, firstVertex, firstInstance); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDraw getLayerPtr_vkCmdDraw() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDraw) + { + return layer_vkCmdDraw; + } + + return layer_vkCmdDraw; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdDrawIndexed = requires( + VkCommandBuffer commandBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance +) { + layer_vkCmdDrawIndexed(commandBuffer, indexCount, instanceCount, firstIndex, vertexOffset, firstInstance); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDrawIndexed getLayerPtr_vkCmdDrawIndexed() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDrawIndexed) + { + return layer_vkCmdDrawIndexed; + } + + return layer_vkCmdDrawIndexed; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdDrawIndexedIndirect = requires( + VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride +) { + layer_vkCmdDrawIndexedIndirect(commandBuffer, buffer, offset, drawCount, stride); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDrawIndexedIndirect getLayerPtr_vkCmdDrawIndexedIndirect() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDrawIndexedIndirect) + { + return layer_vkCmdDrawIndexedIndirect; + } + + return layer_vkCmdDrawIndexedIndirect; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdDrawIndexedIndirectCount = requires( + VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride +) { + layer_vkCmdDrawIndexedIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDrawIndexedIndirectCount getLayerPtr_vkCmdDrawIndexedIndirectCount() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDrawIndexedIndirectCount) + { + return layer_vkCmdDrawIndexedIndirectCount; + } + + return layer_vkCmdDrawIndexedIndirectCount; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdDrawIndexedIndirectCountKHR = requires( + VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride +) { + layer_vkCmdDrawIndexedIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDrawIndexedIndirectCountKHR getLayerPtr_vkCmdDrawIndexedIndirectCountKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDrawIndexedIndirectCountKHR) + { + return layer_vkCmdDrawIndexedIndirectCountKHR; + } + + return layer_vkCmdDrawIndexedIndirectCountKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdDrawIndirect = requires( + VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride +) { + layer_vkCmdDrawIndirect(commandBuffer, buffer, offset, drawCount, stride); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDrawIndirect getLayerPtr_vkCmdDrawIndirect() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDrawIndirect) + { + return layer_vkCmdDrawIndirect; + } + + return layer_vkCmdDrawIndirect; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdDrawIndirectByteCountEXT = requires( + VkCommandBuffer commandBuffer, uint32_t instanceCount, uint32_t firstInstance, VkBuffer counterBuffer, VkDeviceSize counterBufferOffset, uint32_t counterOffset, uint32_t vertexStride +) { + layer_vkCmdDrawIndirectByteCountEXT(commandBuffer, instanceCount, firstInstance, counterBuffer, counterBufferOffset, counterOffset, vertexStride); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdDrawIndirectByteCountEXT getLayerPtr_vkCmdDrawIndirectByteCountEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDrawIndirectByteCountEXT) + { + return layer_vkCmdDrawIndirectByteCountEXT; + } + + return layer_vkCmdDrawIndirectByteCountEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdDrawIndirectCount = requires( + VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride +) { + layer_vkCmdDrawIndirectCount(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDrawIndirectCount getLayerPtr_vkCmdDrawIndirectCount() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDrawIndirectCount) + { + return layer_vkCmdDrawIndirectCount; + } + + return layer_vkCmdDrawIndirectCount; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdDrawIndirectCountKHR = requires( + VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride +) { + layer_vkCmdDrawIndirectCountKHR(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDrawIndirectCountKHR getLayerPtr_vkCmdDrawIndirectCountKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDrawIndirectCountKHR) + { + return layer_vkCmdDrawIndirectCountKHR; + } + + return layer_vkCmdDrawIndirectCountKHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdDrawMeshTasksEXT = requires( + VkCommandBuffer commandBuffer, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ +) { + layer_vkCmdDrawMeshTasksEXT(commandBuffer, groupCountX, groupCountY, groupCountZ); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDrawMeshTasksEXT getLayerPtr_vkCmdDrawMeshTasksEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDrawMeshTasksEXT) + { + return layer_vkCmdDrawMeshTasksEXT; + } + + return layer_vkCmdDrawMeshTasksEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdDrawMeshTasksIndirectCountEXT = requires( + VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, VkBuffer countBuffer, VkDeviceSize countBufferOffset, uint32_t maxDrawCount, uint32_t stride +) { + layer_vkCmdDrawMeshTasksIndirectCountEXT(commandBuffer, buffer, offset, countBuffer, countBufferOffset, maxDrawCount, stride); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDrawMeshTasksIndirectCountEXT getLayerPtr_vkCmdDrawMeshTasksIndirectCountEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDrawMeshTasksIndirectCountEXT) + { + return layer_vkCmdDrawMeshTasksIndirectCountEXT; + } + + return layer_vkCmdDrawMeshTasksIndirectCountEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdDrawMeshTasksIndirectEXT = requires( + VkCommandBuffer commandBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t drawCount, uint32_t stride +) { + layer_vkCmdDrawMeshTasksIndirectEXT(commandBuffer, buffer, offset, drawCount, stride); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdDrawMeshTasksIndirectEXT getLayerPtr_vkCmdDrawMeshTasksIndirectEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDrawMeshTasksIndirectEXT) + { + return layer_vkCmdDrawMeshTasksIndirectEXT; + } + + return layer_vkCmdDrawMeshTasksIndirectEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdDrawMultiEXT = requires( + VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawInfoEXT* pVertexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride +) { + layer_vkCmdDrawMultiEXT(commandBuffer, drawCount, pVertexInfo, instanceCount, firstInstance, stride); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDrawMultiEXT getLayerPtr_vkCmdDrawMultiEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDrawMultiEXT) + { + return layer_vkCmdDrawMultiEXT; + } + + return layer_vkCmdDrawMultiEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdDrawMultiIndexedEXT = requires( + VkCommandBuffer commandBuffer, uint32_t drawCount, const VkMultiDrawIndexedInfoEXT* pIndexInfo, uint32_t instanceCount, uint32_t firstInstance, uint32_t stride, const int32_t* pVertexOffset +) { + layer_vkCmdDrawMultiIndexedEXT(commandBuffer, drawCount, pIndexInfo, instanceCount, firstInstance, stride, pVertexOffset); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdDrawMultiIndexedEXT getLayerPtr_vkCmdDrawMultiIndexedEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdDrawMultiIndexedEXT) + { + return layer_vkCmdDrawMultiIndexedEXT; + } + + return layer_vkCmdDrawMultiIndexedEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdEndConditionalRenderingEXT = requires( + VkCommandBuffer commandBuffer +) { + layer_vkCmdEndConditionalRenderingEXT(commandBuffer); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdEndConditionalRenderingEXT getLayerPtr_vkCmdEndConditionalRenderingEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdEndConditionalRenderingEXT) + { + return layer_vkCmdEndConditionalRenderingEXT; + } + + return layer_vkCmdEndConditionalRenderingEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdEndDebugUtilsLabelEXT = requires( + VkCommandBuffer commandBuffer +) { + layer_vkCmdEndDebugUtilsLabelEXT(commandBuffer); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdEndDebugUtilsLabelEXT getLayerPtr_vkCmdEndDebugUtilsLabelEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdEndDebugUtilsLabelEXT) + { + return layer_vkCmdEndDebugUtilsLabelEXT; + } + + return layer_vkCmdEndDebugUtilsLabelEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdEndQuery = requires( + VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query +) { + layer_vkCmdEndQuery(commandBuffer, queryPool, query); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdEndQuery getLayerPtr_vkCmdEndQuery() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdEndQuery) + { + return layer_vkCmdEndQuery; + } + + return layer_vkCmdEndQuery; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdEndQueryIndexedEXT = requires( + VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t query, uint32_t index +) { + layer_vkCmdEndQueryIndexedEXT(commandBuffer, queryPool, query, index); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdEndQueryIndexedEXT getLayerPtr_vkCmdEndQueryIndexedEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdEndQueryIndexedEXT) + { + return layer_vkCmdEndQueryIndexedEXT; + } + + return layer_vkCmdEndQueryIndexedEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdEndRenderPass = requires( + VkCommandBuffer commandBuffer +) { + layer_vkCmdEndRenderPass(commandBuffer); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdEndRenderPass getLayerPtr_vkCmdEndRenderPass() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdEndRenderPass) + { + return layer_vkCmdEndRenderPass; + } + + return layer_vkCmdEndRenderPass; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdEndRenderPass2 = requires( + VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo +) { + layer_vkCmdEndRenderPass2(commandBuffer, pSubpassEndInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdEndRenderPass2 getLayerPtr_vkCmdEndRenderPass2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdEndRenderPass2) + { + return layer_vkCmdEndRenderPass2; + } + + return layer_vkCmdEndRenderPass2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdEndRenderPass2KHR = requires( + VkCommandBuffer commandBuffer, const VkSubpassEndInfo* pSubpassEndInfo +) { + layer_vkCmdEndRenderPass2KHR(commandBuffer, pSubpassEndInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdEndRenderPass2KHR getLayerPtr_vkCmdEndRenderPass2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdEndRenderPass2KHR) + { + return layer_vkCmdEndRenderPass2KHR; + } + + return layer_vkCmdEndRenderPass2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdEndRendering = requires( + VkCommandBuffer commandBuffer +) { + layer_vkCmdEndRendering(commandBuffer); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdEndRendering getLayerPtr_vkCmdEndRendering() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdEndRendering) + { + return layer_vkCmdEndRendering; + } + + return layer_vkCmdEndRendering; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdEndRendering2EXT = requires( + VkCommandBuffer commandBuffer, const VkRenderingEndInfoEXT* pRenderingEndInfo +) { + layer_vkCmdEndRendering2EXT(commandBuffer, pRenderingEndInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdEndRendering2EXT getLayerPtr_vkCmdEndRendering2EXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdEndRendering2EXT) + { + return layer_vkCmdEndRendering2EXT; + } + + return layer_vkCmdEndRendering2EXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdEndRenderingKHR = requires( + VkCommandBuffer commandBuffer +) { + layer_vkCmdEndRenderingKHR(commandBuffer); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdEndRenderingKHR getLayerPtr_vkCmdEndRenderingKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdEndRenderingKHR) + { + return layer_vkCmdEndRenderingKHR; + } + + return layer_vkCmdEndRenderingKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdEndTransformFeedbackEXT = requires( + VkCommandBuffer commandBuffer, uint32_t firstCounterBuffer, uint32_t counterBufferCount, const VkBuffer* pCounterBuffers, const VkDeviceSize* pCounterBufferOffsets +) { + layer_vkCmdEndTransformFeedbackEXT(commandBuffer, firstCounterBuffer, counterBufferCount, pCounterBuffers, pCounterBufferOffsets); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdEndTransformFeedbackEXT getLayerPtr_vkCmdEndTransformFeedbackEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdEndTransformFeedbackEXT) + { + return layer_vkCmdEndTransformFeedbackEXT; + } + + return layer_vkCmdEndTransformFeedbackEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdExecuteCommands = requires( + VkCommandBuffer commandBuffer, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers +) { + layer_vkCmdExecuteCommands(commandBuffer, commandBufferCount, pCommandBuffers); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdExecuteCommands getLayerPtr_vkCmdExecuteCommands() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdExecuteCommands) + { + return layer_vkCmdExecuteCommands; + } + + return layer_vkCmdExecuteCommands; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdExecuteGeneratedCommandsEXT = requires( + VkCommandBuffer commandBuffer, VkBool32 isPreprocessed, const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo +) { + layer_vkCmdExecuteGeneratedCommandsEXT(commandBuffer, isPreprocessed, pGeneratedCommandsInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdExecuteGeneratedCommandsEXT getLayerPtr_vkCmdExecuteGeneratedCommandsEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdExecuteGeneratedCommandsEXT) + { + return layer_vkCmdExecuteGeneratedCommandsEXT; + } + + return layer_vkCmdExecuteGeneratedCommandsEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdFillBuffer = requires( + VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize size, uint32_t data +) { + layer_vkCmdFillBuffer(commandBuffer, dstBuffer, dstOffset, size, data); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdFillBuffer getLayerPtr_vkCmdFillBuffer() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdFillBuffer) + { + return layer_vkCmdFillBuffer; + } + + return layer_vkCmdFillBuffer; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdInsertDebugUtilsLabelEXT = requires( + VkCommandBuffer commandBuffer, const VkDebugUtilsLabelEXT* pLabelInfo +) { + layer_vkCmdInsertDebugUtilsLabelEXT(commandBuffer, pLabelInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdInsertDebugUtilsLabelEXT getLayerPtr_vkCmdInsertDebugUtilsLabelEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdInsertDebugUtilsLabelEXT) + { + return layer_vkCmdInsertDebugUtilsLabelEXT; + } + + return layer_vkCmdInsertDebugUtilsLabelEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdNextSubpass = requires( + VkCommandBuffer commandBuffer, VkSubpassContents contents +) { + layer_vkCmdNextSubpass(commandBuffer, contents); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdNextSubpass getLayerPtr_vkCmdNextSubpass() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdNextSubpass) + { + return layer_vkCmdNextSubpass; + } + + return layer_vkCmdNextSubpass; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdNextSubpass2 = requires( + VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo +) { + layer_vkCmdNextSubpass2(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdNextSubpass2 getLayerPtr_vkCmdNextSubpass2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdNextSubpass2) + { + return layer_vkCmdNextSubpass2; + } + + return layer_vkCmdNextSubpass2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdNextSubpass2KHR = requires( + VkCommandBuffer commandBuffer, const VkSubpassBeginInfo* pSubpassBeginInfo, const VkSubpassEndInfo* pSubpassEndInfo +) { + layer_vkCmdNextSubpass2KHR(commandBuffer, pSubpassBeginInfo, pSubpassEndInfo); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdNextSubpass2KHR getLayerPtr_vkCmdNextSubpass2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdNextSubpass2KHR) + { + return layer_vkCmdNextSubpass2KHR; + } + + return layer_vkCmdNextSubpass2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdPipelineBarrier = requires( + VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers +) { + layer_vkCmdPipelineBarrier(commandBuffer, srcStageMask, dstStageMask, dependencyFlags, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdPipelineBarrier getLayerPtr_vkCmdPipelineBarrier() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdPipelineBarrier) + { + return layer_vkCmdPipelineBarrier; + } + + return layer_vkCmdPipelineBarrier; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdPipelineBarrier2 = requires( + VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo +) { + layer_vkCmdPipelineBarrier2(commandBuffer, pDependencyInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdPipelineBarrier2 getLayerPtr_vkCmdPipelineBarrier2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdPipelineBarrier2) + { + return layer_vkCmdPipelineBarrier2; + } + + return layer_vkCmdPipelineBarrier2; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdPipelineBarrier2KHR = requires( + VkCommandBuffer commandBuffer, const VkDependencyInfo* pDependencyInfo +) { + layer_vkCmdPipelineBarrier2KHR(commandBuffer, pDependencyInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdPipelineBarrier2KHR getLayerPtr_vkCmdPipelineBarrier2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdPipelineBarrier2KHR) + { + return layer_vkCmdPipelineBarrier2KHR; + } + + return layer_vkCmdPipelineBarrier2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdPreprocessGeneratedCommandsEXT = requires( + VkCommandBuffer commandBuffer, const VkGeneratedCommandsInfoEXT* pGeneratedCommandsInfo, VkCommandBuffer stateCommandBuffer +) { + layer_vkCmdPreprocessGeneratedCommandsEXT(commandBuffer, pGeneratedCommandsInfo, stateCommandBuffer); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdPreprocessGeneratedCommandsEXT getLayerPtr_vkCmdPreprocessGeneratedCommandsEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdPreprocessGeneratedCommandsEXT) + { + return layer_vkCmdPreprocessGeneratedCommandsEXT; + } + + return layer_vkCmdPreprocessGeneratedCommandsEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdPushConstants = requires( + VkCommandBuffer commandBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t offset, uint32_t size, const void* pValues +) { + layer_vkCmdPushConstants(commandBuffer, layout, stageFlags, offset, size, pValues); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdPushConstants getLayerPtr_vkCmdPushConstants() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdPushConstants) + { + return layer_vkCmdPushConstants; + } + + return layer_vkCmdPushConstants; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdPushConstants2 = requires( + VkCommandBuffer commandBuffer, const VkPushConstantsInfo* pPushConstantsInfo +) { + layer_vkCmdPushConstants2(commandBuffer, pPushConstantsInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdPushConstants2 getLayerPtr_vkCmdPushConstants2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdPushConstants2) + { + return layer_vkCmdPushConstants2; + } + + return layer_vkCmdPushConstants2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdPushConstants2KHR = requires( + VkCommandBuffer commandBuffer, const VkPushConstantsInfo* pPushConstantsInfo +) { + layer_vkCmdPushConstants2KHR(commandBuffer, pPushConstantsInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdPushConstants2KHR getLayerPtr_vkCmdPushConstants2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdPushConstants2KHR) + { + return layer_vkCmdPushConstants2KHR; + } + + return layer_vkCmdPushConstants2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdPushDescriptorSet = requires( + VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites +) { + layer_vkCmdPushDescriptorSet(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdPushDescriptorSet getLayerPtr_vkCmdPushDescriptorSet() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdPushDescriptorSet) + { + return layer_vkCmdPushDescriptorSet; + } + + return layer_vkCmdPushDescriptorSet; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdPushDescriptorSet2 = requires( + VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfo* pPushDescriptorSetInfo +) { + layer_vkCmdPushDescriptorSet2(commandBuffer, pPushDescriptorSetInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdPushDescriptorSet2 getLayerPtr_vkCmdPushDescriptorSet2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdPushDescriptorSet2) + { + return layer_vkCmdPushDescriptorSet2; + } + + return layer_vkCmdPushDescriptorSet2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdPushDescriptorSet2KHR = requires( + VkCommandBuffer commandBuffer, const VkPushDescriptorSetInfo* pPushDescriptorSetInfo +) { + layer_vkCmdPushDescriptorSet2KHR(commandBuffer, pPushDescriptorSetInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdPushDescriptorSet2KHR getLayerPtr_vkCmdPushDescriptorSet2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdPushDescriptorSet2KHR) + { + return layer_vkCmdPushDescriptorSet2KHR; + } + + return layer_vkCmdPushDescriptorSet2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdPushDescriptorSetKHR = requires( + VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t set, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites +) { + layer_vkCmdPushDescriptorSetKHR(commandBuffer, pipelineBindPoint, layout, set, descriptorWriteCount, pDescriptorWrites); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdPushDescriptorSetKHR getLayerPtr_vkCmdPushDescriptorSetKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdPushDescriptorSetKHR) + { + return layer_vkCmdPushDescriptorSetKHR; + } + + return layer_vkCmdPushDescriptorSetKHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdPushDescriptorSetWithTemplate = requires( + VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData +) { + layer_vkCmdPushDescriptorSetWithTemplate(commandBuffer, descriptorUpdateTemplate, layout, set, pData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdPushDescriptorSetWithTemplate getLayerPtr_vkCmdPushDescriptorSetWithTemplate() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdPushDescriptorSetWithTemplate) + { + return layer_vkCmdPushDescriptorSetWithTemplate; + } + + return layer_vkCmdPushDescriptorSetWithTemplate; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdPushDescriptorSetWithTemplate2 = requires( + VkCommandBuffer commandBuffer, const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo +) { + layer_vkCmdPushDescriptorSetWithTemplate2(commandBuffer, pPushDescriptorSetWithTemplateInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdPushDescriptorSetWithTemplate2 getLayerPtr_vkCmdPushDescriptorSetWithTemplate2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdPushDescriptorSetWithTemplate2) + { + return layer_vkCmdPushDescriptorSetWithTemplate2; + } + + return layer_vkCmdPushDescriptorSetWithTemplate2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdPushDescriptorSetWithTemplate2KHR = requires( + VkCommandBuffer commandBuffer, const VkPushDescriptorSetWithTemplateInfo* pPushDescriptorSetWithTemplateInfo +) { + layer_vkCmdPushDescriptorSetWithTemplate2KHR(commandBuffer, pPushDescriptorSetWithTemplateInfo); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdPushDescriptorSetWithTemplate2KHR getLayerPtr_vkCmdPushDescriptorSetWithTemplate2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdPushDescriptorSetWithTemplate2KHR) + { + return layer_vkCmdPushDescriptorSetWithTemplate2KHR; + } + + return layer_vkCmdPushDescriptorSetWithTemplate2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdPushDescriptorSetWithTemplateKHR = requires( + VkCommandBuffer commandBuffer, VkDescriptorUpdateTemplate descriptorUpdateTemplate, VkPipelineLayout layout, uint32_t set, const void* pData +) { + layer_vkCmdPushDescriptorSetWithTemplateKHR(commandBuffer, descriptorUpdateTemplate, layout, set, pData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdPushDescriptorSetWithTemplateKHR getLayerPtr_vkCmdPushDescriptorSetWithTemplateKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdPushDescriptorSetWithTemplateKHR) + { + return layer_vkCmdPushDescriptorSetWithTemplateKHR; + } + + return layer_vkCmdPushDescriptorSetWithTemplateKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdResetEvent = requires( + VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask +) { + layer_vkCmdResetEvent(commandBuffer, event, stageMask); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdResetEvent getLayerPtr_vkCmdResetEvent() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdResetEvent) + { + return layer_vkCmdResetEvent; + } + + return layer_vkCmdResetEvent; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdResetEvent2 = requires( + VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask +) { + layer_vkCmdResetEvent2(commandBuffer, event, stageMask); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdResetEvent2 getLayerPtr_vkCmdResetEvent2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdResetEvent2) + { + return layer_vkCmdResetEvent2; + } + + return layer_vkCmdResetEvent2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdResetEvent2KHR = requires( + VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags2 stageMask +) { + layer_vkCmdResetEvent2KHR(commandBuffer, event, stageMask); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdResetEvent2KHR getLayerPtr_vkCmdResetEvent2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdResetEvent2KHR) + { + return layer_vkCmdResetEvent2KHR; + } + + return layer_vkCmdResetEvent2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdResetQueryPool = requires( + VkCommandBuffer commandBuffer, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount +) { + layer_vkCmdResetQueryPool(commandBuffer, queryPool, firstQuery, queryCount); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdResetQueryPool getLayerPtr_vkCmdResetQueryPool() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdResetQueryPool) + { + return layer_vkCmdResetQueryPool; + } + + return layer_vkCmdResetQueryPool; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdResolveImage = requires( + VkCommandBuffer commandBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage dstImage, VkImageLayout dstImageLayout, uint32_t regionCount, const VkImageResolve* pRegions +) { + layer_vkCmdResolveImage(commandBuffer, srcImage, srcImageLayout, dstImage, dstImageLayout, regionCount, pRegions); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdResolveImage getLayerPtr_vkCmdResolveImage() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdResolveImage) + { + return layer_vkCmdResolveImage; + } + + return layer_vkCmdResolveImage; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdResolveImage2 = requires( + VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo +) { + layer_vkCmdResolveImage2(commandBuffer, pResolveImageInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdResolveImage2 getLayerPtr_vkCmdResolveImage2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdResolveImage2) + { + return layer_vkCmdResolveImage2; + } + + return layer_vkCmdResolveImage2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdResolveImage2KHR = requires( + VkCommandBuffer commandBuffer, const VkResolveImageInfo2* pResolveImageInfo +) { + layer_vkCmdResolveImage2KHR(commandBuffer, pResolveImageInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdResolveImage2KHR getLayerPtr_vkCmdResolveImage2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdResolveImage2KHR) + { + return layer_vkCmdResolveImage2KHR; + } + + return layer_vkCmdResolveImage2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetAlphaToCoverageEnableEXT = requires( + VkCommandBuffer commandBuffer, VkBool32 alphaToCoverageEnable +) { + layer_vkCmdSetAlphaToCoverageEnableEXT(commandBuffer, alphaToCoverageEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetAlphaToCoverageEnableEXT getLayerPtr_vkCmdSetAlphaToCoverageEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetAlphaToCoverageEnableEXT) + { + return layer_vkCmdSetAlphaToCoverageEnableEXT; + } + + return layer_vkCmdSetAlphaToCoverageEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdSetAlphaToOneEnableEXT = requires( + VkCommandBuffer commandBuffer, VkBool32 alphaToOneEnable +) { + layer_vkCmdSetAlphaToOneEnableEXT(commandBuffer, alphaToOneEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetAlphaToOneEnableEXT getLayerPtr_vkCmdSetAlphaToOneEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetAlphaToOneEnableEXT) + { + return layer_vkCmdSetAlphaToOneEnableEXT; + } + + return layer_vkCmdSetAlphaToOneEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetAttachmentFeedbackLoopEnableEXT = requires( + VkCommandBuffer commandBuffer, VkImageAspectFlags aspectMask +) { + layer_vkCmdSetAttachmentFeedbackLoopEnableEXT(commandBuffer, aspectMask); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT getLayerPtr_vkCmdSetAttachmentFeedbackLoopEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetAttachmentFeedbackLoopEnableEXT) + { + return layer_vkCmdSetAttachmentFeedbackLoopEnableEXT; + } + + return layer_vkCmdSetAttachmentFeedbackLoopEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetBlendConstants = requires( + VkCommandBuffer commandBuffer, const float blendConstants[4] +) { + layer_vkCmdSetBlendConstants(commandBuffer, blendConstants); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetBlendConstants getLayerPtr_vkCmdSetBlendConstants() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetBlendConstants) + { + return layer_vkCmdSetBlendConstants; + } + + return layer_vkCmdSetBlendConstants; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdSetColorBlendAdvancedEXT = requires( + VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendAdvancedEXT* pColorBlendAdvanced +) { + layer_vkCmdSetColorBlendAdvancedEXT(commandBuffer, firstAttachment, attachmentCount, pColorBlendAdvanced); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetColorBlendAdvancedEXT getLayerPtr_vkCmdSetColorBlendAdvancedEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetColorBlendAdvancedEXT) + { + return layer_vkCmdSetColorBlendAdvancedEXT; + } + + return layer_vkCmdSetColorBlendAdvancedEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetColorBlendEnableEXT = requires( + VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkBool32* pColorBlendEnables +) { + layer_vkCmdSetColorBlendEnableEXT(commandBuffer, firstAttachment, attachmentCount, pColorBlendEnables); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetColorBlendEnableEXT getLayerPtr_vkCmdSetColorBlendEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetColorBlendEnableEXT) + { + return layer_vkCmdSetColorBlendEnableEXT; + } + + return layer_vkCmdSetColorBlendEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetColorBlendEquationEXT = requires( + VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorBlendEquationEXT* pColorBlendEquations +) { + layer_vkCmdSetColorBlendEquationEXT(commandBuffer, firstAttachment, attachmentCount, pColorBlendEquations); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetColorBlendEquationEXT getLayerPtr_vkCmdSetColorBlendEquationEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetColorBlendEquationEXT) + { + return layer_vkCmdSetColorBlendEquationEXT; + } + + return layer_vkCmdSetColorBlendEquationEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetColorWriteEnableEXT = requires( + VkCommandBuffer commandBuffer, uint32_t attachmentCount, const VkBool32* pColorWriteEnables +) { + layer_vkCmdSetColorWriteEnableEXT(commandBuffer, attachmentCount, pColorWriteEnables); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetColorWriteEnableEXT getLayerPtr_vkCmdSetColorWriteEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetColorWriteEnableEXT) + { + return layer_vkCmdSetColorWriteEnableEXT; + } + + return layer_vkCmdSetColorWriteEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetColorWriteMaskEXT = requires( + VkCommandBuffer commandBuffer, uint32_t firstAttachment, uint32_t attachmentCount, const VkColorComponentFlags* pColorWriteMasks +) { + layer_vkCmdSetColorWriteMaskEXT(commandBuffer, firstAttachment, attachmentCount, pColorWriteMasks); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetColorWriteMaskEXT getLayerPtr_vkCmdSetColorWriteMaskEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetColorWriteMaskEXT) + { + return layer_vkCmdSetColorWriteMaskEXT; + } + + return layer_vkCmdSetColorWriteMaskEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetConservativeRasterizationModeEXT = requires( + VkCommandBuffer commandBuffer, VkConservativeRasterizationModeEXT conservativeRasterizationMode +) { + layer_vkCmdSetConservativeRasterizationModeEXT(commandBuffer, conservativeRasterizationMode); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetConservativeRasterizationModeEXT getLayerPtr_vkCmdSetConservativeRasterizationModeEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetConservativeRasterizationModeEXT) + { + return layer_vkCmdSetConservativeRasterizationModeEXT; + } + + return layer_vkCmdSetConservativeRasterizationModeEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetCoverageModulationModeNV = requires( + VkCommandBuffer commandBuffer, VkCoverageModulationModeNV coverageModulationMode +) { + layer_vkCmdSetCoverageModulationModeNV(commandBuffer, coverageModulationMode); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetCoverageModulationModeNV getLayerPtr_vkCmdSetCoverageModulationModeNV() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetCoverageModulationModeNV) + { + return layer_vkCmdSetCoverageModulationModeNV; + } + + return layer_vkCmdSetCoverageModulationModeNV; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetCoverageModulationTableEnableNV = requires( + VkCommandBuffer commandBuffer, VkBool32 coverageModulationTableEnable +) { + layer_vkCmdSetCoverageModulationTableEnableNV(commandBuffer, coverageModulationTableEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetCoverageModulationTableEnableNV getLayerPtr_vkCmdSetCoverageModulationTableEnableNV() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetCoverageModulationTableEnableNV) + { + return layer_vkCmdSetCoverageModulationTableEnableNV; + } + + return layer_vkCmdSetCoverageModulationTableEnableNV; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdSetCoverageModulationTableNV = requires( + VkCommandBuffer commandBuffer, uint32_t coverageModulationTableCount, const float* pCoverageModulationTable +) { + layer_vkCmdSetCoverageModulationTableNV(commandBuffer, coverageModulationTableCount, pCoverageModulationTable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetCoverageModulationTableNV getLayerPtr_vkCmdSetCoverageModulationTableNV() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetCoverageModulationTableNV) + { + return layer_vkCmdSetCoverageModulationTableNV; + } + + return layer_vkCmdSetCoverageModulationTableNV; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetCoverageReductionModeNV = requires( + VkCommandBuffer commandBuffer, VkCoverageReductionModeNV coverageReductionMode +) { + layer_vkCmdSetCoverageReductionModeNV(commandBuffer, coverageReductionMode); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetCoverageReductionModeNV getLayerPtr_vkCmdSetCoverageReductionModeNV() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetCoverageReductionModeNV) + { + return layer_vkCmdSetCoverageReductionModeNV; + } + + return layer_vkCmdSetCoverageReductionModeNV; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetCoverageToColorEnableNV = requires( + VkCommandBuffer commandBuffer, VkBool32 coverageToColorEnable +) { + layer_vkCmdSetCoverageToColorEnableNV(commandBuffer, coverageToColorEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetCoverageToColorEnableNV getLayerPtr_vkCmdSetCoverageToColorEnableNV() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetCoverageToColorEnableNV) + { + return layer_vkCmdSetCoverageToColorEnableNV; + } + + return layer_vkCmdSetCoverageToColorEnableNV; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdSetCoverageToColorLocationNV = requires( + VkCommandBuffer commandBuffer, uint32_t coverageToColorLocation +) { + layer_vkCmdSetCoverageToColorLocationNV(commandBuffer, coverageToColorLocation); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetCoverageToColorLocationNV getLayerPtr_vkCmdSetCoverageToColorLocationNV() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetCoverageToColorLocationNV) + { + return layer_vkCmdSetCoverageToColorLocationNV; + } + + return layer_vkCmdSetCoverageToColorLocationNV; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetCullMode = requires( + VkCommandBuffer commandBuffer, VkCullModeFlags cullMode +) { + layer_vkCmdSetCullMode(commandBuffer, cullMode); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetCullMode getLayerPtr_vkCmdSetCullMode() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetCullMode) + { + return layer_vkCmdSetCullMode; + } + + return layer_vkCmdSetCullMode; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetCullModeEXT = requires( + VkCommandBuffer commandBuffer, VkCullModeFlags cullMode +) { + layer_vkCmdSetCullModeEXT(commandBuffer, cullMode); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetCullModeEXT getLayerPtr_vkCmdSetCullModeEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetCullModeEXT) + { + return layer_vkCmdSetCullModeEXT; + } + + return layer_vkCmdSetCullModeEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDepthBias = requires( + VkCommandBuffer commandBuffer, float depthBiasConstantFactor, float depthBiasClamp, float depthBiasSlopeFactor +) { + layer_vkCmdSetDepthBias(commandBuffer, depthBiasConstantFactor, depthBiasClamp, depthBiasSlopeFactor); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetDepthBias getLayerPtr_vkCmdSetDepthBias() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDepthBias) + { + return layer_vkCmdSetDepthBias; + } + + return layer_vkCmdSetDepthBias; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDepthBias2EXT = requires( + VkCommandBuffer commandBuffer, const VkDepthBiasInfoEXT* pDepthBiasInfo +) { + layer_vkCmdSetDepthBias2EXT(commandBuffer, pDepthBiasInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDepthBias2EXT getLayerPtr_vkCmdSetDepthBias2EXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDepthBias2EXT) + { + return layer_vkCmdSetDepthBias2EXT; + } + + return layer_vkCmdSetDepthBias2EXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDepthBiasEnable = requires( + VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable +) { + layer_vkCmdSetDepthBiasEnable(commandBuffer, depthBiasEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDepthBiasEnable getLayerPtr_vkCmdSetDepthBiasEnable() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDepthBiasEnable) + { + return layer_vkCmdSetDepthBiasEnable; + } + + return layer_vkCmdSetDepthBiasEnable; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDepthBiasEnableEXT = requires( + VkCommandBuffer commandBuffer, VkBool32 depthBiasEnable +) { + layer_vkCmdSetDepthBiasEnableEXT(commandBuffer, depthBiasEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDepthBiasEnableEXT getLayerPtr_vkCmdSetDepthBiasEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDepthBiasEnableEXT) + { + return layer_vkCmdSetDepthBiasEnableEXT; + } + + return layer_vkCmdSetDepthBiasEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdSetDepthBounds = requires( + VkCommandBuffer commandBuffer, float minDepthBounds, float maxDepthBounds +) { + layer_vkCmdSetDepthBounds(commandBuffer, minDepthBounds, maxDepthBounds); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDepthBounds getLayerPtr_vkCmdSetDepthBounds() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDepthBounds) + { + return layer_vkCmdSetDepthBounds; + } + + return layer_vkCmdSetDepthBounds; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDepthBoundsTestEnable = requires( + VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable +) { + layer_vkCmdSetDepthBoundsTestEnable(commandBuffer, depthBoundsTestEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDepthBoundsTestEnable getLayerPtr_vkCmdSetDepthBoundsTestEnable() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDepthBoundsTestEnable) + { + return layer_vkCmdSetDepthBoundsTestEnable; + } + + return layer_vkCmdSetDepthBoundsTestEnable; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDepthBoundsTestEnableEXT = requires( + VkCommandBuffer commandBuffer, VkBool32 depthBoundsTestEnable +) { + layer_vkCmdSetDepthBoundsTestEnableEXT(commandBuffer, depthBoundsTestEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDepthBoundsTestEnableEXT getLayerPtr_vkCmdSetDepthBoundsTestEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDepthBoundsTestEnableEXT) + { + return layer_vkCmdSetDepthBoundsTestEnableEXT; + } + + return layer_vkCmdSetDepthBoundsTestEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDepthClampEnableEXT = requires( + VkCommandBuffer commandBuffer, VkBool32 depthClampEnable +) { + layer_vkCmdSetDepthClampEnableEXT(commandBuffer, depthClampEnable); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetDepthClampEnableEXT getLayerPtr_vkCmdSetDepthClampEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDepthClampEnableEXT) + { + return layer_vkCmdSetDepthClampEnableEXT; + } + + return layer_vkCmdSetDepthClampEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDepthClampRangeEXT = requires( + VkCommandBuffer commandBuffer, VkDepthClampModeEXT depthClampMode, const VkDepthClampRangeEXT* pDepthClampRange +) { + layer_vkCmdSetDepthClampRangeEXT(commandBuffer, depthClampMode, pDepthClampRange); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDepthClampRangeEXT getLayerPtr_vkCmdSetDepthClampRangeEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDepthClampRangeEXT) + { + return layer_vkCmdSetDepthClampRangeEXT; + } + + return layer_vkCmdSetDepthClampRangeEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDepthClipEnableEXT = requires( + VkCommandBuffer commandBuffer, VkBool32 depthClipEnable +) { + layer_vkCmdSetDepthClipEnableEXT(commandBuffer, depthClipEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDepthClipEnableEXT getLayerPtr_vkCmdSetDepthClipEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDepthClipEnableEXT) + { + return layer_vkCmdSetDepthClipEnableEXT; + } + + return layer_vkCmdSetDepthClipEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDepthClipNegativeOneToOneEXT = requires( + VkCommandBuffer commandBuffer, VkBool32 negativeOneToOne +) { + layer_vkCmdSetDepthClipNegativeOneToOneEXT(commandBuffer, negativeOneToOne); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetDepthClipNegativeOneToOneEXT getLayerPtr_vkCmdSetDepthClipNegativeOneToOneEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDepthClipNegativeOneToOneEXT) + { + return layer_vkCmdSetDepthClipNegativeOneToOneEXT; + } + + return layer_vkCmdSetDepthClipNegativeOneToOneEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDepthCompareOp = requires( + VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp +) { + layer_vkCmdSetDepthCompareOp(commandBuffer, depthCompareOp); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDepthCompareOp getLayerPtr_vkCmdSetDepthCompareOp() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDepthCompareOp) + { + return layer_vkCmdSetDepthCompareOp; + } + + return layer_vkCmdSetDepthCompareOp; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDepthCompareOpEXT = requires( + VkCommandBuffer commandBuffer, VkCompareOp depthCompareOp +) { + layer_vkCmdSetDepthCompareOpEXT(commandBuffer, depthCompareOp); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDepthCompareOpEXT getLayerPtr_vkCmdSetDepthCompareOpEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDepthCompareOpEXT) + { + return layer_vkCmdSetDepthCompareOpEXT; + } + + return layer_vkCmdSetDepthCompareOpEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDepthTestEnable = requires( + VkCommandBuffer commandBuffer, VkBool32 depthTestEnable +) { + layer_vkCmdSetDepthTestEnable(commandBuffer, depthTestEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDepthTestEnable getLayerPtr_vkCmdSetDepthTestEnable() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDepthTestEnable) + { + return layer_vkCmdSetDepthTestEnable; + } + + return layer_vkCmdSetDepthTestEnable; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdSetDepthTestEnableEXT = requires( + VkCommandBuffer commandBuffer, VkBool32 depthTestEnable +) { + layer_vkCmdSetDepthTestEnableEXT(commandBuffer, depthTestEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDepthTestEnableEXT getLayerPtr_vkCmdSetDepthTestEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDepthTestEnableEXT) + { + return layer_vkCmdSetDepthTestEnableEXT; + } + + return layer_vkCmdSetDepthTestEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDepthWriteEnable = requires( + VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable +) { + layer_vkCmdSetDepthWriteEnable(commandBuffer, depthWriteEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDepthWriteEnable getLayerPtr_vkCmdSetDepthWriteEnable() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDepthWriteEnable) + { + return layer_vkCmdSetDepthWriteEnable; + } + + return layer_vkCmdSetDepthWriteEnable; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDepthWriteEnableEXT = requires( + VkCommandBuffer commandBuffer, VkBool32 depthWriteEnable +) { + layer_vkCmdSetDepthWriteEnableEXT(commandBuffer, depthWriteEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDepthWriteEnableEXT getLayerPtr_vkCmdSetDepthWriteEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDepthWriteEnableEXT) + { + return layer_vkCmdSetDepthWriteEnableEXT; + } + + return layer_vkCmdSetDepthWriteEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDescriptorBufferOffsets2EXT = requires( + VkCommandBuffer commandBuffer, const VkSetDescriptorBufferOffsetsInfoEXT* pSetDescriptorBufferOffsetsInfo +) { + layer_vkCmdSetDescriptorBufferOffsets2EXT(commandBuffer, pSetDescriptorBufferOffsetsInfo); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetDescriptorBufferOffsets2EXT getLayerPtr_vkCmdSetDescriptorBufferOffsets2EXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDescriptorBufferOffsets2EXT) + { + return layer_vkCmdSetDescriptorBufferOffsets2EXT; + } + + return layer_vkCmdSetDescriptorBufferOffsets2EXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDescriptorBufferOffsetsEXT = requires( + VkCommandBuffer commandBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const uint32_t* pBufferIndices, const VkDeviceSize* pOffsets +) { + layer_vkCmdSetDescriptorBufferOffsetsEXT(commandBuffer, pipelineBindPoint, layout, firstSet, setCount, pBufferIndices, pOffsets); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDescriptorBufferOffsetsEXT getLayerPtr_vkCmdSetDescriptorBufferOffsetsEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDescriptorBufferOffsetsEXT) + { + return layer_vkCmdSetDescriptorBufferOffsetsEXT; + } + + return layer_vkCmdSetDescriptorBufferOffsetsEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDeviceMask = requires( + VkCommandBuffer commandBuffer, uint32_t deviceMask +) { + layer_vkCmdSetDeviceMask(commandBuffer, deviceMask); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDeviceMask getLayerPtr_vkCmdSetDeviceMask() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDeviceMask) + { + return layer_vkCmdSetDeviceMask; + } + + return layer_vkCmdSetDeviceMask; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDeviceMaskKHR = requires( + VkCommandBuffer commandBuffer, uint32_t deviceMask +) { + layer_vkCmdSetDeviceMaskKHR(commandBuffer, deviceMask); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetDeviceMaskKHR getLayerPtr_vkCmdSetDeviceMaskKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDeviceMaskKHR) + { + return layer_vkCmdSetDeviceMaskKHR; + } + + return layer_vkCmdSetDeviceMaskKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDiscardRectangleEXT = requires( + VkCommandBuffer commandBuffer, uint32_t firstDiscardRectangle, uint32_t discardRectangleCount, const VkRect2D* pDiscardRectangles +) { + layer_vkCmdSetDiscardRectangleEXT(commandBuffer, firstDiscardRectangle, discardRectangleCount, pDiscardRectangles); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDiscardRectangleEXT getLayerPtr_vkCmdSetDiscardRectangleEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDiscardRectangleEXT) + { + return layer_vkCmdSetDiscardRectangleEXT; + } + + return layer_vkCmdSetDiscardRectangleEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDiscardRectangleEnableEXT = requires( + VkCommandBuffer commandBuffer, VkBool32 discardRectangleEnable +) { + layer_vkCmdSetDiscardRectangleEnableEXT(commandBuffer, discardRectangleEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetDiscardRectangleEnableEXT getLayerPtr_vkCmdSetDiscardRectangleEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDiscardRectangleEnableEXT) + { + return layer_vkCmdSetDiscardRectangleEnableEXT; + } + + return layer_vkCmdSetDiscardRectangleEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetDiscardRectangleModeEXT = requires( + VkCommandBuffer commandBuffer, VkDiscardRectangleModeEXT discardRectangleMode +) { + layer_vkCmdSetDiscardRectangleModeEXT(commandBuffer, discardRectangleMode); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetDiscardRectangleModeEXT getLayerPtr_vkCmdSetDiscardRectangleModeEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetDiscardRectangleModeEXT) + { + return layer_vkCmdSetDiscardRectangleModeEXT; + } + + return layer_vkCmdSetDiscardRectangleModeEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetEvent = requires( + VkCommandBuffer commandBuffer, VkEvent event, VkPipelineStageFlags stageMask +) { + layer_vkCmdSetEvent(commandBuffer, event, stageMask); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetEvent getLayerPtr_vkCmdSetEvent() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetEvent) + { + return layer_vkCmdSetEvent; + } + + return layer_vkCmdSetEvent; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetEvent2 = requires( + VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo* pDependencyInfo +) { + layer_vkCmdSetEvent2(commandBuffer, event, pDependencyInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetEvent2 getLayerPtr_vkCmdSetEvent2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetEvent2) + { + return layer_vkCmdSetEvent2; + } + + return layer_vkCmdSetEvent2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetEvent2KHR = requires( + VkCommandBuffer commandBuffer, VkEvent event, const VkDependencyInfo* pDependencyInfo +) { + layer_vkCmdSetEvent2KHR(commandBuffer, event, pDependencyInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetEvent2KHR getLayerPtr_vkCmdSetEvent2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetEvent2KHR) + { + return layer_vkCmdSetEvent2KHR; + } + + return layer_vkCmdSetEvent2KHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdSetExtraPrimitiveOverestimationSizeEXT = requires( + VkCommandBuffer commandBuffer, float extraPrimitiveOverestimationSize +) { + layer_vkCmdSetExtraPrimitiveOverestimationSizeEXT(commandBuffer, extraPrimitiveOverestimationSize); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT getLayerPtr_vkCmdSetExtraPrimitiveOverestimationSizeEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetExtraPrimitiveOverestimationSizeEXT) + { + return layer_vkCmdSetExtraPrimitiveOverestimationSizeEXT; + } + + return layer_vkCmdSetExtraPrimitiveOverestimationSizeEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetFragmentShadingRateKHR = requires( + VkCommandBuffer commandBuffer, const VkExtent2D* pFragmentSize, const VkFragmentShadingRateCombinerOpKHR combinerOps[2] +) { + layer_vkCmdSetFragmentShadingRateKHR(commandBuffer, pFragmentSize, combinerOps); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetFragmentShadingRateKHR getLayerPtr_vkCmdSetFragmentShadingRateKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetFragmentShadingRateKHR) + { + return layer_vkCmdSetFragmentShadingRateKHR; + } + + return layer_vkCmdSetFragmentShadingRateKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetFrontFace = requires( + VkCommandBuffer commandBuffer, VkFrontFace frontFace +) { + layer_vkCmdSetFrontFace(commandBuffer, frontFace); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetFrontFace getLayerPtr_vkCmdSetFrontFace() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetFrontFace) + { + return layer_vkCmdSetFrontFace; + } + + return layer_vkCmdSetFrontFace; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdSetFrontFaceEXT = requires( + VkCommandBuffer commandBuffer, VkFrontFace frontFace +) { + layer_vkCmdSetFrontFaceEXT(commandBuffer, frontFace); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetFrontFaceEXT getLayerPtr_vkCmdSetFrontFaceEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetFrontFaceEXT) + { + return layer_vkCmdSetFrontFaceEXT; + } + + return layer_vkCmdSetFrontFaceEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetLineRasterizationModeEXT = requires( + VkCommandBuffer commandBuffer, VkLineRasterizationModeEXT lineRasterizationMode +) { + layer_vkCmdSetLineRasterizationModeEXT(commandBuffer, lineRasterizationMode); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetLineRasterizationModeEXT getLayerPtr_vkCmdSetLineRasterizationModeEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetLineRasterizationModeEXT) + { + return layer_vkCmdSetLineRasterizationModeEXT; + } + + return layer_vkCmdSetLineRasterizationModeEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetLineStipple = requires( + VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern +) { + layer_vkCmdSetLineStipple(commandBuffer, lineStippleFactor, lineStipplePattern); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetLineStipple getLayerPtr_vkCmdSetLineStipple() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetLineStipple) + { + return layer_vkCmdSetLineStipple; + } + + return layer_vkCmdSetLineStipple; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetLineStippleEXT = requires( + VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern +) { + layer_vkCmdSetLineStippleEXT(commandBuffer, lineStippleFactor, lineStipplePattern); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetLineStippleEXT getLayerPtr_vkCmdSetLineStippleEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetLineStippleEXT) + { + return layer_vkCmdSetLineStippleEXT; + } + + return layer_vkCmdSetLineStippleEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetLineStippleEnableEXT = requires( + VkCommandBuffer commandBuffer, VkBool32 stippledLineEnable +) { + layer_vkCmdSetLineStippleEnableEXT(commandBuffer, stippledLineEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetLineStippleEnableEXT getLayerPtr_vkCmdSetLineStippleEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetLineStippleEnableEXT) + { + return layer_vkCmdSetLineStippleEnableEXT; + } + + return layer_vkCmdSetLineStippleEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetLineStippleKHR = requires( + VkCommandBuffer commandBuffer, uint32_t lineStippleFactor, uint16_t lineStipplePattern +) { + layer_vkCmdSetLineStippleKHR(commandBuffer, lineStippleFactor, lineStipplePattern); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetLineStippleKHR getLayerPtr_vkCmdSetLineStippleKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetLineStippleKHR) + { + return layer_vkCmdSetLineStippleKHR; + } + + return layer_vkCmdSetLineStippleKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetLineWidth = requires( + VkCommandBuffer commandBuffer, float lineWidth +) { + layer_vkCmdSetLineWidth(commandBuffer, lineWidth); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetLineWidth getLayerPtr_vkCmdSetLineWidth() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetLineWidth) + { + return layer_vkCmdSetLineWidth; + } + + return layer_vkCmdSetLineWidth; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdSetLogicOpEXT = requires( + VkCommandBuffer commandBuffer, VkLogicOp logicOp +) { + layer_vkCmdSetLogicOpEXT(commandBuffer, logicOp); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetLogicOpEXT getLayerPtr_vkCmdSetLogicOpEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetLogicOpEXT) + { + return layer_vkCmdSetLogicOpEXT; + } + + return layer_vkCmdSetLogicOpEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetLogicOpEnableEXT = requires( + VkCommandBuffer commandBuffer, VkBool32 logicOpEnable +) { + layer_vkCmdSetLogicOpEnableEXT(commandBuffer, logicOpEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetLogicOpEnableEXT getLayerPtr_vkCmdSetLogicOpEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetLogicOpEnableEXT) + { + return layer_vkCmdSetLogicOpEnableEXT; + } + + return layer_vkCmdSetLogicOpEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetPatchControlPointsEXT = requires( + VkCommandBuffer commandBuffer, uint32_t patchControlPoints +) { + layer_vkCmdSetPatchControlPointsEXT(commandBuffer, patchControlPoints); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetPatchControlPointsEXT getLayerPtr_vkCmdSetPatchControlPointsEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetPatchControlPointsEXT) + { + return layer_vkCmdSetPatchControlPointsEXT; + } + + return layer_vkCmdSetPatchControlPointsEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetPolygonModeEXT = requires( + VkCommandBuffer commandBuffer, VkPolygonMode polygonMode +) { + layer_vkCmdSetPolygonModeEXT(commandBuffer, polygonMode); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetPolygonModeEXT getLayerPtr_vkCmdSetPolygonModeEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetPolygonModeEXT) + { + return layer_vkCmdSetPolygonModeEXT; + } + + return layer_vkCmdSetPolygonModeEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetPrimitiveRestartEnable = requires( + VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable +) { + layer_vkCmdSetPrimitiveRestartEnable(commandBuffer, primitiveRestartEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetPrimitiveRestartEnable getLayerPtr_vkCmdSetPrimitiveRestartEnable() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetPrimitiveRestartEnable) + { + return layer_vkCmdSetPrimitiveRestartEnable; + } + + return layer_vkCmdSetPrimitiveRestartEnable; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetPrimitiveRestartEnableEXT = requires( + VkCommandBuffer commandBuffer, VkBool32 primitiveRestartEnable +) { + layer_vkCmdSetPrimitiveRestartEnableEXT(commandBuffer, primitiveRestartEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetPrimitiveRestartEnableEXT getLayerPtr_vkCmdSetPrimitiveRestartEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetPrimitiveRestartEnableEXT) + { + return layer_vkCmdSetPrimitiveRestartEnableEXT; + } + + return layer_vkCmdSetPrimitiveRestartEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetPrimitiveTopology = requires( + VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology +) { + layer_vkCmdSetPrimitiveTopology(commandBuffer, primitiveTopology); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetPrimitiveTopology getLayerPtr_vkCmdSetPrimitiveTopology() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetPrimitiveTopology) + { + return layer_vkCmdSetPrimitiveTopology; + } + + return layer_vkCmdSetPrimitiveTopology; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetPrimitiveTopologyEXT = requires( + VkCommandBuffer commandBuffer, VkPrimitiveTopology primitiveTopology +) { + layer_vkCmdSetPrimitiveTopologyEXT(commandBuffer, primitiveTopology); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetPrimitiveTopologyEXT getLayerPtr_vkCmdSetPrimitiveTopologyEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetPrimitiveTopologyEXT) + { + return layer_vkCmdSetPrimitiveTopologyEXT; + } + + return layer_vkCmdSetPrimitiveTopologyEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetProvokingVertexModeEXT = requires( + VkCommandBuffer commandBuffer, VkProvokingVertexModeEXT provokingVertexMode +) { + layer_vkCmdSetProvokingVertexModeEXT(commandBuffer, provokingVertexMode); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetProvokingVertexModeEXT getLayerPtr_vkCmdSetProvokingVertexModeEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetProvokingVertexModeEXT) + { + return layer_vkCmdSetProvokingVertexModeEXT; + } + + return layer_vkCmdSetProvokingVertexModeEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetRasterizationSamplesEXT = requires( + VkCommandBuffer commandBuffer, VkSampleCountFlagBits rasterizationSamples +) { + layer_vkCmdSetRasterizationSamplesEXT(commandBuffer, rasterizationSamples); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetRasterizationSamplesEXT getLayerPtr_vkCmdSetRasterizationSamplesEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetRasterizationSamplesEXT) + { + return layer_vkCmdSetRasterizationSamplesEXT; + } + + return layer_vkCmdSetRasterizationSamplesEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetRasterizationStreamEXT = requires( + VkCommandBuffer commandBuffer, uint32_t rasterizationStream +) { + layer_vkCmdSetRasterizationStreamEXT(commandBuffer, rasterizationStream); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetRasterizationStreamEXT getLayerPtr_vkCmdSetRasterizationStreamEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetRasterizationStreamEXT) + { + return layer_vkCmdSetRasterizationStreamEXT; + } + + return layer_vkCmdSetRasterizationStreamEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetRasterizerDiscardEnable = requires( + VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable +) { + layer_vkCmdSetRasterizerDiscardEnable(commandBuffer, rasterizerDiscardEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetRasterizerDiscardEnable getLayerPtr_vkCmdSetRasterizerDiscardEnable() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetRasterizerDiscardEnable) + { + return layer_vkCmdSetRasterizerDiscardEnable; + } + + return layer_vkCmdSetRasterizerDiscardEnable; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetRasterizerDiscardEnableEXT = requires( + VkCommandBuffer commandBuffer, VkBool32 rasterizerDiscardEnable +) { + layer_vkCmdSetRasterizerDiscardEnableEXT(commandBuffer, rasterizerDiscardEnable); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetRasterizerDiscardEnableEXT getLayerPtr_vkCmdSetRasterizerDiscardEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetRasterizerDiscardEnableEXT) + { + return layer_vkCmdSetRasterizerDiscardEnableEXT; + } + + return layer_vkCmdSetRasterizerDiscardEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetRayTracingPipelineStackSizeKHR = requires( + VkCommandBuffer commandBuffer, uint32_t pipelineStackSize +) { + layer_vkCmdSetRayTracingPipelineStackSizeKHR(commandBuffer, pipelineStackSize); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetRayTracingPipelineStackSizeKHR getLayerPtr_vkCmdSetRayTracingPipelineStackSizeKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetRayTracingPipelineStackSizeKHR) + { + return layer_vkCmdSetRayTracingPipelineStackSizeKHR; + } + + return layer_vkCmdSetRayTracingPipelineStackSizeKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetRenderingAttachmentLocations = requires( + VkCommandBuffer commandBuffer, const VkRenderingAttachmentLocationInfo* pLocationInfo +) { + layer_vkCmdSetRenderingAttachmentLocations(commandBuffer, pLocationInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetRenderingAttachmentLocations getLayerPtr_vkCmdSetRenderingAttachmentLocations() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetRenderingAttachmentLocations) + { + return layer_vkCmdSetRenderingAttachmentLocations; + } + + return layer_vkCmdSetRenderingAttachmentLocations; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetRenderingAttachmentLocationsKHR = requires( + VkCommandBuffer commandBuffer, const VkRenderingAttachmentLocationInfo* pLocationInfo +) { + layer_vkCmdSetRenderingAttachmentLocationsKHR(commandBuffer, pLocationInfo); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetRenderingAttachmentLocationsKHR getLayerPtr_vkCmdSetRenderingAttachmentLocationsKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetRenderingAttachmentLocationsKHR) + { + return layer_vkCmdSetRenderingAttachmentLocationsKHR; + } + + return layer_vkCmdSetRenderingAttachmentLocationsKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetRenderingInputAttachmentIndices = requires( + VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo +) { + layer_vkCmdSetRenderingInputAttachmentIndices(commandBuffer, pInputAttachmentIndexInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetRenderingInputAttachmentIndices getLayerPtr_vkCmdSetRenderingInputAttachmentIndices() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetRenderingInputAttachmentIndices) + { + return layer_vkCmdSetRenderingInputAttachmentIndices; + } + + return layer_vkCmdSetRenderingInputAttachmentIndices; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetRenderingInputAttachmentIndicesKHR = requires( + VkCommandBuffer commandBuffer, const VkRenderingInputAttachmentIndexInfo* pInputAttachmentIndexInfo +) { + layer_vkCmdSetRenderingInputAttachmentIndicesKHR(commandBuffer, pInputAttachmentIndexInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetRenderingInputAttachmentIndicesKHR getLayerPtr_vkCmdSetRenderingInputAttachmentIndicesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetRenderingInputAttachmentIndicesKHR) + { + return layer_vkCmdSetRenderingInputAttachmentIndicesKHR; + } + + return layer_vkCmdSetRenderingInputAttachmentIndicesKHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdSetRepresentativeFragmentTestEnableNV = requires( + VkCommandBuffer commandBuffer, VkBool32 representativeFragmentTestEnable +) { + layer_vkCmdSetRepresentativeFragmentTestEnableNV(commandBuffer, representativeFragmentTestEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetRepresentativeFragmentTestEnableNV getLayerPtr_vkCmdSetRepresentativeFragmentTestEnableNV() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetRepresentativeFragmentTestEnableNV) + { + return layer_vkCmdSetRepresentativeFragmentTestEnableNV; + } + + return layer_vkCmdSetRepresentativeFragmentTestEnableNV; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetSampleLocationsEXT = requires( + VkCommandBuffer commandBuffer, const VkSampleLocationsInfoEXT* pSampleLocationsInfo +) { + layer_vkCmdSetSampleLocationsEXT(commandBuffer, pSampleLocationsInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetSampleLocationsEXT getLayerPtr_vkCmdSetSampleLocationsEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetSampleLocationsEXT) + { + return layer_vkCmdSetSampleLocationsEXT; + } + + return layer_vkCmdSetSampleLocationsEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetSampleLocationsEnableEXT = requires( + VkCommandBuffer commandBuffer, VkBool32 sampleLocationsEnable +) { + layer_vkCmdSetSampleLocationsEnableEXT(commandBuffer, sampleLocationsEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetSampleLocationsEnableEXT getLayerPtr_vkCmdSetSampleLocationsEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetSampleLocationsEnableEXT) + { + return layer_vkCmdSetSampleLocationsEnableEXT; + } + + return layer_vkCmdSetSampleLocationsEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdSetSampleMaskEXT = requires( + VkCommandBuffer commandBuffer, VkSampleCountFlagBits samples, const VkSampleMask* pSampleMask +) { + layer_vkCmdSetSampleMaskEXT(commandBuffer, samples, pSampleMask); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetSampleMaskEXT getLayerPtr_vkCmdSetSampleMaskEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetSampleMaskEXT) + { + return layer_vkCmdSetSampleMaskEXT; + } + + return layer_vkCmdSetSampleMaskEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetScissor = requires( + VkCommandBuffer commandBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors +) { + layer_vkCmdSetScissor(commandBuffer, firstScissor, scissorCount, pScissors); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetScissor getLayerPtr_vkCmdSetScissor() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetScissor) + { + return layer_vkCmdSetScissor; + } + + return layer_vkCmdSetScissor; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetScissorWithCount = requires( + VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors +) { + layer_vkCmdSetScissorWithCount(commandBuffer, scissorCount, pScissors); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetScissorWithCount getLayerPtr_vkCmdSetScissorWithCount() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetScissorWithCount) + { + return layer_vkCmdSetScissorWithCount; + } + + return layer_vkCmdSetScissorWithCount; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetScissorWithCountEXT = requires( + VkCommandBuffer commandBuffer, uint32_t scissorCount, const VkRect2D* pScissors +) { + layer_vkCmdSetScissorWithCountEXT(commandBuffer, scissorCount, pScissors); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetScissorWithCountEXT getLayerPtr_vkCmdSetScissorWithCountEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetScissorWithCountEXT) + { + return layer_vkCmdSetScissorWithCountEXT; + } + + return layer_vkCmdSetScissorWithCountEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetShadingRateImageEnableNV = requires( + VkCommandBuffer commandBuffer, VkBool32 shadingRateImageEnable +) { + layer_vkCmdSetShadingRateImageEnableNV(commandBuffer, shadingRateImageEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetShadingRateImageEnableNV getLayerPtr_vkCmdSetShadingRateImageEnableNV() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetShadingRateImageEnableNV) + { + return layer_vkCmdSetShadingRateImageEnableNV; + } + + return layer_vkCmdSetShadingRateImageEnableNV; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetStencilCompareMask = requires( + VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t compareMask +) { + layer_vkCmdSetStencilCompareMask(commandBuffer, faceMask, compareMask); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetStencilCompareMask getLayerPtr_vkCmdSetStencilCompareMask() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetStencilCompareMask) + { + return layer_vkCmdSetStencilCompareMask; + } + + return layer_vkCmdSetStencilCompareMask; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetStencilOp = requires( + VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp +) { + layer_vkCmdSetStencilOp(commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetStencilOp getLayerPtr_vkCmdSetStencilOp() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetStencilOp) + { + return layer_vkCmdSetStencilOp; + } + + return layer_vkCmdSetStencilOp; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetStencilOpEXT = requires( + VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, VkStencilOp failOp, VkStencilOp passOp, VkStencilOp depthFailOp, VkCompareOp compareOp +) { + layer_vkCmdSetStencilOpEXT(commandBuffer, faceMask, failOp, passOp, depthFailOp, compareOp); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetStencilOpEXT getLayerPtr_vkCmdSetStencilOpEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetStencilOpEXT) + { + return layer_vkCmdSetStencilOpEXT; + } + + return layer_vkCmdSetStencilOpEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetStencilReference = requires( + VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t reference +) { + layer_vkCmdSetStencilReference(commandBuffer, faceMask, reference); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetStencilReference getLayerPtr_vkCmdSetStencilReference() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetStencilReference) + { + return layer_vkCmdSetStencilReference; + } + + return layer_vkCmdSetStencilReference; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetStencilTestEnable = requires( + VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable +) { + layer_vkCmdSetStencilTestEnable(commandBuffer, stencilTestEnable); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetStencilTestEnable getLayerPtr_vkCmdSetStencilTestEnable() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetStencilTestEnable) + { + return layer_vkCmdSetStencilTestEnable; + } + + return layer_vkCmdSetStencilTestEnable; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetStencilTestEnableEXT = requires( + VkCommandBuffer commandBuffer, VkBool32 stencilTestEnable +) { + layer_vkCmdSetStencilTestEnableEXT(commandBuffer, stencilTestEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetStencilTestEnableEXT getLayerPtr_vkCmdSetStencilTestEnableEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetStencilTestEnableEXT) + { + return layer_vkCmdSetStencilTestEnableEXT; + } + + return layer_vkCmdSetStencilTestEnableEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetStencilWriteMask = requires( + VkCommandBuffer commandBuffer, VkStencilFaceFlags faceMask, uint32_t writeMask +) { + layer_vkCmdSetStencilWriteMask(commandBuffer, faceMask, writeMask); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetStencilWriteMask getLayerPtr_vkCmdSetStencilWriteMask() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetStencilWriteMask) + { + return layer_vkCmdSetStencilWriteMask; + } + + return layer_vkCmdSetStencilWriteMask; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetTessellationDomainOriginEXT = requires( + VkCommandBuffer commandBuffer, VkTessellationDomainOrigin domainOrigin +) { + layer_vkCmdSetTessellationDomainOriginEXT(commandBuffer, domainOrigin); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdSetTessellationDomainOriginEXT getLayerPtr_vkCmdSetTessellationDomainOriginEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetTessellationDomainOriginEXT) + { + return layer_vkCmdSetTessellationDomainOriginEXT; + } + + return layer_vkCmdSetTessellationDomainOriginEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetVertexInputEXT = requires( + VkCommandBuffer commandBuffer, uint32_t vertexBindingDescriptionCount, const VkVertexInputBindingDescription2EXT* pVertexBindingDescriptions, uint32_t vertexAttributeDescriptionCount, const VkVertexInputAttributeDescription2EXT* pVertexAttributeDescriptions +) { + layer_vkCmdSetVertexInputEXT(commandBuffer, vertexBindingDescriptionCount, pVertexBindingDescriptions, vertexAttributeDescriptionCount, pVertexAttributeDescriptions); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetVertexInputEXT getLayerPtr_vkCmdSetVertexInputEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetVertexInputEXT) + { + return layer_vkCmdSetVertexInputEXT; + } + + return layer_vkCmdSetVertexInputEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetViewport = requires( + VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports +) { + layer_vkCmdSetViewport(commandBuffer, firstViewport, viewportCount, pViewports); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetViewport getLayerPtr_vkCmdSetViewport() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetViewport) + { + return layer_vkCmdSetViewport; + } + + return layer_vkCmdSetViewport; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdSetViewportSwizzleNV = requires( + VkCommandBuffer commandBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewportSwizzleNV* pViewportSwizzles +) { + layer_vkCmdSetViewportSwizzleNV(commandBuffer, firstViewport, viewportCount, pViewportSwizzles); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetViewportSwizzleNV getLayerPtr_vkCmdSetViewportSwizzleNV() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetViewportSwizzleNV) + { + return layer_vkCmdSetViewportSwizzleNV; + } + + return layer_vkCmdSetViewportSwizzleNV; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetViewportWScalingEnableNV = requires( + VkCommandBuffer commandBuffer, VkBool32 viewportWScalingEnable +) { + layer_vkCmdSetViewportWScalingEnableNV(commandBuffer, viewportWScalingEnable); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetViewportWScalingEnableNV getLayerPtr_vkCmdSetViewportWScalingEnableNV() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetViewportWScalingEnableNV) + { + return layer_vkCmdSetViewportWScalingEnableNV; + } + + return layer_vkCmdSetViewportWScalingEnableNV; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdSetViewportWithCount = requires( + VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports +) { + layer_vkCmdSetViewportWithCount(commandBuffer, viewportCount, pViewports); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetViewportWithCount getLayerPtr_vkCmdSetViewportWithCount() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetViewportWithCount) + { + return layer_vkCmdSetViewportWithCount; + } + + return layer_vkCmdSetViewportWithCount; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdSetViewportWithCountEXT = requires( + VkCommandBuffer commandBuffer, uint32_t viewportCount, const VkViewport* pViewports +) { + layer_vkCmdSetViewportWithCountEXT(commandBuffer, viewportCount, pViewports); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdSetViewportWithCountEXT getLayerPtr_vkCmdSetViewportWithCountEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdSetViewportWithCountEXT) + { + return layer_vkCmdSetViewportWithCountEXT; + } + + return layer_vkCmdSetViewportWithCountEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdTraceRaysIndirect2KHR = requires( + VkCommandBuffer commandBuffer, VkDeviceAddress indirectDeviceAddress +) { + layer_vkCmdTraceRaysIndirect2KHR(commandBuffer, indirectDeviceAddress); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdTraceRaysIndirect2KHR getLayerPtr_vkCmdTraceRaysIndirect2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdTraceRaysIndirect2KHR) + { + return layer_vkCmdTraceRaysIndirect2KHR; + } + + return layer_vkCmdTraceRaysIndirect2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdTraceRaysIndirectKHR = requires( + VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, VkDeviceAddress indirectDeviceAddress +) { + layer_vkCmdTraceRaysIndirectKHR(commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, indirectDeviceAddress); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdTraceRaysIndirectKHR getLayerPtr_vkCmdTraceRaysIndirectKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdTraceRaysIndirectKHR) + { + return layer_vkCmdTraceRaysIndirectKHR; + } + + return layer_vkCmdTraceRaysIndirectKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdTraceRaysKHR = requires( + VkCommandBuffer commandBuffer, const VkStridedDeviceAddressRegionKHR* pRaygenShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pMissShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pHitShaderBindingTable, const VkStridedDeviceAddressRegionKHR* pCallableShaderBindingTable, uint32_t width, uint32_t height, uint32_t depth +) { + layer_vkCmdTraceRaysKHR(commandBuffer, pRaygenShaderBindingTable, pMissShaderBindingTable, pHitShaderBindingTable, pCallableShaderBindingTable, width, height, depth); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdTraceRaysKHR getLayerPtr_vkCmdTraceRaysKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdTraceRaysKHR) + { + return layer_vkCmdTraceRaysKHR; + } + + return layer_vkCmdTraceRaysKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdUpdateBuffer = requires( + VkCommandBuffer commandBuffer, VkBuffer dstBuffer, VkDeviceSize dstOffset, VkDeviceSize dataSize, const void* pData +) { + layer_vkCmdUpdateBuffer(commandBuffer, dstBuffer, dstOffset, dataSize, pData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdUpdateBuffer getLayerPtr_vkCmdUpdateBuffer() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdUpdateBuffer) + { + return layer_vkCmdUpdateBuffer; + } + + return layer_vkCmdUpdateBuffer; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdWaitEvents = requires( + VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers +) { + layer_vkCmdWaitEvents(commandBuffer, eventCount, pEvents, srcStageMask, dstStageMask, memoryBarrierCount, pMemoryBarriers, bufferMemoryBarrierCount, pBufferMemoryBarriers, imageMemoryBarrierCount, pImageMemoryBarriers); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdWaitEvents getLayerPtr_vkCmdWaitEvents() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdWaitEvents) + { + return layer_vkCmdWaitEvents; + } + + return layer_vkCmdWaitEvents; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdWaitEvents2 = requires( + VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfo* pDependencyInfos +) { + layer_vkCmdWaitEvents2(commandBuffer, eventCount, pEvents, pDependencyInfos); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdWaitEvents2 getLayerPtr_vkCmdWaitEvents2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdWaitEvents2) + { + return layer_vkCmdWaitEvents2; + } + + return layer_vkCmdWaitEvents2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdWaitEvents2KHR = requires( + VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, const VkDependencyInfo* pDependencyInfos +) { + layer_vkCmdWaitEvents2KHR(commandBuffer, eventCount, pEvents, pDependencyInfos); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCmdWaitEvents2KHR getLayerPtr_vkCmdWaitEvents2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdWaitEvents2KHR) + { + return layer_vkCmdWaitEvents2KHR; + } + + return layer_vkCmdWaitEvents2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdWriteAccelerationStructuresPropertiesKHR = requires( + VkCommandBuffer commandBuffer, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery +) { + layer_vkCmdWriteAccelerationStructuresPropertiesKHR(commandBuffer, accelerationStructureCount, pAccelerationStructures, queryType, queryPool, firstQuery); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdWriteAccelerationStructuresPropertiesKHR getLayerPtr_vkCmdWriteAccelerationStructuresPropertiesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdWriteAccelerationStructuresPropertiesKHR) + { + return layer_vkCmdWriteAccelerationStructuresPropertiesKHR; + } + + return layer_vkCmdWriteAccelerationStructuresPropertiesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdWriteMicromapsPropertiesEXT = requires( + VkCommandBuffer commandBuffer, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, VkQueryPool queryPool, uint32_t firstQuery +) { + layer_vkCmdWriteMicromapsPropertiesEXT(commandBuffer, micromapCount, pMicromaps, queryType, queryPool, firstQuery); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdWriteMicromapsPropertiesEXT getLayerPtr_vkCmdWriteMicromapsPropertiesEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdWriteMicromapsPropertiesEXT) + { + return layer_vkCmdWriteMicromapsPropertiesEXT; + } + + return layer_vkCmdWriteMicromapsPropertiesEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCmdWriteTimestamp = requires( + VkCommandBuffer commandBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t query +) { + layer_vkCmdWriteTimestamp(commandBuffer, pipelineStage, queryPool, query); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdWriteTimestamp getLayerPtr_vkCmdWriteTimestamp() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdWriteTimestamp) + { + return layer_vkCmdWriteTimestamp; + } + + return layer_vkCmdWriteTimestamp; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdWriteTimestamp2 = requires( + VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query +) { + layer_vkCmdWriteTimestamp2(commandBuffer, stage, queryPool, query); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdWriteTimestamp2 getLayerPtr_vkCmdWriteTimestamp2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdWriteTimestamp2) + { + return layer_vkCmdWriteTimestamp2; + } + + return layer_vkCmdWriteTimestamp2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCmdWriteTimestamp2KHR = requires( + VkCommandBuffer commandBuffer, VkPipelineStageFlags2 stage, VkQueryPool queryPool, uint32_t query +) { + layer_vkCmdWriteTimestamp2KHR(commandBuffer, stage, queryPool, query); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCmdWriteTimestamp2KHR getLayerPtr_vkCmdWriteTimestamp2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCmdWriteTimestamp2KHR) + { + return layer_vkCmdWriteTimestamp2KHR; + } + + return layer_vkCmdWriteTimestamp2KHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCopyAccelerationStructureKHR = requires( + VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureInfoKHR* pInfo +) { + layer_vkCopyAccelerationStructureKHR(device, deferredOperation, pInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCopyAccelerationStructureKHR getLayerPtr_vkCopyAccelerationStructureKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCopyAccelerationStructureKHR) + { + return layer_vkCopyAccelerationStructureKHR; + } + + return layer_vkCopyAccelerationStructureKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCopyAccelerationStructureToMemoryKHR = requires( + VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyAccelerationStructureToMemoryInfoKHR* pInfo +) { + layer_vkCopyAccelerationStructureToMemoryKHR(device, deferredOperation, pInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCopyAccelerationStructureToMemoryKHR getLayerPtr_vkCopyAccelerationStructureToMemoryKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCopyAccelerationStructureToMemoryKHR) + { + return layer_vkCopyAccelerationStructureToMemoryKHR; + } + + return layer_vkCopyAccelerationStructureToMemoryKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCopyImageToImage = requires( + VkDevice device, const VkCopyImageToImageInfo* pCopyImageToImageInfo +) { + layer_vkCopyImageToImage(device, pCopyImageToImageInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCopyImageToImage getLayerPtr_vkCopyImageToImage() +{ + return [] + { + if constexpr(hasLayerPtr_vkCopyImageToImage) + { + return layer_vkCopyImageToImage; + } + + return layer_vkCopyImageToImage; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCopyImageToImageEXT = requires( + VkDevice device, const VkCopyImageToImageInfo* pCopyImageToImageInfo +) { + layer_vkCopyImageToImageEXT(device, pCopyImageToImageInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCopyImageToImageEXT getLayerPtr_vkCopyImageToImageEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCopyImageToImageEXT) + { + return layer_vkCopyImageToImageEXT; + } + + return layer_vkCopyImageToImageEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCopyImageToMemory = requires( + VkDevice device, const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo +) { + layer_vkCopyImageToMemory(device, pCopyImageToMemoryInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCopyImageToMemory getLayerPtr_vkCopyImageToMemory() +{ + return [] + { + if constexpr(hasLayerPtr_vkCopyImageToMemory) + { + return layer_vkCopyImageToMemory; + } + + return layer_vkCopyImageToMemory; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCopyImageToMemoryEXT = requires( + VkDevice device, const VkCopyImageToMemoryInfo* pCopyImageToMemoryInfo +) { + layer_vkCopyImageToMemoryEXT(device, pCopyImageToMemoryInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCopyImageToMemoryEXT getLayerPtr_vkCopyImageToMemoryEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCopyImageToMemoryEXT) + { + return layer_vkCopyImageToMemoryEXT; + } + + return layer_vkCopyImageToMemoryEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCopyMemoryToAccelerationStructureKHR = requires( + VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToAccelerationStructureInfoKHR* pInfo +) { + layer_vkCopyMemoryToAccelerationStructureKHR(device, deferredOperation, pInfo); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCopyMemoryToAccelerationStructureKHR getLayerPtr_vkCopyMemoryToAccelerationStructureKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCopyMemoryToAccelerationStructureKHR) + { + return layer_vkCopyMemoryToAccelerationStructureKHR; + } + + return layer_vkCopyMemoryToAccelerationStructureKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCopyMemoryToImage = requires( + VkDevice device, const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo +) { + layer_vkCopyMemoryToImage(device, pCopyMemoryToImageInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCopyMemoryToImage getLayerPtr_vkCopyMemoryToImage() +{ + return [] + { + if constexpr(hasLayerPtr_vkCopyMemoryToImage) + { + return layer_vkCopyMemoryToImage; + } + + return layer_vkCopyMemoryToImage; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCopyMemoryToImageEXT = requires( + VkDevice device, const VkCopyMemoryToImageInfo* pCopyMemoryToImageInfo +) { + layer_vkCopyMemoryToImageEXT(device, pCopyMemoryToImageInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCopyMemoryToImageEXT getLayerPtr_vkCopyMemoryToImageEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCopyMemoryToImageEXT) + { + return layer_vkCopyMemoryToImageEXT; + } + + return layer_vkCopyMemoryToImageEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCopyMemoryToMicromapEXT = requires( + VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMemoryToMicromapInfoEXT* pInfo +) { + layer_vkCopyMemoryToMicromapEXT(device, deferredOperation, pInfo); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCopyMemoryToMicromapEXT getLayerPtr_vkCopyMemoryToMicromapEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCopyMemoryToMicromapEXT) + { + return layer_vkCopyMemoryToMicromapEXT; + } + + return layer_vkCopyMemoryToMicromapEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCopyMicromapEXT = requires( + VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapInfoEXT* pInfo +) { + layer_vkCopyMicromapEXT(device, deferredOperation, pInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCopyMicromapEXT getLayerPtr_vkCopyMicromapEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCopyMicromapEXT) + { + return layer_vkCopyMicromapEXT; + } + + return layer_vkCopyMicromapEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCopyMicromapToMemoryEXT = requires( + VkDevice device, VkDeferredOperationKHR deferredOperation, const VkCopyMicromapToMemoryInfoEXT* pInfo +) { + layer_vkCopyMicromapToMemoryEXT(device, deferredOperation, pInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCopyMicromapToMemoryEXT getLayerPtr_vkCopyMicromapToMemoryEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCopyMicromapToMemoryEXT) + { + return layer_vkCopyMicromapToMemoryEXT; + } + + return layer_vkCopyMicromapToMemoryEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateAccelerationStructureKHR = requires( + VkDevice device, const VkAccelerationStructureCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkAccelerationStructureKHR* pAccelerationStructure +) { + layer_vkCreateAccelerationStructureKHR(device, pCreateInfo, pAllocator, pAccelerationStructure); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCreateAccelerationStructureKHR getLayerPtr_vkCreateAccelerationStructureKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateAccelerationStructureKHR) + { + return layer_vkCreateAccelerationStructureKHR; + } + + return layer_vkCreateAccelerationStructureKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateBuffer = requires( + VkDevice device, const VkBufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBuffer* pBuffer +) { + layer_vkCreateBuffer(device, pCreateInfo, pAllocator, pBuffer); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateBuffer getLayerPtr_vkCreateBuffer() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateBuffer) + { + return layer_vkCreateBuffer; + } + + return layer_vkCreateBuffer; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateBufferView = requires( + VkDevice device, const VkBufferViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkBufferView* pView +) { + layer_vkCreateBufferView(device, pCreateInfo, pAllocator, pView); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateBufferView getLayerPtr_vkCreateBufferView() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateBufferView) + { + return layer_vkCreateBufferView; + } + + return layer_vkCreateBufferView; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateCommandPool = requires( + VkDevice device, const VkCommandPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkCommandPool* pCommandPool +) { + layer_vkCreateCommandPool(device, pCreateInfo, pAllocator, pCommandPool); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCreateCommandPool getLayerPtr_vkCreateCommandPool() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateCommandPool) + { + return layer_vkCreateCommandPool; + } + + return layer_vkCreateCommandPool; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateComputePipelines = requires( + VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkComputePipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines +) { + layer_vkCreateComputePipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateComputePipelines getLayerPtr_vkCreateComputePipelines() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateComputePipelines) + { + return layer_vkCreateComputePipelines; + } + + return layer_vkCreateComputePipelines; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateDeferredOperationKHR = requires( + VkDevice device, const VkAllocationCallbacks* pAllocator, VkDeferredOperationKHR* pDeferredOperation +) { + layer_vkCreateDeferredOperationKHR(device, pAllocator, pDeferredOperation); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateDeferredOperationKHR getLayerPtr_vkCreateDeferredOperationKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateDeferredOperationKHR) + { + return layer_vkCreateDeferredOperationKHR; + } + + return layer_vkCreateDeferredOperationKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateDescriptorPool = requires( + VkDevice device, const VkDescriptorPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorPool* pDescriptorPool +) { + layer_vkCreateDescriptorPool(device, pCreateInfo, pAllocator, pDescriptorPool); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCreateDescriptorPool getLayerPtr_vkCreateDescriptorPool() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateDescriptorPool) + { + return layer_vkCreateDescriptorPool; + } + + return layer_vkCreateDescriptorPool; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateDescriptorSetLayout = requires( + VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorSetLayout* pSetLayout +) { + layer_vkCreateDescriptorSetLayout(device, pCreateInfo, pAllocator, pSetLayout); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateDescriptorSetLayout getLayerPtr_vkCreateDescriptorSetLayout() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateDescriptorSetLayout) + { + return layer_vkCreateDescriptorSetLayout; + } + + return layer_vkCreateDescriptorSetLayout; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateDescriptorUpdateTemplate = requires( + VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate +) { + layer_vkCreateDescriptorUpdateTemplate(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateDescriptorUpdateTemplate getLayerPtr_vkCreateDescriptorUpdateTemplate() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateDescriptorUpdateTemplate) + { + return layer_vkCreateDescriptorUpdateTemplate; + } + + return layer_vkCreateDescriptorUpdateTemplate; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCreateDescriptorUpdateTemplateKHR = requires( + VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate +) { + layer_vkCreateDescriptorUpdateTemplateKHR(device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateDescriptorUpdateTemplateKHR getLayerPtr_vkCreateDescriptorUpdateTemplateKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateDescriptorUpdateTemplateKHR) + { + return layer_vkCreateDescriptorUpdateTemplateKHR; + } + + return layer_vkCreateDescriptorUpdateTemplateKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateEvent = requires( + VkDevice device, const VkEventCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkEvent* pEvent +) { + layer_vkCreateEvent(device, pCreateInfo, pAllocator, pEvent); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateEvent getLayerPtr_vkCreateEvent() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateEvent) + { + return layer_vkCreateEvent; + } + + return layer_vkCreateEvent; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateFence = requires( + VkDevice device, const VkFenceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence +) { + layer_vkCreateFence(device, pCreateInfo, pAllocator, pFence); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateFence getLayerPtr_vkCreateFence() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateFence) + { + return layer_vkCreateFence; + } + + return layer_vkCreateFence; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCreateFramebuffer = requires( + VkDevice device, const VkFramebufferCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkFramebuffer* pFramebuffer +) { + layer_vkCreateFramebuffer(device, pCreateInfo, pAllocator, pFramebuffer); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateFramebuffer getLayerPtr_vkCreateFramebuffer() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateFramebuffer) + { + return layer_vkCreateFramebuffer; + } + + return layer_vkCreateFramebuffer; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateGraphicsPipelines = requires( + VkDevice device, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkGraphicsPipelineCreateInfo* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines +) { + layer_vkCreateGraphicsPipelines(device, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateGraphicsPipelines getLayerPtr_vkCreateGraphicsPipelines() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateGraphicsPipelines) + { + return layer_vkCreateGraphicsPipelines; + } + + return layer_vkCreateGraphicsPipelines; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateImage = requires( + VkDevice device, const VkImageCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImage* pImage +) { + layer_vkCreateImage(device, pCreateInfo, pAllocator, pImage); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateImage getLayerPtr_vkCreateImage() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateImage) + { + return layer_vkCreateImage; + } + + return layer_vkCreateImage; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCreateImageView = requires( + VkDevice device, const VkImageViewCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkImageView* pView +) { + layer_vkCreateImageView(device, pCreateInfo, pAllocator, pView); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateImageView getLayerPtr_vkCreateImageView() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateImageView) + { + return layer_vkCreateImageView; + } + + return layer_vkCreateImageView; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateIndirectCommandsLayoutEXT = requires( + VkDevice device, const VkIndirectCommandsLayoutCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectCommandsLayoutEXT* pIndirectCommandsLayout +) { + layer_vkCreateIndirectCommandsLayoutEXT(device, pCreateInfo, pAllocator, pIndirectCommandsLayout); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateIndirectCommandsLayoutEXT getLayerPtr_vkCreateIndirectCommandsLayoutEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateIndirectCommandsLayoutEXT) + { + return layer_vkCreateIndirectCommandsLayoutEXT; + } + + return layer_vkCreateIndirectCommandsLayoutEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateIndirectExecutionSetEXT = requires( + VkDevice device, const VkIndirectExecutionSetCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkIndirectExecutionSetEXT* pIndirectExecutionSet +) { + layer_vkCreateIndirectExecutionSetEXT(device, pCreateInfo, pAllocator, pIndirectExecutionSet); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCreateIndirectExecutionSetEXT getLayerPtr_vkCreateIndirectExecutionSetEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateIndirectExecutionSetEXT) + { + return layer_vkCreateIndirectExecutionSetEXT; + } + + return layer_vkCreateIndirectExecutionSetEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateMicromapEXT = requires( + VkDevice device, const VkMicromapCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkMicromapEXT* pMicromap +) { + layer_vkCreateMicromapEXT(device, pCreateInfo, pAllocator, pMicromap); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateMicromapEXT getLayerPtr_vkCreateMicromapEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateMicromapEXT) + { + return layer_vkCreateMicromapEXT; + } + + return layer_vkCreateMicromapEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreatePipelineBinariesKHR = requires( + VkDevice device, const VkPipelineBinaryCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineBinaryHandlesInfoKHR* pBinaries +) { + layer_vkCreatePipelineBinariesKHR(device, pCreateInfo, pAllocator, pBinaries); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreatePipelineBinariesKHR getLayerPtr_vkCreatePipelineBinariesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreatePipelineBinariesKHR) + { + return layer_vkCreatePipelineBinariesKHR; + } + + return layer_vkCreatePipelineBinariesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreatePipelineCache = requires( + VkDevice device, const VkPipelineCacheCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineCache* pPipelineCache +) { + layer_vkCreatePipelineCache(device, pCreateInfo, pAllocator, pPipelineCache); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCreatePipelineCache getLayerPtr_vkCreatePipelineCache() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreatePipelineCache) + { + return layer_vkCreatePipelineCache; + } + + return layer_vkCreatePipelineCache; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreatePipelineLayout = requires( + VkDevice device, const VkPipelineLayoutCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPipelineLayout* pPipelineLayout +) { + layer_vkCreatePipelineLayout(device, pCreateInfo, pAllocator, pPipelineLayout); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreatePipelineLayout getLayerPtr_vkCreatePipelineLayout() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreatePipelineLayout) + { + return layer_vkCreatePipelineLayout; + } + + return layer_vkCreatePipelineLayout; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreatePrivateDataSlot = requires( + VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlot* pPrivateDataSlot +) { + layer_vkCreatePrivateDataSlot(device, pCreateInfo, pAllocator, pPrivateDataSlot); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreatePrivateDataSlot getLayerPtr_vkCreatePrivateDataSlot() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreatePrivateDataSlot) + { + return layer_vkCreatePrivateDataSlot; + } + + return layer_vkCreatePrivateDataSlot; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreatePrivateDataSlotEXT = requires( + VkDevice device, const VkPrivateDataSlotCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkPrivateDataSlot* pPrivateDataSlot +) { + layer_vkCreatePrivateDataSlotEXT(device, pCreateInfo, pAllocator, pPrivateDataSlot); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkCreatePrivateDataSlotEXT getLayerPtr_vkCreatePrivateDataSlotEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreatePrivateDataSlotEXT) + { + return layer_vkCreatePrivateDataSlotEXT; + } + + return layer_vkCreatePrivateDataSlotEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateQueryPool = requires( + VkDevice device, const VkQueryPoolCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkQueryPool* pQueryPool +) { + layer_vkCreateQueryPool(device, pCreateInfo, pAllocator, pQueryPool); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateQueryPool getLayerPtr_vkCreateQueryPool() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateQueryPool) + { + return layer_vkCreateQueryPool; + } + + return layer_vkCreateQueryPool; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateRayTracingPipelinesKHR = requires( + VkDevice device, VkDeferredOperationKHR deferredOperation, VkPipelineCache pipelineCache, uint32_t createInfoCount, const VkRayTracingPipelineCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkPipeline* pPipelines +) { + layer_vkCreateRayTracingPipelinesKHR(device, deferredOperation, pipelineCache, createInfoCount, pCreateInfos, pAllocator, pPipelines); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateRayTracingPipelinesKHR getLayerPtr_vkCreateRayTracingPipelinesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateRayTracingPipelinesKHR) + { + return layer_vkCreateRayTracingPipelinesKHR; + } + + return layer_vkCreateRayTracingPipelinesKHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCreateRenderPass = requires( + VkDevice device, const VkRenderPassCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass +) { + layer_vkCreateRenderPass(device, pCreateInfo, pAllocator, pRenderPass); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateRenderPass getLayerPtr_vkCreateRenderPass() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateRenderPass) + { + return layer_vkCreateRenderPass; + } + + return layer_vkCreateRenderPass; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateRenderPass2 = requires( + VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass +) { + layer_vkCreateRenderPass2(device, pCreateInfo, pAllocator, pRenderPass); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateRenderPass2 getLayerPtr_vkCreateRenderPass2() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateRenderPass2) + { + return layer_vkCreateRenderPass2; + } + + return layer_vkCreateRenderPass2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateRenderPass2KHR = requires( + VkDevice device, const VkRenderPassCreateInfo2* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkRenderPass* pRenderPass +) { + layer_vkCreateRenderPass2KHR(device, pCreateInfo, pAllocator, pRenderPass); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateRenderPass2KHR getLayerPtr_vkCreateRenderPass2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateRenderPass2KHR) + { + return layer_vkCreateRenderPass2KHR; + } + + return layer_vkCreateRenderPass2KHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCreateSampler = requires( + VkDevice device, const VkSamplerCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSampler* pSampler +) { + layer_vkCreateSampler(device, pCreateInfo, pAllocator, pSampler); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateSampler getLayerPtr_vkCreateSampler() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateSampler) + { + return layer_vkCreateSampler; + } + + return layer_vkCreateSampler; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateSamplerYcbcrConversion = requires( + VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion +) { + layer_vkCreateSamplerYcbcrConversion(device, pCreateInfo, pAllocator, pYcbcrConversion); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateSamplerYcbcrConversion getLayerPtr_vkCreateSamplerYcbcrConversion() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateSamplerYcbcrConversion) + { + return layer_vkCreateSamplerYcbcrConversion; + } + + return layer_vkCreateSamplerYcbcrConversion; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateSamplerYcbcrConversionKHR = requires( + VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion +) { + layer_vkCreateSamplerYcbcrConversionKHR(device, pCreateInfo, pAllocator, pYcbcrConversion); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateSamplerYcbcrConversionKHR getLayerPtr_vkCreateSamplerYcbcrConversionKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateSamplerYcbcrConversionKHR) + { + return layer_vkCreateSamplerYcbcrConversionKHR; + } + + return layer_vkCreateSamplerYcbcrConversionKHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCreateSemaphore = requires( + VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSemaphore* pSemaphore +) { + layer_vkCreateSemaphore(device, pCreateInfo, pAllocator, pSemaphore); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateSemaphore getLayerPtr_vkCreateSemaphore() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateSemaphore) + { + return layer_vkCreateSemaphore; + } + + return layer_vkCreateSemaphore; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateShaderModule = requires( + VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkShaderModule* pShaderModule +) { + layer_vkCreateShaderModule(device, pCreateInfo, pAllocator, pShaderModule); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateShaderModule getLayerPtr_vkCreateShaderModule() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateShaderModule) + { + return layer_vkCreateShaderModule; + } + + return layer_vkCreateShaderModule; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateShadersEXT = requires( + VkDevice device, uint32_t createInfoCount, const VkShaderCreateInfoEXT* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkShaderEXT* pShaders +) { + layer_vkCreateShadersEXT(device, createInfoCount, pCreateInfos, pAllocator, pShaders); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateShadersEXT getLayerPtr_vkCreateShadersEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateShadersEXT) + { + return layer_vkCreateShadersEXT; + } + + return layer_vkCreateShadersEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCreateSharedSwapchainsKHR = requires( + VkDevice device, uint32_t swapchainCount, const VkSwapchainCreateInfoKHR* pCreateInfos, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchains +) { + layer_vkCreateSharedSwapchainsKHR(device, swapchainCount, pCreateInfos, pAllocator, pSwapchains); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateSharedSwapchainsKHR getLayerPtr_vkCreateSharedSwapchainsKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateSharedSwapchainsKHR) + { + return layer_vkCreateSharedSwapchainsKHR; + } + + return layer_vkCreateSharedSwapchainsKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateSwapchainKHR = requires( + VkDevice device, const VkSwapchainCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSwapchainKHR* pSwapchain +) { + layer_vkCreateSwapchainKHR(device, pCreateInfo, pAllocator, pSwapchain); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateSwapchainKHR getLayerPtr_vkCreateSwapchainKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateSwapchainKHR) + { + return layer_vkCreateSwapchainKHR; + } + + return layer_vkCreateSwapchainKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateTensorARM = requires( + VkDevice device, const VkTensorCreateInfoARM* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkTensorARM* pTensor +) { + layer_vkCreateTensorARM(device, pCreateInfo, pAllocator, pTensor); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateTensorARM getLayerPtr_vkCreateTensorARM() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateTensorARM) + { + return layer_vkCreateTensorARM; + } + + return layer_vkCreateTensorARM; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCreateTensorViewARM = requires( + VkDevice device, const VkTensorViewCreateInfoARM* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkTensorViewARM* pView +) { + layer_vkCreateTensorViewARM(device, pCreateInfo, pAllocator, pView); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateTensorViewARM getLayerPtr_vkCreateTensorViewARM() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateTensorViewARM) + { + return layer_vkCreateTensorViewARM; + } + + return layer_vkCreateTensorViewARM; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateValidationCacheEXT = requires( + VkDevice device, const VkValidationCacheCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkValidationCacheEXT* pValidationCache +) { + layer_vkCreateValidationCacheEXT(device, pCreateInfo, pAllocator, pValidationCache); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateValidationCacheEXT getLayerPtr_vkCreateValidationCacheEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateValidationCacheEXT) + { + return layer_vkCreateValidationCacheEXT; + } + + return layer_vkCreateValidationCacheEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDebugMarkerSetObjectNameEXT = requires( + VkDevice device, const VkDebugMarkerObjectNameInfoEXT* pNameInfo +) { + layer_vkDebugMarkerSetObjectNameEXT(device, pNameInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDebugMarkerSetObjectNameEXT getLayerPtr_vkDebugMarkerSetObjectNameEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkDebugMarkerSetObjectNameEXT) + { + return layer_vkDebugMarkerSetObjectNameEXT; + } + + return layer_vkDebugMarkerSetObjectNameEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkDebugMarkerSetObjectTagEXT = requires( + VkDevice device, const VkDebugMarkerObjectTagInfoEXT* pTagInfo +) { + layer_vkDebugMarkerSetObjectTagEXT(device, pTagInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDebugMarkerSetObjectTagEXT getLayerPtr_vkDebugMarkerSetObjectTagEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkDebugMarkerSetObjectTagEXT) + { + return layer_vkDebugMarkerSetObjectTagEXT; + } + + return layer_vkDebugMarkerSetObjectTagEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDeferredOperationJoinKHR = requires( + VkDevice device, VkDeferredOperationKHR operation +) { + layer_vkDeferredOperationJoinKHR(device, operation); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDeferredOperationJoinKHR getLayerPtr_vkDeferredOperationJoinKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkDeferredOperationJoinKHR) + { + return layer_vkDeferredOperationJoinKHR; + } + + return layer_vkDeferredOperationJoinKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyAccelerationStructureKHR = requires( + VkDevice device, VkAccelerationStructureKHR accelerationStructure, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyAccelerationStructureKHR(device, accelerationStructure, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyAccelerationStructureKHR getLayerPtr_vkDestroyAccelerationStructureKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyAccelerationStructureKHR) + { + return layer_vkDestroyAccelerationStructureKHR; + } + + return layer_vkDestroyAccelerationStructureKHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkDestroyBuffer = requires( + VkDevice device, VkBuffer buffer, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyBuffer(device, buffer, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyBuffer getLayerPtr_vkDestroyBuffer() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyBuffer) + { + return layer_vkDestroyBuffer; + } + + return layer_vkDestroyBuffer; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyBufferView = requires( + VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyBufferView(device, bufferView, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyBufferView getLayerPtr_vkDestroyBufferView() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyBufferView) + { + return layer_vkDestroyBufferView; + } + + return layer_vkDestroyBufferView; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyCommandPool = requires( + VkDevice device, VkCommandPool commandPool, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyCommandPool(device, commandPool, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyCommandPool getLayerPtr_vkDestroyCommandPool() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyCommandPool) + { + return layer_vkDestroyCommandPool; + } + + return layer_vkDestroyCommandPool; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyDeferredOperationKHR = requires( + VkDevice device, VkDeferredOperationKHR operation, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyDeferredOperationKHR(device, operation, pAllocator); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkDestroyDeferredOperationKHR getLayerPtr_vkDestroyDeferredOperationKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyDeferredOperationKHR) + { + return layer_vkDestroyDeferredOperationKHR; + } + + return layer_vkDestroyDeferredOperationKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyDescriptorPool = requires( + VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyDescriptorPool(device, descriptorPool, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyDescriptorPool getLayerPtr_vkDestroyDescriptorPool() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyDescriptorPool) + { + return layer_vkDestroyDescriptorPool; + } + + return layer_vkDestroyDescriptorPool; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyDescriptorSetLayout = requires( + VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyDescriptorSetLayout(device, descriptorSetLayout, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyDescriptorSetLayout getLayerPtr_vkDestroyDescriptorSetLayout() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyDescriptorSetLayout) + { + return layer_vkDestroyDescriptorSetLayout; + } + + return layer_vkDestroyDescriptorSetLayout; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyDescriptorUpdateTemplate = requires( + VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyDescriptorUpdateTemplate(device, descriptorUpdateTemplate, pAllocator); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkDestroyDescriptorUpdateTemplate getLayerPtr_vkDestroyDescriptorUpdateTemplate() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyDescriptorUpdateTemplate) + { + return layer_vkDestroyDescriptorUpdateTemplate; + } + + return layer_vkDestroyDescriptorUpdateTemplate; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyDescriptorUpdateTemplateKHR = requires( + VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyDescriptorUpdateTemplateKHR(device, descriptorUpdateTemplate, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyDescriptorUpdateTemplateKHR getLayerPtr_vkDestroyDescriptorUpdateTemplateKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyDescriptorUpdateTemplateKHR) + { + return layer_vkDestroyDescriptorUpdateTemplateKHR; + } + + return layer_vkDestroyDescriptorUpdateTemplateKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyDevice = requires( + VkDevice device, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyDevice(device, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyDevice getLayerPtr_vkDestroyDevice() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyDevice) + { + return layer_vkDestroyDevice; + } + + return layer_vkDestroyDevice; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyEvent = requires( + VkDevice device, VkEvent event, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyEvent(device, event, pAllocator); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkDestroyEvent getLayerPtr_vkDestroyEvent() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyEvent) + { + return layer_vkDestroyEvent; + } + + return layer_vkDestroyEvent; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyFence = requires( + VkDevice device, VkFence fence, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyFence(device, fence, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyFence getLayerPtr_vkDestroyFence() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyFence) + { + return layer_vkDestroyFence; + } + + return layer_vkDestroyFence; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyFramebuffer = requires( + VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyFramebuffer(device, framebuffer, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyFramebuffer getLayerPtr_vkDestroyFramebuffer() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyFramebuffer) + { + return layer_vkDestroyFramebuffer; + } + + return layer_vkDestroyFramebuffer; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyImage = requires( + VkDevice device, VkImage image, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyImage(device, image, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyImage getLayerPtr_vkDestroyImage() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyImage) + { + return layer_vkDestroyImage; + } + + return layer_vkDestroyImage; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkDestroyImageView = requires( + VkDevice device, VkImageView imageView, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyImageView(device, imageView, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyImageView getLayerPtr_vkDestroyImageView() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyImageView) + { + return layer_vkDestroyImageView; + } + + return layer_vkDestroyImageView; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyIndirectCommandsLayoutEXT = requires( + VkDevice device, VkIndirectCommandsLayoutEXT indirectCommandsLayout, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyIndirectCommandsLayoutEXT(device, indirectCommandsLayout, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyIndirectCommandsLayoutEXT getLayerPtr_vkDestroyIndirectCommandsLayoutEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyIndirectCommandsLayoutEXT) + { + return layer_vkDestroyIndirectCommandsLayoutEXT; + } + + return layer_vkDestroyIndirectCommandsLayoutEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyIndirectExecutionSetEXT = requires( + VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyIndirectExecutionSetEXT(device, indirectExecutionSet, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyIndirectExecutionSetEXT getLayerPtr_vkDestroyIndirectExecutionSetEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyIndirectExecutionSetEXT) + { + return layer_vkDestroyIndirectExecutionSetEXT; + } + + return layer_vkDestroyIndirectExecutionSetEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkDestroyMicromapEXT = requires( + VkDevice device, VkMicromapEXT micromap, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyMicromapEXT(device, micromap, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyMicromapEXT getLayerPtr_vkDestroyMicromapEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyMicromapEXT) + { + return layer_vkDestroyMicromapEXT; + } + + return layer_vkDestroyMicromapEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyPipeline = requires( + VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyPipeline(device, pipeline, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyPipeline getLayerPtr_vkDestroyPipeline() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyPipeline) + { + return layer_vkDestroyPipeline; + } + + return layer_vkDestroyPipeline; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyPipelineBinaryKHR = requires( + VkDevice device, VkPipelineBinaryKHR pipelineBinary, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyPipelineBinaryKHR(device, pipelineBinary, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyPipelineBinaryKHR getLayerPtr_vkDestroyPipelineBinaryKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyPipelineBinaryKHR) + { + return layer_vkDestroyPipelineBinaryKHR; + } + + return layer_vkDestroyPipelineBinaryKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyPipelineCache = requires( + VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyPipelineCache(device, pipelineCache, pAllocator); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkDestroyPipelineCache getLayerPtr_vkDestroyPipelineCache() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyPipelineCache) + { + return layer_vkDestroyPipelineCache; + } + + return layer_vkDestroyPipelineCache; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyPipelineLayout = requires( + VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyPipelineLayout(device, pipelineLayout, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyPipelineLayout getLayerPtr_vkDestroyPipelineLayout() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyPipelineLayout) + { + return layer_vkDestroyPipelineLayout; + } + + return layer_vkDestroyPipelineLayout; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyPrivateDataSlot = requires( + VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyPrivateDataSlot(device, privateDataSlot, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyPrivateDataSlot getLayerPtr_vkDestroyPrivateDataSlot() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyPrivateDataSlot) + { + return layer_vkDestroyPrivateDataSlot; + } + + return layer_vkDestroyPrivateDataSlot; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyPrivateDataSlotEXT = requires( + VkDevice device, VkPrivateDataSlot privateDataSlot, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyPrivateDataSlotEXT(device, privateDataSlot, pAllocator); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkDestroyPrivateDataSlotEXT getLayerPtr_vkDestroyPrivateDataSlotEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyPrivateDataSlotEXT) + { + return layer_vkDestroyPrivateDataSlotEXT; + } + + return layer_vkDestroyPrivateDataSlotEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyQueryPool = requires( + VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyQueryPool(device, queryPool, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyQueryPool getLayerPtr_vkDestroyQueryPool() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyQueryPool) + { + return layer_vkDestroyQueryPool; + } + + return layer_vkDestroyQueryPool; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyRenderPass = requires( + VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyRenderPass(device, renderPass, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyRenderPass getLayerPtr_vkDestroyRenderPass() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyRenderPass) + { + return layer_vkDestroyRenderPass; + } + + return layer_vkDestroyRenderPass; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroySampler = requires( + VkDevice device, VkSampler sampler, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroySampler(device, sampler, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroySampler getLayerPtr_vkDestroySampler() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroySampler) + { + return layer_vkDestroySampler; + } + + return layer_vkDestroySampler; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkDestroySamplerYcbcrConversion = requires( + VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroySamplerYcbcrConversion getLayerPtr_vkDestroySamplerYcbcrConversion() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroySamplerYcbcrConversion) + { + return layer_vkDestroySamplerYcbcrConversion; + } + + return layer_vkDestroySamplerYcbcrConversion; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroySamplerYcbcrConversionKHR = requires( + VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroySamplerYcbcrConversionKHR getLayerPtr_vkDestroySamplerYcbcrConversionKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroySamplerYcbcrConversionKHR) + { + return layer_vkDestroySamplerYcbcrConversionKHR; + } + + return layer_vkDestroySamplerYcbcrConversionKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroySemaphore = requires( + VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroySemaphore(device, semaphore, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroySemaphore getLayerPtr_vkDestroySemaphore() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroySemaphore) + { + return layer_vkDestroySemaphore; + } + + return layer_vkDestroySemaphore; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkDestroyShaderEXT = requires( + VkDevice device, VkShaderEXT shader, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyShaderEXT(device, shader, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyShaderEXT getLayerPtr_vkDestroyShaderEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyShaderEXT) + { + return layer_vkDestroyShaderEXT; + } + + return layer_vkDestroyShaderEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyShaderModule = requires( + VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyShaderModule(device, shaderModule, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyShaderModule getLayerPtr_vkDestroyShaderModule() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyShaderModule) + { + return layer_vkDestroyShaderModule; + } + + return layer_vkDestroyShaderModule; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroySwapchainKHR = requires( + VkDevice device, VkSwapchainKHR swapchain, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroySwapchainKHR(device, swapchain, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroySwapchainKHR getLayerPtr_vkDestroySwapchainKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroySwapchainKHR) + { + return layer_vkDestroySwapchainKHR; + } + + return layer_vkDestroySwapchainKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyTensorARM = requires( + VkDevice device, VkTensorARM tensor, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyTensorARM(device, tensor, pAllocator); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkDestroyTensorARM getLayerPtr_vkDestroyTensorARM() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyTensorARM) + { + return layer_vkDestroyTensorARM; + } + + return layer_vkDestroyTensorARM; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyTensorViewARM = requires( + VkDevice device, VkTensorViewARM tensorView, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyTensorViewARM(device, tensorView, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyTensorViewARM getLayerPtr_vkDestroyTensorViewARM() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyTensorViewARM) + { + return layer_vkDestroyTensorViewARM; + } + + return layer_vkDestroyTensorViewARM; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyValidationCacheEXT = requires( + VkDevice device, VkValidationCacheEXT validationCache, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyValidationCacheEXT(device, validationCache, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyValidationCacheEXT getLayerPtr_vkDestroyValidationCacheEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyValidationCacheEXT) + { + return layer_vkDestroyValidationCacheEXT; + } + + return layer_vkDestroyValidationCacheEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDeviceWaitIdle = requires( + VkDevice device +) { + layer_vkDeviceWaitIdle(device); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDeviceWaitIdle getLayerPtr_vkDeviceWaitIdle() +{ + return [] + { + if constexpr(hasLayerPtr_vkDeviceWaitIdle) + { + return layer_vkDeviceWaitIdle; + } + + return layer_vkDeviceWaitIdle; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkDisplayPowerControlEXT = requires( + VkDevice device, VkDisplayKHR display, const VkDisplayPowerInfoEXT* pDisplayPowerInfo +) { + layer_vkDisplayPowerControlEXT(device, display, pDisplayPowerInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDisplayPowerControlEXT getLayerPtr_vkDisplayPowerControlEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkDisplayPowerControlEXT) + { + return layer_vkDisplayPowerControlEXT; + } + + return layer_vkDisplayPowerControlEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkEndCommandBuffer = requires( + VkCommandBuffer commandBuffer +) { + layer_vkEndCommandBuffer(commandBuffer); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkEndCommandBuffer getLayerPtr_vkEndCommandBuffer() +{ + return [] + { + if constexpr(hasLayerPtr_vkEndCommandBuffer) + { + return layer_vkEndCommandBuffer; + } + + return layer_vkEndCommandBuffer; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkFlushMappedMemoryRanges = requires( + VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges +) { + layer_vkFlushMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkFlushMappedMemoryRanges getLayerPtr_vkFlushMappedMemoryRanges() +{ + return [] + { + if constexpr(hasLayerPtr_vkFlushMappedMemoryRanges) + { + return layer_vkFlushMappedMemoryRanges; + } + + return layer_vkFlushMappedMemoryRanges; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkFreeCommandBuffers = requires( + VkDevice device, VkCommandPool commandPool, uint32_t commandBufferCount, const VkCommandBuffer* pCommandBuffers +) { + layer_vkFreeCommandBuffers(device, commandPool, commandBufferCount, pCommandBuffers); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkFreeCommandBuffers getLayerPtr_vkFreeCommandBuffers() +{ + return [] + { + if constexpr(hasLayerPtr_vkFreeCommandBuffers) + { + return layer_vkFreeCommandBuffers; + } + + return layer_vkFreeCommandBuffers; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkFreeDescriptorSets = requires( + VkDevice device, VkDescriptorPool descriptorPool, uint32_t descriptorSetCount, const VkDescriptorSet* pDescriptorSets +) { + layer_vkFreeDescriptorSets(device, descriptorPool, descriptorSetCount, pDescriptorSets); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkFreeDescriptorSets getLayerPtr_vkFreeDescriptorSets() +{ + return [] + { + if constexpr(hasLayerPtr_vkFreeDescriptorSets) + { + return layer_vkFreeDescriptorSets; + } + + return layer_vkFreeDescriptorSets; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkFreeMemory = requires( + VkDevice device, VkDeviceMemory memory, const VkAllocationCallbacks* pAllocator +) { + layer_vkFreeMemory(device, memory, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkFreeMemory getLayerPtr_vkFreeMemory() +{ + return [] + { + if constexpr(hasLayerPtr_vkFreeMemory) + { + return layer_vkFreeMemory; + } + + return layer_vkFreeMemory; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetAccelerationStructureBuildSizesKHR = requires( + VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkAccelerationStructureBuildGeometryInfoKHR* pBuildInfo, const uint32_t* pMaxPrimitiveCounts, VkAccelerationStructureBuildSizesInfoKHR* pSizeInfo +) { + layer_vkGetAccelerationStructureBuildSizesKHR(device, buildType, pBuildInfo, pMaxPrimitiveCounts, pSizeInfo); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetAccelerationStructureBuildSizesKHR getLayerPtr_vkGetAccelerationStructureBuildSizesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetAccelerationStructureBuildSizesKHR) + { + return layer_vkGetAccelerationStructureBuildSizesKHR; + } + + return layer_vkGetAccelerationStructureBuildSizesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetAccelerationStructureDeviceAddressKHR = requires( + VkDevice device, const VkAccelerationStructureDeviceAddressInfoKHR* pInfo +) { + layer_vkGetAccelerationStructureDeviceAddressKHR(device, pInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetAccelerationStructureDeviceAddressKHR getLayerPtr_vkGetAccelerationStructureDeviceAddressKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetAccelerationStructureDeviceAddressKHR) + { + return layer_vkGetAccelerationStructureDeviceAddressKHR; + } + + return layer_vkGetAccelerationStructureDeviceAddressKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT = requires( + VkDevice device, const VkAccelerationStructureCaptureDescriptorDataInfoEXT* pInfo, void* pData +) { + layer_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT(device, pInfo, pData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT getLayerPtr_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT) + { + return layer_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT; + } + + return layer_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkGetBufferDeviceAddress = requires( + VkDevice device, const VkBufferDeviceAddressInfo* pInfo +) { + layer_vkGetBufferDeviceAddress(device, pInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetBufferDeviceAddress getLayerPtr_vkGetBufferDeviceAddress() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetBufferDeviceAddress) + { + return layer_vkGetBufferDeviceAddress; + } + + return layer_vkGetBufferDeviceAddress; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetBufferDeviceAddressEXT = requires( + VkDevice device, const VkBufferDeviceAddressInfo* pInfo +) { + layer_vkGetBufferDeviceAddressEXT(device, pInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetBufferDeviceAddressEXT getLayerPtr_vkGetBufferDeviceAddressEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetBufferDeviceAddressEXT) + { + return layer_vkGetBufferDeviceAddressEXT; + } + + return layer_vkGetBufferDeviceAddressEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetBufferDeviceAddressKHR = requires( + VkDevice device, const VkBufferDeviceAddressInfo* pInfo +) { + layer_vkGetBufferDeviceAddressKHR(device, pInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetBufferDeviceAddressKHR getLayerPtr_vkGetBufferDeviceAddressKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetBufferDeviceAddressKHR) + { + return layer_vkGetBufferDeviceAddressKHR; + } + + return layer_vkGetBufferDeviceAddressKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetBufferMemoryRequirements = requires( + VkDevice device, VkBuffer buffer, VkMemoryRequirements* pMemoryRequirements +) { + layer_vkGetBufferMemoryRequirements(device, buffer, pMemoryRequirements); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetBufferMemoryRequirements getLayerPtr_vkGetBufferMemoryRequirements() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetBufferMemoryRequirements) + { + return layer_vkGetBufferMemoryRequirements; + } + + return layer_vkGetBufferMemoryRequirements; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetBufferMemoryRequirements2 = requires( + VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements +) { + layer_vkGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetBufferMemoryRequirements2 getLayerPtr_vkGetBufferMemoryRequirements2() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetBufferMemoryRequirements2) + { + return layer_vkGetBufferMemoryRequirements2; + } + + return layer_vkGetBufferMemoryRequirements2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetBufferMemoryRequirements2KHR = requires( + VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements +) { + layer_vkGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetBufferMemoryRequirements2KHR getLayerPtr_vkGetBufferMemoryRequirements2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetBufferMemoryRequirements2KHR) + { + return layer_vkGetBufferMemoryRequirements2KHR; + } + + return layer_vkGetBufferMemoryRequirements2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetBufferOpaqueCaptureAddress = requires( + VkDevice device, const VkBufferDeviceAddressInfo* pInfo +) { + layer_vkGetBufferOpaqueCaptureAddress(device, pInfo); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetBufferOpaqueCaptureAddress getLayerPtr_vkGetBufferOpaqueCaptureAddress() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetBufferOpaqueCaptureAddress) + { + return layer_vkGetBufferOpaqueCaptureAddress; + } + + return layer_vkGetBufferOpaqueCaptureAddress; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetBufferOpaqueCaptureAddressKHR = requires( + VkDevice device, const VkBufferDeviceAddressInfo* pInfo +) { + layer_vkGetBufferOpaqueCaptureAddressKHR(device, pInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetBufferOpaqueCaptureAddressKHR getLayerPtr_vkGetBufferOpaqueCaptureAddressKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetBufferOpaqueCaptureAddressKHR) + { + return layer_vkGetBufferOpaqueCaptureAddressKHR; + } + + return layer_vkGetBufferOpaqueCaptureAddressKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetBufferOpaqueCaptureDescriptorDataEXT = requires( + VkDevice device, const VkBufferCaptureDescriptorDataInfoEXT* pInfo, void* pData +) { + layer_vkGetBufferOpaqueCaptureDescriptorDataEXT(device, pInfo, pData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT getLayerPtr_vkGetBufferOpaqueCaptureDescriptorDataEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetBufferOpaqueCaptureDescriptorDataEXT) + { + return layer_vkGetBufferOpaqueCaptureDescriptorDataEXT; + } + + return layer_vkGetBufferOpaqueCaptureDescriptorDataEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetCalibratedTimestampsEXT = requires( + VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoKHR* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation +) { + layer_vkGetCalibratedTimestampsEXT(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetCalibratedTimestampsEXT getLayerPtr_vkGetCalibratedTimestampsEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetCalibratedTimestampsEXT) + { + return layer_vkGetCalibratedTimestampsEXT; + } + + return layer_vkGetCalibratedTimestampsEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetCalibratedTimestampsKHR = requires( + VkDevice device, uint32_t timestampCount, const VkCalibratedTimestampInfoKHR* pTimestampInfos, uint64_t* pTimestamps, uint64_t* pMaxDeviation +) { + layer_vkGetCalibratedTimestampsKHR(device, timestampCount, pTimestampInfos, pTimestamps, pMaxDeviation); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetCalibratedTimestampsKHR getLayerPtr_vkGetCalibratedTimestampsKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetCalibratedTimestampsKHR) + { + return layer_vkGetCalibratedTimestampsKHR; + } + + return layer_vkGetCalibratedTimestampsKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeferredOperationMaxConcurrencyKHR = requires( + VkDevice device, VkDeferredOperationKHR operation +) { + layer_vkGetDeferredOperationMaxConcurrencyKHR(device, operation); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDeferredOperationMaxConcurrencyKHR getLayerPtr_vkGetDeferredOperationMaxConcurrencyKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeferredOperationMaxConcurrencyKHR) + { + return layer_vkGetDeferredOperationMaxConcurrencyKHR; + } + + return layer_vkGetDeferredOperationMaxConcurrencyKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeferredOperationResultKHR = requires( + VkDevice device, VkDeferredOperationKHR operation +) { + layer_vkGetDeferredOperationResultKHR(device, operation); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetDeferredOperationResultKHR getLayerPtr_vkGetDeferredOperationResultKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeferredOperationResultKHR) + { + return layer_vkGetDeferredOperationResultKHR; + } + + return layer_vkGetDeferredOperationResultKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDescriptorEXT = requires( + VkDevice device, const VkDescriptorGetInfoEXT* pDescriptorInfo, size_t dataSize, void* pDescriptor +) { + layer_vkGetDescriptorEXT(device, pDescriptorInfo, dataSize, pDescriptor); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDescriptorEXT getLayerPtr_vkGetDescriptorEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDescriptorEXT) + { + return layer_vkGetDescriptorEXT; + } + + return layer_vkGetDescriptorEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDescriptorSetLayoutBindingOffsetEXT = requires( + VkDevice device, VkDescriptorSetLayout layout, uint32_t binding, VkDeviceSize* pOffset +) { + layer_vkGetDescriptorSetLayoutBindingOffsetEXT(device, layout, binding, pOffset); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDescriptorSetLayoutBindingOffsetEXT getLayerPtr_vkGetDescriptorSetLayoutBindingOffsetEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDescriptorSetLayoutBindingOffsetEXT) + { + return layer_vkGetDescriptorSetLayoutBindingOffsetEXT; + } + + return layer_vkGetDescriptorSetLayoutBindingOffsetEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDescriptorSetLayoutSizeEXT = requires( + VkDevice device, VkDescriptorSetLayout layout, VkDeviceSize* pLayoutSizeInBytes +) { + layer_vkGetDescriptorSetLayoutSizeEXT(device, layout, pLayoutSizeInBytes); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetDescriptorSetLayoutSizeEXT getLayerPtr_vkGetDescriptorSetLayoutSizeEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDescriptorSetLayoutSizeEXT) + { + return layer_vkGetDescriptorSetLayoutSizeEXT; + } + + return layer_vkGetDescriptorSetLayoutSizeEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDescriptorSetLayoutSupport = requires( + VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport +) { + layer_vkGetDescriptorSetLayoutSupport(device, pCreateInfo, pSupport); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDescriptorSetLayoutSupport getLayerPtr_vkGetDescriptorSetLayoutSupport() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDescriptorSetLayoutSupport) + { + return layer_vkGetDescriptorSetLayoutSupport; + } + + return layer_vkGetDescriptorSetLayoutSupport; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDescriptorSetLayoutSupportKHR = requires( + VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport +) { + layer_vkGetDescriptorSetLayoutSupportKHR(device, pCreateInfo, pSupport); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDescriptorSetLayoutSupportKHR getLayerPtr_vkGetDescriptorSetLayoutSupportKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDescriptorSetLayoutSupportKHR) + { + return layer_vkGetDescriptorSetLayoutSupportKHR; + } + + return layer_vkGetDescriptorSetLayoutSupportKHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkGetDeviceAccelerationStructureCompatibilityKHR = requires( + VkDevice device, const VkAccelerationStructureVersionInfoKHR* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility +) { + layer_vkGetDeviceAccelerationStructureCompatibilityKHR(device, pVersionInfo, pCompatibility); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDeviceAccelerationStructureCompatibilityKHR getLayerPtr_vkGetDeviceAccelerationStructureCompatibilityKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceAccelerationStructureCompatibilityKHR) + { + return layer_vkGetDeviceAccelerationStructureCompatibilityKHR; + } + + return layer_vkGetDeviceAccelerationStructureCompatibilityKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceBufferMemoryRequirements = requires( + VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements +) { + layer_vkGetDeviceBufferMemoryRequirements(device, pInfo, pMemoryRequirements); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDeviceBufferMemoryRequirements getLayerPtr_vkGetDeviceBufferMemoryRequirements() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceBufferMemoryRequirements) + { + return layer_vkGetDeviceBufferMemoryRequirements; + } + + return layer_vkGetDeviceBufferMemoryRequirements; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceBufferMemoryRequirementsKHR = requires( + VkDevice device, const VkDeviceBufferMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements +) { + layer_vkGetDeviceBufferMemoryRequirementsKHR(device, pInfo, pMemoryRequirements); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetDeviceBufferMemoryRequirementsKHR getLayerPtr_vkGetDeviceBufferMemoryRequirementsKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceBufferMemoryRequirementsKHR) + { + return layer_vkGetDeviceBufferMemoryRequirementsKHR; + } + + return layer_vkGetDeviceBufferMemoryRequirementsKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceFaultInfoEXT = requires( + VkDevice device, VkDeviceFaultCountsEXT* pFaultCounts, VkDeviceFaultInfoEXT* pFaultInfo +) { + layer_vkGetDeviceFaultInfoEXT(device, pFaultCounts, pFaultInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDeviceFaultInfoEXT getLayerPtr_vkGetDeviceFaultInfoEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceFaultInfoEXT) + { + return layer_vkGetDeviceFaultInfoEXT; + } + + return layer_vkGetDeviceFaultInfoEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceGroupPeerMemoryFeatures = requires( + VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures +) { + layer_vkGetDeviceGroupPeerMemoryFeatures(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDeviceGroupPeerMemoryFeatures getLayerPtr_vkGetDeviceGroupPeerMemoryFeatures() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceGroupPeerMemoryFeatures) + { + return layer_vkGetDeviceGroupPeerMemoryFeatures; + } + + return layer_vkGetDeviceGroupPeerMemoryFeatures; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkGetDeviceGroupPeerMemoryFeaturesKHR = requires( + VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures +) { + layer_vkGetDeviceGroupPeerMemoryFeaturesKHR(device, heapIndex, localDeviceIndex, remoteDeviceIndex, pPeerMemoryFeatures); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDeviceGroupPeerMemoryFeaturesKHR getLayerPtr_vkGetDeviceGroupPeerMemoryFeaturesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceGroupPeerMemoryFeaturesKHR) + { + return layer_vkGetDeviceGroupPeerMemoryFeaturesKHR; + } + + return layer_vkGetDeviceGroupPeerMemoryFeaturesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceGroupPresentCapabilitiesKHR = requires( + VkDevice device, VkDeviceGroupPresentCapabilitiesKHR* pDeviceGroupPresentCapabilities +) { + layer_vkGetDeviceGroupPresentCapabilitiesKHR(device, pDeviceGroupPresentCapabilities); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDeviceGroupPresentCapabilitiesKHR getLayerPtr_vkGetDeviceGroupPresentCapabilitiesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceGroupPresentCapabilitiesKHR) + { + return layer_vkGetDeviceGroupPresentCapabilitiesKHR; + } + + return layer_vkGetDeviceGroupPresentCapabilitiesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceGroupSurfacePresentModesKHR = requires( + VkDevice device, VkSurfaceKHR surface, VkDeviceGroupPresentModeFlagsKHR* pModes +) { + layer_vkGetDeviceGroupSurfacePresentModesKHR(device, surface, pModes); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetDeviceGroupSurfacePresentModesKHR getLayerPtr_vkGetDeviceGroupSurfacePresentModesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceGroupSurfacePresentModesKHR) + { + return layer_vkGetDeviceGroupSurfacePresentModesKHR; + } + + return layer_vkGetDeviceGroupSurfacePresentModesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceImageMemoryRequirements = requires( + VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements +) { + layer_vkGetDeviceImageMemoryRequirements(device, pInfo, pMemoryRequirements); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDeviceImageMemoryRequirements getLayerPtr_vkGetDeviceImageMemoryRequirements() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceImageMemoryRequirements) + { + return layer_vkGetDeviceImageMemoryRequirements; + } + + return layer_vkGetDeviceImageMemoryRequirements; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceImageMemoryRequirementsKHR = requires( + VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, VkMemoryRequirements2* pMemoryRequirements +) { + layer_vkGetDeviceImageMemoryRequirementsKHR(device, pInfo, pMemoryRequirements); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDeviceImageMemoryRequirementsKHR getLayerPtr_vkGetDeviceImageMemoryRequirementsKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceImageMemoryRequirementsKHR) + { + return layer_vkGetDeviceImageMemoryRequirementsKHR; + } + + return layer_vkGetDeviceImageMemoryRequirementsKHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkGetDeviceImageSparseMemoryRequirements = requires( + VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements +) { + layer_vkGetDeviceImageSparseMemoryRequirements(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDeviceImageSparseMemoryRequirements getLayerPtr_vkGetDeviceImageSparseMemoryRequirements() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceImageSparseMemoryRequirements) + { + return layer_vkGetDeviceImageSparseMemoryRequirements; + } + + return layer_vkGetDeviceImageSparseMemoryRequirements; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceImageSparseMemoryRequirementsKHR = requires( + VkDevice device, const VkDeviceImageMemoryRequirements* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements +) { + layer_vkGetDeviceImageSparseMemoryRequirementsKHR(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDeviceImageSparseMemoryRequirementsKHR getLayerPtr_vkGetDeviceImageSparseMemoryRequirementsKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceImageSparseMemoryRequirementsKHR) + { + return layer_vkGetDeviceImageSparseMemoryRequirementsKHR; + } + + return layer_vkGetDeviceImageSparseMemoryRequirementsKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceImageSubresourceLayout = requires( + VkDevice device, const VkDeviceImageSubresourceInfo* pInfo, VkSubresourceLayout2* pLayout +) { + layer_vkGetDeviceImageSubresourceLayout(device, pInfo, pLayout); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetDeviceImageSubresourceLayout getLayerPtr_vkGetDeviceImageSubresourceLayout() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceImageSubresourceLayout) + { + return layer_vkGetDeviceImageSubresourceLayout; + } + + return layer_vkGetDeviceImageSubresourceLayout; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceImageSubresourceLayoutKHR = requires( + VkDevice device, const VkDeviceImageSubresourceInfo* pInfo, VkSubresourceLayout2* pLayout +) { + layer_vkGetDeviceImageSubresourceLayoutKHR(device, pInfo, pLayout); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDeviceImageSubresourceLayoutKHR getLayerPtr_vkGetDeviceImageSubresourceLayoutKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceImageSubresourceLayoutKHR) + { + return layer_vkGetDeviceImageSubresourceLayoutKHR; + } + + return layer_vkGetDeviceImageSubresourceLayoutKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceMemoryCommitment = requires( + VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes +) { + layer_vkGetDeviceMemoryCommitment(device, memory, pCommittedMemoryInBytes); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDeviceMemoryCommitment getLayerPtr_vkGetDeviceMemoryCommitment() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceMemoryCommitment) + { + return layer_vkGetDeviceMemoryCommitment; + } + + return layer_vkGetDeviceMemoryCommitment; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceMemoryOpaqueCaptureAddress = requires( + VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo +) { + layer_vkGetDeviceMemoryOpaqueCaptureAddress(device, pInfo); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetDeviceMemoryOpaqueCaptureAddress getLayerPtr_vkGetDeviceMemoryOpaqueCaptureAddress() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceMemoryOpaqueCaptureAddress) + { + return layer_vkGetDeviceMemoryOpaqueCaptureAddress; + } + + return layer_vkGetDeviceMemoryOpaqueCaptureAddress; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceMemoryOpaqueCaptureAddressKHR = requires( + VkDevice device, const VkDeviceMemoryOpaqueCaptureAddressInfo* pInfo +) { + layer_vkGetDeviceMemoryOpaqueCaptureAddressKHR(device, pInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR getLayerPtr_vkGetDeviceMemoryOpaqueCaptureAddressKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceMemoryOpaqueCaptureAddressKHR) + { + return layer_vkGetDeviceMemoryOpaqueCaptureAddressKHR; + } + + return layer_vkGetDeviceMemoryOpaqueCaptureAddressKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceMicromapCompatibilityEXT = requires( + VkDevice device, const VkMicromapVersionInfoEXT* pVersionInfo, VkAccelerationStructureCompatibilityKHR* pCompatibility +) { + layer_vkGetDeviceMicromapCompatibilityEXT(device, pVersionInfo, pCompatibility); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDeviceMicromapCompatibilityEXT getLayerPtr_vkGetDeviceMicromapCompatibilityEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceMicromapCompatibilityEXT) + { + return layer_vkGetDeviceMicromapCompatibilityEXT; + } + + return layer_vkGetDeviceMicromapCompatibilityEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceProcAddr = requires( + VkDevice device, const char* pName +) { + layer_vkGetDeviceProcAddr(device, pName); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetDeviceProcAddr getLayerPtr_vkGetDeviceProcAddr() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceProcAddr) + { + return layer_vkGetDeviceProcAddr; + } + + return layer_vkGetDeviceProcAddr; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceQueue = requires( + VkDevice device, uint32_t queueFamilyIndex, uint32_t queueIndex, VkQueue* pQueue +) { + layer_vkGetDeviceQueue(device, queueFamilyIndex, queueIndex, pQueue); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDeviceQueue getLayerPtr_vkGetDeviceQueue() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceQueue) + { + return layer_vkGetDeviceQueue; + } + + return layer_vkGetDeviceQueue; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceQueue2 = requires( + VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue +) { + layer_vkGetDeviceQueue2(device, pQueueInfo, pQueue); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDeviceQueue2 getLayerPtr_vkGetDeviceQueue2() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceQueue2) + { + return layer_vkGetDeviceQueue2; + } + + return layer_vkGetDeviceQueue2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDeviceTensorMemoryRequirementsARM = requires( + VkDevice device, const VkDeviceTensorMemoryRequirementsARM* pInfo, VkMemoryRequirements2* pMemoryRequirements +) { + layer_vkGetDeviceTensorMemoryRequirementsARM(device, pInfo, pMemoryRequirements); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetDeviceTensorMemoryRequirementsARM getLayerPtr_vkGetDeviceTensorMemoryRequirementsARM() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDeviceTensorMemoryRequirementsARM) + { + return layer_vkGetDeviceTensorMemoryRequirementsARM; + } + + return layer_vkGetDeviceTensorMemoryRequirementsARM; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetEventStatus = requires( + VkDevice device, VkEvent event +) { + layer_vkGetEventStatus(device, event); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetEventStatus getLayerPtr_vkGetEventStatus() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetEventStatus) + { + return layer_vkGetEventStatus; + } + + return layer_vkGetEventStatus; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetFenceFdKHR = requires( + VkDevice device, const VkFenceGetFdInfoKHR* pGetFdInfo, int* pFd +) { + layer_vkGetFenceFdKHR(device, pGetFdInfo, pFd); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetFenceFdKHR getLayerPtr_vkGetFenceFdKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetFenceFdKHR) + { + return layer_vkGetFenceFdKHR; + } + + return layer_vkGetFenceFdKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetFenceStatus = requires( + VkDevice device, VkFence fence +) { + layer_vkGetFenceStatus(device, fence); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetFenceStatus getLayerPtr_vkGetFenceStatus() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetFenceStatus) + { + return layer_vkGetFenceStatus; + } + + return layer_vkGetFenceStatus; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkGetGeneratedCommandsMemoryRequirementsEXT = requires( + VkDevice device, const VkGeneratedCommandsMemoryRequirementsInfoEXT* pInfo, VkMemoryRequirements2* pMemoryRequirements +) { + layer_vkGetGeneratedCommandsMemoryRequirementsEXT(device, pInfo, pMemoryRequirements); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetGeneratedCommandsMemoryRequirementsEXT getLayerPtr_vkGetGeneratedCommandsMemoryRequirementsEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetGeneratedCommandsMemoryRequirementsEXT) + { + return layer_vkGetGeneratedCommandsMemoryRequirementsEXT; + } + + return layer_vkGetGeneratedCommandsMemoryRequirementsEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetImageDrmFormatModifierPropertiesEXT = requires( + VkDevice device, VkImage image, VkImageDrmFormatModifierPropertiesEXT* pProperties +) { + layer_vkGetImageDrmFormatModifierPropertiesEXT(device, image, pProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetImageDrmFormatModifierPropertiesEXT getLayerPtr_vkGetImageDrmFormatModifierPropertiesEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetImageDrmFormatModifierPropertiesEXT) + { + return layer_vkGetImageDrmFormatModifierPropertiesEXT; + } + + return layer_vkGetImageDrmFormatModifierPropertiesEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetImageMemoryRequirements = requires( + VkDevice device, VkImage image, VkMemoryRequirements* pMemoryRequirements +) { + layer_vkGetImageMemoryRequirements(device, image, pMemoryRequirements); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetImageMemoryRequirements getLayerPtr_vkGetImageMemoryRequirements() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetImageMemoryRequirements) + { + return layer_vkGetImageMemoryRequirements; + } + + return layer_vkGetImageMemoryRequirements; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetImageMemoryRequirements2 = requires( + VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements +) { + layer_vkGetImageMemoryRequirements2(device, pInfo, pMemoryRequirements); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetImageMemoryRequirements2 getLayerPtr_vkGetImageMemoryRequirements2() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetImageMemoryRequirements2) + { + return layer_vkGetImageMemoryRequirements2; + } + + return layer_vkGetImageMemoryRequirements2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetImageMemoryRequirements2KHR = requires( + VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements +) { + layer_vkGetImageMemoryRequirements2KHR(device, pInfo, pMemoryRequirements); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetImageMemoryRequirements2KHR getLayerPtr_vkGetImageMemoryRequirements2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetImageMemoryRequirements2KHR) + { + return layer_vkGetImageMemoryRequirements2KHR; + } + + return layer_vkGetImageMemoryRequirements2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetImageOpaqueCaptureDescriptorDataEXT = requires( + VkDevice device, const VkImageCaptureDescriptorDataInfoEXT* pInfo, void* pData +) { + layer_vkGetImageOpaqueCaptureDescriptorDataEXT(device, pInfo, pData); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetImageOpaqueCaptureDescriptorDataEXT getLayerPtr_vkGetImageOpaqueCaptureDescriptorDataEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetImageOpaqueCaptureDescriptorDataEXT) + { + return layer_vkGetImageOpaqueCaptureDescriptorDataEXT; + } + + return layer_vkGetImageOpaqueCaptureDescriptorDataEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetImageSparseMemoryRequirements = requires( + VkDevice device, VkImage image, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements* pSparseMemoryRequirements +) { + layer_vkGetImageSparseMemoryRequirements(device, image, pSparseMemoryRequirementCount, pSparseMemoryRequirements); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetImageSparseMemoryRequirements getLayerPtr_vkGetImageSparseMemoryRequirements() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetImageSparseMemoryRequirements) + { + return layer_vkGetImageSparseMemoryRequirements; + } + + return layer_vkGetImageSparseMemoryRequirements; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetImageSparseMemoryRequirements2 = requires( + VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements +) { + layer_vkGetImageSparseMemoryRequirements2(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetImageSparseMemoryRequirements2 getLayerPtr_vkGetImageSparseMemoryRequirements2() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetImageSparseMemoryRequirements2) + { + return layer_vkGetImageSparseMemoryRequirements2; + } + + return layer_vkGetImageSparseMemoryRequirements2; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkGetImageSparseMemoryRequirements2KHR = requires( + VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements +) { + layer_vkGetImageSparseMemoryRequirements2KHR(device, pInfo, pSparseMemoryRequirementCount, pSparseMemoryRequirements); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetImageSparseMemoryRequirements2KHR getLayerPtr_vkGetImageSparseMemoryRequirements2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetImageSparseMemoryRequirements2KHR) + { + return layer_vkGetImageSparseMemoryRequirements2KHR; + } + + return layer_vkGetImageSparseMemoryRequirements2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetImageSubresourceLayout = requires( + VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout +) { + layer_vkGetImageSubresourceLayout(device, image, pSubresource, pLayout); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetImageSubresourceLayout getLayerPtr_vkGetImageSubresourceLayout() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetImageSubresourceLayout) + { + return layer_vkGetImageSubresourceLayout; + } + + return layer_vkGetImageSubresourceLayout; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetImageSubresourceLayout2 = requires( + VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, VkSubresourceLayout2* pLayout +) { + layer_vkGetImageSubresourceLayout2(device, image, pSubresource, pLayout); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetImageSubresourceLayout2 getLayerPtr_vkGetImageSubresourceLayout2() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetImageSubresourceLayout2) + { + return layer_vkGetImageSubresourceLayout2; + } + + return layer_vkGetImageSubresourceLayout2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetImageSubresourceLayout2EXT = requires( + VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, VkSubresourceLayout2* pLayout +) { + layer_vkGetImageSubresourceLayout2EXT(device, image, pSubresource, pLayout); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetImageSubresourceLayout2EXT getLayerPtr_vkGetImageSubresourceLayout2EXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetImageSubresourceLayout2EXT) + { + return layer_vkGetImageSubresourceLayout2EXT; + } + + return layer_vkGetImageSubresourceLayout2EXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetImageSubresourceLayout2KHR = requires( + VkDevice device, VkImage image, const VkImageSubresource2* pSubresource, VkSubresourceLayout2* pLayout +) { + layer_vkGetImageSubresourceLayout2KHR(device, image, pSubresource, pLayout); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetImageSubresourceLayout2KHR getLayerPtr_vkGetImageSubresourceLayout2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetImageSubresourceLayout2KHR) + { + return layer_vkGetImageSubresourceLayout2KHR; + } + + return layer_vkGetImageSubresourceLayout2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetImageViewOpaqueCaptureDescriptorDataEXT = requires( + VkDevice device, const VkImageViewCaptureDescriptorDataInfoEXT* pInfo, void* pData +) { + layer_vkGetImageViewOpaqueCaptureDescriptorDataEXT(device, pInfo, pData); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT getLayerPtr_vkGetImageViewOpaqueCaptureDescriptorDataEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetImageViewOpaqueCaptureDescriptorDataEXT) + { + return layer_vkGetImageViewOpaqueCaptureDescriptorDataEXT; + } + + return layer_vkGetImageViewOpaqueCaptureDescriptorDataEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetMemoryFdKHR = requires( + VkDevice device, const VkMemoryGetFdInfoKHR* pGetFdInfo, int* pFd +) { + layer_vkGetMemoryFdKHR(device, pGetFdInfo, pFd); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetMemoryFdKHR getLayerPtr_vkGetMemoryFdKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetMemoryFdKHR) + { + return layer_vkGetMemoryFdKHR; + } + + return layer_vkGetMemoryFdKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetMemoryFdPropertiesKHR = requires( + VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, int fd, VkMemoryFdPropertiesKHR* pMemoryFdProperties +) { + layer_vkGetMemoryFdPropertiesKHR(device, handleType, fd, pMemoryFdProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetMemoryFdPropertiesKHR getLayerPtr_vkGetMemoryFdPropertiesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetMemoryFdPropertiesKHR) + { + return layer_vkGetMemoryFdPropertiesKHR; + } + + return layer_vkGetMemoryFdPropertiesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetMemoryHostPointerPropertiesEXT = requires( + VkDevice device, VkExternalMemoryHandleTypeFlagBits handleType, const void* pHostPointer, VkMemoryHostPointerPropertiesEXT* pMemoryHostPointerProperties +) { + layer_vkGetMemoryHostPointerPropertiesEXT(device, handleType, pHostPointer, pMemoryHostPointerProperties); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetMemoryHostPointerPropertiesEXT getLayerPtr_vkGetMemoryHostPointerPropertiesEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetMemoryHostPointerPropertiesEXT) + { + return layer_vkGetMemoryHostPointerPropertiesEXT; + } + + return layer_vkGetMemoryHostPointerPropertiesEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetMicromapBuildSizesEXT = requires( + VkDevice device, VkAccelerationStructureBuildTypeKHR buildType, const VkMicromapBuildInfoEXT* pBuildInfo, VkMicromapBuildSizesInfoEXT* pSizeInfo +) { + layer_vkGetMicromapBuildSizesEXT(device, buildType, pBuildInfo, pSizeInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetMicromapBuildSizesEXT getLayerPtr_vkGetMicromapBuildSizesEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetMicromapBuildSizesEXT) + { + return layer_vkGetMicromapBuildSizesEXT; + } + + return layer_vkGetMicromapBuildSizesEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPipelineBinaryDataKHR = requires( + VkDevice device, const VkPipelineBinaryDataInfoKHR* pInfo, VkPipelineBinaryKeyKHR* pPipelineBinaryKey, size_t* pPipelineBinaryDataSize, void* pPipelineBinaryData +) { + layer_vkGetPipelineBinaryDataKHR(device, pInfo, pPipelineBinaryKey, pPipelineBinaryDataSize, pPipelineBinaryData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPipelineBinaryDataKHR getLayerPtr_vkGetPipelineBinaryDataKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPipelineBinaryDataKHR) + { + return layer_vkGetPipelineBinaryDataKHR; + } + + return layer_vkGetPipelineBinaryDataKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPipelineCacheData = requires( + VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData +) { + layer_vkGetPipelineCacheData(device, pipelineCache, pDataSize, pData); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetPipelineCacheData getLayerPtr_vkGetPipelineCacheData() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPipelineCacheData) + { + return layer_vkGetPipelineCacheData; + } + + return layer_vkGetPipelineCacheData; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPipelineExecutableInternalRepresentationsKHR = requires( + VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pInternalRepresentationCount, VkPipelineExecutableInternalRepresentationKHR* pInternalRepresentations +) { + layer_vkGetPipelineExecutableInternalRepresentationsKHR(device, pExecutableInfo, pInternalRepresentationCount, pInternalRepresentations); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPipelineExecutableInternalRepresentationsKHR getLayerPtr_vkGetPipelineExecutableInternalRepresentationsKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPipelineExecutableInternalRepresentationsKHR) + { + return layer_vkGetPipelineExecutableInternalRepresentationsKHR; + } + + return layer_vkGetPipelineExecutableInternalRepresentationsKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPipelineExecutablePropertiesKHR = requires( + VkDevice device, const VkPipelineInfoKHR* pPipelineInfo, uint32_t* pExecutableCount, VkPipelineExecutablePropertiesKHR* pProperties +) { + layer_vkGetPipelineExecutablePropertiesKHR(device, pPipelineInfo, pExecutableCount, pProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPipelineExecutablePropertiesKHR getLayerPtr_vkGetPipelineExecutablePropertiesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPipelineExecutablePropertiesKHR) + { + return layer_vkGetPipelineExecutablePropertiesKHR; + } + + return layer_vkGetPipelineExecutablePropertiesKHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkGetPipelineExecutableStatisticsKHR = requires( + VkDevice device, const VkPipelineExecutableInfoKHR* pExecutableInfo, uint32_t* pStatisticCount, VkPipelineExecutableStatisticKHR* pStatistics +) { + layer_vkGetPipelineExecutableStatisticsKHR(device, pExecutableInfo, pStatisticCount, pStatistics); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPipelineExecutableStatisticsKHR getLayerPtr_vkGetPipelineExecutableStatisticsKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPipelineExecutableStatisticsKHR) + { + return layer_vkGetPipelineExecutableStatisticsKHR; + } + + return layer_vkGetPipelineExecutableStatisticsKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPipelineKeyKHR = requires( + VkDevice device, const VkPipelineCreateInfoKHR* pPipelineCreateInfo, VkPipelineBinaryKeyKHR* pPipelineKey +) { + layer_vkGetPipelineKeyKHR(device, pPipelineCreateInfo, pPipelineKey); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPipelineKeyKHR getLayerPtr_vkGetPipelineKeyKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPipelineKeyKHR) + { + return layer_vkGetPipelineKeyKHR; + } + + return layer_vkGetPipelineKeyKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPipelinePropertiesEXT = requires( + VkDevice device, const VkPipelineInfoEXT* pPipelineInfo, VkBaseOutStructure* pPipelineProperties +) { + layer_vkGetPipelinePropertiesEXT(device, pPipelineInfo, pPipelineProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPipelinePropertiesEXT getLayerPtr_vkGetPipelinePropertiesEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPipelinePropertiesEXT) + { + return layer_vkGetPipelinePropertiesEXT; + } + + return layer_vkGetPipelinePropertiesEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkGetPrivateData = requires( + VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t* pData +) { + layer_vkGetPrivateData(device, objectType, objectHandle, privateDataSlot, pData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPrivateData getLayerPtr_vkGetPrivateData() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPrivateData) + { + return layer_vkGetPrivateData; + } + + return layer_vkGetPrivateData; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPrivateDataEXT = requires( + VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t* pData +) { + layer_vkGetPrivateDataEXT(device, objectType, objectHandle, privateDataSlot, pData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPrivateDataEXT getLayerPtr_vkGetPrivateDataEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPrivateDataEXT) + { + return layer_vkGetPrivateDataEXT; + } + + return layer_vkGetPrivateDataEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetQueryPoolResults = requires( + VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags +) { + layer_vkGetQueryPoolResults(device, queryPool, firstQuery, queryCount, dataSize, pData, stride, flags); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetQueryPoolResults getLayerPtr_vkGetQueryPoolResults() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetQueryPoolResults) + { + return layer_vkGetQueryPoolResults; + } + + return layer_vkGetQueryPoolResults; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR = requires( + VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData +) { + layer_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR(device, pipeline, firstGroup, groupCount, dataSize, pData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR getLayerPtr_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR) + { + return layer_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR; + } + + return layer_vkGetRayTracingCaptureReplayShaderGroupHandlesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetRayTracingShaderGroupHandlesKHR = requires( + VkDevice device, VkPipeline pipeline, uint32_t firstGroup, uint32_t groupCount, size_t dataSize, void* pData +) { + layer_vkGetRayTracingShaderGroupHandlesKHR(device, pipeline, firstGroup, groupCount, dataSize, pData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetRayTracingShaderGroupHandlesKHR getLayerPtr_vkGetRayTracingShaderGroupHandlesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetRayTracingShaderGroupHandlesKHR) + { + return layer_vkGetRayTracingShaderGroupHandlesKHR; + } + + return layer_vkGetRayTracingShaderGroupHandlesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetRayTracingShaderGroupStackSizeKHR = requires( + VkDevice device, VkPipeline pipeline, uint32_t group, VkShaderGroupShaderKHR groupShader +) { + layer_vkGetRayTracingShaderGroupStackSizeKHR(device, pipeline, group, groupShader); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetRayTracingShaderGroupStackSizeKHR getLayerPtr_vkGetRayTracingShaderGroupStackSizeKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetRayTracingShaderGroupStackSizeKHR) + { + return layer_vkGetRayTracingShaderGroupStackSizeKHR; + } + + return layer_vkGetRayTracingShaderGroupStackSizeKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetRenderAreaGranularity = requires( + VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity +) { + layer_vkGetRenderAreaGranularity(device, renderPass, pGranularity); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetRenderAreaGranularity getLayerPtr_vkGetRenderAreaGranularity() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetRenderAreaGranularity) + { + return layer_vkGetRenderAreaGranularity; + } + + return layer_vkGetRenderAreaGranularity; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetRenderingAreaGranularity = requires( + VkDevice device, const VkRenderingAreaInfo* pRenderingAreaInfo, VkExtent2D* pGranularity +) { + layer_vkGetRenderingAreaGranularity(device, pRenderingAreaInfo, pGranularity); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetRenderingAreaGranularity getLayerPtr_vkGetRenderingAreaGranularity() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetRenderingAreaGranularity) + { + return layer_vkGetRenderingAreaGranularity; + } + + return layer_vkGetRenderingAreaGranularity; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetRenderingAreaGranularityKHR = requires( + VkDevice device, const VkRenderingAreaInfo* pRenderingAreaInfo, VkExtent2D* pGranularity +) { + layer_vkGetRenderingAreaGranularityKHR(device, pRenderingAreaInfo, pGranularity); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetRenderingAreaGranularityKHR getLayerPtr_vkGetRenderingAreaGranularityKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetRenderingAreaGranularityKHR) + { + return layer_vkGetRenderingAreaGranularityKHR; + } + + return layer_vkGetRenderingAreaGranularityKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetSamplerOpaqueCaptureDescriptorDataEXT = requires( + VkDevice device, const VkSamplerCaptureDescriptorDataInfoEXT* pInfo, void* pData +) { + layer_vkGetSamplerOpaqueCaptureDescriptorDataEXT(device, pInfo, pData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT getLayerPtr_vkGetSamplerOpaqueCaptureDescriptorDataEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetSamplerOpaqueCaptureDescriptorDataEXT) + { + return layer_vkGetSamplerOpaqueCaptureDescriptorDataEXT; + } + + return layer_vkGetSamplerOpaqueCaptureDescriptorDataEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetSemaphoreCounterValue = requires( + VkDevice device, VkSemaphore semaphore, uint64_t* pValue +) { + layer_vkGetSemaphoreCounterValue(device, semaphore, pValue); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetSemaphoreCounterValue getLayerPtr_vkGetSemaphoreCounterValue() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetSemaphoreCounterValue) + { + return layer_vkGetSemaphoreCounterValue; + } + + return layer_vkGetSemaphoreCounterValue; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetSemaphoreCounterValueKHR = requires( + VkDevice device, VkSemaphore semaphore, uint64_t* pValue +) { + layer_vkGetSemaphoreCounterValueKHR(device, semaphore, pValue); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetSemaphoreCounterValueKHR getLayerPtr_vkGetSemaphoreCounterValueKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetSemaphoreCounterValueKHR) + { + return layer_vkGetSemaphoreCounterValueKHR; + } + + return layer_vkGetSemaphoreCounterValueKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetSemaphoreFdKHR = requires( + VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo, int* pFd +) { + layer_vkGetSemaphoreFdKHR(device, pGetFdInfo, pFd); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetSemaphoreFdKHR getLayerPtr_vkGetSemaphoreFdKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetSemaphoreFdKHR) + { + return layer_vkGetSemaphoreFdKHR; + } + + return layer_vkGetSemaphoreFdKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetShaderBinaryDataEXT = requires( + VkDevice device, VkShaderEXT shader, size_t* pDataSize, void* pData +) { + layer_vkGetShaderBinaryDataEXT(device, shader, pDataSize, pData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetShaderBinaryDataEXT getLayerPtr_vkGetShaderBinaryDataEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetShaderBinaryDataEXT) + { + return layer_vkGetShaderBinaryDataEXT; + } + + return layer_vkGetShaderBinaryDataEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetShaderModuleCreateInfoIdentifierEXT = requires( + VkDevice device, const VkShaderModuleCreateInfo* pCreateInfo, VkShaderModuleIdentifierEXT* pIdentifier +) { + layer_vkGetShaderModuleCreateInfoIdentifierEXT(device, pCreateInfo, pIdentifier); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetShaderModuleCreateInfoIdentifierEXT getLayerPtr_vkGetShaderModuleCreateInfoIdentifierEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetShaderModuleCreateInfoIdentifierEXT) + { + return layer_vkGetShaderModuleCreateInfoIdentifierEXT; + } + + return layer_vkGetShaderModuleCreateInfoIdentifierEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetShaderModuleIdentifierEXT = requires( + VkDevice device, VkShaderModule shaderModule, VkShaderModuleIdentifierEXT* pIdentifier +) { + layer_vkGetShaderModuleIdentifierEXT(device, shaderModule, pIdentifier); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetShaderModuleIdentifierEXT getLayerPtr_vkGetShaderModuleIdentifierEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetShaderModuleIdentifierEXT) + { + return layer_vkGetShaderModuleIdentifierEXT; + } + + return layer_vkGetShaderModuleIdentifierEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetSwapchainCounterEXT = requires( + VkDevice device, VkSwapchainKHR swapchain, VkSurfaceCounterFlagBitsEXT counter, uint64_t* pCounterValue +) { + layer_vkGetSwapchainCounterEXT(device, swapchain, counter, pCounterValue); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetSwapchainCounterEXT getLayerPtr_vkGetSwapchainCounterEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetSwapchainCounterEXT) + { + return layer_vkGetSwapchainCounterEXT; + } + + return layer_vkGetSwapchainCounterEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetSwapchainImagesKHR = requires( + VkDevice device, VkSwapchainKHR swapchain, uint32_t* pSwapchainImageCount, VkImage* pSwapchainImages +) { + layer_vkGetSwapchainImagesKHR(device, swapchain, pSwapchainImageCount, pSwapchainImages); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetSwapchainImagesKHR getLayerPtr_vkGetSwapchainImagesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetSwapchainImagesKHR) + { + return layer_vkGetSwapchainImagesKHR; + } + + return layer_vkGetSwapchainImagesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetSwapchainStatusKHR = requires( + VkDevice device, VkSwapchainKHR swapchain +) { + layer_vkGetSwapchainStatusKHR(device, swapchain); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetSwapchainStatusKHR getLayerPtr_vkGetSwapchainStatusKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetSwapchainStatusKHR) + { + return layer_vkGetSwapchainStatusKHR; + } + + return layer_vkGetSwapchainStatusKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetTensorMemoryRequirementsARM = requires( + VkDevice device, const VkTensorMemoryRequirementsInfoARM* pInfo, VkMemoryRequirements2* pMemoryRequirements +) { + layer_vkGetTensorMemoryRequirementsARM(device, pInfo, pMemoryRequirements); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetTensorMemoryRequirementsARM getLayerPtr_vkGetTensorMemoryRequirementsARM() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetTensorMemoryRequirementsARM) + { + return layer_vkGetTensorMemoryRequirementsARM; + } + + return layer_vkGetTensorMemoryRequirementsARM; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetTensorOpaqueCaptureDescriptorDataARM = requires( + VkDevice device, const VkTensorCaptureDescriptorDataInfoARM* pInfo, void* pData +) { + layer_vkGetTensorOpaqueCaptureDescriptorDataARM(device, pInfo, pData); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetTensorOpaqueCaptureDescriptorDataARM getLayerPtr_vkGetTensorOpaqueCaptureDescriptorDataARM() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetTensorOpaqueCaptureDescriptorDataARM) + { + return layer_vkGetTensorOpaqueCaptureDescriptorDataARM; + } + + return layer_vkGetTensorOpaqueCaptureDescriptorDataARM; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetTensorViewOpaqueCaptureDescriptorDataARM = requires( + VkDevice device, const VkTensorViewCaptureDescriptorDataInfoARM* pInfo, void* pData +) { + layer_vkGetTensorViewOpaqueCaptureDescriptorDataARM(device, pInfo, pData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetTensorViewOpaqueCaptureDescriptorDataARM getLayerPtr_vkGetTensorViewOpaqueCaptureDescriptorDataARM() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetTensorViewOpaqueCaptureDescriptorDataARM) + { + return layer_vkGetTensorViewOpaqueCaptureDescriptorDataARM; + } + + return layer_vkGetTensorViewOpaqueCaptureDescriptorDataARM; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetValidationCacheDataEXT = requires( + VkDevice device, VkValidationCacheEXT validationCache, size_t* pDataSize, void* pData +) { + layer_vkGetValidationCacheDataEXT(device, validationCache, pDataSize, pData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetValidationCacheDataEXT getLayerPtr_vkGetValidationCacheDataEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetValidationCacheDataEXT) + { + return layer_vkGetValidationCacheDataEXT; + } + + return layer_vkGetValidationCacheDataEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkImportFenceFdKHR = requires( + VkDevice device, const VkImportFenceFdInfoKHR* pImportFenceFdInfo +) { + layer_vkImportFenceFdKHR(device, pImportFenceFdInfo); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkImportFenceFdKHR getLayerPtr_vkImportFenceFdKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkImportFenceFdKHR) + { + return layer_vkImportFenceFdKHR; + } + + return layer_vkImportFenceFdKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkImportSemaphoreFdKHR = requires( + VkDevice device, const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo +) { + layer_vkImportSemaphoreFdKHR(device, pImportSemaphoreFdInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkImportSemaphoreFdKHR getLayerPtr_vkImportSemaphoreFdKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkImportSemaphoreFdKHR) + { + return layer_vkImportSemaphoreFdKHR; + } + + return layer_vkImportSemaphoreFdKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkInvalidateMappedMemoryRanges = requires( + VkDevice device, uint32_t memoryRangeCount, const VkMappedMemoryRange* pMemoryRanges +) { + layer_vkInvalidateMappedMemoryRanges(device, memoryRangeCount, pMemoryRanges); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkInvalidateMappedMemoryRanges getLayerPtr_vkInvalidateMappedMemoryRanges() +{ + return [] + { + if constexpr(hasLayerPtr_vkInvalidateMappedMemoryRanges) + { + return layer_vkInvalidateMappedMemoryRanges; + } + + return layer_vkInvalidateMappedMemoryRanges; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkMapMemory = requires( + VkDevice device, VkDeviceMemory memory, VkDeviceSize offset, VkDeviceSize size, VkMemoryMapFlags flags, void** ppData +) { + layer_vkMapMemory(device, memory, offset, size, flags, ppData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkMapMemory getLayerPtr_vkMapMemory() +{ + return [] + { + if constexpr(hasLayerPtr_vkMapMemory) + { + return layer_vkMapMemory; + } + + return layer_vkMapMemory; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkMapMemory2 = requires( + VkDevice device, const VkMemoryMapInfo* pMemoryMapInfo, void** ppData +) { + layer_vkMapMemory2(device, pMemoryMapInfo, ppData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkMapMemory2 getLayerPtr_vkMapMemory2() +{ + return [] + { + if constexpr(hasLayerPtr_vkMapMemory2) + { + return layer_vkMapMemory2; + } + + return layer_vkMapMemory2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkMapMemory2KHR = requires( + VkDevice device, const VkMemoryMapInfo* pMemoryMapInfo, void** ppData +) { + layer_vkMapMemory2KHR(device, pMemoryMapInfo, ppData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkMapMemory2KHR getLayerPtr_vkMapMemory2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkMapMemory2KHR) + { + return layer_vkMapMemory2KHR; + } + + return layer_vkMapMemory2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkMergePipelineCaches = requires( + VkDevice device, VkPipelineCache dstCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches +) { + layer_vkMergePipelineCaches(device, dstCache, srcCacheCount, pSrcCaches); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkMergePipelineCaches getLayerPtr_vkMergePipelineCaches() +{ + return [] + { + if constexpr(hasLayerPtr_vkMergePipelineCaches) + { + return layer_vkMergePipelineCaches; + } + + return layer_vkMergePipelineCaches; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkMergeValidationCachesEXT = requires( + VkDevice device, VkValidationCacheEXT dstCache, uint32_t srcCacheCount, const VkValidationCacheEXT* pSrcCaches +) { + layer_vkMergeValidationCachesEXT(device, dstCache, srcCacheCount, pSrcCaches); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkMergeValidationCachesEXT getLayerPtr_vkMergeValidationCachesEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkMergeValidationCachesEXT) + { + return layer_vkMergeValidationCachesEXT; + } + + return layer_vkMergeValidationCachesEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkQueueBeginDebugUtilsLabelEXT = requires( + VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo +) { + layer_vkQueueBeginDebugUtilsLabelEXT(queue, pLabelInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkQueueBeginDebugUtilsLabelEXT getLayerPtr_vkQueueBeginDebugUtilsLabelEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkQueueBeginDebugUtilsLabelEXT) + { + return layer_vkQueueBeginDebugUtilsLabelEXT; + } + + return layer_vkQueueBeginDebugUtilsLabelEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkQueueBindSparse = requires( + VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence +) { + layer_vkQueueBindSparse(queue, bindInfoCount, pBindInfo, fence); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkQueueBindSparse getLayerPtr_vkQueueBindSparse() +{ + return [] + { + if constexpr(hasLayerPtr_vkQueueBindSparse) + { + return layer_vkQueueBindSparse; + } + + return layer_vkQueueBindSparse; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkQueueEndDebugUtilsLabelEXT = requires( + VkQueue queue +) { + layer_vkQueueEndDebugUtilsLabelEXT(queue); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkQueueEndDebugUtilsLabelEXT getLayerPtr_vkQueueEndDebugUtilsLabelEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkQueueEndDebugUtilsLabelEXT) + { + return layer_vkQueueEndDebugUtilsLabelEXT; + } + + return layer_vkQueueEndDebugUtilsLabelEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkQueueInsertDebugUtilsLabelEXT = requires( + VkQueue queue, const VkDebugUtilsLabelEXT* pLabelInfo +) { + layer_vkQueueInsertDebugUtilsLabelEXT(queue, pLabelInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkQueueInsertDebugUtilsLabelEXT getLayerPtr_vkQueueInsertDebugUtilsLabelEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkQueueInsertDebugUtilsLabelEXT) + { + return layer_vkQueueInsertDebugUtilsLabelEXT; + } + + return layer_vkQueueInsertDebugUtilsLabelEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkQueuePresentKHR = requires( + VkQueue queue, const VkPresentInfoKHR* pPresentInfo +) { + layer_vkQueuePresentKHR(queue, pPresentInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkQueuePresentKHR getLayerPtr_vkQueuePresentKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkQueuePresentKHR) + { + return layer_vkQueuePresentKHR; + } + + return layer_vkQueuePresentKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkQueueSubmit = requires( + VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence +) { + layer_vkQueueSubmit(queue, submitCount, pSubmits, fence); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkQueueSubmit getLayerPtr_vkQueueSubmit() +{ + return [] + { + if constexpr(hasLayerPtr_vkQueueSubmit) + { + return layer_vkQueueSubmit; + } + + return layer_vkQueueSubmit; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkQueueSubmit2 = requires( + VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence +) { + layer_vkQueueSubmit2(queue, submitCount, pSubmits, fence); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkQueueSubmit2 getLayerPtr_vkQueueSubmit2() +{ + return [] + { + if constexpr(hasLayerPtr_vkQueueSubmit2) + { + return layer_vkQueueSubmit2; + } + + return layer_vkQueueSubmit2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkQueueSubmit2KHR = requires( + VkQueue queue, uint32_t submitCount, const VkSubmitInfo2* pSubmits, VkFence fence +) { + layer_vkQueueSubmit2KHR(queue, submitCount, pSubmits, fence); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkQueueSubmit2KHR getLayerPtr_vkQueueSubmit2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkQueueSubmit2KHR) + { + return layer_vkQueueSubmit2KHR; + } + + return layer_vkQueueSubmit2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkQueueWaitIdle = requires( + VkQueue queue +) { + layer_vkQueueWaitIdle(queue); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkQueueWaitIdle getLayerPtr_vkQueueWaitIdle() +{ + return [] + { + if constexpr(hasLayerPtr_vkQueueWaitIdle) + { + return layer_vkQueueWaitIdle; + } + + return layer_vkQueueWaitIdle; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkRegisterDeviceEventEXT = requires( + VkDevice device, const VkDeviceEventInfoEXT* pDeviceEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence +) { + layer_vkRegisterDeviceEventEXT(device, pDeviceEventInfo, pAllocator, pFence); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkRegisterDeviceEventEXT getLayerPtr_vkRegisterDeviceEventEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkRegisterDeviceEventEXT) + { + return layer_vkRegisterDeviceEventEXT; + } + + return layer_vkRegisterDeviceEventEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkRegisterDisplayEventEXT = requires( + VkDevice device, VkDisplayKHR display, const VkDisplayEventInfoEXT* pDisplayEventInfo, const VkAllocationCallbacks* pAllocator, VkFence* pFence +) { + layer_vkRegisterDisplayEventEXT(device, display, pDisplayEventInfo, pAllocator, pFence); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkRegisterDisplayEventEXT getLayerPtr_vkRegisterDisplayEventEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkRegisterDisplayEventEXT) + { + return layer_vkRegisterDisplayEventEXT; + } + + return layer_vkRegisterDisplayEventEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkReleaseCapturedPipelineDataKHR = requires( + VkDevice device, const VkReleaseCapturedPipelineDataInfoKHR* pInfo, const VkAllocationCallbacks* pAllocator +) { + layer_vkReleaseCapturedPipelineDataKHR(device, pInfo, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkReleaseCapturedPipelineDataKHR getLayerPtr_vkReleaseCapturedPipelineDataKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkReleaseCapturedPipelineDataKHR) + { + return layer_vkReleaseCapturedPipelineDataKHR; + } + + return layer_vkReleaseCapturedPipelineDataKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkReleaseProfilingLockKHR = requires( + VkDevice device +) { + layer_vkReleaseProfilingLockKHR(device); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkReleaseProfilingLockKHR getLayerPtr_vkReleaseProfilingLockKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkReleaseProfilingLockKHR) + { + return layer_vkReleaseProfilingLockKHR; + } + + return layer_vkReleaseProfilingLockKHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkReleaseSwapchainImagesEXT = requires( + VkDevice device, const VkReleaseSwapchainImagesInfoEXT* pReleaseInfo +) { + layer_vkReleaseSwapchainImagesEXT(device, pReleaseInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkReleaseSwapchainImagesEXT getLayerPtr_vkReleaseSwapchainImagesEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkReleaseSwapchainImagesEXT) + { + return layer_vkReleaseSwapchainImagesEXT; + } + + return layer_vkReleaseSwapchainImagesEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkResetCommandBuffer = requires( + VkCommandBuffer commandBuffer, VkCommandBufferResetFlags flags +) { + layer_vkResetCommandBuffer(commandBuffer, flags); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkResetCommandBuffer getLayerPtr_vkResetCommandBuffer() +{ + return [] + { + if constexpr(hasLayerPtr_vkResetCommandBuffer) + { + return layer_vkResetCommandBuffer; + } + + return layer_vkResetCommandBuffer; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkResetCommandPool = requires( + VkDevice device, VkCommandPool commandPool, VkCommandPoolResetFlags flags +) { + layer_vkResetCommandPool(device, commandPool, flags); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkResetCommandPool getLayerPtr_vkResetCommandPool() +{ + return [] + { + if constexpr(hasLayerPtr_vkResetCommandPool) + { + return layer_vkResetCommandPool; + } + + return layer_vkResetCommandPool; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkResetDescriptorPool = requires( + VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags +) { + layer_vkResetDescriptorPool(device, descriptorPool, flags); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkResetDescriptorPool getLayerPtr_vkResetDescriptorPool() +{ + return [] + { + if constexpr(hasLayerPtr_vkResetDescriptorPool) + { + return layer_vkResetDescriptorPool; + } + + return layer_vkResetDescriptorPool; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkResetEvent = requires( + VkDevice device, VkEvent event +) { + layer_vkResetEvent(device, event); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkResetEvent getLayerPtr_vkResetEvent() +{ + return [] + { + if constexpr(hasLayerPtr_vkResetEvent) + { + return layer_vkResetEvent; + } + + return layer_vkResetEvent; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkResetFences = requires( + VkDevice device, uint32_t fenceCount, const VkFence* pFences +) { + layer_vkResetFences(device, fenceCount, pFences); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkResetFences getLayerPtr_vkResetFences() +{ + return [] + { + if constexpr(hasLayerPtr_vkResetFences) + { + return layer_vkResetFences; + } + + return layer_vkResetFences; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkResetQueryPool = requires( + VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount +) { + layer_vkResetQueryPool(device, queryPool, firstQuery, queryCount); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkResetQueryPool getLayerPtr_vkResetQueryPool() +{ + return [] + { + if constexpr(hasLayerPtr_vkResetQueryPool) + { + return layer_vkResetQueryPool; + } + + return layer_vkResetQueryPool; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkResetQueryPoolEXT = requires( + VkDevice device, VkQueryPool queryPool, uint32_t firstQuery, uint32_t queryCount +) { + layer_vkResetQueryPoolEXT(device, queryPool, firstQuery, queryCount); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkResetQueryPoolEXT getLayerPtr_vkResetQueryPoolEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkResetQueryPoolEXT) + { + return layer_vkResetQueryPoolEXT; + } + + return layer_vkResetQueryPoolEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkSetDebugUtilsObjectNameEXT = requires( + VkDevice device, const VkDebugUtilsObjectNameInfoEXT* pNameInfo +) { + layer_vkSetDebugUtilsObjectNameEXT(device, pNameInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkSetDebugUtilsObjectNameEXT getLayerPtr_vkSetDebugUtilsObjectNameEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkSetDebugUtilsObjectNameEXT) + { + return layer_vkSetDebugUtilsObjectNameEXT; + } + + return layer_vkSetDebugUtilsObjectNameEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkSetDebugUtilsObjectTagEXT = requires( + VkDevice device, const VkDebugUtilsObjectTagInfoEXT* pTagInfo +) { + layer_vkSetDebugUtilsObjectTagEXT(device, pTagInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkSetDebugUtilsObjectTagEXT getLayerPtr_vkSetDebugUtilsObjectTagEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkSetDebugUtilsObjectTagEXT) + { + return layer_vkSetDebugUtilsObjectTagEXT; + } + + return layer_vkSetDebugUtilsObjectTagEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkSetDeviceMemoryPriorityEXT = requires( + VkDevice device, VkDeviceMemory memory, float priority +) { + layer_vkSetDeviceMemoryPriorityEXT(device, memory, priority); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkSetDeviceMemoryPriorityEXT getLayerPtr_vkSetDeviceMemoryPriorityEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkSetDeviceMemoryPriorityEXT) + { + return layer_vkSetDeviceMemoryPriorityEXT; + } + + return layer_vkSetDeviceMemoryPriorityEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkSetEvent = requires( + VkDevice device, VkEvent event +) { + layer_vkSetEvent(device, event); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkSetEvent getLayerPtr_vkSetEvent() +{ + return [] + { + if constexpr(hasLayerPtr_vkSetEvent) + { + return layer_vkSetEvent; + } + + return layer_vkSetEvent; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkSetHdrMetadataEXT = requires( + VkDevice device, uint32_t swapchainCount, const VkSwapchainKHR* pSwapchains, const VkHdrMetadataEXT* pMetadata +) { + layer_vkSetHdrMetadataEXT(device, swapchainCount, pSwapchains, pMetadata); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkSetHdrMetadataEXT getLayerPtr_vkSetHdrMetadataEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkSetHdrMetadataEXT) + { + return layer_vkSetHdrMetadataEXT; + } + + return layer_vkSetHdrMetadataEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkSetPrivateData = requires( + VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data +) { + layer_vkSetPrivateData(device, objectType, objectHandle, privateDataSlot, data); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkSetPrivateData getLayerPtr_vkSetPrivateData() +{ + return [] + { + if constexpr(hasLayerPtr_vkSetPrivateData) + { + return layer_vkSetPrivateData; + } + + return layer_vkSetPrivateData; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkSetPrivateDataEXT = requires( + VkDevice device, VkObjectType objectType, uint64_t objectHandle, VkPrivateDataSlot privateDataSlot, uint64_t data +) { + layer_vkSetPrivateDataEXT(device, objectType, objectHandle, privateDataSlot, data); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkSetPrivateDataEXT getLayerPtr_vkSetPrivateDataEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkSetPrivateDataEXT) + { + return layer_vkSetPrivateDataEXT; + } + + return layer_vkSetPrivateDataEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkSignalSemaphore = requires( + VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo +) { + layer_vkSignalSemaphore(device, pSignalInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkSignalSemaphore getLayerPtr_vkSignalSemaphore() +{ + return [] + { + if constexpr(hasLayerPtr_vkSignalSemaphore) + { + return layer_vkSignalSemaphore; + } + + return layer_vkSignalSemaphore; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkSignalSemaphoreKHR = requires( + VkDevice device, const VkSemaphoreSignalInfo* pSignalInfo +) { + layer_vkSignalSemaphoreKHR(device, pSignalInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkSignalSemaphoreKHR getLayerPtr_vkSignalSemaphoreKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkSignalSemaphoreKHR) + { + return layer_vkSignalSemaphoreKHR; + } + + return layer_vkSignalSemaphoreKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkTransitionImageLayout = requires( + VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfo* pTransitions +) { + layer_vkTransitionImageLayout(device, transitionCount, pTransitions); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkTransitionImageLayout getLayerPtr_vkTransitionImageLayout() +{ + return [] + { + if constexpr(hasLayerPtr_vkTransitionImageLayout) + { + return layer_vkTransitionImageLayout; + } + + return layer_vkTransitionImageLayout; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkTransitionImageLayoutEXT = requires( + VkDevice device, uint32_t transitionCount, const VkHostImageLayoutTransitionInfo* pTransitions +) { + layer_vkTransitionImageLayoutEXT(device, transitionCount, pTransitions); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkTransitionImageLayoutEXT getLayerPtr_vkTransitionImageLayoutEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkTransitionImageLayoutEXT) + { + return layer_vkTransitionImageLayoutEXT; + } + + return layer_vkTransitionImageLayoutEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkTrimCommandPool = requires( + VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags +) { + layer_vkTrimCommandPool(device, commandPool, flags); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkTrimCommandPool getLayerPtr_vkTrimCommandPool() +{ + return [] + { + if constexpr(hasLayerPtr_vkTrimCommandPool) + { + return layer_vkTrimCommandPool; + } + + return layer_vkTrimCommandPool; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkTrimCommandPoolKHR = requires( + VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags +) { + layer_vkTrimCommandPoolKHR(device, commandPool, flags); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkTrimCommandPoolKHR getLayerPtr_vkTrimCommandPoolKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkTrimCommandPoolKHR) + { + return layer_vkTrimCommandPoolKHR; + } + + return layer_vkTrimCommandPoolKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkUnmapMemory = requires( + VkDevice device, VkDeviceMemory memory +) { + layer_vkUnmapMemory(device, memory); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkUnmapMemory getLayerPtr_vkUnmapMemory() +{ + return [] + { + if constexpr(hasLayerPtr_vkUnmapMemory) + { + return layer_vkUnmapMemory; + } + + return layer_vkUnmapMemory; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkUnmapMemory2 = requires( + VkDevice device, const VkMemoryUnmapInfo* pMemoryUnmapInfo +) { + layer_vkUnmapMemory2(device, pMemoryUnmapInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkUnmapMemory2 getLayerPtr_vkUnmapMemory2() +{ + return [] + { + if constexpr(hasLayerPtr_vkUnmapMemory2) + { + return layer_vkUnmapMemory2; + } + + return layer_vkUnmapMemory2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkUnmapMemory2KHR = requires( + VkDevice device, const VkMemoryUnmapInfo* pMemoryUnmapInfo +) { + layer_vkUnmapMemory2KHR(device, pMemoryUnmapInfo); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkUnmapMemory2KHR getLayerPtr_vkUnmapMemory2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkUnmapMemory2KHR) + { + return layer_vkUnmapMemory2KHR; + } + + return layer_vkUnmapMemory2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkUpdateDescriptorSetWithTemplate = requires( + VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData +) { + layer_vkUpdateDescriptorSetWithTemplate(device, descriptorSet, descriptorUpdateTemplate, pData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkUpdateDescriptorSetWithTemplate getLayerPtr_vkUpdateDescriptorSetWithTemplate() +{ + return [] + { + if constexpr(hasLayerPtr_vkUpdateDescriptorSetWithTemplate) + { + return layer_vkUpdateDescriptorSetWithTemplate; + } + + return layer_vkUpdateDescriptorSetWithTemplate; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkUpdateDescriptorSetWithTemplateKHR = requires( + VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData +) { + layer_vkUpdateDescriptorSetWithTemplateKHR(device, descriptorSet, descriptorUpdateTemplate, pData); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkUpdateDescriptorSetWithTemplateKHR getLayerPtr_vkUpdateDescriptorSetWithTemplateKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkUpdateDescriptorSetWithTemplateKHR) + { + return layer_vkUpdateDescriptorSetWithTemplateKHR; + } + + return layer_vkUpdateDescriptorSetWithTemplateKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkUpdateDescriptorSets = requires( + VkDevice device, uint32_t descriptorWriteCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t descriptorCopyCount, const VkCopyDescriptorSet* pDescriptorCopies +) { + layer_vkUpdateDescriptorSets(device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkUpdateDescriptorSets getLayerPtr_vkUpdateDescriptorSets() +{ + return [] + { + if constexpr(hasLayerPtr_vkUpdateDescriptorSets) + { + return layer_vkUpdateDescriptorSets; + } + + return layer_vkUpdateDescriptorSets; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkUpdateIndirectExecutionSetPipelineEXT = requires( + VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, const VkWriteIndirectExecutionSetPipelineEXT* pExecutionSetWrites +) { + layer_vkUpdateIndirectExecutionSetPipelineEXT(device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkUpdateIndirectExecutionSetPipelineEXT getLayerPtr_vkUpdateIndirectExecutionSetPipelineEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkUpdateIndirectExecutionSetPipelineEXT) + { + return layer_vkUpdateIndirectExecutionSetPipelineEXT; + } + + return layer_vkUpdateIndirectExecutionSetPipelineEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkUpdateIndirectExecutionSetShaderEXT = requires( + VkDevice device, VkIndirectExecutionSetEXT indirectExecutionSet, uint32_t executionSetWriteCount, const VkWriteIndirectExecutionSetShaderEXT* pExecutionSetWrites +) { + layer_vkUpdateIndirectExecutionSetShaderEXT(device, indirectExecutionSet, executionSetWriteCount, pExecutionSetWrites); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkUpdateIndirectExecutionSetShaderEXT getLayerPtr_vkUpdateIndirectExecutionSetShaderEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkUpdateIndirectExecutionSetShaderEXT) + { + return layer_vkUpdateIndirectExecutionSetShaderEXT; + } + + return layer_vkUpdateIndirectExecutionSetShaderEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkWaitForFences = requires( + VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout +) { + layer_vkWaitForFences(device, fenceCount, pFences, waitAll, timeout); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkWaitForFences getLayerPtr_vkWaitForFences() +{ + return [] + { + if constexpr(hasLayerPtr_vkWaitForFences) + { + return layer_vkWaitForFences; + } + + return layer_vkWaitForFences; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkWaitForPresent2KHR = requires( + VkDevice device, VkSwapchainKHR swapchain, const VkPresentWait2InfoKHR* pPresentWait2Info +) { + layer_vkWaitForPresent2KHR(device, swapchain, pPresentWait2Info); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkWaitForPresent2KHR getLayerPtr_vkWaitForPresent2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkWaitForPresent2KHR) + { + return layer_vkWaitForPresent2KHR; + } + + return layer_vkWaitForPresent2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkWaitForPresentKHR = requires( + VkDevice device, VkSwapchainKHR swapchain, uint64_t presentId, uint64_t timeout +) { + layer_vkWaitForPresentKHR(device, swapchain, presentId, timeout); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkWaitForPresentKHR getLayerPtr_vkWaitForPresentKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkWaitForPresentKHR) + { + return layer_vkWaitForPresentKHR; + } + + return layer_vkWaitForPresentKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkWaitSemaphores = requires( + VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout +) { + layer_vkWaitSemaphores(device, pWaitInfo, timeout); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkWaitSemaphores getLayerPtr_vkWaitSemaphores() +{ + return [] + { + if constexpr(hasLayerPtr_vkWaitSemaphores) + { + return layer_vkWaitSemaphores; + } + + return layer_vkWaitSemaphores; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkWaitSemaphoresKHR = requires( + VkDevice device, const VkSemaphoreWaitInfo* pWaitInfo, uint64_t timeout +) { + layer_vkWaitSemaphoresKHR(device, pWaitInfo, timeout); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkWaitSemaphoresKHR getLayerPtr_vkWaitSemaphoresKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkWaitSemaphoresKHR) + { + return layer_vkWaitSemaphoresKHR; + } + + return layer_vkWaitSemaphoresKHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkWriteAccelerationStructuresPropertiesKHR = requires( + VkDevice device, uint32_t accelerationStructureCount, const VkAccelerationStructureKHR* pAccelerationStructures, VkQueryType queryType, size_t dataSize, void* pData, size_t stride +) { + layer_vkWriteAccelerationStructuresPropertiesKHR(device, accelerationStructureCount, pAccelerationStructures, queryType, dataSize, pData, stride); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkWriteAccelerationStructuresPropertiesKHR getLayerPtr_vkWriteAccelerationStructuresPropertiesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkWriteAccelerationStructuresPropertiesKHR) + { + return layer_vkWriteAccelerationStructuresPropertiesKHR; + } + + return layer_vkWriteAccelerationStructuresPropertiesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkWriteMicromapsPropertiesEXT = requires( + VkDevice device, uint32_t micromapCount, const VkMicromapEXT* pMicromaps, VkQueryType queryType, size_t dataSize, void* pData, size_t stride +) { + layer_vkWriteMicromapsPropertiesEXT(device, micromapCount, pMicromaps, queryType, dataSize, pData, stride); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkWriteMicromapsPropertiesEXT getLayerPtr_vkWriteMicromapsPropertiesEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkWriteMicromapsPropertiesEXT) + { + return layer_vkWriteMicromapsPropertiesEXT; + } + + return layer_vkWriteMicromapsPropertiesEXT; + }.operator()(); +} + +// clang-format on diff --git a/source_common/framework/entry.cpp b/source_common/framework/entry.cpp index 5718822..86532d2 100644 --- a/source_common/framework/entry.cpp +++ b/source_common/framework/entry.cpp @@ -44,6 +44,10 @@ # include "layer_instance_functions.hpp" #endif +// These must be after the layer_*_functions.hpp includes +#include "framework/device_functions_query.hpp" +#include "framework/instance_functions_query.hpp" + std::mutex g_vulkanLock; extern "C" @@ -51,13 +55,13 @@ extern "C" /** See Vulkan API for documentation. */ VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetDeviceProcAddr(VkDevice device, const char* pName) { - return layer_vkGetDeviceProcAddr(device, pName); + return getLayerPtr_vkGetDeviceProcAddr()(device, pName); } /** See Vulkan API for documentation. */ VK_LAYER_EXPORT VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance, const char* pName) { - return layer_vkGetInstanceProcAddr(instance, pName); + return getLayerPtr_vkGetInstanceProcAddr()(instance, pName); } /** See Vulkan API for documentation. */ @@ -66,7 +70,7 @@ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL uint32_t* pPropertyCount, VkExtensionProperties* pProperties) { - return layer_vkEnumerateInstanceExtensionProperties(pLayerName, pPropertyCount, pProperties); + return getLayerPtr_vkEnumerateInstanceExtensionProperties()(pLayerName, pPropertyCount, pProperties); } /** See Vulkan API for documentation. 
*/ @@ -76,14 +80,14 @@ VK_LAYER_EXPORT_ANDROID VKAPI_ATTR VkResult VKAPI_CALL uint32_t* pPropertyCount, VkExtensionProperties* pProperties) { - return layer_vkEnumerateDeviceExtensionProperties(gpu, pLayerName, pPropertyCount, pProperties); + return getLayerPtr_vkEnumerateDeviceExtensionProperties()(gpu, pLayerName, pPropertyCount, pProperties); } /** See Vulkan API for documentation. */ VK_LAYER_EXPORT VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateInstanceLayerProperties(uint32_t* pPropertyCount, VkLayerProperties* pProperties) { - return layer_vkEnumerateInstanceLayerProperties(pPropertyCount, pProperties); + return getLayerPtr_vkEnumerateInstanceLayerProperties()(pPropertyCount, pProperties); } /** See Vulkan API for documentation. */ @@ -91,6 +95,6 @@ VK_LAYER_EXPORT_ANDROID VKAPI_ATTR VkResult VKAPI_CALL vkEnumerateDeviceLayerPro uint32_t* pPropertyCount, VkLayerProperties* pProperties) { - return layer_vkEnumerateDeviceLayerProperties(gpu, pPropertyCount, pProperties); + return getLayerPtr_vkEnumerateDeviceLayerProperties()(gpu, pPropertyCount, pProperties); } } diff --git a/source_common/framework/instance_dispatch_table.hpp b/source_common/framework/instance_dispatch_table.hpp index ede8c6b..ee856d8 100644 --- a/source_common/framework/instance_dispatch_table.hpp +++ b/source_common/framework/instance_dispatch_table.hpp @@ -38,6 +38,10 @@ #include "layer_instance_functions.hpp" #endif +// These must be after the layer_*_functions.hpp includes +#include "framework/device_functions_query.hpp" +#include "framework/instance_functions_query.hpp" + /** * @brief Interception table lookup entry. */ @@ -52,9 +56,14 @@ struct InstanceInterceptTableEntry * @brief The layer function pointer. */ PFN_vkVoidFunction function; + + /** + * @brief Did the layer provide a specialization? 
+ */ + bool hasLayerSpecialization; }; -#define ENTRY(fnc) { STR(fnc), reinterpret_cast(layer_##fnc) } +#define ENTRY(fnc) { STR(fnc), reinterpret_cast(getLayerPtr_##fnc()), hasLayerPtr_##fnc } /** * @brief The instance dispatch table used to call the driver. diff --git a/source_common/framework/instance_functions.cpp b/source_common/framework/instance_functions.cpp index b76f0df..ce58e46 100644 --- a/source_common/framework/instance_functions.cpp +++ b/source_common/framework/instance_functions.cpp @@ -41,7 +41,8 @@ extern std::mutex g_vulkanLock; #if defined(VK_USE_PLATFORM_ANDROID_KHR) /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateAndroidSurfaceKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -61,7 +62,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateAndroidSurfaceKHR_default( #endif /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDebugReportCallbackEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -79,7 +81,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDebugReportCallbackEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDebugUtilsMessengerEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -97,7 +100,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDebugUtilsMessengerEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDisplayModeKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, @@ -116,7 +120,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDisplayModeKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDisplayPlaneSurfaceKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, @@ -134,7 +139,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDisplayPlaneSurfaceKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDebugReportMessageEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, @@ -156,7 +162,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDebugReportMessageEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDebugReportCallbackEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator @@ -173,7 +180,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDebugReportCallbackEXT_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDebugUtilsMessengerEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator @@ -190,7 +198,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDebugUtilsMessengerEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroySurfaceKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator @@ -207,7 +216,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroySurfaceKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceGroups_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties @@ -224,7 +234,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceGroups_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceGroupsKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties @@ -241,7 +252,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceGroupsKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, @@ -260,7 +272,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceQueueFamilyPerform } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDevices_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices @@ -277,7 +290,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDevices_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayModeProperties2KHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, @@ -295,7 +309,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayModeProperties2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayModePropertiesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, @@ -313,7 +328,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayModePropertiesKHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneCapabilities2KHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities @@ -330,7 +346,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneCapabilities2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneCapabilitiesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, @@ -348,7 +365,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneCapabilitiesKHR_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneSupportedDisplaysKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, @@ -366,7 +384,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneSupportedDisplaysKHR_defau } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainKHR* pTimeDomains @@ -383,7 +402,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCalibrateableTimeDomains } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainKHR* pTimeDomains @@ -400,7 +420,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCalibrateableTimeDomains } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesKHR* pProperties @@ -417,7 +438,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCooperativeMatrixPropert } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPlaneProperties2KHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties @@ -434,7 +456,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPlaneProperties2K } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPlanePropertiesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties @@ -451,7 +474,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPlanePropertiesKH } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayProperties2KHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties @@ -468,7 +492,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayProperties2KHR_de } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPropertiesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties @@ -485,7 +510,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPropertiesKHR_def } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalBufferProperties_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties @@ -502,7 +528,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalBufferProperties_def } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalBufferPropertiesKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties @@ -519,7 +546,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalBufferPropertiesKHR_ } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalFenceProperties_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties @@ -536,7 +564,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalFenceProperties_defa } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalFencePropertiesKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties @@ -553,7 +582,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalFencePropertiesKHR_d } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalSemaphoreProperties_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties @@ -570,7 +600,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalSemaphoreProperties_ } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties @@ -587,7 +618,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalSemaphorePropertiesK } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalTensorPropertiesARM_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalTensorPropertiesARM( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalTensorInfoARM* pExternalTensorInfo, VkExternalTensorPropertiesARM* pExternalTensorProperties @@ -604,7 +636,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalTensorPropertiesARM_ } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures ) { @@ -620,7 +653,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) { @@ -636,7 +670,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures ) { @@ -652,7 +687,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties @@ -669,7 +705,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties @@ -686,7 +723,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties @@ -703,7 +741,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties2KHR_default } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceFragmentShadingRatesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates @@ -720,7 +759,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceFragmentShadingRatesKHR_ } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, @@ -741,7 +781,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties_de } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties2_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties @@ -758,7 +799,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties2_d } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties2KHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties @@ -775,7 +817,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties2KH } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties ) { @@ -791,7 +834,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) { @@ -807,7 +851,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties ) { @@ -823,7 +868,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties2KHR_default } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMultisamplePropertiesEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties @@ -840,7 +886,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMultisamplePropertiesEXT_def } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDevicePresentRectanglesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, @@ -858,7 +905,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDevicePresentRectanglesKHR_def } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties ) { @@ -874,7 +922,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) { @@ -890,7 +939,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties2_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties ) { @@ -906,7 +956,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties2KHR_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses @@ -923,7 +974,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyPerformanceQueryP } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties @@ -940,7 +992,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties_defaul } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties @@ -957,7 +1010,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties2_defau } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties @@ -974,7 +1028,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties2KHR_de } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, @@ -996,7 +1051,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties_ } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties2_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, @@ -1014,7 +1070,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties2 } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties2KHR_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, @@ -1032,7 +1089,8 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties2 } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilities2EXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities @@ -1049,7 +1107,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilities2EXT_ } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilities2KHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities @@ -1066,7 +1125,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilities2KHR_ } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilitiesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities @@ -1083,7 +1143,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilitiesKHR_d } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceFormats2KHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, @@ -1101,7 +1162,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceFormats2KHR_defau } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceFormatsKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, @@ -1119,7 +1181,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceFormatsKHR_defaul } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfacePresentModesKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, @@ -1137,7 +1200,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfacePresentModesKHR_d } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceSupportKHR_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, @@ -1155,7 +1219,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceSupportKHR_defaul } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceToolProperties_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolProperties* pToolProperties @@ -1172,7 +1237,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceToolProperties_default( } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceToolPropertiesEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolProperties* pToolProperties @@ -1189,7 +1255,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceToolPropertiesEXT_defaul } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseDisplayEXT_default( +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, VkDisplayKHR display ) { @@ -1205,7 +1272,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseDisplayEXT_default( } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkSubmitDebugUtilsMessageEXT_default( +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, diff --git a/source_common/framework/instance_functions.hpp b/source_common/framework/instance_functions.hpp index 4296095..c49cf5b 100644 --- a/source_common/framework/instance_functions.hpp +++ b/source_common/framework/instance_functions.hpp @@ -32,143 +32,130 @@ #if defined(VK_USE_PLATFORM_ANDROID_KHR) /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateAndroidSurfaceKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); + VkSurfaceKHR* pSurface) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateAndroidSurfaceKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateAndroidSurfaceKHR( VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface -) { - return layer_vkCreateAndroidSurfaceKHR_default(instance, pCreateInfo, pAllocator, pSurface); -} + VkSurfaceKHR* pSurface); #endif /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDebugReportCallbackEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkDebugReportCallbackEXT* pCallback); + VkDebugReportCallbackEXT* pCallback) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDebugReportCallbackEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDebugReportCallbackEXT( VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkDebugReportCallbackEXT* pCallback -) { - return layer_vkCreateDebugReportCallbackEXT_default(instance, pCreateInfo, pAllocator, pCallback); -} + VkDebugReportCallbackEXT* pCallback); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDebugUtilsMessengerEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkDebugUtilsMessengerEXT* pMessenger); + VkDebugUtilsMessengerEXT* pMessenger) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDebugUtilsMessengerEXT( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDebugUtilsMessengerEXT( VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkDebugUtilsMessengerEXT* pMessenger -) { - return layer_vkCreateDebugUtilsMessengerEXT_default(instance, pCreateInfo, pAllocator, pMessenger); -} + VkDebugUtilsMessengerEXT* pMessenger); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDevice_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkDevice* pDevice); + VkDevice* pDevice) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDevice( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDevice( VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkDevice* pDevice -) { - return layer_vkCreateDevice_default(physicalDevice, pCreateInfo, pAllocator, pDevice); -} + VkDevice* pDevice); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDisplayModeKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkDisplayModeKHR* pMode); + VkDisplayModeKHR* pMode) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDisplayModeKHR( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDisplayModeKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkDisplayModeKHR* pMode -) { - return layer_vkCreateDisplayModeKHR_default(physicalDevice, display, pCreateInfo, pAllocator, pMode); -} + VkDisplayModeKHR* pMode); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDisplayPlaneSurfaceKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface); + VkSurfaceKHR* pSurface) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDisplayPlaneSurfaceKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDisplayPlaneSurfaceKHR( VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkSurfaceKHR* pSurface -) { - return layer_vkCreateDisplayPlaneSurfaceKHR_default(instance, pCreateInfo, pAllocator, pSurface); -} + VkSurfaceKHR* pSurface); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateInstance_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateInstance( const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkInstance* pInstance); + VkInstance* pInstance) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateInstance( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateInstance( const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, - VkInstance* pInstance -) { - return layer_vkCreateInstance_default(pCreateInfo, pAllocator, pInstance); -} + VkInstance* pInstance); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDebugReportMessageEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, @@ -176,11 +163,11 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDebugReportMessageEXT_default( size_t location, int32_t messageCode, const char* pLayerPrefix, - const char* pMessage); + const char* pMessage) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkDebugReportMessageEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDebugReportMessageEXT( VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, @@ -188,932 +175,822 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDebugReportMessageEXT( size_t location, int32_t messageCode, const char* pLayerPrefix, - const char* pMessage -) { - layer_vkDebugReportMessageEXT_default(instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage); -} + const char* pMessage); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDebugReportCallbackEXT_default( - VkInstance instance, - VkDebugReportCallbackEXT callback, - const VkAllocationCallbacks* pAllocator); - -/* Match-all template to use default implementation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDebugReportCallbackEXT( VkInstance instance, VkDebugReportCallbackEXT callback, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyDebugReportCallbackEXT_default(instance, callback, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDebugUtilsMessengerEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDebugReportCallbackEXT( VkInstance instance, - VkDebugUtilsMessengerEXT messenger, + VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDebugUtilsMessengerEXT( VkInstance instance, VkDebugUtilsMessengerEXT messenger, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyDebugUtilsMessengerEXT_default(instance, messenger, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyInstance_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDebugUtilsMessengerEXT( VkInstance instance, + VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroyInstance( VkInstance instance, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroyInstance_default(instance, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroySurfaceKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyInstance( VkInstance instance, - VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkDestroySurfaceKHR( VkInstance instance, VkSurfaceKHR surface, - const VkAllocationCallbacks* pAllocator -) { - layer_vkDestroySurfaceKHR_default(instance, surface, pAllocator); -} + const VkAllocationCallbacks* pAllocator) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumerateDeviceExtensionProperties_default( - VkPhysicalDevice physicalDevice, - const char* pLayerName, - uint32_t* pPropertyCount, - VkExtensionProperties* pProperties); +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroySurfaceKHR( + VkInstance instance, + VkSurfaceKHR surface, + const VkAllocationCallbacks* pAllocator); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, - VkExtensionProperties* pProperties -) { - return layer_vkEnumerateDeviceExtensionProperties_default(physicalDevice, pLayerName, pPropertyCount, pProperties); -} + VkExtensionProperties* pProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumerateDeviceLayerProperties_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumerateDeviceExtensionProperties( VkPhysicalDevice physicalDevice, + const char* pLayerName, uint32_t* pPropertyCount, - VkLayerProperties* pProperties); + VkExtensionProperties* pProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumerateDeviceLayerProperties( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, - VkLayerProperties* pProperties -) { - return layer_vkEnumerateDeviceLayerProperties_default(physicalDevice, pPropertyCount, pProperties); -} + VkLayerProperties* pProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumerateInstanceExtensionProperties_default( - const char* pLayerName, +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumerateDeviceLayerProperties( + VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, - VkExtensionProperties* pProperties); + VkLayerProperties* pProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumerateInstanceExtensionProperties( const char* pLayerName, uint32_t* pPropertyCount, - VkExtensionProperties* pProperties -) { - return layer_vkEnumerateInstanceExtensionProperties_default(pLayerName, pPropertyCount, pProperties); -} + VkExtensionProperties* pProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumerateInstanceLayerProperties_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumerateInstanceExtensionProperties( + const char* pLayerName, uint32_t* pPropertyCount, - VkLayerProperties* pProperties); + VkExtensionProperties* pProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumerateInstanceLayerProperties( uint32_t* pPropertyCount, - VkLayerProperties* pProperties -) { - return layer_vkEnumerateInstanceLayerProperties_default(pPropertyCount, pProperties); -} + VkLayerProperties* pProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceGroups_default( - VkInstance instance, - uint32_t* pPhysicalDeviceGroupCount, - VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumerateInstanceLayerProperties( + uint32_t* pPropertyCount, + VkLayerProperties* pProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, - VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties -) { - return layer_vkEnumeratePhysicalDeviceGroups_default(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties); -} + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceGroupsKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceGroups( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceGroupsKHR( VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, - VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties -) { - return layer_vkEnumeratePhysicalDeviceGroupsKHR_default(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties); -} + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceGroupsKHR( + VkInstance instance, + uint32_t* pPhysicalDeviceGroupCount, + VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, - VkPerformanceCounterDescriptionKHR* pCounterDescriptions); + VkPerformanceCounterDescriptionKHR* pCounterDescriptions) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, - VkPerformanceCounterDescriptionKHR* pCounterDescriptions -) { - return layer_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR_default(physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions); -} + VkPerformanceCounterDescriptionKHR* pCounterDescriptions); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDevices_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, - VkPhysicalDevice* pPhysicalDevices); + VkPhysicalDevice* pPhysicalDevices) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDevices( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkEnumeratePhysicalDevices( VkInstance instance, uint32_t* pPhysicalDeviceCount, - VkPhysicalDevice* pPhysicalDevices -) { - return layer_vkEnumeratePhysicalDevices_default(instance, pPhysicalDeviceCount, pPhysicalDevices); -} + VkPhysicalDevice* pPhysicalDevices); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayModeProperties2KHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, - VkDisplayModeProperties2KHR* pProperties); + VkDisplayModeProperties2KHR* pProperties) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayModeProperties2KHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayModeProperties2KHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, - VkDisplayModeProperties2KHR* pProperties -) { - return layer_vkGetDisplayModeProperties2KHR_default(physicalDevice, display, pPropertyCount, pProperties); -} + VkDisplayModeProperties2KHR* pProperties); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayModePropertiesKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, - VkDisplayModePropertiesKHR* pProperties); + VkDisplayModePropertiesKHR* pProperties) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayModePropertiesKHR( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayModePropertiesKHR( VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, - VkDisplayModePropertiesKHR* pProperties -) { - return layer_vkGetDisplayModePropertiesKHR_default(physicalDevice, display, pPropertyCount, pProperties); -} + VkDisplayModePropertiesKHR* pProperties); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneCapabilities2KHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, - VkDisplayPlaneCapabilities2KHR* pCapabilities); + VkDisplayPlaneCapabilities2KHR* pCapabilities) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneCapabilities2KHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, - VkDisplayPlaneCapabilities2KHR* pCapabilities -) { - return layer_vkGetDisplayPlaneCapabilities2KHR_default(physicalDevice, pDisplayPlaneInfo, pCapabilities); -} + VkDisplayPlaneCapabilities2KHR* pCapabilities); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneCapabilitiesKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, - VkDisplayPlaneCapabilitiesKHR* pCapabilities); + VkDisplayPlaneCapabilitiesKHR* pCapabilities) = delete; -/* Match-all template to use default implementation. 
*/ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneCapabilitiesKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, - VkDisplayPlaneCapabilitiesKHR* pCapabilities -) { - return layer_vkGetDisplayPlaneCapabilitiesKHR_default(physicalDevice, mode, planeIndex, pCapabilities); -} + VkDisplayPlaneCapabilitiesKHR* pCapabilities); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneSupportedDisplaysKHR_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, - VkDisplayKHR* pDisplays); + VkDisplayKHR* pDisplays) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneSupportedDisplaysKHR( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetDisplayPlaneSupportedDisplaysKHR( VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, - VkDisplayKHR* pDisplays -) { - return layer_vkGetDisplayPlaneSupportedDisplaysKHR_default(physicalDevice, planeIndex, pDisplayCount, pDisplays); -} + VkDisplayKHR* pDisplays); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL layer_vkGetInstanceProcAddr_default( - VkInstance instance, - const char* pName); - -/* Match-all template to use default implementation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL layer_vkGetInstanceProcAddr( VkInstance instance, - const char* pName -) { - return layer_vkGetInstanceProcAddr_default(instance, pName); -} + const char* pName) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT_default( - VkPhysicalDevice physicalDevice, - uint32_t* pTimeDomainCount, - VkTimeDomainKHR* pTimeDomains); +/* Default common code implementation. */ +template <> +VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL layer_vkGetInstanceProcAddr( + VkInstance instance, + const char* pName); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, - VkTimeDomainKHR* pTimeDomains -) { - return layer_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT_default(physicalDevice, pTimeDomainCount, pTimeDomains); -} + VkTimeDomainKHR* pTimeDomains) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT( VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainKHR* pTimeDomains); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, - VkTimeDomainKHR* pTimeDomains -) { - return layer_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR_default(physicalDevice, pTimeDomainCount, pTimeDomains); -} + VkTimeDomainKHR* pTimeDomains) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR( VkPhysicalDevice physicalDevice, - uint32_t* pPropertyCount, - VkCooperativeMatrixPropertiesKHR* pProperties); + uint32_t* pTimeDomainCount, + VkTimeDomainKHR* pTimeDomains); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, - VkCooperativeMatrixPropertiesKHR* pProperties -) { - return layer_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR_default(physicalDevice, pPropertyCount, pProperties); -} + VkCooperativeMatrixPropertiesKHR* pProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPlaneProperties2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, - VkDisplayPlaneProperties2KHR* pProperties); + VkCooperativeMatrixPropertiesKHR* pProperties); -/* Match-all template to use default implementation. 
*/ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, - VkDisplayPlaneProperties2KHR* pProperties -) { - return layer_vkGetPhysicalDeviceDisplayPlaneProperties2KHR_default(physicalDevice, pPropertyCount, pProperties); -} + VkDisplayPlaneProperties2KHR* pProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPlanePropertiesKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPlaneProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, - VkDisplayPlanePropertiesKHR* pProperties); + VkDisplayPlaneProperties2KHR* pProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, - VkDisplayPlanePropertiesKHR* pProperties -) { - return layer_vkGetPhysicalDeviceDisplayPlanePropertiesKHR_default(physicalDevice, pPropertyCount, pProperties); -} + VkDisplayPlanePropertiesKHR* pProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayProperties2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPlanePropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, - VkDisplayProperties2KHR* pProperties); + VkDisplayPlanePropertiesKHR* pProperties); -/* Match-all template to use default implementation. 
*/ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, - VkDisplayProperties2KHR* pProperties -) { - return layer_vkGetPhysicalDeviceDisplayProperties2KHR_default(physicalDevice, pPropertyCount, pProperties); -} + VkDisplayProperties2KHR* pProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPropertiesKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, - VkDisplayPropertiesKHR* pProperties); + VkDisplayProperties2KHR* pProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, - VkDisplayPropertiesKHR* pProperties -) { - return layer_vkGetPhysicalDeviceDisplayPropertiesKHR_default(physicalDevice, pPropertyCount, pProperties); -} + VkDisplayPropertiesKHR* pProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalBufferProperties_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceDisplayPropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, - VkExternalBufferProperties* pExternalBufferProperties); + uint32_t* pPropertyCount, + VkDisplayPropertiesKHR* pProperties); -/* Match-all template to use default implementation. 
*/ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, - VkExternalBufferProperties* pExternalBufferProperties -) { - layer_vkGetPhysicalDeviceExternalBufferProperties_default(physicalDevice, pExternalBufferInfo, pExternalBufferProperties); -} + VkExternalBufferProperties* pExternalBufferProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalBufferPropertiesKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalBufferProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, - VkExternalBufferProperties* pExternalBufferProperties -) { - layer_vkGetPhysicalDeviceExternalBufferPropertiesKHR_default(physicalDevice, pExternalBufferInfo, pExternalBufferProperties); -} + VkExternalBufferProperties* pExternalBufferProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalFenceProperties_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalBufferPropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, - VkExternalFenceProperties* pExternalFenceProperties); + const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, + VkExternalBufferProperties* pExternalBufferProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, - VkExternalFenceProperties* pExternalFenceProperties -) { - layer_vkGetPhysicalDeviceExternalFenceProperties_default(physicalDevice, pExternalFenceInfo, pExternalFenceProperties); -} + VkExternalFenceProperties* pExternalFenceProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalFencePropertiesKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalFenceProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, - VkExternalFenceProperties* pExternalFenceProperties -) { - layer_vkGetPhysicalDeviceExternalFencePropertiesKHR_default(physicalDevice, pExternalFenceInfo, pExternalFenceProperties); -} + VkExternalFenceProperties* pExternalFenceProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalSemaphoreProperties_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalFencePropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, - VkExternalSemaphoreProperties* pExternalSemaphoreProperties); + const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, + VkExternalFenceProperties* pExternalFenceProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, - VkExternalSemaphoreProperties* pExternalSemaphoreProperties -) { - layer_vkGetPhysicalDeviceExternalSemaphoreProperties_default(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties); -} + VkExternalSemaphoreProperties* pExternalSemaphoreProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalSemaphoreProperties( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, - VkExternalSemaphoreProperties* pExternalSemaphoreProperties -) { - layer_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR_default(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties); -} + VkExternalSemaphoreProperties* pExternalSemaphoreProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalTensorPropertiesARM_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceExternalTensorInfoARM* pExternalTensorInfo, - VkExternalTensorPropertiesARM* pExternalTensorProperties); + const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, + VkExternalSemaphoreProperties* pExternalSemaphoreProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalTensorPropertiesARM( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalTensorInfoARM* pExternalTensorInfo, - VkExternalTensorPropertiesARM* pExternalTensorProperties -) { - layer_vkGetPhysicalDeviceExternalTensorPropertiesARM_default(physicalDevice, pExternalTensorInfo, pExternalTensorProperties); -} + VkExternalTensorPropertiesARM* pExternalTensorProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceExternalTensorPropertiesARM( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceFeatures* pFeatures); + const VkPhysicalDeviceExternalTensorInfoARM* pExternalTensorInfo, + VkExternalTensorPropertiesARM* pExternalTensorProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceFeatures* pFeatures -) { - layer_vkGetPhysicalDeviceFeatures_default(physicalDevice, pFeatures); -} + VkPhysicalDeviceFeatures* pFeatures) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures2_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceFeatures2* pFeatures); + VkPhysicalDeviceFeatures* pFeatures); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceFeatures2* pFeatures -) { - layer_vkGetPhysicalDeviceFeatures2_default(physicalDevice, pFeatures); -} + VkPhysicalDeviceFeatures2* pFeatures) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceFeatures2* pFeatures -) { - layer_vkGetPhysicalDeviceFeatures2KHR_default(physicalDevice, pFeatures); -} + VkPhysicalDeviceFeatures2* pFeatures) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFeatures2KHR( VkPhysicalDevice physicalDevice, - VkFormat format, - VkFormatProperties* pFormatProperties); + VkPhysicalDeviceFeatures2* pFeatures); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, - VkFormatProperties* pFormatProperties -) { - layer_vkGetPhysicalDeviceFormatProperties_default(physicalDevice, format, pFormatProperties); -} + VkFormatProperties* pFormatProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties2_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, - VkFormatProperties2* pFormatProperties); + VkFormatProperties* pFormatProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, - VkFormatProperties2* pFormatProperties -) { - layer_vkGetPhysicalDeviceFormatProperties2_default(physicalDevice, format, pFormatProperties); -} + VkFormatProperties2* pFormatProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties2( VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, VkFormat format, - VkFormatProperties2* pFormatProperties -) { - layer_vkGetPhysicalDeviceFormatProperties2KHR_default(physicalDevice, format, pFormatProperties); -} + VkFormatProperties2* pFormatProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceFragmentShadingRatesKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceFormatProperties2KHR( VkPhysicalDevice physicalDevice, - uint32_t* pFragmentShadingRateCount, - VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); + VkFormat format, + VkFormatProperties2* pFormatProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceFragmentShadingRatesKHR( VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, - VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates -) { - return layer_vkGetPhysicalDeviceFragmentShadingRatesKHR_default(physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates); -} + VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceFragmentShadingRatesKHR( + VkPhysicalDevice physicalDevice, + uint32_t* pFragmentShadingRateCount, + VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, - VkImageFormatProperties* pImageFormatProperties); + VkImageFormatProperties* pImageFormatProperties) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, - VkImageFormatProperties* pImageFormatProperties -) { - return layer_vkGetPhysicalDeviceImageFormatProperties_default(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties); -} + VkImageFormatProperties* pImageFormatProperties); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties2_default( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, - VkImageFormatProperties2* pImageFormatProperties); - -/* Match-all template to use default implementation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, - VkImageFormatProperties2* pImageFormatProperties -) { - return layer_vkGetPhysicalDeviceImageFormatProperties2_default(physicalDevice, pImageFormatInfo, pImageFormatProperties); -} + VkImageFormatProperties2* pImageFormatProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, - VkImageFormatProperties2* pImageFormatProperties -) { - return layer_vkGetPhysicalDeviceImageFormatProperties2KHR_default(physicalDevice, pImageFormatInfo, pImageFormatProperties); -} + VkImageFormatProperties2* pImageFormatProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties* pMemoryProperties); + const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, + VkImageFormatProperties2* pImageFormatProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties* pMemoryProperties -) { - layer_vkGetPhysicalDeviceMemoryProperties_default(physicalDevice, pMemoryProperties); -} + VkPhysicalDeviceMemoryProperties* pMemoryProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties2_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties2* pMemoryProperties); + VkPhysicalDeviceMemoryProperties* pMemoryProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties2* pMemoryProperties -) { - layer_vkGetPhysicalDeviceMemoryProperties2_default(physicalDevice, pMemoryProperties); -} + VkPhysicalDeviceMemoryProperties2* pMemoryProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties2KHR_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceMemoryProperties2* pMemoryProperties -) { - layer_vkGetPhysicalDeviceMemoryProperties2KHR_default(physicalDevice, pMemoryProperties); -} + VkPhysicalDeviceMemoryProperties2* pMemoryProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMultisamplePropertiesEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMemoryProperties2KHR( VkPhysicalDevice physicalDevice, - VkSampleCountFlagBits samples, - VkMultisamplePropertiesEXT* pMultisampleProperties); + VkPhysicalDeviceMemoryProperties2* pMemoryProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, - VkMultisamplePropertiesEXT* pMultisampleProperties -) { - layer_vkGetPhysicalDeviceMultisamplePropertiesEXT_default(physicalDevice, samples, pMultisampleProperties); -} + VkMultisamplePropertiesEXT* pMultisampleProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDevicePresentRectanglesKHR_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceMultisamplePropertiesEXT( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - uint32_t* pRectCount, - VkRect2D* pRects); + VkSampleCountFlagBits samples, + VkMultisamplePropertiesEXT* pMultisampleProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, - VkRect2D* pRects -) { - return layer_vkGetPhysicalDevicePresentRectanglesKHR_default(physicalDevice, surface, pRectCount, pRects); -} + VkRect2D* pRects) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDevicePresentRectanglesKHR( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceProperties* pProperties); + VkSurfaceKHR surface, + uint32_t* pRectCount, + VkRect2D* pRects); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceProperties* pProperties -) { - layer_vkGetPhysicalDeviceProperties_default(physicalDevice, pProperties); -} + VkPhysicalDeviceProperties* pProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties2_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceProperties2* pProperties); + VkPhysicalDeviceProperties* pProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceProperties2* pProperties -) { - layer_vkGetPhysicalDeviceProperties2_default(physicalDevice, pProperties); -} + VkPhysicalDeviceProperties2* pProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties2( VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, - VkPhysicalDeviceProperties2* pProperties -) { - layer_vkGetPhysicalDeviceProperties2KHR_default(physicalDevice, pProperties); -} + VkPhysicalDeviceProperties2* pProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceProperties2KHR( VkPhysicalDevice physicalDevice, - const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, - uint32_t* pNumPasses); + VkPhysicalDeviceProperties2* pProperties); -/* Match-all template to use default implementation. 
*/ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, - uint32_t* pNumPasses -) { - layer_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR_default(physicalDevice, pPerformanceQueryCreateInfo, pNumPasses); -} + uint32_t* pNumPasses) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR( VkPhysicalDevice physicalDevice, - uint32_t* pQueueFamilyPropertyCount, - VkQueueFamilyProperties* pQueueFamilyProperties); + const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, + uint32_t* pNumPasses); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, - VkQueueFamilyProperties* pQueueFamilyProperties -) { - layer_vkGetPhysicalDeviceQueueFamilyProperties_default(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); -} + VkQueueFamilyProperties* pQueueFamilyProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties2_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, - VkQueueFamilyProperties2* pQueueFamilyProperties); + VkQueueFamilyProperties* pQueueFamilyProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, - VkQueueFamilyProperties2* pQueueFamilyProperties -) { - layer_vkGetPhysicalDeviceQueueFamilyProperties2_default(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); -} + VkQueueFamilyProperties2* pQueueFamilyProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties2( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties2KHR( VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, - VkQueueFamilyProperties2* pQueueFamilyProperties -) { - layer_vkGetPhysicalDeviceQueueFamilyProperties2KHR_default(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); -} + VkQueueFamilyProperties2* pQueueFamilyProperties) = delete; + +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceQueueFamilyProperties2KHR( + VkPhysicalDevice physicalDevice, + uint32_t* pQueueFamilyPropertyCount, + VkQueueFamilyProperties2* pQueueFamilyProperties); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, @@ -1121,11 +998,11 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties_ VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, - VkSparseImageFormatProperties* pProperties); + VkSparseImageFormatProperties* pProperties) = delete; -/* Match-all template to use default implementation. */ -template -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties( VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, @@ -1133,242 +1010,213 @@ VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties( VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, - VkSparseImageFormatProperties* pProperties -) { - layer_vkGetPhysicalDeviceSparseImageFormatProperties_default(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties); -} + VkSparseImageFormatProperties* pProperties); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties2_default( - VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, - uint32_t* pPropertyCount, - VkSparseImageFormatProperties2* pProperties); - -/* Match-all template to use default implementation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, - VkSparseImageFormatProperties2* pProperties -) { - layer_vkGetPhysicalDeviceSparseImageFormatProperties2_default(physicalDevice, pFormatInfo, pPropertyCount, pProperties); -} + VkSparseImageFormatProperties2* pProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties2( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, - VkSparseImageFormatProperties2* pProperties -) { - layer_vkGetPhysicalDeviceSparseImageFormatProperties2KHR_default(physicalDevice, pFormatInfo, pPropertyCount, pProperties); -} + VkSparseImageFormatProperties2* pProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. 
*/ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilities2EXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetPhysicalDeviceSparseImageFormatProperties2KHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilities2EXT* pSurfaceCapabilities); + const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, + uint32_t* pPropertyCount, + VkSparseImageFormatProperties2* pProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, - VkSurfaceCapabilities2EXT* pSurfaceCapabilities -) { - return layer_vkGetPhysicalDeviceSurfaceCapabilities2EXT_default(physicalDevice, surface, pSurfaceCapabilities); -} + VkSurfaceCapabilities2EXT* pSurfaceCapabilities) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilities2KHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilities2EXT( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, - VkSurfaceCapabilities2KHR* pSurfaceCapabilities); + VkSurfaceKHR surface, + VkSurfaceCapabilities2EXT* pSurfaceCapabilities); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, - VkSurfaceCapabilities2KHR* pSurfaceCapabilities -) { - return layer_vkGetPhysicalDeviceSurfaceCapabilities2KHR_default(physicalDevice, pSurfaceInfo, pSurfaceCapabilities); -} + VkSurfaceCapabilities2KHR* pSurfaceCapabilities) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilitiesKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilities2KHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, - VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, + VkSurfaceCapabilities2KHR* pSurfaceCapabilities); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, - VkSurfaceCapabilitiesKHR* pSurfaceCapabilities -) { - return layer_vkGetPhysicalDeviceSurfaceCapabilitiesKHR_default(physicalDevice, surface, pSurfaceCapabilities); -} + VkSurfaceCapabilitiesKHR* pSurfaceCapabilities) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceFormats2KHR_default( +/* Default common code implementation. 
*/ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceCapabilitiesKHR( VkPhysicalDevice physicalDevice, - const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, - uint32_t* pSurfaceFormatCount, - VkSurfaceFormat2KHR* pSurfaceFormats); + VkSurfaceKHR surface, + VkSurfaceCapabilitiesKHR* pSurfaceCapabilities); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, - VkSurfaceFormat2KHR* pSurfaceFormats -) { - return layer_vkGetPhysicalDeviceSurfaceFormats2KHR_default(physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats); -} + VkSurfaceFormat2KHR* pSurfaceFormats) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceFormatsKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceFormats2KHR( VkPhysicalDevice physicalDevice, - VkSurfaceKHR surface, + const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, - VkSurfaceFormatKHR* pSurfaceFormats); + VkSurfaceFormat2KHR* pSurfaceFormats); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, - VkSurfaceFormatKHR* pSurfaceFormats -) { - return layer_vkGetPhysicalDeviceSurfaceFormatsKHR_default(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats); -} + VkSurfaceFormatKHR* pSurfaceFormats) = delete; -/* See Vulkan API for documentation. 
*/ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfacePresentModesKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceFormatsKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, - uint32_t* pPresentModeCount, - VkPresentModeKHR* pPresentModes); + uint32_t* pSurfaceFormatCount, + VkSurfaceFormatKHR* pSurfaceFormats); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, - VkPresentModeKHR* pPresentModes -) { - return layer_vkGetPhysicalDeviceSurfacePresentModesKHR_default(physicalDevice, surface, pPresentModeCount, pPresentModes); -} + VkPresentModeKHR* pPresentModes) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceSupportKHR_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfacePresentModesKHR( VkPhysicalDevice physicalDevice, - uint32_t queueFamilyIndex, VkSurfaceKHR surface, - VkBool32* pSupported); + uint32_t* pPresentModeCount, + VkPresentModeKHR* pPresentModes); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, - VkBool32* pSupported -) { - return layer_vkGetPhysicalDeviceSurfaceSupportKHR_default(physicalDevice, queueFamilyIndex, surface, pSupported); -} + VkBool32* pSupported) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceToolProperties_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceSurfaceSupportKHR( VkPhysicalDevice physicalDevice, - uint32_t* pToolCount, - VkPhysicalDeviceToolProperties* pToolProperties); + uint32_t queueFamilyIndex, + VkSurfaceKHR surface, + VkBool32* pSupported); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice, uint32_t* pToolCount, - VkPhysicalDeviceToolProperties* pToolProperties -) { - return layer_vkGetPhysicalDeviceToolProperties_default(physicalDevice, pToolCount, pToolProperties); -} + VkPhysicalDeviceToolProperties* pToolProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceToolPropertiesEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceToolProperties( VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolProperties* pToolProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. 
*/ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, uint32_t* pToolCount, - VkPhysicalDeviceToolProperties* pToolProperties -) { - return layer_vkGetPhysicalDeviceToolPropertiesEXT_default(physicalDevice, pToolCount, pToolProperties); -} + VkPhysicalDeviceToolProperties* pToolProperties) = delete; -/* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseDisplayEXT_default( +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkGetPhysicalDeviceToolPropertiesEXT( VkPhysicalDevice physicalDevice, - VkDisplayKHR display); + uint32_t* pToolCount, + VkPhysicalDeviceToolProperties* pToolProperties); -/* Match-all template to use default implementation. */ +/* See Vulkan API for documentation. */ +/* Delete the generic match-all */ template VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseDisplayEXT( VkPhysicalDevice physicalDevice, - VkDisplayKHR display -) { - return layer_vkReleaseDisplayEXT_default(physicalDevice, display); -} + VkDisplayKHR display) = delete; + +/* Default common code implementation. */ +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkReleaseDisplayEXT( + VkPhysicalDevice physicalDevice, + VkDisplayKHR display); /* See Vulkan API for documentation. */ -/* Default common code pass-through implementation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkSubmitDebugUtilsMessageEXT_default( +/* Delete the generic match-all */ +template +VKAPI_ATTR void VKAPI_CALL layer_vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, - const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData) = delete; -/* Match-all template to use default implementation. 
*/ -template -VKAPI_ATTR void VKAPI_CALL layer_vkSubmitDebugUtilsMessageEXT( +/* Default common code implementation. */ +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkSubmitDebugUtilsMessageEXT( VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, - const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData -) { - layer_vkSubmitDebugUtilsMessageEXT_default(instance, messageSeverity, messageTypes, pCallbackData); -} + const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData); // clang-format on diff --git a/source_common/framework/instance_functions_query.hpp b/source_common/framework/instance_functions_query.hpp new file mode 100644 index 0000000..013cc0b --- /dev/null +++ b/source_common/framework/instance_functions_query.hpp @@ -0,0 +1,1708 @@ +/* + * SPDX-License-Identifier: MIT + * ---------------------------------------------------------------------------- + * Copyright (c) 2024-2025 Arm Limited + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. + * ---------------------------------------------------------------------------- + */ + +#pragma once + +// clang-format off + +#include + +#if defined(VK_USE_PLATFORM_ANDROID_KHR) + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateAndroidSurfaceKHR = requires( + VkInstance instance, const VkAndroidSurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface +) { + layer_vkCreateAndroidSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateAndroidSurfaceKHR getLayerPtr_vkCreateAndroidSurfaceKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateAndroidSurfaceKHR) + { + return layer_vkCreateAndroidSurfaceKHR; + } + + return layer_vkCreateAndroidSurfaceKHR; + }.operator()(); +} + +#endif + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateDebugReportCallbackEXT = requires( + VkInstance instance, const VkDebugReportCallbackCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugReportCallbackEXT* pCallback +) { + layer_vkCreateDebugReportCallbackEXT(instance, pCreateInfo, pAllocator, pCallback); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateDebugReportCallbackEXT getLayerPtr_vkCreateDebugReportCallbackEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateDebugReportCallbackEXT) + { + return layer_vkCreateDebugReportCallbackEXT; + } + + return layer_vkCreateDebugReportCallbackEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCreateDebugUtilsMessengerEXT = requires( + VkInstance instance, const VkDebugUtilsMessengerCreateInfoEXT* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDebugUtilsMessengerEXT* pMessenger +) { + layer_vkCreateDebugUtilsMessengerEXT(instance, pCreateInfo, pAllocator, pMessenger); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateDebugUtilsMessengerEXT getLayerPtr_vkCreateDebugUtilsMessengerEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateDebugUtilsMessengerEXT) + { + return layer_vkCreateDebugUtilsMessengerEXT; + } + + return layer_vkCreateDebugUtilsMessengerEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateDevice = requires( + VkPhysicalDevice physicalDevice, const VkDeviceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDevice* pDevice +) { + layer_vkCreateDevice(physicalDevice, pCreateInfo, pAllocator, pDevice); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateDevice getLayerPtr_vkCreateDevice() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateDevice) + { + return layer_vkCreateDevice; + } + + return layer_vkCreateDevice; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateDisplayModeKHR = requires( + VkPhysicalDevice physicalDevice, VkDisplayKHR display, const VkDisplayModeCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDisplayModeKHR* pMode +) { + layer_vkCreateDisplayModeKHR(physicalDevice, display, pCreateInfo, pAllocator, pMode); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateDisplayModeKHR getLayerPtr_vkCreateDisplayModeKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateDisplayModeKHR) + { + return layer_vkCreateDisplayModeKHR; + } + + return layer_vkCreateDisplayModeKHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkCreateDisplayPlaneSurfaceKHR = requires( + VkInstance instance, const VkDisplaySurfaceCreateInfoKHR* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSurfaceKHR* pSurface +) { + layer_vkCreateDisplayPlaneSurfaceKHR(instance, pCreateInfo, pAllocator, pSurface); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateDisplayPlaneSurfaceKHR getLayerPtr_vkCreateDisplayPlaneSurfaceKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateDisplayPlaneSurfaceKHR) + { + return layer_vkCreateDisplayPlaneSurfaceKHR; + } + + return layer_vkCreateDisplayPlaneSurfaceKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkCreateInstance = requires( + const VkInstanceCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkInstance* pInstance +) { + layer_vkCreateInstance(pCreateInfo, pAllocator, pInstance); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkCreateInstance getLayerPtr_vkCreateInstance() +{ + return [] + { + if constexpr(hasLayerPtr_vkCreateInstance) + { + return layer_vkCreateInstance; + } + + return layer_vkCreateInstance; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDebugReportMessageEXT = requires( + VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage +) { + layer_vkDebugReportMessageEXT(instance, flags, objectType, object, location, messageCode, pLayerPrefix, pMessage); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDebugReportMessageEXT getLayerPtr_vkDebugReportMessageEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkDebugReportMessageEXT) + { + return layer_vkDebugReportMessageEXT; + } + + return layer_vkDebugReportMessageEXT; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkDestroyDebugReportCallbackEXT = requires( + VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyDebugReportCallbackEXT(instance, callback, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyDebugReportCallbackEXT getLayerPtr_vkDestroyDebugReportCallbackEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyDebugReportCallbackEXT) + { + return layer_vkDestroyDebugReportCallbackEXT; + } + + return layer_vkDestroyDebugReportCallbackEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyDebugUtilsMessengerEXT = requires( + VkInstance instance, VkDebugUtilsMessengerEXT messenger, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyDebugUtilsMessengerEXT(instance, messenger, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyDebugUtilsMessengerEXT getLayerPtr_vkDestroyDebugUtilsMessengerEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyDebugUtilsMessengerEXT) + { + return layer_vkDestroyDebugUtilsMessengerEXT; + } + + return layer_vkDestroyDebugUtilsMessengerEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkDestroyInstance = requires( + VkInstance instance, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroyInstance(instance, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroyInstance getLayerPtr_vkDestroyInstance() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroyInstance) + { + return layer_vkDestroyInstance; + } + + return layer_vkDestroyInstance; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkDestroySurfaceKHR = requires( + VkInstance instance, VkSurfaceKHR surface, const VkAllocationCallbacks* pAllocator +) { + layer_vkDestroySurfaceKHR(instance, surface, pAllocator); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkDestroySurfaceKHR getLayerPtr_vkDestroySurfaceKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkDestroySurfaceKHR) + { + return layer_vkDestroySurfaceKHR; + } + + return layer_vkDestroySurfaceKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkEnumerateDeviceExtensionProperties = requires( + VkPhysicalDevice physicalDevice, const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties +) { + layer_vkEnumerateDeviceExtensionProperties(physicalDevice, pLayerName, pPropertyCount, pProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkEnumerateDeviceExtensionProperties getLayerPtr_vkEnumerateDeviceExtensionProperties() +{ + return [] + { + if constexpr(hasLayerPtr_vkEnumerateDeviceExtensionProperties) + { + return layer_vkEnumerateDeviceExtensionProperties; + } + + return layer_vkEnumerateDeviceExtensionProperties; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkEnumerateDeviceLayerProperties = requires( + VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkLayerProperties* pProperties +) { + layer_vkEnumerateDeviceLayerProperties(physicalDevice, pPropertyCount, pProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkEnumerateDeviceLayerProperties getLayerPtr_vkEnumerateDeviceLayerProperties() +{ + return [] + { + if constexpr(hasLayerPtr_vkEnumerateDeviceLayerProperties) + { + return layer_vkEnumerateDeviceLayerProperties; + } + + return layer_vkEnumerateDeviceLayerProperties; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkEnumerateInstanceExtensionProperties = requires( + const char* pLayerName, uint32_t* pPropertyCount, VkExtensionProperties* pProperties +) { + layer_vkEnumerateInstanceExtensionProperties(pLayerName, pPropertyCount, pProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkEnumerateInstanceExtensionProperties getLayerPtr_vkEnumerateInstanceExtensionProperties() +{ + return [] + { + if constexpr(hasLayerPtr_vkEnumerateInstanceExtensionProperties) + { + return layer_vkEnumerateInstanceExtensionProperties; + } + + return layer_vkEnumerateInstanceExtensionProperties; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkEnumerateInstanceLayerProperties = requires( + uint32_t* pPropertyCount, VkLayerProperties* pProperties +) { + layer_vkEnumerateInstanceLayerProperties(pPropertyCount, pProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkEnumerateInstanceLayerProperties getLayerPtr_vkEnumerateInstanceLayerProperties() +{ + return [] + { + if constexpr(hasLayerPtr_vkEnumerateInstanceLayerProperties) + { + return layer_vkEnumerateInstanceLayerProperties; + } + + return layer_vkEnumerateInstanceLayerProperties; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkEnumeratePhysicalDeviceGroups = requires( + VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties +) { + layer_vkEnumeratePhysicalDeviceGroups(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkEnumeratePhysicalDeviceGroups getLayerPtr_vkEnumeratePhysicalDeviceGroups() +{ + return [] + { + if constexpr(hasLayerPtr_vkEnumeratePhysicalDeviceGroups) + { + return layer_vkEnumeratePhysicalDeviceGroups; + } + + return layer_vkEnumeratePhysicalDeviceGroups; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkEnumeratePhysicalDeviceGroupsKHR = requires( + VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties +) { + layer_vkEnumeratePhysicalDeviceGroupsKHR(instance, pPhysicalDeviceGroupCount, pPhysicalDeviceGroupProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkEnumeratePhysicalDeviceGroupsKHR getLayerPtr_vkEnumeratePhysicalDeviceGroupsKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkEnumeratePhysicalDeviceGroupsKHR) + { + return layer_vkEnumeratePhysicalDeviceGroupsKHR; + } + + return layer_vkEnumeratePhysicalDeviceGroupsKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR = requires( + VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, uint32_t* pCounterCount, VkPerformanceCounterKHR* pCounters, VkPerformanceCounterDescriptionKHR* pCounterDescriptions +) { + layer_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR(physicalDevice, queueFamilyIndex, pCounterCount, pCounters, pCounterDescriptions); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR getLayerPtr_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR) + { + return layer_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR; + } + + return layer_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkEnumeratePhysicalDevices = requires( + VkInstance instance, uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices +) { + layer_vkEnumeratePhysicalDevices(instance, pPhysicalDeviceCount, pPhysicalDevices); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkEnumeratePhysicalDevices getLayerPtr_vkEnumeratePhysicalDevices() +{ + return [] + { + if constexpr(hasLayerPtr_vkEnumeratePhysicalDevices) + { + return layer_vkEnumeratePhysicalDevices; + } + + return layer_vkEnumeratePhysicalDevices; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDisplayModeProperties2KHR = requires( + VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModeProperties2KHR* pProperties +) { + layer_vkGetDisplayModeProperties2KHR(physicalDevice, display, pPropertyCount, pProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDisplayModeProperties2KHR getLayerPtr_vkGetDisplayModeProperties2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDisplayModeProperties2KHR) + { + return layer_vkGetDisplayModeProperties2KHR; + } + + return layer_vkGetDisplayModeProperties2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDisplayModePropertiesKHR = requires( + VkPhysicalDevice physicalDevice, VkDisplayKHR display, uint32_t* pPropertyCount, VkDisplayModePropertiesKHR* pProperties +) { + layer_vkGetDisplayModePropertiesKHR(physicalDevice, display, pPropertyCount, pProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDisplayModePropertiesKHR getLayerPtr_vkGetDisplayModePropertiesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDisplayModePropertiesKHR) + { + return layer_vkGetDisplayModePropertiesKHR; + } + + return layer_vkGetDisplayModePropertiesKHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkGetDisplayPlaneCapabilities2KHR = requires( + VkPhysicalDevice physicalDevice, const VkDisplayPlaneInfo2KHR* pDisplayPlaneInfo, VkDisplayPlaneCapabilities2KHR* pCapabilities +) { + layer_vkGetDisplayPlaneCapabilities2KHR(physicalDevice, pDisplayPlaneInfo, pCapabilities); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDisplayPlaneCapabilities2KHR getLayerPtr_vkGetDisplayPlaneCapabilities2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDisplayPlaneCapabilities2KHR) + { + return layer_vkGetDisplayPlaneCapabilities2KHR; + } + + return layer_vkGetDisplayPlaneCapabilities2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDisplayPlaneCapabilitiesKHR = requires( + VkPhysicalDevice physicalDevice, VkDisplayModeKHR mode, uint32_t planeIndex, VkDisplayPlaneCapabilitiesKHR* pCapabilities +) { + layer_vkGetDisplayPlaneCapabilitiesKHR(physicalDevice, mode, planeIndex, pCapabilities); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetDisplayPlaneCapabilitiesKHR getLayerPtr_vkGetDisplayPlaneCapabilitiesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDisplayPlaneCapabilitiesKHR) + { + return layer_vkGetDisplayPlaneCapabilitiesKHR; + } + + return layer_vkGetDisplayPlaneCapabilitiesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetDisplayPlaneSupportedDisplaysKHR = requires( + VkPhysicalDevice physicalDevice, uint32_t planeIndex, uint32_t* pDisplayCount, VkDisplayKHR* pDisplays +) { + layer_vkGetDisplayPlaneSupportedDisplaysKHR(physicalDevice, planeIndex, pDisplayCount, pDisplays); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetDisplayPlaneSupportedDisplaysKHR getLayerPtr_vkGetDisplayPlaneSupportedDisplaysKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetDisplayPlaneSupportedDisplaysKHR) + { + return layer_vkGetDisplayPlaneSupportedDisplaysKHR; + } + + return layer_vkGetDisplayPlaneSupportedDisplaysKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetInstanceProcAddr = requires( + VkInstance instance, const char* pName +) { + layer_vkGetInstanceProcAddr(instance, pName); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetInstanceProcAddr getLayerPtr_vkGetInstanceProcAddr() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetInstanceProcAddr) + { + return layer_vkGetInstanceProcAddr; + } + + return layer_vkGetInstanceProcAddr; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT = requires( + VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainKHR* pTimeDomains +) { + layer_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT(physicalDevice, pTimeDomainCount, pTimeDomains); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT getLayerPtr_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT) + { + return layer_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; + } + + return layer_vkGetPhysicalDeviceCalibrateableTimeDomainsEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR = requires( + VkPhysicalDevice physicalDevice, uint32_t* pTimeDomainCount, VkTimeDomainKHR* pTimeDomains +) { + layer_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR(physicalDevice, pTimeDomainCount, pTimeDomains); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR getLayerPtr_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR) + { + return layer_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR; + } + + return layer_vkGetPhysicalDeviceCalibrateableTimeDomainsKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR = requires( + VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkCooperativeMatrixPropertiesKHR* pProperties +) { + layer_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR(physicalDevice, pPropertyCount, pProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR getLayerPtr_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR) + { + return layer_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR; + } + + return layer_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceDisplayPlaneProperties2KHR = requires( + VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlaneProperties2KHR* pProperties +) { + layer_vkGetPhysicalDeviceDisplayPlaneProperties2KHR(physicalDevice, pPropertyCount, pProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceDisplayPlaneProperties2KHR getLayerPtr_vkGetPhysicalDeviceDisplayPlaneProperties2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceDisplayPlaneProperties2KHR) + { + return layer_vkGetPhysicalDeviceDisplayPlaneProperties2KHR; + } + + return layer_vkGetPhysicalDeviceDisplayPlaneProperties2KHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkGetPhysicalDeviceDisplayPlanePropertiesKHR = requires( + VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPlanePropertiesKHR* pProperties +) { + layer_vkGetPhysicalDeviceDisplayPlanePropertiesKHR(physicalDevice, pPropertyCount, pProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceDisplayPlanePropertiesKHR getLayerPtr_vkGetPhysicalDeviceDisplayPlanePropertiesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceDisplayPlanePropertiesKHR) + { + return layer_vkGetPhysicalDeviceDisplayPlanePropertiesKHR; + } + + return layer_vkGetPhysicalDeviceDisplayPlanePropertiesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceDisplayProperties2KHR = requires( + VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayProperties2KHR* pProperties +) { + layer_vkGetPhysicalDeviceDisplayProperties2KHR(physicalDevice, pPropertyCount, pProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceDisplayProperties2KHR getLayerPtr_vkGetPhysicalDeviceDisplayProperties2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceDisplayProperties2KHR) + { + return layer_vkGetPhysicalDeviceDisplayProperties2KHR; + } + + return layer_vkGetPhysicalDeviceDisplayProperties2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceDisplayPropertiesKHR = requires( + VkPhysicalDevice physicalDevice, uint32_t* pPropertyCount, VkDisplayPropertiesKHR* pProperties +) { + layer_vkGetPhysicalDeviceDisplayPropertiesKHR(physicalDevice, pPropertyCount, pProperties); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetPhysicalDeviceDisplayPropertiesKHR getLayerPtr_vkGetPhysicalDeviceDisplayPropertiesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceDisplayPropertiesKHR) + { + return layer_vkGetPhysicalDeviceDisplayPropertiesKHR; + } + + return layer_vkGetPhysicalDeviceDisplayPropertiesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceExternalBufferProperties = requires( + VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties +) { + layer_vkGetPhysicalDeviceExternalBufferProperties(physicalDevice, pExternalBufferInfo, pExternalBufferProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceExternalBufferProperties getLayerPtr_vkGetPhysicalDeviceExternalBufferProperties() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceExternalBufferProperties) + { + return layer_vkGetPhysicalDeviceExternalBufferProperties; + } + + return layer_vkGetPhysicalDeviceExternalBufferProperties; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceExternalBufferPropertiesKHR = requires( + VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties +) { + layer_vkGetPhysicalDeviceExternalBufferPropertiesKHR(physicalDevice, pExternalBufferInfo, pExternalBufferProperties); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetPhysicalDeviceExternalBufferPropertiesKHR getLayerPtr_vkGetPhysicalDeviceExternalBufferPropertiesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceExternalBufferPropertiesKHR) + { + return layer_vkGetPhysicalDeviceExternalBufferPropertiesKHR; + } + + return layer_vkGetPhysicalDeviceExternalBufferPropertiesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceExternalFenceProperties = requires( + VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties +) { + layer_vkGetPhysicalDeviceExternalFenceProperties(physicalDevice, pExternalFenceInfo, pExternalFenceProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceExternalFenceProperties getLayerPtr_vkGetPhysicalDeviceExternalFenceProperties() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceExternalFenceProperties) + { + return layer_vkGetPhysicalDeviceExternalFenceProperties; + } + + return layer_vkGetPhysicalDeviceExternalFenceProperties; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceExternalFencePropertiesKHR = requires( + VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties +) { + layer_vkGetPhysicalDeviceExternalFencePropertiesKHR(physicalDevice, pExternalFenceInfo, pExternalFenceProperties); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetPhysicalDeviceExternalFencePropertiesKHR getLayerPtr_vkGetPhysicalDeviceExternalFencePropertiesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceExternalFencePropertiesKHR) + { + return layer_vkGetPhysicalDeviceExternalFencePropertiesKHR; + } + + return layer_vkGetPhysicalDeviceExternalFencePropertiesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceExternalSemaphoreProperties = requires( + VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties +) { + layer_vkGetPhysicalDeviceExternalSemaphoreProperties(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceExternalSemaphoreProperties getLayerPtr_vkGetPhysicalDeviceExternalSemaphoreProperties() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceExternalSemaphoreProperties) + { + return layer_vkGetPhysicalDeviceExternalSemaphoreProperties; + } + + return layer_vkGetPhysicalDeviceExternalSemaphoreProperties; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR = requires( + VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties +) { + layer_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR(physicalDevice, pExternalSemaphoreInfo, pExternalSemaphoreProperties); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR getLayerPtr_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR) + { + return layer_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + } + + return layer_vkGetPhysicalDeviceExternalSemaphorePropertiesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceExternalTensorPropertiesARM = requires( + VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalTensorInfoARM* pExternalTensorInfo, VkExternalTensorPropertiesARM* pExternalTensorProperties +) { + layer_vkGetPhysicalDeviceExternalTensorPropertiesARM(physicalDevice, pExternalTensorInfo, pExternalTensorProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceExternalTensorPropertiesARM getLayerPtr_vkGetPhysicalDeviceExternalTensorPropertiesARM() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceExternalTensorPropertiesARM) + { + return layer_vkGetPhysicalDeviceExternalTensorPropertiesARM; + } + + return layer_vkGetPhysicalDeviceExternalTensorPropertiesARM; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceFeatures = requires( + VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures* pFeatures +) { + layer_vkGetPhysicalDeviceFeatures(physicalDevice, pFeatures); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceFeatures getLayerPtr_vkGetPhysicalDeviceFeatures() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceFeatures) + { + return layer_vkGetPhysicalDeviceFeatures; + } + + return layer_vkGetPhysicalDeviceFeatures; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkGetPhysicalDeviceFeatures2 = requires( + VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures +) { + layer_vkGetPhysicalDeviceFeatures2(physicalDevice, pFeatures); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceFeatures2 getLayerPtr_vkGetPhysicalDeviceFeatures2() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceFeatures2) + { + return layer_vkGetPhysicalDeviceFeatures2; + } + + return layer_vkGetPhysicalDeviceFeatures2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceFeatures2KHR = requires( + VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures +) { + layer_vkGetPhysicalDeviceFeatures2KHR(physicalDevice, pFeatures); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceFeatures2KHR getLayerPtr_vkGetPhysicalDeviceFeatures2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceFeatures2KHR) + { + return layer_vkGetPhysicalDeviceFeatures2KHR; + } + + return layer_vkGetPhysicalDeviceFeatures2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceFormatProperties = requires( + VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties +) { + layer_vkGetPhysicalDeviceFormatProperties(physicalDevice, format, pFormatProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceFormatProperties getLayerPtr_vkGetPhysicalDeviceFormatProperties() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceFormatProperties) + { + return layer_vkGetPhysicalDeviceFormatProperties; + } + + return layer_vkGetPhysicalDeviceFormatProperties; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkGetPhysicalDeviceFormatProperties2 = requires( + VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties +) { + layer_vkGetPhysicalDeviceFormatProperties2(physicalDevice, format, pFormatProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceFormatProperties2 getLayerPtr_vkGetPhysicalDeviceFormatProperties2() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceFormatProperties2) + { + return layer_vkGetPhysicalDeviceFormatProperties2; + } + + return layer_vkGetPhysicalDeviceFormatProperties2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceFormatProperties2KHR = requires( + VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties +) { + layer_vkGetPhysicalDeviceFormatProperties2KHR(physicalDevice, format, pFormatProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceFormatProperties2KHR getLayerPtr_vkGetPhysicalDeviceFormatProperties2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceFormatProperties2KHR) + { + return layer_vkGetPhysicalDeviceFormatProperties2KHR; + } + + return layer_vkGetPhysicalDeviceFormatProperties2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceFragmentShadingRatesKHR = requires( + VkPhysicalDevice physicalDevice, uint32_t* pFragmentShadingRateCount, VkPhysicalDeviceFragmentShadingRateKHR* pFragmentShadingRates +) { + layer_vkGetPhysicalDeviceFragmentShadingRatesKHR(physicalDevice, pFragmentShadingRateCount, pFragmentShadingRates); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetPhysicalDeviceFragmentShadingRatesKHR getLayerPtr_vkGetPhysicalDeviceFragmentShadingRatesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceFragmentShadingRatesKHR) + { + return layer_vkGetPhysicalDeviceFragmentShadingRatesKHR; + } + + return layer_vkGetPhysicalDeviceFragmentShadingRatesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceImageFormatProperties = requires( + VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties +) { + layer_vkGetPhysicalDeviceImageFormatProperties(physicalDevice, format, type, tiling, usage, flags, pImageFormatProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceImageFormatProperties getLayerPtr_vkGetPhysicalDeviceImageFormatProperties() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceImageFormatProperties) + { + return layer_vkGetPhysicalDeviceImageFormatProperties; + } + + return layer_vkGetPhysicalDeviceImageFormatProperties; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceImageFormatProperties2 = requires( + VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties +) { + layer_vkGetPhysicalDeviceImageFormatProperties2(physicalDevice, pImageFormatInfo, pImageFormatProperties); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetPhysicalDeviceImageFormatProperties2 getLayerPtr_vkGetPhysicalDeviceImageFormatProperties2() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceImageFormatProperties2) + { + return layer_vkGetPhysicalDeviceImageFormatProperties2; + } + + return layer_vkGetPhysicalDeviceImageFormatProperties2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceImageFormatProperties2KHR = requires( + VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties +) { + layer_vkGetPhysicalDeviceImageFormatProperties2KHR(physicalDevice, pImageFormatInfo, pImageFormatProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceImageFormatProperties2KHR getLayerPtr_vkGetPhysicalDeviceImageFormatProperties2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceImageFormatProperties2KHR) + { + return layer_vkGetPhysicalDeviceImageFormatProperties2KHR; + } + + return layer_vkGetPhysicalDeviceImageFormatProperties2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceMemoryProperties = requires( + VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties* pMemoryProperties +) { + layer_vkGetPhysicalDeviceMemoryProperties(physicalDevice, pMemoryProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceMemoryProperties getLayerPtr_vkGetPhysicalDeviceMemoryProperties() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceMemoryProperties) + { + return layer_vkGetPhysicalDeviceMemoryProperties; + } + + return layer_vkGetPhysicalDeviceMemoryProperties; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkGetPhysicalDeviceMemoryProperties2 = requires( + VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties +) { + layer_vkGetPhysicalDeviceMemoryProperties2(physicalDevice, pMemoryProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceMemoryProperties2 getLayerPtr_vkGetPhysicalDeviceMemoryProperties2() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceMemoryProperties2) + { + return layer_vkGetPhysicalDeviceMemoryProperties2; + } + + return layer_vkGetPhysicalDeviceMemoryProperties2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceMemoryProperties2KHR = requires( + VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties +) { + layer_vkGetPhysicalDeviceMemoryProperties2KHR(physicalDevice, pMemoryProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceMemoryProperties2KHR getLayerPtr_vkGetPhysicalDeviceMemoryProperties2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceMemoryProperties2KHR) + { + return layer_vkGetPhysicalDeviceMemoryProperties2KHR; + } + + return layer_vkGetPhysicalDeviceMemoryProperties2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceMultisamplePropertiesEXT = requires( + VkPhysicalDevice physicalDevice, VkSampleCountFlagBits samples, VkMultisamplePropertiesEXT* pMultisampleProperties +) { + layer_vkGetPhysicalDeviceMultisamplePropertiesEXT(physicalDevice, samples, pMultisampleProperties); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT getLayerPtr_vkGetPhysicalDeviceMultisamplePropertiesEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceMultisamplePropertiesEXT) + { + return layer_vkGetPhysicalDeviceMultisamplePropertiesEXT; + } + + return layer_vkGetPhysicalDeviceMultisamplePropertiesEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDevicePresentRectanglesKHR = requires( + VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pRectCount, VkRect2D* pRects +) { + layer_vkGetPhysicalDevicePresentRectanglesKHR(physicalDevice, surface, pRectCount, pRects); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDevicePresentRectanglesKHR getLayerPtr_vkGetPhysicalDevicePresentRectanglesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDevicePresentRectanglesKHR) + { + return layer_vkGetPhysicalDevicePresentRectanglesKHR; + } + + return layer_vkGetPhysicalDevicePresentRectanglesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceProperties = requires( + VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties* pProperties +) { + layer_vkGetPhysicalDeviceProperties(physicalDevice, pProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceProperties getLayerPtr_vkGetPhysicalDeviceProperties() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceProperties) + { + return layer_vkGetPhysicalDeviceProperties; + } + + return layer_vkGetPhysicalDeviceProperties; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceProperties2 = requires( + VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties +) { + layer_vkGetPhysicalDeviceProperties2(physicalDevice, pProperties); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetPhysicalDeviceProperties2 getLayerPtr_vkGetPhysicalDeviceProperties2() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceProperties2) + { + return layer_vkGetPhysicalDeviceProperties2; + } + + return layer_vkGetPhysicalDeviceProperties2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceProperties2KHR = requires( + VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties +) { + layer_vkGetPhysicalDeviceProperties2KHR(physicalDevice, pProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceProperties2KHR getLayerPtr_vkGetPhysicalDeviceProperties2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceProperties2KHR) + { + return layer_vkGetPhysicalDeviceProperties2KHR; + } + + return layer_vkGetPhysicalDeviceProperties2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR = requires( + VkPhysicalDevice physicalDevice, const VkQueryPoolPerformanceCreateInfoKHR* pPerformanceQueryCreateInfo, uint32_t* pNumPasses +) { + layer_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR(physicalDevice, pPerformanceQueryCreateInfo, pNumPasses); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR getLayerPtr_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR) + { + return layer_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR; + } + + return layer_vkGetPhysicalDeviceQueueFamilyPerformanceQueryPassesKHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkGetPhysicalDeviceQueueFamilyProperties = requires( + VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties* pQueueFamilyProperties +) { + layer_vkGetPhysicalDeviceQueueFamilyProperties(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceQueueFamilyProperties getLayerPtr_vkGetPhysicalDeviceQueueFamilyProperties() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceQueueFamilyProperties) + { + return layer_vkGetPhysicalDeviceQueueFamilyProperties; + } + + return layer_vkGetPhysicalDeviceQueueFamilyProperties; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceQueueFamilyProperties2 = requires( + VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties +) { + layer_vkGetPhysicalDeviceQueueFamilyProperties2(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceQueueFamilyProperties2 getLayerPtr_vkGetPhysicalDeviceQueueFamilyProperties2() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceQueueFamilyProperties2) + { + return layer_vkGetPhysicalDeviceQueueFamilyProperties2; + } + + return layer_vkGetPhysicalDeviceQueueFamilyProperties2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceQueueFamilyProperties2KHR = requires( + VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties +) { + layer_vkGetPhysicalDeviceQueueFamilyProperties2KHR(physicalDevice, pQueueFamilyPropertyCount, pQueueFamilyProperties); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetPhysicalDeviceQueueFamilyProperties2KHR getLayerPtr_vkGetPhysicalDeviceQueueFamilyProperties2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceQueueFamilyProperties2KHR) + { + return layer_vkGetPhysicalDeviceQueueFamilyProperties2KHR; + } + + return layer_vkGetPhysicalDeviceQueueFamilyProperties2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceSparseImageFormatProperties = requires( + VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pPropertyCount, VkSparseImageFormatProperties* pProperties +) { + layer_vkGetPhysicalDeviceSparseImageFormatProperties(physicalDevice, format, type, samples, usage, tiling, pPropertyCount, pProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceSparseImageFormatProperties getLayerPtr_vkGetPhysicalDeviceSparseImageFormatProperties() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceSparseImageFormatProperties) + { + return layer_vkGetPhysicalDeviceSparseImageFormatProperties; + } + + return layer_vkGetPhysicalDeviceSparseImageFormatProperties; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceSparseImageFormatProperties2 = requires( + VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties +) { + layer_vkGetPhysicalDeviceSparseImageFormatProperties2(physicalDevice, pFormatInfo, pPropertyCount, pProperties); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetPhysicalDeviceSparseImageFormatProperties2 getLayerPtr_vkGetPhysicalDeviceSparseImageFormatProperties2() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceSparseImageFormatProperties2) + { + return layer_vkGetPhysicalDeviceSparseImageFormatProperties2; + } + + return layer_vkGetPhysicalDeviceSparseImageFormatProperties2; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceSparseImageFormatProperties2KHR = requires( + VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties +) { + layer_vkGetPhysicalDeviceSparseImageFormatProperties2KHR(physicalDevice, pFormatInfo, pPropertyCount, pProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceSparseImageFormatProperties2KHR getLayerPtr_vkGetPhysicalDeviceSparseImageFormatProperties2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceSparseImageFormatProperties2KHR) + { + return layer_vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + } + + return layer_vkGetPhysicalDeviceSparseImageFormatProperties2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceSurfaceCapabilities2EXT = requires( + VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilities2EXT* pSurfaceCapabilities +) { + layer_vkGetPhysicalDeviceSurfaceCapabilities2EXT(physicalDevice, surface, pSurfaceCapabilities); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT getLayerPtr_vkGetPhysicalDeviceSurfaceCapabilities2EXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceSurfaceCapabilities2EXT) + { + return layer_vkGetPhysicalDeviceSurfaceCapabilities2EXT; + } + + return layer_vkGetPhysicalDeviceSurfaceCapabilities2EXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceSurfaceCapabilities2KHR = requires( + VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, VkSurfaceCapabilities2KHR* pSurfaceCapabilities +) { + layer_vkGetPhysicalDeviceSurfaceCapabilities2KHR(physicalDevice, pSurfaceInfo, pSurfaceCapabilities); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceSurfaceCapabilities2KHR getLayerPtr_vkGetPhysicalDeviceSurfaceCapabilities2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceSurfaceCapabilities2KHR) + { + return layer_vkGetPhysicalDeviceSurfaceCapabilities2KHR; + } + + return layer_vkGetPhysicalDeviceSurfaceCapabilities2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceSurfaceCapabilitiesKHR = requires( + VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, VkSurfaceCapabilitiesKHR* pSurfaceCapabilities +) { + layer_vkGetPhysicalDeviceSurfaceCapabilitiesKHR(physicalDevice, surface, pSurfaceCapabilities); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR getLayerPtr_vkGetPhysicalDeviceSurfaceCapabilitiesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceSurfaceCapabilitiesKHR) + { + return layer_vkGetPhysicalDeviceSurfaceCapabilitiesKHR; + } + + return layer_vkGetPhysicalDeviceSurfaceCapabilitiesKHR; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkGetPhysicalDeviceSurfaceFormats2KHR = requires( + VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSurfaceInfo2KHR* pSurfaceInfo, uint32_t* pSurfaceFormatCount, VkSurfaceFormat2KHR* pSurfaceFormats +) { + layer_vkGetPhysicalDeviceSurfaceFormats2KHR(physicalDevice, pSurfaceInfo, pSurfaceFormatCount, pSurfaceFormats); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceSurfaceFormats2KHR getLayerPtr_vkGetPhysicalDeviceSurfaceFormats2KHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceSurfaceFormats2KHR) + { + return layer_vkGetPhysicalDeviceSurfaceFormats2KHR; + } + + return layer_vkGetPhysicalDeviceSurfaceFormats2KHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceSurfaceFormatsKHR = requires( + VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pSurfaceFormatCount, VkSurfaceFormatKHR* pSurfaceFormats +) { + layer_vkGetPhysicalDeviceSurfaceFormatsKHR(physicalDevice, surface, pSurfaceFormatCount, pSurfaceFormats); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceSurfaceFormatsKHR getLayerPtr_vkGetPhysicalDeviceSurfaceFormatsKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceSurfaceFormatsKHR) + { + return layer_vkGetPhysicalDeviceSurfaceFormatsKHR; + } + + return layer_vkGetPhysicalDeviceSurfaceFormatsKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceSurfacePresentModesKHR = requires( + VkPhysicalDevice physicalDevice, VkSurfaceKHR surface, uint32_t* pPresentModeCount, VkPresentModeKHR* pPresentModes +) { + layer_vkGetPhysicalDeviceSurfacePresentModesKHR(physicalDevice, surface, pPresentModeCount, pPresentModes); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkGetPhysicalDeviceSurfacePresentModesKHR getLayerPtr_vkGetPhysicalDeviceSurfacePresentModesKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceSurfacePresentModesKHR) + { + return layer_vkGetPhysicalDeviceSurfacePresentModesKHR; + } + + return layer_vkGetPhysicalDeviceSurfacePresentModesKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceSurfaceSupportKHR = requires( + VkPhysicalDevice physicalDevice, uint32_t queueFamilyIndex, VkSurfaceKHR surface, VkBool32* pSupported +) { + layer_vkGetPhysicalDeviceSurfaceSupportKHR(physicalDevice, queueFamilyIndex, surface, pSupported); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceSurfaceSupportKHR getLayerPtr_vkGetPhysicalDeviceSurfaceSupportKHR() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceSurfaceSupportKHR) + { + return layer_vkGetPhysicalDeviceSurfaceSupportKHR; + } + + return layer_vkGetPhysicalDeviceSurfaceSupportKHR; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkGetPhysicalDeviceToolProperties = requires( + VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolProperties* pToolProperties +) { + layer_vkGetPhysicalDeviceToolProperties(physicalDevice, pToolCount, pToolProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceToolProperties getLayerPtr_vkGetPhysicalDeviceToolProperties() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceToolProperties) + { + return layer_vkGetPhysicalDeviceToolProperties; + } + + return layer_vkGetPhysicalDeviceToolProperties; + }.operator()(); +} + +/* Test for user_tag availability. 
*/ +template +concept hasLayerPtr_vkGetPhysicalDeviceToolPropertiesEXT = requires( + VkPhysicalDevice physicalDevice, uint32_t* pToolCount, VkPhysicalDeviceToolProperties* pToolProperties +) { + layer_vkGetPhysicalDeviceToolPropertiesEXT(physicalDevice, pToolCount, pToolProperties); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkGetPhysicalDeviceToolPropertiesEXT getLayerPtr_vkGetPhysicalDeviceToolPropertiesEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkGetPhysicalDeviceToolPropertiesEXT) + { + return layer_vkGetPhysicalDeviceToolPropertiesEXT; + } + + return layer_vkGetPhysicalDeviceToolPropertiesEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkReleaseDisplayEXT = requires( + VkPhysicalDevice physicalDevice, VkDisplayKHR display +) { + layer_vkReleaseDisplayEXT(physicalDevice, display); +}; + +/* Function pointer resolution. */ +constexpr PFN_vkReleaseDisplayEXT getLayerPtr_vkReleaseDisplayEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkReleaseDisplayEXT) + { + return layer_vkReleaseDisplayEXT; + } + + return layer_vkReleaseDisplayEXT; + }.operator()(); +} + +/* Test for user_tag availability. */ +template +concept hasLayerPtr_vkSubmitDebugUtilsMessageEXT = requires( + VkInstance instance, VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity, VkDebugUtilsMessageTypeFlagsEXT messageTypes, const VkDebugUtilsMessengerCallbackDataEXT* pCallbackData +) { + layer_vkSubmitDebugUtilsMessageEXT(instance, messageSeverity, messageTypes, pCallbackData); +}; + +/* Function pointer resolution. 
*/ +constexpr PFN_vkSubmitDebugUtilsMessageEXT getLayerPtr_vkSubmitDebugUtilsMessageEXT() +{ + return [] + { + if constexpr(hasLayerPtr_vkSubmitDebugUtilsMessageEXT) + { + return layer_vkSubmitDebugUtilsMessageEXT; + } + + return layer_vkSubmitDebugUtilsMessageEXT; + }.operator()(); +} + +// clang-format on diff --git a/source_common/framework/manual_functions.cpp b/source_common/framework/manual_functions.cpp index 9428612..fc602b6 100644 --- a/source_common/framework/manual_functions.cpp +++ b/source_common/framework/manual_functions.cpp @@ -29,6 +29,7 @@ * implemented as library code which can be swapped for alternative * implementations on a per-layer basis if needed. */ + #include #include "framework/manual_functions.hpp" @@ -111,7 +112,35 @@ VkLayerDeviceCreateInfo* getChainInfo(const VkDeviceCreateInfo* pCreateInfo) } /* See header for documentation. */ -std::pair getInstanceLayerFunction(const char* name) +bool isFunctionAlwaysExported(const char* name) +{ + const std::array alwaysExportedFunctions { + "vkGetInstanceProcAddr", + "vkGetDeviceProcAddr", + "vkEnumerateInstanceExtensionProperties", + "vkEnumerateDeviceExtensionProperties", + "vkEnumerateInstanceLayerProperties", + "vkEnumerateDeviceLayerProperties", + "vkCreateInstance", + "vkDestroyInstance", + "vkCreateDevice", + "vkDestroyDevice", + "vkGetDeviceImageMemoryRequirementsKHR", + }; + + for (const char* functionName : alwaysExportedFunctions) + { + if (!strcmp(functionName, name)) + { + return true; + } + } + + return false; +} + +/* See header for documentation. 
*/ +std::tuple getInstanceLayerFunction(const char* name) { const std::array globalFunctions { // Supported since Vulkan 1.0 @@ -125,7 +154,7 @@ std::pair getInstanceLayerFunction(const char* name) }; bool isGlobal {false}; - for (const auto* globalName : globalFunctions) + for (const char* globalName : globalFunctions) { if (!strcmp(globalName, name)) { @@ -138,25 +167,25 @@ std::pair getInstanceLayerFunction(const char* name) { if (!strcmp(function.name, name)) { - return {isGlobal, function.function}; + return std::make_tuple(isGlobal, function.function, function.hasLayerSpecialization); } } - return {isGlobal, nullptr}; + return std::make_tuple(isGlobal, nullptr, false); } /* See header for documentation. */ -PFN_vkVoidFunction getDeviceLayerFunction(const char* name) +std::pair getDeviceLayerFunction(const char* name) { for (auto& function : deviceIntercepts) { if (!strcmp(function.name, name)) { - return function.function; + return {function.function, function.hasLayerSpecialization}; } } - return nullptr; + return {nullptr, false}; } /* See header for documentation. */ @@ -495,9 +524,10 @@ void enableDeviceVkExtImageCompressionControl(Instance& instance, } /** See Vulkan API for documentation. 
*/ -PFN_vkVoidFunction layer_vkGetInstanceProcAddr_default(VkInstance instance, const char* pName) +template <> +PFN_vkVoidFunction layer_vkGetInstanceProcAddr(VkInstance instance, const char* pName) { - auto [isGlobal, layerFunction] = getInstanceLayerFunction(pName); + auto [isGlobal, layerFunction, hasSpecialization] = getInstanceLayerFunction(pName); // Global functions must be exposed and do not require the caller to pass // a valid instance pointer, although it is required to be nullptr in @@ -507,6 +537,20 @@ PFN_vkVoidFunction layer_vkGetInstanceProcAddr_default(VkInstance instance, cons return layerFunction; } + // Function is not exported because layer doesn't implement it at all + bool exportLayerFunction { layerFunction != nullptr }; + + // Function is not exported because layer doesn't specialize a user_tag version + if constexpr(CONFIG_OPTIMIZE_DISPATCH) + { + if (!isFunctionAlwaysExported(pName) && !hasSpecialization) + { + exportLayerFunction = false; + } + } + + LAYER_LOG("Export: %s = %u", pName, hasSpecialization); + // For other functions, only expose functions that the driver exposes to // avoid changing queryable interface behavior seen by the application if (instance) @@ -519,7 +563,7 @@ PFN_vkVoidFunction layer_vkGetInstanceProcAddr_default(VkInstance instance, cons PFN_vkVoidFunction driverFunction = layer->nlayerGetProcAddress(instance, pName); // If driver exposes it and the layer has one, use the layer function - if (driverFunction && layerFunction) + if (driverFunction && exportLayerFunction) { return layerFunction; } @@ -532,7 +576,8 @@ PFN_vkVoidFunction layer_vkGetInstanceProcAddr_default(VkInstance instance, cons } /** See Vulkan API for documentation. 
*/ -PFN_vkVoidFunction layer_vkGetDeviceProcAddr_default(VkDevice device, const char* pName) +template <> +PFN_vkVoidFunction layer_vkGetDeviceProcAddr(VkDevice device, const char* pName) { // Hold the lock to access layer-wide global store std::unique_lock lock {g_vulkanLock}; @@ -542,10 +587,24 @@ PFN_vkVoidFunction layer_vkGetDeviceProcAddr_default(VkDevice device, const char // Only expose functions that the driver exposes to avoid changing // queryable interface behavior seen by the application auto driverFunction = layer->driver.vkGetDeviceProcAddr(device, pName); - auto layerFunction = getDeviceLayerFunction(pName); + auto [layerFunction, hasSpecialization] = getDeviceLayerFunction(pName); + + // Function is not exported because layer doesn't implement it at all + bool exportLayerFunction { layerFunction != nullptr }; + + // Function is not exported because layer doesn't specialize a user_tag version + if constexpr(CONFIG_OPTIMIZE_DISPATCH) + { + if (!isFunctionAlwaysExported(pName) && !hasSpecialization) + { + exportLayerFunction = false; + } + } + + LAYER_LOG("Export: %s = %u", pName, hasSpecialization); // If driver exposes it and the layer has one, use the layer function - if (driverFunction && layerFunction) + if (driverFunction && exportLayerFunction) { return layerFunction; } @@ -555,9 +614,10 @@ PFN_vkVoidFunction layer_vkGetDeviceProcAddr_default(VkDevice device, const char } /** See Vulkan API for documentation. */ -VkResult layer_vkEnumerateInstanceExtensionProperties_default(const char* pLayerName, - uint32_t* pPropertyCount, - VkExtensionProperties* pProperties) +template <> +VkResult layer_vkEnumerateInstanceExtensionProperties(const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties) { LAYER_TRACE(__func__); @@ -573,10 +633,11 @@ VkResult layer_vkEnumerateInstanceExtensionProperties_default(const char* pLayer } /** See Vulkan API for documentation. 
*/ -VkResult layer_vkEnumerateDeviceExtensionProperties_default(VkPhysicalDevice gpu, - const char* pLayerName, - uint32_t* pPropertyCount, - VkExtensionProperties* pProperties) +template <> +VkResult layer_vkEnumerateDeviceExtensionProperties(VkPhysicalDevice gpu, + const char* pLayerName, + uint32_t* pPropertyCount, + VkExtensionProperties* pProperties) { LAYER_TRACE(__func__); @@ -608,7 +669,8 @@ VkResult layer_vkEnumerateDeviceExtensionProperties_default(VkPhysicalDevice gpu } /** See Vulkan API for documentation. */ -VkResult layer_vkEnumerateInstanceLayerProperties_default(uint32_t* pPropertyCount, VkLayerProperties* pProperties) +template <> +VkResult layer_vkEnumerateInstanceLayerProperties(uint32_t* pPropertyCount, VkLayerProperties* pProperties) { LAYER_TRACE(__func__); @@ -630,9 +692,10 @@ VkResult layer_vkEnumerateInstanceLayerProperties_default(uint32_t* pPropertyCou } /** See Vulkan API for documentation. */ -VkResult layer_vkEnumerateDeviceLayerProperties_default(VkPhysicalDevice gpu, - uint32_t* pPropertyCount, - VkLayerProperties* pProperties) +template <> +VkResult layer_vkEnumerateDeviceLayerProperties(VkPhysicalDevice gpu, + uint32_t* pPropertyCount, + VkLayerProperties* pProperties) { LAYER_TRACE(__func__); @@ -656,9 +719,10 @@ VkResult layer_vkEnumerateDeviceLayerProperties_default(VkPhysicalDevice gpu, } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateInstance_default(const VkInstanceCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkInstance* pInstance) +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateInstance(const VkInstanceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkInstance* pInstance) { LAYER_TRACE(__func__); @@ -743,7 +807,9 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateInstance_default(const VkInstanceCr } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyInstance_default(VkInstance instance, const VkAllocationCallbacks* pAllocator) +template<> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyInstance(VkInstance instance, + const VkAllocationCallbacks* pAllocator) { LAYER_TRACE(__func__); @@ -759,10 +825,11 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyInstance_default(VkInstance instance, } /* See Vulkan API for documentation. */ -VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDevice_default(VkPhysicalDevice physicalDevice, - const VkDeviceCreateInfo* pCreateInfo, - const VkAllocationCallbacks* pAllocator, - VkDevice* pDevice) +template <> +VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDevice(VkPhysicalDevice physicalDevice, + const VkDeviceCreateInfo* pCreateInfo, + const VkAllocationCallbacks* pAllocator, + VkDevice* pDevice) { LAYER_TRACE(__func__); @@ -829,7 +896,8 @@ VKAPI_ATTR VkResult VKAPI_CALL layer_vkCreateDevice_default(VkPhysicalDevice phy } /* See Vulkan API for documentation. */ -VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDevice_default(VkDevice device, const VkAllocationCallbacks* pAllocator) +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDevice(VkDevice device, const VkAllocationCallbacks* pAllocator) { LAYER_TRACE(__func__); @@ -845,10 +913,10 @@ VKAPI_ATTR void VKAPI_CALL layer_vkDestroyDevice_default(VkDevice device, const } /* See Vulkan API for documentation. 
*/ -VKAPI_ATTR void VKAPI_CALL - layer_vkGetDeviceImageMemoryRequirementsKHR_default(VkDevice device, - const VkDeviceImageMemoryRequirements* pInfo, - VkMemoryRequirements2* pMemoryRequirements) +template <> +VKAPI_ATTR void VKAPI_CALL layer_vkGetDeviceImageMemoryRequirementsKHR(VkDevice device, + const VkDeviceImageMemoryRequirements* pInfo, + VkMemoryRequirements2* pMemoryRequirements) { LAYER_TRACE(__func__); diff --git a/source_common/framework/manual_functions.hpp b/source_common/framework/manual_functions.hpp index 6854c58..18849d8 100644 --- a/source_common/framework/manual_functions.hpp +++ b/source_common/framework/manual_functions.hpp @@ -44,6 +44,7 @@ #include #include #include +#include #include #include @@ -66,17 +67,18 @@ VkLayerInstanceCreateInfo* getChainInfo(const VkInstanceCreateInfo* pCreateInfo) VkLayerDeviceCreateInfo* getChainInfo(const VkDeviceCreateInfo* pCreateInfo); /** - * @brief Fetch the function for a given static instance entrypoint name. + * @brief Is this function always exported by the layer? * - * This function is used for finding the fixed entrypoints that must exist and - * that can return a valid function pointer without needing a created instance. + * When optimizing entrypoints the layer will not use the layer function + * if no user_tag variant exists, unless the function is marked as always + * exported. * * @param name The Vulkan function name. * - * @return The layer function pointer, or \c nullptr if the layer doesn't - * intercept the function. + * @return @c true if the default implementation is always exported, @c false if + * only specializations are exported. */ -PFN_vkVoidFunction getFixedInstanceLayerFunction(const char* name); +bool isFunctionAlwaysExported(const char* name); /** * @brief Fetch the function for a given dynamic instance entrypoint name. * @@ -86,11 +88,12 @@ PFN_vkVoidFunction getFixedInstanceLayerFunction(const char* name); * * @param name The Vulkan function name. 
 * - * @return Boolean indicating if this is a globally accessible function, and - * the layer function pointer, or \c nullptr if the layer doesn't - * intercept the function. + * @return Boolean indicating if this is a globally accessible function, the + * active layer function pointer, and whether the layer function was a + * layer specialization. Returns @c nullptr for the function if the + * layer doesn't implement any handler for the function. */ -std::pair getInstanceLayerFunction(const char* name); +std::tuple getInstanceLayerFunction(const char* name); /** * @brief Fetch the function for a given dynamic instance entrypoint name. * @@ -100,10 +103,12 @@ std::pair getInstanceLayerFunction(const char* name); * * @param name The Vulkan function name. * - * @return The layer function pointer, or \c nullptr if the layer doesn't - * intercept the function. + * @return The active layer function pointer, and whether the layer + * function was a layer specialization. Returns @c nullptr for the + * function pointers if the layer doesn't implement any handler for the + * function. */ -PFN_vkVoidFunction getDeviceLayerFunction(const char* name); +std::pair getDeviceLayerFunction(const char* name); /** * @brief Fetch the maximum supported instance API version. diff --git a/source_common/framework/utils.hpp b/source_common/framework/utils.hpp index 1ed7a87..696c966 100644 --- a/source_common/framework/utils.hpp +++ b/source_common/framework/utils.hpp @@ -61,11 +61,22 @@ using APIVersion = std::pair; /** - * @brief Tag type used for template function dispatch; + * @brief Tag type used for template function dispatch. + * + * Layer-specified intercepts implemented for a specific layer must use + * user_tag type for their specializations to ensure that their version of + * the function is selected. */ -struct user_tag -{ -}; +struct user_tag {}; + +/** + * @brief Tag type used for template function dispatch. 
+ * + * Layer-specified intercepts implemented for a specific layer must NOT use + * default_tag type for their specializations. This is a dummy tag used in the + * dispatch logic that will not match a layer-specific specialization. + */ +struct default_tag {}; /** * @brief Convert a dispatchable API handle to the underlying dispatch key. From 44735f45aaed5368dc3e5c6e730644e42b503879 Mon Sep 17 00:00:00 2001 From: Peter Harris Date: Mon, 18 Aug 2025 18:12:47 +0100 Subject: [PATCH 2/4] Remove debug logging --- source_common/framework/manual_functions.cpp | 4 ---- 1 file changed, 4 deletions(-) diff --git a/source_common/framework/manual_functions.cpp b/source_common/framework/manual_functions.cpp index fc602b6..cde9d3d 100644 --- a/source_common/framework/manual_functions.cpp +++ b/source_common/framework/manual_functions.cpp @@ -549,8 +549,6 @@ PFN_vkVoidFunction layer_vkGetInstanceProcAddr(VkInstance instance, } } - LAYER_LOG("Export: %s = %u", pName, hasSpecialization); - // For other functions, only expose functions that the driver exposes to // avoid changing queryable interface behavior seen by the application if (instance) @@ -601,8 +599,6 @@ PFN_vkVoidFunction layer_vkGetDeviceProcAddr(VkDevice device, const } } - LAYER_LOG("Export: %s = %u", pName, hasSpecialization); - // If driver exposes it and the layer has one, use the layer function if (driverFunction && exportLayerFunction) { From e6ac5df372d50236cd67a5d19f733e81676788f6 Mon Sep 17 00:00:00 2001 From: Peter Harris Date: Mon, 18 Aug 2025 21:26:32 +0100 Subject: [PATCH 3/4] Fix python code style warning --- .pylintrc | 2 +- generator/generate_vulkan_common.py | 26 ++++++++++++++++---------- lgl_android_install.py | 8 ++++---- 3 files changed, 21 insertions(+), 15 deletions(-) diff --git a/.pylintrc b/.pylintrc index 47a269a..cfb49d3 100644 --- a/.pylintrc +++ b/.pylintrc @@ -64,7 +64,7 @@ ignore-patterns=^\.# # manipulated during runtime and thus existing member attributes cannot be # deduced by static 
analysis). It supports qualified module names, as well as # Unix pattern matching. -ignored-modules=cairo,protos +ignored-modules=cairo,protos,lglpy.timeline.protos.* # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). diff --git a/generator/generate_vulkan_common.py b/generator/generate_vulkan_common.py index 42473bf..89d6b49 100755 --- a/generator/generate_vulkan_common.py +++ b/generator/generate_vulkan_common.py @@ -585,16 +585,19 @@ def generate_instance_queries( # Define the concept to test if user_tag specialization exists plist = [] nlist = [] - for i, (ptype, pname, array) in enumerate(command.params): + for (ptype, pname, array) in command.params: plist.append(f'{ptype} {pname}{array}') nlist.append(pname) - plistStr = ', '.join(plist) - nlistStr = ', '.join(nlist) + plist_str = ', '.join(plist) + nlist_str = ', '.join(nlist) lines.append('/* Test for user_tag availability. */') decl = f'template \n' \ - f'concept hasLayerPtr_{command.name} = ' \ - f'requires(\n {plistStr}\n) {{\n layer_{command.name}({nlistStr});\n}};' + f'concept hasLayerPtr_{command.name} = requires(\n' \ + f' {plist_str}\n' \ + f') {{\n' \ + f' layer_{command.name}({nlist_str});\n' \ + f'}};' lines.append(decl) lines.append('') @@ -925,16 +928,19 @@ def generate_device_queries( # Define the concept to test if user_tag specialization exists plist = [] nlist = [] - for i, (ptype, pname, array) in enumerate(command.params): + for (ptype, pname, array) in command.params: plist.append(f'{ptype} {pname}{array}') nlist.append(pname) - plistStr = ', '.join(plist) - nlistStr = ', '.join(nlist) + plist_str = ', '.join(plist) + nlist_str = ', '.join(nlist) lines.append('/* Test for user_tag availability. 
*/') decl = f'template \n' \ - f'concept hasLayerPtr_{command.name} = ' \ - f'requires(\n {plistStr}\n) {{\n layer_{command.name}({nlistStr});\n}};' + f'concept hasLayerPtr_{command.name} = requires(\n' \ + f' {plist_str}\n' \ + f') {{\n' \ + f' layer_{command.name}({nlist_str});\n' \ + f'}};' lines.append(decl) lines.append('') diff --git a/lgl_android_install.py b/lgl_android_install.py index 8e6cc9c..8d99c5f 100755 --- a/lgl_android_install.py +++ b/lgl_android_install.py @@ -612,12 +612,12 @@ def configure_server(conn: ADBConnect, instance = server.CommsServer(0) if timeline_file: - serviceTL = service_gpu_timeline.GPUTimelineService(timeline_file) - instance.register_endpoint(serviceTL) + service_tl = service_gpu_timeline.GPUTimelineService(timeline_file) + instance.register_endpoint(service_tl) if profile_dir: - serviceProf = service_gpu_profile.GPUProfileService(profile_dir) - instance.register_endpoint(serviceProf) + service_prof = service_gpu_profile.GPUProfileService(profile_dir) + instance.register_endpoint(service_prof) # Start it running thread = threading.Thread(target=instance.run, daemon=True) From dae4f411c8778d62b428c7629f9719ac8bc537a7 Mon Sep 17 00:00:00 2001 From: Peter Harris Date: Mon, 18 Aug 2025 21:39:56 +0100 Subject: [PATCH 4/4] Core review cleanups --- layer_example/source/layer_device_functions.cpp | 2 +- layer_gpu_profile/source/device.hpp | 3 +-- source_common/framework/entry.cpp | 2 +- source_common/framework/utils.hpp | 3 ++- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/layer_example/source/layer_device_functions.cpp b/layer_example/source/layer_device_functions.cpp index b2fbfe7..9073fb2 100644 --- a/layer_example/source/layer_device_functions.cpp +++ b/layer_example/source/layer_device_functions.cpp @@ -23,8 +23,8 @@ * ---------------------------------------------------------------------------- */ -#include "framework/device_dispatch_table.hpp" #include "device.hpp" +#include "framework/device_dispatch_table.hpp" 
#include #include diff --git a/layer_gpu_profile/source/device.hpp b/layer_gpu_profile/source/device.hpp index c900258..f746e51 100644 --- a/layer_gpu_profile/source/device.hpp +++ b/layer_gpu_profile/source/device.hpp @@ -62,10 +62,9 @@ #include #include -#include "framework/device_dispatch_table.hpp" - #include "layer_comms.hpp" #include "comms/comms_module.hpp" +#include "framework/device_dispatch_table.hpp" #include "instance.hpp" #include "trackers/device.hpp" diff --git a/source_common/framework/entry.cpp b/source_common/framework/entry.cpp index 86532d2..996658d 100644 --- a/source_common/framework/entry.cpp +++ b/source_common/framework/entry.cpp @@ -1,7 +1,7 @@ /* * SPDX-License-Identifier: MIT * ---------------------------------------------------------------------------- - * Copyright (c) 2024 Arm Limited + * Copyright (c) 2024-2025 Arm Limited * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to diff --git a/source_common/framework/utils.hpp b/source_common/framework/utils.hpp index 696c966..0ba36ad 100644 --- a/source_common/framework/utils.hpp +++ b/source_common/framework/utils.hpp @@ -74,7 +74,8 @@ struct user_tag {}; * * Layer-specified intercepts implemented for a specific layer must NOT use * default_tag type for their specializations. This is a dummy tag used in the - * dispatch logic that will not match a layer-specific specialization. + * dispatch logic to select the common-code implementation instead of a + * layer-specific specialization. */ struct default_tag {};