New flags to allow running in RelDebug.
commit 8a865e2e49
parent 7ba132ec0c
@@ -9,9 +9,10 @@ set(CMAKE_CXX_STANDARD_REQUIRED ON)
set(CMAKE_CXX_EXTENSIONS OFF)

if (MSVC)
set(CMAKE_CXX_FLAGS "/W4 /GR- /Zi")
set(CMAKE_CXX_FLAGS "/W4 /GR- ${MSVC_FLAGS}")
add_compile_definitions(_HAS_EXCEPTIONS=0)
add_compile_definitions(_CRT_SECURE_NO_WARNINGS)
add_compile_definitions(${MSVC_DEFINES})
else ()
set(CMAKE_CXX_FLAGS "-Wall -g -fno-rtti -fno-exceptions")
endif ()
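The hard-coded /Zi can go because CMAKE_BUILD_TYPE now comes from the presets, and CMake's default MSVC flags for Debug and RelWithDebInfo already include /Zi. Note that ${MSVC_FLAGS} is not set by any preset in this commit, so it expands to nothing unless it is defined elsewhere in the project or passed at configure time (for example -DMSVC_FLAGS="/Zi" — an illustrative flag, not something this diff adds); ${MSVC_DEFINES} is fed by the windows-reldebug and windows-release presets below.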
@@ -1,5 +1,5 @@
{
"version": 2,
"version": 6,
"configurePresets": [
{
"name": "linux",
@@ -11,16 +11,65 @@
"CMAKE_C_COMPILER": "/usr/bin/clang",
"CMAKE_CXX_COMPILER": "/usr/bin/clang++",
"CMAKE_TOOLCHAIN_FILE": "${sourceDir}/vcpkg/scripts/buildsystems/vcpkg.cmake"
},
"condition": {
"type": "equals",
"lhs": "${hostSystemName}",
"rhs": "Linux"
}
},
{
"name": "windows",
"name": "windows-debug",
"generator": "Ninja",
"binaryDir": "${sourceDir}/build",
"binaryDir": "${sourceDir}/build/debug/",
"cacheVariables": {
"CMAKE_BUILD_TYPE": "Debug",
"CMAKE_EXPORT_COMPILE_COMMANDS": true,
"CMAKE_MAKE_PROGRAM": "ninja",
"CMAKE_TOOLCHAIN_FILE": "${sourceDir}/vcpkg/scripts/buildsystems/vcpkg.cmake"
"CMAKE_TOOLCHAIN_FILE": "${sourceDir}/vcpkg/scripts/buildsystems/vcpkg.cmake",
"DXC_SHADER_FLAGS": "-Zi",
"GLSLC_SHADER_FLAGS": "-g"
},
"condition": {
"type": "equals",
"lhs": "${hostSystemName}",
"rhs": "Windows"
}
},
{
"name": "windows-reldebug",
"generator": "Ninja",
"binaryDir": "${sourceDir}/build/reldebug/",
"cacheVariables": {
"CMAKE_BUILD_TYPE": "RelWithDebInfo",
"CMAKE_EXPORT_COMPILE_COMMANDS": true,
"CMAKE_MAKE_PROGRAM": "ninja",
"CMAKE_TOOLCHAIN_FILE": "${sourceDir}/vcpkg/scripts/buildsystems/vcpkg.cmake",
"DXC_SHADER_FLAGS": "-Zi",
"GLSLC_SHADER_FLAGS": "-g",
"MSVC_DEFINES": "ASTER_NO_BREAK"
},
"condition": {
"type": "equals",
"lhs": "${hostSystemName}",
"rhs": "Windows"
}
},
{
"name": "windows-release",
"generator": "Ninja",
"binaryDir": "${sourceDir}/build/release/",
"cacheVariables": {
"CMAKE_BUILD_TYPE": "Release",
"CMAKE_EXPORT_COMPILE_COMMANDS": true,
"CMAKE_MAKE_PROGRAM": "ninja",
"CMAKE_TOOLCHAIN_FILE": "${sourceDir}/vcpkg/scripts/buildsystems/vcpkg.cmake",
"MSVC_DEFINES": "ASTER_NDEBUG"
},
"condition": {
"type": "equals",
"lhs": "${hostSystemName}",
"rhs": "Windows"
}
}
]
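The motivation for the ASTER_NDEBUG switch used throughout the rest of this commit: RelWithDebInfo and Release both define NDEBUG through CMake's default MSVC flags, so guards keyed on NDEBUG would strip debug labels, logging, validation, and Optick from a RelDebug run. ASTER_NDEBUG is defined only by the windows-release preset (via MSVC_DEFINES), while windows-reldebug defines ASTER_NO_BREAK so error logs stop breaking into the debugger but everything else stays on. Preset schema version 6 requires CMake 3.25 or newer. A typical flow with the new preset, assuming vcpkg is bootstrapped under ./vcpkg as the toolchain path implies:

    cmake --preset windows-reldebug
    cmake --build build/reldebug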
@@ -16,7 +16,7 @@ function(add_shader TARGET SHADER)
message("Marked as hlsl file. ${current-output-path}")
add_custom_command(
OUTPUT ${current-output-path}
COMMAND Vulkan::dxc_exe -spirv -T "${shader-type}_6_0" -E main ${current-shader-path} -Fo ${current-output-path}
COMMAND Vulkan::dxc_exe ${DXC_SHADER_FLAGS} -spirv -T "${shader-type}_6_0" -E main ${current-shader-path} -Fo ${current-output-path}
DEPENDS ${current-shader-path}
IMPLICIT_DEPENDS CXX ${current-shader-path}
VERBATIM)
@@ -24,7 +24,7 @@ function(add_shader TARGET SHADER)
message("Marked as glsl file. ${current-output-path}")
add_custom_command(
OUTPUT ${current-output-path}
COMMAND Vulkan::glslc -o ${current-output-path} ${current-shader-path}
COMMAND Vulkan::glslc ${GLSLC_SHADER_FLAGS} -o ${current-output-path} ${current-shader-path}
DEPENDS ${current-shader-path}
IMPLICIT_DEPENDS CXX ${current-shader-path}
VERBATIM)
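DXC_SHADER_FLAGS and GLSLC_SHADER_FLAGS arrive as cache variables from the debug presets, so for those builds the HLSL rule effectively runs something like dxc -Zi -spirv -T ps_6_0 -E main shader.hlsl -Fo shader.spv (an illustrative expansion; the real paths and shader type come from the function arguments). -Zi for DXC and -g for glslc embed source-level debug info in the emitted SPIR-V, which is what captures in tools such as RenderDoc pick up. Since an unquoted CMake variable is not split on spaces, a value containing several flags would likely need separate_arguments (or a semicolon list) to expand into separate arguments; the single-flag values used here are fine.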
@@ -20,7 +20,7 @@

#define EASTL_NO_EXCEPTIONS 1

#if defined(NDEBUG)
#if defined(ASTER_NDEBUG)
#define USE_OPTICK (0)
#else
#define USE_OPTICK (1)

@@ -21,12 +21,18 @@ struct Context final
vk::DebugUtilsMessengerEXT m_DebugMessenger = nullptr;

// Ctor/Dtor
Context(cstr appName, Version version, bool enableValidation = true);
Context(cstr appName, Version version, bool enableValidation = ENABLE_VALIDATION_DEFAULT_VALUE);
~Context();

// Move
Context(Context &&other) noexcept;
Context &operator=(Context &&other) noexcept;

#if !defined(ASTER_NDEBUG)
constexpr static bool ENABLE_VALIDATION_DEFAULT_VALUE = true;
#else
constexpr static bool ENABLE_VALIDATION_DEFAULT_VALUE = false;
#endif

DISALLOW_COPY_AND_ASSIGN(Context);
};
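One detail worth noting: the default argument names ENABLE_VALIDATION_DEFAULT_VALUE before the constant is declared, which is legal because default arguments inside a class definition are a complete-class context and are looked up as if the class were already complete. A minimal standalone illustration of the same pattern (not project code):

    struct Widget
    {
        // The default argument may reference a static member declared later in the class.
        explicit Widget(bool verbose = DEFAULT_VERBOSE) : m_Verbose(verbose) {}

        constexpr static bool DEFAULT_VERBOSE = true;
        bool m_Verbose;
    };

    int main()
    {
        Widget w; // w.m_Verbose == true
    }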
@@ -28,6 +28,7 @@ Device::Device(const Context *context, PhysicalDevice *physicalDevice, Features
NameString &&name)
: m_Name(std::move(name))
, m_PhysicalDevice(physicalDevice->m_PhysicalDevice)
, m_ValidationEnabled(context->m_DebugMessenger != nullptr)
{
// Shouldn't have more than 4 deviceQueueFamilies in use anyway. Else we can heap
eastl::fixed_vector<vk::DeviceQueueCreateInfo, 4> deviceQueueCreateInfos;

@@ -29,6 +29,7 @@ struct Device final
vk::Device m_Device = nullptr;
VmaAllocator m_Allocator = nullptr;
vk::PipelineCache m_PipelineCache = nullptr;
bool m_ValidationEnabled = true;

template <typename T>
requires vk::isVulkanHandleType<T>::value void SetName(const T &object, cstr name) const;
@@ -54,8 +55,9 @@ template <typename T>
requires vk::isVulkanHandleType<T>::value void
Device::SetName(const T &object, cstr name) const
{
if (!name)
if (!m_ValidationEnabled || !name || !object)
return;

auto handle = Recast<u64>(Cast<typename T::NativeType>(object));
const vk::DebugUtilsObjectNameInfoEXT objectNameInfo = {
.objectType = object.objectType,
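With the extra guard, SetName becomes a no-op when validation is disabled or when the handle is null, so call sites no longer need their own checks or #if blocks. A hedged usage sketch (assuming a constructed Device named device and a vk::Buffer in scope):

    device.SetName(vertexBuffer, "Vertex buffer"); // silently skipped when validation is off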
@@ -23,7 +23,9 @@
#undef min
#endif

#if !defined(NDEBUG)
#define VULKAN_HPP_ASSERT(expr) DEBUG_IF(!(expr), "Vulkan assert failed")
#endif
#include <EASTL/fixed_string.h>
#include <EASTL/string.h>
#include <vk_mem_alloc.h>

@@ -15,6 +15,5 @@ void
AssertionFailure(const char *af)
{
ERROR("{}", af);
debug_break();
}
} // namespace eastl

@@ -69,12 +69,12 @@ struct Logger
fmt::println("{}{} {} {} at {}:{}{}", ToColorCstr<TLogLevel>(), ToCstr<TLogLevel>(), message.data(),
ansi_color::Black, loc, line, ansi_color::Reset);
}
#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG) && !defined(ASTER_NO_BREAK)
if constexpr (TLogLevel == LogType::eError)
{
debug_break();
}
#endif // !defined(NDEBUG)
#endif
}

template <LogType TLogLevel>
@@ -86,12 +86,12 @@ struct Logger
fmt::println("{}{} ({}) {} {} at {}:{}{}", ToColorCstr<TLogLevel>(), ToCstr<TLogLevel>(), exprStr,
message.data(), ansi_color::Black, loc, line, ansi_color::Reset);
}
#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG) && !defined(ASTER_NO_BREAK)
if constexpr (TLogLevel == LogType::eError)
{
debug_break();
}
#endif // !defined(NDEBUG)
#endif
}
};
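Effect of the new guard: under windows-debug neither macro is defined and an eError log still calls debug_break(); under windows-reldebug ASTER_NO_BREAK is defined, so the same log prints but execution continues; under windows-release ASTER_NDEBUG compiles the break out as well. Usage at call sites stays unchanged, for example (message and variable are illustrative):

    ERROR("Failed to open {}", path);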
@@ -135,7 +135,7 @@ extern Logger g_Logger;
; \
else g_Logger.Log<Logger::LogType::eInfo>(fmt::format(__VA_ARGS__), __FILE__, __LINE__)

#if !defined(DEBUG_LOG_DISABLED) && !defined(NDEBUG)
#if !defined(DEBUG_LOG_DISABLED) && !defined(ASTER_NDEBUG)

#define DEBUG(...) g_Logger.Log<Logger::LogType::eDebug>(fmt::format(__VA_ARGS__), __FILE__, __LINE__)
#define DEBUG_IF(expr, ...) \
@@ -151,24 +151,26 @@ extern Logger g_Logger;

#else // !defined(DEBUG_LOG_DISABLED)

#define DEBUG(msg) \
#define DEBUG(...) \
{ \
}
#define DEBUG_IF(expr, msg) \
if (expr) \
(void)msg
#define ELSE_IF_DEBUG(expr, msg) \
#define DEBUG_IF(expr, ...) \
if (false) \
{ \
}
#define ELSE_IF_DEBUG(expr, ...) \
; \
if (expr) \
(void)msg
#define ELSE_DEBUG(msg) \
if (false) \
{ \
}
#define ELSE_DEBUG(...) \
; \
{ \
}

#endif // !defined(DEBUG_LOG_DISABLED)

#if !defined(VERBOSE_LOG_DISABLED) && !defined(NDEBUG)
#if !defined(VERBOSE_LOG_DISABLED) && !defined(ASTER_NDEBUG)

#define VERBOSE(...) g_Logger.Log<Logger::LogType::eVerbose>(fmt::format(__VA_ARGS__), __FILE__, __LINE__)
#define VERBOSE_IF(expr, ...) \
@@ -184,17 +186,20 @@ extern Logger g_Logger;

#else // !defined(DEBUG_LOG_DISABLED)

#define VERBOSE(msg) \
#define VERBOSE(...) \
{ \
}
#define VERBOSE_IF(expr, msg) \
if (expr) \
(void)msg
#define ELSE_IF_VERBOSE(expr, msg) \
#define VERBOSE_IF(expr, ...) \
if (false) \
{ \
} \
}
#define ELSE_IF_VERBOSE(expr, ...) \
; \
if (expr) \
(void)msg
#define ELSE_VERBOSE(msg) \
if (false) \
{ \
}
#define ELSE_VERBOSE(...) \
; \
{ \
}
@@ -230,7 +230,7 @@ GpuResourceManager::Commit(StorageBuffer *storageBuffer)

m_WriteOwner.emplace_back(HandleType::eBuffer, handle.m_Index);

#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
++m_CommitedBufferCount;
#endif

@@ -276,7 +276,7 @@ GpuResourceManager::Release(BufferHandle handle)

m_BufferManager.Release(m_Device, handle);

#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
--m_CommitedBufferCount;
#endif
}
@@ -304,7 +304,7 @@ GpuResourceManager::Release(TextureHandle handle)

m_TextureManager.Release(m_Device, handle);

#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
--m_CommitedTextureCount;
#endif
}
@@ -344,7 +344,7 @@ GpuResourceManager::CommitTexture(Texture *texture)

m_WriteOwner.emplace_back(HandleType::eTexture, handle.m_Index);

#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
++m_CommitedTextureCount;
#endif

@@ -373,7 +373,7 @@ GpuResourceManager::CommitStorageTexture(StorageTexture *storageTexture)

m_WriteOwner.emplace_back(HandleType::eStorageTexture, handle.m_Index);

#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
++m_CommitedStorageTextureCount;
#endif

@@ -390,7 +390,7 @@ GpuResourceManager::Release(StorageTextureHandle handle)

m_StorageTextureManager.Release(m_Device, handle);

#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
--m_CommitedStorageTextureCount;
#endif
}
@@ -543,7 +543,7 @@ GpuResourceManager::GpuResourceManager(Device *device, u16 maxSize)

GpuResourceManager::~GpuResourceManager()
{
#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
WARN_IF(m_CommitedBufferCount > 0 || m_CommitedTextureCount > 0 || m_CommitedStorageTextureCount > 0,
"Resources alive: SSBO = {}, Textures = {}, RWTexture = {}", m_CommitedBufferCount, m_CommitedTextureCount,
m_CommitedStorageTextureCount);
@@ -569,8 +569,11 @@ GpuResourceManager::GpuResourceManager(GpuResourceManager &&other) noexcept
, m_DescriptorPool(other.m_DescriptorPool)
, m_SetLayout(other.m_SetLayout)
, m_DescriptorSet(other.m_DescriptorSet)
#if !defined(ASTER_NDEBUG)
, m_CommitedBufferCount(other.m_CommitedBufferCount)
, m_CommitedTextureCount(other.m_CommitedTextureCount)
, m_CommitedStorageTextureCount(other.m_CommitedStorageTextureCount)
#endif
{
assert(!other.m_Device);
}
@@ -591,8 +594,11 @@ GpuResourceManager::operator=(GpuResourceManager &&other) noexcept
m_DescriptorPool = other.m_DescriptorPool;
m_SetLayout = other.m_SetLayout;
m_DescriptorSet = other.m_DescriptorSet;
#if !defined(ASTER_NDEBUG)
m_CommitedBufferCount = other.m_CommitedBufferCount;
m_CommitedTextureCount = other.m_CommitedTextureCount;
m_CommitedStorageTextureCount = other.m_CommitedStorageTextureCount;
#endif

assert(!other.m_Device);
return *this;
@@ -143,7 +143,7 @@ struct GpuResourceManager
GpuResourceManager(GpuResourceManager &&other) noexcept;
GpuResourceManager &operator=(GpuResourceManager &&other) noexcept;

#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
usize m_CommitedBufferCount = 0;
usize m_CommitedTextureCount = 0;
usize m_CommitedStorageTextureCount = 0;
@@ -165,7 +165,7 @@ Draw(const vk::CommandBuffer commandBuffer, const vk::Extent2D extent, const vk:
{
// OPTICK_EVENT();

#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
constexpr vk::DebugUtilsLabelEXT label = {
.pLabelName = "UI pass",
.color = std::array{0.9f, 0.9f, 1.0f, 1.0f},
@@ -195,7 +195,7 @@ Draw(const vk::CommandBuffer commandBuffer, const vk::Extent2D extent, const vk:

commandBuffer.endRendering();

#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
commandBuffer.endDebugUtilsLabelEXT();
#endif
}
@@ -17,8 +17,8 @@
#include "gpu_resource_manager.h"
#include "helpers.h"
#include "image.h"
#include "EASTL/fixed_vector.h"

#include <EASTL/array.h>
#include <EASTL/hash_map.h>
#include <glm/gtc/type_ptr.hpp>
@@ -151,7 +151,7 @@ AssetLoader::LoadHdrImage(Texture *texture, cstr path, cstr name) const

AbortIfFailed(m_CommandBuffer.begin(&OneTimeCmdBeginInfo));

#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
StackString<128> loadActionName = "Load: ";
loadActionName += name ? name : path;
vk::DebugUtilsLabelEXT debugLabel = {
@@ -165,7 +165,7 @@ AssetLoader::LoadHdrImage(Texture *texture, cstr path, cstr name) const
m_CommandBuffer.copyBufferToImage2(&stagingInfo);
m_CommandBuffer.pipelineBarrier2(&postStagingDependency);

#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
m_CommandBuffer.endDebugUtilsLabelEXT();
#endif
@@ -190,6 +190,164 @@ AssetLoader::LoadHdrImage(Texture *texture, cstr path, cstr name) const
stagingBuffer.Destroy(pDevice);
}

void
GenerateMipMaps(vk::CommandBuffer commandBuffer, Texture *texture, vk::ImageLayout initialLayout, vk::ImageLayout finalLayout)
{
vk::ImageMemoryBarrier2 imageStartBarrier = {
.srcStageMask = vk::PipelineStageFlagBits2::eAllCommands,
.srcAccessMask = vk::AccessFlagBits2::eNone,
.dstStageMask = vk::PipelineStageFlagBits2::eTransfer,
.dstAccessMask = vk::AccessFlagBits2::eTransferRead,
.oldLayout = initialLayout,
.newLayout = vk::ImageLayout::eTransferSrcOptimal,
.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.image = texture->m_Image,
.subresourceRange =
{
.aspectMask = vk::ImageAspectFlagBits::eColor,
.baseMipLevel = 0,
.levelCount = 1,
.baseArrayLayer = 0,
.layerCount = 1,
},
};
vk::ImageMemoryBarrier2 mipsStartBarrier = imageStartBarrier;
mipsStartBarrier.dstAccessMask = vk::AccessFlagBits2::eTransferWrite;
mipsStartBarrier.oldLayout = vk::ImageLayout::eUndefined;
mipsStartBarrier.newLayout = vk::ImageLayout::eTransferDstOptimal;
mipsStartBarrier.subresourceRange = {
.aspectMask = vk::ImageAspectFlagBits::eColor,
.baseMipLevel = 1,
.levelCount = texture->GetMipLevels() - 1,
.baseArrayLayer = 0,
.layerCount = 1,
};
eastl::fixed_vector<vk::ImageMemoryBarrier2, 2> startBarriers = {
mipsStartBarrier,
};
if (initialLayout != imageStartBarrier.newLayout)
{
startBarriers.push_back(imageStartBarrier);
}

vk::DependencyInfo imageStartDependency = {
.imageMemoryBarrierCount = Cast<u32>(startBarriers.size()),
.pImageMemoryBarriers = startBarriers.data(),
};

vk::ImageMemoryBarrier2 nextMipBarrier = {
.srcStageMask = vk::PipelineStageFlagBits2::eTransfer,
.srcAccessMask = vk::AccessFlagBits2::eTransferWrite,
.dstStageMask = vk::PipelineStageFlagBits2::eTransfer,
.dstAccessMask = vk::AccessFlagBits2::eTransferRead,
.oldLayout = vk::ImageLayout::eTransferDstOptimal,
.newLayout = vk::ImageLayout::eTransferSrcOptimal,
.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.image = texture->m_Image,
.subresourceRange =
{
.aspectMask = vk::ImageAspectFlagBits::eColor,
.baseMipLevel = 0,
.levelCount = 1,
.baseArrayLayer = 0,
.layerCount = 1,
},
};
vk::DependencyInfo interMipDependency = {
.imageMemoryBarrierCount = 1,
.pImageMemoryBarriers = &nextMipBarrier,
};

vk::ImageMemoryBarrier2 imageReadyBarrier = {
.srcStageMask = vk::PipelineStageFlagBits2::eTransfer,
.srcAccessMask = vk::AccessFlagBits2::eTransferWrite,
.dstStageMask = vk::PipelineStageFlagBits2::eAllCommands,
.dstAccessMask = vk::AccessFlagBits2::eShaderRead,
.oldLayout = vk::ImageLayout::eTransferSrcOptimal,
.newLayout = finalLayout,
.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.image = texture->m_Image,
.subresourceRange =
{
.aspectMask = vk::ImageAspectFlagBits::eColor,
.baseMipLevel = 0,
.levelCount = texture->GetMipLevels(),
.baseArrayLayer = 0,
.layerCount = 1,
},
};
vk::DependencyInfo imageReadyDependency = {
.imageMemoryBarrierCount = 1,
.pImageMemoryBarriers = &imageReadyBarrier,
};

vk::ImageBlit2 blitRegion = {
.srcSubresource =
{
.aspectMask = vk::ImageAspectFlagBits::eColor,
.baseArrayLayer = 0,
.layerCount = 1,
},
.dstSubresource =
{
.aspectMask = vk::ImageAspectFlagBits::eColor,
.baseArrayLayer = 0,
.layerCount = 1,
},
};

vk::BlitImageInfo2 mipBlitInfo = {
.srcImage = texture->m_Image,
.srcImageLayout = vk::ImageLayout::eTransferSrcOptimal,
.dstImage = texture->m_Image,
.dstImageLayout = vk::ImageLayout::eTransferDstOptimal,
.regionCount = 1,
.pRegions = &blitRegion,
.filter = vk::Filter::eLinear,
};

auto calcNextMip = [](i32 prev) { return eastl::max(prev / 2, 1); };

// Mip Mapping

commandBuffer.pipelineBarrier2(&imageStartDependency);

i32 prevMipWidth = Cast<i32>(texture->m_Extent.width);
i32 prevMipHeight = Cast<i32>(texture->m_Extent.height);

u32 maxPrevMip = texture->GetMipLevels() - 1;
for (u32 prevMipLevel = 0; prevMipLevel < maxPrevMip; ++prevMipLevel)
{
i32 currentMipWidth = calcNextMip(prevMipWidth);
i32 currentMipHeight = calcNextMip(prevMipHeight);
u32 currentMipLevel = prevMipLevel + 1;

blitRegion.srcSubresource.mipLevel = prevMipLevel;
blitRegion.srcOffsets = std::array{
vk::Offset3D{0, 0, 0},
vk::Offset3D{prevMipWidth, prevMipHeight, 1},
};
blitRegion.dstSubresource.mipLevel = currentMipLevel;
blitRegion.dstOffsets = std::array{
vk::Offset3D{0, 0, 0},
vk::Offset3D{currentMipWidth, currentMipHeight, 1},
};

nextMipBarrier.subresourceRange.baseMipLevel = currentMipLevel;

commandBuffer.blitImage2(&mipBlitInfo);
commandBuffer.pipelineBarrier2(&interMipDependency);

prevMipHeight = currentMipHeight;
prevMipWidth = currentMipWidth;
}

commandBuffer.pipelineBarrier2(&imageReadyDependency);
}

TextureHandle
AssetLoader::LoadImageToGpu(StagingBuffer *stagingBuffer, tinygltf::Image *image, bool isSrgb) const
{
@@ -209,7 +367,7 @@ AssetLoader::LoadImageToGpu(StagingBuffer *stagingBuffer, tinygltf::Image *image
stagingBuffer->Init(m_ResourceManager->m_Device, byteSize);
stagingBuffer->Write(m_ResourceManager->m_Device, 0, byteSize, image->image.data());

#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
StackString<128> loadActionName = "Load: ";
loadActionName += image->name.empty() ? "<texture>" : image->name.c_str();
vk::DebugUtilsLabelEXT debugLabel = {
@@ -235,7 +393,7 @@ AssetLoader::LoadImageToGpu(StagingBuffer *stagingBuffer, tinygltf::Image *image
{
.aspectMask = vk::ImageAspectFlagBits::eColor,
.baseMipLevel = 0,
.levelCount = texture.GetMipLevels(),
.levelCount = 1,
.baseArrayLayer = 0,
.layerCount = 1,
},
@@ -247,15 +405,13 @@ AssetLoader::LoadImageToGpu(StagingBuffer *stagingBuffer, tinygltf::Image *image
.pImageMemoryBarriers = &imageStartBarrier,
};

vk::ImageMemoryBarrier2 nextMipBarrier = {
.srcStageMask = vk::PipelineStageFlagBits2::eTransfer,
.srcAccessMask = vk::AccessFlagBits2::eTransferWrite,
.dstStageMask = vk::PipelineStageFlagBits2::eTransfer,
.dstAccessMask = vk::AccessFlagBits2::eTransferRead,
vk::ImageMemoryBarrier2 postStagingBarrier = {
.srcStageMask = vk::PipelineStageFlagBits2::eAllTransfer,
.dstStageMask = vk::PipelineStageFlagBits2::eAllTransfer,
.oldLayout = vk::ImageLayout::eTransferDstOptimal,
.newLayout = vk::ImageLayout::eTransferSrcOptimal,
.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.srcQueueFamilyIndex = m_TransferQueueIndex,
.dstQueueFamilyIndex = m_GraphicsQueueIndex,
.image = texture.m_Image,
.subresourceRange =
{
@@ -266,33 +422,11 @@ AssetLoader::LoadImageToGpu(StagingBuffer *stagingBuffer, tinygltf::Image *image
.layerCount = 1,
},
};
vk::DependencyInfo interMipDependency = {
.imageMemoryBarrierCount = 1,
.pImageMemoryBarriers = &nextMipBarrier,
};
;

vk::ImageMemoryBarrier2 imageReadyBarrier = {
.srcStageMask = vk::PipelineStageFlagBits2::eTransfer,
.srcAccessMask = vk::AccessFlagBits2::eTransferRead,
.dstStageMask = vk::PipelineStageFlagBits2::eFragmentShader,
.dstAccessMask = vk::AccessFlagBits2::eShaderRead,
.oldLayout = vk::ImageLayout::eTransferSrcOptimal,
.newLayout = vk::ImageLayout::eShaderReadOnlyOptimal,
.srcQueueFamilyIndex = m_TransferQueueIndex,
.dstQueueFamilyIndex = m_GraphicsQueueIndex,
.image = texture.m_Image,
.subresourceRange =
{
.aspectMask = vk::ImageAspectFlagBits::eColor,
.baseMipLevel = 0,
.levelCount = texture.GetMipLevels(),
.baseArrayLayer = 0,
.layerCount = 1,
},
};
vk::DependencyInfo imageReadyDependency = {
vk::DependencyInfo postStagingDependency = {
.imageMemoryBarrierCount = 1,
.pImageMemoryBarriers = &imageReadyBarrier,
.pImageMemoryBarriers = &postStagingBarrier,
};

vk::BufferImageCopy2 imageCopy = {
@@ -317,74 +451,16 @@ AssetLoader::LoadImageToGpu(StagingBuffer *stagingBuffer, tinygltf::Image *image
.pRegions = &imageCopy,
};

vk::ImageBlit2 blitRegion = {
.srcSubresource =
{
.aspectMask = vk::ImageAspectFlagBits::eColor,
.baseArrayLayer = 0,
.layerCount = 1,
},
.dstSubresource =
{
.aspectMask = vk::ImageAspectFlagBits::eColor,
.baseArrayLayer = 0,
.layerCount = 1,
},
};

vk::BlitImageInfo2 mipBlitInfo = {
.srcImage = texture.m_Image,
.srcImageLayout = vk::ImageLayout::eTransferSrcOptimal,
.dstImage = texture.m_Image,
.dstImageLayout = vk::ImageLayout::eTransferDstOptimal,
.regionCount = 1,
.pRegions = &blitRegion,
.filter = vk::Filter::eLinear,
};

#pragma endregion

m_CommandBuffer.pipelineBarrier2(&imageStartDependency);
m_CommandBuffer.copyBufferToImage2(&stagingCopyInfo);
m_CommandBuffer.pipelineBarrier2(&interMipDependency);
m_CommandBuffer.pipelineBarrier2(&postStagingDependency);

auto calcNextMip = [](i32 prev) { return eastl::max(prev / 2, 1); };
GenerateMipMaps(m_CommandBuffer, &texture, vk::ImageLayout::eTransferSrcOptimal,
vk::ImageLayout::eShaderReadOnlyOptimal);

// Mip Mapping

i32 prevMipWidth = Cast<i32>(texture.m_Extent.width);
i32 prevMipHeight = Cast<i32>(texture.m_Extent.height);

u32 maxPrevMip = texture.GetMipLevels() - 1;
for (u32 prevMipLevel = 0; prevMipLevel < maxPrevMip; ++prevMipLevel)
{
i32 currentMipWidth = calcNextMip(prevMipWidth);
i32 currentMipHeight = calcNextMip(prevMipHeight);
u32 currentMipLevel = prevMipLevel + 1;

blitRegion.srcSubresource.mipLevel = prevMipLevel;
blitRegion.srcOffsets = std::array{
vk::Offset3D{0, 0, 0},
vk::Offset3D{prevMipWidth, prevMipHeight, 1},
};
blitRegion.dstSubresource.mipLevel = currentMipLevel;
blitRegion.dstOffsets = std::array{
vk::Offset3D{0, 0, 0},
vk::Offset3D{currentMipWidth, currentMipHeight, 1},
};

nextMipBarrier.subresourceRange.baseMipLevel = currentMipLevel;

m_CommandBuffer.blitImage2(&mipBlitInfo);
m_CommandBuffer.pipelineBarrier2(&interMipDependency);

prevMipHeight = currentMipHeight;
prevMipWidth = currentMipWidth;
}

m_CommandBuffer.pipelineBarrier2(&imageReadyDependency);

#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
m_CommandBuffer.endDebugUtilsLabelEXT();
#endif

@@ -425,7 +501,7 @@ AssetLoader::LoadModelToGpu(cstr path, cstr name)

AbortIfFailed(m_CommandBuffer.begin(&OneTimeCmdBeginInfo));

#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
StackString<128> loadActionName = "Load: ";
loadActionName += name ? name : path;
vk::DebugUtilsLabelEXT debugLabel = {
@@ -808,7 +884,7 @@ AssetLoader::LoadModelToGpu(cstr path, cstr name)

#pragma endregion

#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
m_CommandBuffer.endDebugUtilsLabelEXT();
#endif
AbortIfFailed(m_CommandBuffer.end());
@@ -117,4 +117,7 @@ struct AssetLoader
AssetLoader &operator=(AssetLoader &&other) noexcept;

DISALLOW_COPY_AND_ASSIGN(AssetLoader);
};
};

void GenerateMipMaps(vk::CommandBuffer commandBuffer, Texture *texture, vk::ImageLayout initialLayout,
vk::ImageLayout finalLayout);
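GenerateMipMaps assumes the texture already owns a full mip chain (GetMipLevels() is not shown in this diff) and was created with both transfer-src and transfer-dst usage, since each level is produced by blitting from the previous one. For reference, a full chain for a 2D image is conventionally sized like this — a hedged sketch, not project code:

    #include <algorithm>
    #include <cmath>
    #include <cstdint>

    // Number of mip levels needed to reduce a width x height image down to 1x1.
    uint32_t FullMipLevels(uint32_t width, uint32_t height)
    {
        return static_cast<uint32_t>(std::floor(std::log2(std::max(width, height)))) + 1;
    }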
@@ -30,7 +30,7 @@ CreateCubeFromHdrEnv(AssetLoader *assetLoader, vk::Queue computeQueue, const u32
diffuseIrradianceName += name ? name : "";

StorageTextureCube skybox;
skybox.Init(pDevice, cubeSide, vk::Format::eR16G16B16A16Sfloat, true, false, skyboxName.c_str());
skybox.Init(pDevice, cubeSide, vk::Format::eR16G16B16A16Sfloat, true, true, skyboxName.c_str());
TextureHandle skyboxHandle = resMan->CommitTexture(&skybox);
StorageTextureHandle skyboxStorageHandle = resMan->CommitStorageTexture(&skybox);

@@ -68,24 +68,6 @@ CreateCubeFromHdrEnv(AssetLoader *assetLoader, vk::Queue computeQueue, const u32
.pImageMemoryBarriers = readyToWriteBarriers.data(),
};

vk::ImageMemoryBarrier2 skyboxWriteToReadBarrier = {
.srcStageMask = vk::PipelineStageFlagBits2::eComputeShader,
.srcAccessMask = vk::AccessFlagBits2::eShaderStorageWrite,
.dstStageMask = vk::PipelineStageFlagBits2::eComputeShader,
.dstAccessMask = vk::AccessFlagBits2::eShaderStorageRead,
.oldLayout = vk::ImageLayout::eGeneral,
.newLayout = vk::ImageLayout::eGeneral,
.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
.image = skybox.m_Image,
.subresourceRange = cubeSubresRange,
};

vk::DependencyInfo skyboxWriteToReadDependency = {
.imageMemoryBarrierCount = 1,
.pImageMemoryBarriers = &skyboxWriteToReadBarrier,
};

vk::ImageMemoryBarrier2 skyboxToSampleBarrier = {
.srcStageMask = vk::PipelineStageFlagBits2::eComputeShader,
.srcAccessMask = vk::AccessFlagBits2::eShaderStorageWrite | vk::AccessFlagBits2::eShaderStorageRead,
@@ -191,7 +173,7 @@ CreateCubeFromHdrEnv(AssetLoader *assetLoader, vk::Queue computeQueue, const u32
constexpr vk::CommandBufferBeginInfo beginInfo = {.flags = vk::CommandBufferUsageFlagBits::eOneTimeSubmit};
AbortIfFailed(cmd.begin(&beginInfo));

#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
StackString<128> labelName = "Eqrect -> Cubemap: ";
labelName += name ? name : "<unknown env>";
vk::DebugUtilsLabelEXT label = {
@@ -209,7 +191,8 @@ CreateCubeFromHdrEnv(AssetLoader *assetLoader, vk::Queue computeQueue, const u32
&skyboxPushConstant);
cmd.dispatch(cubeSide / 16, cubeSide / 16, 6);

cmd.pipelineBarrier2(&skyboxWriteToReadDependency);
// Internal Barriers will ensure waiting for the next command.
GenerateMipMaps(cmd, &skybox, vk::ImageLayout::eGeneral, vk::ImageLayout::eGeneral);

cmd.pushConstants(pipelineLayout, vk::ShaderStageFlagBits::eCompute, 0, sizeof skyboxPushConstant,
&diffuseIrradiancePushConstants);
@@ -218,7 +201,7 @@ CreateCubeFromHdrEnv(AssetLoader *assetLoader, vk::Queue computeQueue, const u32

cmd.pipelineBarrier2(&skyboxToSampleDependency);

#if !defined(NDEBUG)
#if !defined(ASTER_NDEBUG)
cmd.endDebugUtilsLabelEXT();
#endif
|
|
@ -180,7 +180,7 @@ main(int, char **)
|
|||
assetLoader.LoadHdrImage(&environment, BACKDROP_FILE);
|
||||
auto envHandle = resourceManager.CommitTexture(&environment);
|
||||
|
||||
auto [texCube, diffuseIrr] = CreateCubeFromHdrEnv(&assetLoader, graphicsQueue, 1024, envHandle, "Cube Env");
|
||||
auto [texCube, diffuseIrr] = CreateCubeFromHdrEnv(&assetLoader, graphicsQueue, 512, envHandle, "Cube Env");
|
||||
|
||||
resourceManager.Release(envHandle);
|
||||
|
||||
|
|
|
|||