Merge pull request 'add-image-output: complete the image-sequence output and image-sequence-to-video pipeline' (#1) from add-image-output into main

Reviewed-on: #1
pull/2/head
inksoul 2024-03-27 17:31:09 +08:00
commit f380b7f6bb
14 changed files with 479 additions and 43 deletions

.gitignore
View File

@ -41,4 +41,7 @@ vulkan_asset_pack_gltf.zip
*.zip
# vscode build file
build/
build/
# output ppm image sequence
*.ppm

View File

@ -3,7 +3,6 @@ cmake_policy(VERSION 3.5)
set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/cmake")
set(NAME PlumageRender)
project(${NAME})
include_directories(external)
@ -49,6 +48,7 @@ ELSEIF(LINUX)
MESSAGE("Using bundled Vulkan library version")
ENDIF()
ENDIF()
find_package(Threads REQUIRED)
IF(USE_D2D_WSI)
MESSAGE("Using direct to display extension...")
@ -82,10 +82,11 @@ ELSEIF(LINUX)
include_directories(${CMAKE_BINARY_DIR})
ELSEIF(USE_HEADLESS)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DVK_USE_PLATFORM_HEADLESS_EXT")
ELSE(USE_D2D_WSI)
find_package(XCB REQUIRED)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DVK_USE_PLATFORM_XCB_KHR")
#ELSE(USE_D2D_WSI)
# find_package(XCB REQUIRED)
# set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DVK_USE_PLATFORM_XCB_KHR")
ENDIF(USE_D2D_WSI)
ELSEIF(APPLE)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -DVK_USE_PLATFORM_MACOS_MVK -DVK_EXAMPLE_XCODE_GENERATED")
# Todo : android?
@ -107,7 +108,7 @@ endif()
add_definitions(-D_CRT_SECURE_NO_WARNINGS)
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
file(GLOB SOURCE *.cpp )
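The jump from C++11 to C++17 lines up with the `<filesystem>` calls introduced later in this PR (`std::filesystem::create_directories` in PlumageRender.cpp). A minimal stand-alone sketch of that dependency, with a made-up path:

```cpp
// Requires the C++17 standard selected above; <filesystem> is not available under CMAKE_CXX_STANDARD 11.
#include <filesystem>
#include <iostream>

int main()
{
    const std::filesystem::path dir = "output/imageSequence/device0"; // illustrative path only
    std::filesystem::create_directories(dir);                         // no-op if the directory already exists
    std::cout << "exists: " << std::boolalpha << std::filesystem::exists(dir) << std::endl;
    return 0;
}
```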

View File

@ -42,8 +42,45 @@
"cmakeCommandArgs": "",
"buildCommandArgs": "",
"ctestCommandArgs": "",
"inheritEnvironments": [ "msvc_x64_x64" ],
"variables": []
"inheritEnvironments": [ "msvc_x64_x64" ]
},
{
"name": "Linux-Clang-Debug",
"generator": "Ninja",
"configurationType": "Debug",
"cmakeExecutable": "cmake",
"remoteCopySourcesExclusionList": [ ".vs", ".git", "out" ],
"cmakeCommandArgs": "",
"buildCommandArgs": "",
"ctestCommandArgs": "",
"inheritEnvironments": [ "linux_clang_x64" ],
"remoteMachineName": "${defaultRemoteMachineName}",
"remoteCMakeListsRoot": "$HOME/.vs/${projectDirName}/${workspaceHash}/src",
"remoteBuildRoot": "$HOME/.vs/${projectDirName}/${workspaceHash}/out/build/${name}",
"remoteInstallRoot": "$HOME/.vs/${projectDirName}/${workspaceHash}/out/install/${name}",
"remoteCopySources": true,
"rsyncCommandArgs": "-t --delete",
"remoteCopyBuildOutput": false,
"remoteCopySourcesMethod": "rsync"
},
{
"name": "Linux-GCC-Debug",
"generator": "Ninja",
"configurationType": "Debug",
"cmakeExecutable": "cmake",
"remoteCopySourcesExclusionList": [ ".vs", ".git", "out" ],
"cmakeCommandArgs": "",
"buildCommandArgs": "",
"ctestCommandArgs": "",
"inheritEnvironments": [ "linux_x64" ],
"remoteMachineName": "${defaultRemoteMachineName}",
"remoteCMakeListsRoot": "$HOME/.vs/${projectDirName}/${workspaceHash}/src",
"remoteBuildRoot": "$HOME/.vs/${projectDirName}/${workspaceHash}/out/build/${name}",
"remoteInstallRoot": "$HOME/.vs/${projectDirName}/${workspaceHash}/out/install/${name}",
"remoteCopySources": true,
"rsyncCommandArgs": "-t --delete",
"remoteCopyBuildOutput": false,
"remoteCopySourcesMethod": "rsync"
}
]
}

View File

@ -10,6 +10,16 @@
3. Support PBR material rendering for models
4. Use IBL-based environment lighting
#### Branch add-image-output ToDo list
1. Add PPM-format image sequence output (a minimal format sketch follows this list)
2. Single-frame output for previewing the result
3. Hook ffmpeg in to convert the image sequence into a video
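For context on item 1: a PPM (P6) frame is a short text header followed by raw RGB bytes, which makes it trivial to write from a mapped Vulkan image and easy for ffmpeg to consume. A minimal sketch of one such frame, independent of the renderer's own `writeImageToFile` further down in this PR:

```cpp
// Write a single solid-gray PPM (P6) frame: "P6\n<width> <height>\n255\n" followed by raw RGB triples.
#include <cstdint>
#include <fstream>
#include <string>
#include <vector>

void writeDummyPpmFrame(const std::string& path, uint32_t width, uint32_t height)
{
    std::ofstream file(path, std::ios::out | std::ios::binary);
    file << "P6\n" << width << " " << height << "\n255\n";
    const std::vector<uint8_t> row(width * 3, 128); // one gray scanline, 3 bytes per pixel
    for (uint32_t y = 0; y < height; y++)
    {
        file.write(reinterpret_cast<const char*>(row.data()), static_cast<std::streamsize>(row.size()));
    }
}
```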
#### ToDo list
1. Parameterized template selection: maintain a template array and have the front end send an array index
#### Showcase
The showcase uses a recorded GIF converted to animated WebP, which introduces **quality loss**; it is for reference only

View File

@ -8,6 +8,7 @@
#include "VulkanExampleBase.h"
std::vector<const char*> VulkanExampleBase::args;
VKAPI_ATTR VkBool32 VKAPI_CALL debugMessageCallback(VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objType, uint64_t srcObject, size_t location, int32_t msgCode, const char * pLayerPrefix, const char * pMsg, void * pUserData)
@ -48,28 +49,39 @@ VkResult VulkanExampleBase::createInstance(bool enableValidation)
appInfo.pEngineName = name.c_str();
appInfo.apiVersion = VK_API_VERSION_1_0;
std::vector<const char*> instanceExtensions = { VK_KHR_SURFACE_EXTENSION_NAME };
std::vector<const char*> instanceExtensions = { };
// Enable surface extensions depending on os
if (settings.headless)
{
instanceExtensions.push_back("VK_EXT_headless_surface");
}
else
{
instanceExtensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
// Enable surface extensions depending on os
#if defined(_WIN32)
instanceExtensions.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
instanceExtensions.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
#elif defined(VK_USE_PLATFORM_ANDROID_KHR)
instanceExtensions.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
instanceExtensions.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
#elif defined(_DIRECT2DISPLAY)
instanceExtensions.push_back(VK_KHR_DISPLAY_EXTENSION_NAME);
instanceExtensions.push_back(VK_KHR_DISPLAY_EXTENSION_NAME);
#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
instanceExtensions.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
instanceExtensions.push_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
#elif defined(VK_USE_PLATFORM_XCB_KHR)
instanceExtensions.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
instanceExtensions.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
#elif defined(VK_USE_PLATFORM_MACOS_MVK)
instanceExtensions.push_back(VK_MVK_MACOS_SURFACE_EXTENSION_NAME);
instanceExtensions.push_back(VK_MVK_MACOS_SURFACE_EXTENSION_NAME);
#endif
#if defined(VK_USE_PLATFORM_MACOS_MVK) && (VK_HEADER_VERSION >= 216)
instanceExtensions.push_back(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME);
instanceExtensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
instanceExtensions.push_back(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME);
instanceExtensions.push_back(VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME);
#endif
}
VkInstanceCreateInfo instanceCreateInfo = {};
instanceCreateInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
instanceCreateInfo.pNext = NULL;
@ -94,6 +106,8 @@ VkResult VulkanExampleBase::createInstance(bool enableValidation)
instanceCreateInfo.ppEnabledLayerNames = validationLayerNames.data();
}
return vkCreateInstance(&instanceCreateInfo, nullptr, &instance);
}
void VulkanExampleBase::prepare()
{
@ -304,6 +318,7 @@ void VulkanExampleBase::renderFrame()
auto tStart = std::chrono::high_resolution_clock::now();
render();
frameCounter++;
auto tEnd = std::chrono::high_resolution_clock::now();
auto tDiff = std::chrono::duration<double, std::milli>(tEnd - tStart).count();
@ -625,13 +640,13 @@ void VulkanExampleBase::initVulkan()
for (size_t i = 0; i < args.size(); i++) {
if ((args[i] == std::string("-g")) || (args[i] == std::string("--gpu"))) {
char* endptr;
uint32_t index = strtol(args[i + 1], &endptr, 10);
selectedPhysicalDeviceIndex = strtol(args[i + 1], &endptr, 10);
if (endptr != args[i + 1]) {
if (index > gpuCount - 1) {
std::cerr << "Selected device index " << index << " is out of range, reverting to device 0 (use -listgpus to show available Vulkan devices)" << std::endl;
if (selectedPhysicalDeviceIndex > gpuCount - 1) {
std::cerr << "Selected device index " << selectedPhysicalDeviceIndex << " is out of range, reverting to device 0 (use -listgpus to show available Vulkan devices)" << std::endl;
} else {
std::cout << "Selected Vulkan device " << index << std::endl;
selectedDevice = index;
std::cout << "Selected Vulkan device " << selectedPhysicalDeviceIndex << std::endl;
selectedDevice = selectedPhysicalDeviceIndex;
}
};
break;

View File

@ -102,6 +102,7 @@ protected:
void windowResize();
public:
static std::vector<const char*> args;
uint32_t selectedPhysicalDeviceIndex = 0;
bool prepared = false;
uint32_t width = 1280;
uint32_t height = 720;
@ -112,11 +113,21 @@ public:
uint32_t lastFPS = 0;
struct Settings {
bool validation = false;
bool fullscreen = false;
bool vsync = false;
bool multiSampling = true;
VkSampleCountFlagBits sampleCount = VK_SAMPLE_COUNT_4_BIT;
bool validation = false; // validation layer switch
bool fullscreen = false; // fullscreen switch
bool vsync = false; // vertical sync switch
bool multiSampling = true; // multisampling switch
bool rotateModel = true; // model auto-rotation (currently not working)
bool headless = false; // headless switch
bool enableSaveToImageSequeue = false; // image sequence switch (temporarily deprecated)
uint32_t outputFrameCount = 10; // last frame of the image sequence
bool takeScreenShot = false; // screenshot (temporarily deprecated)
uint32_t startFrameCount = 1; // first frame of the image sequence
uint32_t videoFrameRate = 25;
VkSampleCountFlagBits sampleCount = VK_SAMPLE_COUNT_4_BIT; // multisample count
} settings;
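A hypothetical sketch of how these new switches could be driven for an offline capture run; the settings fields are the ones added above, while the helper function and the concrete values are only assumptions:

```cpp
#include "VulkanExampleBase.h"

// Illustrative only: configure a headless run that writes frames 1..120 and encodes them at 25 fps.
void configureOfflineCapture(VulkanExampleBase& app)
{
    app.settings.headless = true;                 // take the VK_EXT_headless_surface path instead of a window surface
    app.settings.enableSaveToImageSequeue = true; // marked above as temporarily deprecated, shown for completeness
    app.settings.startFrameCount = 1;             // first frame written to disk
    app.settings.outputFrameCount = 120;          // stop writing after this frame
    app.settings.videoFrameRate = 25;             // frame rate handed to the ffmpeg script
}
```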
struct DepthStencil {

Binary file not shown.

View File

@ -0,0 +1,2 @@
ffmpeg -y -r %1 -i %2 -b:v 1440k -vcodec libx264 -crf 18 %3

View File

@ -0,0 +1 @@
ffmpeg -y -r $1 -i $2 -b:v 1440k -vcodec libx264 -crf 18 $3
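Both scripts take the same three positional arguments: the frame rate (`-r`), the input image pattern (`-i`), and the output file; `-y` overwrites an existing output, `-b:v 1440k` sets a target video bitrate, and `-crf 18` requests a high-quality H.264 encode via `libx264`. With the defaults added elsewhere in this PR, the renderer ends up invoking something along the lines of `image2video.sh 25 .../output/imageSequence/device0/%dresult.ppm .../output/video/device0/result.mp4`, where ffmpeg expands the `%d` pattern to `1result.ppm`, `2result.ppm`, and so on.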

View File

@ -34,7 +34,7 @@ function(buildHomework HOMEWORK_NAME)
"render/glTFModel.h"
"render/glTFModel.cpp"
)
"render/renderFoundation.h" "render/renderFoundation.cpp")
target_link_libraries(${HOMEWORK_NAME} base ${Vulkan_LIBRARY} ${WINLIBS})
else(WIN32)
add_executable(${HOMEWORK_NAME} ${MAIN_CPP} ${SOURCE} ${MAIN_HEADER} ${SHADERS_GLSL} ${SHADERS_HLSL} ${README_FILES})

View File

@ -130,6 +130,8 @@ PlumageRender::PlumageRender()
renderPassBeginInfo.clearValueCount = settings.multiSampling ? 3 : 2;
renderPassBeginInfo.pClearValues = clearValues;
for (uint32_t i = 0; i < commandBuffers.size(); ++i) {
renderPassBeginInfo.framebuffer = frameBuffers[i];
@ -185,6 +187,7 @@ PlumageRender::PlumageRender()
vkCmdEndRenderPass(currentCB);
VK_CHECK_RESULT(vkEndCommandBuffer(currentCB));
}
}
@ -300,7 +303,7 @@ PlumageRender::PlumageRender()
uint32_t materialCount = 0;
uint32_t meshCount = 0;
// Environment samplers (radiance, irradiance, brdf lut)
// Environment samplers (radiance, irradiance, brdflut)
imageSamplerCount += 3;
std::vector<glTFModel::Model*> modellist = { &models.skybox, &models.scene };
@ -657,6 +660,9 @@ PlumageRender::PlumageRender()
}
//Create Tone Mapping render pipeline
//CreateToneMappingPipeline();
}
// generate two cube maps
@ -1572,6 +1578,309 @@ PlumageRender::PlumageRender()
prepared = true;
}
void PlumageRender::submitWork(VkCommandBuffer cmdBuffer, VkQueue queue)
{
VkSubmitInfo submitInfo = vks::initializers::submitInfo();
submitInfo.commandBufferCount = 1;
submitInfo.pCommandBuffers = &cmdBuffer;
VkFenceCreateInfo fenceInfo = vks::initializers::fenceCreateInfo();
VkFence fence;
VK_CHECK_RESULT(vkCreateFence(device, &fenceInfo, nullptr, &fence));
VK_CHECK_RESULT(vkQueueSubmit(queue, 1, &submitInfo, fence));
VK_CHECK_RESULT(vkWaitForFences(device, 1, &fence, VK_TRUE, UINT64_MAX));
vkDestroyFence(device, fence, nullptr);
}
// todo: derive the sub-folder path from physicalDeviceIndex and the fileName from frameIndex
void PlumageRender::writeImageToFile(std::string filePath)
{
bool screenshotSaved = false;
bool supportsBlit = true;
// Check blit support for source and destination
VkFormatProperties formatProps;
// Check if the device supports blitting from optimal images (the swapchain images are in optimal format)
vkGetPhysicalDeviceFormatProperties(physicalDevice, swapChain.colorFormat, &formatProps);
if (!(formatProps.optimalTilingFeatures & VK_FORMAT_FEATURE_BLIT_SRC_BIT)) {
std::cerr << "Device does not support blitting from optimal tiled images, using copy instead of blit!" << std::endl;
supportsBlit = false;
}
// Check if the device supports blitting to linear images
vkGetPhysicalDeviceFormatProperties(physicalDevice, VK_FORMAT_R8G8B8A8_UNORM, &formatProps);
if (!(formatProps.linearTilingFeatures & VK_FORMAT_FEATURE_BLIT_DST_BIT)) {
std::cerr << "Device does not support blitting to linear tiled images, using copy instead of blit!" << std::endl;
supportsBlit = false;
}
// Source for the copy is the last rendered swapchain image
VkImage srcImage = swapChain.images[currentBuffer];
// Create the linear tiled destination image to copy to and to read the memory from
VkImageCreateInfo imageCreateCI(vks::initializers::imageCreateInfo());
imageCreateCI.imageType = VK_IMAGE_TYPE_2D;
// Note that vkCmdBlitImage (if supported) will also do format conversions if the swapchain color format would differ
imageCreateCI.format = VK_FORMAT_R8G8B8A8_UNORM;
imageCreateCI.extent.width = width;
imageCreateCI.extent.height = height;
imageCreateCI.extent.depth = 1;
imageCreateCI.arrayLayers = 1;
imageCreateCI.mipLevels = 1;
imageCreateCI.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
imageCreateCI.samples = VK_SAMPLE_COUNT_1_BIT;
imageCreateCI.tiling = VK_IMAGE_TILING_LINEAR;
imageCreateCI.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT;
// Create the image
VkImage dstImage;
VK_CHECK_RESULT(vkCreateImage(device, &imageCreateCI, nullptr, &dstImage));
// Create memory to back up the image
VkMemoryRequirements memRequirements;
VkMemoryAllocateInfo memAllocInfo(vks::initializers::memoryAllocateInfo());
VkDeviceMemory dstImageMemory;
vkGetImageMemoryRequirements(device, dstImage, &memRequirements);
memAllocInfo.allocationSize = memRequirements.size;
// Memory must be host visible to copy from
memAllocInfo.memoryTypeIndex = vulkanDevice->getMemoryType(memRequirements.memoryTypeBits, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
VK_CHECK_RESULT(vkAllocateMemory(device, &memAllocInfo, nullptr, &dstImageMemory));
VK_CHECK_RESULT(vkBindImageMemory(device, dstImage, dstImageMemory, 0));
// Do the actual blit from the swapchain image to our host visible destination image
VkCommandBuffer copyCmd = vulkanDevice->createCommandBuffer(VK_COMMAND_BUFFER_LEVEL_PRIMARY, true);
// Transition destination image to transfer destination layout
vks::tools::insertImageMemoryBarrier(
copyCmd,
dstImage,
0,
VK_ACCESS_TRANSFER_WRITE_BIT,
VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_TRANSFER_BIT,
VkImageSubresourceRange{ VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 });
// Transition swapchain image from present to transfer source layout
vks::tools::insertImageMemoryBarrier(
copyCmd,
srcImage,
VK_ACCESS_MEMORY_READ_BIT,
VK_ACCESS_TRANSFER_READ_BIT,
VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_TRANSFER_BIT,
VkImageSubresourceRange{ VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 });
// If source and destination support blit we'll blit as this also does automatic format conversion (e.g. from BGR to RGB)
if (supportsBlit)
{
// Define the region to blit (we will blit the whole swapchain image)
VkOffset3D blitSize;
blitSize.x = width;
blitSize.y = height;
blitSize.z = 1;
VkImageBlit imageBlitRegion{};
imageBlitRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
imageBlitRegion.srcSubresource.layerCount = 1;
imageBlitRegion.srcOffsets[1] = blitSize;
imageBlitRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
imageBlitRegion.dstSubresource.layerCount = 1;
imageBlitRegion.dstOffsets[1] = blitSize;
// Issue the blit command
vkCmdBlitImage(
copyCmd,
srcImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
dstImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1,
&imageBlitRegion,
VK_FILTER_NEAREST);
}
else
{
// Otherwise use image copy (requires us to manually flip components)
VkImageCopy imageCopyRegion{};
imageCopyRegion.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
imageCopyRegion.srcSubresource.layerCount = 1;
imageCopyRegion.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
imageCopyRegion.dstSubresource.layerCount = 1;
imageCopyRegion.extent.width = width;
imageCopyRegion.extent.height = height;
imageCopyRegion.extent.depth = 1;
// Issue the copy command
vkCmdCopyImage(
copyCmd,
srcImage, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
dstImage, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
1,
&imageCopyRegion);
}
// Transition destination image to general layout, which is the required layout for mapping the image memory later on
vks::tools::insertImageMemoryBarrier(
copyCmd,
dstImage,
VK_ACCESS_TRANSFER_WRITE_BIT,
VK_ACCESS_MEMORY_READ_BIT,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
VK_IMAGE_LAYOUT_GENERAL,
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_TRANSFER_BIT,
VkImageSubresourceRange{ VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 });
// Transition back the swap chain image after the blit is done
vks::tools::insertImageMemoryBarrier(
copyCmd,
srcImage,
VK_ACCESS_TRANSFER_READ_BIT,
VK_ACCESS_MEMORY_READ_BIT,
VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
VK_PIPELINE_STAGE_TRANSFER_BIT,
VK_PIPELINE_STAGE_TRANSFER_BIT,
VkImageSubresourceRange{ VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1 });
vulkanDevice->flushCommandBuffer(copyCmd, queue);
// Get layout of the image (including row pitch)
VkImageSubresource subResource{ VK_IMAGE_ASPECT_COLOR_BIT, 0, 0 };
VkSubresourceLayout subResourceLayout;
vkGetImageSubresourceLayout(device, dstImage, &subResource, &subResourceLayout);
// Map image memory so we can start copying from it
const char* data;
vkMapMemory(device, dstImageMemory, 0, VK_WHOLE_SIZE, 0, (void**)&data);
data += subResourceLayout.offset;
std::ofstream file(filePath, std::ios::out | std::ios::binary);
// ppm header
file << "P6\n" << width << "\n" << height << "\n" << 255 << "\n";
// If source is BGR (destination is always RGB) and we can't use blit (which does automatic conversion), we'll have to manually swizzle color components
bool colorSwizzle = false;
// Check if source is BGR
// Note: Not complete, only contains most common and basic BGR surface formats for demonstration purposes
if (!supportsBlit)
{
std::vector<VkFormat> formatsBGR = { VK_FORMAT_B8G8R8A8_SRGB, VK_FORMAT_B8G8R8A8_UNORM, VK_FORMAT_B8G8R8A8_SNORM };
colorSwizzle = (std::find(formatsBGR.begin(), formatsBGR.end(), swapChain.colorFormat) != formatsBGR.end());
}
// ppm binary pixel data
for (uint32_t y = 0; y < height; y++)
{
unsigned int* row = (unsigned int*)data;
for (uint32_t x = 0; x < width; x++)
{
if (colorSwizzle)
{
file.write((char*)row + 2, 1);
file.write((char*)row + 1, 1);
file.write((char*)row, 1);
}
else
{
file.write((char*)row, 3);
}
row++;
}
data += subResourceLayout.rowPitch;
}
file.close();
std::cout << "Screenshot saved to " << filePath << std::endl;
// Clean up resources
vkUnmapMemory(device, dstImageMemory);
vkFreeMemory(device, dstImageMemory, nullptr);
vkDestroyImage(device, dstImage, nullptr);
screenshotSaved = true;
}
void PlumageRender::outputImageSequence()
{
std::string deviceFilePath = filePath.imageOutputPath + "/device" + std::to_string(selectedPhysicalDeviceIndex);
if (savedFrameCounter > settings.outputFrameCount)
{
if (signal.imageSequenceOutputComplete) // avoid flipping this to true more than once
{
return;
}
signal.imageSequenceOutputComplete = true;
std::string fileName = "/%dresult.ppm";
filePath.totalImageOutputPath = deviceFilePath + fileName;
return;
}
if (!std::filesystem::exists(deviceFilePath)) // portable existence check instead of the Windows-only _access()
{
std::filesystem::create_directories(deviceFilePath);
}
std::string fileName = "/" + std::to_string(savedFrameCounter) + "result.ppm";
filePath.totalImageOutputPath = deviceFilePath + fileName;
//std::cout << outputPath << std::endl;
writeImageToFile(filePath.totalImageOutputPath.c_str());
savedFrameCounter++;
}
void PlumageRender::imageSequenceToVideo()
{
if (!signal.imageSequenceOutputComplete)
{
return;
}
if (signal.imageSequenceToVideoComplete)
{
return;
}
std::string deviceFilePath = filePath.videoOutputPath + "/device" + std::to_string(selectedPhysicalDeviceIndex);
if (!std::filesystem::exists(deviceFilePath))
{
std::filesystem::create_directories(deviceFilePath);
}
std::string resultVideoPath = deviceFilePath + "/result.mp4";
std::string commandLineImageSequencePath = filePath.totalImageOutputPath;
//std::string commandLineCodecAndResultPath = resultVideoPath;
std::string commandLineFrameRate = std::to_string(settings.videoFrameRate);
#if defined(_WIN32)
std::string commandLine = filePath.image2videoBatFilePath + " " + commandLineFrameRate + " " + commandLineImageSequencePath + " " + resultVideoPath;
#else
std::string commandLine = filePath.image2videoShFilePath + " " + commandLineFrameRate + " " + commandLineImageSequencePath + " " + resultVideoPath;
#endif
std::cout << commandLine << std::endl;
std::system(commandLine.c_str());
signal.imageSequenceToVideoComplete = true;
std::cout << "vidoe codec complete,saved in:" << resultVideoPath << std::endl;
}
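With the default settings in this PR (frame rate 25, frames 1 through 10, device index 0), `commandLine` expands to the bat/sh script path followed by `25`, the `%dresult.ppm` pattern under `output/imageSequence/device0`, and `output/video/device0/result.mp4`. `std::system` blocks the render loop while ffmpeg runs, and the two completion flags in `signal` ensure the image dump and the encode each happen exactly once per run.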
uint32_t PlumageRender::getMemoryTypeIndex(uint32_t typeBits, VkMemoryPropertyFlags properties)
{
VkPhysicalDeviceMemoryProperties deviceMemoryProperties;
vkGetPhysicalDeviceMemoryProperties(physicalDevice, &deviceMemoryProperties);
for (uint32_t i = 0; i < deviceMemoryProperties.memoryTypeCount; i++)
{
if ((typeBits & 1) == 1)
{
if ((deviceMemoryProperties.memoryTypes[i].propertyFlags & properties) == properties)
{
return i;
}
}
typeBits >>= 1;
}
return 0;
}
void PlumageRender::render()
{
if (!prepared) {
@ -1579,8 +1888,15 @@ PlumageRender::PlumageRender()
}
updateUIOverlay();
// hook in the write-to-file functions
//swapChainImage = swapChain.images[frameIndex];
//outputImageSequeue(swapChainImage,filePath.imageSequenceFilePath);
VK_CHECK_RESULT(vkWaitForFences(device, 1, &waitFences[frameIndex], VK_TRUE, UINT64_MAX));
outputImageSequence();
imageSequenceToVideo();
VK_CHECK_RESULT(vkResetFences(device, 1, &waitFences[frameIndex]));
VkResult acquire = swapChain.acquireNextImage(presentCompleteSemaphores[frameIndex], &currentBuffer);
@ -1589,6 +1905,8 @@ PlumageRender::PlumageRender()
}
else {
VK_CHECK_RESULT(acquire);
}
// Update UBOs
@ -1610,6 +1928,7 @@ PlumageRender::PlumageRender()
submitInfo.commandBufferCount = 1;
VK_CHECK_RESULT(vkQueueSubmit(queue, 1, &submitInfo, waitFences[frameIndex]));
// present queue
VkResult present = swapChain.queuePresent(queue, currentBuffer, renderCompleteSemaphores[frameIndex]);
if (!((present == VK_SUCCESS) || (present == VK_SUBOPTIMAL_KHR))) {
if (present == VK_ERROR_OUT_OF_DATE_KHR) {
@ -1625,7 +1944,7 @@ PlumageRender::PlumageRender()
frameIndex %= renderAhead;
if (!paused) {
if (rotateModel) {
if (settings.rotateModel) {
modelrot.y += frameTimer * 35.0f;
if (modelrot.y > 360.0f) {
modelrot.y -= 360.0f;
@ -1639,14 +1958,14 @@ PlumageRender::PlumageRender()
models.scene.updateAnimation(animationIndex, animationTimer);
}
updateShaderData();
if (rotateModel) {
if (settings.rotateModel) {
updateUniformBuffers();
}
}
if (camera.updated) {
updateUniformBuffers();
}
}
void PlumageRender::fileDropped(std::string filename)

View File

@ -1,5 +1,16 @@
#pragma once
#if defined(_WIN32)
#include <io.h>
#include <direct.h>
#else
#include <sys/io.h>
#include <dirent.h>
#endif
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
@ -7,11 +18,13 @@
#include <vector>
#include <chrono>
#include <map>
#include <io.h>
#include <locale>
#include <codecvt>
#include "algorithm"
#include <iostream>
#include <sys/stat.h>
#include <filesystem>
#include <vulkan/vulkan.h>
#include "VulkanExampleBase.h"
@ -37,6 +50,13 @@ public:
{
} info ;
struct Signal
{
bool imageSequenceOutputComplete = false;
bool imageSequenceToVideoComplete = false;
}signal;
struct Models
@ -159,11 +179,22 @@ public:
//ttf file path
std::string ttfFilePath = getAssetPath() + "/data/Roboto-Medium.ttf";
// output file path
std::string imageOutputPath = getAssetPath() + "output/imageSequence";
std::string videoOutputPath = getAssetPath() + "output/video";
std::string totalImageOutputPath;
// script file path
std::string image2videoBatFilePath = getAssetPath() + "script/image2video.bat";
std::string image2videoShFilePath = getAssetPath() + "script/image2video.sh";
} filePath;
bool rotateModel = false;
glm::vec3 modelrot = glm::vec3(0.0f);
glm::vec3 modelPos = glm::vec3(0.0f);
enum PBRWorkflows { PBR_WORKFLOW_METALLIC_ROUGHNESS = 0, PBR_WORKFLOW_SPECULAR_GLOSINESS = 1 };
@ -209,13 +240,7 @@ public:
VkDescriptorSet tonemappingDescriptorSet = VK_NULL_HANDLE;
};
struct Settings {
bool validation = false;
bool fullscreen = false;
bool vsync = false;
bool multiSampling = true;
VkSampleCountFlagBits sampleCount = VK_SAMPLE_COUNT_4_BIT;
} settings;
struct DescriptorSetLayouts {
VkDescriptorSetLayout scene;
@ -235,6 +260,8 @@ public:
const uint32_t renderAhead = 2;
uint32_t frameIndex = 0;
//VkImage swapChainImage;
int32_t animationIndex = 0;
float animationTimer = 0.0f;
bool animate = true;
@ -272,6 +299,8 @@ public:
} prefilterPushBlock;
UI* gui;
uint64_t savedFrameCounter = settings.startFrameCount;
PlumageRender();
@ -331,6 +360,13 @@ public:
void updateShaderData();
void windowResized();
void prepare();
void submitWork(VkCommandBuffer cmdBuffer, VkQueue queue);
void writeImageToFile(std::string filePath);
void outputImageSequence();
void imageSequenceToVideo();
//void outputScreenShot();
uint32_t getMemoryTypeIndex(uint32_t typeBits, VkMemoryPropertyFlags properties);
virtual void render();
virtual void updateUIOverlay();
virtual void fileDropped(std::string filename);

View File

View File

@ -0,0 +1 @@
#pragma once