From af47eb6956d30e8a01cd3f4828132e97e28e41a8 Mon Sep 17 00:00:00 2001 From: Stenzek Date: Sun, 11 Aug 2024 20:45:14 +1000 Subject: [PATCH] System: Add video capture feature --- src/core/gpu.cpp | 78 +- src/core/gpu.h | 8 + src/core/hotkeys.cpp | 11 + src/core/imgui_overlays.cpp | 196 +- src/core/settings.cpp | 52 + src/core/settings.h | 26 + src/core/spu.cpp | 17 +- src/core/spu.h | 9 - src/core/system.cpp | 203 +- src/core/system.h | 27 +- src/duckstation-qt/foldersettingswidget.cpp | 11 +- src/duckstation-qt/foldersettingswidget.ui | 87 +- src/duckstation-qt/graphicssettingswidget.cpp | 176 ++ src/duckstation-qt/graphicssettingswidget.h | 7 + src/duckstation-qt/graphicssettingswidget.ui | 238 +++ src/duckstation-qt/mainwindow.cpp | 58 +- src/duckstation-qt/mainwindow.h | 3 + src/duckstation-qt/mainwindow.ui | 19 +- src/duckstation-qt/qthost.cpp | 14 +- src/duckstation-qt/qthost.h | 2 + src/duckstation-regtest/regtest_host.cpp | 10 + src/util/CMakeLists.txt | 2 + src/util/media_capture.cpp | 1679 +++++++++++++++++ src/util/media_capture.h | 75 + src/util/util.props | 2 +- src/util/util.vcxproj | 2 + src/util/util.vcxproj.filters | 2 + 27 files changed, 2791 insertions(+), 223 deletions(-) create mode 100644 src/util/media_capture.cpp create mode 100644 src/util/media_capture.h diff --git a/src/core/gpu.cpp b/src/core/gpu.cpp index 801651884..5a826cd63 100644 --- a/src/core/gpu.cpp +++ b/src/core/gpu.cpp @@ -14,6 +14,7 @@ #include "util/gpu_device.h" #include "util/image.h" #include "util/imgui_manager.h" +#include "util/media_capture.h" #include "util/postprocessing.h" #include "util/shadergen.h" #include "util/state_wrapper.h" @@ -2116,6 +2117,26 @@ bool GPU::RenderDisplay(GPUTexture* target, const GSVector4i display_rect, const return true; } +bool GPU::SendDisplayToMediaCapture(MediaCapture* cap) +{ + GPUTexture* target = cap->GetRenderTexture(); + if (!target) + return false; + + const bool apply_aspect_ratio = + (g_settings.display_screenshot_mode != DisplayScreenshotMode::UncorrectedInternalResolution); + const bool postfx = (g_settings.display_screenshot_mode != DisplayScreenshotMode::InternalResolution); + GSVector4i display_rect, draw_rect; + CalculateDrawRect(target->GetWidth(), target->GetHeight(), !g_settings.debugging.show_vram, apply_aspect_ratio, + &display_rect, &draw_rect); + if (!RenderDisplay(target, display_rect, draw_rect, postfx)) + return false; + + // TODO: Check for frame rate change + + return cap->DeliverVideoFrame(target); +} + void GPU::DestroyDeinterlaceTextures() { for (std::unique_ptr& tex : m_deinterlace_buffers) @@ -2676,21 +2697,20 @@ bool GPU::RenderScreenshotToBuffer(u32 width, u32 height, const GSVector4i displ return true; } -bool GPU::RenderScreenshotToFile(std::string filename, DisplayScreenshotMode mode, u8 quality, bool compress_on_thread, - bool show_osd_message) +void GPU::CalculateScreenshotSize(DisplayScreenshotMode mode, u32* width, u32* height, GSVector4i* display_rect, + GSVector4i* draw_rect) const { - u32 width = g_gpu_device->GetWindowWidth(); - u32 height = g_gpu_device->GetWindowHeight(); - GSVector4i display_rect, draw_rect; - CalculateDrawRect(width, height, true, !g_settings.debugging.show_vram, &display_rect, &draw_rect); + *width = g_gpu_device->GetWindowWidth(); + *height = g_gpu_device->GetWindowHeight(); + CalculateDrawRect(*width, *height, true, !g_settings.debugging.show_vram, display_rect, draw_rect); const bool internal_resolution = (mode != DisplayScreenshotMode::ScreenResolution || g_settings.debugging.show_vram); if 
(internal_resolution && m_display_texture_view_width != 0 && m_display_texture_view_height != 0) { if (mode == DisplayScreenshotMode::InternalResolution) { - const u32 draw_width = static_cast(draw_rect.width()); - const u32 draw_height = static_cast(draw_rect.height()); + const u32 draw_width = static_cast(draw_rect->width()); + const u32 draw_height = static_cast(draw_rect->height()); // If internal res, scale the computed draw rectangle to the internal res. // We re-use the draw rect because it's already been AR corrected. @@ -2701,42 +2721,52 @@ bool GPU::RenderScreenshotToFile(std::string filename, DisplayScreenshotMode mod { // stretch height, preserve width const float scale = static_cast(m_display_texture_view_width) / static_cast(draw_width); - width = m_display_texture_view_width; - height = static_cast(std::round(static_cast(draw_height) * scale)); + *width = m_display_texture_view_width; + *height = static_cast(std::round(static_cast(draw_height) * scale)); } else { // stretch width, preserve height const float scale = static_cast(m_display_texture_view_height) / static_cast(draw_height); - width = static_cast(std::round(static_cast(draw_width) * scale)); - height = m_display_texture_view_height; + *width = static_cast(std::round(static_cast(draw_width) * scale)); + *height = m_display_texture_view_height; } // DX11 won't go past 16K texture size. const u32 max_texture_size = g_gpu_device->GetMaxTextureSize(); - if (width > max_texture_size) + if (*width > max_texture_size) { - height = static_cast(static_cast(height) / - (static_cast(width) / static_cast(max_texture_size))); - width = max_texture_size; + *height = static_cast(static_cast(*height) / + (static_cast(*width) / static_cast(max_texture_size))); + *width = max_texture_size; } - if (height > max_texture_size) + if (*height > max_texture_size) { - height = max_texture_size; - width = static_cast(static_cast(width) / - (static_cast(height) / static_cast(max_texture_size))); + *height = max_texture_size; + *width = static_cast(static_cast(*width) / + (static_cast(*height) / static_cast(max_texture_size))); } } else // if (mode == DisplayScreenshotMode::UncorrectedInternalResolution) { - width = m_display_texture_view_width; - height = m_display_texture_view_height; + *width = m_display_texture_view_width; + *height = m_display_texture_view_height; } // Remove padding, it's not part of the framebuffer. - draw_rect = GSVector4i(0, 0, static_cast(width), static_cast(height)); - display_rect = draw_rect; + *draw_rect = GSVector4i(0, 0, static_cast(*width), static_cast(*height)); + *display_rect = *draw_rect; } +} + +bool GPU::RenderScreenshotToFile(std::string filename, DisplayScreenshotMode mode, u8 quality, bool compress_on_thread, + bool show_osd_message) +{ + u32 width, height; + GSVector4i display_rect, draw_rect; + CalculateScreenshotSize(mode, &width, &height, &display_rect, &draw_rect); + + const bool internal_resolution = (mode != DisplayScreenshotMode::ScreenResolution); if (width == 0 || height == 0) return false; diff --git a/src/core/gpu.h b/src/core/gpu.h index a08ec1b90..12c0d8502 100644 --- a/src/core/gpu.h +++ b/src/core/gpu.h @@ -28,6 +28,7 @@ class StateWrapper; class GPUDevice; class GPUTexture; class GPUPipeline; +class MediaCapture; struct Settings; @@ -210,6 +211,10 @@ public: void CalculateDrawRect(s32 window_width, s32 window_height, bool apply_rotation, bool apply_aspect_ratio, GSVector4i* display_rect, GSVector4i* draw_rect) const; + /// Helper function for computing screenshot bounds. 
+ void CalculateScreenshotSize(DisplayScreenshotMode mode, u32* width, u32* height, GSVector4i* display_rect, + GSVector4i* draw_rect) const; + /// Helper function to save current display texture to PNG. bool WriteDisplayTextureToFile(std::string filename, bool compress_on_thread = false); @@ -225,6 +230,9 @@ public: /// Draws the current display texture, with any post-processing. bool PresentDisplay(); + /// Sends the current frame to media capture. + bool SendDisplayToMediaCapture(MediaCapture* cap); + /// Reads the CLUT from the specified coordinates, accounting for wrap-around. static void ReadCLUT(u16* dest, GPUTexturePaletteReg reg, bool clut_is_8bit); diff --git a/src/core/hotkeys.cpp b/src/core/hotkeys.cpp index f99fd9a60..bfb752c5b 100644 --- a/src/core/hotkeys.cpp +++ b/src/core/hotkeys.cpp @@ -358,6 +358,17 @@ DEFINE_HOTKEY("ResetEmulationSpeed", TRANSLATE_NOOP("Hotkeys", "System"), } }) +DEFINE_HOTKEY("ToggleMediaCapture", TRANSLATE_NOOP("Hotkeys", "System"), + TRANSLATE_NOOP("Hotkeys", "Toggle Media Capture"), [](s32 pressed) { + if (!pressed) + { + if (System::GetMediaCapture()) + System::StopMediaCapture(); + else + System::StartMediaCapture(); + } + }) + DEFINE_HOTKEY("ToggleSoftwareRendering", TRANSLATE_NOOP("Hotkeys", "Graphics"), TRANSLATE_NOOP("Hotkeys", "Toggle Software Rendering"), [](s32 pressed) { if (!pressed && System::IsValid()) diff --git a/src/core/imgui_overlays.cpp b/src/core/imgui_overlays.cpp index 7737f1446..b0dedd3a7 100644 --- a/src/core/imgui_overlays.cpp +++ b/src/core/imgui_overlays.cpp @@ -21,6 +21,7 @@ #include "util/imgui_fullscreen.h" #include "util/imgui_manager.h" #include "util/input_manager.h" +#include "util/media_capture.h" #include "common/align.h" #include "common/error.h" @@ -48,7 +49,9 @@ Log_SetChannel(ImGuiManager); namespace ImGuiManager { static void FormatProcessorStat(SmallStringBase& text, double usage, double time); -static void DrawPerformanceOverlay(); +static void DrawPerformanceOverlay(float& position_y, float scale, float margin, float spacing); +static void DrawMediaCaptureOverlay(float& position_y, float scale, float margin, float spacing); +static void DrawFrameTimeOverlay(float& position_y, float scale, float margin, float spacing); static void DrawEnhancementsOverlay(); static void DrawInputsOverlay(); } // namespace ImGuiManager @@ -191,7 +194,13 @@ void ImGuiManager::RenderTextOverlays() const System::State state = System::GetState(); if (state != System::State::Shutdown) { - DrawPerformanceOverlay(); + const float scale = ImGuiManager::GetGlobalScale(); + const float margin = std::ceil(10.0f * scale); + const float spacing = std::ceil(5.0f * scale); + float position_y = margin; + DrawPerformanceOverlay(position_y, scale, margin, spacing); + DrawFrameTimeOverlay(position_y, scale, margin, spacing); + DrawMediaCaptureOverlay(position_y, scale, margin, spacing); if (g_settings.display_show_enhancements && state != System::State::Paused) DrawEnhancementsOverlay(); @@ -212,7 +221,7 @@ void ImGuiManager::FormatProcessorStat(SmallStringBase& text, double usage, doub text.append_format("{:.1f}% ({:.2f}ms)", usage, time); } -void ImGuiManager::DrawPerformanceOverlay() +void ImGuiManager::DrawPerformanceOverlay(float& position_y, float scale, float margin, float spacing) { if (!(g_settings.display_show_fps || g_settings.display_show_speed || g_settings.display_show_gpu_stats || g_settings.display_show_resolution || g_settings.display_show_cpu_usage || @@ -222,14 +231,9 @@ void ImGuiManager::DrawPerformanceOverlay() return; } - 
const float scale = ImGuiManager::GetGlobalScale(); const float shadow_offset = std::ceil(1.0f * scale); - const float margin = std::ceil(10.0f * scale); - const float spacing = std::ceil(5.0f * scale); ImFont* fixed_font = ImGuiManager::GetFixedFont(); ImFont* standard_font = ImGuiManager::GetStandardFont(); - float position_y = margin; - ImDrawList* dl = ImGui::GetBackgroundDrawList(); SmallString text; ImVec2 text_size; @@ -364,6 +368,13 @@ void ImGuiManager::DrawPerformanceOverlay() FormatProcessorStat(text, System::GetSWThreadUsage(), System::GetSWThreadAverageTime()); DRAW_LINE(fixed_font, text, IM_COL32(255, 255, 255, 255)); } + + if (MediaCapture* cap = System::GetMediaCapture()) + { + text.assign("CAP: "); + FormatProcessorStat(text, cap->GetCaptureThreadUsage(), cap->GetCaptureThreadTime()); + DRAW_LINE(fixed_font, text, IM_COL32(255, 255, 255, 255)); + } } if (g_settings.display_show_gpu_usage && g_gpu_device->IsGPUTimingEnabled()) @@ -382,67 +393,6 @@ void ImGuiManager::DrawPerformanceOverlay() DRAW_LINE(standard_font, text, IM_COL32(255, 255, 255, 255)); } } - - if (g_settings.display_show_frame_times) - { - const ImVec2 history_size(200.0f * scale, 50.0f * scale); - ImGui::SetNextWindowSize(ImVec2(history_size.x, history_size.y)); - ImGui::SetNextWindowPos(ImVec2(ImGui::GetIO().DisplaySize.x - margin - history_size.x, position_y)); - ImGui::PushStyleColor(ImGuiCol_WindowBg, ImVec4(0.0f, 0.0f, 0.0f, 0.25f)); - ImGui::PushStyleColor(ImGuiCol_FrameBg, ImVec4(0.0f, 0.0f, 0.0f, 0.0f)); - ImGui::PushStyleColor(ImGuiCol_PlotLines, ImVec4(1.0f, 1.0f, 1.0f, 1.0f)); - ImGui::PushStyleVar(ImGuiStyleVar_WindowRounding, 0.0f); - ImGui::PushStyleVar(ImGuiStyleVar_WindowPadding, ImVec2(0.0f, 0.0f)); - ImGui::PushStyleVar(ImGuiStyleVar_WindowBorderSize, 0.0f); - ImGui::PushStyleVar(ImGuiStyleVar_FramePadding, ImVec2(0.0f, 0.0f)); - ImGui::PushStyleVar(ImGuiStyleVar_FrameBorderSize, 0.0f); - if (ImGui::Begin("##frame_times", nullptr, ImGuiWindowFlags_NoDecoration | ImGuiWindowFlags_NoInputs)) - { - ImGui::PushFont(fixed_font); - - auto [min, max] = GetMinMax(System::GetFrameTimeHistory()); - - // add a little bit of space either side, so we're not constantly resizing - if ((max - min) < 4.0f) - { - min = min - std::fmod(min, 1.0f); - max = max - std::fmod(max, 1.0f) + 1.0f; - min = std::max(min - 2.0f, 0.0f); - max += 2.0f; - } - - ImGui::PlotEx( - ImGuiPlotType_Lines, "##frame_times", - [](void*, int idx) -> float { - return System::GetFrameTimeHistory()[((System::GetFrameTimeHistoryPos() + idx) % - System::NUM_FRAME_TIME_SAMPLES)]; - }, - nullptr, System::NUM_FRAME_TIME_SAMPLES, 0, nullptr, min, max, history_size); - - ImDrawList* win_dl = ImGui::GetCurrentWindow()->DrawList; - const ImVec2 wpos(ImGui::GetCurrentWindow()->Pos); - - text.format("{:.1f} ms", max); - text_size = fixed_font->CalcTextSizeA(fixed_font->FontSize, FLT_MAX, 0.0f, text.c_str(), text.end_ptr()); - win_dl->AddText(ImVec2(wpos.x + history_size.x - text_size.x - spacing + shadow_offset, wpos.y + shadow_offset), - IM_COL32(0, 0, 0, 100), text.c_str(), text.end_ptr()); - win_dl->AddText(ImVec2(wpos.x + history_size.x - text_size.x - spacing, wpos.y), IM_COL32(255, 255, 255, 255), - text.c_str(), text.end_ptr()); - - text.format("{:.1f} ms", min); - text_size = fixed_font->CalcTextSizeA(fixed_font->FontSize, FLT_MAX, 0.0f, text.c_str(), text.end_ptr()); - win_dl->AddText(ImVec2(wpos.x + history_size.x - text_size.x - spacing + shadow_offset, - wpos.y + history_size.y - fixed_font->FontSize + shadow_offset), - IM_COL32(0, 0, 
0, 100), text.c_str(), text.end_ptr()); - win_dl->AddText( - ImVec2(wpos.x + history_size.x - text_size.x - spacing, wpos.y + history_size.y - fixed_font->FontSize), - IM_COL32(255, 255, 255, 255), text.c_str(), text.end_ptr()); - ImGui::PopFont(); - } - ImGui::End(); - ImGui::PopStyleVar(5); - ImGui::PopStyleColor(3); - } } else if (g_settings.display_show_status_indicators && state == System::State::Paused && !FullscreenUI::HasActiveWindow()) @@ -547,6 +497,114 @@ void ImGuiManager::DrawEnhancementsOverlay() IM_COL32(255, 255, 255, 255), text.c_str(), text.end_ptr()); } +void ImGuiManager::DrawMediaCaptureOverlay(float& position_y, float scale, float margin, float spacing) +{ + MediaCapture* const cap = System::GetMediaCapture(); + if (!cap || FullscreenUI::HasActiveWindow()) + return; + + const float shadow_offset = std::ceil(scale); + ImFont* const standard_font = ImGuiManager::GetStandardFont(); + ImDrawList* dl = ImGui::GetBackgroundDrawList(); + + static constexpr const char* ICON = ICON_FA_VIDEO; + const time_t elapsed_time = cap->GetElapsedTime(); + const TinyString text_msg = TinyString::from_format(" {:02d}:{:02d}:{:02d}", elapsed_time / 3600, + (elapsed_time % 3600) / 60, (elapsed_time % 3600) % 60); + const ImVec2 icon_size = standard_font->CalcTextSizeA(standard_font->FontSize, std::numeric_limits::max(), + -1.0f, ICON, nullptr, nullptr); + const ImVec2 text_size = standard_font->CalcTextSizeA(standard_font->FontSize, std::numeric_limits::max(), + -1.0f, text_msg.c_str(), text_msg.end_ptr(), nullptr); + + const float box_margin = 2.0f * scale; + const ImVec2 box_size = ImVec2(icon_size.x + shadow_offset + text_size.x + box_margin * 2.0f, + std::max(icon_size.x, text_size.y) + box_margin * 2.0f); + const ImVec2 box_pos = ImVec2(ImGui::GetIO().DisplaySize.x - margin - box_size.x, position_y); + dl->AddRectFilled(box_pos, box_pos + box_size, IM_COL32(0, 0, 0, 64), box_margin); + + const ImVec2 text_start = ImVec2(box_pos.x + box_margin, box_pos.y + box_margin); + dl->AddText(standard_font, standard_font->FontSize, + ImVec2(text_start.x + shadow_offset, text_start.y + shadow_offset), IM_COL32(0, 0, 0, 100), ICON); + dl->AddText(standard_font, standard_font->FontSize, + ImVec2(text_start.x + icon_size.x + shadow_offset, text_start.y + shadow_offset), IM_COL32(0, 0, 0, 100), + text_msg.c_str(), text_msg.end_ptr()); + dl->AddText(standard_font, standard_font->FontSize, text_start, IM_COL32(255, 0, 0, 255), ICON); + dl->AddText(standard_font, standard_font->FontSize, ImVec2(text_start.x + icon_size.x, text_start.y), + IM_COL32(255, 255, 255, 255), text_msg.c_str(), text_msg.end_ptr()); + + position_y += box_size.y + spacing; +} + +void ImGuiManager::DrawFrameTimeOverlay(float& position_y, float scale, float margin, float spacing) +{ + if (!g_settings.display_show_frame_times || System::IsPaused()) + return; + + const float shadow_offset = std::ceil(1.0f * scale); + ImFont* fixed_font = ImGuiManager::GetFixedFont(); + + const ImVec2 history_size(200.0f * scale, 50.0f * scale); + ImGui::SetNextWindowSize(ImVec2(history_size.x, history_size.y)); + ImGui::SetNextWindowPos(ImVec2(ImGui::GetIO().DisplaySize.x - margin - history_size.x, position_y)); + ImGui::PushStyleColor(ImGuiCol_WindowBg, ImVec4(0.0f, 0.0f, 0.0f, 0.25f)); + ImGui::PushStyleColor(ImGuiCol_FrameBg, ImVec4(0.0f, 0.0f, 0.0f, 0.0f)); + ImGui::PushStyleColor(ImGuiCol_PlotLines, ImVec4(1.0f, 1.0f, 1.0f, 1.0f)); + ImGui::PushStyleVar(ImGuiStyleVar_WindowRounding, 0.0f); + ImGui::PushStyleVar(ImGuiStyleVar_WindowPadding, 
ImVec2(0.0f, 0.0f)); + ImGui::PushStyleVar(ImGuiStyleVar_WindowBorderSize, 0.0f); + ImGui::PushStyleVar(ImGuiStyleVar_FramePadding, ImVec2(0.0f, 0.0f)); + ImGui::PushStyleVar(ImGuiStyleVar_FrameBorderSize, 0.0f); + if (ImGui::Begin("##frame_times", nullptr, ImGuiWindowFlags_NoDecoration | ImGuiWindowFlags_NoInputs)) + { + ImGui::PushFont(fixed_font); + + auto [min, max] = GetMinMax(System::GetFrameTimeHistory()); + + // add a little bit of space either side, so we're not constantly resizing + if ((max - min) < 4.0f) + { + min = min - std::fmod(min, 1.0f); + max = max - std::fmod(max, 1.0f) + 1.0f; + min = std::max(min - 2.0f, 0.0f); + max += 2.0f; + } + + ImGui::PlotEx( + ImGuiPlotType_Lines, "##frame_times", + [](void*, int idx) -> float { + return System::GetFrameTimeHistory()[((System::GetFrameTimeHistoryPos() + idx) % + System::NUM_FRAME_TIME_SAMPLES)]; + }, + nullptr, System::NUM_FRAME_TIME_SAMPLES, 0, nullptr, min, max, history_size); + + ImDrawList* win_dl = ImGui::GetCurrentWindow()->DrawList; + const ImVec2 wpos(ImGui::GetCurrentWindow()->Pos); + + TinyString text; + text.format("{:.1f} ms", max); + ImVec2 text_size = fixed_font->CalcTextSizeA(fixed_font->FontSize, FLT_MAX, 0.0f, text.c_str(), text.end_ptr()); + win_dl->AddText(ImVec2(wpos.x + history_size.x - text_size.x - spacing + shadow_offset, wpos.y + shadow_offset), + IM_COL32(0, 0, 0, 100), text.c_str(), text.end_ptr()); + win_dl->AddText(ImVec2(wpos.x + history_size.x - text_size.x - spacing, wpos.y), IM_COL32(255, 255, 255, 255), + text.c_str(), text.end_ptr()); + + text.format("{:.1f} ms", min); + text_size = fixed_font->CalcTextSizeA(fixed_font->FontSize, FLT_MAX, 0.0f, text.c_str(), text.end_ptr()); + win_dl->AddText(ImVec2(wpos.x + history_size.x - text_size.x - spacing + shadow_offset, + wpos.y + history_size.y - fixed_font->FontSize + shadow_offset), + IM_COL32(0, 0, 0, 100), text.c_str(), text.end_ptr()); + win_dl->AddText( + ImVec2(wpos.x + history_size.x - text_size.x - spacing, wpos.y + history_size.y - fixed_font->FontSize), + IM_COL32(255, 255, 255, 255), text.c_str(), text.end_ptr()); + ImGui::PopFont(); + } + ImGui::End(); + ImGui::PopStyleVar(5); + ImGui::PopStyleColor(3); + + position_y += history_size.y + spacing; +} + void ImGuiManager::DrawInputsOverlay() { const float scale = ImGuiManager::GetGlobalScale(); diff --git a/src/core/settings.cpp b/src/core/settings.cpp index 93f5387f3..5e256e938 100644 --- a/src/core/settings.cpp +++ b/src/core/settings.cpp @@ -10,6 +10,7 @@ #include "util/gpu_device.h" #include "util/imgui_manager.h" #include "util/input_manager.h" +#include "util/media_capture.h" #include "common/assert.h" #include "common/file_system.h" @@ -82,6 +83,12 @@ float SettingInfo::FloatStepValue() const return step_value ? 
StringUtil::FromChars(step_value).value_or(fallback_value) : fallback_value; } +#if defined(_WIN32) +const MediaCaptureBackend Settings::DEFAULT_MEDIA_CAPTURE_BACKEND = MediaCaptureBackend::MediaFoundation; +#elif !defined(__ANDROID__) +const MediaCaptureBackend Settings::DEFAULT_MEDIA_CAPTURE_BACKEND = MediaCaptureBackend::FFMPEG; +#endif + Settings::Settings() { controller_types[0] = DEFAULT_CONTROLLER_1_TYPE; @@ -405,6 +412,27 @@ void Settings::Load(SettingsInterface& si) achievements_leaderboard_duration = si.GetIntValue("Cheevos", "LeaderboardsDuration", DEFAULT_LEADERBOARD_NOTIFICATION_TIME); +#ifndef __ANDROID__ + media_capture_backend = + MediaCapture::ParseBackendName( + si.GetStringValue("MediaCapture", "Backend", MediaCapture::GetBackendName(DEFAULT_MEDIA_CAPTURE_BACKEND)).c_str()) + .value_or(DEFAULT_MEDIA_CAPTURE_BACKEND); + media_capture_container = si.GetStringValue("MediaCapture", "Container", "mp4"); + media_capture_video = si.GetBoolValue("MediaCapture", "VideoCapture", true); + media_capture_video_width = si.GetUIntValue("MediaCapture", "VideoWidth", 640); + media_capture_video_height = si.GetUIntValue("MediaCapture", "VideoHeight", 480); + media_capture_video_auto_size = si.GetBoolValue("MediaCapture", "VideoAutoSize", false); + media_capture_video_bitrate = si.GetUIntValue("MediaCapture", "VideoBitrate", 6000); + media_capture_video_codec = si.GetStringValue("MediaCapture", "VideoCodec"); + media_capture_video_codec_use_args = si.GetBoolValue("MediaCapture", "VideoCodecUseArgs", false); + media_capture_video_codec_args = si.GetStringValue("MediaCapture", "AudioCodecArgs"); + media_capture_audio = si.GetBoolValue("MediaCapture", "AudioCapture", true); + media_capture_audio_bitrate = si.GetUIntValue("MediaCapture", "AudioBitrate", 128); + media_capture_audio_codec = si.GetStringValue("MediaCapture", "AudioCodec"); + media_capture_audio_codec_use_args = si.GetBoolValue("MediaCapture", "AudioCodecUseArgs", false); + media_capture_audio_codec_args = si.GetStringValue("MediaCapture", "AudioCodecArgs"); +#endif + log_level = ParseLogLevelName(si.GetStringValue("Logging", "LogLevel", GetLogLevelName(DEFAULT_LOG_LEVEL)).c_str()) .value_or(DEFAULT_LOG_LEVEL); log_filter = si.GetStringValue("Logging", "LogFilter", ""); @@ -657,6 +685,24 @@ void Settings::Save(SettingsInterface& si, bool ignore_base) const si.SetIntValue("Cheevos", "NotificationsDuration", achievements_notification_duration); si.SetIntValue("Cheevos", "LeaderboardsDuration", achievements_leaderboard_duration); +#ifndef __ANDROID__ + si.SetStringValue("MediaCapture", "Backend", MediaCapture::GetBackendName(media_capture_backend)); + si.SetStringValue("MediaCapture", "Container", media_capture_container.c_str()); + si.SetBoolValue("MediaCapture", "VideoCapture", media_capture_video); + si.SetUIntValue("MediaCapture", "VideoWidth", media_capture_video_width); + si.SetUIntValue("MediaCapture", "VideoHeight", media_capture_video_height); + si.SetBoolValue("MediaCapture", "VideoAutoSize", media_capture_video_auto_size); + si.SetUIntValue("MediaCapture", "VideoBitrate", media_capture_video_bitrate); + si.SetStringValue("MediaCapture", "VideoCodec", media_capture_video_codec.c_str()); + si.SetBoolValue("MediaCapture", "VideoCodecUseArgs", media_capture_video_codec_use_args); + si.SetStringValue("MediaCapture", "AudioCodecArgs", media_capture_video_codec_args.c_str()); + si.SetBoolValue("MediaCapture", "AudioCapture", media_capture_audio); + si.SetUIntValue("MediaCapture", "AudioBitrate", media_capture_audio_bitrate); + 
si.SetStringValue("MediaCapture", "AudioCodec", media_capture_audio_codec.c_str()); + si.SetBoolValue("MediaCapture", "AudioCodecUseArgs", media_capture_audio_codec_use_args); + si.SetStringValue("MediaCapture", "AudioCodecArgs", media_capture_audio_codec_args.c_str()); +#endif + if (!ignore_base) { si.SetStringValue("Logging", "LogLevel", GetLogLevelName(log_level)); @@ -1823,6 +1869,7 @@ std::string EmuFolders::Screenshots; std::string EmuFolders::Shaders; std::string EmuFolders::Textures; std::string EmuFolders::UserResources; +std::string EmuFolders::Videos; void EmuFolders::SetDefaults() { @@ -1840,6 +1887,7 @@ void EmuFolders::SetDefaults() Shaders = Path::Combine(DataRoot, "shaders"); Textures = Path::Combine(DataRoot, "textures"); UserResources = Path::Combine(DataRoot, "resources"); + Videos = Path::Combine(DataRoot, "videos"); } static std::string LoadPathFromSettings(SettingsInterface& si, const std::string& root, const char* section, @@ -1870,6 +1918,7 @@ void EmuFolders::LoadConfig(SettingsInterface& si) Shaders = LoadPathFromSettings(si, DataRoot, "Folders", "Shaders", "shaders"); Textures = LoadPathFromSettings(si, DataRoot, "Folders", "Textures", "textures"); UserResources = LoadPathFromSettings(si, DataRoot, "Folders", "UserResources", "resources"); + Videos = LoadPathFromSettings(si, DataRoot, "Folders", "Videos", "videos"); DEV_LOG("BIOS Directory: {}", Bios); DEV_LOG("Cache Directory: {}", Cache); @@ -1886,6 +1935,7 @@ void EmuFolders::LoadConfig(SettingsInterface& si) DEV_LOG("Shaders Directory: {}", Shaders); DEV_LOG("Textures Directory: {}", Textures); DEV_LOG("User Resources Directory: {}", UserResources); + DEV_LOG("Videos Directory: {}", Videos); } void EmuFolders::Save(SettingsInterface& si) @@ -1905,6 +1955,7 @@ void EmuFolders::Save(SettingsInterface& si) si.SetStringValue("Folders", "Shaders", Path::MakeRelative(Shaders, DataRoot).c_str()); si.SetStringValue("Folders", "Textures", Path::MakeRelative(Textures, DataRoot).c_str()); si.SetStringValue("Folders", "UserResources", Path::MakeRelative(UserResources, DataRoot).c_str()); + si.SetStringValue("Folders", "Videos", Path::MakeRelative(UserResources, Videos).c_str()); } void EmuFolders::Update() @@ -1954,6 +2005,7 @@ bool EmuFolders::EnsureFoldersExist() result; result = FileSystem::EnsureDirectoryExists(Textures.c_str(), false) && result; result = FileSystem::EnsureDirectoryExists(UserResources.c_str(), false) && result; + result = FileSystem::EnsureDirectoryExists(Videos.c_str(), false) && result; return result; } diff --git a/src/core/settings.h b/src/core/settings.h index df673bda4..98dd2b047 100644 --- a/src/core/settings.h +++ b/src/core/settings.h @@ -19,6 +19,7 @@ #include enum class RenderAPI : u32; +enum class MediaCaptureBackend : u8; struct SettingInfo { @@ -223,6 +224,25 @@ struct Settings s32 achievements_notification_duration = DEFAULT_ACHIEVEMENT_NOTIFICATION_TIME; s32 achievements_leaderboard_duration = DEFAULT_LEADERBOARD_NOTIFICATION_TIME; +#ifndef __ANDROID__ + // media capture + std::string media_capture_container; + std::string media_capture_audio_codec; + std::string media_capture_audio_codec_args; + std::string media_capture_video_codec; + std::string media_capture_video_codec_args; + u32 media_capture_video_width = 640; + u32 media_capture_video_height = 480; + u32 media_capture_video_bitrate = 6000; + u32 media_capture_audio_bitrate = 128; + MediaCaptureBackend media_capture_backend = DEFAULT_MEDIA_CAPTURE_BACKEND; + bool media_capture_video : 1 = true; + bool 
media_capture_video_codec_use_args : 1 = true; + bool media_capture_video_auto_size : 1 = false; + bool media_capture_audio : 1 = true; + bool media_capture_audio_codec_use_args : 1 = true; +#endif + struct DebugSettings { bool show_vram : 1 = false; @@ -517,6 +537,11 @@ struct Settings static constexpr SaveStateCompressionMode DEFAULT_SAVE_STATE_COMPRESSION_MODE = SaveStateCompressionMode::ZstDefault; +#ifndef __ANDROID__ + static const MediaCaptureBackend DEFAULT_MEDIA_CAPTURE_BACKEND; + static constexpr const char* DEFAULT_MEDIA_CAPTURE_CONTAINER = "mp4"; +#endif + // Enable console logging by default on Linux platforms. #if defined(__linux__) && !defined(__ANDROID__) static constexpr bool DEFAULT_LOG_TO_CONSOLE = true; @@ -562,6 +587,7 @@ extern std::string Screenshots; extern std::string Shaders; extern std::string Textures; extern std::string UserResources; +extern std::string Videos; // Assumes that AppRoot and DataRoot have been initialized. void SetDefaults(); diff --git a/src/core/spu.cpp b/src/core/spu.cpp index 018301183..08f030def 100644 --- a/src/core/spu.cpp +++ b/src/core/spu.cpp @@ -12,6 +12,7 @@ #include "util/audio_stream.h" #include "util/imgui_manager.h" +#include "util/media_capture.h" #include "util/state_wrapper.h" #include "util/wav_writer.h" @@ -482,7 +483,6 @@ void SPU::CPUClockChanged() void SPU::Shutdown() { - StopDumpingAudio(); s_state.tick_event.Deactivate(); s_state.transfer_event.Deactivate(); s_state.audio_stream.reset(); @@ -1508,11 +1508,8 @@ void SPU::InternalGeneratePendingSamples() s_state.tick_event.InvokeEarly(force_exec); } -bool SPU::IsDumpingAudio() -{ - return static_cast(s_state.dump_writer); -} - +#if 0 +// TODO: FIXME bool SPU::StartDumpingAudio(const char* filename) { s_state.dump_writer.reset(); @@ -1562,6 +1559,7 @@ bool SPU::StopDumpingAudio() return true; } +#endif const std::array& SPU::GetRAM() { @@ -2435,8 +2433,11 @@ void SPU::Execute(void* param, TickCount ticks, TickCount ticks_late) } } - if (s_state.dump_writer) [[unlikely]] - s_state.dump_writer->WriteFrames(output_frame_start, frames_in_this_batch); + if (MediaCapture* cap = System::GetMediaCapture()) [[unlikely]] + { + if (!cap->DeliverAudioFrames(output_frame_start, frames_in_this_batch)) + System::StopMediaCapture(); + } output_stream->EndWrite(frames_in_this_batch); remaining_frames -= frames_in_this_batch; diff --git a/src/core/spu.h b/src/core/spu.h index 3f61eae19..46674e422 100644 --- a/src/core/spu.h +++ b/src/core/spu.h @@ -38,15 +38,6 @@ void DrawDebugStateWindow(); // Executes the SPU, generating any pending samples. void GeneratePendingSamples(); -/// Returns true if currently dumping audio. -bool IsDumpingAudio(); - -/// Starts dumping audio to file. -bool StartDumpingAudio(const char* filename); - -/// Stops dumping audio to file, if started. -bool StopDumpingAudio(); - /// Access to SPU RAM. 
const std::array& GetRAM(); std::array& GetWritableRAM(); diff --git a/src/core/system.cpp b/src/core/system.cpp index a6e1f14c8..5934ce113 100644 --- a/src/core/system.cpp +++ b/src/core/system.cpp @@ -41,6 +41,7 @@ #include "util/ini_settings_interface.h" #include "util/input_manager.h" #include "util/iso_reader.h" +#include "util/media_capture.h" #include "util/platform_misc.h" #include "util/postprocessing.h" #include "util/sockets.h" @@ -78,6 +79,7 @@ Log_SetChannel(System); #ifdef _WIN32 #include "common/windows_headers.h" +#include #include #include #endif @@ -302,6 +304,7 @@ static Common::Timer s_frame_timer; static Threading::ThreadHandle s_cpu_thread_handle; static std::unique_ptr s_cheat_list; +static std::unique_ptr s_media_capture; // temporary save state, created when loading, used to undo load state static std::optional s_undo_load_state; @@ -445,6 +448,8 @@ void System::Internal::ProcessShutdown() bool System::Internal::CPUThreadInitialize(Error* error) { + Threading::SetNameOfCurrentThread("CPU Thread"); + #ifdef _WIN32 // On Win32, we have a bunch of things which use COM (e.g. SDL, Cubeb, etc). // We need to initialize COM first, before anything else does, because otherwise they might @@ -1690,8 +1695,8 @@ bool System::BootSystem(SystemBootParameters parameters, Error* error) if (parameters.load_image_to_ram || g_settings.cdrom_load_image_to_ram) CDROM::PrecacheMedia(); - if (parameters.start_audio_dump) - StartDumpingAudio(); + if (parameters.start_media_capture) + StartMediaCapture({}); if (g_settings.start_paused || parameters.override_start_paused.value_or(false)) PauseSystem(true); @@ -1809,6 +1814,9 @@ void System::DestroySystem() if (s_state == State::Shutdown) return; + if (s_media_capture) + StopMediaCapture(); + s_undo_load_state.reset(); #ifdef ENABLE_GDB_SERVER @@ -2003,6 +2011,13 @@ void System::FrameDone() SaveRunaheadState(); } + // Kick off media capture early, might take a while. 
+ if (s_media_capture && s_media_capture->IsCapturingVideo()) [[unlikely]] + { + if (!g_gpu->SendDisplayToMediaCapture(s_media_capture.get())) [[unlikely]] + StopMediaCapture(); + } + Common::Timer::Value current_time = Common::Timer::GetCurrentValue(); // pre-frame sleep accounting (input lag reduction) @@ -3134,6 +3149,9 @@ void System::UpdatePerformanceCounters() s_sw_thread_usage = static_cast(static_cast(sw_delta) * pct_divider); s_sw_thread_time = static_cast(static_cast(sw_delta) * time_divider); + if (s_media_capture) + s_media_capture->UpdateCaptureThreadUsage(pct_divider, time_divider); + s_fps_timer.ResetTo(now_ticks); if (g_gpu_device->IsGPUTimingEnabled()) @@ -4896,61 +4914,6 @@ void System::UpdateVolume() SPU::GetOutputStream()->SetOutputVolume(GetAudioOutputVolume()); } -bool System::IsDumpingAudio() -{ - return SPU::IsDumpingAudio(); -} - -bool System::StartDumpingAudio(const char* filename) -{ - if (System::IsShutdown()) - return false; - - std::string auto_filename; - if (!filename) - { - const auto& serial = System::GetGameSerial(); - if (serial.empty()) - { - auto_filename = Path::Combine( - EmuFolders::Dumps, fmt::format("audio" FS_OSPATH_SEPARATOR_STR "{}.wav", GetTimestampStringForFileName())); - } - else - { - auto_filename = Path::Combine(EmuFolders::Dumps, fmt::format("audio" FS_OSPATH_SEPARATOR_STR "{}_{}.wav", serial, - GetTimestampStringForFileName())); - } - - filename = auto_filename.c_str(); - } - - if (SPU::StartDumpingAudio(filename)) - { - Host::AddIconOSDMessage( - "audio_dumping", ICON_FA_VOLUME_UP, - fmt::format(TRANSLATE_FS("OSDMessage", "Started dumping audio to '{}'."), Path::GetFileName(filename)), - Host::OSD_INFO_DURATION); - return true; - } - else - { - Host::AddIconOSDMessage( - "audio_dumping", ICON_FA_VOLUME_UP, - fmt::format(TRANSLATE_FS("OSDMessage", "Failed to start dumping audio to '{}'."), Path::GetFileName(filename)), - Host::OSD_ERROR_DURATION); - return false; - } -} - -void System::StopDumpingAudio() -{ - if (System::IsShutdown() || !SPU::StopDumpingAudio()) - return; - - Host::AddIconOSDMessage("audio_dumping", ICON_FA_VOLUME_MUTE, TRANSLATE_STR("OSDMessage", "Stopped dumping audio."), - Host::OSD_INFO_DURATION); -} - bool System::SaveScreenshot(const char* filename, DisplayScreenshotMode mode, DisplayScreenshotFormat format, u8 quality, bool compress_on_thread) { @@ -4985,6 +4948,132 @@ bool System::SaveScreenshot(const char* filename, DisplayScreenshotMode mode, Di return g_gpu->RenderScreenshotToFile(filename, mode, quality, compress_on_thread, true); } +static std::string_view GetCaptureTypeForMessage(bool capture_video, bool capture_audio) +{ + return capture_video ? (capture_audio ? 
TRANSLATE_SV("System", "capturing audio and video") : + TRANSLATE_SV("System", "capturing video")) : + TRANSLATE_SV("System", "capturing audio"); +} + +MediaCapture* System::GetMediaCapture() +{ + return s_media_capture.get(); +} + +std::string System::GetNewMediaCapturePath(const std::string_view title, const std::string_view container) +{ + const std::string sanitized_name = Path::SanitizeFileName(title); + std::string path; + if (sanitized_name.empty()) + { + path = Path::Combine(EmuFolders::Videos, fmt::format("{}.{}", GetTimestampStringForFileName(), container)); + } + else + { + path = Path::Combine(EmuFolders::Videos, + fmt::format("{} {}.{}", sanitized_name, GetTimestampStringForFileName(), container)); + } + + return path; +} + +bool System::StartMediaCapture(std::string path, bool capture_video, bool capture_audio) +{ + if (!IsValid()) + return false; + + if (s_media_capture) + StopMediaCapture(); + + // Need to work out the size. + u32 capture_width = g_settings.media_capture_video_width; + u32 capture_height = g_settings.media_capture_video_height; + const GPUTexture::Format capture_format = + g_gpu_device->HasSurface() ? g_gpu_device->GetWindowFormat() : GPUTexture::Format::RGBA8; + const float fps = g_gpu->ComputeVerticalFrequency(); + if (capture_video) + { + // TODO: This will be a mess with GPU thread. + if (g_settings.media_capture_video_auto_size) + { + GSVector4i unused_display_rect, unused_draw_rect; + g_gpu->CalculateScreenshotSize(DisplayScreenshotMode::InternalResolution, &capture_width, &capture_height, + &unused_display_rect, &unused_draw_rect); + } + + MediaCapture::AdjustVideoSize(&capture_width, &capture_height); + } + + // TODO: Render anamorphic capture instead? + constexpr float aspect = 1.0f; + + if (path.empty()) + path = GetNewMediaCapturePath(GetGameTitle(), g_settings.media_capture_container); + + Error error; + s_media_capture = MediaCapture::Create(g_settings.media_capture_backend, &error); + if (!s_media_capture || + !s_media_capture->BeginCapture( + fps, aspect, capture_width, capture_height, capture_format, SPU::SAMPLE_RATE, std::move(path), capture_video, + g_settings.media_capture_video_codec, g_settings.media_capture_video_bitrate, + g_settings.media_capture_video_codec_use_args ? std::string_view(g_settings.media_capture_video_codec_args) : + std::string_view(), + capture_audio, g_settings.media_capture_audio_codec, g_settings.media_capture_audio_bitrate, + g_settings.media_capture_audio_codec_use_args ? 
std::string_view(g_settings.media_capture_audio_codec_args) : + std::string_view(), + &error)) + { + Host::AddIconOSDMessage( + "MediaCapture", ICON_FA_EXCLAMATION_TRIANGLE, + fmt::format(TRANSLATE_FS("System", "Failed to create media capture: {0}"), error.GetDescription()), + Host::OSD_ERROR_DURATION); + s_media_capture.reset(); + Host::OnMediaCaptureStopped(); + return false; + } + + Host::AddIconOSDMessage( + "MediaCapture", ICON_FA_CAMERA, + fmt::format(TRANSLATE_FS("System", "Starting {0} to '{1}'."), + GetCaptureTypeForMessage(s_media_capture->IsCapturingVideo(), s_media_capture->IsCapturingAudio()), + Path::GetFileName(s_media_capture->GetPath())), + Host::OSD_INFO_DURATION); + + Host::OnMediaCaptureStarted(); + return true; +} + +void System::StopMediaCapture() +{ + if (!s_media_capture) + return; + + const bool was_capturing_audio = s_media_capture->IsCapturingAudio(); + const bool was_capturing_video = s_media_capture->IsCapturingVideo(); + + Error error; + if (s_media_capture->EndCapture(&error)) + { + Host::AddIconOSDMessage("MediaCapture", ICON_FA_CAMERA, + fmt::format(TRANSLATE_FS("System", "Stopped {0} to '{1}'."), + GetCaptureTypeForMessage(was_capturing_video, was_capturing_audio), + Path::GetFileName(s_media_capture->GetPath())), + Host::OSD_INFO_DURATION); + } + else + { + Host::AddIconOSDMessage( + "MediaCapture", ICON_FA_EXCLAMATION_TRIANGLE, + fmt::format(TRANSLATE_FS("System", "Stopped {0}: {1}."), + GetCaptureTypeForMessage(s_media_capture->IsCapturingVideo(), s_media_capture->IsCapturingAudio()), + error.GetDescription()), + Host::OSD_INFO_DURATION); + } + s_media_capture.reset(); + + Host::OnMediaCaptureStopped(); +} + std::string System::GetGameSaveStateFileName(std::string_view serial, s32 slot) { if (slot < 0) diff --git a/src/core/system.h b/src/core/system.h index e26fe9aa0..e233f0530 100644 --- a/src/core/system.h +++ b/src/core/system.h @@ -27,6 +27,7 @@ struct CheatCode; class CheatList; class GPUTexture; +class MediaCapture; namespace BIOS { struct ImageInfo; @@ -54,7 +55,7 @@ struct SystemBootParameters bool load_image_to_ram = false; bool force_software_renderer = false; bool disable_achievements_hardcore_mode = false; - bool start_audio_dump = false; + bool start_media_capture = false; }; struct SaveStateInfo @@ -382,20 +383,22 @@ std::string GetGameMemoryCardPath(std::string_view serial, std::string_view path s32 GetAudioOutputVolume(); void UpdateVolume(); -/// Returns true if currently dumping audio. -bool IsDumpingAudio(); - -/// Starts dumping audio to a file. If no file name is provided, one will be generated automatically. -bool StartDumpingAudio(const char* filename = nullptr); - -/// Stops dumping audio to file if it has been started. -void StopDumpingAudio(); - /// Saves a screenshot to the specified file. If no file name is provided, one will be generated automatically. bool SaveScreenshot(const char* filename = nullptr, DisplayScreenshotMode mode = g_settings.display_screenshot_mode, DisplayScreenshotFormat format = g_settings.display_screenshot_format, u8 quality = g_settings.display_screenshot_quality, bool compress_on_thread = true); +/// Returns the path that a new media capture would be saved to by default. Safe to call from any thread. +std::string GetNewMediaCapturePath(const std::string_view title, const std::string_view container); + +/// Current media capture (if active). +MediaCapture* GetMediaCapture(); + +/// Media capture (video and/or audio). If no path is provided, one will be generated automatically. 
+bool StartMediaCapture(std::string path = {}, bool capture_video = g_settings.media_capture_video, + bool capture_audio = g_settings.media_capture_audio); +void StopMediaCapture(); + /// Loads the cheat list for the current game title from the user directory. bool LoadCheatList(); @@ -508,6 +511,10 @@ void OnPerformanceCountersUpdated(); /// Provided by the host; called when the running executable changes. void OnGameChanged(const std::string& disc_path, const std::string& game_serial, const std::string& game_name); +/// Called when media capture starts/stops. +void OnMediaCaptureStarted(); +void OnMediaCaptureStopped(); + /// Provided by the host; called once per frame at guest vsync. void PumpMessagesOnCPUThread(); diff --git a/src/duckstation-qt/foldersettingswidget.cpp b/src/duckstation-qt/foldersettingswidget.cpp index 07b35b396..803e7c39c 100644 --- a/src/duckstation-qt/foldersettingswidget.cpp +++ b/src/duckstation-qt/foldersettingswidget.cpp @@ -21,11 +21,14 @@ FolderSettingsWidget::FolderSettingsWidget(SettingsWindow* dialog, QWidget* pare m_ui.coversOpen, m_ui.coversReset, "Folders", "Covers", Path::Combine(EmuFolders::DataRoot, "covers")); SettingWidgetBinder::BindWidgetToFolderSetting( - sif, m_ui.screenshots, m_ui.screenshotsBrowse, tr("Select Screenshot Directory"), m_ui.screenshotsOpen, - m_ui.screenshotsReset, "Folders", "Screenshots", Path::Combine(EmuFolders::DataRoot, "screenshots")); - SettingWidgetBinder::BindWidgetToFolderSetting( - sif, m_ui.saveStates, m_ui.saveStatesBrowse, tr("Select Save State Directory"), m_ui.saveStatesOpen, + sif, m_ui.saveStates, m_ui.saveStatesBrowse, tr("Select Save States Directory"), m_ui.saveStatesOpen, m_ui.saveStatesReset, "Folders", "SaveStates", Path::Combine(EmuFolders::DataRoot, "savestates")); + SettingWidgetBinder::BindWidgetToFolderSetting( + sif, m_ui.screenshots, m_ui.screenshotsBrowse, tr("Select Screenshots Directory"), m_ui.screenshotsOpen, + m_ui.screenshotsReset, "Folders", "Screenshots", Path::Combine(EmuFolders::DataRoot, "screenshots")); + SettingWidgetBinder::BindWidgetToFolderSetting(sif, m_ui.videos, m_ui.videosBrowse, tr("Select Videos Directory"), + m_ui.videosOpen, m_ui.videosReset, "Folders", "Videos", + Path::Combine(EmuFolders::DataRoot, "videos")); } FolderSettingsWidget::~FolderSettingsWidget() = default; diff --git a/src/duckstation-qt/foldersettingswidget.ui b/src/duckstation-qt/foldersettingswidget.ui index 248a2c36e..ad29c99e4 100644 --- a/src/duckstation-qt/foldersettingswidget.ui +++ b/src/duckstation-qt/foldersettingswidget.ui @@ -104,39 +104,39 @@ - + - Screenshots Directory + Save States Directory - + - + - + Browse... - + Open... - + Reset - + - Used for screenshots. + Used for storing save states. @@ -144,39 +144,79 @@ - + - Save States Directory + Screenshots Directory - - - + + + + + Open... + + + + + + + Used for screenshots. + + - + Browse... + + + + + + + Reset + + + + + + + + + + Videos Directory + + - + Open... - - + + - Reset + Used for media capture, regardless of whether audio and/or video is enabled. - - + + - Used for storing save states. + Browse... 
+ + + + + + + + + + Reset @@ -186,7 +226,7 @@ - Qt::Vertical + Qt::Orientation::Vertical @@ -198,5 +238,6 @@ + diff --git a/src/duckstation-qt/graphicssettingswidget.cpp b/src/duckstation-qt/graphicssettingswidget.cpp index 63523a0d3..6a82e058b 100644 --- a/src/duckstation-qt/graphicssettingswidget.cpp +++ b/src/duckstation-qt/graphicssettingswidget.cpp @@ -10,6 +10,8 @@ #include "core/gpu.h" #include "core/settings.h" +#include "util/media_capture.h" + #include static QVariant GetMSAAModeValue(uint multisamples, bool ssaa) @@ -202,6 +204,33 @@ GraphicsSettingsWidget::GraphicsSettingsWidget(SettingsWindow* dialog, QWidget* SettingWidgetBinder::BindWidgetToIntSetting(sif, m_ui.screenshotQuality, "Display", "ScreenshotQuality", Settings::DEFAULT_DISPLAY_SCREENSHOT_QUALITY); + SettingWidgetBinder::BindWidgetToEnumSetting(sif, m_ui.mediaCaptureBackend, "MediaCapture", "Backend", + &MediaCapture::ParseBackendName, &MediaCapture::GetBackendName, + Settings::DEFAULT_MEDIA_CAPTURE_BACKEND); + SettingWidgetBinder::BindWidgetToBoolSetting(sif, m_ui.enableVideoCapture, "MediaCapture", "VideoCapture", true); + SettingWidgetBinder::BindWidgetToIntSetting(sif, m_ui.videoCaptureWidth, "MediaCapture", "VideoWidth", 640); + SettingWidgetBinder::BindWidgetToIntSetting(sif, m_ui.videoCaptureHeight, "MediaCapture", "VideoHeight", 480); + SettingWidgetBinder::BindWidgetToBoolSetting(sif, m_ui.videoCaptureResolutionAuto, "MediaCapture", "VideoAutoSize", + false); + SettingWidgetBinder::BindWidgetToIntSetting(sif, m_ui.videoCaptureBitrate, "MediaCapture", "VideoBitrate", 6000); + SettingWidgetBinder::BindWidgetToBoolSetting(sif, m_ui.enableVideoCaptureArguments, "MediaCapture", + "VideoCodecUseArgs", false); + SettingWidgetBinder::BindWidgetToStringSetting(sif, m_ui.videoCaptureArguments, "MediaCapture", "AudioCodecArgs"); + SettingWidgetBinder::BindWidgetToBoolSetting(sif, m_ui.enableAudioCapture, "MediaCapture", "AudioCapture", true); + SettingWidgetBinder::BindWidgetToIntSetting(sif, m_ui.audioCaptureBitrate, "MediaCapture", "AudioBitrate", 128); + SettingWidgetBinder::BindWidgetToBoolSetting(sif, m_ui.enableVideoCaptureArguments, "MediaCapture", + "VideoCodecUseArgs", false); + SettingWidgetBinder::BindWidgetToStringSetting(sif, m_ui.audioCaptureArguments, "MediaCapture", "AudioCodecArgs"); + + connect(m_ui.mediaCaptureBackend, QOverload::of(&QComboBox::currentIndexChanged), this, + &GraphicsSettingsWidget::onMediaCaptureBackendChanged); + connect(m_ui.enableVideoCapture, &QCheckBox::checkStateChanged, this, + &GraphicsSettingsWidget::onMediaCaptureVideoEnabledChanged); + connect(m_ui.videoCaptureResolutionAuto, &QCheckBox::checkStateChanged, this, + &GraphicsSettingsWidget::onMediaCaptureVideoAutoResolutionChanged); + connect(m_ui.enableAudioCapture, &QCheckBox::checkStateChanged, this, + &GraphicsSettingsWidget::onMediaCaptureAudioEnabledChanged); + // Texture Replacements Tab SettingWidgetBinder::BindWidgetToBoolSetting(sif, m_ui.vramWriteReplacement, "TextureReplacements", @@ -241,6 +270,9 @@ GraphicsSettingsWidget::GraphicsSettingsWidget(SettingsWindow* dialog, QWidget* onAspectRatioChanged(); onDownsampleModeChanged(); updateResolutionDependentOptions(); + onMediaCaptureBackendChanged(); + onMediaCaptureAudioEnabledChanged(); + onMediaCaptureVideoEnabledChanged(); onEnableAnyTextureReplacementsChanged(); onEnableVRAMWriteDumpingChanged(); onShowDebugSettingsChanged(QtHost::ShouldShowDebugOptions()); @@ -483,6 +515,40 @@ GraphicsSettingsWidget::GraphicsSettingsWidget(SettingsWindow* dialog, QWidget* 
QStringLiteral("%1%").arg(Settings::DEFAULT_DISPLAY_SCREENSHOT_QUALITY), tr("Selects the quality at which screenshots will be compressed. Higher values preserve " "more detail for JPEG, and reduce file size for PNG.")); + dialog->registerWidgetHelp( + m_ui.mediaCaptureBackend, tr("Backend"), + QString::fromUtf8(MediaCapture::GetBackendDisplayName(Settings::DEFAULT_MEDIA_CAPTURE_BACKEND)), + tr("Selects the framework that is used to encode video/audio.")); + dialog->registerWidgetHelp(m_ui.captureContainer, tr("Container"), tr("MP4"), + tr("Determines the file format used to contain the captured audio/video")); + dialog->registerWidgetHelp( + m_ui.videoCaptureCodec, tr("Video Codec"), tr("Default"), + tr("Selects which Video Codec to be used for Video Capture. If unsure, leave it on default.")); + dialog->registerWidgetHelp(m_ui.videoCaptureBitrate, tr("Video Bitrate"), tr("6000 kbps"), + tr("Sets the video bitrate to be used. Larger bitrate generally yields better video " + "quality at the cost of larger resulting file size.")); + dialog->registerWidgetHelp( + m_ui.videoCaptureResolutionAuto, tr("Automatic Resolution"), tr("Unchecked"), + tr("When checked, the video capture resolution will follows the internal resolution of the running " + "game. Be careful when using this setting especially when you are upscaling, as higher internal " + "resolutions (above 4x) can cause system slowdown.")); + dialog->registerWidgetHelp(m_ui.enableVideoCaptureArguments, tr("Enable Extra Video Arguments"), tr("Unchecked"), + tr("Allows you to pass arguments to the selected video codec.")); + dialog->registerWidgetHelp( + m_ui.videoCaptureArguments, tr("Extra Video Arguments"), tr("Empty"), + tr("Parameters passed to the selected video codec.
You must use '=' to separate key from value and ':' to " + "separate two pairs from each other.
For example: \"crf = 21 : preset = veryfast\"")); + dialog->registerWidgetHelp( + m_ui.audioCaptureCodec, tr("Audio Codec"), tr("Default"), + tr("Selects which Audio Codec to be used for Video Capture. If unsure, leave it on default.")); + dialog->registerWidgetHelp(m_ui.audioCaptureBitrate, tr("Audio Bitrate"), tr("160 kbps"), + tr("Sets the audio bitrate to be used.")); + dialog->registerWidgetHelp(m_ui.enableAudioCaptureArguments, tr("Enable Extra Audio Arguments"), tr("Unchecked"), + tr("Allows you to pass arguments to the selected audio codec.")); + dialog->registerWidgetHelp( + m_ui.audioCaptureArguments, tr("Extra Audio Arguments"), tr("Empty"), + tr("Parameters passed to the selected audio codec.
You must use '=' to separate key from value and ':' to " + "separate two pairs from each other.
For example: \"compression_level = 4 : joint_stereo = 1\"")); // Texture Replacements Tab @@ -625,6 +691,12 @@ void GraphicsSettingsWidget::setupAdditionalUi() QString::fromUtf8(Settings::GetDisplayScreenshotFormatDisplayName(static_cast(i)))); } + for (u32 i = 0; i < static_cast(MediaCaptureBackend::MaxCount); i++) + { + m_ui.mediaCaptureBackend->addItem( + QString::fromUtf8(MediaCapture::GetBackendDisplayName(static_cast(i)))); + } + // Debugging Tab for (u32 i = 0; i < static_cast(GPUWireframeMode::Count); i++) @@ -931,6 +1003,110 @@ void GraphicsSettingsWidget::onDownsampleModeChanged() } } +void GraphicsSettingsWidget::onMediaCaptureBackendChanged() +{ + SettingsInterface* const sif = m_dialog->getSettingsInterface(); + const MediaCaptureBackend backend = + MediaCapture::ParseBackendName( + m_dialog + ->getEffectiveStringValue("MediaCapture", "Backend", + MediaCapture::GetBackendName(Settings::DEFAULT_MEDIA_CAPTURE_BACKEND)) + .c_str()) + .value_or(Settings::DEFAULT_MEDIA_CAPTURE_BACKEND); + + { + m_ui.captureContainer->disconnect(); + m_ui.captureContainer->clear(); + + for (const auto& [name, display_name] : MediaCapture::GetContainerList(backend)) + { + const QString qname = QString::fromStdString(name); + m_ui.captureContainer->addItem(tr("%1 (%2)").arg(QString::fromStdString(display_name)).arg(qname), qname); + } + + SettingWidgetBinder::BindWidgetToStringSetting(sif, m_ui.captureContainer, "MediaCapture", "Container", "mp4"); + connect(m_ui.captureContainer, QOverload::of(&QComboBox::currentIndexChanged), this, + &GraphicsSettingsWidget::onMediaCaptureContainerChanged); + } + + onMediaCaptureContainerChanged(); +} + +void GraphicsSettingsWidget::onMediaCaptureContainerChanged() +{ + SettingsInterface* const sif = m_dialog->getSettingsInterface(); + const MediaCaptureBackend backend = + MediaCapture::ParseBackendName( + m_dialog + ->getEffectiveStringValue("MediaCapture", "Backend", + MediaCapture::GetBackendName(Settings::DEFAULT_MEDIA_CAPTURE_BACKEND)) + .c_str()) + .value_or(Settings::DEFAULT_MEDIA_CAPTURE_BACKEND); + const std::string container = m_dialog->getEffectiveStringValue("MediaCapture", "Container", "mp4"); + + { + m_ui.videoCaptureCodec->disconnect(); + m_ui.videoCaptureCodec->clear(); + m_ui.videoCaptureCodec->addItem(tr("Default"), QVariant(QString())); + + for (const auto& [name, display_name] : MediaCapture::GetVideoCodecList(backend, container.c_str())) + { + const QString qname = QString::fromStdString(name); + m_ui.videoCaptureCodec->addItem(tr("%1 (%2)").arg(QString::fromStdString(display_name)).arg(qname), qname); + } + + SettingWidgetBinder::BindWidgetToStringSetting(sif, m_ui.videoCaptureCodec, "MediaCapture", "VideoCodec"); + } + + { + m_ui.audioCaptureCodec->disconnect(); + m_ui.audioCaptureCodec->clear(); + m_ui.audioCaptureCodec->addItem(tr("Default"), QVariant(QString())); + + for (const auto& [name, display_name] : MediaCapture::GetAudioCodecList(backend, container.c_str())) + { + const QString qname = QString::fromStdString(name); + m_ui.audioCaptureCodec->addItem(tr("%1 (%2)").arg(QString::fromStdString(display_name)).arg(qname), qname); + } + + SettingWidgetBinder::BindWidgetToStringSetting(sif, m_ui.audioCaptureCodec, "MediaCapture", "AudioCodec"); + } +} + +void GraphicsSettingsWidget::onMediaCaptureVideoEnabledChanged() +{ + const bool enabled = m_dialog->getEffectiveBoolValue("MediaCapture", "VideoCapture", true); + m_ui.videoCaptureCodecLabel->setEnabled(enabled); + m_ui.videoCaptureCodec->setEnabled(enabled); + 
m_ui.videoCaptureBitrateLabel->setEnabled(enabled); + m_ui.videoCaptureBitrate->setEnabled(enabled); + m_ui.videoCaptureResolutionLabel->setEnabled(enabled); + m_ui.videoCaptureResolutionAuto->setEnabled(enabled); + m_ui.enableVideoCaptureArguments->setEnabled(enabled); + m_ui.videoCaptureArguments->setEnabled(enabled); + onMediaCaptureVideoAutoResolutionChanged(); +} + +void GraphicsSettingsWidget::onMediaCaptureVideoAutoResolutionChanged() +{ + const bool enabled = m_dialog->getEffectiveBoolValue("MediaCapture", "VideoCapture", true); + const bool auto_enabled = m_dialog->getEffectiveBoolValue("MediaCapture", "VideoAutoSize", false); + m_ui.videoCaptureWidth->setEnabled(enabled && !auto_enabled); + m_ui.xLabel->setEnabled(enabled && !auto_enabled); + m_ui.videoCaptureHeight->setEnabled(enabled && !auto_enabled); +} + +void GraphicsSettingsWidget::onMediaCaptureAudioEnabledChanged() +{ + const bool enabled = m_dialog->getEffectiveBoolValue("MediaCapture", "AudioCapture", true); + m_ui.audioCaptureCodecLabel->setEnabled(enabled); + m_ui.audioCaptureCodec->setEnabled(enabled); + m_ui.audioCaptureBitrateLabel->setEnabled(enabled); + m_ui.audioCaptureBitrate->setEnabled(enabled); + m_ui.enableAudioCaptureArguments->setEnabled(enabled); + m_ui.audioCaptureArguments->setEnabled(enabled); +} + void GraphicsSettingsWidget::onEnableAnyTextureReplacementsChanged() { const bool any_replacements_enabled = diff --git a/src/duckstation-qt/graphicssettingswidget.h b/src/duckstation-qt/graphicssettingswidget.h index 71d16d544..361ecd109 100644 --- a/src/duckstation-qt/graphicssettingswidget.h +++ b/src/duckstation-qt/graphicssettingswidget.h @@ -32,6 +32,13 @@ private Q_SLOTS: void updateResolutionDependentOptions(); void onTrueColorChanged(); void onDownsampleModeChanged(); + + void onMediaCaptureBackendChanged(); + void onMediaCaptureContainerChanged(); + void onMediaCaptureVideoEnabledChanged(); + void onMediaCaptureVideoAutoResolutionChanged(); + void onMediaCaptureAudioEnabledChanged(); + void onEnableAnyTextureReplacementsChanged(); void onEnableVRAMWriteDumpingChanged(); diff --git a/src/duckstation-qt/graphicssettingswidget.ui b/src/duckstation-qt/graphicssettingswidget.ui index d06e288d7..4da2d9be4 100644 --- a/src/duckstation-qt/graphicssettingswidget.ui +++ b/src/duckstation-qt/graphicssettingswidget.ui @@ -817,6 +817,244 @@
+ + + + Media Capture + + + + + + Backend: + + + + + + + + + + Container: + + + + + + + + + + 20 + + + 10 + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + + + Codec: + + + + + + + + + + Bitrate: + + + + + + + kbps + + + 16 + + + 2048 + + + 1 + + + 128 + + + + + + + Extra Arguments + + + + + + + + + + + + + Capture Audio + + + + + + + + 0 + + + 0 + + + 0 + + + 0 + + + + + Codec: + + + + + + + + + + Bitrate: + + + + + + + kbps + + + 100 + + + 100000 + + + 100 + + + 6000 + + + + + + + Resolution: + + + + + + + + + 320 + + + 32768 + + + 16 + + + 640 + + + + + + + x + + + + + + + 240 + + + 32768 + + + 16 + + + 480 + + + + + + + Auto + + + + + + + + + + + + Extra Arguments + + + + + + + + + + Capture Video + + + + + + + + diff --git a/src/duckstation-qt/mainwindow.cpp b/src/duckstation-qt/mainwindow.cpp index e046d1129..2d9a6d430 100644 --- a/src/duckstation-qt/mainwindow.cpp +++ b/src/duckstation-qt/mainwindow.cpp @@ -623,6 +623,18 @@ void MainWindow::onRunningGameChanged(const QString& filename, const QString& ga updateWindowTitle(); } +void MainWindow::onMediaCaptureStarted() +{ + QSignalBlocker sb(m_ui.actionMediaCapture); + m_ui.actionMediaCapture->setChecked(true); +} + +void MainWindow::onMediaCaptureStopped() +{ + QSignalBlocker sb(m_ui.actionMediaCapture); + m_ui.actionMediaCapture->setChecked(false); +} + void MainWindow::onApplicationStateChanged(Qt::ApplicationState state) { if (!s_system_valid) @@ -1122,7 +1134,7 @@ const GameList::Entry* MainWindow::resolveDiscSetEntry(const GameList::Entry* en std::shared_ptr MainWindow::getSystemBootParameters(std::string file) { std::shared_ptr ret = std::make_shared(std::move(file)); - ret->start_audio_dump = m_ui.actionDumpAudio->isChecked(); + ret->start_media_capture = m_ui.actionMediaCapture->isChecked(); return ret; } @@ -2103,6 +2115,7 @@ void MainWindow::connectSignals() connect(m_ui.actionMemoryCardEditor, &QAction::triggered, this, &MainWindow::onToolsMemoryCardEditorTriggered); connect(m_ui.actionMemoryScanner, &QAction::triggered, this, &MainWindow::onToolsMemoryScannerTriggered); connect(m_ui.actionCoverDownloader, &QAction::triggered, this, &MainWindow::onToolsCoverDownloaderTriggered); + connect(m_ui.actionMediaCapture, &QAction::toggled, this, &MainWindow::onToolsMediaCaptureToggled); connect(m_ui.actionCPUDebugger, &QAction::triggered, this, &MainWindow::openCPUDebugger); SettingWidgetBinder::BindWidgetToBoolSetting(nullptr, m_ui.actionEnableGDBServer, "Debug", "EnableGDBServer", false); connect(m_ui.actionOpenDataDirectory, &QAction::triggered, this, &MainWindow::onToolsOpenDataDirectoryTriggered); @@ -2137,6 +2150,8 @@ void MainWindow::connectSignals() connect(g_emu_thread, &EmuThread::systemPaused, this, &MainWindow::onSystemPaused); connect(g_emu_thread, &EmuThread::systemResumed, this, &MainWindow::onSystemResumed); connect(g_emu_thread, &EmuThread::runningGameChanged, this, &MainWindow::onRunningGameChanged); + connect(g_emu_thread, &EmuThread::mediaCaptureStarted, this, &MainWindow::onMediaCaptureStarted); + connect(g_emu_thread, &EmuThread::mediaCaptureStopped, this, &MainWindow::onMediaCaptureStopped); connect(g_emu_thread, &EmuThread::mouseModeRequested, this, &MainWindow::onMouseModeRequested); connect(g_emu_thread, &EmuThread::fullscreenUIStateChange, this, &MainWindow::onFullscreenUIStateChange); connect(g_emu_thread, &EmuThread::achievementsLoginRequested, this, &MainWindow::onAchievementsLoginRequested); @@ -2162,12 +2177,6 @@ void MainWindow::connectSignals() "DumpCPUToVRAMCopies", false); 
SettingWidgetBinder::BindWidgetToBoolSetting(nullptr, m_ui.actionDebugDumpVRAMtoCPUCopies, "Debug", "DumpVRAMToCPUCopies", false); - connect(m_ui.actionDumpAudio, &QAction::toggled, [](bool checked) { - if (checked) - g_emu_thread->startDumpingAudio(); - else - g_emu_thread->stopDumpingAudio(); - }); connect(m_ui.actionDumpRAM, &QAction::triggered, [this]() { const QString filename = QDir::toNativeSeparators( QFileDialog::getSaveFileName(this, tr("Destination File"), QString(), tr("Binary Files (*.bin)"))); @@ -3034,6 +3043,41 @@ void MainWindow::onToolsCoverDownloaderTriggered() dlg.exec(); } +void MainWindow::onToolsMediaCaptureToggled(bool checked) +{ + if (!QtHost::IsSystemValid()) + { + // leave it for later, we'll fill in the boot params + return; + } + + if (!checked) + { + Host::RunOnCPUThread(&System::StopMediaCapture); + return; + } + + const std::string container = + Host::GetStringSettingValue("MediaCapture", "Container", Settings::DEFAULT_MEDIA_CAPTURE_CONTAINER); + const QString qcontainer = QString::fromStdString(container); + const QString filter(tr("%1 Files (*.%2)").arg(qcontainer.toUpper()).arg(qcontainer)); + + QString path = + QString::fromStdString(System::GetNewMediaCapturePath(QtHost::GetCurrentGameTitle().toStdString(), container)); + path = QDir::toNativeSeparators(QFileDialog::getSaveFileName(this, tr("Video Capture"), path, filter)); + if (path.isEmpty()) + { + // uncheck it again + const QSignalBlocker sb(m_ui.actionMediaCapture); + m_ui.actionMediaCapture->setChecked(false); + return; + } + + Host::RunOnCPUThread([path = path.toStdString()]() { + System::StartMediaCapture(path, g_settings.media_capture_video, g_settings.media_capture_audio); + }); +} + void MainWindow::onToolsMemoryScannerTriggered() { if (Achievements::IsHardcoreModeActive()) diff --git a/src/duckstation-qt/mainwindow.h b/src/duckstation-qt/mainwindow.h index 6e9dd5e45..252a9c4f9 100644 --- a/src/duckstation-qt/mainwindow.h +++ b/src/duckstation-qt/mainwindow.h @@ -140,6 +140,8 @@ private Q_SLOTS: void onSystemPaused(); void onSystemResumed(); void onRunningGameChanged(const QString& filename, const QString& game_serial, const QString& game_title); + void onMediaCaptureStarted(); + void onMediaCaptureStopped(); void onAchievementsLoginRequested(Achievements::LoginRequestReason reason); void onAchievementsChallengeModeChanged(bool enabled); void onApplicationStateChanged(Qt::ApplicationState state); @@ -174,6 +176,7 @@ private Q_SLOTS: void onToolsMemoryCardEditorTriggered(); void onToolsMemoryScannerTriggered(); void onToolsCoverDownloaderTriggered(); + void onToolsMediaCaptureToggled(bool checked); void onToolsOpenDataDirectoryTriggered(); void onSettingsTriggeredFromToolbar(); diff --git a/src/duckstation-qt/mainwindow.ui b/src/duckstation-qt/mainwindow.ui index 388183a5c..45b31b47b 100644 --- a/src/duckstation-qt/mainwindow.ui +++ b/src/duckstation-qt/mainwindow.ui @@ -188,7 +188,6 @@ - @@ -234,6 +233,8 @@ + + @@ -663,14 +664,6 @@ Disable All Enhancements - - - true - - - Dump Audio - - Dump RAM... 
@@ -945,6 +938,14 @@ Show Game Icons (List View) + + + true + + + Media Ca&pture + + diff --git a/src/duckstation-qt/qthost.cpp b/src/duckstation-qt/qthost.cpp index 49ff8d131..1781badda 100644 --- a/src/duckstation-qt/qthost.cpp +++ b/src/duckstation-qt/qthost.cpp @@ -1481,7 +1481,7 @@ void EmuThread::startDumpingAudio() return; } - System::StartDumpingAudio(); + //System::StartDumpingAudio(); } void EmuThread::stopDumpingAudio() @@ -1492,7 +1492,7 @@ void EmuThread::stopDumpingAudio() return; } - System::StopDumpingAudio(); + //System::StopDumpingAudio(); } void EmuThread::singleStepCPU() @@ -2065,6 +2065,16 @@ void Host::OnGameChanged(const std::string& disc_path, const std::string& game_s QString::fromStdString(game_name)); } +void Host::OnMediaCaptureStarted() +{ + emit g_emu_thread->mediaCaptureStarted(); +} + +void Host::OnMediaCaptureStopped() +{ + emit g_emu_thread->mediaCaptureStopped(); +} + void Host::SetMouseMode(bool relative, bool hide_cursor) { emit g_emu_thread->mouseModeRequested(relative, hide_cursor); diff --git a/src/duckstation-qt/qthost.h b/src/duckstation-qt/qthost.h index 22afadf49..80730c9de 100644 --- a/src/duckstation-qt/qthost.h +++ b/src/duckstation-qt/qthost.h @@ -148,6 +148,8 @@ Q_SIGNALS: void achievementsRefreshed(quint32 id, const QString& game_info_string); void achievementsChallengeModeChanged(bool enabled); void cheatEnabled(quint32 index, bool enabled); + void mediaCaptureStarted(); + void mediaCaptureStopped(); /// Big Picture UI requests. void onCoverDownloaderOpenRequested(); diff --git a/src/duckstation-regtest/regtest_host.cpp b/src/duckstation-regtest/regtest_host.cpp index 92c309964..98af6f4bd 100644 --- a/src/duckstation-regtest/regtest_host.cpp +++ b/src/duckstation-regtest/regtest_host.cpp @@ -283,6 +283,16 @@ void Host::OnGameChanged(const std::string& disc_path, const std::string& game_s INFO_LOG("Game Name: {}", game_name); } +void Host::OnMediaCaptureStarted() +{ + // +} + +void Host::OnMediaCaptureStopped() +{ + // +} + void Host::PumpMessagesOnCPUThread() { s_frames_to_run--; diff --git a/src/util/CMakeLists.txt b/src/util/CMakeLists.txt index 0ab0efa9c..ac63b8d5c 100644 --- a/src/util/CMakeLists.txt +++ b/src/util/CMakeLists.txt @@ -44,6 +44,8 @@ add_library(util input_source.h iso_reader.cpp iso_reader.h + media_capture.cpp + media_capture.h page_fault_handler.cpp page_fault_handler.h platform_misc.h diff --git a/src/util/media_capture.cpp b/src/util/media_capture.cpp new file mode 100644 index 000000000..0a92db49a --- /dev/null +++ b/src/util/media_capture.cpp @@ -0,0 +1,1679 @@ +// SPDX-FileCopyrightText: 2019-2024 Connor McLaughlin +// SPDX-License-Identifier: (GPL-3.0 OR CC-BY-NC-ND-4.0) + +#include "media_capture.h" +#include "gpu_device.h" +#include "host.h" + +#include "common/align.h" +#include "common/error.h" +#include "common/file_system.h" +#include "common/gsvector.h" +#include "common/log.h" +#include "common/path.h" +#include "common/string_util.h" +#include "common/threading.h" + +#include "IconsFontAwesome5.h" +#include "fmt/format.h" + +#include +#include +#include +#include +#include +#include +#include +#include + +#ifdef _WIN32 +#include "common/windows_headers.h" + +#include +#include +#include +#include +#include +#include + +#pragma comment(lib, "mfreadwrite") +#pragma comment(lib, "mfplat") +#pragma comment(lib, "mfuuid") +#endif + +Log_SetChannel(MediaCapture); + +namespace { + +static constexpr u32 VIDEO_WIDTH_ALIGNMENT = 8; +static constexpr u32 VIDEO_HEIGHT_ALIGNMENT = 8; + +class ALIGN_TO_CACHE_LINE 
MediaCaptureBase : public MediaCapture +{ +public: + static constexpr u32 NUM_FRAMES_IN_FLIGHT = 3; + static constexpr u32 MAX_PENDING_FRAMES = NUM_FRAMES_IN_FLIGHT * 2; + static constexpr u32 AUDIO_CHANNELS = 2; + static constexpr u32 AUDIO_BITS_PER_SAMPLE = sizeof(s16) * 8; + + virtual ~MediaCaptureBase() override; + + bool BeginCapture(float fps, float aspect, u32 width, u32 height, GPUTexture::Format texture_format, u32 sample_rate, + std::string path, bool capture_video, std::string_view video_codec, u32 video_bitrate, + std::string_view video_codec_args, bool capture_audio, std::string_view audio_codec, + u32 audio_bitrate, std::string_view audio_codec_args, Error* error) override; + + const std::string& GetPath() const override; + u32 GetVideoWidth() const override; + u32 GetVideoHeight() const override; + + float GetCaptureThreadUsage() const override; + float GetCaptureThreadTime() const override; + void UpdateCaptureThreadUsage(double pct_divider, double time_divider) override; + + GPUTexture* GetRenderTexture() override; + bool DeliverVideoFrame(GPUTexture* stex) override; + bool DeliverAudioFrames(const s16* frames, u32 num_frames) override; + bool EndCapture(Error* error) override; + void Flush() override; + +protected: + struct PendingFrame + { + enum class State + { + Unused, + NeedsMap, + NeedsEncoding + }; + + std::unique_ptr tex; + s64 pts; + State state; + }; + + ALWAYS_INLINE u32 GetAudioBufferSizeInFrames() const + { + return (static_cast(m_audio_buffer.size()) / AUDIO_CHANNELS); + } + + void ProcessFramePendingMap(std::unique_lock& lock); + void ProcessAllInFlightFrames(std::unique_lock& lock); + void EncoderThreadEntryPoint(); + void StartEncoderThread(); + void StopEncoderThread(std::unique_lock& lock); + void DeleteOutputFile(); + + virtual void ClearState(); + virtual bool SendFrame(const PendingFrame& pf, Error* error) = 0; + virtual bool ProcessAudioPackets(s64 video_pts, Error* error) = 0; + + virtual bool InternalBeginCapture(float fps, float aspect, u32 sample_rate, bool capture_video, + std::string_view video_codec, u32 video_bitrate, std::string_view video_codec_args, + bool capture_audio, std::string_view audio_codec, u32 audio_bitrate, + std::string_view audio_codec_args, Error* error) = 0; + virtual bool InternalEndCapture(std::unique_lock& lock, Error* error); + + mutable std::mutex m_lock; + std::string m_path; + std::atomic_bool m_capturing{false}; + std::atomic_bool m_encoding_error{false}; + + u32 m_video_width = 0; + u32 m_video_height = 0; + GPUTexture::Format m_video_render_texture_format = GPUTexture::Format::Unknown; + s64 m_next_video_pts = 0; + std::unique_ptr m_render_texture; + + s64 m_next_audio_pts = 0; + u32 m_audio_frame_pos = 0; + u32 m_audio_frame_size = 0; + + Threading::Thread m_encoder_thread; + u64 m_encoder_thread_last_time = 0; + float m_encoder_thread_usage = 0.0f; + float m_encoder_thread_time = 0.0f; + + std::condition_variable m_frame_ready_cv; + std::condition_variable m_frame_encoded_cv; + std::array m_pending_frames = {}; + u32 m_pending_frames_pos = 0; + u32 m_frames_pending_map = 0; + u32 m_frames_map_consume_pos = 0; + u32 m_frames_pending_encode = 0; + u32 m_frames_encode_consume_pos = 0; + + DynamicHeapArray m_audio_buffer; + std::atomic m_audio_buffer_size{0}; + u32 m_audio_buffer_write_pos = 0; + ALIGN_TO_CACHE_LINE u32 m_audio_buffer_read_pos = 0; +}; + +MediaCaptureBase::~MediaCaptureBase() = default; + +bool MediaCaptureBase::BeginCapture(float fps, float aspect, u32 width, u32 height, GPUTexture::Format 
texture_format, + u32 sample_rate, std::string path, bool capture_video, std::string_view video_codec, + u32 video_bitrate, std::string_view video_codec_args, bool capture_audio, + std::string_view audio_codec, u32 audio_bitrate, std::string_view audio_codec_args, + Error* error) +{ + m_video_width = width; + m_video_height = height; + m_video_render_texture_format = texture_format; + + if (path.empty()) + { + Error::SetStringView(error, "No path specified."); + return false; + } + else if (fps == 0.0f || m_video_width == 0 || !Common::IsAlignedPow2(m_video_width, VIDEO_WIDTH_ALIGNMENT) || + m_video_height == 0 || !Common::IsAlignedPow2(m_video_height, VIDEO_HEIGHT_ALIGNMENT)) + { + Error::SetStringView(error, "Invalid video dimensions/rate."); + return false; + } + + m_path = std::move(path); + m_capturing.store(true, std::memory_order_release); + + // allocate audio buffer, dynamic based on sample rate + if (capture_audio) + m_audio_buffer.resize(sample_rate * MAX_PENDING_FRAMES * AUDIO_CHANNELS); + + INFO_LOG("Initializing capture:"); + if (capture_video) + { + INFO_LOG(" Video: FPS={}, Aspect={}, Codec={}, Bitrate={}, Args={}", fps, aspect, video_codec, video_bitrate, + video_codec_args); + } + if (capture_audio) + { + INFO_LOG(" Audio: SampleRate={}, Codec={}, Bitrate={}, Args={}", sample_rate, audio_codec, audio_bitrate, + audio_codec_args); + } + + if (!InternalBeginCapture(fps, aspect, sample_rate, capture_video, video_codec, video_bitrate, video_codec_args, + capture_audio, audio_codec, audio_bitrate, audio_codec_args, error)) + { + return false; + } + + StartEncoderThread(); + return true; +} + +GPUTexture* MediaCaptureBase::GetRenderTexture() +{ + if (m_render_texture) [[likely]] + return m_render_texture.get(); + + m_render_texture = g_gpu_device->CreateTexture(m_video_width, m_video_height, 1, 1, 1, GPUTexture::Type::RenderTarget, + m_video_render_texture_format); + if (!m_render_texture) [[unlikely]] + { + ERROR_LOG("Failed to create {}x{} render texture.", m_video_width, m_video_height); + return nullptr; + } + + return m_render_texture.get(); +} + +bool MediaCaptureBase::DeliverVideoFrame(GPUTexture* stex) +{ + std::unique_lock lock(m_lock); + + // If the encoder thread reported an error, stop the capture. + if (m_encoding_error.load(std::memory_order_acquire)) + return false; + + if (m_frames_pending_map >= NUM_FRAMES_IN_FLIGHT) + ProcessFramePendingMap(lock); + + PendingFrame& pf = m_pending_frames[m_pending_frames_pos]; + + // It shouldn't be pending map, but the encode thread might be lagging. 
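+  // The producer and the encoder thread share a ring of MAX_PENDING_FRAMES download textures. If this
+  // slot is still queued for encoding, block on m_frame_encoded_cv until the encoder hands it back as
+  // Unused; that wait is the backpressure which stops a lagging encoder's frame from being overwritten.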
+ DebugAssert(pf.state != PendingFrame::State::NeedsMap); + if (pf.state == PendingFrame::State::NeedsEncoding) + { + m_frame_encoded_cv.wait(lock, [&pf]() { return pf.state == PendingFrame::State::Unused; }); + } + + if (!pf.tex || pf.tex->GetWidth() != static_cast(stex->GetWidth()) || + pf.tex->GetHeight() != static_cast(stex->GetHeight())) + { + pf.tex.reset(); + pf.tex = g_gpu_device->CreateDownloadTexture(stex->GetWidth(), stex->GetHeight(), stex->GetFormat()); + if (!pf.tex) + { + ERROR_LOG("Failed to create {}x{} download texture", stex->GetWidth(), stex->GetHeight()); + return false; + } + +#ifdef _DEBUG + GL_OBJECT_NAME_FMT(pf.tex, "GSCapture {}x{} Download Texture", stex->GetWidth(), stex->GetHeight()); +#endif + } + + pf.tex->CopyFromTexture(0, 0, stex, 0, 0, m_video_width, m_video_height, 0, 0); + pf.pts = m_next_video_pts++; + pf.state = PendingFrame::State::NeedsMap; + + m_pending_frames_pos = (m_pending_frames_pos + 1) % MAX_PENDING_FRAMES; + m_frames_pending_map++; + return true; +} + +void MediaCaptureBase::ProcessFramePendingMap(std::unique_lock& lock) +{ + DebugAssert(m_frames_pending_map > 0); + + PendingFrame& pf = m_pending_frames[m_frames_map_consume_pos]; + DebugAssert(pf.state == PendingFrame::State::NeedsMap); + + // Flushing is potentially expensive, so we leave it unlocked in case the encode thread + // needs to pick up another thread while we're waiting. + lock.unlock(); + + if (pf.tex->NeedsFlush()) + pf.tex->Flush(); + + // Even if the map failed, we need to kick it to the encode thread anyway, because + // otherwise our queue indices will get desynchronized. + if (!pf.tex->Map(0, 0, m_video_width, m_video_height)) + WARNING_LOG("Failed to map previously flushed frame."); + + lock.lock(); + + // Kick to encoder thread! + pf.state = PendingFrame::State::NeedsEncoding; + m_frames_map_consume_pos = (m_frames_map_consume_pos + 1) % MAX_PENDING_FRAMES; + m_frames_pending_map--; + m_frames_pending_encode++; + m_frame_ready_cv.notify_one(); +} + +void MediaCaptureBase::EncoderThreadEntryPoint() +{ + Threading::SetNameOfCurrentThread("Media Capture Encoding"); + + Error error; + std::unique_lock lock(m_lock); + + for (;;) + { + m_frame_ready_cv.wait( + lock, [this]() { return (m_frames_pending_encode > 0 || !m_capturing.load(std::memory_order_acquire)); }); + if (m_frames_pending_encode == 0 && !m_capturing.load(std::memory_order_acquire)) + break; + + PendingFrame& pf = m_pending_frames[m_frames_encode_consume_pos]; + DebugAssert(pf.state == PendingFrame::State::NeedsEncoding); + + lock.unlock(); + + bool okay = !m_encoding_error; + + // If the frame failed to map, this will be false, and we'll just skip it. + if (okay && IsCapturingVideo() && pf.tex->IsMapped()) + okay = SendFrame(pf, &error); + + // Encode as many audio frames while the video is ahead. + if (okay && IsCapturingAudio()) + okay = ProcessAudioPackets(pf.pts, &error); + + lock.lock(); + + // If we had an encoding error, tell the GS thread to shut down the capture (later). + if (!okay) [[unlikely]] + { + ERROR_LOG("Encoding error: {}", error.GetDescription()); + m_encoding_error.store(true, std::memory_order_release); + } + + // Done with this frame! Wait for the next. 
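+    // Hand the slot back to the producer: both DeliverVideoFrame() (waiting for a free slot) and
+    // DeliverAudioFrames() (waiting for the audio ring to drain) sleep on m_frame_encoded_cv, so the
+    // state change below happens under m_lock before notify_all() wakes them.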
+ pf.state = PendingFrame::State::Unused; + m_frames_encode_consume_pos = (m_frames_encode_consume_pos + 1) % MAX_PENDING_FRAMES; + m_frames_pending_encode--; + m_frame_encoded_cv.notify_all(); + } +} + +void MediaCaptureBase::StartEncoderThread() +{ + INFO_LOG("Starting encoder thread."); + DebugAssert(m_capturing.load(std::memory_order_acquire) && !m_encoder_thread.Joinable()); + m_encoder_thread.Start([this]() { EncoderThreadEntryPoint(); }); +} + +void MediaCaptureBase::StopEncoderThread(std::unique_lock& lock) +{ + // Thread will exit when s_capturing is false. + DebugAssert(!m_capturing.load(std::memory_order_acquire)); + + if (m_encoder_thread.Joinable()) + { + INFO_LOG("Stopping encoder thread."); + + // Might be sleeping, so wake it before joining. + m_frame_ready_cv.notify_one(); + lock.unlock(); + m_encoder_thread.Join(); + lock.lock(); + } +} + +void MediaCaptureBase::ProcessAllInFlightFrames(std::unique_lock& lock) +{ + while (m_frames_pending_map > 0) + ProcessFramePendingMap(lock); + + while (m_frames_pending_encode > 0) + { + m_frame_encoded_cv.wait(lock, [this]() { return (m_frames_pending_encode == 0 || m_encoding_error); }); + } +} + +bool MediaCaptureBase::DeliverAudioFrames(const s16* frames, u32 num_frames) +{ + if (!IsCapturingAudio()) + return true; + else if (!m_capturing.load(std::memory_order_acquire)) + return false; + + const u32 audio_buffer_size = GetAudioBufferSizeInFrames(); + if ((audio_buffer_size - m_audio_buffer_size.load(std::memory_order_acquire)) < num_frames) + { + // Need to wait for it to drain a bit. + std::unique_lock lock(m_lock); + m_frame_encoded_cv.wait(lock, [this, &num_frames, &audio_buffer_size]() { + return (!m_capturing.load(std::memory_order_acquire) || + ((audio_buffer_size - m_audio_buffer_size.load(std::memory_order_acquire)) >= num_frames)); + }); + if (!m_capturing.load(std::memory_order_acquire)) + return false; + } + + for (u32 remaining_frames = num_frames;;) + { + const u32 contig_frames = std::min(audio_buffer_size - m_audio_buffer_write_pos, remaining_frames); + std::memcpy(&m_audio_buffer[m_audio_buffer_write_pos * AUDIO_CHANNELS], frames, + sizeof(s16) * AUDIO_CHANNELS * contig_frames); + m_audio_buffer_write_pos = (m_audio_buffer_write_pos + contig_frames) % audio_buffer_size; + remaining_frames -= contig_frames; + if (remaining_frames == 0) + break; + } + + const u32 buffer_size = m_audio_buffer_size.fetch_add(num_frames, std::memory_order_release) + num_frames; + if (!IsCapturingVideo() && buffer_size >= m_audio_frame_size) + { + // If we're not capturing video, push "frames" when we hit the audio packet size. 
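+    // Audio-only captures have no video frames to pace the encoder thread, so queue a placeholder
+    // PendingFrame once m_audio_frame_size samples (one nominal video frame's worth, sample_rate / fps)
+    // have accumulated; the encoder skips SendFrame() for it and just drains the audio ring via
+    // ProcessAudioPackets().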
+ std::unique_lock lock(m_lock); + if (!m_capturing.load(std::memory_order_acquire)) + return false; + + PendingFrame& pf = m_pending_frames[m_pending_frames_pos]; + pf.state = PendingFrame::State::NeedsEncoding; + m_pending_frames_pos = (m_pending_frames_pos + 1) % MAX_PENDING_FRAMES; + + m_frames_pending_encode++; + m_frame_ready_cv.notify_one(); + } + + return true; +} + +bool MediaCaptureBase::InternalEndCapture(std::unique_lock& lock, Error* error) +{ + DebugAssert(m_capturing.load(std::memory_order_acquire)); + + const bool had_error = m_encoding_error.load(std::memory_order_acquire); + if (!had_error) + ProcessAllInFlightFrames(lock); + + m_capturing.store(false, std::memory_order_release); + StopEncoderThread(lock); + return !had_error; +} + +void MediaCaptureBase::ClearState() +{ + m_pending_frames = {}; + m_pending_frames_pos = 0; + m_frames_pending_map = 0; + m_frames_map_consume_pos = 0; + m_frames_pending_encode = 0; + m_frames_encode_consume_pos = 0; + + m_audio_buffer_read_pos = 0; + m_audio_buffer_write_pos = 0; + m_audio_buffer_size.store(0, std::memory_order_release); + m_audio_frame_pos = 0; + m_audio_buffer_size = 0; + m_audio_buffer.deallocate(); + + m_encoding_error.store(false, std::memory_order_release); +} + +bool MediaCaptureBase::EndCapture(Error* error) +{ + std::unique_lock lock(m_lock); + if (!InternalEndCapture(lock, error)) + { + DeleteOutputFile(); + ClearState(); + return false; + } + + ClearState(); + return true; +} + +const std::string& MediaCaptureBase::GetPath() const +{ + return m_path; +} + +u32 MediaCaptureBase::GetVideoWidth() const +{ + return m_video_width; +} + +u32 MediaCaptureBase::GetVideoHeight() const +{ + return m_video_height; +} + +float MediaCaptureBase::GetCaptureThreadUsage() const +{ + return m_encoder_thread_usage; +} + +float MediaCaptureBase::GetCaptureThreadTime() const +{ + return m_encoder_thread_time; +} + +void MediaCaptureBase::UpdateCaptureThreadUsage(double pct_divider, double time_divider) +{ + const u64 time = m_encoder_thread.GetCPUTime(); + const u64 delta = time - m_encoder_thread_last_time; + m_encoder_thread_usage = static_cast(static_cast(delta) * pct_divider); + m_encoder_thread_time = static_cast(static_cast(delta) * time_divider); + m_encoder_thread_last_time = time; +} + +void MediaCaptureBase::Flush() +{ + std::unique_lock lock(m_lock); + + if (m_encoding_error) + return; + + ProcessAllInFlightFrames(lock); + + if (IsCapturingAudio()) + { + // Clear any buffered audio frames out, we don't want to delay the CPU thread. 
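+    // Dropping the backlog trades a short gap in the captured audio for not stalling the caller behind a
+    // slow encoder; the read/write positions and the size counter are reset together so the ring stays
+    // consistent.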
+ const u32 audio_frames = m_audio_buffer_size.load(std::memory_order_acquire); + if (audio_frames > 0) + WARNING_LOG("Dropping {} audio frames for buffer clear.", audio_frames); + + m_audio_buffer_read_pos = 0; + m_audio_buffer_write_pos = 0; + m_audio_buffer_size.store(0, std::memory_order_release); + } +} + +void MediaCaptureBase::DeleteOutputFile() +{ + if (m_path.empty()) + return; + + Error error; + if (FileSystem::DeleteFile(m_path.c_str(), &error)) + { + INFO_LOG("Deleted output file {}", Path::GetFileName(m_path)); + m_path = {}; + } + else + { + ERROR_LOG("Failed to delete output file '{}': {}", Path::GetFileName(m_path), error.GetDescription()); + } +} + +#ifdef _WIN32 + +class MediaCaptureMF final : public MediaCaptureBase +{ + template + using ComPtr = Microsoft::WRL::ComPtr; + + static constexpr u32 TEN_NANOSECONDS = 10 * 1000 * 1000; + static constexpr DWORD INVALID_STREAM_INDEX = std::numeric_limits::max(); + + static constexpr const GUID& AUDIO_INPUT_MEDIA_FORMAT = MFAudioFormat_PCM; + static constexpr const GUID& VIDEO_RGB_MEDIA_FORMAT = MFVideoFormat_RGB32; + static constexpr const GUID& VIDEO_YUV_MEDIA_FORMAT = MFVideoFormat_NV12; + +public: + ~MediaCaptureMF() override; + + static std::unique_ptr Create(Error* error); + static ContainerList GetContainerList(); + static CodecList GetVideoCodecList(const char* container); + static CodecList GetAudioCodecList(const char* container); + + bool IsCapturingAudio() const override; + bool IsCapturingVideo() const override; + time_t GetElapsedTime() const override; + +protected: + void ClearState() override; + bool SendFrame(const PendingFrame& pf, Error* error) override; + bool ProcessAudioPackets(s64 video_pts, Error* error) override; + bool InternalBeginCapture(float fps, float aspect, u32 sample_rate, bool capture_video, std::string_view video_codec, + u32 video_bitrate, std::string_view video_codec_args, bool capture_audio, + std::string_view audio_codec, u32 audio_bitrate, std::string_view audio_codec_args, + Error* error) override; + bool InternalEndCapture(std::unique_lock& lock, Error* error) override; + +private: + ComPtr CreateVideoYUVTransform(ComPtr* output_type, Error* error); + ComPtr CreateVideoEncodeTransform(std::string_view codec, u32 bitrate, IMFMediaType* input_type, + ComPtr* output_type, bool* use_async_transform, + Error* error); + bool GetAudioTypes(std::string_view codec, ComPtr* input_type, ComPtr* output_type, + u32 sample_rate, u32 bitrate, Error* error); + static void ConvertVideoFrame(u8* dst, size_t dst_stride, const u8* src, size_t src_stride, u32 width, u32 height); + + bool ProcessVideoOutputSamples(Error* error); // synchronous + bool ProcessVideoEvents(Error* error); // asynchronous + + ComPtr m_sink_writer; + + DWORD m_video_stream_index = INVALID_STREAM_INDEX; + DWORD m_audio_stream_index = INVALID_STREAM_INDEX; + + LONGLONG m_video_sample_duration = 0; + LONGLONG m_audio_sample_duration = 0; + + u32 m_frame_rate_numerator = 0; + + ComPtr m_video_yuv_transform; + ComPtr m_video_yuv_sample; + ComPtr m_video_encode_transform; + ComPtr m_video_encode_event_generator; + std::deque> m_pending_video_samples; + ComPtr m_video_output_sample; + u32 m_wanted_video_samples = 0; + DWORD m_video_sample_size = 0; +}; + +static std::once_flag s_media_foundation_initialized_flag; +static HRESULT s_media_foundation_initialized = S_OK; + +struct MediaFoundationCodec +{ + const char* name; + const char* display_name; + const GUID& guid; + bool require_hardware; +}; +static constexpr const MediaFoundationCodec 
s_media_foundation_audio_codecs[] = { + {"aac", "Advanced Audio Coding", MFAudioFormat_AAC}, + {"mp3", "MPEG-2 Audio Layer III", MFAudioFormat_MP3}, + {"pcm", "Uncompressed PCM", MFAudioFormat_PCM}, +}; +static constexpr const MediaFoundationCodec s_media_foundation_video_codecs[] = { + {"h264", "H.264 with Software Encoding", MFVideoFormat_H264, false}, + {"h264_hw", "H.264 with Hardware Encoding", MFVideoFormat_H264, true}, + {"h265", "H.265 with Software Encoding", MFVideoFormat_H265, false}, + {"h265_hw", "H.265 with Hardware Encoding", MFVideoFormat_H265, true}, + {"hevc", "HEVC with Software Encoding", MFVideoFormat_HEVC, false}, + {"hevc_hw", "HEVC with Hardware Encoding", MFVideoFormat_HEVC, true}, + {"vp9", "VP9 with Software Encoding", MFVideoFormat_VP90, false}, + {"vp9_hw", "VP9 with Hardware Encoding", MFVideoFormat_VP90, true}, + {"av1", "AV1 with Software Encoding", MFVideoFormat_AV1, false}, + {"av1_hw", "AV1 with Hardware Encoding", MFVideoFormat_AV1, false}, +}; + +static bool InitializeMediaFoundation(Error* error) +{ + std::call_once(s_media_foundation_initialized_flag, []() { + s_media_foundation_initialized = MFStartup(MF_VERSION, MFSTARTUP_NOSOCKET); + if (SUCCEEDED(s_media_foundation_initialized)) + std::atexit([]() { MFShutdown(); }); + }); + if (FAILED(s_media_foundation_initialized)) [[unlikely]] + { + Error::SetHResult(error, "MFStartup() failed: ", s_media_foundation_initialized); + return false; + } + + return true; +} + +MediaCaptureMF::~MediaCaptureMF() = default; + +std::unique_ptr MediaCaptureMF::Create(Error* error) +{ + if (!InitializeMediaFoundation(error)) + return nullptr; + + return std::make_unique(); +} + +MediaCapture::ContainerList MediaCaptureMF::GetContainerList() +{ + return { + {"avi", "Audio Video Interleave"}, {"mp4", "MPEG-4 Part 14"}, + {"mkv", "Matroska Media Container"}, {"mp3", "MPEG-2 Audio Layer III"}, + {"wav", "Waveform Audio File Format"}, + }; +} + +MediaCapture::ContainerList MediaCaptureMF::GetAudioCodecList(const char* container) +{ + ContainerList ret; + ret.reserve(std::size(s_media_foundation_audio_codecs)); + for (const MediaFoundationCodec& codec : s_media_foundation_audio_codecs) + ret.emplace_back(codec.name, codec.display_name); + return ret; +} + +MediaCapture::ContainerList MediaCaptureMF::GetVideoCodecList(const char* container) +{ + ContainerList ret; + ret.reserve(std::size(s_media_foundation_video_codecs)); + for (const MediaFoundationCodec& codec : s_media_foundation_video_codecs) + ret.emplace_back(codec.name, codec.display_name); + return ret; +} + +bool MediaCaptureMF::IsCapturingVideo() const +{ + return (m_video_stream_index != INVALID_STREAM_INDEX); +} + +bool MediaCaptureMF::IsCapturingAudio() const +{ + return (m_audio_stream_index != INVALID_STREAM_INDEX); +} + +time_t MediaCaptureMF::GetElapsedTime() const +{ + if (IsCapturingVideo()) + return static_cast(static_cast(m_next_video_pts * m_video_sample_duration) / TEN_NANOSECONDS); + else + return static_cast(static_cast(m_next_audio_pts * m_audio_sample_duration) / TEN_NANOSECONDS); +} + +bool MediaCaptureMF::InternalBeginCapture(float fps, float aspect, u32 sample_rate, bool capture_video, + std::string_view video_codec, u32 video_bitrate, + std::string_view video_codec_args, bool capture_audio, + std::string_view audio_codec, u32 audio_bitrate, + std::string_view audio_codec_args, Error* error) +{ + HRESULT hr; + + ComPtr video_media_type; + bool use_async_video_transform = false; + + if (capture_video) + { + m_frame_rate_numerator = static_cast(fps * 
TEN_NANOSECONDS); + m_video_sample_duration = static_cast(static_cast(TEN_NANOSECONDS) / static_cast(fps)); + + ComPtr yuv_media_type; + if (!(m_video_yuv_transform = CreateVideoYUVTransform(&yuv_media_type, error)) || + !(m_video_encode_transform = CreateVideoEncodeTransform(video_codec, video_bitrate, yuv_media_type.Get(), + &video_media_type, &use_async_video_transform, error))) + { + return false; + } + } + + ComPtr audio_input_type, audio_output_type; + if (capture_audio) + { + if (!GetAudioTypes(audio_codec, &audio_input_type, &audio_output_type, sample_rate, audio_bitrate, error)) + return false; + + // only used when not capturing video + m_audio_frame_size = static_cast(static_cast(sample_rate) / fps); + + m_audio_sample_duration = + static_cast(static_cast(TEN_NANOSECONDS) / static_cast(sample_rate)); + } + + if (FAILED(hr = MFCreateSinkWriterFromURL(StringUtil::UTF8StringToWideString(m_path).c_str(), nullptr, nullptr, + m_sink_writer.GetAddressOf()))) + { + Error::SetHResult(error, "MFCreateSinkWriterFromURL() failed: ", hr); + return false; + } + + if (capture_video) + { + if (SUCCEEDED(hr) && FAILED(hr = m_sink_writer->AddStream(video_media_type.Get(), &m_video_stream_index))) + [[unlikely]] + { + Error::SetHResult(error, "Video AddStream() failed: ", hr); + } + + if (SUCCEEDED(hr) && FAILED(hr = m_sink_writer->SetInputMediaType(m_video_stream_index, video_media_type.Get(), + nullptr))) [[unlikely]] + { + Error::SetHResult(error, "Video SetInputMediaType() failed: ", hr); + } + } + + if (capture_audio) + { + if (SUCCEEDED(hr) && FAILED(hr = m_sink_writer->AddStream(audio_output_type.Get(), &m_audio_stream_index))) + [[unlikely]] + { + Error::SetHResult(error, "Audio AddStream() failed: ", hr); + } + + if (SUCCEEDED(hr) && FAILED(hr = m_sink_writer->SetInputMediaType(m_audio_stream_index, audio_input_type.Get(), + nullptr))) [[unlikely]] + { + Error::SetHResult(error, "Audio SetInputMediaType() failed: ", hr); + } + } + + if (SUCCEEDED(hr) && FAILED(hr = m_sink_writer->BeginWriting())) + Error::SetHResult(error, "BeginWriting() failed: ", hr); + + if (use_async_video_transform) + { + if (SUCCEEDED(hr) && FAILED(hr = m_video_encode_transform.As(&m_video_encode_event_generator))) + Error::SetHResult(error, "Getting video encode event generator failed: ", hr); + } + + if (SUCCEEDED(hr) && FAILED(hr = m_video_encode_transform->ProcessMessage(MFT_MESSAGE_NOTIFY_START_OF_STREAM, 0))) + Error::SetHResult(error, "MFT_MESSAGE_NOTIFY_START_OF_STREAM failed: ", hr); + + if (FAILED(hr)) + { + m_sink_writer.Reset(); + DeleteOutputFile(); + return false; + } + + return true; +} + +bool MediaCaptureMF::InternalEndCapture(std::unique_lock& lock, Error* error) +{ + HRESULT hr = MediaCaptureBase::InternalEndCapture(lock, error) ? S_OK : E_FAIL; + + // need to drain all input frames + if (m_video_encode_transform) + { + if (SUCCEEDED(hr) && FAILED(hr = m_video_encode_transform->ProcessMessage(MFT_MESSAGE_NOTIFY_END_OF_STREAM, 0))) + { + Error::SetHResult(error, "MFT_MESSAGE_NOTIFY_END_OF_STREAM failed: ", hr); + return false; + } + + if (m_video_encode_event_generator) + hr = ProcessVideoEvents(error) ? S_OK : E_FAIL; + else + hr = ProcessVideoOutputSamples(error) ? 
S_OK : E_FAIL; + } + + if (SUCCEEDED(hr) && FAILED(hr = m_sink_writer->Finalize())) [[unlikely]] + Error::SetHResult(error, "Finalize() failed: ", hr); + + m_sink_writer.Reset(); + return SUCCEEDED(hr); +} + +MediaCaptureMF::ComPtr MediaCaptureMF::CreateVideoYUVTransform(ComPtr* output_type, + Error* error) +{ + const MFT_REGISTER_TYPE_INFO input_type_info = {.guidMajorType = MFMediaType_Video, + .guidSubtype = VIDEO_RGB_MEDIA_FORMAT}; + const MFT_REGISTER_TYPE_INFO output_type_info = {.guidMajorType = MFMediaType_Video, + .guidSubtype = VIDEO_YUV_MEDIA_FORMAT}; + + IMFActivate** transforms = nullptr; + UINT32 num_transforms = 0; + HRESULT hr = MFTEnumEx(MFT_CATEGORY_VIDEO_PROCESSOR, MFT_ENUM_FLAG_SORTANDFILTER, &input_type_info, &output_type_info, + &transforms, &num_transforms); + if (FAILED(hr)) [[unlikely]] + { + Error::SetHResult(error, "YUV MFTEnumEx() failed: ", hr); + return nullptr; + } + else if (num_transforms == 0) [[unlikely]] + { + Error::SetStringView(error, "No video processors found."); + return nullptr; + } + + ComPtr transform; + hr = transforms[0]->ActivateObject(IID_PPV_ARGS(transform.GetAddressOf())); + if (transforms) + MFHeapFree(transforms); + if (FAILED(hr)) [[unlikely]] + { + Error::SetHResult(error, "YUV ActivateObject() failed: ", hr); + return nullptr; + } + + ComPtr input_type; + if (FAILED(hr = MFCreateMediaType(input_type.GetAddressOf())) || + FAILED(hr = MFCreateMediaType(output_type->GetAddressOf()))) [[unlikely]] + { + Error::SetHResult(error, "YUV MFCreateMediaType() failed: ", hr); + return nullptr; + } + + if (FAILED(hr = input_type->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video)) || + FAILED(hr = input_type->SetGUID(MF_MT_SUBTYPE, VIDEO_RGB_MEDIA_FORMAT)) || + FAILED(hr = input_type->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive)) || + FAILED(hr = MFSetAttributeSize(input_type.Get(), MF_MT_FRAME_SIZE, m_video_width, m_video_height)) || + FAILED(hr = (*output_type)->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video)) || + FAILED(hr = (*output_type)->SetGUID(MF_MT_SUBTYPE, VIDEO_YUV_MEDIA_FORMAT)) || + FAILED(hr = (*output_type)->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive)) || + FAILED(hr = MFSetAttributeSize(output_type->Get(), MF_MT_FRAME_SIZE, m_video_width, m_video_height)) || + FAILED(hr = MFSetAttributeRatio(output_type->Get(), MF_MT_FRAME_RATE, m_frame_rate_numerator, TEN_NANOSECONDS))) + [[unlikely]] + { + Error::SetHResult(error, "YUV setting attributes failed: ", hr); + return nullptr; + } + + if (FAILED(hr = transform->SetOutputType(0, output_type->Get(), 0))) [[unlikely]] + { + Error::SetHResult(error, "YUV SetOutputType() failed: ", hr); + return nullptr; + } + + if (FAILED(hr = transform->SetInputType(0, input_type.Get(), 0))) [[unlikely]] + { + Error::SetHResult(error, "YUV SetInputType() failed: ", hr); + return nullptr; + } + + return transform; +} + +MediaCaptureMF::ComPtr MediaCaptureMF::CreateVideoEncodeTransform(std::string_view codec, u32 bitrate, + IMFMediaType* input_type, + ComPtr* output_type, + bool* use_async_transform, Error* error) +{ + const MFT_REGISTER_TYPE_INFO input_type_info = {.guidMajorType = MFMediaType_Video, + .guidSubtype = VIDEO_YUV_MEDIA_FORMAT}; + MFT_REGISTER_TYPE_INFO output_type_info = {.guidMajorType = MFMediaType_Video, .guidSubtype = MFVideoFormat_H264}; + bool hardware = false; + if (!codec.empty()) + { + bool found = false; + for (const MediaFoundationCodec& tcodec : s_media_foundation_video_codecs) + { + if (StringUtil::EqualNoCase(codec, tcodec.name)) + { + 
output_type_info.guidSubtype = tcodec.guid; + hardware = tcodec.require_hardware; + found = true; + break; + } + } + if (!found) + { + Error::SetStringFmt(error, "Unknown video codec '{}'", codec); + return nullptr; + } + } + + IMFActivate** transforms = nullptr; + UINT32 num_transforms = 0; + HRESULT hr = + MFTEnumEx(MFT_CATEGORY_VIDEO_ENCODER, (hardware ? MFT_ENUM_FLAG_HARDWARE : 0) | MFT_ENUM_FLAG_SORTANDFILTER, + &input_type_info, &output_type_info, &transforms, &num_transforms); + if (FAILED(hr)) [[unlikely]] + { + Error::SetHResult(error, "Encoder MFTEnumEx() failed: ", hr); + return nullptr; + } + else if (num_transforms == 0) [[unlikely]] + { + Error::SetStringView(error, "No video encoders found."); + return nullptr; + } + + ComPtr transform; + hr = transforms[0]->ActivateObject(IID_PPV_ARGS(transform.GetAddressOf())); + if (transforms) + MFHeapFree(transforms); + if (FAILED(hr)) [[unlikely]] + { + Error::SetHResult(error, "Encoder ActivateObject() failed: ", hr); + return nullptr; + } + + *use_async_transform = false; + if (hardware) + { + ComPtr attributes; + if (FAILED(transform->GetAttributes(attributes.GetAddressOf()))) [[unlikely]] + { + Error::SetHResult(error, "YUV GetAttributes() failed: ", hr); + return nullptr; + } + UINT32 async_supported; + *use_async_transform = + (SUCCEEDED(hr = attributes->GetUINT32(MF_TRANSFORM_ASYNC, &async_supported)) && async_supported == TRUE && + SUCCEEDED(hr = attributes->SetUINT32(MF_TRANSFORM_ASYNC_UNLOCK, 1))); + if (use_async_transform) + INFO_LOG("Using async video transform."); + } + + if (FAILED(hr = MFCreateMediaType(output_type->GetAddressOf()))) [[unlikely]] + { + Error::SetHResult(error, "Encoder MFCreateMediaType() failed: ", hr); + return nullptr; + } + + constexpr u32 par_numerator = 1; + constexpr u32 par_denominator = 1; + + u32 profile = 0; + if (output_type_info.guidSubtype == MFVideoFormat_H264) + profile = eAVEncH264VProfile_Main; + else if (output_type_info.guidSubtype == MFVideoFormat_H265) + profile = eAVEncH265VProfile_Main_420_8; + else if (output_type_info.guidSubtype == MFVideoFormat_VP90) + profile = eAVEncVP9VProfile_420_8; + + if (FAILED(hr = (*output_type)->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video)) || + FAILED(hr = (*output_type)->SetGUID(MF_MT_SUBTYPE, output_type_info.guidSubtype)) || + FAILED(hr = (*output_type)->SetUINT32(MF_MT_AVG_BITRATE, bitrate * 1000)) || + FAILED(hr = (*output_type)->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive)) || + FAILED(hr = (*output_type)->SetUINT32(MF_MT_MPEG2_PROFILE, profile)) || + FAILED(hr = MFSetAttributeSize(output_type->Get(), MF_MT_FRAME_SIZE, m_video_width, m_video_height)) || + FAILED(hr = MFSetAttributeRatio(output_type->Get(), MF_MT_FRAME_RATE, m_frame_rate_numerator, TEN_NANOSECONDS)) || + FAILED(hr = MFSetAttributeRatio(output_type->Get(), MF_MT_PIXEL_ASPECT_RATIO, par_numerator, par_denominator))) + [[unlikely]] + { + Error::SetHResult(error, "Encoder setting attributes failed: ", hr); + return nullptr; + } + + if (FAILED(hr = transform->SetOutputType(0, output_type->Get(), 0))) [[unlikely]] + { + Error::SetHResult(error, "Encoder SetOutputType() failed: ", hr); + return nullptr; + } + + if (FAILED(hr = transform->SetInputType(0, input_type, 0))) [[unlikely]] + { + Error::SetHResult(error, "Encoder SetInputType() failed: ", hr); + return nullptr; + } + + MFT_OUTPUT_STREAM_INFO osi; + if (FAILED(hr = transform->GetOutputStreamInfo(0, &osi))) [[unlikely]] + { + Error::SetHResult(error, "Encoder GetOutputStreamInfo() failed: ", hr); + return 
nullptr; + } + + if (!(osi.dwFlags & MFT_OUTPUT_STREAM_PROVIDES_SAMPLES)) + { + if (osi.cbSize == 0) + { + Error::SetStringFmt(error, "Invalid sample size for non-output-providing stream"); + return nullptr; + } + + m_video_sample_size = osi.cbSize; + } + + INFO_LOG("Video sample size: {}", m_video_sample_size); + return transform; +} + +ALWAYS_INLINE_RELEASE void MediaCaptureMF::ConvertVideoFrame(u8* dst, size_t dst_stride, const u8* src, + size_t src_stride, u32 width, u32 height) +{ + // need to convert rgba -> bgra, as well as flipping vertically + const u32 vector_width = 4; + const u32 aligned_width = Common::AlignDownPow2(width, vector_width); + src += src_stride * (height - 1); + + for (u32 remaining_rows = height;;) + { + const u8* row_src = src; + u8* row_dst = dst; + + u32 x = 0; + for (; x < aligned_width; x += vector_width) + { + static constexpr GSVector4i mask = GSVector4i::cxpr8(2, 1, 0, 3, 6, 5, 4, 7, 10, 9, 8, 11, 14, 13, 12, 15); + GSVector4i::store(row_dst, GSVector4i::load(row_src).shuffle8(mask)); + row_src += vector_width * sizeof(u32); + row_dst += vector_width * sizeof(u32); + } + + for (; x < width; x++) + { + row_dst[0] = row_src[2]; + row_dst[1] = row_src[1]; + row_dst[2] = row_src[0]; + row_dst[3] = row_src[3]; + row_src += sizeof(u32); + row_dst += sizeof(u32); + } + + src -= src_stride; + dst += dst_stride; + + remaining_rows--; + if (remaining_rows == 0) + break; + } +} + +void MediaCaptureMF::ClearState() +{ + MediaCaptureBase::ClearState(); + + m_sink_writer.Reset(); + + m_video_stream_index = INVALID_STREAM_INDEX; + m_audio_stream_index = INVALID_STREAM_INDEX; + + m_video_sample_duration = 0; + m_audio_sample_duration = 0; + m_frame_rate_numerator = 0; + + m_video_yuv_transform.Reset(); + m_video_yuv_sample.Reset(); + m_video_encode_transform.Reset(); + m_video_encode_event_generator.Reset(); + m_pending_video_samples.clear(); + m_video_output_sample.Reset(); + m_wanted_video_samples = 0; + m_video_sample_size = 0; +} + +bool MediaCaptureMF::SendFrame(const PendingFrame& pf, Error* error) +{ + const u32 buffer_stride = m_video_width * sizeof(u32); + const u32 buffer_size = buffer_stride * m_video_height; + + HRESULT hr; + ComPtr buffer; + if (FAILED(hr = MFCreateMemoryBuffer(buffer_size, buffer.GetAddressOf()))) [[unlikely]] + { + Error::SetHResult(error, "MFCreateMemoryBuffer() failed: ", hr); + return false; + } + + BYTE* buffer_data; + if (FAILED(hr = buffer->Lock(&buffer_data, nullptr, nullptr))) [[unlikely]] + { + Error::SetHResult(error, "Lock() failed: ", hr); + return false; + } + + ConvertVideoFrame(buffer_data, buffer_stride, pf.tex->GetMapPointer(), pf.tex->GetMapPitch(), m_video_width, + m_video_height); + buffer->Unlock(); + + if (FAILED(hr = buffer->SetCurrentLength(buffer_size))) [[unlikely]] + { + Error::SetHResult(error, "SetCurrentLength() failed: ", hr); + return false; + } + + ComPtr sample; + if (FAILED(hr = MFCreateSample(sample.GetAddressOf()))) [[unlikely]] + { + Error::SetHResult(error, "MFCreateSample() failed: ", hr); + return false; + } + + if (FAILED(hr = sample->AddBuffer(buffer.Get()))) [[unlikely]] + { + Error::SetHResult(error, "AddBuffer() failed: ", hr); + return false; + } + + const LONGLONG timestamp = static_cast(pf.pts) * m_video_sample_duration; + if (FAILED(hr = sample->SetSampleTime(timestamp))) [[unlikely]] + { + Error::SetHResult(error, "SetSampleTime() failed: ", hr); + return false; + } + + if (FAILED(hr = sample->SetSampleDuration(m_video_sample_duration))) [[unlikely]] + { + Error::SetHResult(error, 
"SetSampleDuration() failed: ", hr); + return false; + } + + ////////////////////////////////////////////////////////////////////////// + // RGB -> YUV + ////////////////////////////////////////////////////////////////////////// + + if (FAILED(hr = m_video_yuv_transform->ProcessInput(0, sample.Get(), 0))) [[unlikely]] + { + Error::SetHResult(error, "YUV ProcessInput() failed: ", hr); + return false; + } + + for (;;) + { + if (!m_video_yuv_sample) + { + ComPtr yuv_membuf; + if (FAILED(hr = MFCreateMemoryBuffer(buffer_size, yuv_membuf.GetAddressOf()))) [[unlikely]] + { + Error::SetHResult(error, "YUV MFCreateMemoryBuffer() failed: ", hr); + return false; + } + + if (FAILED(hr = MFCreateSample(m_video_yuv_sample.GetAddressOf()))) [[unlikely]] + { + Error::SetHResult(error, "YUV MFCreateSample() failed: ", hr); + return false; + } + if (FAILED(hr = m_video_yuv_sample->AddBuffer(yuv_membuf.Get()))) [[unlikely]] + { + Error::SetHResult(error, "YUV AddBuffer() failed: ", hr); + return false; + } + } + + DWORD status; + MFT_OUTPUT_DATA_BUFFER yuv_buf = {.pSample = m_video_yuv_sample.Get()}; + hr = m_video_yuv_transform->ProcessOutput(0, 1, &yuv_buf, &status); + if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) + break; + + if (FAILED(hr)) [[unlikely]] + { + Error::SetHResult(error, "YUV ProcessOutput() failed: ", hr); + return false; + } + if (yuv_buf.pEvents) + yuv_buf.pEvents->Release(); + + m_pending_video_samples.push_back(std::move(m_video_yuv_sample)); + + if (m_video_encode_event_generator) + { + if (!ProcessVideoEvents(error)) [[unlikely]] + return false; + } + else + { + if (!ProcessVideoOutputSamples(error)) [[unlikely]] + return false; + } + } + + return true; +} + +bool MediaCaptureMF::ProcessVideoOutputSamples(Error* error) +{ + HRESULT hr; + + for (;;) + { + while (!m_pending_video_samples.empty()) + { + if (FAILED(hr = m_video_encode_transform->ProcessInput(0, m_pending_video_samples.front().Get(), 0))) [[unlikely]] + { + Error::SetHResult(error, "Video ProcessInput() failed: ", hr); + return false; + } + m_pending_video_samples.pop_front(); + } + + if (m_video_sample_size > 0 && !m_video_output_sample) + { + ComPtr video_membuf; + if (FAILED(hr = MFCreateMemoryBuffer(m_video_sample_size, video_membuf.GetAddressOf()))) [[unlikely]] + { + Error::SetHResult(error, "YUV MFCreateMemoryBuffer() failed: ", hr); + return false; + } + + if (FAILED(hr = MFCreateSample(m_video_output_sample.GetAddressOf()))) [[unlikely]] + { + Error::SetHResult(error, "YUV MFCreateSample() failed: ", hr); + return false; + } + if (FAILED(hr = m_video_output_sample->AddBuffer(video_membuf.Get()))) [[unlikely]] + { + Error::SetHResult(error, "YUV AddBuffer() failed: ", hr); + return false; + } + } + + MFT_OUTPUT_DATA_BUFFER video_buf = {.pSample = m_video_output_sample.Get()}; + DWORD status; + hr = m_video_encode_transform->ProcessOutput(0, 1, &video_buf, &status); + if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) + break; + + if (FAILED(hr)) [[unlikely]] + { + Error::SetHResult(error, "Video ProcessOutput() failed: ", hr); + return false; + } + if (video_buf.pEvents) + video_buf.pEvents->Release(); + + hr = m_sink_writer->WriteSample(m_video_stream_index, video_buf.pSample); + if (FAILED(hr)) [[unlikely]] + { + Error::SetHResult(error, "Video WriteSample() failed: ", hr); + return false; + } + + // might be transform-provided + if (m_video_output_sample) + m_video_output_sample.Reset(); + else + video_buf.pSample->Release(); + } + + return true; +} + +bool MediaCaptureMF::ProcessVideoEvents(Error* error) +{ + HRESULT hr; + 
+ for (;;) + { + // push any wanted input + while (m_wanted_video_samples > 0) + { + if (m_pending_video_samples.empty()) + break; + + if (FAILED(hr = m_video_encode_transform->ProcessInput(0, m_pending_video_samples.front().Get(), 0))) [[unlikely]] + { + Error::SetHResult(error, "Video ProcessInput() failed: ", hr); + return false; + } + m_pending_video_samples.pop_front(); + + m_wanted_video_samples--; + } + + ComPtr event; + hr = m_video_encode_event_generator->GetEvent(MF_EVENT_FLAG_NO_WAIT, event.GetAddressOf()); + if (hr == MF_E_NO_EVENTS_AVAILABLE) + return true; + + if (FAILED(hr)) [[unlikely]] + { + Error::SetHResult(error, "GetEvent() failed: ", hr); + return false; + } + + MediaEventType type; + if (FAILED(hr = event->GetType(&type))) [[unlikely]] + { + Error::SetHResult(error, "GetEvent() failed: ", hr); + return false; + } + + UINT32 stream_id = 0; + if (type == METransformNeedInput || type == METransformHaveOutput) + { + if (FAILED(hr = event->GetUINT32(MF_EVENT_MFT_INPUT_STREAM_ID, &stream_id))) + { + Error::SetHResult(error, "Get stream ID failed: ", hr); + return false; + } + else if (stream_id != 0) + { + Error::SetStringFmt(error, "Unexpected stream ID {}", stream_id); + return false; + } + } + + switch (type) + { + case METransformNeedInput: + { + m_wanted_video_samples++; + } + break; + + case METransformHaveOutput: + { + if (m_video_sample_size > 0 && !m_video_output_sample) + { + ComPtr video_membuf; + if (FAILED(hr = MFCreateMemoryBuffer(m_video_sample_size, video_membuf.GetAddressOf()))) [[unlikely]] + { + Error::SetHResult(error, "YUV MFCreateMemoryBuffer() failed: ", hr); + return false; + } + + if (FAILED(hr = MFCreateSample(m_video_output_sample.GetAddressOf()))) [[unlikely]] + { + Error::SetHResult(error, "YUV MFCreateSample() failed: ", hr); + return false; + } + if (FAILED(hr = m_video_output_sample->AddBuffer(video_membuf.Get()))) [[unlikely]] + { + Error::SetHResult(error, "YUV AddBuffer() failed: ", hr); + return false; + } + } + + MFT_OUTPUT_DATA_BUFFER video_buf = {.pSample = m_video_output_sample.Get()}; + DWORD status; + if (FAILED(hr = m_video_encode_transform->ProcessOutput(0, 1, &video_buf, &status))) [[unlikely]] + { + Error::SetHResult(error, "Video ProcessOutput() failed: ", hr); + return false; + } + if (video_buf.pEvents) + video_buf.pEvents->Release(); + + hr = m_sink_writer->WriteSample(m_video_stream_index, video_buf.pSample); + if (FAILED(hr)) [[unlikely]] + { + Error::SetHResult(error, "Video WriteSample() failed: ", hr); + return false; + } + + // might be transform-provided + if (m_video_output_sample) + m_video_output_sample.Reset(); + else + video_buf.pSample->Release(); + } + break; + + default: + WARNING_LOG("Unhandled video event {}", static_cast(type)); + break; + } + } +} + +bool MediaCaptureMF::GetAudioTypes(std::string_view codec, ComPtr* input_type, + ComPtr* output_type, u32 sample_rate, u32 bitrate, Error* error) +{ + HRESULT hr; + if (FAILED(hr = MFCreateMediaType(input_type->GetAddressOf())) || + FAILED(hr = MFCreateMediaType(output_type->GetAddressOf()))) [[unlikely]] + { + Error::SetHResult(error, "Audio MFCreateMediaType() failed: ", hr); + return false; + } + + GUID output_subtype = MFAudioFormat_AAC; + if (!codec.empty()) + { + bool found = false; + for (const MediaFoundationCodec& tcodec : s_media_foundation_audio_codecs) + { + if (StringUtil::EqualNoCase(codec, tcodec.name)) + { + output_subtype = tcodec.guid; + found = true; + break; + } + } + if (!found) + { + Error::SetStringFmt(error, "Unknown audio codec '{}'", 
codec); + return false; + } + } + + if (FAILED(hr = (*input_type)->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio)) || + FAILED(hr = (*input_type)->SetGUID(MF_MT_SUBTYPE, AUDIO_INPUT_MEDIA_FORMAT)) || + FAILED(hr = (*input_type)->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, AUDIO_CHANNELS)) || + FAILED(hr = (*input_type)->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, AUDIO_BITS_PER_SAMPLE)) || + FAILED(hr = (*input_type)->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, sample_rate)) || + + FAILED(hr = (*output_type)->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Audio)) || + FAILED(hr = (*output_type)->SetGUID(MF_MT_SUBTYPE, output_subtype)) || + FAILED(hr = (*output_type)->SetUINT32(MF_MT_AUDIO_NUM_CHANNELS, AUDIO_CHANNELS)) || + FAILED(hr = (*output_type)->SetUINT32(MF_MT_AUDIO_BITS_PER_SAMPLE, AUDIO_BITS_PER_SAMPLE)) || + FAILED(hr = (*output_type)->SetUINT32(MF_MT_AUDIO_SAMPLES_PER_SECOND, sample_rate)) || + FAILED(hr = (*output_type)->SetUINT32(MF_MT_AVG_BITRATE, bitrate * 1000))) [[unlikely]] + { + Error::SetHResult(error, "Audio setting attributes failed: ", hr); + return false; + } + + return true; +} + +bool MediaCaptureMF::ProcessAudioPackets(s64 video_pts, Error* error) +{ + const u32 max_audio_buffer_size = GetAudioBufferSizeInFrames(); + HRESULT hr; + + u32 pending_frames = m_audio_buffer_size.load(std::memory_order_acquire); + while (pending_frames > 0 && (!IsCapturingVideo() || + ((m_next_audio_pts * m_audio_sample_duration) < (video_pts * m_video_sample_duration)))) + { + // Grab as many source frames as we can. + const u32 contig_frames = std::min(pending_frames, max_audio_buffer_size - m_audio_buffer_read_pos); + DebugAssert(contig_frames > 0); + + const u32 buffer_size = contig_frames * sizeof(s16) * AUDIO_CHANNELS; + ComPtr buffer; + if (FAILED(hr = MFCreateMemoryBuffer(buffer_size, buffer.GetAddressOf()))) [[unlikely]] + { + Error::SetHResult(error, "Audio MFCreateMemoryBuffer() failed: ", hr); + return false; + } + + BYTE* buffer_data; + if (FAILED(hr = buffer->Lock(&buffer_data, nullptr, nullptr))) [[unlikely]] + { + Error::SetHResult(error, "Audio Lock() failed: ", hr); + return false; + } + + std::memcpy(buffer_data, &m_audio_buffer[m_audio_buffer_read_pos * AUDIO_CHANNELS], buffer_size); + buffer->Unlock(); + + if (FAILED(hr = buffer->SetCurrentLength(buffer_size))) [[unlikely]] + { + Error::SetHResult(error, "Audio SetCurrentLength() failed: ", hr); + return false; + } + + ComPtr sample; + if (FAILED(hr = MFCreateSample(sample.GetAddressOf()))) [[unlikely]] + { + Error::SetHResult(error, "Audio MFCreateSample() failed: ", hr); + return false; + } + + if (FAILED(hr = sample->AddBuffer(buffer.Get()))) [[unlikely]] + { + Error::SetHResult(error, "Audio AddBuffer() failed: ", hr); + return false; + } + + const LONGLONG timestamp = static_cast(m_next_audio_pts) * m_audio_sample_duration; + if (FAILED(hr = sample->SetSampleTime(timestamp))) [[unlikely]] + { + Error::SetHResult(error, "Audio SetSampleTime() failed: ", hr); + return false; + } + + const LONGLONG duration = static_cast(contig_frames) * m_audio_sample_duration; + if (FAILED(hr = sample->SetSampleDuration(duration))) [[unlikely]] + { + Error::SetHResult(error, "Audio SetSampleDuration() failed: ", hr); + return false; + } + + m_next_audio_pts += contig_frames; + + hr = m_sink_writer->WriteSample(m_audio_stream_index, sample.Get()); + if (FAILED(hr)) [[unlikely]] + { + Error::SetHResult(error, "Audio WriteSample() failed: ", hr); + return false; + } + + m_audio_buffer_read_pos = (m_audio_buffer_read_pos + contig_frames) % 
max_audio_buffer_size; + m_audio_buffer_size.fetch_sub(contig_frames, std::memory_order_acq_rel); + m_audio_frame_pos += contig_frames; + pending_frames -= contig_frames; + } + + return true; +} + +#endif + +} // namespace + +static constexpr const std::array s_backend_names = { +#ifdef _WIN32 + "MediaFoundation", +#endif +#ifndef __ANDROID__ + "FFMPEG", +#endif +}; +static constexpr const std::array s_backend_display_names = { +#ifdef _WIN32 + TRANSLATE_NOOP("MediaCapture", "Media Foundation"), +#endif +#ifndef __ANDROID__ + TRANSLATE_NOOP("MediaCapture", "FFMPEG"), +#endif +}; +static_assert(s_backend_names.size() == static_cast(MediaCaptureBackend::MaxCount)); +static_assert(s_backend_display_names.size() == static_cast(MediaCaptureBackend::MaxCount)); + +MediaCapture::~MediaCapture() = default; + +std::optional MediaCapture::ParseBackendName(const char* str) +{ + int index = 0; + for (const char* name : s_backend_names) + { + if (std::strcmp(name, str) == 0) + return static_cast(index); + + index++; + } + + return std::nullopt; +} + +const char* MediaCapture::GetBackendName(MediaCaptureBackend backend) +{ + return s_backend_names[static_cast(backend)]; +} + +const char* MediaCapture::GetBackendDisplayName(MediaCaptureBackend backend) +{ + return Host::TranslateToCString("MediaCapture", s_backend_display_names[static_cast(backend)]); +} + +void MediaCapture::AdjustVideoSize(u32* width, u32* height) +{ + *width = Common::AlignUpPow2(*width, VIDEO_WIDTH_ALIGNMENT); + *height = Common::AlignUpPow2(*height, VIDEO_HEIGHT_ALIGNMENT); +} + +MediaCapture::ContainerList MediaCapture::GetContainerList(MediaCaptureBackend backend) +{ + ContainerList ret; + switch (backend) + { +#ifdef _WIN32 + case MediaCaptureBackend::MediaFoundation: + ret = MediaCaptureMF::GetContainerList(); + break; +#endif +#ifndef __ANDROID__ + case MediaCaptureBackend::FFMPEG: + // ret = MediaCaptureFFMPEG::GetContainerList(); + break; +#endif + default: + break; + } + return ret; +} + +MediaCapture::CodecList MediaCapture::GetVideoCodecList(MediaCaptureBackend backend, const char* container) +{ + CodecList ret; + switch (backend) + { +#ifdef _WIN32 + case MediaCaptureBackend::MediaFoundation: + ret = MediaCaptureMF::GetVideoCodecList(container); + break; +#endif +#ifndef __ANDROID__ + case MediaCaptureBackend::FFMPEG: + // ret = MediaCaptureFFMPEG::GetVideoCodecList(container); + break; +#endif + default: + break; + } + return ret; +} + +MediaCapture::CodecList MediaCapture::GetAudioCodecList(MediaCaptureBackend backend, const char* container) +{ + CodecList ret; + switch (backend) + { +#ifdef _WIN32 + case MediaCaptureBackend::MediaFoundation: + ret = MediaCaptureMF::GetAudioCodecList(container); + break; +#endif +#ifndef __ANDROID__ + case MediaCaptureBackend::FFMPEG: + // ret = MediaCaptureFFMPEG::GetAudioCodecList(container); + break; +#endif + default: + break; + } + return ret; +} + +std::unique_ptr MediaCapture::Create(MediaCaptureBackend backend, Error* error) +{ + switch (backend) + { +#ifdef _WIN32 + case MediaCaptureBackend::MediaFoundation: + return MediaCaptureMF::Create(error); +#endif +#ifndef __ANDROID__ + case MediaCaptureBackend::FFMPEG: + return nullptr; +#endif + default: + return nullptr; + } +} diff --git a/src/util/media_capture.h b/src/util/media_capture.h new file mode 100644 index 000000000..cd9766994 --- /dev/null +++ b/src/util/media_capture.h @@ -0,0 +1,75 @@ +// SPDX-FileCopyrightText: 2019-2024 Connor McLaughlin +// SPDX-License-Identifier: (GPL-3.0 OR CC-BY-NC-ND-4.0) + +#pragma once + 
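+// MediaCapture hides the platform capture backends (Media Foundation on Windows; an FFmpeg backend slot is
+// reserved for non-Android platforms) behind one interface: the frontend picks backend, container and codecs,
+// the core pushes frames through DeliverVideoFrame()/DeliverAudioFrames(), and encoding runs on a dedicated
+// capture thread inside the implementation.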
+#include "gpu_texture.h" + +#include +#include +#include +#include +#include + +class Error; +class GPUTexture; + +enum class MediaCaptureBackend : u8 +{ +#ifdef _WIN32 + MediaFoundation, +#endif +#ifndef __ANDROID__ + FFMPEG, +#endif + MaxCount, +}; + +class MediaCapture +{ +public: + virtual ~MediaCapture(); + + using ContainerName = std::pair; // configname,longname + using ContainerList = std::vector; + using CodecName = std::pair; // configname,longname + using CodecList = std::vector; + + static std::optional ParseBackendName(const char* str); + static const char* GetBackendName(MediaCaptureBackend backend); + static const char* GetBackendDisplayName(MediaCaptureBackend backend); + + static ContainerList GetContainerList(MediaCaptureBackend backend); + static CodecList GetVideoCodecList(MediaCaptureBackend backend, const char* container); + static CodecList GetAudioCodecList(MediaCaptureBackend backend, const char* container); + + static void AdjustVideoSize(u32* width, u32* height); + + static std::unique_ptr Create(MediaCaptureBackend backend, Error* error); + + virtual bool BeginCapture(float fps, float aspect, u32 width, u32 height, GPUTexture::Format texture_format, + u32 sample_rate, std::string path, bool capture_video, std::string_view video_codec, + u32 video_bitrate, std::string_view video_codec_args, bool capture_audio, + std::string_view audio_codec, u32 audio_bitrate, std::string_view audio_codec_args, + Error* error) = 0; + virtual bool EndCapture(Error* error) = 0; + + // TODO: make non-virtual? + virtual const std::string& GetPath() const = 0; + virtual bool IsCapturingAudio() const = 0; + virtual bool IsCapturingVideo() const = 0; + virtual u32 GetVideoWidth() const = 0; + virtual u32 GetVideoHeight() const = 0; + + /// Returns the elapsed time in seconds. + virtual time_t GetElapsedTime() const = 0; + + virtual float GetCaptureThreadUsage() const = 0; + virtual float GetCaptureThreadTime() const = 0; + virtual void UpdateCaptureThreadUsage(double pct_divider, double time_divider) = 0; + + virtual GPUTexture* GetRenderTexture() = 0; + virtual bool DeliverVideoFrame(GPUTexture* stex) = 0; + virtual bool DeliverAudioFrames(const s16* frames, u32 num_frames) = 0; + virtual void Flush() = 0; +}; diff --git a/src/util/util.props b/src/util/util.props index 60318f115..15397a646 100644 --- a/src/util/util.props +++ b/src/util/util.props @@ -13,7 +13,7 @@ - %(AdditionalDependencies);d3d11.lib;d3d12.lib;d3dcompiler.lib;dxgi.lib;Dwmapi.lib;winhttp.lib + %(AdditionalDependencies);d3d11.lib;d3d12.lib;d3dcompiler.lib;dxgi.lib;Dwmapi.lib;winhttp.lib;Mfplat.lib;Mfreadwrite.lib %(AdditionalDependencies);opengl32.lib diff --git a/src/util/util.vcxproj b/src/util/util.vcxproj index 633f11927..2e8fece14 100644 --- a/src/util/util.vcxproj +++ b/src/util/util.vcxproj @@ -36,6 +36,7 @@ + true @@ -146,6 +147,7 @@ + true diff --git a/src/util/util.vcxproj.filters b/src/util/util.vcxproj.filters index 66b8e75d5..5ea0928a9 100644 --- a/src/util/util.vcxproj.filters +++ b/src/util/util.vcxproj.filters @@ -71,6 +71,7 @@ + @@ -150,6 +151,7 @@ +
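
For reviewers who want to exercise the new interface outside the emulator, below is a minimal sketch of a capture session driven through the MediaCapture API declared in media_capture.h above. It is illustrative only and not part of the patch; the container/codec names, the bitrates (kbps, matching the defaults on the new settings page), the output file name, the RGBA8 texture format, the log channel, and the stubbed frame/audio producers are all assumptions.

#include "util/media_capture.h"

#include "common/error.h"
#include "common/log.h"

Log_SetChannel(CaptureExample); // hypothetical channel name for this sketch

static void ExampleCaptureSession(const s16* audio_samples, u32 num_audio_frames)
{
  Error error;

  // Windows backend shown here; other platforms would pick a different MediaCaptureBackend value.
  std::unique_ptr<MediaCapture> cap = MediaCapture::Create(MediaCaptureBackend::MediaFoundation, &error);

  // Backends require 8-pixel-aligned dimensions; AdjustVideoSize() rounds up for us.
  u32 width = 640, height = 480;
  MediaCapture::AdjustVideoSize(&width, &height);

  if (!cap || !cap->BeginCapture(60.0f, 4.0f / 3.0f, width, height, GPUTexture::Format::RGBA8, 44100,
                                 "capture.mp4", /*capture_video=*/true, "h264", 6000, /*video_codec_args=*/{},
                                 /*capture_audio=*/true, "aac", 128, /*audio_codec_args=*/{}, &error))
  {
    ERROR_LOG("Failed to start capture: {}", error.GetDescription());
    return;
  }

  // Per emulated frame: draw the display into the capture's render target, then queue it for readback/encoding.
  if (GPUTexture* target = cap->GetRenderTexture())
    cap->DeliverVideoFrame(target);

  // Audio is pushed as interleaved stereo s16 frames whenever the mixer produces them.
  cap->DeliverAudioFrames(audio_samples, num_audio_frames);

  if (!cap->EndCapture(&error))
    ERROR_LOG("Finalizing capture failed: {}", error.GetDescription());
}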