// SimpleRecorder.cpp — Full implementation
//
// Video: back-buffer → raw BGRA → FFmpeg pipe → NVENC → video_only.mp4
// Audio: Submix listener → float PCM → WAV file → audio_only.wav
// Mux:   FFmpeg CLI → final_video_with_audio.mp4
|
#include "SimpleRecorder.h"

#include "AudioDevice.h"
#include "AudioMixerDevice.h"
#include "AudioVideoRecord.h" // For LogAudioVideoRecord
#include "Engine/GameViewportClient.h"
#include "Framework/Application/SlateApplication.h"
#include "HAL/PlatformFileManager.h"
#include "HAL/PlatformProcess.h"
#include "Misc/FileHelper.h"
#include "Misc/Paths.h"
#include "RenderingThread.h"
#include "RHI.h"
#include "RHICommandList.h"
#include "RHIResources.h"
#include "Serialization/Archive.h"
// =====================================================================
|
||
|
|
// FSubmixBridge — wraps a callback in a TSharedRef for UE 5.6 API
|
||
|
|
// UObjects can't use AsShared(), so we need this bridge.
|
||
|
|
// =====================================================================
|
||
|
|
class FSimpleRecorderSubmixBridge
|
||
|
|
: public ISubmixBufferListener
|
||
|
|
{
|
||
|
|
public:
|
||
|
|
TWeakObjectPtr<USimpleRecorder> Owner;
|
||
|
|
|
||
|
|
FSimpleRecorderSubmixBridge(USimpleRecorder* InOwner) : Owner(InOwner) {}
|
||
|
|
|
||
|
|
virtual void OnNewSubmixBuffer(
|
||
|
|
const USoundSubmix* OwningSubmix,
|
||
|
|
float* AudioData,
|
||
|
|
int32 NumSamples,
|
||
|
|
int32 NumChannels,
|
||
|
|
const int32 SampleRate,
|
||
|
|
double AudioClock) override
|
||
|
|
{
|
||
|
|
if (USimpleRecorder* Rec = Owner.Get())
|
||
|
|
{
|
||
|
|
Rec->OnNewSubmixBuffer(OwningSubmix, AudioData, NumSamples, NumChannels, SampleRate, AudioClock);
|
||
|
|
}
|
||
|
|
}
|
||
|
|
};
// =====================================================================
// Constructor
// =====================================================================
USimpleRecorder::USimpleRecorder()
{
    // Default output directory: <ProjectDir>/Saved/Recordings.
    // Callers may overwrite OutputDirectory before StartRecording();
    // InitOutputPaths() converts it to an absolute path and creates it
    // on disk when a session starts.
    OutputDirectory = FPaths::Combine(FPaths::ProjectSavedDir(), TEXT("Recordings"));
}
// =====================================================================
// BeginDestroy — safety net: stop recording if object is garbage-collected
// =====================================================================
void USimpleRecorder::BeginDestroy()
{
    // If the recorder is GC'd while a session is live, finalize it first so
    // the FFmpeg pipe is closed, the WAV file is written, and the back-buffer
    // delegate / submix listener are unregistered (see StopRecording).
    if (bIsRecording)
    {
        StopRecording();
    }
    // Super must run last so our teardown happens while the object is intact.
    Super::BeginDestroy();
}
// =====================================================================
|
||
|
|
// Helpers
|
||
|
|
// =====================================================================
|
||
|
|
|
||
|
|
/** Resolve the ffmpeg executable path. */
|
||
|
|
FString USimpleRecorder::GetFFmpegExecutable() const
|
||
|
|
{
|
||
|
|
if (!FFmpegPath.IsEmpty())
|
||
|
|
{
|
||
|
|
return FFmpegPath;
|
||
|
|
}
|
||
|
|
// Assume ffmpeg is on the system PATH
|
||
|
|
return TEXT("ffmpeg");
|
||
|
|
}
/** Set up output file paths and make sure the directory exists. */
|
||
|
|
void USimpleRecorder::InitOutputPaths()
|
||
|
|
{
|
||
|
|
// Ensure absolute path
|
||
|
|
FString Dir = OutputDirectory;
|
||
|
|
if (FPaths::IsRelative(Dir))
|
||
|
|
{
|
||
|
|
Dir = FPaths::ConvertRelativePathToFull(Dir);
|
||
|
|
}
|
||
|
|
|
||
|
|
// Create directory if it doesn't exist
|
||
|
|
IPlatformFile& PlatformFile = FPlatformFileManager::Get().GetPlatformFile();
|
||
|
|
if (!PlatformFile.DirectoryExists(*Dir))
|
||
|
|
{
|
||
|
|
PlatformFile.CreateDirectoryTree(*Dir);
|
||
|
|
}
|
||
|
|
|
||
|
|
VideoFilePath = FPaths::Combine(Dir, TEXT("video_only.mp4"));
|
||
|
|
AudioFilePath = FPaths::Combine(Dir, TEXT("audio_only.wav"));
|
||
|
|
FinalFilePath = FPaths::Combine(Dir, TEXT("final_video_with_audio.mp4"));
|
||
|
|
}
// =====================================================================
// StartRecording
// =====================================================================
/**
 * Begins a full A/V capture session.
 *
 * Set-up order:
 *   0. Optionally snaps CaptureWidth/Height to the current viewport size.
 *   1. Spawns an external FFmpeg process whose stdin consumes raw BGRA
 *      frames and encodes them to VideoFilePath.
 *   2. Hooks the Slate back-buffer present delegate so every displayed
 *      frame is forwarded to FFmpeg (see OnBackBufferReady).
 *   3. Registers a submix buffer listener so mixed audio accumulates in
 *      AudioBuffer (see OnNewSubmixBuffer).
 *
 * No-op (with a warning) if already recording. If the FFmpeg pipe cannot
 * be opened the session aborts and bIsRecording is reset to false.
 */
void USimpleRecorder::StartRecording()
{
    if (bIsRecording)
    {
        UE_LOG(LogAudioVideoRecord, Warning, TEXT("SimpleRecorder: Already recording!"));
        return;
    }

    InitOutputPaths();
    // Set before wiring callbacks: both the render-thread and audio-thread
    // callbacks early-out while this is false.
    bIsRecording = true;

    // ─── 0. Auto-detect viewport size if not explicitly set ─────────
    if (bAutoDetectResolution)
    {
        if (GEngine && GEngine->GameViewport)
        {
            FVector2D ViewportSize;
            GEngine->GameViewport->GetViewportSize(ViewportSize);
            if (ViewportSize.X > 0 && ViewportSize.Y > 0)
            {
                CaptureWidth = FMath::RoundToInt32(ViewportSize.X);
                CaptureHeight = FMath::RoundToInt32(ViewportSize.Y);
                // Ensure even dimensions (required by most video encoders);
                // clearing the low bit rounds down to the nearest even value.
                CaptureWidth = CaptureWidth & ~1;
                CaptureHeight = CaptureHeight & ~1;
            }
        }
    }

    UE_LOG(LogAudioVideoRecord, Log, TEXT("SimpleRecorder: Starting recording..."));
    UE_LOG(LogAudioVideoRecord, Log, TEXT(" Resolution: %dx%d @ %d FPS"), CaptureWidth, CaptureHeight, CaptureFPS);
    UE_LOG(LogAudioVideoRecord, Log, TEXT(" Video → %s"), *VideoFilePath);
    UE_LOG(LogAudioVideoRecord, Log, TEXT(" Audio → %s"), *AudioFilePath);
    UE_LOG(LogAudioVideoRecord, Log, TEXT(" Final → %s"), *FinalFilePath);

    // ─── 1. Open FFmpeg video pipe ──────────────────────────────────
    {
        FString FFmpeg = GetFFmpegExecutable();

        // Build command:
        //   ffmpeg -y -f rawvideo -pix_fmt bgra
        //          -video_size WxH -framerate FPS
        //          -i -            (stdin)
        //          -c:v <encoder> ... -b:v BITRATE
        //          output.mp4
        //
        // Encoder selection per platform:
        //   Mac:     h264_videotoolbox (Apple HW encoder)
        //   Windows: h264_nvenc (NVIDIA HW encoder; assumes an NVIDIA GPU —
        //            no runtime fallback to h264_amf/h264_qsv is attempted)
        //   Linux:   libx264 (software fallback)
#if PLATFORM_MAC
        FString EncoderFlags = FString::Printf(TEXT("-c:v h264_videotoolbox -b:v %s"), *VideoBitrate);
#elif PLATFORM_WINDOWS
        FString EncoderFlags = FString::Printf(TEXT("-c:v h264_nvenc -preset p5 -b:v %s"), *VideoBitrate);
#else
        FString EncoderFlags = FString::Printf(TEXT("-c:v libx264 -preset medium -b:v %s"), *VideoBitrate);
#endif

        FString Cmd = FString::Printf(
            TEXT("\"%s\" -y -f rawvideo -pix_fmt bgra -video_size %dx%d -framerate %d ")
            TEXT("-i - %s \"%s\""),
            *FFmpeg,
            CaptureWidth, CaptureHeight, CaptureFPS,
            *EncoderFlags,
            *VideoFilePath
        );

        UE_LOG(LogAudioVideoRecord, Log, TEXT(" FFmpeg cmd: %s"), *Cmd);

        // popen opens a pipe — we write raw BGRA bytes into FFmpeg's stdin.
        // "wb" on Windows is essential: text mode would mangle 0x0A bytes.
        // NOTE(review): TCHAR_TO_ANSI loses non-ANSI characters (e.g. a
        // Unicode project path) — confirm output paths are ASCII-safe or
        // switch to a UTF-8 conversion.
#if PLATFORM_WINDOWS
        FFmpegVideoPipe = _popen(TCHAR_TO_ANSI(*Cmd), "wb");
#else
        FFmpegVideoPipe = popen(TCHAR_TO_ANSI(*Cmd), "w");
#endif
        if (!FFmpegVideoPipe)
        {
            UE_LOG(LogAudioVideoRecord, Error,
                TEXT("SimpleRecorder: Failed to open FFmpeg pipe! Make sure ffmpeg.exe is on your PATH."));
            // Abort the session — nothing else has been registered yet.
            bIsRecording = false;
            return;
        }
    }

    // ─── 2. Register back-buffer delegate (video frames) ────────────
    if (FSlateApplication::IsInitialized())
    {
        BackBufferDelegateHandle =
            FSlateApplication::Get().GetRenderer()->OnBackBufferReadyToPresent()
            .AddUObject(this, &USimpleRecorder::OnBackBufferReady);
    }

    // ─── 3. Start audio submix recording ────────────────────────────
    {
        // Clear any leftover audio data from a previous session.
        FScopeLock Lock(&AudioBufferCritSection);
        AudioBuffer.Empty();
    }

    if (GEngine && GEngine->GetMainAudioDevice())
    {
        FAudioDevice* AudioDevice = GEngine->GetMainAudioDevice().GetAudioDevice();
        if (AudioDevice)
        {
            // If user didn't set a submix, use the master submix.
            USoundSubmix* Submix = TargetSubmix;
            if (!Submix)
            {
                // Get the main submix from the audio device.
                Submix = &AudioDevice->GetMainSubmixObject();
            }

            if (Submix)
            {
                // The bridge holds us weakly; the audio device holds the
                // bridge via TSharedRef until we unregister in StopRecording.
                SubmixBridge = MakeShared<FSimpleRecorderSubmixBridge, ESPMode::ThreadSafe>(this);
                AudioDevice->RegisterSubmixBufferListener(SubmixBridge.ToSharedRef(), *Submix);
                UE_LOG(LogAudioVideoRecord, Log, TEXT(" Audio submix listener registered."));
            }
            else
            {
                UE_LOG(LogAudioVideoRecord, Warning,
                    TEXT("SimpleRecorder: Could not find a submix to record audio from."));
            }
        }
    }

    UE_LOG(LogAudioVideoRecord, Log, TEXT("SimpleRecorder: Recording started."));
}
// =====================================================================
// StopRecording — tears down the session in the reverse order of
// StartRecording, then finalizes the WAV file and muxes the result.
// =====================================================================
void USimpleRecorder::StopRecording()
{
    if (!bIsRecording)
    {
        UE_LOG(LogAudioVideoRecord, Warning, TEXT("SimpleRecorder: Not recording."));
        return;
    }

    // Clear the flag FIRST so the render-thread and audio-thread callbacks
    // stop touching the pipe / audio buffer while we tear things down.
    bIsRecording = false;
    UE_LOG(LogAudioVideoRecord, Log, TEXT("SimpleRecorder: Stopping recording..."));

    // ─── 1. Unregister back-buffer delegate ─────────────────────────
    if (FSlateApplication::IsInitialized() && BackBufferDelegateHandle.IsValid())
    {
        FSlateApplication::Get().GetRenderer()->OnBackBufferReadyToPresent()
            .Remove(BackBufferDelegateHandle);
        BackBufferDelegateHandle.Reset();
    }

    // ─── 2. Close the FFmpeg video pipe ─────────────────────────────
    // Closing our write end signals EOF on FFmpeg's stdin; pclose/_pclose
    // then blocks until the encoder finishes writing video_only.mp4.
    if (FFmpegVideoPipe)
    {
#if PLATFORM_WINDOWS
        _pclose(FFmpegVideoPipe);
#else
        pclose(FFmpegVideoPipe);
#endif
        FFmpegVideoPipe = nullptr;
        UE_LOG(LogAudioVideoRecord, Log, TEXT(" FFmpeg video pipe closed → video_only.mp4 written."));
    }

    // ─── 3. Unregister audio submix listener ────────────────────────
    // Re-resolves the submix the same way StartRecording did.
    // NOTE(review): if TargetSubmix were changed mid-recording this would
    // unregister from the wrong submix — confirm callers never do that.
    if (GEngine && GEngine->GetMainAudioDevice())
    {
        FAudioDevice* AudioDevice = GEngine->GetMainAudioDevice().GetAudioDevice();
        if (AudioDevice)
        {
            USoundSubmix* Submix = TargetSubmix;
            if (!Submix)
            {
                Submix = &AudioDevice->GetMainSubmixObject();
            }
            if (Submix)
            {
                if (SubmixBridge.IsValid())
                {
                    AudioDevice->UnregisterSubmixBufferListener(SubmixBridge.ToSharedRef(), *Submix);
                    SubmixBridge.Reset();
                }
            }
        }
    }

    // ─── 4. Save audio to .wav ──────────────────────────────────────
    SaveAudioToWav();

    // ─── 5. Mux video + audio → final mp4 ──────────────────────────
    // Must run after both files above are finalized on disk.
    MuxAudioVideo();

    UE_LOG(LogAudioVideoRecord, Log, TEXT("SimpleRecorder: Recording stopped. All files saved."));
}
// =====================================================================
// OnBackBufferReady — called every presented frame on the RENDER thread
// =====================================================================
void USimpleRecorder::OnBackBufferReady(SWindow& SlateWindow, const FTextureRHIRef& BackBuffer)
{
    // Safety: if we already stopped, do nothing.
    if (!bIsRecording || !FFmpegVideoPipe)
    {
        return;
    }

    // We're on the render thread here.
    // Read the back-buffer pixels into a CPU-side array.
    // NOTE(review): ReadSurfaceData is a blocking GPU→CPU readback and the
    // fwrite below can block on the pipe — both stall the render thread
    // every presented frame. Acceptable for a simple recorder; confirm the
    // frame-time cost is tolerable at the target resolution.

    FRHICommandListImmediate& RHICmdList = FRHICommandListImmediate::Get();

    // Determine actual back-buffer size (may differ from our target).
    const FIntPoint BBSize = BackBuffer->GetSizeXY();

    // We need a rect matching our capture resolution, clamped to back-buffer.
    const int32 ReadW = FMath::Min(CaptureWidth, (int32)BBSize.X);
    const int32 ReadH = FMath::Min(CaptureHeight, (int32)BBSize.Y);

    FIntRect ReadRect(0, 0, ReadW, ReadH);

    // ReadSurfaceData puts pixels into a TArray<FColor> (BGRA, 8-bit per channel)
    // — the same layout FFmpeg was told to expect via -pix_fmt bgra.
    TArray<FColor> Pixels;
    Pixels.SetNumUninitialized(ReadW * ReadH);

    RHICmdList.ReadSurfaceData(
        BackBuffer.GetReference(),
        ReadRect,
        Pixels,
        FReadSurfaceDataFlags(RCM_UNorm)
    );

    // If the back-buffer is smaller than our target, we still write the full
    // frame size so FFmpeg doesn't choke. Pad with black if needed.
    if (ReadW == CaptureWidth && ReadH == CaptureHeight)
    {
        // Fast path — dimensions match exactly; one fwrite of the whole
        // frame (sizeof(FColor) == 4 bytes per pixel).
        fwrite(Pixels.GetData(), sizeof(FColor), Pixels.Num(), FFmpegVideoPipe);
    }
    else
    {
        // Slow path — need to pad to CaptureWidth x CaptureHeight.
        // Allocate a black frame (SetNumZeroed → all bytes zero).
        TArray<FColor> PaddedFrame;
        PaddedFrame.SetNumZeroed(CaptureWidth * CaptureHeight);

        // Copy each captured row into the top-left of the padded frame;
        // source rows are ReadW wide, destination rows CaptureWidth wide.
        for (int32 Row = 0; Row < ReadH; ++Row)
        {
            FMemory::Memcpy(
                &PaddedFrame[Row * CaptureWidth],
                &Pixels[Row * ReadW],
                ReadW * sizeof(FColor));
        }

        fwrite(PaddedFrame.GetData(), sizeof(FColor), PaddedFrame.Num(), FFmpegVideoPipe);
    }
}
// =====================================================================
|
||
|
|
// ISubmixBufferListener — audio callback (called from the audio thread)
|
||
|
|
// =====================================================================
|
||
|
|
void USimpleRecorder::OnNewSubmixBuffer(
|
||
|
|
const USoundSubmix* OwningSubmix,
|
||
|
|
float* AudioData,
|
||
|
|
int32 NumSamples,
|
||
|
|
int32 NumChannels,
|
||
|
|
const int32 SampleRate,
|
||
|
|
double AudioClock)
|
||
|
|
{
|
||
|
|
if (!bIsRecording)
|
||
|
|
{
|
||
|
|
return;
|
||
|
|
}
|
||
|
|
|
||
|
|
FScopeLock Lock(&AudioBufferCritSection);
|
||
|
|
|
||
|
|
// Store the format (may already be set, but harmless to update)
|
||
|
|
AudioSampleRate = SampleRate;
|
||
|
|
AudioNumChannels = NumChannels;
|
||
|
|
|
||
|
|
// Append the incoming samples
|
||
|
|
AudioBuffer.Append(AudioData, NumSamples);
|
||
|
|
}
// =====================================================================
// SaveAudioToWav — writes accumulated PCM float data as a 16-bit WAV
// =====================================================================
void USimpleRecorder::SaveAudioToWav()
{
    // Hold the lock for the whole conversion so the audio thread cannot
    // append while we read and then clear the buffer. Recording has already
    // been stopped by the caller, so contention here is brief.
    FScopeLock Lock(&AudioBufferCritSection);

    if (AudioBuffer.Num() == 0)
    {
        UE_LOG(LogAudioVideoRecord, Warning, TEXT("SimpleRecorder: No audio data captured."));
        return;
    }

    UE_LOG(LogAudioVideoRecord, Log,
        TEXT(" Saving audio: %d samples, %d channels, %d Hz"),
        AudioBuffer.Num(), AudioNumChannels, AudioSampleRate);

    // Convert float [-1,1] → int16 (scale, then truncate toward zero).
    TArray<int16> PCM16;
    PCM16.SetNumUninitialized(AudioBuffer.Num());
    for (int32 i = 0; i < AudioBuffer.Num(); ++i)
    {
        float Clamped = FMath::Clamp(AudioBuffer[i], -1.0f, 1.0f);
        PCM16[i] = static_cast<int16>(Clamped * 32767.0f);
    }

    // ── WAV header ──────────────────────────────────────────────────
    // Canonical 44-byte RIFF/WAVE header for PCM. All multi-byte fields are
    // little-endian; the raw-memory appends below rely on the host being
    // little-endian (true on the platforms this file targets).
    const int32 BitsPerSample = 16;
    const int32 BytesPerSample = BitsPerSample / 8;
    const int32 DataSize = PCM16.Num() * BytesPerSample;
    const int32 ByteRate = AudioSampleRate * AudioNumChannels * BytesPerSample;
    const int16 BlockAlign = static_cast<int16>(AudioNumChannels * BytesPerSample);

    TArray<uint8> WavFile;
    // Reserve enough space: 44-byte header + data.
    WavFile.Reserve(44 + DataSize);

    // Helper lambdas to append little-endian integers.
    auto Write4CC = [&](const char* FourCC)
    {
        WavFile.Append(reinterpret_cast<const uint8*>(FourCC), 4);
    };
    auto WriteInt32 = [&](int32 Val)
    {
        WavFile.Append(reinterpret_cast<const uint8*>(&Val), 4);
    };
    auto WriteInt16 = [&](int16 Val)
    {
        WavFile.Append(reinterpret_cast<const uint8*>(&Val), 2);
    };

    // RIFF header
    Write4CC("RIFF");
    WriteInt32(36 + DataSize); // File size - 8
    Write4CC("WAVE");

    // fmt sub-chunk
    Write4CC("fmt ");
    WriteInt32(16); // Sub-chunk size (PCM)
    WriteInt16(1); // Audio format = PCM
    WriteInt16(static_cast<int16>(AudioNumChannels));
    WriteInt32(AudioSampleRate);
    WriteInt32(ByteRate);
    WriteInt16(BlockAlign);
    WriteInt16(static_cast<int16>(BitsPerSample));

    // data sub-chunk
    Write4CC("data");
    WriteInt32(DataSize);
    WavFile.Append(reinterpret_cast<const uint8*>(PCM16.GetData()), DataSize);

    // Write to disk in one shot.
    if (FFileHelper::SaveArrayToFile(WavFile, *AudioFilePath))
    {
        UE_LOG(LogAudioVideoRecord, Log, TEXT(" audio_only.wav saved (%d bytes)."), WavFile.Num());
    }
    else
    {
        UE_LOG(LogAudioVideoRecord, Error, TEXT(" Failed to save audio_only.wav!"));
    }

    // Free memory — the captured floats are no longer needed.
    AudioBuffer.Empty();
}
// =====================================================================
|
||
|
|
// MuxAudioVideo — runs FFmpeg to combine video + audio into final mp4
|
||
|
|
// =====================================================================
|
||
|
|
void USimpleRecorder::MuxAudioVideo()
|
||
|
|
{
|
||
|
|
FString FFmpeg = GetFFmpegExecutable();
|
||
|
|
|
||
|
|
// ffmpeg -y -i video_only.mp4 -i audio_only.wav -c:v copy -c:a aac -shortest final.mp4
|
||
|
|
FString Cmd = FString::Printf(
|
||
|
|
TEXT("\"%s\" -y -i \"%s\" -i \"%s\" -c:v copy -c:a aac -shortest \"%s\""),
|
||
|
|
*FFmpeg,
|
||
|
|
*VideoFilePath,
|
||
|
|
*AudioFilePath,
|
||
|
|
*FinalFilePath
|
||
|
|
);
|
||
|
|
|
||
|
|
UE_LOG(LogAudioVideoRecord, Log, TEXT(" Mux cmd: %s"), *Cmd);
|
||
|
|
|
||
|
|
// Run synchronously (blocks until done)
|
||
|
|
int32 ReturnCode = -1;
|
||
|
|
FString StdOut;
|
||
|
|
FString StdErr;
|
||
|
|
FPlatformProcess::ExecProcess(
|
||
|
|
*FFmpeg,
|
||
|
|
*FString::Printf(
|
||
|
|
TEXT("-y -i \"%s\" -i \"%s\" -c:v copy -c:a aac -shortest \"%s\""),
|
||
|
|
*VideoFilePath, *AudioFilePath, *FinalFilePath),
|
||
|
|
&ReturnCode, &StdOut, &StdErr
|
||
|
|
);
|
||
|
|
|
||
|
|
if (ReturnCode == 0)
|
||
|
|
{
|
||
|
|
UE_LOG(LogAudioVideoRecord, Log,
|
||
|
|
TEXT(" Mux complete → final_video_with_audio.mp4 saved."));
|
||
|
|
}
|
||
|
|
else
|
||
|
|
{
|
||
|
|
UE_LOG(LogAudioVideoRecord, Error,
|
||
|
|
TEXT(" Mux failed (code %d). stderr: %s"), ReturnCode, *StdErr);
|
||
|
|
}
|
||
|
|
}