This commit is contained in:
John
2026-03-11 11:25:15 +05:30
parent bcd6f827d5
commit 0327abb1a0
77 changed files with 7689 additions and 0 deletions

View File

@@ -0,0 +1,19 @@
{
"FileVersion": 3,
"Version": 1,
"VersionName": "1.0",
"FriendlyName": "Gameplay Recorder",
"Description": "Cross-GPU gameplay recording plugin. Captures video (NVENC/AMF/QSV/x264) and audio, muxes via FFmpeg.",
"Category": "Recording",
"CreatedBy": "AudioVideoRecord Project",
"CanContainContent": false,
"IsBetaVersion": false,
"Installed": false,
"Modules": [
{
"Name": "GameplayRecorder",
"Type": "Runtime",
"LoadingPhase": "Default"
}
]
}

View File

@@ -0,0 +1,255 @@
// AudioMixerRecorder.cpp
// ─────────────────────────────────────────────────────────────────────
// Per-participant + mixed voice audio recording.
// ─────────────────────────────────────────────────────────────────────
#include "AudioMixerRecorder.h"
#include "RecorderModule.h"
#include "Misc/FileHelper.h"
#include "Misc/Paths.h"
#include "HAL/PlatformFileManager.h"
// =====================================================================
// Construction / Destruction
// =====================================================================
// Defaulted: every member (TArray, TMap, FCriticalSection) is self-managing,
// so no explicit setup or teardown is required here.
FAudioMixerRecorder::FAudioMixerRecorder() = default;
FAudioMixerRecorder::~FAudioMixerRecorder() = default;
// =====================================================================
// Start / Stop
// =====================================================================
void FAudioMixerRecorder::Start()
{
    // Accept samples from now on.
    bActive = true;

    // Wipe any audio left over from a previous session, taking each
    // buffer's own lock so feeds on other threads can't interleave.
    {
        FScopeLock MixGuard(&MixedLock);
        MixedBuffer.Empty();
    }
    {
        FScopeLock TrackGuard(&ParticipantLock);
        ParticipantBuffers.Empty();
    }

    UE_LOG(LogGameplayRecorder, Log, TEXT("AudioMixerRecorder: Started."));
}
void FAudioMixerRecorder::Stop()
{
    // Any samples fed after this point are dropped by the feed methods'
    // bActive guard; the buffers themselves are kept for saving.
    bActive = false;
    UE_LOG(LogGameplayRecorder, Log, TEXT("AudioMixerRecorder: Stopped."));
}
// =====================================================================
// FeedParticipantAudio — called per voice chunk per participant
// =====================================================================
/**
 * Push one chunk of interleaved float PCM for a specific participant.
 *
 * @param UserID       Participant's unique ID (keys the individual buffer).
 * @param Samples      Interleaved float PCM data.
 * @param InNumSamples Total float count (frames × channels).
 * @param bIsMuted     Muted audio is still stored per-participant but is
 *                     excluded from the combined mix.
 */
void FAudioMixerRecorder::FeedParticipantAudio(
    const FString& UserID,
    const float* Samples,
    int32 InNumSamples,
    bool bIsMuted)
{
    if (!bActive || !Samples || InNumSamples <= 0)
    {
        return;
    }

    // ── 1. Always save to the individual track (even if muted) ──────
    // This enables post-session analysis of what a muted person said.
    {
        FScopeLock Lock(&ParticipantLock);
        TArray<float>& IndividualBuf = ParticipantBuffers.FindOrAdd(UserID);
        IndividualBuf.Append(Samples, InNumSamples);
    }

    // ── 2. If NOT muted, also mix into the combined buffer ──────────
    if (!bIsMuted)
    {
        FScopeLock Lock(&MixedLock);

        // FIX: the previous guard `CurrentLen < CurrentLen + InNumSamples`
        // was tautologically true for InNumSamples > 0 and could only ever
        // be false via signed integer overflow (undefined behavior). The
        // chunk is appended at the tail, so the buffer simply grows by
        // exactly InNumSamples — no conditional needed.
        const int32 WriteOffset = MixedBuffer.Num();
        MixedBuffer.AddZeroed(InNumSamples);

        // Additive mix into the freshly zeroed tail region.
        //
        // NOTE(review): chunks are laid out in arrival order, so two
        // participants speaking at the same moment end up sequential, not
        // overlapped; true overlap would require timestamp-based write
        // offsets. Behavior intentionally kept as-is.
        for (int32 i = 0; i < InNumSamples; ++i)
        {
            MixedBuffer[WriteOffset + i] += Samples[i];
        }
    }
}
// =====================================================================
// FeedGameAudio — game-world audio from the submix
// =====================================================================
void FAudioMixerRecorder::FeedGameAudio(const float* Samples, int32 InNumSamples)
{
if (!bActive || !Samples || InNumSamples <= 0)
{
return;
}
FScopeLock Lock(&MixedLock);
const int32 CurrentLen = MixedBuffer.Num();
const int32 NewLen = CurrentLen + InNumSamples;
if (MixedBuffer.Num() < NewLen)
{
MixedBuffer.AddZeroed(NewLen - MixedBuffer.Num());
}
// Additive mix at the tail
for (int32 i = 0; i < InNumSamples; ++i)
{
MixedBuffer[CurrentLen + i] += Samples[i];
}
}
// =====================================================================
// SaveMixedWav — combined voice + game audio
// =====================================================================
bool FAudioMixerRecorder::SaveMixedWav(const FString& FilePath) const
{
FScopeLock Lock(&const_cast<FAudioMixerRecorder*>(this)->MixedLock);
if (MixedBuffer.Num() == 0)
{
UE_LOG(LogGameplayRecorder, Warning,
TEXT("AudioMixerRecorder: No mixed audio data to save."));
return false;
}
UE_LOG(LogGameplayRecorder, Log,
TEXT("AudioMixerRecorder: Saving mixed WAV (%d samples) → %s"),
MixedBuffer.Num(), *FilePath);
return WriteWavFile(FilePath, MixedBuffer, SampleRate, NumChannels);
}
// =====================================================================
// SaveIndividualWavs — one file per participant
// =====================================================================
/**
 * Save each participant's buffered audio as its own WAV file.
 * Files are named <OutputDir>/<SanitizedUserID>_audio.wav.
 *
 * @param OutputDir  Directory to write into (created if missing).
 * @return Number of tracks actually written.
 */
int32 FAudioMixerRecorder::SaveIndividualWavs(const FString& OutputDir) const
{
    FScopeLock Lock(&const_cast<FAudioMixerRecorder*>(this)->ParticipantLock);

    // Ensure the output directory exists before writing any files.
    IPlatformFile& PF = FPlatformFileManager::Get().GetPlatformFile();
    if (!PF.DirectoryExists(*OutputDir))
    {
        PF.CreateDirectoryTree(*OutputDir);
    }

    int32 SavedCount = 0;
    for (const auto& Pair : ParticipantBuffers)
    {
        const FString& UserID = Pair.Key;
        const TArray<float>& Buf = Pair.Value;
        if (Buf.Num() == 0)
        {
            continue;  // nothing captured for this participant
        }

        // FIX: the old code only replaced spaces despite claiming to handle
        // special characters; an ID containing e.g. ':' or '/' produced an
        // invalid Windows path and the save silently failed. Replace every
        // character that is illegal in filenames.
        FString SafeID = UserID;
        for (const TCHAR Illegal : { TEXT(' '), TEXT('\\'), TEXT('/'), TEXT(':'),
                                     TEXT('*'), TEXT('?'), TEXT('"'), TEXT('<'),
                                     TEXT('>'), TEXT('|') })
        {
            SafeID.ReplaceCharInline(Illegal, TEXT('_'));
        }

        FString Path = FPaths::Combine(OutputDir, SafeID + TEXT("_audio.wav"));
        if (WriteWavFile(Path, Buf, SampleRate, NumChannels))
        {
            UE_LOG(LogGameplayRecorder, Log,
                TEXT("  Individual WAV: %s (%d samples)"), *Path, Buf.Num());
            ++SavedCount;
        }
    }

    UE_LOG(LogGameplayRecorder, Log,
        TEXT("AudioMixerRecorder: Saved %d individual track(s)."), SavedCount);
    return SavedCount;
}
// =====================================================================
// WriteWavFile — static helper: float[] → 16-bit PCM WAV
// =====================================================================
bool FAudioMixerRecorder::WriteWavFile(
const FString& Path,
const TArray<float>& Samples,
int32 InSampleRate,
int32 InNumChannels)
{
// Convert float [-1,1] → int16
TArray<int16> PCM16;
PCM16.SetNumUninitialized(Samples.Num());
for (int32 i = 0; i < Samples.Num(); ++i)
{
float Clamped = FMath::Clamp(Samples[i], -1.0f, 1.0f);
PCM16[i] = static_cast<int16>(Clamped * 32767.0f);
}
const int32 BitsPerSample = 16;
const int32 BytesPerSample = BitsPerSample / 8;
const int32 DataSize = PCM16.Num() * BytesPerSample;
const int32 ByteRate = InSampleRate * InNumChannels * BytesPerSample;
const int16 BlockAlign = static_cast<int16>(InNumChannels * BytesPerSample);
TArray<uint8> Wav;
Wav.Reserve(44 + DataSize);
auto Write4CC = [&](const char* T) { Wav.Append(reinterpret_cast<const uint8*>(T), 4); };
auto WriteI32 = [&](int32 V) { Wav.Append(reinterpret_cast<const uint8*>(&V), 4); };
auto WriteI16 = [&](int16 V) { Wav.Append(reinterpret_cast<const uint8*>(&V), 2); };
// RIFF header
Write4CC("RIFF");
WriteI32(36 + DataSize);
Write4CC("WAVE");
// fmt chunk
Write4CC("fmt ");
WriteI32(16); // PCM sub-chunk size
WriteI16(1); // audio format = PCM
WriteI16(static_cast<int16>(InNumChannels));
WriteI32(InSampleRate);
WriteI32(ByteRate);
WriteI16(BlockAlign);
WriteI16(static_cast<int16>(BitsPerSample));
// data chunk
Write4CC("data");
WriteI32(DataSize);
Wav.Append(reinterpret_cast<const uint8*>(PCM16.GetData()), DataSize);
return FFileHelper::SaveArrayToFile(Wav, *Path);
}

View File

@@ -0,0 +1,98 @@
// AudioMixerRecorder.h
// ─────────────────────────────────────────────────────────────────────
// Records voice audio on a per-participant AND mixed basis.
//
// Two recording modes happen simultaneously:
// 1. PER-PARTICIPANT — each participant's mic feed is saved to its
// own buffer → individual WAV files (pilot_audio.wav, etc.)
// 2. MIXED — all non-muted participants' audio is summed into a
// single buffer → the combined audio_only.wav
//
// How audio arrives here:
// VoiceSessionManager captures mic data per participant and calls
// FeedParticipantAudio(). This class simply stores the samples.
//
// JS analogy: imagine multiple MediaStreams, each pushing data into
// their own ArrayBuffer, while you also mix them into one via a
// GainNode → destination pattern.
// ─────────────────────────────────────────────────────────────────────
#pragma once
#include "CoreMinimal.h"
#include "VoiceTypes.h"
// Forward declaration
class UParticipantManager;
class GAMEPLAYRECORDER_API FAudioMixerRecorder
{
public:
    FAudioMixerRecorder();
    ~FAudioMixerRecorder();

    // ── Configuration ───────────────────────────────────────────────
    int32 SampleRate = 48000;
    int32 NumChannels = 2; // stereo

    // ── Lifecycle ───────────────────────────────────────────────────
    /** Start a new recording session. Clears all buffers. */
    void Start();
    /** Mark recording as stopped. No more samples accepted. */
    void Stop();
    bool IsActive() const { return bActive; }

    // ── Feeding audio data ──────────────────────────────────────────
    /**
     * Push a chunk of float PCM samples for a specific participant.
     * Called from the voice capture thread or audio thread.
     *
     * @param UserID      Participant's unique ID.
     * @param Samples     Interleaved float PCM data.
     * @param NumSamples  Total number of float values (frames × channels).
     * @param bIsMuted    If true, samples go to the individual track but
     *                    NOT the mixed track (muted participants are still
     *                    recorded individually for post-analysis).
     */
    void FeedParticipantAudio(const FString& UserID, const float* Samples,
                              int32 NumSamples, bool bIsMuted);

    /**
     * Push game-world audio (submix) into the mixed buffer.
     * This lets the final audio_only.wav contain gameplay sound too.
     */
    void FeedGameAudio(const float* Samples, int32 NumSamples);

    // ── Saving ──────────────────────────────────────────────────────
    /** Save the combined (mixed) buffer as a WAV. Returns true on success. */
    bool SaveMixedWav(const FString& FilePath) const;

    /**
     * Save each participant's individual track as a separate WAV.
     * Files are named: <OutputDir>/<UserID>_audio.wav
     * Returns the number of tracks saved.
     */
    int32 SaveIndividualWavs(const FString& OutputDir) const;

private:
    bool bActive = false;

    // ── Buffers ─────────────────────────────────────────────────────
    /** Combined mix of all non-muted voices + game audio. */
    TArray<float> MixedBuffer;
    // FIX: mutable so the const Save* methods can take the lock without a
    // const_cast — locking is not a logical state change.
    mutable FCriticalSection MixedLock;

    /** Per-participant buffers: UserID → float samples. */
    TMap<FString, TArray<float>> ParticipantBuffers;
    mutable FCriticalSection ParticipantLock;

    // ── WAV writing helper ──────────────────────────────────────────
    /** float[] → 16-bit PCM WAV on disk. Returns true on success. */
    static bool WriteWavFile(const FString& Path, const TArray<float>& Samples,
                             int32 InSampleRate, int32 InNumChannels);
};

View File

@@ -0,0 +1,265 @@
// AudioRecorder.cpp
// ─────────────────────────────────────────────────────────────────────
// Audio capture via Unreal's submix listener + WAV export.
// ─────────────────────────────────────────────────────────────────────
#include "AudioRecorder.h"
#include "RecorderModule.h" // LogGameplayRecorder
#include "AudioDevice.h"
#include "AudioMixerDevice.h"
#include "Engine/Engine.h"
#include "Misc/FileHelper.h"
// =====================================================================
// FSubmixBridge — wraps callback in TSharedRef for UE 5.6 API
// =====================================================================
class FAudioRecorderSubmixBridge
    : public ISubmixBufferListener
{
public:
    explicit FAudioRecorderSubmixBridge(FAudioRecorder* InOwner) : Owner(InOwner) {}

    /** Forwards each audio-render-thread buffer straight to the recorder. */
    virtual void OnNewSubmixBuffer(
        const USoundSubmix* OwningSubmix,
        float* AudioData,
        int32 NumSamples,
        int32 NumChannels,
        const int32 SampleRate,
        double AudioClock) override
    {
        if (Owner != nullptr)
        {
            Owner->OnNewSubmixBuffer(OwningSubmix, AudioData, NumSamples, NumChannels, SampleRate, AudioClock);
        }
    }

    // Non-owning back-pointer to the recorder.
    // NOTE(review): assumes the recorder unregisters this bridge before it
    // is destroyed — confirm lifetime against FAudioRecorder::Stop().
    FAudioRecorder* Owner;
};
// =====================================================================
// Construction / Destruction
// =====================================================================
// Defaulted: members are self-initializing.
FAudioRecorder::FAudioRecorder() = default;
// Destructor stops recording so the submix listener is unregistered even
// if the caller forgot to call Stop(); Stop() is a no-op when inactive.
FAudioRecorder::~FAudioRecorder()
{
Stop();
}
// =====================================================================
// Start — register as a submix listener
// =====================================================================
// Register as a submix buffer listener and begin accumulating samples.
// Passing no submix listens on the master submix (captures everything).
void FAudioRecorder::Start(USoundSubmix* OptionalSubmix)
{
    if (bActive)
    {
        UE_LOG(LogGameplayRecorder, Warning, TEXT("AudioRecorder: Already active."));
        return;
    }

    // Fresh session: discard whatever the previous run captured.
    Reset();
    bActive = true;

    // Resolve the engine's main audio device; bail out when audio is
    // unavailable (e.g. -nosound or a dedicated server).
    if (!GEngine || !GEngine->GetMainAudioDevice())
    {
        UE_LOG(LogGameplayRecorder, Error, TEXT("AudioRecorder: No audio device available."));
        bActive = false;
        return;
    }

    FAudioDevice* Device = GEngine->GetMainAudioDevice().GetAudioDevice();
    if (!Device)
    {
        UE_LOG(LogGameplayRecorder, Error, TEXT("AudioRecorder: GetAudioDevice() returned null."));
        bActive = false;
        return;
    }

    // Fall back to the master submix when the caller gives us none.
    USoundSubmix* TargetSubmix = OptionalSubmix ? OptionalSubmix : &Device->GetMainSubmixObject();
    if (!TargetSubmix)
    {
        UE_LOG(LogGameplayRecorder, Error, TEXT("AudioRecorder: Could not find a submix."));
        bActive = false;
        return;
    }
    RegisteredSubmix = TargetSubmix;

    // The engine API takes a TSharedRef listener, so wrap this recorder
    // in a bridge object and hand that to the device.
    SubmixBridge = MakeShared<FAudioRecorderSubmixBridge, ESPMode::ThreadSafe>(this);
    Device->RegisterSubmixBufferListener(SubmixBridge.ToSharedRef(), *TargetSubmix);

    UE_LOG(LogGameplayRecorder, Log, TEXT("AudioRecorder: Listening on submix '%s'."),
        *TargetSubmix->GetName());
}
// =====================================================================
// Stop — unregister from the audio device
// =====================================================================
/**
 * Unregister from the audio device and release the bridge.
 * Safe to call repeatedly; no-op when not recording.
 */
void FAudioRecorder::Stop()
{
    if (!bActive)
    {
        return;
    }
    bActive = false;

    if (GEngine && GEngine->GetMainAudioDevice())
    {
        FAudioDevice* AudioDevice = GEngine->GetMainAudioDevice().GetAudioDevice();
        if (AudioDevice && RegisteredSubmix.IsValid() && SubmixBridge.IsValid())
        {
            AudioDevice->UnregisterSubmixBufferListener(SubmixBridge.ToSharedRef(), *RegisteredSubmix.Get());
            UE_LOG(LogGameplayRecorder, Log, TEXT("AudioRecorder: Unregistered from submix."));
        }
    }

    // FIX: always release the bridge, not only on the successful-unregister
    // path. Previously, if the audio device was already gone (e.g. engine
    // shutdown) SubmixBridge was never Reset() and kept holding a raw
    // Owner pointer back to this recorder.
    SubmixBridge.Reset();
    RegisteredSubmix = nullptr;
}
// =====================================================================
// ISubmixBufferListener callback — runs on the AUDIO RENDER thread
// =====================================================================
// ISubmixBufferListener callback — executes on the AUDIO RENDER thread.
// Stores the device format and appends the raw interleaved floats.
void FAudioRecorder::OnNewSubmixBuffer(
    const USoundSubmix* OwningSubmix,
    float* AudioData,
    int32 InNumSamples,
    int32 InNumChannels,
    const int32 InSampleRate,
    double AudioClock)
{
    if (!bActive)
    {
        return;  // recording stopped — drop the buffer
    }

    FScopeLock Guard(&BufferLock);

    // Record the device's format so SaveWav writes a correct header.
    SampleRate = InSampleRate;
    NumChannels = InNumChannels;

    Buffer.Append(AudioData, InNumSamples);
}
// =====================================================================
// SaveWav — write accumulated float audio as a 16-bit PCM WAV file
// =====================================================================
bool FAudioRecorder::SaveWav(const FString& FilePath)
{
FScopeLock Lock(&BufferLock);
if (Buffer.Num() == 0)
{
UE_LOG(LogGameplayRecorder, Warning, TEXT("AudioRecorder: No audio data to save."));
return false;
}
UE_LOG(LogGameplayRecorder, Log,
TEXT("AudioRecorder: Saving %d samples (%d ch, %d Hz) → %s"),
Buffer.Num(), NumChannels, SampleRate, *FilePath);
// ── Convert float [-1, 1] to int16 ─────────────────────────────
TArray<int16> PCM16;
PCM16.SetNumUninitialized(Buffer.Num());
for (int32 i = 0; i < Buffer.Num(); ++i)
{
float Clamped = FMath::Clamp(Buffer[i], -1.0f, 1.0f);
PCM16[i] = static_cast<int16>(Clamped * 32767.0f);
}
// ── Build WAV file in memory ────────────────────────────────────
//
// WAV format (44-byte header + raw PCM data):
//
// Offset Size Field
// ------ ---- -----
// 0 4 "RIFF"
// 4 4 file size - 8
// 8 4 "WAVE"
// 12 4 "fmt "
// 16 4 16 (sub-chunk size for PCM)
// 20 2 1 (audio format = PCM)
// 22 2 number of channels
// 24 4 sample rate
// 28 4 byte rate (SampleRate * NumChannels * BytesPerSample)
// 32 2 block align (NumChannels * BytesPerSample)
// 34 2 bits per sample (16)
// 36 4 "data"
// 40 4 data size in bytes
// 44 … raw PCM samples
//
const int32 BitsPerSample = 16;
const int32 BytesPerSample = BitsPerSample / 8;
const int32 DataSize = PCM16.Num() * BytesPerSample;
const int32 ByteRate = SampleRate * NumChannels * BytesPerSample;
const int16 BlockAlign = static_cast<int16>(NumChannels * BytesPerSample);
TArray<uint8> Wav;
Wav.Reserve(44 + DataSize);
auto Write4CC = [&](const char* Tag) { Wav.Append(reinterpret_cast<const uint8*>(Tag), 4); };
auto WriteInt32 = [&](int32 V) { Wav.Append(reinterpret_cast<const uint8*>(&V), 4); };
auto WriteInt16 = [&](int16 V) { Wav.Append(reinterpret_cast<const uint8*>(&V), 2); };
Write4CC("RIFF");
WriteInt32(36 + DataSize);
Write4CC("WAVE");
Write4CC("fmt ");
WriteInt32(16);
WriteInt16(1); // PCM format
WriteInt16(static_cast<int16>(NumChannels));
WriteInt32(SampleRate);
WriteInt32(ByteRate);
WriteInt16(BlockAlign);
WriteInt16(static_cast<int16>(BitsPerSample));
Write4CC("data");
WriteInt32(DataSize);
Wav.Append(reinterpret_cast<const uint8*>(PCM16.GetData()), DataSize);
// ── Write to disk ───────────────────────────────────────────────
if (FFileHelper::SaveArrayToFile(Wav, *FilePath))
{
UE_LOG(LogGameplayRecorder, Log,
TEXT("AudioRecorder: WAV saved (%d bytes, %.1f sec)."),
Wav.Num(),
static_cast<float>(Buffer.Num()) / (SampleRate * NumChannels));
return true;
}
UE_LOG(LogGameplayRecorder, Error, TEXT("AudioRecorder: Failed to write %s"), *FilePath);
return false;
}
// =====================================================================
// Reset / GetSampleCount
// =====================================================================
// Discard all accumulated audio. Locked so we never clear while the
// audio render thread is appending.
void FAudioRecorder::Reset()
{
    FScopeLock Guard(&BufferLock);
    Buffer.Empty();
}
/** Number of float samples captured so far. Thread-safe. */
int32 FAudioRecorder::GetSampleCount() const
{
    // FIX: Buffer is appended on the audio render thread; reading its
    // length unguarded was a data race. Take the same lock the writers
    // use (const_cast matches the file's convention for const readers).
    FScopeLock Lock(&const_cast<FAudioRecorder*>(this)->BufferLock);
    return Buffer.Num();
}

View File

@@ -0,0 +1,70 @@
// AudioRecorder.h
// ─────────────────────────────────────────────────────────────────────
// Listens to an Unreal Audio Mixer submix and accumulates raw PCM
// samples in memory. When recording stops, writes a standard WAV file.
//
// JS analogy: imagine an AudioWorkletProcessor that pushes every
// incoming audio buffer into a big array, then at the end converts
// the whole thing to a .wav Blob and downloads it.
// ─────────────────────────────────────────────────────────────────────
#pragma once
#include "CoreMinimal.h"
#include "ISubmixBufferListener.h"
#include "Sound/SoundSubmix.h"
// ─────────────────────────────────────────────────────────────────────
// FAudioRecorder (plain C++ — no UObject overhead)
//
// Uses a submix bridge to listen on the audio engine.
// ─────────────────────────────────────────────────────────────────────
class GAMEPLAYRECORDER_API FAudioRecorder
{
public:
    FAudioRecorder();
    virtual ~FAudioRecorder();

    // ── Public API ──────────────────────────────────────────────────
    /** Register with the audio device and start accumulating samples. */
    void Start(USoundSubmix* OptionalSubmix = nullptr);
    /** Unregister from the audio device. */
    void Stop();
    /** Write accumulated samples to a 16-bit PCM WAV file. Returns true on success. */
    bool SaveWav(const FString& FilePath);
    /** Discard all accumulated audio data. */
    void Reset();
    /** Number of float samples captured so far. */
    int32 GetSampleCount() const;

    // ── Audio callback (forwarded from bridge) ─────────────────────
    /** Runs on the audio render thread; appends the buffer under BufferLock. */
    void OnNewSubmixBuffer(
        const USoundSubmix* OwningSubmix,
        float* AudioData,
        int32 NumSamples,
        int32 NumChannels,
        const int32 SampleRate,
        double AudioClock);

private:
    bool bActive = false;

    // Format (updated from the first callback)
    int32 SampleRate = 48000;
    int32 NumChannels = 2;

    // Accumulated interleaved float samples
    TArray<float> Buffer;
    // FIX: mutable so const readers (GetSampleCount) can take the lock
    // without const_cast — locking is not a logical state change.
    mutable FCriticalSection BufferLock;

    // Keep track of the submix we registered with so we can unregister later.
    TWeakObjectPtr<USoundSubmix> RegisteredSubmix;

    // Submix bridge (UE 5.6 requires TSharedRef<ISubmixBufferListener>)
    TSharedPtr<ISubmixBufferListener, ESPMode::ThreadSafe> SubmixBridge;
};

View File

@@ -0,0 +1,266 @@
// FFmpegPipe.cpp
// ─────────────────────────────────────────────────────────────────────
// Implementation of the FFmpeg video-encoding pipe and mux helper.
// ─────────────────────────────────────────────────────────────────────
#include "FFmpegPipe.h"
#include "RecorderModule.h" // LogGameplayRecorder
#include "GenericPlatform/GenericPlatformMisc.h"
#include "HAL/PlatformProcess.h"
#include "RHI.h" // GRHIAdapterName
#include <cstdio> // _popen / _pclose (Windows)
// =====================================================================
// Construction / Destruction
// =====================================================================
// Defaulted: the pipe is opened lazily in Open().
FFFmpegPipe::FFFmpegPipe() = default;
FFFmpegPipe::~FFFmpegPipe()
{
// Safety net — close the pipe if the caller forgot.
// Close() is a no-op when the pipe is already closed.
Close();
}
// =====================================================================
// GPU Detection
// =====================================================================
// Pick the best hardware encoder by inspecting the GPU adapter string.
// GRHIAdapterName is a global FString Unreal fills in at RHI init, e.g.
// "NVIDIA GeForce RTX 4090", "AMD Radeon RX 7900 XTX", "Intel(R) Arc(TM) A770".
EHardwareEncoder FFFmpegPipe::DetectEncoder()
{
    const FString UpperName = GRHIAdapterName.ToUpper();
    UE_LOG(LogGameplayRecorder, Log, TEXT("GPU adapter: %s"), *GRHIAdapterName);

    // Vendor checks in priority order; substring match on the upper-cased name.
    if (UpperName.Contains(TEXT("NVIDIA")))
    {
        UE_LOG(LogGameplayRecorder, Log, TEXT(" → Selected encoder: NVENC (h264_nvenc)"));
        return EHardwareEncoder::NVENC;
    }
    if (UpperName.Contains(TEXT("AMD")) || UpperName.Contains(TEXT("RADEON")))
    {
        UE_LOG(LogGameplayRecorder, Log, TEXT(" → Selected encoder: AMF (h264_amf)"));
        return EHardwareEncoder::AMF;
    }
    if (UpperName.Contains(TEXT("INTEL")))
    {
        UE_LOG(LogGameplayRecorder, Log, TEXT(" → Selected encoder: QSV (h264_qsv)"));
        return EHardwareEncoder::QSV;
    }

    // Unknown vendor — take the CPU path.
    UE_LOG(LogGameplayRecorder, Warning,
        TEXT(" → No known HW encoder for this GPU. Falling back to libx264 (CPU)."));
    return EHardwareEncoder::Software;
}
// Human-readable encoder label for logs / UI.
FString FFFmpegPipe::EncoderToString(EHardwareEncoder Enc)
{
    if (Enc == EHardwareEncoder::NVENC)    { return TEXT("NVENC"); }
    if (Enc == EHardwareEncoder::AMF)      { return TEXT("AMF"); }
    if (Enc == EHardwareEncoder::QSV)      { return TEXT("QSV"); }
    if (Enc == EHardwareEncoder::Software) { return TEXT("Software (libx264)"); }
    return TEXT("Unknown");
}
// FFmpeg codec identifier for the -c:v argument.
FString FFFmpegPipe::EncoderToCodecName(EHardwareEncoder Enc)
{
    if (Enc == EHardwareEncoder::NVENC) { return TEXT("h264_nvenc"); }
    if (Enc == EHardwareEncoder::AMF)   { return TEXT("h264_amf"); }
    if (Enc == EHardwareEncoder::QSV)   { return TEXT("h264_qsv"); }
    // Software and any future unhandled value both map to the CPU encoder.
    return TEXT("libx264");
}
// =====================================================================
// Build the full FFmpeg command string
// =====================================================================
// Assemble the full FFmpeg command line:
//   ffmpeg -y -f rawvideo -pix_fmt bgra -video_size WxH -framerate FPS
//          -i -            ← raw frames arrive on stdin
//          <encoder flags> output.mp4
FString FFFmpegPipe::BuildCommand() const
{
    const FString Launcher = FFmpegExe.IsEmpty() ? TEXT("ffmpeg") : FFmpegExe;
    const FString Codec = EncoderToCodecName(ChosenEncoder);

    // Encoder-specific rate/quality switches — each HW family spells its
    // quality option differently.
    FString QualityArgs;
    switch (ChosenEncoder)
    {
        case EHardwareEncoder::NVENC:
            // NVENC presets: p1 (fastest) … p7 (best quality)
            QualityArgs = FString::Printf(TEXT("-c:v %s -preset p5 -b:v %s"), *Codec, *Bitrate);
            break;
        case EHardwareEncoder::AMF:
            // AMF uses -quality (speed / balanced / quality)
            QualityArgs = FString::Printf(TEXT("-c:v %s -quality balanced -b:v %s"), *Codec, *Bitrate);
            break;
        case EHardwareEncoder::QSV:
        case EHardwareEncoder::Software:
        default:
            // QSV and libx264 both take a standard -preset; only the codec
            // name (already baked into Codec) differs.
            QualityArgs = FString::Printf(TEXT("-c:v %s -preset medium -b:v %s"), *Codec, *Bitrate);
            break;
    }

    return FString::Printf(
        TEXT("\"%s\" -y -f rawvideo -pix_fmt bgra -video_size %dx%d -framerate %d -i - %s \"%s\""),
        *Launcher,
        Width, Height, Framerate,
        *QualityArgs,
        *VideoOutPath);
}
// =====================================================================
// Open — detect GPU, build command, launch FFmpeg child process
// =====================================================================
bool FFFmpegPipe::Open()
{
if (Pipe)
{
UE_LOG(LogGameplayRecorder, Warning, TEXT("FFmpegPipe: Already open."));
return true;
}
ChosenEncoder = DetectEncoder();
FString Cmd = BuildCommand();
UE_LOG(LogGameplayRecorder, Log, TEXT("FFmpegPipe: Opening pipe..."));
UE_LOG(LogGameplayRecorder, Log, TEXT(" Command: %s"), *Cmd);
// _popen on Windows creates a child process and returns a FILE*
// whose write side is connected to the child's stdin.
// "wb" = write, binary mode (no newline translation).
#if PLATFORM_WINDOWS
Pipe = _popen(TCHAR_TO_ANSI(*Cmd), "wb");
#else
Pipe = popen(TCHAR_TO_ANSI(*Cmd), "w");
#endif
if (!Pipe)
{
UE_LOG(LogGameplayRecorder, Error,
TEXT("FFmpegPipe: _popen failed! Is ffmpeg.exe on your system PATH?"));
return false;
}
UE_LOG(LogGameplayRecorder, Log, TEXT("FFmpegPipe: Pipe opened successfully."));
return true;
}
// =====================================================================
// WriteFrame — push one BGRA frame into FFmpeg's stdin
// =====================================================================
// Push one raw BGRA frame into FFmpeg's stdin.
// Returns true only when every byte was accepted by the pipe.
bool FFFmpegPipe::WriteFrame(const void* Data, int32 SizeBytes)
{
    const bool bWritable = (Pipe != nullptr) && (Data != nullptr) && (SizeBytes > 0);
    if (!bWritable)
    {
        return false;
    }

    // Serialize writers so concurrent threads cannot interleave partial frames.
    FScopeLock Guard(&WriteLock);
    const size_t BytesOut = fwrite(Data, 1, static_cast<size_t>(SizeBytes), Pipe);
    return BytesOut == static_cast<size_t>(SizeBytes);
}
// =====================================================================
// Close — flush the pipe, wait for FFmpeg to finish encoding
// =====================================================================
void FFFmpegPipe::Close()
{
if (!Pipe)
{
return;
}
UE_LOG(LogGameplayRecorder, Log, TEXT("FFmpegPipe: Closing pipe (waiting for FFmpeg)..."));
// _pclose waits for FFmpeg to finish writing the file and returns
// the child process exit code.
#if PLATFORM_WINDOWS
int32 ExitCode = _pclose(Pipe);
#else
int32 ExitCode = pclose(Pipe);
#endif
Pipe = nullptr;
if (ExitCode == 0)
{
UE_LOG(LogGameplayRecorder, Log, TEXT("FFmpegPipe: video_only.mp4 written successfully."));
}
else
{
UE_LOG(LogGameplayRecorder, Warning,
TEXT("FFmpegPipe: FFmpeg exited with code %d. Video file may be incomplete."), ExitCode);
}
}
// =====================================================================
// Mux — combine video + audio into a single MP4
// =====================================================================
bool FFFmpegPipe::Mux(const FString& VideoPath,
const FString& AudioPath,
const FString& OutputPath) const
{
FString Exe = FFmpegExe.IsEmpty() ? TEXT("ffmpeg") : FFmpegExe;
// Arguments: -y (overwrite) -i video -i audio -c:v copy -c:a aac output
FString Args = FString::Printf(
TEXT("-y -i \"%s\" -i \"%s\" -c:v copy -c:a aac \"%s\""),
*VideoPath, *AudioPath, *OutputPath
);
UE_LOG(LogGameplayRecorder, Log, TEXT("FFmpegPipe: Muxing..."));
UE_LOG(LogGameplayRecorder, Log, TEXT(" %s %s"), *Exe, *Args);
int32 ReturnCode = -1;
FString StdOut, StdErr;
// FPlatformProcess::ExecProcess runs a child process synchronously
// and captures its stdout/stderr. Blocks until done.
FPlatformProcess::ExecProcess(*Exe, *Args, &ReturnCode, &StdOut, &StdErr);
if (ReturnCode == 0)
{
UE_LOG(LogGameplayRecorder, Log, TEXT("FFmpegPipe: Mux complete → %s"), *OutputPath);
return true;
}
UE_LOG(LogGameplayRecorder, Error,
TEXT("FFmpegPipe: Mux failed (exit %d). stderr:\n%s"), ReturnCode, *StdErr);
return false;
}

View File

@@ -0,0 +1,85 @@
// FFmpegPipe.h
// ─────────────────────────────────────────────────────────────────────
// Owns the FFmpeg child-process pipe for video encoding and the
// post-recording mux step.
//
// Responsibilities:
// 1. Detect the GPU vendor → pick the best hardware encoder
// 2. Open an FFmpeg process whose stdin accepts raw BGRA frames
// 3. Accept WriteFrame() calls from the render thread
// 4. Close the pipe (finishes encoding video_only.mp4)
// 5. Run a second FFmpeg pass to mux video + audio → final .mp4
//
// Think of this like a Node.js child_process.spawn('ffmpeg', ...)
// where you pipe raw bytes into stdin.
// ─────────────────────────────────────────────────────────────────────
#pragma once
#include "CoreMinimal.h"
// ── Encoder enum ─────────────────────────────────────────────────────
// Each value maps to a specific FFmpeg codec name.
// Selected by DetectEncoder() from the GPU adapter name and consumed by
// EncoderToCodecName() when building the FFmpeg command line.
enum class EHardwareEncoder : uint8
{
NVENC, // NVIDIA → h264_nvenc
AMF, // AMD → h264_amf
QSV, // Intel → h264_qsv
Software // Fallback → libx264
};
// ─────────────────────────────────────────────────────────────────────
// FFFmpegPipe (plain C++ — no UObject / no garbage collection)
// ─────────────────────────────────────────────────────────────────────
class GAMEPLAYRECORDER_API FFFmpegPipe
{
public:
FFFmpegPipe();
// Destructor closes the pipe if the caller forgot (see Close()).
~FFFmpegPipe();
// ── Configuration (set BEFORE calling Open) ─────────────────────
int32 Width = 1920;
int32 Height = 1080;
int32 Framerate = 60;
FString Bitrate = TEXT("8M"); // passed to FFmpeg's -b:v
FString FFmpegExe; // empty → "ffmpeg" on PATH
FString VideoOutPath; // e.g. ".../Saved/Recordings/video_only.mp4"
// ── Lifecycle ───────────────────────────────────────────────────
/** Detect GPU, build FFmpeg command, open the pipe. Returns true on success. */
bool Open();
/** Write one raw BGRA frame (Width*Height*4 bytes). Thread-safe. */
bool WriteFrame(const void* Data, int32 SizeBytes);
/** Flush and close the pipe. Blocks until FFmpeg finishes writing the file. */
void Close();
/** Returns true while the pipe is open. */
bool IsOpen() const { return (Pipe != nullptr); }
// ── Mux helper ──────────────────────────────────────────────────
/** Runs: ffmpeg -y -i video -i audio -c:v copy -c:a aac output.mp4
 * Call AFTER Close(). Blocks until done.
 * Returns true if FFmpeg exits with code 0. */
bool Mux(const FString& VideoPath,
const FString& AudioPath,
const FString& OutputPath) const;
// ── GPU detection (public so you can query it) ──────────────────
/** Reads GRHIAdapterName and returns the best encoder. */
static EHardwareEncoder DetectEncoder();
/** Human-readable name of the encoder enum. */
static FString EncoderToString(EHardwareEncoder Enc);
/** FFmpeg codec string: "h264_nvenc", "h264_amf", etc. */
static FString EncoderToCodecName(EHardwareEncoder Enc);
private:
FString BuildCommand() const;
// Owning handle to the FFmpeg child's stdin; opened/closed via p(open|close).
// NOTE(review): copy construction/assignment are not deleted — copying an
// open pipe would double-close the FILE*. Consider deleting copy ops.
FILE* Pipe = nullptr;
EHardwareEncoder ChosenEncoder = EHardwareEncoder::Software;
// Serializes WriteFrame() callers so frames are never interleaved.
FCriticalSection WriteLock;
};

View File

@@ -0,0 +1,29 @@
// GameplayRecorder.Build.cs
// Module build rules for the GameplayRecorder plugin.
using UnrealBuildTool;
public class GameplayRecorder : ModuleRules
{
public GameplayRecorder(ReadOnlyTargetRules Target) : base(Target)
{
// Shared/explicit PCHs: fast iteration builds without monolithic PCH coupling.
PCHUsage = PCHUsageMode.UseExplicitOrSharedPCHs;
// Public dependencies — types from these modules are referenced by this
// module's public headers.
PublicDependencyModuleNames.AddRange(new string[]
{
"Core",
"CoreUObject",
"Engine",
"AudioMixer", // Submix buffer listener (audio capture)
"RHI", // FRHICommandListImmediate, ReadSurfaceData
"RenderCore", // Render-thread utilities
"Slate", // FSlateApplication (back-buffer hook)
"SlateCore" // Slate renderer types
});
// Private dependencies — used only inside this module's .cpp files.
PrivateDependencyModuleNames.AddRange(new string[]
{
"RHICore" // Additional RHI helpers
});
}
}

View File

@@ -0,0 +1,113 @@
// ParticipantManager.cpp
// ─────────────────────────────────────────────────────────────────────
// Roster management for voice-session participants.
// ─────────────────────────────────────────────────────────────────────
#include "ParticipantManager.h"
#include "RecorderModule.h"
// =====================================================================
// Add / Remove
// =====================================================================
bool UParticipantManager::AddParticipant(
const FString& UserID,
const FString& DisplayName,
EVoiceRole Role)
{
if (Contains(UserID))
{
UE_LOG(LogGameplayRecorder, Warning,
TEXT("ParticipantManager: '%s' already exists — skipping add."), *UserID);
return false;
}
Participants.Emplace(UserID, DisplayName, Role);
UE_LOG(LogGameplayRecorder, Log,
TEXT("ParticipantManager: Added '%s' (%s) as %s."),
*DisplayName, *UserID,
Role == EVoiceRole::Pilot ? TEXT("Pilot") : TEXT("Instructor"));
return true;
}
bool UParticipantManager::RemoveParticipant(const FString& UserID)
{
const int32 Idx = Participants.IndexOfByPredicate(
[&](const FVoiceParticipant& P) { return P.UserID == UserID; });
if (Idx == INDEX_NONE)
{
UE_LOG(LogGameplayRecorder, Warning,
TEXT("ParticipantManager: '%s' not found — cannot remove."), *UserID);
return false;
}
UE_LOG(LogGameplayRecorder, Log,
TEXT("ParticipantManager: Removed '%s'."), *Participants[Idx].DisplayName);
Participants.RemoveAt(Idx);
return true;
}
void UParticipantManager::RemoveAll()
{
UE_LOG(LogGameplayRecorder, Log,
TEXT("ParticipantManager: Clearing all %d participants."), Participants.Num());
Participants.Empty();
}
// =====================================================================
// Lookups
// =====================================================================
FVoiceParticipant* UParticipantManager::FindParticipant(const FString& UserID)
{
    // Linear scan — rosters are small, so this beats a map (cache locality).
    for (FVoiceParticipant& Entry : Participants)
    {
        if (Entry.UserID == UserID)
        {
            return &Entry;
        }
    }
    return nullptr;
}
const FVoiceParticipant* UParticipantManager::FindParticipant(const FString& UserID) const
{
    // Const twin of the lookup above — same linear scan, read-only view.
    for (const FVoiceParticipant& Entry : Participants)
    {
        if (Entry.UserID == UserID)
        {
            return &Entry;
        }
    }
    return nullptr;
}
bool UParticipantManager::GetParticipant(const FString& UserID, FVoiceParticipant& OutParticipant) const
{
const FVoiceParticipant* Found = FindParticipant(UserID);
if (Found)
{
OutParticipant = *Found;
return true;
}
return false;
}
bool UParticipantManager::Contains(const FString& UserID) const
{
return FindParticipant(UserID) != nullptr;
}
// =====================================================================
// Filtered queries
// =====================================================================
TArray<FVoiceParticipant> UParticipantManager::GetParticipantsByRole(EVoiceRole Role) const
{
    // Copy out every entry whose role matches.
    return Participants.FilterByPredicate(
        [Role](const FVoiceParticipant& P) { return P.Role == Role; });
}
TArray<FVoiceParticipant> UParticipantManager::GetAllParticipants() const
{
    // Returns a copy, so callers (including Blueprint) cannot mutate the roster.
    return Participants;
}

View File

@@ -0,0 +1,77 @@
// ParticipantManager.h
// ─────────────────────────────────────────────────────────────────────
// Manages the roster of voice-session participants.
//
// Responsibilities:
// • Add / remove participants
// • Look up by UserID
// • List all participants, filter by role
//
// JS analogy: this is like a Map<string, Participant> with helper
// methods — new Map(), map.set(id, p), map.get(id), map.delete(id).
// ─────────────────────────────────────────────────────────────────────
#pragma once
#include "CoreMinimal.h"
#include "UObject/Object.h"
#include "VoiceTypes.h"
#include "ParticipantManager.generated.h"
UCLASS(BlueprintType)
class GAMEPLAYRECORDER_API UParticipantManager : public UObject
{
    GENERATED_BODY()
public:
    // ── Roster management ───────────────────────────────────────────
    // NOTE(review): no internal locking — presumably all calls happen on
    // the game thread; confirm before touching this from audio callbacks.

    /** Add a participant. Returns false if UserID already exists. */
    UFUNCTION(BlueprintCallable, Category = "Voice|Participants")
    bool AddParticipant(const FString& UserID, const FString& DisplayName, EVoiceRole Role);
    /** Remove a participant by UserID. Returns false if not found. */
    UFUNCTION(BlueprintCallable, Category = "Voice|Participants")
    bool RemoveParticipant(const FString& UserID);
    /** Remove everyone. */
    UFUNCTION(BlueprintCallable, Category = "Voice|Participants")
    void RemoveAll();
    // ── Lookups ─────────────────────────────────────────────────────
    /** Find a participant by UserID. Returns nullptr if not found (C++ only).
     *  Pointer is invalidated by any add/remove — do not cache it. */
    FVoiceParticipant* FindParticipant(const FString& UserID);
    const FVoiceParticipant* FindParticipant(const FString& UserID) const;
    /** Blueprint-friendly version: returns true if found and fills OutParticipant. */
    UFUNCTION(BlueprintCallable, Category = "Voice|Participants")
    bool GetParticipant(const FString& UserID, FVoiceParticipant& OutParticipant) const;
    /** True if UserID is in the roster. */
    UFUNCTION(BlueprintCallable, BlueprintPure, Category = "Voice|Participants")
    bool Contains(const FString& UserID) const;
    /** Total participant count. */
    UFUNCTION(BlueprintCallable, BlueprintPure, Category = "Voice|Participants")
    int32 GetCount() const { return Participants.Num(); }
    // ── Filtered queries ────────────────────────────────────────────
    /** Get all participants with a given role (returns a copy). */
    UFUNCTION(BlueprintCallable, Category = "Voice|Participants")
    TArray<FVoiceParticipant> GetParticipantsByRole(EVoiceRole Role) const;
    /** Get a flat copy of every participant. */
    UFUNCTION(BlueprintCallable, Category = "Voice|Participants")
    TArray<FVoiceParticipant> GetAllParticipants() const;
    // ── Direct access (C++ only, for audio recording) ───────────────
    /** Mutable reference to internal storage — bypasses the API above;
     *  callers must not hold it across roster changes. */
    TArray<FVoiceParticipant>& GetParticipantsRef() { return Participants; }
private:
    /** Internal storage — a simple TArray. For < 20 participants a
     * linear search is faster than a TMap due to cache locality. */
    UPROPERTY()
    TArray<FVoiceParticipant> Participants;
};

View File

@@ -0,0 +1,377 @@
// RecorderManager.cpp
// ─────────────────────────────────────────────────────────────────────
// Orchestrates the full recording pipeline WITH voice:
// 1. Open FFmpeg pipe (video)
// 2. Hook Unreal back-buffer (frames → pipe)
// 3. Start voice session + audio capture
// 4. On stop: close pipe, save WAVs (mixed + individual), mux final MP4
// ─────────────────────────────────────────────────────────────────────
#include "RecorderManager.h"
#include "RecorderModule.h" // LogGameplayRecorder
#include "FFmpegPipe.h"
#include "AudioRecorder.h"
#include "VoiceSessionManager.h"
#include "AudioMixerRecorder.h"
#include "Framework/Application/SlateApplication.h"
#include "HAL/PlatformFileManager.h"
#include "Misc/Paths.h"
#include "RHI.h"
#include "RHICommandList.h"
#include "RHIResources.h"
#include "RenderingThread.h"
// =====================================================================
// Constructor
// =====================================================================
URecorderManager::URecorderManager()
{
    // Intentionally empty: VoiceSession is created lazily on first use
    // (see AddParticipant); video/audio sub-systems are built in StartRecording.
}
// =====================================================================
// BeginDestroy
// =====================================================================
void URecorderManager::BeginDestroy()
{
    // Safety net: if GC collects us mid-recording, stop cleanly first.
    // StopRecording() already deletes VideoPipe/AudioCapture and nulls them.
    if (bRecording)
    {
        StopRecording();
    }
    // Deleting nullptr is a no-op, so these are safe after StopRecording.
    delete VideoPipe;
    VideoPipe = nullptr;
    delete AudioCapture;
    AudioCapture = nullptr;
    // VoiceSession is a UObject sub-object — GC handles it,
    // but we should shut it down cleanly.
    if (VoiceSession)
    {
        VoiceSession->ShutdownSession();
    }
    Super::BeginDestroy();
}
// =====================================================================
// Resolve output directory & file paths
// =====================================================================
void URecorderManager::ResolveOutputPaths()
{
FString Dir = OutputDirectory;
if (Dir.IsEmpty())
{
Dir = FPaths::Combine(FPaths::ProjectSavedDir(), TEXT("Recordings"));
}
if (FPaths::IsRelative(Dir))
{
Dir = FPaths::ConvertRelativePathToFull(Dir);
}
IPlatformFile& PF = FPlatformFileManager::Get().GetPlatformFile();
if (!PF.DirectoryExists(*Dir))
{
PF.CreateDirectoryTree(*Dir);
}
OutputDir = Dir;
VideoPath = FPaths::Combine(Dir, TEXT("video_only.mp4"));
AudioPath = FPaths::Combine(Dir, TEXT("audio_only.wav"));
FinalPath = FPaths::Combine(Dir, TEXT("final_recording.mp4"));
}
// =====================================================================
// Voice / Participant wrappers
// =====================================================================
bool URecorderManager::AddParticipant(
    const FString& UserID, const FString& DisplayName, EVoiceRole Role)
{
    // Create the session manager on first use — a GC-owned sub-object.
    if (VoiceSession == nullptr)
    {
        VoiceSession = NewObject<UVoiceSessionManager>(this);
    }
    // InitializeSession() early-returns when already active, so calling it
    // unconditionally just covers the case where the user forgot.
    VoiceSession->InitializeSession();
    return VoiceSession->AddParticipant(UserID, DisplayName, Role);
}
bool URecorderManager::RemoveParticipant(const FString& UserID)
{
    // Without a session there is nobody to remove.
    return VoiceSession ? VoiceSession->RemoveParticipant(UserID) : false;
}
TArray<FVoiceParticipant> URecorderManager::GetAllParticipants() const
{
    // Empty roster when no voice session exists yet.
    return VoiceSession ? VoiceSession->GetAllParticipants()
                        : TArray<FVoiceParticipant>();
}
EVoicePermissionResult URecorderManager::MuteParticipant(
    const FString& RequestorUserID, const FString& TargetUserID)
{
    // No session → nothing can be muted; treat as a denied request.
    return VoiceSession
        ? VoiceSession->MuteParticipant(RequestorUserID, TargetUserID)
        : EVoicePermissionResult::Denied;
}
EVoicePermissionResult URecorderManager::UnmuteParticipant(
    const FString& RequestorUserID, const FString& TargetUserID)
{
    // No session → treat as a denied request.
    return VoiceSession
        ? VoiceSession->UnmuteParticipant(RequestorUserID, TargetUserID)
        : EVoicePermissionResult::Denied;
}
EVoicePermissionResult URecorderManager::ToggleMuteParticipant(
    const FString& RequestorUserID, const FString& TargetUserID)
{
    // No session → treat as a denied request.
    return VoiceSession
        ? VoiceSession->ToggleMuteParticipant(RequestorUserID, TargetUserID)
        : EVoicePermissionResult::Denied;
}
// =====================================================================
// StartRecording
// =====================================================================
void URecorderManager::StartRecording()
{
    // Re-entrancy guard: a second Start while live is a no-op.
    if (bRecording)
    {
        UE_LOG(LogGameplayRecorder, Warning, TEXT("RecorderManager: Already recording."));
        return;
    }
    // Fill OutputDir/VideoPath/AudioPath/FinalPath and create the directory.
    ResolveOutputPaths();
    UE_LOG(LogGameplayRecorder, Log, TEXT("═══════ TRAINING RECORDING START ═══════"));
    UE_LOG(LogGameplayRecorder, Log, TEXT("  Resolution : %d x %d @ %d fps"), Width, Height, Framerate);
    UE_LOG(LogGameplayRecorder, Log, TEXT("  Bitrate    : %s"), *Bitrate);
    UE_LOG(LogGameplayRecorder, Log, TEXT("  Video      : %s"), *VideoPath);
    UE_LOG(LogGameplayRecorder, Log, TEXT("  Audio      : %s"), *AudioPath);
    UE_LOG(LogGameplayRecorder, Log, TEXT("  Final      : %s"), *FinalPath);
    UE_LOG(LogGameplayRecorder, Log, TEXT("  Individual : %s"),
        bSaveIndividualVoiceTracks ? TEXT("YES") : TEXT("NO"));
    // ─── 1. Video: FFmpeg pipe ─────────────────────────────────────
    // delete of a possibly-stale pipe from a previous session; delete nullptr is safe.
    delete VideoPipe;
    VideoPipe = new FFFmpegPipe();
    VideoPipe->Width        = Width;
    VideoPipe->Height       = Height;
    VideoPipe->Framerate    = Framerate;
    VideoPipe->Bitrate      = Bitrate;
    VideoPipe->FFmpegExe    = FFmpegPath;
    VideoPipe->VideoOutPath = VideoPath;
    if (!VideoPipe->Open())
    {
        // Abort the whole session: without the pipe there is nowhere to send frames.
        UE_LOG(LogGameplayRecorder, Error,
            TEXT("RecorderManager: FFmpeg pipe failed — aborting."));
        delete VideoPipe;
        VideoPipe = nullptr;
        return;
    }
    // ─── 2. Back-buffer hook ───────────────────────────────────────
    // OnBackBufferReady will fire on the RENDER thread once per presented frame.
    if (FSlateApplication::IsInitialized())
    {
        BackBufferHandle =
            FSlateApplication::Get().GetRenderer()->OnBackBufferReadyToPresent()
                .AddUObject(this, &URecorderManager::OnBackBufferReady);
    }
    // ─── 3. Voice session + audio capture ──────────────────────────
    // Only runs if AddParticipant() created a session beforehand.
    if (VoiceSession)
    {
        VoiceSession->InitializeSession(); // safe to call twice
        VoiceSession->StartVoiceCapture(AudioSubmix);
        // Log participants
        TArray<FVoiceParticipant> All = VoiceSession->GetAllParticipants();
        UE_LOG(LogGameplayRecorder, Log, TEXT("  Participants: %d"), All.Num());
        for (const FVoiceParticipant& P : All)
        {
            UE_LOG(LogGameplayRecorder, Log, TEXT("    [%s] %s (%s)%s"),
                *P.UserID, *P.DisplayName,
                P.Role == EVoiceRole::Pilot ? TEXT("Pilot") : TEXT("Instructor"),
                P.bMuted ? TEXT(" MUTED") : TEXT(""));
        }
    }
    // ─── 4. Also start the legacy game-audio recorder as a fallback ─
    // (captures game-world sounds even if VoiceSession has no participants)
    delete AudioCapture;
    AudioCapture = new FAudioRecorder();
    AudioCapture->Start(AudioSubmix);
    // Set last: the render-thread callback checks this flag before writing frames.
    bRecording = true;
    UE_LOG(LogGameplayRecorder, Log, TEXT("RecorderManager: Recording is LIVE."));
}
// =====================================================================
// StopRecording
// =====================================================================
void URecorderManager::StopRecording()
{
    if (!bRecording)
    {
        UE_LOG(LogGameplayRecorder, Warning, TEXT("RecorderManager: Not recording."));
        return;
    }
    // Clear the flag first so the render-thread callback stops writing frames.
    bRecording = false;
    UE_LOG(LogGameplayRecorder, Log, TEXT("═══════ TRAINING RECORDING STOP ═══════"));
    // ─── 1. Unhook back-buffer ────────────────────────────────────
    if (FSlateApplication::IsInitialized() && BackBufferHandle.IsValid())
    {
        FSlateApplication::Get().GetRenderer()->OnBackBufferReadyToPresent()
            .Remove(BackBufferHandle);
        BackBufferHandle.Reset();
        UE_LOG(LogGameplayRecorder, Log, TEXT("  Back-buffer hook removed."));
    }
    // ─── 2. Close video pipe ──────────────────────────────────────
    // Close() blocks until FFmpeg finishes writing video_only.mp4.
    if (VideoPipe)
    {
        VideoPipe->Close();
        UE_LOG(LogGameplayRecorder, Log, TEXT("  Video pipe closed."));
    }
    // ─── 3. Stop voice capture + save voice audio ─────────────────
    bool bVoiceAudioSaved = false;
    if (VoiceSession)
    {
        VoiceSession->StopVoiceCapture();
        FAudioMixerRecorder* MixRec = VoiceSession->GetAudioMixerRecorder();
        if (MixRec)
        {
            // Save the combined (mixed) voice + game audio
            bVoiceAudioSaved = MixRec->SaveMixedWav(AudioPath);
            // Optionally save individual participant tracks
            if (bSaveIndividualVoiceTracks)
            {
                int32 Count = MixRec->SaveIndividualWavs(OutputDir);
                UE_LOG(LogGameplayRecorder, Log,
                    TEXT("  Saved %d individual voice track(s)."), Count);
            }
        }
    }
    // ─── 4. Fallback: if voice recorder had no data, use game-audio ─
    // Either way AudioCapture is stopped; it only writes the WAV when the
    // voice path produced nothing (so we never overwrite voice audio).
    if (!bVoiceAudioSaved && AudioCapture)
    {
        AudioCapture->Stop();
        AudioCapture->SaveWav(AudioPath);
        UE_LOG(LogGameplayRecorder, Log,
            TEXT("  Game audio saved (no voice data available)."));
    }
    else if (AudioCapture)
    {
        AudioCapture->Stop();
    }
    // ─── 5. Mux video + audio → final_recording.mp4 ──────────────
    // Blocking call; requires the pipe to have been closed in step 2.
    if (VideoPipe)
    {
        VideoPipe->Mux(VideoPath, AudioPath, FinalPath);
    }
    // ─── 6. Clean up ─────────────────────────────────────────────
    delete VideoPipe;
    VideoPipe = nullptr;
    delete AudioCapture;
    AudioCapture = nullptr;
    UE_LOG(LogGameplayRecorder, Log, TEXT("RecorderManager: All files saved. Done."));
}
// =====================================================================
// OnBackBufferReady — RENDER THREAD callback
// =====================================================================
//
// This function runs on Unreal's render thread — NOT the game thread.
// It is called once per presented frame.
//
// Pipeline:
// GPU back-buffer texture
// → ReadSurfaceData (GPU → CPU copy, returns TArray<FColor>)
// → fwrite into FFmpeg stdin pipe
// → FFmpeg encodes the frame with the chosen HW encoder
//
// Performance notes:
// • ReadSurfaceData stalls the GPU pipeline (sync readback).
// At 1080p60 this is ~8 MB/frame × 60 = ~480 MB/s of PCI-e traffic.
// Hardware encoding (NVENC/AMF/QSV) adds <2% GPU load.
//    • For higher performance, you could use asynchronous GPU readback
//      (e.g. FRHIGPUTextureReadback) — but that adds significant complexity.
// This synchronous approach is simpler and fine for most use-cases.
void URecorderManager::OnBackBufferReady(
    SWindow& Window,
    const FTextureRHIRef& BackBuffer)
{
    // Cheap early-out on the render thread when not recording or pipe is gone.
    if (!bRecording || !VideoPipe || !VideoPipe->IsOpen())
    {
        return;
    }
    FRHICommandListImmediate& RHICmdList = FRHICommandListImmediate::Get();
    // The back-buffer might be larger or smaller than our target resolution
    // (e.g. if the window is resized). Clamp the read region.
    const FIntPoint BBSize = BackBuffer->GetSizeXY();
    const int32 ReadW = FMath::Min(Width,  static_cast<int32>(BBSize.X));
    const int32 ReadH = FMath::Min(Height, static_cast<int32>(BBSize.Y));
    const FIntRect ReadRect(0, 0, ReadW, ReadH);
    // ReadSurfaceData: copies pixels from the GPU texture to a CPU array.
    // Each pixel is an FColor (BGRA, 4 bytes).
    // This is a BLOCKING call — the CPU waits for the GPU to finish.
    TArray<FColor> Pixels;
    Pixels.SetNumUninitialized(ReadW * ReadH);
    RHICmdList.ReadSurfaceData(
        BackBuffer.GetReference(),
        ReadRect,
        Pixels,
        FReadSurfaceDataFlags(RCM_UNorm)
    );
    // Expected frame size for FFmpeg — the pipe always receives Width*Height*4 bytes.
    const int32 ExpectedPixels = Width * Height;
    const int32 FrameBytes = ExpectedPixels * sizeof(FColor);
    if (ReadW == Width && ReadH == Height)
    {
        // Fast path — dimensions match exactly
        VideoPipe->WriteFrame(Pixels.GetData(), FrameBytes);
    }
    else
    {
        // Slow path — back-buffer is smaller than target.
        // Pad with black so FFmpeg always gets a full-size frame.
        // (SetNumZeroed yields BGRA 0,0,0,0 — i.e. black.)
        TArray<FColor> Padded;
        Padded.SetNumZeroed(ExpectedPixels);
        // Copy row by row: source rows are ReadW wide, destination rows Width wide.
        for (int32 Row = 0; Row < ReadH; ++Row)
        {
            FMemory::Memcpy(
                &Padded[Row * Width],
                &Pixels[Row * ReadW],
                ReadW * sizeof(FColor));
        }
        VideoPipe->WriteFrame(Padded.GetData(), FrameBytes);
    }
}

View File

@@ -0,0 +1,161 @@
// RecorderManager.h
// ─────────────────────────────────────────────────────────────────────
// Top-level orchestrator for gameplay recording WITH voice support.
//
// This is a UObject-based class so Unreal's garbage collector manages
// its lifetime and Blueprint can call its functions.
//
// JS analogy: think of this as the "RecordingService" singleton that
// owns a VideoEncoder (FFmpegPipe), a VoiceSessionManager (voice chat
// + per-participant audio), and an AudioRecorder (game-world audio).
// It exposes start/stop/mute methods to Blueprint.
//
// Outputs (in <Project>/Saved/Recordings/ by default):
// video_only.mp4 — HW-encoded H.264
// audio_only.wav — mixed voice + game audio
// final_recording.mp4 — muxed video + audio
// <UserID>_audio.wav (optional) — individual voice tracks
// ─────────────────────────────────────────────────────────────────────
#pragma once
#include "CoreMinimal.h"
#include "UObject/Object.h"
#include "VoiceTypes.h"
#include "RecorderManager.generated.h"
// Forward declarations (avoid including heavy headers in the .h)
class FFFmpegPipe;
class FAudioRecorder;
class USoundSubmix;
class UVoiceSessionManager;
// ─────────────────────────────────────────────────────────────────────
// URecorderManager
// ─────────────────────────────────────────────────────────────────────
UCLASS(BlueprintType)
class GAMEPLAYRECORDER_API URecorderManager : public UObject
{
    GENERATED_BODY()
public:
    URecorderManager();
    // ═════════════════════════════════════════════════════════════════
    //  RECORDING API
    //  NOTE(review): Start/Stop appear designed for game-thread use —
    //  confirm before calling from other threads.
    // ═════════════════════════════════════════════════════════════════
    /** Begins capturing video frames, game audio, AND voice. */
    UFUNCTION(BlueprintCallable, Category = "GameplayRecorder")
    void StartRecording();
    /** Stops everything, saves WAVs, muxes final_recording.mp4. Blocks until muxing finishes. */
    UFUNCTION(BlueprintCallable, Category = "GameplayRecorder")
    void StopRecording();
    /** True while a recording session is active. */
    UFUNCTION(BlueprintCallable, BlueprintPure, Category = "GameplayRecorder")
    bool IsRecording() const { return bRecording; }
    // ═════════════════════════════════════════════════════════════════
    //  VOICE / PARTICIPANT API  (delegates to VoiceSessionManager)
    // ═════════════════════════════════════════════════════════════════
    /** Add a participant to the voice session (call BEFORE StartRecording).
     *  Lazily creates and initializes the voice session on first call. */
    UFUNCTION(BlueprintCallable, Category = "GameplayRecorder|Voice")
    bool AddParticipant(const FString& UserID, const FString& DisplayName, EVoiceRole Role);
    /** Remove a participant. Returns false if no session exists or ID unknown. */
    UFUNCTION(BlueprintCallable, Category = "GameplayRecorder|Voice")
    bool RemoveParticipant(const FString& UserID);
    /** Get the full roster (empty array if no session exists). */
    UFUNCTION(BlueprintCallable, Category = "GameplayRecorder|Voice")
    TArray<FVoiceParticipant> GetAllParticipants() const;
    /**
     * Mute TargetUserID on behalf of RequestorUserID.
     * Enforces role permissions (Pilot can only self-mute, Instructor can mute anyone).
     */
    UFUNCTION(BlueprintCallable, Category = "GameplayRecorder|Voice")
    EVoicePermissionResult MuteParticipant(
        const FString& RequestorUserID,
        const FString& TargetUserID);
    /** Unmute TargetUserID on behalf of RequestorUserID. */
    UFUNCTION(BlueprintCallable, Category = "GameplayRecorder|Voice")
    EVoicePermissionResult UnmuteParticipant(
        const FString& RequestorUserID,
        const FString& TargetUserID);
    /** Toggle mute state. */
    UFUNCTION(BlueprintCallable, Category = "GameplayRecorder|Voice")
    EVoicePermissionResult ToggleMuteParticipant(
        const FString& RequestorUserID,
        const FString& TargetUserID);
    // ═════════════════════════════════════════════════════════════════
    //  SETTINGS  (set BEFORE calling StartRecording)
    // ═════════════════════════════════════════════════════════════════
    /** Capture width in pixels. */
    UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "GameplayRecorder|Settings")
    int32 Width = 1920;
    /** Capture height in pixels. */
    UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "GameplayRecorder|Settings")
    int32 Height = 1080;
    /** Target frames per second. */
    UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "GameplayRecorder|Settings")
    int32 Framerate = 60;
    /** Video bitrate (FFmpeg string, e.g. "8M", "12M", "20M"). */
    UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "GameplayRecorder|Settings")
    FString Bitrate = TEXT("8M");
    /** Output directory. Empty = <ProjectDir>/Saved/Recordings. */
    UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "GameplayRecorder|Settings")
    FString OutputDirectory;
    /** Full path to ffmpeg.exe. Empty = use the system PATH. */
    UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "GameplayRecorder|Settings")
    FString FFmpegPath;
    /** Optional: specific submix to record. Null = master (everything). */
    UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "GameplayRecorder|Settings")
    USoundSubmix* AudioSubmix = nullptr;
    /** If true, save separate WAV files per participant (for post-analysis). */
    UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "GameplayRecorder|Settings")
    bool bSaveIndividualVoiceTracks = true;
protected:
    /** Safety net: if the object is garbage-collected while recording, stop. */
    virtual void BeginDestroy() override;
private:
    // ── Internal helpers ────────────────────────────────────────────
    /** Fills OutputDir/VideoPath/AudioPath/FinalPath and creates the directory. */
    void ResolveOutputPaths();
    /** Render-thread callback from Unreal's Slate renderer. */
    void OnBackBufferReady(SWindow& Window, const FTextureRHIRef& BackBuffer);
    // ── State ───────────────────────────────────────────────────────
    bool bRecording = false;           // also read by the render-thread callback
    FDelegateHandle BackBufferHandle;  // handle for unhooking in StopRecording
    // Sub-systems
    FFFmpegPipe*    VideoPipe    = nullptr; // raw ptr — we manage lifetime (deleted in StopRecording/BeginDestroy)
    FAudioRecorder* AudioCapture = nullptr; // game-world submix fallback (same ownership)
    /** The voice session. UObject — GC-managed (created as sub-object). */
    UPROPERTY()
    UVoiceSessionManager* VoiceSession = nullptr;
    // Resolved file paths for this session (set by ResolveOutputPaths)
    FString OutputDir;
    FString VideoPath;
    FString AudioPath;
    FString FinalPath;
};

View File

@@ -0,0 +1,20 @@
// RecorderModule.cpp
#include "RecorderModule.h"
// Define the log so every .cpp in this plugin can use UE_LOG(LogGameplayRecorder, ...)
DEFINE_LOG_CATEGORY(LogGameplayRecorder);
void FGameplayRecorderModule::StartupModule()
{
    // Nothing to initialize yet — just announce the plugin is live.
    UE_LOG(LogGameplayRecorder, Log, TEXT("GameplayRecorder plugin loaded."));
}
void FGameplayRecorderModule::ShutdownModule()
{
    // No owned resources to release — recording objects are GC-managed elsewhere.
    UE_LOG(LogGameplayRecorder, Log, TEXT("GameplayRecorder plugin unloaded."));
}
// Tell Unreal "this module exists and here is the class that manages it"
// This macro connects the module name string to the C++ class.
IMPLEMENT_MODULE(FGameplayRecorderModule, GameplayRecorder)

View File

@@ -0,0 +1,22 @@
// RecorderModule.h
// IModuleInterface implementation for the GameplayRecorder plugin.
// This is the "entry point" Unreal uses to load the plugin — think of it
// like the main() of a Node.js package, but for an Unreal module.
#pragma once
#include "CoreMinimal.h"
#include "Modules/ModuleManager.h"
// Log category shared by every file in this plugin
DECLARE_LOG_CATEGORY_EXTERN(LogGameplayRecorder, Log, All);
// Stateless module shim — all real work lives in the recorder classes.
class FGameplayRecorderModule : public IModuleInterface
{
public:
    /** Called when the engine loads this plugin. Logs only. */
    virtual void StartupModule() override;
    /** Called when the engine unloads this plugin. Logs only. */
    virtual void ShutdownModule() override;
};

View File

@@ -0,0 +1,106 @@
// VoicePermissionSystem.cpp
// ─────────────────────────────────────────────────────────────────────
// Implementation of role-based voice permission checks.
// ─────────────────────────────────────────────────────────────────────
#include "VoicePermissionSystem.h"
#include "RecorderModule.h"
// =====================================================================
// CanMute
// =====================================================================
EVoicePermissionResult UVoicePermissionSystem::CanMute(
    const FVoiceParticipant& Requestor,
    const FVoiceParticipant& Target)
{
    // Dispatch on the requestor's role; unknown roles fall through to Denied.
    switch (Requestor.Role)
    {
    case EVoiceRole::Instructor:
        // Instructors may mute anyone.
        return EVoicePermissionResult::Allowed;

    case EVoiceRole::Pilot:
        // Pilots are limited to self-mute.
        if (Requestor.UserID == Target.UserID)
        {
            return EVoicePermissionResult::Allowed;
        }
        UE_LOG(LogGameplayRecorder, Warning,
            TEXT("Permission DENIED: Pilot '%s' tried to mute '%s' — pilots can only mute themselves."),
            *Requestor.DisplayName, *Target.DisplayName);
        return EVoicePermissionResult::Denied;

    default:
        return EVoicePermissionResult::Denied;
    }
}
// =====================================================================
// CanUnmute — same rules as CanMute (symmetric)
// =====================================================================
EVoicePermissionResult UVoicePermissionSystem::CanUnmute(
    const FVoiceParticipant& Requestor,
    const FVoiceParticipant& Target)
{
    // Mirrors CanMute — the rules are symmetric by design.
    switch (Requestor.Role)
    {
    case EVoiceRole::Instructor:
        // Instructors may unmute anyone.
        return EVoicePermissionResult::Allowed;

    case EVoiceRole::Pilot:
        // Pilots are limited to self-unmute.
        if (Requestor.UserID == Target.UserID)
        {
            return EVoicePermissionResult::Allowed;
        }
        UE_LOG(LogGameplayRecorder, Warning,
            TEXT("Permission DENIED: Pilot '%s' tried to unmute '%s' — pilots can only unmute themselves."),
            *Requestor.DisplayName, *Target.DisplayName);
        return EVoicePermissionResult::Denied;

    default:
        return EVoicePermissionResult::Denied;
    }
}
// =====================================================================
// GetDenialReason — human-readable explanation for UI / logs
// =====================================================================
FString UVoicePermissionSystem::GetDenialReason(
    const FVoiceParticipant& Requestor,
    const FVoiceParticipant& Target,
    bool bIsMuteAction)
{
    const FString Action = bIsMuteAction ? TEXT("mute") : TEXT("unmute");

    // Re-run the matching permission check.
    const EVoicePermissionResult Verdict = bIsMuteAction
        ? CanMute(Requestor, Target)
        : CanUnmute(Requestor, Target);

    // Allowed → empty string, meaning "no denial reason".
    if (Verdict == EVoicePermissionResult::Allowed)
    {
        return FString();
    }

    // Pilot-specific explanation first, generic fallback otherwise.
    if (Requestor.Role == EVoiceRole::Pilot)
    {
        return FString::Printf(
            TEXT("Pilots can only %s themselves. You cannot %s %s."),
            *Action, *Action, *Target.DisplayName);
    }
    return FString::Printf(
        TEXT("You do not have permission to %s %s."),
        *Action, *Target.DisplayName);
}

View File

@@ -0,0 +1,60 @@
// VoicePermissionSystem.h
// ─────────────────────────────────────────────────────────────────────
// Stateless permission checker for voice operations.
//
// This class encapsulates the business rules:
// • Pilots can only mute/unmute THEMSELVES
// • Instructors can mute/unmute ANYONE
//
// JS analogy: think of this like a pure-function middleware:
// function canMute(requestor, target) { ... return true/false; }
// We put it in a UObject so Blueprint can call it too.
// ─────────────────────────────────────────────────────────────────────
#pragma once
#include "CoreMinimal.h"
#include "UObject/Object.h"
#include "VoiceTypes.h"
#include "VoicePermissionSystem.generated.h"
UCLASS(BlueprintType)
class GAMEPLAYRECORDER_API UVoicePermissionSystem : public UObject
{
    GENERATED_BODY()
public:
    // ── Permission checks (static — no instance state needed) ───────
    // All checks are pure functions of the two participants passed in.
    /**
     * Can the requestor mute the target participant?
     *
     * Rules:
     *   Pilot      → only if TargetID == RequestorID (muting yourself)
     *   Instructor → always allowed
     * Any other role value → denied.
     */
    UFUNCTION(BlueprintCallable, Category = "Voice|Permissions")
    static EVoicePermissionResult CanMute(
        const FVoiceParticipant& Requestor,
        const FVoiceParticipant& Target);
    /**
     * Can the requestor unmute the target participant?
     *
     * Same rules as CanMute — symmetric for simplicity.
     */
    UFUNCTION(BlueprintCallable, Category = "Voice|Permissions")
    static EVoicePermissionResult CanUnmute(
        const FVoiceParticipant& Requestor,
        const FVoiceParticipant& Target);
    /**
     * Human-readable reason string (useful for UI toasts / logs).
     * Returns "" if allowed, or an explanation like "Pilots can only mute themselves."
     * Internally re-runs CanMute/CanUnmute, so it never disagrees with them.
     */
    UFUNCTION(BlueprintCallable, Category = "Voice|Permissions")
    static FString GetDenialReason(
        const FVoiceParticipant& Requestor,
        const FVoiceParticipant& Target,
        bool bIsMuteAction);
};

View File

@@ -0,0 +1,355 @@
// VoiceSessionManager.cpp
// ─────────────────────────────────────────────────────────────────────
// Orchestrates voice sessions: participants, permissions, capture,
// and feeding audio to the mixer recorder.
// ─────────────────────────────────────────────────────────────────────
#include "VoiceSessionManager.h"
#include "RecorderModule.h"
#include "ParticipantManager.h"
#include "VoicePermissionSystem.h"
#include "AudioMixerRecorder.h"
#include "AudioDevice.h"
#include "AudioMixerDevice.h"
#include "Engine/Engine.h"
// =====================================================================
// FVoiceSessionSubmixBridge — forwards ISubmixBufferListener callbacks to the manager
// =====================================================================
// Bridge from the audio-render-thread submix callback to the UObject manager.
// Holds only a weak pointer, so the callback safely no-ops if the manager
// has been garbage-collected. Runs on the audio mixer thread — do not touch
// game-thread-only state here.
class FVoiceSessionSubmixBridge
    : public ISubmixBufferListener
{
public:
    // Weak on purpose: the bridge must never keep the manager alive.
    TWeakObjectPtr<UVoiceSessionManager> Owner;
    FVoiceSessionSubmixBridge(UVoiceSessionManager* InOwner) : Owner(InOwner) {}
    /** Forwards each submix buffer to the manager if it is still alive. */
    virtual void OnNewSubmixBuffer(
        const USoundSubmix* OwningSubmix,
        float* AudioData,
        int32  NumSamples,
        int32  NumChannels,
        const int32 SampleRate,
        double AudioClock) override
    {
        // Pin the weak pointer — drops the buffer silently if the owner is gone.
        if (UVoiceSessionManager* Mgr = Owner.Get())
        {
            Mgr->OnNewSubmixBuffer(OwningSubmix, AudioData, NumSamples, NumChannels, SampleRate, AudioClock);
        }
    }
};
// =====================================================================
// Constructor
// =====================================================================
UVoiceSessionManager::UVoiceSessionManager()
{
    // Intentionally empty — sub-systems are created in InitializeSession().
}
// =====================================================================
// BeginDestroy — safety net
// =====================================================================
void UVoiceSessionManager::BeginDestroy()
{
    // Safety net: release the heap-owned mixer recorder and stop capture
    // even if the owner never called ShutdownSession() (no-op when inactive).
    ShutdownSession();
    Super::BeginDestroy();
}
// =====================================================================
// InitializeSession / ShutdownSession
// =====================================================================
void UVoiceSessionManager::InitializeSession()
{
    // Idempotent: a second call while active just warns and returns.
    if (bSessionActive)
    {
        UE_LOG(LogGameplayRecorder, Warning,
            TEXT("VoiceSessionManager: Session already active."));
        return;
    }

    // Plain C++ recorder — heap-owned by us, released in ShutdownSession().
    MixerRecorder = new FAudioMixerRecorder();

    // Roster lives under GC control as a sub-object of this manager.
    ParticipantMgr = NewObject<UParticipantManager>(this);

    bSessionActive = true;
    UE_LOG(LogGameplayRecorder, Log, TEXT("VoiceSessionManager: Session initialized."));
}
void UVoiceSessionManager::ShutdownSession()
{
    // Nothing to tear down if the session never started (or already stopped).
    if (!bSessionActive)
    {
        return;
    }

    // Stop feeding audio before destroying the recorder it writes into.
    StopVoiceCapture();

    delete MixerRecorder;
    MixerRecorder = nullptr;

    // Clear the roster, then drop our reference (GC reclaims the UObject).
    if (ParticipantMgr != nullptr)
    {
        ParticipantMgr->RemoveAll();
    }
    ParticipantMgr = nullptr;

    bSessionActive = false;
    UE_LOG(LogGameplayRecorder, Log, TEXT("VoiceSessionManager: Session shut down."));
}
// =====================================================================
// Participant management — thin wrappers around ParticipantManager
// =====================================================================
bool UVoiceSessionManager::AddParticipant(
const FString& UserID,
const FString& DisplayName,
EVoiceRole Role)
{
if (!ParticipantMgr)
{
UE_LOG(LogGameplayRecorder, Error,
TEXT("VoiceSessionManager: Not initialized. Call InitializeSession() first."));
return false;
}
return ParticipantMgr->AddParticipant(UserID, DisplayName, Role);
}
// Removes a participant from the roster; false when the session was never
// initialized (no roster to remove from).
bool UVoiceSessionManager::RemoveParticipant(const FString& UserID)
{
    return ParticipantMgr ? ParticipantMgr->RemoveParticipant(UserID) : false;
}
// Returns a by-value snapshot of the roster; empty when not initialized.
TArray<FVoiceParticipant> UVoiceSessionManager::GetAllParticipants() const
{
    if (ParticipantMgr == nullptr)
    {
        return TArray<FVoiceParticipant>();
    }
    return ParticipantMgr->GetAllParticipants();
}
// =====================================================================
// Mute / Unmute / Toggle — with permission checks
// =====================================================================
// Attempts to mute TargetUserID on behalf of RequestorUserID.
// Flow: resolve both users → ask the permission system → apply the flag →
// broadcast OnMuteStateChanged so UI can refresh. Returns the permission
// verdict (Denied also covers "user not found" / "not initialized").
EVoicePermissionResult UVoiceSessionManager::MuteParticipant(
    const FString& RequestorUserID,
    const FString& TargetUserID)
{
    if (ParticipantMgr == nullptr)
    {
        return EVoicePermissionResult::Denied;
    }
    FVoiceParticipant* const Req = ParticipantMgr->FindParticipant(RequestorUserID);
    FVoiceParticipant* const Tgt = ParticipantMgr->FindParticipant(TargetUserID);
    if (Req == nullptr || Tgt == nullptr)
    {
        UE_LOG(LogGameplayRecorder, Warning,
            TEXT("VoiceSessionManager: MuteParticipant — user not found. Requestor='%s' Target='%s'"),
            *RequestorUserID, *TargetUserID);
        return EVoicePermissionResult::Denied;
    }
    // Permission gate — roles decide who may mute whom.
    const EVoicePermissionResult Verdict = UVoicePermissionSystem::CanMute(*Req, *Tgt);
    if (Verdict == EVoicePermissionResult::Denied)
    {
        const FString Why = UVoicePermissionSystem::GetDenialReason(*Req, *Tgt, true);
        UE_LOG(LogGameplayRecorder, Warning, TEXT(" MUTE DENIED: %s"), *Why);
        return EVoicePermissionResult::Denied;
    }
    // Permitted — flip the flag and notify listeners (UI etc.).
    Tgt->bMuted = true;
    UE_LOG(LogGameplayRecorder, Log,
        TEXT("VoiceSessionManager: '%s' muted '%s'."),
        *Req->DisplayName, *Tgt->DisplayName);
    OnMuteStateChanged.Broadcast(TargetUserID, true, RequestorUserID);
    return EVoicePermissionResult::Allowed;
}
// Mirror of MuteParticipant for the unmute direction: resolve users, check
// CanUnmute, clear the flag, broadcast the state change.
EVoicePermissionResult UVoiceSessionManager::UnmuteParticipant(
    const FString& RequestorUserID,
    const FString& TargetUserID)
{
    if (ParticipantMgr == nullptr)
    {
        return EVoicePermissionResult::Denied;
    }
    FVoiceParticipant* const Req = ParticipantMgr->FindParticipant(RequestorUserID);
    FVoiceParticipant* const Tgt = ParticipantMgr->FindParticipant(TargetUserID);
    if (Req == nullptr || Tgt == nullptr)
    {
        UE_LOG(LogGameplayRecorder, Warning,
            TEXT("VoiceSessionManager: UnmuteParticipant — user not found."));
        return EVoicePermissionResult::Denied;
    }
    const EVoicePermissionResult Verdict = UVoicePermissionSystem::CanUnmute(*Req, *Tgt);
    if (Verdict == EVoicePermissionResult::Denied)
    {
        const FString Why = UVoicePermissionSystem::GetDenialReason(*Req, *Tgt, false);
        UE_LOG(LogGameplayRecorder, Warning, TEXT(" UNMUTE DENIED: %s"), *Why);
        return EVoicePermissionResult::Denied;
    }
    Tgt->bMuted = false;
    UE_LOG(LogGameplayRecorder, Log,
        TEXT("VoiceSessionManager: '%s' unmuted '%s'."),
        *Req->DisplayName, *Tgt->DisplayName);
    OnMuteStateChanged.Broadcast(TargetUserID, false, RequestorUserID);
    return EVoicePermissionResult::Allowed;
}
// Convenience toggle: inspects the target's current state and dispatches to
// MuteParticipant / UnmuteParticipant so permission checks and event
// broadcasting stay in exactly one place.
EVoicePermissionResult UVoiceSessionManager::ToggleMuteParticipant(
    const FString& RequestorUserID,
    const FString& TargetUserID)
{
    if (ParticipantMgr == nullptr)
    {
        return EVoicePermissionResult::Denied;
    }
    const FVoiceParticipant* const Tgt = ParticipantMgr->FindParticipant(TargetUserID);
    if (Tgt == nullptr)
    {
        return EVoicePermissionResult::Denied;
    }
    return Tgt->bMuted
        ? UnmuteParticipant(RequestorUserID, TargetUserID)
        : MuteParticipant(RequestorUserID, TargetUserID);
}
// =====================================================================
// Voice Capture — submix listener
// =====================================================================
// Starts capturing mixed audio from a submix (OptionalSubmix, or the
// engine's main submix when null) by registering a bridge listener with the
// audio device. On success, bCapturingVoice is set and the recorder runs.
//
// FIX: the original called MixerRecorder->Start() and then, when GEngine /
// the audio device / the submix was unavailable, silently returned — leaving
// the recorder started while bCapturingVoice stayed false, so
// StopVoiceCapture() (which early-returns on !bCapturingVoice) could never
// stop it. We now roll the recorder back and log a warning on failure.
void UVoiceSessionManager::StartVoiceCapture(USoundSubmix* OptionalSubmix)
{
    if (bCapturingVoice)
    {
        UE_LOG(LogGameplayRecorder, Warning,
            TEXT("VoiceSessionManager: Voice capture already active."));
        return;
    }
    if (!MixerRecorder)
    {
        UE_LOG(LogGameplayRecorder, Error,
            TEXT("VoiceSessionManager: Not initialized."));
        return;
    }
    // Arm the recorder before registering the listener so the very first
    // audio-thread callback already finds it running.
    MixerRecorder->Start();
    // Register as a submix listener to capture the combined audio output.
    if (GEngine && GEngine->GetMainAudioDevice())
    {
        FAudioDevice* AudioDevice = GEngine->GetMainAudioDevice().GetAudioDevice();
        if (AudioDevice)
        {
            // Fall back to the engine's main submix when none was supplied.
            USoundSubmix* Submix = OptionalSubmix;
            if (!Submix)
            {
                Submix = &AudioDevice->GetMainSubmixObject();
            }
            if (Submix)
            {
                RegisteredSubmix = Submix;
                // UE 5.x API takes a TSharedRef<ISubmixBufferListener>.
                SubmixBridge = MakeShared<FVoiceSessionSubmixBridge, ESPMode::ThreadSafe>(this);
                AudioDevice->RegisterSubmixBufferListener(SubmixBridge.ToSharedRef(), *Submix);
                bCapturingVoice = true;
                UE_LOG(LogGameplayRecorder, Log,
                    TEXT("VoiceSessionManager: Voice capture started (submix: %s)."),
                    *Submix->GetName());
            }
        }
    }
    // Registration failed on some path above — undo the recorder start so
    // session state stays consistent, and surface the failure.
    if (!bCapturingVoice)
    {
        MixerRecorder->Stop();
        RegisteredSubmix = nullptr;
        UE_LOG(LogGameplayRecorder, Warning,
            TEXT("VoiceSessionManager: Voice capture failed to start — no audio device or submix available."));
    }
}
// Stops voice capture: flips the flag first (so the audio-thread callback
// drops any in-flight buffers), unregisters the submix listener, then stops
// the recorder.
//
// FIX: SubmixBridge.Reset() previously lived inside the unregister branch,
// so when the audio device or registered submix was already gone the shared
// bridge object was never released. The bridge is now released on all paths.
void UVoiceSessionManager::StopVoiceCapture()
{
    if (!bCapturingVoice)
    {
        return;
    }
    // Flip first: OnNewSubmixBuffer checks this flag before touching the
    // recorder, so new audio-thread callbacks become no-ops immediately.
    bCapturingVoice = false;
    // Unregister from the audio device (when it still exists).
    if (GEngine && GEngine->GetMainAudioDevice())
    {
        FAudioDevice* AudioDevice = GEngine->GetMainAudioDevice().GetAudioDevice();
        if (AudioDevice && RegisteredSubmix.IsValid() && SubmixBridge.IsValid())
        {
            AudioDevice->UnregisterSubmixBufferListener(SubmixBridge.ToSharedRef(), *RegisteredSubmix.Get());
        }
    }
    // Release the bridge unconditionally — see FIX note above.
    SubmixBridge.Reset();
    RegisteredSubmix = nullptr;
    if (MixerRecorder)
    {
        MixerRecorder->Stop();
    }
    UE_LOG(LogGameplayRecorder, Log, TEXT("VoiceSessionManager: Voice capture stopped."));
}
// =====================================================================
// ISubmixBufferListener — audio callback (audio render thread)
// =====================================================================
//
// This captures the MIXED game audio output (all sounds + voice chat
// that has been routed through Unreal's audio engine).
//
// For per-participant voice capture in a networked scenario, each
// client would also feed their local mic data to
// MixerRecorder->FeedParticipantAudio() separately. In a local/LAN
// training setup you can use Unreal's VoiceCapture API or VOIP
// integration to route individual mic streams here.
// Receives one block of mixed submix samples, forwarded from the bridge.
// Runs on the audio render thread (per the comment block above) — keep this
// cheap: a flag check, two field writes, and a buffer feed.
void UVoiceSessionManager::OnNewSubmixBuffer(
    const USoundSubmix* OwningSubmix,
    float* AudioData,
    int32 NumSamples,
    int32 NumChannels,
    const int32 SampleRate,
    double AudioClock)
{
    // Dropped silently when capture is off or the session is uninitialized.
    if (!bCapturingVoice || !MixerRecorder)
    {
        return;
    }
    // Keep the recorder's format in sync with whatever the submix delivers.
    // NOTE(review): plain non-atomic writes from the audio thread — assumed
    // safe because the fields are only consumed after capture stops; confirm
    // against FAudioMixerRecorder's save path.
    MixerRecorder->SampleRate = SampleRate;
    MixerRecorder->NumChannels = NumChannels;
    // Feed the combined game audio into the mixed buffer
    MixerRecorder->FeedGameAudio(AudioData, NumSamples);
}

View File

@@ -0,0 +1,151 @@
// VoiceSessionManager.h
// ─────────────────────────────────────────────────────────────────────
// Top-level voice-session orchestrator.
//
// Responsibilities:
// • Owns the ParticipantManager (roster)
// • Enforces permissions via VoicePermissionSystem
// • Manages per-participant voice capture
// • Feeds captured audio into an AudioMixerRecorder
//
// JS analogy: this is like a "VoiceChatService" class that wraps
// WebRTC connections, user lists, and mute logic into one API —
// but running locally inside the Unreal process.
//
// Blueprint-callable so instructors can wire up UI buttons to
// mute/unmute participants.
// ─────────────────────────────────────────────────────────────────────
#pragma once
#include "CoreMinimal.h"
#include "UObject/Object.h"
#include "VoiceTypes.h"
#include "ISubmixBufferListener.h"
#include "Sound/SoundSubmix.h"
#include "VoiceSessionManager.generated.h"
// Forward declarations
class UParticipantManager;
class UVoicePermissionSystem;
class FAudioMixerRecorder;
// ── Delegate fired when a participant's mute state changes ──────────
DECLARE_DYNAMIC_MULTICAST_DELEGATE_ThreeParams(
FOnMuteStateChanged,
const FString&, UserID,
bool, bNewMutedState,
const FString&, ChangedByUserID);
// ─────────────────────────────────────────────────────────────────────
// UVoiceSessionManager
// ─────────────────────────────────────────────────────────────────────
UCLASS(BlueprintType)
class GAMEPLAYRECORDER_API UVoiceSessionManager : public UObject
{
    GENERATED_BODY()
public:
    UVoiceSessionManager();

    // ── Session lifecycle ───────────────────────────────────────────
    /** Call once to set up the session. Creates the participant manager. */
    UFUNCTION(BlueprintCallable, Category = "Voice|Session")
    void InitializeSession();

    /** Tear down the session. Stops voice capture if active. */
    UFUNCTION(BlueprintCallable, Category = "Voice|Session")
    void ShutdownSession();

    // ── Participant management (delegate to ParticipantManager) ─────
    // All three return safe defaults (false / empty array) when the session
    // has not been initialized.
    UFUNCTION(BlueprintCallable, Category = "Voice|Session")
    bool AddParticipant(const FString& UserID, const FString& DisplayName, EVoiceRole Role);

    UFUNCTION(BlueprintCallable, Category = "Voice|Session")
    bool RemoveParticipant(const FString& UserID);

    UFUNCTION(BlueprintCallable, Category = "Voice|Session")
    TArray<FVoiceParticipant> GetAllParticipants() const;

    // ── Mute / Unmute with permission checks ────────────────────────
    /**
     * Attempt to mute TargetUserID on behalf of RequestorUserID.
     * Checks permissions first. Returns the permission result.
     * (Denied also covers "user not found" / session not initialized.)
     */
    UFUNCTION(BlueprintCallable, Category = "Voice|Session")
    EVoicePermissionResult MuteParticipant(
        const FString& RequestorUserID,
        const FString& TargetUserID);

    /**
     * Attempt to unmute TargetUserID on behalf of RequestorUserID.
     */
    UFUNCTION(BlueprintCallable, Category = "Voice|Session")
    EVoicePermissionResult UnmuteParticipant(
        const FString& RequestorUserID,
        const FString& TargetUserID);

    /** Convenience: toggle mute. */
    UFUNCTION(BlueprintCallable, Category = "Voice|Session")
    EVoicePermissionResult ToggleMuteParticipant(
        const FString& RequestorUserID,
        const FString& TargetUserID);

    // ── Voice capture control ───────────────────────────────────────
    /**
     * Start capturing voice from the submix (microphones + game audio).
     * Feed data to the AudioMixerRecorder for recording.
     */
    UFUNCTION(BlueprintCallable, Category = "Voice|Session")
    void StartVoiceCapture(USoundSubmix* OptionalSubmix = nullptr);

    /** Stop voice capture. */
    UFUNCTION(BlueprintCallable, Category = "Voice|Session")
    void StopVoiceCapture();

    // ── Access to the internal audio mixer recorder ─────────────────
    // (C++ only — RecorderManager uses this to save WAVs)
    // NOTE: non-owning pointer; valid only between InitializeSession() and
    // ShutdownSession(). May be null — callers must check.
    FAudioMixerRecorder* GetAudioMixerRecorder() const { return MixerRecorder; }

    // ── Events ──────────────────────────────────────────────────────
    /** Broadcast when any participant's mute state changes. */
    UPROPERTY(BlueprintAssignable, Category = "Voice|Session")
    FOnMuteStateChanged OnMuteStateChanged;

    // ── Audio capture callback (forwarded from bridge) ───────────────
    // Invoked on the audio render thread, not the game thread.
    void OnNewSubmixBuffer(
        const USoundSubmix* OwningSubmix,
        float* AudioData,
        int32 NumSamples,
        int32 NumChannels,
        const int32 SampleRate,
        double AudioClock);

    // ── Direct access to managers (C++) ─────────────────────────────
    UParticipantManager* GetParticipantManager() const { return ParticipantMgr; }

protected:
    virtual void BeginDestroy() override;

private:
    // ── Owned sub-objects ───────────────────────────────────────────
    UPROPERTY()
    UParticipantManager* ParticipantMgr = nullptr;

    // Plain C++ (not UObject — we manage lifetime manually)
    // Allocated in InitializeSession(), deleted in ShutdownSession().
    FAudioMixerRecorder* MixerRecorder = nullptr;

    bool bSessionActive = false;
    bool bCapturingVoice = false;

    // Weak: the submix is a GC-managed asset we do not own.
    TWeakObjectPtr<USoundSubmix> RegisteredSubmix;

    // Submix bridge (UE 5.6 requires TSharedRef<ISubmixBufferListener>)
    TSharedPtr<ISubmixBufferListener, ESPMode::ThreadSafe> SubmixBridge;
};

View File

@@ -0,0 +1,73 @@
// VoiceTypes.h
// ─────────────────────────────────────────────────────────────────────
// Shared enums and structs used across all voice-system classes.
//
// JS analogy: This is like a shared "types.ts" file that defines your
// TypeScript interfaces and enums used across the whole feature.
// ─────────────────────────────────────────────────────────────────────
#pragma once
#include "CoreMinimal.h"
#include "VoiceTypes.generated.h"
// ─────────────────────────────────────────────────────────────────────
// EVoiceRole — the role a participant plays in a training session
// ─────────────────────────────────────────────────────────────────────
// uint8-backed so Blueprint can use it and so it stays compact inside
// FVoiceParticipant.
UENUM(BlueprintType)
enum class EVoiceRole : uint8
{
    /** The player flying / driving / playing the game. One per session. */
    Pilot UMETA(DisplayName = "Pilot"),

    /** An observer who can give instructions and moderate voice. */
    Instructor UMETA(DisplayName = "Instructor")
};
// ─────────────────────────────────────────────────────────────────────
// FVoiceParticipant — one person in the voice session
//
// JS analogy: like a plain object { userId, displayName, role, muted }
// but as a C++ struct with Unreal reflection so Blueprint can see it.
// ─────────────────────────────────────────────────────────────────────
USTRUCT(BlueprintType)
struct GAMEPLAYRECORDER_API FVoiceParticipant
{
    GENERATED_BODY()

    /** Unique identifier (e.g. "Player_0", "Instructor_1"). */
    UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Voice")
    FString UserID;

    /** Human-readable name shown in UI. */
    UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Voice")
    FString DisplayName;

    /** Pilot or Instructor. */
    UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Voice")
    EVoiceRole Role = EVoiceRole::Pilot;

    /** True if this participant is currently muted. */
    UPROPERTY(EditAnywhere, BlueprintReadWrite, Category = "Voice")
    bool bMuted = false;

    // ── Constructors ────────────────────────────────────────────────
    FVoiceParticipant() = default;

    /** Convenience constructor; new participants always start unmuted. */
    FVoiceParticipant(const FString& InID, const FString& InName, EVoiceRole InRole)
        : UserID(InID)
        , DisplayName(InName)
        , Role(InRole)
        // bMuted intentionally omitted from the init list — the in-class
        // initializer (= false) already applies; restating it was redundant.
    {}
};
// ─────────────────────────────────────────────────────────────────────
// EVoicePermissionResult — returned from permission checks
// ─────────────────────────────────────────────────────────────────────
// Binary verdict from VoicePermissionSystem checks. Denied is also returned
// by the session manager for lookup failures (user not found, no session).
UENUM(BlueprintType)
enum class EVoicePermissionResult : uint8
{
    Allowed UMETA(DisplayName = "Allowed"),
    Denied UMETA(DisplayName = "Denied")
};