// Fill out your copyright notice in the Description page of Project Settings.
#include "FaceAnimation/VHFaceAnimation.h"
#include "AssetRegistry/AssetRegistryModule.h"
#include "Animation/AnimSequence.h"
#include "Components/SkeletalMeshComponent.h"
#include "HAL/PlatformFileManager.h"
#include "Misc/Paths.h"
#include "Misc/FileHelper.h"
#include "VirtualHuman.h"
#include "CharacterPluginLogging.h"
#include "FaceAnimation/VHFacialExpressions.h"
UVHFaceAnimation::UVHFaceAnimation()
{
PrimaryComponentTick.bCanEverTick = true;
}
void UVHFaceAnimation::BeginPlay()
{
Super::BeginPlay();
if (!AnimationFilename.IsEmpty())
{
LoadAnimationFile(AnimationFilename);
}
}
void UVHFaceAnimation::Play()
{
PlayFromTime(0.0f);
}
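//Starts playback at StartTime, offset backwards by ConstantDelay.
//Optionally captures a calibration frame and, if bResetFaceAfterFinish is set, resets FadeFraction so the animation fades in.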
void UVHFaceAnimation::PlayFromTime(float StartTime)
{
bCurrentlyPlaying = true;
CurrentPlayTime = StartTime - ConstantDelay;
LastFrameNr = 0;
SearchClosestFrameDataIndex();
if (bUseFaceCalibrationFrame)
{
Calibration(TimeForCalibration);
}
if (bResetFaceAfterFinish)
{
//if we are resetting the face we are also fading in
FadeFraction = 0.0f;
}
}
void UVHFaceAnimation::Stop()
{
bCurrentlyPlaying = false;
ResetFace();
}
bool UVHFaceAnimation::IsPlaying()
{
return bCurrentlyPlaying;
}
float UVHFaceAnimation::GetDuration() const
{
return AnimationData.TimeSteps.Num() > 0 ? AnimationData.TimeSteps.Last().Timestamp : 0.0f;
}
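//Loads an animation file, reusing cached data if the file was loaded before and otherwise delegating to InternalLoadFile of a subclass.
//Afterwards it checks that every blendshape in the file exists as a morph target on the character's skeleton.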
bool UVHFaceAnimation::LoadAnimationFile(FString Filename)
{
//avoid unchecked dereferences: the owner might not be an AVirtualHuman or might not provide an AnimInstance
AVirtualHuman* OwningVH = Cast<AVirtualHuman>(GetOwner());
AnimInstance = OwningVH ? OwningVH->GetFaceAnimInstance() : nullptr;
if (!AnimInstance)
{
VH_WARN("No AnimInstance at LoadAnimationFile of UVHFaceAnimation\n");
return false;
}
UseMorphs = OwningVH->SupportsMorphs();
if (CachedAnimationData.Contains(Filename))
{
AnimationData = CachedAnimationData[Filename];
VH_LOG("Use cached animation data for %s\n", *Filename);
}
else if (InternalLoadFile(Filename))
{
AnimationFilename = Filename;
CachedAnimationData.Add(Filename, AnimationData);
VH_LOG("Sucessfully loaded and cached animation from file %s\n", *Filename);
}
else
{
VH_WARN("Unable to load animation from file %s\n", *Filename);
return false;
}
AnimationData.CurrentFrameValues.Reset(AnimationData.BlendshapeNames.Num());
AnimationData.CurrentFrameValues.AddZeroed(AnimationData.BlendshapeNames.Num());
//sanity check whether all blendshapes are present for the character
TArray<FName> PresentMorphTargets;
USkeleton* Skeleton = AnimInstance->GetSkelMeshComponent()->GetSkeletalMeshAsset()->GetSkeleton();
Skeleton->GetCurveMetaDataNames(PresentMorphTargets);
for (int BlendshapeIndex = 0; BlendshapeIndex < AnimationData.BlendshapeNames.Num(); ++BlendshapeIndex)
{
FString Blendshape = AnimationData.BlendshapeNames[BlendshapeIndex];
if (!PresentMorphTargets.Contains(FName(*Blendshape)))
{
if (Blendshape == "Merged_Open_Mouth")
{
//this morph target is apparently not present in all CC3 exports (though it is for Henry), so we substitute it with jaw movement
for (FFrameData& TimeStep : AnimationData.TimeSteps)
{
TimeStep.JawRotation = TimeStep.BlendshapeActivations[BlendshapeIndex] * FRotator(0.0f, -30.0f, 0.0f);
}
}
else if (Blendshape.StartsWith("CTRL_expressions_head") || Blendshape.StartsWith("CTRL_expressions_bodyNeck"))
{
//do nothing as we don't want to move the head.
}
else
{
VH_WARN("Mapped blendshape for face animation %s is not present, the animation will not fully work!\n",
*Blendshape);
}
}
}
return true;
}
bool UVHFaceAnimation::Calibrate(FString CalibrationFilename, float CalibrationTime)
{
FString CurrentlyLoadedAnim = AnimationFilename;
if (!LoadAnimationFile(CalibrationFilename))
{
VH_WARN("Animation %s which should be used for calibration cannot be loaded", *CalibrationFilename);
return false;
}
Calibration(CalibrationTime);
if (!CurrentlyLoadedAnim.IsEmpty())
{
LoadAnimationFile(CurrentlyLoadedAnim);
}
return true;
}
// Called every frame
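//Advances the playback time, handles fading in/out near the end of the clip and applies the interpolated frame data.
//When not playing and bResetFaceAfterFinish is set, it keeps pulling values from an attached UVHFacialExpressions component.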
void UVHFaceAnimation::TickComponent(float DeltaTime, ELevelTick TickType,
FActorComponentTickFunction* ThisTickFunction)
Super::TickComponent(DeltaTime, TickType, ThisTickFunction);
if (bCurrentlyPlaying)
{
CurrentPlayTime += DeltaTime;
//guard against empty animation data before calling Last() on it
const bool bHasFrames = AnimationData.TimeSteps.Num() > 0;
const float TimeLeft = bHasFrames ? AnimationData.TimeSteps.Last().Timestamp - CurrentPlayTime : -1.0f;
if (TimeLeft < 0)
{
bCurrentlyPlaying = false;
if (bResetFaceAfterFinish)
{
ResetFace();
}
return;
}
if (TimeLeft < FadeInOutTime && bResetFaceAfterFinish)
{
//we should be fading out
//FadeFraction shrinks towards 0 as TimeLeft approaches 0
FadeFraction = TimeLeft / FadeInOutTime;
}
else
{
FadeFraction += DeltaTime / FadeInOutTime;
}
FadeFraction = FMath::Clamp(FadeFraction, 0.0f, 1.0f);
int FrameNr = SearchClosestFrameDataIndex();
LastFrameNr = FrameNr;
ApplyFrameData(FrameNr);
bIsFaceReset = false;
}
else if (bResetFaceAfterFinish && !bIsFaceReset)
{
//in case this is used together with a FacialExpressions component
//we have to do this, so that the right data is pulled from the
//FacialExpressions component every frame (especially before starting the actual animation)
ResetFace();
bIsFaceReset = true;
}
}
bool UVHFaceAnimation::InternalLoadFile(FString FileName)
{
VH_WARN("This should be implemented by a subclass!!! Using this class directly is not intended.\n");
return false;
}
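//Applies the data of frame FrameNr, linearly interpolating with the next frame based on CurrentPlayTime,
//subtracting the calibration frame if one is present and blending with the current facial expression via FadeFraction.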
void UVHFaceAnimation::ApplyFrameData(int FrameNr)
{
float AmountBefore = 1.0f;
float AmountAfter = 0.0f;
if (FrameNr + 1 < AnimationData.TimeSteps.Num())
{
//we might be in between two timesteps of our data
const float TimeDiff = AnimationData.TimeSteps[FrameNr + 1].Timestamp - AnimationData.TimeSteps[FrameNr].Timestamp;
const float InterFrameTime = CurrentPlayTime - AnimationData.TimeSteps[FrameNr].Timestamp;
AmountAfter = InterFrameTime / TimeDiff;
AmountBefore = 1.0f - AmountAfter;
}
for (int i = 0; i < AnimationData.TimeSteps[FrameNr].BlendshapeActivations.Num(); ++i)
{
float Value = AnimationData.TimeSteps[FrameNr].BlendshapeActivations[i];
//only interpolate if the next timestep contributes noticeably; otherwise just use the timestep before
if (AmountAfter > 0.01f)
{
Value = AmountBefore * Value + AmountAfter * AnimationData.TimeSteps[FrameNr + 1].BlendshapeActivations[i];
}
if (IsCalibrationPresent())
{
Value -= CalibrationFrame.BlendshapeActivations[i];
}
//if we are resetting also fade in
Value *= FadeFraction;
//if we are fading check whether there is a FacialExpression set which should be faded to
Value += (1.0f - FadeFraction) * GetFacialExpressionMorphTargetValue(*AnimationData.BlendshapeNames[i]);
AnimationData.CurrentFrameValues[i] = Value;
if (UseMorphs)
{
// in this case set the morph target directly, otherwise the animation blueprint will pull the data
AnimInstance->SetMorphTarget(*AnimationData.BlendshapeNames[i], Value);
}
}
if (bUseHeadRotation)
{
FRotator HeadRotation = AnimationData.TimeSteps[FrameNr].HeadRotation;
if (AmountAfter > 0.01f)
{
HeadRotation = AmountBefore * HeadRotation + AmountAfter * AnimationData.TimeSteps[FrameNr + 1].HeadRotation;
}
if (IsCalibrationPresent())
{
HeadRotation -= CalibrationFrame.HeadRotation;
}
AnimInstance->SkelControl_Head = HeadRotation;
}
if (bUseEyeRotations)
{
FRotator LeftEye = AnimationData.TimeSteps[FrameNr].LeftEyeRotation;
FRotator RightEye = AnimationData.TimeSteps[FrameNr].RightEyeRotation;
if (AmountAfter > 0.01f)
{
LeftEye = AmountBefore * LeftEye + AmountAfter * AnimationData.TimeSteps[FrameNr + 1].LeftEyeRotation;
RightEye = AmountBefore * RightEye + AmountAfter * AnimationData.TimeSteps[FrameNr + 1].RightEyeRotation;
}
if (IsCalibrationPresent())
{
LeftEye -= CalibrationFrame.LeftEyeRotation;
RightEye -= CalibrationFrame.RightEyeRotation;
}
AnimInstance->SkelControl_LeftEyeRot = LeftEye;
AnimInstance->SkelControl_RightEyeRot = RightEye;
}
FRotator JawRotation = AnimationData.TimeSteps[FrameNr].JawRotation;
if (AmountAfter > 0.01f)
{
JawRotation = AmountBefore * JawRotation + AmountAfter * AnimationData.TimeSteps[FrameNr + 1].JawRotation;
}
if (IsCalibrationPresent())
{
JawRotation -= CalibrationFrame.JawRotation;
//since the jaw has to be rotated in a neutral position for CC3, we have to add that back in
FFrameData NeutralData;
JawRotation += NeutralData.JawRotation;
}
AnimInstance->SkelControl_JawRot = JawRotation;
}
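//Resets all blendshapes to the values of an attached UVHFacialExpressions component (0 if none is present)
//and restores the neutral jaw (and, if enabled, head and eye) rotations.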
void UVHFaceAnimation::ResetFace()
{
if (AnimationData.BlendshapeNames.Num() == 0)
{
return;
}
int i = 0;
for (FString Blendshape : AnimationData.BlendshapeNames)
{
float Value = GetFacialExpressionMorphTargetValue(FName(*Blendshape));
AnimationData.CurrentFrameValues[i] = Value;
if (UseMorphs)
{
AnimInstance->SetMorphTarget(*Blendshape, Value);
}
i++;
}
FFrameData NeutralFrame;
if (bUseHeadRotation)
{
AnimInstance->SkelControl_Head = NeutralFrame.HeadRotation;
}
if (bUseEyeRotations)
{
AnimInstance->SkelControl_LeftEyeRot = NeutralFrame.LeftEyeRotation;
AnimInstance->SkelControl_RightEyeRot = NeutralFrame.RightEyeRotation;
}
AnimInstance->SkelControl_JawRot = NeutralFrame.JawRotation;
}
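//Returns the index of the last frame whose timestamp is not after CurrentPlayTime.
//Tries LastFrameNr and its successor as fast paths before falling back to a linear search.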
int UVHFaceAnimation::SearchClosestFrameDataIndex()
{
if (AnimationData.TimeSteps[LastFrameNr].Timestamp <= CurrentPlayTime
&& (LastFrameNr + 1 >= AnimationData.TimeSteps.Num()
|| AnimationData.TimeSteps[LastFrameNr + 1].Timestamp > CurrentPlayTime))
{
return LastFrameNr;
}
if (LastFrameNr + 2 < AnimationData.TimeSteps.Num()
&& AnimationData.TimeSteps[LastFrameNr + 1].Timestamp <= CurrentPlayTime
&& AnimationData.TimeSteps[LastFrameNr + 2].Timestamp > CurrentPlayTime)
{
return LastFrameNr + 1;
}
//the two fast paths did not match, so do a full search
for (int i = 0; i < AnimationData.TimeSteps.Num() - 1; ++i)
{
if (AnimationData.TimeSteps[i].Timestamp <= CurrentPlayTime && AnimationData.TimeSteps[i + 1].Timestamp > CurrentPlayTime)
{
return i;
}
}
return AnimationData.TimeSteps.Num() - 1;
}
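//Stores one frame of the loaded animation as the calibration frame; its values are later subtracted in ApplyFrameData.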
void UVHFaceAnimation::Calibration(float Time)
{
for (const FFrameData& Frame : AnimationData.TimeSteps)
{
if (Frame.Timestamp <= Time)
{
CalibrationFrame = Frame;
return;
}
}
}
bool UVHFaceAnimation::IsCalibrationPresent() const
{
return CalibrationFrame.BlendshapeActivations.Num() > 0;
}
float UVHFaceAnimation::GetFacialExpressionMorphTargetValue(const FName& MorphName)
{
UVHFacialExpressions* Expression = GetOwner()->FindComponentByClass<UVHFacialExpressions>();
if (Expression == nullptr)
{
//the actor does not have a UVHFacialExpressions component, so return neutral face value
return 0.0f;
}
const float* Value = Expression->GetCurrentAnimationValues().Find(MorphName);
if (Value != nullptr) return *Value;
return 0.0f;
}
const FAnimationData& UVHFaceAnimation::GetAnimData()
{
return AnimationData;
}
UAnimSequence* UVHFaceAnimation::SaveAsAnimSequence(FString AnimName)
{
VH_ERROR(
"This function is out of order because of deprecated functions and variables. If needed, reimplement it.\n");
return nullptr;
//#if WITH_EDITOR
// const FString AnimationRelative = FPaths::Combine(
// TEXT("/Game"), AnimName);
// const FString AnimationAbsolute = FPaths::Combine(
// FPaths::ProjectContentDir(), AnimName);
// //remove the path if any
// const FString AnimNameOnly = FPaths::GetBaseFilename(AnimName);
//
// UAnimSequence* AnimationSequence = nullptr;
//
// UPackage* Package = LoadPackage(nullptr, *AnimationAbsolute, LOAD_None);
//
// if (Package != nullptr)
// {
// AnimationSequence = FindObject<UAnimSequence>(Package, *AnimNameOnly);
// }
// else
// {
// Package = CreatePackage(*AnimationRelative);
// }
//
// if (AnimationSequence == nullptr)
// {
// AnimationSequence = NewObject<UAnimSequence>(
// Package, UAnimSequence::StaticClass(), *AnimNameOnly,
// RF_Public | RF_Standalone);
//
// IAnimationDataController& Controller = AnimationSequence->GetController();
//
// if (AnimationSequence)
// {
//
//
// AnimationSequence->SetSkeleton(AnimInstance->GetSkelMeshComponent()->SkeletalMesh->GetSkeleton());
// //AnimationSequence->SetSequenceLength(AnimationData.TimeSteps.Last().Timestamp);
// Controller.SetPlayLength(AnimationData.TimeSteps.Last().Timestamp);
// //rate = nr frames / length
// Controller.SetFrameRate(FFrameRate(AnimationData.TimeSteps.Num(), AnimationData.TimeSteps.Last().Timestamp));
//
// USkeleton* Skeleton = AnimationSequence->GetSkeleton();
// FFloatCurve* curve;
// FString ContextString;
// FSmartName MorphName;
//
// //Clear Curves
// //AnimationSequence->RawCurveData.Empty();
// FRawCurveTracks RawCurveData;
// RawCurveData = AnimationSequence->GetCurveData();
// RawCurveData.Empty();
//
// //Add Morph Curves
// for (int MorphTargetIndex = 0; MorphTargetIndex < AnimationData.BlendshapeNames.Num(); MorphTargetIndex++)
// {
// FString BlendshapeName = AnimationData.BlendshapeNames[MorphTargetIndex];
// Skeleton->AddSmartNameAndModify(USkeleton::AnimCurveMappingName,
// *BlendshapeName, MorphName);
// AnimationSequence->Modify(true);
// RawCurveData.AddCurveData(MorphName);
// RawCurveData.GetCurveData(MorphName.UID)->
// LastObservedName_DEPRECATED = *BlendshapeName;
//
// curve = StaticCast<FFloatCurve*>(
// RawCurveData.GetCurveData(
// MorphName.UID, ERawCurveTrackTypes::RCT_Float));
//
// for (int FrameNr = 0; FrameNr < AnimationData.TimeSteps.Num(); FrameNr++)
// {
// FFrameData& Frame = AnimationData.TimeSteps[FrameNr];
// float Value = Frame.BlendshapeActivations[MorphTargetIndex];
// if (IsCalibrationPresent())
// {
// Value -= CalibrationFrame.BlendshapeActivations[MorphTargetIndex];
// }
// curve->FloatCurve.AddKey(Frame.Timestamp, Value);
// }
// }
//
// FAssetRegistryModule::AssetCreated(AnimationSequence);
// const FString Filename = FString::Printf(
// TEXT("%s%s"), *AnimationAbsolute,
// *FPackageName::GetAssetPackageExtension());
// UPackage::SavePackage(Package, nullptr, RF_Public | RF_Standalone,
// *Filename);
// }
//
// // Add bone curves
// TArray<FQuat4f> HeadRawTrack;
// TArray<FQuat4f> REyeRawTrack;
// TArray<FQuat4f> LEyeRawTrack;
// TArray<FQuat4f> JawRawTrack;
// for (int FrameNr = 0; FrameNr < AnimationData.TimeSteps.Num(); FrameNr++) {
// FFrameData& Timestep = AnimationData.TimeSteps[FrameNr];
// HeadRawTrack.Add(FQuat4f(Timestep.HeadRotation.Quaternion()));
// REyeRawTrack.Add(FQuat4f(Timestep.RightEyeRotation.Quaternion()));
// LEyeRawTrack.Add(FQuat4f(Timestep.LeftEyeRotation.Quaternion()));
// JawRawTrack.Add(FQuat4f(Timestep.JawRotation.Quaternion()));
// }
//
// AVirtualHuman* owner = Cast<AVirtualHuman>(GetOwner());
// const BoneNames& bones = owner->GetBoneNames();
//
// if (bUseHeadRotation) {
// Controller.AddBoneTrack(bones.head);
// Controller.SetBoneTrackKeys(bones.head, {}, HeadRawTrack, {});
// }
// if (bUseEyeRotations) {
// Controller.AddBoneTrack(bones.eye_r);
// Controller.SetBoneTrackKeys(bones.eye_r, {}, REyeRawTrack, {});
// Controller.AddBoneTrack(bones.eye_l);
// Controller.SetBoneTrackKeys(bones.eye_l, {}, LEyeRawTrack, {});
// }
// Controller.AddBoneTrack(bones.jaw);
// Controller.SetBoneTrackKeys(bones.jaw, {}, JawRawTrack, {});
//
// //the metahuman jaw bone only lowers the chin and not the mouth and teeth
// if (owner->GetBodyType() == EBodyType::MetaHuman) {
// Controller.AddBoneTrack(bones.teeth_lower);
// Controller.SetBoneTrackKeys(bones.teeth_lower, {}, JawRawTrack, {});
// Controller.AddBoneTrack(bones.mouth_lower);
// Controller.SetBoneTrackKeys(bones.mouth_lower, {}, JawRawTrack, {});
// }
//
// }
// return AnimationSequence;
//#else
// VH_ERROR("UVHFaceAnimation::SaveAsAnimSequence can only be used in Editor builds!\n");
// return nullptr;
//#endif
}