Files
OpenVulkano/openVulkanoCpp/AR/Provider/Playback/ArSessionPlayback.cpp
2024-08-14 18:13:14 +02:00

135 lines
3.6 KiB
C++

/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
#include "ArSessionPlayback.hpp"
#include "ArFramePlayback.hpp"
#include "Base/Logger.hpp"
#include "Scene/Texture.hpp"
#include <filesystem>
using namespace std::chrono_literals;
namespace OpenVulkano::AR::Playback
{
// Constructs a playback session that replays a pre-recorded AR session from disk.
// Derives capabilities from the recording's metadata (session type, depth
// availability) and immediately launches the background reader thread.
//
// @param recordingPath  Filesystem path of the recorded session to replay.
// @param autoAdvance    Flag stored for frame advancement (consumed elsewhere).
//
// NOTE(review): ReadWorker() calls shared_from_this(), but the thread is started
// here in the constructor — before any std::shared_ptr can own this object.
// The 128ms startup delay inside ReadWorker presumably hides that window, yet a
// slow construction path could still raise std::bad_weak_ptr. Confirm the class
// is always created via shared_ptr and consider deferring the thread start.
ArSessionPlayback::ArSessionPlayback(const std::string& recordingPath, bool autoAdvance)
: ArSession(ArSessionMetadata(recordingPath)), recordingPath(recordingPath), autoAdvance(autoAdvance), playbackReader(recordingPath)
{
// Playback never offers live-sensor features; depth is only advertised when the
// recording actually contains a usable depth format.
capabilities = ArSessionCapabilities(metadata.type, ArSessionType::PLAYBACK, false, metadata.depthFormat != ArDepthFormat::UNAVAILABLE, false, false, false);
constants = { Math::Matrix4f(1), metadata.confidenceRange };
// Reader thread produces frames for GetFrame(); joined in the destructor.
m_playbackReaderThread = std::thread([this](){ReadWorker();});
}
// Stops playback and joins the reader thread so it never outlives this object.
ArSessionPlayback::~ArSessionPlayback()
{
Stop(); // clears `running`, which lets ReadWorker's loop terminate
if (m_playbackReaderThread.joinable()) m_playbackReaderThread.join();
}
// Starts (or resumes) playback. Marking the current frame as consumed unblocks
// the reader thread, discarding any frame staged before Start was called.
// NOTE(review): `running`/`m_frameConsumed` are written here and read from the
// reader thread — presumably atomic; verify their declarations in the header.
void ArSessionPlayback::Start()
{
running = true;
m_frameConsumed = true;
}
// Stops playback permanently and fires the OnStopped event.
// Called from the destructor and from ReadWorker when the recording ends, so
// OnStopped may fire even if Start() was never invoked.
void ArSessionPlayback::Stop()
{
running = false;
OnStopped();
}
// Pauses playback: identical to Stop() except no OnStopped event is raised,
// so a later Start() can resume frame delivery.
void ArSessionPlayback::Pause()
{
running = false;
}
// Blocks until the reader thread has staged a new frame, then hands it over and
// signals the reader to produce the next one.
//
// @return The next playback frame, or nullptr when the session has stopped
//         before a frame became available.
//
// NOTE(review): this is a busy-wait handshake (yield loop) against the reader
// thread; a condition variable would avoid burning CPU — confirm the flags are
// atomic before touching the ordering here.
std::shared_ptr<ArFrame> ArSessionPlayback::GetFrame()
{
while(IsRunning() && m_frameConsumed) { std::this_thread::yield(); }
auto frame = m_nextFrame; // may still be nullptr if the session stopped
m_nextFrame = nullptr;
m_frameConsumed = true; // releases the reader to stage the next frame
return frame;
}
// Reports the AR flavour of the recording, as captured in the session
// capabilities when this playback session was constructed.
ArType ArSessionPlayback::GetArType()
{
    const auto arType = capabilities.GetArType();
    return arType;
}
// Background producer loop: decodes frames from the recording one at a time,
// stages each for GetFrame(), and fans out the session events. Runs on the
// thread created in the constructor and exits when the recording is exhausted
// or the session stops.
void ArSessionPlayback::ReadWorker()
{
Utils::SetThreadName("AR_Playback");
std::this_thread::sleep_for(128ms); // Delay startup of playback
while (playbackReader.HasNext() && IsRunning())
{
// Wait until the consumer has taken the previously staged frame.
while (!m_frameConsumed) { std::this_thread::yield(); }
try
{
std::shared_ptr<ArFrame> frame = std::make_shared<ArFramePlayback>(shared_from_this(), playbackReader);
lastTimestamp = frame->GetTimestamp();
// Stage the frame, then clear the flag so GetFrame() can pick it up.
m_nextFrame = frame;
m_frameConsumed = false;
//TODO try to keep original frame timing
// Trigger events
OnNewFrameAvailable();
OnNewFrame(frame);
if (frame->GetTrackingState() != m_lastTrackingState)
{
m_lastTrackingState = frame->GetTrackingState();
OnTrackingStateChanged(m_lastTrackingState);
}
OnNewCameraTransformation(frame->GetCameraTransformation());
// View matrix extraction is comparatively costly, so only do it on demand.
if (OnNewCameraViewMatrix.HasHandlers())
{
auto view = frame->GetCameraViewForCurrentDeviceOrientation();
OnNewCameraViewMatrix(view);
}
OnPlaybackProgress(playbackReader.GetProgress());
}
catch (const std::exception& e)
{
// NOTE(review): on a decode failure the loop retries immediately; if the
// reader does not advance past the bad frame this can spin on the same
// error until HasNext() turns false — confirm reader advance-on-throw.
Logger::AR->error("Failed to read AR frame: {}", e.what());
}
}
// Recording exhausted (or externally stopped): finalize and signal interruption.
Stop();
OnSessionInterruptionChange(true);
}
// Produces a CPU-side texture wrapping the colour image of the given frame.
// Textures are pooled: a previously returned one is recycled when available,
// otherwise a fresh RGBA8 texture is allocated. The caller takes ownership and
// is expected to hand the pointer back via ReturnTexture().
Scene::Texture* ArSessionPlayback::MakeTexture(OpenVulkano::AR::ArFrame* frame)
{
    Scene::Texture* tex = nullptr;
    if (m_textureCache.empty())
    {
        // Pool exhausted — create a new texture with the fixed playback format.
        tex = new Scene::Texture();
        tex->format = DataFormat::R8G8B8A8_UNORM;
        tex->updateFrequency = Scene::UpdateFrequency::Always;
    }
    else
    {
        // Recycle the most recently returned texture.
        tex = m_textureCache.back();
        m_textureCache.pop_back();
    }
    // Point the texture at the frame's colour plane and refresh its metadata.
    auto image = frame->GetCameraImage();
    const auto& color = image.luminescenceOrColor;
    tex->resolution = { color.resolution, 1 };
    tex->textureBuffer = color.data;
    tex->size = color.resolution.x * color.resolution.y * color.numChannels;
    tex->updated = true;
    return tex;
}
// Hands a texture back to the pool so MakeTexture() can recycle it later.
// The pointer must originate from MakeTexture(); no duplicate check is done.
void ArSessionPlayback::ReturnTexture(Scene::Texture* texture)
{
    m_textureCache.emplace_back(texture);
}
// Intentionally a no-op for now: playback sessions do not yet attach to a
// renderer. Kept to satisfy the ArSession interface.
void ArSessionPlayback::SetRenderer(IRenderer* renderer)
{
//TODO wire the renderer into playback (e.g. for texture upload) when needed
}
}