/* * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
#include "ArSessionPlayback.hpp"
#include "ArFramePlayback.hpp"
#include "Base/Logger.hpp"
#include "Scene/Texture.hpp"
// NOTE(review): the original include here was garbled (bare "#include" with no header name).
// <chrono> and <thread> are required for the chrono literals and std::thread/this_thread usage
// below — confirm the original header list against version control.
#include <chrono>
#include <thread>

using namespace std::chrono_literals;

namespace OpenVulkano::AR::Playback
{
	/// Plays back a previously recorded AR session from @p recordingPath.
	/// @param recordingPath directory/file of the recording; also used to load ArSessionMetadata.
	/// @param autoAdvance   stored flag (consumed elsewhere; not read in this translation unit).
	/// @param loadImages    stored flag (consumed elsewhere; not read in this translation unit).
	/// @param loadDepth     depth frames are only reported as available when the recording
	///                      actually contains depth AND the caller asked for it.
	ArSessionPlayback::ArSessionPlayback(const std::filesystem::path& recordingPath, const bool autoAdvance,
	                                     const bool loadImages, const bool loadDepth)
		: ArSession(ArSessionMetadata(recordingPath)), recordingPath(recordingPath), autoAdvance(autoAdvance),
		  loadImages(loadImages), loadDepth(loadDepth), playbackReader(recordingPath)
	{
		capabilities = ArSessionCapabilities(metadata.type, ArSessionType::PLAYBACK, false,
		                                     metadata.depthFormat != ArDepthFormat::UNAVAILABLE && loadDepth,
		                                     false, false, false);
		constants = { Math::Matrix4f(1), metadata.confidenceRange };
		// The reader thread starts immediately but idles until Start() flips `running`
		// (ReadWorker's loop is gated on IsRunning()).
		m_playbackReaderThread = std::thread([this]() { ReadWorker(); });
	}

	ArSessionPlayback::~ArSessionPlayback()
	{
		// Stop() clears `running`, which lets ReadWorker's loop exit so the join below terminates.
		Stop();
		if (m_playbackReaderThread.joinable()) m_playbackReaderThread.join();
	}

	void ArSessionPlayback::Start()
	{
		running = true;
		// Mark the slot as consumed so ReadWorker immediately produces the first frame.
		m_frameConsumed = true;
	}

	void ArSessionPlayback::Stop()
	{
		running = false;
		OnStopped();
	}

	void ArSessionPlayback::Pause()
	{
		// Pause only halts the reader loop; unlike Stop() it does not fire OnStopped().
		running = false;
	}

	/// Blocks (busy-waits) until ReadWorker has published a frame, then hands it over
	/// and re-arms the producer. Returns nullptr if the session stops while waiting.
	/// NOTE(review): original return type was garbled by extraction; restored as
	/// std::shared_ptr<ArFrame> — confirm against ArSessionPlayback.hpp.
	/// NOTE(review): m_frameConsumed / m_nextFrame form a cross-thread handshake with
	/// ReadWorker; unless they are declared atomic in the header this is a data race —
	/// verify, or replace with a condition variable.
	std::shared_ptr<ArFrame> ArSessionPlayback::GetFrame()
	{
		while (IsRunning() && m_frameConsumed)
		{
			std::this_thread::yield();
		}
		auto frame = m_nextFrame;
		m_nextFrame = nullptr;
		m_frameConsumed = true;
		return frame;
	}

	ArType ArSessionPlayback::GetArType()
	{
		return capabilities.GetArType();
	}

	/// Producer loop (runs on m_playbackReaderThread): reads frames from playbackReader,
	/// publishes each via m_nextFrame, and fires the session event callbacks.
	void ArSessionPlayback::ReadWorker()
	{
		Utils::SetThreadName("AR_Playback");
		std::string playbackInfo;
		if (!metadata.device.empty())
		{
			playbackInfo = fmt::format("Device: {}; OS: {}", metadata.device, metadata.os);
			if (metadata.recDuration > 0)
			{
				playbackInfo += fmt::format("\nDuration: {}; Frames: {}; Skipped: {}", metadata.recDuration,
				                            metadata.recFrameCount, metadata.recSkippedFrames);
			}
		}
		Logger::AR->info("Starting {} playback '{}'{}", metadata.type.GetHumanReadableName(), recordingPath,
		                 playbackInfo);
		std::this_thread::sleep_for(128ms); // Delay startup of playback
		while (playbackReader.HasNext() && IsRunning())
		{
			// Busy-wait until the consumer (GetFrame) has taken the previous frame.
			while (!m_frameConsumed)
			{
				std::this_thread::yield();
			}
			try
			{
				// NOTE(review): the make_shared template argument was garbled by extraction;
				// restored as ArFramePlayback (matches the #include above) — confirm.
				std::shared_ptr<ArFrame> frame = std::make_shared<ArFramePlayback>(shared_from_this(), playbackReader);
				lastTimestamp = frame->GetTimestamp();
				m_nextFrame = frame;
				m_frameConsumed = false;
				//TODO try to keep original frame timing
				// Trigger events
				OnNewFrameAvailable();
				OnNewFrame(frame);
				if (frame->GetTrackingState() != m_lastTrackingState)
				{
					m_lastTrackingState = frame->GetTrackingState();
					OnTrackingStateChanged(m_lastTrackingState);
				}
				OnNewCameraTransformation(frame->GetCameraTransformation());
				// The view matrix is only computed when someone actually listens.
				if (OnNewCameraViewMatrix.HasHandlers())
				{
					auto view = frame->GetCameraViewForCurrentDeviceOrientation();
					OnNewCameraViewMatrix(view);
				}
				OnPlaybackProgress(playbackReader.GetProgress());
			}
			catch (const std::exception& e)
			{
				Logger::AR->error("Failed to read AR frame: {}", e.what());
				break;
			}
		}
		Stop();
		OnSessionInterruptionChange(true);
	}

	/// Returns a texture filled with @p frame's camera image, reusing a cached
	/// texture when one is available (pair with ReturnTexture to recycle).
	/// NOTE(review): textures are allocated with raw `new` and the cache entries are
	/// never visibly deleted in this translation unit — verify ownership/cleanup, or
	/// consider std::unique_ptr in the cache.
	Scene::Texture* ArSessionPlayback::MakeTexture(ArFrame* frame)
	{
		Scene::Texture* texture;
		if (!m_textureCache.empty())
		{
			texture = m_textureCache.back();
			m_textureCache.pop_back();
		}
		else
		{
			texture = new Scene::Texture();
			texture->format = DataFormat::R8G8B8A8_UNORM;
			texture->updateFrequency = Scene::UpdateFrequency::Always;
		}
		auto img = frame->GetCameraImage();
		texture->resolution = { img.luminescenceOrColor.resolution, 1 };
		texture->textureBuffer = img.luminescenceOrColor.data;
		texture->size = img.luminescenceOrColor.resolution.x * img.luminescenceOrColor.resolution.y *
		                img.luminescenceOrColor.numChannels;
		texture->updated = true;
		return texture;
	}

	/// Returns a texture previously obtained from MakeTexture to the reuse cache.
	void ArSessionPlayback::ReturnTexture(Scene::Texture* texture)
	{
		m_textureCache.push_back(texture);
	}

	void ArSessionPlayback::SetRenderer(IRenderer* renderer)
	{
		//TODO
	}
}