Add initial AR playback support

2021-06-18 19:48:11 +02:00
parent a28650a1ef
commit d98aa4fa57
5 changed files with 306 additions and 0 deletions


@@ -0,0 +1,54 @@
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
#include "ArFramePlayback.hpp"
#include "ArSessionPlayback.h"
#include "Base/BlockProfiler.hpp"
namespace openVulkanoCpp::AR::Playback
{
ArFramePlayback::ArFramePlayback(const std::shared_ptr<ArSessionPlayback>& session, ArPlaybackReader& frameReader)
: ArFrame(session)
{
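// Eagerly read the next frame from the recording: XML metadata first, then the color, depth and confidence images.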
BlockProfiler profile("Read_AR_Frame");
const auto data = frameReader.ReadMetadata();
frameMetadata = ArFrameMetadata::FromXML(data.Data(), data.Size());
colorImgData = frameReader.ReadColorImage();
auto depth = frameReader.ReadDepthImage();
confImgData = std::move(depth.confidence.image);
depthImgData = std::move(depth.depth.image);
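// Wrap the decoded buffers in the ArDepthImage / ArImagePlanar views handed out by the getters below; intrinsics come from the frame metadata for the respective resolution.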
depthImage.format = session->GetSessionMetadata().depthFormat;
depthImage.intrinsic = frameMetadata.intrinsic.GetForResolution({depth.depth.header.width, depth.depth.header.height});
depthImage.depth.data = depthImgData.get();
depthImage.depth.resolution = { depth.depth.header.width, depth.depth.header.height };
depthImage.confidence.data = confImgData.get();
depthImage.confidence.resolution = { depth.confidence.header.width, depth.confidence.header.height };
colorImage.intrinsic = frameMetadata.intrinsic.GetForResolution({ colorImgData.cols, colorImgData.rows });
colorImage.format = ArImagePlanar::Format::RGB;
colorImage.luminescenceOrColor = { colorImgData.data, { colorImgData.cols, colorImgData.rows }};
}
ArImagePlanar ArFramePlayback::GetCameraImage()
{
return colorImage;
}
ArDepthImage ArFramePlayback::GetDepthImage()
{
return depthImage;
}
Math::Matrix4f ArFramePlayback::GetCameraViewForCurrentDeviceOrientation()
{
return Math::Utils::inverse(GetCameraTransformation());
}
Math::Matrix4f ArFramePlayback::GetCameraProjection(Math::Vector2f viewportSize, float near, float far)
{
return GetFrameMetadata().projection; // TODO: take viewportSize and the near/far planes into account instead of returning the recorded projection unchanged
}
}


@@ -0,0 +1,60 @@
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
#include "ArPlaybackReader.hpp"
#if __has_include("turbojpeg.h")
#include <turbojpeg.h>
namespace openVulkanoCpp::AR::Playback
{
ColorImg ArPlaybackReader::ReadColorImage()
{
ColorImg img;
auto file = m_archiveColor.GetNextFile();
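// Decode the JPEG color frame with libjpeg-turbo: parse the header for the dimensions, then decompress directly into an RGB buffer.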
const unsigned long jpegSize = file->second.Size();
auto* compressedImage = reinterpret_cast<unsigned char*>(file->second.Data());
int jpegSubsamp;
tjhandle jpegDecompressor = tjInitDecompress();
tjDecompressHeader2(jpegDecompressor, compressedImage, jpegSize, &img.cols, &img.rows, &jpegSubsamp);
img.channels = 3;
img.dataPtr = std::shared_ptr<uint8_t>(new uint8_t[img.cols * img.rows * 3], std::default_delete<uint8_t[]>()); // array deleter: the buffer is allocated with new[]
img.data = img.dataPtr.get();
// TODO: Would it be better to skip the RGB conversion and keep the JPEG's native YUV layout, to match the on-device pipeline?
tjDecompress2(jpegDecompressor, compressedImage, jpegSize, img.data, img.cols, 0/*pitch*/, img.rows, TJPF_RGB, TJFLAG_FASTDCT);
//tjDecompressToYUV2(jpegDecompressor, compressedImage, jpegSize, img.data, img.cols, img.rows, 1, TJFLAG_FASTDCT);
tjDestroy(jpegDecompressor);
//auto buff = new uint8_t[img.cols * img.rows * 3];
//YuvUtils::NV12FromChromaPlanes(buff, img.data + (img.cols * img.rows), img.cols * img.rows / 4);
return img;
}
}
#else
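// libjpeg-turbo is not available: fall back to stb_image (single-header; the implementation is compiled into this translation unit).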
#define STB_IMAGE_IMPLEMENTATION
#include <stb_image.h>
namespace openVulkanoCpp::AR::Playback
{
ColorImg ArPlaybackReader::ReadColorImage()
{
ColorImg img;
auto file = m_archiveColor.GetNextFile();
img.dataPtr = std::shared_ptr<uint8_t>(
	stbi_load_from_memory(reinterpret_cast<stbi_uc*>(file->second.Data()), static_cast<int>(file->second.Size()),
		&img.cols, &img.rows, &img.channels, 3 /*force RGB*/),
	&stbi_image_free);
img.channels = 3; // the returned buffer is forced to 3 components, matching the turbojpeg path
img.data = img.dataPtr.get();
return img;
}
}
#endif


@@ -0,0 +1,65 @@
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
#pragma once
#include "IO/Archive/ArchiveReader.hpp"
#include "IO/Files/Pfm.hpp"
#include "IO/Files/Pnm.hpp"
#include <cstdint>
#include <fstream>
#include <memory>
#include <string>
#include <string_view>
namespace openVulkanoCpp::AR::Playback
{
struct DepthImage
{
PfmImage depth;
PnmImage confidence;
};
struct ColorImg
{
std::shared_ptr<uint8_t> dataPtr;
uint8_t* data;
int cols, rows, channels;
};
class ArPlaybackReader final
{
static constexpr std::string_view TAR_EXTENSIONS_REGEX = R"(\.(tar(\.gz|\.bz2)?|tgz|tbz|tb2|tbz2))";
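// One archive reader per recorded stream: frame metadata, color images, depth maps and confidence maps.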
ArchiveReader m_archiveMetadata, m_archiveColor, m_archiveDepth, m_archiveConfidence;
public:
explicit ArPlaybackReader(const std::string& recDir)
{
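// Each stream is located by a filename regex; optional split suffixes (_N / .partN) and the supported tar extensions are appended to the stream-specific prefix.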
std::string extensions = R"((_\d+|\.part\d+)?)" + std::string(TAR_EXTENSIONS_REGEX);
m_archiveMetadata.Open(recDir, ".*meta(data)?" + extensions);
m_archiveColor.Open(recDir, ".*(color|image)" + extensions);
m_archiveDepth.Open(recDir, ".*depth" + extensions);
m_archiveConfidence.Open(recDir, ".*conf(idence)?" + extensions);
}
Array<char> ReadMetadata()
{
return std::move(m_archiveMetadata.GetNextFile()->second);
}
ColorImg ReadColorImage();
DepthImage ReadDepthImage()
{
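// Depth (PFM) and confidence (PNM) images are parsed directly from the archive streams.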
DepthImage img;
m_archiveDepth.GetNextFileAsStream([&img](const FileDescription&, std::istream& stream) { img.depth.Read(stream); });
m_archiveConfidence.GetNextFileAsStream([&img](const FileDescription&, std::istream& stream) { img.confidence.Read(stream); });
return img;
}
[[nodiscard]] bool HasNext() const
{
return m_archiveMetadata.HasNext() && m_archiveDepth.HasNext() && m_archiveConfidence.HasNext() && m_archiveColor.HasNext();
}
};
}


@@ -0,0 +1,85 @@
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
#include "ArSessionPlayback.h"
#include "ArFramePlayback.hpp"
#include "Base/Logger.hpp"
#include <filesystem>
namespace openVulkanoCpp::AR::Playback
{
ArSessionPlayback::ArSessionPlayback(const std::string& recordingPath, bool autoAdvance)
: ArSession(ArSessionMetadata(recordingPath)), recordingPath(recordingPath), autoAdvance(autoAdvance), playbackReader(recordingPath)
{
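// Capabilities and constants are derived from the recorded session metadata instead of querying a device.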
capabilities = ArSessionCapabilities(metadata.type, ArSessionType::PLAYBACK, false, metadata.depthFormat != ArDepthFormat::UNAVAILABLE, false);
constants = { Math::Matrix4f(1), metadata.confidenceRange };
}
ArSessionPlayback::~ArSessionPlayback() = default;
void ArSessionPlayback::Start()
{
running = true;
}
void ArSessionPlayback::Stop()
{
running = false;
}
void ArSessionPlayback::Pause()
{
running = false;
}
std::shared_ptr<ArFrame> ArSessionPlayback::GetFrame()
{
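// Reads the next recorded frame, fires the frame/pose events, and stops the session once the recording is exhausted or a read fails.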
try
{
if (playbackReader.HasNext())
{
std::shared_ptr<ArFrame> frame = std::make_shared<ArFramePlayback>(shared_from_this(), playbackReader);
//if (lastTimestamp == frame->GetTimestamp()) return nullptr;
lastTimestamp = frame->GetTimestamp();
// Trigger events
OnNewFrame(frame);
OnNewCameraTransformation(frame->GetCameraTransformation());
if (OnNewCameraViewMatrix.HasHandlers())
{
auto view = frame->GetCameraViewForCurrentDeviceOrientation();
OnNewCameraViewMatrix(view);
}
if (playbackReader.HasNext())
{
OnNewFrameAvailable();
}
else
{
OnSessionInterruptionChange(true);
}
return frame;
}
}
catch (const std::exception& e)
{
Logger::AR->error("Failed to read AR frame: {}", e.what());
}
Stop();
return nullptr;
}
ArRecorder* ArSessionPlayback::GetRecorder()
{
return nullptr;
}
ArType ArSessionPlayback::GetArType()
{
return capabilities.GetArType();
}
}


@@ -0,0 +1,42 @@
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
#pragma once
#include "AR/ArSession.h"
#include "ArPlaybackReader.hpp"
#include "Math/Timestamp.hpp"
namespace openVulkanoCpp::AR::Playback
{
class ArSessionPlayback final : public ArSession, public std::enable_shared_from_this<ArSessionPlayback>
{
public:
ArSessionPlayback(const std::string& recordingPath, bool autoAdvance);
~ArSessionPlayback() override;
void Start() override;
void Stop() override;
void Pause() override;
[[nodiscard]] std::shared_ptr<ArFrame> GetFrame() override;
[[nodiscard]] ArRecorder* GetRecorder() override;
[[nodiscard]] ArSessionType GetSessionType() override { return ArSessionType::PLAYBACK; }
[[nodiscard]] ArType GetArType() override;
private:
Math::Timestamp lastTimestamp;
const std::string recordingPath;
const bool autoAdvance;
ArPlaybackReader playbackReader;
};
}
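
Usage sketch (illustrative, not part of this commit): the snippet below shows one way the new playback session could be driven. The PlayRecording helper and the consumption loop are assumptions for illustration; only the ArSessionPlayback and ArFrame calls that appear in this change are used.

#include "ArSessionPlayback.h"
#include <memory>
#include <string>

void PlayRecording(const std::string& recordingPath)
{
	using openVulkanoCpp::AR::Playback::ArSessionPlayback;
	// enable_shared_from_this requires the session to be owned by a shared_ptr
	// before GetFrame() is called.
	auto session = std::make_shared<ArSessionPlayback>(recordingPath, /*autoAdvance=*/true);
	session->Start();
	// GetFrame() returns nullptr (and stops the session) once the recording is exhausted.
	while (const auto frame = session->GetFrame())
	{
		const auto pose = frame->GetCameraTransformation(); // camera pose of the recorded frame
		// ... hand the frame to the rendering / reconstruction pipeline ...
		(void)pose;
	}
}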