/*
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

#pragma once

#include "Math/Math.hpp"
#include "Math/Timestamp.hpp"
#include "Math/Pose.hpp"
#include "Math/CameraIntrinsic.hpp"
#include "ArDepthFormat.hpp"
#include "ArTrackingState.hpp"
#include "ArFrameMetadata.hpp"
#include <functional>
#include <filesystem>
#include <memory>
#include <cstdint>
#include <cstddef>

namespace OpenVulkano::Scene
{
class Texture;
}

namespace OpenVulkano::AR
{
class ArSession;
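
/**
 * One raw image plane: a pointer to the pixel data together with its
 * resolution and number of channels.
 */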
class ArImage
{
public:
    void* data;
    Math::Vector2ui resolution;
    uint32_t numChannels = 1;
};
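
/**
 * A planar camera image: a luminance (or packed colour) plane, an interleaved
 * UV plane for the NV12/NV21 formats, the camera intrinsics and the pixel format.
 */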
class ArImagePlanar
{
public:
    enum class Format { NV12, NV21, RGB, BGR };

    ArImage luminescenceOrColor;
    ArImage uv;

    Math::CameraIntrinsic intrinsic;
    Format format = Format::NV12;
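
    /// Samples the image at normalised coordinates in [0, 1] by delegating to the pixel-coordinate overload.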
    [[nodiscard]] Math::Vector4uc Sample(float nx, float ny, bool asRgb = true) const
    {
        return Sample(static_cast<uint32_t>(nx * luminescenceOrColor.resolution.x), static_cast<uint32_t>(ny * luminescenceOrColor.resolution.y), asRgb);
    }
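
    /// Samples the pixel at (x, y): RGB/BGR data is read directly; otherwise the luminance and UV planes are combined and, if asRgb is set, converted to RGB.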
[[nodiscard]] Math::Vector4uc Sample(uint32_t x, uint32_t y, bool asRgb = true) const
|
|
{
|
|
const uint8_t* lumColBuffer = static_cast<const uint8_t*>(luminescenceOrColor.data);
|
|
if (format == Format::BGR)
|
|
{
|
|
size_t idx = (y * luminescenceOrColor.resolution.x + x) * luminescenceOrColor.numChannels;
|
|
return { lumColBuffer[idx + 2], lumColBuffer[idx + 1], lumColBuffer[idx], 255 };
|
|
}
|
|
else if (format == Format::RGB)
|
|
{
|
|
size_t idx = (y * luminescenceOrColor.resolution.x + x) * luminescenceOrColor.numChannels;
|
|
return { lumColBuffer[idx], lumColBuffer[idx + 1], lumColBuffer[idx + 2], 255 };
|
|
}
|
|
|
|
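
        // NV12/NV21 path: the UV plane is subsampled 2x2 and stores interleaved chroma byte pairs.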
        int iUV = ((y / 2) * uv.resolution.x + x / 2) * 2;

        Math::Vector4uc sample(
            lumColBuffer[y * luminescenceOrColor.resolution.x + x],
            static_cast<const uint8_t*>(uv.data)[iUV],
            static_cast<const uint8_t*>(uv.data)[iUV + 1],
            255);

        if (asRgb)
        {
            Math::Vector4f c(sample);
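            // Full-range BT.601 YCbCr -> RGB conversion; the ~128 chroma offset
            // enters through the last basis vector scaled by the alpha value of 255.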
            Math::Matrix4f conversionMatrix(
                Math::Vector4f(+1.0000f, +1.0000f, +1.0000f, +0.0000f),
                Math::Vector4f(+0.0000f, -0.3441f, +1.7720f, +0.0000f),
                Math::Vector4f(+1.4020f, -0.7141f, +0.0000f, +0.0000f),
                Math::Vector4f(-0.7010f, +0.5291f, -0.8860f, +1.0000f)
            );
            sample = Math::Utils::clamp(conversionMatrix * c, 0.0f, 255.0f);
        }

        return sample;
    }
};
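
/**
 * A depth image together with its per-pixel confidence plane, the depth
 * format and the camera intrinsics it was captured with.
 */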
class ArDepthImage
{
public:
    ArImage depth;
    ArImage confidence;
    ArDepthFormat format;
    Math::CameraIntrinsic intrinsic;
};
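
/**
 * One frame delivered by an ArSession: gives access to the camera image, the
 * depth image, the camera pose and projection, lighting estimates and metadata.
 */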
class ArFrame : public std::enable_shared_from_this<ArFrame>
{
    std::shared_ptr<ArSession> m_session;
    size_t m_frameId;
    Scene::Texture* m_texture = nullptr;
    bool m_saved = false;
    bool m_highRes = false;

protected:
    ArFrameMetadata frameMetadata;

    ArFrame(const std::shared_ptr<ArSession>& session, size_t frameId) : m_session(session), m_frameId(frameId)
    {}

public:
    virtual ~ArFrame();

    [[nodiscard]] size_t GetFrameId() const { return m_frameId; }

    [[nodiscard]] const ArFrameMetadata& GetFrameMetadata() const { return frameMetadata; }

    [[nodiscard]] const std::shared_ptr<ArSession>& GetArSession() const { return m_session; }

    [[nodiscard]] ArTrackingState GetTrackingState() const { return frameMetadata.trackingState; }

    [[nodiscard]] virtual Math::PoseF GetPose() const { return Math::PoseF(); } //TODO

    [[nodiscard]] Math::Timestamp GetTimestamp() const { return frameMetadata.timestamp; }

    [[nodiscard]] Math::Timestamp GetTimestampDepth() const { return frameMetadata.timestampDepth; }

    [[nodiscard]] virtual ArImagePlanar GetCameraImage() = 0;

    [[nodiscard]] virtual ArDepthImage GetDepthImage() = 0;

    [[nodiscard]] const Math::Matrix4f& GetCameraTransformation() const { return frameMetadata.transformation; }

    [[nodiscard]] virtual Math::Matrix4f GetCameraViewForCurrentDeviceOrientation() = 0;

    [[nodiscard]] const Math::Matrix4f& GetCameraProjection() const { return frameMetadata.projection; }

    [[nodiscard]] virtual Math::Matrix4f GetCameraProjection(Math::Vector2f viewportSize, float near = 0.25f, float far = 250.0f) = 0;

    [[nodiscard]] float GetConfidenceNormalisationFactor() const;

    [[nodiscard]] float GetColorTemperature() const { return frameMetadata.lightColorTemp; }

    [[nodiscard]] float GetLightIntensity() const { return frameMetadata.lightIntensity; }

    [[nodiscard]] float GetExposureTime() const { return frameMetadata.exposureTime; }

    [[nodiscard]] float GetExposureOffset() const { return frameMetadata.exposureOffset; }

    [[nodiscard]] bool IsSaved() const { return m_saved; }

    [[nodiscard]] const Scene::Texture* GetImageTexture();

    void Save();

    void SaveToFile(const std::filesystem::path& path, bool downsample = false);

    void SetSaved() { m_saved = true; }

    void MarkHighRes() { m_highRes = true; }

    [[nodiscard]] bool IsHighResolution() const { return m_highRes; }

    /**
     * Uses the platform's native JPEG writer to produce a JPEG representation of the camera image.
     * This might not be implemented on every platform.
     * @param handler Function to be called once the image has been encoded
     * @return true if the image was encoded, false if encoding failed or is unavailable
     */
    virtual bool GetCameraImageAsJpeg(const std::function<void(const char*, size_t)>& handler) { return false; }
};
}