/*
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

#include "ArFrameArKit.h"
#include "ArSessionArKitInternal.h"
#include "ArTrackingStateConverter.h"
#include "Math/Math.hpp"

#import <ARKit/ARDepthData.h>
#import <ARKit/ARLightEstimate.h>
#import <ARKit/ARPointCloud.h>
#import <CoreImage/CoreImage.h>
#import <CoreVideo/CoreVideo.h>
#import <UIKit/UIKit.h>

namespace openVulkanoCpp::AR::ArKit
{
    namespace
    {
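        // Shared CIContext for JPEG encoding: creating a CIContext is expensive,
        // so a single process-wide instance is reused by GetCameraImageAsJpeg().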
        CIContext* CICONTEXT = [CIContext new];

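        // Wrap the CoreVideo width/height queries in the engine's integer vector type.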
        inline Math::Vector2i GetSize(CVPixelBufferRef pixelBuffer)
        {
            return Math::Vector2i(CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer));
        }

        inline Math::Vector2i GetSize(CVPixelBufferRef pixelBuffer, size_t plane)
        {
            return Math::Vector2i(CVPixelBufferGetWidthOfPlane(pixelBuffer, plane), CVPixelBufferGetHeightOfPlane(pixelBuffer, plane));
        }

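        // Resolution of the LiDAR depth map, preferring raw scene depth over the
        // temporally smoothed variant; returns {-1, -1} when no depth data is
        // available (pre-iOS 14 or an unsupported session configuration).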
        inline Math::Vector2i GetDepthSize(ARFrame* arKitFrame)
        {
            if(@available(iOS 14.0, *))
            {
                if(arKitFrame.sceneDepth)
                {
                    return GetSize(arKitFrame.sceneDepth.depthMap);
                }
                if(arKitFrame.smoothedSceneDepth)
                {
                    return GetSize(arKitFrame.smoothedSceneDepth.depthMap);
                }
            }
            return { -1, -1 };
        }

        UIInterfaceOrientation GetInterfaceOrientation()
        {
            UIInterfaceOrientation orientation = UIInterfaceOrientationLandscapeRight;
            // TODO: query the real interface orientation from the key window's
            // UIWindowScene instead of assuming landscape-right:
            /*auto window = [[UIApplication sharedApplication] windows].firstObject;
            if (window && window.windowScene)
            {
                orientation = window.windowScene.interfaceOrientation;
            }*/
            return orientation;
        }

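        // Gathers per-frame camera and lighting metadata into the backend-agnostic
        // ArFrameMetadata struct. The reinterpret_casts below assume the simd
        // matrix/vector types are layout-compatible with the engine's column-major
        // float matrix types.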
        ArFrameMetadata ReadFrameMetadata(ARFrame* arFrame)
        {
            ArFrameMetadata frameData;

            auto intrinsics = arFrame.camera.intrinsics;
            Math::Matrix3f intrinsicsMat = Math::Matrix3f(
                reinterpret_cast<Math::Vector3f&>(intrinsics.columns[0]),
                reinterpret_cast<Math::Vector3f&>(intrinsics.columns[1]),
                reinterpret_cast<Math::Vector3f&>(intrinsics.columns[2]));

            auto transform = arFrame.camera.transform;
            auto projection = arFrame.camera.projectionMatrix;

            frameData.intrinsic = { intrinsicsMat, { arFrame.camera.imageResolution.width, arFrame.camera.imageResolution.height } };
            frameData.transformation = reinterpret_cast<Math::Matrix4f&>(transform);
            frameData.projection = reinterpret_cast<Math::Matrix4f&>(projection);
            frameData.exposureTime = arFrame.camera.exposureDuration;
            frameData.exposureOffset = arFrame.camera.exposureOffset;
            frameData.lightIntensity = arFrame.lightEstimate.ambientIntensity;
            frameData.lightColorTemp = arFrame.lightEstimate.ambientColorTemperature;
            frameData.timestamp = arFrame.timestamp;
            frameData.timestampDepth = arFrame.capturedDepthDataTimestamp;
            frameData.trackingState = GetArTrackingState(arFrame.camera);

            return frameData;
        }
    }

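    // Retains the wrapped ARFrame for the whole lifetime of this object so the
    // CVPixelBuffers backing the colour and depth images remain valid while
    // callers hold the views returned by GetCameraImage()/GetDepthImage().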
    ArFrameArKit::ArFrameArKit(ARFrame* arKitFrame, std::shared_ptr<ArSessionArKitInternal> arSession)
        : ArFrame(std::static_pointer_cast<ArSession>(arSession), arSession->GetFrameId()), m_arKitFrame(arKitFrame)
        , m_lockedColor(false), m_lockedDepth(false)
    {
        [m_arKitFrame retain];

        frameMetadata = ReadFrameMetadata(arKitFrame);

        m_colorImage.intrinsic = Math::CameraIntrinsic(frameMetadata.intrinsic);
        m_colorImage.format = ArImagePlanar::Format::NV12;
        m_colorImage.luminescenceOrColor.resolution = GetSize(arKitFrame.capturedImage);
        m_colorImage.uv.resolution = m_colorImage.luminescenceOrColor.resolution / 2u; // NV12 chroma is subsampled 2x in both axes

        m_depthImage.format = ArDepthFormat::METER_FP32;
        m_depthImage.depth.resolution = GetDepthSize(arKitFrame);
        m_depthImage.confidence.resolution = m_depthImage.depth.resolution;
        m_depthImage.intrinsic = frameMetadata.intrinsic.GetForResolution(m_depthImage.depth.resolution);
    }

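    // Unlocks any pixel buffers that GetCameraImage()/GetDepthImage() locked
    // before releasing the retained ARFrame.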
    ArFrameArKit::~ArFrameArKit()
    {
        if(m_lockedColor)
        {
            CVPixelBufferUnlockBaseAddress(m_arKitFrame.capturedImage, kCVPixelBufferLock_ReadOnly);
        }
        if(m_lockedDepth)
        {
            // m_lockedDepth can only be set on iOS 14+, but the availability guard
            // is still needed so the sceneDepth accessors compile against older
            // deployment targets.
            if(@available(iOS 14.0, *))
            {
                if(m_arKitFrame.sceneDepth)
                {
                    CVPixelBufferUnlockBaseAddress(m_arKitFrame.sceneDepth.depthMap, kCVPixelBufferLock_ReadOnly);
                    CVPixelBufferUnlockBaseAddress(m_arKitFrame.sceneDepth.confidenceMap, kCVPixelBufferLock_ReadOnly);
                }
                else
                {
                    CVPixelBufferUnlockBaseAddress(m_arKitFrame.smoothedSceneDepth.depthMap, kCVPixelBufferLock_ReadOnly);
                    CVPixelBufferUnlockBaseAddress(m_arKitFrame.smoothedSceneDepth.confidenceMap, kCVPixelBufferLock_ReadOnly);
                }
            }
        }
        [m_arKitFrame release];
    }

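    // The view/projection accessors below depend on GetInterfaceOrientation(),
    // which is currently hard-coded to landscape-right (see the TODO above).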
    Math::Matrix4f ArFrameArKit::GetCameraViewForCurrentDeviceOrientation()
    {
        auto arKitViewMat = [m_arKitFrame.camera viewMatrixForOrientation:GetInterfaceOrientation()];
        return reinterpret_cast<Math::Matrix4f&>(arKitViewMat);
    }

    Math::Matrix4f ArFrameArKit::GetCameraProjection(Math::Vector2f viewportSize, float near, float far)
    {
        // CGSize holds CGFloat (double on 64-bit), so a Math::Vector2f of 32-bit
        // floats must not be reinterpreted as one; build the CGSize explicitly
        // (component access via operator[] assumed here).
        const CGSize viewport = CGSizeMake(viewportSize[0], viewportSize[1]);
        auto projection = [m_arKitFrame.camera projectionMatrixForOrientation:GetInterfaceOrientation() viewportSize:viewport zNear:near zFar:far];
        return reinterpret_cast<Math::Matrix4f&>(projection);
    }

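    // Lazily locks the captured NV12 pixel buffer and exposes its luma and
    // interleaved chroma planes; the lock is held until the frame is destroyed
    // so the returned pointers stay valid for this frame's lifetime.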
    ArImagePlanar ArFrameArKit::GetCameraImage()
    {
        if(!m_lockedColor)
        {
            CVPixelBufferLockBaseAddress(m_arKitFrame.capturedImage, kCVPixelBufferLock_ReadOnly);
            m_colorImage.luminescenceOrColor.data = CVPixelBufferGetBaseAddressOfPlane(m_arKitFrame.capturedImage, 0);
            m_colorImage.uv.data = CVPixelBufferGetBaseAddressOfPlane(m_arKitFrame.capturedImage, 1);
            m_lockedColor = true;
        }
        return m_colorImage;
    }

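    // Same lazy locking for the LiDAR depth and confidence buffers, preferring
    // raw scene depth and falling back to the smoothed variant, mirroring
    // GetDepthSize() above. When no depth is available the image is returned
    // unchanged.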
    ArDepthImage ArFrameArKit::GetDepthImage()
    {
        if (@available(iOS 14.0, *))
        {
            if (!m_lockedDepth)
            {
                if(m_arKitFrame.sceneDepth)
                {
                    CVPixelBufferLockBaseAddress(m_arKitFrame.sceneDepth.depthMap, kCVPixelBufferLock_ReadOnly);
                    CVPixelBufferLockBaseAddress(m_arKitFrame.sceneDepth.confidenceMap, kCVPixelBufferLock_ReadOnly);
                    m_depthImage.depth.data = CVPixelBufferGetBaseAddress(m_arKitFrame.sceneDepth.depthMap);
                    m_depthImage.confidence.data = CVPixelBufferGetBaseAddress(m_arKitFrame.sceneDepth.confidenceMap);
                    m_lockedDepth = true;
                }
                else if (m_arKitFrame.smoothedSceneDepth)
                {
                    CVPixelBufferLockBaseAddress(m_arKitFrame.smoothedSceneDepth.depthMap, kCVPixelBufferLock_ReadOnly);
                    CVPixelBufferLockBaseAddress(m_arKitFrame.smoothedSceneDepth.confidenceMap, kCVPixelBufferLock_ReadOnly);
                    m_depthImage.depth.data = CVPixelBufferGetBaseAddress(m_arKitFrame.smoothedSceneDepth.depthMap);
                    m_depthImage.confidence.data = CVPixelBufferGetBaseAddress(m_arKitFrame.smoothedSceneDepth.confidenceMap);
                    m_lockedDepth = true;
                }
            }
        }
        return m_depthImage;
    }

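    // Encodes the captured image to JPEG through the shared CIContext and passes
    // the encoded bytes to the handler; the bytes are only guaranteed to stay
    // valid for the duration of the callback, so the handler should copy them if
    // they are needed later.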
    bool ArFrameArKit::GetCameraImageAsJpeg(const std::function<void(const char*, size_t)>& handler)
    {
        bool success = false;
        CIImage* image = [[CIImage alloc] initWithCVPixelBuffer: GetArKitFrame().capturedImage];
        // The options parameter is declared nonnull, so pass an empty dictionary instead of nil.
        auto data = [CICONTEXT JPEGRepresentationOfImage: image colorSpace: image.colorSpace options: @{}];
        if (data)
        {
            handler(static_cast<const char*>([data bytes]), [data length]);
            success = true;
        }
        [image release];
        return success;
    }
}