/*
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

#include "ArFrameArKit.h"
#include "ArSessionArKitInternal.h"
#include "ArTrackingStateConverter.h"
#include "Math/Math.hpp"

#import <ARKit/ARKit.h>
#import <UIKit/UIKit.h>
#import <CoreImage/CoreImage.h>
#import <CoreVideo/CoreVideo.h>

namespace OpenVulkano::AR::ArKit
{
    namespace
    {
        // Shared context for JPEG encoding, created once at static-init time.
        CIContext* CICONTEXT = [CIContext new];

        inline Math::Vector2i GetSize(CVPixelBufferRef pixelBuffer)
        {
            return Math::Vector2i(CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer));
        }

        inline Math::Vector2i GetSize(CVPixelBufferRef pixelBuffer, size_t plane)
        {
            return Math::Vector2i(CVPixelBufferGetWidthOfPlane(pixelBuffer, plane), CVPixelBufferGetHeightOfPlane(pixelBuffer, plane));
        }

        inline Math::Vector2i GetDepthSize(ARFrame* arKitFrame)
        {
            if (@available(iOS 14.0, *))
            {
                if (arKitFrame.sceneDepth) { return GetSize(arKitFrame.sceneDepth.depthMap); }
                if (arKitFrame.smoothedSceneDepth) { return GetSize(arKitFrame.smoothedSceneDepth.depthMap); }
            }
            return { -1, -1 };
        }

        UIInterfaceOrientation GetInterfaceOrientation()
        {
            UIInterfaceOrientation orientation = UIInterfaceOrientationLandscapeRight;
            //TODO
            /*auto window = [[UIApplication sharedApplication] windows].firstObject;
            if (window && window.windowScene)
            {
                orientation = window.windowScene.interfaceOrientation;
            }*/
            return orientation;
        }

        ArFrameMetadata ReadFrameMetadata(ARFrame* arFrame)
        {
            ArFrameMetadata frameData;
            auto intrinsics = arFrame.camera.intrinsics;
            Math::Matrix3f intrinsicsMat = Math::Matrix3f(
                reinterpret_cast<Math::Vector3f&>(intrinsics.columns[0]),
                reinterpret_cast<Math::Vector3f&>(intrinsics.columns[1]),
                reinterpret_cast<Math::Vector3f&>(intrinsics.columns[2]));
            auto transform = arFrame.camera.transform;
            auto projection = arFrame.camera.projectionMatrix;
            frameData.intrinsic = { intrinsicsMat, { arFrame.camera.imageResolution.width, arFrame.camera.imageResolution.height } };
            frameData.transformation = reinterpret_cast<Math::Matrix4f&>(transform);
            frameData.projection = reinterpret_cast<Math::Matrix4f&>(projection);
            frameData.exposureTime = arFrame.camera.exposureDuration;
            frameData.exposureOffset = arFrame.camera.exposureOffset;
            frameData.lightIntensity = arFrame.lightEstimate.ambientIntensity;
            frameData.lightColorTemp = arFrame.lightEstimate.ambientColorTemperature;
            frameData.timestamp = arFrame.timestamp;
            frameData.timestampDepth = arFrame.capturedDepthDataTimestamp;
            frameData.trackingState = GetArTrackingState(arFrame.camera);
            return frameData;
        }
    }

    ArFrameArKit::ArFrameArKit(ARFrame* arKitFrame, std::shared_ptr<ArSessionArKitInternal> arSession)
        : ArFrame(std::static_pointer_cast<ArSession>(arSession), arSession->GetFrameId()),
          m_arKitFrame(arKitFrame), m_lockedColor(false), m_lockedDepth(false)
    {
        [m_arKitFrame retain];
        frameMetadata = ReadFrameMetadata(arKitFrame);

        // The captured image is NV12: a full-resolution luma plane followed by a
        // half-resolution interleaved CbCr plane (2 bytes per UV sample).
        m_colorImage.intrinsic = Math::CameraIntrinsic(frameMetadata.intrinsic);
        m_colorImage.format = ArImagePlanar::Format::NV12;
        m_colorImage.luminescenceOrColor.resolution = GetSize(arKitFrame.capturedImage);
        m_colorImage.luminescenceOrColor.rowPadding = CVPixelBufferGetBytesPerRowOfPlane(arKitFrame.capturedImage, 0) - m_colorImage.luminescenceOrColor.resolution.x;
        m_colorImage.uv.resolution = m_colorImage.luminescenceOrColor.resolution / 2u;
        m_colorImage.uv.rowPadding = CVPixelBufferGetBytesPerRowOfPlane(arKitFrame.capturedImage, 1) - m_colorImage.uv.resolution.x * 2;

#ifdef DEBUG
        assert(m_colorImage.uv.resolution == Math::Vector2ui(GetSize(arKitFrame.capturedImage, 1)));
        auto format = CVPixelBufferGetPixelFormatType(arKitFrame.capturedImage);
        assert(format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange);
#endif

        m_depthImage.format = ArDepthFormat::UNAVAILABLE;
        m_depthImage.depth.resolution = GetDepthSize(arKitFrame);
        if (@available(iOS 14.0, *))
        {
            if (m_arKitFrame.sceneDepth || m_arKitFrame.smoothedSceneDepth)
            {
                m_depthImage.format = ArDepthFormat::METER_FP32;
                m_depthImage.confidence.resolution = m_depthImage.depth.resolution;
            }
        }
        m_depthImage.intrinsic = frameMetadata.intrinsic.GetForResolution(m_depthImage.depth.resolution);
    }

    ArFrameArKit::~ArFrameArKit()
    {
        // Release any CPU locks taken in GetCameraImage()/GetDepthImage().
        if (m_lockedColor)
        {
            CVPixelBufferUnlockBaseAddress(m_arKitFrame.capturedImage, kCVPixelBufferLock_ReadOnly);
        }
        if (m_lockedDepth)
        {
            if (@available(iOS 14.0, *))
            {
                if (m_arKitFrame.sceneDepth)
                {
                    CVPixelBufferUnlockBaseAddress(m_arKitFrame.sceneDepth.depthMap, kCVPixelBufferLock_ReadOnly);
                    CVPixelBufferUnlockBaseAddress(m_arKitFrame.sceneDepth.confidenceMap, kCVPixelBufferLock_ReadOnly);
                }
                else if (m_arKitFrame.smoothedSceneDepth)
                {
                    CVPixelBufferUnlockBaseAddress(m_arKitFrame.smoothedSceneDepth.depthMap, kCVPixelBufferLock_ReadOnly);
                    CVPixelBufferUnlockBaseAddress(m_arKitFrame.smoothedSceneDepth.confidenceMap, kCVPixelBufferLock_ReadOnly);
                }
            }
        }
        [m_arKitFrame release];
    }

    Math::Matrix4f ArFrameArKit::GetCameraViewForCurrentDeviceOrientation()
    {
        auto arKitViewMat = [m_arKitFrame.camera viewMatrixForOrientation:GetInterfaceOrientation()];
        return reinterpret_cast<Math::Matrix4f&>(arKitViewMat);
    }

    Math::Matrix4f ArFrameArKit::GetCameraProjection(Math::Vector2f viewportSize, float near, float far)
    {
        // CGSize stores CGFloat (double on 64-bit), so convert component-wise
        // rather than reinterpreting the float vector.
        auto projection = [m_arKitFrame.camera projectionMatrixForOrientation:GetInterfaceOrientation() viewportSize:CGSizeMake(viewportSize.x, viewportSize.y) zNear:near zFar:far];
        return reinterpret_cast<Math::Matrix4f&>(projection);
    }

    ArImagePlanar ArFrameArKit::GetCameraImage()
    {
        if (!m_lockedColor)
        {
            // Lock for CPU access; the matching unlock happens in the destructor.
            CVPixelBufferLockBaseAddress(m_arKitFrame.capturedImage, kCVPixelBufferLock_ReadOnly);
            m_colorImage.luminescenceOrColor.data = CVPixelBufferGetBaseAddressOfPlane(m_arKitFrame.capturedImage, 0);
            m_colorImage.uv.data = CVPixelBufferGetBaseAddressOfPlane(m_arKitFrame.capturedImage, 1);
            m_lockedColor = true;
        }
        return m_colorImage;
    }

    ArDepthImage ArFrameArKit::GetDepthImage()
    {
        if (@available(iOS 14.0, *))
        {
            if (!m_lockedDepth)
            {
                if (m_arKitFrame.sceneDepth)
                {
                    CVPixelBufferLockBaseAddress(m_arKitFrame.sceneDepth.depthMap, kCVPixelBufferLock_ReadOnly);
                    CVPixelBufferLockBaseAddress(m_arKitFrame.sceneDepth.confidenceMap, kCVPixelBufferLock_ReadOnly);
                    m_depthImage.depth.data = CVPixelBufferGetBaseAddress(m_arKitFrame.sceneDepth.depthMap);
                    m_depthImage.confidence.data = CVPixelBufferGetBaseAddress(m_arKitFrame.sceneDepth.confidenceMap);
                    m_lockedDepth = true;
                }
                else if (m_arKitFrame.smoothedSceneDepth)
                {
                    CVPixelBufferLockBaseAddress(m_arKitFrame.smoothedSceneDepth.depthMap, kCVPixelBufferLock_ReadOnly);
                    CVPixelBufferLockBaseAddress(m_arKitFrame.smoothedSceneDepth.confidenceMap, kCVPixelBufferLock_ReadOnly);
                    m_depthImage.depth.data = CVPixelBufferGetBaseAddress(m_arKitFrame.smoothedSceneDepth.depthMap);
                    m_depthImage.confidence.data = CVPixelBufferGetBaseAddress(m_arKitFrame.smoothedSceneDepth.confidenceMap);
                    m_lockedDepth = true;
                }
            }
        }
        return m_depthImage;
    }

    bool ArFrameArKit::GetCameraImageAsJpeg(const std::function<void(const uint8_t*, size_t)>& handler)
    {
        bool success = false;
        CIImage* image = [[CIImage alloc] initWithCVPixelBuffer:GetArKitFrame().capturedImage];
        auto data = [CICONTEXT JPEGRepresentationOfImage:image colorSpace:image.colorSpace options:nil];
        if (data)
        {
            handler(static_cast<const uint8_t*>([data bytes]), [data length]);
            success = true;
        }
        [image release];
        return success;
    }
}
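
// A minimal usage sketch (illustrative only, not part of this file): how a
// caller might persist a frame's camera image via GetCameraImageAsJpeg(). The
// SaveFrameAsJpeg helper and the std::ofstream sink are assumptions for
// illustration; the handler signature matches the one used above. Note that
// pixel pointers handed out by GetCameraImage()/GetDepthImage() stay valid
// only for the lifetime of the ArFrameArKit instance, since its destructor
// unlocks the underlying CVPixelBuffers.
//
//     #include <fstream>
//
//     void SaveFrameAsJpeg(OpenVulkano::AR::ArKit::ArFrameArKit& frame, const std::string& path)
//     {
//         frame.GetCameraImageAsJpeg([&path](const uint8_t* bytes, size_t size)
//         {
//             std::ofstream out(path, std::ios::binary); // hypothetical sink
//             out.write(reinterpret_cast<const char*>(bytes), static_cast<std::streamsize>(size));
//         });
//     }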