Add initial ArKit support

2021-07-07 20:21:22 +02:00
parent 2e7b8d03c9
commit cb3c93c8c8
10 changed files with 624 additions and 1 deletion

View File

@@ -0,0 +1,42 @@
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
#pragma once
#include "AR/ArFrame.hpp"
#import <ARKit/ARFrame.h>
namespace openVulkanoCpp::AR::ArKit
{
class ArSessionArKitInternal;
class ArFrameArKit final : public ArFrame
{
public:
ArFrameArKit(ARFrame* arKitFrame, std::shared_ptr<ArSessionArKitInternal> arSession);
~ArFrameArKit() override;
ArImagePlanar GetCameraImage() override;
ArDepthImage GetDepthImage() override;
Math::Matrix4f GetCameraViewForCurrentDeviceOrientation() override;
Math::Matrix4f GetCameraProjection(Math::Vector2f viewportSize, float near, float far) override;
ARFrame* GetArKitFrame() const { return m_arKitFrame; }
bool GetCameraImageAsJpeg(const std::function<void(const char*, size_t)>& handler) override;
private:
ARFrame* m_arKitFrame;
bool m_lockedColor, m_lockedDepth;
ArImagePlanar m_colorImage;
ArDepthImage m_depthImage;
};
}

View File

@@ -0,0 +1,194 @@
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
#include "ArFrameArKit.h"
#include "ArSessionArKitInternal.h"
#include "ArTrackingStateConverter.h"
#include "Math/Math.hpp"
#import <ARKit/ARDepthData.h>
#import <ARKit/ARLightEstimate.h>
#import <ARKit/ARPointCloud.h>
#import <CoreVideo/CoreVideo.h>
namespace openVulkanoCpp::AR::ArKit
{
namespace
{
CIContext* CICONTEXT = [CIContext new];
inline Math::Vector2i GetSize(CVPixelBufferRef pixelBuffer)
{
return Math::Vector2i(CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer));
}
inline Math::Vector2i GetSize(CVPixelBufferRef pixelBuffer, size_t plane)
{
return Math::Vector2i(CVPixelBufferGetWidthOfPlane(pixelBuffer, plane), CVPixelBufferGetHeightOfPlane(pixelBuffer, plane));
}
inline Math::Vector2i GetDepthSize(ARFrame* arKitFrame)
{
if(@available(iOS 14.0, *))
{
if(arKitFrame.sceneDepth)
{
return GetSize(arKitFrame.sceneDepth.depthMap);
}
if(arKitFrame.smoothedSceneDepth)
{
return GetSize(arKitFrame.smoothedSceneDepth.depthMap);
}
}
return { -1, -1 };
}
UIInterfaceOrientation GetInterfaceOrientation()
{
UIInterfaceOrientation orientation = UIInterfaceOrientationLandscapeRight;
//TODO
/*auto window = [[UIApplication sharedApplication] windows].firstObject;
if (window && window.windowScene)
{
orientation = window.windowScene.interfaceOrientation;
}*/
return orientation;
}
ArFrameMetadata ReadFrameMetadata(ARFrame* arFrame)
{
ArFrameMetadata frameData;
auto intrinsics = arFrame.camera.intrinsics;
Math::Matrix3f intrinsicsMat = Math::Matrix3f(
reinterpret_cast<Math::Vector3f&>(intrinsics.columns[0]),
reinterpret_cast<Math::Vector3f&>(intrinsics.columns[1]),
reinterpret_cast<Math::Vector3f&>(intrinsics.columns[2]));
auto transform = arFrame.camera.transform;
auto projection = arFrame.camera.projectionMatrix;
frameData.intrinsic = { intrinsicsMat, { arFrame.camera.imageResolution.width, arFrame.camera.imageResolution.height } };
frameData.transformation = reinterpret_cast<Math::Matrix4f&>(transform);
frameData.projection = reinterpret_cast<Math::Matrix4f&>(projection);
frameData.exposureTime = arFrame.camera.exposureDuration;
frameData.exposureOffset = arFrame.camera.exposureOffset;
frameData.lightIntensity = arFrame.lightEstimate.ambientIntensity;
frameData.lightColorTemp = arFrame.lightEstimate.ambientColorTemperature;
frameData.timestamp = arFrame.timestamp;
frameData.timestampDepth = arFrame.capturedDepthDataTimestamp;
frameData.trackingState = GetArTrackingState(arFrame.camera);
return frameData;
}
}
ArFrameArKit::ArFrameArKit(ARFrame* arKitFrame, std::shared_ptr<ArSessionArKitInternal> arSession)
: ArFrame(std::static_pointer_cast<ArSession>(arSession), arSession->GetFrameId()), m_arKitFrame(arKitFrame)
, m_lockedColor(false), m_lockedDepth(false)
{
[m_arKitFrame retain];
frameMetadata = ReadFrameMetadata(arKitFrame);
m_colorImage.intrinsic = Math::CameraIntrinsic(frameMetadata.intrinsic);
m_colorImage.format = ArImagePlanar::Format::NV12;
m_colorImage.luminescenceOrColor.resolution = GetSize(arKitFrame.capturedImage);
m_colorImage.uv.resolution = m_colorImage.luminescenceOrColor.resolution / 2u;
m_depthImage.format = ArDepthFormat::METER_FP32;
m_depthImage.depth.resolution = GetDepthSize(arKitFrame);
m_depthImage.confidence.resolution = m_depthImage.depth.resolution;
m_depthImage.intrinsic = frameMetadata.intrinsic.GetForResolution(m_depthImage.depth.resolution);
}
ArFrameArKit::~ArFrameArKit()
{
if(m_lockedColor)
{
CVPixelBufferUnlockBaseAddress(m_arKitFrame.capturedImage, kCVPixelBufferLock_ReadOnly);
}
if(m_lockedDepth)
{
// sceneDepth/smoothedSceneDepth are iOS 14+ API; m_lockedDepth can only be set under that check, so guard here as well
if(@available(iOS 14.0, *))
{
if(m_arKitFrame.sceneDepth)
{
CVPixelBufferUnlockBaseAddress(m_arKitFrame.sceneDepth.depthMap, kCVPixelBufferLock_ReadOnly);
CVPixelBufferUnlockBaseAddress(m_arKitFrame.sceneDepth.confidenceMap, kCVPixelBufferLock_ReadOnly);
}
else
{
CVPixelBufferUnlockBaseAddress(m_arKitFrame.smoothedSceneDepth.depthMap, kCVPixelBufferLock_ReadOnly);
CVPixelBufferUnlockBaseAddress(m_arKitFrame.smoothedSceneDepth.confidenceMap, kCVPixelBufferLock_ReadOnly);
}
}
}
[m_arKitFrame release];
}
Math::Matrix4f ArFrameArKit::GetCameraViewForCurrentDeviceOrientation()
{
auto arKitViewMat = [m_arKitFrame.camera viewMatrixForOrientation:GetInterfaceOrientation()];
return reinterpret_cast<Math::Matrix4f&>(arKitViewMat);
}
Math::Matrix4f ArFrameArKit::GetCameraProjection(Math::Vector2f viewportSize, float near, float far)
{
// CGSize stores CGFloat (double on 64-bit), so build it explicitly instead of reinterpreting the float vector
CGSize viewport = CGSizeMake(viewportSize[0], viewportSize[1]);
auto projection = [m_arKitFrame.camera projectionMatrixForOrientation:GetInterfaceOrientation() viewportSize:viewport zNear:near zFar:far];
return reinterpret_cast<Math::Matrix4f&>(projection);
}
ArImagePlanar ArFrameArKit::GetCameraImage()
{
if(!m_lockedColor)
{
CVPixelBufferLockBaseAddress(m_arKitFrame.capturedImage, kCVPixelBufferLock_ReadOnly);
m_colorImage.luminescenceOrColor.data = CVPixelBufferGetBaseAddressOfPlane(m_arKitFrame.capturedImage, 0);
m_colorImage.uv.data = CVPixelBufferGetBaseAddressOfPlane(m_arKitFrame.capturedImage, 1);
m_lockedColor = true;
}
return m_colorImage;
}
ArDepthImage ArFrameArKit::GetDepthImage()
{
if (@available(iOS 14.0, *))
{
if (!m_lockedDepth)
{
if(m_arKitFrame.sceneDepth)
{
CVPixelBufferLockBaseAddress(m_arKitFrame.sceneDepth.depthMap, kCVPixelBufferLock_ReadOnly);
CVPixelBufferLockBaseAddress(m_arKitFrame.sceneDepth.confidenceMap, kCVPixelBufferLock_ReadOnly);
m_depthImage.depth.data = CVPixelBufferGetBaseAddress(m_arKitFrame.sceneDepth.depthMap);
m_depthImage.confidence.data = CVPixelBufferGetBaseAddress(m_arKitFrame.sceneDepth.confidenceMap);
m_lockedDepth = true;
}
else if (m_arKitFrame.smoothedSceneDepth)
{
CVPixelBufferLockBaseAddress(m_arKitFrame.smoothedSceneDepth.depthMap, kCVPixelBufferLock_ReadOnly);
CVPixelBufferLockBaseAddress(m_arKitFrame.smoothedSceneDepth.confidenceMap, kCVPixelBufferLock_ReadOnly);
m_depthImage.depth.data = CVPixelBufferGetBaseAddress(m_arKitFrame.smoothedSceneDepth.depthMap);
m_depthImage.confidence.data = CVPixelBufferGetBaseAddress(m_arKitFrame.smoothedSceneDepth.confidenceMap);
m_lockedDepth = true;
}
}
}
return m_depthImage;
}
bool ArFrameArKit::GetCameraImageAsJpeg(const std::function<void(const char*, size_t)>& handler)
{
bool success = false;
CIImage* image = [[CIImage alloc] initWithCVPixelBuffer: GetArKitFrame().capturedImage];
auto data = [CICONTEXT JPEGRepresentationOfImage: image colorSpace: image.colorSpace options: nil];
if (data)
{
handler(static_cast<const char*>([data bytes]), [data length]);
success = true;
}
[image release];
return success;
}
}
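
A minimal consumer sketch of the frame API implemented above (not part of the commit). Only the ArFrameArKit methods and image-struct fields used in this file are taken as given; the surrounding function and the way the frame is obtained are illustrative. The plane pointers stay valid only while the frame object is alive, because the pixel buffers are unlocked in the destructor.

#include "AR/ArFrame.hpp"
#include <memory>

// Sketch only: consuming a frame produced by ArFrameArKit (frame origin is assumed).
void HandleFrame(const std::shared_ptr<openVulkanoCpp::AR::ArFrame>& frame)
{
    using namespace openVulkanoCpp::AR;
    ArImagePlanar color = frame->GetCameraImage(); // locks the NV12 capture buffer
    ArDepthImage depth = frame->GetDepthImage();   // locks depth/confidence buffers (iOS 14+)
    // color.luminescenceOrColor / color.uv and depth.depth / depth.confidence now point
    // into the locked CVPixelBuffers; copy or upload them before releasing the frame.
    frame->GetCameraImageAsJpeg([](const char* bytes, size_t size)
    {
        // e.g. hand the encoded JPEG to storage or a network layer
        (void)bytes; (void)size;
    });
}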

View File

@@ -0,0 +1,22 @@
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
#pragma once
#import <ARKit/ARSession.h>
#include "ArSessionArKitInternal.h"
@interface ArKitDelegate : NSObject <ARSessionDelegate>
- (id)initWithFrameHandler:(openVulkanoCpp::AR::ArKit::ArSessionArKitInternal*)session;
- (void)session:(ARSession *)session didUpdateFrame:(ARFrame *)frame;
- (void)sessionWasInterrupted:(ARSession *)session;
- (void)sessionInterruptionEnded:(ARSession *)session;
- (void)session:(ARSession *)session didFailWithError:(NSError *)error;
- (BOOL)sessionShouldAttemptRelocalization:(ARSession *)session;
- (void)session:(ARSession *)session cameraDidChangeTrackingState:(ARCamera *)camera;
- (void)session:(ARSession *)session didUpdateAnchors:(NSArray<__kindof ARAnchor*>*)anchors;
@end

View File

@@ -0,0 +1,54 @@
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
#include "ArKitDelegate.h"
@implementation ArKitDelegate
{
openVulkanoCpp::AR::ArKit::ArSessionArKitInternal* m_arSession;
}
- (id)initWithFrameHandler:(openVulkanoCpp::AR::ArKit::ArSessionArKitInternal*)session
{
if ((self = [super init]))
{
m_arSession = session;
}
return self;
}
- (void)session:(ARSession *)session didUpdateFrame:(ARFrame *)frame
{
m_arSession->OnArNewFrame(session, frame);
}
- (void)sessionWasInterrupted:(ARSession *)session
{
m_arSession->OnArSessionInterruptedChanged(session, true);
}
- (void)sessionInterruptionEnded:(ARSession *)session
{
m_arSession->OnArSessionInterruptedChanged(session, false);
}
- (void)session:(ARSession *)session didFailWithError:(NSError *)error
{
m_arSession->OnArSessionFailed(session, error);
}
- (BOOL)sessionShouldAttemptRelocalization:(ARSession *)session
{
return m_arSession->ArShouldAttemptRelocalization();
}
- (void)session:(ARSession *)session cameraDidChangeTrackingState:(ARCamera *)camera
{
m_arSession->OnArCameraTrackingChange(session, camera);
}
- (void)session:(ARSession *)session didUpdateAnchors:(NSArray<__kindof ARAnchor*>*)anchors
{
m_arSession->OnArAnchorsUpdate(anchors);
}
@end

View File

@@ -0,0 +1,39 @@
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
#pragma once
#include "AR/ArSession.hpp"
// Define the class of the native AR session to be used
#define NATIVE_AR_SESSION_CLASS ArKit::ArSessionArKit
namespace openVulkanoCpp::AR::ArKit
{
/**
* Thin intermediate helper class; the actual ARKit implementation lives in ArSessionArKitInternal.
* The split keeps Objective-C and ARKit types from leaking into the ArSession.cpp translation unit.
*/
class ArSessionArKit : public ArSession
{
public:
[[nodiscard]] static std::shared_ptr<ArSession> Create(const ArSessionConfig& config);
[[nodiscard]] static bool IsAvailable();
[[nodiscard]] static const ArSessionCapabilities& GetCapabilities();
[[nodiscard]] ArSessionType GetSessionType() final { return ArSessionType::NATIVE; }
[[nodiscard]] ArType GetArType() final { return ArType::AR_KIT; }
protected:
ArSessionArKit();
~ArSessionArKit() override = default;
};
}
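
The NATIVE_AR_SESSION_CLASS macro above is the hook through which platform-agnostic code is meant to reach this class. A hypothetical sketch of that dispatch follows; ArSession.cpp is not part of this commit, so the factory function name and include path are assumptions.

// Hypothetical factory on the platform-agnostic side; it only ever sees the plain
// C++ ArSessionArKit interface, never Objective-C or ARKit types.
#include "ArSessionArKit.h" // include path is an assumption
#include <memory>

std::shared_ptr<openVulkanoCpp::AR::ArSession>
CreateNativeArSession(const openVulkanoCpp::AR::ArSessionConfig& config)
{
    using namespace openVulkanoCpp::AR;
    if (!NATIVE_AR_SESSION_CLASS::IsAvailable())
        return nullptr;
    return NATIVE_AR_SESSION_CLASS::Create(config);
}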

View File

@@ -0,0 +1,40 @@
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
#include "ArSessionArKit.h"
#include "ArSessionArKitInternal.h"
namespace openVulkanoCpp::AR::ArKit
{
namespace
{
ArSessionCapabilities QueryNativeCapabilities()
{
bool supportsDepth = [ARWorldTrackingConfiguration supportsFrameSemantics:ARFrameSemanticSceneDepth];
ArSessionCapabilities capabilities(ArType::AR_KIT, ArSessionType::NATIVE, true, supportsDepth);
return capabilities;
}
}
std::shared_ptr<ArSession> ArSessionArKit::Create(const ArSessionConfig& config)
{
return std::dynamic_pointer_cast<ArSession>(std::make_shared<ArSessionArKitInternal>(config));
}
bool ArSessionArKit::IsAvailable()
{
return true;
}
const ArSessionCapabilities& ArSessionArKit::GetCapabilities()
{
static ArSessionCapabilities capabilities = QueryNativeCapabilities();
return capabilities;
}
ArSessionArKit::ArSessionArKit() : ArSession({ ArType::AR_KIT, ArDepthFormat::METER_FP32, { 0, 2 } })
{}
}
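
IsAvailable() above currently reports true unconditionally. If a stricter runtime check were ever wanted, ARKit exposes one directly; the following is only a sketch of that alternative, not part of this commit.

// Possible stricter check: ARConfiguration's isSupported class property reports
// whether the device supports world tracking at all.
bool ArSessionArKit::IsAvailable()
{
    return ARWorldTrackingConfiguration.isSupported;
}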

View File

@@ -0,0 +1,61 @@
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
#pragma once
#include "ArSessionArKit.h"
#include "Data/Concurent/MutexProtectedObject.hpp"
#include <atomic>
#import <ARKit/ARSession.h>
#import <ARKit/ARConfiguration.h>
@class ArKitDelegate;
namespace openVulkanoCpp::AR::ArKit
{
class ArSessionArKitInternal final : public ArSessionArKit, public std::enable_shared_from_this<ArSessionArKitInternal>
{
public:
ArSessionArKitInternal(const ArSessionConfig& config);
~ArSessionArKitInternal() override;
void Start() override;
void Stop() override;
void Pause() override;
[[nodiscard]] std::shared_ptr<ArFrame> GetFrame() override;
void RequestHighResolutionFrame() override;
size_t GetFrameId()
{
return m_frameId++;
}
// AR Kit delegate events
void OnArNewFrame(ARSession* session, ARFrame* frame);
void OnArSessionInterruptedChanged(ARSession* session, bool interrupted);
void OnArSessionFailed(ARSession* session, NSError* error);
void OnArCameraTrackingChange(ARSession* session, ARCamera* camera);
void OnArAnchorsUpdate(NSArray<__kindof ARAnchor*>* anchors);
bool ArShouldAttemptRelocalization();
private:
ArKitDelegate* m_arKitDelegate;
ARWorldTrackingConfiguration* m_arConfig;
ARSession* m_arSession;
#if (__cplusplus >= 202002L)
std::atomic<std::shared_ptr<ArFrame>> m_frame;
#else
SpintexProtectedObject<std::shared_ptr<ArFrame>> m_frame;
#endif
std::atomic_size_t m_frameId;
};
}
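
The #if branch above selects between C++20's std::atomic<std::shared_ptr> and the project's mutex-protected wrapper for publishing the most recent frame. A minimal standalone illustration of the C++20 path; the names here are illustrative, not project code.

#include <atomic>
#include <memory>

struct Frame { std::size_t id; };

// The ARKit delegate thread stores the newest frame; render/consumer threads
// load it without taking a mutex.
std::atomic<std::shared_ptr<Frame>> g_latestFrame;

void PublishFrame(std::size_t id)
{
    g_latestFrame.store(std::make_shared<Frame>(Frame{ id }));
}

std::shared_ptr<Frame> LatestFrame()
{
    return g_latestFrame.load();
}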

View File

@@ -0,0 +1,136 @@
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
#include "ArSessionArKitInternal.h"
#include "ArFrameArKit.h"
#include "ArTrackingStateConverter.h"
#include "Base/Logger.hpp"
#include "IO/AppFolders.hpp"
#include <sstream>
#import <ARKit/ARSession.h>
#import <ARKit/ARFrame.h>
#import <ARKit/ARConfiguration.h>
#import <ARKit/ARCamera.h>
#import <ARKit/ARKit.h>
#import <CoreVideo/CoreVideo.h>
#import "ArKitDelegate.h"
#define VALIDATE_SESSION(s) if(s != m_arSession) { Logger::AR->warn("ARSession does not match."); return; }
namespace openVulkanoCpp::AR::ArKit
{
ArSessionArKitInternal::ArSessionArKitInternal(const ArSessionConfig& config)
: m_frameId(0)
{
m_arKitDelegate = [[ArKitDelegate alloc] initWithFrameHandler:this];
m_arConfig = [ARWorldTrackingConfiguration new];
if (config.enableDepth)
{
m_arConfig.frameSemantics = ARFrameSemanticSceneDepth;
}
m_arSession = [ARSession new];
m_arSession.delegate = m_arKitDelegate;
m_loopClosureDetectionAnchor = nil;
running = false;
}
ArSessionArKitInternal::~ArSessionArKitInternal()
{
Stop();
[m_arSession release];
[m_arConfig release];
[m_arKitDelegate release];
}
void ArSessionArKitInternal::Start()
{
[m_arSession runWithConfiguration:m_arConfig];
running = true;
}
void ArSessionArKitInternal::Stop()
{
[m_arSession pause];
#if (__cplusplus >= 202002L)
m_frame = nullptr;
#else
m_frame.Access()->reset();
#endif
running = false;
}
void ArSessionArKitInternal::Pause()
{
ArSession::Pause();
}
double timestamp = 0;
std::shared_ptr<ArFrame> ArSessionArKitInternal::GetFrame()
{
return m_frame;
}
void ArSessionArKitInternal::RequestHighResolutionFrame()
{
Logger::AR->error("Failed to perform high resolution still frame capture: not supported on this platform!");
}
// AR Kit delegate events
void ArSessionArKitInternal::OnArNewFrame(ARSession* session, ARFrame* frame)
{
VALIDATE_SESSION(session);
std::shared_ptr<ArFrame> arFrame = std::make_shared<ArFrameArKit>(frame, shared_from_this());
OnNewFrame(arFrame);
OnNewCameraTransformation(arFrame->GetCameraTransformation());
if (OnNewCameraViewMatrix.HasHandlers())
{
auto view = arFrame->GetCameraViewForCurrentDeviceOrientation();
OnNewCameraViewMatrix(view);
}
OnNewFrameAvailable();
m_frame = arFrame;
GetRecorder().Save(static_cast<ArFrameArKit*>(arFrame.get()));
}
void ArSessionArKitInternal::OnArSessionInterruptedChanged(ARSession* session, bool interrupted)
{
VALIDATE_SESSION(session);
Logger::AR->info("Ar session {}", interrupted ? "interrupted" : "resumed");
OnSessionInterruptionChange.NotifyAll(interrupted);
}
void ArSessionArKitInternal::OnArSessionFailed(ARSession* session, NSError* error)
{
VALIDATE_SESSION(session);
Logger::AR->warn("AR Session failed, description: {}", [[error description] UTF8String]);
std::string failReason = [[error localizedDescription] UTF8String];
OnSessionFailed(failReason);
}
void ArSessionArKitInternal::OnArCameraTrackingChange(ARSession* session, ARCamera* camera)
{
VALIDATE_SESSION(session);
OnTrackingStateChanged(GetArTrackingState(camera));
}
void ArSessionArKitInternal::OnArAnchorsUpdate(NSArray<__kindof ARAnchor*>* anchors)
{
Logger::AR->info("Anchors updated.");
OnAnchorsUpdated();
}
bool ArSessionArKitInternal::ArShouldAttemptRelocalization()
{
return shouldAttemptRelocalization;
}
}
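
A sketch of the expected call sequence from platform-agnostic code. The function name and the polling use of GetFrame() are illustrative; in practice the OnNewFrame/OnNewFrameAvailable events fired above would drive consumers.

// Sketch only: run one frame through an ARKit-backed session.
void RunArKitSessionOnce(const openVulkanoCpp::AR::ArSessionConfig& config)
{
    auto session = openVulkanoCpp::AR::ArKit::ArSessionArKit::Create(config);
    session->Start();
    // ... once per render tick ...
    if (auto frame = session->GetFrame())
    {
        auto view = frame->GetCameraViewForCurrentDeviceOrientation();
        // feed the view matrix and camera image into the renderer
        (void)view;
    }
    session->Stop();
}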

View File

@@ -0,0 +1,35 @@
/*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/.
*/
#pragma once
#include "AR/ArTrackingState.hpp"
#import <ARKit/ARCamera.h>
namespace openVulkanoCpp::AR::ArKit
{
inline ArTrackingState GetArTrackingState(ARCamera* camera)
{
switch(camera.trackingState)
{
case ARTrackingStateNotAvailable: return ArTrackingState::UNAVAILABLE;
case ARTrackingStateNormal: return ArTrackingState::NORMAL;
case ARTrackingStateLimited:
{
switch(camera.trackingStateReason)
{
case ARTrackingStateReasonExcessiveMotion: return ArTrackingState::EXCESSIVE_MOTION;
case ARTrackingStateReasonInitializing: return ArTrackingState::INITIALIZING;
case ARTrackingStateReasonInsufficientFeatures: return ArTrackingState::INSUFFICIENT_FEATURES;
case ARTrackingStateReasonRelocalizing: return ArTrackingState::RELOCALIZING;
}
}
break;
}
return ArTrackingState::UNKNOWN;
}
}

View File

@@ -14,7 +14,7 @@ namespace openVulkanoCpp::AR::Playback
ArSessionPlayback::ArSessionPlayback(const std::string& recordingPath, bool autoAdvance)
: ArSession(ArSessionMetadata(recordingPath)), recordingPath(recordingPath), autoAdvance(autoAdvance), playbackReader(recordingPath)
{
capabilities = ArSessionCapabilities(metadata.type, ArSessionType::PLAYBACK, false, metadata.depthFormat != ArDepthFormat::UNAVAILABLE, false);
capabilities = ArSessionCapabilities(metadata.type, ArSessionType::PLAYBACK, false, metadata.depthFormat != ArDepthFormat::UNAVAILABLE);
constants = { Math::Matrix4f(1), metadata.confidenceRange };
}