/*
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

// Make sure the Molten include is first!
#include <MoltenVK/mvk_vulkan.h> // assumption: the header name was stripped from the dump; MoltenVK's umbrella header fits the comment above

#include "ArSessionArKitInternal.h"
#include "ArFrameArKit.h"
#include "ArTrackingStateConverter.h"
#include "Base/Logger.hpp"
#include "IO/AppFolders.hpp"
#include "Vulkan/Renderer.hpp"
#include "Vulkan/Scene/VulkanTexture.hpp"

#include <memory>
#include <sstream>
#include <stdexcept>
#include <string>
#include <string_view>

#import <ARKit/ARKit.h>
#import <AVFoundation/AVFoundation.h>
#import <Metal/Metal.h>
#import "ArKitDelegate.h"

#define VALIDATE_SESSION(s)                                \
    if ((s) != m_arSession)                                \
    {                                                      \
        Logger::AR->warn("ARSession does not match.");     \
        return;                                            \
    }

namespace OpenVulkano::AR::ArKit
{
namespace
{
std::string SupportedVideoFormatsToString()
{
    // Seeding the prefix through the stringstream constructor would leave the
    // write position at 0, so the first format would overwrite it; stream the
    // prefix instead and compare tellp() against its length.
    std::stringstream ss;
    ss << "[ ";
    auto vidFormats = [ARWorldTrackingConfiguration supportedVideoFormats];
    for (ARVideoFormat* format : vidFormats)
    {
        if (ss.tellp() > std::streampos(2)) ss << ", ";
        ss << format.imageResolution.width << 'x' << format.imageResolution.height;
        ss << '@' << format.framesPerSecond;
        if (@available(iOS 16.0, *))
        {
            if (format.videoHDRSupported) ss << "_HDR";
            if (format.isRecommendedForHighResolutionFrameCapturing) ss << "_HdCaptureRecommended";
        }
    }
    ss << " ]";
    return ss.str();
}

void LogFormat(std::string_view formatUsage, ARVideoFormat* format)
{
    bool hdr = false, hdCapture = false;
    if (@available(iOS 16.0, *))
    {
        hdr = format.videoHDRSupported;
        hdCapture = format.isRecommendedForHighResolutionFrameCapturing;
    }
    Logger::AR->info("{}: {}x{}@{}{}{}", formatUsage,
                     format.imageResolution.width, format.imageResolution.height,
                     format.framesPerSecond,
                     hdr ? " HDR" : "", hdCapture ? " HdCaptureRecommended" : "");
}
}
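// For reference, SupportedVideoFormatsToString() yields strings of this shape
// (values are illustrative; the real list depends on the device's cameras):
//
//   [ 1920x1440@60, 1920x1080@60, 3840x2880@30_HDR_HdCaptureRecommended ]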
ArSessionArKitInternal::ArSessionArKitInternal(const ArSessionConfig& config) : m_frameId(0)
{
    m_arKitDelegate = [[ArKitDelegate alloc] initWithFrameHandler:this];
    m_arConfig = [ARWorldTrackingConfiguration new];
    if (config.enableDepth) m_arConfig.frameSemantics = ARFrameSemanticSceneDepth;
    if (config.sceneReconstruction) m_arConfig.sceneReconstruction = ARSceneReconstructionMesh;
    m_arConfig.planeDetection = config.planeDetection
        ? (ARPlaneDetectionVertical | ARPlaneDetectionHorizontal)
        : ARPlaneDetectionNone;
    m_arConfig.autoFocusEnabled = config.autoFocus;

    // Set video format
    {
        Logger::AR->debug("Supported Video Formats: {}", SupportedVideoFormatsToString());
        // TODO: handle AR video format in settings
        if (@available(iOS 16.0, *))
        {
            if (config.preferHDR)
                m_arConfig.videoFormat = [ARWorldTrackingConfiguration recommendedVideoFormatForHighResolutionFrameCapturing];
            // TODO: resolution handling
        }
        LogFormat("Using video format", m_arConfig.videoFormat);
        metadata.imageResolution = { m_arConfig.videoFormat.imageResolution.width,
                                     m_arConfig.videoFormat.imageResolution.height };
        metadata.frameRate = m_arConfig.videoFormat.framesPerSecond;
    }

    m_arSession = [ARSession new];
    m_arSession.delegate = m_arKitDelegate;
    running = false;
}

ArSessionArKitInternal::~ArSessionArKitInternal()
{
    Stop();
    [m_arSession release];
    [m_arConfig release];
    [m_arKitDelegate release];
}

void ArSessionArKitInternal::SetRenderer(IRenderer* renderer)
{
    if (renderer) m_textureCache.Init(renderer);
    else m_textureCache.Close();
}

Scene::Texture* ArSessionArKitInternal::MakeTexture(ArFrame* frame)
{
    if (!m_textureCache) [[unlikely]]
        throw std::runtime_error("No renderer set for which to produce textures");
    ArFrameArKit* arFrame = static_cast<ArFrameArKit*>(frame);
    ARFrame* arKitFrame = arFrame->GetArKitFrame();
    // ARKit delivers the camera image as bi-planar YCbCr; R8Unorm matches the luma (Y) plane.
    return m_textureCache.Get(arKitFrame.capturedImage, MTLPixelFormatR8Unorm);
}

void ArSessionArKitInternal::ReturnTexture(Scene::Texture* texture)
{
    m_textureCache.ReturnTexture(texture);
}

void ArSessionArKitInternal::Start()
{
    [m_arSession runWithConfiguration:m_arConfig];
    running = true;
}

void ArSessionArKitInternal::Stop()
{
    if (m_lockedConfiguration)
    {
        m_lockedConfiguration = false;
        if (@available(iOS 16.0, *))
            [[ARWorldTrackingConfiguration configurableCaptureDeviceForPrimaryCamera] unlockForConfiguration];
    }
    OnStopped();
    [m_arSession pause];
    /*#if (__cplusplus >= 202002L)
    m_frame = nullptr;
    #else*/
    m_frame.Access()->reset();
    //#endif
    running = false;
}

void ArSessionArKitInternal::Pause()
{
    ArSession::Pause();
}

double timestamp = 0;

std::shared_ptr<ArFrame> ArSessionArKitInternal::GetFrame()
{
    if (GetRecorder().GetRecordingMode() == RecordingMode::FRAME_REQUEST)
        GetRecorder().Save(m_frame);
    return m_frame;
}

void ArSessionArKitInternal::RequestHighResolutionFrame()
{
    if (@available(iOS 16.0, *))
    {
        [m_arSession captureHighResolutionFrameWithCompletion:^(ARFrame* _Nullable frame, NSError* _Nullable error) {
            if (frame)
            {
                std::shared_ptr<ArFrameArKit> arFrame = std::make_shared<ArFrameArKit>(frame, shared_from_this());
                arFrame->MarkHighRes();
                OnNewFrameHighResolution(arFrame);
            }
            else
            {
                Logger::AR->warn("Failed to perform high resolution still frame capture: {}",
                                 [[error description] UTF8String]);
            }
        }];
    }
    else
    {
        Logger::AR->warn("Failed to perform high resolution still frame capture: requires iOS 16 or later!");
    }
}
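// Usage sketch for the capture above (illustrative, not part of the build).
// The capture is asynchronous: the completion handler runs later and forwards
// the frame through OnNewFrameHighResolution. How handlers are attached to
// that event is not shown in this file, so the `+=` below is an assumed
// registration API:
//
//   session->OnNewFrameHighResolution += [](std::shared_ptr<ArFrame> frame) {
//       // e.g. hand the still image to an encoder
//   };
//   session->RequestHighResolutionFrame();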
// configurableCaptureDeviceForPrimaryCamera is iOS 16+ API, hence the
// availability guards around the device-lock helpers below.
void ArSessionArKitInternal::LockExposureTime(bool locked)
{
    if (@available(iOS 16.0, *))
    {
        AVCaptureDevice* dev = [ARWorldTrackingConfiguration configurableCaptureDeviceForPrimaryCamera];
        if (!m_lockedConfiguration) m_lockedConfiguration |= [dev lockForConfiguration:nil];
        [dev setExposureMode:locked ? AVCaptureExposureModeLocked : AVCaptureExposureModeContinuousAutoExposure];
    }
}

void ArSessionArKitInternal::LockWhitebalance(bool locked)
{
    if (@available(iOS 16.0, *))
    {
        AVCaptureDevice* dev = [ARWorldTrackingConfiguration configurableCaptureDeviceForPrimaryCamera];
        if (!m_lockedConfiguration) m_lockedConfiguration |= [dev lockForConfiguration:nil];
        [dev setWhiteBalanceMode:locked ? AVCaptureWhiteBalanceModeLocked : AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance];
    }
}

void ArSessionArKitInternal::SetFlashlightOn(bool on)
{
    if (@available(iOS 16.0, *))
    {
        AVCaptureDevice* dev = [ARWorldTrackingConfiguration configurableCaptureDeviceForPrimaryCamera];
        if (!m_lockedConfiguration) m_lockedConfiguration |= [dev lockForConfiguration:nil];
        [dev setTorchMode:on ? AVCaptureTorchModeOn : AVCaptureTorchModeOff];
    }
}

// ARKit delegate events

void ArSessionArKitInternal::OnArNewFrame(ARSession* session, ARFrame* frame)
{
    VALIDATE_SESSION(session);
    std::shared_ptr<ArFrameArKit> arFrame = std::make_shared<ArFrameArKit>(frame, shared_from_this());
    OnNewFrame(arFrame);
    OnNewCameraTransformation(arFrame->GetCameraTransformation());
    if (OnNewCameraViewMatrix.HasHandlers())
    {
        auto view = arFrame->GetCameraViewForCurrentDeviceOrientation();
        OnNewCameraViewMatrix(view);
    }
    OnNewFrameAvailable();
    m_frame = arFrame;
}

void ArSessionArKitInternal::OnArSessionInterruptedChanged(ARSession* session, bool interrupted)
{
    VALIDATE_SESSION(session);
    Logger::AR->info("AR session {}", interrupted ? "interrupted" : "resumed");
    OnSessionInterruptionChange.NotifyAll(interrupted);
}

void ArSessionArKitInternal::OnArSessionFailed(ARSession* session, NSError* error)
{
    VALIDATE_SESSION(session);
    Logger::AR->warn("AR session failed, description: {}", [[error description] UTF8String]);
    std::string failReason = [[error localizedDescription] UTF8String];
    OnSessionFailed(failReason);
}

void ArSessionArKitInternal::OnArCameraTrackingChange(ARSession* session, ARCamera* camera)
{
    VALIDATE_SESSION(session);
    auto trackingState = GetArTrackingState(camera);
    Logger::AR->info("AR session tracking state changed: {}", trackingState.GetName());
    OnTrackingStateChanged(trackingState);
}

void ArSessionArKitInternal::OnArAnchorsUpdate(NSArray<__kindof ARAnchor*>* anchors)
{
    Logger::AR->info("Anchors updated.");
    OnAnchorsUpdated();
}

bool ArSessionArKitInternal::ArShouldAttemptRelocalization()
{
    return shouldAttemptRelocalization;
}
}
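// Overall usage sketch (illustrative, not part of the build). OnArNewFrame
// calls shared_from_this(), so the session must be owned by a std::shared_ptr;
// `renderer` is an assumed IRenderer* supplied by the host application:
//
//   ArSessionConfig config;
//   config.enableDepth = true;          // requests ARFrameSemanticSceneDepth
//   auto session = std::make_shared<ArSessionArKitInternal>(config);
//   session->SetRenderer(renderer);     // required before MakeTexture()
//   session->Start();                   // frames arrive via ArKitDelegate
//   ...
//   session->LockExposureTime(true);    // acquires the AVCaptureDevice lock on demand
//   session->Stop();                    // pauses ARKit and releases the device lock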