Expand AR frame metadata with focal length and f-number

Georg Hagen
2025-02-12 23:45:39 +01:00
parent 453c42755a
commit 514371900b
5 changed files with 33 additions and 4 deletions

View File

@@ -165,11 +165,17 @@ namespace OpenVulkano::AR
[[nodiscard]] float GetExposureTime() const { return frameMetadata.exposureTime; };
[[nodiscard]] float GetExposureOffset() const { return frameMetadata.exposureOffset; };
[[nodiscard]] float GetFocalLength() const { return frameMetadata.focalLength; }
[[nodiscard]] float GetFNumber() const { return frameMetadata.fNumber; }
[[nodiscard]] bool IsSaved() const { return m_saved; };
[[nodiscard]] const Scene::Texture* GetImageTexture();
[[nodiscard]] virtual std::string GetLensModel() const { return ""; }
void Save();
void SaveToFile(const std::filesystem::path& path, bool downsample = false);
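
Illustrative only, not part of the commit: a minimal consumer-side sketch of the new accessors. `frame` stands in for any frame type exposing the getters above; per the EXIF convention the focal length is reported in millimetres and the f-number is the aperture ratio, and both stay at their 0 defaults when the platform provides no EXIF data.

// Hypothetical usage sketch (not project code): log the new lens metadata.
#include <cstdio>

template <typename Frame>
void LogLensMetadata(const Frame& frame)
{
    // focalLength and fNumber default to 0 when no EXIF data is available.
    std::printf("focal length: %.2f mm, aperture: f/%.1f, lens: %s\n",
                frame.GetFocalLength(),
                frame.GetFNumber(),
                frame.GetLensModel().c_str());
}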

View File

@@ -109,6 +109,8 @@ namespace OpenVulkano::AR
frameData.intrinsic = {intrinsic, res};
camNode["ExposureDuration"] >> frameData.exposureTime;
camNode["ExposureOffset"] >> frameData.exposureOffset;
camNode["FocalLength"] >> frameData.focalLength;
camNode["FNumber"] >> frameData.fNumber;
uint64_t nanos;
root["Timestamp"] >> nanos;
frameData.timestamp = nanos;
@@ -145,6 +147,8 @@ namespace OpenVulkano::AR
- [ {}, {}, {} ]
ExposureDuration: {}
ExposureOffset: {}
FocalLength: {}
FNumber: {}
Timestamp: {}
TimestampDepth: {}
TrackingState:
@@ -165,7 +169,8 @@ Light:
camMat[0][0], camMat[1][0], camMat[2][0],
camMat[0][1], camMat[1][1], camMat[2][1],
camMat[0][2], camMat[1][2], camMat[2][2],
-exposureTime, exposureOffset, timestamp.GetNanos(), timestampDepth.GetNanos(),
+exposureTime, exposureOffset, focalLength, fNumber,
+timestamp.GetNanos(), timestampDepth.GetNanos(),
trackingState.GetName(), lightIntensity, lightColorTemp
);
return meta;
@@ -194,4 +199,4 @@ Light:
return meta.str();
}
}
}
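
The placeholders in the YAML template above are filled positionally, which is why adding FocalLength: {} and FNumber: {} also requires inserting focalLength, fNumber at the matching position in the format-argument list. A stand-alone illustration of that pattern follows; std::format and the literal values are stand-ins, since the diff does not show which formatting facility the project actually uses.

// Stand-alone sketch of the positional {}-placeholder pattern; example values only.
#include <format>
#include <iostream>

int main()
{
    constexpr float exposureTime = 0.0166f, exposureOffset = -0.25f; // invented values
    constexpr float focalLength = 4.2f, fNumber = 1.6f;              // invented values
    std::cout << std::format(
        "ExposureDuration: {}\n"
        "ExposureOffset: {}\n"
        "FocalLength: {}\n"
        "FNumber: {}\n",
        exposureTime, exposureOffset, focalLength, fNumber);
}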

View File

@@ -19,7 +19,7 @@ namespace OpenVulkano::AR
float exposureTime, exposureOffset, lightIntensity, lightColorTemp;
Math::Timestamp timestamp, timestampDepth;
ArTrackingState trackingState;
-float physicalPixelSize = 0;
+float physicalPixelSize = 0, focalLength = 0, fNumber = 0;
[[nodiscard]] std::string ToYaml() const;
@@ -34,4 +34,4 @@ namespace OpenVulkano::AR
return content[0] == '<' ? FromXML(content, length) : FromYaml(content, length);
}
};
}
}

View File

@@ -32,6 +32,8 @@ namespace OpenVulkano::AR::ArKit
ARFrame* GetArKitFrame() const { return m_arKitFrame; }
bool GetCameraImageAsJpeg(const std::function<void(const char*, size_t)>& handler) override;
std::string GetLensModel() const override;
private:
ARFrame* m_arKitFrame;

View File

@@ -13,6 +13,7 @@
#import <ARKit/ARLightEstimate.h>
#import <ARKit/ARPointCloud.h>
#import <CoreVideo/CoreVideo.h>
#import <ImageIO/CGImageProperties.h>
namespace OpenVulkano::AR::ArKit
{
@@ -88,6 +89,12 @@ namespace OpenVulkano::AR::ArKit
frameData.timestampDepth = arFrame.capturedDepthDataTimestamp;
frameData.trackingState = GetArTrackingState(arFrame.camera);
if (@available(iOS 16.0, *))
{
frameData.focalLength = [(NSNumber*)[arFrame.exifData valueForKey:(__bridge NSString*)kCGImagePropertyExifFocalLength] floatValue];
frameData.fNumber = [(NSNumber*)[arFrame.exifData valueForKey:(__bridge NSString*)kCGImagePropertyExifFNumber] floatValue];
}
return frameData;
}
}
@@ -236,4 +243,13 @@ namespace OpenVulkano::AR::ArKit
[image release];
return success;
}
std::string ArFrameArKit::GetLensModel() const
{
if (@available(iOS 16.0, *))
{
return [[m_arKitFrame.exifData valueForKey:(__bridge NSString*)kCGImagePropertyExifLensModel] UTF8String];
}
return "";
}
}
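
A caveat on the lens-model accessor above, sketched here rather than altered in the commit: when the EXIF dictionary lacks the lens-model key, valueForKey: returns nil, -UTF8String on nil yields a null pointer, and constructing a std::string from a null pointer is undefined behaviour. (The numeric reads are unaffected: -floatValue on nil simply yields 0, matching the struct's 0 defaults.) A nil-checked variant could look like the following sketch, reusing the commit's names and availability guard.

// Sketch only: nil-safe variant of GetLensModel.
std::string ArFrameArKit::GetLensModel() const
{
    if (@available(iOS 16.0, *))
    {
        // valueForKey: returns nil when the key is absent from exifData.
        NSString* lensModel = [m_arKitFrame.exifData valueForKey:(__bridge NSString*)kCGImagePropertyExifLensModel];
        if (lensModel != nil) { return std::string([lensModel UTF8String]); }
    }
    return "";
}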