/*
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at https://mozilla.org/MPL/2.0/.
 */

#include "ArRecorder.hpp"
#include "ArSession.hpp"
#include "ArFrame.hpp"
#include "IO/Archive/MultiPartArchiveWriter.hpp"
#include "IO/Archive/ArchiveConfiguration.hpp"
#include "IO/Files/Pfm.hpp"
#include "IO/Files/Pnm.hpp"
#include "IO/AppFolders.hpp"
#include "Base/BlockProfiler.hpp"
#include "Base/Logger.hpp"
#include "Image/YuvUtils.hpp"
#include "Image/ExifBuilder.hpp"
#include "Image/XmpBuilder.hpp"
#include "Image/JpegWithTagsWriter.hpp"

#include <cmath>
#include <ctime>
#include <fstream>
#include <iomanip>
#include <sstream>

#if __has_include("turbojpeg.h")
#include <turbojpeg.h>
#define TURBO_JPEG
#endif

namespace OpenVulkano::AR
{
    namespace
    {
        constexpr int JPEG_QUALITY_DS = 90;
        constexpr int JPEG_QUALITY_FS = 95;

        std::filesystem::path GeneratePath(const std::filesystem::path& baseDir, const std::string_view& name)
        {
            // Recordings are grouped into a timestamped directory, e.g. <base>/<name>/2024-01-31_12_34_56
            std::stringstream ss;
            auto t = std::time(nullptr);
            auto localTime = *std::localtime(&t);
            ss << std::put_time(&localTime, "%Y-%m-%d_%H_%M_%S");
            return baseDir / name / ss.str();
        }

        std::string GetFileName(size_t frameId, std::string_view fileExtension)
        {
            return fmt::format("{:07d}.{}", frameId, fileExtension);
        }

        std::string MakeXmpTag(ArFrame* arFrame)
        {
            Image::XmpBuilder xmpBuilder;
            xmpBuilder.SetPose(arFrame->GetPose());
            xmpBuilder.SetCreateDateNow();
            xmpBuilder.SetExposureTime(arFrame->GetExposureTime());
            xmpBuilder.SetFocalLength(arFrame->GetFocalLength());
            xmpBuilder.SetCameraIntrinsic(arFrame->GetCameraIntrinsic());
            return xmpBuilder.Finalize();
        }

        std::vector<uint8_t> MakeExifTag(ArFrame* arFrame)
        {
            Image::ExifBuilder exifBuilder;
            exifBuilder.model = arFrame->GetLensModel();
            exifBuilder.exposureTime = Image::RationalValue(1, 1.0f / arFrame->GetExposureTime());
            exifBuilder.SetOrientation(atan2f(arFrame->GetCameraTransformation()[0][1], arFrame->GetCameraTransformation()[1][1]));
            exifBuilder.dateTaken = exifBuilder.GetCurrentTimestamp();
            exifBuilder.make = arFrame->GetArSession()->GetArType().GetVendorName();
            exifBuilder.SetResolution();
            exifBuilder.fNumber = arFrame->GetFNumber();
            exifBuilder.focalLength = arFrame->GetFocalLength();
            return exifBuilder.Build();
        }
    }

    ArRecorder::ArRecorder(ArSession* session) : m_session(session), m_asyncProcessor(this)
    {
        if (!session) return;
        m_settings.path = GeneratePath(AppFolders::GetAppDataHomeDir(), "ar_recording");
        session->OnNewFrameHighResolution += EventHandler(this, &ArRecorder::SaveHighResolution);
    }

    ArRecorder::~ArRecorder()
    {
        if (!m_settings.asyncRecording) WriteMetadataFile();
        else m_asyncProcessor.Close();
    }

    void ArRecorder::WriteColorImage(ArFrame* arFrame, IArchiveWriter* colorWriter, JpegWithTagsWriter* jpgWriter, bool highRes) const
    {
        //BlockProfiler profile("Save AR Frame - Image");
#ifndef TURBO_JPEG
        std::string fileName = GetFileName(arFrame->GetFrameId(), "jpg");
        if (arFrame->GetCameraImageAsJpeg([&fileName, this](const char* data, size_t len) { m_colorWriter->AddFile(fileName.c_str(), data, len); }))
            return;
        //TODO stb???
        Logger::AR->error("Failed to create JPEG! Missing turbojpeg.h");
#else
        //TODO handle non nv12 images
        auto img = arFrame->GetCameraImage();
        auto sizeLum = img.luminescenceOrColor.resolution.x * img.luminescenceOrColor.resolution.y;
        auto sizeUV = img.uv.resolution.x * img.uv.resolution.y;
        auto resX = img.luminescenceOrColor.resolution.x;
        auto resY = img.luminescenceOrColor.resolution.y;

        tjhandle handle = tjInitCompress();
        const uint8_t* buffers[3];
        std::unique_ptr<uint8_t[]> dataBuffer;
        int jpegQuality = JPEG_QUALITY_FS;
        if (m_settings.downsampleColor && !highRes)
        {
            // Downsample by a factor of 2 in each dimension into planar YUV before compression
            dataBuffer = YuvUtils::PlansFromNV12(static_cast<const uint8_t*>(img.luminescenceOrColor.data),
                                                 static_cast<const uint8_t*>(img.uv.data),
                                                 resX, resY, img.uv.resolution.x, img.uv.resolution.y, 2, 2,
                                                 img.luminescenceOrColor.rowPadding, img.uv.rowPadding);
            resX /= 2;
            resY /= 2;
            buffers[0] = dataBuffer.get();
            buffers[1] = buffers[0] + sizeLum / 4;
            buffers[2] = buffers[1] + sizeUV / 4;
            jpegQuality = JPEG_QUALITY_DS; // Use lower quality for downsampled images
        }
        else
        {
            // Full resolution: de-interleave the NV12 chroma plane into separate U and V planes
            dataBuffer = std::unique_ptr<uint8_t[]>(new uint8_t[sizeUV + sizeUV]);
            YuvUtils::ChromaPlanesFromNV12((uint8_t*)img.uv.data, dataBuffer.get(), img.uv.resolution.x, img.uv.resolution.y, img.uv.rowPadding);
            buffers[0] = static_cast<const uint8_t*>(img.luminescenceOrColor.data);
            buffers[1] = dataBuffer.get();
            buffers[2] = buffers[1] + sizeUV;
        }

        uint8_t* outBuffer = nullptr;
        unsigned long size = 0;
        if (tjCompressFromYUVPlanes(handle, buffers, resX, nullptr, resY, TJSAMP_420, &outBuffer, &size, jpegQuality, TJFLAG_FASTDCT)) [[unlikely]]
            Logger::AR->error("Failed to create JPEG! {}", tjGetErrorStr());
        else [[likely]]
        {
            if (colorWriter) [[likely]]
            {
                std::string fileName = GetFileName(arFrame->GetFrameId(), "jpg");
                colorWriter->AddFile(fileName.c_str(), outBuffer, size);
            }
            if (jpgWriter) [[unlikely]]
                jpgWriter->WriteImageData({ outBuffer, size }, false); // Keep open, lifetime is managed outside
        }
        tjFree(outBuffer);
        tjDestroy(handle);
#endif
    }

    void ArRecorder::WriteDepthImage(ArFrame* arFrame, IArchiveWriter* depthWriter, IArchiveWriter* confWriter)
    {
        std::string depthName = GetFileName(arFrame->GetFrameId(), "pfm");
        std::string confName = GetFileName(arFrame->GetFrameId(), "pgm");
        WriteDepthImage(arFrame, depthWriter, confWriter, depthName.c_str(), confName.c_str());
    }

    void ArRecorder::WriteDepthImage(ArFrame* arFrame, IArchiveWriter* depthWriter, IArchiveWriter* confWriter, const char* depthName, const char* confidenceName)
    {
        //BlockProfiler profile("Save AR Frame - Depth");
        if (!depthWriter || !confWriter) return;
        auto depthImg = arFrame->GetDepthImage();
        std::array<std::pair<const void*, size_t>, 2> buffers;
        {
            // TODO handle alternative depth formats!!!!
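            // The depth map is stored as a PFM image: the textual header and the raw float buffer
            // are handed to the archive writer as two separate (pointer, size) spans.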
            //BlockProfiler profile("Save AR Frame - Depth");
            PfmHeader depthHeader(static_cast<int>(depthImg.depth.resolution.x), static_cast<int>(depthImg.depth.resolution.y), 5.0f, false);
            std::string header = depthHeader.ToString();
            buffers[0].first = header.c_str();
            buffers[0].second = header.size();
            buffers[1].first = static_cast<const void*>(depthImg.depth.data);
            buffers[1].second = depthImg.depth.resolution.x * depthImg.depth.resolution.y * sizeof(float);
            depthWriter->AddFile(depthName, buffers);
        }
        if (depthImg.confidence.resolution.x > 1 && depthImg.confidence.resolution.y > 1)
        {
            //BlockProfiler profile("Save AR Frame - Confi");
            // The confidence map is one byte per pixel and is stored as a PGM
            PnmHeader confidenceHeader(depthImg.confidence.resolution.x, depthImg.confidence.resolution.y, false, 2);
            std::string header = confidenceHeader.ToString();
            buffers[0].first = header.c_str();
            buffers[0].second = header.size();
            buffers[1].first = static_cast<const void*>(static_cast<const uint8_t*>(depthImg.confidence.data));
            buffers[1].second = static_cast<size_t>(depthImg.confidence.resolution.x * depthImg.confidence.resolution.y);
            confWriter->AddFile(confidenceName, buffers);
        }
    }

    void ArRecorder::WriteMetadata(ArFrame* frame, IArchiveWriter* metaWriter)
    {
        std::string fileName = GetFileName(frame->GetFrameId(), "meta");
        WriteMetadata(frame, metaWriter, fileName.c_str());
    }

    void ArRecorder::WriteMetadata(ArFrame* frame, IArchiveWriter* metaWriter, const char* fileName)
    {
        //BlockProfiler profileMeta("Save AR Frame - Meta");
        std::string metaContent = frame->GetFrameMetadata().ToYaml();
        metaWriter->AddFile(fileName, metaContent.c_str(), metaContent.size());
    }

    void ArRecorder::Write(ArFrame* frame, bool highRes)
    {
        if (frame->IsSaved()) return;
        frame->SetSaved();
        bool useHighResWriter = highRes && m_settings.highResFramesInSeparateArchive;
        //BlockProfiler profile("Save AR Frame");
        WriteMetadata(frame, useHighResWriter ? m_highResWriter.get() : m_metadataWriter.get());
        WriteColorImage(frame, useHighResWriter ? m_highResWriter.get() : m_colorWriter.get(), nullptr, highRes);
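        // Depth and confidence normally go to their own archives; high-resolution frames can instead
        // be routed entirely into the dedicated high-res archive (m_settings.highResFramesInSeparateArchive).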
        WriteDepthImage(frame, useHighResWriter ? m_highResWriter.get() : m_depthWriter.get(),
                        useHighResWriter ? m_highResWriter.get() : m_confidenceWriter.get());
        m_frameCount++;
    }

    void ArRecorder::Start()
    {
        if (!m_session) return;
        if (!m_colorWriter)
        {
            m_colorWriter = std::make_unique<MultiPartArchiveWriter>(m_settings.path.string(), "color_{:05d}.tar", ArchiveConfig::TAR, m_settings.archiveSize, true);
            m_depthWriter = std::make_unique<MultiPartArchiveWriter>(m_settings.path.string(), "depth_{:05d}.tar.zst", ArchiveConfig::TAR_ZSTD, m_settings.archiveSize * 2, true);
            m_confidenceWriter = std::make_unique<MultiPartArchiveWriter>(m_settings.path.string(), "confidence_{:05d}.tar.zst", ArchiveConfig::TAR_ZSTD1, m_settings.archiveSize * 10, true);
            m_metadataWriter = std::make_unique<MultiPartArchiveWriter>(m_settings.path.string(), "meta_{:05d}.tar.gz", ArchiveConfig::TAR_GZ, m_settings.archiveSize * 10, true);
            m_highResWriter = std::make_unique<MultiPartArchiveWriter>(m_settings.path.string(), "highres_{:05d}.tar", ArchiveConfig::TAR, m_settings.archiveSize, true);
            WriteMetadataFile();
        }
        m_recording = true;
        OnRecordingStateChanged(this, m_recording);
        m_timer.Start();
    }

    void ArRecorder::SplitWriters()
    {
        for (MultiPartArchiveWriter* writer : { m_colorWriter.get(), m_depthWriter.get(), m_confidenceWriter.get(), m_metadataWriter.get(), m_highResWriter.get() })
        {
            if (writer) writer->Split();
        }
    }

    void ArRecorder::Stop()
    {
        if (!m_recording) return;
        m_recording = false;
        OnRecordingStateChanged(this, m_recording);
        if (!m_settings.asyncRecording) SplitWriters();
        m_timer.Tick();
        m_timer.Start();
    }

    void ArRecorder::SetRecordingPath(const std::filesystem::path& path)
    {
        if (!m_session) return;
        if (!m_colorWriter)
        {
            for (MultiPartArchiveWriter* writer : { m_colorWriter.get(), m_depthWriter.get(), m_confidenceWriter.get(), m_metadataWriter.get(), m_highResWriter.get() })
            {
                if (writer) writer->Move(path);
            }
            if (std::filesystem::exists(m_settings.path / ArSessionMetadata::RECORDING_METADATA_FILENAME))
                std::filesystem::rename(m_settings.path / ArSessionMetadata::RECORDING_METADATA_FILENAME, path / ArSessionMetadata::RECORDING_METADATA_FILENAME);
        }
        m_persistent = true;
        m_settings.path = path;
    }

    void ArRecorder::SetRecordingMode(RecordingMode mode)
    {
        if (!m_session) return;
        if (m_settings.recordingMode == mode) return;
        if (m_settings.recordingMode == RecordingMode::NEW_FRAME && m_newFrameHandler)
        {
            m_session->OnNewFrame -= m_newFrameHandler;
            m_newFrameHandler = nullptr;
        }
        m_settings.recordingMode = mode;
        if (m_settings.recordingMode == RecordingMode::NEW_FRAME)
        {
            m_newFrameHandler = m_session->OnNewFrame += EventHandler(this, &ArRecorder::Save);
        }
    }

    void ArRecorder::Save(const std::shared_ptr<ArFrame>& frame)
    {
        if (!m_recording) return;
        if (m_settings.asyncRecording)
        {
            m_asyncProcessor.Queue(frame, false);
        }
        else Write(frame.get());
    }

    void ArRecorder::SaveHighResolution(const std::shared_ptr<ArFrame>& frame)
    {
        if (!m_recording || !m_settings.saveHighResFrames) return;
        if (m_settings.asyncRecording)
        {
            m_asyncProcessor.Queue(frame, true);
        }
        else Write(frame.get(), true);
    }

    void ArRecorder::SaveToFile(const std::shared_ptr<ArFrame>& frame, const std::filesystem::path& path, bool downsample, bool includeAux)
    {
        if (m_settings.asyncRecording) m_asyncProcessor.Queue(frame, path, downsample, includeAux);
        else WriteToFile(frame, path, downsample, includeAux);
    }

    void ArRecorder::WriteToFile(const std::shared_ptr<ArFrame>& frame, const std::filesystem::path& path, bool downsample, bool includeAux)
    {
        JpegWithTagsWriter jpgWriter(path);
        jpgWriter.WriteExifTag(MakeExifTag(frame.get()));
        jpgWriter.WriteXmpTag(MakeXmpTag(frame.get()));
        WriteColorImage(frame.get(), nullptr, &jpgWriter, !downsample);
        if (includeAux)
        {
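            // Auxiliary data (frame metadata plus depth and confidence maps) is written as a ZIP
            // archive through the JPEG writer's file handle, so a single output file is produced.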
            ArchiveWriter writer(jpgWriter.GetFilePtr(), ArchiveConfiguration(ArchiveType::ZIP));
            WriteMetadata(frame.get(), &writer, "metadata.yml");
            if (frame->GetDepthImage().depth.resolution.x > 0 && frame->GetDepthImage().depth.resolution.y > 0)
                WriteDepthImage(frame.get(), &writer, &writer, "depth.pfm", "confidence.pgm");
        }
    }

    void ArRecorder::WriteMetadataFile()
    {
        m_timer.Tick();
        m_session->GetSessionMetadata().recFrameCount = m_frameCount;
        m_session->GetSessionMetadata().recSkippedFrames = m_skippedFrames;
        m_session->GetSessionMetadata().recDuration = static_cast<float>(m_timer.GetTotalSeconds());
        std::ofstream platformInfoStream(m_settings.path / ArSessionMetadata::RECORDING_METADATA_FILENAME);
        platformInfoStream << m_session->GetSessionMetadata().ToYaml();
        platformInfoStream.close();
    }

    //region AsyncProcessor
    ArRecorder::AsyncProcessor::AsyncProcessor(ArRecorder* recorder)
        : recorder(recorder), processingThread(&ArRecorder::AsyncProcessor::Handler, this) {}

    ArRecorder::AsyncProcessor::~AsyncProcessor() { Close(); }

    void ArRecorder::AsyncProcessor::Close()
    {
        if (requestExit) return;
        requestExit = true;
        newDataAvailable.notify_one();
        if (std::this_thread::get_id() != processingThread.get_id())
        {
            if (processingThread.joinable()) processingThread.join();
        }
        else processingThread.detach(); // Closing from the worker itself: it cannot join its own thread
    }

    void ArRecorder::AsyncProcessor::Queue(const std::shared_ptr<ArFrame>& frame, bool highRes)
    {
        if (requestExit || !recorder->m_recording) return; // no need to queue up on shutdown
        {
            std::unique_lock lock(queueMutex);
            if (highRes) highResFrameQueue.push(frame);
            else frameQueue.push(frame);
        }
        newDataAvailable.notify_all();
    }

    void ArRecorder::AsyncProcessor::Queue(const Ptr<ArFrame>& frame, const std::filesystem::path& path, bool downsample, bool aux)
    {
        if (requestExit) return;
        {
            std::unique_lock lock(queueMutex);
            toFile.emplace(frame, path, downsample, aux);
        }
        newDataAvailable.notify_all();
    }

    void ArRecorder::AsyncProcessor::Handler()
    {
        Utils::SetThreadName("ArRecorder");
        if (!recorder->m_session) return;
        std::unique_lock lock(queueMutex);
        do
        {
            if (Empty())
                newDataAvailable.wait(lock, [this] { return !Empty() || requestExit; });

            // Explicit save-to-file requests are drained first
            while (!toFile.empty())
            {
                auto request = std::move(toFile.front());
                toFile.pop();
                if (!request.frame) continue;
                lock.unlock();
                recorder->WriteToFile(request.frame, request.path, request.downsample, request.addAux);
                lock.lock();
            }
            while (!highResFrameQueue.empty())
            {
                auto frame = std::move(highResFrameQueue.front());
                highResFrameQueue.pop();
                if (!frame || frame->IsSaved()) continue;
                lock.unlock();
                recorder->Write(frame.get(), true);
                lock.lock();
            }
            if (requestExit) break;
            if (!frameQueue.empty())
            {
                if (frameQueue.size() > 3)
                {
                    Logger::AR->warn("Falling behind saving frames, skipping ...");
                    recorder->m_skippedFrames++;
                    frameQueue.pop();
                    //while(frameQueue.size() > 3) frameQueue.pop();
                }
                auto frame = std::move(frameQueue.front());
                frameQueue.pop();
                if (!frame || frame->IsSaved()) continue;
                lock.unlock();
                recorder->Write(frame.get(), false);
                lock.lock();
            }
            if (!recorder->m_recording) recorder->SplitWriters();
        } while (!requestExit);
        recorder->WriteMetadataFile();
    }
    //endregion
}