import AVFoundation
import Foundation
import ImageIO
import os
import Photos
import UIKit
import UniformTypeIdentifiers
import VideoToolbox

/// Stages of the Live Photo build pipeline, used for progress reporting
/// and for attributing errors to a specific step.
public enum LivePhotoBuildStage: String, Codable, Sendable {
    case normalize
    case extractKeyFrame
    case writePhotoMetadata
    case writeVideoMetadata
    case saveToAlbum
    case validate
}

/// A progress sample: which stage is running and how far along it is (0...1).
public struct LivePhotoBuildProgress: Sendable {
    public var stage: LivePhotoBuildStage
    public var fraction: Double

    public init(stage: LivePhotoBuildStage, fraction: Double) {
        self.stage = stage
        self.fraction = fraction
    }
}

/// Lifecycle state of a `WorkItem`.
public enum WorkStatus: String, Codable, Sendable {
    case idle
    case editing
    case processing
    case success
    case failed
}

/// Reference to a media source: either a Photos asset (by local identifier)
/// or a file URL. Exactly one of the two fields is non-nil.
public struct SourceRef: Codable, Sendable, Hashable {
    public var phAssetLocalIdentifier: String?
    public var fileURL: URL?

    public init(phAssetLocalIdentifier: String) {
        self.phAssetLocalIdentifier = phAssetLocalIdentifier
        self.fileURL = nil
    }

    public init(fileURL: URL) {
        self.phAssetLocalIdentifier = nil
        self.fileURL = fileURL
    }
}

/// Whether to keep or strip the audio track.
public enum AudioPolicy: String, Codable, Sendable {
    case keep
    case remove
}

/// Whether to pass the source codec through or fall back to H.264.
public enum CodecPolicy: String, Codable, Sendable {
    case passthrough
    case fallbackH264
}

/// Whether to keep HDR content or tone-map it down to SDR.
public enum HDRPolicy: String, Codable, Sendable {
    case keep
    case toneMapToSDR
}

/// User-adjustable export parameters for a build.
///
/// - Note: `trimStart`/`trimEnd` are absolute times in seconds into the
///   source video (see `trimVideo`). `keyFrameTime` and `maxDimension`
///   influence key-frame extraction; audio/codec/HDR policies are carried
///   here for callers but are not consumed by the current pipeline.
public struct ExportParams: Codable, Sendable, Hashable {
    public var trimStart: Double
    public var trimEnd: Double
    public var keyFrameTime: Double
    public var audioPolicy: AudioPolicy
    public var codecPolicy: CodecPolicy
    public var hdrPolicy: HDRPolicy
    public var maxDimension: Int

    public init(
        trimStart: Double = 0,
        trimEnd: Double = 1.0,
        keyFrameTime: Double = 0.5,
        audioPolicy: AudioPolicy = .keep,
        codecPolicy: CodecPolicy = .fallbackH264,
        hdrPolicy: HDRPolicy = .toneMapToSDR,
        maxDimension: Int = 1920
    ) {
        self.trimStart = trimStart
        self.trimEnd = trimEnd
        self.keyFrameTime = keyFrameTime
        self.audioPolicy = audioPolicy
        self.codecPolicy = codecPolicy
        self.hdrPolicy = hdrPolicy
        self.maxDimension = maxDimension
    }
}

/// Structured, codable error carrying a stable code, the failing stage,
/// a user-facing message, and suggested recovery actions.
public struct AppError: Error, Codable, Sendable, Hashable {
    public var code: String
    public var stage: LivePhotoBuildStage?
    public var message: String
    public var underlyingErrorDescription: String?
    public var suggestedActions: [String]

    public init(
        code: String,
        stage: LivePhotoBuildStage? = nil,
        message: String,
        underlyingErrorDescription: String? = nil,
        suggestedActions: [String] = []
    ) {
        self.code = code
        self.stage = stage
        self.message = message
        self.underlyingErrorDescription = underlyingErrorDescription
        self.suggestedActions = suggestedActions
    }
}

/// A persistable unit of work: source references, export parameters,
/// current status, and (on success/failure) the result asset id or error.
public struct WorkItem: Identifiable, Codable, Sendable, Hashable {
    public var id: UUID
    public var createdAt: Date
    public var sourceVideo: SourceRef
    public var coverImage: SourceRef?
    public var exportParams: ExportParams
    public var status: WorkStatus
    public var resultAssetId: String?
    public var cacheDir: URL
    public var error: AppError?

    public init(
        id: UUID = UUID(),
        createdAt: Date = Date(),
        sourceVideo: SourceRef,
        coverImage: SourceRef? = nil,
        exportParams: ExportParams = ExportParams(),
        status: WorkStatus = .idle,
        resultAssetId: String? = nil,
        cacheDir: URL,
        error: AppError? = nil
    ) {
        self.id = id
        self.createdAt = createdAt
        self.sourceVideo = sourceVideo
        self.coverImage = coverImage
        self.exportParams = exportParams
        self.status = status
        self.resultAssetId = resultAssetId
        self.cacheDir = cacheDir
        self.error = error
    }
}

/// File layout for one build: a work directory plus the paired photo,
/// paired video, and log file URLs inside it.
public struct LivePhotoWorkPaths: Sendable, Hashable {
    public var workDir: URL
    public var photoURL: URL
    public var pairedVideoURL: URL
    public var logURL: URL

    public init(workDir: URL, photoURL: URL, pairedVideoURL: URL, logURL: URL) {
        self.workDir = workDir
        self.photoURL = photoURL
        self.pairedVideoURL = pairedVideoURL
        self.logURL = logURL
    }
}

/// Manages per-work cache directories under Caches/LivePhotoBuilder.
public struct CacheManager: Sendable {
    public var baseDirectory: URL

    /// Creates the manager and ensures the base directory exists.
    /// - Parameter baseDirectory: Override for tests; defaults to the
    ///   user caches directory + "LivePhotoBuilder".
    public init(baseDirectory: URL? = nil) throws {
        if let baseDirectory {
            self.baseDirectory = baseDirectory
        } else {
            let caches = try FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: true)
            self.baseDirectory = caches.appendingPathComponent("LivePhotoBuilder", isDirectory: true)
        }
        try FileManager.default.createDirectory(at: self.baseDirectory, withIntermediateDirectories: true)
    }

    /// Creates (if needed) the work directory for `workId` and returns the
    /// canonical file locations inside it.
    public func makeWorkPaths(workId: UUID) throws -> LivePhotoWorkPaths {
        let workDir = baseDirectory.appendingPathComponent(workId.uuidString, isDirectory: true)
        try FileManager.default.createDirectory(at: workDir, withIntermediateDirectories: true)
        return LivePhotoWorkPaths(
            workDir: workDir,
            photoURL: workDir.appendingPathComponent("photo").appendingPathExtension("heic"),
            pairedVideoURL: workDir.appendingPathComponent("paired").appendingPathExtension("mov"),
            logURL: workDir.appendingPathComponent("builder").appendingPathExtension("log")
        )
    }

    /// Removes the whole work directory for `workId`, if present.
    public func clearWork(workId: UUID) throws {
        let workDir = baseDirectory.appendingPathComponent(workId.uuidString, isDirectory: true)
        if FileManager.default.fileExists(atPath: workDir.path) {
            try FileManager.default.removeItem(at: workDir)
        }
    }
}

/// Thin wrapper over `os.Logger` with public-privacy messages.
public struct LivePhotoLogger: Sendable {
    private var logger: os.Logger

    public init(subsystem: String = "ToLivePhoto", category: String = "LivePhotoCore") {
        self.logger = os.Logger(subsystem: subsystem, category: category)
    }

    public func info(_ message: String) {
        logger.info("\(message, privacy: .public)")
    }

    public func error(_ message: String) {
        logger.error("\(message, privacy: .public)")
    }
}

/// Writes paired Live Photo resources into the user's photo library.
public actor AlbumWriter {
    public init() {}

    /// Requests add-only photo library authorization.
    public func requestAddOnlyAuthorization() async -> PHAuthorizationStatus {
        await withCheckedContinuation { continuation in
            PHPhotoLibrary.requestAuthorization(for: .addOnly) { status in
                continuation.resume(returning: status)
            }
        }
    }

    /// Saves a photo + paired video pair as a single Live Photo asset.
    /// - Parameters:
    ///   - photoURL: HEIC still carrying the content identifier.
    ///   - pairedVideoURL: QuickTime movie carrying the matching identifier.
    ///   - shouldMoveFiles: If true, Photos takes ownership of the files.
    /// - Returns: The local identifier of the created asset.
    /// - Throws: `AppError` LPB-401 on failure.
    public func saveLivePhoto(photoURL: URL, pairedVideoURL: URL, shouldMoveFiles: Bool = false) async throws -> String {
        try await withCheckedThrowingContinuation { continuation in
            var localIdentifier: String?
            PHPhotoLibrary.shared().performChanges({
                let request = PHAssetCreationRequest.forAsset()
                let photoOptions = PHAssetResourceCreationOptions()
                photoOptions.shouldMoveFile = shouldMoveFiles
                photoOptions.uniformTypeIdentifier = UTType.heic.identifier
                let videoOptions = PHAssetResourceCreationOptions()
                videoOptions.shouldMoveFile = shouldMoveFiles
                videoOptions.uniformTypeIdentifier = UTType.quickTimeMovie.identifier
                request.addResource(with: .photo, fileURL: photoURL, options: photoOptions)
                request.addResource(with: .pairedVideo, fileURL: pairedVideoURL, options: videoOptions)
                localIdentifier = request.placeholderForCreatedAsset?.localIdentifier
            }, completionHandler: { success, error in
                if let error {
                    continuation.resume(throwing: AppError(code: "LPB-401", stage: .saveToAlbum, message: "保存到相册失败", underlyingErrorDescription: error.localizedDescription, suggestedActions: ["允许添加到相册权限", "稍后重试"]))
                    return
                }
                guard success, let id = localIdentifier else {
                    continuation.resume(throwing: AppError(code: "LPB-401", stage: .saveToAlbum, message: "保存到相册失败", underlyingErrorDescription: nil, suggestedActions: ["允许添加到相册权限", "稍后重试"]))
                    return
                }
                continuation.resume(returning: id)
            })
        }
    }
}

/// Validates that resources/assets really form a Live Photo.
public actor LivePhotoValidator {
    public init() {}

    /// True if the library asset with `localIdentifier` has the `.photoLive` subtype.
    public func isLivePhotoAsset(localIdentifier: String) async -> Bool {
        let result = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
        guard let asset = result.firstObject else { return false }
        return asset.mediaSubtypes.contains(.photoLive)
    }

    /// Fetches a `PHLivePhoto` for a library asset, or nil if unavailable.
    public func requestLivePhoto(localIdentifier: String) async -> PHLivePhoto? {
        let result = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
        guard let asset = result.firstObject else { return nil }
        return await withCheckedContinuation { continuation in
            PHImageManager.default().requestLivePhoto(
                for: asset,
                targetSize: CGSize(width: 1, height: 1),
                contentMode: .aspectFit,
                options: nil
            ) { livePhoto, _ in
                continuation.resume(returning: livePhoto)
            }
        }
    }

    /// Assembles a `PHLivePhoto` directly from resource files, with a
    /// 10-second timeout guard so the continuation can never be leaked.
    /// Returns nil on error, cancellation, or timeout.
    public func requestLivePhoto(photoURL: URL, pairedVideoURL: URL) async -> PHLivePhoto? {
        await withCheckedContinuation { continuation in
            // FIX: the original mutated a plain `var hasResumed` from both the
            // result handler and the main-queue timeout closure without any
            // synchronization; the check-then-set window could double-resume
            // the checked continuation (a crash). Serialize with a lock so the
            // continuation resumes exactly once.
            let lock = NSLock()
            var hasResumed = false
            // Returns true only for the single caller that wins the race.
            func resumeOnce(_ value: PHLivePhoto?) -> Bool {
                lock.lock()
                defer { lock.unlock() }
                if hasResumed { return false }
                hasResumed = true
                continuation.resume(returning: value)
                return true
            }
            let requestID = PHLivePhoto.request(
                withResourceFileURLs: [pairedVideoURL, photoURL],
                placeholderImage: nil,
                targetSize: .zero,
                contentMode: .aspectFit
            ) { livePhoto, info in
                // Degraded results precede the full-quality one; keep waiting.
                if let isDegraded = info[PHLivePhotoInfoIsDegradedKey] as? Bool, isDegraded {
                    return
                }
                if let error = info[PHLivePhotoInfoErrorKey] as? Error {
                    print("[LivePhotoValidator] requestLivePhoto error: \(error.localizedDescription)")
                    _ = resumeOnce(nil)
                    return
                }
                if let cancelled = info[PHLivePhotoInfoCancelledKey] as? Bool, cancelled {
                    print("[LivePhotoValidator] requestLivePhoto cancelled")
                    _ = resumeOnce(nil)
                    return
                }
                _ = resumeOnce(livePhoto)
            }
            // Timeout guard: if Photos never delivers a final result, give up,
            // cancel the request, and resolve the continuation with nil.
            DispatchQueue.main.asyncAfter(deadline: .now() + 10) {
                if resumeOnce(nil) {
                    print("[LivePhotoValidator] requestLivePhoto timeout, requestID: \(requestID)")
                    PHLivePhoto.cancelRequest(withRequestID: requestID)
                }
            }
        }
    }

    /// Convenience: true if the resource pair assembles into a Live Photo.
    public func canCreateLivePhotoFromResources(photoURL: URL, pairedVideoURL: URL) async -> Bool {
        await requestLivePhoto(photoURL: photoURL, pairedVideoURL: pairedVideoURL) != nil
    }
}

/// Output of a successful resource build (not yet saved to the library).
public struct LivePhotoBuildOutput: Sendable, Hashable {
    public var workId: UUID
    public var assetIdentifier: String
    public var pairedImageURL: URL
    public var pairedVideoURL: URL

    public init(workId: UUID, assetIdentifier: String, pairedImageURL: URL, pairedVideoURL: URL) {
        self.workId = workId
        self.assetIdentifier = assetIdentifier
        self.pairedImageURL = pairedImageURL
        self.pairedVideoURL = pairedVideoURL
    }
}

/// Builds the paired HEIC + MOV resources that make up a Live Photo:
/// trims and time-scales the source video, extracts/normalizes the key
/// frame, and injects the shared content identifier into both files.
public actor LivePhotoBuilder {
    private let cacheManager: CacheManager
    private let logger: LivePhotoLogger

    public init(cacheManager: CacheManager? = nil, logger: LivePhotoLogger = LivePhotoLogger()) throws {
        self.cacheManager = try cacheManager ?? CacheManager()
        self.logger = logger
    }

    /// Runs the full resource pipeline: normalize -> key frame -> photo
    /// metadata -> video metadata. Reports progress per stage.
    /// - Returns: URLs of the paired image/video plus the shared identifier.
    /// - Throws: `AppError` with stage-specific codes (LPB-101/201/301).
    public func buildResources(
        workId: UUID = UUID(),
        sourceVideoURL: URL,
        coverImageURL: URL? = nil,
        exportParams: ExportParams = ExportParams(),
        progress: (@Sendable (LivePhotoBuildProgress) -> Void)? = nil
    ) async throws -> LivePhotoBuildOutput {
        let assetIdentifier = UUID().uuidString
        let paths = try cacheManager.makeWorkPaths(workId: workId)
        progress?(LivePhotoBuildProgress(stage: .normalize, fraction: 0))
        let trimmedVideoURL = try await trimVideo(
            sourceURL: sourceVideoURL,
            trimStart: exportParams.trimStart,
            trimEnd: exportParams.trimEnd,
            destinationURL: paths.workDir.appendingPathComponent("trimmed.mov")
        )
        // Time-scale the video to ~1 second so it matches the time markers in
        // the bundled metadata.mov. The reference live-wallpaper project uses
        // CMTimeMake(550, 600) ≈ 0.917 s; we use the same value.
        let targetDuration = CMTimeMake(value: 550, timescale: 600)
        progress?(LivePhotoBuildProgress(stage: .normalize, fraction: 0.5))
        let scaledVideoURL = try await scaleVideoToTargetDuration(
            sourceURL: trimmedVideoURL,
            targetDuration: targetDuration,
            destinationURL: paths.workDir.appendingPathComponent("scaled.mov")
        )
        // Key-frame time is pinned to 0.5 s — the middle of the scaled clip —
        // to match the still-image-time marker in metadata.mov. This is why
        // exportParams.keyFrameTime is deliberately not consulted here.
        let relativeKeyFrameTime = 0.5
        progress?(LivePhotoBuildProgress(stage: .extractKeyFrame, fraction: 0))
        let keyPhotoURL = try await resolveKeyPhotoURL(
            videoURL: scaledVideoURL,
            coverImageURL: coverImageURL,
            keyFrameTime: relativeKeyFrameTime,
            destinationURL: paths.workDir.appendingPathComponent("keyPhoto").appendingPathExtension("heic"),
            maxDimension: exportParams.maxDimension
        )
        progress?(LivePhotoBuildProgress(stage: .writePhotoMetadata, fraction: 0))
        guard let pairedImageURL = addAssetID(
            assetIdentifier,
            toImage: keyPhotoURL,
            saveTo: paths.photoURL
        ) else {
            throw AppError(code: "LPB-201", stage: .writePhotoMetadata, message: "封面生成失败", underlyingErrorDescription: nil, suggestedActions: ["缩短时长", "降低分辨率", "重试"])
        }
        progress?(LivePhotoBuildProgress(stage: .writeVideoMetadata, fraction: 0))
        let pairedVideoURL = try await addAssetID(assetIdentifier, toVideo: scaledVideoURL, saveTo: paths.pairedVideoURL, stillImageTimeSeconds: relativeKeyFrameTime, progress: { p in
            progress?(LivePhotoBuildProgress(stage: .writeVideoMetadata, fraction: p))
        })
        logger.info("Generated Live Photo files:")
        logger.info("  Photo: \(pairedImageURL.path)")
        logger.info("  Video: \(pairedVideoURL.path)")
        logger.info("  AssetIdentifier: \(assetIdentifier)")
        return LivePhotoBuildOutput(workId: workId, assetIdentifier: assetIdentifier, pairedImageURL: pairedImageURL, pairedVideoURL: pairedVideoURL)
    }

    /// Trims the source video to [trimStart, trimEnd] (seconds, clamped to
    /// the asset duration) and exports it with a passthrough preset so
    /// quality and dimensions are preserved. Audio is copied if present.
    private func trimVideo(sourceURL: URL, trimStart: Double, trimEnd: Double, destinationURL: URL) async throws -> URL {
        let asset = AVURLAsset(url: sourceURL)
        let duration = try await asset.load(.duration).seconds
        let safeTrimStart = max(0, min(trimStart, duration))
        let safeTrimEnd = max(safeTrimStart, min(trimEnd, duration))
        // Refuse clips shorter than 0.1 s — too short to build a Live Photo.
        if safeTrimEnd - safeTrimStart < 0.1 {
            throw AppError(code: "LPB-101", stage: .normalize, message: "视频时长不足", suggestedActions: ["选择更长的视频"])
        }
        let startTime = CMTime(seconds: safeTrimStart, preferredTimescale: 600)
        let endTime = CMTime(seconds: safeTrimEnd, preferredTimescale: 600)
        let timeRange = CMTimeRange(start: startTime, end: endTime)
        if FileManager.default.fileExists(atPath: destinationURL.path) {
            try FileManager.default.removeItem(at: destinationURL)
        }
        let composition = AVMutableComposition()
        guard let videoTrack = try await asset.loadTracks(withMediaType: .video).first else {
            throw AppError(code: "LPB-101", stage: .normalize, message: "视频轨道不存在", suggestedActions: ["选择其他视频"])
        }
        let compositionVideoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
        try compositionVideoTrack?.insertTimeRange(timeRange, of: videoTrack, at: .zero)
        // Audio is best-effort: a video-only source is still valid input.
        if let audioTrack = try? await asset.loadTracks(withMediaType: .audio).first {
            let compositionAudioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
            try? compositionAudioTrack?.insertTimeRange(timeRange, of: audioTrack, at: .zero)
        }
        let transform = try await videoTrack.load(.preferredTransform)
        // Keep the source transform so orientation stays correct.
        compositionVideoTrack?.preferredTransform = transform
        // Passthrough preserves the original quality and dimensions.
        guard let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough) else {
            throw AppError(code: "LPB-101", stage: .normalize, message: "无法创建导出会话", suggestedActions: ["重试"])
        }
        exportSession.outputURL = destinationURL
        exportSession.outputFileType = .mov
        await exportSession.export()
        guard exportSession.status == .completed else {
            throw AppError(code: "LPB-101", stage: .normalize, message: "视频裁剪失败", underlyingErrorDescription: exportSession.error?.localizedDescription, suggestedActions: ["缩短时长", "重试"])
        }
        return destinationURL
    }

    /// Normalizes the clip for Live Photo pairing in two passes, mirroring
    /// the reference live-wallpaper project's accelerateVideo + resizeVideo:
    /// 1. time-scale to `targetDuration` (~0.917 s);
    /// 2. resize to 1080x1920 (portrait) or 1920x1080 (landscape) at 60 fps,
    ///    aspect-fit centered via an AVMutableVideoComposition.
    private func scaleVideoToTargetDuration(
        sourceURL: URL,
        targetDuration: CMTime,
        destinationURL: URL
    ) async throws -> URL {
        let asset = AVURLAsset(url: sourceURL)
        if FileManager.default.fileExists(atPath: destinationURL.path) {
            try FileManager.default.removeItem(at: destinationURL)
        }
        guard let videoTrack = try await asset.loadTracks(withMediaType: .video).first else {
            throw AppError(code: "LPB-101", stage: .normalize, message: "视频轨道不存在", suggestedActions: ["选择其他视频"])
        }
        let originalDuration = try await asset.load(.duration)
        let naturalSize = try await videoTrack.load(.naturalSize)
        let preferredTransform = try await videoTrack.load(.preferredTransform)
        // Size after applying the transform (matches live-wallpaper resizeVideo):
        // the transform may rotate, so take absolute values.
        let originalSize = CGSize(width: naturalSize.width, height: naturalSize.height)
        let transformedSize = originalSize.applying(preferredTransform)
        let absoluteSize = CGSize(width: abs(transformedSize.width), height: abs(transformedSize.height))
        // Orientation decides the canvas: landscape -> 1920x1080, portrait -> 1080x1920.
        let isLandscape = absoluteSize.width > absoluteSize.height
        let livePhotoSize = isLandscape ? CGSize(width: 1920, height: 1080) : CGSize(width: 1080, height: 1920)
        // Pass 1: time-scale to the target duration (accelerateVideo equivalent).
        let acceleratedURL = destinationURL.deletingLastPathComponent().appendingPathComponent("accelerated.mov")
        if FileManager.default.fileExists(atPath: acceleratedURL.path) {
            try FileManager.default.removeItem(at: acceleratedURL)
        }
        let composition = AVMutableComposition()
        guard let compositionVideoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) else {
            throw AppError(code: "LPB-101", stage: .normalize, message: "无法创建视频轨道", suggestedActions: ["重试"])
        }
        try compositionVideoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: originalDuration), of: videoTrack, at: .zero)
        // Scale the full original range onto the target duration.
        compositionVideoTrack.scaleTimeRange(CMTimeRange(start: .zero, duration: originalDuration), toDuration: targetDuration)
        compositionVideoTrack.preferredTransform = preferredTransform
        guard let accelerateExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
            throw AppError(code: "LPB-101", stage: .normalize, message: "无法创建导出会话", suggestedActions: ["重试"])
        }
        accelerateExport.outputURL = acceleratedURL
        accelerateExport.outputFileType = .mov
        await accelerateExport.export()
        guard accelerateExport.status == .completed else {
            throw AppError(code: "LPB-101", stage: .normalize, message: "视频变速失败", underlyingErrorDescription: accelerateExport.error?.localizedDescription, suggestedActions: ["重试"])
        }
        // Pass 2: resize + set 60 fps (resizeVideo equivalent). If the scaled
        // asset unexpectedly has no video track, fall back to the scaled file.
        let acceleratedAsset = AVURLAsset(url: acceleratedURL)
        guard let acceleratedVideoTrack = try await acceleratedAsset.loadTracks(withMediaType: .video).first else {
            return acceleratedURL
        }
        let acceleratedDuration = try await acceleratedAsset.load(.duration)
        let acceleratedNaturalSize = try await acceleratedVideoTrack.load(.naturalSize)
        let acceleratedTransform = try await acceleratedVideoTrack.load(.preferredTransform)
        guard let resizeExport = AVAssetExportSession(asset: acceleratedAsset, presetName: AVAssetExportPresetHighestQuality) else {
            return acceleratedURL
        }
        // The video composition carries both the render size and the 60 fps
        // frame duration Live Photos expect.
        let videoComposition = AVMutableVideoComposition()
        videoComposition.renderSize = livePhotoSize
        videoComposition.frameDuration = CMTime(value: 1, timescale: 60)
        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRange(start: .zero, duration: acceleratedDuration)
        let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: acceleratedVideoTrack)
        // Map naturalSize pixels into the livePhotoSize canvas:
        // 1. apply preferredTransform (rotation + shift into positive coords);
        // 2. scale aspect-fit based on the rotated size;
        // 3. translate to center the result.
        let rotatedSize = acceleratedNaturalSize.applying(acceleratedTransform)
        let rotatedAbsoluteSize = CGSize(width: abs(rotatedSize.width), height: abs(rotatedSize.height))
        let actualWidthRatio = livePhotoSize.width / rotatedAbsoluteSize.width
        let actualHeightRatio = livePhotoSize.height / rotatedAbsoluteSize.height
        let actualScaleFactor = min(actualWidthRatio, actualHeightRatio)
        let scaledWidth = rotatedAbsoluteSize.width * actualScaleFactor
        let scaledHeight = rotatedAbsoluteSize.height * actualScaleFactor
        let centerX = (livePhotoSize.width - scaledWidth) / 2
        let centerY = (livePhotoSize.height - scaledHeight) / 2
        // A.concatenating(B) applies A first, then B.
        let scaleTransform = CGAffineTransform(scaleX: actualScaleFactor, y: actualScaleFactor)
        let translateToCenter = CGAffineTransform(translationX: centerX, y: centerY)
        let finalTransform = acceleratedTransform.concatenating(scaleTransform).concatenating(translateToCenter)
        layerInstruction.setTransform(finalTransform, at: .zero)
        instruction.layerInstructions = [layerInstruction]
        videoComposition.instructions = [instruction]
        resizeExport.videoComposition = videoComposition
        resizeExport.outputURL = destinationURL
        resizeExport.outputFileType = .mov
        resizeExport.shouldOptimizeForNetworkUse = true
        await resizeExport.export()
        // Drop the intermediate file regardless of the resize outcome.
        try? FileManager.default.removeItem(at: acceleratedURL)
        guard resizeExport.status == .completed else {
            throw AppError(code: "LPB-101", stage: .normalize, message: "视频尺寸调整失败", underlyingErrorDescription: resizeExport.error?.localizedDescription, suggestedActions: ["重试"])
        }
        return destinationURL
    }

    /// Produces the key photo as an HEIC at `destinationURL`: either the
    /// user-provided cover image (scaled to `maxDimension`) or a frame
    /// extracted from the video at `keyFrameTime` seconds.
    /// - Parameter maxDimension: Longest-side cap for the output image.
    ///   FIX: previously hard-coded to 1920, which silently ignored
    ///   `ExportParams.maxDimension`; now wired through (default preserved).
    private func resolveKeyPhotoURL(
        videoURL: URL,
        coverImageURL: URL?,
        keyFrameTime: Double,
        destinationURL: URL,
        maxDimension: Int = 1920
    ) async throws -> URL {
        // Writes a CGImage to an HEIC file at quality 0.9.
        func writeHEIC(_ image: CGImage, to url: URL) throws {
            guard let dest = CGImageDestinationCreateWithURL(url as CFURL, UTType.heic.identifier as CFString, 1, nil) else {
                throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "写入封面失败", underlyingErrorDescription: "无法创建 HEIC 写入器", suggestedActions: ["重试"])
            }
            let props: [String: Any] = [
                kCGImageDestinationLossyCompressionQuality as String: 0.9
            ]
            CGImageDestinationAddImage(dest, image, props as CFDictionary)
            guard CGImageDestinationFinalize(dest) else {
                throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "写入封面失败", underlyingErrorDescription: "HEIC 写入失败", suggestedActions: ["重试"])
            }
        }
        // Downscales so the longest side is <= maxDim; returns the original
        // image unchanged when it already fits or when scaling fails.
        func scaleImage(_ image: CGImage, maxDim: Int) -> CGImage {
            let width = image.width
            let height = image.height
            let maxSide = max(width, height)
            if maxSide <= maxDim { return image }
            let scale = CGFloat(maxDim) / CGFloat(maxSide)
            let newWidth = Int(CGFloat(width) * scale)
            let newHeight = Int(CGFloat(height) * scale)
            guard let context = CGContext(
                data: nil,
                width: newWidth,
                height: newHeight,
                bitsPerComponent: 8,
                bytesPerRow: 0,
                space: CGColorSpaceCreateDeviceRGB(),
                bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue
            ) else { return image }
            context.interpolationQuality = .high
            context.draw(image, in: CGRect(x: 0, y: 0, width: newWidth, height: newHeight))
            return context.makeImage() ?? image
        }
        // A user-supplied cover image wins over frame extraction.
        if let coverImageURL {
            guard let src = CGImageSourceCreateWithURL(coverImageURL as CFURL, nil),
                  let img = CGImageSourceCreateImageAtIndex(src, 0, nil) else {
                throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "封面读取失败", underlyingErrorDescription: nil, suggestedActions: ["更换封面图", "重试"])
            }
            let scaledImg = scaleImage(img, maxDim: maxDimension)
            try writeHEIC(scaledImg, to: destinationURL)
            return destinationURL
        }
        // Otherwise extract a frame from the video with tight time tolerance.
        let asset = AVURLAsset(url: videoURL)
        let imageGenerator = AVAssetImageGenerator(asset: asset)
        imageGenerator.appliesPreferredTrackTransform = true
        imageGenerator.requestedTimeToleranceAfter = CMTime(value: 1, timescale: 100)
        imageGenerator.requestedTimeToleranceBefore = CMTime(value: 1, timescale: 100)
        // Let the generator do the downscaling.
        imageGenerator.maximumSize = CGSize(width: maxDimension, height: maxDimension)
        // NOTE(review): synchronous `asset.duration` / `copyCGImage` are the
        // pre-iOS-16 APIs; kept as-is since changing them alters availability.
        let safeSeconds = max(0, min(keyFrameTime, max(0, asset.duration.seconds - 0.1)))
        let time = CMTime(seconds: safeSeconds, preferredTimescale: asset.duration.timescale)
        let cgImage: CGImage
        do {
            cgImage = try imageGenerator.copyCGImage(at: time, actualTime: nil)
        } catch {
            throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "抽帧失败", underlyingErrorDescription: error.localizedDescription, suggestedActions: ["缩短时长", "降低分辨率", "重试"])
        }
        try writeHEIC(cgImage, to: destinationURL)
        return destinationURL
    }

    /// Re-encodes the image metadata so the HEIC carries the Live Photo
    /// content identifier (Apple MakerNote key "17") plus normalized
    /// TIFF/EXIF fields. Returns nil if any ImageIO step fails.
    private func addAssetID(
        _ assetIdentifier: String,
        toImage imageURL: URL,
        saveTo destinationURL: URL
    ) -> URL? {
        let imageType = UTType.heic.identifier
        guard let imageDestination = CGImageDestinationCreateWithURL(destinationURL as CFURL, imageType as CFString, 1, nil),
              let imageSource = CGImageSourceCreateWithURL(imageURL as CFURL, nil),
              let imageRef = CGImageSourceCreateImageAtIndex(imageSource, 0, nil) else {
            return nil
        }
        var imageProperties = (CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [String: Any]) ?? [:]
        let width = imageRef.width
        let height = imageRef.height
        // Normalize TIFF (IFD0) fields to match reference output.
        var tiffDict = (imageProperties[kCGImagePropertyTIFFDictionary as String] as? [String: Any]) ?? [:]
        tiffDict[kCGImagePropertyTIFFOrientation as String] = 1 // Horizontal (normal)
        tiffDict[kCGImagePropertyTIFFXResolution as String] = 72
        tiffDict[kCGImagePropertyTIFFYResolution as String] = 72
        tiffDict[kCGImagePropertyTIFFResolutionUnit as String] = 2 // inches
        // Tile fields are absent in reference output; strip them.
        tiffDict.removeValue(forKey: kCGImagePropertyTIFFTileWidth as String)
        tiffDict.removeValue(forKey: kCGImagePropertyTIFFTileLength as String)
        imageProperties[kCGImagePropertyTIFFDictionary as String] = tiffDict
        // Normalize EXIF fields.
        var exifDict = (imageProperties[kCGImagePropertyExifDictionary as String] as? [String: Any]) ?? [:]
        exifDict[kCGImagePropertyExifVersion as String] = [2, 2, 1] // 0221
        exifDict[kCGImagePropertyExifPixelXDimension as String] = width
        exifDict[kCGImagePropertyExifPixelYDimension as String] = height
        imageProperties[kCGImagePropertyExifDictionary as String] = exifDict
        // Only the ContentIdentifier MakerNote entry is required for Photos
        // to pair the still with its video; no other MakerNotes are injected.
        let assetIdentifierKey = "17" // Content Identifier
        var makerAppleDict: [String: Any] = [:]
        makerAppleDict[assetIdentifierKey] = assetIdentifier
        imageProperties[kCGImagePropertyMakerAppleDictionary as String] = makerAppleDict
        CGImageDestinationAddImage(imageDestination, imageRef, imageProperties as CFDictionary)
        guard CGImageDestinationFinalize(imageDestination) else { return nil }
        logger.info("Created HEIC with ContentIdentifier: \(assetIdentifier)")
        return destinationURL
    }

    /// Rewrites the video with the Live Photo content identifier as top-level
    /// metadata and copies the timed-metadata track (still-image-time) from
    /// the bundled metadata.mov. Mirrors the reference live-wallpaper
    /// implementation: AVAssetReaderTrackOutput + videoInput.transform.
    /// - Parameter stillImageTimeSeconds: Carried for interface stability;
    ///   the actual still-image-time comes from metadata.mov's track.
    /// - Parameter progress: Approximate 0...1 fraction of frames written.
    private func addAssetID(
        _ assetIdentifier: String,
        toVideo videoURL: URL,
        saveTo destinationURL: URL,
        stillImageTimeSeconds: Double,
        progress: @Sendable @escaping (Double) -> Void
    ) async throws -> URL {
        guard let metadataURL = Self.metadataMovURL else {
            throw AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "缺少 metadata.mov 资源文件", suggestedActions: ["重新安装应用"])
        }
        return try await withCheckedThrowingContinuation { continuation in
            let queue = DispatchQueue(label: "LivePhotoCore.VideoPairing")
            queue.async {
                do {
                    if FileManager.default.fileExists(atPath: destinationURL.path) {
                        try FileManager.default.removeItem(at: destinationURL)
                    }
                    let videoAsset = AVURLAsset(url: videoURL)
                    let metadataAsset = AVURLAsset(url: metadataURL)
                    guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else {
                        continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: "缺少视频轨", suggestedActions: ["更换一个视频", "重试"]))
                        return
                    }
                    // Frame-count estimate drives the progress fraction only.
                    let durationSeconds = max(0.001, videoAsset.duration.seconds)
                    let nominalFrameRate = videoTrack.nominalFrameRate > 0 ? videoTrack.nominalFrameRate : 30
                    let frameCount = max(1, Int(durationSeconds * Double(nominalFrameRate)))
                    let videoReader = try AVAssetReader(asset: videoAsset)
                    let metadataReader = try AVAssetReader(asset: metadataAsset)
                    let assetWriter = try AVAssetWriter(outputURL: destinationURL, fileType: .mov)
                    let writingGroup = DispatchGroup()
                    // Track output (not a video-composition output) matches the
                    // reference implementation; orientation is handled below
                    // via videoWriterInput.transform.
                    let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: [
                        kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)
                    ])
                    videoReader.add(videoReaderOutput)
                    // Output at track.naturalSize; the transform rotates on playback.
                    let videoWriterInput = AVAssetWriterInput(
                        mediaType: .video,
                        outputSettings: [
                            AVVideoCodecKey: AVVideoCodecType.h264,
                            AVVideoWidthKey: videoTrack.naturalSize.width,
                            AVVideoHeightKey: videoTrack.naturalSize.height
                        ]
                    )
                    videoWriterInput.transform = videoTrack.preferredTransform
                    // NOTE(review): true mirrors the reference project; for
                    // offline file transcoding Apple recommends false — confirm
                    // before changing, as the comments say this is deliberate.
                    videoWriterInput.expectsMediaDataInRealTime = true
                    assetWriter.add(videoWriterInput)
                    // Copy every timed-metadata track from metadata.mov
                    // (carries still-image-time). No sourceFormatHint, matching
                    // the reference implementation.
                    var metadataIOs = [(AVAssetWriterInput, AVAssetReaderTrackOutput)]()
                    let metadataTracks = metadataAsset.tracks(withMediaType: .metadata)
                    for track in metadataTracks {
                        let trackReaderOutput = AVAssetReaderTrackOutput(track: track, outputSettings: nil)
                        metadataReader.add(trackReaderOutput)
                        let metadataInput = AVAssetWriterInput(mediaType: .metadata, outputSettings: nil)
                        assetWriter.add(metadataInput)
                        metadataIOs.append((metadataInput, trackReaderOutput))
                    }
                    // Top-level content identifier must match the HEIC's.
                    assetWriter.metadata = [Self.metadataForAssetID(assetIdentifier)]
                    assetWriter.startWriting()
                    videoReader.startReading()
                    metadataReader.startReading()
                    assetWriter.startSession(atSourceTime: .zero)
                    var currentFrameCount = 0
                    // Pump video samples.
                    writingGroup.enter()
                    videoWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "LivePhotoCore.VideoWriterInput")) {
                        while videoWriterInput.isReadyForMoreMediaData {
                            guard let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() else {
                                videoWriterInput.markAsFinished()
                                writingGroup.leave()
                                break
                            }
                            currentFrameCount += 1
                            // FIX: clamp — the frame-count estimate can undercount,
                            // which previously let the fraction exceed 1.0.
                            progress(min(1.0, Double(currentFrameCount) / Double(frameCount)))
                            // FIX: the original ignored append's Bool result and
                            // would keep feeding a failed writer; stop on failure
                            // and let the final status check surface the error.
                            if !videoWriterInput.append(sampleBuffer) {
                                videoReader.cancelReading()
                                videoWriterInput.markAsFinished()
                                writingGroup.leave()
                                break
                            }
                        }
                    }
                    // Pump metadata samples (this is what makes it a Live Photo).
                    for (metadataInput, metadataOutput) in metadataIOs {
                        writingGroup.enter()
                        metadataInput.requestMediaDataWhenReady(on: DispatchQueue(label: "LivePhotoCore.MetadataWriterInput")) {
                            while metadataInput.isReadyForMoreMediaData {
                                if let sampleBuffer = metadataOutput.copyNextSampleBuffer() {
                                    metadataInput.append(sampleBuffer)
                                } else {
                                    metadataInput.markAsFinished()
                                    writingGroup.leave()
                                    break
                                }
                            }
                        }
                    }
                    writingGroup.notify(queue: .main) {
                        if videoReader.status == .completed && metadataReader.status == .completed && assetWriter.status == .writing {
                            assetWriter.finishWriting {
                                if assetWriter.status == .completed {
                                    continuation.resume(returning: destinationURL)
                                } else {
                                    continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: assetWriter.error?.localizedDescription, suggestedActions: ["重试"]))
                                }
                            }
                        } else {
                            let errorDesc = videoReader.error?.localizedDescription
                                ?? metadataReader.error?.localizedDescription
                                ?? assetWriter.error?.localizedDescription
                                ?? "未知错误"
                            continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: errorDesc, suggestedActions: ["重试"]))
                        }
                    }
                } catch {
                    continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: error.localizedDescription, suggestedActions: ["重试"]))
                }
            }
        }
    }

    /// Locates the bundled metadata.mov (app bundle first, then SPM module).
    private static var metadataMovURL: URL? {
        if let bundleURL = Bundle.main.url(forResource: "metadata", withExtension: "mov") {
            return bundleURL
        }
        #if SWIFT_PACKAGE
        if let moduleURL = Bundle.module.url(forResource: "metadata", withExtension: "mov") {
            return moduleURL
        }
        #endif
        return nil
    }

    /// Builds the QuickTime top-level content-identifier metadata item.
    private static func metadataForAssetID(_ assetIdentifier: String) -> AVMetadataItem {
        let item = AVMutableMetadataItem()
        item.key = "com.apple.quicktime.content.identifier" as (NSCopying & NSObjectProtocol)
        item.keySpace = AVMetadataKeySpace(rawValue: "mdta")
        item.value = assetIdentifier as (NSCopying & NSObjectProtocol)
        item.dataType = "com.apple.metadata.datatype.UTF-8"
        return item
    }
}

/// Outcome of the end-to-end workflow: built resources, the saved asset's
/// identifier, and the two validation signals.
public struct LivePhotoWorkflowResult: Sendable, Hashable {
    public var workId: UUID
    public var assetIdentifier: String
    public var pairedImageURL: URL
    public var pairedVideoURL: URL
    public var savedAssetId: String
    public var resourceValidationOK: Bool
    /// Nil when read access was unavailable to check the library asset.
    public var libraryAssetIsLivePhoto: Bool?

    public init(
        workId: UUID,
        assetIdentifier: String,
        pairedImageURL: URL,
        pairedVideoURL: URL,
        savedAssetId: String,
        resourceValidationOK: Bool,
        libraryAssetIsLivePhoto: Bool?
    ) {
        self.workId = workId
        self.assetIdentifier = assetIdentifier
        self.pairedImageURL = pairedImageURL
        self.pairedVideoURL = pairedVideoURL
        self.savedAssetId = savedAssetId
        self.resourceValidationOK = resourceValidationOK
        self.libraryAssetIsLivePhoto = libraryAssetIsLivePhoto
    }
}

/// End-to-end orchestrator: build resources, validate them, save to the
/// photo library, then (read access permitting) verify the saved asset.
public actor LivePhotoWorkflow {
    private let builder: LivePhotoBuilder
    private let albumWriter: AlbumWriter
    private let validator: LivePhotoValidator

    public init(cacheManager: CacheManager? = nil, logger: LivePhotoLogger = LivePhotoLogger()) throws {
        let cm = try cacheManager ?? CacheManager()
        self.builder = try LivePhotoBuilder(cacheManager: cm, logger: logger)
        self.albumWriter = AlbumWriter()
        self.validator = LivePhotoValidator()
    }

    /// Builds, saves, and validates a Live Photo from a source video.
    /// - Throws: `AppError` from the build stages, or LPB-401 when
    ///   add-only photo library permission is not granted.
    public func buildSaveValidate(
        sourceVideoURL: URL,
        coverImageURL: URL? = nil,
        exportParams: ExportParams = ExportParams(),
        progress: (@Sendable (LivePhotoBuildProgress) -> Void)? = nil
    ) async throws -> LivePhotoWorkflowResult {
        let output = try await builder.buildResources(
            sourceVideoURL: sourceVideoURL,
            coverImageURL: coverImageURL,
            exportParams: exportParams,
            progress: progress
        )
        progress?(LivePhotoBuildProgress(stage: .validate, fraction: 0))
        // Pre-save validation is advisory; a false result does not abort.
        let resourceOK = await validator.canCreateLivePhotoFromResources(
            photoURL: output.pairedImageURL,
            pairedVideoURL: output.pairedVideoURL
        )
        progress?(LivePhotoBuildProgress(stage: .validate, fraction: 0.3))
        let addOnlyStatus = await albumWriter.requestAddOnlyAuthorization()
        guard addOnlyStatus == .authorized else {
            throw AppError(
                code: "LPB-401",
                stage: .saveToAlbum,
                message: "无相册写入权限",
                underlyingErrorDescription: "authorizationStatus(addOnly)=\(addOnlyStatus)",
                suggestedActions: ["在系统设置中允许“添加照片”权限"]
            )
        }
        progress?(LivePhotoBuildProgress(stage: .saveToAlbum, fraction: 0))
        let assetId = try await albumWriter.saveLivePhoto(
            photoURL: output.pairedImageURL,
            pairedVideoURL: output.pairedVideoURL,
            shouldMoveFiles: false
        )
        progress?(LivePhotoBuildProgress(stage: .saveToAlbum, fraction: 1))
        // Post-save subtype check needs read access, which we may not have.
        var isLiveSubtype: Bool? = nil
        let readWriteStatus = PHPhotoLibrary.authorizationStatus(for: .readWrite)
        if readWriteStatus == .authorized || readWriteStatus == .limited {
            isLiveSubtype = await validator.isLivePhotoAsset(localIdentifier: assetId)
        }
        progress?(LivePhotoBuildProgress(stage: .validate, fraction: 1))
        return LivePhotoWorkflowResult(
            workId: output.workId,
            assetIdentifier: output.assetIdentifier,
            pairedImageURL: output.pairedImageURL,
            pairedVideoURL: output.pairedVideoURL,
            savedAssetId: assetId,
            resourceValidationOK: resourceOK,
            libraryAssetIsLivePhoto: isLiveSubtype
        )
    }
}