import AVFoundation import Foundation import ImageIO import os import Photos import UIKit import UniformTypeIdentifiers import VideoToolbox public enum LivePhotoBuildStage: String, Codable, Sendable { case normalize case extractKeyFrame case aiEnhance case writePhotoMetadata case writeVideoMetadata case saveToAlbum case validate } public struct LivePhotoBuildProgress: Sendable { public var stage: LivePhotoBuildStage public var fraction: Double public init(stage: LivePhotoBuildStage, fraction: Double) { self.stage = stage self.fraction = fraction } } public enum WorkStatus: String, Codable, Sendable { case idle case editing case processing case success case failed } public struct SourceRef: Codable, Sendable, Hashable { public var phAssetLocalIdentifier: String? public var fileURL: URL? public init(phAssetLocalIdentifier: String) { self.phAssetLocalIdentifier = phAssetLocalIdentifier self.fileURL = nil } public init(fileURL: URL) { self.phAssetLocalIdentifier = nil self.fileURL = fileURL } } public enum AudioPolicy: String, Codable, Sendable { case keep case remove } public enum CodecPolicy: String, Codable, Sendable { case passthrough case fallbackH264 } public enum HDRPolicy: String, Codable, Sendable { case keep case toneMapToSDR } /// 裁剪区域(归一化坐标,0~1) public struct CropRect: Codable, Sendable, Hashable { public var x: CGFloat // 左上角 x(0~1) public var y: CGFloat // 左上角 y(0~1) public var width: CGFloat // 宽度(0~1) public var height: CGFloat // 高度(0~1) public init(x: CGFloat = 0, y: CGFloat = 0, width: CGFloat = 1, height: CGFloat = 1) { self.x = x self.y = y self.width = width self.height = height } /// 全画幅(不裁剪) public static let full = CropRect() /// 转换为像素坐标 public func toPixelRect(videoSize: CGSize) -> CGRect { CGRect( x: x * videoSize.width, y: y * videoSize.height, width: width * videoSize.width, height: height * videoSize.height ) } } /// 预设比例模板 public enum AspectRatioTemplate: String, Codable, Sendable, CaseIterable { case original = "original" // 保持原比例 case 
lockScreen = "lock_screen" // iPhone 锁屏 9:19.5 case fullScreen = "full_screen" // 全面屏 9:16 case classic = "classic" // 经典 4:3 case square = "square" // 正方形 1:1 public var displayName: String { switch self { case .original: return "原比例" case .lockScreen: return "锁屏" case .fullScreen: return "全屏" case .classic: return "4:3" case .square: return "1:1" } } public var ratio: CGFloat? { switch self { case .original: return nil case .lockScreen: return 9.0 / 19.5 case .fullScreen: return 9.0 / 16.0 case .classic: return 3.0 / 4.0 case .square: return 1.0 } } } public struct ExportParams: Codable, Sendable, Hashable { public var trimStart: Double public var trimEnd: Double public var keyFrameTime: Double public var audioPolicy: AudioPolicy public var codecPolicy: CodecPolicy public var hdrPolicy: HDRPolicy public var maxDimension: Int public var cropRect: CropRect public var aspectRatio: AspectRatioTemplate public var compatibilityMode: Bool public var targetFrameRate: Int public var aiEnhanceConfig: AIEnhanceConfig public init( trimStart: Double = 0, trimEnd: Double = 1.0, keyFrameTime: Double = 0.5, audioPolicy: AudioPolicy = .keep, codecPolicy: CodecPolicy = .fallbackH264, hdrPolicy: HDRPolicy = .toneMapToSDR, maxDimension: Int = 1920, cropRect: CropRect = .full, aspectRatio: AspectRatioTemplate = .original, compatibilityMode: Bool = false, targetFrameRate: Int = 60, aiEnhanceConfig: AIEnhanceConfig = .disabled ) { self.trimStart = trimStart self.trimEnd = trimEnd self.keyFrameTime = keyFrameTime self.audioPolicy = audioPolicy self.codecPolicy = codecPolicy self.hdrPolicy = hdrPolicy self.maxDimension = maxDimension self.cropRect = cropRect self.aspectRatio = aspectRatio self.compatibilityMode = compatibilityMode self.targetFrameRate = targetFrameRate self.aiEnhanceConfig = aiEnhanceConfig } /// 应用兼容模式的便捷方法 public func withCompatibilityMode() -> ExportParams { var params = self params.compatibilityMode = true params.maxDimension = 720 params.targetFrameRate = 30 
params.codecPolicy = .fallbackH264 params.hdrPolicy = .toneMapToSDR return params } } public struct AppError: Error, Codable, Sendable, Hashable { public var code: String public var stage: LivePhotoBuildStage? public var message: String public var underlyingErrorDescription: String? public var suggestedActions: [String] public init( code: String, stage: LivePhotoBuildStage? = nil, message: String, underlyingErrorDescription: String? = nil, suggestedActions: [String] = [] ) { self.code = code self.stage = stage self.message = message self.underlyingErrorDescription = underlyingErrorDescription self.suggestedActions = suggestedActions } } public struct WorkItem: Identifiable, Codable, Sendable, Hashable { public var id: UUID public var createdAt: Date public var sourceVideo: SourceRef public var coverImage: SourceRef? public var exportParams: ExportParams public var status: WorkStatus public var resultAssetId: String? public var cacheDir: URL public var error: AppError? public init( id: UUID = UUID(), createdAt: Date = Date(), sourceVideo: SourceRef, coverImage: SourceRef? = nil, exportParams: ExportParams = ExportParams(), status: WorkStatus = .idle, resultAssetId: String? = nil, cacheDir: URL, error: AppError? = nil ) { self.id = id self.createdAt = createdAt self.sourceVideo = sourceVideo self.coverImage = coverImage self.exportParams = exportParams self.status = status self.resultAssetId = resultAssetId self.cacheDir = cacheDir self.error = error } } public struct LivePhotoWorkPaths: Sendable, Hashable { public var workDir: URL public var photoURL: URL public var pairedVideoURL: URL public var logURL: URL public init(workDir: URL, photoURL: URL, pairedVideoURL: URL, logURL: URL) { self.workDir = workDir self.photoURL = photoURL self.pairedVideoURL = pairedVideoURL self.logURL = logURL } } public struct CacheManager: Sendable { public var baseDirectory: URL public init(baseDirectory: URL? 
= nil) throws { if let baseDirectory { self.baseDirectory = baseDirectory } else { let caches = try FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: true) self.baseDirectory = caches.appendingPathComponent("LivePhotoBuilder", isDirectory: true) } try FileManager.default.createDirectory(at: self.baseDirectory, withIntermediateDirectories: true) } public func makeWorkPaths(workId: UUID) throws -> LivePhotoWorkPaths { let workDir = baseDirectory.appendingPathComponent(workId.uuidString, isDirectory: true) try FileManager.default.createDirectory(at: workDir, withIntermediateDirectories: true) return LivePhotoWorkPaths( workDir: workDir, photoURL: workDir.appendingPathComponent("photo").appendingPathExtension("heic"), pairedVideoURL: workDir.appendingPathComponent("paired").appendingPathExtension("mov"), logURL: workDir.appendingPathComponent("builder").appendingPathExtension("log") ) } public func clearWork(workId: UUID) throws { let workDir = baseDirectory.appendingPathComponent(workId.uuidString, isDirectory: true) if FileManager.default.fileExists(atPath: workDir.path) { try FileManager.default.removeItem(at: workDir) } } /// 移除工作目录(静默失败,用于取消任务时清理) public func removeWorkDir(workId: UUID) { let workDir = baseDirectory.appendingPathComponent(workId.uuidString, isDirectory: true) try? 
FileManager.default.removeItem(at: workDir) } } public struct LivePhotoLogger: Sendable { private var logger: os.Logger public init(subsystem: String = "ToLivePhoto", category: String = "LivePhotoCore") { self.logger = os.Logger(subsystem: subsystem, category: category) } public func info(_ message: String) { logger.info("\(message, privacy: .public)") } public func error(_ message: String) { logger.error("\(message, privacy: .public)") } } public actor AlbumWriter { public init() {} public func requestAddOnlyAuthorization() async -> PHAuthorizationStatus { await withCheckedContinuation { continuation in PHPhotoLibrary.requestAuthorization(for: .addOnly) { status in continuation.resume(returning: status) } } } public func saveLivePhoto(photoURL: URL, pairedVideoURL: URL, shouldMoveFiles: Bool = false) async throws -> String { try await withCheckedThrowingContinuation { continuation in var localIdentifier: String? PHPhotoLibrary.shared().performChanges({ let request = PHAssetCreationRequest.forAsset() let photoOptions = PHAssetResourceCreationOptions() photoOptions.shouldMoveFile = shouldMoveFiles photoOptions.uniformTypeIdentifier = UTType.heic.identifier let videoOptions = PHAssetResourceCreationOptions() videoOptions.shouldMoveFile = shouldMoveFiles videoOptions.uniformTypeIdentifier = UTType.quickTimeMovie.identifier request.addResource(with: .photo, fileURL: photoURL, options: photoOptions) request.addResource(with: .pairedVideo, fileURL: pairedVideoURL, options: videoOptions) localIdentifier = request.placeholderForCreatedAsset?.localIdentifier }, completionHandler: { success, error in if let error { continuation.resume(throwing: AppError(code: "LPB-401", stage: .saveToAlbum, message: "保存到相册失败", underlyingErrorDescription: error.localizedDescription, suggestedActions: ["允许添加到相册权限", "稍后重试"])) return } guard success, let id = localIdentifier else { continuation.resume(throwing: AppError(code: "LPB-401", stage: .saveToAlbum, message: "保存到相册失败", 
underlyingErrorDescription: nil, suggestedActions: ["允许添加到相册权限", "稍后重试"])) return } continuation.resume(returning: id) }) } } } public actor LivePhotoValidator { public init() {} public func isLivePhotoAsset(localIdentifier: String) async -> Bool { let result = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil) guard let asset = result.firstObject else { return false } return asset.mediaSubtypes.contains(.photoLive) } public func requestLivePhoto(localIdentifier: String) async -> PHLivePhoto? { let result = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil) guard let asset = result.firstObject else { return nil } return await withCheckedContinuation { continuation in PHImageManager.default().requestLivePhoto( for: asset, targetSize: CGSize(width: 1, height: 1), contentMode: .aspectFit, options: nil ) { livePhoto, _ in continuation.resume(returning: livePhoto) } } } public func requestLivePhoto(photoURL: URL, pairedVideoURL: URL) async -> PHLivePhoto? { await withCheckedContinuation { continuation in var hasResumed = false let requestID = PHLivePhoto.request( withResourceFileURLs: [pairedVideoURL, photoURL], placeholderImage: nil, targetSize: .zero, contentMode: .aspectFit ) { livePhoto, info in // 确保只 resume 一次 guard !hasResumed else { return } // 如果是降级版本,等待完整版本 if let isDegraded = info[PHLivePhotoInfoIsDegradedKey] as? Bool, isDegraded { return } // 检查是否有错误或被取消 if let error = info[PHLivePhotoInfoErrorKey] as? Error { print("[LivePhotoValidator] requestLivePhoto error: \(error.localizedDescription)") hasResumed = true continuation.resume(returning: nil) return } if let cancelled = info[PHLivePhotoInfoCancelledKey] as? 
Bool, cancelled { print("[LivePhotoValidator] requestLivePhoto cancelled") hasResumed = true continuation.resume(returning: nil) return } hasResumed = true continuation.resume(returning: livePhoto) } // 添加超时保护,防止无限等待 DispatchQueue.main.asyncAfter(deadline: .now() + 10) { guard !hasResumed else { return } print("[LivePhotoValidator] requestLivePhoto timeout, requestID: \(requestID)") PHLivePhoto.cancelRequest(withRequestID: requestID) hasResumed = true continuation.resume(returning: nil) } } } public func canCreateLivePhotoFromResources(photoURL: URL, pairedVideoURL: URL) async -> Bool { await requestLivePhoto(photoURL: photoURL, pairedVideoURL: pairedVideoURL) != nil } } public struct LivePhotoBuildOutput: Sendable, Hashable { public var workId: UUID public var assetIdentifier: String public var pairedImageURL: URL public var pairedVideoURL: URL public init(workId: UUID, assetIdentifier: String, pairedImageURL: URL, pairedVideoURL: URL) { self.workId = workId self.assetIdentifier = assetIdentifier self.pairedImageURL = pairedImageURL self.pairedVideoURL = pairedVideoURL } } public actor LivePhotoBuilder { private let cacheManager: CacheManager private let logger: LivePhotoLogger public init(cacheManager: CacheManager? = nil, logger: LivePhotoLogger = LivePhotoLogger()) throws { self.cacheManager = try cacheManager ?? CacheManager() self.logger = logger } public func buildResources( workId: UUID = UUID(), sourceVideoURL: URL, coverImageURL: URL? = nil, exportParams: ExportParams = ExportParams(), progress: (@Sendable (LivePhotoBuildProgress) -> Void)? 
= nil ) async throws -> LivePhotoBuildOutput { let assetIdentifier = UUID().uuidString let paths = try cacheManager.makeWorkPaths(workId: workId) // 临时文件路径(用于清理) let trimmedURL = paths.workDir.appendingPathComponent("trimmed.mov") let scaledURL = paths.workDir.appendingPathComponent("scaled.mov") let keyPhotoTempURL = paths.workDir.appendingPathComponent("keyPhoto").appendingPathExtension("heic") // 内部函数:清理临时文件 func cleanupTempFiles() { try? FileManager.default.removeItem(at: trimmedURL) try? FileManager.default.removeItem(at: scaledURL) try? FileManager.default.removeItem(at: keyPhotoTempURL) } do { progress?(LivePhotoBuildProgress(stage: .normalize, fraction: 0)) let trimmedVideoURL = try await trimVideo( sourceURL: sourceVideoURL, trimStart: exportParams.trimStart, trimEnd: exportParams.trimEnd, destinationURL: trimmedURL ) // 关键:将视频变速到约 1 秒,与 metadata.mov 的时间标记匹配 let targetDuration = CMTimeMake(value: 550, timescale: 600) // ~0.917 秒,与 live-wallpaper 一致 progress?(LivePhotoBuildProgress(stage: .normalize, fraction: 0.5)) let scaledVideoURL = try await scaleVideoToTargetDuration( sourceURL: trimmedVideoURL, targetDuration: targetDuration, cropRect: exportParams.cropRect, aspectRatio: exportParams.aspectRatio, maxDimension: exportParams.maxDimension, targetFrameRate: exportParams.targetFrameRate, destinationURL: scaledURL ) // 计算关键帧时间:目标视频的中间位置(0.5 秒处,与 metadata.mov 的 still-image-time 匹配) let relativeKeyFrameTime = 0.5 // 固定为 0.5 秒,与 metadata.mov 匹配 progress?(LivePhotoBuildProgress(stage: .extractKeyFrame, fraction: 0)) let keyPhotoURL = try await resolveKeyPhotoURL( videoURL: scaledVideoURL, coverImageURL: coverImageURL, keyFrameTime: relativeKeyFrameTime, destinationURL: keyPhotoTempURL, aiEnhanceConfig: exportParams.aiEnhanceConfig, progress: progress ) progress?(LivePhotoBuildProgress(stage: .writePhotoMetadata, fraction: 0)) guard let pairedImageURL = addAssetID( assetIdentifier, toImage: keyPhotoURL, saveTo: paths.photoURL ) else { cleanupTempFiles() throw 
AppError(code: "LPB-201", stage: .writePhotoMetadata, message: "封面生成失败", underlyingErrorDescription: nil, suggestedActions: ["缩短时长", "降低分辨率", "重试"]) } progress?(LivePhotoBuildProgress(stage: .writeVideoMetadata, fraction: 0)) let pairedVideoURL = try await addAssetID(assetIdentifier, toVideo: scaledVideoURL, saveTo: paths.pairedVideoURL, stillImageTimeSeconds: relativeKeyFrameTime, progress: { p in progress?(LivePhotoBuildProgress(stage: .writeVideoMetadata, fraction: p)) }) // 清理临时文件(成功后) cleanupTempFiles() logger.info("Generated Live Photo files:") logger.info(" Photo: \(pairedImageURL.path)") logger.info(" Video: \(pairedVideoURL.path)") logger.info(" AssetIdentifier: \(assetIdentifier)") return LivePhotoBuildOutput(workId: workId, assetIdentifier: assetIdentifier, pairedImageURL: pairedImageURL, pairedVideoURL: pairedVideoURL) } catch { // 清理临时文件(失败后) cleanupTempFiles() throw error } } private func trimVideo(sourceURL: URL, trimStart: Double, trimEnd: Double, destinationURL: URL) async throws -> URL { let asset = AVURLAsset(url: sourceURL) let duration = try await asset.load(.duration).seconds let safeTrimStart = max(0, min(trimStart, duration)) let safeTrimEnd = max(safeTrimStart, min(trimEnd, duration)) if safeTrimEnd - safeTrimStart < 0.1 { throw AppError(code: "LPB-101", stage: .normalize, message: "视频时长不足", suggestedActions: ["选择更长的视频"]) } let startTime = CMTime(seconds: safeTrimStart, preferredTimescale: 600) let endTime = CMTime(seconds: safeTrimEnd, preferredTimescale: 600) let timeRange = CMTimeRange(start: startTime, end: endTime) if FileManager.default.fileExists(atPath: destinationURL.path) { try FileManager.default.removeItem(at: destinationURL) } let composition = AVMutableComposition() guard let videoTrack = try await asset.loadTracks(withMediaType: .video).first else { throw AppError(code: "LPB-101", stage: .normalize, message: "视频轨道不存在", suggestedActions: ["选择其他视频"]) } let compositionVideoTrack = composition.addMutableTrack(withMediaType: 
.video, preferredTrackID: kCMPersistentTrackID_Invalid) try compositionVideoTrack?.insertTimeRange(timeRange, of: videoTrack, at: .zero) if let audioTrack = try? await asset.loadTracks(withMediaType: .audio).first { let compositionAudioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) try? compositionAudioTrack?.insertTimeRange(timeRange, of: audioTrack, at: .zero) } let transform = try await videoTrack.load(.preferredTransform) // 保持原始视频的 transform,确保方向正确 compositionVideoTrack?.preferredTransform = transform // 使用 Passthrough 预设保持原始质量和尺寸 guard let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough) else { throw AppError(code: "LPB-101", stage: .normalize, message: "无法创建导出会话", suggestedActions: ["重试"]) } exportSession.outputURL = destinationURL exportSession.outputFileType = .mov await exportSession.export() guard exportSession.status == .completed else { throw AppError(code: "LPB-101", stage: .normalize, message: "视频裁剪失败", underlyingErrorDescription: exportSession.error?.localizedDescription, suggestedActions: ["缩短时长", "重试"]) } return destinationURL } /// 将视频处理为 Live Photo 所需的格式 /// 包括:时长变速到 ~0.917 秒、裁剪、尺寸调整、帧率转换 /// 优化:单次导出完成变速+裁剪+缩放(减少一次编码,降低内存峰值) private func scaleVideoToTargetDuration( sourceURL: URL, targetDuration: CMTime, cropRect: CropRect, aspectRatio: AspectRatioTemplate, maxDimension: Int, targetFrameRate: Int, destinationURL: URL ) async throws -> URL { let asset = AVURLAsset(url: sourceURL) if FileManager.default.fileExists(atPath: destinationURL.path) { try FileManager.default.removeItem(at: destinationURL) } guard let videoTrack = try await asset.loadTracks(withMediaType: .video).first else { throw AppError(code: "LPB-101", stage: .normalize, message: "视频轨道不存在", suggestedActions: ["选择其他视频"]) } let originalDuration = try await asset.load(.duration) let naturalSize = try await videoTrack.load(.naturalSize) let preferredTransform = try await 
videoTrack.load(.preferredTransform) // 计算应用 transform 后的尺寸 let originalSize = CGSize(width: naturalSize.width, height: naturalSize.height) let transformedSize = originalSize.applying(preferredTransform) let absoluteSize = CGSize(width: abs(transformedSize.width), height: abs(transformedSize.height)) // 根据 maxDimension 计算基准宽度 let baseWidth: CGFloat = maxDimension == 720 ? 720 : 1080 let maxHeight: CGFloat = maxDimension == 720 ? 1280 : 1920 // 根据裁剪和比例计算输出尺寸 let outputSize: CGSize if let targetRatio = aspectRatio.ratio { let width: CGFloat = baseWidth let height = width / targetRatio outputSize = CGSize(width: width, height: min(height, maxHeight)) } else { let isLandscape = absoluteSize.width > absoluteSize.height outputSize = isLandscape ? CGSize(width: maxHeight, height: baseWidth) : CGSize(width: baseWidth, height: maxHeight) } // 优化:单次导出完成变速+裁剪+缩放 // 使用 AVMutableComposition 进行时间缩放,AVMutableVideoComposition 进行空间变换 let composition = AVMutableComposition() guard let compositionVideoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) else { throw AppError(code: "LPB-101", stage: .normalize, message: "无法创建视频轨道", suggestedActions: ["重试"]) } try compositionVideoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: originalDuration), of: videoTrack, at: .zero) // 变速:将原始时长缩放到目标时长 compositionVideoTrack.scaleTimeRange(CMTimeRange(start: .zero, duration: originalDuration), toDuration: targetDuration) compositionVideoTrack.preferredTransform = preferredTransform guard let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else { throw AppError(code: "LPB-101", stage: .normalize, message: "无法创建导出会话", suggestedActions: ["重试"]) } // 使用 AVMutableVideoComposition 设置输出尺寸和帧率 let videoComposition = AVMutableVideoComposition() videoComposition.renderSize = outputSize videoComposition.frameDuration = CMTime(value: 1, timescale: CMTimeScale(targetFrameRate)) let instruction 
= AVMutableVideoCompositionInstruction() instruction.timeRange = CMTimeRange(start: .zero, duration: targetDuration) let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: compositionVideoTrack) // 计算旋转后的实际尺寸 let rotatedSize = naturalSize.applying(preferredTransform) let rotatedAbsoluteSize = CGSize(width: abs(rotatedSize.width), height: abs(rotatedSize.height)) // 计算裁剪后的源区域尺寸 let croppedSourceWidth = rotatedAbsoluteSize.width * cropRect.width let croppedSourceHeight = rotatedAbsoluteSize.height * cropRect.height // 基于裁剪后尺寸计算缩放因子(填充模式) let actualWidthRatio = outputSize.width / croppedSourceWidth let actualHeightRatio = outputSize.height / croppedSourceHeight let actualScaleFactor = max(actualWidthRatio, actualHeightRatio) let scaledWidth = rotatedAbsoluteSize.width * actualScaleFactor let scaledHeight = rotatedAbsoluteSize.height * actualScaleFactor // 计算裁剪偏移(将裁剪区域中心对齐到输出中心) let cropCenterX = (cropRect.x + cropRect.width / 2) * scaledWidth let cropCenterY = (cropRect.y + cropRect.height / 2) * scaledHeight let outputCenterX = outputSize.width / 2 let outputCenterY = outputSize.height / 2 let centerX = outputCenterX - cropCenterX let centerY = outputCenterY - cropCenterY // 构建最终变换 let scaleTransform = CGAffineTransform(scaleX: actualScaleFactor, y: actualScaleFactor) let translateToCenter = CGAffineTransform(translationX: centerX, y: centerY) let finalTransform = preferredTransform.concatenating(scaleTransform).concatenating(translateToCenter) layerInstruction.setTransform(finalTransform, at: .zero) instruction.layerInstructions = [layerInstruction] videoComposition.instructions = [instruction] exportSession.videoComposition = videoComposition exportSession.outputURL = destinationURL exportSession.outputFileType = .mov exportSession.shouldOptimizeForNetworkUse = true await exportSession.export() guard exportSession.status == .completed else { throw AppError(code: "LPB-101", stage: .normalize, message: "视频处理失败", underlyingErrorDescription: 
exportSession.error?.localizedDescription, suggestedActions: ["重试"]) } return destinationURL } private func resolveKeyPhotoURL( videoURL: URL, coverImageURL: URL?, keyFrameTime: Double, destinationURL: URL, aiEnhanceConfig: AIEnhanceConfig = .disabled, progress: (@Sendable (LivePhotoBuildProgress) -> Void)? = nil ) async throws -> URL { // 最大分辨率限制(对标竞品 1080p) let maxDimension = 1920 // 内部函数:将 CGImage 写入 HEIC 文件 func writeHEIC(_ image: CGImage, to url: URL) throws { guard let dest = CGImageDestinationCreateWithURL(url as CFURL, UTType.heic.identifier as CFString, 1, nil) else { throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "写入封面失败", underlyingErrorDescription: "无法创建 HEIC 写入器", suggestedActions: ["重试"]) } let props: [String: Any] = [ kCGImageDestinationLossyCompressionQuality as String: 0.9 ] CGImageDestinationAddImage(dest, image, props as CFDictionary) guard CGImageDestinationFinalize(dest) else { throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "写入封面失败", underlyingErrorDescription: "HEIC 写入失败", suggestedActions: ["重试"]) } } // 内部函数:使用 CGImageSource 高效缩放图像(内存优化) func scaleImageFromSource(_ source: CGImageSource, maxDim: Int) -> CGImage? 
{ let options: [CFString: Any] = [ kCGImageSourceThumbnailMaxPixelSize: maxDim, kCGImageSourceCreateThumbnailFromImageAlways: true, kCGImageSourceCreateThumbnailWithTransform: true ] return CGImageSourceCreateThumbnailAtIndex(source, 0, options as CFDictionary) } var finalImage: CGImage // 如果用户提供了封面图 if let coverImageURL { guard let src = CGImageSourceCreateWithURL(coverImageURL as CFURL, nil) else { throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "封面读取失败", underlyingErrorDescription: nil, suggestedActions: ["更换封面图", "重试"]) } // 使用 CGImageSource 高效缩放,无需加载完整图像到内存 if let scaledImg = scaleImageFromSource(src, maxDim: maxDimension) { finalImage = scaledImg } else if let img = CGImageSourceCreateImageAtIndex(src, 0, nil) { // 回退:直接使用原图 finalImage = img } else { throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "封面读取失败", underlyingErrorDescription: nil, suggestedActions: ["更换封面图", "重试"]) } } else { // 从视频抽帧 let asset = AVURLAsset(url: videoURL) let imageGenerator = AVAssetImageGenerator(asset: asset) imageGenerator.appliesPreferredTrackTransform = true imageGenerator.requestedTimeToleranceAfter = CMTime(value: 1, timescale: 100) imageGenerator.requestedTimeToleranceBefore = CMTime(value: 1, timescale: 100) // 设置最大尺寸,让 AVAssetImageGenerator 自动缩放 imageGenerator.maximumSize = CGSize(width: maxDimension, height: maxDimension) let safeSeconds = max(0, min(keyFrameTime, max(0, asset.duration.seconds - 0.1))) let time = CMTime(seconds: safeSeconds, preferredTimescale: asset.duration.timescale) do { finalImage = try imageGenerator.copyCGImage(at: time, actualTime: nil) } catch { throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "抽帧失败", underlyingErrorDescription: error.localizedDescription, suggestedActions: ["缩短时长", "降低分辨率", "重试"]) } } // AI 超分辨率增强(如果启用) if aiEnhanceConfig.enabled && AIEnhancer.isAvailable() { progress?(LivePhotoBuildProgress(stage: .aiEnhance, fraction: 0)) logger.info("Starting AI enhancement for cover image: 
\(finalImage.width)x\(finalImage.height)") do { let enhancer = AIEnhancer(config: aiEnhanceConfig) try await enhancer.preloadModel() let result = try await enhancer.enhance(image: finalImage) { p in progress?(LivePhotoBuildProgress(stage: .aiEnhance, fraction: p)) } finalImage = result.enhancedImage logger.info("AI enhancement complete: \(Int(result.originalSize.width))x\(Int(result.originalSize.height)) -> \(Int(result.enhancedSize.width))x\(Int(result.enhancedSize.height)) in \(Int(result.processingTimeMs))ms") } catch { // AI 增强失败时静默降级,使用原图 logger.error("AI enhancement failed, using original image: \(error.localizedDescription)") } } try writeHEIC(finalImage, to: destinationURL) return destinationURL } private func addAssetID( _ assetIdentifier: String, toImage imageURL: URL, saveTo destinationURL: URL ) -> URL? { let useHEIC = true let imageType = useHEIC ? UTType.heic.identifier : UTType.jpeg.identifier guard let imageDestination = CGImageDestinationCreateWithURL(destinationURL as CFURL, imageType as CFString, 1, nil), let imageSource = CGImageSourceCreateWithURL(imageURL as CFURL, nil), let imageRef = CGImageSourceCreateImageAtIndex(imageSource, 0, nil) else { return nil } var imageProperties = (CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [String: Any]) ?? [:] // 获取图像尺寸 let width = imageRef.width let height = imageRef.height // 添加 TIFF (IFD0) 标准字段 - 对标竞品 var tiffDict = (imageProperties[kCGImagePropertyTIFFDictionary as String] as? [String: Any]) ?? 
[:] tiffDict[kCGImagePropertyTIFFOrientation as String] = 1 // Horizontal (normal) tiffDict[kCGImagePropertyTIFFXResolution as String] = 72 tiffDict[kCGImagePropertyTIFFYResolution as String] = 72 tiffDict[kCGImagePropertyTIFFResolutionUnit as String] = 2 // inches // 移除 Tile 字段 - 竞品没有这些字段 tiffDict.removeValue(forKey: kCGImagePropertyTIFFTileWidth as String) tiffDict.removeValue(forKey: kCGImagePropertyTIFFTileLength as String) imageProperties[kCGImagePropertyTIFFDictionary as String] = tiffDict // 添加 EXIF 标准字段 - 对标竞品 var exifDict = (imageProperties[kCGImagePropertyExifDictionary as String] as? [String: Any]) ?? [:] exifDict[kCGImagePropertyExifVersion as String] = [2, 2, 1] // 0221 exifDict[kCGImagePropertyExifPixelXDimension as String] = width exifDict[kCGImagePropertyExifPixelYDimension as String] = height imageProperties[kCGImagePropertyExifDictionary as String] = exifDict // 简化方案:只设置 ContentIdentifier,不注入复杂的 MakerNotes // 竞品也只使用 ContentIdentifier,这足以让 Photos 识别 Live Photo let assetIdentifierKey = "17" // Content Identifier var makerAppleDict: [String: Any] = [:] makerAppleDict[assetIdentifierKey] = assetIdentifier imageProperties[kCGImagePropertyMakerAppleDictionary as String] = makerAppleDict CGImageDestinationAddImage(imageDestination, imageRef, imageProperties as CFDictionary) guard CGImageDestinationFinalize(imageDestination) else { return nil } logger.info("Created HEIC with ContentIdentifier: \(assetIdentifier)") return destinationURL } private func addAssetID( _ assetIdentifier: String, toVideo videoURL: URL, saveTo destinationURL: URL, stillImageTimeSeconds: Double, progress: @Sendable @escaping (Double) -> Void ) async throws -> URL { // 关键修复:完全对齐 live-wallpaper 项目的实现 // 使用 AVAssetReaderTrackOutput + videoInput.transform,而非 AVAssetReaderVideoCompositionOutput guard let metadataURL = Self.metadataMovURL else { throw AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "缺少 metadata.mov 资源文件", suggestedActions: ["重新安装应用"]) } return try await 
// Re-muxes the processed video together with the Live Photo metadata track
// (copied from the bundled metadata.mov) into a single .mov at destinationURL,
// stamping the asset-identifier metadata that pairs it with the still photo.
// Resumes the continuation exactly once: with destinationURL on success, or an
// AppError (code LPB-301, stage .writeVideoMetadata) on any failure.
withCheckedThrowingContinuation { continuation in
    // Serial queue: all reader/writer setup happens off the caller's thread.
    let queue = DispatchQueue(label: "LivePhotoCore.VideoPairing")
    queue.async {
        do {
            // AVAssetWriter refuses to overwrite; remove any stale output first.
            if FileManager.default.fileExists(atPath: destinationURL.path) {
                try FileManager.default.removeItem(at: destinationURL)
            }
            let videoAsset = AVURLAsset(url: videoURL)
            let metadataAsset = AVURLAsset(url: metadataURL)
            // A video track is mandatory; bail out with a user-facing error otherwise.
            guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else {
                continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: "缺少视频轨", suggestedActions: ["更换一个视频", "重试"]))
                return
            }
            // Estimated total frame count, used only for progress reporting.
            // Clamped so the division below never sees zero.
            let durationSeconds = max(0.001, videoAsset.duration.seconds)
            let nominalFrameRate = videoTrack.nominalFrameRate > 0 ? videoTrack.nominalFrameRate : 30
            let frameCount = max(1, Int(durationSeconds * Double(nominalFrameRate)))

            // Create the readers and the writer.
            let videoReader = try AVAssetReader(asset: videoAsset)
            let metadataReader = try AVAssetReader(asset: metadataAsset)
            let assetWriter = try AVAssetWriter(outputURL: destinationURL, fileType: .mov)
            let writingGroup = DispatchGroup()

            // Key point: use AVAssetReaderTrackOutput (identical to the
            // live-wallpaper reference project) rather than
            // AVAssetReaderVideoCompositionOutput.
            let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: [
                kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)
            ])
            videoReader.add(videoReaderOutput)

            // Key point: use track.naturalSize as the output dimensions (matching
            // live-wallpaper); orientation is carried via videoWriterInput.transform.
            let videoWriterInput = AVAssetWriterInput(
                mediaType: .video,
                outputSettings: [
                    AVVideoCodecKey: AVVideoCodecType.h264,
                    AVVideoWidthKey: videoTrack.naturalSize.width,
                    AVVideoHeightKey: videoTrack.naturalSize.height
                ]
            )
            // Key point: propagate orientation through the transform property
            // (matches live-wallpaper line 108 exactly).
            videoWriterInput.transform = videoTrack.preferredTransform
            // Key point: expectsMediaDataInRealTime = true (matches live-wallpaper
            // line 109). NOTE(review): Apple's docs recommend `false` for
            // file-based (non-live) sources; this is kept deliberately to mirror
            // the reference project — confirm before changing.
            videoWriterInput.expectsMediaDataInRealTime = true
            assetWriter.add(videoWriterInput)

            // Set up the metadata track reader/writer (copied from metadata.mov).
            // Key point: no sourceFormatHint is passed, consistent with the
            // live-wallpaper project.
            var metadataIOs = [(AVAssetWriterInput, AVAssetReaderTrackOutput)]()
            let metadataTracks = metadataAsset.tracks(withMediaType: .metadata)
            for track in metadataTracks {
                let trackReaderOutput = AVAssetReaderTrackOutput(track: track, outputSettings: nil)
                metadataReader.add(trackReaderOutput)
                let metadataInput = AVAssetWriterInput(mediaType: .metadata, outputSettings: nil)
                assetWriter.add(metadataInput)
                metadataIOs.append((metadataInput, trackReaderOutput))
            }

            // Top-level metadata: the content identifier that pairs this movie
            // with its still photo.
            assetWriter.metadata = [Self.metadataForAssetID(assetIdentifier)]
            assetWriter.startWriting()
            videoReader.startReading()
            metadataReader.startReading()
            assetWriter.startSession(atSourceTime: .zero)

            var currentFrameCount = 0

            // Write video frames. The group is entered once per writer input and
            // left exactly once when that input runs out of samples.
            writingGroup.enter()
            videoWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "LivePhotoCore.VideoWriterInput")) {
                while videoWriterInput.isReadyForMoreMediaData {
                    if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
                        // NOTE(review): progress is invoked on the writer queue,
                        // not the main queue — confirm callers expect that.
                        currentFrameCount += 1
                        let pct = Double(currentFrameCount) / Double(frameCount)
                        progress(pct)
                        // NOTE(review): append's Bool result is ignored; a failed
                        // append is only surfaced later via assetWriter.status.
                        videoWriterInput.append(sampleBuffer)
                    } else {
                        videoWriterInput.markAsFinished()
                        writingGroup.leave()
                        break
                    }
                }
            }

            // Copy the metadata track's sample buffers (critical!).
            for (metadataInput, metadataOutput) in metadataIOs {
                writingGroup.enter()
                metadataInput.requestMediaDataWhenReady(on: DispatchQueue(label: "LivePhotoCore.MetadataWriterInput")) {
                    while metadataInput.isReadyForMoreMediaData {
                        if let sampleBuffer = metadataOutput.copyNextSampleBuffer() {
                            metadataInput.append(sampleBuffer)
                        } else {
                            metadataInput.markAsFinished()
                            writingGroup.leave()
                            break
                        }
                    }
                }
            }

            // All inputs drained: finish the file if every stage is healthy,
            // otherwise surface the first available underlying error.
            writingGroup.notify(queue: .main) {
                if videoReader.status == .completed && metadataReader.status == .completed && assetWriter.status == .writing {
                    assetWriter.finishWriting {
                        if assetWriter.status == .completed {
                            continuation.resume(returning: destinationURL)
                        } else {
                            continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: assetWriter.error?.localizedDescription, suggestedActions: ["重试"]))
                        }
                    }
                } else {
                    let errorDesc = videoReader.error?.localizedDescription ?? metadataReader.error?.localizedDescription ?? assetWriter.error?.localizedDescription ?? "未知错误"
                    continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: errorDesc, suggestedActions: ["重试"]))
                }
            }
        } catch {
            // Setup failure (reader/writer creation, file removal, …).
            continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: error.localizedDescription, suggestedActions: ["重试"]))
        }
    }
}
}

/// URL of the bundled metadata.mov resource, or nil when it cannot be located.
private static var metadataMovURL: URL?
{
    // First try the main bundle (when shipped inside an app).
    if let bundleURL = Bundle.main.url(forResource: "metadata", withExtension: "mov") {
        return bundleURL
    }
    // Then try the module bundle (when built as an SPM package).
    #if SWIFT_PACKAGE
    if let moduleURL = Bundle.module.url(forResource: "metadata", withExtension: "mov") {
        return moduleURL
    }
    #endif
    return nil
}

/// Builds the QuickTime top-level metadata item that carries the Live Photo
/// content identifier (key `com.apple.quicktime.content.identifier`, key space
/// `mdta`), which must match the identifier embedded in the paired still photo.
private static func metadataForAssetID(_ assetIdentifier: String) -> AVMetadataItem {
    let item = AVMutableMetadataItem()
    item.key = "com.apple.quicktime.content.identifier" as (NSCopying & NSObjectProtocol)
    item.keySpace = AVMetadataKeySpace(rawValue: "mdta")
    item.value = assetIdentifier as (NSCopying & NSObjectProtocol)
    item.dataType = "com.apple.metadata.datatype.UTF-8"
    return item
}
}

/// Outcome of a complete build → save → validate run: the paired resource URLs,
/// the identifier linking them, the saved library asset id, and validation flags.
public struct LivePhotoWorkflowResult: Sendable, Hashable {
    // Work-session id this result belongs to.
    public var workId: UUID
    // Content identifier shared by the photo and video resources.
    public var assetIdentifier: String
    // Paired still-image file produced by the builder.
    public var pairedImageURL: URL
    // Paired video file produced by the builder.
    public var pairedVideoURL: URL
    // localIdentifier of the PHAsset created in the photo library.
    public var savedAssetId: String
    // Whether the resources passed the pre-save PHLivePhoto validation.
    public var resourceValidationOK: Bool
    // Whether the saved library asset reports the Live Photo subtype;
    // nil when read access was unavailable and the check was skipped.
    public var libraryAssetIsLivePhoto: Bool?

    public init(
        workId: UUID,
        assetIdentifier: String,
        pairedImageURL: URL,
        pairedVideoURL: URL,
        savedAssetId: String,
        resourceValidationOK: Bool,
        libraryAssetIsLivePhoto: Bool?
    ) {
        self.workId = workId
        self.assetIdentifier = assetIdentifier
        self.pairedImageURL = pairedImageURL
        self.pairedVideoURL = pairedVideoURL
        self.savedAssetId = savedAssetId
        self.resourceValidationOK = resourceValidationOK
        self.libraryAssetIsLivePhoto = libraryAssetIsLivePhoto
    }
}

/// Actor orchestrating the full Live Photo pipeline: build the paired
/// photo/video resources, validate them, save to the photo library, and
/// (permissions allowing) verify the saved asset.
public actor LivePhotoWorkflow {
    private let builder: LivePhotoBuilder
    private let albumWriter: AlbumWriter
    private let validator: LivePhotoValidator
    private let cacheManager: CacheManager

    /// - Parameters:
    ///   - cacheManager: Optional injected cache manager; a fresh one is
    ///     created when nil (CacheManager() may throw).
    ///   - logger: Logger forwarded to the builder.
    /// - Throws: Whatever CacheManager() or LivePhotoBuilder(cacheManager:logger:) throws.
    public init(cacheManager: CacheManager? = nil, logger: LivePhotoLogger = LivePhotoLogger()) throws {
        let cm = try cacheManager ?? CacheManager()
        self.cacheManager = cm
        self.builder = try LivePhotoBuilder(cacheManager: cm, logger: logger)
        self.albumWriter = AlbumWriter()
        self.validator = LivePhotoValidator()
    }

    /// Removes the working directory for the given workId (used to clean up
    /// intermediate files when a task is cancelled).
    public func cleanupWork(workId: UUID) async {
        cacheManager.removeWorkDir(workId: workId)
    }

    /// Runs the full pipeline: build resources → validate them → request
    /// add-only photo-library authorization → save → (optionally) verify the
    /// saved asset's Live Photo subtype.
    /// - Parameters:
    ///   - workId: Identifier for this work session's cache directory.
    ///   - sourceVideoURL: Input video to convert.
    ///   - coverImageURL: Optional still to use as the cover; nil lets the
    ///     builder extract one.
    ///   - exportParams: Trim/crop/codec settings for the build.
    ///   - progress: Stage/fraction callback; invoked for validate and
    ///     saveToAlbum stages here, earlier stages inside the builder.
    /// - Returns: The workflow result including the saved asset id.
    /// - Throws: Builder/save errors, or AppError LPB-401 when add-only
    ///   photo-library permission is not granted.
    public func buildSaveValidate(
        workId: UUID = UUID(),
        sourceVideoURL: URL,
        coverImageURL: URL? = nil,
        exportParams: ExportParams = ExportParams(),
        progress: (@Sendable (LivePhotoBuildProgress) -> Void)? = nil
    ) async throws -> LivePhotoWorkflowResult {
        let output = try await builder.buildResources(
            workId: workId,
            sourceVideoURL: sourceVideoURL,
            coverImageURL: coverImageURL,
            exportParams: exportParams,
            progress: progress
        )
        progress?(LivePhotoBuildProgress(stage: .validate, fraction: 0))
        // Pre-save check that the photo/video pair can form a Live Photo.
        let resourceOK = await validator.canCreateLivePhotoFromResources(
            photoURL: output.pairedImageURL,
            pairedVideoURL: output.pairedVideoURL
        )
        progress?(LivePhotoBuildProgress(stage: .validate, fraction: 0.3))
        // Add-only authorization is sufficient for saving; fail fast otherwise.
        let addOnlyStatus = await albumWriter.requestAddOnlyAuthorization()
        guard addOnlyStatus == .authorized else {
            throw AppError(
                code: "LPB-401",
                stage: .saveToAlbum,
                message: "无相册写入权限",
                underlyingErrorDescription: "authorizationStatus(addOnly)=\(addOnlyStatus)",
                suggestedActions: ["在系统设置中允许“添加照片”权限"]
            )
        }
        progress?(LivePhotoBuildProgress(stage: .saveToAlbum, fraction: 0))
        // shouldMoveFiles: false keeps the resources on disk so the returned
        // URLs in the result remain valid.
        let assetId = try await albumWriter.saveLivePhoto(
            photoURL: output.pairedImageURL,
            pairedVideoURL: output.pairedVideoURL,
            shouldMoveFiles: false
        )
        progress?(LivePhotoBuildProgress(stage: .saveToAlbum, fraction: 1))
        // Post-save verification needs read access; skip (leave nil) when the
        // app only holds add-only permission.
        var isLiveSubtype: Bool? = nil
        let readWriteStatus = PHPhotoLibrary.authorizationStatus(for: .readWrite)
        if readWriteStatus == .authorized || readWriteStatus == .limited {
            isLiveSubtype = await validator.isLivePhotoAsset(localIdentifier: assetId)
        }
        progress?(LivePhotoBuildProgress(stage: .validate, fraction: 1))
        return LivePhotoWorkflowResult(
            workId: output.workId,
            assetIdentifier: output.assetIdentifier,
            pairedImageURL: output.pairedImageURL,
            pairedVideoURL: output.pairedVideoURL,
            savedAssetId: assetId,
            resourceValidationOK: resourceOK,
            libraryAssetIsLivePhoto: isLiveSubtype
        )
    }
}