- 添加 PRD、技术规范、交互规范文档 (V0.2) - 创建 Swift Package 和 Xcode 项目 - 实现 LivePhotoCore 基础模块 - 添加 HEIC MakerNote 元数据写入功能 - 创建项目结构文档和任务清单 - 添加 .gitignore 忽略规则
1105 lines
48 KiB
Swift
1105 lines
48 KiB
Swift
import AVFoundation
|
||
import Foundation
|
||
import ImageIO
|
||
import os
|
||
import Photos
|
||
import UIKit
|
||
import UniformTypeIdentifiers
|
||
|
||
/// Stages of the Live Photo build pipeline, in execution order.
/// Used to attribute progress callbacks and `AppError`s to a specific step.
public enum LivePhotoBuildStage: String, Codable, Sendable {
    case normalize
    case extractKeyFrame
    case writePhotoMetadata
    case writeVideoMetadata
    case saveToAlbum
    case validate
}
|
||
|
||
/// Progress report for one stage of the build pipeline.
public struct LivePhotoBuildProgress: Sendable {
    /// The pipeline stage this progress value belongs to.
    public var stage: LivePhotoBuildStage
    /// Completion of the stage; intended range is 0...1 (not clamped here).
    public var fraction: Double

    public init(stage: LivePhotoBuildStage, fraction: Double) {
        self.stage = stage
        self.fraction = fraction
    }
}
|
||
|
||
/// Lifecycle state of a `WorkItem`.
public enum WorkStatus: String, Codable, Sendable {
    case idle
    case editing
    case processing
    case success
    case failed
}
|
||
|
||
/// Reference to a media source: either a Photos library asset or an on-disk
/// file. Each initializer sets exactly one of the two fields and nils the other.
public struct SourceRef: Codable, Sendable, Hashable {
    /// `PHAsset.localIdentifier` when the source lives in the photo library.
    public var phAssetLocalIdentifier: String?
    /// File URL when the source is an on-disk file.
    public var fileURL: URL?

    /// Creates a reference to a photo-library asset.
    public init(phAssetLocalIdentifier: String) {
        self.phAssetLocalIdentifier = phAssetLocalIdentifier
        self.fileURL = nil
    }

    /// Creates a reference to a local file.
    public init(fileURL: URL) {
        self.phAssetLocalIdentifier = nil
        self.fileURL = fileURL
    }
}
|
||
|
||
/// Whether the paired video keeps its audio track.
public enum AudioPolicy: String, Codable, Sendable {
    case keep
    case remove
}
|
||
|
||
/// Codec selection strategy for the paired video.
public enum CodecPolicy: String, Codable, Sendable {
    /// Leave the source encoding untouched.
    case passthrough
    /// Re-encode to H.264 for broader compatibility.
    case fallbackH264
}
|
||
|
||
/// How HDR source content should be handled during export.
public enum HDRPolicy: String, Codable, Sendable {
    case keep
    case toneMapToSDR
}
|
||
|
||
/// User-tunable parameters for one Live Photo export.
public struct ExportParams: Codable, Sendable, Hashable {
    /// Trim window start, in seconds of the source video (clamped to the
    /// asset duration downstream).
    public var trimStart: Double
    /// Trim window end, in seconds of the source video.
    public var trimEnd: Double
    /// Key-frame (still image) time, in seconds of the source video.
    public var keyFrameTime: Double
    /// Whether to keep or strip the audio track.
    public var audioPolicy: AudioPolicy
    /// Codec strategy for the paired video.
    public var codecPolicy: CodecPolicy
    /// How HDR content is handled.
    public var hdrPolicy: HDRPolicy
    /// Maximum output dimension (longest side), in pixels.
    public var maxDimension: Int

    public init(
        trimStart: Double = 0,
        trimEnd: Double = 1.0,
        keyFrameTime: Double = 0.5,
        audioPolicy: AudioPolicy = .keep,
        codecPolicy: CodecPolicy = .fallbackH264,
        hdrPolicy: HDRPolicy = .toneMapToSDR,
        maxDimension: Int = 1920
    ) {
        self.trimStart = trimStart
        self.trimEnd = trimEnd
        self.keyFrameTime = keyFrameTime
        self.audioPolicy = audioPolicy
        self.codecPolicy = codecPolicy
        self.hdrPolicy = hdrPolicy
        self.maxDimension = maxDimension
    }
}
|
||
|
||
/// Structured, serializable error used throughout the build pipeline.
public struct AppError: Error, Codable, Sendable, Hashable {
    /// Stable machine-readable code (e.g. "LPB-101").
    public var code: String
    /// Pipeline stage the error occurred in, when known.
    public var stage: LivePhotoBuildStage?
    /// Human-readable description of the failure.
    public var message: String
    /// `localizedDescription` of any wrapped underlying error.
    public var underlyingErrorDescription: String?
    /// Recovery suggestions suitable for presenting to the user.
    public var suggestedActions: [String]

    public init(
        code: String,
        stage: LivePhotoBuildStage? = nil,
        message: String,
        underlyingErrorDescription: String? = nil,
        suggestedActions: [String] = []
    ) {
        self.code = code
        self.stage = stage
        self.message = message
        self.underlyingErrorDescription = underlyingErrorDescription
        self.suggestedActions = suggestedActions
    }
}
|
||
|
||
/// One user job: a source video plus export parameters, status, and result.
public struct WorkItem: Identifiable, Codable, Sendable, Hashable {
    public var id: UUID
    public var createdAt: Date
    /// The video to convert.
    public var sourceVideo: SourceRef
    /// Optional user-chosen cover image; nil means a frame is extracted
    /// from the video instead.
    public var coverImage: SourceRef?
    public var exportParams: ExportParams
    public var status: WorkStatus
    /// Local identifier of the saved Photos asset, once saved.
    public var resultAssetId: String?
    /// Directory holding this job's intermediate files.
    public var cacheDir: URL
    /// Last recorded failure, if any.
    public var error: AppError?

    public init(
        id: UUID = UUID(),
        createdAt: Date = Date(),
        sourceVideo: SourceRef,
        coverImage: SourceRef? = nil,
        exportParams: ExportParams = ExportParams(),
        status: WorkStatus = .idle,
        resultAssetId: String? = nil,
        cacheDir: URL,
        error: AppError? = nil
    ) {
        self.id = id
        self.createdAt = createdAt
        self.sourceVideo = sourceVideo
        self.coverImage = coverImage
        self.exportParams = exportParams
        self.status = status
        self.resultAssetId = resultAssetId
        self.cacheDir = cacheDir
        self.error = error
    }
}
|
||
|
||
/// Canonical file locations inside one build's working directory
/// (produced by `CacheManager.makeWorkPaths`).
public struct LivePhotoWorkPaths: Sendable, Hashable {
    /// The per-work directory containing the files below.
    public var workDir: URL
    /// Destination for the paired HEIC still ("photo.heic").
    public var photoURL: URL
    /// Destination for the paired video ("paired.mov").
    public var pairedVideoURL: URL
    /// Destination for the build log ("builder.log").
    public var logURL: URL

    public init(workDir: URL, photoURL: URL, pairedVideoURL: URL, logURL: URL) {
        self.workDir = workDir
        self.photoURL = photoURL
        self.pairedVideoURL = pairedVideoURL
        self.logURL = logURL
    }
}
|
||
|
||
/// Manages the on-disk cache area that holds Live Photo build artifacts,
/// one subdirectory per work item.
public struct CacheManager: Sendable {
    /// Root directory under which per-work subdirectories are created.
    public var baseDirectory: URL

    /// Creates a cache manager rooted at `baseDirectory`, or at
    /// `<Caches>/LivePhotoBuilder` when none is supplied. The root directory
    /// is created if it does not already exist.
    public init(baseDirectory: URL? = nil) throws {
        let fileManager = FileManager.default
        let root: URL
        if let baseDirectory {
            root = baseDirectory
        } else {
            let caches = try fileManager.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: true)
            root = caches.appendingPathComponent("LivePhotoBuilder", isDirectory: true)
        }
        self.baseDirectory = root
        try fileManager.createDirectory(at: root, withIntermediateDirectories: true)
    }

    /// Creates (if needed) the working directory for `workId` and returns the
    /// canonical file locations inside it.
    public func makeWorkPaths(workId: UUID) throws -> LivePhotoWorkPaths {
        let workDir = baseDirectory.appendingPathComponent(workId.uuidString, isDirectory: true)
        try FileManager.default.createDirectory(at: workDir, withIntermediateDirectories: true)
        let photo = workDir.appendingPathComponent("photo").appendingPathExtension("heic")
        let video = workDir.appendingPathComponent("paired").appendingPathExtension("mov")
        let log = workDir.appendingPathComponent("builder").appendingPathExtension("log")
        return LivePhotoWorkPaths(workDir: workDir, photoURL: photo, pairedVideoURL: video, logURL: log)
    }

    /// Removes the working directory for `workId`, if present.
    public func clearWork(workId: UUID) throws {
        let workDir = baseDirectory.appendingPathComponent(workId.uuidString, isDirectory: true)
        guard FileManager.default.fileExists(atPath: workDir.path) else { return }
        try FileManager.default.removeItem(at: workDir)
    }
}
|
||
|
||
/// Thin wrapper around `os.Logger` that logs messages with `.public`
/// privacy so they remain visible (unredacted) in Console.
public struct LivePhotoLogger: Sendable {
    private var logger: os.Logger

    public init(subsystem: String = "ToLivePhoto", category: String = "LivePhotoCore") {
        self.logger = os.Logger(subsystem: subsystem, category: category)
    }

    /// Logs at info level; `message` is marked public — do not pass secrets.
    public func info(_ message: String) {
        logger.info("\(message, privacy: .public)")
    }

    /// Logs at error level; `message` is marked public — do not pass secrets.
    public func error(_ message: String) {
        logger.error("\(message, privacy: .public)")
    }
}
|
||
|
||
/// Writes paired Live Photo resources into the user's photo library.
public actor AlbumWriter {
    public init() {}

    /// Requests add-only photo library authorization and returns the
    /// resulting status once the user has responded.
    public func requestAddOnlyAuthorization() async -> PHAuthorizationStatus {
        await withCheckedContinuation { continuation in
            PHPhotoLibrary.requestAuthorization(for: .addOnly) { status in
                continuation.resume(returning: status)
            }
        }
    }

    /// Saves a photo + paired video as a single Live Photo asset.
    /// - Parameters:
    ///   - photoURL: HEIC still carrying the shared content identifier.
    ///   - pairedVideoURL: QuickTime movie carrying the same identifier.
    ///   - shouldMoveFiles: When true, Photos moves the files instead of copying.
    /// - Returns: The local identifier of the created asset.
    /// - Throws: `AppError` (code "LPB-401") when the library reports an error,
    ///   or when no placeholder identifier was produced.
    public func saveLivePhoto(photoURL: URL, pairedVideoURL: URL, shouldMoveFiles: Bool = false) async throws -> String {
        try await withCheckedThrowingContinuation { continuation in
            // Assigned inside the change block, read in the completion handler.
            var localIdentifier: String?

            PHPhotoLibrary.shared().performChanges({
                let request = PHAssetCreationRequest.forAsset()

                let photoOptions = PHAssetResourceCreationOptions()
                photoOptions.shouldMoveFile = shouldMoveFiles
                photoOptions.uniformTypeIdentifier = UTType.heic.identifier

                let videoOptions = PHAssetResourceCreationOptions()
                videoOptions.shouldMoveFile = shouldMoveFiles
                videoOptions.uniformTypeIdentifier = UTType.quickTimeMovie.identifier

                // .photo + .pairedVideo on one request is what makes the
                // resulting asset a Live Photo.
                request.addResource(with: .photo, fileURL: photoURL, options: photoOptions)
                request.addResource(with: .pairedVideo, fileURL: pairedVideoURL, options: videoOptions)

                localIdentifier = request.placeholderForCreatedAsset?.localIdentifier
            }, completionHandler: { success, error in
                if let error {
                    continuation.resume(throwing: AppError(code: "LPB-401", stage: .saveToAlbum, message: "保存到相册失败", underlyingErrorDescription: error.localizedDescription, suggestedActions: ["允许添加到相册权限", "稍后重试"]))
                    return
                }

                guard success, let id = localIdentifier else {
                    continuation.resume(throwing: AppError(code: "LPB-401", stage: .saveToAlbum, message: "保存到相册失败", underlyingErrorDescription: nil, suggestedActions: ["允许添加到相册权限", "稍后重试"]))
                    return
                }

                continuation.resume(returning: id)
            })
        }
    }
}
|
||
|
||
/// Validates Live Photo assets and paired resources via PhotoKit.
public actor LivePhotoValidator {
    public init() {}

    /// Returns true when the asset with `localIdentifier` exists and has the
    /// `.photoLive` media subtype.
    public func isLivePhotoAsset(localIdentifier: String) async -> Bool {
        let result = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
        guard let asset = result.firstObject else {
            return false
        }
        return asset.mediaSubtypes.contains(.photoLive)
    }

    /// Fetches a minimal-size `PHLivePhoto` for the asset, or nil when the
    /// asset is missing or the request yields nothing.
    public func requestLivePhoto(localIdentifier: String) async -> PHLivePhoto? {
        let result = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
        guard let asset = result.firstObject else {
            return nil
        }

        return await withCheckedContinuation { continuation in
            PHImageManager.default().requestLivePhoto(
                for: asset,
                targetSize: CGSize(width: 1, height: 1),
                contentMode: .aspectFit,
                options: nil
            ) { livePhoto, _ in
                continuation.resume(returning: livePhoto)
            }
        }
    }

    /// Builds a `PHLivePhoto` directly from a photo/video resource pair.
    /// - Returns: The live photo, or nil on error, cancellation, or after a
    ///   10-second timeout.
    public func requestLivePhoto(photoURL: URL, pairedVideoURL: URL) async -> PHLivePhoto? {
        await withCheckedContinuation { continuation in
            // FIX: the result handler may run on an arbitrary queue while the
            // timeout fires on main; the previous unsynchronized `hasResumed`
            // flag could let both paths resume the continuation (undefined
            // behavior). Guard the one-shot resume with a lock.
            let lock = NSLock()
            var hasResumed = false
            // Resumes at most once; returns true when this call won the race.
            func resumeOnce(_ value: PHLivePhoto?) -> Bool {
                lock.lock()
                defer { lock.unlock() }
                guard !hasResumed else { return false }
                hasResumed = true
                continuation.resume(returning: value)
                return true
            }

            let requestID = PHLivePhoto.request(
                withResourceFileURLs: [pairedVideoURL, photoURL],
                placeholderImage: nil,
                targetSize: .zero,
                contentMode: .aspectFit
            ) { livePhoto, info in
                // Ignore degraded (preview) deliveries; wait for the final one.
                if let isDegraded = info[PHLivePhotoInfoIsDegradedKey] as? Bool, isDegraded {
                    return
                }

                if let error = info[PHLivePhotoInfoErrorKey] as? Error {
                    print("[LivePhotoValidator] requestLivePhoto error: \(error.localizedDescription)")
                    _ = resumeOnce(nil)
                    return
                }

                if let cancelled = info[PHLivePhotoInfoCancelledKey] as? Bool, cancelled {
                    print("[LivePhotoValidator] requestLivePhoto cancelled")
                    _ = resumeOnce(nil)
                    return
                }

                _ = resumeOnce(livePhoto)
            }

            // Timeout guard so the continuation can never wait forever.
            DispatchQueue.main.asyncAfter(deadline: .now() + 10) {
                if resumeOnce(nil) {
                    print("[LivePhotoValidator] requestLivePhoto timeout, requestID: \(requestID)")
                    PHLivePhoto.cancelRequest(withRequestID: requestID)
                }
            }
        }
    }

    /// Convenience: true when the resource pair can produce a `PHLivePhoto`.
    public func canCreateLivePhotoFromResources(photoURL: URL, pairedVideoURL: URL) async -> Bool {
        await requestLivePhoto(photoURL: photoURL, pairedVideoURL: pairedVideoURL) != nil
    }
}
|
||
|
||
/// Result of a successful resource build: the paired files plus the shared
/// content identifier that links them into one Live Photo.
public struct LivePhotoBuildOutput: Sendable, Hashable {
    public var workId: UUID
    /// Identifier written into both the HEIC and the MOV metadata.
    public var assetIdentifier: String
    public var pairedImageURL: URL
    public var pairedVideoURL: URL

    public init(workId: UUID, assetIdentifier: String, pairedImageURL: URL, pairedVideoURL: URL) {
        self.workId = workId
        self.assetIdentifier = assetIdentifier
        self.pairedImageURL = pairedImageURL
        self.pairedVideoURL = pairedVideoURL
    }

    /// Copies the generated pair into the Documents directory (for debugging).
    /// - Returns: Destination URLs of the copied photo and video.
    /// - Throws: `CocoaError(.fileNoSuchFile)` when no Documents directory can
    ///   be resolved, or a file-system error when copying fails.
    public func exportToDocuments() throws -> (photoURL: URL, videoURL: URL) {
        // FIX: previously force-unwrapped `.first`, crashing in the unlikely
        // case the Documents directory cannot be resolved.
        guard let docs = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first else {
            throw CocoaError(.fileNoSuchFile)
        }
        let photoDestURL = docs.appendingPathComponent("debug_photo.heic")
        let videoDestURL = docs.appendingPathComponent("debug_video.mov")

        // Replace any previous debug exports.
        try? FileManager.default.removeItem(at: photoDestURL)
        try? FileManager.default.removeItem(at: videoDestURL)

        try FileManager.default.copyItem(at: pairedImageURL, to: photoDestURL)
        try FileManager.default.copyItem(at: pairedVideoURL, to: videoDestURL)

        return (photoDestURL, videoDestURL)
    }
}
|
||
|
||
public actor LivePhotoBuilder {
|
||
    /// Provides the per-work cache directories used by `buildResources`.
    private let cacheManager: CacheManager
    /// Destination for build diagnostics.
    private let logger: LivePhotoLogger

    /// Creates a builder with an optional custom cache manager and logger.
    /// - Throws: Rethrows `CacheManager()` errors when the default cache
    ///   directory cannot be created.
    public init(cacheManager: CacheManager? = nil, logger: LivePhotoLogger = LivePhotoLogger()) throws {
        self.cacheManager = try cacheManager ?? CacheManager()
        self.logger = logger
    }
|
||
|
||
    /// Builds the paired Live Photo resources (HEIC still + MOV video) for a
    /// source video, writing them into the cache work directory for `workId`.
    /// - Parameters:
    ///   - workId: Identifies the cache work directory to build into.
    ///   - sourceVideoURL: The source movie file.
    ///   - coverImageURL: Optional user-chosen cover; when nil a frame is
    ///     extracted from the trimmed video at the key-frame time.
    ///   - exportParams: Trim window / key-frame time, in seconds.
    ///   - progress: Invoked with stage + fraction updates as the build runs.
    /// - Returns: The generated file URLs plus the shared asset identifier.
    /// - Throws: `AppError` from whichever stage fails.
    public func buildResources(
        workId: UUID = UUID(),
        sourceVideoURL: URL,
        coverImageURL: URL? = nil,
        exportParams: ExportParams = ExportParams(),
        progress: (@Sendable (LivePhotoBuildProgress) -> Void)? = nil
    ) async throws -> LivePhotoBuildOutput {
        // One shared identifier links the photo and video into a pair.
        let assetIdentifier = UUID().uuidString
        let paths = try cacheManager.makeWorkPaths(workId: workId)

        progress?(LivePhotoBuildProgress(stage: .normalize, fraction: 0))
        let trimmedVideoURL = try await trimVideo(
            sourceURL: sourceVideoURL,
            trimStart: exportParams.trimStart,
            trimEnd: exportParams.trimEnd,
            destinationURL: paths.workDir.appendingPathComponent("trimmed.mov")
        )

        // Re-express the key-frame time relative to the trimmed clip's start,
        // clamped into the trimmed duration.
        let trimmedDuration = exportParams.trimEnd - exportParams.trimStart
        let relativeKeyFrameTime = min(max(0, exportParams.keyFrameTime - exportParams.trimStart), trimmedDuration)

        // Compute LivePhotoVideoIndex (needs the video's frame rate).
        // NOTE(review): uses the synchronous `tracks(withMediaType:)` API here
        // while the rest of this actor uses async `load` — consider unifying.
        let nominalFrameRateForIndex: Float = {
            let asset = AVURLAsset(url: trimmedVideoURL)
            let rate = asset.tracks(withMediaType: .video).first?.nominalFrameRate ?? 30
            return (rate.isFinite && rate > 0) ? rate : 30
        }()
        let livePhotoVideoIndex = Self.makeLivePhotoVideoIndex(
            stillImageTimeSeconds: relativeKeyFrameTime,
            nominalFrameRate: nominalFrameRateForIndex
        )

        progress?(LivePhotoBuildProgress(stage: .extractKeyFrame, fraction: 0))
        let keyPhotoURL = try await resolveKeyPhotoURL(
            videoURL: trimmedVideoURL,
            coverImageURL: coverImageURL,
            keyFrameTime: relativeKeyFrameTime,
            destinationURL: paths.workDir.appendingPathComponent("keyPhoto").appendingPathExtension("heic")
        )

        progress?(LivePhotoBuildProgress(stage: .writePhotoMetadata, fraction: 0))
        guard let pairedImageURL = addAssetID(
            assetIdentifier,
            toImage: keyPhotoURL,
            saveTo: paths.photoURL,
            livePhotoVideoIndex: livePhotoVideoIndex
        ) else {
            throw AppError(code: "LPB-201", stage: .writePhotoMetadata, message: "封面生成失败", underlyingErrorDescription: nil, suggestedActions: ["缩短时长", "降低分辨率", "重试"])
        }

        progress?(LivePhotoBuildProgress(stage: .writeVideoMetadata, fraction: 0))
        let pairedVideoURL = try await addAssetID(assetIdentifier, toVideo: trimmedVideoURL, saveTo: paths.pairedVideoURL, stillImageTimeSeconds: relativeKeyFrameTime, progress: { p in
            progress?(LivePhotoBuildProgress(stage: .writeVideoMetadata, fraction: p))
        })

        logger.info("Generated Live Photo files:")
        logger.info(" Photo: \(pairedImageURL.path)")
        logger.info(" Video: \(pairedVideoURL.path)")
        logger.info(" AssetIdentifier: \(assetIdentifier)")

        return LivePhotoBuildOutput(workId: workId, assetIdentifier: assetIdentifier, pairedImageURL: pairedImageURL, pairedVideoURL: pairedVideoURL)
    }
|
||
|
||
    /// Trims `sourceURL` to the [trimStart, trimEnd] window (seconds, clamped
    /// to the asset's duration) and exports the result as a passthrough .mov.
    /// - Throws: `AppError` (code "LPB-101") when the clip would be shorter
    ///   than 0.1 s, has no video track, or the export fails.
    private func trimVideo(sourceURL: URL, trimStart: Double, trimEnd: Double, destinationURL: URL) async throws -> URL {
        let asset = AVURLAsset(url: sourceURL)

        // Clamp the requested window into the asset's actual duration.
        let duration = try await asset.load(.duration).seconds
        let safeTrimStart = max(0, min(trimStart, duration))
        let safeTrimEnd = max(safeTrimStart, min(trimEnd, duration))

        if safeTrimEnd - safeTrimStart < 0.1 {
            throw AppError(code: "LPB-101", stage: .normalize, message: "视频时长不足", suggestedActions: ["选择更长的视频"])
        }

        let startTime = CMTime(seconds: safeTrimStart, preferredTimescale: 600)
        let endTime = CMTime(seconds: safeTrimEnd, preferredTimescale: 600)
        let timeRange = CMTimeRange(start: startTime, end: endTime)

        if FileManager.default.fileExists(atPath: destinationURL.path) {
            try FileManager.default.removeItem(at: destinationURL)
        }

        let composition = AVMutableComposition()

        guard let videoTrack = try await asset.loadTracks(withMediaType: .video).first else {
            throw AppError(code: "LPB-101", stage: .normalize, message: "视频轨道不存在", suggestedActions: ["选择其他视频"])
        }

        let compositionVideoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
        try compositionVideoTrack?.insertTimeRange(timeRange, of: videoTrack, at: .zero)

        // Audio is best-effort: failures here are deliberately swallowed.
        if let audioTrack = try? await asset.loadTracks(withMediaType: .audio).first {
            let compositionAudioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
            try? compositionAudioTrack?.insertTimeRange(timeRange, of: audioTrack, at: .zero)
        }

        let transform = try await videoTrack.load(.preferredTransform)

        // Keep the source transform so orientation stays correct.
        compositionVideoTrack?.preferredTransform = transform

        // Passthrough preset preserves original quality and dimensions.
        guard let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough) else {
            throw AppError(code: "LPB-101", stage: .normalize, message: "无法创建导出会话", suggestedActions: ["重试"])
        }

        exportSession.outputURL = destinationURL
        exportSession.outputFileType = .mov

        await exportSession.export()

        guard exportSession.status == .completed else {
            throw AppError(code: "LPB-101", stage: .normalize, message: "视频裁剪失败", underlyingErrorDescription: exportSession.error?.localizedDescription, suggestedActions: ["缩短时长", "重试"])
        }

        return destinationURL
    }
|
||
|
||
    /// Produces the HEIC key photo for the Live Photo: either the user's
    /// cover image (scaled down) or a frame extracted from the video at
    /// `keyFrameTime`, written to `destinationURL`.
    /// - Throws: `AppError` (code "LPB-201") when reading the cover, frame
    ///   extraction, or HEIC encoding fails.
    private func resolveKeyPhotoURL(
        videoURL: URL,
        coverImageURL: URL?,
        keyFrameTime: Double,
        destinationURL: URL
    ) async throws -> URL {
        // Maximum output dimension (1080p-class reference output).
        let maxDimension = 1920

        // Local helper: encode a CGImage to HEIC at the given URL.
        func writeHEIC(_ image: CGImage, to url: URL) throws {
            guard let dest = CGImageDestinationCreateWithURL(url as CFURL, UTType.heic.identifier as CFString, 1, nil) else {
                throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "写入封面失败", underlyingErrorDescription: "无法创建 HEIC 写入器", suggestedActions: ["重试"])
            }
            let props: [String: Any] = [
                kCGImageDestinationLossyCompressionQuality as String: 0.9
            ]
            CGImageDestinationAddImage(dest, image, props as CFDictionary)
            guard CGImageDestinationFinalize(dest) else {
                throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "写入封面失败", underlyingErrorDescription: "HEIC 写入失败", suggestedActions: ["重试"])
            }
        }

        // Local helper: downscale so the longest side is at most `maxDim`;
        // returns the input unchanged when it is already small enough.
        func scaleImage(_ image: CGImage, maxDim: Int) -> CGImage {
            let width = image.width
            let height = image.height
            let maxSide = max(width, height)
            if maxSide <= maxDim { return image }

            let scale = CGFloat(maxDim) / CGFloat(maxSide)
            let newWidth = Int(CGFloat(width) * scale)
            let newHeight = Int(CGFloat(height) * scale)

            // Fall back to the original image when a context can't be made.
            guard let context = CGContext(
                data: nil, width: newWidth, height: newHeight,
                bitsPerComponent: 8, bytesPerRow: 0,
                space: CGColorSpaceCreateDeviceRGB(),
                bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue
            ) else { return image }

            context.interpolationQuality = .high
            context.draw(image, in: CGRect(x: 0, y: 0, width: newWidth, height: newHeight))
            return context.makeImage() ?? image
        }

        // Path 1: the user supplied a cover image.
        if let coverImageURL {
            guard let src = CGImageSourceCreateWithURL(coverImageURL as CFURL, nil),
                  let img = CGImageSourceCreateImageAtIndex(src, 0, nil) else {
                throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "封面读取失败", underlyingErrorDescription: nil, suggestedActions: ["更换封面图", "重试"])
            }
            let scaledImg = scaleImage(img, maxDim: maxDimension)
            try writeHEIC(scaledImg, to: destinationURL)
            return destinationURL
        }

        // Path 2: extract a frame from the video.
        let asset = AVURLAsset(url: videoURL)
        let imageGenerator = AVAssetImageGenerator(asset: asset)
        imageGenerator.appliesPreferredTrackTransform = true
        imageGenerator.requestedTimeToleranceAfter = CMTime(value: 1, timescale: 100)
        imageGenerator.requestedTimeToleranceBefore = CMTime(value: 1, timescale: 100)
        // Let AVAssetImageGenerator do the downscaling itself.
        imageGenerator.maximumSize = CGSize(width: maxDimension, height: maxDimension)

        // Keep the request at least 0.1 s away from the end of the clip.
        let safeSeconds = max(0, min(keyFrameTime, max(0, asset.duration.seconds - 0.1)))
        let time = CMTime(seconds: safeSeconds, preferredTimescale: asset.duration.timescale)

        let cgImage: CGImage
        do {
            cgImage = try imageGenerator.copyCGImage(at: time, actualTime: nil)
        } catch {
            throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "抽帧失败", underlyingErrorDescription: error.localizedDescription, suggestedActions: ["缩短时长", "降低分辨率", "重试"])
        }

        try writeHEIC(cgImage, to: destinationURL)
        return destinationURL
    }
|
||
|
||
/// 计算 LivePhotoVideoIndex:逆向工程推测为 Float32 帧索引的 bitPattern
|
||
private static func makeLivePhotoVideoIndex(stillImageTimeSeconds: Double, nominalFrameRate: Float) -> Int64 {
|
||
let safeFrameRate: Float = (nominalFrameRate.isFinite && nominalFrameRate > 0) ? nominalFrameRate : 30
|
||
let frameIndex = Float(stillImageTimeSeconds) * safeFrameRate
|
||
return Int64(frameIndex.bitPattern)
|
||
}
|
||
|
||
private func addAssetID(
|
||
_ assetIdentifier: String,
|
||
toImage imageURL: URL,
|
||
saveTo destinationURL: URL,
|
||
livePhotoVideoIndex: Int64
|
||
) -> URL? {
|
||
let useHEIC = true
|
||
let imageType = useHEIC ? UTType.heic.identifier : UTType.jpeg.identifier
|
||
|
||
guard let imageDestination = CGImageDestinationCreateWithURL(destinationURL as CFURL, imageType as CFString, 1, nil),
|
||
let imageSource = CGImageSourceCreateWithURL(imageURL as CFURL, nil),
|
||
let imageRef = CGImageSourceCreateImageAtIndex(imageSource, 0, nil) else {
|
||
return nil
|
||
}
|
||
|
||
var imageProperties = (CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [String: Any]) ?? [:]
|
||
|
||
// 获取图像尺寸
|
||
let width = imageRef.width
|
||
let height = imageRef.height
|
||
|
||
// 添加 TIFF (IFD0) 标准字段 - 对标竞品
|
||
var tiffDict = (imageProperties[kCGImagePropertyTIFFDictionary as String] as? [String: Any]) ?? [:]
|
||
tiffDict[kCGImagePropertyTIFFOrientation as String] = 1 // Horizontal (normal)
|
||
tiffDict[kCGImagePropertyTIFFXResolution as String] = 72
|
||
tiffDict[kCGImagePropertyTIFFYResolution as String] = 72
|
||
tiffDict[kCGImagePropertyTIFFResolutionUnit as String] = 2 // inches
|
||
// 移除 Tile 字段 - 竞品没有这些字段
|
||
tiffDict.removeValue(forKey: kCGImagePropertyTIFFTileWidth as String)
|
||
tiffDict.removeValue(forKey: kCGImagePropertyTIFFTileLength as String)
|
||
imageProperties[kCGImagePropertyTIFFDictionary as String] = tiffDict
|
||
|
||
// 添加 EXIF 标准字段 - 对标竞品
|
||
var exifDict = (imageProperties[kCGImagePropertyExifDictionary as String] as? [String: Any]) ?? [:]
|
||
exifDict[kCGImagePropertyExifVersion as String] = [2, 2, 1] // 0221
|
||
exifDict[kCGImagePropertyExifPixelXDimension as String] = width
|
||
exifDict[kCGImagePropertyExifPixelYDimension as String] = height
|
||
imageProperties[kCGImagePropertyExifDictionary as String] = exifDict
|
||
|
||
// 简化方案:只设置 ContentIdentifier,不注入复杂的 MakerNotes
|
||
// 竞品也只使用 ContentIdentifier,这足以让 Photos 识别 Live Photo
|
||
let assetIdentifierKey = "17" // Content Identifier
|
||
|
||
var makerAppleDict: [String: Any] = [:]
|
||
makerAppleDict[assetIdentifierKey] = assetIdentifier
|
||
imageProperties[kCGImagePropertyMakerAppleDictionary as String] = makerAppleDict
|
||
|
||
CGImageDestinationAddImage(imageDestination, imageRef, imageProperties as CFDictionary)
|
||
|
||
guard CGImageDestinationFinalize(imageDestination) else {
|
||
return nil
|
||
}
|
||
|
||
logger.info("Created HEIC with ContentIdentifier: \(assetIdentifier)")
|
||
return destinationURL
|
||
}
|
||
|
||
    /// Re-encodes the trimmed video into a QuickTime movie carrying the Live
    /// Photo metadata: a top-level content identifier plus a timed
    /// "still-image-time" metadata track marking the key frame.
    /// - Parameters:
    ///   - assetIdentifier: Identifier shared with the paired HEIC.
    ///   - videoURL: Source (already trimmed) video.
    ///   - destinationURL: Output .mov location (replaced if present).
    ///   - stillImageTimeSeconds: Key-frame time relative to the clip start.
    ///   - progress: Called with an approximate 0...1 fraction per video frame.
    /// - Returns: `destinationURL` on success.
    /// - Throws: `AppError` (code "LPB-301") on any reader/writer failure.
    private func addAssetID(
        _ assetIdentifier: String,
        toVideo videoURL: URL,
        saveTo destinationURL: URL,
        stillImageTimeSeconds: Double,
        progress: @Sendable @escaping (Double) -> Void
    ) async throws -> URL {
        try await withCheckedThrowingContinuation { continuation in
            let queue = DispatchQueue(label: "LivePhotoCore.VideoPairing")
            queue.async {
                do {
                    if FileManager.default.fileExists(atPath: destinationURL.path) {
                        try FileManager.default.removeItem(at: destinationURL)
                    }

                    let videoAsset = AVURLAsset(url: videoURL)
                    guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else {
                        continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: "缺少视频轨", suggestedActions: ["更换一个视频", "重试"]))
                        return
                    }

                    // Frame-count estimate used only for progress reporting.
                    let durationSeconds = max(0.001, videoAsset.duration.seconds)
                    let nominalFrameRate = videoTrack.nominalFrameRate > 0 ? videoTrack.nominalFrameRate : 30
                    let frameCount = max(1, Int(durationSeconds * Double(nominalFrameRate)))

                    // Reference output carries no rotation; orientation is baked
                    // into the frames. Compute the post-transform display size.
                    let transform = videoTrack.preferredTransform
                    let naturalSize = videoTrack.naturalSize

                    // 90°/270° rotations swap width and height.
                    let isRotated90or270 = abs(transform.b) == 1.0 && abs(transform.c) == 1.0
                    let transformedSize: CGSize
                    if isRotated90or270 {
                        transformedSize = CGSize(width: naturalSize.height, height: naturalSize.width)
                    } else {
                        transformedSize = naturalSize
                    }

                    // Cap the longest side at 1920 (≈1080p reference output).
                    let maxDimension: CGFloat = 1920
                    let maxSide = max(transformedSize.width, transformedSize.height)
                    let scale: CGFloat = maxSide > maxDimension ? maxDimension / maxSide : 1.0
                    // NOTE(review): outputWidth/outputHeight are computed but never
                    // used — the writer below sizes from naturalSize instead.
                    // Confirm which is intended.
                    let outputWidth = Int(transformedSize.width * scale)
                    let outputHeight = Int(transformedSize.height * scale)

                    let assetWriter = try AVAssetWriter(outputURL: destinationURL, fileType: .mov)
                    let videoReader = try AVAssetReader(asset: videoAsset)

                    let videoReaderSettings: [String: Any] = [
                        kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)
                    ]
                    let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings)
                    videoReader.add(videoReaderOutput)

                    // HEVC (H.265) — the codec native Live Photos use.
                    let videoWriterInput = AVAssetWriterInput(
                        mediaType: .video,
                        outputSettings: [
                            AVVideoCodecKey: AVVideoCodecType.hevc,
                            AVVideoWidthKey: Int(naturalSize.width * scale),
                            AVVideoHeightKey: Int(naturalSize.height * scale),
                            AVVideoCompressionPropertiesKey: [
                                AVVideoAverageBitRateKey: 8_000_000,
                                AVVideoQualityKey: 0.8
                            ]
                        ]
                    )
                    // Keep the original transform so orientation is preserved.
                    videoWriterInput.transform = transform
                    videoWriterInput.expectsMediaDataInRealTime = false
                    assetWriter.add(videoWriterInput)

                    var audioReader: AVAssetReader?
                    var audioReaderOutput: AVAssetReaderOutput?
                    var audioWriterInput: AVAssetWriterInput?

                    // Pass the audio track through untouched, when present.
                    if let audioTrack = videoAsset.tracks(withMediaType: .audio).first {
                        let _audioReader = try AVAssetReader(asset: videoAsset)
                        let _audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil)
                        _audioReader.add(_audioReaderOutput)
                        audioReader = _audioReader
                        audioReaderOutput = _audioReaderOutput

                        let _audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
                        _audioWriterInput.expectsMediaDataInRealTime = false
                        assetWriter.add(_audioWriterInput)
                        audioWriterInput = _audioWriterInput
                    }

                    let assetIdentifierMetadata = Self.metadataForAssetID(assetIdentifier)
                    let stillImageTimeMetadataAdapter = Self.createMetadataAdaptorForStillImageTime()

                    // Only the content identifier is required at the top level.
                    assetWriter.metadata = [assetIdentifierMetadata]

                    // Only the still-image-time track is added (stable fallback;
                    // the live-photo-info track was removed).
                    assetWriter.add(stillImageTimeMetadataAdapter.assetWriterInput)

                    assetWriter.startWriting()
                    assetWriter.startSession(atSourceTime: .zero)

                    // still-image-time track: write a single item (stable fallback).
                    let stillTimeRange = videoAsset.makeStillImageTimeRange(seconds: stillImageTimeSeconds, frameCountHint: frameCount)
                    stillImageTimeMetadataAdapter.append(AVTimedMetadataGroup(
                        items: [Self.metadataItemForStillImageTime()],
                        timeRange: stillTimeRange
                    ))

                    // NOTE(review): these flags are mutated from two different
                    // writer-input callback queues without synchronization; a
                    // race could in principle resume the continuation twice or
                    // never — consider guarding with a lock.
                    var writingVideoFinished = false
                    var writingAudioFinished = audioReader == nil
                    var currentFrameCount = 0

                    // Finishes the writer once both tracks are done, then resumes
                    // the continuation with the result.
                    func didCompleteWriting() {
                        guard writingAudioFinished && writingVideoFinished else { return }
                        assetWriter.finishWriting {
                            if assetWriter.status == .completed {
                                continuation.resume(returning: destinationURL)
                            } else {
                                continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: assetWriter.error?.localizedDescription, suggestedActions: ["切换到 H.264 兼容导出", "关闭音频", "重试"]))
                            }
                        }
                    }

                    if videoReader.startReading() {
                        videoWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "LivePhotoCore.VideoWriterInput")) {
                            while videoWriterInput.isReadyForMoreMediaData {
                                guard videoReader.status == .reading else {
                                    videoWriterInput.markAsFinished()
                                    writingVideoFinished = true
                                    didCompleteWriting()
                                    break
                                }
                                if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
                                    currentFrameCount += 1
                                    let pct = Double(currentFrameCount) / Double(frameCount)
                                    progress(pct)

                                    // Append the frame; on failure abort the reader.
                                    if !videoWriterInput.append(sampleBuffer) {
                                        videoReader.cancelReading()
                                    }
                                } else {
                                    // Reader exhausted: finish the video track.
                                    videoWriterInput.markAsFinished()
                                    writingVideoFinished = true
                                    didCompleteWriting()
                                    break
                                }
                            }
                        }
                    } else {
                        writingVideoFinished = true
                        didCompleteWriting()
                    }

                    if let audioReader, let audioWriterInput, audioReader.startReading() {
                        audioWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "LivePhotoCore.AudioWriterInput")) {
                            while audioWriterInput.isReadyForMoreMediaData {
                                guard audioReader.status == .reading else {
                                    audioWriterInput.markAsFinished()
                                    writingAudioFinished = true
                                    didCompleteWriting()
                                    return
                                }
                                guard let sampleBuffer = audioReaderOutput?.copyNextSampleBuffer() else {
                                    audioWriterInput.markAsFinished()
                                    writingAudioFinished = true
                                    didCompleteWriting()
                                    return
                                }
                                _ = audioWriterInput.append(sampleBuffer)
                            }
                        }
                    } else {
                        writingAudioFinished = true
                        didCompleteWriting()
                    }
                } catch {
                    continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: error.localizedDescription, suggestedActions: ["切换到 H.264 兼容导出", "关闭音频", "重试"]))
                }
            }
        }
    }
|
||
|
||
private static func metadataForAssetID(_ assetIdentifier: String) -> AVMetadataItem {
|
||
let item = AVMutableMetadataItem()
|
||
item.key = "com.apple.quicktime.content.identifier" as (NSCopying & NSObjectProtocol)
|
||
item.keySpace = AVMetadataKeySpace(rawValue: "mdta")
|
||
item.value = assetIdentifier as (NSCopying & NSObjectProtocol)
|
||
item.dataType = "com.apple.metadata.datatype.UTF-8"
|
||
return item
|
||
}
|
||
|
||
/// Creates the timed-metadata writer input (wrapped in an adaptor) that
/// declares a single `mdta/com.apple.quicktime.still-image-time` key with an
/// int8 payload. Only this one key is declared (stable fallback configuration).
private static func createMetadataAdaptorForStillImageTime() -> AVAssetWriterInputMetadataAdaptor {
    let identifier = "mdta/com.apple.quicktime.still-image-time"
    let specification: NSDictionary = [
        kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString: identifier,
        kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString: "com.apple.metadata.datatype.int8"
    ]

    var formatDescription: CMFormatDescription?
    CMMetadataFormatDescriptionCreateWithMetadataSpecifications(
        allocator: kCFAllocatorDefault,
        metadataType: kCMMetadataFormatType_Boxed,
        metadataSpecifications: [specification] as CFArray,
        formatDescriptionOut: &formatDescription
    )

    let metadataInput = AVAssetWriterInput(
        mediaType: .metadata,
        outputSettings: nil,
        sourceFormatHint: formatDescription
    )
    return AVAssetWriterInputMetadataAdaptor(assetWriterInput: metadataInput)
}
|
||
|
||
/// Still-image-time item carrying the 89-byte payload observed in the
/// reference (competitor) app: record 1 (9 B, still-image-time = -1)
/// followed by record 2 (80 B, a 3x3 transform matrix).
private static func metadataItemForStillImageTimeWithTransform() -> AVMetadataItem {
    let stillTimeItem = AVMutableMetadataItem()
    stillTimeItem.keySpace = AVMetadataKeySpace(rawValue: "mdta")
    stillTimeItem.key = "com.apple.quicktime.still-image-time" as (NSCopying & NSObjectProtocol)
    stillTimeItem.value = stillImageTime89BytesPayload() as NSData
    stillTimeItem.dataType = "com.apple.metadata.datatype.raw-data"
    return stillTimeItem
}
|
||
|
||
/// Assembles the 89-byte payload matching the reference app's layout:
/// a 9-byte still-image-time record followed by an 80-byte transform record.
private static func stillImageTime89BytesPayload() -> Data {
    var payload = Data()
    payload.reserveCapacity(89)

    // Record 1 — still-image-time (9 bytes):
    // size    = UInt32 big-endian 0x00000009
    // keyIdx  = UInt32 big-endian 0x00000001
    // value   = Int8 0xFF (-1)
    payload.append(contentsOf: [0x00, 0x00, 0x00, 0x09,
                                0x00, 0x00, 0x00, 0x01,
                                0xFF])

    // Record 2 — transform (80 bytes):
    // size    = UInt32 big-endian 0x00000050 (80)
    // keyIdx  = UInt32 big-endian 0x00000002
    // body    = 3x3 identity matrix as nine big-endian Float64 values (72 bytes)
    payload.append(contentsOf: [0x00, 0x00, 0x00, 0x50,
                                0x00, 0x00, 0x00, 0x02])
    let identityMatrix: [Double] = [1, 0, 0, 0, 1, 0, 0, 0, 1]
    for element in identityMatrix {
        let bigEndianBits = element.bitPattern.bigEndian
        withUnsafeBytes(of: bigEndianBits) { payload.append(contentsOf: $0) }
    }

    return payload // 9 + 80 = 89 bytes total
}
|
||
|
||
/// Minimal still-image-time item with an int8 value of -1.
/// (-1 matches the reference app; per the original note, 0 was tried earlier
/// and did not work, so the value was switched back to -1.)
private static func metadataItemForStillImageTime() -> AVMetadataItem {
    let stillTimeItem = AVMutableMetadataItem()
    stillTimeItem.keySpace = AVMetadataKeySpace(rawValue: "mdta")
    stillTimeItem.key = "com.apple.quicktime.still-image-time" as (NSCopying & NSObjectProtocol)
    stillTimeItem.dataType = "com.apple.metadata.datatype.int8"
    stillTimeItem.value = NSNumber(value: Int8(-1)) as (NSCopying & NSObjectProtocol)
    return stillTimeItem
}
|
||
|
||
/// Transform item carrying a 3x3 identity matrix encoded as 72 bytes of
/// big-endian Float64 values (raw-data payload).
private static func metadataItemForStillImageTransform() -> AVMetadataItem {
    let transformItem = AVMutableMetadataItem()
    transformItem.keySpace = AVMetadataKeySpace(rawValue: "mdta")
    transformItem.key = "com.apple.quicktime.live-photo-still-image-transform" as (NSCopying & NSObjectProtocol)
    transformItem.value = livePhotoStillImageTransformIdentityData() as NSData
    transformItem.dataType = "com.apple.metadata.datatype.raw-data"
    return transformItem
}
|
||
|
||
/// Encodes a 3x3 identity matrix ([1,0,0, 0,1,0, 0,0,1]) as nine
/// big-endian Float64 values — 72 bytes in total.
private static func livePhotoStillImageTransformIdentityData() -> Data {
    let identityMatrix: [Double] = [1, 0, 0, 0, 1, 0, 0, 0, 1]
    var encoded = Data(capacity: identityMatrix.count * MemoryLayout<UInt64>.size)
    for element in identityMatrix {
        let bigEndianBits = element.bitPattern.bigEndian
        withUnsafeBytes(of: bigEndianBits) { encoded.append(contentsOf: $0) }
    }
    return encoded // 72 bytes
}
|
||
|
||
// MARK: - Live Photo Info Track (per-frame timed metadata, mirroring the reference app)

/// The live-photo-info payload is intentionally left empty for now so the
/// live-photo-info track is skipped entirely until the basic pipeline is
/// confirmed working; `metadataItemForLivePhotoInfo()` wraps this value.
private static let livePhotoInfoPayload: Data = Data()
|
||
|
||
/// Creates the timed-metadata writer input (wrapped in an adaptor) that
/// declares the `mdta/com.apple.quicktime.live-photo-info` key with a
/// raw-data payload.
private static func createMetadataAdaptorForLivePhotoInfo() -> AVAssetWriterInputMetadataAdaptor {
    let identifier = "mdta/com.apple.quicktime.live-photo-info"
    let specification: NSDictionary = [
        kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString: identifier,
        kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString: "com.apple.metadata.datatype.raw-data"
    ]

    var formatDescription: CMFormatDescription?
    CMMetadataFormatDescriptionCreateWithMetadataSpecifications(
        allocator: kCFAllocatorDefault,
        metadataType: kCMMetadataFormatType_Boxed,
        metadataSpecifications: [specification] as CFArray,
        formatDescriptionOut: &formatDescription
    )

    let metadataInput = AVAssetWriterInput(
        mediaType: .metadata,
        outputSettings: nil,
        sourceFormatHint: formatDescription
    )
    return AVAssetWriterInputMetadataAdaptor(assetWriterInput: metadataInput)
}
|
||
|
||
/// Wraps `livePhotoInfoPayload` in an `mdta/com.apple.quicktime.live-photo-info`
/// raw-data metadata item.
private static func metadataItemForLivePhotoInfo() -> AVMetadataItem {
    let infoItem = AVMutableMetadataItem()
    infoItem.keySpace = AVMetadataKeySpace(rawValue: "mdta")
    infoItem.key = "com.apple.quicktime.live-photo-info" as (NSCopying & NSObjectProtocol)
    infoItem.dataType = "com.apple.metadata.datatype.raw-data"
    infoItem.value = livePhotoInfoPayload as NSData
    return infoItem
}
|
||
|
||
/// "Sample Time" item with the fixed UTF-8 value "0 s".
private static func metadataForSampleTime() -> AVMetadataItem {
    let sampleTimeItem = AVMutableMetadataItem()
    sampleTimeItem.keySpace = AVMetadataKeySpace(rawValue: "mdta")
    sampleTimeItem.key = "Sample Time" as (NSCopying & NSObjectProtocol)
    sampleTimeItem.dataType = "com.apple.metadata.datatype.UTF-8"
    sampleTimeItem.value = "0 s" as (NSCopying & NSObjectProtocol)
    return sampleTimeItem
}
|
||
|
||
/// "Sample Duration" item with the fixed UTF-8 value "0.03 s".
private static func metadataForSampleDuration() -> AVMetadataItem {
    let sampleDurationItem = AVMutableMetadataItem()
    sampleDurationItem.keySpace = AVMetadataKeySpace(rawValue: "mdta")
    sampleDurationItem.key = "Sample Duration" as (NSCopying & NSObjectProtocol)
    sampleDurationItem.dataType = "com.apple.metadata.datatype.UTF-8"
    sampleDurationItem.value = "0.03 s" as (NSCopying & NSObjectProtocol)
    return sampleDurationItem
}
|
||
}
|
||
|
||
/// Outcome of a full build → save → validate Live Photo workflow run.
public struct LivePhotoWorkflowResult: Sendable, Hashable {
    /// Identifier of the build job that produced this result.
    public var workId: UUID
    /// Content identifier shared by the paired photo and video resources.
    public var assetIdentifier: String
    /// Local URL of the generated still image resource.
    public var pairedImageURL: URL
    /// Local URL of the generated paired video resource.
    public var pairedVideoURL: URL
    /// `PHAsset` local identifier assigned when the pair was saved to the library.
    public var savedAssetId: String
    /// Whether `PHLivePhoto` accepted the resource pair during pre-save validation.
    public var resourceValidationOK: Bool
    /// Whether the saved library asset carries the Live Photo subtype;
    /// `nil` when read access was unavailable and the check was skipped.
    public var libraryAssetIsLivePhoto: Bool?

    public init(
        workId: UUID,
        assetIdentifier: String,
        pairedImageURL: URL,
        pairedVideoURL: URL,
        savedAssetId: String,
        resourceValidationOK: Bool,
        libraryAssetIsLivePhoto: Bool?
    ) {
        self.workId = workId
        self.assetIdentifier = assetIdentifier
        self.pairedImageURL = pairedImageURL
        self.pairedVideoURL = pairedVideoURL
        self.savedAssetId = savedAssetId
        self.resourceValidationOK = resourceValidationOK
        self.libraryAssetIsLivePhoto = libraryAssetIsLivePhoto
    }
}
|
||
|
||
/// End-to-end orchestrator: builds the Live Photo resource pair, validates it,
/// saves it to the photo library, and (when read access allows) confirms the
/// saved asset carries the Live Photo subtype.
public actor LivePhotoWorkflow {
    private let builder: LivePhotoBuilder
    private let albumWriter: AlbumWriter
    private let validator: LivePhotoValidator

    /// Creates the workflow with an optional shared cache manager.
    /// - Throws: rethrows `CacheManager()` / `LivePhotoBuilder` initialization failures.
    public init(cacheManager: CacheManager? = nil, logger: LivePhotoLogger = LivePhotoLogger()) throws {
        let cm = try cacheManager ?? CacheManager()
        self.builder = try LivePhotoBuilder(cacheManager: cm, logger: logger)
        self.albumWriter = AlbumWriter()
        self.validator = LivePhotoValidator()
    }

    /// Runs the full pipeline: build resources → pre-validate → check add-only
    /// permission → save to album → post-validate the saved asset.
    /// - Parameters:
    ///   - sourceVideoURL: Source video to convert.
    ///   - coverImageURL: Optional custom cover; when `nil` the builder extracts a key frame.
    ///   - exportParams: Trim / key-frame / codec options passed to the builder.
    ///   - progress: Stage-and-fraction callback; note the `.validate` stage is
    ///     reported both before the save (resource check) and after it (library check).
    /// - Throws: builder errors, or `AppError` LPB-401 when add-only photo
    ///   library permission is not granted.
    public func buildSaveValidate(
        sourceVideoURL: URL,
        coverImageURL: URL? = nil,
        exportParams: ExportParams = ExportParams(),
        progress: (@Sendable (LivePhotoBuildProgress) -> Void)? = nil
    ) async throws -> LivePhotoWorkflowResult {
        let output = try await builder.buildResources(
            sourceVideoURL: sourceVideoURL,
            coverImageURL: coverImageURL,
            exportParams: exportParams,
            progress: progress
        )

        // Debug only: copy the generated pair into the Documents directory
        // for inspection; failures are deliberately ignored (best effort).
        #if DEBUG
        if let (debugPhoto, debugVideo) = try? output.exportToDocuments() {
            print("[DEBUG] Exported files to Documents:")
            print(" Photo: \(debugPhoto.path)")
            print(" Video: \(debugVideo.path)")
        }
        #endif

        // Pre-save validation: ask PhotoKit whether the pair is acceptable.
        // The result is carried in the workflow result rather than aborting.
        progress?(LivePhotoBuildProgress(stage: .validate, fraction: 0))
        let resourceOK = await validator.canCreateLivePhotoFromResources(
            photoURL: output.pairedImageURL,
            pairedVideoURL: output.pairedVideoURL
        )
        progress?(LivePhotoBuildProgress(stage: .validate, fraction: 0.3))

        // Add-only library permission is required before attempting the save.
        let addOnlyStatus = await albumWriter.requestAddOnlyAuthorization()
        guard addOnlyStatus == .authorized else {
            throw AppError(
                code: "LPB-401",
                stage: .saveToAlbum,
                message: "无相册写入权限",
                underlyingErrorDescription: "authorizationStatus(addOnly)=\(addOnlyStatus)",
                suggestedActions: ["在系统设置中允许“添加照片”权限"]
            )
        }

        progress?(LivePhotoBuildProgress(stage: .saveToAlbum, fraction: 0))
        // Files are copied, not moved, so the cache copies remain usable afterwards.
        let assetId = try await albumWriter.saveLivePhoto(
            photoURL: output.pairedImageURL,
            pairedVideoURL: output.pairedVideoURL,
            shouldMoveFiles: false
        )
        progress?(LivePhotoBuildProgress(stage: .saveToAlbum, fraction: 1))

        // Post-save check needs read access; skipped (left nil) without it
        // so the save itself never triggers an extra permission prompt.
        var isLiveSubtype: Bool? = nil
        let readWriteStatus = PHPhotoLibrary.authorizationStatus(for: .readWrite)
        if readWriteStatus == .authorized || readWriteStatus == .limited {
            isLiveSubtype = await validator.isLivePhotoAsset(localIdentifier: assetId)
        }
        progress?(LivePhotoBuildProgress(stage: .validate, fraction: 1))

        return LivePhotoWorkflowResult(
            workId: output.workId,
            assetIdentifier: output.assetIdentifier,
            pairedImageURL: output.pairedImageURL,
            pairedVideoURL: output.pairedVideoURL,
            savedAssetId: assetId,
            resourceValidationOK: resourceOK,
            libraryAssetIsLivePhoto: isLiveSubtype
        )
    }
}
|
||
|
||
private extension AVAsset {
    /// Builds the time range used to tag the still-image moment in the video.
    ///
    /// The requested time is clamped into the asset's duration with a 1 ms
    /// safety margin at the end. The returned range spans a single timescale
    /// tick (duration_ts = 1), not one frame: wallpaper validation is stricter
    /// than the photo library and expects still-image-time to be an
    /// instantaneous marker, matching the reference app.
    /// - Note: `frameCountHint` is currently unused; it is kept so the
    ///   call-site interface stays stable.
    func makeStillImageTimeRange(seconds: Double, frameCountHint: Int) -> CMTimeRange {
        let assetDuration = self.duration

        // Clamp into [0, duration - 1 ms]; equivalent to
        // max(0, min(seconds, max(0, duration - 0.001))).
        let upperBound = max(0, assetDuration.seconds - 0.001)
        let clampedSeconds = min(max(0, seconds), upperBound)

        var markerTime = CMTime(seconds: clampedSeconds, preferredTimescale: assetDuration.timescale)
        if markerTime > assetDuration {
            markerTime = assetDuration
        }

        // Single-tick duration: an instantaneous marker, not a one-frame span.
        let oneTick = CMTime(value: 1, timescale: assetDuration.timescale)
        return CMTimeRange(start: markerTime, duration: oneTick)
    }
}
|