to-live-photo/Sources/LivePhotoCore/LivePhotoCore.swift
64cdb82459 feat(M1): finish aspect-ratio templates, crop gestures, and cancellation policy
Main changes:
- EditorView: add five aspect-ratio templates (original / lock screen / full screen / 4:3 / 1:1)
- EditorView: implement crop preview (semi-transparent mask + crop frame) plus pinch-to-zoom and drag gestures
- LivePhotoCore: add CropRect and AspectRatioTemplate to ExportParams
- LivePhotoCore: scaleVideoToTargetDuration now supports cropping and ratio-constrained output
- AppState: add a task cancellation mechanism (cancelProcessing)
- ProcessingView: add a cancel button and a cancelled-state display
- CacheManager: add the silent removeWorkDir cleanup method
- Analytics: add the buildLivePhotoCancel event

All M1 editing capabilities are now complete:
- Aspect-ratio templates: lock screen / full screen / 4:3 / 1:1 / original
- Crop gestures: pinch-to-zoom + drag
- Cancellation policy: terminate the task + clean up intermediate files

import AVFoundation
import Foundation
import ImageIO
import os
import Photos
import UIKit
import UniformTypeIdentifiers
import VideoToolbox
public enum LivePhotoBuildStage: String, Codable, Sendable {
case normalize
case extractKeyFrame
case writePhotoMetadata
case writeVideoMetadata
case saveToAlbum
case validate
}
public struct LivePhotoBuildProgress: Sendable {
public var stage: LivePhotoBuildStage
public var fraction: Double
public init(stage: LivePhotoBuildStage, fraction: Double) {
self.stage = stage
self.fraction = fraction
}
}
public enum WorkStatus: String, Codable, Sendable {
case idle
case editing
case processing
case success
case failed
}
public struct SourceRef: Codable, Sendable, Hashable {
public var phAssetLocalIdentifier: String?
public var fileURL: URL?
public init(phAssetLocalIdentifier: String) {
self.phAssetLocalIdentifier = phAssetLocalIdentifier
self.fileURL = nil
}
public init(fileURL: URL) {
self.phAssetLocalIdentifier = nil
self.fileURL = fileURL
}
}
public enum AudioPolicy: String, Codable, Sendable {
case keep
case remove
}
public enum CodecPolicy: String, Codable, Sendable {
case passthrough
case fallbackH264
}
public enum HDRPolicy: String, Codable, Sendable {
case keep
case toneMapToSDR
}
/// A crop region in normalized coordinates; every component lies in 0~1.
public struct CropRect: Codable, Sendable, Hashable {
public var x: CGFloat // normalized x origin, 0~1
public var y: CGFloat // normalized y origin, 0~1
public var width: CGFloat // normalized width, 0~1
public var height: CGFloat // normalized height, 0~1
public init(x: CGFloat = 0, y: CGFloat = 0, width: CGFloat = 1, height: CGFloat = 1) {
self.x = x
self.y = y
self.width = width
self.height = height
}
/// The identity crop covering the whole frame.
public static let full = CropRect()
/// Maps the normalized crop into pixel space for the given video size.
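///
/// A minimal usage sketch (values illustrative):
/// ```swift
/// let crop = CropRect(x: 0.1, y: 0.2, width: 0.5, height: 0.5)
/// let rect = crop.toPixelRect(videoSize: CGSize(width: 1080, height: 1920))
/// // rect == CGRect(x: 108, y: 384, width: 540, height: 960)
/// ```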
public func toPixelRect(videoSize: CGSize) -> CGRect {
CGRect(
x: x * videoSize.width,
y: y * videoSize.height,
width: width * videoSize.width,
height: height * videoSize.height
)
}
}
/// Aspect-ratio presets offered by the editor.
public enum AspectRatioTemplate: String, Codable, Sendable, CaseIterable {
case original = "original" // keep the source aspect ratio
case lockScreen = "lock_screen" // iPhone lock screen, 9:19.5
case fullScreen = "full_screen" // 9:16
case classic = "classic" // 4:3
case square = "square" // 1:1
public var displayName: String {
switch self {
case .original: return "原比例"
case .lockScreen: return "锁屏"
case .fullScreen: return "全屏"
case .classic: return "4:3"
case .square: return "1:1"
}
}
public var ratio: CGFloat? {
switch self {
case .original: return nil
case .lockScreen: return 9.0 / 19.5
case .fullScreen: return 9.0 / 16.0
case .classic: return 3.0 / 4.0
case .square: return 1.0
}
}
}
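// Worked example (based on how scaleVideoToTargetDuration below derives its
// render size from a fixed 1080pt width): .lockScreen yields a height of
// 1080 / (9.0 / 19.5) = 2340, which the exporter clamps to 1920, while
// .square yields 1080 / 1.0 = 1080, i.e. a 1080x1080 canvas.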
public struct ExportParams: Codable, Sendable, Hashable {
public var trimStart: Double
public var trimEnd: Double
public var keyFrameTime: Double
public var audioPolicy: AudioPolicy
public var codecPolicy: CodecPolicy
public var hdrPolicy: HDRPolicy
public var maxDimension: Int
public var cropRect: CropRect
public var aspectRatio: AspectRatioTemplate
public init(
trimStart: Double = 0,
trimEnd: Double = 1.0,
keyFrameTime: Double = 0.5,
audioPolicy: AudioPolicy = .keep,
codecPolicy: CodecPolicy = .fallbackH264,
hdrPolicy: HDRPolicy = .toneMapToSDR,
maxDimension: Int = 1920,
cropRect: CropRect = .full,
aspectRatio: AspectRatioTemplate = .original
) {
self.trimStart = trimStart
self.trimEnd = trimEnd
self.keyFrameTime = keyFrameTime
self.audioPolicy = audioPolicy
self.codecPolicy = codecPolicy
self.hdrPolicy = hdrPolicy
self.maxDimension = maxDimension
self.cropRect = cropRect
self.aspectRatio = aspectRatio
}
}
public struct AppError: Error, Codable, Sendable, Hashable {
public var code: String
public var stage: LivePhotoBuildStage?
public var message: String
public var underlyingErrorDescription: String?
public var suggestedActions: [String]
public init(
code: String,
stage: LivePhotoBuildStage? = nil,
message: String,
underlyingErrorDescription: String? = nil,
suggestedActions: [String] = []
) {
self.code = code
self.stage = stage
self.message = message
self.underlyingErrorDescription = underlyingErrorDescription
self.suggestedActions = suggestedActions
}
}
public struct WorkItem: Identifiable, Codable, Sendable, Hashable {
public var id: UUID
public var createdAt: Date
public var sourceVideo: SourceRef
public var coverImage: SourceRef?
public var exportParams: ExportParams
public var status: WorkStatus
public var resultAssetId: String?
public var cacheDir: URL
public var error: AppError?
public init(
id: UUID = UUID(),
createdAt: Date = Date(),
sourceVideo: SourceRef,
coverImage: SourceRef? = nil,
exportParams: ExportParams = ExportParams(),
status: WorkStatus = .idle,
resultAssetId: String? = nil,
cacheDir: URL,
error: AppError? = nil
) {
self.id = id
self.createdAt = createdAt
self.sourceVideo = sourceVideo
self.coverImage = coverImage
self.exportParams = exportParams
self.status = status
self.resultAssetId = resultAssetId
self.cacheDir = cacheDir
self.error = error
}
}
public struct LivePhotoWorkPaths: Sendable, Hashable {
public var workDir: URL
public var photoURL: URL
public var pairedVideoURL: URL
public var logURL: URL
public init(workDir: URL, photoURL: URL, pairedVideoURL: URL, logURL: URL) {
self.workDir = workDir
self.photoURL = photoURL
self.pairedVideoURL = pairedVideoURL
self.logURL = logURL
}
}
public struct CacheManager: Sendable {
public var baseDirectory: URL
public init(baseDirectory: URL? = nil) throws {
if let baseDirectory {
self.baseDirectory = baseDirectory
} else {
let caches = try FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: true)
self.baseDirectory = caches.appendingPathComponent("LivePhotoBuilder", isDirectory: true)
}
try FileManager.default.createDirectory(at: self.baseDirectory, withIntermediateDirectories: true)
}
public func makeWorkPaths(workId: UUID) throws -> LivePhotoWorkPaths {
let workDir = baseDirectory.appendingPathComponent(workId.uuidString, isDirectory: true)
try FileManager.default.createDirectory(at: workDir, withIntermediateDirectories: true)
return LivePhotoWorkPaths(
workDir: workDir,
photoURL: workDir.appendingPathComponent("photo").appendingPathExtension("heic"),
pairedVideoURL: workDir.appendingPathComponent("paired").appendingPathExtension("mov"),
logURL: workDir.appendingPathComponent("builder").appendingPathExtension("log")
)
}
public func clearWork(workId: UUID) throws {
let workDir = baseDirectory.appendingPathComponent(workId.uuidString, isDirectory: true)
if FileManager.default.fileExists(atPath: workDir.path) {
try FileManager.default.removeItem(at: workDir)
}
}
/// Removes the work directory silently, ignoring errors (cancellation cleanup).
public func removeWorkDir(workId: UUID) {
let workDir = baseDirectory.appendingPathComponent(workId.uuidString, isDirectory: true)
try? FileManager.default.removeItem(at: workDir)
}
}
public struct LivePhotoLogger: Sendable {
private var logger: os.Logger
public init(subsystem: String = "ToLivePhoto", category: String = "LivePhotoCore") {
self.logger = os.Logger(subsystem: subsystem, category: category)
}
public func info(_ message: String) {
logger.info("\(message, privacy: .public)")
}
public func error(_ message: String) {
logger.error("\(message, privacy: .public)")
}
}
public actor AlbumWriter {
public init() {}
public func requestAddOnlyAuthorization() async -> PHAuthorizationStatus {
await withCheckedContinuation { continuation in
PHPhotoLibrary.requestAuthorization(for: .addOnly) { status in
continuation.resume(returning: status)
}
}
}
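/// Saves a paired photo + video to the photo library as one Live Photo asset
/// and returns the created asset's local identifier.
///
/// Usage sketch (illustrative; `photoURL` and `videoURL` stand in for a
/// HEIC/MOV pair that already share a content identifier):
/// ```swift
/// let writer = AlbumWriter()
/// guard await writer.requestAddOnlyAuthorization() == .authorized else { return }
/// let assetId = try await writer.saveLivePhoto(photoURL: photoURL, pairedVideoURL: videoURL)
/// ```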
public func saveLivePhoto(photoURL: URL, pairedVideoURL: URL, shouldMoveFiles: Bool = false) async throws -> String {
try await withCheckedThrowingContinuation { continuation in
var localIdentifier: String?
PHPhotoLibrary.shared().performChanges({
let request = PHAssetCreationRequest.forAsset()
let photoOptions = PHAssetResourceCreationOptions()
photoOptions.shouldMoveFile = shouldMoveFiles
photoOptions.uniformTypeIdentifier = UTType.heic.identifier
let videoOptions = PHAssetResourceCreationOptions()
videoOptions.shouldMoveFile = shouldMoveFiles
videoOptions.uniformTypeIdentifier = UTType.quickTimeMovie.identifier
request.addResource(with: .photo, fileURL: photoURL, options: photoOptions)
request.addResource(with: .pairedVideo, fileURL: pairedVideoURL, options: videoOptions)
localIdentifier = request.placeholderForCreatedAsset?.localIdentifier
}, completionHandler: { success, error in
if let error {
continuation.resume(throwing: AppError(code: "LPB-401", stage: .saveToAlbum, message: "保存到相册失败", underlyingErrorDescription: error.localizedDescription, suggestedActions: ["允许添加到相册权限", "稍后重试"]))
return
}
guard success, let id = localIdentifier else {
continuation.resume(throwing: AppError(code: "LPB-401", stage: .saveToAlbum, message: "保存到相册失败", underlyingErrorDescription: nil, suggestedActions: ["允许添加到相册权限", "稍后重试"]))
return
}
continuation.resume(returning: id)
})
}
}
}
public actor LivePhotoValidator {
public init() {}
public func isLivePhotoAsset(localIdentifier: String) async -> Bool {
let result = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
guard let asset = result.firstObject else {
return false
}
return asset.mediaSubtypes.contains(.photoLive)
}
public func requestLivePhoto(localIdentifier: String) async -> PHLivePhoto? {
let result = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil)
guard let asset = result.firstObject else {
return nil
}
return await withCheckedContinuation { continuation in
PHImageManager.default().requestLivePhoto(
for: asset,
targetSize: CGSize(width: 1, height: 1),
contentMode: .aspectFit,
options: nil
) { livePhoto, _ in
continuation.resume(returning: livePhoto)
}
}
}
public func requestLivePhoto(photoURL: URL, pairedVideoURL: URL) async -> PHLivePhoto? {
await withCheckedContinuation { continuation in
var hasResumed = false
let requestID = PHLivePhoto.request(
withResourceFileURLs: [pairedVideoURL, photoURL],
placeholderImage: nil,
targetSize: .zero,
contentMode: .aspectFit
) { livePhoto, info in
// Make sure the continuation is resumed at most once.
guard !hasResumed else { return }
// Skip degraded (preview-quality) results; wait for the final callback.
if let isDegraded = info[PHLivePhotoInfoIsDegradedKey] as? Bool, isDegraded {
return
}
// Treat errors as a nil result.
if let error = info[PHLivePhotoInfoErrorKey] as? Error {
print("[LivePhotoValidator] requestLivePhoto error: \(error.localizedDescription)")
hasResumed = true
continuation.resume(returning: nil)
return
}
if let cancelled = info[PHLivePhotoInfoCancelledKey] as? Bool, cancelled {
print("[LivePhotoValidator] requestLivePhoto cancelled")
hasResumed = true
continuation.resume(returning: nil)
return
}
hasResumed = true
continuation.resume(returning: livePhoto)
}
// Safety net: cancel and resume with nil if no final callback arrives within 10s.
DispatchQueue.main.asyncAfter(deadline: .now() + 10) {
guard !hasResumed else { return }
print("[LivePhotoValidator] requestLivePhoto timeout, requestID: \(requestID)")
PHLivePhoto.cancelRequest(withRequestID: requestID)
hasResumed = true
continuation.resume(returning: nil)
}
}
}
public func canCreateLivePhotoFromResources(photoURL: URL, pairedVideoURL: URL) async -> Bool {
await requestLivePhoto(photoURL: photoURL, pairedVideoURL: pairedVideoURL) != nil
}
}
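// Pre-flight sketch (illustrative; `photoURL`/`videoURL` are placeholders):
//
//     let validator = LivePhotoValidator()
//     let pairable = await validator.canCreateLivePhotoFromResources(
//         photoURL: photoURL, pairedVideoURL: videoURL)
//     // `false` often means the photo and video carry mismatched content identifiers.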
public struct LivePhotoBuildOutput: Sendable, Hashable {
public var workId: UUID
public var assetIdentifier: String
public var pairedImageURL: URL
public var pairedVideoURL: URL
public init(workId: UUID, assetIdentifier: String, pairedImageURL: URL, pairedVideoURL: URL) {
self.workId = workId
self.assetIdentifier = assetIdentifier
self.pairedImageURL = pairedImageURL
self.pairedVideoURL = pairedVideoURL
}
}
public actor LivePhotoBuilder {
private let cacheManager: CacheManager
private let logger: LivePhotoLogger
public init(cacheManager: CacheManager? = nil, logger: LivePhotoLogger = LivePhotoLogger()) throws {
self.cacheManager = try cacheManager ?? CacheManager()
self.logger = logger
}
public func buildResources(
workId: UUID = UUID(),
sourceVideoURL: URL,
coverImageURL: URL? = nil,
exportParams: ExportParams = ExportParams(),
progress: (@Sendable (LivePhotoBuildProgress) -> Void)? = nil
) async throws -> LivePhotoBuildOutput {
let assetIdentifier = UUID().uuidString
let paths = try cacheManager.makeWorkPaths(workId: workId)
progress?(LivePhotoBuildProgress(stage: .normalize, fraction: 0))
let trimmedVideoURL = try await trimVideo(
sourceURL: sourceVideoURL,
trimStart: exportParams.trimStart,
trimEnd: exportParams.trimEnd,
destinationURL: paths.workDir.appendingPathComponent("trimmed.mov")
)
// Step 1: retime the trimmed clip to the duration of the bundled metadata.mov.
// The live-wallpaper reference project uses CMTimeMake(550, 600) ≈ 0.917s;
// we use the same target so the video lines up with metadata.mov's timeline.
let targetDuration = CMTimeMake(value: 550, timescale: 600) // ≈ 0.917s, matches live-wallpaper
progress?(LivePhotoBuildProgress(stage: .normalize, fraction: 0.5))
let scaledVideoURL = try await scaleVideoToTargetDuration(
sourceURL: trimmedVideoURL,
targetDuration: targetDuration,
cropRect: exportParams.cropRect,
aspectRatio: exportParams.aspectRatio,
destinationURL: paths.workDir.appendingPathComponent("scaled.mov")
)
// Use 0.5s (roughly the midpoint of the ~0.917s clip) as the key-frame time,
// matching the still-image-time marker carried by metadata.mov.
let relativeKeyFrameTime = 0.5 // midpoint, aligned with metadata.mov
progress?(LivePhotoBuildProgress(stage: .extractKeyFrame, fraction: 0))
let keyPhotoURL = try await resolveKeyPhotoURL(
videoURL: scaledVideoURL,
coverImageURL: coverImageURL,
keyFrameTime: relativeKeyFrameTime,
destinationURL: paths.workDir.appendingPathComponent("keyPhoto").appendingPathExtension("heic")
)
progress?(LivePhotoBuildProgress(stage: .writePhotoMetadata, fraction: 0))
guard let pairedImageURL = addAssetID(
assetIdentifier,
toImage: keyPhotoURL,
saveTo: paths.photoURL
) else {
throw AppError(code: "LPB-201", stage: .writePhotoMetadata, message: "封面生成失败", underlyingErrorDescription: nil, suggestedActions: ["缩短时长", "降低分辨率", "重试"])
}
progress?(LivePhotoBuildProgress(stage: .writeVideoMetadata, fraction: 0))
let pairedVideoURL = try await addAssetID(assetIdentifier, toVideo: scaledVideoURL, saveTo: paths.pairedVideoURL, stillImageTimeSeconds: relativeKeyFrameTime, progress: { p in
progress?(LivePhotoBuildProgress(stage: .writeVideoMetadata, fraction: p))
})
logger.info("Generated Live Photo files:")
logger.info(" Photo: \(pairedImageURL.path)")
logger.info(" Video: \(pairedVideoURL.path)")
logger.info(" AssetIdentifier: \(assetIdentifier)")
return LivePhotoBuildOutput(workId: workId, assetIdentifier: assetIdentifier, pairedImageURL: pairedImageURL, pairedVideoURL: pairedVideoURL)
}
private func trimVideo(sourceURL: URL, trimStart: Double, trimEnd: Double, destinationURL: URL) async throws -> URL {
let asset = AVURLAsset(url: sourceURL)
let duration = try await asset.load(.duration).seconds
let safeTrimStart = max(0, min(trimStart, duration))
let safeTrimEnd = max(safeTrimStart, min(trimEnd, duration))
if safeTrimEnd - safeTrimStart < 0.1 {
throw AppError(code: "LPB-101", stage: .normalize, message: "视频时长不足", suggestedActions: ["选择更长的视频"])
}
let startTime = CMTime(seconds: safeTrimStart, preferredTimescale: 600)
let endTime = CMTime(seconds: safeTrimEnd, preferredTimescale: 600)
let timeRange = CMTimeRange(start: startTime, end: endTime)
if FileManager.default.fileExists(atPath: destinationURL.path) {
try FileManager.default.removeItem(at: destinationURL)
}
let composition = AVMutableComposition()
guard let videoTrack = try await asset.loadTracks(withMediaType: .video).first else {
throw AppError(code: "LPB-101", stage: .normalize, message: "视频轨道不存在", suggestedActions: ["选择其他视频"])
}
let compositionVideoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
try compositionVideoTrack?.insertTimeRange(timeRange, of: videoTrack, at: .zero)
if let audioTrack = try? await asset.loadTracks(withMediaType: .audio).first {
let compositionAudioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid)
try? compositionAudioTrack?.insertTimeRange(timeRange, of: audioTrack, at: .zero)
}
let transform = try await videoTrack.load(.preferredTransform)
// Preserve the source orientation on the composition track.
compositionVideoTrack?.preferredTransform = transform
// Use the passthrough preset so trimming does not re-encode.
guard let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough) else {
throw AppError(code: "LPB-101", stage: .normalize, message: "无法创建导出会话", suggestedActions: ["重试"])
}
exportSession.outputURL = destinationURL
exportSession.outputFileType = .mov
await exportSession.export()
guard exportSession.status == .completed else {
throw AppError(code: "LPB-101", stage: .normalize, message: "视频裁剪失败", underlyingErrorDescription: exportSession.error?.localizedDescription, suggestedActions: ["缩短时长", "重试"])
}
return destinationURL
}
/// Normalizes the trimmed clip for Live Photo pairing: retimes it to the
/// target duration (~0.917s) and renders at up to 1080x1920 at 60fps.
/// Mirrors the live-wallpaper reference flow: accelerateVideo + resizeVideo.
private func scaleVideoToTargetDuration(
sourceURL: URL,
targetDuration: CMTime,
cropRect: CropRect,
aspectRatio: AspectRatioTemplate,
destinationURL: URL
) async throws -> URL {
let asset = AVURLAsset(url: sourceURL)
if FileManager.default.fileExists(atPath: destinationURL.path) {
try FileManager.default.removeItem(at: destinationURL)
}
guard let videoTrack = try await asset.loadTracks(withMediaType: .video).first else {
throw AppError(code: "LPB-101", stage: .normalize, message: "视频轨道不存在", suggestedActions: ["选择其他视频"])
}
let originalDuration = try await asset.load(.duration)
let naturalSize = try await videoTrack.load(.naturalSize)
let preferredTransform = try await videoTrack.load(.preferredTransform)
// Apply preferredTransform to get the upright display size (as live-wallpaper's resizeVideo does).
let originalSize = CGSize(width: naturalSize.width, height: naturalSize.height)
let transformedSize = originalSize.applying(preferredTransform)
let absoluteSize = CGSize(width: abs(transformedSize.width), height: abs(transformedSize.height))
// Decide the render size.
let outputSize: CGSize
if let targetRatio = aspectRatio.ratio {
// A template was chosen: fix the width at 1080 and derive the height
// from the ratio, capping it at 1920.
let width: CGFloat = 1080
let height = width / targetRatio
outputSize = CGSize(width: width, height: min(height, 1920))
} else {
// No template: keep the source orientation at 1080p.
let isLandscape = absoluteSize.width > absoluteSize.height
outputSize = isLandscape ? CGSize(width: 1920, height: 1080) : CGSize(width: 1080, height: 1920)
}
// Step 1: retime to the target duration (live-wallpaper's accelerateVideo).
let acceleratedURL = destinationURL.deletingLastPathComponent().appendingPathComponent("accelerated.mov")
if FileManager.default.fileExists(atPath: acceleratedURL.path) {
try FileManager.default.removeItem(at: acceleratedURL)
}
let composition = AVMutableComposition()
guard let compositionVideoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) else {
throw AppError(code: "LPB-101", stage: .normalize, message: "无法创建视频轨道", suggestedActions: ["重试"])
}
try compositionVideoTrack.insertTimeRange(CMTimeRange(start: .zero, duration: originalDuration), of: videoTrack, at: .zero)
// Same retiming as live-wallpaper's accelerateVideo (its lines 287-288).
compositionVideoTrack.scaleTimeRange(CMTimeRange(start: .zero, duration: originalDuration), toDuration: targetDuration)
compositionVideoTrack.preferredTransform = preferredTransform
guard let accelerateExport = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetHighestQuality) else {
throw AppError(code: "LPB-101", stage: .normalize, message: "无法创建导出会话", suggestedActions: ["重试"])
}
accelerateExport.outputURL = acceleratedURL
accelerateExport.outputFileType = .mov
await accelerateExport.export()
guard accelerateExport.status == .completed else {
throw AppError(code: "LPB-101", stage: .normalize, message: "视频变速失败", underlyingErrorDescription: accelerateExport.error?.localizedDescription, suggestedActions: ["重试"])
}
// Step 2: crop and scale to the output size (live-wallpaper's resizeVideo).
let acceleratedAsset = AVURLAsset(url: acceleratedURL)
guard let acceleratedVideoTrack = try await acceleratedAsset.loadTracks(withMediaType: .video).first else {
return acceleratedURL
}
let acceleratedDuration = try await acceleratedAsset.load(.duration)
// Reload geometry from the retimed asset.
let acceleratedNaturalSize = try await acceleratedVideoTrack.load(.naturalSize)
let acceleratedTransform = try await acceleratedVideoTrack.load(.preferredTransform)
guard let resizeExport = AVAssetExportSession(asset: acceleratedAsset, presetName: AVAssetExportPresetHighestQuality) else {
return acceleratedURL
}
// Use an AVMutableVideoComposition to crop and scale in a single render pass.
let videoComposition = AVMutableVideoComposition()
videoComposition.renderSize = outputSize
// Render at 60fps.
videoComposition.frameDuration = CMTime(value: 1, timescale: 60)
let instruction = AVMutableVideoCompositionInstruction()
instruction.timeRange = CMTimeRange(start: .zero, duration: acceleratedDuration)
let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: acceleratedVideoTrack)
// Map the source frame (naturalSize) onto the output canvas (outputSize).
// The layer transform is built in three steps:
// 1. apply preferredTransform to rotate the frame upright,
// 2. scale so the cropped region fills the canvas,
// 3. translate so the crop center sits at the canvas center.
let rotatedSize = acceleratedNaturalSize.applying(acceleratedTransform)
let rotatedAbsoluteSize = CGSize(width: abs(rotatedSize.width), height: abs(rotatedSize.height))
// Size of the cropped region in upright source pixels.
let croppedSourceWidth = rotatedAbsoluteSize.width * cropRect.width
let croppedSourceHeight = rotatedAbsoluteSize.height * cropRect.height
// Scale so the cropped region fills the output (aspect-fill).
let actualWidthRatio = outputSize.width / croppedSourceWidth
let actualHeightRatio = outputSize.height / croppedSourceHeight
let actualScaleFactor = max(actualWidthRatio, actualHeightRatio) // max = aspect-fill; min would letterbox
let scaledWidth = rotatedAbsoluteSize.width * actualScaleFactor
let scaledHeight = rotatedAbsoluteSize.height * actualScaleFactor
// Translate so the crop center lands on the output center.
let cropCenterX = (cropRect.x + cropRect.width / 2) * scaledWidth
let cropCenterY = (cropRect.y + cropRect.height / 2) * scaledHeight
let outputCenterX = outputSize.width / 2
let outputCenterY = outputSize.height / 2
let centerX = outputCenterX - cropCenterX
let centerY = outputCenterY - cropCenterY
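// Worked example (assuming a 1920x1080 landscape source, a .square 1080x1080
// output, and a full crop): widthRatio = 1080/1920 = 0.5625, heightRatio =
// 1080/1080 = 1.0, so actualScaleFactor = 1.0; the crop center (960, 540)
// must land on the output center (540, 540), giving a translation of
// (-420, 0) that trims 420pt from each side.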
// Compose the final transform in application order:
// 1. preferredTransform (rotate upright),
// 2. scale,
// 3. translate.
// Note on concatenating: A.concatenating(B) applies A first, then B.
let scaleTransform = CGAffineTransform(scaleX: actualScaleFactor, y: actualScaleFactor)
let translateToCenter = CGAffineTransform(translationX: centerX, y: centerY)
let finalTransform = acceleratedTransform.concatenating(scaleTransform).concatenating(translateToCenter)
layerInstruction.setTransform(finalTransform, at: .zero)
instruction.layerInstructions = [layerInstruction]
videoComposition.instructions = [instruction]
resizeExport.videoComposition = videoComposition
resizeExport.outputURL = destinationURL
resizeExport.outputFileType = .mov
resizeExport.shouldOptimizeForNetworkUse = true
await resizeExport.export()
// Remove the intermediate retimed file.
try? FileManager.default.removeItem(at: acceleratedURL)
guard resizeExport.status == .completed else {
throw AppError(code: "LPB-101", stage: .normalize, message: "视频尺寸调整失败", underlyingErrorDescription: resizeExport.error?.localizedDescription, suggestedActions: ["重试"])
}
return destinationURL
}
private func resolveKeyPhotoURL(
videoURL: URL,
coverImageURL: URL?,
keyFrameTime: Double,
destinationURL: URL
) async throws -> URL {
// Cap the key photo at 1080p-class dimensions.
let maxDimension = 1920
// Writes a CGImage to disk as HEIC.
func writeHEIC(_ image: CGImage, to url: URL) throws {
guard let dest = CGImageDestinationCreateWithURL(url as CFURL, UTType.heic.identifier as CFString, 1, nil) else {
throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "写入封面失败", underlyingErrorDescription: "无法创建 HEIC 写入器", suggestedActions: ["重试"])
}
let props: [String: Any] = [
kCGImageDestinationLossyCompressionQuality as String: 0.9
]
CGImageDestinationAddImage(dest, image, props as CFDictionary)
guard CGImageDestinationFinalize(dest) else {
throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "写入封面失败", underlyingErrorDescription: "HEIC 写入失败", suggestedActions: ["重试"])
}
}
// Downscales an image so its longest side fits within maxDim.
func scaleImage(_ image: CGImage, maxDim: Int) -> CGImage {
let width = image.width
let height = image.height
let maxSide = max(width, height)
if maxSide <= maxDim { return image }
let scale = CGFloat(maxDim) / CGFloat(maxSide)
let newWidth = Int(CGFloat(width) * scale)
let newHeight = Int(CGFloat(height) * scale)
guard let context = CGContext(
data: nil, width: newWidth, height: newHeight,
bitsPerComponent: 8, bytesPerRow: 0,
space: CGColorSpaceCreateDeviceRGB(),
bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue
) else { return image }
context.interpolationQuality = .high
context.draw(image, in: CGRect(x: 0, y: 0, width: newWidth, height: newHeight))
return context.makeImage() ?? image
}
// Prefer a user-supplied cover image when one is provided.
if let coverImageURL {
guard let src = CGImageSourceCreateWithURL(coverImageURL as CFURL, nil),
let img = CGImageSourceCreateImageAtIndex(src, 0, nil) else {
throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "封面读取失败", underlyingErrorDescription: nil, suggestedActions: ["更换封面图", "重试"])
}
let scaledImg = scaleImage(img, maxDim: maxDimension)
try writeHEIC(scaledImg, to: destinationURL)
return destinationURL
}
// Otherwise extract a frame from the video at the key-frame time.
let asset = AVURLAsset(url: videoURL)
let imageGenerator = AVAssetImageGenerator(asset: asset)
imageGenerator.appliesPreferredTrackTransform = true
imageGenerator.requestedTimeToleranceAfter = CMTime(value: 1, timescale: 100)
imageGenerator.requestedTimeToleranceBefore = CMTime(value: 1, timescale: 100)
// Let AVAssetImageGenerator handle the downscaling.
imageGenerator.maximumSize = CGSize(width: maxDimension, height: maxDimension)
let safeSeconds = max(0, min(keyFrameTime, max(0, asset.duration.seconds - 0.1)))
let time = CMTime(seconds: safeSeconds, preferredTimescale: asset.duration.timescale)
let cgImage: CGImage
do {
cgImage = try imageGenerator.copyCGImage(at: time, actualTime: nil)
} catch {
throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "抽帧失败", underlyingErrorDescription: error.localizedDescription, suggestedActions: ["缩短时长", "降低分辨率", "重试"])
}
try writeHEIC(cgImage, to: destinationURL)
return destinationURL
}
private func addAssetID(
_ assetIdentifier: String,
toImage imageURL: URL,
saveTo destinationURL: URL
) -> URL? {
let useHEIC = true
let imageType = useHEIC ? UTType.heic.identifier : UTType.jpeg.identifier
guard let imageDestination = CGImageDestinationCreateWithURL(destinationURL as CFURL, imageType as CFString, 1, nil),
let imageSource = CGImageSourceCreateWithURL(imageURL as CFURL, nil),
let imageRef = CGImageSourceCreateImageAtIndex(imageSource, 0, nil) else {
return nil
}
var imageProperties = (CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [String: Any]) ?? [:]
// Pixel dimensions for the EXIF block below.
let width = imageRef.width
let height = imageRef.height
// TIFF (IFD0): normalize orientation and resolution.
var tiffDict = (imageProperties[kCGImagePropertyTIFFDictionary as String] as? [String: Any]) ?? [:]
tiffDict[kCGImagePropertyTIFFOrientation as String] = 1 // Horizontal (normal)
tiffDict[kCGImagePropertyTIFFXResolution as String] = 72
tiffDict[kCGImagePropertyTIFFYResolution as String] = 72
tiffDict[kCGImagePropertyTIFFResolutionUnit as String] = 2 // inches
// Remove tile fields carried over from the source image.
tiffDict.removeValue(forKey: kCGImagePropertyTIFFTileWidth as String)
tiffDict.removeValue(forKey: kCGImagePropertyTIFFTileLength as String)
imageProperties[kCGImagePropertyTIFFDictionary as String] = tiffDict
// EXIF: version and pixel dimensions.
var exifDict = (imageProperties[kCGImagePropertyExifDictionary as String] as? [String: Any]) ?? [:]
exifDict[kCGImagePropertyExifVersion as String] = [2, 2, 1] // 0221
exifDict[kCGImagePropertyExifPixelXDimension as String] = width
exifDict[kCGImagePropertyExifPixelYDimension as String] = height
imageProperties[kCGImagePropertyExifDictionary as String] = exifDict
// Write the ContentIdentifier into the Apple MakerNote dictionary.
// Photos uses this identifier to pair the photo with its video as a Live Photo.
let assetIdentifierKey = "17" // Apple MakerNote key for Content Identifier
var makerAppleDict: [String: Any] = [:]
makerAppleDict[assetIdentifierKey] = assetIdentifier
imageProperties[kCGImagePropertyMakerAppleDictionary as String] = makerAppleDict
CGImageDestinationAddImage(imageDestination, imageRef, imageProperties as CFDictionary)
guard CGImageDestinationFinalize(imageDestination) else {
return nil
}
logger.info("Created HEIC with ContentIdentifier: \(assetIdentifier)")
return destinationURL
}
private func addAssetID(
_ assetIdentifier: String,
toVideo videoURL: URL,
saveTo destinationURL: URL,
stillImageTimeSeconds: Double,
progress: @Sendable @escaping (Double) -> Void
) async throws -> URL {
// Follows the live-wallpaper reference implementation:
// read raw samples via AVAssetReaderTrackOutput and carry orientation on
// videoWriterInput.transform, instead of using AVAssetReaderVideoCompositionOutput.
guard let metadataURL = Self.metadataMovURL else {
throw AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "缺少 metadata.mov 资源文件", suggestedActions: ["重新安装应用"])
}
return try await withCheckedThrowingContinuation { continuation in
let queue = DispatchQueue(label: "LivePhotoCore.VideoPairing")
queue.async {
do {
if FileManager.default.fileExists(atPath: destinationURL.path) {
try FileManager.default.removeItem(at: destinationURL)
}
let videoAsset = AVURLAsset(url: videoURL)
let metadataAsset = AVURLAsset(url: metadataURL)
guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else {
continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: "缺少视频轨", suggestedActions: ["更换一个视频", "重试"]))
return
}
let durationSeconds = max(0.001, videoAsset.duration.seconds)
let nominalFrameRate = videoTrack.nominalFrameRate > 0 ? videoTrack.nominalFrameRate : 30
let frameCount = max(1, Int(durationSeconds * Double(nominalFrameRate)))
// Set up the readers and the writer.
let videoReader = try AVAssetReader(asset: videoAsset)
let metadataReader = try AVAssetReader(asset: metadataAsset)
let assetWriter = try AVAssetWriter(outputURL: destinationURL, fileType: .mov)
let writingGroup = DispatchGroup()
// Read raw samples with AVAssetReaderTrackOutput (as live-wallpaper does)
// rather than AVAssetReaderVideoCompositionOutput.
let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: [
kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA as UInt32)
])
videoReader.add(videoReaderOutput)
// Size the writer input with track.naturalSize (as live-wallpaper does);
// orientation is carried by videoWriterInput.transform below.
let videoWriterInput = AVAssetWriterInput(
mediaType: .video,
outputSettings: [
AVVideoCodecKey: AVVideoCodecType.h264,
AVVideoWidthKey: videoTrack.naturalSize.width,
AVVideoHeightKey: videoTrack.naturalSize.height
]
)
// Carry orientation on the writer input's transform (live-wallpaper line 108).
videoWriterInput.transform = videoTrack.preferredTransform
// expectsMediaDataInRealTime = true, matching live-wallpaper (line 109).
videoWriterInput.expectsMediaDataInRealTime = true
assetWriter.add(videoWriterInput)
// Mirror every metadata track from metadata.mov through a reader/writer pair
// (following live-wallpaper; no sourceFormatHint is passed here).
var metadataIOs = [(AVAssetWriterInput, AVAssetReaderTrackOutput)]()
let metadataTracks = metadataAsset.tracks(withMediaType: .metadata)
for track in metadataTracks {
let trackReaderOutput = AVAssetReaderTrackOutput(track: track, outputSettings: nil)
metadataReader.add(trackReaderOutput)
let metadataInput = AVAssetWriterInput(mediaType: .metadata, outputSettings: nil)
assetWriter.add(metadataInput)
metadataIOs.append((metadataInput, trackReaderOutput))
}
// Top-level movie metadata carries the content identifier.
assetWriter.metadata = [Self.metadataForAssetID(assetIdentifier)]
assetWriter.startWriting()
videoReader.startReading()
metadataReader.startReading()
assetWriter.startSession(atSourceTime: .zero)
var currentFrameCount = 0
// Pump video samples.
writingGroup.enter()
videoWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "LivePhotoCore.VideoWriterInput")) {
while videoWriterInput.isReadyForMoreMediaData {
if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() {
currentFrameCount += 1
let pct = Double(currentFrameCount) / Double(frameCount)
progress(pct)
videoWriterInput.append(sampleBuffer)
} else {
videoWriterInput.markAsFinished()
writingGroup.leave()
break
}
}
}
// Pump each metadata track's sample buffers.
for (metadataInput, metadataOutput) in metadataIOs {
writingGroup.enter()
metadataInput.requestMediaDataWhenReady(on: DispatchQueue(label: "LivePhotoCore.MetadataWriterInput")) {
while metadataInput.isReadyForMoreMediaData {
if let sampleBuffer = metadataOutput.copyNextSampleBuffer() {
metadataInput.append(sampleBuffer)
} else {
metadataInput.markAsFinished()
writingGroup.leave()
break
}
}
}
}
writingGroup.notify(queue: .main) {
if videoReader.status == .completed && metadataReader.status == .completed && assetWriter.status == .writing {
assetWriter.finishWriting {
if assetWriter.status == .completed {
continuation.resume(returning: destinationURL)
} else {
continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: assetWriter.error?.localizedDescription, suggestedActions: ["重试"]))
}
}
} else {
let errorDesc = videoReader.error?.localizedDescription ?? metadataReader.error?.localizedDescription ?? assetWriter.error?.localizedDescription ?? "未知错误"
continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: errorDesc, suggestedActions: ["重试"]))
}
}
} catch {
continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: error.localizedDescription, suggestedActions: ["重试"]))
}
}
}
}
/// Locates the bundled metadata.mov resource.
private static var metadataMovURL: URL? {
// Check the app bundle first.
if let bundleURL = Bundle.main.url(forResource: "metadata", withExtension: "mov") {
return bundleURL
}
// Then the SPM module bundle.
#if SWIFT_PACKAGE
if let moduleURL = Bundle.module.url(forResource: "metadata", withExtension: "mov") {
return moduleURL
}
#endif
return nil
}
private static func metadataForAssetID(_ assetIdentifier: String) -> AVMetadataItem {
let item = AVMutableMetadataItem()
item.key = "com.apple.quicktime.content.identifier" as (NSCopying & NSObjectProtocol)
item.keySpace = AVMetadataKeySpace(rawValue: "mdta")
item.value = assetIdentifier as (NSCopying & NSObjectProtocol)
item.dataType = "com.apple.metadata.datatype.UTF-8"
return item
}
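// Note: the identifier written here must equal the MakerNote "17" value
// embedded in the paired HEIC; Photos pairs the two resources by matching
// content identifiers.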
}
public struct LivePhotoWorkflowResult: Sendable, Hashable {
public var workId: UUID
public var assetIdentifier: String
public var pairedImageURL: URL
public var pairedVideoURL: URL
public var savedAssetId: String
public var resourceValidationOK: Bool
public var libraryAssetIsLivePhoto: Bool?
public init(
workId: UUID,
assetIdentifier: String,
pairedImageURL: URL,
pairedVideoURL: URL,
savedAssetId: String,
resourceValidationOK: Bool,
libraryAssetIsLivePhoto: Bool?
) {
self.workId = workId
self.assetIdentifier = assetIdentifier
self.pairedImageURL = pairedImageURL
self.pairedVideoURL = pairedVideoURL
self.savedAssetId = savedAssetId
self.resourceValidationOK = resourceValidationOK
self.libraryAssetIsLivePhoto = libraryAssetIsLivePhoto
}
}
public actor LivePhotoWorkflow {
private let builder: LivePhotoBuilder
private let albumWriter: AlbumWriter
private let validator: LivePhotoValidator
private let cacheManager: CacheManager
public init(cacheManager: CacheManager? = nil, logger: LivePhotoLogger = LivePhotoLogger()) throws {
let cm = try cacheManager ?? CacheManager()
self.cacheManager = cm
self.builder = try LivePhotoBuilder(cacheManager: cm, logger: logger)
self.albumWriter = AlbumWriter()
self.validator = LivePhotoValidator()
}
/// Removes all intermediate files for the given workId (used after cancellation).
public func cleanupWork(workId: UUID) async {
cacheManager.removeWorkDir(workId: workId)
}
public func buildSaveValidate(
workId: UUID = UUID(),
sourceVideoURL: URL,
coverImageURL: URL? = nil,
exportParams: ExportParams = ExportParams(),
progress: (@Sendable (LivePhotoBuildProgress) -> Void)? = nil
) async throws -> LivePhotoWorkflowResult {
let output = try await builder.buildResources(
workId: workId,
sourceVideoURL: sourceVideoURL,
coverImageURL: coverImageURL,
exportParams: exportParams,
progress: progress
)
progress?(LivePhotoBuildProgress(stage: .validate, fraction: 0))
let resourceOK = await validator.canCreateLivePhotoFromResources(
photoURL: output.pairedImageURL,
pairedVideoURL: output.pairedVideoURL
)
progress?(LivePhotoBuildProgress(stage: .validate, fraction: 0.3))
let addOnlyStatus = await albumWriter.requestAddOnlyAuthorization()
guard addOnlyStatus == .authorized else {
throw AppError(
code: "LPB-401",
stage: .saveToAlbum,
message: "无相册写入权限",
underlyingErrorDescription: "authorizationStatus(addOnly)=\(addOnlyStatus)",
suggestedActions: ["在系统设置中允许“添加照片”权限"]
)
}
progress?(LivePhotoBuildProgress(stage: .saveToAlbum, fraction: 0))
let assetId = try await albumWriter.saveLivePhoto(
photoURL: output.pairedImageURL,
pairedVideoURL: output.pairedVideoURL,
shouldMoveFiles: false
)
progress?(LivePhotoBuildProgress(stage: .saveToAlbum, fraction: 1))
var isLiveSubtype: Bool? = nil
let readWriteStatus = PHPhotoLibrary.authorizationStatus(for: .readWrite)
if readWriteStatus == .authorized || readWriteStatus == .limited {
isLiveSubtype = await validator.isLivePhotoAsset(localIdentifier: assetId)
}
progress?(LivePhotoBuildProgress(stage: .validate, fraction: 1))
return LivePhotoWorkflowResult(
workId: output.workId,
assetIdentifier: output.assetIdentifier,
pairedImageURL: output.pairedImageURL,
pairedVideoURL: output.pairedVideoURL,
savedAssetId: assetId,
resourceValidationOK: resourceOK,
libraryAssetIsLivePhoto: isLiveSubtype
)
}
}
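// End-to-end usage sketch (illustrative; `videoURL` stands in for a local
// video file supplied by the caller):
//
//     let workflow = try LivePhotoWorkflow()
//     let params = ExportParams(trimStart: 0, trimEnd: 2.0, cropRect: .full, aspectRatio: .lockScreen)
//     let result = try await workflow.buildSaveValidate(sourceVideoURL: videoURL, exportParams: params) { p in
//         print("\(p.stage.rawValue): \(Int(p.fraction * 100))%")
//     }
//     print("Saved Live Photo asset:", result.savedAssetId)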