From 299415a530ec5f16659be1b24a7beabf8313035d Mon Sep 17 00:00:00 2001 From: empty Date: Sun, 14 Dec 2025 16:21:20 +0800 Subject: [PATCH] =?UTF-8?q?feat:=20=E5=88=9D=E5=A7=8B=E5=8C=96=20Live=20Ph?= =?UTF-8?q?oto=20=E9=A1=B9=E7=9B=AE=E7=BB=93=E6=9E=84?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - 添加 PRD、技术规范、交互规范文档 (V0.2) - 创建 Swift Package 和 Xcode 项目 - 实现 LivePhotoCore 基础模块 - 添加 HEIC MakerNote 元数据写入功能 - 创建项目结构文档和任务清单 - 添加 .gitignore 忽略规则 --- .gitignore | 105 ++ .serena/.gitignore | 1 + ...o_wallpaper_root_cause_still_image_time.md | 17 + .serena/project.yml | 84 ++ PROJECT_STRUCTURE.md | 52 + Package.swift | 26 + .../LivePhotoCore/HEICMakerNotePatcher.swift | 591 +++++++++ Sources/LivePhotoCore/LivePhotoCore.swift | 1104 +++++++++++++++++ Sources/LivePhotoCore/MakerNotesPatcher.swift | 83 ++ TASK.md | 101 ++ .../LivePhotoCoreTests.swift | 8 + docs/IXSPEC_LivePhoto_App_V0.2_2025-12-13.md | 136 ++ docs/PRD_LivePhoto_App_V0.2_2025-12-13.md | 557 +++++++++ .../TECHSPEC_LivePhoto_App_V0.2_2025-12-13.md | 167 +++ docs_index.md | 27 + .../to-live-photo.xcodeproj/project.pbxproj | 612 +++++++++ .../contents.xcworkspacedata | 7 + to-live-photo/to-live-photo/AppState.swift | 86 ++ .../AccentColor.colorset/Contents.json | 11 + .../AppIcon.appiconset/Contents.json | 35 + .../Assets.xcassets/Contents.json | 6 + to-live-photo/to-live-photo/ContentView.swift | 40 + .../to-live-photo/Views/EditorView.swift | 127 ++ .../to-live-photo/Views/HomeView.swift | 125 ++ .../to-live-photo/Views/ProcessingView.swift | 127 ++ .../to-live-photo/Views/ResultView.swift | 145 +++ .../Views/WallpaperGuideView.swift | 324 +++++ .../to-live-photo/to_live_photoApp.swift | 20 + .../to_live_photoTests.swift | 17 + .../to_live_photoUITests.swift | 41 + .../to_live_photoUITestsLaunchTests.swift | 33 + 31 files changed, 4815 insertions(+) create mode 100644 .gitignore create mode 100644 .serena/.gitignore create mode 100644 
.serena/memories/livephoto_wallpaper_root_cause_still_image_time.md create mode 100644 .serena/project.yml create mode 100644 PROJECT_STRUCTURE.md create mode 100644 Package.swift create mode 100644 Sources/LivePhotoCore/HEICMakerNotePatcher.swift create mode 100644 Sources/LivePhotoCore/LivePhotoCore.swift create mode 100644 Sources/LivePhotoCore/MakerNotesPatcher.swift create mode 100644 TASK.md create mode 100644 Tests/LivePhotoCoreTests/LivePhotoCoreTests.swift create mode 100644 docs/IXSPEC_LivePhoto_App_V0.2_2025-12-13.md create mode 100644 docs/PRD_LivePhoto_App_V0.2_2025-12-13.md create mode 100644 docs/TECHSPEC_LivePhoto_App_V0.2_2025-12-13.md create mode 100644 docs_index.md create mode 100644 to-live-photo/to-live-photo.xcodeproj/project.pbxproj create mode 100644 to-live-photo/to-live-photo.xcodeproj/project.xcworkspace/contents.xcworkspacedata create mode 100644 to-live-photo/to-live-photo/AppState.swift create mode 100644 to-live-photo/to-live-photo/Assets.xcassets/AccentColor.colorset/Contents.json create mode 100644 to-live-photo/to-live-photo/Assets.xcassets/AppIcon.appiconset/Contents.json create mode 100644 to-live-photo/to-live-photo/Assets.xcassets/Contents.json create mode 100644 to-live-photo/to-live-photo/ContentView.swift create mode 100644 to-live-photo/to-live-photo/Views/EditorView.swift create mode 100644 to-live-photo/to-live-photo/Views/HomeView.swift create mode 100644 to-live-photo/to-live-photo/Views/ProcessingView.swift create mode 100644 to-live-photo/to-live-photo/Views/ResultView.swift create mode 100644 to-live-photo/to-live-photo/Views/WallpaperGuideView.swift create mode 100644 to-live-photo/to-live-photo/to_live_photoApp.swift create mode 100644 to-live-photo/to-live-photoTests/to_live_photoTests.swift create mode 100644 to-live-photo/to-live-photoUITests/to_live_photoUITests.swift create mode 100644 to-live-photo/to-live-photoUITests/to_live_photoUITestsLaunchTests.swift diff --git a/.gitignore b/.gitignore new file mode 
100644 index 0000000..dc24f18 --- /dev/null +++ b/.gitignore @@ -0,0 +1,105 @@ +# Xcode +# +# gitignore contributors: remember to update Global Business Ignores +# https://github.com/github/gitignore/blob/main/Global/Xcode.gitignore + +## User settings +xcuserdata/ + +## Xcode build generated +build/ +DerivedData/ + +## Obj-C/Swift specific +*.hmap + +## App packaging +*.ipa +*.dSYM.zip +*.dSYM + +## Playgrounds +timeline.xctimeline +playground.xcworkspace + +# Swift Package Manager +# +# Add this line if you want to avoid checking in source code from Swift Package Manager dependencies. +.build/ +Packages/ +Package.resolved +*.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/ + +# CocoaPods +# +# We recommend against adding the Pods directory to your .gitignore. However +# you should judge for yourself, the pros and cons are mentioned at: +# https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control +# +Pods/ +*.xcworkspace + +# Carthage +# +# Add this line if you want to avoid checking in source code from Carthage dependencies. +Carthage/Checkouts +Carthage/Build/ + +# Accio dependency management +Dependencies/ +.accio/ + +# fastlane +# +# It is recommended to not store the screenshots in the git repo. +# Instead, use fastlane to re-generate the screenshots whenever they are needed. 
+# For more information about the recommended setup visit: +# https://docs.fastlane.tools/best-practices/source-control/#source-control + +fastlane/report.xml +fastlane/Preview.html +fastlane/screenshots/**/*.png +fastlane/test_output + +# Code Injection +# +# After new calculation based on new calculation +iOSInjectionProject/ + +# macOS +.DS_Store +.AppleDouble +.LSOverride + +# Thumbnails +._* + +# Files that might appear in the root of a volume +.DocumentRevisions-V100 +.fseventsd +.Spotlight-V100 +.TemporaryItems +.Trashes +.VolumeIcon.icns +.com.apple.timemachine.donotpresent + +# Directories potentially created on remote AFP share +.AppleDB +.AppleDesktop +Network Trash Folder +Temporary Items +.apdisk + +# IDE - VSCode +.vscode/ + +# IDE - JetBrains +.idea/ + +# Temporary files +*.swp +*.swo +*~ + +# Debug logs +*.log diff --git a/.serena/.gitignore b/.serena/.gitignore new file mode 100644 index 0000000..14d86ad --- /dev/null +++ b/.serena/.gitignore @@ -0,0 +1 @@ +/cache diff --git a/.serena/memories/livephoto_wallpaper_root_cause_still_image_time.md b/.serena/memories/livephoto_wallpaper_root_cause_still_image_time.md new file mode 100644 index 0000000..16b65d3 --- /dev/null +++ b/.serena/memories/livephoto_wallpaper_root_cause_still_image_time.md @@ -0,0 +1,17 @@ +# Live Photo 无法设置为动态壁纸:根因记录 + +## 现象 +- 生成的 Live Photo 在相册中可识别、可播放,但在“用作壁纸”时提示“动态不可用”。 + +## 关键发现(本地文件元数据) +- `/Users/yuanjiantsui/Downloads/paired.mov` 中的 timed metadata:`StillImageTime` 为 **-1**(int8)。 + - `exiftool` 输出示例:`[Track3] StillImageTime : -1` + +## 代码根因 +- `Sources/LivePhotoCore/LivePhotoCore.swift:842`:`LivePhotoBuilder.metadataItemForStillImageTime()` 将 `com.apple.quicktime.still-image-time` 的 value 写成 `-1`。 + - 建议改为 `0`(int8)。 + - 仍用 timed metadata group 的 `timeRange.start` 表达关键帧时间。 + +## 额外建议(兼容性) +- 移除非标准的 mdta keys:`Sample Time` / `Sample Duration`(当前写入到 `assetWriter.metadata`)。 +- 若仍不兼容,优先尝试 H.264、30fps、SDR、2~3 秒时长作为壁纸兼容模式。 \ No newline at end of file diff --git 
a/.serena/project.yml b/.serena/project.yml new file mode 100644 index 0000000..0d796b9 --- /dev/null +++ b/.serena/project.yml @@ -0,0 +1,84 @@ +# list of languages for which language servers are started; choose from: +# al bash clojure cpp csharp csharp_omnisharp +# dart elixir elm erlang fortran go +# haskell java julia kotlin lua markdown +# nix perl php python python_jedi r +# rego ruby ruby_solargraph rust scala swift +# terraform typescript typescript_vts yaml zig +# Note: +# - For C, use cpp +# - For JavaScript, use typescript +# Special requirements: +# - csharp: Requires the presence of a .sln file in the project folder. +# When using multiple languages, the first language server that supports a given file will be used for that file. +# The first language is the default language and the respective language server will be used as a fallback. +# Note that when using the JetBrains backend, language servers are not used and this list is correspondingly ignored. +languages: +- swift + +# the encoding used by text files in the project +# For a list of possible encodings, see https://docs.python.org/3.11/library/codecs.html#standard-encodings +encoding: "utf-8" + +# whether to use the project's gitignore file to ignore files +# Added on 2025-04-07 +ignore_all_files_in_gitignore: true + +# list of additional paths to ignore +# same syntax as gitignore, so you can use * and ** +# Was previously called `ignored_dirs`, please update your config if you are using that. +# Added (renamed) on 2025-04-07 +ignored_paths: [] + +# whether the project is in read-only mode +# If set to true, all editing tools will be disabled and attempts to use them will result in an error +# Added on 2025-04-18 +read_only: false + +# list of tool names to exclude. We recommend not excluding any tools, see the readme for more details. +# Below is the complete list of tools for convenience. 
+# To make sure you have the latest list of tools, and to view their descriptions, +# execute `uv run scripts/print_tool_overview.py`. +# +# * `activate_project`: Activates a project by name. +# * `check_onboarding_performed`: Checks whether project onboarding was already performed. +# * `create_text_file`: Creates/overwrites a file in the project directory. +# * `delete_lines`: Deletes a range of lines within a file. +# * `delete_memory`: Deletes a memory from Serena's project-specific memory store. +# * `execute_shell_command`: Executes a shell command. +# * `find_referencing_code_snippets`: Finds code snippets in which the symbol at the given location is referenced. +# * `find_referencing_symbols`: Finds symbols that reference the symbol at the given location (optionally filtered by type). +# * `find_symbol`: Performs a global (or local) search for symbols with/containing a given name/substring (optionally filtered by type). +# * `get_current_config`: Prints the current configuration of the agent, including the active and available projects, tools, contexts, and modes. +# * `get_symbols_overview`: Gets an overview of the top-level symbols defined in a given file. +# * `initial_instructions`: Gets the initial instructions for the current project. +# Should only be used in settings where the system prompt cannot be set, +# e.g. in clients you have no control over, like Claude Desktop. +# * `insert_after_symbol`: Inserts content after the end of the definition of a given symbol. +# * `insert_at_line`: Inserts content at a given line in a file. +# * `insert_before_symbol`: Inserts content before the beginning of the definition of a given symbol. +# * `list_dir`: Lists files and directories in the given directory (optionally with recursion). +# * `list_memories`: Lists memories in Serena's project-specific memory store. +# * `onboarding`: Performs onboarding (identifying the project structure and essential tasks, e.g. for testing or building). 
+# * `prepare_for_new_conversation`: Provides instructions for preparing for a new conversation (in order to continue with the necessary context). +# * `read_file`: Reads a file within the project directory. +# * `read_memory`: Reads the memory with the given name from Serena's project-specific memory store. +# * `remove_project`: Removes a project from the Serena configuration. +# * `replace_lines`: Replaces a range of lines within a file with new content. +# * `replace_symbol_body`: Replaces the full definition of a symbol. +# * `restart_language_server`: Restarts the language server, may be necessary when edits not through Serena happen. +# * `search_for_pattern`: Performs a search for a pattern in the project. +# * `summarize_changes`: Provides instructions for summarizing the changes made to the codebase. +# * `switch_modes`: Activates modes by providing a list of their names +# * `think_about_collected_information`: Thinking tool for pondering the completeness of collected information. +# * `think_about_task_adherence`: Thinking tool for determining whether the agent is still on track with the current task. +# * `think_about_whether_you_are_done`: Thinking tool for determining whether the task is truly completed. +# * `write_memory`: Writes a named memory (for future reference) to Serena's project-specific memory store. +excluded_tools: [] + +# initial prompt for the project. It will always be given to the LLM upon activating the project +# (contrary to the memories, which are loaded on demand). 
+initial_prompt: "" + +project_name: "to-live-photo" +included_optional_tools: [] diff --git a/PROJECT_STRUCTURE.md b/PROJECT_STRUCTURE.md new file mode 100644 index 0000000..3d46a95 --- /dev/null +++ b/PROJECT_STRUCTURE.md @@ -0,0 +1,52 @@ +# 项目结构 + +> 说明:本文件用于记录项目目录/文件结构的变更。新增/删除目录或文件后需同步更新。 + +## 根目录 + +- Package.swift +- docs/ +- Sources/ +- Tests/ +- to-live-photo/ +- docs_index.md +- PROJECT_STRUCTURE.md +- TASK.md +- .DS_Store + +## docs/ + +- PRD_LivePhoto_App_V0.2_2025-12-13.md +- TECHSPEC_LivePhoto_App_V0.2_2025-12-13.md +- IXSPEC_LivePhoto_App_V0.2_2025-12-13.md +- .DS_Store + +## Sources/ + +- LivePhotoCore/ + - LivePhotoCore.swift + +## Tests/ + +- LivePhotoCoreTests/ + - LivePhotoCoreTests.swift + +## to-live-photo/ + +- to-live-photo.xcodeproj/ +- to-live-photo/ + - Assets.xcassets/ + - AppState.swift + - ContentView.swift + - to_live_photoApp.swift + - Views/ + - HomeView.swift + - EditorView.swift + - ProcessingView.swift + - ResultView.swift + - WallpaperGuideView.swift +- to-live-photoTests/ + - to_live_photoTests.swift +- to-live-photoUITests/ + - to_live_photoUITests.swift + - to_live_photoUITestsLaunchTests.swift diff --git a/Package.swift b/Package.swift new file mode 100644 index 0000000..294ed31 --- /dev/null +++ b/Package.swift @@ -0,0 +1,26 @@ +// swift-tools-version: 6.0 + +import PackageDescription + +let package = Package( + name: "ToLivePhoto", + platforms: [ + .iOS(.v18) + ], + products: [ + .library( + name: "LivePhotoCore", + targets: ["LivePhotoCore"] + ) + ], + targets: [ + .target( + name: "LivePhotoCore", + dependencies: [] + ), + .testTarget( + name: "LivePhotoCoreTests", + dependencies: ["LivePhotoCore"] + ) + ] +) diff --git a/Sources/LivePhotoCore/HEICMakerNotePatcher.swift b/Sources/LivePhotoCore/HEICMakerNotePatcher.swift new file mode 100644 index 0000000..a8286a7 --- /dev/null +++ b/Sources/LivePhotoCore/HEICMakerNotePatcher.swift @@ -0,0 +1,591 @@ +import Foundation + +/// HEIC 文件结构解析和 MakerNotes 二进制注入 +/// 用于绕过 
CGImageDestination 无法正确写入 Int64 MakerNotes 字段的限制 +public enum HEICMakerNoteError: Error, CustomStringConvertible { + case invalidHEIC(String) + case metaNotFound + case iinfNotFound + case ilocNotFound + case exifItemNotFound + case exifLocationNotFound(itemID: UInt32) + case exifPayloadTooSmall + case tiffNotFound + case invalidTIFF(String) + case exifIFDPointerNotFound + case makerNoteTagNotFound + case makerNoteOutOfRange + case makerNoteTooShort(available: Int, required: Int) + + public var description: String { + switch self { + case .invalidHEIC(let msg): return "Invalid HEIC: \(msg)" + case .metaNotFound: return "meta box not found" + case .iinfNotFound: return "iinf box not found" + case .ilocNotFound: return "iloc box not found" + case .exifItemNotFound: return "Exif item not found" + case .exifLocationNotFound(let id): return "Exif item location not found for item_ID=\(id)" + case .exifPayloadTooSmall: return "Exif payload too small" + case .tiffNotFound: return "TIFF header not found" + case .invalidTIFF(let msg): return "Invalid TIFF: \(msg)" + case .exifIFDPointerNotFound: return "ExifIFDPointer (0x8769) not found" + case .makerNoteTagNotFound: return "MakerNote tag (0x927C) not found" + case .makerNoteOutOfRange: return "MakerNote data out of range" + case .makerNoteTooShort(let available, let required): + return "MakerNote too short: available=\(available), required=\(required)" + } + } +} + +public final class HEICMakerNotePatcher { + + // MARK: - Public API + + /// 将完整的 MakerNotes 数据注入到 HEIC 文件中 + /// 采用重建 Exif item 的方式,支持扩展 MakerNote 大小 + public static func injectMakerNoteInPlace(fileURL: URL, makerNote: Data) throws { + var fileData = try Data(contentsOf: fileURL, options: [.mappedIfSafe]) + + // 解析文件结构 + let metaRange = try findTopLevelBox(type: "meta", in: fileData) + guard let metaRange else { throw HEICMakerNoteError.metaNotFound } + let meta = BoxView(data: fileData, range: metaRange) + + let metaChildrenStart = meta.contentStart + 4 + guard 
metaChildrenStart <= meta.end else { + throw HEICMakerNoteError.invalidHEIC("meta content too short") + } + + guard let iinfRange = try findChildBox(type: "iinf", within: metaChildrenStart..= 0, exifLen > 0, exifStart + exifLen <= fileData.count else { + throw HEICMakerNoteError.exifPayloadTooSmall + } + + // 读取现有 Exif item + let existingExif = fileData.subdata(in: exifStart..<(exifStart + exifLen)) + + // 构建新的 Exif item(替换 MakerNote) + let newExif = try buildNewExifItem(existingExif: existingExif, newMakerNote: makerNote) + + if newExif.count <= exifLen { + // 新 Exif 不大于原来的,直接原位替换 + var paddedExif = newExif + if paddedExif.count < exifLen { + paddedExif.append(Data(repeating: 0x00, count: exifLen - paddedExif.count)) + } + fileData.replaceSubrange(exifStart..<(exifStart + exifLen), with: paddedExif) + } else { + // 新 Exif 比原来大,需要追加到文件末尾并更新 iloc + let newExifOffset = fileData.count + fileData.append(newExif) + + // 更新 iloc 中的 offset 和 length + try updateILOC( + in: &fileData, + ilocRange: ilocRange, + ilocInfo: ilocInfo, + itemID: exifItemID, + newOffset: UInt64(newExifOffset), + newLength: UInt64(newExif.count) + ) + } + + try fileData.write(to: fileURL, options: [.atomic]) + } + + // MARK: - Build New Exif Item + + /// 构建新的 Exif item,替换 MakerNote + private static func buildNewExifItem(existingExif: Data, newMakerNote: Data) throws -> Data { + guard existingExif.count >= 10 else { + throw HEICMakerNoteError.exifPayloadTooSmall + } + + // Exif item 结构: + // 4 bytes: TIFF header offset (通常是 6,指向 "Exif\0\0" 之后) + // 4 bytes: "Exif" + // 2 bytes: \0\0 + // 然后是 TIFF 数据 + + let tiffOffsetValue = existingExif.readUInt32BE(at: 0) + let tiffStart = 4 + Int(tiffOffsetValue) + + guard tiffStart + 8 <= existingExif.count else { + throw HEICMakerNoteError.tiffNotFound + } + + // 检查字节序 + let endianMarker = existingExif.subdata(in: tiffStart..<(tiffStart + 2)) + let isBigEndian: Bool + if endianMarker == Data([0x4D, 0x4D]) { + isBigEndian = true + } else if endianMarker == 
Data([0x49, 0x49]) { + isBigEndian = false + } else { + throw HEICMakerNoteError.invalidTIFF("Invalid endian marker") + } + + // 构建新的 TIFF 数据(Big-Endian,与 Apple 设备一致) + var newTiff = Data() + + // TIFF Header: "MM" + 0x002A + IFD0 offset (8) + newTiff.append(contentsOf: [0x4D, 0x4D]) // Big-endian + newTiff.append(contentsOf: [0x00, 0x2A]) // TIFF magic + newTiff.append(contentsOf: [0x00, 0x00, 0x00, 0x08]) // IFD0 offset = 8 + + // IFD0: 1 entry (ExifIFDPointer) + // Entry count: 1 + newTiff.append(contentsOf: [0x00, 0x01]) + + // Entry: ExifIFDPointer (0x8769) + let exifIFDOffset: UInt32 = 8 + 2 + 12 + 4 // = 26 (IFD0 之后) + newTiff.append(contentsOf: [0x87, 0x69]) // tag + newTiff.append(contentsOf: [0x00, 0x04]) // type = LONG + newTiff.append(contentsOf: [0x00, 0x00, 0x00, 0x01]) // count = 1 + newTiff.appendUInt32BE(exifIFDOffset) // value = offset to Exif IFD + + // Next IFD offset: 0 (no more IFDs) + newTiff.append(contentsOf: [0x00, 0x00, 0x00, 0x00]) + + // Exif IFD: 1 entry (MakerNote) + let makerNoteDataOffset: UInt32 = exifIFDOffset + 2 + 12 + 4 // = 44 + newTiff.append(contentsOf: [0x00, 0x01]) // entry count + + // Entry: MakerNote (0x927C) + newTiff.append(contentsOf: [0x92, 0x7C]) // tag + newTiff.append(contentsOf: [0x00, 0x07]) // type = UNDEFINED + newTiff.appendUInt32BE(UInt32(newMakerNote.count)) // count + newTiff.appendUInt32BE(makerNoteDataOffset) // offset to MakerNote data + + // Next IFD offset: 0 + newTiff.append(contentsOf: [0x00, 0x00, 0x00, 0x00]) + + // MakerNote data + newTiff.append(newMakerNote) + + // 构建完整的 Exif item + var newExifItem = Data() + // 4 bytes: offset to TIFF (= 6, 跳过 "Exif\0\0") + newExifItem.append(contentsOf: [0x00, 0x00, 0x00, 0x06]) + // "Exif\0\0" + newExifItem.append(contentsOf: [0x45, 0x78, 0x69, 0x66, 0x00, 0x00]) + // TIFF data + newExifItem.append(newTiff) + + return newExifItem + } + + // MARK: - Box Parsing + + private struct BoxHeader { + let type: String + let size: Int + let headerSize: Int + let 
contentStart: Int + let end: Int + } + + private struct BoxView { + let data: Data + let range: Range + + var start: Int { range.lowerBound } + var end: Int { range.upperBound } + + var header: BoxHeader { + let size32 = Int(data.readUInt32BE(at: start)) + let type = data.readFourCC(at: start + 4) + + if size32 == 1 { + let size64 = Int(data.readUInt64BE(at: start + 8)) + return BoxHeader( + type: type, + size: size64, + headerSize: 16, + contentStart: start + 16, + end: start + size64 + ) + } else if size32 == 0 { + return BoxHeader( + type: type, + size: data.count - start, + headerSize: 8, + contentStart: start + 8, + end: data.count + ) + } else { + return BoxHeader( + type: type, + size: size32, + headerSize: 8, + contentStart: start + 8, + end: start + size32 + ) + } + } + + var contentStart: Int { header.contentStart } + } + + private static func findTopLevelBox(type: String, in data: Data) throws -> Range? { + var cursor = 0 + while cursor + 8 <= data.count { + let box = try readBoxHeader(at: cursor, data: data) + if box.type == type { return cursor.., in data: Data) throws -> Range? { + var cursor = range.lowerBound + while cursor + 8 <= range.upperBound { + let box = try readBoxHeader(at: cursor, data: data) + if box.type == type { return cursor.. 
BoxHeader { + guard offset + 8 <= data.count else { + throw HEICMakerNoteError.invalidHEIC("box header out of bounds") + } + let size32 = Int(data.readUInt32BE(at: offset)) + let type = data.readFourCC(at: offset + 4) + + if size32 == 1 { + guard offset + 16 <= data.count else { + throw HEICMakerNoteError.invalidHEIC("large size box header out of bounds") + } + let size64 = Int(data.readUInt64BE(at: offset + 8)) + guard size64 >= 16 else { + throw HEICMakerNoteError.invalidHEIC("invalid box size") + } + return BoxHeader(type: type, size: size64, headerSize: 16, contentStart: offset + 16, end: offset + size64) + } else if size32 == 0 { + return BoxHeader(type: type, size: data.count - offset, headerSize: 8, contentStart: offset + 8, end: data.count) + } else { + guard size32 >= 8 else { + throw HEICMakerNoteError.invalidHEIC("invalid box size") + } + return BoxHeader(type: type, size: size32, headerSize: 8, contentStart: offset + 8, end: offset + size32) + } + } + + // MARK: - iinf / infe Parsing + + private static func parseIINFAndFindExifItemID(data: Data, iinfRange: Range) throws -> UInt32 { + let iinf = BoxView(data: data, range: iinfRange).header + var cursor = iinf.contentStart + + guard cursor + 4 <= iinf.end else { + throw HEICMakerNoteError.invalidHEIC("iinf too short") + } + let version = data.readUInt8(at: cursor) + cursor += 4 + + let entryCount: UInt32 + if version == 0 { + entryCount = UInt32(data.readUInt16BE(at: cursor)) + cursor += 2 + } else { + entryCount = data.readUInt32BE(at: cursor) + cursor += 4 + } + + var foundExif: UInt32? 
+ var scanned: UInt32 = 0 + + while cursor + 8 <= iinf.end, scanned < entryCount { + let infe = try readBoxHeader(at: cursor, data: data) + guard infe.type == "infe" else { + cursor = infe.end + continue + } + + var p = infe.contentStart + guard p + 4 <= infe.end else { + throw HEICMakerNoteError.invalidHEIC("infe too short") + } + let infeVersion = data.readUInt8(at: p) + p += 4 + + let itemID: UInt32 + if infeVersion >= 3 { + itemID = data.readUInt32BE(at: p); p += 4 + } else { + itemID = UInt32(data.readUInt16BE(at: p)); p += 2 + } + + p += 2 // item_protection_index + + var itemType = "" + if infeVersion >= 2 { + guard p + 4 <= infe.end else { + throw HEICMakerNoteError.invalidHEIC("infe item_type out of bounds") + } + itemType = data.readFourCC(at: p) + p += 4 + } + + if itemType == "Exif" { + foundExif = itemID + break + } + + cursor = infe.end + scanned += 1 + } + + guard let exifID = foundExif else { + throw HEICMakerNoteError.exifItemNotFound + } + return exifID + } + + // MARK: - iloc Parsing + + private struct ItemLocation { + let offset: UInt64 + let length: UInt64 + } + + private struct ILOCInfo { + let version: UInt8 + let offsetSize: Int + let lengthSize: Int + let baseOffsetSize: Int + let indexSize: Int + let itemEntries: [UInt32: ILOCItemEntry] + } + + private struct ILOCItemEntry { + let itemID: UInt32 + let extentOffsetPosition: Int // 文件中 extent_offset 字段的位置 + let extentLengthPosition: Int // 文件中 extent_length 字段的位置 + } + + private static func parseILOCWithInfo(data: Data, ilocRange: Range) throws -> ([UInt32: ItemLocation], ILOCInfo) { + let iloc = BoxView(data: data, range: ilocRange).header + var cursor = iloc.contentStart + + guard cursor + 4 <= iloc.end else { + throw HEICMakerNoteError.invalidHEIC("iloc too short") + } + let version = data.readUInt8(at: cursor) + cursor += 4 + + guard cursor + 2 <= iloc.end else { + throw HEICMakerNoteError.invalidHEIC("iloc header out of bounds") + } + let offsetSize = Int(data.readUInt8(at: cursor) >> 
4) + let lengthSize = Int(data.readUInt8(at: cursor) & 0x0F) + cursor += 1 + + let baseOffsetSize = Int(data.readUInt8(at: cursor) >> 4) + let indexSize = Int(data.readUInt8(at: cursor) & 0x0F) + cursor += 1 + + let itemCount: UInt32 + if version < 2 { + guard cursor + 2 <= iloc.end else { + throw HEICMakerNoteError.invalidHEIC("iloc item_count out of bounds") + } + itemCount = UInt32(data.readUInt16BE(at: cursor)) + cursor += 2 + } else { + guard cursor + 4 <= iloc.end else { + throw HEICMakerNoteError.invalidHEIC("iloc item_count out of bounds") + } + itemCount = data.readUInt32BE(at: cursor) + cursor += 4 + } + + var locations: [UInt32: ItemLocation] = [:] + var itemEntries: [UInt32: ILOCItemEntry] = [:] + + for _ in 0.. 0 { + guard cursor + indexSize <= iloc.end else { break } + cursor += indexSize + } + + guard cursor + offsetSize + lengthSize <= iloc.end else { break } + + if e == 0 { + extentOffsetPos = cursor + } + let extentOffset = try data.readUIntBE(at: cursor, size: offsetSize) + cursor += offsetSize + + if e == 0 { + extentLengthPos = cursor + } + let extentLength = try data.readUIntBE(at: cursor, size: lengthSize) + cursor += lengthSize + + if e == 0 { + firstExtentOffset = extentOffset + firstExtentLength = extentLength + } + } + + let fileOffset = baseOffset + firstExtentOffset + if firstExtentLength > 0 { + locations[itemID] = ItemLocation(offset: fileOffset, length: firstExtentLength) + itemEntries[itemID] = ILOCItemEntry( + itemID: itemID, + extentOffsetPosition: extentOffsetPos, + extentLengthPosition: extentLengthPos + ) + } + } + + let info = ILOCInfo( + version: version, + offsetSize: offsetSize, + lengthSize: lengthSize, + baseOffsetSize: baseOffsetSize, + indexSize: indexSize, + itemEntries: itemEntries + ) + + return (locations, info) + } + + private static func updateILOC( + in fileData: inout Data, + ilocRange: Range, + ilocInfo: ILOCInfo, + itemID: UInt32, + newOffset: UInt64, + newLength: UInt64 + ) throws { + guard let entry = 
ilocInfo.itemEntries[itemID] else { + throw HEICMakerNoteError.exifLocationNotFound(itemID: itemID) + } + + // 写入新的 offset + fileData.writeUIntBE(at: entry.extentOffsetPosition, value: newOffset, size: ilocInfo.offsetSize) + + // 写入新的 length + fileData.writeUIntBE(at: entry.extentLengthPosition, value: newLength, size: ilocInfo.lengthSize) + } + + // MARK: - EXIF/TIFF Patching + + enum Endian { + case little + case big + } +} + +// MARK: - Data Extensions + +private extension Data { + func readUInt8(at offset: Int) -> UInt8 { + self[self.index(self.startIndex, offsetBy: offset)] + } + + func readUInt16BE(at offset: Int) -> UInt16 { + let b0 = UInt16(readUInt8(at: offset)) + let b1 = UInt16(readUInt8(at: offset + 1)) + return (b0 << 8) | b1 + } + + func readUInt32BE(at offset: Int) -> UInt32 { + let b0 = UInt32(readUInt8(at: offset)) + let b1 = UInt32(readUInt8(at: offset + 1)) + let b2 = UInt32(readUInt8(at: offset + 2)) + let b3 = UInt32(readUInt8(at: offset + 3)) + return (b0 << 24) | (b1 << 16) | (b2 << 8) | b3 + } + + func readUInt64BE(at offset: Int) -> UInt64 { + var v: UInt64 = 0 + for i in 0..<8 { + v = (v << 8) | UInt64(readUInt8(at: offset + i)) + } + return v + } + + func readFourCC(at offset: Int) -> String { + let bytes = self.subdata(in: offset..<(offset + 4)) + return String(bytes: bytes, encoding: .ascii) ?? "????" 
+ } + + func readUIntBE(at offset: Int, size: Int) throws -> UInt64 { + if size == 0 { return 0 } + guard offset + size <= count else { + throw HEICMakerNoteError.invalidHEIC("Variable-length integer out of bounds") + } + var v: UInt64 = 0 + for i in 0..> 24) & 0xFF)) + append(UInt8((value >> 16) & 0xFF)) + append(UInt8((value >> 8) & 0xFF)) + append(UInt8(value & 0xFF)) + } + + mutating func writeUIntBE(at offset: Int, value: UInt64, size: Int) { + for i in 0..> shift) & 0xFF) + } + } +} diff --git a/Sources/LivePhotoCore/LivePhotoCore.swift b/Sources/LivePhotoCore/LivePhotoCore.swift new file mode 100644 index 0000000..e5183e5 --- /dev/null +++ b/Sources/LivePhotoCore/LivePhotoCore.swift @@ -0,0 +1,1104 @@ +import AVFoundation +import Foundation +import ImageIO +import os +import Photos +import UIKit +import UniformTypeIdentifiers + +public enum LivePhotoBuildStage: String, Codable, Sendable { + case normalize + case extractKeyFrame + case writePhotoMetadata + case writeVideoMetadata + case saveToAlbum + case validate +} + +public struct LivePhotoBuildProgress: Sendable { + public var stage: LivePhotoBuildStage + public var fraction: Double + + public init(stage: LivePhotoBuildStage, fraction: Double) { + self.stage = stage + self.fraction = fraction + } +} + +public enum WorkStatus: String, Codable, Sendable { + case idle + case editing + case processing + case success + case failed +} + +public struct SourceRef: Codable, Sendable, Hashable { + public var phAssetLocalIdentifier: String? + public var fileURL: URL? 
+ + public init(phAssetLocalIdentifier: String) { + self.phAssetLocalIdentifier = phAssetLocalIdentifier + self.fileURL = nil + } + + public init(fileURL: URL) { + self.phAssetLocalIdentifier = nil + self.fileURL = fileURL + } +} + +public enum AudioPolicy: String, Codable, Sendable { + case keep + case remove +} + +public enum CodecPolicy: String, Codable, Sendable { + case passthrough + case fallbackH264 +} + +public enum HDRPolicy: String, Codable, Sendable { + case keep + case toneMapToSDR +} + +public struct ExportParams: Codable, Sendable, Hashable { + public var trimStart: Double + public var trimEnd: Double + public var keyFrameTime: Double + public var audioPolicy: AudioPolicy + public var codecPolicy: CodecPolicy + public var hdrPolicy: HDRPolicy + public var maxDimension: Int + + public init( + trimStart: Double = 0, + trimEnd: Double = 1.0, + keyFrameTime: Double = 0.5, + audioPolicy: AudioPolicy = .keep, + codecPolicy: CodecPolicy = .fallbackH264, + hdrPolicy: HDRPolicy = .toneMapToSDR, + maxDimension: Int = 1920 + ) { + self.trimStart = trimStart + self.trimEnd = trimEnd + self.keyFrameTime = keyFrameTime + self.audioPolicy = audioPolicy + self.codecPolicy = codecPolicy + self.hdrPolicy = hdrPolicy + self.maxDimension = maxDimension + } +} + +public struct AppError: Error, Codable, Sendable, Hashable { + public var code: String + public var stage: LivePhotoBuildStage? + public var message: String + public var underlyingErrorDescription: String? + public var suggestedActions: [String] + + public init( + code: String, + stage: LivePhotoBuildStage? = nil, + message: String, + underlyingErrorDescription: String? 
= nil, + suggestedActions: [String] = [] + ) { + self.code = code + self.stage = stage + self.message = message + self.underlyingErrorDescription = underlyingErrorDescription + self.suggestedActions = suggestedActions + } +} + +public struct WorkItem: Identifiable, Codable, Sendable, Hashable { + public var id: UUID + public var createdAt: Date + public var sourceVideo: SourceRef + public var coverImage: SourceRef? + public var exportParams: ExportParams + public var status: WorkStatus + public var resultAssetId: String? + public var cacheDir: URL + public var error: AppError? + + public init( + id: UUID = UUID(), + createdAt: Date = Date(), + sourceVideo: SourceRef, + coverImage: SourceRef? = nil, + exportParams: ExportParams = ExportParams(), + status: WorkStatus = .idle, + resultAssetId: String? = nil, + cacheDir: URL, + error: AppError? = nil + ) { + self.id = id + self.createdAt = createdAt + self.sourceVideo = sourceVideo + self.coverImage = coverImage + self.exportParams = exportParams + self.status = status + self.resultAssetId = resultAssetId + self.cacheDir = cacheDir + self.error = error + } +} + +public struct LivePhotoWorkPaths: Sendable, Hashable { + public var workDir: URL + public var photoURL: URL + public var pairedVideoURL: URL + public var logURL: URL + + public init(workDir: URL, photoURL: URL, pairedVideoURL: URL, logURL: URL) { + self.workDir = workDir + self.photoURL = photoURL + self.pairedVideoURL = pairedVideoURL + self.logURL = logURL + } +} + +public struct CacheManager: Sendable { + public var baseDirectory: URL + + public init(baseDirectory: URL? 
= nil) throws { + if let baseDirectory { + self.baseDirectory = baseDirectory + } else { + let caches = try FileManager.default.url(for: .cachesDirectory, in: .userDomainMask, appropriateFor: nil, create: true) + self.baseDirectory = caches.appendingPathComponent("LivePhotoBuilder", isDirectory: true) + } + try FileManager.default.createDirectory(at: self.baseDirectory, withIntermediateDirectories: true) + } + + public func makeWorkPaths(workId: UUID) throws -> LivePhotoWorkPaths { + let workDir = baseDirectory.appendingPathComponent(workId.uuidString, isDirectory: true) + try FileManager.default.createDirectory(at: workDir, withIntermediateDirectories: true) + return LivePhotoWorkPaths( + workDir: workDir, + photoURL: workDir.appendingPathComponent("photo").appendingPathExtension("heic"), + pairedVideoURL: workDir.appendingPathComponent("paired").appendingPathExtension("mov"), + logURL: workDir.appendingPathComponent("builder").appendingPathExtension("log") + ) + } + + public func clearWork(workId: UUID) throws { + let workDir = baseDirectory.appendingPathComponent(workId.uuidString, isDirectory: true) + if FileManager.default.fileExists(atPath: workDir.path) { + try FileManager.default.removeItem(at: workDir) + } + } +} + +public struct LivePhotoLogger: Sendable { + private var logger: os.Logger + + public init(subsystem: String = "ToLivePhoto", category: String = "LivePhotoCore") { + self.logger = os.Logger(subsystem: subsystem, category: category) + } + + public func info(_ message: String) { + logger.info("\(message, privacy: .public)") + } + + public func error(_ message: String) { + logger.error("\(message, privacy: .public)") + } +} + +public actor AlbumWriter { + public init() {} + + public func requestAddOnlyAuthorization() async -> PHAuthorizationStatus { + await withCheckedContinuation { continuation in + PHPhotoLibrary.requestAuthorization(for: .addOnly) { status in + continuation.resume(returning: status) + } + } + } + + public func 
saveLivePhoto(photoURL: URL, pairedVideoURL: URL, shouldMoveFiles: Bool = false) async throws -> String { + try await withCheckedThrowingContinuation { continuation in + var localIdentifier: String? + + PHPhotoLibrary.shared().performChanges({ + let request = PHAssetCreationRequest.forAsset() + + let photoOptions = PHAssetResourceCreationOptions() + photoOptions.shouldMoveFile = shouldMoveFiles + photoOptions.uniformTypeIdentifier = UTType.heic.identifier + + let videoOptions = PHAssetResourceCreationOptions() + videoOptions.shouldMoveFile = shouldMoveFiles + videoOptions.uniformTypeIdentifier = UTType.quickTimeMovie.identifier + + request.addResource(with: .photo, fileURL: photoURL, options: photoOptions) + request.addResource(with: .pairedVideo, fileURL: pairedVideoURL, options: videoOptions) + + localIdentifier = request.placeholderForCreatedAsset?.localIdentifier + }, completionHandler: { success, error in + if let error { + continuation.resume(throwing: AppError(code: "LPB-401", stage: .saveToAlbum, message: "保存到相册失败", underlyingErrorDescription: error.localizedDescription, suggestedActions: ["允许添加到相册权限", "稍后重试"])) + return + } + + guard success, let id = localIdentifier else { + continuation.resume(throwing: AppError(code: "LPB-401", stage: .saveToAlbum, message: "保存到相册失败", underlyingErrorDescription: nil, suggestedActions: ["允许添加到相册权限", "稍后重试"])) + return + } + + continuation.resume(returning: id) + }) + } + } +} + +public actor LivePhotoValidator { + public init() {} + + public func isLivePhotoAsset(localIdentifier: String) async -> Bool { + let result = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil) + guard let asset = result.firstObject else { + return false + } + return asset.mediaSubtypes.contains(.photoLive) + } + + public func requestLivePhoto(localIdentifier: String) async -> PHLivePhoto? 
{ + let result = PHAsset.fetchAssets(withLocalIdentifiers: [localIdentifier], options: nil) + guard let asset = result.firstObject else { + return nil + } + + return await withCheckedContinuation { continuation in + PHImageManager.default().requestLivePhoto( + for: asset, + targetSize: CGSize(width: 1, height: 1), + contentMode: .aspectFit, + options: nil + ) { livePhoto, _ in + continuation.resume(returning: livePhoto) + } + } + } + + public func requestLivePhoto(photoURL: URL, pairedVideoURL: URL) async -> PHLivePhoto? { + await withCheckedContinuation { continuation in + var hasResumed = false + let requestID = PHLivePhoto.request( + withResourceFileURLs: [pairedVideoURL, photoURL], + placeholderImage: nil, + targetSize: .zero, + contentMode: .aspectFit + ) { livePhoto, info in + // 确保只 resume 一次 + guard !hasResumed else { return } + + // 如果是降级版本,等待完整版本 + if let isDegraded = info[PHLivePhotoInfoIsDegradedKey] as? Bool, isDegraded { + return + } + + // 检查是否有错误或被取消 + if let error = info[PHLivePhotoInfoErrorKey] as? Error { + print("[LivePhotoValidator] requestLivePhoto error: \(error.localizedDescription)") + hasResumed = true + continuation.resume(returning: nil) + return + } + + if let cancelled = info[PHLivePhotoInfoCancelledKey] as? 
Bool, cancelled { + print("[LivePhotoValidator] requestLivePhoto cancelled") + hasResumed = true + continuation.resume(returning: nil) + return + } + + hasResumed = true + continuation.resume(returning: livePhoto) + } + + // 添加超时保护,防止无限等待 + DispatchQueue.main.asyncAfter(deadline: .now() + 10) { + guard !hasResumed else { return } + print("[LivePhotoValidator] requestLivePhoto timeout, requestID: \(requestID)") + PHLivePhoto.cancelRequest(withRequestID: requestID) + hasResumed = true + continuation.resume(returning: nil) + } + } + } + + public func canCreateLivePhotoFromResources(photoURL: URL, pairedVideoURL: URL) async -> Bool { + await requestLivePhoto(photoURL: photoURL, pairedVideoURL: pairedVideoURL) != nil + } +} + +public struct LivePhotoBuildOutput: Sendable, Hashable { + public var workId: UUID + public var assetIdentifier: String + public var pairedImageURL: URL + public var pairedVideoURL: URL + + public init(workId: UUID, assetIdentifier: String, pairedImageURL: URL, pairedVideoURL: URL) { + self.workId = workId + self.assetIdentifier = assetIdentifier + self.pairedImageURL = pairedImageURL + self.pairedVideoURL = pairedVideoURL + } + + /// 将生成的文件导出到文档目录(方便调试) + public func exportToDocuments() throws -> (photoURL: URL, videoURL: URL) { + let docs = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first! + let photoDestURL = docs.appendingPathComponent("debug_photo.heic") + let videoDestURL = docs.appendingPathComponent("debug_video.mov") + + // 删除旧文件 + try? FileManager.default.removeItem(at: photoDestURL) + try? FileManager.default.removeItem(at: videoDestURL) + + // 复制新文件 + try FileManager.default.copyItem(at: pairedImageURL, to: photoDestURL) + try FileManager.default.copyItem(at: pairedVideoURL, to: videoDestURL) + + return (photoDestURL, videoDestURL) + } +} + +public actor LivePhotoBuilder { + private let cacheManager: CacheManager + private let logger: LivePhotoLogger + + public init(cacheManager: CacheManager? 
= nil, logger: LivePhotoLogger = LivePhotoLogger()) throws { + self.cacheManager = try cacheManager ?? CacheManager() + self.logger = logger + } + + public func buildResources( + workId: UUID = UUID(), + sourceVideoURL: URL, + coverImageURL: URL? = nil, + exportParams: ExportParams = ExportParams(), + progress: (@Sendable (LivePhotoBuildProgress) -> Void)? = nil + ) async throws -> LivePhotoBuildOutput { + let assetIdentifier = UUID().uuidString + let paths = try cacheManager.makeWorkPaths(workId: workId) + + progress?(LivePhotoBuildProgress(stage: .normalize, fraction: 0)) + let trimmedVideoURL = try await trimVideo( + sourceURL: sourceVideoURL, + trimStart: exportParams.trimStart, + trimEnd: exportParams.trimEnd, + destinationURL: paths.workDir.appendingPathComponent("trimmed.mov") + ) + + let trimmedDuration = exportParams.trimEnd - exportParams.trimStart + let relativeKeyFrameTime = min(max(0, exportParams.keyFrameTime - exportParams.trimStart), trimmedDuration) + + // 计算 LivePhotoVideoIndex(需要视频的帧率信息) + let nominalFrameRateForIndex: Float = { + let asset = AVURLAsset(url: trimmedVideoURL) + let rate = asset.tracks(withMediaType: .video).first?.nominalFrameRate ?? 30 + return (rate.isFinite && rate > 0) ? 
rate : 30 + }() + let livePhotoVideoIndex = Self.makeLivePhotoVideoIndex( + stillImageTimeSeconds: relativeKeyFrameTime, + nominalFrameRate: nominalFrameRateForIndex + ) + + progress?(LivePhotoBuildProgress(stage: .extractKeyFrame, fraction: 0)) + let keyPhotoURL = try await resolveKeyPhotoURL( + videoURL: trimmedVideoURL, + coverImageURL: coverImageURL, + keyFrameTime: relativeKeyFrameTime, + destinationURL: paths.workDir.appendingPathComponent("keyPhoto").appendingPathExtension("heic") + ) + + progress?(LivePhotoBuildProgress(stage: .writePhotoMetadata, fraction: 0)) + guard let pairedImageURL = addAssetID( + assetIdentifier, + toImage: keyPhotoURL, + saveTo: paths.photoURL, + livePhotoVideoIndex: livePhotoVideoIndex + ) else { + throw AppError(code: "LPB-201", stage: .writePhotoMetadata, message: "封面生成失败", underlyingErrorDescription: nil, suggestedActions: ["缩短时长", "降低分辨率", "重试"]) + } + + progress?(LivePhotoBuildProgress(stage: .writeVideoMetadata, fraction: 0)) + let pairedVideoURL = try await addAssetID(assetIdentifier, toVideo: trimmedVideoURL, saveTo: paths.pairedVideoURL, stillImageTimeSeconds: relativeKeyFrameTime, progress: { p in + progress?(LivePhotoBuildProgress(stage: .writeVideoMetadata, fraction: p)) + }) + + logger.info("Generated Live Photo files:") + logger.info(" Photo: \(pairedImageURL.path)") + logger.info(" Video: \(pairedVideoURL.path)") + logger.info(" AssetIdentifier: \(assetIdentifier)") + + return LivePhotoBuildOutput(workId: workId, assetIdentifier: assetIdentifier, pairedImageURL: pairedImageURL, pairedVideoURL: pairedVideoURL) + } + + private func trimVideo(sourceURL: URL, trimStart: Double, trimEnd: Double, destinationURL: URL) async throws -> URL { + let asset = AVURLAsset(url: sourceURL) + + let duration = try await asset.load(.duration).seconds + let safeTrimStart = max(0, min(trimStart, duration)) + let safeTrimEnd = max(safeTrimStart, min(trimEnd, duration)) + + if safeTrimEnd - safeTrimStart < 0.1 { + throw AppError(code: 
"LPB-101", stage: .normalize, message: "视频时长不足", suggestedActions: ["选择更长的视频"]) + } + + let startTime = CMTime(seconds: safeTrimStart, preferredTimescale: 600) + let endTime = CMTime(seconds: safeTrimEnd, preferredTimescale: 600) + let timeRange = CMTimeRange(start: startTime, end: endTime) + + if FileManager.default.fileExists(atPath: destinationURL.path) { + try FileManager.default.removeItem(at: destinationURL) + } + + let composition = AVMutableComposition() + + guard let videoTrack = try await asset.loadTracks(withMediaType: .video).first else { + throw AppError(code: "LPB-101", stage: .normalize, message: "视频轨道不存在", suggestedActions: ["选择其他视频"]) + } + + let compositionVideoTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid) + try compositionVideoTrack?.insertTimeRange(timeRange, of: videoTrack, at: .zero) + + if let audioTrack = try? await asset.loadTracks(withMediaType: .audio).first { + let compositionAudioTrack = composition.addMutableTrack(withMediaType: .audio, preferredTrackID: kCMPersistentTrackID_Invalid) + try? 
compositionAudioTrack?.insertTimeRange(timeRange, of: audioTrack, at: .zero) + } + + let transform = try await videoTrack.load(.preferredTransform) + + // 保持原始视频的 transform,确保方向正确 + compositionVideoTrack?.preferredTransform = transform + + // 使用 Passthrough 预设保持原始质量和尺寸 + guard let exportSession = AVAssetExportSession(asset: composition, presetName: AVAssetExportPresetPassthrough) else { + throw AppError(code: "LPB-101", stage: .normalize, message: "无法创建导出会话", suggestedActions: ["重试"]) + } + + exportSession.outputURL = destinationURL + exportSession.outputFileType = .mov + + await exportSession.export() + + guard exportSession.status == .completed else { + throw AppError(code: "LPB-101", stage: .normalize, message: "视频裁剪失败", underlyingErrorDescription: exportSession.error?.localizedDescription, suggestedActions: ["缩短时长", "重试"]) + } + + return destinationURL + } + + private func resolveKeyPhotoURL( + videoURL: URL, + coverImageURL: URL?, + keyFrameTime: Double, + destinationURL: URL + ) async throws -> URL { + // 最大分辨率限制(对标竞品 1080p) + let maxDimension = 1920 + + // 内部函数:将 CGImage 写入 HEIC 文件 + func writeHEIC(_ image: CGImage, to url: URL) throws { + guard let dest = CGImageDestinationCreateWithURL(url as CFURL, UTType.heic.identifier as CFString, 1, nil) else { + throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "写入封面失败", underlyingErrorDescription: "无法创建 HEIC 写入器", suggestedActions: ["重试"]) + } + let props: [String: Any] = [ + kCGImageDestinationLossyCompressionQuality as String: 0.9 + ] + CGImageDestinationAddImage(dest, image, props as CFDictionary) + guard CGImageDestinationFinalize(dest) else { + throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "写入封面失败", underlyingErrorDescription: "HEIC 写入失败", suggestedActions: ["重试"]) + } + } + + // 内部函数:缩放图像 + func scaleImage(_ image: CGImage, maxDim: Int) -> CGImage { + let width = image.width + let height = image.height + let maxSide = max(width, height) + if maxSide <= maxDim { return image 
} + + let scale = CGFloat(maxDim) / CGFloat(maxSide) + let newWidth = Int(CGFloat(width) * scale) + let newHeight = Int(CGFloat(height) * scale) + + guard let context = CGContext( + data: nil, width: newWidth, height: newHeight, + bitsPerComponent: 8, bytesPerRow: 0, + space: CGColorSpaceCreateDeviceRGB(), + bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue + ) else { return image } + + context.interpolationQuality = .high + context.draw(image, in: CGRect(x: 0, y: 0, width: newWidth, height: newHeight)) + return context.makeImage() ?? image + } + + // 如果用户提供了封面图 + if let coverImageURL { + guard let src = CGImageSourceCreateWithURL(coverImageURL as CFURL, nil), + let img = CGImageSourceCreateImageAtIndex(src, 0, nil) else { + throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "封面读取失败", underlyingErrorDescription: nil, suggestedActions: ["更换封面图", "重试"]) + } + let scaledImg = scaleImage(img, maxDim: maxDimension) + try writeHEIC(scaledImg, to: destinationURL) + return destinationURL + } + + // 从视频抽帧 + let asset = AVURLAsset(url: videoURL) + let imageGenerator = AVAssetImageGenerator(asset: asset) + imageGenerator.appliesPreferredTrackTransform = true + imageGenerator.requestedTimeToleranceAfter = CMTime(value: 1, timescale: 100) + imageGenerator.requestedTimeToleranceBefore = CMTime(value: 1, timescale: 100) + // 设置最大尺寸,让 AVAssetImageGenerator 自动缩放 + imageGenerator.maximumSize = CGSize(width: maxDimension, height: maxDimension) + + let safeSeconds = max(0, min(keyFrameTime, max(0, asset.duration.seconds - 0.1))) + let time = CMTime(seconds: safeSeconds, preferredTimescale: asset.duration.timescale) + + let cgImage: CGImage + do { + cgImage = try imageGenerator.copyCGImage(at: time, actualTime: nil) + } catch { + throw AppError(code: "LPB-201", stage: .extractKeyFrame, message: "抽帧失败", underlyingErrorDescription: error.localizedDescription, suggestedActions: ["缩短时长", "降低分辨率", "重试"]) + } + + try writeHEIC(cgImage, to: destinationURL) + return 
destinationURL + } + + /// 计算 LivePhotoVideoIndex:逆向工程推测为 Float32 帧索引的 bitPattern + private static func makeLivePhotoVideoIndex(stillImageTimeSeconds: Double, nominalFrameRate: Float) -> Int64 { + let safeFrameRate: Float = (nominalFrameRate.isFinite && nominalFrameRate > 0) ? nominalFrameRate : 30 + let frameIndex = Float(stillImageTimeSeconds) * safeFrameRate + return Int64(frameIndex.bitPattern) + } + + private func addAssetID( + _ assetIdentifier: String, + toImage imageURL: URL, + saveTo destinationURL: URL, + livePhotoVideoIndex: Int64 + ) -> URL? { + let useHEIC = true + let imageType = useHEIC ? UTType.heic.identifier : UTType.jpeg.identifier + + guard let imageDestination = CGImageDestinationCreateWithURL(destinationURL as CFURL, imageType as CFString, 1, nil), + let imageSource = CGImageSourceCreateWithURL(imageURL as CFURL, nil), + let imageRef = CGImageSourceCreateImageAtIndex(imageSource, 0, nil) else { + return nil + } + + var imageProperties = (CGImageSourceCopyPropertiesAtIndex(imageSource, 0, nil) as? [String: Any]) ?? [:] + + // 获取图像尺寸 + let width = imageRef.width + let height = imageRef.height + + // 添加 TIFF (IFD0) 标准字段 - 对标竞品 + var tiffDict = (imageProperties[kCGImagePropertyTIFFDictionary as String] as? [String: Any]) ?? [:] + tiffDict[kCGImagePropertyTIFFOrientation as String] = 1 // Horizontal (normal) + tiffDict[kCGImagePropertyTIFFXResolution as String] = 72 + tiffDict[kCGImagePropertyTIFFYResolution as String] = 72 + tiffDict[kCGImagePropertyTIFFResolutionUnit as String] = 2 // inches + // 移除 Tile 字段 - 竞品没有这些字段 + tiffDict.removeValue(forKey: kCGImagePropertyTIFFTileWidth as String) + tiffDict.removeValue(forKey: kCGImagePropertyTIFFTileLength as String) + imageProperties[kCGImagePropertyTIFFDictionary as String] = tiffDict + + // 添加 EXIF 标准字段 - 对标竞品 + var exifDict = (imageProperties[kCGImagePropertyExifDictionary as String] as? [String: Any]) ?? 
[:] + exifDict[kCGImagePropertyExifVersion as String] = [2, 2, 1] // 0221 + exifDict[kCGImagePropertyExifPixelXDimension as String] = width + exifDict[kCGImagePropertyExifPixelYDimension as String] = height + imageProperties[kCGImagePropertyExifDictionary as String] = exifDict + + // 简化方案:只设置 ContentIdentifier,不注入复杂的 MakerNotes + // 竞品也只使用 ContentIdentifier,这足以让 Photos 识别 Live Photo + let assetIdentifierKey = "17" // Content Identifier + + var makerAppleDict: [String: Any] = [:] + makerAppleDict[assetIdentifierKey] = assetIdentifier + imageProperties[kCGImagePropertyMakerAppleDictionary as String] = makerAppleDict + + CGImageDestinationAddImage(imageDestination, imageRef, imageProperties as CFDictionary) + + guard CGImageDestinationFinalize(imageDestination) else { + return nil + } + + logger.info("Created HEIC with ContentIdentifier: \(assetIdentifier)") + return destinationURL + } + + private func addAssetID( + _ assetIdentifier: String, + toVideo videoURL: URL, + saveTo destinationURL: URL, + stillImageTimeSeconds: Double, + progress: @Sendable @escaping (Double) -> Void + ) async throws -> URL { + try await withCheckedThrowingContinuation { continuation in + let queue = DispatchQueue(label: "LivePhotoCore.VideoPairing") + queue.async { + do { + if FileManager.default.fileExists(atPath: destinationURL.path) { + try FileManager.default.removeItem(at: destinationURL) + } + + let videoAsset = AVURLAsset(url: videoURL) + guard let videoTrack = videoAsset.tracks(withMediaType: .video).first else { + continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: "缺少视频轨", suggestedActions: ["更换一个视频", "重试"])) + return + } + + let durationSeconds = max(0.001, videoAsset.duration.seconds) + let nominalFrameRate = videoTrack.nominalFrameRate > 0 ? 
videoTrack.nominalFrameRate : 30 + let frameCount = max(1, Int(durationSeconds * Double(nominalFrameRate))) + + // 关键修复:竞品视频没有 rotation,是烘焙到正向画面的 + // 计算应用 transform 后的实际尺寸 + let transform = videoTrack.preferredTransform + let naturalSize = videoTrack.naturalSize + + // 判断是否有 90度/270度 旋转(需要交换宽高) + let isRotated90or270 = abs(transform.b) == 1.0 && abs(transform.c) == 1.0 + let transformedSize: CGSize + if isRotated90or270 { + transformedSize = CGSize(width: naturalSize.height, height: naturalSize.width) + } else { + transformedSize = naturalSize + } + + // 计算输出尺寸,限制最大边为 1920(对标竞品 1080p) + let maxDimension: CGFloat = 1920 + let maxSide = max(transformedSize.width, transformedSize.height) + let scale: CGFloat = maxSide > maxDimension ? maxDimension / maxSide : 1.0 + let outputWidth = Int(transformedSize.width * scale) + let outputHeight = Int(transformedSize.height * scale) + + let assetWriter = try AVAssetWriter(outputURL: destinationURL, fileType: .mov) + let videoReader = try AVAssetReader(asset: videoAsset) + + let videoReaderSettings: [String: Any] = [ + kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA as UInt32) + ] + let videoReaderOutput = AVAssetReaderTrackOutput(track: videoTrack, outputSettings: videoReaderSettings) + videoReader.add(videoReaderOutput) + + // 使用 HEVC (H.265) 编码 - iPhone 原生 Live Photo 使用的格式 + let videoWriterInput = AVAssetWriterInput( + mediaType: .video, + outputSettings: [ + AVVideoCodecKey: AVVideoCodecType.hevc, + AVVideoWidthKey: Int(naturalSize.width * scale), + AVVideoHeightKey: Int(naturalSize.height * scale), + AVVideoCompressionPropertiesKey: [ + AVVideoAverageBitRateKey: 8_000_000, + AVVideoQualityKey: 0.8 + ] + ] + ) + // 保留原始 transform + videoWriterInput.transform = transform + videoWriterInput.expectsMediaDataInRealTime = false + assetWriter.add(videoWriterInput) + + var audioReader: AVAssetReader? + var audioReaderOutput: AVAssetReaderOutput? + var audioWriterInput: AVAssetWriterInput? 
+ + if let audioTrack = videoAsset.tracks(withMediaType: .audio).first { + let _audioReader = try AVAssetReader(asset: videoAsset) + let _audioReaderOutput = AVAssetReaderTrackOutput(track: audioTrack, outputSettings: nil) + _audioReader.add(_audioReaderOutput) + audioReader = _audioReader + audioReaderOutput = _audioReaderOutput + + let _audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil) + _audioWriterInput.expectsMediaDataInRealTime = false + assetWriter.add(_audioWriterInput) + audioWriterInput = _audioWriterInput + } + + let assetIdentifierMetadata = Self.metadataForAssetID(assetIdentifier) + let stillImageTimeMetadataAdapter = Self.createMetadataAdaptorForStillImageTime() + + // 只写入必要的 Content Identifier + assetWriter.metadata = [assetIdentifierMetadata] + + // 只添加 still-image-time track(回退到稳定版本,移除 live-photo-info) + assetWriter.add(stillImageTimeMetadataAdapter.assetWriterInput) + + assetWriter.startWriting() + assetWriter.startSession(atSourceTime: .zero) + + // still-image-time track: 只写入一个 item(回退到稳定版本) + let stillTimeRange = videoAsset.makeStillImageTimeRange(seconds: stillImageTimeSeconds, frameCountHint: frameCount) + stillImageTimeMetadataAdapter.append(AVTimedMetadataGroup( + items: [Self.metadataItemForStillImageTime()], + timeRange: stillTimeRange + )) + + var writingVideoFinished = false + var writingAudioFinished = audioReader == nil + var currentFrameCount = 0 + + func didCompleteWriting() { + guard writingAudioFinished && writingVideoFinished else { return } + assetWriter.finishWriting { + if assetWriter.status == .completed { + continuation.resume(returning: destinationURL) + } else { + continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: assetWriter.error?.localizedDescription, suggestedActions: ["切换到 H.264 兼容导出", "关闭音频", "重试"])) + } + } + } + + if videoReader.startReading() { + videoWriterInput.requestMediaDataWhenReady(on: 
DispatchQueue(label: "LivePhotoCore.VideoWriterInput")) { + while videoWriterInput.isReadyForMoreMediaData { + guard videoReader.status == .reading else { + videoWriterInput.markAsFinished() + writingVideoFinished = true + didCompleteWriting() + break + } + if let sampleBuffer = videoReaderOutput.copyNextSampleBuffer() { + currentFrameCount += 1 + let pct = Double(currentFrameCount) / Double(frameCount) + progress(pct) + + // 写入视频帧 + if !videoWriterInput.append(sampleBuffer) { + videoReader.cancelReading() + } + } else { + videoWriterInput.markAsFinished() + writingVideoFinished = true + didCompleteWriting() + break + } + } + } + } else { + writingVideoFinished = true + didCompleteWriting() + } + + if let audioReader, let audioWriterInput, audioReader.startReading() { + audioWriterInput.requestMediaDataWhenReady(on: DispatchQueue(label: "LivePhotoCore.AudioWriterInput")) { + while audioWriterInput.isReadyForMoreMediaData { + guard audioReader.status == .reading else { + audioWriterInput.markAsFinished() + writingAudioFinished = true + didCompleteWriting() + return + } + guard let sampleBuffer = audioReaderOutput?.copyNextSampleBuffer() else { + audioWriterInput.markAsFinished() + writingAudioFinished = true + didCompleteWriting() + return + } + _ = audioWriterInput.append(sampleBuffer) + } + } + } else { + writingAudioFinished = true + didCompleteWriting() + } + } catch { + continuation.resume(throwing: AppError(code: "LPB-301", stage: .writeVideoMetadata, message: "视频处理失败", underlyingErrorDescription: error.localizedDescription, suggestedActions: ["切换到 H.264 兼容导出", "关闭音频", "重试"])) + } + } + } + } + + private static func metadataForAssetID(_ assetIdentifier: String) -> AVMetadataItem { + let item = AVMutableMetadataItem() + item.key = "com.apple.quicktime.content.identifier" as (NSCopying & NSObjectProtocol) + item.keySpace = AVMetadataKeySpace(rawValue: "mdta") + item.value = assetIdentifier as (NSCopying & NSObjectProtocol) + item.dataType = 
"com.apple.metadata.datatype.UTF-8" + return item + } + + private static func createMetadataAdaptorForStillImageTime() -> AVAssetWriterInputMetadataAdaptor { + let keySpace = "mdta" + let keyStill = "com.apple.quicktime.still-image-time" + + // 只声明 still-image-time 一个 key(回退到稳定版本) + let spec: NSDictionary = [ + kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString: "\(keySpace)/\(keyStill)", + kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString: "com.apple.metadata.datatype.int8" + ] + + var desc: CMFormatDescription? + CMMetadataFormatDescriptionCreateWithMetadataSpecifications( + allocator: kCFAllocatorDefault, + metadataType: kCMMetadataFormatType_Boxed, + metadataSpecifications: [spec] as CFArray, + formatDescriptionOut: &desc + ) + + let input = AVAssetWriterInput(mediaType: .metadata, outputSettings: nil, sourceFormatHint: desc) + return AVAssetWriterInputMetadataAdaptor(assetWriterInput: input) + } + + /// 对标竞品 89 字节 still-image-time 数据 + /// 结构:item1 (9B: still-image-time=-1) + item2 (80B: transform 3x3矩阵) + private static func metadataItemForStillImageTimeWithTransform() -> AVMetadataItem { + let item = AVMutableMetadataItem() + item.key = "com.apple.quicktime.still-image-time" as (NSCopying & NSObjectProtocol) + item.keySpace = AVMetadataKeySpace(rawValue: "mdta") + item.dataType = "com.apple.metadata.datatype.raw-data" + item.value = stillImageTime89BytesPayload() as NSData + return item + } + + /// 构建 89 字节 payload(对标竞品格式) + private static func stillImageTime89BytesPayload() -> Data { + var data = Data() + + // Item 1: still-image-time (9 bytes) + // size: 4 bytes (0x00000009) + data.append(contentsOf: [0x00, 0x00, 0x00, 0x09]) + // keyIndex: 4 bytes (0x00000001) + data.append(contentsOf: [0x00, 0x00, 0x00, 0x01]) + // value: 1 byte (0xFF = -1) + data.append(0xFF) + + // Item 2: transform (80 bytes) + // size: 4 bytes (0x00000050 = 80) + data.append(contentsOf: [0x00, 0x00, 0x00, 0x50]) + // keyIndex: 4 bytes 
(0x00000002) + data.append(contentsOf: [0x00, 0x00, 0x00, 0x02]) + // 3x3 identity matrix as big-endian Float64 (72 bytes) + let matrix: [Double] = [1, 0, 0, 0, 1, 0, 0, 0, 1] + for value in matrix { + var bigEndian = value.bitPattern.bigEndian + withUnsafeBytes(of: &bigEndian) { data.append(contentsOf: $0) } + } + + return data // 89 bytes total + } + + private static func metadataItemForStillImageTime() -> AVMetadataItem { + let item = AVMutableMetadataItem() + item.key = "com.apple.quicktime.still-image-time" as (NSCopying & NSObjectProtocol) + item.keySpace = AVMetadataKeySpace(rawValue: "mdta") + // 竞品使用 0xFF (-1),但之前测试 0 也不行,现在改回 -1 对标竞品 + item.value = NSNumber(value: Int8(-1)) as (NSCopying & NSObjectProtocol) + item.dataType = "com.apple.metadata.datatype.int8" + return item + } + + /// 3x3 单位矩阵变换数据(72 字节,大端序 Float64) + private static func metadataItemForStillImageTransform() -> AVMetadataItem { + let item = AVMutableMetadataItem() + item.key = "com.apple.quicktime.live-photo-still-image-transform" as (NSCopying & NSObjectProtocol) + item.keySpace = AVMetadataKeySpace(rawValue: "mdta") + item.dataType = "com.apple.metadata.datatype.raw-data" + item.value = livePhotoStillImageTransformIdentityData() as NSData + return item + } + + /// 生成 3x3 单位矩阵的大端序 Float64 数据 + private static func livePhotoStillImageTransformIdentityData() -> Data { + // 单位矩阵:[1,0,0, 0,1,0, 0,0,1] + let matrix: [Double] = [1, 0, 0, 0, 1, 0, 0, 0, 1] + var data = Data() + data.reserveCapacity(matrix.count * 8) + for value in matrix { + var bigEndian = value.bitPattern.bigEndian + withUnsafeBytes(of: &bigEndian) { data.append(contentsOf: $0) } + } + return data // 72 字节 + } + + // MARK: - Live Photo Info Track (逐帧 timed metadata,对标竞品) + + /// live-photo-info 数据暂时不写入,先确保基本功能正常 + /// 设为空数据,跳过 live-photo-info track + private static let livePhotoInfoPayload: Data = Data() + + private static func createMetadataAdaptorForLivePhotoInfo() -> AVAssetWriterInputMetadataAdaptor { + let key = 
"com.apple.quicktime.live-photo-info" + let keySpace = "mdta" + + let spec: NSDictionary = [ + kCMMetadataFormatDescriptionMetadataSpecificationKey_Identifier as NSString: "\(keySpace)/\(key)", + kCMMetadataFormatDescriptionMetadataSpecificationKey_DataType as NSString: "com.apple.metadata.datatype.raw-data" + ] + + var desc: CMFormatDescription? + CMMetadataFormatDescriptionCreateWithMetadataSpecifications( + allocator: kCFAllocatorDefault, + metadataType: kCMMetadataFormatType_Boxed, + metadataSpecifications: [spec] as CFArray, + formatDescriptionOut: &desc + ) + + let input = AVAssetWriterInput(mediaType: .metadata, outputSettings: nil, sourceFormatHint: desc) + return AVAssetWriterInputMetadataAdaptor(assetWriterInput: input) + } + + private static func metadataItemForLivePhotoInfo() -> AVMetadataItem { + let item = AVMutableMetadataItem() + item.key = "com.apple.quicktime.live-photo-info" as (NSCopying & NSObjectProtocol) + item.keySpace = AVMetadataKeySpace(rawValue: "mdta") + item.value = livePhotoInfoPayload as NSData + item.dataType = "com.apple.metadata.datatype.raw-data" + return item + } + + private static func metadataForSampleTime() -> AVMetadataItem { + let item = AVMutableMetadataItem() + item.key = "Sample Time" as (NSCopying & NSObjectProtocol) + item.keySpace = AVMetadataKeySpace(rawValue: "mdta") + item.value = "0 s" as (NSCopying & NSObjectProtocol) + item.dataType = "com.apple.metadata.datatype.UTF-8" + return item + } + + private static func metadataForSampleDuration() -> AVMetadataItem { + let item = AVMutableMetadataItem() + item.key = "Sample Duration" as (NSCopying & NSObjectProtocol) + item.keySpace = AVMetadataKeySpace(rawValue: "mdta") + item.value = "0.03 s" as (NSCopying & NSObjectProtocol) + item.dataType = "com.apple.metadata.datatype.UTF-8" + return item + } +} + +public struct LivePhotoWorkflowResult: Sendable, Hashable { + public var workId: UUID + public var assetIdentifier: String + public var pairedImageURL: URL + public var 
pairedVideoURL: URL + public var savedAssetId: String + public var resourceValidationOK: Bool + public var libraryAssetIsLivePhoto: Bool? + + public init( + workId: UUID, + assetIdentifier: String, + pairedImageURL: URL, + pairedVideoURL: URL, + savedAssetId: String, + resourceValidationOK: Bool, + libraryAssetIsLivePhoto: Bool? + ) { + self.workId = workId + self.assetIdentifier = assetIdentifier + self.pairedImageURL = pairedImageURL + self.pairedVideoURL = pairedVideoURL + self.savedAssetId = savedAssetId + self.resourceValidationOK = resourceValidationOK + self.libraryAssetIsLivePhoto = libraryAssetIsLivePhoto + } +} + +public actor LivePhotoWorkflow { + private let builder: LivePhotoBuilder + private let albumWriter: AlbumWriter + private let validator: LivePhotoValidator + + public init(cacheManager: CacheManager? = nil, logger: LivePhotoLogger = LivePhotoLogger()) throws { + let cm = try cacheManager ?? CacheManager() + self.builder = try LivePhotoBuilder(cacheManager: cm, logger: logger) + self.albumWriter = AlbumWriter() + self.validator = LivePhotoValidator() + } + + public func buildSaveValidate( + sourceVideoURL: URL, + coverImageURL: URL? = nil, + exportParams: ExportParams = ExportParams(), + progress: (@Sendable (LivePhotoBuildProgress) -> Void)? = nil + ) async throws -> LivePhotoWorkflowResult { + let output = try await builder.buildResources( + sourceVideoURL: sourceVideoURL, + coverImageURL: coverImageURL, + exportParams: exportParams, + progress: progress + ) + + // 调试:导出文件到文档目录 + #if DEBUG + if let (debugPhoto, debugVideo) = try? 
output.exportToDocuments() { + print("[DEBUG] Exported files to Documents:") + print(" Photo: \(debugPhoto.path)") + print(" Video: \(debugVideo.path)") + } + #endif + + progress?(LivePhotoBuildProgress(stage: .validate, fraction: 0)) + let resourceOK = await validator.canCreateLivePhotoFromResources( + photoURL: output.pairedImageURL, + pairedVideoURL: output.pairedVideoURL + ) + progress?(LivePhotoBuildProgress(stage: .validate, fraction: 0.3)) + + let addOnlyStatus = await albumWriter.requestAddOnlyAuthorization() + guard addOnlyStatus == .authorized else { + throw AppError( + code: "LPB-401", + stage: .saveToAlbum, + message: "无相册写入权限", + underlyingErrorDescription: "authorizationStatus(addOnly)=\(addOnlyStatus)", + suggestedActions: ["在系统设置中允许“添加照片”权限"] + ) + } + + progress?(LivePhotoBuildProgress(stage: .saveToAlbum, fraction: 0)) + let assetId = try await albumWriter.saveLivePhoto( + photoURL: output.pairedImageURL, + pairedVideoURL: output.pairedVideoURL, + shouldMoveFiles: false + ) + progress?(LivePhotoBuildProgress(stage: .saveToAlbum, fraction: 1)) + + var isLiveSubtype: Bool? 
= nil + let readWriteStatus = PHPhotoLibrary.authorizationStatus(for: .readWrite) + if readWriteStatus == .authorized || readWriteStatus == .limited { + isLiveSubtype = await validator.isLivePhotoAsset(localIdentifier: assetId) + } + progress?(LivePhotoBuildProgress(stage: .validate, fraction: 1)) + + return LivePhotoWorkflowResult( + workId: output.workId, + assetIdentifier: output.assetIdentifier, + pairedImageURL: output.pairedImageURL, + pairedVideoURL: output.pairedVideoURL, + savedAssetId: assetId, + resourceValidationOK: resourceOK, + libraryAssetIsLivePhoto: isLiveSubtype + ) + } +} + +private extension AVAsset { + func makeStillImageTimeRange(seconds: Double, frameCountHint: Int) -> CMTimeRange { + let duration = self.duration + + let clampedSeconds = max(0, min(seconds, max(0, duration.seconds - 0.001))) + var time = CMTime(seconds: clampedSeconds, preferredTimescale: duration.timescale) + if time > duration { + time = duration + } + + // 关键修复:竞品使用 duration_ts=1(最小 tick),而不是一帧时长 + // 壁纸校验比相册更严格,需要 still-image-time 是"瞬时标记"而非"一帧区间" + return CMTimeRange(start: time, duration: CMTime(value: 1, timescale: duration.timescale)) + } +} diff --git a/Sources/LivePhotoCore/MakerNotesPatcher.swift b/Sources/LivePhotoCore/MakerNotesPatcher.swift new file mode 100644 index 0000000..6cc1571 --- /dev/null +++ b/Sources/LivePhotoCore/MakerNotesPatcher.swift @@ -0,0 +1,83 @@ +import Foundation + +/// 用于修复 HEIC 文件中的 Apple MakerNotes,添加 LivePhotoVideoIndex 字段 +/// CGImageDestination 无法正确写入 Int64 类型的 MakerNotes 字段, +/// 所以我们使用预制的模板并在运行时替换关键字段 +public struct MakerNotesPatcher { + + // MARK: - 模板中的偏移量(基于原生 iPhone 照片的 MakerNotes 分析) + + /// ContentIdentifier 在 MakerNotes 模板中的偏移(36 字节 ASCII UUID + null) + private static let contentIdentifierOffset = 0x580 // 1408 + private static let contentIdentifierLength = 36 + + /// LivePhotoVideoIndex 在 MakerNotes 模板中的偏移(8 字节 Big-Endian Int64) + private static let livePhotoVideoIndexOffset = 0x5a6 // 1446 + private static let 
livePhotoVideoIndexLength = 8 + + /// 原生 iPhone MakerNotes 模板(从 iPhone 13 Pro Max 拍摄的 Live Photo 提取) + /// 包含完整的 Apple MakerNotes 结构,需要替换 ContentIdentifier 和 LivePhotoVideoIndex + private static let makerNotesTemplate: Data = { + // Base64 编码的 MakerNotes 模板 + let base64 = """ + QXBwbGUgaU9TAAABTU0APQABAAkAAAABAAAAEAACAAcAAAIAAAAC8AADAAcAAABoAAAE8AAEAAkA\ + AAABAAAAAQAFAAkAAAABAAAAqQAGAAkAAAABAAAApQAHAAkAAAABAAAAAQAIAAoAAAADAAAFWAAM\ + AAoAAAACAAAFcAANAAkAAAABAAAAFwAOAAkAAAABAAAABAAQAAkAAAABAAAAAQARAAIAAAAlAAAF\ + gAAUAAkAAAABAAAACgAXABAAAAABAAAFpgAZAAkAAAABAAAAAgAaAAIAAAAGAAAFrgAfAAkAAAAB\ + AAAAAAAgAAIAAAAlAAAFtAAhAAoAAAABAAAF2gAjAAkAAAACAAAF4gAlABAAAAABAAAF6gAmAAkA\ + AAABAAAAAwAnAAoAAAABAAAF8gAoAAkAAAABAAAAAQArAAIAAAAlAAAF+gAtAAkAAAABAAATXAAu\ + AAkAAAABAAAAAQAvAAkAAAABAAAAMAAwAAoAAAABAAAGIAAzAAkAAAABAAAQAAA0AAkAAAABAAAA\ + BAA1AAkAAAABAAAAAwA2AAkAAAABAADnJAA3AAkAAAABAAAABAA4AAkAAAABAAACPgA5AAkAAAAB\ + AAAAAAA6AAkAAAABAAAAAAA7AAkAAAABAAAAAAA8AAkAAAABAAAABAA9AAkAAAABAAAAAAA/AAkA\ + AAABAAAAOwBAAAcAAABQAAAGKABBAAkAAAABAAAAAABCAAkAAAABAAAAAABDAAkAAAABAAAAAABE\ + AAkAAAABAAAAAABFAAkAAAABAAAAAABGAAkAAAABAAAAAABIAAkAAAABAAACPgBJAAkAAAABAAAA\ + AABKAAkAAAABAAAAAgBNAAoAAAABAAAGeABOAAcAAAB5AAAGgABPAAcAAAArAAAG+gBSAAkAAAAB\ + AAAAAQBTAAkAAAABAAAAAQBVAAkAAAABAAAAAQBYAAkAAAABAAAHAwBgAAkAAAABAAASAABhAAkA\ + AAABAAAAGgAAAAC9AtMCxAKWAlsCIALqAbwBkgFrAUYBJQEIAfEA3QDMAAMDMwNAA/QCmQJWAg0C\ + 1QGoAX4BUwEtAQ8B9QDhAM0ALANqA2oDTAPrAnUCKALpAb4BhgFWATABEAH0AN4AyQA9A7oDwgNP\ + A7gCXAIYAugBqwF2AUwBJgEGAeoA0gC+ANECrwPSAxIDZAIVAt4BqwF8AVABKQEHAekA0AC6AKgA\ + zwFCAnYCBAK/AZYBcwFQAS8BDQHvANQAvQCpAJgAigDUAP8AGgEVARoB5QC6AMUAsACsAJkAggBy\ + AGQAXABTAGAAaABqAGYAXABMAEcARwBCAEEAPgAvACQAJAAlACoARgBIAEcARAA/ADcAMwAvACoA\ + KAAnACEAHQAfAB8AIAAxADIAMQAwAC4AKwAnACcAJQAfABwAGgAXABkAEwAUACkAKgAmACQAIgAg\ + ACEAIgAgAB4AHAAZABgAFAAXABUAIgAcABoAGwAaABcAHAAdABkAGAAYABgAFwAaABgAGAAaABsA\ + GgAYABUAEwAWABcAEwAWABcAFQAVABYAEwAUABoAFwAWABMAEgATABAADwATABMAEgANAA8ADwAP\ + 
AAwAEwAVABIAEwANABUAEwATABIADAAPAAsAEAASAA8ADgARABEADwAMAAwADwAWABMAEgASABQA\ + DQAPAAoADAAOAGJwbGlzdDAw1AECAwQFBgcIVWZsYWdzVXZhbHVlWXRpbWVzY2FsZVVlcG9jaBAB\ + EwABVEUBR7fxEjuaygAQAAgRFx0nLS84PQAAAAAAAAEBAAAAAAAAAAkAAAAAAAAAAAAAAAAAAAA/\ + ///J3gAANk3//8R4AAe+////5bUAAVy+AAAAOwAAAQAAAAAnAAABAAAAAAAAAAAAAAAAAAAAALtA\ + AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABI\ + RUlDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\ + AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\ + AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAbwBwAGwAaQBzAHQAMAAwANQBAgMEBQYHCFEzUTFR\ + MlEwEAQiAAAAACQ/AAAAEAEIERMVFxkbICUAAAAAAAABAQAAAAAAAAAJAAAAAAAAAAAAAAAAAAAA\ + JwAC8/UAABV+YnBsaXN0MDDSAQIDSFExUTIQA6IFCtIGBwgJUzIuMVMyLjIjQEsf2IAAAACJQAAA\ + AAAAAADSBgcLDCM/4hqAAAAAAKNAVMAAAAAAAAgNDxETFhsfIywlOkMAAAAAAAABAQAAAAAAAAAL\ + AAAAAAAAAAAAAAAAAAAAQQAAAAA= + """ + return Data(base64Encoded: base64.replacingOccurrences(of: "\\\n", with: "").replacingOccurrences(of: "\n", with: ""))! + }() + + /// 创建自定义的 MakerNotes 数据 + /// - Parameters: + /// - contentIdentifier: Live Photo 的 Content Identifier (UUID 字符串格式) + /// - livePhotoVideoIndex: Live Photo Video Index (通常是帧索引的 Float32 bitPattern) + /// - Returns: 修改后的 MakerNotes 数据 + public static func createMakerNotes( + contentIdentifier: String, + livePhotoVideoIndex: Int64 + ) -> Data { + var data = makerNotesTemplate + + // 替换 ContentIdentifier + let uuidData = contentIdentifier.data(using: .ascii)! 
+ let paddedUUID = uuidData + Data(repeating: 0, count: max(0, contentIdentifierLength - uuidData.count)) + data.replaceSubrange(contentIdentifierOffset..<(contentIdentifierOffset + contentIdentifierLength), with: paddedUUID.prefix(contentIdentifierLength)) + + // 替换 LivePhotoVideoIndex (Big-Endian) + var bigEndianValue = UInt64(bitPattern: livePhotoVideoIndex).bigEndian + let indexData = Data(bytes: &bigEndianValue, count: livePhotoVideoIndexLength) + data.replaceSubrange(livePhotoVideoIndexOffset..<(livePhotoVideoIndexOffset + livePhotoVideoIndexLength), with: indexData) + + return data + } +} diff --git a/TASK.md b/TASK.md new file mode 100644 index 0000000..138b2a3 --- /dev/null +++ b/TASK.md @@ -0,0 +1,101 @@ +# TASK|to-live-photo + +> 说明:本清单按阶段拆解研发事项,默认最低支持 iOS/iPadOS 16+,先完成 MVP 闭环,再逐步完善。 + +## M0|技术预研 / POC(以“系统可识别 Live Photo”为第一目标) + +- [ ] 建立 Xcode 工程骨架(SwiftUI 优先),设置 Deployment Target = iOS/iPadOS 16.0 +- [ ] 补齐权限与 Info.plist 文案: + - [ ] NSPhotoLibraryUsageDescription + - [ ] NSPhotoLibraryAddUsageDescription +- [ ] POC:最小链路跑通(不做复杂编辑) + - [ ] 从相册导入视频(PHPicker,视频过滤) + - [ ] 以默认参数(3s、maxDimension、30fps 策略)生成 photo + pairedVideo + - [ ] 写入相册(PHAssetCreationRequest 同时写入 .photo 与 .pairedVideo) + - [ ] 校验:保存后按 assetId 取回并验证 Live 识别(至少做到“相册 Live 标识 + 长按可播”的人工确认路径) +- [ ] 约束与策略确认(写入代码常量/配置): + - [ ] 时长限制:1.5~5s(默认 3s) + - [ ] 分辨率上限:默认 1920(可后续自适应) + - [ ] 帧率策略:>30fps 降到 30fps + - [ ] HDR 策略:默认转 SDR 或首次提示(确认最终策略) + - [ ] 编码策略:优先 re-mux,失败再转 H.264 兼容导出(确认兜底策略) +- [ ] 设计基础设施: + - [ ] WorkItem / ExportParams 数据模型(与 TECHSPEC 对齐) + - [ ] CacheManager:按 workId 建目录、成功/失败保留 24h 清理策略 + - [ ] Logger:阶段化日志(stage enum + progress + error_code) + +### M0 完成定义 + +- [ ] 能在至少 1 台 iPhone + 1 台 iPad 上生成并保存 Live Photo,且系统相册可识别(有 Live 标识,长按可播放)。 + +## M1|MVP(导入→编辑→生成→保存→引导) + +### 1) UI 页面闭环 + +- [ ] HomeView:首页导入入口、最近作品(可先仅内存态/本地简单持久化) +- [ ] EditorView:比例裁剪、时长裁剪、封面帧选择、预览 +- [ ] ProcessingView:进度条 + 阶段文案 + 取消/重试/返回编辑 +- [ ] ResultView:保存到相册、再次编辑、进入壁纸引导 +- [ ] 
WallpaperGuideView:按系统版本展示步骤卡片、FAQ、打开设置、完成确认 + +### 2) 编辑能力(MVP 版) + +- [ ] 比例模板:iPhone 锁屏 / 全面屏 / 4:3 等(先做 2~3 个核心模板) +- [ ] 裁剪手势:缩放 + 拖拽,保持比例 +- [ ] 时长裁剪:range slider(1.5~5s,默认 0~3s) +- [ ] 封面帧:滑杆选择 keyFrameTime,实时刷新封面预览 + +### 3) 生成与保存(与 TECHSPEC 阶段枚举对齐) + +- [ ] 生成管线:normalize → extractKeyFrame → writePhotoMetadata → writeVideoMetadata → saveToAlbum → validate +- [ ] 取消策略:取消时终止任务并清理未写入相册的中间文件 +- [ ] 错误码与可行动建议:至少覆盖 LPB-001/101/201/301/401/501/901 + +### 4) 引导内容(MVP 版) + +- [ ] 版本检测:iOS/iPadOS 16 显示“系统限制/不支持锁屏 Live 动效”的明确文案与替代方案 +- [ ] iOS/iPadOS 17+:展示步骤卡片(设置→墙纸→添加新墙纸→照片→选择 Live Photo→开启 Live) +- [ ] FAQ:Motion not available、低电量模式、找不到 Live 按钮等 + +### 5) 基础埋点(可先打印日志,后续再接 SDK) + +- [ ] home_import_video_click / import_video_success +- [ ] editor_generate_click / build_livephoto_start / build_livephoto_fail +- [ ] save_album_success / save_album_fail +- [ ] guide_open / guide_complete + +### 6) MVP QA(手工为主) + +- [ ] 测试矩阵:iPhone 1~2 台 + iPad 1 台;iOS/iPadOS 17+ 与 16 各至少 1 台 +- [ ] 素材覆盖:H.264/HEVC、30/60fps、竖/横、SDR/HDR +- [ ] 验收点:生成成功率、保存成功率、相册识别率、引导文案准确性 + +### M1 完成定义 + +- [ ] 按 PRD 的 MVP 验收标准打通闭环:生成 Live Photo → 保存相册可识别 → 可进入引导并在不同系统版本下给出正确提示。 + +## M2|完善(体验提升 + 失败率降低) + +- [ ] 兼容模式开关(UI 可见):降分辨率/30fps/H.264/SDR +- [ ] 自动诊断与建议:根据素材参数提示“建议缩短/建议兼容模式/建议转 SDR”等 +- [ ] iPad 编辑页布局优化:左右分栏(预览/参数) +- [ ] 最近作品列表完善:持久化(仅存参数与缩略图/assetId,不重复存媒体) +- [ ] 设置页(可选):权限状态、清理缓存、反馈入口 +- [ ] 错误反馈包导出(可选):builder.log + 参数(不包含媒体内容) + +## M3|稳定性 / 上线准备 + +- [ ] 性能与内存优化:大视频处理、峰值内存控制 +- [ ] 崩溃与异常收敛:日志脱敏、错误归因完善(按 stage 统计) +- [ ] App Store 合规检查:权限文案、引导表述(不承诺一键设置壁纸)、隐私说明 +- [ ] 产出核心文档补齐(按需要最小化): + - [ ] 测试文档:MVP 测试矩阵与用例 + - [ ] 用户手册:导入/生成/保存/设置引导与常见问题 + +--- + +## 决策备忘(后续需要你拍板) + +- [ ] HDR 默认策略:默认转 SDR vs 首次提示用户选择 +- [ ] 编码兜底策略:完全自动兜底 vs 失败后提示开启兼容模式 +- [ ] 高级合成(照片+视频)进入哪个阶段(建议 M2) diff --git a/Tests/LivePhotoCoreTests/LivePhotoCoreTests.swift b/Tests/LivePhotoCoreTests/LivePhotoCoreTests.swift new file mode 100644 index 0000000..a7cf1b4 --- /dev/null +++ 
b/Tests/LivePhotoCoreTests/LivePhotoCoreTests.swift @@ -0,0 +1,8 @@ +import XCTest +@testable import LivePhotoCore + +final class LivePhotoCoreTests: XCTestCase { + func testPlaceholder() { + XCTAssertTrue(true) + } +} diff --git a/docs/IXSPEC_LivePhoto_App_V0.2_2025-12-13.md b/docs/IXSPEC_LivePhoto_App_V0.2_2025-12-13.md new file mode 100644 index 0000000..c758d23 --- /dev/null +++ b/docs/IXSPEC_LivePhoto_App_V0.2_2025-12-13.md @@ -0,0 +1,136 @@ +# 交互规格书|Live Photo 制作与动态壁纸引导 App(V0.2-IX) + +- 适用平台:iPhone / iPad(iOS / iPadOS) +- 日期:2025-12-13(Asia/Manila) +- 用途:用于 AI 生成 UI / 交互实现 / QA 对照 + +## 1. 设计原则 + +- 一条主线:导入 → 编辑 → 生成 → 保存 → 引导设置。 +- 减少认知:默认 3 秒、默认锁屏比例模板、默认智能封面。 +- 失败可行动:每个错误必须给“下一步”。 +- 强预期管理:明确“通常仅锁屏动效”,以及系统版本差异。 + +## 2. 全局交互规范 + +### 2.1 导航与按钮 + +- 主按钮(Primary):导入、生成、保存、去设置壁纸。 +- 次按钮(Secondary):再次编辑、重试、了解原因。 +- 危险操作:清理缓存/删除作品需二次确认。 + +### 2.2 Loading / Empty / Error 规范 + +| 状态 | 表现 | 必备元素 | +|---|---|---| +| Loading | 进度条 + 阶段文案 | 取消/后台继续(可选)、预计步骤而非时间 | +| Empty | 插画/图标 + 1句解释 | 主按钮引导下一步 | +| Error | 标题 + 原因 + 建议 | 重试按钮 + 反馈入口(可选) | + +## 3. 
页面级交互规格 + +### 3.1 首页(HomeView) + +| 区域 | 组件 | 交互 | 状态/校验 | +|---|---|---|---| +| 顶部 | App 标题 + 设置入口 | 点击设置进入 Settings(可选) | 无 | +| 主区 | 按钮:从相册导入视频 | 打开 PHPicker(视频过滤) | 无权限→解释页 | +| 主区 | 入口:高级合成(照片+视频) | 打开 PHPicker(照片+视频) | MVP可隐藏 | +| 主区 | 入口:教程/FAQ | 打开 Guide/FAQ | 版本检测提示 | +| 列表 | 最近作品(0~10) | 点击进入 Result;长按删除缓存(可选) | 空态展示提示 | + +- 空态文案示例:**“导入一段 2–3 秒视频,做成可以长按播放的实况照片。”** +- 首次进入:可展示 2~3 页 onboarding(可选)。 + +### 3.2 编辑页(EditorView) + +| 模块 | 组件 | 交互细节 | 默认值 | +|---|---|---|---| +| 预览 | 视频播放器(静音预览可选) | 支持播放/暂停;拖动时间轴预览 | 自动播放关闭 | +| 比例 | 比例选择器(模板chips) | 切换模板时保持主体居中;允许用户再拖拽微调 | iPhone锁屏模板 | +| 裁剪 | 画面裁剪手势 | 双指缩放、单指拖动;显示安全区参考线(可选) | scale=1 | +| 时长 | 时间裁剪条(range slider) | 限制 1.5~5s;默认 0~3s;自动吸附到整帧点 | 3秒 | +| 封面 | 封面帧滑杆 + 预览帧 | 滑动实时更新封面预览;提供“推荐封面”按钮 | 中间帧 | +| 操作 | 按钮:生成 Live Photo | 点击后进入 Processing;参数校验不通过则提示 | 可用 | + +- 校验提示:时长过长→建议缩短;分辨率过高→建议开启兼容模式。 +- iPad 适配:左侧预览、右侧参数面板;或上下布局(横屏)。 + +### 3.3 生成进度页(ProcessingView) + +| 元素 | 说明 | 交互 | +|---|---|---| +| 进度条 | 0~100%;按阶段推进 | 不可拖动 | +| 阶段文案 | 例如:处理视频 / 写入实况信息 / 准备保存 | 随阶段自动切换 | +| 取消 | 终止任务并回到编辑 | 二次确认;取消后清理缓存 | +| 失败态 | 展示错误码+原因+建议 | 按钮:重试 / 返回编辑 | + +### 3.4 结果页(ResultView) + +| 区域 | 组件 | 交互 | 成功条件 | +|---|---|---|---| +| 顶部 | 缩略图 + Live 标识(模拟) | 长按预览动效(App内) | 预览流畅 | +| 主按钮 | 保存到相册 | 触发相册写入;成功弹 toast | 相册可见Live | +| 主按钮 | 去设置壁纸 | 进入 WallpaperGuide | 版本差异提示 | +| 次按钮 | 再次编辑 | 回到 Editor(保留参数) | 参数不丢 | +| 信息 | 作品参数摘要 | 展开查看细节(可选) | 无 | + +### 3.5 壁纸引导页(WallpaperGuideView) + +| 模块 | 内容 | 交互 | 备注 | +|---|---|---|---| +| 版本检测 | 显示:当前系统版本/是否支持Live锁屏动效 | 点击“了解原因”展开说明 | 必须明确限制 | +| 步骤卡片 | 步骤1~5(图文) | 每步可折叠;支持复制路径文案 | iPhone/iPad分支 | +| 常见问题 | Motion not available、低电量、找不到Live按钮等 | 点击展开答案 | MVP用静态文案 | +| 跳转设置 | 按钮:打开设置 | 打开 Settings(到首页即可) | 不承诺深链成功 | +| 完成确认 | 按钮:我已设置完成 | 记录引导完成埋点 | 用于漏斗统计 | + +## 4. 
文案与提示(统一模板) + +### 4.1 错误提示模板 + +- 标题:一句话告诉用户发生了什么(例如:**“视频处理失败”**) +- 原因:给 1~2 条最可能原因(不要超过 3 条) +- 建议:给可点击动作(例如:**“切换到兼容模式(H.264)”**、**“缩短到 3 秒以内”**) +- 附加:可显示错误码(长按复制)与反馈入口(可选) + +### 4.2 兼容模式说明(示例) + +- 兼容模式会:降低分辨率、降帧率到 30fps、转码到 H.264(如需要)、将 HDR 转为 SDR。 +- 目的:提升生成成功率与壁纸/分享兼容性。 + +## 5. 埋点事件字典(Event Dictionary) + +### 5.1 事件命名规范 + +- 动词_对象_结果:例如 `import_video_success` +- 所有事件带公共属性:`app_version`, `os_version`, `device_model`, `locale` + +### 5.2 核心事件表 + +| 事件名 | 触发时机 | 关键属性(properties) | 用途 | +|---|---|---|---| +| home_import_video_click | 点击“导入视频” | entry=home | 漏斗起点 | +| import_video_success | 完成导入并进入编辑 | duration,resolution,fps,codec,hdr | 素材分布 | +| editor_generate_click | 点击生成 | ratio,trim,has_cover,compat_mode | 转化与参数 | +| build_livephoto_start | 开始生成 | work_id,stage=normalize | 性能监控 | +| build_livephoto_fail | 生成失败 | error_code,stage,codec_policy,hdr_policy | 失败归因 | +| save_album_success | 写入相册成功 | asset_id,elapsed_ms | 闭环成功 | +| save_album_fail | 写入相册失败 | error_code,permission_state | 权限问题 | +| guide_open | 进入壁纸引导页 | from=result,os_support=true/false | 引导覆盖 | +| guide_complete | 点击“已设置完成” | time_spent_s | 引导成效 | +| cache_clear | 清理缓存 | freed_mb | 存储与留存 | + +## 6. iPhone / iPad 适配规则 + +- iPhone:编辑页优先竖屏;底部工具栏;预览占上半屏。 +- iPad:横屏优先;左右分栏(预览/参数);支持拖拽文件导入(可选)。 +- 安全区:裁剪预览提供参考线(可选)以减少锁屏 UI 遮挡主体。 + +## 7. QA 对照清单(交互) + +1. 所有主按钮在不可用状态必须有原因提示(例如未选视频)。 +2. 生成中返回/退出:必须提示风险(任务取消/后台)。 +3. 权限拒绝:必须有解释页与“去设置开启”按钮。 +4. iOS 16:引导页必须出现“不支持锁屏Live动效”的明确文案。 +5. 所有错误页必须包含:重试与返回编辑两个行动。 diff --git a/docs/PRD_LivePhoto_App_V0.2_2025-12-13.md b/docs/PRD_LivePhoto_App_V0.2_2025-12-13.md new file mode 100644 index 0000000..c2fcd46 --- /dev/null +++ b/docs/PRD_LivePhoto_App_V0.2_2025-12-13.md @@ -0,0 +1,557 @@ +PRD|Live Photo 制作与动态壁纸引导 App + +适用平台:iPhone / iPad(iOS / iPadOS) + +文档版本:V0.2(草案) + +日期:2025-12-13(Asia/Manila) + +作者:——(待填) + +# 1. 文档信息与变更记录 + +# 2. 
背景与机会 + +用户希望把自拍视频/短片制作成“像苹果实况照片(Live Photo)一样”的内容,用于相册浏览、分享(例如发微信)、并在支持的系统版本上设置为锁屏动态壁纸。 + +痛点 1:现有转换工具分散,流程复杂(导入、裁剪、转码、导出、再去相册设置)。 + +痛点 2:很多用户不知道 iOS/iPadOS 不同版本对 Live Photo 壁纸的支持差异,导致“做出来不能动”。 + +痛点 3:对尺寸/时长/关键帧(封面)选择缺乏指导,成品效果不稳定。 + +本 App 提供一站式:导入视频 → 生成系统可识别的 Live Photo → 保存到相册 → 分步骤引导设置壁纸。 + +# 3. 产品目标与非目标 + +## 3.1 目标(Goals) + +G1:用户可从“视频/照片素材”快速生成“系统相册识别的 Live Photo”,并保存到相册。 + +G2:提供清晰的“设置为锁屏动态壁纸”引导,降低因系统版本/设置项导致的失败率。 + +G3:通过模板与提示,帮助用户得到高成功率的“可播放、可分享、封面好看”的作品。 + +G4:适配 iPhone 与 iPad,不同屏幕与比例下提供合理裁剪与预览。 + +## 3.2 非目标(Non-goals) + +不使用任何私有 API 直接替用户设置系统壁纸(App Store 会拒)。 + +不做云端素材社区/版权图库(可在后续版本作为增长模块另立项)。 + +不保证所有历史系统版本都能播放 Live Photo 壁纸动画(受系统能力限制)。 + +# 4. 目标用户与使用场景 + +## 4.1 目标用户(Personas) + +P1:普通用户——想把自拍视频做成动态壁纸,但不懂格式/设置路径。 + +P2:内容创作者——需要批量把短视频转 Live Photo 进行分享与展示。 + +P3:设计/审美用户——关注裁剪、封面帧、色彩与动效节奏。 + +## 4.2 核心场景(Scenarios) + +S1:用户选择一段 2-5 秒视频,生成 Live Photo,保存并设置为锁屏动态壁纸。 + +S2:用户选择“照片 + 视频”合成(照片做封面),并指定关键帧。 + +S3:用户生成后想发微信:需要在相册里保持 Live Photo 形态并可被微信识别。 + +S4:用户在 iPad 上制作,导出后在 iPhone 上设置壁纸(跨设备)。 + +# 5. 兼容性与约束 + +## 5.1 系统与功能可用性(建议写在 App 内) + +Live Photo 生成与保存:iOS/iPadOS 14+(建议 15+)可实现(依赖 Photos / AVFoundation)。 + +“Live Photo 作为锁屏动态壁纸播放”:iOS 17 或更高版本支持在锁屏唤醒时播放(iPhone 官方说明)。 + +iPadOS 锁屏支持 Live Photo 的动效选项(官方 iPad 用户指南包含 Live Photo 动效按钮说明)。 + +注意:Live Photo 动效通常只作用于锁屏;主屏一般显示静态图(以系统版本为准)。 + +低电量模式等系统状态可能导致动效不可用(需在引导里提示)。 + +## 5.2 平台约束(必须写清) + +iOS/iPadOS 没有公开 API 允许第三方 App 直接设置系统壁纸:只能引导用户在系统界面完成设置。 + +禁止使用私有 API、越狱方案或企业签名绕过(不符合 App Store 上架)。 + +# 6. 需求范围(MVP) + +## 6.1 MVP 功能列表 + +导入:从相册选择视频;可选:选择封面照片(可不选,默认从视频自动取帧)。 + +编辑:裁剪比例(适配 iPhone/iPad 锁屏常见比例)、裁剪时长(建议 2-3 秒)、选择封面帧(关键帧)。 + +生成:将素材合成为系统可识别 Live Photo(照片 + pairedVideo + 元数据)。 + +保存:保存到系统相册(Live Photos 相簿 / 最近项目)。 + +引导:分步骤引导用户把 Live Photo 设置为锁屏动态壁纸(按系统版本展示不同路径)。 + +质量检测:生成后做一次本地校验(能否被系统识别为 Live Photo;失败原因提示)。 + +## 6.2 后续版本(Backlog) + +模板:预设“人物/风景/文字”裁剪模板、调速/慢动作适配。 + +批量生成:一次导入多段视频,批量导出 Live Photo。 + +小组件/主题包:壁纸合集管理(订阅制/一次性购买)。 + +分享:一键分享(注意平台对实况/视频的支持差异)。 + +# 7. 
用户流程 + +## 7.1 主流程:视频 → Live Photo → 设置锁屏 + +进入首页,点击【从相册导入视频】。 + +选择视频后进入编辑页:裁剪比例、裁剪时长、选择封面帧。 + +点击【生成 Live Photo】;显示进度与阶段文案(处理视频 / 写入元数据 / 保存准备)。 + +生成成功 → 点击【保存到相册】;提示“已保存,可在 Live Photos 相簿查看”。 + +进入【设置壁纸引导】:按步骤提示用户在系统【设置 > 墙纸】选择 Live Photo 并开启 Live 动效。 + +## 7.2 失败/异常流程 + +用户拒绝相册权限:展示解释页,并引导去系统设置开启。 + +生成失败(编码/元数据写入失败):提示原因 + 建议(更换视频编码、缩短时长、降低分辨率)。 + +保存成功但系统未识别为 Live Photo:提示“请在相册查看是否有 Live 标识”,并提供重新生成按钮。 + +# 8. 功能需求详述 + +## 8.1 首页 + +入口 1:导入视频(必选)。 + +入口 2:导入照片 + 视频(可选,作为高级模式)。 + +入口 3:教程/引导(包含系统版本差异、常见问题)。 + +展示最近生成的作品列表(MVP 可只展示最近 10 条)。 + +## 8.2 编辑页 + +裁剪比例:常用预设(iPhone 锁屏 / 全面屏 / 4:3 等),支持手动缩放与拖拽。 + +时长:默认 3 秒;可选范围 1.5-5 秒(过长降低成功率与体积)。 + +封面帧:滑杆选择时间点;支持“智能推荐封面”(清晰、有人脸、少抖动)。 + +预览:静态预览 + 动效预览(模拟锁屏唤醒播放方式)。 + +## 8.3 生成引擎(核心) + +输入:视频(必选) + 可选封面照片。 + +输出:照片文件(建议 HEIC)+ 视频文件(MOV,H.264/HEVC)+ 绑定元数据(identifier、still-image-time 等)。 + +关键规则:照片与视频共享同一 content/asset identifier;视频包含 still-image-time 标记关键帧。 + +对视频进行重封装(re-mux)以写入元数据;必要时重新编码(可配置:优先保留编码,失败则转码)。 + +导出参数建议:分辨率/比特率上限以保证速度与成功率;音频可选保留/移除。 + +本地校验:生成后尝试用系统方式加载校验(如 PHLivePhoto 请求加载)并输出可读错误。 + +## 8.4 保存到相册 + +使用 Photos 写入:photo 资源 + pairedVideo 资源。 + +成功回调:提示“已保存”;失败回调:展示错误与建议。 + +相册归类:系统会自动归类到 Live Photos;App 内保存最近作品列表。 + +## 8.5 动态壁纸设置引导 + +按系统版本展示不同文案与截图示意。 + +iOS/iPadOS 17+:引导用户进入【设置 > 墙纸 > 添加新墙纸 > 照片 > Live Photo】并开启 Live 播放选项。 + +iOS/iPadOS 16:明确提示系统限制;提供替代方案(设置静态壁纸/升级系统建议)。 + +常见失败提示:低电量模式、‘Motion not available’、素材参数不兼容等。 + +提供按钮:打开系统设置(尽量深链;无法深链则打开设置首页)。 + +# 9. 非功能需求 + +## 9.1 性能与体验 + +生成过程需有可见进度与阶段提示;失败可重试。 + +中间文件自动清理;不占用过多存储。 + +错误提示要“可行动”:给出下一步(缩短时长、降低分辨率、重新选择素材)。 + +## 9.2 隐私与合规 + +默认本地处理:不上传用户照片/视频。 + +权限最小化:仅在需要时请求相册/相机/麦克风权限,并提供用途说明。 + +如接入统计:匿名化、可关闭,并在隐私政策中说明。 + +# 10. 数据埋点与指标 + +## 10.1 核心漏斗 + +导入视频 → 进入编辑 → 点击生成 → 生成成功 → 保存成功 → 进入壁纸引导 → 引导完成(点击“完成/已设置”)。 + +## 10.2 关键指标(KPIs) + +生成成功率(按机型/系统版本/视频参数分层)。 + +保存成功率(相册写入成功)。 + +引导完成率(壁纸设置引导)。 + +7 日留存(如后续商业化,再加转化率)。 + +# 11. 
里程碑建议 + +M0:技术预研(Live Photo 合成可行性、系统版本验证、参数边界)。 + +M1:MVP(导入视频 → 生成 → 保存 → 基础引导)。 + +M2:适配 iPad + 编辑体验提升(比例模板、预览增强)。 + +M3:稳定性与质量(失败提示、自动降级转码、素材建议)。 + +# 12. 验收标准(MVP) + +在 iPhone(至少 2 款机型)与 iPad(至少 1 款机型)上:从相册导入视频可生成 Live Photo 并保存到系统相册。 + +生成的 Live Photo 在系统相册中显示为 Live Photo(有 Live 标识,长按可播放)。 + +在支持 Live Photo 锁屏播放的系统版本上,用户可按引导步骤完成设置并在唤醒锁屏时播放。 + +在不支持版本上,App 必须明确提示限制并给出替代方案,避免误导。 + +不使用私有 API,权限声明齐全,可通过 App Store 审核。 + +# 13. 风险与对策 + +R1:系统版本差异 → 版本检测 + 差异化引导文案。 + +R2:素材多样导致失败 → 参数归一化 + 自动降级转码 + 可读错误提示。 + +R3:用户期待“主屏也会动” → 产品内明确说明:通常仅锁屏动效。 + +R4:审核风险 → 只做引导,不做自动设置;不接触私有 API。 + +# 14. 参考资料(TBD) + +Apple Support:Set a Live Photo as your Lock Screen wallpaper (iOS 17+): https://support.apple.com/120734 + +Apple Support:Change your iPhone wallpaper(含 Live Photo 入口说明): https://support.apple.com/102638 + +Apple iPad User Guide:Create a custom iPad Lock Screen: https://support.apple.com/guide/ipad/ipad782d4de8/ipados + +Apple iPad User Guide:Change the wallpaper on iPad: https://support.apple.com/guide/ipad/ipad997d908e/ipados + +# 15. 信息架构与页面清单 + +本节用于指导你用 AI 编码时的页面/路由/组件拆分。 + +## 15.1 信息架构(IA) + +首页(Home):导入视频 / 高级合成 / 教程与FAQ / 最近作品列表 + +编辑页(Editor):比例裁剪、时长裁剪、封面帧选择、预览、生成按钮 + +生成进度页(Processing):阶段进度、取消/后台、失败原因与重试 + +结果页(Result):保存到相册、再次编辑、进入壁纸引导、分享(后续) + +壁纸引导页(Wallpaper Guide):按系统版本展示步骤、常见问题、跳转设置 + +作品库(Library - MVP可选):最近作品、再次导出、删除缓存 + +设置(Settings):权限状态、清理缓存、隐私、关于、反馈 + +## 15.2 页面与路由(建议命名) + +# 16. 用户故事与验收标准(更细颗粒) + +## 16.1 用户故事(User Stories) + +## 16.2 验收标准(按故事拆分) + +US-01:生成后在系统相册中显示 Live 标识;长按可播放;不出现仅照片或仅视频的孤儿资源。 + +US-02:裁剪后的导出在锁屏预览时主体居中,用户可通过手势调整;导出不拉伸。 + +US-03:用户选定封面帧后,相册静态显示与导出封面一致(允许系统做轻微处理)。 + +US-05:引导页能根据系统版本显示正确路径;并提示‘低电量模式/系统限制’等常见原因。 + +US-06:错误提示包含:错误标题 + 可能原因 + 可点击的解决建议(例如缩短时长/降低分辨率/改用H.264)。 + +# 17. 
详细功能需求表(可直接喂给 AI 编码) + +## 17.1 功能点-输入-处理-输出 + +## 17.2 关键约束与参数建议 + +推荐时长:2-3秒(越长越大且更容易失败)。 + +推荐分辨率上限:以锁屏显示为目标,通常不需要超过 1440p;可按设备上限做自适应。 + +帧率:建议 30fps;高帧率素材可降到30fps以提升兼容性。 + +编码:优先保留原编码;失败时降级到 H.264 + AAC(更通用)。 + +HDR/Dolby Vision:可提示用户可能存在兼容性问题;必要时转 SDR。 + +# 18. 技术方案概述(研发实现导向) + +## 18.1 模块划分 + +## 18.2 Live Photo 合成关键点(落地检查清单) + +生成 assetIdentifier(UUID字符串)。 + +图片侧:写入 Apple MakerNote/相关标识(建议HEIC)。 + +视频侧:写入 QuickTime content identifier 元数据 + still-image-time timed metadata track。 + +保存侧:PHAssetCreationRequest 同时写入 .photo 与 .pairedVideo。 + +校验侧:尝试从相册 asset 请求 PHLivePhoto 或检查 Live 标识是否出现。 + +## 18.3 权限与Info.plist + +NSPhotoLibraryUsageDescription:读取相册用于选择素材 + +NSPhotoLibraryAddUsageDescription:保存生成的 Live Photo 到相册 + +NSCameraUsageDescription:如后续加入拍摄入口 + +NSMicrophoneUsageDescription:如保留或录制音频 + +# 19. 数据模型与本地存储 + +## 19.1 本地作品模型(示例) + +WorkItem:id、createTime、sourceVideoLocalID、exportParams(ratio/trim/keyFrame)、resultAssetLocalID(相册asset.localIdentifier)、status(processing/success/failed)、errorCode、thumbnailPath。 + +缓存目录:/Library/Caches/LivePhotoBuilder/{workId}/(中间mov、heic、日志)。 + +清理策略:成功后保留缩略图与参数;中间文件按设置或定期清理。 + +# 20. QA 测试计划(MVP) + +## 20.1 测试矩阵 + +## 20.2 用例清单(样例) + +从相册导入竖屏30fps H.264视频,默认参数生成并保存,确认相册显示Live标识且可播放。 + +导入60fps HEVC视频,选择3秒裁剪,生成成功;若失败需自动降级转码并提示。 + +导入HDR视频,提示可能兼容性风险;选择转SDR后生成成功。 + +拒绝相册权限后再次进入导入流程,能看到解释页并可跳转系统设置。 + +在iOS 16设备:壁纸引导页必须显示“系统限制/不支持动效”的文案与替代方案。 + +# 21. 合规、审核与版权 + +壁纸设置:不得宣称“一键自动设置系统壁纸”,应明确为“引导用户在系统中设置”。 + +权限说明:文案必须与实际用途一致,避免过度索权。 + +版权与内容:用户素材本地处理;若引入模板/素材库需提供版权声明与授权来源。 + +隐私政策:说明是否收集设备信息、崩溃日志与使用数据;提供关闭选项。 + +# 22. 文案与引导内容(可直接放App里) + +## 22.1 关键页面文案(示例) + +## 22.2 壁纸设置步骤卡片(iOS 17+ 示例) + +打开【设置】->【墙纸】。 + +点击【添加新墙纸】->【照片】。 + +选择你刚保存的 Live Photo(带 Live 标识)。 + +点击屏幕左下角的【Live】按钮确保动效开启(若可见)。 + +保存并设置到锁屏。唤醒锁屏时,按压/触摸或系统动作会触发动效(以系统表现为准)。 + +提示:提示:若出现 “Motion not available”,请检查:是否开启低电量模式、是否选择了 Live Photo、素材是否过长/过大。 + +# 23. 开放问题(需你决策后再让AI编码) + +商业化:免费+广告?一次性买断?订阅(模板/批量/高清导出)? 
+ +支持的最低系统版本:iOS 14/15/16?(决定了API与用户覆盖) + +是否支持拍摄直接生成(AVCapture Live Photo Capture)作为后续版本? + +是否需要“跨设备同步”(iCloud)保存作品参数/模板? + +是否需要“微信分享优化”(检测相册资产是否仍为Live Photo形态)? + + + +# 附录:表格汇总 + + + +## 表格 1 + +| 版本 | 日期 | 作者 | 说明 | + +| --- | --- | --- | --- | + +| V0.1 | 2025-12-13 | —— | 首版草案:功能范围、流程与需求拆解 | + +| V0.2 | 2025-12-13 | —— | 补充:信息架构、用户故事、详细需求表、技术方案、QA/合规与开放问题 | + +| V1.0 | —— | —— | (预留) | + + + +## 表格 2 + +| 页面 | 路由/模块名 | 主要能力 | MVP | + +| --- | --- | --- | --- | + +| 首页 | HomeView | 导入入口、最近作品 | 是 | + +| 编辑 | EditorView | 裁剪/封面/预览/生成 | 是 | + +| 生成进度 | ProcessingView | 进度、取消、失败重试 | 是 | + +| 结果 | ResultView | 保存、再次编辑、引导入口 | 是 | + +| 壁纸引导 | WallpaperGuideView | 版本检测、步骤卡片、FAQ | 是 | + +| 作品库 | LibraryView | 作品管理 | 可选 | + +| 设置 | SettingsView | 权限、清理、隐私、反馈 | 可选 | + + + +## 表格 3 + +| 编号 | 用户故事 | 优先级 | 备注 | + +| --- | --- | --- | --- | + +| US-01 | 作为用户,我想从相册选择一段视频并生成 Live Photo,这样我可以在相册长按播放。 | P0 | 核心闭环 | + +| US-02 | 作为用户,我想裁剪比例以适配锁屏,避免设置壁纸后被系统裁掉关键主体。 | P0 | 需预设模板 | + +| US-03 | 作为用户,我想选择封面帧(关键帧),这样锁屏静止画面最好看。 | P0 | 提供智能推荐 | + +| US-04 | 作为用户,我想保存到相册并看到 Live 标识,确保微信等平台可识别为实况。 | P0 | 写入 pairedVideo | + +| US-05 | 作为用户,我想得到清晰的设置壁纸引导,减少我找不到入口或不能动的困扰。 | P0 | 版本差异化 | + +| US-06 | 作为用户,我希望失败时能看到原因和解决办法,而不是一句“失败”。 | P0 | 错误可行动 | + +| US-07 | 作为高级用户,我想用‘照片+视频’合成,以照片为封面,视频为动效。 | P1 | 高级模式 | + +| US-08 | 作为用户,我想一键清理缓存,避免占用太多存储。 | P1 | 设置页 | + + + +## 表格 4 + +| 功能点 | 输入 | 处理/规则 | 输出/状态 | 优先级 | + +| --- | --- | --- | --- | --- | + +| 导入视频 | PHPicker选择视频 | 读取时长/分辨率/帧率/编码;提示不推荐参数 | 进入编辑页或提示不兼容 | P0 | + +| 比例裁剪 | 视频画面 + 模板比例 | 支持拖拽/缩放;保持比例;实时预览 | 裁剪区域参数(scale/offset) | P0 | + +| 时长裁剪 | 起止时间 | 默认3秒;限制1.5-5秒;对齐关键帧建议 | trimStart/trimEnd | P0 | + +| 封面帧选择 | 时间点t | 提供静帧预览;可智能推荐t* | keyFrameTime | P0 | + +| 生成Live Photo | 视频+裁剪+关键帧(+可选封面照) | 重封装写入content identifier、still-image-time;必要时转码 | photoURL + videoURL + 成功/失败 | P0 | + +| 保存到相册 | photoURL+videoURL | PHAssetCreationRequest写入.photo与.pairedVideo | 相册新增Live Photo资产 | P0 | + +| 引导设置壁纸 | 系统版本/设备类型 | 差异化步骤卡片;提示限制与FAQ | 用户完成率提升 | P0 | + 
+| 缓存清理 | 中间文件目录 | 仅清理App生成缓存;不影响相册成品 | 释放存储空间 | P1 | + + + +## 表格 5 + +| 模块 | 职责 | 关键框架/组件 | + +| --- | --- | --- | + +| MediaImport | PHPicker选择媒体、读取元信息 | PhotosUI, AVFoundation | + +| EditorCore | 裁剪参数管理、预览渲染 | SwiftUI/UIView, AVPlayer | + +| LivePhotoBuilder | 写入元数据、重封装/转码 | AVAssetReader/Writer, ImageIO | + +| AlbumWriter | 写入系统相册 | Photos | + +| Validation | 生成后校验是否为Live Photo | Photos, PHLivePhoto | + +| GuideEngine | 版本检测、引导内容与FAQ | UIKit/SwiftUI | + +| CacheManager | 中间文件管理与清理 | FileManager | + +| Analytics | 埋点与漏斗 | 自研/第三方SDK | + + + +## 表格 6 + +| 维度 | 覆盖建议 | + +| --- | --- | + +| 设备 | 至少2款iPhone(不同分辨率)+ 1款iPad | + +| 系统 | iOS/iPadOS 17+(验证壁纸动效)+ iOS/iPadOS 16(验证限制提示) | + +| 素材 | H.264/HEVC;30/60fps;SDR/HDR;有/无音频;竖/横屏 | + +| 时长 | 1.5秒、3秒、5秒边界 | + +| 权限 | 首次拒绝/后续开启;相册只读/读写 | + + + +## 表格 7 + +| 场景 | 标题 | 正文/提示 | 按钮 | + +| --- | --- | --- | --- | + +| 生成中 | 正在生成 Live Photo | 正在处理视频并写入实况信息,请不要退出。 | 取消 / 后台继续 | + +| 生成成功 | 生成成功 | 已生成实况照片。保存到相册后可在“照片”里长按播放。 | 保存到相册 / 去设置壁纸 | + +| 保存成功 | 已保存到相册 | 你可以在“Live Photos”相簿找到它。 | 去设置壁纸 / 再做一个 | + +| 不支持提示 | 当前系统限制 | 此系统版本无法将 Live Photo 作为可播放的锁屏壁纸。你仍可保存为实况照片,或升级系统后再设置。 | 了解原因 / 返回 | + +| 低电量提示 | 可能无法播放动效 | 低电量模式可能导致锁屏动效不可用。 | 知道了 | diff --git a/docs/TECHSPEC_LivePhoto_App_V0.2_2025-12-13.md b/docs/TECHSPEC_LivePhoto_App_V0.2_2025-12-13.md new file mode 100644 index 0000000..257d5fd --- /dev/null +++ b/docs/TECHSPEC_LivePhoto_App_V0.2_2025-12-13.md @@ -0,0 +1,167 @@ +# 技术规格书|Live Photo 制作与动态壁纸引导 App(V0.2-Tech) + +- 适用平台:iPhone / iPad(iOS / iPadOS) +- 日期:2025-12-13(Asia/Manila) +- 用途:用于 AI 编码 / 架构落地 / 研发验收 + +## 1. 范围与目标 + +本技术规格书覆盖:媒体导入、编辑、Live Photo 合成与写入、校验、壁纸引导、缓存与日志、埋点。 + +- **目标**:在不使用私有 API 的前提下,生成系统相册可识别的 Live Photo 资产,并通过引导帮助用户设置锁屏动态壁纸。 +- **非目标**:不直接替用户设置系统壁纸;不做云端上传/存储(默认本地)。 + +## 2. 
总体架构 + +### 2.1 模块分层 + +| 层级 | 模块 | 职责 | 关键技术 | +|---|---|---|---| +| UI | Home/Editor/Processing/Result/Guide | 页面展示、交互、状态驱动 | SwiftUI(推荐)/UIKit | +| Domain | Workflows | 导入-编辑-生成-保存-引导的业务编排 | async/await, Combine(可选) | +| Service | MediaImport / Builder / AlbumWriter / Validation | 导入、合成、写相册、校验 | PhotosUI, AVFoundation, Photos, ImageIO | +| Infra | CacheManager / Logger / Analytics | 缓存、日志、埋点、错误封装 | FileManager, OSLog, 自研/第三方 | + +### 2.2 目录结构建议(Xcode) + +- App/(入口、DI、AppState) +- Features/Home, Features/Editor, Features/Processing, Features/Result, Features/Guide +- Domain/Models(WorkItem、ExportParams、Capability) +- Domain/UseCases(ImportVideo、BuildLivePhoto、SaveToAlbum、Validate、GenerateGuideSteps) +- Services/MediaImport, Services/LivePhotoBuilder, Services/AlbumWriter, Services/Validation +- Infra/Cache, Infra/Logging, Infra/Analytics, Infra/Errors +- Resources/(引导图、FAQ 文案、模板配置 JSON) + +## 3. 核心数据模型 + +### 3.1 WorkItem(作品) + +| 字段 | 类型 | 说明 | +|---|---|---| +| id | UUID | 作品唯一标识 | +| createdAt | Date | 创建时间 | +| sourceVideo | SourceRef | 来源引用:PHAsset localIdentifier 或 fileURL | +| coverImage | SourceRef? | 可选封面图引用 | +| exportParams | ExportParams | 比例/裁剪/关键帧等参数 | +| status | enum | idle/editing/processing/success/failed | +| resultAssetId | String? | 写入相册后的 asset.localIdentifier | +| cacheDir | URL | 中间文件目录 | +| error | AppError? | 失败信息(含可行动建议) | + +### 3.2 ExportParams(导出参数) + +| 字段 | 类型 | 说明/规则 | +|---|---|---| +| aspectRatio | Preset/CGFloat | 模板:iPhoneLock、Full、4:3… | +| cropTransform | scale + offset | 编辑页输出的裁剪参数 | +| trimStart | Double | 秒;默认0 | +| trimEnd | Double | 秒;默认3;限制1.5~5 | +| keyFrameTime | Double | 秒;在trim区间内 | +| audioPolicy | enum | keep/remove;默认keep | +| codecPolicy | enum | passthrough / fallbackH264 | +| hdrPolicy | enum | keep / toneMapToSDR(建议) | +| maxDimension | Int | 上限,例如 1920 或自适应 | + +## 4. 
状态机与工作流 + +### 4.1 作品状态机(建议) + +- Idle(未开始) +- → Importing(导入中)→ Editing(编辑中) +- → Processing(生成中:Normalize → BuildPhoto → BuildVideo → Pairing → Validate) +- → Success(成功:可保存/已保存) +- → Failed(失败:含可重试点与建议) + +处理阶段建议暴露为枚举(用于进度与日志): + +- normalize(归一化:裁剪/转码策略确定) +- extractKeyFrame(取关键帧/封面图) +- writePhotoMetadata(写图片侧元数据) +- writeVideoMetadata(重封装并写视频侧元数据) +- saveToAlbum(写入相册) +- validate(校验 Live Photo 是否可识别) + +### 4.2 并发与取消 + +- 生成任务使用 Task(Swift Concurrency)实现,可被用户取消;取消时清理未写入相册的中间文件。 +- 对 AVAssetReader/Writer 的写入任务采用串行队列/actor 封装,避免资源竞争。 +- 避免主线程阻塞:任何转码/重封装/写文件都在后台队列或 Task 中执行。 + +## 5. Live Photo 合成实现规范 + +### 5.1 输入约束与预处理(Normalize) + +- 时长:将 trim 区间限制在 1.5~5s(默认 3s)。 +- 分辨率:按 maxDimension 缩放;优先保证主体清晰而不是极限画质。 +- 帧率:高帧率(>30fps)可降至 30fps,以提升兼容性与体积。 +- HDR:若检测到 HDR/Dolby Vision,优先提示并建议转 SDR(tone mapping),避免部分系统/场景识别异常。 + +### 5.2 绑定标识(Identifier) + +- 每次生成一个 assetIdentifier(UUID string)。 +- 图片与视频必须共享同一 identifier;否则系统不会将其识别为 Live Photo 对。 + +### 5.3 图片侧(Photo)输出规范 + +- 优先输出 HEIC(更现代且在较新系统上更稳定);必要时支持 JPEG 作为降级。 +- 写入 Apple MakerNote/相关字段以携带 identifier(实现时以可验证的字段集合为准)。 +- 封面图来源优先级:用户选封面照片 > 从视频 keyFrameTime 抽帧。 + +### 5.4 视频侧(Paired Video)输出规范 + +- 容器:MOV。 +- 写入 QuickTime metadata:content identifier(与照片一致)。 +- 写入 timed metadata track:still-image-time(标记关键照片时刻)。 +- 尽量 re-mux(不重编码)提升速度;若写入失败或素材不兼容,再降级到转码流程。 + +### 5.5 写入相册规范 + +- 使用 PHAssetCreationRequest 同时 addResource:.photo 与 .pairedVideo。 +- 在 performChanges 回调中记录成功/失败;成功后写入 resultAssetId;失败需返回可行动错误。 + +### 5.6 校验(Validation) + +- 策略 A:保存后用 resultAssetId 取回 PHAsset,尝试请求 Live Photo(或检查其资源类型/子资源)。 +- 策略 B:若无法直接取 Live Photo 对象,则至少验证:相册中显示 Live 标识(人工/自动化截图对比可作为 QA 手段)。 +- 校验失败要区分:合成失败 vs 写相册失败 vs 系统未识别。 + +## 6. 
错误码与可行动建议(Error Taxonomy) + +| 错误码 | 阶段 | 用户可见文案(标题) | 常见原因 | 建议动作(App 提示) | +|---|---|---|---|---| +| LPB-001 | Import | 无法读取视频 | 权限不足/资源损坏 | 检查相册权限;换一个视频 | +| LPB-101 | Normalize | 素材参数不兼容 | HDR/超高分辨率/奇怪编码 | 开启“兼容模式”;降低分辨率/转 SDR | +| LPB-201 | Photo | 封面生成失败 | 抽帧失败/内存不足 | 缩短时长;降低分辨率;重试 | +| LPB-301 | Video | 视频处理失败 | 重封装/转码失败 | 切换到 H.264 兼容导出;关闭音频 | +| LPB-401 | Album | 保存到相册失败 | 无写入权限/相册忙 | 允许“添加到相册”;稍后重试 | +| LPB-501 | Validate | 系统未识别为实况 | 元数据不完整/系统限制 | 重新生成;尝试更短视频;升级系统(如需壁纸动效) | +| LPB-901 | Unknown | 发生未知错误 | 不可预期异常 | 反馈日志;重启 App;重试 | + +## 7. 缓存、日志与诊断 + +### 7.1 缓存目录结构 + +- `Caches/LivePhotoBuilder/{workId}/source.mov`(可选) +- `Caches/LivePhotoBuilder/{workId}/normalized.mov`(归一化输出) +- `Caches/LivePhotoBuilder/{workId}/photo.heic`(封面图) +- `Caches/LivePhotoBuilder/{workId}/paired.mov`(写入元数据后的成品视频) +- `Caches/LivePhotoBuilder/{workId}/builder.log`(阶段日志,供反馈) + +### 7.2 清理策略 + +- 成功:保留 photo/paired 的短期缓存(例如 24h)以支持“再次保存/分享”;随后自动清理。 +- 失败:保留日志与关键中间文件(例如 24h),方便用户一键反馈;随后清理。 +- 用户手动“清理缓存”:立即删除所有 workId 目录,但不影响系统相册成品。 + +## 8. 安全与隐私 + +- 默认全程本地处理,不上传用户素材。 +- 日志默认不包含媒体内容本身;仅记录参数与错误码;用户反馈前需二次确认。 +- 权限请求延迟到使用时,并提供用途说明与拒绝后的替代路径。 + +## 9. AI 编码提示(可直接复制给 AI) + +- 按模块创建 Swift Package 或 App 内分组;对 LivePhotoBuilder 使用 `actor` 管理状态与文件路径。 +- UseCase 层提供 async 函数:`importVideo()`, `buildLivePhoto()`, `saveToAlbum()`, `validate()`. 
+- UI 层采用单一 source of truth:`WorkItemViewState`;所有副作用通过 UseCase 注入。 +- 为每个阶段输出结构化日志与 `progress (0~1)` + `stage enum`。 diff --git a/docs_index.md b/docs_index.md new file mode 100644 index 0000000..98e86f5 --- /dev/null +++ b/docs_index.md @@ -0,0 +1,27 @@ +# 文档索引 + +## 需求 + +- docs/PRD_LivePhoto_App_V0.2_2025-12-13.md:PRD(V0.2),定义目标、MVP范围、流程、验收与风险。 + +## 设计 + +- docs/TECHSPEC_LivePhoto_App_V0.2_2025-12-13.md:技术规格(V0.2),架构/模型/合成规范/错误码/缓存等。 +- docs/IXSPEC_LivePhoto_App_V0.2_2025-12-13.md:交互规格(V0.2),页面交互/状态/埋点/iPad适配等。 + +## 测试 + +- (待补充) + +## 用户手册 + +- (待补充) + +## 知识库 + +- docs_index.md:文档索引(本文件) + - PROJECT_STRUCTURE.md:项目结构(目录/文件结构变更记录) + +## 任务进度 + +- TASK.md:任务清单(按阶段拆解) diff --git a/to-live-photo/to-live-photo.xcodeproj/project.pbxproj b/to-live-photo/to-live-photo.xcodeproj/project.pbxproj new file mode 100644 index 0000000..6bb7fb0 --- /dev/null +++ b/to-live-photo/to-live-photo.xcodeproj/project.pbxproj @@ -0,0 +1,612 @@ +// !$*UTF8*$! +{ + archiveVersion = 1; + classes = { + }; + objectVersion = 77; + objects = { + +/* Begin PBXBuildFile section */ + F1A6CF932EED993E00822C1B /* LivePhotoCore in Frameworks */ = {isa = PBXBuildFile; productRef = F1A6CF922EED993E00822C1B /* LivePhotoCore */; }; +/* End PBXBuildFile section */ + +/* Begin PBXContainerItemProxy section */ + F1A6CF5D2EED942800822C1B /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = F1A6CF472EED942500822C1B /* Project object */; + proxyType = 1; + remoteGlobalIDString = F1A6CF4E2EED942500822C1B; + remoteInfo = "to-live-photo"; + }; + F1A6CF672EED942800822C1B /* PBXContainerItemProxy */ = { + isa = PBXContainerItemProxy; + containerPortal = F1A6CF472EED942500822C1B /* Project object */; + proxyType = 1; + remoteGlobalIDString = F1A6CF4E2EED942500822C1B; + remoteInfo = "to-live-photo"; + }; +/* End PBXContainerItemProxy section */ + +/* Begin PBXFileReference section */ + F1A6CF4F2EED942500822C1B /* to-live-photo.app */ = {isa = PBXFileReference; explicitFileType = 
wrapper.application; includeInIndex = 0; path = "to-live-photo.app"; sourceTree = BUILT_PRODUCTS_DIR; }; + F1A6CF5C2EED942800822C1B /* to-live-photoTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "to-live-photoTests.xctest"; sourceTree = BUILT_PRODUCTS_DIR; }; + F1A6CF662EED942800822C1B /* to-live-photoUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "to-live-photoUITests.xctest"; sourceTree = BUILT_PRODUCTS_DIR; }; +/* End PBXFileReference section */ + +/* Begin PBXFileSystemSynchronizedRootGroup section */ + F1A6CF512EED942500822C1B /* to-live-photo */ = { + isa = PBXFileSystemSynchronizedRootGroup; + path = "to-live-photo"; + sourceTree = ""; + }; + F1A6CF5F2EED942800822C1B /* to-live-photoTests */ = { + isa = PBXFileSystemSynchronizedRootGroup; + path = "to-live-photoTests"; + sourceTree = ""; + }; + F1A6CF692EED942800822C1B /* to-live-photoUITests */ = { + isa = PBXFileSystemSynchronizedRootGroup; + path = "to-live-photoUITests"; + sourceTree = ""; + }; +/* End PBXFileSystemSynchronizedRootGroup section */ + +/* Begin PBXFrameworksBuildPhase section */ + F1A6CF4C2EED942500822C1B /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + F1A6CF932EED993E00822C1B /* LivePhotoCore in Frameworks */, + ); + runOnlyForDeploymentPostprocessing = 0; + }; + F1A6CF592EED942800822C1B /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + F1A6CF632EED942800822C1B /* Frameworks */ = { + isa = PBXFrameworksBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXFrameworksBuildPhase section */ + +/* Begin PBXGroup section */ + F1A6CF462EED942500822C1B = { + isa = PBXGroup; + children = ( + F1A6CF512EED942500822C1B /* to-live-photo */, + F1A6CF5F2EED942800822C1B 
/* to-live-photoTests */, + F1A6CF692EED942800822C1B /* to-live-photoUITests */, + F1A6CF502EED942500822C1B /* Products */, + ); + sourceTree = ""; + }; + F1A6CF502EED942500822C1B /* Products */ = { + isa = PBXGroup; + children = ( + F1A6CF4F2EED942500822C1B /* to-live-photo.app */, + F1A6CF5C2EED942800822C1B /* to-live-photoTests.xctest */, + F1A6CF662EED942800822C1B /* to-live-photoUITests.xctest */, + ); + name = Products; + sourceTree = ""; + }; +/* End PBXGroup section */ + +/* Begin PBXNativeTarget section */ + F1A6CF4E2EED942500822C1B /* to-live-photo */ = { + isa = PBXNativeTarget; + buildConfigurationList = F1A6CF702EED942800822C1B /* Build configuration list for PBXNativeTarget "to-live-photo" */; + buildPhases = ( + F1A6CF4B2EED942500822C1B /* Sources */, + F1A6CF4C2EED942500822C1B /* Frameworks */, + F1A6CF4D2EED942500822C1B /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + ); + fileSystemSynchronizedGroups = ( + F1A6CF512EED942500822C1B /* to-live-photo */, + ); + name = "to-live-photo"; + packageProductDependencies = ( + F1A6CF922EED993E00822C1B /* LivePhotoCore */, + ); + productName = "to-live-photo"; + productReference = F1A6CF4F2EED942500822C1B /* to-live-photo.app */; + productType = "com.apple.product-type.application"; + }; + F1A6CF5B2EED942800822C1B /* to-live-photoTests */ = { + isa = PBXNativeTarget; + buildConfigurationList = F1A6CF732EED942800822C1B /* Build configuration list for PBXNativeTarget "to-live-photoTests" */; + buildPhases = ( + F1A6CF582EED942800822C1B /* Sources */, + F1A6CF592EED942800822C1B /* Frameworks */, + F1A6CF5A2EED942800822C1B /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + F1A6CF5E2EED942800822C1B /* PBXTargetDependency */, + ); + fileSystemSynchronizedGroups = ( + F1A6CF5F2EED942800822C1B /* to-live-photoTests */, + ); + name = "to-live-photoTests"; + packageProductDependencies = ( + ); + productName = "to-live-photoTests"; + productReference = F1A6CF5C2EED942800822C1B /* 
to-live-photoTests.xctest */; + productType = "com.apple.product-type.bundle.unit-test"; + }; + F1A6CF652EED942800822C1B /* to-live-photoUITests */ = { + isa = PBXNativeTarget; + buildConfigurationList = F1A6CF762EED942800822C1B /* Build configuration list for PBXNativeTarget "to-live-photoUITests" */; + buildPhases = ( + F1A6CF622EED942800822C1B /* Sources */, + F1A6CF632EED942800822C1B /* Frameworks */, + F1A6CF642EED942800822C1B /* Resources */, + ); + buildRules = ( + ); + dependencies = ( + F1A6CF682EED942800822C1B /* PBXTargetDependency */, + ); + fileSystemSynchronizedGroups = ( + F1A6CF692EED942800822C1B /* to-live-photoUITests */, + ); + name = "to-live-photoUITests"; + packageProductDependencies = ( + ); + productName = "to-live-photoUITests"; + productReference = F1A6CF662EED942800822C1B /* to-live-photoUITests.xctest */; + productType = "com.apple.product-type.bundle.ui-testing"; + }; +/* End PBXNativeTarget section */ + +/* Begin PBXProject section */ + F1A6CF472EED942500822C1B /* Project object */ = { + isa = PBXProject; + attributes = { + BuildIndependentTargetsInParallel = 1; + LastSwiftUpdateCheck = 2610; + LastUpgradeCheck = 2610; + TargetAttributes = { + F1A6CF4E2EED942500822C1B = { + CreatedOnToolsVersion = 26.1.1; + }; + F1A6CF5B2EED942800822C1B = { + CreatedOnToolsVersion = 26.1.1; + TestTargetID = F1A6CF4E2EED942500822C1B; + }; + F1A6CF652EED942800822C1B = { + CreatedOnToolsVersion = 26.1.1; + TestTargetID = F1A6CF4E2EED942500822C1B; + }; + }; + }; + buildConfigurationList = F1A6CF4A2EED942500822C1B /* Build configuration list for PBXProject "to-live-photo" */; + developmentRegion = en; + hasScannedForEncodings = 0; + knownRegions = ( + en, + Base, + ); + mainGroup = F1A6CF462EED942500822C1B; + minimizedProjectReferenceProxies = 1; + packageReferences = ( + F1A6CF912EED993E00822C1B /* XCLocalSwiftPackageReference "../../to-live-photo" */, + ); + preferredProjectObjectVersion = 77; + productRefGroup = F1A6CF502EED942500822C1B /* Products */; + 
projectDirPath = ""; + projectRoot = ""; + targets = ( + F1A6CF4E2EED942500822C1B /* to-live-photo */, + F1A6CF5B2EED942800822C1B /* to-live-photoTests */, + F1A6CF652EED942800822C1B /* to-live-photoUITests */, + ); + }; +/* End PBXProject section */ + +/* Begin PBXResourcesBuildPhase section */ + F1A6CF4D2EED942500822C1B /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + F1A6CF5A2EED942800822C1B /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + F1A6CF642EED942800822C1B /* Resources */ = { + isa = PBXResourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXResourcesBuildPhase section */ + +/* Begin PBXSourcesBuildPhase section */ + F1A6CF4B2EED942500822C1B /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + F1A6CF582EED942800822C1B /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; + F1A6CF622EED942800822C1B /* Sources */ = { + isa = PBXSourcesBuildPhase; + buildActionMask = 2147483647; + files = ( + ); + runOnlyForDeploymentPostprocessing = 0; + }; +/* End PBXSourcesBuildPhase section */ + +/* Begin PBXTargetDependency section */ + F1A6CF5E2EED942800822C1B /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = F1A6CF4E2EED942500822C1B /* to-live-photo */; + targetProxy = F1A6CF5D2EED942800822C1B /* PBXContainerItemProxy */; + }; + F1A6CF682EED942800822C1B /* PBXTargetDependency */ = { + isa = PBXTargetDependency; + target = F1A6CF4E2EED942500822C1B /* to-live-photo */; + targetProxy = F1A6CF672EED942800822C1B /* PBXContainerItemProxy */; + }; +/* End PBXTargetDependency section */ + +/* Begin 
XCBuildConfiguration section */ + F1A6CF6E2EED942800822C1B /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = dwarf; + DEVELOPMENT_TEAM = Y976PBNGA8; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_TESTABILITY = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GCC_DYNAMIC_NO_PIC = NO; + GCC_NO_COMMON_BLOCKS = YES; + GCC_OPTIMIZATION_LEVEL = 0; + GCC_PREPROCESSOR_DEFINITIONS = ( + "DEBUG=1", + "$(inherited)", + ); + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = 
YES; + IPHONEOS_DEPLOYMENT_TARGET = 18.0; + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE; + MTL_FAST_MATH = YES; + ONLY_ACTIVE_ARCH = YES; + SDKROOT = iphoneos; + SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)"; + SWIFT_OPTIMIZATION_LEVEL = "-Onone"; + }; + name = Debug; + }; + F1A6CF6F2EED942800822C1B /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ALWAYS_SEARCH_USER_PATHS = NO; + ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES; + CLANG_ANALYZER_NONNULL = YES; + CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE; + CLANG_CXX_LANGUAGE_STANDARD = "gnu++20"; + CLANG_ENABLE_MODULES = YES; + CLANG_ENABLE_OBJC_ARC = YES; + CLANG_ENABLE_OBJC_WEAK = YES; + CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES; + CLANG_WARN_BOOL_CONVERSION = YES; + CLANG_WARN_COMMA = YES; + CLANG_WARN_CONSTANT_CONVERSION = YES; + CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES; + CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR; + CLANG_WARN_DOCUMENTATION_COMMENTS = YES; + CLANG_WARN_EMPTY_BODY = YES; + CLANG_WARN_ENUM_CONVERSION = YES; + CLANG_WARN_INFINITE_RECURSION = YES; + CLANG_WARN_INT_CONVERSION = YES; + CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES; + CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES; + CLANG_WARN_OBJC_LITERAL_CONVERSION = YES; + CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR; + CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES; + CLANG_WARN_RANGE_LOOP_ANALYSIS = YES; + CLANG_WARN_STRICT_PROTOTYPES = YES; + CLANG_WARN_SUSPICIOUS_MOVE = YES; + CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE; + CLANG_WARN_UNREACHABLE_CODE = YES; + CLANG_WARN__DUPLICATE_METHOD_MATCH = YES; + COPY_PHASE_STRIP = NO; + DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; + DEVELOPMENT_TEAM = Y976PBNGA8; + ENABLE_NS_ASSERTIONS = NO; + ENABLE_STRICT_OBJC_MSGSEND = YES; + ENABLE_USER_SCRIPT_SANDBOXING = YES; + GCC_C_LANGUAGE_STANDARD = gnu17; + GCC_NO_COMMON_BLOCKS = YES; + GCC_WARN_64_TO_32_BIT_CONVERSION = YES; + 
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR; + GCC_WARN_UNDECLARED_SELECTOR = YES; + GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE; + GCC_WARN_UNUSED_FUNCTION = YES; + GCC_WARN_UNUSED_VARIABLE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 18.0; + LOCALIZATION_PREFERS_STRING_CATALOGS = YES; + MTL_ENABLE_DEBUG_INFO = NO; + MTL_FAST_MATH = YES; + SDKROOT = iphoneos; + SWIFT_COMPILATION_MODE = wholemodule; + VALIDATE_PRODUCT = YES; + }; + name = Release; + }; + F1A6CF712EED942800822C1B /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = Y976PBNGA8; + ENABLE_PREVIEWS = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = "用于将生成的 Live Photo 保存到系统相册"; + INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "用于读取并校验已保存的 Live Photo(可选)"; + INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; + INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchScreen_Generation = YES; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + IPHONEOS_DEPLOYMENT_TARGET = 18.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "xu.to-live-photo"; + PRODUCT_NAME = "$(TARGET_NAME)"; + STRING_CATALOG_GENERATE_SYMBOLS = YES; + SWIFT_APPROACHABLE_CONCURRENCY = YES; + SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES; + SWIFT_VERSION = 5.0; + 
TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Debug; + }; + F1A6CF722EED942800822C1B /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon; + ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = Y976PBNGA8; + ENABLE_PREVIEWS = YES; + GENERATE_INFOPLIST_FILE = YES; + INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = "用于将生成的 Live Photo 保存到系统相册"; + INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "用于读取并校验已保存的 Live Photo(可选)"; + INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES; + INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES; + INFOPLIST_KEY_UILaunchScreen_Generation = YES; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight"; + IPHONEOS_DEPLOYMENT_TARGET = 18.0; + LD_RUNPATH_SEARCH_PATHS = ( + "$(inherited)", + "@executable_path/Frameworks", + ); + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "xu.to-live-photo"; + PRODUCT_NAME = "$(TARGET_NAME)"; + STRING_CATALOG_GENERATE_SYMBOLS = YES; + SWIFT_APPROACHABLE_CONCURRENCY = YES; + SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor; + SWIFT_EMIT_LOC_STRINGS = YES; + SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + }; + name = Release; + }; + F1A6CF742EED942800822C1B /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + BUNDLE_LOADER = "$(TEST_HOST)"; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = Y976PBNGA8; + GENERATE_INFOPLIST_FILE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 18.0; + MARKETING_VERSION = 1.0; + 
PRODUCT_BUNDLE_IDENTIFIER = "xu.to-live-photoTests"; + PRODUCT_NAME = "$(TARGET_NAME)"; + STRING_CATALOG_GENERATE_SYMBOLS = NO; + SWIFT_APPROACHABLE_CONCURRENCY = YES; + SWIFT_EMIT_LOC_STRINGS = NO; + SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/to-live-photo.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/to-live-photo"; + }; + name = Debug; + }; + F1A6CF752EED942800822C1B /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + BUNDLE_LOADER = "$(TEST_HOST)"; + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = Y976PBNGA8; + GENERATE_INFOPLIST_FILE = YES; + IPHONEOS_DEPLOYMENT_TARGET = 18.0; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "xu.to-live-photoTests"; + PRODUCT_NAME = "$(TARGET_NAME)"; + STRING_CATALOG_GENERATE_SYMBOLS = NO; + SWIFT_APPROACHABLE_CONCURRENCY = YES; + SWIFT_EMIT_LOC_STRINGS = NO; + SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + TEST_HOST = "$(BUILT_PRODUCTS_DIR)/to-live-photo.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/to-live-photo"; + }; + name = Release; + }; + F1A6CF772EED942800822C1B /* Debug */ = { + isa = XCBuildConfiguration; + buildSettings = { + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + DEVELOPMENT_TEAM = Y976PBNGA8; + GENERATE_INFOPLIST_FILE = YES; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "xu.to-live-photoUITests"; + PRODUCT_NAME = "$(TARGET_NAME)"; + STRING_CATALOG_GENERATE_SYMBOLS = NO; + SWIFT_APPROACHABLE_CONCURRENCY = YES; + SWIFT_EMIT_LOC_STRINGS = NO; + SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + TEST_TARGET_NAME = "to-live-photo"; + }; + name = Debug; + }; + F1A6CF782EED942800822C1B /* Release */ = { + isa = XCBuildConfiguration; + buildSettings = { + CODE_SIGN_STYLE = Automatic; + CURRENT_PROJECT_VERSION = 1; + 
DEVELOPMENT_TEAM = Y976PBNGA8; + GENERATE_INFOPLIST_FILE = YES; + MARKETING_VERSION = 1.0; + PRODUCT_BUNDLE_IDENTIFIER = "xu.to-live-photoUITests"; + PRODUCT_NAME = "$(TARGET_NAME)"; + STRING_CATALOG_GENERATE_SYMBOLS = NO; + SWIFT_APPROACHABLE_CONCURRENCY = YES; + SWIFT_EMIT_LOC_STRINGS = NO; + SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES; + SWIFT_VERSION = 5.0; + TARGETED_DEVICE_FAMILY = "1,2"; + TEST_TARGET_NAME = "to-live-photo"; + }; + name = Release; + }; +/* End XCBuildConfiguration section */ + +/* Begin XCConfigurationList section */ + F1A6CF4A2EED942500822C1B /* Build configuration list for PBXProject "to-live-photo" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + F1A6CF6E2EED942800822C1B /* Debug */, + F1A6CF6F2EED942800822C1B /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + F1A6CF702EED942800822C1B /* Build configuration list for PBXNativeTarget "to-live-photo" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + F1A6CF712EED942800822C1B /* Debug */, + F1A6CF722EED942800822C1B /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + F1A6CF732EED942800822C1B /* Build configuration list for PBXNativeTarget "to-live-photoTests" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + F1A6CF742EED942800822C1B /* Debug */, + F1A6CF752EED942800822C1B /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; + F1A6CF762EED942800822C1B /* Build configuration list for PBXNativeTarget "to-live-photoUITests" */ = { + isa = XCConfigurationList; + buildConfigurations = ( + F1A6CF772EED942800822C1B /* Debug */, + F1A6CF782EED942800822C1B /* Release */, + ); + defaultConfigurationIsVisible = 0; + defaultConfigurationName = Release; + }; +/* End XCConfigurationList section */ + +/* Begin XCLocalSwiftPackageReference section */ + F1A6CF912EED993E00822C1B /* XCLocalSwiftPackageReference 
//
//  AppState.swift
//  to-live-photo
//
//  Global app state + page-navigation state machine.
//

import SwiftUI
import PhotosUI
import LivePhotoCore

/// Navigation destinations for the app's single NavigationStack.
enum AppRoute: Hashable {
    case home
    case editor(videoURL: URL)
    case processing(videoURL: URL, exportParams: ExportParams)
    case result(workflowResult: LivePhotoWorkflowResult)
    case wallpaperGuide(assetId: String)
}

/// Single source of truth for navigation and the Live Photo build pipeline.
/// Main-actor isolated: all properties are read and written by SwiftUI views.
@MainActor
@Observable
final class AppState {
    /// Path driving the root NavigationStack.
    var navigationPath = NavigationPath()

    /// Latest progress reported by the running build; nil before the first callback.
    var processingProgress: LivePhotoBuildProgress?
    /// Last failure surfaced to the UI; cleared when a new build starts.
    var processingError: AppError?
    /// True while `startProcessing` is awaiting the workflow.
    var isProcessing = false

    // Built once at launch; stays nil when construction throws (reported as
    // LPB-001 when the user later tries to process).
    private var workflow: LivePhotoWorkflow?

    init() {
        do {
            workflow = try LivePhotoWorkflow()
        } catch {
            // Keep the app launchable; startProcessing surfaces the failure.
            print("Failed to init LivePhotoWorkflow: \(error)")
        }
    }

    func navigateTo(_ route: AppRoute) {
        navigationPath.append(route)
    }

    func popToRoot() {
        navigationPath = NavigationPath()
    }

    func pop() {
        if !navigationPath.isEmpty {
            navigationPath.removeLast()
        }
    }

    /// Runs the build → save → validate workflow for `videoURL`.
    ///
    /// - Parameters:
    ///   - videoURL: Local file URL of the source video.
    ///   - exportParams: Trim window and key-frame settings from the editor.
    /// - Returns: The workflow result on success; nil on failure, in which
    ///   case the error is published via `processingError`.
    func startProcessing(videoURL: URL, exportParams: ExportParams) async -> LivePhotoWorkflowResult? {
        guard let workflow else {
            processingError = AppError(code: "LPB-001", message: "初始化失败", suggestedActions: ["重启 App"])
            return nil
        }

        isProcessing = true
        processingProgress = nil
        processingError = nil
        // Single reset point for every exit path (success and both catches);
        // the original duplicated this assignment three times.
        defer { isProcessing = false }

        do {
            let result = try await workflow.buildSaveValidate(
                sourceVideoURL: videoURL,
                coverImageURL: nil,
                exportParams: exportParams
            ) { [weak self] progress in
                // Hop to the main actor before touching observable state.
                // Weak capture so a long-running build cannot keep a
                // discarded AppState alive.
                Task { @MainActor in
                    self?.processingProgress = progress
                }
            }
            return result
        } catch let error as AppError {
            processingError = error
            return nil
        } catch {
            processingError = AppError(code: "LPB-901", message: "未知错误", underlyingErrorDescription: error.localizedDescription, suggestedActions: ["重试"])
            return nil
        }
    }
}
b/to-live-photo/to-live-photo/Assets.xcassets/AppIcon.appiconset/Contents.json @@ -0,0 +1,35 @@ +{ + "images" : [ + { + "idiom" : "universal", + "platform" : "ios", + "size" : "1024x1024" + }, + { + "appearances" : [ + { + "appearance" : "luminosity", + "value" : "dark" + } + ], + "idiom" : "universal", + "platform" : "ios", + "size" : "1024x1024" + }, + { + "appearances" : [ + { + "appearance" : "luminosity", + "value" : "tinted" + } + ], + "idiom" : "universal", + "platform" : "ios", + "size" : "1024x1024" + } + ], + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/to-live-photo/to-live-photo/Assets.xcassets/Contents.json b/to-live-photo/to-live-photo/Assets.xcassets/Contents.json new file mode 100644 index 0000000..73c0059 --- /dev/null +++ b/to-live-photo/to-live-photo/Assets.xcassets/Contents.json @@ -0,0 +1,6 @@ +{ + "info" : { + "author" : "xcode", + "version" : 1 + } +} diff --git a/to-live-photo/to-live-photo/ContentView.swift b/to-live-photo/to-live-photo/ContentView.swift new file mode 100644 index 0000000..87340e2 --- /dev/null +++ b/to-live-photo/to-live-photo/ContentView.swift @@ -0,0 +1,40 @@ +// +// ContentView.swift +// to-live-photo +// +// Created by empty on 2025/12/13. 
//
//  ContentView.swift
//  to-live-photo
//
//  Root view: owns the NavigationStack and maps AppRoute values to screens.
//

import SwiftUI
import LivePhotoCore

struct ContentView: View {
    @Environment(AppState.self) private var appState

    var body: some View {
        @Bindable var appState = appState

        NavigationStack(path: $appState.navigationPath) {
            HomeView()
                .navigationDestination(for: AppRoute.self) { route in
                    destination(for: route)
                }
        }
    }

    /// Resolves a pushed route to the screen that renders it.
    @ViewBuilder
    private func destination(for route: AppRoute) -> some View {
        switch route {
        case .home:
            HomeView()
        case .editor(let videoURL):
            EditorView(videoURL: videoURL)
        case .processing(let videoURL, let exportParams):
            ProcessingView(videoURL: videoURL, exportParams: exportParams)
        case .result(let workflowResult):
            ResultView(workflowResult: workflowResult)
        case .wallpaperGuide(let assetId):
            WallpaperGuideView(assetId: assetId)
        }
    }
}

#Preview {
    ContentView()
        .environment(AppState())
}
//
//  EditorView.swift
//  to-live-photo
//
//  Edit page: preview the imported video and tune Live Photo parameters.
//

import SwiftUI
import AVKit
import LivePhotoCore

struct EditorView: View {
    @Environment(AppState.self) private var appState

    /// Local file URL of the imported video (copied to tmp by HomeView).
    let videoURL: URL

    @State private var player: AVPlayer?
    // Trim window in seconds; the key frame defaults to its midpoint.
    // (Removed an unused `duration` @State that nothing read or wrote.)
    @State private var trimStart: Double = 0
    @State private var trimEnd: Double = 1.0
    @State private var keyFrameTime: Double = 0.5
    // Full duration of the source asset, loaded asynchronously in loadVideo().
    @State private var videoDuration: Double = 0

    var body: some View {
        VStack(spacing: 16) {
            if let player {
                VideoPlayer(player: player)
                    .aspectRatio(9/16, contentMode: .fit)
                    .clipShape(RoundedRectangle(cornerRadius: 16))
                    .padding(.horizontal)
            } else {
                // Placeholder while the asset's duration is still loading.
                RoundedRectangle(cornerRadius: 16)
                    .fill(Color.secondary.opacity(0.2))
                    .aspectRatio(9/16, contentMode: .fit)
                    .overlay {
                        ProgressView()
                    }
                    .padding(.horizontal)
            }

            VStack(alignment: .leading, spacing: 12) {
                HStack {
                    Text("时长")
                    Spacer()
                    Text(String(format: "%.1f 秒", trimEnd - trimStart))
                        .foregroundStyle(.secondary)
                }

                // End of the trim window: capped at 1.5s (wallpaper limit) and
                // never beyond the source video's own duration.
                Slider(value: $trimEnd, in: 1.0...max(1.0, min(1.5, videoDuration))) { _ in
                    updateKeyFrameTime()
                }
                .disabled(videoDuration < 1.0)

                Text("Live Photo 壁纸时长限制:1 ~ 1.5 秒")
                    .font(.caption)
                    .foregroundStyle(.secondary)
            }
            .padding(.horizontal, 24)

            Spacer()

            Button {
                startProcessing()
            } label: {
                HStack {
                    Image(systemName: "wand.and.stars")
                    Text("生成 Live Photo")
                }
                .font(.headline)
                .frame(maxWidth: .infinity)
                .padding()
                .background(Color.accentColor)
                .foregroundColor(.white)
                .clipShape(RoundedRectangle(cornerRadius: 14))
            }
            .padding(.horizontal, 24)
            .padding(.bottom)
        }
        .navigationTitle("编辑")
        .navigationBarTitleDisplayMode(.inline)
        .onAppear {
            loadVideo()
        }
        .onDisappear {
            player?.pause()
        }
    }

    /// Loads the asset's duration, seeds the trim window, and starts playback.
    private func loadVideo() {
        let asset = AVURLAsset(url: videoURL)
        Task {
            do {
                let durationCMTime = try await asset.load(.duration)
                let durationSeconds = durationCMTime.seconds
                await MainActor.run {
                    videoDuration = durationSeconds
                    trimEnd = min(1.0, durationSeconds) // default 1s; slider allows up to 1.5s
                    keyFrameTime = trimEnd / 2
                    player = AVPlayer(url: videoURL)
                    player?.play()
                }
            } catch {
                // NOTE(review): on failure the placeholder spinner is shown
                // forever — consider surfacing an error state. TODO confirm UX.
                print("Failed to load video duration: \(error)")
            }
        }
    }

    /// Re-centers the key frame inside the current trim window.
    private func updateKeyFrameTime() {
        keyFrameTime = (trimStart + trimEnd) / 2
    }

    /// Packs the current parameters and pushes the processing screen.
    private func startProcessing() {
        let params = ExportParams(
            trimStart: trimStart,
            trimEnd: trimEnd,
            keyFrameTime: keyFrameTime
        )
        appState.navigateTo(.processing(videoURL: videoURL, exportParams: params))
    }
}

#Preview {
    NavigationStack {
        EditorView(videoURL: URL(fileURLWithPath: "/tmp/test.mov"))
    }
    .environment(AppState())
}
//
//  HomeView.swift
//  to-live-photo
//
//  Landing page: lets the user pick a source video from the photo library.
//

import SwiftUI
import PhotosUI
import AVKit

struct HomeView: View {
    @Environment(AppState.self) private var appState
    @State private var selectedItem: PhotosPickerItem?
    @State private var isLoading = false
    @State private var errorMessage: String?

    var body: some View {
        VStack(spacing: 32) {
            Spacer()

            Image(systemName: "livephoto")
                .font(.system(size: 80))
                .foregroundStyle(.tint)

            Text("Live Photo 制作")
                .font(.largeTitle)
                .fontWeight(.bold)

            Text("选择一段视频,将其转换为 Live Photo\n然后设置为动态锁屏壁纸")
                .font(.body)
                .foregroundStyle(.secondary)
                .multilineTextAlignment(.center)

            Spacer()

            videoPicker

            if isLoading {
                ProgressView("正在加载视频...")
            }

            if let errorMessage {
                Text(errorMessage)
                    .font(.caption)
                    .foregroundStyle(.red)
            }

            Spacer()
        }
        .padding(.horizontal, 24)
        .navigationTitle("首页")
        .navigationBarTitleDisplayMode(.inline)
        .onChange(of: selectedItem) { _, newValue in
            Task {
                await importVideo(from: newValue)
            }
        }
    }

    /// Library picker button; disabled while a previous selection is loading.
    private var videoPicker: some View {
        PhotosPicker(
            selection: $selectedItem,
            matching: .videos,
            photoLibrary: .shared()
        ) {
            HStack {
                Image(systemName: "video.badge.plus")
                Text("选择视频")
            }
            .font(.headline)
            .frame(maxWidth: .infinity)
            .padding()
            .background(Color.accentColor)
            .foregroundColor(.white)
            .clipShape(RoundedRectangle(cornerRadius: 14))
        }
        .disabled(isLoading)
    }

    /// Copies the picked item into a temporary movie file, then pushes the editor.
    private func importVideo(from item: PhotosPickerItem?) async {
        guard let item else { return }

        isLoading = true
        errorMessage = nil

        do {
            guard let movie = try await item.loadTransferable(type: VideoTransferable.self) else {
                errorMessage = "无法加载视频"
                isLoading = false
                return
            }

            isLoading = false
            appState.navigateTo(.editor(videoURL: movie.url))
        } catch {
            errorMessage = "加载失败: \(error.localizedDescription)"
            isLoading = false
        }
    }
}

/// Transferable wrapper that lands the picked movie in the app's tmp directory.
struct VideoTransferable: Transferable {
    let url: URL

    static var transferRepresentation: some TransferRepresentation {
        FileRepresentation(contentType: .movie) { video in
            SentTransferredFile(video.url)
        } importing: { received in
            let destination = FileManager.default.temporaryDirectory
                .appendingPathComponent("import_\(UUID().uuidString).mov")

            // Defensive: UUID names should never collide, but copyItem would
            // throw if the destination somehow existed.
            if FileManager.default.fileExists(atPath: destination.path) {
                try FileManager.default.removeItem(at: destination)
            }
            try FileManager.default.copyItem(at: received.file, to: destination)

            return VideoTransferable(url: destination)
        }
    }
}

#Preview {
    NavigationStack {
        HomeView()
    }
    .environment(AppState())
}
.navigationBarBackButtonHidden(appState.isProcessing) + .task { + guard !hasStarted else { return } + hasStarted = true + await startProcessing() + } + } + + @ViewBuilder + private var progressContent: some View { + ProgressView() + .scaleEffect(1.5) + + VStack(spacing: 8) { + Text(stageText) + .font(.headline) + + if let progress = appState.processingProgress { + Text(String(format: "%.0f%%", progress.fraction * 100)) + .font(.title2) + .fontWeight(.bold) + .foregroundStyle(.tint) + } + } + + Text("正在生成 Live Photo,请稍候...") + .font(.body) + .foregroundStyle(.secondary) + } + + @ViewBuilder + private var errorContent: some View { + Image(systemName: "exclamationmark.triangle.fill") + .font(.system(size: 60)) + .foregroundStyle(.red) + + if let error = appState.processingError { + VStack(spacing: 8) { + Text("生成失败") + .font(.headline) + + Text(error.message) + .font(.body) + .foregroundStyle(.secondary) + + if !error.suggestedActions.isEmpty { + Text("建议:\(error.suggestedActions.joined(separator: "、"))") + .font(.caption) + .foregroundStyle(.secondary) + } + } + } + + Button { + appState.pop() + } label: { + Text("返回重试") + .font(.headline) + .frame(maxWidth: .infinity) + .padding() + .background(Color.accentColor) + .foregroundColor(.white) + .clipShape(RoundedRectangle(cornerRadius: 14)) + } + } + + private var stageText: String { + guard let stage = appState.processingProgress?.stage else { + return "准备中..." + } + switch stage { + case .normalize: return "预处理视频..." + case .extractKeyFrame: return "提取封面帧..." + case .writePhotoMetadata: return "写入图片元数据..." + case .writeVideoMetadata: return "写入视频元数据..." + case .saveToAlbum: return "保存到相册..." + case .validate: return "校验 Live Photo..." 
+ } + } + + private func startProcessing() async { + if let result = await appState.startProcessing(videoURL: videoURL, exportParams: exportParams) { + appState.pop() + appState.navigateTo(.result(workflowResult: result)) + } + } +} + +#Preview { + NavigationStack { + ProcessingView(videoURL: URL(fileURLWithPath: "/tmp/test.mov"), exportParams: ExportParams()) + } + .environment(AppState()) +} diff --git a/to-live-photo/to-live-photo/Views/ResultView.swift b/to-live-photo/to-live-photo/Views/ResultView.swift new file mode 100644 index 0000000..8766571 --- /dev/null +++ b/to-live-photo/to-live-photo/Views/ResultView.swift @@ -0,0 +1,145 @@ +// +// ResultView.swift +// to-live-photo +// +// 结果页:显示成功/失败状态与后续操作 +// + +import SwiftUI +import LivePhotoCore + +struct ResultView: View { + @Environment(AppState.self) private var appState + @State private var showShareSheet = false + @State private var shareItems: [Any] = [] + + let workflowResult: LivePhotoWorkflowResult + + var body: some View { + VStack(spacing: 32) { + Spacer() + + Image(systemName: isSuccess ? "checkmark.circle.fill" : "xmark.circle.fill") + .font(.system(size: 80)) + .foregroundStyle(isSuccess ? .green : .red) + + VStack(spacing: 8) { + Text(isSuccess ? 
"Live Photo 已保存" : "保存失败") + .font(.title) + .fontWeight(.bold) + + if isSuccess { + Text("已保存到系统相册") + .font(.body) + .foregroundStyle(.secondary) + + if workflowResult.resourceValidationOK { + Label("资源校验通过", systemImage: "checkmark.seal.fill") + .font(.caption) + .foregroundStyle(.green) + } + + if let isLive = workflowResult.libraryAssetIsLivePhoto, isLive { + Label("相册识别为 Live Photo", systemImage: "livephoto") + .font(.caption) + .foregroundStyle(.green) + } + } + } + + Spacer() + + VStack(spacing: 12) { + if isSuccess { + Button { + appState.navigateTo(.wallpaperGuide(assetId: workflowResult.savedAssetId)) + } label: { + HStack { + Image(systemName: "arrow.right.circle") + Text("设置为动态壁纸") + } + .font(.headline) + .frame(maxWidth: .infinity) + .padding() + .background(Color.accentColor) + .foregroundColor(.white) + .clipShape(RoundedRectangle(cornerRadius: 14)) + } + + // 调试:导出原始文件 + Button { + prepareShareItems() + showShareSheet = true + } label: { + HStack { + Image(systemName: "square.and.arrow.up") + Text("导出调试文件") + } + .font(.headline) + .frame(maxWidth: .infinity) + .padding() + .background(Color.orange.opacity(0.8)) + .foregroundColor(.white) + .clipShape(RoundedRectangle(cornerRadius: 14)) + } + } + + Button { + appState.popToRoot() + } label: { + Text(isSuccess ? 
"继续制作" : "返回首页") + .font(.headline) + .frame(maxWidth: .infinity) + .padding() + .background(Color.secondary.opacity(0.2)) + .foregroundColor(.primary) + .clipShape(RoundedRectangle(cornerRadius: 14)) + } + } + .padding(.horizontal, 24) + .padding(.bottom) + } + .navigationTitle("完成") + .navigationBarTitleDisplayMode(.inline) + .navigationBarBackButtonHidden(true) + .sheet(isPresented: $showShareSheet) { + ShareSheet(activityItems: shareItems) + } + } + + private var isSuccess: Bool { + !workflowResult.savedAssetId.isEmpty + } + + private func prepareShareItems() { + shareItems = [ + workflowResult.pairedImageURL, + workflowResult.pairedVideoURL + ] + } +} + +struct ShareSheet: UIViewControllerRepresentable { + let activityItems: [Any] + + func makeUIViewController(context: Context) -> UIActivityViewController { + UIActivityViewController(activityItems: activityItems, applicationActivities: nil) + } + + func updateUIViewController(_ uiViewController: UIActivityViewController, context: Context) {} +} + +#Preview { + NavigationStack { + ResultView(workflowResult: LivePhotoWorkflowResult( + workId: UUID(), + assetIdentifier: "test", + pairedImageURL: URL(fileURLWithPath: "/tmp/photo.jpg"), + pairedVideoURL: URL(fileURLWithPath: "/tmp/video.mov"), + savedAssetId: "ABC123", + resourceValidationOK: true, + libraryAssetIsLivePhoto: true + )) + } + .environment(AppState()) +} diff --git a/to-live-photo/to-live-photo/Views/WallpaperGuideView.swift b/to-live-photo/to-live-photo/Views/WallpaperGuideView.swift new file mode 100644 index 0000000..741bd88 --- /dev/null +++ b/to-live-photo/to-live-photo/Views/WallpaperGuideView.swift @@ -0,0 +1,324 @@ +// +// WallpaperGuideView.swift +// to-live-photo +// +// 壁纸引导页:教用户如何设置动态壁纸 +// + +import SwiftUI + +struct WallpaperGuideView: View { + @Environment(AppState.self) private var appState + + let assetId: String + + private var iosVersion: Int { + Int(UIDevice.current.systemVersion.split(separator: ".").first ?? "16") ?? 
16 + } + + var body: some View { + ScrollView { + VStack(alignment: .leading, spacing: 24) { + headerSection + + quickActionSection + + stepsSection + + tipsSection + + doneButton + } + .padding(.horizontal, 20) + .padding(.vertical, 16) + } + .navigationTitle("设置动态壁纸") + .navigationBarTitleDisplayMode(.inline) + } + + @ViewBuilder + private var headerSection: some View { + VStack(alignment: .center, spacing: 12) { + Image(systemName: "livephoto") + .font(.system(size: 50)) + .foregroundStyle(.tint) + .padding(.bottom, 4) + + Text("Live Photo 已保存到相册") + .font(.title3) + .fontWeight(.bold) + + if iosVersion >= 17 { + HStack(spacing: 6) { + Image(systemName: "checkmark.circle.fill") + .foregroundStyle(.green) + Text("你的设备支持锁屏动态壁纸") + .foregroundStyle(.secondary) + } + .font(.subheadline) + } else { + HStack(spacing: 6) { + Image(systemName: "exclamationmark.triangle.fill") + .foregroundStyle(.orange) + Text("iOS 17+ 才支持锁屏动态效果") + .foregroundStyle(.secondary) + } + .font(.subheadline) + } + } + .frame(maxWidth: .infinity) + .padding(.vertical, 8) + } + + @ViewBuilder + private var quickActionSection: some View { + Button { + if let url = URL(string: "photos-redirect://") { + UIApplication.shared.open(url) + } + } label: { + HStack(spacing: 12) { + Image(systemName: "photo.on.rectangle.angled") + .font(.title2) + VStack(alignment: .leading, spacing: 2) { + Text("打开照片 App") + .font(.headline) + Text("找到刚保存的 Live Photo") + .font(.caption) + .foregroundStyle(.white.opacity(0.8)) + } + Spacer() + Image(systemName: "arrow.up.right.square") + .font(.title3) + } + .padding(16) + .frame(maxWidth: .infinity) + .background( + LinearGradient( + colors: [Color.blue, Color.purple], + startPoint: .topLeading, + endPoint: .bottomTrailing + ) + ) + .foregroundColor(.white) + .clipShape(RoundedRectangle(cornerRadius: 14)) + } + } + + @ViewBuilder + private var stepsSection: some View { + VStack(alignment: .leading, spacing: 16) { + HStack { + Image(systemName: "list.number") + 
.foregroundStyle(.tint) + Text("设置壁纸步骤") + .font(.headline) + } + + VStack(spacing: 0) { + StepRow( + number: 1, + icon: "photo.fill", + title: "在照片中找到 Live Photo", + description: "照片左上角会显示【LIVE】标识,长按可预览动画效果", + isLast: false + ) + + StepRow( + number: 2, + icon: "square.and.arrow.up", + title: "点击分享按钮", + description: "位于屏幕左下角,然后选择【用作壁纸】选项", + isLast: false + ) + + StepRow( + number: 3, + icon: "crop", + title: "调整照片位置", + description: "双指缩放和拖动来调整照片在壁纸中的位置", + isLast: false + ) + + if iosVersion >= 17 { + StepRow( + number: 4, + icon: "livephoto", + title: "确认动态效果已开启", + description: "点击左下角的 Live Photo 图标,图标高亮表示动态效果已开启", + isLast: false + ) + } else { + StepRow( + number: 4, + icon: "info.circle", + title: "了解系统限制", + description: "iOS 16 锁屏不支持动态效果,仅主屏幕长按可播放", + isLast: false + ) + } + + StepRow( + number: 5, + icon: "checkmark.circle", + title: "完成设置", + description: "点击右上角【完成】,选择【设定锁定屏幕】或【同时设定】", + isLast: true + ) + } + .padding(12) + .background(Color.secondary.opacity(0.1)) + .clipShape(RoundedRectangle(cornerRadius: 12)) + } + } + + @ViewBuilder + private var tipsSection: some View { + VStack(alignment: .leading, spacing: 12) { + HStack { + Image(systemName: "questionmark.circle") + .foregroundStyle(.tint) + Text("常见问题") + .font(.headline) + } + + FAQRow( + icon: "magnifyingglass", + question: "找不到刚保存的 Live Photo?", + answer: "打开照片 App → 相簿 → 媒体类型 → 实况照片,或直接搜索【实况】" + ) + + FAQRow( + icon: "hand.tap", + question: "设置后壁纸不会动?", + answer: "锁屏状态下长按屏幕 1-2 秒可触发动画播放(需 iOS 17+)" + ) + + FAQRow( + icon: "battery.25", + question: "动画效果突然失效?", + answer: "检查是否开启了【低电量模式】,该模式下系统会自动禁用动态效果以省电" + ) + + FAQRow( + icon: "exclamationmark.circle", + question: "Live Photo 图标是灰色/划线?", + answer: "iOS 对壁纸有额外限制,部分 Live Photo 可能不支持作为动态壁纸。建议使用 2-3 秒时长、竖屏比例的视频重新生成" + ) + + if iosVersion < 17 { + FAQRow( + icon: "iphone.gen3", + question: "为什么我的锁屏没有动画?", + answer: "iOS 16 系统限制:锁屏壁纸不支持 Live Photo 动画,建议升级到 iOS 17+" + ) + } + } + } + + @ViewBuilder + private var doneButton: some View { + 
VStack(spacing: 12) { + Button { + appState.popToRoot() + } label: { + Text("完成,返回首页") + .font(.headline) + .frame(maxWidth: .infinity) + .padding() + .background(Color.accentColor) + .foregroundColor(.white) + .clipShape(RoundedRectangle(cornerRadius: 14)) + } + + Text("你可以随时制作新的 Live Photo") + .font(.caption) + .foregroundStyle(.secondary) + } + .padding(.top, 8) + } +} + +struct StepRow: View { + let number: Int + let icon: String + let title: String + let description: String + let isLast: Bool + + var body: some View { + HStack(alignment: .top, spacing: 14) { + VStack(spacing: 0) { + ZStack { + Circle() + .fill(Color.accentColor) + .frame(width: 32, height: 32) + Text("\(number)") + .font(.subheadline) + .fontWeight(.bold) + .foregroundColor(.white) + } + + if !isLast { + Rectangle() + .fill(Color.accentColor.opacity(0.3)) + .frame(width: 2) + .frame(maxHeight: .infinity) + } + } + + VStack(alignment: .leading, spacing: 6) { + HStack(spacing: 8) { + Image(systemName: icon) + .font(.subheadline) + .foregroundStyle(.tint) + Text(title) + .font(.subheadline) + .fontWeight(.semibold) + } + + Text(description) + .font(.caption) + .foregroundStyle(.secondary) + .fixedSize(horizontal: false, vertical: true) + } + .padding(.bottom, isLast ? 
0 : 16) + } + } +} + +struct FAQRow: View { + let icon: String + let question: String + let answer: String + + var body: some View { + HStack(alignment: .top, spacing: 12) { + Image(systemName: icon) + .font(.title3) + .foregroundStyle(.tint) + .frame(width: 24) + + VStack(alignment: .leading, spacing: 4) { + Text(question) + .font(.subheadline) + .fontWeight(.medium) + Text(answer) + .font(.caption) + .foregroundStyle(.secondary) + .fixedSize(horizontal: false, vertical: true) + } + } + .padding(14) + .frame(maxWidth: .infinity, alignment: .leading) + .background(Color.secondary.opacity(0.08)) + .clipShape(RoundedRectangle(cornerRadius: 12)) + } +} + +#Preview { + NavigationStack { + WallpaperGuideView(assetId: "ABC123") + } + .environment(AppState()) +} diff --git a/to-live-photo/to-live-photo/to_live_photoApp.swift b/to-live-photo/to-live-photo/to_live_photoApp.swift new file mode 100644 index 0000000..37df4ab --- /dev/null +++ b/to-live-photo/to-live-photo/to_live_photoApp.swift @@ -0,0 +1,20 @@ +// +// to_live_photoApp.swift +// to-live-photo +// +// Created by empty on 2025/12/13. +// + +import SwiftUI + +@main +struct to_live_photoApp: App { + @State private var appState = AppState() + + var body: some Scene { + WindowGroup { + ContentView() + .environment(appState) + } + } +} diff --git a/to-live-photo/to-live-photoTests/to_live_photoTests.swift b/to-live-photo/to-live-photoTests/to_live_photoTests.swift new file mode 100644 index 0000000..d6bb5f5 --- /dev/null +++ b/to-live-photo/to-live-photoTests/to_live_photoTests.swift @@ -0,0 +1,17 @@ +// +// to_live_photoTests.swift +// to-live-photoTests +// +// Created by empty on 2025/12/13. +// + +import Testing +@testable import to_live_photo + +struct to_live_photoTests { + + @Test func example() async throws { + // Write your test here and use APIs like `#expect(...)` to check expected conditions. 
+ } + +} diff --git a/to-live-photo/to-live-photoUITests/to_live_photoUITests.swift b/to-live-photo/to-live-photoUITests/to_live_photoUITests.swift new file mode 100644 index 0000000..10d6e0f --- /dev/null +++ b/to-live-photo/to-live-photoUITests/to_live_photoUITests.swift @@ -0,0 +1,41 @@ +// +// to_live_photoUITests.swift +// to-live-photoUITests +// +// Created by empty on 2025/12/13. +// + +import XCTest + +final class to_live_photoUITests: XCTestCase { + + override func setUpWithError() throws { + // Put setup code here. This method is called before the invocation of each test method in the class. + + // In UI tests it is usually best to stop immediately when a failure occurs. + continueAfterFailure = false + + // In UI tests it’s important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this. + } + + override func tearDownWithError() throws { + // Put teardown code here. This method is called after the invocation of each test method in the class. + } + + @MainActor + func testExample() throws { + // UI tests must launch the application that they test. + let app = XCUIApplication() + app.launch() + + // Use XCTAssert and related functions to verify your tests produce the correct results. + } + + @MainActor + func testLaunchPerformance() throws { + // This measures how long it takes to launch your application. + measure(metrics: [XCTApplicationLaunchMetric()]) { + XCUIApplication().launch() + } + } +} diff --git a/to-live-photo/to-live-photoUITests/to_live_photoUITestsLaunchTests.swift b/to-live-photo/to-live-photoUITests/to_live_photoUITestsLaunchTests.swift new file mode 100644 index 0000000..885299a --- /dev/null +++ b/to-live-photo/to-live-photoUITests/to_live_photoUITestsLaunchTests.swift @@ -0,0 +1,33 @@ +// +// to_live_photoUITestsLaunchTests.swift +// to-live-photoUITests +// +// Created by empty on 2025/12/13. 
+// + +import XCTest + +final class to_live_photoUITestsLaunchTests: XCTestCase { + + override class var runsForEachTargetApplicationUIConfiguration: Bool { + true + } + + override func setUpWithError() throws { + continueAfterFailure = false + } + + @MainActor + func testLaunch() throws { + let app = XCUIApplication() + app.launch() + + // Insert steps here to perform after app launch but before taking a screenshot, + // such as logging into a test account or navigating somewhere in the app + + let attachment = XCTAttachment(screenshot: app.screenshot()) + attachment.name = "Launch Screen" + attachment.lifetime = .keepAlways + add(attachment) + } +}