feat: 初始化 Live Photo 项目结构

- 添加 PRD、技术规范、交互规范文档 (V0.2)
- 创建 Swift Package 和 Xcode 项目
- 实现 LivePhotoCore 基础模块
- 添加 HEIC MakerNote 元数据写入功能
- 创建项目结构文档和任务清单
- 添加 .gitignore 忽略规则
This commit is contained in:
empty
2025-12-14 16:21:20 +08:00
commit 299415a530
31 changed files with 4815 additions and 0 deletions

105
.gitignore vendored Normal file
View File

@@ -0,0 +1,105 @@
# Xcode
#
# gitignore contributors: remember to update Global/Xcode.gitignore & Objective-C.gitignore
# https://github.com/github/gitignore/blob/main/Global/Xcode.gitignore
## User settings
xcuserdata/
## Xcode build generated
build/
DerivedData/
## Obj-C/Swift specific
*.hmap
## App packaging
*.ipa
*.dSYM.zip
*.dSYM
## Playgrounds
timeline.xctimeline
playground.xcworkspace
# Swift Package Manager
#
# Add this line if you want to avoid checking in source code from Swift Package Manager dependencies.
.build/
Packages/
Package.resolved
*.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/
# CocoaPods
#
# We recommend against adding the Pods directory to your .gitignore. However
# you should judge for yourself, the pros and cons are mentioned at:
# https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
#
Pods/
*.xcworkspace
# Carthage
#
# Add this line if you want to avoid checking in source code from Carthage dependencies.
Carthage/Checkouts
Carthage/Build/
# Accio dependency management
Dependencies/
.accio/
# fastlane
#
# It is recommended to not store the screenshots in the git repo.
# Instead, use fastlane to re-generate the screenshots whenever they are needed.
# For more information about the recommended setup visit:
# https://docs.fastlane.tools/best-practices/source-control/#source-control
fastlane/report.xml
fastlane/Preview.html
fastlane/screenshots/**/*.png
fastlane/test_output
# Code Injection
#
# After using code injection tools there's a generated folder /iOSInjectionProject
# https://github.com/johnno1962/injectionforxcode
iOSInjectionProject/
# macOS
.DS_Store
.AppleDouble
.LSOverride
# Thumbnails
._*
# Files that might appear in the root of a volume
.DocumentRevisions-V100
.fseventsd
.Spotlight-V100
.TemporaryItems
.Trashes
.VolumeIcon.icns
.com.apple.timemachine.donotpresent
# Directories potentially created on remote AFP share
.AppleDB
.AppleDesktop
Network Trash Folder
Temporary Items
.apdisk
# IDE - VSCode
.vscode/
# IDE - JetBrains
.idea/
# Temporary files
*.swp
*.swo
*~
# Debug logs
*.log

1
.serena/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/cache

View File

@@ -0,0 +1,17 @@
# Live Photo 无法设置为动态壁纸:根因记录
## 现象
- 生成的 Live Photo 在相册中可识别、可播放,但在“用作壁纸”时提示“动态不可用”。
## 关键发现(本地文件元数据)
- `/Users/yuanjiantsui/Downloads/paired.mov` 中的 timed metadata `StillImageTime` 的值为 **-1**(int8)。
- `exiftool` 输出示例:`[Track3] StillImageTime : -1`
## 代码根因
- `Sources/LivePhotoCore/LivePhotoCore.swift:842` 中,`LivePhotoBuilder.metadataItemForStillImageTime()` 将 `com.apple.quicktime.still-image-time` 的 value 写成 `-1`。
- 建议改为 `0`(int8);
- 仍用 timed metadata group 的 `timeRange.start` 表达关键帧时间。
## 额外建议(兼容性)
- 移除非标准的 mdta keys:`Sample Time` / `Sample Duration`(当前写入到 `assetWriter.metadata`)。
- 若仍不兼容,优先尝试 H.264、30fps、SDR、2~3 秒时长作为壁纸兼容模式。

84
.serena/project.yml Normal file
View File

@@ -0,0 +1,84 @@
# list of languages for which language servers are started; choose from:
# al bash clojure cpp csharp csharp_omnisharp
# dart elixir elm erlang fortran go
# haskell java julia kotlin lua markdown
# nix perl php python python_jedi r
# rego ruby ruby_solargraph rust scala swift
# terraform typescript typescript_vts yaml zig
# Note:
# - For C, use cpp
# - For JavaScript, use typescript
# Special requirements:
# - csharp: Requires the presence of a .sln file in the project folder.
# When using multiple languages, the first language server that supports a given file will be used for that file.
# The first language is the default language and the respective language server will be used as a fallback.
# Note that when using the JetBrains backend, language servers are not used and this list is correspondingly ignored.
languages:
- swift
# the encoding used by text files in the project
# For a list of possible encodings, see https://docs.python.org/3.11/library/codecs.html#standard-encodings
encoding: "utf-8"
# whether to use the project's gitignore file to ignore files
# Added on 2025-04-07
ignore_all_files_in_gitignore: true
# list of additional paths to ignore
# same syntax as gitignore, so you can use * and **
# Was previously called `ignored_dirs`, please update your config if you are using that.
# Added (renamed) on 2025-04-07
ignored_paths: []
# whether the project is in read-only mode
# If set to true, all editing tools will be disabled and attempts to use them will result in an error
# Added on 2025-04-18
read_only: false
# list of tool names to exclude. We recommend not excluding any tools, see the readme for more details.
# Below is the complete list of tools for convenience.
# To make sure you have the latest list of tools, and to view their descriptions,
# execute `uv run scripts/print_tool_overview.py`.
#
# * `activate_project`: Activates a project by name.
# * `check_onboarding_performed`: Checks whether project onboarding was already performed.
# * `create_text_file`: Creates/overwrites a file in the project directory.
# * `delete_lines`: Deletes a range of lines within a file.
# * `delete_memory`: Deletes a memory from Serena's project-specific memory store.
# * `execute_shell_command`: Executes a shell command.
# * `find_referencing_code_snippets`: Finds code snippets in which the symbol at the given location is referenced.
# * `find_referencing_symbols`: Finds symbols that reference the symbol at the given location (optionally filtered by type).
# * `find_symbol`: Performs a global (or local) search for symbols with/containing a given name/substring (optionally filtered by type).
# * `get_current_config`: Prints the current configuration of the agent, including the active and available projects, tools, contexts, and modes.
# * `get_symbols_overview`: Gets an overview of the top-level symbols defined in a given file.
# * `initial_instructions`: Gets the initial instructions for the current project.
# Should only be used in settings where the system prompt cannot be set,
# e.g. in clients you have no control over, like Claude Desktop.
# * `insert_after_symbol`: Inserts content after the end of the definition of a given symbol.
# * `insert_at_line`: Inserts content at a given line in a file.
# * `insert_before_symbol`: Inserts content before the beginning of the definition of a given symbol.
# * `list_dir`: Lists files and directories in the given directory (optionally with recursion).
# * `list_memories`: Lists memories in Serena's project-specific memory store.
# * `onboarding`: Performs onboarding (identifying the project structure and essential tasks, e.g. for testing or building).
# * `prepare_for_new_conversation`: Provides instructions for preparing for a new conversation (in order to continue with the necessary context).
# * `read_file`: Reads a file within the project directory.
# * `read_memory`: Reads the memory with the given name from Serena's project-specific memory store.
# * `remove_project`: Removes a project from the Serena configuration.
# * `replace_lines`: Replaces a range of lines within a file with new content.
# * `replace_symbol_body`: Replaces the full definition of a symbol.
# * `restart_language_server`: Restarts the language server, may be necessary when edits not through Serena happen.
# * `search_for_pattern`: Performs a search for a pattern in the project.
# * `summarize_changes`: Provides instructions for summarizing the changes made to the codebase.
# * `switch_modes`: Activates modes by providing a list of their names
# * `think_about_collected_information`: Thinking tool for pondering the completeness of collected information.
# * `think_about_task_adherence`: Thinking tool for determining whether the agent is still on track with the current task.
# * `think_about_whether_you_are_done`: Thinking tool for determining whether the task is truly completed.
# * `write_memory`: Writes a named memory (for future reference) to Serena's project-specific memory store.
excluded_tools: []
# initial prompt for the project. It will always be given to the LLM upon activating the project
# (contrary to the memories, which are loaded on demand).
initial_prompt: ""
project_name: "to-live-photo"
included_optional_tools: []

52
PROJECT_STRUCTURE.md Normal file
View File

@@ -0,0 +1,52 @@
# 项目结构
> 说明:本文件用于记录项目目录/文件结构的变更。新增/删除目录或文件后需同步更新。
## 根目录
- Package.swift
- docs/
- Sources/
- Tests/
- to-live-photo/
- docs_index.md
- PROJECT_STRUCTURE.md
- TASK.md
- .DS_Store
## docs/
- PRD_LivePhoto_App_V0.2_2025-12-13.md
- TECHSPEC_LivePhoto_App_V0.2_2025-12-13.md
- IXSPEC_LivePhoto_App_V0.2_2025-12-13.md
- .DS_Store
## Sources/
- LivePhotoCore/
- LivePhotoCore.swift
## Tests/
- LivePhotoCoreTests/
- LivePhotoCoreTests.swift
## to-live-photo/
- to-live-photo.xcodeproj/
- to-live-photo/
- Assets.xcassets/
- AppState.swift
- ContentView.swift
- to_live_photoApp.swift
- Views/
- HomeView.swift
- EditorView.swift
- ProcessingView.swift
- ResultView.swift
- WallpaperGuideView.swift
- to-live-photoTests/
- to_live_photoTests.swift
- to-live-photoUITests/
- to_live_photoUITests.swift
- to_live_photoUITestsLaunchTests.swift

26
Package.swift Normal file
View File

@@ -0,0 +1,26 @@
// swift-tools-version: 6.0
import PackageDescription

// Swift Package manifest for the Live Photo core library.
let package = Package(
    name: "ToLivePhoto",
    // NOTE(review): .iOS(.v18) is notably higher than the "iOS/iPadOS 16+"
    // minimum stated in TASK.md — confirm which target is authoritative.
    platforms: [
        .iOS(.v18)
    ],
    products: [
        // Library product exposing the Live Photo building / metadata code.
        .library(
            name: "LivePhotoCore",
            targets: ["LivePhotoCore"]
        )
    ],
    targets: [
        // Core module; currently has no external dependencies.
        .target(
            name: "LivePhotoCore",
            dependencies: []
        ),
        // Unit tests for the core module.
        .testTarget(
            name: "LivePhotoCoreTests",
            dependencies: ["LivePhotoCore"]
        )
    ]
)

View File

@@ -0,0 +1,591 @@
import Foundation
/// Failure modes encountered while locating and patching the Exif/MakerNote
/// payload inside a HEIC (ISO BMFF) container.
///
/// Each case carries just enough context to diagnose where parsing stopped;
/// `description` renders a stable, human-readable message for logs.
public enum HEICMakerNoteError: Error, CustomStringConvertible {
    case invalidHEIC(String)
    case metaNotFound
    case iinfNotFound
    case ilocNotFound
    case exifItemNotFound
    case exifLocationNotFound(itemID: UInt32)
    case exifPayloadTooSmall
    case tiffNotFound
    case invalidTIFF(String)
    case exifIFDPointerNotFound
    case makerNoteTagNotFound
    case makerNoteOutOfRange
    case makerNoteTooShort(available: Int, required: Int)

    /// Human-readable diagnostic message for each failure mode.
    public var description: String {
        switch self {
        case .invalidHEIC(let reason):
            return "Invalid HEIC: \(reason)"
        case .metaNotFound:
            return "meta box not found"
        case .iinfNotFound:
            return "iinf box not found"
        case .ilocNotFound:
            return "iloc box not found"
        case .exifItemNotFound:
            return "Exif item not found"
        case .exifLocationNotFound(let identifier):
            return "Exif item location not found for item_ID=\(identifier)"
        case .exifPayloadTooSmall:
            return "Exif payload too small"
        case .tiffNotFound:
            return "TIFF header not found"
        case .invalidTIFF(let reason):
            return "Invalid TIFF: \(reason)"
        case .exifIFDPointerNotFound:
            return "ExifIFDPointer (0x8769) not found"
        case .makerNoteTagNotFound:
            return "MakerNote tag (0x927C) not found"
        case .makerNoteOutOfRange:
            return "MakerNote data out of range"
        case .makerNoteTooShort(let got, let needed):
            return "MakerNote too short: available=\(got), required=\(needed)"
        }
    }
}
/// In-place patcher that rewrites the Exif item of a HEIC file so its
/// MakerNote tag (0x927C) contains caller-supplied bytes.
public final class HEICMakerNotePatcher {
    // MARK: - Public API

    /// Rewrites the Exif item of the HEIC at `fileURL` so that its MakerNote
    /// contains `makerNote`.
    ///
    /// If the rebuilt Exif payload fits inside the existing item it is
    /// overwritten in place (zero-padded to the old length); otherwise the new
    /// payload is appended at the end of the file and the `iloc` extent for
    /// the Exif item is repointed to it. The file is rewritten atomically.
    ///
    /// - Parameters:
    ///   - fileURL: HEIC file to patch.
    ///   - makerNote: Raw MakerNote bytes to embed.
    /// - Throws: `HEICMakerNoteError` when the container or its Exif payload
    ///   cannot be parsed.
    public static func injectMakerNoteInPlace(fileURL: URL, makerNote: Data) throws {
        var fileData = try Data(contentsOf: fileURL, options: [.mappedIfSafe])
        // Locate the top-level 'meta' box.
        let metaRange = try findTopLevelBox(type: "meta", in: fileData)
        guard let metaRange else { throw HEICMakerNoteError.metaNotFound }
        let meta = BoxView(data: fileData, range: metaRange)
        // 'meta' is a FullBox: skip the 4-byte version/flags before its children.
        let metaChildrenStart = meta.contentStart + 4
        guard metaChildrenStart <= meta.end else {
            throw HEICMakerNoteError.invalidHEIC("meta content too short")
        }
        guard let iinfRange = try findChildBox(type: "iinf", within: metaChildrenStart..<meta.end, in: fileData) else {
            throw HEICMakerNoteError.iinfNotFound
        }
        guard let ilocRange = try findChildBox(type: "iloc", within: metaChildrenStart..<meta.end, in: fileData) else {
            throw HEICMakerNoteError.ilocNotFound
        }
        let exifItemID = try parseIINFAndFindExifItemID(data: fileData, iinfRange: iinfRange)
        let (locations, ilocInfo) = try parseILOCWithInfo(data: fileData, ilocRange: ilocRange)
        guard let exifLoc = locations[exifItemID] else {
            throw HEICMakerNoteError.exifLocationNotFound(itemID: exifItemID)
        }
        let exifStart = Int(exifLoc.offset)
        let exifLen = Int(exifLoc.length)
        guard exifStart >= 0, exifLen > 0, exifStart + exifLen <= fileData.count else {
            throw HEICMakerNoteError.exifPayloadTooSmall
        }
        // Snapshot the existing Exif item payload.
        let existingExif = fileData.subdata(in: exifStart..<(exifStart + exifLen))
        // Rebuild the Exif item with the new MakerNote embedded.
        let newExif = try buildNewExifItem(existingExif: existingExif, newMakerNote: makerNote)
        if newExif.count <= exifLen {
            // Fits in place: overwrite and zero-pad so the item length is unchanged.
            var paddedExif = newExif
            if paddedExif.count < exifLen {
                paddedExif.append(Data(repeating: 0x00, count: exifLen - paddedExif.count))
            }
            fileData.replaceSubrange(exifStart..<(exifStart + exifLen), with: paddedExif)
        } else {
            // Too large: append at end-of-file and repoint the iloc entry.
            // NOTE(review): the appended bytes live outside any declared box;
            // readers that follow iloc offsets will find them, but strict
            // validators may object — confirm this is acceptable.
            let newExifOffset = fileData.count
            fileData.append(newExif)
            // Update the iloc extent offset/length for the Exif item.
            try updateILOC(
                in: &fileData,
                ilocRange: ilocRange,
                ilocInfo: ilocInfo,
                itemID: exifItemID,
                newOffset: UInt64(newExifOffset),
                newLength: UInt64(newExif.count)
            )
        }
        try fileData.write(to: fileURL, options: [.atomic])
    }

    // MARK: - Build New Exif Item

    /// Builds a fresh Exif item payload containing a minimal big-endian TIFF
    /// whose only Exif-IFD entry is the MakerNote tag.
    ///
    /// NOTE(review): every other tag in the original Exif payload is
    /// discarded; the existing TIFF is parsed only to validate its endian
    /// marker. Confirm that dropping the remaining Exif metadata is intended.
    private static func buildNewExifItem(existingExif: Data, newMakerNote: Data) throws -> Data {
        guard existingExif.count >= 10 else {
            throw HEICMakerNoteError.exifPayloadTooSmall
        }
        // HEIF Exif item payload layout:
        //   4 bytes : offset from just after this field to the TIFF header
        //             (usually 6, i.e. past "Exif\0\0")
        //   6 bytes : "Exif\0\0" marker
        //   then    : TIFF data
        let tiffOffsetValue = existingExif.readUInt32BE(at: 0)
        let tiffStart = 4 + Int(tiffOffsetValue)
        guard tiffStart + 8 <= existingExif.count else {
            throw HEICMakerNoteError.tiffNotFound
        }
        // Validate the existing TIFF endian marker ("MM" big / "II" little).
        let endianMarker = existingExif.subdata(in: tiffStart..<(tiffStart + 2))
        let isBigEndian: Bool
        if endianMarker == Data([0x4D, 0x4D]) {
            isBigEndian = true
        } else if endianMarker == Data([0x49, 0x49]) {
            isBigEndian = false
        } else {
            throw HEICMakerNoteError.invalidTIFF("Invalid endian marker")
        }
        // NOTE(review): `isBigEndian` is used only as input validation above;
        // the rebuilt TIFF below is always written big-endian.
        var newTiff = Data()
        // TIFF Header: "MM" + 0x002A + IFD0 offset (8)
        newTiff.append(contentsOf: [0x4D, 0x4D]) // Big-endian
        newTiff.append(contentsOf: [0x00, 0x2A]) // TIFF magic
        newTiff.append(contentsOf: [0x00, 0x00, 0x00, 0x08]) // IFD0 offset = 8
        // IFD0: 1 entry (ExifIFDPointer)
        // Entry count: 1
        newTiff.append(contentsOf: [0x00, 0x01])
        // Entry: ExifIFDPointer (0x8769)
        let exifIFDOffset: UInt32 = 8 + 2 + 12 + 4 // = 26 (immediately after IFD0)
        newTiff.append(contentsOf: [0x87, 0x69]) // tag
        newTiff.append(contentsOf: [0x00, 0x04]) // type = LONG
        newTiff.append(contentsOf: [0x00, 0x00, 0x00, 0x01]) // count = 1
        newTiff.appendUInt32BE(exifIFDOffset) // value = offset to Exif IFD
        // Next IFD offset: 0 (no more IFDs)
        newTiff.append(contentsOf: [0x00, 0x00, 0x00, 0x00])
        // Exif IFD: 1 entry (MakerNote)
        let makerNoteDataOffset: UInt32 = exifIFDOffset + 2 + 12 + 4 // = 44
        newTiff.append(contentsOf: [0x00, 0x01]) // entry count
        // Entry: MakerNote (0x927C)
        newTiff.append(contentsOf: [0x92, 0x7C]) // tag
        newTiff.append(contentsOf: [0x00, 0x07]) // type = UNDEFINED
        newTiff.appendUInt32BE(UInt32(newMakerNote.count)) // count
        newTiff.appendUInt32BE(makerNoteDataOffset) // offset to MakerNote data
        // Next IFD offset: 0
        newTiff.append(contentsOf: [0x00, 0x00, 0x00, 0x00])
        // MakerNote data
        newTiff.append(newMakerNote)
        // Assemble the Exif item payload around the new TIFF.
        var newExifItem = Data()
        // 4 bytes: offset to TIFF (= 6, past "Exif\0\0")
        newExifItem.append(contentsOf: [0x00, 0x00, 0x00, 0x06])
        // "Exif\0\0"
        newExifItem.append(contentsOf: [0x45, 0x78, 0x69, 0x66, 0x00, 0x00])
        // TIFF data
        newExifItem.append(newTiff)
        return newExifItem
    }

    // MARK: - Box Parsing

    /// Decoded ISO BMFF box header (supports 32-bit, 64-bit, and to-EOF sizes).
    private struct BoxHeader {
        let type: String        // four-character code, e.g. "meta"
        let size: Int           // total box size including the header
        let headerSize: Int     // 8 for compact, 16 for largesize boxes
        let contentStart: Int   // absolute offset of the box payload
        let end: Int            // absolute offset one past the box
    }

    /// Lightweight view of one box within a larger `Data` buffer.
    /// NOTE(review): `header` performs no bounds checks; callers obtain ranges
    /// via `readBoxHeader`, which does validate — keep it that way.
    private struct BoxView {
        let data: Data
        let range: Range<Int>
        var start: Int { range.lowerBound }
        var end: Int { range.upperBound }
        var header: BoxHeader {
            let size32 = Int(data.readUInt32BE(at: start))
            let type = data.readFourCC(at: start + 4)
            if size32 == 1 {
                // size == 1 means a 64-bit "largesize" follows the type.
                let size64 = Int(data.readUInt64BE(at: start + 8))
                return BoxHeader(
                    type: type,
                    size: size64,
                    headerSize: 16,
                    contentStart: start + 16,
                    end: start + size64
                )
            } else if size32 == 0 {
                // size == 0 means the box extends to the end of the file.
                return BoxHeader(
                    type: type,
                    size: data.count - start,
                    headerSize: 8,
                    contentStart: start + 8,
                    end: data.count
                )
            } else {
                return BoxHeader(
                    type: type,
                    size: size32,
                    headerSize: 8,
                    contentStart: start + 8,
                    end: start + size32
                )
            }
        }
        var contentStart: Int { header.contentStart }
    }

    /// Scans top-level boxes for the first one of `type`; returns its full
    /// byte range, or nil if none is found.
    private static func findTopLevelBox(type: String, in data: Data) throws -> Range<Int>? {
        var cursor = 0
        while cursor + 8 <= data.count {
            let box = try readBoxHeader(at: cursor, data: data)
            if box.type == type { return cursor..<box.end }
            cursor = box.end
        }
        return nil
    }

    /// Scans sibling boxes within `range` for the first one of `type`.
    /// The returned range is clamped to `range.upperBound`.
    private static func findChildBox(type: String, within range: Range<Int>, in data: Data) throws -> Range<Int>? {
        var cursor = range.lowerBound
        while cursor + 8 <= range.upperBound {
            let box = try readBoxHeader(at: cursor, data: data)
            if box.type == type { return cursor..<min(box.end, range.upperBound) }
            cursor = box.end
        }
        return nil
    }

    /// Reads and validates a box header at `offset`.
    /// - Throws: `invalidHEIC` if the header is truncated or the size field is
    ///   smaller than the header itself (which would stall the scan loops).
    private static func readBoxHeader(at offset: Int, data: Data) throws -> BoxHeader {
        guard offset + 8 <= data.count else {
            throw HEICMakerNoteError.invalidHEIC("box header out of bounds")
        }
        let size32 = Int(data.readUInt32BE(at: offset))
        let type = data.readFourCC(at: offset + 4)
        if size32 == 1 {
            // 64-bit "largesize" variant.
            guard offset + 16 <= data.count else {
                throw HEICMakerNoteError.invalidHEIC("large size box header out of bounds")
            }
            let size64 = Int(data.readUInt64BE(at: offset + 8))
            guard size64 >= 16 else {
                throw HEICMakerNoteError.invalidHEIC("invalid box size")
            }
            return BoxHeader(type: type, size: size64, headerSize: 16, contentStart: offset + 16, end: offset + size64)
        } else if size32 == 0 {
            // Box extends to end of file.
            return BoxHeader(type: type, size: data.count - offset, headerSize: 8, contentStart: offset + 8, end: data.count)
        } else {
            guard size32 >= 8 else {
                throw HEICMakerNoteError.invalidHEIC("invalid box size")
            }
            return BoxHeader(type: type, size: size32, headerSize: 8, contentStart: offset + 8, end: offset + size32)
        }
    }

    // MARK: - iinf / infe Parsing

    /// Walks the `iinf` box's `infe` children and returns the item_ID of the
    /// first item whose item_type is "Exif".
    /// - Throws: `exifItemNotFound` if no such entry exists.
    private static func parseIINFAndFindExifItemID(data: Data, iinfRange: Range<Int>) throws -> UInt32 {
        let iinf = BoxView(data: data, range: iinfRange).header
        var cursor = iinf.contentStart
        guard cursor + 4 <= iinf.end else {
            throw HEICMakerNoteError.invalidHEIC("iinf too short")
        }
        // FullBox: 1 version byte + 3 flag bytes.
        let version = data.readUInt8(at: cursor)
        cursor += 4
        let entryCount: UInt32
        if version == 0 {
            entryCount = UInt32(data.readUInt16BE(at: cursor))
            cursor += 2
        } else {
            entryCount = data.readUInt32BE(at: cursor)
            cursor += 4
        }
        var foundExif: UInt32?
        var scanned: UInt32 = 0
        while cursor + 8 <= iinf.end, scanned < entryCount {
            let infe = try readBoxHeader(at: cursor, data: data)
            // Skip unexpected non-infe children without counting them against
            // entryCount (which counts infe entries).
            guard infe.type == "infe" else {
                cursor = infe.end
                continue
            }
            var p = infe.contentStart
            guard p + 4 <= infe.end else {
                throw HEICMakerNoteError.invalidHEIC("infe too short")
            }
            // infe FullBox: version byte + flags.
            let infeVersion = data.readUInt8(at: p)
            p += 4
            let itemID: UInt32
            if infeVersion >= 3 {
                itemID = data.readUInt32BE(at: p); p += 4
            } else {
                itemID = UInt32(data.readUInt16BE(at: p)); p += 2
            }
            p += 2 // item_protection_index
            var itemType = ""
            if infeVersion >= 2 {
                guard p + 4 <= infe.end else {
                    throw HEICMakerNoteError.invalidHEIC("infe item_type out of bounds")
                }
                itemType = data.readFourCC(at: p)
                p += 4
            }
            if itemType == "Exif" {
                foundExif = itemID
                break
            }
            cursor = infe.end
            scanned += 1
        }
        guard let exifID = foundExif else {
            throw HEICMakerNoteError.exifItemNotFound
        }
        return exifID
    }

    // MARK: - iloc Parsing

    /// Resolved absolute location of an item's first extent.
    private struct ItemLocation {
        let offset: UInt64
        let length: UInt64
    }

    /// Field sizes and per-item byte positions captured while parsing `iloc`,
    /// so `updateILOC` can later rewrite extents in place.
    private struct ILOCInfo {
        let version: UInt8
        let offsetSize: Int
        let lengthSize: Int
        let baseOffsetSize: Int
        let indexSize: Int
        let itemEntries: [UInt32: ILOCItemEntry]
    }

    /// Absolute file positions of one item's first extent_offset/extent_length
    /// fields inside the iloc box.
    private struct ILOCItemEntry {
        let itemID: UInt32
        let extentOffsetPosition: Int // position of extent_offset
        let extentLengthPosition: Int // position of extent_length
    }

    /// Parses the `iloc` box, returning each item's resolved first-extent
    /// location plus the bookkeeping needed to patch extents later.
    ///
    /// Only the FIRST extent of each item is captured; items whose first
    /// extent has zero length are skipped.
    private static func parseILOCWithInfo(data: Data, ilocRange: Range<Int>) throws -> ([UInt32: ItemLocation], ILOCInfo) {
        let iloc = BoxView(data: data, range: ilocRange).header
        var cursor = iloc.contentStart
        guard cursor + 4 <= iloc.end else {
            throw HEICMakerNoteError.invalidHEIC("iloc too short")
        }
        // FullBox: version byte + 3 flag bytes.
        let version = data.readUInt8(at: cursor)
        cursor += 4
        guard cursor + 2 <= iloc.end else {
            throw HEICMakerNoteError.invalidHEIC("iloc header out of bounds")
        }
        // Two packed bytes: offset_size/length_size, base_offset_size/index_size.
        let offsetSize = Int(data.readUInt8(at: cursor) >> 4)
        let lengthSize = Int(data.readUInt8(at: cursor) & 0x0F)
        cursor += 1
        let baseOffsetSize = Int(data.readUInt8(at: cursor) >> 4)
        let indexSize = Int(data.readUInt8(at: cursor) & 0x0F)
        cursor += 1
        let itemCount: UInt32
        if version < 2 {
            guard cursor + 2 <= iloc.end else {
                throw HEICMakerNoteError.invalidHEIC("iloc item_count out of bounds")
            }
            itemCount = UInt32(data.readUInt16BE(at: cursor))
            cursor += 2
        } else {
            guard cursor + 4 <= iloc.end else {
                throw HEICMakerNoteError.invalidHEIC("iloc item_count out of bounds")
            }
            itemCount = data.readUInt32BE(at: cursor)
            cursor += 4
        }
        var locations: [UInt32: ItemLocation] = [:]
        var itemEntries: [UInt32: ILOCItemEntry] = [:]
        for _ in 0..<itemCount {
            guard cursor + 2 <= iloc.end else { break }
            let itemID: UInt32
            if version < 2 {
                itemID = UInt32(data.readUInt16BE(at: cursor)); cursor += 2
            } else {
                guard cursor + 4 <= iloc.end else { break }
                itemID = data.readUInt32BE(at: cursor); cursor += 4
            }
            if version == 1 || version == 2 {
                // Skip reserved + construction_method (2 bytes).
                guard cursor + 2 <= iloc.end else { break }
                cursor += 2
            }
            // Skip data_reference_index (2 bytes).
            guard cursor + 2 <= iloc.end else { break }
            cursor += 2
            guard cursor + baseOffsetSize <= iloc.end else { break }
            let baseOffset = try data.readUIntBE(at: cursor, size: baseOffsetSize)
            cursor += baseOffsetSize
            guard cursor + 2 <= iloc.end else { break }
            let extentCount = Int(data.readUInt16BE(at: cursor))
            cursor += 2
            var firstExtentOffset: UInt64 = 0
            var firstExtentLength: UInt64 = 0
            var extentOffsetPos = 0
            var extentLengthPos = 0
            for e in 0..<extentCount {
                if (version == 1 || version == 2) && indexSize > 0 {
                    // Skip extent_index when present.
                    guard cursor + indexSize <= iloc.end else { break }
                    cursor += indexSize
                }
                guard cursor + offsetSize + lengthSize <= iloc.end else { break }
                if e == 0 {
                    extentOffsetPos = cursor
                }
                let extentOffset = try data.readUIntBE(at: cursor, size: offsetSize)
                cursor += offsetSize
                if e == 0 {
                    extentLengthPos = cursor
                }
                let extentLength = try data.readUIntBE(at: cursor, size: lengthSize)
                cursor += lengthSize
                if e == 0 {
                    firstExtentOffset = extentOffset
                    firstExtentLength = extentLength
                }
            }
            let fileOffset = baseOffset + firstExtentOffset
            if firstExtentLength > 0 {
                locations[itemID] = ItemLocation(offset: fileOffset, length: firstExtentLength)
                itemEntries[itemID] = ILOCItemEntry(
                    itemID: itemID,
                    extentOffsetPosition: extentOffsetPos,
                    extentLengthPosition: extentLengthPos
                )
            }
        }
        let info = ILOCInfo(
            version: version,
            offsetSize: offsetSize,
            lengthSize: lengthSize,
            baseOffsetSize: baseOffsetSize,
            indexSize: indexSize,
            itemEntries: itemEntries
        )
        return (locations, info)
    }

    /// Rewrites the first extent_offset/extent_length for `itemID` in place.
    ///
    /// NOTE(review): `newOffset` is written directly into extent_offset, which
    /// is only correct when the item's base_offset is 0 (read-side computes
    /// baseOffset + extentOffset). Also assumes offsetSize/lengthSize are wide
    /// enough to hold the new values — verify both for real-world files.
    private static func updateILOC(
        in fileData: inout Data,
        ilocRange: Range<Int>,
        ilocInfo: ILOCInfo,
        itemID: UInt32,
        newOffset: UInt64,
        newLength: UInt64
    ) throws {
        guard let entry = ilocInfo.itemEntries[itemID] else {
            throw HEICMakerNoteError.exifLocationNotFound(itemID: itemID)
        }
        // Overwrite extent_offset.
        fileData.writeUIntBE(at: entry.extentOffsetPosition, value: newOffset, size: ilocInfo.offsetSize)
        // Overwrite extent_length.
        fileData.writeUIntBE(at: entry.extentLengthPosition, value: newLength, size: ilocInfo.lengthSize)
    }

    // MARK: - EXIF/TIFF Patching

    // NOTE(review): currently unreferenced in this file — either wire it into
    // endian-aware patching or remove it.
    enum Endian {
        case little
        case big
    }
}
// MARK: - Data Extensions
// MARK: - Data Extensions

/// Big-endian binary read/write helpers used by the HEIC box parser.
///
/// All offsets are RELATIVE to the receiver's `startIndex`, so every helper is
/// correct even when the receiver is a slice of a larger `Data` (slices keep
/// the parent's indices). `readFourCC` previously indexed `subdata(in:)` with
/// the raw offset — correct only for zero-based `Data` — and is fixed here to
/// match the other readers.
private extension Data {
    /// Reads one byte at a slice-relative offset.
    func readUInt8(at offset: Int) -> UInt8 {
        self[self.index(self.startIndex, offsetBy: offset)]
    }

    /// Reads a 16-bit big-endian unsigned integer.
    func readUInt16BE(at offset: Int) -> UInt16 {
        let b0 = UInt16(readUInt8(at: offset))
        let b1 = UInt16(readUInt8(at: offset + 1))
        return (b0 << 8) | b1
    }

    /// Reads a 32-bit big-endian unsigned integer.
    func readUInt32BE(at offset: Int) -> UInt32 {
        let b0 = UInt32(readUInt8(at: offset))
        let b1 = UInt32(readUInt8(at: offset + 1))
        let b2 = UInt32(readUInt8(at: offset + 2))
        let b3 = UInt32(readUInt8(at: offset + 3))
        return (b0 << 24) | (b1 << 16) | (b2 << 8) | b3
    }

    /// Reads a 64-bit big-endian unsigned integer.
    func readUInt64BE(at offset: Int) -> UInt64 {
        var v: UInt64 = 0
        for i in 0..<8 {
            v = (v << 8) | UInt64(readUInt8(at: offset + i))
        }
        return v
    }

    /// Reads a 4-character ASCII box type ("ftyp", "meta", ...).
    /// Returns "????" for non-ASCII bytes.
    ///
    /// Fixed: the subrange is now computed from `startIndex`, so the call is
    /// correct for `Data` slices as well (previously it used raw offsets and
    /// would read the wrong bytes, or trap, on a slice).
    func readFourCC(at offset: Int) -> String {
        let lower = self.index(self.startIndex, offsetBy: offset)
        let upper = self.index(lower, offsetBy: 4)
        let bytes = self.subdata(in: lower..<upper)
        return String(bytes: bytes, encoding: .ascii) ?? "????"
    }

    /// Reads a big-endian unsigned integer of `size` bytes (0...8).
    /// A size of 0 yields 0, matching iloc's optional-field semantics.
    /// - Throws: `invalidHEIC` when the field would run past the buffer.
    func readUIntBE(at offset: Int, size: Int) throws -> UInt64 {
        if size == 0 { return 0 }
        guard offset + size <= count else {
            throw HEICMakerNoteError.invalidHEIC("Variable-length integer out of bounds")
        }
        var v: UInt64 = 0
        for i in 0..<size {
            v = (v << 8) | UInt64(readUInt8(at: offset + i))
        }
        return v
    }

    /// Appends a 32-bit unsigned integer in big-endian byte order.
    mutating func appendUInt32BE(_ value: UInt32) {
        append(UInt8((value >> 24) & 0xFF))
        append(UInt8((value >> 16) & 0xFF))
        append(UInt8((value >> 8) & 0xFF))
        append(UInt8(value & 0xFF))
    }

    /// Writes the low `size` bytes of `value` big-endian at a slice-relative
    /// offset, overwriting existing bytes in place.
    mutating func writeUIntBE(at offset: Int, value: UInt64, size: Int) {
        for i in 0..<size {
            let byteIndex = self.index(self.startIndex, offsetBy: offset + i)
            let shift = (size - 1 - i) * 8
            self[byteIndex] = UInt8((value >> shift) & 0xFF)
        }
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,83 @@
import Foundation
/// Builds Apple MakerNotes blobs (ContentIdentifier + LivePhotoVideoIndex)
/// for HEIC Live Photo stills.
///
/// `CGImageDestination` cannot write the Int64-valued MakerNotes entries Apple
/// uses, so a MakerNotes blob captured from a real device is used as a byte
/// template and only the two variable fields are patched in place.
public struct MakerNotesPatcher {
    // MARK: - Template field layout (byte offsets inside the MakerNotes blob)

    /// ContentIdentifier field: 36 ASCII bytes (UUID string), NUL-padded.
    private static let contentIdentifierOffset = 0x580 // 1408
    private static let contentIdentifierLength = 36

    /// LivePhotoVideoIndex field: 8 bytes, big-endian Int64.
    private static let livePhotoVideoIndexOffset = 0x5a6 // 1446
    private static let livePhotoVideoIndexLength = 8

    /// MakerNotes blob captured from an iPhone 13 Pro Max Live Photo.
    /// Every byte is used verbatim except the ContentIdentifier and
    /// LivePhotoVideoIndex regions, which `createMakerNotes` rewrites.
    private static let makerNotesTemplate: Data = {
        // Base64 of the captured MakerNotes.
        let base64 = """
        QXBwbGUgaU9TAAABTU0APQABAAkAAAABAAAAEAACAAcAAAIAAAAC8AADAAcAAABoAAAE8AAEAAkA\
        AAABAAAAAQAFAAkAAAABAAAAqQAGAAkAAAABAAAApQAHAAkAAAABAAAAAQAIAAoAAAADAAAFWAAM\
        AAoAAAACAAAFcAANAAkAAAABAAAAFwAOAAkAAAABAAAABAAQAAkAAAABAAAAAQARAAIAAAAlAAAF\
        gAAUAAkAAAABAAAACgAXABAAAAABAAAFpgAZAAkAAAABAAAAAgAaAAIAAAAGAAAFrgAfAAkAAAAB\
        AAAAAAAgAAIAAAAlAAAFtAAhAAoAAAABAAAF2gAjAAkAAAACAAAF4gAlABAAAAABAAAF6gAmAAkA\
        AAABAAAAAwAnAAoAAAABAAAF8gAoAAkAAAABAAAAAQArAAIAAAAlAAAF+gAtAAkAAAABAAATXAAu\
        AAkAAAABAAAAAQAvAAkAAAABAAAAMAAwAAoAAAABAAAGIAAzAAkAAAABAAAQAAA0AAkAAAABAAAA\
        BAA1AAkAAAABAAAAAwA2AAkAAAABAADnJAA3AAkAAAABAAAABAA4AAkAAAABAAACPgA5AAkAAAAB\
        AAAAAAA6AAkAAAABAAAAAAA7AAkAAAABAAAAAAA8AAkAAAABAAAABAA9AAkAAAABAAAAAAA/AAkA\
        AAABAAAAOwBAAAcAAABQAAAGKABBAAkAAAABAAAAAABCAAkAAAABAAAAAABDAAkAAAABAAAAAABE\
        AAkAAAABAAAAAABFAAkAAAABAAAAAABGAAkAAAABAAAAAABIAAkAAAABAAACPgBJAAkAAAABAAAA\
        AABKAAkAAAABAAAAAgBNAAoAAAABAAAGeABOAAcAAAB5AAAGgABPAAcAAAArAAAG+gBSAAkAAAAB\
        AAAAAQBTAAkAAAABAAAAAQBVAAkAAAABAAAAAQBYAAkAAAABAAAHAwBgAAkAAAABAAASAABhAAkA\
        AAABAAAAGgAAAAC9AtMCxAKWAlsCIALqAbwBkgFrAUYBJQEIAfEA3QDMAAMDMwNAA/QCmQJWAg0C
        1QGoAX4BUwEtAQ8B9QDhAM0ALANqA2oDTAPrAnUCKALpAb4BhgFWATABEAH0AN4AyQA9A7oDwgNP
        A7gCXAIYAugBqwF2AUwBJgEGAeoA0gC+ANECrwPSAxIDZAIVAt4BqwF8AVABKQEHAekA0AC6AKgA
        zwFCAnYCBAK/AZYBcwFQAS8BDQHvANQAvQCpAJgAigDUAP8AGgEVARoB5QC6AMUAsACsAJkAggBy
        AGQAXABTAGAAaABqAGYAXABMAEcARwBCAEEAPgAvACQAJAAlACoARgBIAEcARAA/ADcAMwAvACoA
        KAAnACEAHQAfAB8AIAAxADIAMQAwAC4AKwAnACcAJQAfABwAGgAXABkAEwAUACkAKgAmACQAIgAg
        ACEAIgAgAB4AHAAZABgAFAAXABUAIgAcABoAGwAaABcAHAAdABkAGAAYABgAFwAaABgAGAAaABsA
        GgAYABUAEwAWABcAEwAWABcAFQAVABYAEwAUABoAFwAWABMAEgATABAADwATABMAEgANAA8ADwAP
        AAwAEwAVABIAEwANABUAEwATABIADAAPAAsAEAASAA8ADgARABEADwAMAAwADwAWABMAEgASABQA
        DQAPAAoADAAOAGJwbGlzdDAw1AECAwQFBgcIVWZsYWdzVXZhbHVlWXRpbWVzY2FsZVVlcG9jaBAB
        EwABVEUBR7fxEjuaygAQAAgRFx0nLS84PQAAAAAAAAEBAAAAAAAAAAkAAAAAAAAAAAAAAAAAAAA/
        ///J3gAANk3//8R4AAe+////5bUAAVy+AAAAOwAAAQAAAAAnAAABAAAAAAAAAAAAAAAAAAAAALtA
        AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABI
        RUlDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
        AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
        AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAbwBwAGwAaQBzAHQAMAAwANQBAgMEBQYHCFEzUTFR
        MlEwEAQiAAAAACQ/AAAAEAEIERMVFxkbICUAAAAAAAABAQAAAAAAAAAJAAAAAAAAAAAAAAAAAAAA
        JwAC8/UAABV+YnBsaXN0MDDSAQIDSFExUTIQA6IFCtIGBwgJUzIuMVMyLjIjQEsf2IAAAACJQAAA
        AAAAAADSBgcLDCM/4hqAAAAAAKNAVMAAAAAAAAgNDxETFhsfIywlOkMAAAAAAAABAQAAAAAAAAAL
        AAAAAAAAAAAAAAAAAAAAQQAAAAA=
        """
        // The template is a compile-time constant, so decoding cannot fail.
        return Data(base64Encoded: base64.replacingOccurrences(of: "\\\n", with: "").replacingOccurrences(of: "\n", with: ""))!
    }()

    /// Builds a patched MakerNotes blob.
    /// - Parameters:
    ///   - contentIdentifier: Live Photo content identifier (a UUID string;
    ///     should match the identifier written into the paired video).
    ///   - livePhotoVideoIndex: Live Photo video index value, stored as a
    ///     big-endian Int64 in the template.
    /// - Returns: The template MakerNotes with both fields patched in.
    public static func createMakerNotes(
        contentIdentifier: String,
        livePhotoVideoIndex: Int64
    ) -> Data {
        var data = makerNotesTemplate
        // Patch ContentIdentifier. UUID strings are pure ASCII, for which UTF-8
        // encoding is byte-identical; unlike the previous
        // `data(using: .ascii)!` this cannot crash on non-ASCII input (extra
        // bytes are simply truncated to the 36-byte field below).
        let idBytes = Data(contentIdentifier.utf8)
        let paddedUUID = idBytes + Data(repeating: 0, count: max(0, contentIdentifierLength - idBytes.count))
        data.replaceSubrange(
            contentIdentifierOffset..<(contentIdentifierOffset + contentIdentifierLength),
            with: paddedUUID.prefix(contentIdentifierLength)
        )
        // Patch LivePhotoVideoIndex (big-endian).
        var bigEndianValue = UInt64(bitPattern: livePhotoVideoIndex).bigEndian
        let indexData = Data(bytes: &bigEndianValue, count: livePhotoVideoIndexLength)
        data.replaceSubrange(
            livePhotoVideoIndexOffset..<(livePhotoVideoIndexOffset + livePhotoVideoIndexLength),
            with: indexData
        )
        return data
    }
}

101
TASK.md Normal file
View File

@@ -0,0 +1,101 @@
# TASK:to-live-photo
> 说明:本清单按阶段拆解研发事项,默认最低支持 iOS/iPadOS 16+,先完成 MVP 闭环,再逐步完善。
## M0技术预研 / POC以“系统可识别 Live Photo”为第一目标
- [ ] 建立 Xcode 工程骨架SwiftUI 优先),设置 Deployment Target = iOS/iPadOS 16.0
- [ ] 补齐权限与 Info.plist 文案:
- [ ] NSPhotoLibraryUsageDescription
- [ ] NSPhotoLibraryAddUsageDescription
- [ ] POC最小链路跑通不做复杂编辑
- [ ] 从相册导入视频PHPicker视频过滤
- [ ] 以默认参数3s、maxDimension、30fps 策略)生成 photo + pairedVideo
- [ ] 写入相册PHAssetCreationRequest 同时写入 .photo 与 .pairedVideo
- [ ] 校验:保存后按 assetId 取回并验证 Live 识别(至少做到“相册 Live 标识 + 长按可播”的人工确认路径)
- [ ] 约束与策略确认(写入代码常量/配置):
- [ ] 时长限制1.5~5s默认 3s
- [ ] 分辨率上限:默认 1920可后续自适应
- [ ] 帧率策略:>30fps 降到 30fps
- [ ] HDR 策略:默认转 SDR 或首次提示(确认最终策略)
- [ ] 编码策略:优先 re-mux失败再转 H.264 兼容导出(确认兜底策略)
- [ ] 设计基础设施:
- [ ] WorkItem / ExportParams 数据模型(与 TECHSPEC 对齐)
- [ ] CacheManager按 workId 建目录、成功/失败保留 24h 清理策略
- [ ] Logger阶段化日志stage enum + progress + error_code
### M0 完成定义
- [ ] 能在至少 1 台 iPhone + 1 台 iPad 上生成并保存 Live Photo且系统相册可识别有 Live 标识,长按可播放)。
## M1MVP导入→编辑→生成→保存→引导
### 1) UI 页面闭环
- [ ] HomeView首页导入入口、最近作品可先仅内存态/本地简单持久化)
- [ ] EditorView比例裁剪、时长裁剪、封面帧选择、预览
- [ ] ProcessingView进度条 + 阶段文案 + 取消/重试/返回编辑
- [ ] ResultView保存到相册、再次编辑、进入壁纸引导
- [ ] WallpaperGuideView按系统版本展示步骤卡片、FAQ、打开设置、完成确认
### 2) 编辑能力MVP 版)
- [ ] 比例模板iPhone 锁屏 / 全面屏 / 4:3 等(先做 2~3 个核心模板)
- [ ] 裁剪手势:缩放 + 拖拽,保持比例
- [ ] 时长裁剪range slider1.5~5s默认 0~3s
- [ ] 封面帧:滑杆选择 keyFrameTime实时刷新封面预览
### 3) 生成与保存(与 TECHSPEC 阶段枚举对齐)
- [ ] 生成管线normalize → extractKeyFrame → writePhotoMetadata → writeVideoMetadata → saveToAlbum → validate
- [ ] 取消策略:取消时终止任务并清理未写入相册的中间文件
- [ ] 错误码与可行动建议:至少覆盖 LPB-001/101/201/301/401/501/901
### 4) 引导内容MVP 版)
- [ ] 版本检测iOS/iPadOS 16 显示“系统限制/不支持锁屏 Live 动效”的明确文案与替代方案
- [ ] iOS/iPadOS 17+:展示步骤卡片(设置→墙纸→添加新墙纸→照片→选择 Live Photo→开启 Live
- [ ] FAQMotion not available、低电量模式、找不到 Live 按钮等
### 5) 基础埋点(可先打印日志,后续再接 SDK
- [ ] home_import_video_click / import_video_success
- [ ] editor_generate_click / build_livephoto_start / build_livephoto_fail
- [ ] save_album_success / save_album_fail
- [ ] guide_open / guide_complete
### 6) MVP QA手工为主
- [ ] 测试矩阵iPhone 1~2 台 + iPad 1 台iOS/iPadOS 17+ 与 16 各至少 1 台
- [ ] 素材覆盖H.264/HEVC、30/60fps、竖/横、SDR/HDR
- [ ] 验收点:生成成功率、保存成功率、相册识别率、引导文案准确性
### M1 完成定义
- [ ] 按 PRD 的 MVP 验收标准打通闭环:生成 Live Photo → 保存相册可识别 → 可进入引导并在不同系统版本下给出正确提示。
## M2完善体验提升 + 失败率降低)
- [ ] 兼容模式开关UI 可见):降分辨率/30fps/H.264/SDR
- [ ] 自动诊断与建议:根据素材参数提示“建议缩短/建议兼容模式/建议转 SDR”等
- [ ] iPad 编辑页布局优化:左右分栏(预览/参数)
- [ ] 最近作品列表完善:持久化(仅存参数与缩略图/assetId不重复存媒体
- [ ] 设置页(可选):权限状态、清理缓存、反馈入口
- [ ] 错误反馈包导出可选builder.log + 参数(不包含媒体内容)
## M3稳定性 / 上线准备
- [ ] 性能与内存优化:大视频处理、峰值内存控制
- [ ] 崩溃与异常收敛:日志脱敏、错误归因完善(按 stage 统计)
- [ ] App Store 合规检查:权限文案、引导表述(不承诺一键设置壁纸)、隐私说明
- [ ] 产出核心文档补齐(按需要最小化):
- [ ] 测试文档MVP 测试矩阵与用例
- [ ] 用户手册:导入/生成/保存/设置引导与常见问题
---
## 决策备忘(后续需要你拍板)
- [ ] HDR 默认策略:默认转 SDR vs 首次提示用户选择
- [ ] 编码兜底策略:完全自动兜底 vs 失败后提示开启兼容模式
- [ ] 高级合成(照片+视频)进入哪个阶段(建议 M2

View File

@@ -0,0 +1,8 @@
import XCTest
@testable import LivePhotoCore

/// Placeholder test suite for the LivePhotoCore module.
///
/// Its only purpose today is to confirm that the test target compiles
/// and links against the package; replace with real tests as features land.
final class LivePhotoCoreTests: XCTestCase {

    /// Trivial sanity check so the suite is never empty.
    func testPlaceholder() {
        // A named constant keeps the intent explicit: this assertion
        // cannot fail and exists only to exercise the test pipeline.
        let targetIsWiredUp = true
        XCTAssertTrue(targetIsWiredUp)
    }
}

View File

@@ -0,0 +1,136 @@
# 交互规格书Live Photo 制作与动态壁纸引导 AppV0.2-IX
- 适用平台iPhone / iPadiOS / iPadOS
- 日期2025-12-13Asia/Manila
- 用途:用于 AI 生成 UI / 交互实现 / QA 对照
## 1. 设计原则
- 一条主线:导入 → 编辑 → 生成 → 保存 → 引导设置。
- 减少认知:默认 3 秒、默认锁屏比例模板、默认智能封面。
- 失败可行动:每个错误必须给“下一步”。
- 强预期管理:明确“通常仅锁屏动效”,以及系统版本差异。
## 2. 全局交互规范
### 2.1 导航与按钮
- 主按钮Primary导入、生成、保存、去设置壁纸。
- 次按钮Secondary再次编辑、重试、了解原因。
- 危险操作:清理缓存/删除作品需二次确认。
### 2.2 Loading / Empty / Error 规范
| 状态 | 表现 | 必备元素 |
|---|---|---|
| Loading | 进度条 + 阶段文案 | 取消/后台继续(可选)、预计步骤而非时间 |
| Empty | 插画/图标 + 1句解释 | 主按钮引导下一步 |
| Error | 标题 + 原因 + 建议 | 重试按钮 + 反馈入口(可选) |
## 3. 页面级交互规格
### 3.1 首页HomeView
| 区域 | 组件 | 交互 | 状态/校验 |
|---|---|---|---|
| 顶部 | App 标题 + 设置入口 | 点击设置进入 Settings可选 | 无 |
| 主区 | 按钮:从相册导入视频 | 打开 PHPicker视频过滤 | 无权限→解释页 |
| 主区 | 入口:高级合成(照片+视频) | 打开 PHPicker照片+视频) | MVP可隐藏 |
| 主区 | 入口:教程/FAQ | 打开 Guide/FAQ | 版本检测提示 |
| 列表 | 最近作品0~10 | 点击进入 Result长按删除缓存可选 | 空态展示提示 |
- 空态文案示例:**“导入一段 2~3 秒视频,做成可以长按播放的实况照片。”**
- 首次进入:可展示 2~3 页 onboarding可选
### 3.2 编辑页EditorView
| 模块 | 组件 | 交互细节 | 默认值 |
|---|---|---|---|
| 预览 | 视频播放器(静音预览可选) | 支持播放/暂停;拖动时间轴预览 | 自动播放关闭 |
| 比例 | 比例选择器模板chips | 切换模板时保持主体居中;允许用户再拖拽微调 | iPhone锁屏模板 |
| 裁剪 | 画面裁剪手势 | 双指缩放、单指拖动;显示安全区参考线(可选) | scale=1 |
| 时长 | 时间裁剪条range slider | 限制 1.5~5s默认 0~3s自动吸附到整帧点 | 3秒 |
| 封面 | 封面帧滑杆 + 预览帧 | 滑动实时更新封面预览;提供“推荐封面”按钮 | 中间帧 |
| 操作 | 按钮:生成 Live Photo | 点击后进入 Processing参数校验不通过则提示 | 可用 |
- 校验提示:时长过长→建议缩短;分辨率过高→建议开启兼容模式。
- iPad 适配:左侧预览、右侧参数面板;或上下布局(横屏)。
### 3.3 生成进度页ProcessingView
| 元素 | 说明 | 交互 |
|---|---|---|
| 进度条 | 0~100%;按阶段推进 | 不可拖动 |
| 阶段文案 | 例如:处理视频 / 写入实况信息 / 准备保存 | 随阶段自动切换 |
| 取消 | 终止任务并回到编辑 | 二次确认;取消后清理缓存 |
| 失败态 | 展示错误码+原因+建议 | 按钮:重试 / 返回编辑 |
### 3.4 结果页ResultView
| 区域 | 组件 | 交互 | 成功条件 |
|---|---|---|---|
| 顶部 | 缩略图 + Live 标识(模拟) | 长按预览动效App内 | 预览流畅 |
| 主按钮 | 保存到相册 | 触发相册写入;成功弹 toast | 相册可见Live |
| 主按钮 | 去设置壁纸 | 进入 WallpaperGuide | 版本差异提示 |
| 次按钮 | 再次编辑 | 回到 Editor保留参数 | 参数不丢 |
| 信息 | 作品参数摘要 | 展开查看细节(可选) | 无 |
### 3.5 壁纸引导页WallpaperGuideView
| 模块 | 内容 | 交互 | 备注 |
|---|---|---|---|
| 版本检测 | 显示:当前系统版本/是否支持Live锁屏动效 | 点击“了解原因”展开说明 | 必须明确限制 |
| 步骤卡片 | 步骤1~5图文 | 每步可折叠;支持复制路径文案 | iPhone/iPad分支 |
| 常见问题 | Motion not available、低电量、找不到Live按钮等 | 点击展开答案 | MVP用静态文案 |
| 跳转设置 | 按钮:打开设置 | 打开 Settings到首页即可 | 不承诺深链成功 |
| 完成确认 | 按钮:我已设置完成 | 记录引导完成埋点 | 用于漏斗统计 |
## 4. 文案与提示(统一模板)
### 4.1 错误提示模板
- 标题:一句话告诉用户发生了什么(例如:**“视频处理失败”**
- 原因:给 1~2 条最可能原因(不要超过 3 条)
- 建议:给可点击动作(例如:**“切换到兼容模式H.264)”**、**“缩短到 3 秒以内”**
- 附加:可显示错误码(长按复制)与反馈入口(可选)
### 4.2 兼容模式说明(示例)
- 兼容模式会:降低分辨率、降帧率到 30fps、转码到 H.264(如需要)、将 HDR 转为 SDR。
- 目的:提升生成成功率与壁纸/分享兼容性。
## 5. 埋点事件字典Event Dictionary
### 5.1 事件命名规范
- 动词_对象_结果例如 `import_video_success`
- 所有事件带公共属性:`app_version`, `os_version`, `device_model`, `locale`
### 5.2 核心事件表
| 事件名 | 触发时机 | 关键属性properties | 用途 |
|---|---|---|---|
| home_import_video_click | 点击“导入视频” | entry=home | 漏斗起点 |
| import_video_success | 完成导入并进入编辑 | duration,resolution,fps,codec,hdr | 素材分布 |
| editor_generate_click | 点击生成 | ratio,trim,has_cover,compat_mode | 转化与参数 |
| build_livephoto_start | 开始生成 | work_id,stage=normalize | 性能监控 |
| build_livephoto_fail | 生成失败 | error_code,stage,codec_policy,hdr_policy | 失败归因 |
| save_album_success | 写入相册成功 | asset_id,elapsed_ms | 闭环成功 |
| save_album_fail | 写入相册失败 | error_code,permission_state | 权限问题 |
| guide_open | 进入壁纸引导页 | from=result,os_support=true/false | 引导覆盖 |
| guide_complete | 点击“已设置完成” | time_spent_s | 引导成效 |
| cache_clear | 清理缓存 | freed_mb | 存储与留存 |
## 6. iPhone / iPad 适配规则
- iPhone编辑页优先竖屏底部工具栏预览占上半屏。
- iPad横屏优先左右分栏预览/参数);支持拖拽文件导入(可选)。
- 安全区:裁剪预览提供参考线(可选)以减少锁屏 UI 遮挡主体。
## 7. QA 对照清单(交互)
1. 所有主按钮在不可用状态必须有原因提示(例如未选视频)。
2. 生成中返回/退出:必须提示风险(任务取消/后台)。
3. 权限拒绝:必须有解释页与“去设置开启”按钮。
4. iOS 16引导页必须出现“不支持锁屏Live动效”的明确文案。
5. 所有错误页必须包含:重试与返回编辑两个行动。

View File

@@ -0,0 +1,557 @@
PRDLive Photo 制作与动态壁纸引导 App
适用平台iPhone / iPadiOS / iPadOS
文档版本V0.2(草案)
日期2025-12-13Asia/Manila
作者:——(待填)
# 1. 文档信息与变更记录
# 2. 背景与机会
用户希望把自拍视频/短片制作成“像苹果实况照片Live Photo一样”的内容用于相册浏览、分享例如发微信、并在支持的系统版本上设置为锁屏动态壁纸。
痛点 1现有转换工具分散流程复杂导入、裁剪、转码、导出、再去相册设置
痛点 2很多用户不知道 iOS/iPadOS 不同版本对 Live Photo 壁纸的支持差异,导致“做出来不能动”。
痛点 3对尺寸/时长/关键帧(封面)选择缺乏指导,成品效果不稳定。
本 App 提供一站式:导入视频 → 生成系统可识别的 Live Photo → 保存到相册 → 分步骤引导设置壁纸。
# 3. 产品目标与非目标
## 3.1 目标Goals
G1用户可从“视频/照片素材”快速生成“系统相册识别的 Live Photo”并保存到相册。
G2提供清晰的“设置为锁屏动态壁纸”引导降低因系统版本/设置项导致的失败率。
G3通过模板与提示帮助用户得到高成功率的“可播放、可分享、封面好看”的作品。
G4适配 iPhone 与 iPad不同屏幕与比例下提供合理裁剪与预览。
## 3.2 非目标Non-goals
不使用任何私有 API 直接替用户设置系统壁纸App Store 会拒)。
不做云端素材社区/版权图库(可在后续版本作为增长模块另立项)。
不保证所有历史系统版本都能播放 Live Photo 壁纸动画(受系统能力限制)。
# 4. 目标用户与使用场景
## 4.1 目标用户Personas
P1普通用户——想把自拍视频做成动态壁纸但不懂格式/设置路径。
P2内容创作者——需要批量把短视频转 Live Photo 进行分享与展示。
P3设计/审美用户——关注裁剪、封面帧、色彩与动效节奏。
## 4.2 核心场景Scenarios
S1用户选择一段 2-5 秒视频,生成 Live Photo保存并设置为锁屏动态壁纸。
S2用户选择“照片 + 视频”合成(照片做封面),并指定关键帧。
S3用户生成后想发微信需要在相册里保持 Live Photo 形态并可被微信识别。
S4用户在 iPad 上制作,导出后在 iPhone 上设置壁纸(跨设备)。
# 5. 兼容性与约束
## 5.1 系统与功能可用性(建议写在 App 内)
Live Photo 生成与保存iOS/iPadOS 14+(建议 15+)可实现(依赖 Photos / AVFoundation
“Live Photo 作为锁屏动态壁纸播放”iOS 17 或更高版本支持在锁屏唤醒时播放iPhone 官方说明)。
iPadOS 锁屏支持 Live Photo 的动效选项(官方 iPad 用户指南包含 Live Photo 动效按钮说明)。
注意Live Photo 动效通常只作用于锁屏;主屏一般显示静态图(以系统版本为准)。
低电量模式等系统状态可能导致动效不可用(需在引导里提示)。
## 5.2 平台约束(必须写清)
iOS/iPadOS 没有公开 API 允许第三方 App 直接设置系统壁纸:只能引导用户在系统界面完成设置。
禁止使用私有 API、越狱方案或企业签名绕过不符合 App Store 上架)。
# 6. 需求范围MVP
## 6.1 MVP 功能列表
导入:从相册选择视频;可选:选择封面照片(可不选,默认从视频自动取帧)。
编辑:裁剪比例(适配 iPhone/iPad 锁屏常见比例)、裁剪时长(建议 2-3 秒)、选择封面帧(关键帧)。
生成:将素材合成为系统可识别 Live Photo照片 + pairedVideo + 元数据)。
保存保存到系统相册Live Photos 相簿 / 最近项目)。
引导:分步骤引导用户把 Live Photo 设置为锁屏动态壁纸(按系统版本展示不同路径)。
质量检测:生成后做一次本地校验(能否被系统识别为 Live Photo失败原因提示
## 6.2 后续版本Backlog
模板:预设“人物/风景/文字”裁剪模板、调速/慢动作适配。
批量生成:一次导入多段视频,批量导出 Live Photo。
小组件/主题包:壁纸合集管理(订阅制/一次性购买)。
分享:一键分享(注意平台对实况/视频的支持差异)。
# 7. 用户流程
## 7.1 主流程:视频 → Live Photo → 设置锁屏
进入首页,点击【从相册导入视频】。
选择视频后进入编辑页:裁剪比例、裁剪时长、选择封面帧。
点击【生成 Live Photo】显示进度与阶段文案处理视频 / 写入元数据 / 保存准备)。
生成成功 → 点击【保存到相册】;提示“已保存,可在 Live Photos 相簿查看”。
进入【设置壁纸引导】:按步骤提示用户在系统【设置 > 墙纸】选择 Live Photo 并开启 Live 动效。
## 7.2 失败/异常流程
用户拒绝相册权限:展示解释页,并引导去系统设置开启。
生成失败(编码/元数据写入失败):提示原因 + 建议(更换视频编码、缩短时长、降低分辨率)。
保存成功但系统未识别为 Live Photo提示“请在相册查看是否有 Live 标识”,并提供重新生成按钮。
# 8. 功能需求详述
## 8.1 首页
入口 1导入视频必选
入口 2导入照片 + 视频(可选,作为高级模式)。
入口 3教程/引导(包含系统版本差异、常见问题)。
展示最近生成的作品列表MVP 可只展示最近 10 条)。
## 8.2 编辑页
裁剪比例常用预设iPhone 锁屏 / 全面屏 / 4:3 等),支持手动缩放与拖拽。
时长:默认 3 秒;可选范围 1.5-5 秒(过长降低成功率与体积)。
封面帧:滑杆选择时间点;支持“智能推荐封面”(清晰、有人脸、少抖动)。
预览:静态预览 + 动效预览(模拟锁屏唤醒播放方式)。
## 8.3 生成引擎(核心)
输入:视频(必选) + 可选封面照片。
输出:照片文件(建议 HEIC+ 视频文件MOVH.264/HEVC+ 绑定元数据identifier、still-image-time 等)。
关键规则:照片与视频共享同一 content/asset identifier视频包含 still-image-time 标记关键帧。
对视频进行重封装re-mux以写入元数据必要时重新编码可配置优先保留编码失败则转码
导出参数建议:分辨率/比特率上限以保证速度与成功率;音频可选保留/移除。
本地校验:生成后尝试用系统方式加载校验(如 PHLivePhoto 请求加载)并输出可读错误。
## 8.4 保存到相册
使用 Photos 写入photo 资源 + pairedVideo 资源。
成功回调:提示“已保存”;失败回调:展示错误与建议。
相册归类:系统会自动归类到 Live PhotosApp 内保存最近作品列表。
## 8.5 动态壁纸设置引导
按系统版本展示不同文案与截图示意。
iOS/iPadOS 17+:引导用户进入【设置 > 墙纸 > 添加新墙纸 > 照片 > Live Photo】并开启 Live 播放选项。
iOS/iPadOS 16明确提示系统限制提供替代方案设置静态壁纸/升级系统建议)。
常见失败提示低电量模式、Motion not available、素材参数不兼容等。
提供按钮:打开系统设置(尽量深链;无法深链则打开设置首页)。
# 9. 非功能需求
## 9.1 性能与体验
生成过程需有可见进度与阶段提示;失败可重试。
中间文件自动清理;不占用过多存储。
错误提示要“可行动”:给出下一步(缩短时长、降低分辨率、重新选择素材)。
## 9.2 隐私与合规
默认本地处理:不上传用户照片/视频。
权限最小化:仅在需要时请求相册/相机/麦克风权限,并提供用途说明。
如接入统计:匿名化、可关闭,并在隐私政策中说明。
# 10. 数据埋点与指标
## 10.1 核心漏斗
导入视频 → 进入编辑 → 点击生成 → 生成成功 → 保存成功 → 进入壁纸引导 → 引导完成(点击“完成/已设置”)。
## 10.2 关键指标KPIs
生成成功率(按机型/系统版本/视频参数分层)。
保存成功率(相册写入成功)。
引导完成率(壁纸设置引导)。
7 日留存(如后续商业化,再加转化率)。
# 11. 里程碑建议
M0技术预研Live Photo 合成可行性、系统版本验证、参数边界)。
M1MVP导入视频 → 生成 → 保存 → 基础引导)。
M2适配 iPad + 编辑体验提升(比例模板、预览增强)。
M3稳定性与质量失败提示、自动降级转码、素材建议
# 12. 验收标准MVP
在 iPhone至少 2 款机型)与 iPad至少 1 款机型)上:从相册导入视频可生成 Live Photo 并保存到系统相册。
生成的 Live Photo 在系统相册中显示为 Live Photo有 Live 标识,长按可播放)。
在支持 Live Photo 锁屏播放的系统版本上,用户可按引导步骤完成设置并在唤醒锁屏时播放。
在不支持版本上App 必须明确提示限制并给出替代方案,避免误导。
不使用私有 API权限声明齐全可通过 App Store 审核。
# 13. 风险与对策
R1系统版本差异 → 版本检测 + 差异化引导文案。
R2素材多样导致失败 → 参数归一化 + 自动降级转码 + 可读错误提示。
R3用户期待“主屏也会动” → 产品内明确说明:通常仅锁屏动效。
R4审核风险 → 只做引导,不做自动设置;不接触私有 API。
# 14. 参考资料TBD
Apple SupportSet a Live Photo as your Lock Screen wallpaper (iOS 17+): https://support.apple.com/120734
Apple SupportChange your iPhone wallpaper含 Live Photo 入口说明): https://support.apple.com/102638
Apple iPad User GuideCreate a custom iPad Lock Screen: https://support.apple.com/guide/ipad/ipad782d4de8/ipados
Apple iPad User GuideChange the wallpaper on iPad: https://support.apple.com/guide/ipad/ipad997d908e/ipados
# 15. 信息架构与页面清单
本节用于指导你用 AI 编码时的页面/路由/组件拆分。
## 15.1 信息架构IA
首页Home导入视频 / 高级合成 / 教程与FAQ / 最近作品列表
编辑页Editor比例裁剪、时长裁剪、封面帧选择、预览、生成按钮
生成进度页Processing阶段进度、取消/后台、失败原因与重试
结果页Result保存到相册、再次编辑、进入壁纸引导、分享后续
壁纸引导页Wallpaper Guide按系统版本展示步骤、常见问题、跳转设置
作品库Library - MVP可选最近作品、再次导出、删除缓存
设置Settings权限状态、清理缓存、隐私、关于、反馈
## 15.2 页面与路由(建议命名)
# 16. 用户故事与验收标准(更细颗粒)
## 16.1 用户故事User Stories
## 16.2 验收标准(按故事拆分)
US-01生成后在系统相册中显示 Live 标识;长按可播放;不出现仅照片或仅视频的孤儿资源。
US-02裁剪后的导出在锁屏预览时主体居中用户可通过手势调整导出不拉伸。
US-03用户选定封面帧后相册静态显示与导出封面一致允许系统做轻微处理
US-05引导页能根据系统版本显示正确路径并提示低电量模式/系统限制’等常见原因。
US-06错误提示包含错误标题 + 可能原因 + 可点击的解决建议(例如缩短时长/降低分辨率/改用H.264)。
# 17. 详细功能需求表(可直接喂给 AI 编码)
## 17.1 功能点-输入-处理-输出
## 17.2 关键约束与参数建议
推荐时长2-3秒越长越大且更容易失败
推荐分辨率上限:以锁屏显示为目标,通常不需要超过 1440p可按设备上限做自适应。
帧率:建议 30fps高帧率素材可降到30fps以提升兼容性。
编码:优先保留原编码;失败时降级到 H.264 + AAC更通用
HDR/Dolby Vision可提示用户可能存在兼容性问题必要时转 SDR。
# 18. 技术方案概述(研发实现导向)
## 18.1 模块划分
## 18.2 Live Photo 合成关键点(落地检查清单)
生成 assetIdentifierUUID字符串
图片侧:写入 Apple MakerNote/相关标识建议HEIC
视频侧:写入 QuickTime content identifier 元数据 + still-image-time timed metadata track。
保存侧PHAssetCreationRequest 同时写入 .photo 与 .pairedVideo。
校验侧:尝试从相册 asset 请求 PHLivePhoto 或检查 Live 标识是否出现。
## 18.3 权限与Info.plist
NSPhotoLibraryUsageDescription读取相册用于选择素材
NSPhotoLibraryAddUsageDescription保存生成的 Live Photo 到相册
NSCameraUsageDescription如后续加入拍摄入口
NSMicrophoneUsageDescription如保留或录制音频
# 19. 数据模型与本地存储
## 19.1 本地作品模型(示例)
WorkItemid、createTime、sourceVideoLocalID、exportParamsratio/trim/keyFrame、resultAssetLocalID相册asset.localIdentifier、statusprocessing/success/failed、errorCode、thumbnailPath。
缓存目录:/Library/Caches/LivePhotoBuilder/{workId}/中间mov、heic、日志
清理策略:成功后保留缩略图与参数;中间文件按设置或定期清理。
# 20. QA 测试计划MVP
## 20.1 测试矩阵
## 20.2 用例清单(样例)
从相册导入竖屏30fps H.264视频默认参数生成并保存确认相册显示Live标识且可播放。
导入60fps HEVC视频选择3秒裁剪生成成功若失败需自动降级转码并提示。
导入HDR视频提示可能兼容性风险选择转SDR后生成成功。
拒绝相册权限后再次进入导入流程,能看到解释页并可跳转系统设置。
在iOS 16设备壁纸引导页必须显示“系统限制/不支持动效”的文案与替代方案。
# 21. 合规、审核与版权
壁纸设置:不得宣称“一键自动设置系统壁纸”,应明确为“引导用户在系统中设置”。
权限说明:文案必须与实际用途一致,避免过度索权。
版权与内容:用户素材本地处理;若引入模板/素材库需提供版权声明与授权来源。
隐私政策:说明是否收集设备信息、崩溃日志与使用数据;提供关闭选项。
# 22. 文案与引导内容可直接放App里
## 22.1 关键页面文案(示例)
## 22.2 壁纸设置步骤卡片iOS 17+ 示例)
打开【设置】->【墙纸】。
点击【添加新墙纸】->【照片】。
选择你刚保存的 Live Photo带 Live 标识)。
点击屏幕左下角的【Live】按钮确保动效开启若可见
保存并设置到锁屏。唤醒锁屏时,按压/触摸或系统动作会触发动效(以系统表现为准)。
提示:若出现 “Motion not available”请检查是否开启低电量模式、是否选择了 Live Photo、素材是否过长/过大。
# 23. 开放问题需你决策后再让AI编码
商业化:免费+广告?一次性买断?订阅(模板/批量/高清导出)?
支持的最低系统版本iOS 14/15/16决定了API与用户覆盖
是否支持拍摄直接生成AVCapture Live Photo Capture作为后续版本
是否需要“跨设备同步”iCloud保存作品参数/模板?
是否需要“微信分享优化”检测相册资产是否仍为Live Photo形态
# 附录:表格汇总
## 表格 1
| 版本 | 日期 | 作者 | 说明 |
| --- | --- | --- | --- |
| V0.1 | 2025-12-13 | —— | 首版草案:功能范围、流程与需求拆解 |
| V0.2 | 2025-12-13 | —— | 补充信息架构、用户故事、详细需求表、技术方案、QA/合规与开放问题 |
| V1.0 | —— | —— | (预留) |
## 表格 2
| 页面 | 路由/模块名 | 主要能力 | MVP |
| --- | --- | --- | --- |
| 首页 | HomeView | 导入入口、最近作品 | 是 |
| 编辑 | EditorView | 裁剪/封面/预览/生成 | 是 |
| 生成进度 | ProcessingView | 进度、取消、失败重试 | 是 |
| 结果 | ResultView | 保存、再次编辑、引导入口 | 是 |
| 壁纸引导 | WallpaperGuideView | 版本检测、步骤卡片、FAQ | 是 |
| 作品库 | LibraryView | 作品管理 | 可选 |
| 设置 | SettingsView | 权限、清理、隐私、反馈 | 可选 |
## 表格 3
| 编号 | 用户故事 | 优先级 | 备注 |
| --- | --- | --- | --- |
| US-01 | 作为用户,我想从相册选择一段视频并生成 Live Photo这样我可以在相册长按播放。 | P0 | 核心闭环 |
| US-02 | 作为用户,我想裁剪比例以适配锁屏,避免设置壁纸后被系统裁掉关键主体。 | P0 | 需预设模板 |
| US-03 | 作为用户,我想选择封面帧(关键帧),这样锁屏静止画面最好看。 | P0 | 提供智能推荐 |
| US-04 | 作为用户,我想保存到相册并看到 Live 标识,确保微信等平台可识别为实况。 | P0 | 写入 pairedVideo |
| US-05 | 作为用户,我想得到清晰的设置壁纸引导,减少我找不到入口或不能动的困扰。 | P0 | 版本差异化 |
| US-06 | 作为用户,我希望失败时能看到原因和解决办法,而不是一句“失败”。 | P0 | 错误可行动 |
| US-07 | 作为高级用户,我想用‘照片+视频’合成,以照片为封面,视频为动效。 | P1 | 高级模式 |
| US-08 | 作为用户,我想一键清理缓存,避免占用太多存储。 | P1 | 设置页 |
## 表格 4
| 功能点 | 输入 | 处理/规则 | 输出/状态 | 优先级 |
| --- | --- | --- | --- | --- |
| 导入视频 | PHPicker选择视频 | 读取时长/分辨率/帧率/编码;提示不推荐参数 | 进入编辑页或提示不兼容 | P0 |
| 比例裁剪 | 视频画面 + 模板比例 | 支持拖拽/缩放;保持比例;实时预览 | 裁剪区域参数scale/offset | P0 |
| 时长裁剪 | 起止时间 | 默认3秒限制1.5-5秒对齐关键帧建议 | trimStart/trimEnd | P0 |
| 封面帧选择 | 时间点t | 提供静帧预览可智能推荐t* | keyFrameTime | P0 |
| 生成Live Photo | 视频+裁剪+关键帧(+可选封面照) | 重封装写入content identifier、still-image-time必要时转码 | photoURL + videoURL + 成功/失败 | P0 |
| 保存到相册 | photoURL+videoURL | PHAssetCreationRequest写入.photo与.pairedVideo | 相册新增Live Photo资产 | P0 |
| 引导设置壁纸 | 系统版本/设备类型 | 差异化步骤卡片提示限制与FAQ | 用户完成率提升 | P0 |
| 缓存清理 | 中间文件目录 | 仅清理App生成缓存不影响相册成品 | 释放存储空间 | P1 |
## 表格 5
| 模块 | 职责 | 关键框架/组件 |
| --- | --- | --- |
| MediaImport | PHPicker选择媒体、读取元信息 | PhotosUI, AVFoundation |
| EditorCore | 裁剪参数管理、预览渲染 | SwiftUI/UIView, AVPlayer |
| LivePhotoBuilder | 写入元数据、重封装/转码 | AVAssetReader/Writer, ImageIO |
| AlbumWriter | 写入系统相册 | Photos |
| Validation | 生成后校验是否为Live Photo | Photos, PHLivePhoto |
| GuideEngine | 版本检测、引导内容与FAQ | UIKit/SwiftUI |
| CacheManager | 中间文件管理与清理 | FileManager |
| Analytics | 埋点与漏斗 | 自研/第三方SDK |
## 表格 6
| 维度 | 覆盖建议 |
| --- | --- |
| 设备 | 至少2款iPhone不同分辨率+ 1款iPad |
| 系统 | iOS/iPadOS 17+(验证壁纸动效)+ iOS/iPadOS 16验证限制提示 |
| 素材 | H.264/HEVC30/60fpsSDR/HDR有/无音频;竖/横屏 |
| 时长 | 1.5秒、3秒、5秒边界 |
| 权限 | 首次拒绝/后续开启;相册只读/读写 |
## 表格 7
| 场景 | 标题 | 正文/提示 | 按钮 |
| --- | --- | --- | --- |
| 生成中 | 正在生成 Live Photo | 正在处理视频并写入实况信息,请不要退出。 | 取消 / 后台继续 |
| 生成成功 | 生成成功 | 已生成实况照片。保存到相册后可在“照片”里长按播放。 | 保存到相册 / 去设置壁纸 |
| 保存成功 | 已保存到相册 | 你可以在“Live Photos”相簿找到它。 | 去设置壁纸 / 再做一个 |
| 不支持提示 | 当前系统限制 | 此系统版本无法将 Live Photo 作为可播放的锁屏壁纸。你仍可保存为实况照片,或升级系统后再设置。 | 了解原因 / 返回 |
| 低电量提示 | 可能无法播放动效 | 低电量模式可能导致锁屏动效不可用。 | 知道了 |

View File

@@ -0,0 +1,167 @@
# 技术规格书Live Photo 制作与动态壁纸引导 AppV0.2-Tech
- 适用平台iPhone / iPadiOS / iPadOS
- 日期2025-12-13Asia/Manila
- 用途:用于 AI 编码 / 架构落地 / 研发验收
## 1. 范围与目标
本技术规格书覆盖媒体导入、编辑、Live Photo 合成与写入、校验、壁纸引导、缓存与日志、埋点。
- **目标**:在不使用私有 API 的前提下,生成系统相册可识别的 Live Photo 资产,并通过引导帮助用户设置锁屏动态壁纸。
- **非目标**:不直接替用户设置系统壁纸;不做云端上传/存储(默认本地)。
## 2. 总体架构
### 2.1 模块分层
| 层级 | 模块 | 职责 | 关键技术 |
|---|---|---|---|
| UI | Home/Editor/Processing/Result/Guide | 页面展示、交互、状态驱动 | SwiftUI推荐/UIKit |
| Domain | Workflows | 导入-编辑-生成-保存-引导的业务编排 | async/await, Combine(可选) |
| Service | MediaImport / Builder / AlbumWriter / Validation | 导入、合成、写相册、校验 | PhotosUI, AVFoundation, Photos, ImageIO |
| Infra | CacheManager / Logger / Analytics | 缓存、日志、埋点、错误封装 | FileManager, OSLog, 自研/第三方 |
### 2.2 目录结构建议Xcode
- App/入口、DI、AppState
- Features/Home, Features/Editor, Features/Processing, Features/Result, Features/Guide
- Domain/ModelsWorkItem、ExportParams、Capability
- Domain/UseCasesImportVideo、BuildLivePhoto、SaveToAlbum、Validate、GenerateGuideSteps
- Services/MediaImport, Services/LivePhotoBuilder, Services/AlbumWriter, Services/Validation
- Infra/Cache, Infra/Logging, Infra/Analytics, Infra/Errors
- Resources/引导图、FAQ 文案、模板配置 JSON
## 3. 核心数据模型
### 3.1 WorkItem作品
| 字段 | 类型 | 说明 |
|---|---|---|
| id | UUID | 作品唯一标识 |
| createdAt | Date | 创建时间 |
| sourceVideo | SourceRef | 来源引用PHAsset localIdentifier 或 fileURL |
| coverImage | SourceRef? | 可选封面图引用 |
| exportParams | ExportParams | 比例/裁剪/关键帧等参数 |
| status | enum | idle/editing/processing/success/failed |
| resultAssetId | String? | 写入相册后的 asset.localIdentifier |
| cacheDir | URL | 中间文件目录 |
| error | AppError? | 失败信息(含可行动建议) |
### 3.2 ExportParams导出参数
| 字段 | 类型 | 说明/规则 |
|---|---|---|
| aspectRatio | Preset/CGFloat | 模板iPhoneLock、Full、4:3… |
| cropTransform | scale + offset | 编辑页输出的裁剪参数 |
| trimStart | Double | 秒默认0 |
| trimEnd | Double | 秒默认3限制1.5~5 |
| keyFrameTime | Double | 秒在trim区间内 |
| audioPolicy | enum | keep/remove默认keep |
| codecPolicy | enum | passthrough / fallbackH264 |
| hdrPolicy | enum | keep / toneMapToSDR建议 |
| maxDimension | Int | 上限,例如 1920 或自适应 |
## 4. 状态机与工作流
### 4.1 作品状态机(建议)
- Idle未开始
- → Importing导入中→ Editing编辑中
- → Processing生成中Normalize → BuildPhoto → BuildVideo → Pairing → Validate
- → Success成功可保存/已保存)
- → Failed失败含可重试点与建议
处理阶段建议暴露为枚举(用于进度与日志):
- normalize归一化裁剪/转码策略确定)
- extractKeyFrame取关键帧/封面图)
- writePhotoMetadata写图片侧元数据
- writeVideoMetadata重封装并写视频侧元数据
- saveToAlbum写入相册
- validate校验 Live Photo 是否可识别)
### 4.2 并发与取消
- 生成任务使用 TaskSwift Concurrency实现可被用户取消取消时清理未写入相册的中间文件。
- 对 AVAssetReader/Writer 的写入任务采用串行队列/actor 封装,避免资源竞争。
- 避免主线程阻塞:任何转码/重封装/写文件都在后台队列或 Task 中执行。
## 5. Live Photo 合成实现规范
### 5.1 输入约束与预处理Normalize
- 时长:将 trim 区间限制在 1.5~5s默认 3s
- 分辨率:按 maxDimension 缩放;优先保证主体清晰而不是极限画质。
- 帧率:高帧率(>30fps可降至 30fps以提升兼容性与体积。
- HDR若检测到 HDR/Dolby Vision优先提示并建议转 SDRtone mapping避免部分系统/场景识别异常。
### 5.2 绑定标识Identifier
- 每次生成一个 assetIdentifierUUID string
- 图片与视频必须共享同一 identifier否则系统不会将其识别为 Live Photo 对。
### 5.3 图片侧Photo输出规范
- 优先输出 HEIC更现代且在较新系统上更稳定必要时支持 JPEG 作为降级。
- 写入 Apple MakerNote/相关字段以携带 identifier实现时以可验证的字段集合为准
- 封面图来源优先级:用户选封面照片 > 从视频 keyFrameTime 抽帧。
### 5.4 视频侧Paired Video输出规范
- 容器MOV。
- 写入 QuickTime metadatacontent identifier与照片一致
- 写入 timed metadata trackstill-image-time标记关键照片时刻
- 尽量 re-mux不重编码提升速度若写入失败或素材不兼容再降级到转码流程。
### 5.5 写入相册规范
- 使用 PHAssetCreationRequest 同时 addResource.photo 与 .pairedVideo。
- 在 performChanges 回调中记录成功/失败;成功后写入 resultAssetId失败需返回可行动错误。
### 5.6 校验Validation
- 策略 A保存后用 resultAssetId 取回 PHAsset尝试请求 Live Photo或检查其资源类型/子资源)。
- 策略 B若无法直接取 Live Photo 对象,则至少验证:相册中显示 Live 标识(人工/自动化截图对比可作为 QA 手段)。
- 校验失败要区分:合成失败 vs 写相册失败 vs 系统未识别。
## 6. 错误码与可行动建议Error Taxonomy
| 错误码 | 阶段 | 用户可见文案(标题) | 常见原因 | 建议动作App 提示) |
|---|---|---|---|---|
| LPB-001 | Import | 无法读取视频 | 权限不足/资源损坏 | 检查相册权限;换一个视频 |
| LPB-101 | Normalize | 素材参数不兼容 | HDR/超高分辨率/奇怪编码 | 开启“兼容模式”;降低分辨率/转 SDR |
| LPB-201 | Photo | 封面生成失败 | 抽帧失败/内存不足 | 缩短时长;降低分辨率;重试 |
| LPB-301 | Video | 视频处理失败 | 重封装/转码失败 | 切换到 H.264 兼容导出;关闭音频 |
| LPB-401 | Album | 保存到相册失败 | 无写入权限/相册忙 | 允许“添加到相册”;稍后重试 |
| LPB-501 | Validate | 系统未识别为实况 | 元数据不完整/系统限制 | 重新生成;尝试更短视频;升级系统(如需壁纸动效) |
| LPB-901 | Unknown | 发生未知错误 | 不可预期异常 | 反馈日志;重启 App重试 |
## 7. 缓存、日志与诊断
### 7.1 缓存目录结构
- `Caches/LivePhotoBuilder/{workId}/source.mov`(可选)
- `Caches/LivePhotoBuilder/{workId}/normalized.mov`(归一化输出)
- `Caches/LivePhotoBuilder/{workId}/photo.heic`(封面图)
- `Caches/LivePhotoBuilder/{workId}/paired.mov`(写入元数据后的成品视频)
- `Caches/LivePhotoBuilder/{workId}/builder.log`(阶段日志,供反馈)
### 7.2 清理策略
- 成功:保留 photo/paired 的短期缓存(例如 24h以支持“再次保存/分享”;随后自动清理。
- 失败:保留日志与关键中间文件(例如 24h方便用户一键反馈随后清理。
- 用户手动“清理缓存”:立即删除所有 workId 目录,但不影响系统相册成品。
## 8. 安全与隐私
- 默认全程本地处理,不上传用户素材。
- 日志默认不包含媒体内容本身;仅记录参数与错误码;用户反馈前需二次确认。
- 权限请求延迟到使用时,并提供用途说明与拒绝后的替代路径。
## 9. AI 编码提示(可直接复制给 AI
- 按模块创建 Swift Package 或 App 内分组;对 LivePhotoBuilder 使用 `actor` 管理状态与文件路径。
- UseCase 层提供 async 函数:`importVideo()`, `buildLivePhoto()`, `saveToAlbum()`, `validate()`.
- UI 层采用单一 source of truth`WorkItemViewState`;所有副作用通过 UseCase 注入。
- 为每个阶段输出结构化日志与 `progress (0~1)` + `stage enum`

27
docs_index.md Normal file
View File

@@ -0,0 +1,27 @@
# 文档索引
## 需求
- docs/PRD_LivePhoto_App_V0.2_2025-12-13.mdPRDV0.2定义目标、MVP范围、流程、验收与风险。
## 设计
- docs/TECHSPEC_LivePhoto_App_V0.2_2025-12-13.md技术规格V0.2),架构/模型/合成规范/错误码/缓存等。
- docs/IXSPEC_LivePhoto_App_V0.2_2025-12-13.md交互规格V0.2),页面交互/状态/埋点/iPad适配等。
## 测试
- (待补充)
## 用户手册
- (待补充)
## 知识库
- docs_index.md文档索引本文件
- PROJECT_STRUCTURE.md项目结构目录/文件结构变更记录)
## 任务进度
- TASK.md任务清单按阶段拆解

View File

@@ -0,0 +1,612 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 77;
objects = {
/* Begin PBXBuildFile section */
F1A6CF932EED993E00822C1B /* LivePhotoCore in Frameworks */ = {isa = PBXBuildFile; productRef = F1A6CF922EED993E00822C1B /* LivePhotoCore */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
F1A6CF5D2EED942800822C1B /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = F1A6CF472EED942500822C1B /* Project object */;
proxyType = 1;
remoteGlobalIDString = F1A6CF4E2EED942500822C1B;
remoteInfo = "to-live-photo";
};
F1A6CF672EED942800822C1B /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = F1A6CF472EED942500822C1B /* Project object */;
proxyType = 1;
remoteGlobalIDString = F1A6CF4E2EED942500822C1B;
remoteInfo = "to-live-photo";
};
/* End PBXContainerItemProxy section */
/* Begin PBXFileReference section */
F1A6CF4F2EED942500822C1B /* to-live-photo.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = "to-live-photo.app"; sourceTree = BUILT_PRODUCTS_DIR; };
F1A6CF5C2EED942800822C1B /* to-live-photoTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "to-live-photoTests.xctest"; sourceTree = BUILT_PRODUCTS_DIR; };
F1A6CF662EED942800822C1B /* to-live-photoUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = "to-live-photoUITests.xctest"; sourceTree = BUILT_PRODUCTS_DIR; };
/* End PBXFileReference section */
/* Begin PBXFileSystemSynchronizedRootGroup section */
F1A6CF512EED942500822C1B /* to-live-photo */ = {
isa = PBXFileSystemSynchronizedRootGroup;
path = "to-live-photo";
sourceTree = "<group>";
};
F1A6CF5F2EED942800822C1B /* to-live-photoTests */ = {
isa = PBXFileSystemSynchronizedRootGroup;
path = "to-live-photoTests";
sourceTree = "<group>";
};
F1A6CF692EED942800822C1B /* to-live-photoUITests */ = {
isa = PBXFileSystemSynchronizedRootGroup;
path = "to-live-photoUITests";
sourceTree = "<group>";
};
/* End PBXFileSystemSynchronizedRootGroup section */
/* Begin PBXFrameworksBuildPhase section */
F1A6CF4C2EED942500822C1B /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
F1A6CF932EED993E00822C1B /* LivePhotoCore in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
F1A6CF592EED942800822C1B /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
F1A6CF632EED942800822C1B /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
F1A6CF462EED942500822C1B = {
isa = PBXGroup;
children = (
F1A6CF512EED942500822C1B /* to-live-photo */,
F1A6CF5F2EED942800822C1B /* to-live-photoTests */,
F1A6CF692EED942800822C1B /* to-live-photoUITests */,
F1A6CF502EED942500822C1B /* Products */,
);
sourceTree = "<group>";
};
F1A6CF502EED942500822C1B /* Products */ = {
isa = PBXGroup;
children = (
F1A6CF4F2EED942500822C1B /* to-live-photo.app */,
F1A6CF5C2EED942800822C1B /* to-live-photoTests.xctest */,
F1A6CF662EED942800822C1B /* to-live-photoUITests.xctest */,
);
name = Products;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
F1A6CF4E2EED942500822C1B /* to-live-photo */ = {
isa = PBXNativeTarget;
buildConfigurationList = F1A6CF702EED942800822C1B /* Build configuration list for PBXNativeTarget "to-live-photo" */;
buildPhases = (
F1A6CF4B2EED942500822C1B /* Sources */,
F1A6CF4C2EED942500822C1B /* Frameworks */,
F1A6CF4D2EED942500822C1B /* Resources */,
);
buildRules = (
);
dependencies = (
);
fileSystemSynchronizedGroups = (
F1A6CF512EED942500822C1B /* to-live-photo */,
);
name = "to-live-photo";
packageProductDependencies = (
F1A6CF922EED993E00822C1B /* LivePhotoCore */,
);
productName = "to-live-photo";
productReference = F1A6CF4F2EED942500822C1B /* to-live-photo.app */;
productType = "com.apple.product-type.application";
};
F1A6CF5B2EED942800822C1B /* to-live-photoTests */ = {
isa = PBXNativeTarget;
buildConfigurationList = F1A6CF732EED942800822C1B /* Build configuration list for PBXNativeTarget "to-live-photoTests" */;
buildPhases = (
F1A6CF582EED942800822C1B /* Sources */,
F1A6CF592EED942800822C1B /* Frameworks */,
F1A6CF5A2EED942800822C1B /* Resources */,
);
buildRules = (
);
dependencies = (
F1A6CF5E2EED942800822C1B /* PBXTargetDependency */,
);
fileSystemSynchronizedGroups = (
F1A6CF5F2EED942800822C1B /* to-live-photoTests */,
);
name = "to-live-photoTests";
packageProductDependencies = (
);
productName = "to-live-photoTests";
productReference = F1A6CF5C2EED942800822C1B /* to-live-photoTests.xctest */;
productType = "com.apple.product-type.bundle.unit-test";
};
F1A6CF652EED942800822C1B /* to-live-photoUITests */ = {
isa = PBXNativeTarget;
buildConfigurationList = F1A6CF762EED942800822C1B /* Build configuration list for PBXNativeTarget "to-live-photoUITests" */;
buildPhases = (
F1A6CF622EED942800822C1B /* Sources */,
F1A6CF632EED942800822C1B /* Frameworks */,
F1A6CF642EED942800822C1B /* Resources */,
);
buildRules = (
);
dependencies = (
F1A6CF682EED942800822C1B /* PBXTargetDependency */,
);
fileSystemSynchronizedGroups = (
F1A6CF692EED942800822C1B /* to-live-photoUITests */,
);
name = "to-live-photoUITests";
packageProductDependencies = (
);
productName = "to-live-photoUITests";
productReference = F1A6CF662EED942800822C1B /* to-live-photoUITests.xctest */;
productType = "com.apple.product-type.bundle.ui-testing";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
F1A6CF472EED942500822C1B /* Project object */ = {
isa = PBXProject;
attributes = {
BuildIndependentTargetsInParallel = 1;
LastSwiftUpdateCheck = 2610;
LastUpgradeCheck = 2610;
TargetAttributes = {
F1A6CF4E2EED942500822C1B = {
CreatedOnToolsVersion = 26.1.1;
};
F1A6CF5B2EED942800822C1B = {
CreatedOnToolsVersion = 26.1.1;
TestTargetID = F1A6CF4E2EED942500822C1B;
};
F1A6CF652EED942800822C1B = {
CreatedOnToolsVersion = 26.1.1;
TestTargetID = F1A6CF4E2EED942500822C1B;
};
};
};
buildConfigurationList = F1A6CF4A2EED942500822C1B /* Build configuration list for PBXProject "to-live-photo" */;
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = F1A6CF462EED942500822C1B;
minimizedProjectReferenceProxies = 1;
packageReferences = (
F1A6CF912EED993E00822C1B /* XCLocalSwiftPackageReference "../../to-live-photo" */,
);
preferredProjectObjectVersion = 77;
productRefGroup = F1A6CF502EED942500822C1B /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
F1A6CF4E2EED942500822C1B /* to-live-photo */,
F1A6CF5B2EED942800822C1B /* to-live-photoTests */,
F1A6CF652EED942800822C1B /* to-live-photoUITests */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
F1A6CF4D2EED942500822C1B /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
F1A6CF5A2EED942800822C1B /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
F1A6CF642EED942800822C1B /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
F1A6CF4B2EED942500822C1B /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
F1A6CF582EED942800822C1B /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
F1A6CF622EED942800822C1B /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXTargetDependency section */
F1A6CF5E2EED942800822C1B /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
target = F1A6CF4E2EED942500822C1B /* to-live-photo */;
targetProxy = F1A6CF5D2EED942800822C1B /* PBXContainerItemProxy */;
};
F1A6CF682EED942800822C1B /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
target = F1A6CF4E2EED942500822C1B /* to-live-photo */;
targetProxy = F1A6CF672EED942800822C1B /* PBXContainerItemProxy */;
};
/* End PBXTargetDependency section */
/* Begin XCBuildConfiguration section */
F1A6CF6E2EED942800822C1B /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
DEVELOPMENT_TEAM = Y976PBNGA8;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 18.0;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = iphoneos;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
};
name = Debug;
};
F1A6CF6F2EED942800822C1B /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEVELOPMENT_TEAM = Y976PBNGA8;
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 18.0;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
SDKROOT = iphoneos;
SWIFT_COMPILATION_MODE = wholemodule;
VALIDATE_PRODUCT = YES;
};
name = Release;
};
F1A6CF712EED942800822C1B /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = Y976PBNGA8;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = "用于将生成的 Live Photo 保存到系统相册";
INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "用于读取并校验已保存的 Live Photo可选";
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
INFOPLIST_KEY_UILaunchScreen_Generation = YES;
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
IPHONEOS_DEPLOYMENT_TARGET = 18.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = "xu.to-live-photo";
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Debug;
};
F1A6CF722EED942800822C1B /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = Y976PBNGA8;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = YES;
INFOPLIST_KEY_NSPhotoLibraryAddUsageDescription = "用于将生成的 Live Photo 保存到系统相册";
INFOPLIST_KEY_NSPhotoLibraryUsageDescription = "用于读取并校验已保存的 Live Photo可选";
INFOPLIST_KEY_UIApplicationSceneManifest_Generation = YES;
INFOPLIST_KEY_UIApplicationSupportsIndirectInputEvents = YES;
INFOPLIST_KEY_UILaunchScreen_Generation = YES;
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPad = "UIInterfaceOrientationPortrait UIInterfaceOrientationPortraitUpsideDown UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
INFOPLIST_KEY_UISupportedInterfaceOrientations_iPhone = "UIInterfaceOrientationPortrait UIInterfaceOrientationLandscapeLeft UIInterfaceOrientationLandscapeRight";
IPHONEOS_DEPLOYMENT_TARGET = 18.0;
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = "xu.to-live-photo";
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
};
name = Release;
};
F1A6CF742EED942800822C1B /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
BUNDLE_LOADER = "$(TEST_HOST)";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = Y976PBNGA8;
GENERATE_INFOPLIST_FILE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 18.0;
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = "xu.to-live-photoTests";
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = NO;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/to-live-photo.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/to-live-photo";
};
name = Debug;
};
F1A6CF752EED942800822C1B /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
BUNDLE_LOADER = "$(TEST_HOST)";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = Y976PBNGA8;
GENERATE_INFOPLIST_FILE = YES;
IPHONEOS_DEPLOYMENT_TARGET = 18.0;
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = "xu.to-live-photoTests";
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = NO;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/to-live-photo.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/to-live-photo";
};
name = Release;
};
F1A6CF772EED942800822C1B /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = Y976PBNGA8;
GENERATE_INFOPLIST_FILE = YES;
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = "xu.to-live-photoUITests";
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = NO;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
TEST_TARGET_NAME = "to-live-photo";
};
name = Debug;
};
F1A6CF782EED942800822C1B /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 1;
DEVELOPMENT_TEAM = Y976PBNGA8;
GENERATE_INFOPLIST_FILE = YES;
MARKETING_VERSION = 1.0;
PRODUCT_BUNDLE_IDENTIFIER = "xu.to-live-photoUITests";
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = NO;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TARGETED_DEVICE_FAMILY = "1,2";
TEST_TARGET_NAME = "to-live-photo";
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
F1A6CF4A2EED942500822C1B /* Build configuration list for PBXProject "to-live-photo" */ = {
isa = XCConfigurationList;
buildConfigurations = (
F1A6CF6E2EED942800822C1B /* Debug */,
F1A6CF6F2EED942800822C1B /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
F1A6CF702EED942800822C1B /* Build configuration list for PBXNativeTarget "to-live-photo" */ = {
isa = XCConfigurationList;
buildConfigurations = (
F1A6CF712EED942800822C1B /* Debug */,
F1A6CF722EED942800822C1B /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
F1A6CF732EED942800822C1B /* Build configuration list for PBXNativeTarget "to-live-photoTests" */ = {
isa = XCConfigurationList;
buildConfigurations = (
F1A6CF742EED942800822C1B /* Debug */,
F1A6CF752EED942800822C1B /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
F1A6CF762EED942800822C1B /* Build configuration list for PBXNativeTarget "to-live-photoUITests" */ = {
isa = XCConfigurationList;
buildConfigurations = (
F1A6CF772EED942800822C1B /* Debug */,
F1A6CF782EED942800822C1B /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
/* Begin XCLocalSwiftPackageReference section */
F1A6CF912EED993E00822C1B /* XCLocalSwiftPackageReference "../../to-live-photo" */ = {
isa = XCLocalSwiftPackageReference;
relativePath = "../../to-live-photo";
};
/* End XCLocalSwiftPackageReference section */
/* Begin XCSwiftPackageProductDependency section */
F1A6CF922EED993E00822C1B /* LivePhotoCore */ = {
isa = XCSwiftPackageProductDependency;
productName = LivePhotoCore;
};
/* End XCSwiftPackageProductDependency section */
};
rootObject = F1A6CF472EED942500822C1B /* Project object */;
}

View File

@@ -0,0 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<Workspace
version = "1.0">
<FileRef
location = "self:">
</FileRef>
</Workspace>

View File

@@ -0,0 +1,86 @@
//
// AppState.swift
// to-live-photo
//
// App +
//
import SwiftUI
import PhotosUI
import LivePhotoCore
/// Typed destinations for the app's single NavigationStack
/// (pushed via `AppState.navigationPath`).
enum AppRoute: Hashable {
    case home
    /// Editor for a video file already copied into the app sandbox.
    case editor(videoURL: URL)
    /// Build/save/validate pipeline screen for the chosen video + parameters.
    case processing(videoURL: URL, exportParams: ExportParams)
    /// Result screen showing the outcome of the workflow.
    case result(workflowResult: LivePhotoWorkflowResult)
    /// Wallpaper how-to guide; `assetId` is the saved Photos asset identifier.
    case wallpaperGuide(assetId: String)
}
@MainActor
@Observable
/// App-wide observable state: owns navigation, the Live Photo workflow,
/// and the progress/error state of the current processing run.
final class AppState {
    /// Backing path for the root NavigationStack (see ContentView).
    var navigationPath = NavigationPath()
    /// Latest progress report from the running workflow, nil when idle.
    var processingProgress: LivePhotoBuildProgress?
    /// Last failure surfaced to the UI, nil when there is none.
    var processingError: AppError?
    /// True while `startProcessing` is in flight; used to lock navigation.
    var isProcessing = false
    // nil when LivePhotoWorkflow() threw during init (see below).
    private var workflow: LivePhotoWorkflow?

    init() {
        do {
            workflow = try LivePhotoWorkflow()
        } catch {
            // Failure is only logged here; startProcessing reports it to the
            // user as LPB-001 when the workflow is actually needed.
            print("Failed to init LivePhotoWorkflow: \(error)")
        }
    }

    /// Pushes a route onto the navigation stack.
    func navigateTo(_ route: AppRoute) {
        navigationPath.append(route)
    }

    /// Clears the stack back to the home screen.
    func popToRoot() {
        navigationPath = NavigationPath()
    }

    /// Pops one screen, if any.
    func pop() {
        if !navigationPath.isEmpty {
            navigationPath.removeLast()
        }
    }

    /// Runs the build → save → validate workflow for `videoURL`.
    /// Returns the result on success; on failure sets `processingError`
    /// and returns nil. Progress is mirrored into `processingProgress`.
    func startProcessing(videoURL: URL, exportParams: ExportParams) async -> LivePhotoWorkflowResult? {
        guard let workflow else {
            processingError = AppError(code: "LPB-001", message: "初始化失败", suggestedActions: ["重启 App"])
            return nil
        }
        isProcessing = true
        processingProgress = nil
        processingError = nil
        // Clear the busy flag on every exit path (success, AppError, unknown
        // error) instead of repeating the assignment before each return.
        defer { isProcessing = false }
        do {
            // NOTE(review): captured as a local to avoid an explicit `self`
            // capture in the @Sendable progress callback — confirm intent.
            let state = self
            let result = try await workflow.buildSaveValidate(
                sourceVideoURL: videoURL,
                coverImageURL: nil,
                exportParams: exportParams
            ) { progress in
                Task { @MainActor in
                    state.processingProgress = progress
                }
            }
            return result
        } catch let error as AppError {
            processingError = error
            return nil
        } catch {
            processingError = AppError(code: "LPB-901", message: "未知错误", underlyingErrorDescription: error.localizedDescription, suggestedActions: ["重试"])
            return nil
        }
    }
}

View File

@@ -0,0 +1,11 @@
{
"colors" : [
{
"idiom" : "universal"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,35 @@
{
"images" : [
{
"idiom" : "universal",
"platform" : "ios",
"size" : "1024x1024"
},
{
"appearances" : [
{
"appearance" : "luminosity",
"value" : "dark"
}
],
"idiom" : "universal",
"platform" : "ios",
"size" : "1024x1024"
},
{
"appearances" : [
{
"appearance" : "luminosity",
"value" : "tinted"
}
],
"idiom" : "universal",
"platform" : "ios",
"size" : "1024x1024"
}
],
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,6 @@
{
"info" : {
"author" : "xcode",
"version" : 1
}
}

View File

@@ -0,0 +1,40 @@
//
// ContentView.swift
// to-live-photo
//
// Created by empty on 2025/12/13.
//
import SwiftUI
import LivePhotoCore
/// Root view: hosts the app's only NavigationStack and maps `AppRoute`
/// values pushed onto `AppState.navigationPath` to destination screens.
struct ContentView: View {
    @Environment(AppState.self) private var appState

    var body: some View {
        // @Bindable is needed to derive $appState.navigationPath from the
        // @Observable model fetched out of the environment.
        @Bindable var appState = appState
        NavigationStack(path: $appState.navigationPath) {
            HomeView()
                .navigationDestination(for: AppRoute.self) { route in
                    switch route {
                    case .home:
                        HomeView()
                    case .editor(let videoURL):
                        EditorView(videoURL: videoURL)
                    case .processing(let videoURL, let exportParams):
                        ProcessingView(videoURL: videoURL, exportParams: exportParams)
                    case .result(let workflowResult):
                        ResultView(workflowResult: workflowResult)
                    case .wallpaperGuide(let assetId):
                        WallpaperGuideView(assetId: assetId)
                    }
                }
        }
    }
}

#Preview {
    ContentView()
        .environment(AppState())
}

View File

@@ -0,0 +1,127 @@
//
// EditorView.swift
// to-live-photo
//
// +
//
import SwiftUI
import AVKit
import LivePhotoCore
/// Lets the user preview the picked video and choose the clip length before
/// generation. Per the on-screen copy, Live Photo wallpapers are limited to
/// 1–1.5 s, so only `trimEnd` is adjustable and `trimStart` stays at 0.
struct EditorView: View {
    @Environment(AppState.self) private var appState
    /// Local (sandbox) file URL of the video picked on the home screen.
    let videoURL: URL

    @State private var player: AVPlayer?
    @State private var trimStart: Double = 0
    @State private var trimEnd: Double = 1.0
    /// Cover-frame timestamp, kept at the midpoint of the trimmed range.
    @State private var keyFrameTime: Double = 0.5
    /// Full source duration in seconds; 0 until the asset loads.
    @State private var videoDuration: Double = 0
    // Removed an unused `duration` @State property that was never read.

    var body: some View {
        VStack(spacing: 16) {
            if let player {
                VideoPlayer(player: player)
                    .aspectRatio(9/16, contentMode: .fit)
                    .clipShape(RoundedRectangle(cornerRadius: 16))
                    .padding(.horizontal)
            } else {
                // Placeholder shown while the asset duration is loading.
                RoundedRectangle(cornerRadius: 16)
                    .fill(Color.secondary.opacity(0.2))
                    .aspectRatio(9/16, contentMode: .fit)
                    .overlay {
                        ProgressView()
                    }
                    .padding(.horizontal)
            }
            VStack(alignment: .leading, spacing: 12) {
                HStack {
                    Text("时长")
                    Spacer()
                    Text(String(format: "%.1f 秒", trimEnd - trimStart))
                        .foregroundStyle(.secondary)
                }
                // Range is clamped to [1.0, min(1.5, duration)]; disabled for
                // clips shorter than 1 second.
                Slider(value: $trimEnd, in: 1.0...max(1.0, min(1.5, videoDuration))) { _ in
                    updateKeyFrameTime()
                }
                .disabled(videoDuration < 1.0)
                Text("Live Photo 壁纸时长限制1 ~ 1.5 秒")
                    .font(.caption)
                    .foregroundStyle(.secondary)
            }
            .padding(.horizontal, 24)
            Spacer()
            Button {
                startProcessing()
            } label: {
                HStack {
                    Image(systemName: "wand.and.stars")
                    Text("生成 Live Photo")
                }
                .font(.headline)
                .frame(maxWidth: .infinity)
                .padding()
                .background(Color.accentColor)
                .foregroundColor(.white)
                .clipShape(RoundedRectangle(cornerRadius: 14))
            }
            .padding(.horizontal, 24)
            .padding(.bottom)
        }
        .navigationTitle("编辑")
        .navigationBarTitleDisplayMode(.inline)
        .onAppear {
            loadVideo()
        }
        .onDisappear {
            player?.pause()
        }
    }

    /// Loads the asset duration asynchronously, then sets the default trim
    /// end (1 s or the whole clip, whichever is shorter), the key frame,
    /// and an autoplaying player — all on the main actor.
    private func loadVideo() {
        let asset = AVURLAsset(url: videoURL)
        Task {
            do {
                let durationCMTime = try await asset.load(.duration)
                let durationSeconds = durationCMTime.seconds
                await MainActor.run {
                    videoDuration = durationSeconds
                    trimEnd = min(1.0, durationSeconds) // default clip length: 1 s
                    keyFrameTime = trimEnd / 2
                    player = AVPlayer(url: videoURL)
                    player?.play()
                }
            } catch {
                print("Failed to load video duration: \(error)")
            }
        }
    }

    /// Keeps the key frame centered within the trimmed range.
    private func updateKeyFrameTime() {
        keyFrameTime = (trimStart + trimEnd) / 2
    }

    /// Packages the trim/key-frame parameters and pushes the processing screen.
    private func startProcessing() {
        let params = ExportParams(
            trimStart: trimStart,
            trimEnd: trimEnd,
            keyFrameTime: keyFrameTime
        )
        appState.navigateTo(.processing(videoURL: videoURL, exportParams: params))
    }
}

#Preview {
    NavigationStack {
        EditorView(videoURL: URL(fileURLWithPath: "/tmp/test.mov"))
    }
    .environment(AppState())
}

View File

@@ -0,0 +1,125 @@
//
// HomeView.swift
// to-live-photo
//
//
//
import SwiftUI
import PhotosUI
import AVKit
/// Landing screen: lets the user pick a video from the photo library and
/// hands the imported local file off to the editor.
struct HomeView: View {
    @Environment(AppState.self) private var appState
    @State private var selectedItem: PhotosPickerItem?
    // True while loadTransferable copies the picked video into the sandbox.
    @State private var isLoading = false
    @State private var errorMessage: String?

    var body: some View {
        VStack(spacing: 32) {
            Spacer()
            Image(systemName: "livephoto")
                .font(.system(size: 80))
                .foregroundStyle(.tint)
            Text("Live Photo 制作")
                .font(.largeTitle)
                .fontWeight(.bold)
            Text("选择一段视频,将其转换为 Live Photo\n然后设置为动态锁屏壁纸")
                .font(.body)
                .foregroundStyle(.secondary)
                .multilineTextAlignment(.center)
            Spacer()
            PhotosPicker(
                selection: $selectedItem,
                matching: .videos,
                photoLibrary: .shared()
            ) {
                HStack {
                    Image(systemName: "video.badge.plus")
                    Text("选择视频")
                }
                .font(.headline)
                .frame(maxWidth: .infinity)
                .padding()
                .background(Color.accentColor)
                .foregroundColor(.white)
                .clipShape(RoundedRectangle(cornerRadius: 14))
            }
            .disabled(isLoading)
            if isLoading {
                ProgressView("正在加载视频...")
            }
            if let errorMessage {
                Text(errorMessage)
                    .font(.caption)
                    .foregroundStyle(.red)
            }
            Spacer()
        }
        .padding(.horizontal, 24)
        .navigationTitle("首页")
        .navigationBarTitleDisplayMode(.inline)
        .onChange(of: selectedItem) { _, newValue in
            Task {
                await handleSelectedItem(newValue)
            }
        }
    }

    /// Imports the picked item via `VideoTransferable` (which copies it to a
    /// temp file) and navigates to the editor; shows an error message on
    /// failure or when the transferable comes back nil.
    private func handleSelectedItem(_ item: PhotosPickerItem?) async {
        guard let item else { return }
        isLoading = true
        errorMessage = nil
        do {
            guard let movie = try await item.loadTransferable(type: VideoTransferable.self) else {
                errorMessage = "无法加载视频"
                isLoading = false
                return
            }
            isLoading = false
            appState.navigateTo(.editor(videoURL: movie.url))
        } catch {
            errorMessage = "加载失败: \(error.localizedDescription)"
            isLoading = false
        }
    }
}
/// Transferable wrapper that receives a movie from PhotosPicker and copies
/// it into the app's temporary directory, returning the local URL.
struct VideoTransferable: Transferable {
    /// Local sandbox URL of the imported movie file.
    let url: URL

    static var transferRepresentation: some TransferRepresentation {
        FileRepresentation(contentType: .movie) { video in
            SentTransferredFile(video.url)
        } importing: { received in
            // Preserve the source container's extension (.mov/.mp4/...) rather
            // than hard-coding ".mov", so the file's type hint stays correct;
            // fall back to "mov" when the provider URL has no extension.
            let sourceExtension = received.file.pathExtension
            let ext = sourceExtension.isEmpty ? "mov" : sourceExtension
            let tempDir = FileManager.default.temporaryDirectory
            let filename = "import_\(UUID().uuidString).\(ext)"
            let destURL = tempDir.appendingPathComponent(filename)
            if FileManager.default.fileExists(atPath: destURL.path) {
                try FileManager.default.removeItem(at: destURL)
            }
            // Copy out of the provider's URL: `received.file` is only valid
            // for the duration of this closure.
            try FileManager.default.copyItem(at: received.file, to: destURL)
            return VideoTransferable(url: destURL)
        }
    }
}

#Preview {
    NavigationStack {
        HomeView()
    }
    .environment(AppState())
}

View File

@@ -0,0 +1,127 @@
//
// ProcessingView.swift
// to-live-photo
//
//
//
import SwiftUI
import LivePhotoCore
/// Progress screen for the build → save → validate workflow. Starts the
/// workflow exactly once on appearance and shows either live progress or
/// the error state with a retry button.
struct ProcessingView: View {
    @Environment(AppState.self) private var appState
    let videoURL: URL
    let exportParams: ExportParams
    /// Guards against the .task modifier re-firing on navigation churn.
    @State private var hasStarted = false

    var body: some View {
        VStack(spacing: 32) {
            Spacer()
            if appState.processingError != nil {
                errorContent
            } else {
                progressContent
            }
            Spacer()
        }
        .padding(.horizontal, 24)
        .navigationTitle("生成中")
        .navigationBarTitleDisplayMode(.inline)
        // Block the back button while work is in flight.
        .navigationBarBackButtonHidden(appState.isProcessing)
        .task {
            guard !hasStarted else { return }
            hasStarted = true
            await startProcessing()
        }
    }

    /// Spinner + stage label + percentage while the workflow runs.
    @ViewBuilder
    private var progressContent: some View {
        ProgressView()
            .scaleEffect(1.5)
        VStack(spacing: 8) {
            Text(stageText)
                .font(.headline)
            if let progress = appState.processingProgress {
                Text(String(format: "%.0f%%", progress.fraction * 100))
                    .font(.title2)
                    .fontWeight(.bold)
                    .foregroundStyle(.tint)
            }
        }
        Text("正在生成 Live Photo请稍候...")
            .font(.body)
            .foregroundStyle(.secondary)
    }

    /// Failure state: error message, suggested actions, and a retry button
    /// that pops back to the editor.
    @ViewBuilder
    private var errorContent: some View {
        Image(systemName: "exclamationmark.triangle.fill")
            .font(.system(size: 60))
            .foregroundStyle(.red)
        if let error = appState.processingError {
            VStack(spacing: 8) {
                Text("生成失败")
                    .font(.headline)
                Text(error.message)
                    .font(.body)
                    .foregroundStyle(.secondary)
                if !error.suggestedActions.isEmpty {
                    // Fix: join with a list separator — the empty separator
                    // ran all suggestions together into one unreadable string.
                    Text("建议:\(error.suggestedActions.joined(separator: "、"))")
                        .font(.caption)
                        .foregroundStyle(.secondary)
                }
            }
        }
        Button {
            appState.pop()
        } label: {
            Text("返回重试")
                .font(.headline)
                .frame(maxWidth: .infinity)
                .padding()
                .background(Color.accentColor)
                .foregroundColor(.white)
                .clipShape(RoundedRectangle(cornerRadius: 14))
        }
    }

    /// Human-readable label for the current workflow stage.
    private var stageText: String {
        guard let stage = appState.processingProgress?.stage else {
            return "准备中..."
        }
        switch stage {
        case .normalize: return "预处理视频..."
        case .extractKeyFrame: return "提取封面帧..."
        case .writePhotoMetadata: return "写入图片元数据..."
        case .writeVideoMetadata: return "写入视频元数据..."
        case .saveToAlbum: return "保存到相册..."
        case .validate: return "校验 Live Photo..."
        }
    }

    /// Runs the workflow; on success replaces this screen with the result
    /// view (pop then push) so "back" skips the processing screen.
    private func startProcessing() async {
        if let result = await appState.startProcessing(videoURL: videoURL, exportParams: exportParams) {
            appState.pop()
            appState.navigateTo(.result(workflowResult: result))
        }
    }
}

#Preview {
    NavigationStack {
        ProcessingView(videoURL: URL(fileURLWithPath: "/tmp/test.mov"), exportParams: ExportParams())
    }
    .environment(AppState())
}

View File

@@ -0,0 +1,145 @@
//
// ResultView.swift
// to-live-photo
//
// /
//
import SwiftUI
import LivePhotoCore
/// Final screen: shows save/validation status for the workflow result and
/// offers follow-up actions (wallpaper guide, debug export, back to home).
struct ResultView: View {
    @Environment(AppState.self) private var appState
    @State private var showShareSheet = false
    @State private var shareItems: [Any] = []
    /// Outcome of the build → save → validate workflow.
    let workflowResult: LivePhotoWorkflowResult

    var body: some View {
        VStack(spacing: 32) {
            Spacer()
            Image(systemName: isSuccess ? "checkmark.circle.fill" : "xmark.circle.fill")
                .font(.system(size: 80))
                .foregroundStyle(isSuccess ? .green : .red)
            VStack(spacing: 8) {
                Text(isSuccess ? "Live Photo 已保存" : "保存失败")
                    .font(.title)
                    .fontWeight(.bold)
                if isSuccess {
                    Text("已保存到系统相册")
                        .font(.body)
                        .foregroundStyle(.secondary)
                    if workflowResult.resourceValidationOK {
                        Label("资源校验通过", systemImage: "checkmark.seal.fill")
                            .font(.caption)
                            .foregroundStyle(.green)
                    }
                    if let isLive = workflowResult.libraryAssetIsLivePhoto, isLive {
                        Label("相册识别为 Live Photo", systemImage: "livephoto")
                            .font(.caption)
                            .foregroundStyle(.green)
                    }
                }
            }
            Spacer()
            VStack(spacing: 12) {
                if isSuccess {
                    Button {
                        appState.navigateTo(.wallpaperGuide(assetId: workflowResult.savedAssetId))
                    } label: {
                        HStack {
                            Image(systemName: "arrow.right.circle")
                            Text("设置为动态壁纸")
                        }
                        .font(.headline)
                        .frame(maxWidth: .infinity)
                        .padding()
                        .background(Color.accentColor)
                        .foregroundColor(.white)
                        .clipShape(RoundedRectangle(cornerRadius: 14))
                    }
                    // Debug aid: share the paired image/video files directly.
                    Button {
                        prepareShareItems()
                        showShareSheet = true
                    } label: {
                        HStack {
                            Image(systemName: "square.and.arrow.up")
                            Text("导出调试文件")
                        }
                        .font(.headline)
                        .frame(maxWidth: .infinity)
                        .padding()
                        .background(Color.orange.opacity(0.8))
                        .foregroundColor(.white)
                        .clipShape(RoundedRectangle(cornerRadius: 14))
                    }
                }
                Button {
                    appState.popToRoot()
                } label: {
                    Text(isSuccess ? "继续制作" : "返回首页")
                        .font(.headline)
                        .frame(maxWidth: .infinity)
                        .padding()
                        .background(Color.secondary.opacity(0.2))
                        .foregroundColor(.primary)
                        .clipShape(RoundedRectangle(cornerRadius: 14))
                }
            }
            .padding(.horizontal, 24)
            .padding(.bottom)
        }
        .navigationTitle("完成")
        .navigationBarTitleDisplayMode(.inline)
        .navigationBarBackButtonHidden(true)
        .sheet(isPresented: $showShareSheet) {
            ShareSheet(activityItems: shareItems)
        }
    }

    /// Success is inferred from a non-empty saved Photos asset identifier.
    private var isSuccess: Bool {
        !workflowResult.savedAssetId.isEmpty
    }

    /// Collects the paired HEIC/MOV file URLs for the share sheet.
    private func prepareShareItems() {
        shareItems = [
            workflowResult.pairedImageURL,
            workflowResult.pairedVideoURL
        ]
    }
}
/// UIKit bridge to UIActivityViewController for sharing arbitrary items.
struct ShareSheet: UIViewControllerRepresentable {
    let activityItems: [Any]

    func makeUIViewController(context: Context) -> UIActivityViewController {
        UIActivityViewController(activityItems: activityItems, applicationActivities: nil)
    }

    // Items are fixed at creation; nothing to update.
    func updateUIViewController(_ uiViewController: UIActivityViewController, context: Context) {}
}

#Preview {
    NavigationStack {
        ResultView(workflowResult: LivePhotoWorkflowResult(
            workId: UUID(),
            assetIdentifier: "test",
            pairedImageURL: URL(fileURLWithPath: "/tmp/photo.jpg"),
            pairedVideoURL: URL(fileURLWithPath: "/tmp/video.mov"),
            savedAssetId: "ABC123",
            resourceValidationOK: true,
            libraryAssetIsLivePhoto: true
        ))
    }
    .environment(AppState())
}

View File

@@ -0,0 +1,324 @@
//
// WallpaperGuideView.swift
// to-live-photo
//
//
//
import SwiftUI
/// Post-save guide: walks the user through setting the saved Live Photo as
/// a lock-screen wallpaper, with iOS-version-aware steps and an FAQ.
struct WallpaperGuideView: View {
    @Environment(AppState.self) private var appState
    // NOTE(review): assetId is never read in this view's body — presumably
    // reserved for a future deep link into Photos; confirm before removing.
    let assetId: String

    /// Major iOS version parsed from the system version string; defaults to
    /// 16 if parsing fails. Drives the version-specific copy below.
    private var iosVersion: Int {
        Int(UIDevice.current.systemVersion.split(separator: ".").first ?? "16") ?? 16
    }

    var body: some View {
        ScrollView {
            VStack(alignment: .leading, spacing: 24) {
                headerSection
                quickActionSection
                stepsSection
                tipsSection
                doneButton
            }
            .padding(.horizontal, 20)
            .padding(.vertical, 16)
        }
        .navigationTitle("设置动态壁纸")
        .navigationBarTitleDisplayMode(.inline)
    }

    /// Title area with a supported/unsupported badge for the current iOS.
    @ViewBuilder
    private var headerSection: some View {
        VStack(alignment: .center, spacing: 12) {
            Image(systemName: "livephoto")
                .font(.system(size: 50))
                .foregroundStyle(.tint)
                .padding(.bottom, 4)
            Text("Live Photo 已保存到相册")
                .font(.title3)
                .fontWeight(.bold)
            if iosVersion >= 17 {
                HStack(spacing: 6) {
                    Image(systemName: "checkmark.circle.fill")
                        .foregroundStyle(.green)
                    Text("你的设备支持锁屏动态壁纸")
                        .foregroundStyle(.secondary)
                }
                .font(.subheadline)
            } else {
                HStack(spacing: 6) {
                    Image(systemName: "exclamationmark.triangle.fill")
                        .foregroundStyle(.orange)
                    Text("iOS 17+ 才支持锁屏动态效果")
                        .foregroundStyle(.secondary)
                }
                .font(.subheadline)
            }
        }
        .frame(maxWidth: .infinity)
        .padding(.vertical, 8)
    }

    /// Prominent button opening the Photos app via its URL scheme.
    @ViewBuilder
    private var quickActionSection: some View {
        Button {
            // "photos-redirect://" opens Photos; it cannot jump to a specific
            // asset, so the steps below tell the user what to look for.
            if let url = URL(string: "photos-redirect://") {
                UIApplication.shared.open(url)
            }
        } label: {
            HStack(spacing: 12) {
                Image(systemName: "photo.on.rectangle.angled")
                    .font(.title2)
                VStack(alignment: .leading, spacing: 2) {
                    Text("打开照片 App")
                        .font(.headline)
                    Text("找到刚保存的 Live Photo")
                        .font(.caption)
                        .foregroundStyle(.white.opacity(0.8))
                }
                Spacer()
                Image(systemName: "arrow.up.right.square")
                    .font(.title3)
            }
            .padding(16)
            .frame(maxWidth: .infinity)
            .background(
                LinearGradient(
                    colors: [Color.blue, Color.purple],
                    startPoint: .topLeading,
                    endPoint: .bottomTrailing
                )
            )
            .foregroundColor(.white)
            .clipShape(RoundedRectangle(cornerRadius: 14))
        }
    }

    /// Numbered step list; step 4 differs for iOS 17+ vs iOS 16.
    @ViewBuilder
    private var stepsSection: some View {
        VStack(alignment: .leading, spacing: 16) {
            HStack {
                Image(systemName: "list.number")
                    .foregroundStyle(.tint)
                Text("设置壁纸步骤")
                    .font(.headline)
            }
            VStack(spacing: 0) {
                StepRow(
                    number: 1,
                    icon: "photo.fill",
                    title: "在照片中找到 Live Photo",
                    description: "照片左上角会显示【LIVE】标识长按可预览动画效果",
                    isLast: false
                )
                StepRow(
                    number: 2,
                    icon: "square.and.arrow.up",
                    title: "点击分享按钮",
                    description: "位于屏幕左下角,然后选择【用作壁纸】选项",
                    isLast: false
                )
                StepRow(
                    number: 3,
                    icon: "crop",
                    title: "调整照片位置",
                    description: "双指缩放和拖动来调整照片在壁纸中的位置",
                    isLast: false
                )
                if iosVersion >= 17 {
                    StepRow(
                        number: 4,
                        icon: "livephoto",
                        title: "确认动态效果已开启",
                        description: "点击左下角的 Live Photo 图标,图标高亮表示动态效果已开启",
                        isLast: false
                    )
                } else {
                    StepRow(
                        number: 4,
                        icon: "info.circle",
                        title: "了解系统限制",
                        description: "iOS 16 锁屏不支持动态效果,仅主屏幕长按可播放",
                        isLast: false
                    )
                }
                StepRow(
                    number: 5,
                    icon: "checkmark.circle",
                    title: "完成设置",
                    description: "点击右上角【完成】,选择【设定锁定屏幕】或【同时设定】",
                    isLast: true
                )
            }
            .padding(12)
            .background(Color.secondary.opacity(0.1))
            .clipShape(RoundedRectangle(cornerRadius: 12))
        }
    }

    /// FAQ list; an extra entry appears on pre-iOS-17 devices.
    @ViewBuilder
    private var tipsSection: some View {
        VStack(alignment: .leading, spacing: 12) {
            HStack {
                Image(systemName: "questionmark.circle")
                    .foregroundStyle(.tint)
                Text("常见问题")
                    .font(.headline)
            }
            FAQRow(
                icon: "magnifyingglass",
                question: "找不到刚保存的 Live Photo",
                answer: "打开照片 App → 相簿 → 媒体类型 → 实况照片,或直接搜索【实况】"
            )
            FAQRow(
                icon: "hand.tap",
                question: "设置后壁纸不会动?",
                answer: "锁屏状态下长按屏幕 1-2 秒可触发动画播放(需 iOS 17+"
            )
            FAQRow(
                icon: "battery.25",
                question: "动画效果突然失效?",
                answer: "检查是否开启了【低电量模式】,该模式下系统会自动禁用动态效果以省电"
            )
            FAQRow(
                icon: "exclamationmark.circle",
                question: "Live Photo 图标是灰色/划线?",
                answer: "iOS 对壁纸有额外限制,部分 Live Photo 可能不支持作为动态壁纸。建议使用 2-3 秒时长、竖屏比例的视频重新生成"
            )
            if iosVersion < 17 {
                FAQRow(
                    icon: "iphone.gen3",
                    question: "为什么我的锁屏没有动画?",
                    answer: "iOS 16 系统限制:锁屏壁纸不支持 Live Photo 动画,建议升级到 iOS 17+"
                )
            }
        }
    }

    /// Bottom button popping the whole flow back to the home screen.
    @ViewBuilder
    private var doneButton: some View {
        VStack(spacing: 12) {
            Button {
                appState.popToRoot()
            } label: {
                Text("完成,返回首页")
                    .font(.headline)
                    .frame(maxWidth: .infinity)
                    .padding()
                    .background(Color.accentColor)
                    .foregroundColor(.white)
                    .clipShape(RoundedRectangle(cornerRadius: 14))
            }
            Text("你可以随时制作新的 Live Photo")
                .font(.caption)
                .foregroundStyle(.secondary)
        }
        .padding(.top, 8)
    }
}
/// One numbered guide step: number badge plus a vertical connector on the
/// left, icon/title/description on the right. `isLast` hides the connector
/// and the bottom padding.
struct StepRow: View {
    let number: Int
    let icon: String
    let title: String
    let description: String
    let isLast: Bool

    var body: some View {
        HStack(alignment: .top, spacing: 14) {
            VStack(spacing: 0) {
                ZStack {
                    Circle()
                        .fill(Color.accentColor)
                        .frame(width: 32, height: 32)
                    Text("\(number)")
                        .font(.subheadline)
                        .fontWeight(.bold)
                        .foregroundColor(.white)
                }
                if !isLast {
                    // Connector line joining this badge to the next step's.
                    Rectangle()
                        .fill(Color.accentColor.opacity(0.3))
                        .frame(width: 2)
                        .frame(maxHeight: .infinity)
                }
            }
            VStack(alignment: .leading, spacing: 6) {
                HStack(spacing: 8) {
                    Image(systemName: icon)
                        .font(.subheadline)
                        .foregroundStyle(.tint)
                    Text(title)
                        .font(.subheadline)
                        .fontWeight(.semibold)
                }
                Text(description)
                    .font(.caption)
                    .foregroundStyle(.secondary)
                    .fixedSize(horizontal: false, vertical: true)
            }
            .padding(.bottom, isLast ? 0 : 16)
        }
    }
}
/// One FAQ entry: leading icon, bold question, secondary answer text,
/// rendered on a rounded card background.
struct FAQRow: View {
    let icon: String
    let question: String
    let answer: String

    var body: some View {
        HStack(alignment: .top, spacing: 12) {
            Image(systemName: icon)
                .font(.title3)
                .foregroundStyle(.tint)
                .frame(width: 24)
            VStack(alignment: .leading, spacing: 4) {
                Text(question)
                    .font(.subheadline)
                    .fontWeight(.medium)
                Text(answer)
                    .font(.caption)
                    .foregroundStyle(.secondary)
                    .fixedSize(horizontal: false, vertical: true)
            }
        }
        .padding(14)
        .frame(maxWidth: .infinity, alignment: .leading)
        .background(Color.secondary.opacity(0.08))
        .clipShape(RoundedRectangle(cornerRadius: 12))
    }
}

#Preview {
    NavigationStack {
        WallpaperGuideView(assetId: "ABC123")
    }
    .environment(AppState())
}

View File

@@ -0,0 +1,20 @@
//
// to_live_photoApp.swift
// to-live-photo
//
// Created by empty on 2025/12/13.
//
import SwiftUI
@main
/// App entry point: owns the single AppState and injects it into the
/// environment for the whole view hierarchy.
struct to_live_photoApp: App {
    @State private var appState = AppState()

    var body: some Scene {
        WindowGroup {
            ContentView()
                .environment(appState)
        }
    }
}

View File

@@ -0,0 +1,17 @@
//
// to_live_photoTests.swift
// to-live-photoTests
//
// Created by empty on 2025/12/13.
//
import Testing
@testable import to_live_photo
/// Unit test suite (Swift Testing framework). Currently only the Xcode
/// template placeholder with no assertions.
struct to_live_photoTests {
    @Test func example() async throws {
        // Write your test here and use APIs like `#expect(...)` to check expected conditions.
    }
}

View File

@@ -0,0 +1,41 @@
//
// to_live_photoUITests.swift
// to-live-photoUITests
//
// Created by empty on 2025/12/13.
//
import XCTest
/// UI smoke tests generated by the Xcode template: a bare launch test and
/// a launch-time performance measurement.
final class to_live_photoUITests: XCTestCase {
    override func setUpWithError() throws {
        // Put setup code here. This method is called before the invocation of each test method in the class.
        // In UI tests it is usually best to stop immediately when a failure occurs.
        continueAfterFailure = false
        // In UI tests it's important to set the initial state - such as interface orientation - required for your tests before they run. The setUp method is a good place to do this.
    }

    override func tearDownWithError() throws {
        // Put teardown code here. This method is called after the invocation of each test method in the class.
    }

    @MainActor
    func testExample() throws {
        // UI tests must launch the application that they test.
        let app = XCUIApplication()
        app.launch()
        // Use XCTAssert and related functions to verify your tests produce the correct results.
    }

    @MainActor
    func testLaunchPerformance() throws {
        // This measures how long it takes to launch your application.
        measure(metrics: [XCTApplicationLaunchMetric()]) {
            XCUIApplication().launch()
        }
    }
}

View File

@@ -0,0 +1,33 @@
//
// to_live_photoUITestsLaunchTests.swift
// to-live-photoUITests
//
// Created by empty on 2025/12/13.
//
import XCTest
/// Template launch test: captures a "Launch Screen" screenshot attachment
/// for every UI configuration of the target app.
final class to_live_photoUITestsLaunchTests: XCTestCase {
    // Re-run once per target-app UI configuration (e.g. light/dark mode).
    override class var runsForEachTargetApplicationUIConfiguration: Bool {
        true
    }

    override func setUpWithError() throws {
        continueAfterFailure = false
    }

    @MainActor
    func testLaunch() throws {
        let app = XCUIApplication()
        app.launch()
        // Insert steps here to perform after app launch but before taking a screenshot,
        // such as logging into a test account or navigating somewhere in the app
        let attachment = XCTAttachment(screenshot: app.screenshot())
        attachment.name = "Launch Screen"
        attachment.lifetime = .keepAlways
        add(attachment)
    }
}