feat(deeplink): forward agent links via bridge

This commit is contained in:
Peter Steinberger
2025-12-13 01:18:48 +00:00
parent a56daa6c06
commit 378e5acd23
6 changed files with 177 additions and 65 deletions

View File

@@ -10,6 +10,9 @@ struct ClawdisNodeApp: App {
RootCanvas()
.environmentObject(self.appModel)
.environmentObject(self.appModel.voiceWake)
.onOpenURL { url in
Task { await self.appModel.handleDeepLink(url: url) }
}
.onChange(of: self.scenePhase) { _, newValue in
self.appModel.setScenePhase(newValue)
}

View File

@@ -114,6 +114,56 @@ final class NodeAppModel: ObservableObject {
try await self.bridge.sendEvent(event: "voice.transcript", payloadJSON: json)
}
/// Entry point for URLs delivered to the app (e.g. via `onOpenURL`).
///
/// URLs that `DeepLinkParser` does not recognize are ignored silently;
/// recognized routes are dispatched to their dedicated handlers.
func handleDeepLink(url: URL) async {
    guard let route = DeepLinkParser.parse(url) else { return }
    switch route {
    case let .agent(agentLink):
        await self.handleAgentDeepLink(agentLink, originalURL: url)
    }
}
/// Validates an agent deep link and forwards it over the bridge.
///
/// Blank messages are dropped silently. Oversized payloads, a
/// disconnected bridge, and send failures are surfaced to the user
/// through `screen.errorText`; a successful send clears it.
private func handleAgentDeepLink(_ link: AgentDeepLink, originalURL: URL) async {
    let trimmed = link.message.trimmingCharacters(in: .whitespacesAndNewlines)
    guard !trimmed.isEmpty else { return }

    // Hard cap so a malicious/accidental huge URL can't flood the bridge.
    guard trimmed.count <= 20000 else {
        self.screen.errorText = "Deep link too large (message exceeds 20,000 characters)."
        return
    }

    guard await self.isBridgeConnected() else {
        self.screen.errorText = "Bridge not connected (cannot forward deep link)."
        return
    }

    do {
        try await self.sendAgentRequest(link: link)
        self.screen.errorText = nil
    } catch {
        self.screen.errorText = "Agent request failed: \(error.localizedDescription)"
    }
}
/// Encodes the deep link as JSON and emits it as an `agent.request`
/// bridge event.
///
/// - Throws: An `NSError` (domain `"DeepLink"`, code 1) when the message
///   is blank after trimming, or whatever `JSONEncoder` / the bridge
///   transport throws.
private func sendAgentRequest(link: AgentDeepLink) async throws {
    let trimmed = link.message.trimmingCharacters(in: .whitespacesAndNewlines)
    guard !trimmed.isEmpty else {
        throw NSError(domain: "DeepLink", code: 1, userInfo: [
            NSLocalizedDescriptionKey: "invalid agent message",
        ])
    }
    // iOS bridge forwards to the gateway; no local auth prompts here.
    // (Key-based unattended auth is handled on macOS for clawdis:// links.)
    let encoded = try JSONEncoder().encode(link)
    let payload = String(decoding: encoded, as: UTF8.self)
    try await self.bridge.sendEvent(event: "agent.request", payloadJSON: payload)
}
/// Reports whether the bridge transport is currently in its `.connected` state.
private func isBridgeConnected() async -> Bool {
    switch await self.bridge.state {
    case .connected:
        return true
    default:
        return false
    }
}
private func handleInvoke(_ req: BridgeInvokeRequest) async -> BridgeInvokeResponse {
if req.command.hasPrefix("screen."), self.isBackgrounded {
return BridgeInvokeResponse(

View File

@@ -123,17 +123,24 @@ final class VoiceWakeManager: NSObject, ObservableObject {
self.audioEngine.prepare()
try self.audioEngine.start()
self.recognitionTask = self.speechRecognizer?
.recognitionTask(with: request) { [weak manager = self] result, error in
Task { @MainActor in
manager?.handleRecognitionCallback(result: result, error: error)
}
}
let handler = self.makeRecognitionResultHandler()
self.recognitionTask = self.speechRecognizer?.recognitionTask(with: request, resultHandler: handler)
}
private func handleRecognitionCallback(result: SFSpeechRecognitionResult?, error: Error?) {
if let error {
self.statusText = "Recognizer error: \(error.localizedDescription)"
/// Builds the `@Sendable` result callback handed to
/// `SFSpeechRecognizer.recognitionTask(with:resultHandler:)`.
///
/// The closure extracts plain value data (transcript string, error
/// description) before hopping to the main actor, presumably so the
/// non-Sendable `SFSpeechRecognitionResult` never crosses an isolation
/// boundary — TODO confirm against the project's Swift 6 settings.
private nonisolated func makeRecognitionResultHandler() -> @Sendable (SFSpeechRecognitionResult?, Error?) -> Void {
    { [weak self] result, error in
        // Capture only Sendable String? values for the actor hop below.
        let transcript = result?.bestTranscription.formattedString
        let errorText = error?.localizedDescription
        Task { @MainActor in
            // `self` may have been deallocated while recognition was in flight.
            self?.handleRecognitionCallback(transcript: transcript, errorText: errorText)
        }
    }
}
private func handleRecognitionCallback(transcript: String?, errorText: String?) {
if let errorText {
self.statusText = "Recognizer error: \(errorText)"
self.isListening = false
let shouldRestart = self.isEnabled
@@ -146,8 +153,7 @@ final class VoiceWakeManager: NSObject, ObservableObject {
return
}
guard let result else { return }
let transcript = result.bestTranscription.formattedString
guard let transcript else { return }
guard let cmd = self.extractCommand(from: transcript) else { return }
if cmd == self.lastDispatched { return }
@@ -189,17 +195,21 @@ final class VoiceWakeManager: NSObject, ObservableObject {
}
/// Asynchronously requests microphone-record permission from the system.
///
/// NOTE(review): the diff extraction interleaved the pre-change lines
/// (`withCheckedContinuation(isolation: nil)`, direct `resume`) with the
/// post-change ones; this is the reconstructed post-change version.
///
/// The continuation is resumed exactly once, from a main-actor task —
/// continuations may legally be resumed from any context, so the hop is
/// presumably for Swift 6 sendability of the callback; confirm.
///
/// - Returns: `true` when the user granted record permission.
private nonisolated static func requestMicrophonePermission() async -> Bool {
    await withCheckedContinuation { cont in
        AVAudioApplication.requestRecordPermission { ok in
            Task { @MainActor in
                cont.resume(returning: ok)
            }
        }
    }
}
/// Asynchronously requests speech-recognition authorization from the system.
///
/// NOTE(review): the diff extraction interleaved the pre-change lines
/// (`withCheckedContinuation(isolation: nil)`, direct `resume`) with the
/// post-change ones; this is the reconstructed post-change version.
///
/// The continuation is resumed exactly once, from a main-actor task,
/// mirroring `requestMicrophonePermission()`.
///
/// - Returns: `true` only when authorization status is `.authorized`
///   (denied, restricted, and not-determined all map to `false`).
private nonisolated static func requestSpeechPermission() async -> Bool {
    await withCheckedContinuation { cont in
        SFSpeechRecognizer.requestAuthorization { status in
            Task { @MainActor in
                cont.resume(returning: status == .authorized)
            }
        }
    }
}