fix: refactor TUI stream assembly (#1202, thanks @aaronveklabs)
Co-authored-by: Aaron <aaron@vektor-labs.com>
@@ -4,8 +4,13 @@ Docs: https://docs.clawd.bot
## 2026.1.20-1

### Changes

- Repo: remove the Peekaboo git submodule now that the SPM release is used.

### Fixes

- Web search: infer Perplexity base URL from API key source (direct vs OpenRouter).
- TUI: keep thinking blocks ordered before content during streaming and isolate per-run assembly. (#1202) — thanks @aaronveklabs.
- CLI: avoid duplicating --profile/--dev flags when formatting commands.

## 2026.1.19-3
@@ -1,8 +1,8 @@
import { normalizeProfileName } from "./profile-utils.js";

const CLI_PREFIX_RE = /^(?:pnpm|npm|bunx|npx)\s+clawdbot\b|^clawdbot\b/;
const PROFILE_FLAG_RE = /(^|\s)--profile(\s|$)/;
const DEV_FLAG_RE = /(^|\s)--dev(\s|$)/;
const PROFILE_FLAG_RE = /(?:^|\s)--profile(?:\s|=|$)/;
const DEV_FLAG_RE = /(?:^|\s)--dev(?:\s|$)/;

export function formatCliCommand(
  command: string,
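Below, a small sketch (not part of the diff) of what the widened flag regexes change: the `--profile=name` form now counts as an existing flag, so a formatter that only appends missing flags no longer duplicates it. The append-if-missing wrapper is a hypothetical stand-in, not the real formatCliCommand.

```ts
// Hypothetical illustration; only the two regex patterns come from the diff above.
const OLD_PROFILE_FLAG_RE = /(^|\s)--profile(\s|$)/;
const NEW_PROFILE_FLAG_RE = /(?:^|\s)--profile(?:\s|=|$)/;

const command = "clawdbot chat --profile=work";

console.log(OLD_PROFILE_FLAG_RE.test(command)); // false: "=" breaks the match, flag looks missing
console.log(NEW_PROFILE_FLAG_RE.test(command)); // true: flag detected, nothing gets re-appended

// Stand-in for the append-if-missing behaviour the changelog entry describes.
function withProfileFlag(cmd: string, profile: string): string {
  return NEW_PROFILE_FLAG_RE.test(cmd) ? cmd : `${cmd} --profile ${profile}`;
}
console.log(withProfileFlag(command, "work")); // unchanged: no duplicate --profile
```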
@@ -4,30 +4,15 @@ import { AssistantMessageComponent } from "./assistant-message.js";
import { ToolExecutionComponent } from "./tool-execution.js";
import { UserMessageComponent } from "./user-message.js";

/**
 * Per-run streaming state - isolates each run's thinking and content streams.
 * This enables proper sequencing regardless of network arrival order.
 */
interface StreamingRunState {
  component: AssistantMessageComponent;
  thinkingText: string;
  contentText: string;
  showThinking: boolean;
}

export class ChatLog extends Container {
  private toolById = new Map<string, ToolExecutionComponent>();
  // FIXED: Replace single streaming fields with per-runId Map for proper isolation
  private streamingRuns = new Map<string, StreamingRunState>();
  // Keep reference to most recent run for backward compatibility
  private lastStreamingRunId: string | null = null;
  private streamingRuns = new Map<string, AssistantMessageComponent>();
  private toolsExpanded = false;

  clearAll() {
    this.clear();
    this.toolById.clear();
    this.streamingRuns.clear();
    this.lastStreamingRunId = null;
  }

  addSystem(text: string) {
@@ -39,141 +24,36 @@ export class ChatLog extends Container {
    this.addChild(new UserMessageComponent(text));
  }

  /**
   * Get or create streaming state for a specific runId.
   */
  private getOrCreateRunState(runId: string, showThinking: boolean): StreamingRunState {
    let state = this.streamingRuns.get(runId);
    if (!state) {
      const component = new AssistantMessageComponent("");
      this.addChild(component);
      state = {
        component,
        thinkingText: "",
        contentText: "",
        showThinking,
      };
      this.streamingRuns.set(runId, state);
      this.lastStreamingRunId = runId;
    }
    return state;
  }

  /**
   * Compose the final display text from thinking + content.
   * FIXED: Ensures thinking always appears before content regardless of arrival order.
   */
  private composeDisplayText(state: StreamingRunState): string {
    const parts: string[] = [];

    // Thinking comes first (if enabled and present)
    if (state.showThinking && state.thinkingText.trim()) {
      parts.push(`[thinking]\n${state.thinkingText}`);
    }

    // Content comes after thinking
    if (state.contentText.trim()) {
      parts.push(state.contentText);
    }

    return parts.join("\n\n").trim() || "";
  private resolveRunId(runId?: string) {
    return runId ?? "default";
  }

  startAssistant(text: string, runId?: string) {
    const component = new AssistantMessageComponent(text);
    if (runId) {
      // Create proper streaming state for tracked runs
      this.streamingRuns.set(runId, {
        component,
        thinkingText: "",
        contentText: text,
        showThinking: false,
      });
      this.lastStreamingRunId = runId;
    }
    this.streamingRuns.set(this.resolveRunId(runId), component);
    this.addChild(component);
    return component;
  }

  /**
   * Update the assistant message with new streaming content.
   * FIXED: Now properly isolates by runId and separates thinking/content.
   */
  updateAssistant(
    text: string,
    runId?: string,
    options?: {
      thinkingText?: string;
      contentText?: string;
      showThinking?: boolean;
    },
  ) {
    const effectiveRunId = runId ?? "default";
    const showThinking = options?.showThinking ?? false;
    const state = this.getOrCreateRunState(effectiveRunId, showThinking);

    // Update thinking and/or content separately if provided
    if (options?.thinkingText !== undefined) {
      state.thinkingText = options.thinkingText;
  updateAssistant(text: string, runId?: string) {
    const effectiveRunId = this.resolveRunId(runId);
    const existing = this.streamingRuns.get(effectiveRunId);
    if (!existing) {
      this.startAssistant(text, runId);
      return;
    }
    if (options?.contentText !== undefined) {
      state.contentText = options.contentText;
    }

    // If only raw text provided (backward compatibility), use as content
    if (options?.thinkingText === undefined && options?.contentText === undefined) {
      state.contentText = text;
    }

    state.showThinking = showThinking;

    // Recompose and render with guaranteed ordering
    const displayText = this.composeDisplayText(state);
    state.component.setText(displayText);
  }

  getStreamingText(runId?: string) {
    const effectiveRunId = runId ?? this.lastStreamingRunId;
    if (!effectiveRunId) return null;

    const state = this.streamingRuns.get(effectiveRunId);
    if (!state) return null;

    return this.composeDisplayText(state);
  }

  /**
   * Get the raw streaming state (for diagnostics).
   */
  getStreamingState(runId: string): { thinking: string; content: string } | null {
    const state = this.streamingRuns.get(runId);
    if (!state) return null;
    return {
      thinking: state.thinkingText,
      content: state.contentText,
    };
    existing.setText(text);
  }

  finalizeAssistant(text: string, runId?: string) {
    const effectiveRunId = runId ?? this.lastStreamingRunId;
    const state = effectiveRunId ? this.streamingRuns.get(effectiveRunId) : null;

    if (state) {
      // Use the final text, or compose from existing state if final is empty
      const finalText = text.trim() || this.composeDisplayText(state);
      state.component.setText(finalText);
    } else {
      // No existing state - create a new component with final text
      this.startAssistant(text, runId);
    }

    // Clean up the streaming state for this run
    if (effectiveRunId) {
    const effectiveRunId = this.resolveRunId(runId);
    const existing = this.streamingRuns.get(effectiveRunId);
    if (existing) {
      existing.setText(text);
      this.streamingRuns.delete(effectiveRunId);
      if (this.lastStreamingRunId === effectiveRunId) {
        this.lastStreamingRunId = null;
      }
      return;
    }
    this.addChild(new AssistantMessageComponent(text));
  }

  startTool(toolCallId: string, toolName: string, args: unknown) {
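For orientation, a rough usage sketch of the refactored ChatLog streaming API shown above. The method calls and runId keys mirror the new code; the surrounding setup (constructing a ChatLog, the text values) is invented for illustration.

```ts
// Hypothetical driver; ChatLog comes from the diff above, everything else is illustrative.
const chatLog = new ChatLog();

// Each runId gets its own AssistantMessageComponent in streamingRuns.
chatLog.updateAssistant("Hello from run-1", "run-1"); // creates run-1 on first delta
chatLog.updateAssistant("Hello from run-2", "run-2"); // run-2 is isolated from run-1

// finalizeAssistant sets the final text and removes the run's streaming entry,
// so a later delta for the same runId would start a fresh component.
chatLog.finalizeAssistant("Hello from run-1, complete.", "run-1");
```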
@@ -1,12 +1,7 @@
import type { TUI } from "@mariozechner/pi-tui";
import type { ChatLog } from "./components/chat-log.js";
import {
  asString,
  extractTextFromMessage,
  extractThinkingFromMessage,
  extractContentFromMessage,
  resolveFinalAssistantText,
} from "./tui-formatters.js";
import { asString } from "./tui-formatters.js";
import { TuiStreamAssembler } from "./tui-stream-assembler.js";
import type { AgentEvent, ChatEvent, TuiStateAccess } from "./tui-types.js";

type EventHandlerContext = {
@@ -17,25 +12,14 @@ type EventHandlerContext = {
  refreshSessionInfo?: () => Promise<void>;
};

/**
 * Per-run stream buffer for tracking thinking/content separately.
 * Enables proper sequencing regardless of network arrival order.
 */
interface RunStreamBuffer {
  thinkingText: string;
  contentText: string;
  lastUpdateMs: number;
}

export function createEventHandlers(context: EventHandlerContext) {
  const { chatLog, tui, state, setActivityStatus, refreshSessionInfo } = context;
  const finalizedRuns = new Map<string, number>();
  // FIXED: Per-run stream buffers for proper isolation
  const runBuffers = new Map<string, RunStreamBuffer>();
  const streamAssembler = new TuiStreamAssembler();

  const noteFinalizedRun = (runId: string) => {
    finalizedRuns.set(runId, Date.now());
    runBuffers.delete(runId); // Clean up buffer
    streamAssembler.drop(runId);
    if (finalizedRuns.size <= 200) return;
    const keepUntil = Date.now() - 10 * 60 * 1000;
    for (const [key, ts] of finalizedRuns) {
@@ -50,22 +34,6 @@ export function createEventHandlers(context: EventHandlerContext) {
    }
  };

  /**
   * Get or create a stream buffer for a specific runId.
   */
  const getOrCreateBuffer = (runId: string): RunStreamBuffer => {
    let buffer = runBuffers.get(runId);
    if (!buffer) {
      buffer = {
        thinkingText: "",
        contentText: "",
        lastUpdateMs: Date.now(),
      };
      runBuffers.set(runId, buffer);
    }
    return buffer;
  };

  const handleChatEvent = (payload: unknown) => {
    if (!payload || typeof payload !== "object") return;
    const evt = payload as ChatEvent;
@@ -75,33 +43,9 @@ export function createEventHandlers(context: EventHandlerContext) {
      if (evt.state === "final") return;
    }
    if (evt.state === "delta") {
      const buffer = getOrCreateBuffer(evt.runId);

      // FIXED: Extract thinking and content SEPARATELY for proper sequencing
      // This is model-agnostic: models without thinking blocks just return empty string
      const thinkingText = extractThinkingFromMessage(evt.message);
      const contentText = extractContentFromMessage(evt.message);

      // Update buffer with new content
      // In streaming, we typically receive the full accumulated text each time
      if (thinkingText) {
        buffer.thinkingText = thinkingText;
      }
      if (contentText) {
        buffer.contentText = contentText;
      }
      buffer.lastUpdateMs = Date.now();

      // Skip render if both are empty
      if (!buffer.thinkingText && !buffer.contentText) return;

      // FIXED: Pass separated streams to ChatLog for proper sequencing
      chatLog.updateAssistant("", evt.runId, {
        thinkingText: buffer.thinkingText,
        contentText: buffer.contentText,
        showThinking: state.showThinking,
      });

      const displayText = streamAssembler.ingestDelta(evt.runId, evt.message, state.showThinking);
      if (!displayText) return;
      chatLog.updateAssistant(displayText, evt.runId);
      setActivityStatus("streaming");
    }
    if (evt.state === "final") {
@@ -112,24 +56,7 @@ export function createEventHandlers(context: EventHandlerContext) {
          : ""
        : "";

      // FIXED: Extract final content with proper thinking handling
      const thinkingText = extractThinkingFromMessage(evt.message);
      const contentText = extractContentFromMessage(evt.message);

      // Compose final text with proper ordering (thinking before content)
      const parts: string[] = [];
      if (state.showThinking && thinkingText.trim()) {
        parts.push(`[thinking]\n${thinkingText}`);
      }
      if (contentText.trim()) {
        parts.push(contentText);
      }
      const finalComposed = parts.join("\n\n").trim();

      const finalText = resolveFinalAssistantText({
        finalText: finalComposed,
        streamedText: chatLog.getStreamingText(evt.runId),
      });
      const finalText = streamAssembler.finalize(evt.runId, evt.message, state.showThinking);
      chatLog.finalizeAssistant(finalText, evt.runId);
      noteFinalizedRun(evt.runId);
      state.activeChatRunId = null;
@@ -139,14 +66,14 @@ export function createEventHandlers(context: EventHandlerContext) {
    }
    if (evt.state === "aborted") {
      chatLog.addSystem("run aborted");
      runBuffers.delete(evt.runId);
      streamAssembler.drop(evt.runId);
      state.activeChatRunId = null;
      setActivityStatus("aborted");
      void refreshSessionInfo?.();
    }
    if (evt.state === "error") {
      chatLog.addSystem(`run error: ${evt.errorMessage ?? "unknown"}`);
      runBuffers.delete(evt.runId);
      streamAssembler.drop(evt.runId);
      state.activeChatRunId = null;
      setActivityStatus("error");
      void refreshSessionInfo?.();
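A guess at the minimal delta payload handleChatEvent consumes, reconstructed only from the fields the handler reads above (state, runId, message); the real ChatEvent type in tui-types.ts may carry more.

```ts
// Shape inferred from usage in the diff; treat anything beyond these fields as unknown.
const deltaEvent = {
  state: "delta" as const,
  runId: "run-1",
  message: {
    role: "assistant",
    content: [
      { type: "thinking", thinking: "outline the reply" },
      { type: "text", text: "Here is the answer so far" },
    ],
  },
};
// handleChatEvent(deltaEvent) would feed this through streamAssembler.ingestDelta
// and, if the composed text changed, into chatLog.updateAssistant.
```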
@@ -1,6 +1,10 @@
import { describe, expect, it } from "vitest";

import { extractTextFromMessage } from "./tui-formatters.js";
import {
  extractContentFromMessage,
  extractTextFromMessage,
  extractThinkingFromMessage,
} from "./tui-formatters.js";

describe("extractTextFromMessage", () => {
  it("renders errorMessage when assistant content is empty", () => {
@@ -27,4 +31,60 @@ describe("extractTextFromMessage", () => {

    expect(text).toContain("unknown error");
  });

  it("joins multiple text blocks with single newlines", () => {
    const text = extractTextFromMessage({
      role: "assistant",
      content: [
        { type: "text", text: "first" },
        { type: "text", text: "second" },
      ],
    });

    expect(text).toBe("first\nsecond");
  });

  it("places thinking before content when included", () => {
    const text = extractTextFromMessage(
      {
        role: "assistant",
        content: [
          { type: "text", text: "hello" },
          { type: "thinking", thinking: "ponder" },
        ],
      },
      { includeThinking: true },
    );

    expect(text).toBe("[thinking]\nponder\n\nhello");
  });
});

describe("extractThinkingFromMessage", () => {
  it("collects only thinking blocks", () => {
    const text = extractThinkingFromMessage({
      role: "assistant",
      content: [
        { type: "thinking", thinking: "alpha" },
        { type: "text", text: "hello" },
        { type: "thinking", thinking: "beta" },
      ],
    });

    expect(text).toBe("alpha\nbeta");
  });
});

describe("extractContentFromMessage", () => {
  it("collects only text blocks", () => {
    const text = extractContentFromMessage({
      role: "assistant",
      content: [
        { type: "thinking", thinking: "alpha" },
        { type: "text", text: "hello" },
      ],
    });

    expect(text).toBe("hello");
  });
});
@@ -12,6 +12,25 @@ export function resolveFinalAssistantText(params: {
  return "(no output)";
}

export function composeThinkingAndContent(params: {
  thinkingText?: string;
  contentText?: string;
  showThinking?: boolean;
}) {
  const thinkingText = params.thinkingText?.trim() ?? "";
  const contentText = params.contentText?.trim() ?? "";
  const parts: string[] = [];

  if (params.showThinking && thinkingText) {
    parts.push(`[thinking]\n${thinkingText}`);
  }
  if (contentText) {
    parts.push(contentText);
  }

  return parts.join("\n\n").trim();
}

/**
 * Extract ONLY thinking blocks from message content.
 * Model-agnostic: returns empty string if no thinking blocks exist.
@@ -80,7 +99,6 @@ function extractTextBlocks(content: unknown, opts?: { includeThinking?: boolean
  if (typeof content === "string") return content.trim();
  if (!Array.isArray(content)) return "";

  // FIXED: Separate collection to ensure proper ordering (thinking before text)
  const thinkingParts: string[] = [];
  const textParts: string[] = [];
@@ -95,20 +113,15 @@ function extractTextBlocks(content: unknown, opts?: { includeThinking?: boolean
      record.type === "thinking" &&
      typeof record.thinking === "string"
    ) {
      thinkingParts.push(`[thinking]\n${record.thinking}`);
      thinkingParts.push(record.thinking);
    }
  }

  // FIXED: Always put thinking BEFORE text content for consistent ordering
  const parts: string[] = [];
  if (thinkingParts.length > 0) {
    parts.push(...thinkingParts);
  }
  if (textParts.length > 0) {
    parts.push(...textParts);
  }

  return parts.join("\n\n").trim();
  return composeThinkingAndContent({
    thinkingText: thinkingParts.join("\n").trim(),
    contentText: textParts.join("\n").trim(),
    showThinking: opts?.includeThinking ?? false,
  });
}

export function extractTextFromMessage(
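A quick sketch of the new composeThinkingAndContent helper's output, derived from the hunk above; the import path assumes the file layout implied by the diff.

```ts
import { composeThinkingAndContent } from "./tui-formatters.js";

// Thinking is prefixed and placed before content only when showThinking is true.
const visible = composeThinkingAndContent({
  thinkingText: "plan the reply",
  contentText: "Here you go.",
  showThinking: true,
});
console.log(visible); // "[thinking]\nplan the reply\n\nHere you go."

const hidden = composeThinkingAndContent({
  thinkingText: "plan the reply",
  contentText: "Here you go.",
  showThinking: false,
});
console.log(hidden); // "Here you go."
```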
src/tui/tui-stream-assembler.test.ts (new file, 68 lines)
@@ -0,0 +1,68 @@
import { describe, expect, it } from "vitest";

import { TuiStreamAssembler } from "./tui-stream-assembler.js";

describe("TuiStreamAssembler", () => {
  it("keeps thinking before content even when thinking arrives later", () => {
    const assembler = new TuiStreamAssembler();
    const first = assembler.ingestDelta(
      "run-1",
      {
        role: "assistant",
        content: [{ type: "text", text: "Hello" }],
      },
      true,
    );
    expect(first).toBe("Hello");

    const second = assembler.ingestDelta(
      "run-1",
      {
        role: "assistant",
        content: [{ type: "thinking", thinking: "Brain" }],
      },
      true,
    );
    expect(second).toBe("[thinking]\nBrain\n\nHello");
  });

  it("omits thinking when showThinking is false", () => {
    const assembler = new TuiStreamAssembler();
    const text = assembler.ingestDelta(
      "run-2",
      {
        role: "assistant",
        content: [
          { type: "thinking", thinking: "Hidden" },
          { type: "text", text: "Visible" },
        ],
      },
      false,
    );

    expect(text).toBe("Visible");
  });

  it("falls back to streamed text on empty final payload", () => {
    const assembler = new TuiStreamAssembler();
    assembler.ingestDelta(
      "run-3",
      {
        role: "assistant",
        content: [{ type: "text", text: "Streamed" }],
      },
      false,
    );

    const finalText = assembler.finalize(
      "run-3",
      {
        role: "assistant",
        content: [],
      },
      false,
    );

    expect(finalText).toBe("Streamed");
  });
});
src/tui/tui-stream-assembler.ts (new file, 83 lines)
@@ -0,0 +1,83 @@
import {
  composeThinkingAndContent,
  extractContentFromMessage,
  extractThinkingFromMessage,
  resolveFinalAssistantText,
} from "./tui-formatters.js";

type RunStreamState = {
  thinkingText: string;
  contentText: string;
  displayText: string;
};

export class TuiStreamAssembler {
  private runs = new Map<string, RunStreamState>();

  private getOrCreateRun(runId: string): RunStreamState {
    let state = this.runs.get(runId);
    if (!state) {
      state = {
        thinkingText: "",
        contentText: "",
        displayText: "",
      };
      this.runs.set(runId, state);
    }
    return state;
  }

  ingestDelta(runId: string, message: unknown, showThinking: boolean): string | null {
    const thinkingText = extractThinkingFromMessage(message);
    const contentText = extractContentFromMessage(message);
    const state = this.getOrCreateRun(runId);

    if (thinkingText) {
      state.thinkingText = thinkingText;
    }
    if (contentText) {
      state.contentText = contentText;
    }

    const displayText = composeThinkingAndContent({
      thinkingText: state.thinkingText,
      contentText: state.contentText,
      showThinking,
    });

    if (!displayText || displayText === state.displayText) return null;

    state.displayText = displayText;
    return displayText;
  }

  finalize(runId: string, message: unknown, showThinking: boolean): string {
    const state = this.getOrCreateRun(runId);
    const thinkingText = extractThinkingFromMessage(message);
    const contentText = extractContentFromMessage(message);

    if (thinkingText) {
      state.thinkingText = thinkingText;
    }
    if (contentText) {
      state.contentText = contentText;
    }

    const finalComposed = composeThinkingAndContent({
      thinkingText: state.thinkingText,
      contentText: state.contentText,
      showThinking,
    });
    const finalText = resolveFinalAssistantText({
      finalText: finalComposed,
      streamedText: state.displayText,
    });

    this.runs.delete(runId);
    return finalText;
  }

  drop(runId: string) {
    this.runs.delete(runId);
  }
}