fix: send text between tool calls to channel immediately

Previously, when block streaming was disabled (the default), text generated
between tool calls would only appear after all tools completed. This was
because onBlockReply wasn't passed to the subscription when block streaming
was off, so flushBlockReplyBuffer() before tool execution did nothing.

Now onBlockReply is always passed, and when block streaming is disabled,
block replies are sent directly during the flush that runs before tool
execution. Directly sent payloads are tracked so they are not duplicated in
the final payloads.
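
Roughly, the flush path now behaves like the sketch below. The buffer and
tracking fields are illustrative names only, not the actual implementation;
just onBlockReply and flushBlockReplyBuffer come from this change.

    type BlockReplyPayload = { text?: string; mediaUrls?: string[] };

    interface BlockReplyState {
      buffer: BlockReplyPayload[]; // text accumulated since the last flush
      directlySent: Set<string>;   // texts already delivered during a tool flush
    }

    async function flushBlockReplyBuffer(
      state: BlockReplyState,
      onBlockReply?: (payload: BlockReplyPayload) => void | Promise<void>,
    ): Promise<void> {
      // onBlockReply is now always provided, so buffered text between tool
      // calls is delivered immediately instead of after all tools finish.
      for (const payload of state.buffer.splice(0)) {
        await onBlockReply?.(payload);
        // Remember what was sent directly so the final payloads skip duplicates.
        if (payload.text) state.directlySent.add(payload.text);
      }
    }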

Also fixes a race condition where tool summaries could be emitted before the
typing indicator had started: tool handlers now await onAgentEvent.
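
For illustration, the ordering now works roughly as sketched below.
startTypingIndicator and emitToolSummary are assumed placeholder names for
consumer and handler behavior; only the awaited onAgentEvent call reflects
the actual change.

    type AgentEvent = { stream: string; data: Record<string, unknown> };

    declare function startTypingIndicator(): Promise<void>; // assumed consumer behavior
    declare function emitToolSummary(toolName: string): void; // assumed placeholder

    // Consumer-side handler: starting the typing indicator is asynchronous.
    const onAgentEvent = async (evt: AgentEvent): Promise<void> => {
      if (evt.stream === "tool" && evt.data.phase === "start") {
        await startTypingIndicator();
      }
    };

    async function handleToolStart(toolName: string, toolCallId: string): Promise<void> {
      // Previously this call was fire-and-forget, so the summary below could go
      // out before the typing indicator had started.
      await onAgentEvent({ stream: "tool", data: { phase: "start", name: toolName, toolCallId } });
      emitToolSummary(toolName);
    }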
commit 2ee71e4154 (parent e6364d031d)
Author: Tyler Yust
Date: 2026-01-15 20:55:52 -08:00
11 changed files with 118 additions and 71 deletions

View File

@@ -14,7 +14,7 @@ export function handleAgentStart(ctx: EmbeddedPiSubscribeContext) {
startedAt: Date.now(),
},
});
-ctx.params.onAgentEvent?.({
+void ctx.params.onAgentEvent?.({
stream: "lifecycle",
data: { phase: "start" },
});
@@ -24,7 +24,7 @@ export function handleAutoCompactionStart(ctx: EmbeddedPiSubscribeContext) {
ctx.state.compactionInFlight = true;
ctx.ensureCompactionPromise();
ctx.log.debug(`embedded run compaction start: runId=${ctx.params.runId}`);
-ctx.params.onAgentEvent?.({
+void ctx.params.onAgentEvent?.({
stream: "compaction",
data: { phase: "start" },
});
@@ -43,7 +43,7 @@ export function handleAutoCompactionEnd(
} else {
ctx.maybeResolveCompactionWait();
}
-ctx.params.onAgentEvent?.({
+void ctx.params.onAgentEvent?.({
stream: "compaction",
data: { phase: "end", willRetry },
});
@@ -59,7 +59,7 @@ export function handleAgentEnd(ctx: EmbeddedPiSubscribeContext) {
endedAt: Date.now(),
},
});
-ctx.params.onAgentEvent?.({
+void ctx.params.onAgentEvent?.({
stream: "lifecycle",
data: { phase: "end" },
});

View File

@@ -118,7 +118,7 @@ export function handleMessageUpdate(
mediaUrls: mediaUrls?.length ? mediaUrls : undefined,
},
});
-ctx.params.onAgentEvent?.({
+void ctx.params.onAgentEvent?.({
stream: "assistant",
data: {
text: cleanedText,

View File

@@ -11,7 +11,7 @@ import {
} from "./pi-embedded-subscribe.tools.js";
import { inferToolMetaFromArgs } from "./pi-embedded-utils.js";
-export function handleToolExecutionStart(
+export async function handleToolExecutionStart(
ctx: EmbeddedPiSubscribeContext,
evt: AgentEvent & { toolName: string; toolCallId: string; args: unknown },
) {
@@ -53,7 +53,8 @@ export function handleToolExecutionStart(
args: args as Record<string, unknown>,
},
});
-ctx.params.onAgentEvent?.({
+// Await onAgentEvent to ensure typing indicator starts before tool summaries are emitted.
+await ctx.params.onAgentEvent?.({
stream: "tool",
data: { phase: "start", name: toolName, toolCallId },
});
@@ -108,7 +109,7 @@ export function handleToolExecutionUpdate(
partialResult: sanitized,
},
});
-ctx.params.onAgentEvent?.({
+void ctx.params.onAgentEvent?.({
stream: "tool",
data: {
phase: "update",
@@ -170,7 +171,7 @@ export function handleToolExecutionEnd(
result: sanitizedResult,
},
});
-ctx.params.onAgentEvent?.({
+void ctx.params.onAgentEvent?.({
stream: "tool",
data: {
phase: "result",

View File

@@ -32,7 +32,8 @@ export function createEmbeddedPiSessionEventHandler(ctx: EmbeddedPiSubscribeCont
handleMessageEnd(ctx, evt as never);
return;
case "tool_execution_start":
-handleToolExecutionStart(ctx, evt as never);
+// Async handler - awaits typing indicator before emitting tool summaries.
+void handleToolExecutionStart(ctx, evt as never);
return;
case "tool_execution_update":
handleToolExecutionUpdate(ctx, evt as never);

View File

@@ -13,7 +13,7 @@ describe("subscribeEmbeddedPiSession", () => {
{ tag: "antthinking", open: "<antthinking>", close: "</antthinking>" },
] as const;
it("includes canvas action metadata in tool summaries", () => {
it("includes canvas action metadata in tool summaries", async () => {
let handler: ((evt: unknown) => void) | undefined;
const session: StubSession = {
subscribe: (fn) => {
@@ -38,6 +38,9 @@ describe("subscribeEmbeddedPiSession", () => {
args: { action: "a2ui_push", jsonlPath: "/tmp/a2ui.jsonl" },
});
+// Wait for async handler to complete
+await Promise.resolve();
expect(onToolResult).toHaveBeenCalledTimes(1);
const payload = onToolResult.mock.calls[0][0];
expect(payload.text).toContain("🖼️");
@@ -72,7 +75,7 @@ describe("subscribeEmbeddedPiSession", () => {
expect(onToolResult).not.toHaveBeenCalled();
});
it("emits tool summaries when shouldEmitToolResult overrides verbose", () => {
it("emits tool summaries when shouldEmitToolResult overrides verbose", async () => {
let handler: ((evt: unknown) => void) | undefined;
const session: StubSession = {
subscribe: (fn) => {
@@ -98,6 +101,9 @@ describe("subscribeEmbeddedPiSession", () => {
args: { path: "/tmp/c.txt" },
});
+// Wait for async handler to complete
+await Promise.resolve();
expect(onToolResult).toHaveBeenCalledTimes(1);
});
});

View File

@@ -14,7 +14,7 @@ describe("subscribeEmbeddedPiSession", () => {
{ tag: "antthinking", open: "<antthinking>", close: "</antthinking>" },
] as const;
it("suppresses message_end block replies when the message tool already sent", () => {
it("suppresses message_end block replies when the message tool already sent", async () => {
let handler: ((evt: unknown) => void) | undefined;
const session: StubSession = {
subscribe: (fn) => {
@@ -41,6 +41,9 @@ describe("subscribeEmbeddedPiSession", () => {
args: { action: "send", to: "+1555", message: messageText },
});
+// Wait for async handler to complete
+await Promise.resolve();
handler?.({
type: "tool_execution_end",
toolName: "message",
@@ -58,7 +61,7 @@ describe("subscribeEmbeddedPiSession", () => {
expect(onBlockReply).not.toHaveBeenCalled();
});
it("does not suppress message_end replies when message tool reports error", () => {
it("does not suppress message_end replies when message tool reports error", async () => {
let handler: ((evt: unknown) => void) | undefined;
const session: StubSession = {
subscribe: (fn) => {
@@ -85,6 +88,9 @@ describe("subscribeEmbeddedPiSession", () => {
args: { action: "send", to: "+1555", message: messageText },
});
+// Wait for async handler to complete
+await Promise.resolve();
handler?.({
type: "tool_execution_end",
toolName: "message",

View File

@@ -54,7 +54,7 @@ describe("subscribeEmbeddedPiSession", () => {
await waitPromise;
expect(resolved).toBe(true);
});
it("emits tool summaries at tool start when verbose is on", () => {
it("emits tool summaries at tool start when verbose is on", async () => {
let handler: ((evt: unknown) => void) | undefined;
const session: StubSession = {
subscribe: (fn) => {
@@ -79,6 +79,9 @@ describe("subscribeEmbeddedPiSession", () => {
args: { path: "/tmp/a.txt" },
});
+// Wait for async handler to complete
+await Promise.resolve();
expect(onToolResult).toHaveBeenCalledTimes(1);
const payload = onToolResult.mock.calls[0][0];
expect(payload.text).toContain("/tmp/a.txt");
@@ -93,7 +96,7 @@ describe("subscribeEmbeddedPiSession", () => {
expect(onToolResult).toHaveBeenCalledTimes(1);
});
it("includes browser action metadata in tool summaries", () => {
it("includes browser action metadata in tool summaries", async () => {
let handler: ((evt: unknown) => void) | undefined;
const session: StubSession = {
subscribe: (fn) => {
@@ -118,6 +121,9 @@ describe("subscribeEmbeddedPiSession", () => {
args: { action: "snapshot", targetUrl: "https://example.com" },
});
+// Wait for async handler to complete
+await Promise.resolve();
expect(onToolResult).toHaveBeenCalledTimes(1);
const payload = onToolResult.mock.calls[0][0];
expect(payload.text).toContain("🌐");

View File

@@ -22,7 +22,10 @@ export type SubscribeEmbeddedPiSessionParams = {
blockReplyChunking?: BlockReplyChunking;
onPartialReply?: (payload: { text?: string; mediaUrls?: string[] }) => void | Promise<void>;
onAssistantMessageStart?: () => void | Promise<void>;
-onAgentEvent?: (evt: { stream: string; data: Record<string, unknown> }) => void;
+onAgentEvent?: (evt: {
+stream: string;
+data: Record<string, unknown>;
+}) => void | Promise<void>;
enforceFinalTag?: boolean;
};