fix(deps): patch pi-ai tool calling
@@ -1,5 +1,5 @@
diff --git a/dist/providers/google-gemini-cli.js b/dist/providers/google-gemini-cli.js
index 93aa26c395e9bd0df64376408a13d15ee9e7cce7..41a439e5fc370038a5febef9e8f021ee279cf8aa 100644
index 93aa26c..beb585e 100644
--- a/dist/providers/google-gemini-cli.js
+++ b/dist/providers/google-gemini-cli.js
@@ -248,6 +248,11 @@ export const streamGoogleGeminiCli = (model, context, options) => {
@@ -15,7 +15,7 @@ index 93aa26c395e9bd0df64376408a13d15ee9e7cce7..41a439e5fc370038a5febef9e8f021ee
if (attempt < MAX_RETRIES && isRetryableError(response.status, errorText)) {
// Use server-provided delay or exponential backoff
diff --git a/dist/providers/openai-codex-responses.js b/dist/providers/openai-codex-responses.js
index 188a8294f26fe1bfe3fb298a7f58e4d8eaf2a529..3fd8027edafdad4ca364af53f0a1811139705b21 100644
index 188a829..4555c9f 100644
--- a/dist/providers/openai-codex-responses.js
+++ b/dist/providers/openai-codex-responses.js
@@ -433,9 +433,15 @@ function convertMessages(model, context) {
@@ -34,8 +34,100 @@ index 188a8294f26fe1bfe3fb298a7f58e4d8eaf2a529..3fd8027edafdad4ca364af53f0a18111
const reasoningItem = JSON.parse(block.thinkingSignature);
output.push(reasoningItem);
}
@@ -515,7 +521,7 @@ function convertTools(tools) {
name: tool.name,
description: tool.description,
parameters: tool.parameters,
- strict: null,
+ strict: false,
}));
}
function mapStopReason(status) {
diff --git a/dist/providers/openai-completions.js b/dist/providers/openai-completions.js
index 5d0813a..e0ef676 100644
--- a/dist/providers/openai-completions.js
+++ b/dist/providers/openai-completions.js
@@ -71,6 +71,18 @@ export const streamOpenAICompletions = (model, context, options) => {
stream.push({ type: "start", partial: output });
let currentBlock = null;
const blocks = output.content;
+ const pendingToolCalls = new Map();
+ const isCompleteJsonObject = (text) => {
+ if (!text || text.trim() === "")
+ return false;
+ try {
+ JSON.parse(text);
+ return true;
+ }
+ catch {
+ return false;
+ }
+ };
const blockIndex = () => blocks.length - 1;
const finishCurrentBlock = (block) => {
if (block) {
@@ -193,31 +205,41 @@ export const streamOpenAICompletions = (model, context, options) => {
}
if (choice?.delta?.tool_calls) {
for (const toolCall of choice.delta.tool_calls) {
+ const index = typeof toolCall.index === "number" ? toolCall.index : 0;
+ const pending = pendingToolCalls.get(index) || {
+ type: "toolCall",
+ id: "",
+ name: "",
+ arguments: {},
+ partialArgs: "",
+ };
+ if (toolCall.id)
+ pending.id = toolCall.id;
+ if (toolCall.function?.name)
+ pending.name = toolCall.function.name;
+ let delta = "";
+ if (toolCall.function && "arguments" in toolCall.function) {
+ delta = toolCall.function.arguments || "";
+ pending.partialArgs += delta;
+ pending.arguments = parseStreamingJson(pending.partialArgs);
+ }
+ pendingToolCalls.set(index, pending);
+ // Delay emitting tool calls until the arguments JSON is complete.
+ // Some providers (e.g. LM Studio) stream an initial empty chunk.
+ if (!isCompleteJsonObject(pending.partialArgs)) {
+ continue;
+ }
if (!currentBlock ||
currentBlock.type !== "toolCall" ||
- (toolCall.id && currentBlock.id !== toolCall.id)) {
+ (pending.id && currentBlock.id !== pending.id)) {
finishCurrentBlock(currentBlock);
- currentBlock = {
- type: "toolCall",
- id: toolCall.id || "",
- name: toolCall.function?.name || "",
- arguments: {},
- partialArgs: "",
- };
+ currentBlock = pending;
output.content.push(currentBlock);
stream.push({ type: "toolcall_start", contentIndex: blockIndex(), partial: output });
}
if (currentBlock.type === "toolCall") {
- if (toolCall.id)
- currentBlock.id = toolCall.id;
- if (toolCall.function?.name)
- currentBlock.name = toolCall.function.name;
- let delta = "";
- if (toolCall.function?.arguments) {
- delta = toolCall.function.arguments;
- currentBlock.partialArgs += toolCall.function.arguments;
- currentBlock.arguments = parseStreamingJson(currentBlock.partialArgs);
- }
+ currentBlock.partialArgs = pending.partialArgs;
+ currentBlock.arguments = pending.arguments;
stream.push({
type: "toolcall_delta",
contentIndex: blockIndex(),
diff --git a/dist/providers/openai-responses.js b/dist/providers/openai-responses.js
index 20fb0a22aaa28f7ff7c2f44a8b628fa1d9d7d936..0bf46bfb4a6fac5a0304652e42566b2c991bab48 100644
index f07085c..f3b01ee 100644
--- a/dist/providers/openai-responses.js
+++ b/dist/providers/openai-responses.js
@@ -396,10 +396,16 @@ function convertMessages(model, context) {
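
For reference, a minimal standalone sketch of the buffering behaviour the openai-completions hunks introduce: tool-call argument deltas are accumulated per tool_call index and only surfaced once the accumulated text parses as complete JSON. Only pendingToolCalls and isCompleteJsonObject mirror names from the patch; handleToolCallDelta and emit are illustrative names, not pi-ai APIs.

// Standalone illustration (not pi-ai code): buffer streamed tool-call argument
// deltas per index and emit only once the JSON is complete, so providers that
// send an initial empty chunk (e.g. LM Studio) do not produce a broken tool call.
const pendingToolCalls = new Map();

const isCompleteJsonObject = (text) => {
    if (!text || text.trim() === "") return false;
    try {
        JSON.parse(text);
        return true;
    } catch {
        return false;
    }
};

// handleToolCallDelta is a hypothetical name for this sketch; `emit` stands in
// for whatever the stream consumer does with a finished tool call.
const handleToolCallDelta = (toolCall, emit) => {
    const index = typeof toolCall.index === "number" ? toolCall.index : 0;
    const pending = pendingToolCalls.get(index) || { id: "", name: "", partialArgs: "" };
    if (toolCall.id) pending.id = toolCall.id;
    if (toolCall.function?.name) pending.name = toolCall.function.name;
    if (toolCall.function && "arguments" in toolCall.function) {
        pending.partialArgs += toolCall.function.arguments || "";
    }
    pendingToolCalls.set(index, pending);
    // Wait until the accumulated arguments parse before emitting anything.
    if (!isCompleteJsonObject(pending.partialArgs)) return;
    emit({ id: pending.id, name: pending.name, arguments: JSON.parse(pending.partialArgs) });
};

// Example: the first chunk carries only the id/name with empty arguments,
// the second completes the JSON and triggers the emit.
handleToolCallDelta({ index: 0, id: "call_1", function: { name: "get_weather", arguments: "" } }, console.log);
handleToolCallDelta({ index: 0, function: { arguments: '{"city":"Berlin"}' } }, console.log);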