fix: enable lmstudio responses and drop think tags

This commit is contained in:
Peter Steinberger
2025-12-27 00:28:52 +00:00
parent 2477ffd860
commit 7e380bb6f8
6 changed files with 29 additions and 10 deletions

View File

@@ -39,6 +39,7 @@
- Streamed `<think>` segments are stripped before partial replies are emitted.
- System prompt now tags allowlisted owner numbers as the user identity to avoid mistaken “friend” assumptions.
- LM Studio/Ollama replies now require <final> tags; streaming ignores content until <final> begins. (For LM Studio this is superseded by the entry below.)
- LM Studio responses API: tool payloads no longer include `strict: null`, and LM Studio no longer gets forced `<think>/<final>` tags.
- `process log` pagination is now line-based (omit `offset` to grab the last N lines).
- macOS WebChat: assistant bubbles now update correctly when toggling light/dark mode.
- macOS: avoid spawning a duplicate gateway process when an external listener already exists.

View File

@@ -0,0 +1,12 @@
diff --git a/dist/providers/openai-responses.js b/dist/providers/openai-responses.js
index 20fb0a22aaa28f7ff7c2f44a8b628fa1d9d7d936..c2bc63f483f3285b00755901ba97db810221cea6 100644
--- a/dist/providers/openai-responses.js
+++ b/dist/providers/openai-responses.js
@@ -486,7 +486,6 @@ function convertTools(tools) {
name: tool.name,
description: tool.description,
parameters: tool.parameters, // TypeBox already generates JSON Schema
- strict: null,
}));
}
function mapStopReason(status) {

13
pnpm-lock.yaml generated
View File

@@ -4,6 +4,11 @@ settings:
autoInstallPeers: true
excludeLinksFromLockfile: false
patchedDependencies:
'@mariozechner/pi-ai':
hash: bf3e904ebaad236b8c3bb48c7d1150a1463735e783acaab6d15d6cd381b43832
path: patches/@mariozechner__pi-ai.patch
importers:
.:
@@ -19,7 +24,7 @@ importers:
version: 0.30.2(ws@8.18.3)(zod@4.2.1)
'@mariozechner/pi-ai':
specifier: ^0.30.2
version: 0.30.2(ws@8.18.3)(zod@4.2.1)
version: 0.30.2(patch_hash=bf3e904ebaad236b8c3bb48c7d1150a1463735e783acaab6d15d6cd381b43832)(ws@8.18.3)(zod@4.2.1)
'@mariozechner/pi-coding-agent':
specifier: ^0.30.2
version: 0.30.2(ws@8.18.3)(zod@4.2.1)
@@ -3247,7 +3252,7 @@ snapshots:
'@mariozechner/pi-agent-core@0.30.2(ws@8.18.3)(zod@4.2.1)':
dependencies:
'@mariozechner/pi-ai': 0.30.2(ws@8.18.3)(zod@4.2.1)
'@mariozechner/pi-ai': 0.30.2(patch_hash=bf3e904ebaad236b8c3bb48c7d1150a1463735e783acaab6d15d6cd381b43832)(ws@8.18.3)(zod@4.2.1)
'@mariozechner/pi-tui': 0.30.2
transitivePeerDependencies:
- '@modelcontextprotocol/sdk'
@@ -3257,7 +3262,7 @@ snapshots:
- ws
- zod
'@mariozechner/pi-ai@0.30.2(ws@8.18.3)(zod@4.2.1)':
'@mariozechner/pi-ai@0.30.2(patch_hash=bf3e904ebaad236b8c3bb48c7d1150a1463735e783acaab6d15d6cd381b43832)(ws@8.18.3)(zod@4.2.1)':
dependencies:
'@anthropic-ai/sdk': 0.71.2(zod@4.2.1)
'@google/genai': 1.34.0
@@ -3280,7 +3285,7 @@ snapshots:
'@mariozechner/pi-coding-agent@0.30.2(ws@8.18.3)(zod@4.2.1)':
dependencies:
'@mariozechner/pi-agent-core': 0.30.2(ws@8.18.3)(zod@4.2.1)
'@mariozechner/pi-ai': 0.30.2(ws@8.18.3)(zod@4.2.1)
'@mariozechner/pi-ai': 0.30.2(patch_hash=bf3e904ebaad236b8c3bb48c7d1150a1463735e783acaab6d15d6cd381b43832)(ws@8.18.3)(zod@4.2.1)
'@mariozechner/pi-tui': 0.30.2
chalk: 5.6.2
cli-highlight: 2.1.11

View File

@@ -1,6 +1,6 @@
packages:
- "."
- "ui"
- .
- ui
onlyBuiltDependencies:
- '@whiskeysockets/baileys'
@@ -8,3 +8,6 @@ onlyBuiltDependencies:
- esbuild
- protobufjs
- sharp
patchedDependencies:
'@mariozechner/pi-ai': patches/@mariozechner__pi-ai.patch

View File

@@ -417,8 +417,7 @@ export async function runEmbeddedPiAgent(params: {
node: process.version,
model: `${provider}/${modelId}`,
};
const reasoningTagHint =
provider === "lmstudio" || provider === "ollama";
const reasoningTagHint = provider === "ollama";
const systemPrompt = buildSystemPrompt({
appendPrompt: buildAgentSystemPromptAppend({
workspaceDir: resolvedWorkspace,

View File

@@ -1177,8 +1177,7 @@ export async function getReplyFromConfig(
prompt: commandBody,
extraSystemPrompt: groupIntro || undefined,
ownerNumbers: ownerList.length > 0 ? ownerList : undefined,
enforceFinalTag:
provider === "lmstudio" || provider === "ollama" ? true : undefined,
enforceFinalTag: provider === "ollama" ? true : undefined,
provider,
model,
thinkLevel: resolvedThinkLevel,