Telegram: threaded conversation support (#1597)

* Telegram: isolate dm topic sessions

* Tests: cap Vitest workers

* Tests: cap Vitest workers on CI macOS

* Tests: avoid timer-based pi-ai stream mock

* Tests: increase embedded runner timeout

* fix: harden telegram dm thread handling (#1597) (thanks @rohannagpal)

---------

Co-authored-by: Peter Steinberger <steipete@gmail.com>
Author: Rohan Nagpal
Date: 2026-01-25 10:18:51 +05:30
Committed by: GitHub
Parent: 9eaaadf8ee
Commit: 06a7e1e8ce

10 changed files with 256 additions and 11 deletions
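The hunks below cover only the test-infrastructure part of the change; the Telegram session isolation itself lives in the other changed files, which are not shown here. As a rough illustration of the approach the commit message describes (the identifiers below are invented for the sketch, not taken from this repository), isolating DM topic sessions comes down to folding Telegram's thread/topic id into the session key, so replies in different topics never share agent state:

    // Sketch only: TelegramIncoming and buildSessionKey are hypothetical names.
    interface TelegramIncoming {
      chatId: number;
      // Telegram sets message_thread_id for forum topics and threaded replies.
      messageThreadId?: number;
    }

    function buildSessionKey(msg: TelegramIncoming): string {
      // Plain DMs keep their existing per-chat key; only threaded messages
      // get a topic-scoped session.
      return msg.messageThreadId !== undefined
        ? `telegram:${msg.chatId}:topic:${msg.messageThreadId}`
        : `telegram:${msg.chatId}`;
    }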


@@ -70,7 +70,7 @@ vi.mock("@mariozechner/pi-ai", async () => {
},
streamSimple: (model: { api: string; provider: string; id: string }) => {
const stream = new actual.AssistantMessageEventStream();
-setTimeout(() => {
+queueMicrotask(() => {
stream.push({
type: "done",
reason: "stop",
@@ -80,7 +80,7 @@ vi.mock("@mariozechner/pi-ai", async () => {
: buildAssistantMessage(model),
});
stream.end();
-}, 0);
+});
return stream;
},
};
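The two hunks above drop the timer from the pi-ai stream mock: the final "done" event is now queued as a microtask rather than behind a zero-delay setTimeout, so it is delivered as soon as the consumer yields to the microtask queue and no longer depends on the event-loop timer phase, which can stall under fake timers or on slow CI runners. A minimal stand-alone sketch of the ordering the mock now relies on (plain TypeScript, unrelated to the repository's code):

    // Sketch only: shows why a microtask-queued event arrives after a single await.
    async function demo(): Promise<void> {
      const events: string[] = [];
      queueMicrotask(() => events.push("done"));
      await Promise.resolve();      // the microtask queue has flushed by here
      console.log(events);          // ["done"]
      // A setTimeout(fn, 0) variant would still be pending at this point, and
      // under vi.useFakeTimers() it would not fire until timers are advanced.
    }
    void demo();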
@@ -213,7 +213,7 @@ describe("runEmbeddedPiAgent", () => {
itIfNotWin32(
"persists the first user message before assistant output",
-{ timeout: 60_000 },
+{ timeout: 120_000 },
async () => {
const sessionFile = nextSessionFile();
const cfg = makeOpenAiConfig(["mock-1"]);
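The worker cap mentioned in the commit message ("cap Vitest workers on CI macOS") sits in one of the changed files not shown above. A typical way to express it, sketched here with an assumed heuristic rather than the project's actual config, is to clamp the thread pool only when running on a macOS CI runner:

    // vitest.config.ts -- sketch only; the option values are assumptions.
    import { defineConfig } from "vitest/config";

    // Assumed heuristic: clamp workers only on macOS CI, where large default
    // pools tend to make the runners slow or flaky.
    const capWorkers = Boolean(process.env.CI) && process.platform === "darwin";

    export default defineConfig({
      test: {
        pool: "threads",
        poolOptions: {
          threads: capWorkers ? { minThreads: 1, maxThreads: 2 } : {},
        },
      },
    });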