Skip to content

Commit e2dffde

Browse files
authored
fixing client tool stalling (#58)
1 parent 3c6279c commit e2dffde

File tree

3 files changed

+69
-10
lines changed

3 files changed

+69
-10
lines changed

examples/ts-react-chat/src/routes/api.tanchat.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@ import { createFileRoute } from '@tanstack/react-router'
22
import { chat, maxIterations, toStreamResponse } from '@tanstack/ai'
33
import { openai } from '@tanstack/ai-openai'
44
// import { ollama } from "@tanstack/ai-ollama";
5-
import { anthropic } from '@tanstack/ai-anthropic'
5+
// import { anthropic } from '@tanstack/ai-anthropic'
66
// import { gemini } from "@tanstack/ai-gemini";
77
import { allTools } from '@/lib/guitar-tools'
88

packages/typescript/ai-client/src/chat-client.ts

Lines changed: 42 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,12 @@ import {
1515
} from './message-updaters'
1616
import { DefaultChatClientEventEmitter } from './events'
1717
import type { ModelMessage } from '@tanstack/ai'
18-
import type { ChatClientOptions, ToolCallPart, UIMessage } from './types'
18+
import type {
19+
ChatClientOptions,
20+
ToolCallPart,
21+
ToolResultState,
22+
UIMessage,
23+
} from './types'
1924
import type { ConnectionAdapter } from './connection-adapters'
2025
import type { ChunkStrategy, StreamParser } from './stream/types'
2126
import type { ChatClientEventEmitter } from './events'
@@ -392,17 +397,45 @@ export class ChatClient {
392397
result.state || 'output-available',
393398
)
394399

395-
// Update the tool call part with the output
396-
this.setMessages(
397-
updateToolCallWithOutput(
398-
this.messages,
399-
result.toolCallId,
400-
result.output,
401-
result.state === 'output-error' ? 'input-complete' : undefined,
402-
result.errorText,
400+
// Find the message containing this tool call
401+
const messageWithToolCall = this.messages.find((msg) =>
402+
msg.parts.some(
403+
(p): p is ToolCallPart =>
404+
p.type === 'tool-call' && p.id === result.toolCallId,
403405
),
404406
)
405407

408+
// Step 1: Update the tool call part with the output
409+
let updatedMessages = updateToolCallWithOutput(
410+
this.messages,
411+
result.toolCallId,
412+
result.output,
413+
result.state === 'output-error' ? 'input-complete' : undefined,
414+
result.errorText,
415+
)
416+
417+
// Step 2: Also create a tool-result part (needed for LLM conversation history)
418+
if (messageWithToolCall) {
419+
const content =
420+
typeof result.output === 'string'
421+
? result.output
422+
: JSON.stringify(result.output)
423+
const toolResultState: ToolResultState = result.errorText
424+
? 'error'
425+
: 'complete'
426+
427+
updatedMessages = updateToolResultPart(
428+
updatedMessages,
429+
messageWithToolCall.id,
430+
result.toolCallId,
431+
content,
432+
toolResultState,
433+
result.errorText,
434+
)
435+
}
436+
437+
this.setMessages(updatedMessages)
438+
406439
// Check if we should auto-send
407440
if (this.shouldAutoSend()) {
408441
// Continue the flow without adding a new message

packages/typescript/ai-openai/src/openai-adapter.ts

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -275,6 +275,32 @@ export class OpenAI extends BaseAdapter<
275275
yield handleContentPart(contentPart)
276276
}
277277

278+
// handle content deltas - this is where streaming happens!
279+
if (chunk.type === 'response.output_text.delta') {
280+
accumulatedContent += chunk.delta
281+
yield {
282+
type: 'content',
283+
id: responseId || generateId(),
284+
model: model || options.model,
285+
timestamp,
286+
delta: chunk.delta,
287+
content: accumulatedContent,
288+
role: 'assistant',
289+
}
290+
}
291+
292+
if (chunk.type === 'response.reasoning_text.delta') {
293+
accumulatedReasoning += chunk.delta
294+
yield {
295+
type: 'thinking',
296+
id: responseId || generateId(),
297+
model: model || options.model,
298+
timestamp,
299+
delta: chunk.delta,
300+
content: accumulatedReasoning,
301+
}
302+
}
303+
278304
if (chunk.type === 'response.content_part.done') {
279305
const contentPart = chunk.part
280306

0 commit comments

Comments (0)