Skip to content

Commit ca695aa

Browse files
committed
Use recent messages in prompt and tool prompts
messagesToPrompt now preserves the system message and includes up to the last 10 non-system conversation turns, formatting them as System/User/Assistant blocks and handling content arrays. buildPromptWithTools was changed to construct a prompt consisting of tool instructions, any existing system context, and the last user message (extracted safely from array content), instead of building an augmented copy of the messages array and routing it through messagesToPrompt. The chat route streaming logic was adjusted to suppress immediate text-chunk streaming when tools are present so tool-call responses can be emitted as proper tool_calls on completion.
1 parent 824cc17 commit ca695aa

File tree

3 files changed

+83
-44
lines changed

3 files changed

+83
-44
lines changed

src/helpers/format.js

Lines changed: 42 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -214,32 +214,55 @@ const getModelMapping = (requestedModel) => {
214214
// ---------------------------------------------------------------------------
215215

216216
/**
 * Flatten an OpenAI-style messages array into one prompt string for qodercli.
 *
 * The system message (if any) is emitted first, followed by the most recent
 * conversation turns — capped at 10 to stay within qodercli's context limits —
 * rendered as "User:"/"Assistant:" blocks separated by blank lines. Message
 * content may be a plain string or an array of content parts; only the
 * `text`-typed parts of an array are used.
 *
 * @param {Array<Object>|null|undefined} messages - OpenAI chat messages.
 * @returns {string} Prompt text; "Hello" when nothing usable is present.
 */
const messagesToPrompt = (messages) => {
  if (!messages || messages.length === 0) return "Hello";

  // Normalize a message's content to a single string. Array-form content
  // keeps only its text parts; anything else falls back to the raw string.
  const textOf = (msg) =>
    Array.isArray(msg.content)
      ? msg.content
          .filter((part) => part.type === "text")
          .map((part) => part.text)
          .join("")
      : msg.content || "";

  // Pull the system message aside; everything else is the conversation,
  // trimmed to the 10 most recent turns to avoid context overflow.
  const systemMsg = messages.find((m) => m.role === "system");
  const recent = messages.filter((m) => m.role !== "system").slice(-10);

  const roleLabel = { user: "User", assistant: "Assistant" };
  const parts = [];

  if (systemMsg) {
    const sys = textOf(systemMsg).trim();
    if (sys) parts.push(`System: ${sys}`);
  }

  for (const msg of recent) {
    const label = roleLabel[msg.role];
    const body = textOf(msg).trim();
    // Skip empty messages and roles other than user/assistant (e.g. tool).
    if (label && body) parts.push(`${label}: ${body}`);
  }

  return parts.join("\n\n") || "Hello";
};
244267

245268
// ---------------------------------------------------------------------------

src/helpers/toolPrompt.js

Lines changed: 28 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -61,22 +61,34 @@ const buildPromptWithTools = (messages, tools, messagesToPromptFn) => {
6161

6262
const toolSystem = buildToolSystemPrompt(tools);
6363

64-
// Find existing system message and prepend tool instructions to it,
65-
// or insert a new system message at the start
66-
const existingSystemIdx = messages.findIndex((m) => m.role === 'system');
67-
let augmented;
68-
69-
if (existingSystemIdx !== -1) {
70-
augmented = messages.map((m, i) =>
71-
i === existingSystemIdx
72-
? { ...m, content: `${toolSystem}\n\n${m.content}` }
73-
: m
74-
);
75-
} else {
76-
augmented = [{ role: 'system', content: toolSystem }, ...messages];
77-
}
78-
79-
return messagesToPromptFn(augmented);
64+
// Extract the last user message directly — bypasses messagesToPrompt entirely
65+
// because messagesToPrompt drops system messages and we need to include tool instructions.
66+
const lastUser = messages
67+
.slice()
68+
.reverse()
69+
.find((m) => m.role === 'user');
70+
71+
const userContent = Array.isArray(lastUser?.content)
72+
? lastUser.content
73+
.filter((p) => p.type === 'text')
74+
.map((p) => p.text)
75+
.join('')
76+
: lastUser?.content || '';
77+
78+
// Also extract any existing system message to include alongside tool instructions
79+
const existingSystem = messages.find((m) => m.role === 'system');
80+
const systemContent = existingSystem
81+
? Array.isArray(existingSystem.content)
82+
? existingSystem.content.filter((p) => p.type === 'text').map((p) => p.text).join('')
83+
: existingSystem.content || ''
84+
: '';
85+
86+
// Build final prompt: tool instructions + optional system context + user message
87+
const parts = [toolSystem];
88+
if (systemContent) parts.push(`System context: ${systemContent.trim()}`);
89+
parts.push(`User: ${userContent.trim()}`);
90+
91+
return parts.join('\n\n');
8092
};
8193

8294
// ── Response parsing ──────────────────────────────────────────────────────────

src/routes/chat.js

Lines changed: 13 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -155,15 +155,19 @@ router.post("/", (req, res) => {
155155
if (finishReason) lastFinishReason = finishReason;
156156
if (content) {
157157
fullStreamText += content;
158-
// Stream content chunks normally — tool call detection happens onDone
159-
const chunk = {
160-
id,
161-
object: "chat.completion.chunk",
162-
created: Math.floor(Date.now() / 1000),
163-
model,
164-
choices: [{ index: 0, delta: { content }, finish_reason: null }],
165-
};
166-
res.write(`data: ${JSON.stringify(chunk)}\n\n`);
158+
// When tools are present: accumulate silently, don't stream raw text chunks.
159+
// The response will be emitted as a proper tool_calls chunk in onDone.
160+
// When no tools: stream content immediately as normal.
161+
if (!hasTools) {
162+
const chunk = {
163+
id,
164+
object: "chat.completion.chunk",
165+
created: Math.floor(Date.now() / 1000),
166+
model,
167+
choices: [{ index: 0, delta: { content }, finish_reason: null }],
168+
};
169+
res.write(`data: ${JSON.stringify(chunk)}\n\n`);
170+
}
167171
}
168172
},
169173
onDone: (code, stderr) => {

0 commit comments

Comments
 (0)