// Stream a chat completion from WebLLM into the page.
// Assumes `engine` is an already-initialized WebLLM MLCEngine in an enclosing scope.
async function getWebLLMCompletionStream(
  systemPrompt,
  prompt,
  { temperature = 0.0 } = {}
) {
  // Clear any previous output before starting a new completion.
  const outputElement = document.getElementById("webllm-output");
  outputElement.textContent = "";

  // Nothing to do for an empty prompt.
  if (prompt.length === 0) {
    return;
  }

  // Build the OpenAI-style message list; the system prompt is optional.
  const messages = [];
  if (systemPrompt) {
    messages.push({ role: "system", content: systemPrompt });
  }
  messages.push({ role: "user", content: prompt });

  // Request a streaming completion and append each delta as it arrives.
  const stream = await engine.chat.completions.create({
    messages,
    temperature,
    stream: true
  });
  for await (const part of stream) {
    const text = part.choices[0]?.delta?.content || "";
    outputElement.textContent += text;
  }
}
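
// A minimal usage sketch, assuming this script runs as an ES module with
// @mlc-ai/web-llm installed. The model id below is only an example from
// WebLLM's prebuilt list; swap in whichever model you actually load. It shows
// one way the `engine` the function closes over might be created, and how the
// streaming helper could be invoked.
import { CreateMLCEngine } from "@mlc-ai/web-llm";

// Download and initialize the model, logging load progress to the console.
const engine = await CreateMLCEngine("Llama-3.1-8B-Instruct-q4f32_1-MLC", {
  initProgressCallback: (report) => console.log(report.text)
});

// Stream a short answer into the #webllm-output element.
await getWebLLMCompletionStream(
  "You are a concise assistant.",
  "Explain what streaming a completion means, in one sentence.",
  { temperature: 0.2 }
);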