Hono
Created by casey on 9/15/2024 in #help
streamText only returning one chunk
import OpenAI from "openai"
import { streamText } from "hono/streaming"

chatApi.get("/test", async (c) => {
  const openai = new OpenAI({
    apiKey: c.env.OPEN_AI_API_KEY,
  })

  return streamText(c, async (stream) => {
    const result = await openai.chat.completions.create({
      model: "gpt-4o-mini-2024-07-18",
      messages: [
        {
          role: "system",
          content: "You are a helpful assistant",
        },
        {
          role: "user",
          content: "Tell me a story",
        },
      ],
      stream: true,
    })

    // Write each streamed token to the response as it arrives
    for await (const chunk of result) {
      console.log(chunk.choices[0]?.delta?.content)
      await stream.write(chunk.choices[0]?.delta?.content ?? "")
    }
  })
})
When I log the returned chunks from OpenAI, it is clearly streaming token by token. However, when I call the endpoint from Postman, I only get the text back in one massive chunk at the end. Any idea what I'm doing wrong?
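One way to narrow this down is to rule out the client: instead of Postman (which may buffer the whole body before displaying it), consume the response with a raw fetch reader and log chunks as they arrive. A minimal sketch, assuming the Hono app is reachable at http://localhost:8787/test (the URL is just a placeholder):

async function main() {
  const res = await fetch("http://localhost:8787/test")
  if (!res.body) throw new Error("Response has no body")

  const reader = res.body.getReader()
  const decoder = new TextDecoder()

  while (true) {
    const { done, value } = await reader.read()
    if (done) break
    // If the server is actually streaming, each chunk should print
    // as soon as it is written, not all at once at the end.
    console.log("chunk:", decoder.decode(value, { stream: true }))
  }
}

main().catch(console.error)

If chunks print incrementally here (Node 18+ or a browser), the server side is streaming fine and the buffering is happening in the client or an intermediary.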