swiecki
Cloudflare Developers
Created by someguy on 10/15/2023 in #general-help
Next.js/Vercel AI/Streaming Issue
@anurag here's what worked for me
return new Response(readable, {
  headers: {
    "Access-Control-Allow-Origin": "*",
    "Access-Control-Allow-Methods": "GET, POST",
    "Access-Control-Allow-Headers": "accept, content-type",
    "Content-Type": "text/event-stream",
    "Connection": "keep-alive",
    "Cache-Control": "no-cache",
    "X-Accel-Buffering": "no",
    "Content-Encoding": "none",
  },
});
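For anyone copying this: a minimal sketch of the surrounding handler, assuming readable is the readable side of a TransformStream written to in the background (the waitUntil body here is illustrative, not from the original message):

export default {
  async fetch(request, env, ctx) {
    const { readable, writable } = new TransformStream();
    const writer = writable.getWriter();
    const encoder = new TextEncoder();

    // Write in the background and return the response immediately;
    // awaiting the writes here before returning would defeat the streaming.
    ctx.waitUntil((async () => {
      await writer.write(encoder.encode("data: hello\n\n"));
      await writer.close();
    })());

    return new Response(readable, {
      // Plus the CORS and anti-buffering headers from the snippet above.
      headers: { "Content-Type": "text/event-stream" },
    });
  },
};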
53 replies
Cloudflare Developers
Created by swiecki on 11/1/2023 in #general-help
streaming responses being buffered
@kian thank you for your reply. I went down a deep rabbit hole. I realized curl needed the -N param; once I added that, it worked. Then I started with plain JS, and it worked. Then a clean Next.js app, and it worked. Finally I figured out I had a middleware issue in my Next.js app that was causing my fetch requests to buffer. Thanks!
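For anyone landing here later: curl buffers output by default, so curl -N (--no-buffer) is needed to see chunks as they arrive. A plain-JS check along these lines (the URL is a placeholder) shows the same thing in the browser:

async function readStream() {
  const response = await fetch("https://example.com/stream"); // placeholder URL
  const reader = response.body.getReader();
  const decoder = new TextDecoder();
  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    // Each chunk should log as soon as the worker writes it,
    // not all at once when the response finishes.
    console.log(decoder.decode(value, { stream: true }));
  }
}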
7 replies
Cloudflare Developers
Created by swiecki on 11/1/2023 in #general-help
streaming responses being buffered
I'm uncertain if there's a configuration change I need to make
7 replies
Cloudflare Developers
Created by swiecki on 11/1/2023 in #general-help
streaming responses being buffered
Here is example worker code that reproduces the issue. I expect {"status":"processing"} and then {"status":"success","data":{"fileText":"Page1 Page2","numPages":2}}, but instead I get nothing, and then {"status":"processing"}{"status":"success","data":{"fileText":"Page1 Page2","numPages":2}} arrives all at once.
export default {
  async fetch(request, env, ctx) {
    console.log("start");

    const { readable, writable } = new TransformStream();
    const writer = writable.getWriter();
    const encoder = new TextEncoder();

    // Write a heartbeat every 5 seconds while the simulated work runs.
    const heartbeatInterval = setInterval(async () => {
      console.log("writing status");
      const processingMessage = JSON.stringify({ status: "processing" });
      await writer.write(encoder.encode(processingMessage));
    }, 5000);

    setTimeout(async () => {
      clearInterval(heartbeatInterval); // Simulate fetch completion
      console.log("got response");

      // Simulated response data
      const simulatedData = [
        { text: "Page1", metadata: { page_number: 1 } },
        { text: "Page2", metadata: { page_number: 2 } },
      ];

      const fileText = simulatedData.map((chunk) => chunk.text).join(" ");
      const numPages = simulatedData[simulatedData.length - 1].metadata.page_number;

      console.log("writing success data");
      await writer.write(
        encoder.encode(
          JSON.stringify({
            status: "success",
            data: {
              fileText,
              numPages,
            },
          })
        )
      );
      await writer.close();
    }, 16000);

    return new Response(readable, {
      headers: {
        "Transfer-Encoding": "chunked",
        "Content-Type": "text/event-stream",
        "Cache-Control": "no-cache",
        "Access-Control-Allow-Origin": "*",
        "Access-Control-Allow-Methods": "GET, POST",
        "Access-Control-Allow-Headers": "accept, content-type",
      },
    });
  },
};
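One observation on the repro, separate from the buffering question: the response declares Content-Type: text/event-stream, but the writes are bare JSON. A client consuming this with EventSource expects SSE framing, along these lines (sseEncode is a hypothetical helper, not from the thread):

// Hypothetical helper: wraps a JSON payload in SSE "data:" framing.
function sseEncode(obj) {
  return new TextEncoder().encode(`data: ${JSON.stringify(obj)}\n\n`);
}
// e.g. await writer.write(sseEncode({ status: "processing" }));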
7 replies
Cloudflare Developers
Created by someguy on 10/15/2023 in #general-help
Next.js/Vercel AI/Streaming Issue
Hi, I'm not using Vercel AI, but these recent messages came up when I searched for streaming response issues. I'm unable to get a Cloudflare Worker to stream any responses using either a ReadableStream or a TransformStream; instead the response is being buffered and sent as a single chunk once it's complete. Are there specific configuration requirements? I've tried setting the Transfer-Encoding header to chunked and Cache-Control to no-cache, but it's still an issue. Is there a way to upgrade my Workers or something so they can respond with streaming responses? Thanks
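For reference, a minimal ReadableStream worker of the kind described here, as a sketch (chunk contents and timing are illustrative):

export default {
  async fetch(request, env, ctx) {
    const encoder = new TextEncoder();
    const stream = new ReadableStream({
      async start(controller) {
        // Enqueue a few chunks one second apart; a non-buffering client
        // should see each one as it is written, not all at the end.
        for (let i = 1; i <= 3; i++) {
          controller.enqueue(encoder.encode(`chunk ${i}\n`));
          await new Promise((resolve) => setTimeout(resolve, 1000));
        }
        controller.close();
      },
    });
    return new Response(stream, {
      headers: { "Content-Type": "text/plain; charset=utf-8" },
    });
  },
};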
53 replies