That's rather simple. You split up the

That's rather simple. You split the file into chunks on the client and send them one by one.
1 Reply
kev-ac
kev-acOP17mo ago
const handleUpload = async () => {
setUploading(true);

// Process upload queue
// @ts-ignore
for(const [index, elem] of uploadQueue.entries()) {
await new Promise(async (res, rej) => {
const file = elem.file;
const chunkSize = 1024 * 1024 * 5;
const totalChunks = Math.ceil(file.size / chunkSize);

// Create upload session
const uploadSessionReq = await fetch("/api/upload/init", {
method: "POST",
body: JSON.stringify({
filename: file.name,
totalChunks: totalChunks,
}),
headers: {
"Content-Type": "application/json",
}
});
const uploadSession = await uploadSessionReq.json();

// Part 2 follows
// Upload every queued file to the backend in fixed-size chunks.
// Flow per file: open an upload session, POST each chunk with range/checksum
// headers, and report progress via handleFileStateChange. Failures mark the
// item as "error" instead of escaping as an unhandled rejection (the original
// wrapped the work in `new Promise(async ...)`, which swallows rejections).
const handleUpload = async () => {
  setUploading(true);

  // Process the queue sequentially so progress updates arrive in order.
  // @ts-ignore
  for (const [index, elem] of uploadQueue.entries()) {
    try {
      const file = elem.file;
      const chunkSize = 1024 * 1024 * 5; // 5 MiB per chunk
      const totalChunks = Math.ceil(file.size / chunkSize);

      // Create an upload session; the backend returns the id used to
      // address each chunk below.
      const uploadSessionReq = await fetch("/api/upload/init", {
        method: "POST",
        body: JSON.stringify({
          filename: file.name,
          totalChunks: totalChunks,
        }),
        headers: {
          "Content-Type": "application/json",
        },
      });
      if (!uploadSessionReq.ok) {
        throw new Error(`Upload init failed with status ${uploadSessionReq.status}`);
      }
      const uploadSession = await uploadSessionReq.json();

      // Upload each chunk in order.
      for (let currentChunk = 0; currentChunk < totalChunks; currentChunk++) {
        const start = currentChunk * chunkSize;
        // slice() end is exclusive, so `end` may equal file.size on the last chunk.
        const end = Math.min(start + chunkSize, file.size);
        const chunk = file.slice(start, end);

        const checksum = await createChecksum(chunk);

        // Send the chunk; fail the whole file on any non-2xx response.
        const chunkRes = await fetch(`/api/upload/${uploadSession.id}/chunk`, {
          method: "POST",
          body: chunk,
          headers: {
            "Content-Type": "application/octet-stream",
            "x-upload-chunk": String(currentChunk),
            "x-upload-range": `${start}-${end}/${file.size}`,
            "x-upload-checksum": checksum,
          },
        });
        if (!chunkRes.ok) {
          throw new Error(`Chunk ${currentChunk} failed with status ${chunkRes.status}`);
        }

        console.log(`Uploaded chunk ${currentChunk + 1} of ${totalChunks}.`);
        elem.progress = ((currentChunk + 1) / totalChunks) * 100;
        handleFileStateChange(index, elem);
      }

      elem.state = "success";
      handleFileStateChange(index, elem);
      console.log(`Uploaded file ${elem.file.name}`);
    } catch (err) {
      // NOTE(review): the original had no error path at all (acknowledged by
      // the author). Mark the item failed and continue with the next file.
      elem.state = "error";
      handleFileStateChange(index, elem);
      console.error(`Upload failed for ${elem.file.name}`, err);
    }
  }

  // Original never reset the uploading flag; presumably the UI expects this —
  // TODO confirm against the component using `uploading`.
  setUploading(false);
};
// Iterate over each chunk
for (let currentChunk = 0; currentChunk < totalChunks; currentChunk++) {
const start = currentChunk * chunkSize;
const end = Math.min(start + chunkSize, file.size);

const chunk = file.slice(start, end);

const checksum = await createChecksum(chunk);

// Send the chunk
await fetch(`/api/upload/${uploadSession.id}/chunk`, {
method: "POST",
body: chunk,
headers: {
"Content-Type": "application/octet-stream",
"x-upload-chunk": currentChunk?.toString(),
"x-upload-range": `${start}-${end}/${file.size}`,
"x-upload-checksum": checksum
},
});
console.log(`Uploaded chunk ${currentChunk+1} of ${totalChunks}.`);
elem.progress = ((currentChunk + 1) / totalChunks) * 100;
handleFileStateChange(index, elem);
}
elem.state = "success";
handleFileStateChange(index, elem);
res(uploadSession);
});
console.log(`Uploaded file ${elem.file.name}`);
}
};
That's a code snippet from one project. Everyone: please judge me 😁 Upload flow: - Iterate over the queue of to-be-uploaded files (there could be multiple) - Figure out how many chunks need to be uploaded - Request an upload session from our backend - Iterate over the chunk count and upload each chunk (and create checksums in this case, for good measure) What this code is missing: any form of error handling / retry logic for when a chunk upload fails for some reason. To say it in Cloudflare's terms: it will be added soon™. Hope this can help you, @w8r.
Want results from more Discord servers?
Add your server