Send large data (~5MB) to trpc backend

I need to build a feature that reads sqlite files in memory. My idea is to upload the file, parse it and pass it to my trpc backend, which will create an in-memory sqlite db to perform some queries. However, it seems passing large payloads takes ages (never finished) even locally; I'm not sure if trpc is equipped to handle huge payloads. Please share if you have a solution or even a better way to do it. NOTE: I can't have a single shared db - each user can upload their own sqlite db file. TLDR: how to upload a sqlite dump & perform queries on it
7 Replies
cje
cje2y ago
either use a regular next.js api endpoint or reconsider your architecture (this is what id advise)
Shoodey
ShoodeyOP2y ago
@cje will try with a regular api endpoint. What do you mean by archi? is it the tech stack or the actual approach I thought of? My main concern is that I cant instantiate sqlite in mem db on client side, hence the need to try to do it on backend (in theory?)
cje
cje2y ago
what are you doing that requires you to send an entire sqlite database back and forth? What happens if the user closes the tab or loses their connection before they can send it back? It seems like a recipe for disaster
Shoodey
ShoodeyOP2y ago
It doesnt really matter if the data is volatile - Im building a save editor for a game - users upload their save, I show them some data, they can update some data and I overwrite the sql, then send them back the save file - data is meant to be volatile save file is a binary file that also contains the sqlite db dump (i built this as a CLI, but trying the web approach for non-techy users :D) update: yeah even with api endpoint, request is stuck at pending :/ I guess the next sensible approach is to try a file upload rather than send large payload through api - i will see where that leads, might be faster to upload file then just read that, and somehow delete on session close or periodically
Shoodey
ShoodeyOP2y ago
ok quick update again: this fixed the delay https://github.com/vercel/next.js/discussions/12517#discussioncomment-2929922 - I also had to add responseLimit: false to the config - huge W
GitHub
Get Body if bodyParser is Disabled · Discussion #12517 · vercel/nex...
Hi guys, How to get request body if the bodyParser is Disabled ?
Amit
Amit2y ago
Hmm, how about not uploading anything and doing everything at client side(in the browser). I guess there are some libraries for this. A quick search leads to https://github.com/sql-js/sql.js/
GitHub
GitHub - sql-js/sql.js: A javascript library to run SQLite on the web.
A javascript library to run SQLite on the web. . Contribute to sql-js/sql.js development by creating an account on GitHub.
Lopen
Lopen2y ago
I recommend using a regular Next.js endpoint. It will save you a lot of stress. I will share my upload code snippet with you
import fs from "fs";
import { randomUUID } from "crypto";
import { type NextApiRequest, NextApiResponse } from "next";
import formidable from "formidable";
import { Upload } from "@aws-sdk/lib-storage";
import { S3Client } from "@aws-sdk/client-s3";
import { env } from "@/env/server.mjs";

// Signing credentials for DigitalOcean Spaces, read from server-side env.
const spacesCredentials = {
  accessKeyId: env.DO_ACCESS_KEY,
  secretAccessKey: env.DO_SECRET_KEY,
};

// Spaces is S3-compatible, so a stock S3Client works once pointed at the
// DO endpoint and region.
const s3 = new S3Client({
  region: env.DO_REGION,
  endpoint: env.DO_ENDPOINT,
  credentials: spacesCredentials,
});

/**
 * Next.js API route config: disable the built-in body parser so
 * formidable can consume the raw multipart stream itself.
 */
export const config = {
  api: { bodyParser: false },
};

export default async function handler(
req: NextApiRequest,
res: NextApiResponse
) {
const form = new formidable.IncomingForm({
multiples: true,
});

form.parse(req, async (err, fields, files) => {
if (!files.file) {
return res.status(400).json({ message: "No file uploaded" });
}

if (err) {
return res.status(500).json({ message: "Something went wrong" });
}

try {
const fileUrl: any = [];
for (const file of files.file as any) {
const fileName = `${randomUUID()}-${file.originalFilename}`;
const parallelUploads3 = new Upload({
client: s3,
queueSize: 4,
partSize: 1024 * 1024 * 5,
leavePartsOnError: false,
params: {
Bucket: env.DO_BUCKET_NAME,
Key: fileName,
ACL: "public-read",
Body: fs.createReadStream(file.filepath),
},
});
const { Location }: never | any = await parallelUploads3.done();
fileUrl.push(Location);
}
return res.status(200).json(fileUrl);
} catch (error) {
console.log(error);
return res.status(500).json({ message: "Something went wrong" });
}
});
}
import fs from "fs";
import { randomUUID } from "crypto";
import { type NextApiRequest, NextApiResponse } from "next";
import formidable from "formidable";
import { Upload } from "@aws-sdk/lib-storage";
import { S3Client } from "@aws-sdk/client-s3";
import { env } from "@/env/server.mjs";

// DigitalOcean Spaces speaks the S3 protocol: reuse the AWS S3Client,
// pointed at the Spaces endpoint and signed with the DO key pair.
const s3 = new S3Client({
  endpoint: env.DO_ENDPOINT,
  region: env.DO_REGION,
  credentials: {
    accessKeyId: env.DO_ACCESS_KEY,
    secretAccessKey: env.DO_SECRET_KEY,
  },
});

/**
 * Route segment config for Next.js: turn off the default body parser so
 * the raw multipart stream reaches formidable untouched.
 */
export const config = {
  api: { bodyParser: false },
};

export default async function handler(
req: NextApiRequest,
res: NextApiResponse
) {
const form = new formidable.IncomingForm({
multiples: true,
});

form.parse(req, async (err, fields, files) => {
if (!files.file) {
return res.status(400).json({ message: "No file uploaded" });
}

if (err) {
return res.status(500).json({ message: "Something went wrong" });
}

try {
const fileUrl: any = [];
for (const file of files.file as any) {
const fileName = `${randomUUID()}-${file.originalFilename}`;
const parallelUploads3 = new Upload({
client: s3,
queueSize: 4,
partSize: 1024 * 1024 * 5,
leavePartsOnError: false,
params: {
Bucket: env.DO_BUCKET_NAME,
Key: fileName,
ACL: "public-read",
Body: fs.createReadStream(file.filepath),
},
});
const { Location }: never | any = await parallelUploads3.done();
fileUrl.push(Location);
}
return res.status(200).json(fileUrl);
} catch (error) {
console.log(error);
return res.status(500).json({ message: "Something went wrong" });
}
});
}
Backend
import { useState } from 'react'
import axios from 'axios'

/**
 * Minimal upload form: collects one or more files from a file input and
 * POSTs them as multipart/form-data to /api/uploadImage.
 */
export default function Example() {
  const [files, setFiles] = useState<FileList | null>(null)
  const [loading, setLoading] = useState<boolean>(false)

  const handleSubmit = async (e: React.FormEvent<HTMLFormElement>) => {
    e.preventDefault()
    // Guard against both "never selected" and "selection cleared".
    if (!files || files.length === 0) return
    setLoading(true)

    const formData = new FormData()
    // Append every selected file under the same "file" field so the
    // backend receives them as an array. (For a single file you could
    // simply do formData.append('file', files[0]).)
    Array.from(files).forEach((f) => {
      formData.append('file', f)
    })

    try {
      // No explicit Content-Type header: axios derives the correct
      // multipart boundary from the FormData body automatically; setting
      // 'multipart/form-data' by hand would omit the boundary and break
      // the upload.
      const { data } = await axios.post('/api/uploadImage', formData)
      console.log(data)
    } catch (error) {
      console.error(error)
    } finally {
      setLoading(false)
    }
  }

  return (
    <div>
      <form onSubmit={handleSubmit}>
        <h1>file upload</h1>
        <input
          multiple
          type="file"
          onChange={(e) => {
            if (e.target.files) {
              setFiles(e.target.files)
            }
          }}
        />
        <button disabled={loading} type="submit">submit</button>
      </form>
    </div>
  )
}
import { useState } from 'react'
import axios from 'axios'

/**
 * File-upload form component: gathers the selection from an
 * <input type="file" multiple> and submits it as multipart/form-data
 * to /api/uploadImage.
 */
export default function Example() {
  const [files, setFiles] = useState<FileList | null>(null)
  const [loading, setLoading] = useState<boolean>(false)

  const handleSubmit = async (e: React.FormEvent<HTMLFormElement>) => {
    e.preventDefault()
    // Nothing to send when no file was chosen or the selection is empty.
    if (!files || files.length === 0) return
    setLoading(true)

    const formData = new FormData()
    // All files share the "file" field name so the server sees an array.
    // (Single-file variant: formData.append('file', files[0]).)
    Array.from(files).forEach((f) => {
      formData.append('file', f)
    })

    try {
      // Deliberately no manual Content-Type header: axios generates the
      // multipart boundary from the FormData body itself; hard-coding
      // 'multipart/form-data' would drop the boundary parameter.
      const { data } = await axios.post('/api/uploadImage', formData)
      console.log(data)
    } catch (error) {
      console.error(error)
    } finally {
      setLoading(false)
    }
  }

  return (
    <div>
      <form onSubmit={handleSubmit}>
        <h1>file upload</h1>
        <input
          multiple
          type="file"
          onChange={(e) => {
            if (e.target.files) {
              setFiles(e.target.files)
            }
          }}
        />
        <button disabled={loading} type="submit">submit</button>
      </form>
    </div>
  )
}
Front-end
Want results from more Discord servers?
Add your server