add configurable limits to the buffer sizes

daniel31x13 2024-06-28 12:12:16 -04:00
parent 06234e42df
commit 576d50f467
7 changed files with 100 additions and 42 deletions


@ -24,10 +24,11 @@ IGNORE_URL_SIZE_LIMIT=
 ADMINISTRATOR=
 NEXT_PUBLIC_MAX_FILE_BUFFER=
+MONOLITH_MAX_BUFFER=
-MONOLITH_OPTIONS=
+MONOLITH_CUSTOM_OPTIONS=
 PDF_MAX_BUFFER=
 SCREENSHOT_MAX_BUFFER=
 READABILITY_MAX_BUFFER=
 PREVIEW_MAX_BUFFER=
 # AWS S3 Settings
 SPACES_KEY=
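
All of the new variables are read as megabyte limits, and each handler falls back to a hard-coded default when its variable is unset: 0.1 MB for link previews, 6 MB for Monolith bundles, 1 MB for Readability JSON, and 2 MB each for screenshots and PDFs. Note that MONOLITH_OPTIONS is renamed to MONOLITH_CUSTOM_OPTIONS, so existing deployments that set the old name need to update it. A filled-in sample using the same values as the in-code defaults might look like this (illustrative values only):

MONOLITH_MAX_BUFFER=6
MONOLITH_CUSTOM_OPTIONS=-j -F -s
PDF_MAX_BUFFER=2
SCREENSHOT_MAX_BUFFER=2
READABILITY_MAX_BUFFER=1
PREVIEW_MAX_BUFFER=0.1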


@ -14,17 +14,23 @@ const generatePreview = async (
         image?.resize(1280, Jimp.AUTO).quality(20);
         const processedBuffer = await image?.getBufferAsync(Jimp.MIME_JPEG);
-        createFile({
+        if (
+          Buffer.byteLength(processedBuffer) >
+          1024 * 1024 * Number(process.env.PREVIEW_MAX_BUFFER || 0.1)
+        )
+          return console.log("Error generating preview: Buffer size exceeded");
+        await createFile({
           data: processedBuffer,
           filePath: `archives/preview/${collectionId}/${linkId}.jpeg`,
-        }).then(() => {
-          return prisma.link.update({
+        });
+        await prisma.link.update({
           where: { id: linkId },
           data: {
             preview: `archives/preview/${collectionId}/${linkId}.jpeg`,
           },
         });
-        });
       }
     }).catch((err) => {
       console.error("Error processing the image:", err);


@ -36,14 +36,19 @@ const handleArchivePreview = async (
     console.log("No og:image found");
     await page
       .screenshot({ type: "jpeg", quality: 20 })
-      .then((screenshot) => {
-        return createFile({
+      .then(async (screenshot) => {
+        if (
+          Buffer.byteLength(screenshot) >
+          1024 * 1024 * Number(process.env.PREVIEW_MAX_BUFFER || 0.1)
+        )
+          return console.log("Error generating preview: Buffer size exceeded");
+        await createFile({
           data: screenshot,
           filePath: `archives/preview/${link.collectionId}/${link.id}.jpeg`,
         });
       })
       .then(() => {
-        return prisma.link.update({
+        await prisma.link.update({
           where: { id: link.id },
           data: {
             preview: `archives/preview/${link.collectionId}/${link.id}.jpeg`,
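
The same guard, again against PREVIEW_MAX_BUFFER, is applied to the fallback full-page screenshot that is taken when no og:image is available. Every check in this commit has the same shape: an environment variable in megabytes, a per-artifact default, and a Buffer.byteLength comparison. Purely as an illustration (this helper does not exist in the codebase; the commit repeats the check inline in each handler), the pattern could be captured once like so:

// Hypothetical helper, not part of the commit: true when `buffer` exceeds the
// limit configured by `envValue` (in MB), falling back to `defaultMb` when unset.
function exceedsBufferLimit(
  buffer: Buffer,
  envValue: string | undefined,
  defaultMb: number
): boolean {
  return Buffer.byteLength(buffer) > 1024 * 1024 * Number(envValue || defaultMb);
}

// Usage mirroring the guard above:
// if (exceedsBufferLimit(screenshot, process.env.PREVIEW_MAX_BUFFER, 0.1))
//   return console.log("Error generating preview: Buffer size exceeded");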


@ -9,7 +9,7 @@ const handleMonolith = async (link: Link, content: string) => {
   try {
     let html = execSync(
       `monolith - -I -b ${link.url} ${
-        process.env.MONOLITH_OPTIONS || "-j -F -s"
+        process.env.MONOLITH_CUSTOM_OPTIONS || "-j -F -s"
       } -o -`,
       {
         timeout: 120000,
@ -18,10 +18,14 @@ const handleMonolith = async (link: Link, content: string) => {
       }
     );
-    if (!html?.length) {
-      console.error("Error running MONOLITH: Empty buffer");
-      return;
-    }
+    if (!html?.length)
+      return console.error("Error archiving as Monolith: Empty buffer");
+    if (
+      Buffer.byteLength(html) >
+      1024 * 1024 * Number(process.env.MONOLITH_MAX_BUFFER || 6)
+    )
+      return console.error("Error archiving as Monolith: Buffer size exceeded");
     await createFile({
       data: html,
@ -35,7 +39,7 @@ const handleMonolith = async (link: Link, content: string) => {
       });
     });
   } catch (err) {
-    console.error("Error running MONOLITH:", err);
+    console.log("Error running MONOLITH:", err);
   }
 };
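
After this change the Monolith path validates its output twice before writing anything: once for an empty buffer and once against MONOLITH_MAX_BUFFER (default 6 MB), and the extra CLI flags now come from MONOLITH_CUSTOM_OPTIONS instead of MONOLITH_OPTIONS. A condensed sketch of the resulting flow; the monolith invocation and both checks follow the diff, while the function shape, the createFile stub, and the assumption that the page HTML is piped in through execSync's input option are illustrative:

import { execSync } from "child_process";

// Stand-in for the project's storage helper (sketch only).
declare function createFile(opts: { data: Buffer; filePath: string }): Promise<void>;

async function archiveWithMonolith(url: string, pageContent: string, outPath: string) {
  try {
    // "monolith -" reads the HTML from stdin, -I isolates the page, -b sets the
    // base URL, and "-o -" writes the self-contained bundle to stdout. The extra
    // flags default to "-j -F -s" (no JavaScript, no fonts, silent).
    const html = execSync(
      `monolith - -I -b ${url} ${
        process.env.MONOLITH_CUSTOM_OPTIONS || "-j -F -s"
      } -o -`,
      { timeout: 120000, input: pageContent }
    );

    if (!html?.length)
      return console.error("Error archiving as Monolith: Empty buffer");

    // Reject bundles larger than MONOLITH_MAX_BUFFER megabytes (default 6).
    if (
      Buffer.byteLength(html) >
      1024 * 1024 * Number(process.env.MONOLITH_MAX_BUFFER || 6)
    )
      return console.error("Error archiving as Monolith: Buffer size exceeded");

    await createFile({ data: html, filePath: outPath });
  } catch (err) {
    console.log("Error running MONOLITH:", err);
  }
}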


@ -23,8 +23,18 @@ const handleReadablility = async (content: string, link: Link) => {
     })
   )?.collectionId;
+  const data = JSON.stringify(article);
+  if (
+    Buffer.byteLength(data, "utf8") >
+    1024 * 1024 * Number(process.env.READABILITY_MAX_BUFFER || 1)
+  )
+    return console.error(
+      "Error archiving as Readability: Buffer size exceeded"
+    );
   await createFile({
-    data: JSON.stringify(article),
+    data,
     filePath: `archives/${collectionId}/${link.id}_readability.json`,
   });
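
Besides the new limit, this hunk also serializes the article once and reuses the string instead of calling JSON.stringify twice. A short sketch of the guard (article is whatever the Readability parser produced; createFile stands in for the project's storage helper):

// Stand-in for the project's storage helper (sketch only).
declare function createFile(opts: { data: string; filePath: string }): Promise<void>;

async function saveReadability(article: object, filePath: string) {
  const data = JSON.stringify(article);

  // The payload is text, so measure it as UTF-8 bytes rather than characters.
  if (
    Buffer.byteLength(data, "utf8") >
    1024 * 1024 * Number(process.env.READABILITY_MAX_BUFFER || 1)
  )
    return console.error("Error archiving as Readability: Buffer size exceeded");

  await createFile({ data, filePath });
}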


@ -24,11 +24,29 @@ const handleScreenshotAndPdf = async (
   if (user.archiveAsScreenshot && !link.image?.startsWith("archive")) {
     processingPromises.push(
-      page.screenshot({ fullPage: true, type: "jpeg" }).then((screenshot) => {
-        return createFile({
+      page
+        .screenshot({ fullPage: true, type: "jpeg" })
+        .then(async (screenshot) => {
+          if (
+            Buffer.byteLength(screenshot) >
+            1024 * 1024 * Number(process.env.SCREENSHOT_MAX_BUFFER || 2)
+          )
+            return console.log(
+              "Error archiving as Screenshot: Buffer size exceeded"
+            );
+          await createFile({
            data: screenshot,
            filePath: `archives/${linkExists.collectionId}/${link.id}.jpeg`,
          });
+          await prisma.link.update({
+            where: { id: link.id },
+            data: {
+              image: user.archiveAsScreenshot
+                ? `archives/${linkExists.collectionId}/${link.id}.jpeg`
+                : undefined,
+            },
+          });
         })
     );
   }
@ -47,26 +65,32 @@ const handleScreenshotAndPdf = async (
           printBackground: true,
           margin: margins,
         })
-        .then((pdf) => {
-          return createFile({
+        .then(async (pdf) => {
+          if (
+            Buffer.byteLength(pdf) >
+            1024 * 1024 * Number(process.env.PDF_MAX_BUFFER || 2)
+          )
+            return console.log(
+              "Error archiving as PDF: Buffer size exceeded"
+            );
+          await createFile({
            data: pdf,
            filePath: `archives/${linkExists.collectionId}/${link.id}.pdf`,
          });
-        })
-    );
-  }
-  await Promise.allSettled(processingPromises);
          await prisma.link.update({
            where: { id: link.id },
            data: {
-              image: user.archiveAsScreenshot
-                ? `archives/${linkExists.collectionId}/${link.id}.jpeg`
-                : undefined,
              pdf: user.archiveAsPDF
                ? `archives/${linkExists.collectionId}/${link.id}.pdf`
                : undefined,
            },
          });
+        })
+    );
+  }
+  await Promise.allSettled(processingPromises);
 }
};
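
Besides adding SCREENSHOT_MAX_BUFFER and PDF_MAX_BUFFER (both defaulting to 2 MB), this hunk moves the prisma.link.update call out of the function's shared tail and into each artifact's own .then, so the link record only points at files that were actually written. The PDF branch, condensed into a sketch (the screenshot branch is symmetric; the page type, helper stubs, and function name are illustrative, and the update payload is simplified to just the stored path):

// Minimal shape of the browser page the handler drives (Puppeteer and Playwright both fit).
type PdfCapablePage = {
  pdf(opts: {
    printBackground: boolean;
    margin: { top: string; bottom: string };
  }): Promise<Buffer>;
};

// Stand-ins for the project's storage helper and Prisma client (sketch only).
declare function createFile(opts: { data: Buffer; filePath: string }): Promise<void>;
declare const prisma: { link: { update(args: unknown): Promise<unknown> } };

function archiveAsPdf(
  page: PdfCapablePage,
  linkId: number,
  collectionId: number,
  margins: { top: string; bottom: string }
) {
  return page
    .pdf({ printBackground: true, margin: margins })
    .then(async (pdf) => {
      // Drop oversized PDFs instead of uploading them (PDF_MAX_BUFFER is in MB).
      if (
        Buffer.byteLength(pdf) >
        1024 * 1024 * Number(process.env.PDF_MAX_BUFFER || 2)
      )
        return console.log("Error archiving as PDF: Buffer size exceeded");

      await createFile({
        data: pdf,
        filePath: `archives/${collectionId}/${linkId}.pdf`,
      });

      // Record the stored path only after the upload succeeded.
      await prisma.link.update({
        where: { id: linkId },
        data: { pdf: `archives/${collectionId}/${linkId}.pdf` },
      });
    });
}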


@ -120,7 +120,7 @@ export default async function Index(req: NextApiRequest, res: NextApiResponse) {
   const form = formidable({
     maxFields: 1,
     maxFiles: 1,
-    maxFileSize: NEXT_PUBLIC_MAX_FILE_BUFFER * 1048576,
+    maxFileSize: NEXT_PUBLIC_MAX_FILE_BUFFER * 1024 * 1024,
   });
   form.parse(req, async (err, fields, files) => {
@ -138,12 +138,20 @@ export default async function Index(req: NextApiRequest, res: NextApiResponse) {
         !allowedMIMETypes.includes(files.file[0].mimetype || "")
       ) {
         // Handle parsing error
-        return res.status(500).json({
+        return res.status(400).json({
           response: `Sorry, we couldn't process your file. Please ensure it's a PDF, PNG, or JPG format and doesn't exceed ${NEXT_PUBLIC_MAX_FILE_BUFFER}MB.`,
         });
       } else {
         const fileBuffer = fs.readFileSync(files.file[0].filepath);
+        if (
+          Buffer.byteLength(fileBuffer) >
+          1024 * 1024 * Number(NEXT_PUBLIC_MAX_FILE_BUFFER)
+        )
+          return res.status(400).json({
+            response: `Sorry, we couldn't process your file. Please ensure it's a PDF, PNG, or JPG format and doesn't exceed ${NEXT_PUBLIC_MAX_FILE_BUFFER}MB.`,
+          });
         const linkStillExists = await prisma.link.findUnique({
           where: { id: linkId },
         });
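
On the upload endpoint the same megabyte convention is applied twice: formidable rejects oversized uploads at parse time through maxFileSize (now written as 1024 * 1024 for consistency), and the handler re-measures the bytes it actually read before accepting them, answering with 400 rather than 500 for client-side problems. A reduced sketch of that validation step, assuming formidable v3 and a Next.js API route, with the MIME-type checks and storage logic from the real handler trimmed away (the 10 MB fallback is chosen for the sketch, not taken from the project):

import fs from "fs";
import formidable from "formidable";
import type { NextApiRequest, NextApiResponse } from "next";

// Fallback chosen for this sketch; the project defines NEXT_PUBLIC_MAX_FILE_BUFFER elsewhere.
const NEXT_PUBLIC_MAX_FILE_BUFFER = Number(process.env.NEXT_PUBLIC_MAX_FILE_BUFFER || 10);

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  const form = formidable({
    maxFields: 1,
    maxFiles: 1,
    // First line of defense: formidable aborts parsing past this many bytes.
    maxFileSize: NEXT_PUBLIC_MAX_FILE_BUFFER * 1024 * 1024,
  });

  form.parse(req, async (err, _fields, files) => {
    const file = files.file?.[0];
    if (err || !file)
      return res.status(400).json({ response: "Sorry, we couldn't process your file." });

    const fileBuffer = fs.readFileSync(file.filepath);

    // Second line of defense: re-measure what formidable wrote to disk.
    if (Buffer.byteLength(fileBuffer) > 1024 * 1024 * NEXT_PUBLIC_MAX_FILE_BUFFER)
      return res.status(400).json({
        response: `Sorry, your file doesn't fit the ${NEXT_PUBLIC_MAX_FILE_BUFFER}MB limit.`,
      });

    // ...MIME-type checks, storage, and the prisma.link lookup would follow here.
    return res.status(200).json({ response: "File accepted." });
  });
}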