commit aff91c688a

.env.sample
@@ -1,4 +1,12 @@
-NEXTAUTH_SECRET=very_sensitive_secret2
+NEXTAUTH_SECRET=very_sensitive_secret
 DATABASE_URL=postgresql://user:password@localhost:5432/linkwarden
 NEXTAUTH_URL=http://localhost:3000
-PAGINATION_TAKE_COUNT=20
+PAGINATION_TAKE_COUNT=20
+STORAGE_FOLDER=data
+
+# Linkwarden Cloud specific configs (Ignore - Not applicable for self-hosted version)
+IS_CLOUD_INSTANCE=
+SPACES_KEY=
+SPACES_SECRET=
+SPACES_ENDPOINT=
+SPACES_REGION=

@@ -0,0 +1,76 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to make participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, sex characteristics, gender identity and expression,
+level of experience, education, socio-economic status, nationality, personal
+appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+- Using welcoming and inclusive language
+- Being respectful of differing viewpoints and experiences
+- Gracefully accepting constructive criticism
+- Focusing on what is best for the community
+- Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+- The use of sexualized language or imagery and unwelcome sexual attention or
+  advances
+- Trolling, insulting/derogatory comments, and personal or political attacks
+- Public or private harassment
+- Publishing others' private information, such as a physical or electronic
+  address, without explicit permission
+- Other conduct which could reasonably be considered inappropriate in a
+  professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or
+reject comments, commits, code, wiki edits, issues, and other contributions
+that are not aligned to this Code of Conduct, or to ban temporarily or
+permanently any contributor for other behaviors that they deem inappropriate,
+threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies within all project spaces, and it also applies when
+an individual is representing the project or its community in public spaces.
+Examples of representing a project or community include using an official
+project e-mail address, posting via an official social media account, or acting
+as an appointed representative at an online or offline event. Representation of
+a project may be further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be
+reported by contacting the project team at [INSERT EMAIL ADDRESS]. All
+complaints will be reviewed and investigated and will result in a response that
+is deemed necessary and appropriate to the circumstances. The project team is
+obligated to maintain confidentiality with regard to the reporter of an incident.
+Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good
+faith may face temporary or permanent repercussions as determined by other
+members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
+available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
+
+[homepage]: https://www.contributor-covenant.org
+
+For answers to common questions about this code of conduct, see
+https://www.contributor-covenant.org/faq

@@ -0,0 +1,17 @@
+# Security Policy
+
+## Supported Versions
+
+| Version | Supported |
+| ------- | --------- |
+| 1.x.x | ✅ |
+
+## Reporting a Vulnerability
+
+First off, we really appreciate the time you spent!
+
+If you found a vulnerability, these are the ways you can reach us:
+
+Email: [hello@linkwarden.app](mailto:hello@daniel31x13.io)
+
+Or you can directly reach me via Twitter: [@daniel31x13](https://twitter.com/Daniel31X13).

@@ -36,3 +36,9 @@ next-env.d.ts
 
 # generated files and folders
 /data
+
+# tests
+/tests
+/test-results/
+/playwright-report/
+/playwright/.cache/

@@ -137,8 +137,7 @@ export default function LinkDetails({ link }: Props) {
 try {
 const color = colorThief.getPalette(
 e.target as HTMLImageElement,
-4,
-20
+4
 );
 
 setColorPalette(color);

@@ -21,7 +21,7 @@ export default function SortDropdown({
 const target = e.target as HTMLInputElement;
 if (target.id !== "sort-dropdown") toggleSortDropdown();
 }}
-className="absolute top-8 right-0 border border-sky-100 shadow-md bg-gray-50 rounded-md p-2 z-10 w-48"
+className="absolute top-8 right-0 border border-sky-100 shadow-md bg-gray-50 rounded-md p-2 z-20 w-48"
 >
 <p className="mb-2 text-sky-900 text-center font-semibold">Sort by</p>
 <div className="flex flex-col gap-2">

@@ -1,46 +1,47 @@
-import { Page } from "puppeteer";
+import { Page, chromium, devices } from "playwright";
 import { prisma } from "@/lib/api/db";
-import puppeteer from "puppeteer-extra";
-import AdblockerPlugin from "puppeteer-extra-plugin-adblocker";
-import StealthPlugin from "puppeteer-extra-plugin-stealth";
+import createFile from "@/lib/api/storage/createFile";
 
 export default async function archive(
 url: string,
 collectionId: number,
 linkId: number
 ) {
-const archivePath = `data/archives/${collectionId}/${linkId}`;
-
-const browser = await puppeteer.launch();
+const browser = await chromium.launch();
+const context = await browser.newContext(devices["Desktop Chrome"]);
+const page = await context.newPage();
 
 try {
-puppeteer.use(AdblockerPlugin()).use(StealthPlugin());
-
-const page = await browser.newPage();
-
 await page.goto(url, { waitUntil: "domcontentloaded", timeout: 300000 });
 
-await page.setViewport({ width: 1080, height: 1024 });
-
 await autoScroll(page);
 
-const linkExists = await prisma.link.findFirst({
+const linkExists = await prisma.link.findUnique({
 where: {
 id: linkId,
 },
 });
 
 if (linkExists) {
-await Promise.all([
-page.pdf({
-path: archivePath + ".pdf",
-width: "1366px",
-height: "1931px",
-printBackground: true,
-margin: { top: "15px", bottom: "15px" },
-}),
-page.screenshot({ fullPage: true, path: archivePath + ".png" }),
-]);
+const pdf = await page.pdf({
+width: "1366px",
+height: "1931px",
+printBackground: true,
+margin: { top: "15px", bottom: "15px" },
+});
+const screenshot = await page.screenshot({
+fullPage: true,
+});
+
+createFile({
+data: screenshot,
+filePath: `archives/${collectionId}/${linkId}.png`,
+});
+
+createFile({
+data: pdf,
+filePath: `archives/${collectionId}/${linkId}.pdf`,
+});
 }
 
 await browser.close();

@@ -1,7 +1,7 @@
 import { prisma } from "@/lib/api/db";
 import getPermission from "@/lib/api/getPermission";
 import { Collection, UsersAndCollections } from "@prisma/client";
-import fs from "fs";
+import removeFolder from "@/lib/api/storage/removeFolder";
 
 export default async function deleteCollection(
 collection: { id: number },

@@ -56,13 +56,7 @@ export default async function deleteCollection(
 },
 });
 
-try {
-fs.rmdirSync(`data/archives/${collectionId}`, { recursive: true });
-} catch (error) {
-console.log(
-"Collection's archive directory wasn't deleted most likely because it didn't exist..."
-);
-}
+removeFolder({ filePath: `archives/${collectionId}` });
 
 return await prisma.collection.delete({
 where: {

@@ -1,6 +1,6 @@
 import { prisma } from "@/lib/api/db";
 import { CollectionIncludingMembersAndLinkCount } from "@/types/global";
-import { existsSync, mkdirSync } from "fs";
+import createFolder from "@/lib/api/storage/createFolder";
 
 export default async function postCollection(
 collection: CollectionIncludingMembersAndLinkCount,

@@ -66,9 +66,7 @@ export default async function postCollection(
 },
 });
 
-const collectionPath = `data/archives/${newCollection.id}`;
-if (!existsSync(collectionPath))
-mkdirSync(collectionPath, { recursive: true });
+createFolder({ filePath: `archives/${newCollection.id}` });
 
 return { response: newCollection, status: 200 };
 }

@@ -1,8 +1,8 @@
 import { prisma } from "@/lib/api/db";
 import { LinkIncludingShortenedCollectionAndTags } from "@/types/global";
-import fs from "fs";
 import { Collection, Link, UsersAndCollections } from "@prisma/client";
 import getPermission from "@/lib/api/getPermission";
+import removeFile from "@/lib/api/storage/removeFile";
 
 export default async function deleteLink(
 link: LinkIncludingShortenedCollectionAndTags,

@@ -33,13 +33,8 @@ export default async function deleteLink(
 },
 });
 
-fs.unlink(`data/archives/${link.collectionId}/${link.id}.pdf`, (err) => {
-if (err) console.log(err);
-});
-
-fs.unlink(`data/archives/${link.collectionId}/${link.id}.png`, (err) => {
-if (err) console.log(err);
-});
+removeFile({ filePath: `archives/${link.collectionId}/${link.id}.pdf` });
+removeFile({ filePath: `archives/${link.collectionId}/${link.id}.png` });
 
 return { response: deleteLink, status: 200 };
 }

@@ -1,10 +1,9 @@
 import { prisma } from "@/lib/api/db";
 import { LinkIncludingShortenedCollectionAndTags } from "@/types/global";
-import getTitle from "../../getTitle";
-import archive from "../../archive";
+import getTitle from "@/lib/api/getTitle";
+import archive from "@/lib/api/archive";
 import { Collection, Link, UsersAndCollections } from "@prisma/client";
 import getPermission from "@/lib/api/getPermission";
-import { existsSync, mkdirSync } from "fs";
 
 export default async function postLink(
 link: LinkIncludingShortenedCollectionAndTags,

@@ -84,10 +83,6 @@ export default async function postLink(
 include: { tags: true, collection: true },
 });
 
-const collectionPath = `data/archives/${newLink.collectionId}`;
-if (!existsSync(collectionPath))
-mkdirSync(collectionPath, { recursive: true });
-
 archive(newLink.url, newLink.collectionId, newLink.id);
 
 return { response: newLink, status: 200 };

@@ -1,8 +1,8 @@
 import { prisma } from "@/lib/api/db";
 import { AccountSettings } from "@/types/global";
-import fs from "fs";
-import path from "path";
 import bcrypt from "bcrypt";
+import removeFile from "@/lib/api/storage/removeFile";
+import createFile from "@/lib/api/storage/createFile";
 
 export default async function updateUser(
 user: AccountSettings,

@@ -43,15 +43,12 @@ export default async function updateUser(
 if (profilePic.startsWith("data:image/jpeg;base64")) {
 if (user.profilePic.length < 1572864) {
 try {
-const filePath = path.join(
-process.cwd(),
-`data/uploads/avatar/${userId}.jpg`
-);
-
 const base64Data = profilePic.replace(/^data:image\/jpeg;base64,/, "");
 
-fs.writeFile(filePath, base64Data, "base64", function (err) {
-console.log(err);
+await createFile({
+filePath: `uploads/avatar/${userId}.jpg`,
+data: base64Data,
+isBase64: true,
 });
 } catch (err) {
 console.log("Error saving image:", err);

@@ -64,9 +61,7 @@ export default async function updateUser(
 };
 }
 } else if (profilePic == "") {
-fs.unlink(`data/uploads/avatar/${userId}.jpg`, (err) => {
-if (err) console.log(err);
-});
+removeFile({ filePath: `uploads/avatar/${userId}.jpg` });
 }
 
 // Other settings

@@ -0,0 +1,40 @@
+import { PutObjectCommand, PutObjectCommandInput } from "@aws-sdk/client-s3";
+import fs from "fs";
+import path from "path";
+import s3Client from "./s3Client";
+
+export default async function createFile({
+  filePath,
+  data,
+  isBase64,
+}: {
+  filePath: string;
+  data: Buffer | string;
+  isBase64?: boolean;
+}) {
+  if (s3Client) {
+    const bucketParams: PutObjectCommandInput = {
+      Bucket: process.env.BUCKET_NAME,
+      Key: filePath,
+      Body: isBase64 ? Buffer.from(data as string, "base64") : data,
+    };
+
+    try {
+      await s3Client.send(new PutObjectCommand(bucketParams));
+
+      return true;
+    } catch (err) {
+      console.log("Error", err);
+      return false;
+    }
+  } else {
+    const storagePath = process.env.STORAGE_FOLDER;
+    const creationPath = path.join(process.cwd(), storagePath + "/" + filePath);
+
+    fs.writeFile(creationPath, data, isBase64 ? "base64" : {}, function (err) {
+      if (err) console.log(err);
+    });
+
+    return true;
+  }
+}

@@ -0,0 +1,14 @@
+import fs from "fs";
+import path from "path";
+import s3Client from "./s3Client";
+
+export default function createFolder({ filePath }: { filePath: string }) {
+  if (s3Client) {
+    // Do nothing, S3 builds files recursively
+  } else {
+    const storagePath = process.env.STORAGE_FOLDER;
+    const creationPath = path.join(process.cwd(), storagePath + "/" + filePath);
+
+    fs.mkdirSync(creationPath, { recursive: true });
+  }
+}

@@ -0,0 +1,74 @@
+import { GetObjectCommand, GetObjectCommandInput } from "@aws-sdk/client-s3";
+import fs from "fs";
+import path from "path";
+import s3Client from "./s3Client";
+
+export default async function readFile({ filePath }: { filePath: string }) {
+  let contentType:
+    | "text/plain"
+    | "image/jpeg"
+    | "image/png"
+    | "application/pdf";
+
+  if (s3Client) {
+    const bucketParams: GetObjectCommandInput = {
+      Bucket: process.env.BUCKET_NAME,
+      Key: filePath,
+    };
+
+    try {
+      const response = await s3Client.send(new GetObjectCommand(bucketParams));
+      const data = await streamToBuffer(response.Body);
+
+      if (filePath.endsWith(".pdf")) {
+        contentType = "application/pdf";
+      } else if (filePath.endsWith(".png")) {
+        contentType = "image/png";
+      } else {
+        // if (filePath.endsWith(".jpg"))
+        contentType = "image/jpeg";
+      }
+
+      return { file: data, contentType };
+    } catch (err) {
+      console.log("Error", err);
+
+      contentType = "text/plain";
+
+      return {
+        file: "File not found, it's possible that the file you're looking for either doesn't exist or hasn't been created yet.",
+        contentType,
+      };
+    }
+  } else {
+    const storagePath = process.env.STORAGE_FOLDER;
+    const creationPath = path.join(process.cwd(), storagePath + "/" + filePath);
+
+    const file = fs.existsSync(creationPath)
+      ? fs.readFileSync(creationPath)
+      : "File not found, it's possible that the file you're looking for either doesn't exist or hasn't been created yet.";
+
+    if (file.toString().startsWith("File not found")) {
+      contentType = "text/plain";
+    } else if (filePath.endsWith(".pdf")) {
+      contentType = "application/pdf";
+    } else if (filePath.endsWith(".png")) {
+      contentType = "image/png";
+    } else {
+      // if (filePath.endsWith(".jpg"))
+      contentType = "image/jpeg";
+    }
+
+    return { file, contentType };
+  }
+}
+
+// Turn the file's body into buffer
+const streamToBuffer = (stream: any) => {
+  const chunks: any = [];
+  return new Promise((resolve, reject) => {
+    stream.on("data", (chunk: any) => chunks.push(Buffer.from(chunk)));
+    stream.on("error", (err: any) => reject(err));
+    stream.on("end", () => resolve(Buffer.concat(chunks)));
+  });
+};

@@ -0,0 +1,26 @@
+import fs from "fs";
+import path from "path";
+import s3Client from "./s3Client";
+import { PutObjectCommandInput, DeleteObjectCommand } from "@aws-sdk/client-s3";
+
+export default async function removeFile({ filePath }: { filePath: string }) {
+  if (s3Client) {
+    const bucketParams: PutObjectCommandInput = {
+      Bucket: process.env.BUCKET_NAME,
+      Key: filePath,
+    };
+
+    try {
+      await s3Client.send(new DeleteObjectCommand(bucketParams));
+    } catch (err) {
+      console.log("Error", err);
+    }
+  } else {
+    const storagePath = process.env.STORAGE_FOLDER;
+    const creationPath = path.join(process.cwd(), storagePath + "/" + filePath);
+
+    fs.unlink(creationPath, (err) => {
+      if (err) console.log(err);
+    });
+  }
+}

@@ -0,0 +1,59 @@
+import fs from "fs";
+import path from "path";
+import s3Client from "./s3Client";
+import {
+  DeleteObjectsCommand,
+  DeleteObjectsCommandInput,
+  ListObjectsCommand,
+} from "@aws-sdk/client-s3";
+
+async function emptyS3Directory(bucket: string, dir: string) {
+  if (s3Client) {
+    const listParams = {
+      Bucket: bucket,
+      Prefix: dir,
+    };
+
+    const deleteParams: DeleteObjectsCommandInput = {
+      Bucket: bucket,
+      Delete: { Objects: [] },
+    };
+
+    const listedObjects = await s3Client.send(
+      new ListObjectsCommand(listParams)
+    );
+
+    if (listedObjects.Contents?.length === 0) return;
+
+    listedObjects.Contents?.forEach(({ Key }) => {
+      deleteParams.Delete?.Objects?.push({ Key });
+    });
+
+    console.log(listedObjects);
+
+    await s3Client.send(new DeleteObjectsCommand(deleteParams));
+
+    if (listedObjects.IsTruncated) await emptyS3Directory(bucket, dir);
+  }
+}
+
+export default async function removeFolder({ filePath }: { filePath: string }) {
+  if (s3Client) {
+    try {
+      await emptyS3Directory(process.env.BUCKET_NAME as string, filePath);
+    } catch (err) {
+      console.log("Error", err);
+    }
+  } else {
+    const storagePath = process.env.STORAGE_FOLDER;
+    const creationPath = path.join(process.cwd(), storagePath + "/" + filePath);
+
+    try {
+      fs.rmdirSync(creationPath, { recursive: true });
+    } catch (error) {
+      console.log(
+        "Collection's archive directory wasn't deleted most likely because it didn't exist..."
+      );
+    }
+  }
+}

@@ -0,0 +1,19 @@
+import { S3 } from "@aws-sdk/client-s3";
+
+const s3Client: S3 | undefined =
+  process.env.SPACES_ENDPOINT &&
+  process.env.SPACES_REGION &&
+  process.env.SPACES_KEY &&
+  process.env.SPACES_SECRET
+    ? new S3({
+        forcePathStyle: false,
+        endpoint: process.env.SPACES_ENDPOINT,
+        region: process.env.SPACES_REGION,
+        credentials: {
+          accessKeyId: process.env.SPACES_KEY,
+          secretAccessKey: process.env.SPACES_SECRET,
+        },
+      })
+    : undefined;
+
+export default s3Client;

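The storage helpers added above share a single path-based contract: when SPACES_ENDPOINT, SPACES_REGION, SPACES_KEY and SPACES_SECRET are all set, s3Client is defined and every call targets the bucket named by BUCKET_NAME; otherwise the same relative path is resolved under STORAGE_FOLDER on local disk. A minimal usage sketch, not part of this commit — the concrete path is only an illustration of the archives/<collectionId>/<linkId> convention used elsewhere in the diff:

import createFile from "@/lib/api/storage/createFile";
import readFile from "@/lib/api/storage/readFile";
import removeFile from "@/lib/api/storage/removeFile";

// Hypothetical round trip against whichever backend is configured.
export async function storageRoundTrip(screenshot: Buffer) {
  // Store the buffer; the path is relative to the bucket root or STORAGE_FOLDER.
  await createFile({ filePath: "archives/1/42.png", data: screenshot });

  // readFile infers the Content-Type from the file extension.
  const { file, contentType } = await readFile({ filePath: "archives/1/42.png" });

  // Remove the object (S3) or file (local) again.
  await removeFile({ filePath: "archives/1/42.png" });

  return { file, contentType };
}
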
package.json
@@ -1,6 +1,6 @@
 {
 "name": "linkwarden",
-"version": "3.0.0",
+"version": "1.0.0",
 "main": "index.js",
 "repository": "https://github.com/Daniel31x13/link-warden.git",
 "author": "Daniel31X13 <daniel31x13@gmail.com>",

@@ -13,6 +13,7 @@
 "lint": "next lint"
 },
 "dependencies": {
+"@aws-sdk/client-s3": "^3.363.0",
 "@fortawesome/fontawesome-svg-core": "^6.3.0",
 "@fortawesome/free-regular-svg-icons": "^6.3.0",
 "@fortawesome/free-solid-svg-icons": "^6.3.0",

@@ -22,32 +23,31 @@
 "@prisma/client": "^4.9.0",
 "@types/crypto-js": "^4.1.1",
 "@types/node": "18.11.18",
+"@types/nodemailer": "^6.4.8",
 "@types/react": "18.0.27",
 "@types/react-dom": "18.0.10",
 "bcrypt": "^5.1.0",
 "colorthief": "^2.4.0",
 "crypto-js": "^4.1.1",
-"eslint": "8.33.0",
+"eslint": "8.44.0",
 "eslint-config-next": "13.1.6",
 "multer": "^1.4.5-lts.1",
 "next": "13.1.6",
 "next-auth": "^4.22.1",
-"puppeteer": "^19.8.0",
-"puppeteer-extra": "^3.3.6",
-"puppeteer-extra-plugin-adblocker": "^2.13.6",
-"puppeteer-extra-plugin-stealth": "^2.11.2",
+"nodemailer": "^6.9.3",
+"playwright": "^1.35.1",
 "react": "18.2.0",
 "react-colorful": "^5.6.1",
 "react-dom": "18.2.0",
 "react-hot-toast": "^2.4.1",
 "react-image-file-resizer": "^0.4.8",
 "react-select": "^5.7.0",
 "sharp": "^0.32.1",
 "typescript": "4.9.4",
 "zustand": "^4.3.3"
 },
 "devDependencies": {
+"@playwright/test": "^1.35.1",
 "@types/bcrypt": "^5.0.0",
 "@types/multer": "^1.4.7",
 "autoprefixer": "^10.4.14",
 "postcss": "^8.4.24",
 "prisma": "^4.9.0",

@@ -1,9 +1,8 @@
 import type { NextApiRequest, NextApiResponse } from "next";
 import { getServerSession } from "next-auth/next";
 import { authOptions } from "pages/api/auth/[...nextauth]";
-import path from "path";
-import fs from "fs";
 import getPermission from "@/lib/api/getPermission";
+import readFile from "@/lib/api/storage/readFile";
 
 export default async function Index(req: NextApiRequest, res: NextApiResponse) {
 if (!req.query.params)

@@ -27,20 +26,10 @@ export default async function Index(req: NextApiRequest, res: NextApiResponse) {
 .status(401)
 .json({ response: "You don't have access to this collection." });
 
-const requestedPath = `data/archives/${collectionId}/${linkId}`;
-
-const filePath = path.join(process.cwd(), requestedPath);
-
-const file = fs.existsSync(filePath)
-? fs.readFileSync(filePath)
-: "File not found, it's possible that the file you're looking for either doesn't exist or hasn't been created yet.";
-
-if (!fs.existsSync(filePath))
-res.setHeader("Content-Type", "text/plain").status(404);
-else if (filePath.endsWith(".pdf"))
-res.setHeader("Content-Type", "application/pdf").status(200);
-else if (filePath.endsWith(".png"))
-res.setHeader("Content-Type", "image/png").status(200);
+const { file, contentType } = await readFile({
+filePath: `archives/${collectionId}/${linkId}`,
+});
+res.setHeader("Content-Type", contentType).status(200);
 
 return res.send(file);
 }

@ -2,8 +2,7 @@ import type { NextApiRequest, NextApiResponse } from "next";
|
|||
import { getServerSession } from "next-auth/next";
|
||||
import { authOptions } from "pages/api/auth/[...nextauth]";
|
||||
import { prisma } from "@/lib/api/db";
|
||||
import path from "path";
|
||||
import fs from "fs";
|
||||
import readFile from "@/lib/api/storage/readFile";
|
||||
|
||||
export default async function Index(req: NextApiRequest, res: NextApiResponse) {
|
||||
const session = await getServerSession(req, res, authOptions);
|
||||
|
@ -41,17 +40,11 @@ export default async function Index(req: NextApiRequest, res: NextApiResponse) {
|
|||
}
|
||||
}
|
||||
|
||||
const filePath = path.join(
|
||||
process.cwd(),
|
||||
`data/uploads/avatar/${queryId}.jpg`
|
||||
);
|
||||
const { file, contentType } = await readFile({
|
||||
filePath: `uploads/avatar/${queryId}.jpg`,
|
||||
});
|
||||
|
||||
const file = fs.existsSync(filePath)
|
||||
? fs.readFileSync(filePath)
|
||||
: "File not found.";
|
||||
|
||||
if (!fs.existsSync(filePath)) res.setHeader("Content-Type", "text/plain");
|
||||
else res.setHeader("Content-Type", "image/jpeg");
|
||||
res.setHeader("Content-Type", contentType);
|
||||
|
||||
return res.send(file);
|
||||
}
|
||||
|
|
|
@@ -0,0 +1,77 @@
+import { defineConfig, devices } from '@playwright/test';
+
+/**
+ * Read environment variables from file.
+ * https://github.com/motdotla/dotenv
+ */
+// require('dotenv').config();
+
+/**
+ * See https://playwright.dev/docs/test-configuration.
+ */
+export default defineConfig({
+  testDir: './e2e',
+  /* Run tests in files in parallel */
+  fullyParallel: true,
+  /* Fail the build on CI if you accidentally left test.only in the source code. */
+  forbidOnly: !!process.env.CI,
+  /* Retry on CI only */
+  retries: process.env.CI ? 2 : 0,
+  /* Opt out of parallel tests on CI. */
+  workers: process.env.CI ? 1 : undefined,
+  /* Reporter to use. See https://playwright.dev/docs/test-reporters */
+  reporter: 'html',
+  /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */
+  use: {
+    /* Base URL to use in actions like `await page.goto('/')`. */
+    // baseURL: 'http://127.0.0.1:3000',
+
+    /* Collect trace when retrying the failed test. See https://playwright.dev/docs/trace-viewer */
+    trace: 'on-first-retry',
+  },
+
+  /* Configure projects for major browsers */
+  projects: [
+    {
+      name: 'chromium',
+      use: { ...devices['Desktop Chrome'] },
+    },
+
+    {
+      name: 'firefox',
+      use: { ...devices['Desktop Firefox'] },
+    },
+
+    {
+      name: 'webkit',
+      use: { ...devices['Desktop Safari'] },
+    },
+
+    /* Test against mobile viewports. */
+    // {
+    //   name: 'Mobile Chrome',
+    //   use: { ...devices['Pixel 5'] },
+    // },
+    // {
+    //   name: 'Mobile Safari',
+    //   use: { ...devices['iPhone 12'] },
+    // },
+
+    /* Test against branded browsers. */
+    // {
+    //   name: 'Microsoft Edge',
+    //   use: { ...devices['Desktop Edge'], channel: 'msedge' },
+    // },
+    // {
+    //   name: 'Google Chrome',
+    //   use: { ...devices['Desktop Chrome'], channel: 'chrome' },
+    // },
+  ],
+
+  /* Run your local dev server before starting the tests */
+  // webServer: {
+  //   command: 'npm run start',
+  //   url: 'http://127.0.0.1:3000',
+  //   reuseExistingServer: !process.env.CI,
+  // },
+});

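Since testDir points at ./e2e, Playwright will look for specs under that folder. A minimal sketch of such a spec (the file name, route, and expected title below are assumptions for illustration, not part of this commit):

// e2e/smoke.spec.ts (illustrative)
import { test, expect } from '@playwright/test';

test('home page responds', async ({ page }) => {
  // NEXTAUTH_URL in .env.sample points at http://localhost:3000, so a dev server
  // is assumed to be running there (see the commented-out webServer block above).
  await page.goto('http://localhost:3000');
  await expect(page).toHaveTitle(/Linkwarden/i);
});
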
@@ -0,0 +1,19 @@
+declare global {
+  namespace NodeJS {
+    interface ProcessEnv {
+      NEXTAUTH_SECRET: string;
+      DATABASE_URL: string;
+      NEXTAUTH_URL: string;
+      PAGINATION_TAKE_COUNT: string;
+      STORAGE_FOLDER?: string;
+      IS_CLOUD_INSTANCE?: true;
+      SPACES_KEY?: string;
+      SPACES_SECRET?: string;
+      SPACES_ENDPOINT?: string;
+      BUCKET_NAME?: string;
+      SPACES_REGION?: string;
+    }
+  }
+}
+
+export {};