Dockerize the app

This commit is contained in:
Gardner Bickford 2022-06-18 13:18:48 +12:00
parent 17a27a138b
commit a9c051b743
17 changed files with 224 additions and 53 deletions

6
.dockerignore Normal file
View File

@ -0,0 +1,6 @@
.github
api
.gitignore
.dockerignore
Dockerfile*
node_modules

24
.github/workflows/build_images.yml vendored Normal file
View File

@ -0,0 +1,24 @@
---
name: 'build images'
# See https://itnext.io/building-multi-cpu-architecture-docker-images-for-arm-and-x86-3-building-in-github-action-ci-a382feab5af9
on:
push:
branches:
- master
jobs:
build-docker-images:
name: Build Docker Images
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
- name: Login to GitHub Package Registry
run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
- name: Build & Push Docker image
run: docker buildx build -t ghcr.io/${{ github.repository_owner }}/myimage:${GITHUB_SHA} -f [path to Dockerfile] --push --platform=linux/arm64,linux/amd64 [path to build context]

4
.gitignore vendored
View File

@ -12,6 +12,10 @@ npm-debug.log*
yarn-debug.log*
yarn-error.log*
media
api/.ash_history
api/.cache/
api/.pki/
api/node_modules
api/.pnp
api/.pnp.js

12
Dockerfile Normal file
View File

@ -0,0 +1,12 @@
# Development image for React app
FROM node:18-alpine
WORKDIR /home/node
VOLUME /home/node/node_modules
COPY package*.json .
RUN npm i -g npm@latest \
&& npm ci --legacy-peer-deps \

16
Dockerfile.prod Normal file
View File

@ -0,0 +1,16 @@
# Production image for React app
FROM node:18-alpine AS builder
WORKDIR /home/node
VOLUME /home/node/node_modules
COPY . .
RUN npm i -g npm@latest \
&& npm ci --legacy-peer-deps \
&& npm run build
FROM nginx:alpine
COPY --from=builder /home/node/build /usr/share/nginx/html

29
api/Dockerfile Normal file
View File

@ -0,0 +1,29 @@
# Production image for api
# See https://github.com/puppeteer/puppeteer/blob/main/docs/troubleshooting.md#running-on-alpine
FROM node:18-alpine
ENV PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true \
PUPPETEER_EXECUTABLE_PATH=/usr/bin/chromium-browser
WORKDIR /home/node
VOLUME /home/node/node_modules
RUN apk add --no-cache \
chromium \
nss \
freetype \
harfbuzz \
ca-certificates \
ttf-freefont
COPY . .
RUN npm ci && mkdir -p /media
# The following command fails when attempting to chown the node_modules directory.
# Running it in its own layer allows it to modify the volume.
RUN chown -R node:node /home/node /media
USER node
EXPOSE 5000
CMD node server.js

9
api/config.js Normal file
View File

@ -0,0 +1,9 @@
const fs = require("fs");
module.exports.port = process.env.PORT || 5000;
module.exports.URI = process.env.MONGODB_URI || 'mongodb://localhost:27017';
module.exports.database = process.env.DB_NAME || 'sample_db';
module.exports.collection = process.env.COLLECTION_NAME || 'list';
const storageLocation = process.env.STORAGE_LOCATION || '/media';
module.exports.screenshotDirectory = storageLocation + '/screenshots';
module.exports.pdfDirectory = storageLocation + '/pdfs';

View File

@ -1,23 +1,13 @@
const puppeteer = require("puppeteer");
const { PuppeteerBlocker } = require("@cliqz/adblocker-puppeteer");
const fetch = require("cross-fetch");
const config = require("../../src/config.js");
const fs = require("fs");
const screenshotDirectory =
config.API.STORAGE_LOCATION + "/LinkWarden/screenshot's/";
const pdfDirectory = config.API.STORAGE_LOCATION + "/LinkWarden/pdf's/";
if (!fs.existsSync(screenshotDirectory)) {
fs.mkdirSync(screenshotDirectory, { recursive: true });
}
if (!fs.existsSync(pdfDirectory)) {
fs.mkdirSync(pdfDirectory, { recursive: true });
}
const { screenshotDirectory, pdfDirectory } = require("../config.js");
module.exports = async (link, id) => {
const browser = await puppeteer.launch();
const browser = await puppeteer.launch({
args: ['--no-sandbox'],
timeout: 10000,
});
const page = await browser.newPage();
await PuppeteerBlocker.fromPrebuiltAdsAndTracking(fetch).then((blocker) => {
@ -26,11 +16,13 @@ module.exports = async (link, id) => {
await page.goto(link, { waitUntil: "load", timeout: 0 });
console.log(screenshotDirectory + "/" + id + ".png");
await page.screenshot({
path: screenshotDirectory + id + ".png",
path: screenshotDirectory + "/" + id + ".png",
fullPage: true,
});
await page.pdf({ path: pdfDirectory + id + ".pdf", format: "a4" });
await page.pdf({ path: pdfDirectory + "/" + id + ".pdf", format: "a4" });
await browser.close();
};

43
api/package-lock.json generated
View File

@ -15,6 +15,7 @@
"express": "^4.17.3",
"mongodb": "^4.5.0",
"puppeteer": "^14.1.1",
"sanitize-filename": "^1.6.3",
"uuid": "^8.3.2"
},
"devDependencies": {
@ -2106,6 +2107,14 @@
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"node_modules/sanitize-filename": {
"version": "1.6.3",
"resolved": "https://registry.npmjs.org/sanitize-filename/-/sanitize-filename-1.6.3.tgz",
"integrity": "sha512-y/52Mcy7aw3gRm7IrcGDFx/bCk4AhRh2eI9luHOQM86nZsqwiRkkq2GekHXBBD+SmPidc8i2PqtYZl+pWJ8Oeg==",
"dependencies": {
"truncate-utf8-bytes": "^1.0.0"
}
},
"node_modules/saslprep": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz",
@ -2391,6 +2400,14 @@
"node": ">=12"
}
},
"node_modules/truncate-utf8-bytes": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/truncate-utf8-bytes/-/truncate-utf8-bytes-1.0.2.tgz",
"integrity": "sha512-95Pu1QXQvruGEhv62XCMO3Mm90GscOCClvrIUwCM0PYOXK3kaF3l3sIHxx71ThJfcbM2O5Au6SO3AWCSEfW4mQ==",
"dependencies": {
"utf8-byte-length": "^1.0.1"
}
},
"node_modules/type-fest": {
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
@ -2514,6 +2531,11 @@
"node": ">=4"
}
},
"node_modules/utf8-byte-length": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/utf8-byte-length/-/utf8-byte-length-1.0.4.tgz",
"integrity": "sha512-4+wkEYLBbWxqTahEsWrhxepcoVOJ+1z5PGIjPZxRkytcdSUaNjIjBM7Xn8E+pdSuV7SzvWovBFA54FO0JSoqhA=="
},
"node_modules/util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",
@ -4224,6 +4246,14 @@
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg=="
},
"sanitize-filename": {
"version": "1.6.3",
"resolved": "https://registry.npmjs.org/sanitize-filename/-/sanitize-filename-1.6.3.tgz",
"integrity": "sha512-y/52Mcy7aw3gRm7IrcGDFx/bCk4AhRh2eI9luHOQM86nZsqwiRkkq2GekHXBBD+SmPidc8i2PqtYZl+pWJ8Oeg==",
"requires": {
"truncate-utf8-bytes": "^1.0.0"
}
},
"saslprep": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz",
@ -4454,6 +4484,14 @@
"punycode": "^2.1.1"
}
},
"truncate-utf8-bytes": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/truncate-utf8-bytes/-/truncate-utf8-bytes-1.0.2.tgz",
"integrity": "sha512-95Pu1QXQvruGEhv62XCMO3Mm90GscOCClvrIUwCM0PYOXK3kaF3l3sIHxx71ThJfcbM2O5Au6SO3AWCSEfW4mQ==",
"requires": {
"utf8-byte-length": "^1.0.1"
}
},
"type-fest": {
"version": "0.20.2",
"resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.20.2.tgz",
@ -4549,6 +4587,11 @@
"prepend-http": "^2.0.0"
}
},
"utf8-byte-length": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/utf8-byte-length/-/utf8-byte-length-1.0.4.tgz",
"integrity": "sha512-4+wkEYLBbWxqTahEsWrhxepcoVOJ+1z5PGIjPZxRkytcdSUaNjIjBM7Xn8E+pdSuV7SzvWovBFA54FO0JSoqhA=="
},
"util-deprecate": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz",

View File

@ -15,6 +15,7 @@
"express": "^4.17.3",
"mongodb": "^4.5.0",
"puppeteer": "^14.1.1",
"sanitize-filename": "^1.6.3",
"uuid": "^8.3.2"
},
"devDependencies": {

View File

@ -2,21 +2,26 @@ const express = require("express");
const app = express();
const { MongoClient } = require("mongodb");
const cors = require("cors");
const config = require("../src/config.js");
const getData = require("./modules/getData.js");
const fs = require("fs");
const { port, URI, database, collection, screenshotDirectory, pdfDirectory } = require("./config.js");
const fetch = require("cross-fetch");
const port = config.API.PORT;
const URI = config.API.MONGODB_URI;
const database = config.API.DB_NAME;
const collection = config.API.COLLECTION_NAME;
const sanitize = require("sanitize-filename");
const client = new MongoClient(URI);
const db = client.db(database);
const list = db.collection(collection);
// Create the storage directories if they do not exist
if (!fs.existsSync(screenshotDirectory)) {
fs.mkdirSync(screenshotDirectory, { recursive: true });
}
if (!fs.existsSync(pdfDirectory)) {
fs.mkdirSync(pdfDirectory, { recursive: true });
}
app.use(cors());
app.use(express.json());
@ -27,8 +32,9 @@ app.get("/api", async (req, res) => {
});
app.get("/screenshots/:id", async (req, res) => {
console.log(screenshotDirectory + "/" + sanitize(req.params.id));
res.sendFile(
config.API.STORAGE_LOCATION + "/LinkWarden/screenshot's/" + req.params.id,
screenshotDirectory + "/" + sanitize(req.params.id),
(err) => {
if (err) {
res.sendFile(__dirname + "/pages/404.html");
@ -38,8 +44,9 @@ app.get("/screenshots/:id", async (req, res) => {
});
app.get("/pdfs/:id", async (req, res) => {
console.log(pdfDirectory + "/" + sanitize(req.params.id));
res.sendFile(
config.API.STORAGE_LOCATION + "/LinkWarden/pdf's/" + req.params.id,
pdfDirectory + "/" + sanitize(req.params.id),
(err) => {
if (err) {
res.sendFile(__dirname + "/pages/404.html");
@ -57,7 +64,7 @@ app.post("/api", async (req, res) => {
.then((res) => res.text())
.then((text) => (body = text));
// regular expression to parse contents of the <title> tag
let match = body.match(/<title>([^<]*)<\/title>/);
let match = body.match(/<title.*>([^<]*)<\/title>/);
return match[1];
};
@ -116,11 +123,12 @@ async function getDoc() {
}
async function deleteDoc(doc) {
doc = sanitize(doc);
try {
const result = await list.deleteOne({ _id: doc });
fs.unlink(
config.API.STORAGE_LOCATION + "/LinkWarden/screenshot's/" + doc + ".png",
screenshotDirectory + "/" + doc + ".png",
(err) => {
if (err) {
console.log(err);
@ -129,7 +137,7 @@ async function deleteDoc(doc) {
);
fs.unlink(
config.API.STORAGE_LOCATION + "/LinkWarden/pdf's/" + doc + ".pdf",
pdfDirectory +"/" + doc + ".pdf",
(err) => {
if (err) {
console.log(err);
@ -147,3 +155,4 @@ app.listen(port, () => {
console.log(`Success! running on port ${port}.`);
client.connect();
});

40
docker-compose.yml Normal file
View File

@ -0,0 +1,40 @@
version: "3"
## This compose file can be used for development
services:
mongo:
image: mongo
environment:
- MONGO_INITDB_ROOT_USERNAME=linkwarden
- MONGO_INITDB_ROOT_PASSWORD=changeme
restart: unless-stopped
link-warden-api:
build: ./api
environment:
- MONGODB_URI=mongodb://linkwarden:changeme@mongo:27017/
- PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
volumes:
- ./media:/media
- ./api:/home/node
ports:
- 5000:5000
restart: unless-stopped
depends_on:
- mongo
link-warden:
build: .
environment:
# - DANGEROUSLY_DISABLE_HOST_CHECK=true
- REACT_APP_API_HOST=http://localhost:5000
command: npm run go
volumes:
- /home/node/node_modules
- .:/home/node
ports:
- 3000:3000
restart: unless-stopped
depends_on:
- link-warden-api

View File

@ -2,7 +2,7 @@ import { useEffect, useState } from "react";
import "./styles/App.css";
import List from "./componets/List";
import AddItem from "./componets/AddItem";
import config from "./config";
import { API_HOST } from "./config";
import Filters from "./componets/Filters";
import sortList from "./modules/sortList";
import filter from "./modules/filterData";
@ -60,8 +60,7 @@ function App() {
const tags = concatTags(data);
async function fetchData() {
const ADDRESS = config.API.ADDRESS + ":" + config.API.PORT;
const res = await fetch(ADDRESS + "/api");
const res = await fetch(API_HOST + "/api");
const resJSON = await res.json();
const data = resJSON.reverse();
setData(data);

View File

@ -1,11 +1,11 @@
import "../styles/ViewArchived.css";
import config from "../config";
import { API_HOST } from "../config";
const ViewArchived = ({ id }) => {
const screenshotPath =
config.API.ADDRESS + ":" + config.API.PORT + "/screenshots/" + id + ".png";
API_HOST + "/screenshots/" + id + ".png";
const pdfPath =
config.API.ADDRESS + ":" + config.API.PORT + "/pdfs/" + id + ".pdf";
API_HOST + "/pdfs/" + id + ".pdf";
return (
<div className="view-archived">

View File

@ -1,12 +1 @@
// Note: the formatting are really sensitive so for example DO NOT end
// the "STORAGE_LOCATION" path with an extra slash "/" (i.e. "/home/")
module.exports = {
API: {
ADDRESS: "http://192.168.1.7", // IP address of the computer which LinkWarden is running
PORT: 5000, // The api port
MONGODB_URI: "mongodb://localhost:27017", // MongoDB link
DB_NAME: "sample_db", // MongoDB database name
COLLECTION_NAME: "list", // MongoDB collection name
STORAGE_LOCATION: "/home/danny/Documents", // The path to store the archived data
},
};
export const API_HOST = process.env.REACT_APP_API_HOST || "";

View File

@ -1,8 +1,7 @@
import config from "../config";
import { API_HOST } from "../config";
const deleteEntity = (id, reFetch, onExit, SetLoader) => {
const ADDRESS = config.API.ADDRESS + ":" + config.API.PORT;
fetch(ADDRESS + "/api", {
fetch(API_HOST + "/api", {
method: "DELETE",
body: JSON.stringify({ id }),
headers: {

View File

@ -1,4 +1,4 @@
import config from "../config";
import { API_HOST } from "../config";
import { nanoid } from "nanoid";
const addItem = async (
@ -28,8 +28,7 @@ const addItem = async (
}
if (isValidHttpUrl(link)) {
const ADDRESS = config.API.ADDRESS + ":" + config.API.PORT;
fetch(ADDRESS + "/api", {
fetch(API_HOST + "/api", {
method: method,
body: JSON.stringify({
_id: id,