skip to Main Content

I have a problem. Using multer, I save images in a dedicated folder on my backend server.
However, as soon as the server restarts, the images are gone. Is there a way to adjust the CI/CD pipeline or the GitLab runner so that the images persist?

# GitLab CI pipeline: builds the backend Docker image, then deploys it
# with docker-compose. Both jobs run only on the `main` branch.
stages:  
  - build
  - deploy
  
# Rebuilds the backend image from the repository's Dockerfile.
build docker image:
  stage: build
  script:
    - npm install
    # Remove any stale image first so the new build replaces it cleanly.
    - docker rmi -f ci/backendcontainer
    - docker build -t ci/backendcontainer .
  only:
    - main

# Starts the freshly built image via docker-compose in detached mode.
deploy docker image:
  stage: deploy
  script:    
    - docker-compose up -d
  only:
    - main
    
// PUT /post/photo/:id — stores the uploaded image's path for a player.
// Expects a multipart form with an `image` field (handled by multer).
// Responds with the stored file path on success.
router.put('/post/photo/:id', auth_util.checkSession, upload.single('image'), async (req, res) => {
    try {
        const { id } = req.params;
        // multer only sets req.file when an `image` part was present.
        if (!req.file) {
            return res.status(400).json({ message: 'No image file provided' });
        }
        const imagePath = req.file.path;
        // Parameterized query — interpolating `id`/`imagePath` into the SQL
        // string (as the original did) is an SQL-injection vector.
        await pgclient.query(
            `UPDATE public.tbl_player
                SET playerimageurl = $1
                WHERE playerid = $2;`,
            [imagePath, id]
        );
        console.log("ADDING NEW IMAGE");
        res.status(200).json(imagePath);
    } catch (err) {
        console.error(err.message);
        // 500 is the accurate status for a failed update, and the response
        // must actually be sent — `res.status(404)` alone leaves the
        // request hanging.
        res.status(500).json({ message: 'Failed to update player image' });
    }
})

Example with multer

const express = require("express");
const router = express.Router();
const pgclient = require("../app");
const auth_util = require("../utilities/auth_util");
const Knex = require("knex");

// Knex query builder configured for Postgres from environment variables.
// NOTE(review): `knex` is never used by the visible routes (all queries go
// through `pgclient`) — confirm it is needed elsewhere, otherwise remove it.
const knex = Knex({
    client: "pg",
    connection: {
        user: process.env.DB_USER,
        host: process.env.DB_HOST,
        database: process.env.DB_DATABASE,
        password: process.env.DB_PASSWORD,
        port: process.env.DB_PORT,
    },
});

const fs = require("fs");
const eis = require("ejs");
const sharp = require("sharp");

const multer = require('multer')
// Store uploads on disk in the `images` directory, prefixing the original
// file name with a timestamp to avoid collisions between uploads.
const storage = multer.diskStorage({
    destination: function (req, file, cb) {
        cb(null, 'images')
    },
    filename: function (req, file, cb) {
        cb(null, `${Date.now()}-${file.originalname}`)
    }
})
// The routes below call `upload.single('image')`, but `upload` was never
// created — without this line the module throws a ReferenceError.
const upload = multer({ storage })


// PUT /post/photo/:id — stores the uploaded image's path for a player
// (German schema: tbl_spieler). Expects a multipart form with an `image`
// field (handled by multer); responds with the stored file path.
router.put('/post/photo/:id', auth_util.checkSession, upload.single('image'), async (req, res) => {
    try {
        const { id } = req.params;
        // multer only sets req.file when an `image` part was present.
        if (!req.file) {
            return res.status(400).json({ message: 'No image file provided' });
        }
        const imagePath = req.file.path;
        // Parameterized query — interpolating `id`/`imagePath` into the SQL
        // string (as the original did) is an SQL-injection vector.
        await pgclient.query(
            `UPDATE public.tbl_spieler
                SET spielerbildurl = $1
                WHERE spielerid = $2;`,
            [imagePath, id]
        );
        console.log("ADDING NEW IMAGE");
        res.status(200).json(imagePath);
    } catch (err) {
        console.error(err.message);
        // 500 is the accurate status for a failed update, and the response
        // must actually be sent — `res.status(404)` alone leaves the
        // request hanging.
        res.status(500).json({ message: 'Failed to update player image' });
    }
})

// DELETE /delete/photo/images/:filename/:id — clears the player's image URL
// in the database, then removes the file from the `images` directory.
router.delete('/delete/photo/images/:filename/:id', auth_util.checkSession, async (req, res) => {
    const { filename, id } = req.params;
    // path.basename strips any directory components, so a crafted filename
    // like `../../etc/passwd` cannot escape the images directory. (The
    // original built `images/$(unknown)` — a broken path that never matched
    // the uploaded file.)
    const filePath = `images/${path.basename(filename)}`;
    try {
        // Parameterized query — interpolating `id` into the SQL string is an
        // SQL-injection vector.
        await pgclient.query(
            `UPDATE public.tbl_spieler
                SET spielerbildurl = NULL
                WHERE spielerid = $1;`,
            [id]
        );
        fs.unlink(filePath, (err) => {
            if (err) {
                return res.status(500).json({ success: false, message: 'Failed to delete image' });
            }
            return res.json({ success: true, message: 'Image deleted successfully' });
        });
    } catch (err) {
        console.error(err.message);
        // Send a real error response — `res.status(404)` alone leaves the
        // request hanging.
        res.status(500).json({ success: false, message: 'Failed to update database' });
    }
});




// GET /get/photo/:id — returns the stored image URL for a player.
router.get("/get/photo/:id", auth_util.checkSession, async (req, res, next) => {
    const { id } = req.params;
    try {
        // `const` — the original assigned to an implicit global `player`.
        // Parameterized query — interpolating `id` into the SQL string is an
        // SQL-injection vector.
        const player = await pgclient.query(
            `SELECT spielerbildurl
                FROM public.tbl_spieler
                WHERE spielerid = $1;`,
            [id]
        );
        // The original returned 200 with an empty body for unknown ids.
        if (player.rows.length === 0) {
            return res.status(404).json({ message: 'Player not found' });
        }
        res.status(200).json(player.rows[0]);
    } catch (err) {
        console.error(err);
        res.status(400).send();
    }
});

// GET /images/:imageName — streams an image file from the `images` directory.
// TODO(review): add an authorization check before serving the image, as the
// original comment intended.
router.get('/images/:imageName', (req, res) => {
    // path.basename blocks path traversal via a crafted image name.
    const imageName = path.basename(req.params.imageName);
    const readStream = fs.createReadStream(`images/${imageName}`);
    // createReadStream reports a missing/unreadable file asynchronously via
    // the 'error' event — the original's try/catch could never observe it
    // and its empty catch swallowed everything silently.
    readStream.on('error', (err) => {
        console.error(err.message);
        if (!res.headersSent) {
            res.status(404).json({ message: 'Image not found' });
        } else {
            res.destroy();
        }
    });
    readStream.pipe(res);
})

const path = require("path");

// Serve the compiled front-end assets out of the local `build` directory.
router.use(express.static(path.join(__dirname, 'build')));

// Root route: hand back the SPA's entry page.
router.get('/', (req, res) => {
    res.sendFile(path.join(__dirname, 'build', 'index.html'));
});












module.exports = router;

2

Answers


  1. you need to create a directory locally and map that through a volume in your docker-compose configuration.

    Volumes are the preferred mechanism for persisting data generated by
    and used by Docker containers. While bind mounts are dependent on the
    directory structure and OS of the host machine, volumes are completely
    managed by Docker.
    https://docs.docker.com/storage/volumes/

    you can try the below

    # Same pipeline as the question, with a volume mapping added to the
    # deploy job. Indentation fixed: the original pasted the job keys at the
    # wrong nesting level, which is not valid YAML.
    stages:  
      - build
      - deploy

    build docker image:
      stage: build
      script:
        - npm install
        - docker rmi -f ci/backendcontainer
        - docker build -t ci/backendcontainer .
      only:
        - main
    
    deploy docker image:
      stage: deploy
      script:
        - docker-compose up -d
      # NOTE(review): `volumes` is not a recognized job keyword in
      # .gitlab-ci.yml — this named-volume mapping belongs in the service
      # definition inside docker-compose.yml. TODO confirm with the GitLab
      # CI/CD YAML reference.
      volumes:
        - images:/home/node/app/images
      only:
        - main
    
    

    for the express app just make sure that you use the images directory. the volume will keep the container in sync with whatever happens to that directory.

    Login or Signup to reply.
  2. CI/CD runners are meant as a way to rerun periodic jobs in the scope of a project, so it’s common for any context used by a job to be deleted afterwards (otherwise you risk running out of disk), or for the runners themselves to be deleted and recreated. Think of them as throwaway servers.

    If you want to save generated images imho the best recommendation is to save them outside the runner space, eg cloud storage (AWS S3). Docker volumes will persist your changes locally once you’re done with your container, but they won’t save you from the runner being nuked.

    Login or Signup to reply.
Please signup or login to give your own answer.
Back To Top
Search