skip to Main Content

I have a Node.js API that reads data from a MySQL database, writes it into a CSV file, and downloads the file once writing is complete. With 1.7 million records in the database it worked, but now that I have 5 million records it gives me this error:

javascript - heap out of memory

my api code:

// Export every sender table as one worksheet of a streamed .xlsx written to `res`.
// NOTE(review): this is the version that runs out of heap — the concrete problems
// are flagged inline below.
exports.exportdatatocsv = async (req, res) => {
    con.query(
        "SELECT sender_name, table_name FROM sender_tbl where sender_name ",
        function (error, data) {
            if (error) {
                console.error(error);
                res.status(500).send("Internal Server Error");
                return;
            }

            // NOTE(review): JSON round-trip clones the whole result set, doubling
            // memory for no benefit — the rows are already plain objects.
            var mysqlData = JSON.parse(JSON.stringify(data));
            var workbook = new ExcelJS.stream.xlsx.WorkbookWriter({ stream: res });

            // NOTE(review): forEach fires ALL per-table queries concurrently, so
            // every table's full result set is in memory at the same time.
            mysqlData.forEach((sender, index) => {
                var worksheet = workbook.addWorksheet(
                    sender.sender_name.substring(0, 31) // Excel caps sheet names at 31 chars
                );

                con.query(
                    // NOTE(review): non-streaming SELECT * buffers the entire table
                    // in the driver before this callback even runs.
                    `SELECT * FROM ${sender.table_name} ORDER BY id DESC`,
                    function (error, tableData) {
                        if (error) {
                            console.error(error);
                            res.status(500).send("Internal Server Error");
                            return;
                        }

                        var fileHeader = [
                            "message",
                            "info",
                            "credit/debit",
                            "amount",
                            "netbal",
                        ];
                        worksheet.addRow(fileHeader);

                        tableData.forEach((row) => {
                            worksheet.addRow([
                                row.message,
                                row.info,
                                row.isdebit ? "debit" : "credit",
                                row.amount,
                                row.netbal,
                            ]);
                        });

                        // NOTE(review): race condition — callbacks can complete out of
                        // order, so the "last index" query may finish first and commit
                        // the workbook before the other worksheets are filled.
                        if (index === mysqlData.length - 1) {
                            workbook
                                .commit()
                                .then(function () {
                                    res.status(200).end();
                                })
                                .catch(function (err) {
                                    console.error(err);
                                    res
                                        .status(500)
                                        .send("Error occurred while generating Excel file");
                                });
                        }
                    }
                );
            });
        }
    );
};

I have implemented a stream, but it is still not working, and I do not have much experience with streams.

2

Answers


  1. Node has a default max memory usage of less than 2GB on some 64bit systems depending on the Node runtime version. This can cause unexpected memory issues when running.

    It’s possible to adjust the max memory ceiling using a command-line flag passed into Node:

    node --max-old-space-size=8192 your_script.js // 8192MB = 8GB
    

    If you want to ensure that all forked Node processes run with this setting, use this:

    NODE_OPTIONS=--max-old-space-size=4096 node your_script.js // 4GB
    

    Your code executes too many DB queries at the same time, even though it looks like sequential callbacks.

    So what we have to do is simply sequentialise the execution.

    This can be achieved through promises. I've used util.promisify so we don't need deeply nested callbacks.

    //your imports +
    const {promisify} = require('node:util');
    const db = promisify(con.query);
    
    exports.exportdatatocsv = async (req, res) => {
        let fileHeader = [
            "message",
            "info",
            "credit/debit",
            "amount",
            "netbal",
        ];
        
        try{
            const workbook = new ExcelJS.stream.xlsx.WorkbookWriter({ stream: res });
            const mysqlData = await db('SELECT sender_name, table_name FROM sender_tbl where sender_name').then(data => JSON.parse(JSON.stringify(data)));
            for(sender of mysqlData){
    
                let worksheet = workbook.addWorksheet(
                    sender.sender_name.substring(0, 31)
                );
    
                const tableData = await db(`SELECT * FROM ${sender.table_name} ORDER BY id DESC`);
                
                worksheet.addRow(fileHeader);
    
                tableData.forEach((row) => {
                    worksheet.addRow([
                        row.message,
                        row.info,
                        row.isdebit ? "debit" : "credit",
                        row.amount,
                        row.netbal,
                    ]);
                });
            }
    
            await workbook.commit();
            res.status(200).end();
    
        }catch(e){
            res.status(500).send(e.message);
        }
    };
    

    Thanks to promisify, the code is much more readable.

    Login or Signup to reply.
  2. const ExcelJS = require('exceljs');
    const fs = require('fs');
    const { Transform } = require('stream');
    
    exports.exportdatatocsv = async (req, res) => {
        // Create a transform stream to convert row objects to CSV strings
        const csvTransformStream = new Transform({
            writableObjectMode: true,
            transform(chunk, encoding, callback) {
                const csvString = Object.values(chunk).join(',') + 'n';
                callback(null, csvString);
            }
        });
    
        res.setHeader('Content-Type', 'text/csv');
        res.setHeader('Content-Disposition', 'attachment; filename="export.csv"');
    
        // Pipe the CSV transform stream directly to the response stream
        csvTransformStream.pipe(res);
    
        con.query("SELECT sender_name, table_name FROM sender_tbl where sender_name ", async function (error, data) {
            if (error) {
                console.error(error);
                res.status(500).send("Internal Server Error");
                return;
            }
    
            try {
                for (const sender of data) {
                    const tableData = await new Promise((resolve, reject) => {
                        con.query(`SELECT * FROM ${sender.table_name} ORDER BY id DESC`, (error, tableData) => {
                            if (error) {
                                console.error(error);
                                reject(error);
                            } else {
                                resolve(tableData);
                            }
                        });
                    });
    
                    // Write CSV header
                    if (tableData.length > 0) {
                        csvTransformStream.write(tableData[0]);
                    }
    
                    // Write table data to CSV
                    for (const row of tableData) {
                        csvTransformStream.write(row);
                    }
                }
    
                // End the response stream
                csvTransformStream.end();
            } catch (err) {
                console.error(err);
                res.status(500).send("Internal Server Error");
            }
        });
    };
    
    Try this code if it is helpful to you.
    
    Login or Signup to reply.
Please signup or login to give your own answer.
Back To Top
Search