We have an application that uses a nodejs and express backend powered by firebase but one problem that we’re facing is very long load times even for simple queries. We refractored our endpoints into different files so that they all don’t run all at once and we also have cron jobs which i suspected would help with cold starts in which i’m convinced and doubt we experiencing as all the requests including subsquent req are just slow and they really take at most times more than 8 seconds which is a poor perfomance for users to experience.
in our package.json this what we have in order for you to see the firebase versions we using
{
"name": "functions",
"description": "Cloud Functions for Firebase",
"scripts": {
"serve": "firebase serve --only functions",
"shell": "firebase functions:shell",
"start": "npm run shell",
"deploy": "firebase deploy --only functions",
"logs": "firebase functions:log"
},
"engines": {
"node": "14"
},
"dependencies": {
"@google-cloud/storage": "^5.8.2",
"@sendgrid/mail": "^7.2.1",
"algoliasearch": "^4.3.0",
"bcrypt": "^5.1.0",
"busboy": "^0.3.1",
"cookie-parser": "^1.4.5",
"cors": "^2.8.5",
"dayjs": "^1.10.4",
"dotenv": "^8.2.0",
"easy-soap-request": "^4.1.3",
"express": "^4.17.1",
"firebase": "^7.15.5",
"firebase-admin": "^8.6.0",
"firebase-functions": "^3.23.0",
"fs-extra": "^9.0.1",
"jwt-decode": "^2.2.0",
"moment": "^2.29.1",
"request": "^2.88.2",
"sharp": "^0.25.4",
"sib-api-v3-sdk": "^8.4.2",
"uuid": "^8.2.0",
"xml-js": "^1.6.11"
},
"devDependencies": {
"firebase-functions-test": "^0.1.6"
},
"private": true
}
below is the index.js file on how we set up everything.
require("dotenv").config();
const functions = require("firebase-functions");
const express = require("express");
const app = express();
const cookieParser = require("cookie-parser");
const cors = require("cors");
app.use(cors());
app.use(cookieParser());
app.use(express.json());
const dashboardRoutes = require("./routes/dashboardRoutes");
const userRoutes = require("./routes/userRoutes");
const pagesRoutes = require("./routes/pagesRoutes");
const orderRoutes = require("./routes/orderRoutes");
const cartRoutes = require("./routes/cartRoutes");
const wishlistRoutes = require("./routes/wishlistRoutes");
const authRoutes = require("./routes/authRoutes");
const storeRoutes = require("./routes/storeRoutes");
const createSellerRoutes = require("./routes/createSellerRoutes");
app.use("/", pagesRoutes);
app.use("/user", userRoutes);
app.use("/dash", dashboardRoutes);
app.use("/order", orderRoutes);
app.use("/cart", cartRoutes);
app.use("/wishlist", wishlistRoutes);
app.use("/auth", authRoutes);
app.use("/s", storeRoutes);
app.use("/cr", createSellerRoutes);
const {
cron_job1,
cron_job2,
cron_job3,
cron_job4,
} = require("./triggers/search_triggers_and_cron_jobs"); <-- not the name of the actual file
const { **other cron jobs** } = require("./cron-jobs");
const {
update_exchange_currency_rates,
} = require("./cron-jobs/currency_exchange_rates");
const { reset_product_visits } = require("./triggers/products");
const { Home } = require("./handlers/pages");
const { db } = require("./util/admin");
const { product_basic_obj } = require("./util/product_basic_obj");
exports.apis = functions.https.onRequest(app);
// this functionality below is the sample for the kind of executions we perfom in the endpoints in which we also experience slow execution times ven for a function in this file
app.get("/test-home", (req, res) => {
let content = {};
db.collection("products")
.where("status", "==", "active")
.orderBy("visited", "desc")
.limit(20)
.get()
.then((data) => {
content.popular_today = [];
data.forEach((x) => {
content.popular_today.push(product_basic_obj(x.data()));
});
return db
.collection("products")
.where("status", "==", "active")
.orderBy("todaysSales", "desc")
.limit(20)
.get();
})
.then((data) => {
content.hot_today = [];
data.forEach((x) => {
content.hot_today.push(product_basic_obj(x.data()));
});
return db.collection("departments").get();
})
.then((data) => {
content.departments = [];
data.forEach((x) => {
content.departments.push(x.data());
});
return db
.collection("departments")
.orderBy("products_sold_today", "desc")
.limit(6)
.get();
})
.then((data) => {
content.top_departments = [];
data.forEach((x) => {
content.top_departments.push(x.data());
});
return res.status(200).json(content);
});
});
//cron jobs
exports.cron_job1 = cron_job1;
exports.cron_job2 = cron_job2;
exports.cron_job3 = cron_job3;
exports.cron_job4 = cron_job4;
Upon executing an end point this is what shows in the consol and the in a deployed development we experience the same slow execution times which seems to be the average for all our executions
i functions: Beginning execution of "us-central1-apis"
⚠ Google API requested!
- URL: "https://www.googleapis.com/oauth2/v4/token"
- Be careful, this may be a production service.
i functions: Finished "us-central1-apis" in ~9s
i functions: Finished "us-central1-apis" in ~8s
i functions: Beginning execution of "us-central1-apis"
i functions: Finished "us-central1-apis" in ~7s
i functions: Beginning execution of "us-central1-apis"
i functions: Finished "us-central1-apis" in ~7s
i functions: Beginning execution of "us-central1-apis"
i functions: Finished "us-central1-apis" in ~7s
i functions: Beginning execution of "us-central1-apis"
i functions: Finished "us-central1-apis" in ~6s
i functions: Beginning execution of "us-central1-apis"
i functions: Finished "us-central1-apis" in ~7s
i functions: Beginning execution of "us-central1-apis"
i functions: Finished "us-central1-apis" in ~7s
How can i speed up the execution using the information above.
We tried breaking code into smaller files which couldn’t work as we expected to get more faster excecutions and we also removed most of our 3rd part libraries but we failed to make a change. What could we do to bring down executions times futher down.
2
Answers
Your data loading strategy is sequential, Load 1 then Load2 then Load3, and in case none of the following Load depends on the result of previous Load – that aproach is not really effective and useful.
Instead – you can try to utilize Promise.all() to fire all that promises "in parallel".
Next issue – you are loading
departments
from firebase twice, actualdepartments
andtop_departments
, and there is no need to loadtop_departments
again due to all the data that is needed is already indepartments
, you only need to.sort
andslice
them (or its shallow copy[...departments]
).I’d try this approach:
Try to execute your requests in parallel on the index.js.
This optimization will provide some gains on the network request timings.