
I’ve seen other questions related to this but I still can’t solve this.

So basically what is happening right now is that the second request executes without waiting for the first request to finish. Since the first request needs to change the apiurl of the second request when it finishes, this won't work: the second request keeps using the same apiurl because the first request hasn't finished and updated apiurl before the second one starts.

I want to wait for the first request to finish so it can pass the new apiurl variable to the next request, which can then fetch data from a different page. This is the code; I hope it's not too complicated:

const request = require('request');
const fs = require('fs');
const json2csv = require('json2csv');
const async = require('async');
const _ = require('underscore');
const RateLimiter = require('limiter').RateLimiter;
const flatten = require('flat');
const limiter = new RateLimiter(1, 500);
const credentials = require('dotenv').config();

let apikey = "someapikey";
let password = "somepassword";
let storeName = "somestorename";

let baseurl = 'https://' + apikey + ':' + password + '@' + storeName + '.myshopify.com';
let numOrders = 0;
let ordersList = [];
let countordersList = 0;
var nextLink;
var apiurl;



let getOrders = function(page, callback) {
    console.log(page);
    if (page == 1) {
        apiurl = baseurl + '/admin/orders.json?status=any&limit=250';
    }
    console.log(apiurl);

    request({
        url: apiurl,
        json: true
    }, function(error, response, body) {

        if (!error && response.statusCode === 200) {

            if (response.headers.link && response.headers.link.indexOf(`rel="next"`) > -1) {
                console.log(apiurl);

                try {
// Try to parse out the string we need
                    nextLink = response.headers.link;

                    // If there's a previous link, remove the first part of the string entirely
                    if (nextLink.indexOf(`rel="previous"`) > -1) {
                        nextLink = nextLink.substr(nextLink.indexOf(",") + 2, nextLink.length);
                    }

                    // Parse the remaining string for the actual link
                    nextLink = nextLink.substr(1, nextLink.indexOf(">") - 1);
                    // READY - CALL THE NEXT SET WITH NEXTLINK
                    apiurl = nextLink;
                } catch (ex) {
                    console.log("ERROR");
                    console.log(response.headers);
                }
            } else {
                console.log("ALL ORDERS EXPORTED");
                console.log(response.headers);
            }




            let newList = [];
            for (let i = 0; i < body.orders.length; i++) {
                newList.push(flatten(body.orders[i]));
            }

            ordersList = ordersList.concat(newList);
            countordersList += newList.length;
            console.log('Orders received: ' + countordersList + ' / ' + numOrders);
            console.log();

            json2csv({
                    data: ordersList,
                    fields: ['order_number', 'email', 'financial_status', 'processed_at', 'fulfillment_status', 'currency', 'total_price', 'browser_ip', 'gateway', 'billing_address.first_name', 'billing_address.last_name', 'billing_address.address1', 'billing_address.address2', 'billing_address.company', 'billing_address.city', 'billing_address.zip', 'billing_address.province', 'billing_address.province_code', 'billing_address.country', 'billing_address.country_code', 'billing_address.phone', 'shipping_address.first_name', 'shipping_address.last_name', 'shipping_address.address1', 'shipping_address.address2', 'shipping_address.company', 'shipping_address.city', 'shipping_address.zip', 'shipping_address.province_code', 'shipping_address.country_code', 'shipping_address.phone', 'line_items.0.name', 'line_items.0.quantity', 'line_items.0.price', 'line_items.0.sku', 'line_items.1.name', 'line_items.1.quantity', 'line_items.1.price', 'line_items.1.sku', 'line_items.2.name', 'line_items.2.quantity', 'line_items.2.price', 'line_items.2.sku', 'line_items.3.name', 'line_items.3.quantity', 'line_items.3.price', 'line_items.3.sku', 'line_items.4.name', 'line_items.4.quantity', 'line_items.4.price', 'line_items.4.sku']
                },
                function(err, csv) {
                    if (err) console.log(err);
                    fs.appendFile(storeName + '.csv', csv, function(err) {
                        if (err) throw err;
                        ordersList = [];
                    });
                });

            callback();

        }


    })
}

request({
    url: baseurl + '/admin/orders/count.json?status=any',
    json: true
}, function(error, response, body) {
    if (!error && response.statusCode === 200) {
        numOrders = body.count;
    }
    console.log();
    if (numOrders > 700000) {
        numOrders = 700000;
    }
    console.log('Total: ' + body.count);
    console.log('Processing: ' + numOrders);
    console.log();
    let numPages = numOrders / 250;
    let r = _.range(1, numPages + 1);

    async.forEach(r, function(page, callback) {
        limiter.removeTokens(1, function() {
            getOrders(page, callback);
        })

    }, function(err) {
        // Called when all are finished
        console.log('Total: ' + ordersList.length);
        console.log();


    });


});

What my script does:

It gets a page with 250 orders from my Shopify store using the Shopify API and uses cursor-based pagination to request one page of orders at a time, or at least that's what I want it to do. Right now the requests try to get all pages at the same time, since the script doesn't wait for the first request to finish and can't pass the new url variable apiurl to the next iteration. It then saves the info to a csv file, but the relevant piece of code is the first half of the getOrders function. I would really appreciate the help; I feel dumb for trying to solve this for the past 5 hours with almost no progress.

2 Answers


  1. Try using async.forEachSeries(). It is the same as async.forEach() but runs only a single async operation at a time.

        async.forEachSeries(r, function(page, callback) {
           getOrders(page, callback);
        }, function(err) {
            // Called when all are finished
            console.log('Total: ' + ordersList.length);
            console.log();
        });
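
    If you still want the RateLimiter from the question, the same pattern works inside the series iterator. A minimal sketch, assuming the limiter, r and getOrders defined in your code:

        async.forEachSeries(r, function(page, callback) {
            // wait for a rate-limit token, then process exactly one page
            limiter.removeTokens(1, function() {
                getOrders(page, callback);
            });
        }, function(err) {
            // called once every page has finished, in order
            console.log('Total: ' + ordersList.length);
            console.log();
        });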
    

    As you can see in the example below, there is no problem with the basic logic of async.forEachSeries(). You need to make sure that the callback() function in getOrders() is called properly, including when the request fails (see the sketch after this example).

    function getOrders(page, callback) {
      console.log('getOrders called with ' + page);
      setTimeout(function() {
        console.log('Call callback in getOrders');
        callback();
      }, 1000);
    }
    
    const r = [1, 2, 3, 4]
    
    async.forEachSeries(r, function(page, callback) {
       getOrders(page, callback);
    }, function(err) {
        // Called when all are finished
        console.log('Total: all orders finished');
        console.log();
    });
    <script src="https://cdnjs.cloudflare.com/ajax/libs/async/3.2.0/async.min.js"></script>
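
    In the question's getOrders(), callback() is only reached when the request succeeds with a 200 status, so a failed request would stall the series. A hedged sketch of how the request handler could signal completion in both branches, assuming the rest of the function stays the same:

    request({
        url: apiurl,
        json: true
    }, function(error, response, body) {
        if (error || response.statusCode !== 200) {
            // propagate the failure so forEachSeries does not hang
            return callback(error || new Error('HTTP ' + response.statusCode));
        }

        // ... existing pagination and CSV logic from the question ...

        // always signal completion so the next page can start
        callback();
    });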
  2. You can use the async module here too: async.waterfall or async.auto (a waterfall sketch follows the async.auto example below).

    async.auto({
        first_req: function(callback) {
            request({
                url: apiurl,
                json: true
            }, function(error, response, body) {
                callback(null, body);
            });
        },
        second_req: ['first_req', function(results, callback) {
            // here you get access to the result of the first request in results.first_req
            // process it or request another link here and send its data to the callback
            // (second_result is a placeholder for whatever the second step produces)
            callback(null, second_result);
        }],
    }, function(err, results) {
        // results.second_req here holds the result of the second request
        console.log('err = ', err);
        console.log('results = ', results);
    });
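
    The async.waterfall variant mentioned above looks similar; each task passes its result to the next one. A minimal sketch, assuming the same request setup as the question:

    async.waterfall([
        function(callback) {
            request({ url: apiurl, json: true }, function(error, response, body) {
                // hand the body (e.g. the next page url) to the following task
                callback(error, body);
            });
        },
        function(firstResult, callback) {
            // build and run the second request from firstResult here
            callback(null, firstResult);
        }
    ], function(err, finalResult) {
        console.log('err = ', err);
        console.log('finalResult = ', finalResult);
    });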
    
    

    And instead of async.forEach you can go for a for...of loop with async/await, which waits for each request to complete before starting the next (see the sketch below).
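
    A hedged sketch of that approach, assuming getOrders is first wrapped to return a promise (getOrdersAsync is a hypothetical name, and r is the page range from the question):

    const { promisify } = require('util');
    const getOrdersAsync = promisify(getOrders); // hypothetical promisified wrapper

    (async function() {
        for (const page of r) {
            // each iteration waits for the previous page to finish
            await getOrdersAsync(page);
        }
        console.log('Total: ' + ordersList.length);
    })();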
