Node.js synchronously loop or iterate over asynchronous statements

Solution 1

I found out that I wasn't releasing my MySQL connections after I was done with each call. This tied up the connections, caused later queries to fail, and made the whole thing look like a synchronization problem.

After explicitly calling connection.release(), my code worked 100% correctly, even in an asynchronous fashion.
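For illustration, here is a minimal sketch of where that release call goes in the model code from the question. mysqlLib is the question's own pool wrapper, and the countdown used to detect the last finished query is an assumption for the sketch, not part of the original code:

exports.importResponse = function(cardList, setCode, callback) {
  mysqlLib.getConnection(function(err, connection) {
    var remaining = cardList.length;  // assumed counter to know when the last insert is done

    cardList.forEach(function(item) {
      var theSql = "INSERT INTO table (name, code, multid, collector_set_num) VALUES "
        + "(?, ?, ?, ?) ON DUPLICATE KEY UPDATE id=id";

      connection.query(theSql, [item.name, setCode, item.multid, item.number], function(err) {
        if (err) {
          console.log(err);
        }
        if (--remaining === 0) {
          connection.release();  // hand the connection back to the pool
          callback(setCode);
        }
      });
    });
  });
};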

Thanks for those who posted to this question.

Solution 2

With recursion the code is pretty clean: wait for the http response to come back, then fire off the next attempt. This will work in all versions of Node.

var http = require('http');

var urls = ['http://stackoverflow.com/', 'http://security.stackexchange.com/', 'http://unix.stackexchange.com/'];

var processItems = function(x){
  if( x < urls.length ) {
    http.get(urls[x], function(res) {

      // add some code here to process the response

      processItems(x+1);
    });
  }
};

processItems(0);

A solution using promises would also work well and is more terse. For example, if you have a version of get that returns a promise and are on Node v7.6+, you could write an async/await function like this:

const urls = ['http://stackoverflow.com/', 'http://security.stackexchange.com/', 'http://unix.stackexchange.com/'];

async function processItems(urls){
  for(const url of urls) {
    const response = await promisifiedHttpGet(url);
    // add some code here to process the response.
  }
}

processItems(urls);

Note: both of these examples skip over error handling, but you should probably have that in a production app.
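For completeness, promisifiedHttpGet above is not a built-in; it stands in for whatever promise-returning wrapper you use. A minimal sketch of one, built on http.get with basic error handling, could look like this:

const http = require('http');

function promisifiedHttpGet(url) {
  return new Promise((resolve, reject) => {
    http.get(url, (res) => {
      let body = '';
      res.on('data', (chunk) => { body += chunk; });
      res.on('end', () => resolve(body));  // resolve with the full response body
    }).on('error', reject);                // reject on request/connection errors
  });
}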

Solution 3

To loop over and sequentially chain asynchronous actions, the cleanest solution is probably to use a promise library (promises were introduced in ES6; this is the way to go).

Using Bluebird, this could be

var Promise = require('bluebird');

var p = Promise.resolve();
sets.forEach(function(item, index, arr) {
    // chain each request onto the previous one so they run sequentially
    p = p.then(function() {
        return new Promise(function(resolve, reject) {
            http.get(theUrl, function(res) {
                 ....
                 res.on('end', function() {
                      ...
                      resolve();
                 });
            });
        });
    });
});
p.then(function(){
   // all tasks launched in the loop are finished
});
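Bluebird also ships a helper for exactly this pattern: Promise.each walks an array and waits for the promise returned for each item before moving on to the next. A rough sketch, reusing the hypothetical promisifiedHttpGet wrapper from Solution 2:

Promise.each(sets, function(item, index, length) {
    // the returned promise is awaited before the next item is processed
    return promisifiedHttpGet(theUrl);
}).then(function() {
    // all items processed, in order
});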

Comments

  • user3447415 almost 3 years

    I want to do a for each loop but have it run synchronously. Each iteration of the loop will do an http.get call, which returns JSON for it to insert the values into a database. The problem is that the loop runs asynchronously, so all of the http.gets run at once and my database doesn't end up inserting all of the data. I am using async-foreach to try to do what I want it to do, but I don't have to use it if I can do it the right way.

    mCardImport = require('m_cardImport.js');
    var http = require('http');

    app.get('/path/hi', function(req, res) {
      mCardImport.getList(function(sets) {
        forEach(sets, function(item, index, arr) {
          theUrl = 'http://' + sets.set_code + '.json';
          http.get(theUrl, function(res) {

            var jsonData = '';
            res.on('data', function(chunk) {
              jsonData += chunk;
            });

            res.on('end', function() {
              var theResponse = JSON.parse(jsonData);
              mCardImport.importResponse(theResponse.list, theResponse.code, function(theSet) {
                console.log("SET: " + theSet);
              });
            });
          });
        });
      });
    });
    

    and my model

    exports.importResponse = function(cardList, setCode, callback) {
      mysqlLib.getConnection(function(err, connection) {
        forEach(cardList, function(item, index, arr) {
          var theSql = "INSERT INTO table (name, code, multid, collector_set_num) VALUES "
            + "(?, ?, ?, ?) ON DUPLICATE KEY UPDATE id=id";
          connection.query(theSql, [item.name, setCode, item.multid, item.number], function(err, results) {
            if (err) {
              console.log(err);
            }
          });
        });
      });
      callback(setCode);
    };