Max Rumpf
Max Rumpf

Reputation: 138

Maximum Call Stack Size exceeded Node.JS WebCrawler

I have written this WebCrawler in Node.JS. It crawls the page and saves it into Redis. I use the setImmediate and process.nextTick, but it still throws this error. What have I done wrong?

var $, client, f, redis, request, s, urlhelper, urls, validator, _;

request = require("request");

validator = require("validator");

$ = require("cheerio");

_ = require("underscore");

s = require("underscore.string");

urlhelper = require("url");

urls = [];

redis = require("redis");

client = redis.createClient();

f = function(url) {
    return process.nextTick(function() {
        urls.push(url);
        if (validator.isURL(url) !== true) {
            return;
        }
        return request(url, function(error, response, body) {
            var title, _$;
            if (!error && response.statusCode === 200) {
                _$ = $.load(body);
                title = _$("title").text() || "";
                return client.hset(url, "title", title, function() {
                    return _.each(_$("a"), function(object) {
                        var href;
                        href = object.attribs["href"];
                        if (!validator.isURL(href)) {
                            href = urlhelper.resolve(url, href || "");
                        }
                        console.log(href);
                        return client.exists(href, function(error, reply) {
                            if (error) {
                                throw error;
                            }
                            if (reply === 1) {
                                return client.hincrby(href, "refs", 1, function() {});
                            } else {
                                return client.hmset(href, {
                                    "refs": "1",
                                    "title": ""
                                }, function() {
                                    return client.sadd("sites", href, function() {
                                        return setTimeout(function() {
                                            return f(href);
                                        }, 0);
                                    });
                                });
                            }
                        });
                    });
                });
            }
        });
    });
};

client.on("connect", function() {
    return f("http://www.apple.com");
});

I would appreciate any help.

Thank you so much,

Max

Upvotes: 1

Views: 287

Answers (1)

Mike Scott
Mike Scott

Reputation: 4805

You've got a recursive function that has to crawl through the entire world-wide web before it finishes (or at least, all of it that can be reached starting at www.apple.com). Unless it can hold a few billion web pages in memory, it's going to run out of stack space. You need to rewrite it so that you maintain a separate queue of pages to be crawled in a database of some kind -- this isn't a place where you can use recursion. Alternatively, you could set a maximum depth for the recursion.

Upvotes: 1

Related Questions