Dong
Dong

Reputation: 434

Amazon S3 Node.js SDK deleteObjects

I am trying to delete several objects after copying them to a different folder. My code is like:

    var deleteParam = {
        Bucket: 'frontpass-test',
        Delete: {
            Objects: [
                {Key: '1.txt'},
                {Key: '2.txt'},
                {Key: '3.txt'}
            ]
        }
    };    
    s3.deleteObjects(deleteParam, function(err, data) {
        if (err) console.log(err, err.stack);
        else console.log('delete', data);
    });

and the returned data is:

delete { Deleted: [ { Key: '1.txt' }, { Key: '3.txt' }, { Key: '2.txt' } ],
  Errors: [] }

so I assume the deletion is completed. But the objects are still exist in the folder, is there something wrong with my code?

I also tried to delete objects using for loop and s3.deleteObject, but it only delete the last object in my list of files.

for (var i = 0; i < files.length; i++) {
    var copyParams = {
        Bucket: 'frontpass-test',
        CopySource: 'frontpass-test/unsold/' + files[i].filename,
        Key: 'sold/' + files[i].filename
    };      
    var deleteParam = {
        Bucket: 'frontpass-test',
        Key: 'unsold/' + files[i].filename
    };
    s3.copyObject(copyParams, function(err, data) {
        if (err) console.log(err, err.stack);
        else {
            s3.deleteObject(deleteParam, function(err, data) {
                if (err) console.log(err, err.stack);
                else console.log('delete', data);
            });
        }
    });
}

Any idea on how to delete objects in my case? Thanks in advance.

Upvotes: 10

Views: 6118

Answers (2)

liron_hazan
liron_hazan

Reputation: 1546

Just had to implement folder rename on top of s3, I did it as follows: (promise api)

    _getDataForItemRename(from, to) {
    return s3.listObjectsV2({Bucket: services.conf.workspace, Prefix: from}).promise()
        .then((data) => {
            const toCopy = [];
            const toRemove = [];
            const s3Contents = Object.assign([], data.Contents);
            // in case of a single dir (with no children)
            if (s3Contents.length === 0) {
                s3Contents.push({Key: from});
            }
            s3Contents.forEach((item) => {
                const copyPromise = s3.copyObject({
                    Bucket: services.conf.workspace,
                    Key: to,
                    CopySource: services.conf.workspace + '/' + item.Key
                }).promise();

                const deletePromise = s3.deleteObjects({
                    Bucket: services.conf.workspace,
                    Delete: {Objects: [{Key: from}]}
                }).promise();

                toCopy.push(copyPromise);
                toRemove.push(deletePromise);
            });

            return {copy: toCopy, remove: toRemove};
        }).catch((err) => {
            return Promise.reject(err);
        });
}


            return this._getDataForItemRename(_from, _to).then((files) => {
            return Promise.all(files.copy).then(() => {
                return Promise.all(files.remove).then(result => {
                        return result;
                    });
            });
        }).catch((err) => {
            return Promise.reject(err);
        });

Upvotes: 1

Max
Max

Reputation: 8836

Well the first example looks good. Do you have object versioning turned on in the bucket? That would keep a copy of a file even after you delete it.

The second example actually contains some bugs that would explain why only the last one gets deleted. Because Node.js is asynchronous, when you hit the copyObject function call, the loop iteration ends and goes to the next iteration, not waiting for the callback on copyObject to be called. You try to define the params variables for each iteration of the loop with the var keyword, but because Javascript has function level scope not block level scope, you aren't actually creating new variables on each iteration. You only have one instance of copyParmas and deleteParams. So you quickly run through the loop and deleteParams stays on the value it receives in the last iteration of the loop. Then eventually the callbacks to the copyObject calls start firing, and they all call deleteObject with deleteParams which by now is the last one. In order to make multiple asynchronous calls in a loop, I like to use the async library. Using it, you could do the following:

async.each(files, function iterator(file, callback) {
    var copyParams = {
        Bucket: 'frontpass-test',
        CopySource: 'frontpass-test/unsold/' + file.filename,
        Key: 'sold/' + file.filename
    };      
    var deleteParam = {
        Bucket: 'frontpass-test',
        Key: 'unsold/' + file.filename
    };
    s3.copyObject(copyParams, function(err, data) {
        if (err) callback(err);
        else {
            s3.deleteObject(deleteParam, function(err, data) {
                if (err) callback(err)
                else {
                    console.log('delete', data);
                    callback();
                }
            });
        }
    });
}, function allDone(err) {
    //This gets called when all callbacks are called
    if (err) console.log(err, err.stack);
});

Upvotes: 4

Related Questions