αƞjiβ

Reputation: 3246

Collect unique objects in JavaScript array

Suppose I have following arrays of objects

var firstDataSet = [
  {'id': 123, 'name': 'ABC'},
  {'id': 456, 'name': 'DEF'},
  {'id': 789, 'name': 'GHI'},
  {'id': 101, 'name': 'JKL'}
];

var secondDataSet = [
  {'id': 123, 'name': 'ABC', 'xProp': '1q'},
  {'id': 156, 'name': 'MNO', 'xProp': '2w'},
  {'id': 789, 'name': 'GHI', 'xProp': '3e'},
  {'id': 111, 'name': 'PQR', 'xProp': '4r'}
];

Now I want to collect an array of unique objects (matched on id and name), i.e.

var firstDataSet = [
  {'id': 123, 'name': 'ABC', 'xProp': '1q'},
  {'id': 456, 'name': 'DEF'},
  {'id': 789, 'name': 'GHI', 'xProp': '3e'},
  {'id': 101, 'name': 'JKL'},
  {'id': 156, 'name': 'MNO', 'xProp': '2w'},
  {'id': 111, 'name': 'PQR', 'xProp': '4r'}
];

I am able to combine ALL of the objects with

Array.prototype.unshift.apply(firstDataSet , secondDataSet );

But I am not sure how to filter out the duplicates. Any suggestions?

Edit: The objects in the two arrays are not identical; at the very least they differ in the number of properties.

Upvotes: 6

Views: 17444

Answers (4)

Akshat Mahajan

Reputation: 9846

To Remove Duplicates With All Identical Properties

This was the original question.

Use a Set:

The Set object lets you store unique values of any type, whether primitive values or object references.

You can also store object literals, but two literals with the same contents are still distinct references, so stringify them first if you want value-based uniqueness.

var list = [JSON.stringify({'id': 123, 'name': 'ABC'}), JSON.stringify({'id': 123, 'name': 'ABC'})];
var unique_list = new Set(list); // returns Set {'{"id":123,"name":"ABC"}'}
var list = Array.from(unique_list); // converts back to an array, and you can unstringify the results accordingly.

For more ways to construct a set back to an array, you can follow instructions here. If you can't use ES6 (which is what defines Set), there's a polyfill for older browsers.
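A minimal sketch of the "unstringify" step, assuming the unique_list variable from above:

var objects = Array.from(unique_list).map(function (s) {
  return JSON.parse(s); // reverse the JSON.stringify step
});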


To Remove Objects with Duplicate Subset of Properties

Unfortunately, these objects are no longer strict duplicates, so a Set (for instance) can no longer handle them directly.

The easiest way to approach this type of problem is to iterate through the array of objects, identify those with repeated property values, and eliminate in place using splice, for example.
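A minimal sketch of that idea, assuming duplicates are identified by matching id and name and that the richer object (the one with more properties) should be kept:

var merged = firstDataSet.concat(secondDataSet);

for (var i = 0; i < merged.length; i++) {
  for (var j = merged.length - 1; j > i; j--) {
    // two entries count as duplicates when both id and name match
    if (merged[j].id === merged[i].id && merged[j].name === merged[i].name) {
      // keep whichever entry has more properties, then remove the other in place
      if (Object.keys(merged[j]).length > Object.keys(merged[i]).length) {
        merged[i] = merged[j];
      }
      merged.splice(j, 1);
    }
  }
}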

Upvotes: 11

Shubham Dixit

Reputation: 1

This can be achieved by extending the Set class, as below:

    var firstDataSet = [
      {'id': 123, 'name': 'ABC'},
      {'id': 456, 'name': 'DEF'},
      {'id': 789, 'name': 'GHI'},
      {'id': 101, 'name': 'JKL'}
    ];

    var secondDataSet = [
      {'id': 123, 'name': 'ABC', 'xProp': '1q'},
      {'id': 156, 'name': 'MNO', 'xProp': '2w'},
      {'id': 789, 'name': 'GHI', 'xProp': '3e'},
      {'id': 111, 'name': 'PQR', 'xProp': '4r'}
    ];

    Array.prototype.unshift.apply(firstDataSet , secondDataSet );

    //console.log(firstDataSet)

    // keep only the first object seen for each id; later duplicates are removed from the Set
    class UniqueSet extends Set {
        constructor(values) {
            super(values);

            const data = [];
            for (let value of this) {
                if (data.includes(value.id)) {
                    this.delete(value);
                } else {
                    data.push(value.id);
                }
            }
        }
    }

    console.log(new UniqueSet(firstDataSet))

Working link
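If you need a plain array back rather than a Set, you can convert the result of the UniqueSet class above, e.g. with Array.from:

    var uniqueArray = Array.from(new UniqueSet(firstDataSet));
    console.log(uniqueArray);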

Upvotes: 1

frajk

Reputation: 863

This may not be the most efficient solution, but assuming that id is always unique, it should work.

var firstDataSet = [
  {'id': 123, 'name': 'ABC'},
  {'id': 456, 'name': 'DEF'},
  {'id': 789, 'name': 'GHI'},
  {'id': 101, 'name': 'JKL'}
];

var secondDataSet = [
  {'id': 123, 'name': 'ABC', 'xProp': '1q'},
  {'id': 156, 'name': 'MNO', 'xProp': '2w'},
  {'id': 789, 'name': 'GHI', 'xProp': '3e'},
  {'id': 111, 'name': 'PQR', 'xProp': '4r'}
];

// dedupe an array of primitives by using an object as a hash of values already seen
Array.prototype.unique = function() {
    var o = {}, i, l = this.length, r = [];
    for(i=0; i<l;i+=1) o[this[i]] = this[i];
    for(i in o) r.push(o[i]);
    return r;
};

function concatUnique(a, b, property) {
    var arr = a.concat(b);
    // sort so that objects with more properties come first; the first match per id wins below
    arr.sort(function(a,b) { 
        return Object.keys(b).length - Object.keys(a).length; 
    });
    var ids = arr.map(function(obj){ return obj[property] }).unique();

    // keep only the first object encountered for each unique id
    return arr.filter(function(obj) { 
        if(ids.indexOf(obj[property]) > -1) { 
            ids.splice( ids.indexOf(obj[property]) , 1); 
            return true; 
        } else { 
            return false 
        }
    });
}

var newArray = concatUnique(firstDataSet, secondDataSet, 'id');
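For the sample arrays above, this should yield six objects, with the richer (three-property) versions kept wherever an id appears in both arrays (the exact order of entries may vary with the engine's sort):

console.log(newArray);
// [ {id: 123, name: 'ABC', xProp: '1q'},
//   {id: 156, name: 'MNO', xProp: '2w'},
//   {id: 789, name: 'GHI', xProp: '3e'},
//   {id: 111, name: 'PQR', xProp: '4r'},
//   {id: 456, name: 'DEF'},
//   {id: 101, name: 'JKL'} ]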

Upvotes: 0

user663031

Reputation:

We'll combine the two arrays using concat, then filter the resulting array using filter. For each element, we'll find the index of the first element with the same id and name, using findIndex. If that index is the same as the current index, it means this is the first occurrence of that id and name, so we just let it pass through. Otherwise, we'll add in new fields to the first occurrence, and filter it out.

function combine(a1, a2) {

  function match(e1, e2) { return e1.id === e2.id && e1.name === e2.name; }

  return a1.concat(a2).filter((e1, i, a) => {
    let firstIndex = a.findIndex(e2 => match(e1, e2));
    if (i === firstIndex) return true;  // this is the first occurrence
    a[firstIndex].xProp = e1.xProp;     // copy the property onto the first occurrence
    return false;                       // filter out
  });

}

If you want to handle arbitrary properties, instead of just xProp, then change the relevant line to something like

Object.assign(a[firstIndex], e1);

That copies all of the current occurrence's properties onto the first occurrence (the object already kept in the filtered result), including whatever additional properties it may have.
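For example, calling it with the arrays from the question should produce exactly the merged result the question asks for:

var merged = combine(firstDataSet, secondDataSet);
console.log(merged);
// [ {id: 123, name: 'ABC', xProp: '1q'},
//   {id: 456, name: 'DEF'},
//   {id: 789, name: 'GHI', xProp: '3e'},
//   {id: 101, name: 'JKL'},
//   {id: 156, name: 'MNO', xProp: '2w'},
//   {id: 111, name: 'PQR', xProp: '4r'} ]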

Mandatory disclaimer: As always, depending on your environment, you may not have arrow functions, or Array#findIndex, or Object.assign. In such cases, rewrite/polyfill/transpile as necessary.

Upvotes: 0
