Migrate a large JSON file into MongoDB using Node.js

I’m using Node.js to migrate data into MongoDB. The JSON file I’m trying to migrate is very large; when I migrate only part of the data (about 8000 docs) it works, but with the full file I get this error:

MongooseError: Operation `products.insertOne()` buffering timed out after 10000ms
    at Timeout.<anonymous> (migration-module\node_modules\mongoose\lib\drivers\node-mongodb-native\collection.js:148:23)
    at listOnTimeout (internal/timers.js:557:17)
    at processTimers (internal/timers.js:500:7)

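From what I understand, this buffering error means Mongoose queues the insertOne() calls while the connection is not open yet and gives up after 10 seconds, so maybe the inserts start before the connection is actually established. A minimal check I could add, assuming the same connection string (a sketch only, not tested):

mongoose
  .connect('mongodb://host')
  .then(() => console.log('connected, safe to start inserting'))
  .catch((err) => console.error('could not connect:', err))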

The script:

/** @format */
var fs = require('fs')
var mongoose = require('mongoose')

mongoose.connect('mongodb://host')
mongoose.Promise = global.Promise

var schema = new mongoose.Schema({
  // some fields
})

// readFileSync is synchronous: it returns the file contents directly and takes no callback
var d = fs.readFileSync('./data/file.json', 'utf8')
var e = JSON.parse(d)

var Product = mongoose.model('product', schema)
//console.log(e)

for (var i = 0; i < e.length; i++) {
  // data process
  // another data process
  var product = new Product(e[i])
  // save() is asynchronous, so every save is fired without waiting for the previous one
  product.save(function (err) {
    if (err) return console.log(err)
  })
}

e.length === i ? console.log('Migration Done successfully') : ''
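
For what it's worth, I was also thinking of rewriting the loop to wait for the connection first and to insert in batches with Mongoose's insertMany() instead of one save() per document. A rough sketch of what I mean (untested, and the batch size of 1000 is an arbitrary guess):

const fs = require('fs')
const mongoose = require('mongoose')

const schema = new mongoose.Schema({
  // some fields
})
const Product = mongoose.model('product', schema)

const BATCH_SIZE = 1000 // arbitrary, just for illustration

async function migrate () {
  // wait for the connection before touching the model, so nothing buffers
  await mongoose.connect('mongodb://host')
  const docs = JSON.parse(fs.readFileSync('./data/file.json', 'utf8'))
  for (let i = 0; i < docs.length; i += BATCH_SIZE) {
    // one round trip per batch instead of one save() per document
    await Product.insertMany(docs.slice(i, i + BATCH_SIZE))
  }
  console.log('Migration Done successfully')
  await mongoose.disconnect()
}

migrate().catch((err) => console.error(err))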

Any solution to get rid of this problem, please?