- 'use strict';
-
- const common = require('./common');
- const BulkOperationBase = common.BulkOperationBase;
- const Batch = common.Batch;
- const bson = common.bson;
- const utils = require('../utils');
- const toError = utils.toError;
-
- /**
- * Add an operation to the internal list of operations, creating a new batch
- * when the count, byte-size, or operation-type limits of the current batch are reached.
- *
- * @param {OrderedBulkOperation} bulkOperation the bulk operation to add the document to
- * @param {number} docType number indicating the document type
- * @param {object} document the operation document to add
- * @return {OrderedBulkOperation} the bulkOperation the document was added to
- */
- function addToOperationsList(bulkOperation, docType, document) {
- // Get the bsonSize
- const bsonSize = bson.calculateObjectSize(document, {
- checkKeys: false,
-
- // Since we don't know what the user selected for BSON options here,
- // err on the safe side, and check the size with ignoreUndefined: false.
- ignoreUndefined: false
- });
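// Illustration, assuming js-bson's ignoreUndefined semantics: a document such as
// { _id: 1, note: undefined } measures larger with ignoreUndefined: false than with
// ignoreUndefined: true, because the undefined-valued key is still counted, so the
// size used for batching is never an under-estimate.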
-
- // Throw error if the doc is bigger than the max BSON size
- if (bsonSize >= bulkOperation.s.maxBatchSizeBytes)
- throw toError('document is larger than the maximum size ' + bulkOperation.s.maxBatchSizeBytes);
-
- // Create a new batch object if we don't have a current one
- if (bulkOperation.s.currentBatch == null)
- bulkOperation.s.currentBatch = new Batch(docType, bulkOperation.s.currentIndex);
-
- const maxKeySize = bulkOperation.s.maxKeySize;
-
- // Check if we need to create a new batch: the current batch is full (op count),
- // adding this document would exceed the byte limit, or the operation type changed
- if (
- bulkOperation.s.currentBatchSize + 1 >= bulkOperation.s.maxWriteBatchSize ||
- bulkOperation.s.currentBatchSizeBytes + maxKeySize + bsonSize >=
- bulkOperation.s.maxBatchSizeBytes ||
- bulkOperation.s.currentBatch.batchType !== docType
- ) {
- // Save the batch to the execution stack
- bulkOperation.s.batches.push(bulkOperation.s.currentBatch);
-
- // Create a new batch
- bulkOperation.s.currentBatch = new Batch(docType, bulkOperation.s.currentIndex);
-
- // Reset the current size trackers
- bulkOperation.s.currentBatchSize = 0;
- bulkOperation.s.currentBatchSizeBytes = 0;
- }
-
- if (docType === common.INSERT) {
- bulkOperation.s.bulkResult.insertedIds.push({
- index: bulkOperation.s.currentIndex,
- _id: document._id
- });
- }
-
- // Operations must be single documents, never an array of documents
- if (Array.isArray(document)) {
- throw toError('operation passed in cannot be an Array');
- }
-
- bulkOperation.s.currentBatch.originalIndexes.push(bulkOperation.s.currentIndex);
- bulkOperation.s.currentBatch.operations.push(document);
- bulkOperation.s.currentBatchSize += 1;
- bulkOperation.s.currentBatchSizeBytes += maxKeySize + bsonSize;
- bulkOperation.s.currentIndex += 1;
-
- // Return bulkOperation
- return bulkOperation;
- }
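
As a minimal sketch of the split rule above (a hypothetical helper, assuming the state is a plain object with the same field names), a new batch starts when the operation count, the accumulated byte size, or the operation type no longer fits the current batch:

function wouldStartNewBatch(state, docType, bsonSize) {
  return (
    // Mirrors the three conditions checked by addToOperationsList above
    state.currentBatchSize + 1 >= state.maxWriteBatchSize ||
    state.currentBatchSizeBytes + state.maxKeySize + bsonSize >= state.maxBatchSizeBytes ||
    state.currentBatch.batchType !== docType
  );
}

For example, once a batch approaches maxWriteBatchSize operations the next operation is pushed into a fresh batch, and switching from inserts to updates always starts one, which is how the ordered semantics are preserved.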
-
- /**
- * Create a new OrderedBulkOperation instance (INTERNAL TYPE, do not instantiate directly)
- * @class
- * @extends BulkOperationBase
- * @property {number} length Get the number of operations in the bulk.
- * @return {OrderedBulkOperation} an OrderedBulkOperation instance.
- */
- class OrderedBulkOperation extends BulkOperationBase {
- constructor(topology, collection, options) {
- options = options || {};
- options = Object.assign(options, { addToOperationsList });
-
- super(topology, collection, options, true);
- }
- }
-
- /**
- * Returns an ordered bulk operation object
- * @ignore
- */
- function initializeOrderedBulkOp(topology, collection, options) {
- return new OrderedBulkOperation(topology, collection, options);
- }
-
- initializeOrderedBulkOp.OrderedBulkOperation = OrderedBulkOperation;
- module.exports = initializeOrderedBulkOp;
- module.exports.Bulk = OrderedBulkOperation;
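
For context, a minimal usage sketch, assuming a connected ~3.x driver and the legacy bulk API surface (insert, find().updateOne(), execute); the connection string and namespace below are placeholders:

const { MongoClient } = require('mongodb');

async function run() {
  const client = await MongoClient.connect('mongodb://localhost:27017');
  const collection = client.db('test').collection('items');

  // The ordered bulk is obtained through the Collection API, not by
  // constructing OrderedBulkOperation directly.
  const bulk = collection.initializeOrderedBulkOp();
  bulk.insert({ a: 1 });                                      // INSERT batch
  bulk.find({ a: 1 }).updateOne({ $set: { touched: true } }); // type change starts a new batch
  bulk.insert({ a: 2 });                                      // and another

  // Batches are executed one at a time, in order; execution stops at the first error.
  const result = await bulk.execute();
  console.log(result.nInserted, result.nModified);

  await client.close();
}

run().catch(console.error);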