Initial commit
This commit is contained in:
251
node_modules/mongodb/lib/core/connection/apm.js
generated
vendored
Normal file
251
node_modules/mongodb/lib/core/connection/apm.js
generated
vendored
Normal file
@@ -0,0 +1,251 @@
|
||||
'use strict';
|
||||
const Msg = require('../connection/msg').Msg;
|
||||
const KillCursor = require('../connection/commands').KillCursor;
|
||||
const GetMore = require('../connection/commands').GetMore;
|
||||
const calculateDurationInMs = require('../../utils').calculateDurationInMs;
|
||||
|
||||
// Command names whose payloads may contain credentials or other secrets.
const SENSITIVE_COMMAND_NAMES = [
  'authenticate',
  'saslStart',
  'saslContinue',
  'getnonce',
  'createUser',
  'updateUser',
  'copydbgetnonce',
  'copydbsaslstart',
  'copydb'
];

/** Commands that we want to redact because of the sensitive nature of their contents */
const SENSITIVE_COMMANDS = new Set(SENSITIVE_COMMAND_NAMES);
|
||||
|
||||
// helper methods

// By convention the command name is the first key of the command document.
function extractCommandName(commandDoc) {
  return Object.keys(commandDoc)[0];
}

function namespace(command) {
  return command.ns;
}

// `ns` has the form "<db>.<collection>".
function databaseName(command) {
  return command.ns.split('.')[0];
}

function collectionName(command) {
  return command.ns.split('.')[1];
}

// Legacy pools expose host/port via `options`; otherwise use `address` directly.
function generateConnectionId(pool) {
  return pool.options ? `${pool.options.host}:${pool.options.port}` : pool.address;
}

// Replace the payload of sensitive commands with an empty object.
function maybeRedact(commandName, result) {
  return SENSITIVE_COMMANDS.has(commandName) ? {} : result;
}

// Heuristic: legacy pools carry both an `s` state bag and an operation `queue`.
function isLegacyPool(pool) {
  return pool.s && pool.queue;
}
|
||||
|
||||
// Maps legacy OP_QUERY `$`-modifier keys to their modern find-command option names,
// used by extractCommand when upconverting a legacy find.
const LEGACY_FIND_QUERY_MAP = {
  $query: 'filter',
  $orderby: 'sort',
  $hint: 'hint',
  $comment: 'comment',
  $maxScan: 'maxScan',
  $max: 'max',
  $min: 'min',
  $returnKey: 'returnKey',
  $showDiskLoc: 'showRecordId',
  $maxTimeMS: 'maxTimeMS',
  $snapshot: 'snapshot'
};

// Maps wire-level cursor option names to their modern find-command equivalents.
const LEGACY_FIND_OPTIONS_MAP = {
  numberToSkip: 'skip',
  numberToReturn: 'batchSize',
  returnFieldsSelector: 'projection'
};

// OP_QUERY boolean flags that are copied through verbatim when upconverting.
const OP_QUERY_KEYS = [
  'tailable',
  'oplogReplay',
  'noCursorTimeout',
  'awaitData',
  'partial',
  'exhaust'
];
|
||||
|
||||
/**
 * Extract the actual command from the query, possibly upconverting if it's a legacy
 * format
 *
 * @param {Object} command the command
 */
const extractCommand = command => {
  // Legacy OP_GETMORE wire message: synthesize the equivalent `getMore` command document.
  if (command instanceof GetMore) {
    return {
      getMore: command.cursorId,
      collection: collectionName(command),
      batchSize: command.numberToReturn
    };
  }

  // Legacy OP_KILL_CURSORS wire message: synthesize the equivalent `killCursors` document.
  if (command instanceof KillCursor) {
    return {
      killCursors: collectionName(command),
      cursors: command.cursorIds
    };
  }

  // OP_MSG already carries the command document directly.
  if (command instanceof Msg) {
    return command.command;
  }

  // Legacy OP_QUERY wrapped command ($query envelope).
  if (command.query && command.query.$query) {
    let result;
    if (command.ns === 'admin.$cmd') {
      // upconvert legacy command
      result = Object.assign({}, command.query.$query);
    } else {
      // upconvert legacy find command
      result = { find: collectionName(command) };
      Object.keys(LEGACY_FIND_QUERY_MAP).forEach(key => {
        if (typeof command.query[key] !== 'undefined')
          result[LEGACY_FIND_QUERY_MAP[key]] = command.query[key];
      });
    }

    // Map wire-level option names (numberToSkip, etc.) to their modern names.
    Object.keys(LEGACY_FIND_OPTIONS_MAP).forEach(key => {
      if (typeof command[key] !== 'undefined') result[LEGACY_FIND_OPTIONS_MAP[key]] = command[key];
    });

    // Copy through any OP_QUERY flags that are set.
    OP_QUERY_KEYS.forEach(key => {
      if (command[key]) result[key] = command[key];
    });

    // Special case for pre-3.2 find commands (set by the Query constructor).
    if (typeof command.pre32Limit !== 'undefined') {
      result.limit = command.pre32Limit;
    }

    // $explain wraps the entire upconverted command.
    if (command.query.$explain) {
      return { explain: result };
    }

    return result;
  }

  // Plain OP_QUERY command, or an already-bare command document.
  return command.query ? command.query : command;
};
|
||||
|
||||
/**
 * Build the reply document reported by a succeeded-command event, upconverting
 * replies to legacy wire messages into the modern cursor-response shape.
 *
 * @param {Object} command the originating command (possibly a legacy wire message)
 * @param {Object} reply the raw reply, with the parsed message under `reply.message`
 */
const extractReply = (command, reply) => {
  // Legacy OP_GETMORE: shape the OP_REPLY fields like a modern `getMore` response.
  if (command instanceof GetMore) {
    return {
      ok: 1,
      cursor: {
        id: reply.message.cursorId,
        ns: namespace(command),
        nextBatch: reply.message.documents
      }
    };
  }

  // Legacy OP_KILL_CURSORS has no server reply; report the ids as `cursorsUnknown`.
  if (command instanceof KillCursor) {
    return {
      ok: 1,
      cursorsUnknown: command.cursorIds
    };
  }

  // is this a legacy find command?
  if (command.query && typeof command.query.$query !== 'undefined') {
    return {
      ok: 1,
      cursor: {
        id: reply.message.cursorId,
        ns: namespace(command),
        firstBatch: reply.message.documents
      }
    };
  }

  // Modern path: prefer the parsed result document if present.
  return reply && reply.result ? reply.result : reply;
};
|
||||
|
||||
// Derive the connection identification fields attached to every APM event.
const extractConnectionDetails = pool => {
  if (!isLegacyPool(pool)) {
    // APM in the modern pool is done at the `Connection` level, so we rename it here for
    // readability.
    const connection = pool;
    return {
      address: connection.address,
      connectionId: connection.id
    };
  }

  // Legacy pool: only a synthesized "host:port" id is available.
  return {
    connectionId: generateConnectionId(pool)
  };
};
|
||||
|
||||
/** An event indicating the start of a given command */
class CommandStartedEvent {
  /**
   * Create a started event
   *
   * @param {Pool} pool the pool that originated the command
   * @param {Object} command the command
   */
  constructor(pool, command) {
    const cmd = extractCommand(command);
    const commandName = extractCommandName(cmd);
    const connectionDetails = extractConnectionDetails(pool);

    // NOTE: remove in major revision, this is not spec behavior
    // Sensitive commands get a `{ <name>: true }` placeholder instead of real contents.
    if (SENSITIVE_COMMANDS.has(commandName)) {
      this.commandObj = {};
      this.commandObj[commandName] = true;
    }

    // Event fields: address/connectionId plus request metadata and the extracted command.
    Object.assign(this, connectionDetails, {
      requestId: command.requestId,
      databaseName: databaseName(command),
      commandName,
      command: cmd
    });
  }
}
|
||||
|
||||
/** An event indicating the success of a given command */
class CommandSucceededEvent {
  /**
   * Create a succeeded event
   *
   * @param {Pool} pool the pool that originated the command
   * @param {Object} command the command
   * @param {Object} reply the reply for this command from the server
   * @param {Array} started a high resolution tuple timestamp of when the command was first sent, to calculate duration
   */
  constructor(pool, command, reply, started) {
    const cmd = extractCommand(command);
    const commandName = extractCommandName(cmd);
    const connectionDetails = extractConnectionDetails(pool);

    // Replies to sensitive commands are redacted to an empty object.
    Object.assign(this, connectionDetails, {
      requestId: command.requestId,
      commandName,
      duration: calculateDurationInMs(started),
      reply: maybeRedact(commandName, extractReply(command, reply))
    });
  }
}
|
||||
|
||||
/** An event indicating the failure of a given command */
class CommandFailedEvent {
  /**
   * Create a failure event
   *
   * @param {Pool} pool the pool that originated the command
   * @param {Object} command the command
   * @param {MongoError|Object} error the generated error or a server error response
   * @param {Array} started a high resolution tuple timestamp of when the command was first sent, to calculate duration
   */
  constructor(pool, command, error, started) {
    const cmd = extractCommand(command);
    const commandName = extractCommandName(cmd);
    const connectionDetails = extractConnectionDetails(pool);

    // Failures of sensitive commands are redacted to an empty object.
    Object.assign(this, connectionDetails, {
      requestId: command.requestId,
      commandName,
      duration: calculateDurationInMs(started),
      failure: maybeRedact(commandName, error)
    });
  }
}
|
||||
|
||||
// Public API: the three command-monitoring (APM) event classes.
module.exports = {
  CommandStartedEvent,
  CommandSucceededEvent,
  CommandFailedEvent
};
|
||||
36
node_modules/mongodb/lib/core/connection/command_result.js
generated
vendored
Normal file
36
node_modules/mongodb/lib/core/connection/command_result.js
generated
vendored
Normal file
@@ -0,0 +1,36 @@
|
||||
'use strict';
|
||||
|
||||
/**
 * Creates a new CommandResult instance
 * @class
 * @param {object} result the parsed result document from the server
 * @param {Connection} connection A connection instance associated with this result
 * @param {object} message the raw wire-protocol message this result was parsed from
 * @return {CommandResult} A new CommandResult instance
 */
var CommandResult = function(result, connection, message) {
  Object.assign(this, { result, connection, message });
};
|
||||
|
||||
/**
 * Convert CommandResult to JSON
 * @method
 * @return {object}
 */
CommandResult.prototype.toJSON = function() {
  // Flatten the server result document over the instance's own fields, then
  // drop the raw wire message so it does not leak into serialized output.
  const merged = Object.assign({}, this, this.result);
  delete merged.message;
  return merged;
};
|
||||
|
||||
/**
 * Convert CommandResult to String representation
 * @method
 * @return {string}
 */
CommandResult.prototype.toString = function() {
  const json = this.toJSON();
  return JSON.stringify(json);
};

module.exports = CommandResult;
|
||||
507
node_modules/mongodb/lib/core/connection/commands.js
generated
vendored
Normal file
507
node_modules/mongodb/lib/core/connection/commands.js
generated
vendored
Normal file
@@ -0,0 +1,507 @@
|
||||
'use strict';
|
||||
|
||||
var retrieveBSON = require('./utils').retrieveBSON;
|
||||
var BSON = retrieveBSON();
|
||||
var Long = BSON.Long;
|
||||
const Buffer = require('safe-buffer').Buffer;
|
||||
|
||||
// Incrementing request id
// Module-wide counter shared by Query/GetMore/KillCursor to stamp wire `requestId`s.
var _requestId = 0;

// Wire command operation ids
var opcodes = require('../wireprotocol/shared').opcodes;

// Query flags
// Bit values for the OP_QUERY flags word (assembled in Query.prototype.toBin).
var OPTS_TAILABLE_CURSOR = 2;
var OPTS_SLAVE = 4;
var OPTS_OPLOG_REPLAY = 8;
var OPTS_NO_CURSOR_TIMEOUT = 16;
var OPTS_AWAIT_DATA = 32;
var OPTS_EXHAUST = 64;
var OPTS_PARTIAL = 128;

// Response flags
// Bit values for the OP_REPLY responseFlags word (tested in the Response constructor).
var CURSOR_NOT_FOUND = 1;
var QUERY_FAILURE = 2;
var SHARD_CONFIG_STALE = 4;
var AWAIT_CAPABLE = 8;
|
||||
|
||||
/**************************************************************
 * QUERY
 **************************************************************/
// Wire-level representation of an OP_QUERY message.
// `bson` is the BSON codec used for serialization, `ns` the full
// "db.collection" namespace string, `query` the query/command document.
// Throws if `ns` or `query` is missing, or if `ns` contains a NUL byte.
var Query = function(bson, ns, query, options) {
  var self = this;
  // Basic options needed to be passed in
  if (ns == null) throw new Error('ns must be specified for query');
  if (query == null) throw new Error('query must be specified for query');

  // Validate that we are not passing 0x00 in the collection name
  if (ns.indexOf('\x00') !== -1) {
    throw new Error('namespace cannot contain a null character');
  }

  // Basic options
  this.bson = bson;
  this.ns = ns;
  this.query = query;

  // Additional options
  this.numberToSkip = options.numberToSkip || 0;
  this.numberToReturn = options.numberToReturn || 0;
  this.returnFieldSelector = options.returnFieldSelector || null;
  // Each query consumes a fresh id from the module-wide counter.
  this.requestId = Query.getRequestId();

  // special case for pre-3.2 find commands, delete ASAP
  this.pre32Limit = options.pre32Limit;

  // Serialization option
  this.serializeFunctions =
    typeof options.serializeFunctions === 'boolean' ? options.serializeFunctions : false;
  this.ignoreUndefined =
    typeof options.ignoreUndefined === 'boolean' ? options.ignoreUndefined : false;
  this.maxBsonSize = options.maxBsonSize || 1024 * 1024 * 16; // default 16MB
  this.checkKeys = typeof options.checkKeys === 'boolean' ? options.checkKeys : true;
  // batchSize starts mirroring numberToReturn; toBin re-syncs them if changed later.
  this.batchSize = self.numberToReturn;

  // Flags (mutated by callers before toBin; only slaveOk is option-driven here)
  this.tailable = false;
  this.slaveOk = typeof options.slaveOk === 'boolean' ? options.slaveOk : false;
  this.oplogReplay = false;
  this.noCursorTimeout = false;
  this.awaitData = false;
  this.exhaust = false;
  this.partial = false;
};
|
||||
|
||||
//
// Assign a new request Id
// NOTE(review): this assigns the post-increment value (the counter *before* the
// bump), whereas Query.getRequestId() pre-increments — confirm the off-by-one
// difference is intentional before relying on either.
Query.prototype.incRequestId = function() {
  this.requestId = _requestId++;
};
|
||||
|
||||
//
// Assign a new request Id
// Peek at the id the next Query.getRequestId() call would return, without consuming it.
Query.nextRequestId = function() {
  return _requestId + 1;
};
|
||||
|
||||
//
// Uses a single allocated buffer for the process, avoiding multiple memory allocations
// Serializes this OP_QUERY message into a list of buffers to be written to the
// socket: a hand-packed header (all multi-byte fields little-endian, lowest
// byte at the lowest index), the BSON query document, and an optional BSON
// projection document.
Query.prototype.toBin = function() {
  var self = this;
  var buffers = [];
  var projection = null;

  // Set up the flags
  var flags = 0;
  if (this.tailable) {
    flags |= OPTS_TAILABLE_CURSOR;
  }

  if (this.slaveOk) {
    flags |= OPTS_SLAVE;
  }

  if (this.oplogReplay) {
    flags |= OPTS_OPLOG_REPLAY;
  }

  if (this.noCursorTimeout) {
    flags |= OPTS_NO_CURSOR_TIMEOUT;
  }

  if (this.awaitData) {
    flags |= OPTS_AWAIT_DATA;
  }

  if (this.exhaust) {
    flags |= OPTS_EXHAUST;
  }

  if (this.partial) {
    flags |= OPTS_PARTIAL;
  }

  // If batchSize is different to self.numberToReturn
  if (self.batchSize !== self.numberToReturn) self.numberToReturn = self.batchSize;

  // Allocate write protocol header buffer
  var header = Buffer.alloc(
    4 * 4 + // Header
    4 + // Flags
    Buffer.byteLength(self.ns) +
    1 + // namespace
    4 + // numberToSkip
    4 // numberToReturn
  );

  // Add header to buffers
  buffers.push(header);

  // Serialize the query
  var query = self.bson.serialize(this.query, {
    checkKeys: this.checkKeys,
    serializeFunctions: this.serializeFunctions,
    ignoreUndefined: this.ignoreUndefined
  });

  // Add query document
  buffers.push(query);

  if (self.returnFieldSelector && Object.keys(self.returnFieldSelector).length > 0) {
    // Serialize the projection document
    projection = self.bson.serialize(this.returnFieldSelector, {
      checkKeys: this.checkKeys,
      serializeFunctions: this.serializeFunctions,
      ignoreUndefined: this.ignoreUndefined
    });
    // Add projection document
    buffers.push(projection);
  }

  // Total message size
  var totalLength = header.length + query.length + (projection ? projection.length : 0);

  // Set up the index
  var index = 4;

  // Write total document length
  header[3] = (totalLength >> 24) & 0xff;
  header[2] = (totalLength >> 16) & 0xff;
  header[1] = (totalLength >> 8) & 0xff;
  header[0] = totalLength & 0xff;

  // Write header information requestId
  header[index + 3] = (this.requestId >> 24) & 0xff;
  header[index + 2] = (this.requestId >> 16) & 0xff;
  header[index + 1] = (this.requestId >> 8) & 0xff;
  header[index] = this.requestId & 0xff;
  index = index + 4;

  // Write header information responseTo (always 0 for a request)
  header[index + 3] = (0 >> 24) & 0xff;
  header[index + 2] = (0 >> 16) & 0xff;
  header[index + 1] = (0 >> 8) & 0xff;
  header[index] = 0 & 0xff;
  index = index + 4;

  // Write header information OP_QUERY
  header[index + 3] = (opcodes.OP_QUERY >> 24) & 0xff;
  header[index + 2] = (opcodes.OP_QUERY >> 16) & 0xff;
  header[index + 1] = (opcodes.OP_QUERY >> 8) & 0xff;
  header[index] = opcodes.OP_QUERY & 0xff;
  index = index + 4;

  // Write header information flags
  header[index + 3] = (flags >> 24) & 0xff;
  header[index + 2] = (flags >> 16) & 0xff;
  header[index + 1] = (flags >> 8) & 0xff;
  header[index] = flags & 0xff;
  index = index + 4;

  // Write collection name (NUL-terminated cstring)
  index = index + header.write(this.ns, index, 'utf8') + 1;
  header[index - 1] = 0;

  // Write header information flags numberToSkip
  header[index + 3] = (this.numberToSkip >> 24) & 0xff;
  header[index + 2] = (this.numberToSkip >> 16) & 0xff;
  header[index + 1] = (this.numberToSkip >> 8) & 0xff;
  header[index] = this.numberToSkip & 0xff;
  index = index + 4;

  // Write header information flags numberToReturn
  header[index + 3] = (this.numberToReturn >> 24) & 0xff;
  header[index + 2] = (this.numberToReturn >> 16) & 0xff;
  header[index + 1] = (this.numberToReturn >> 8) & 0xff;
  header[index] = this.numberToReturn & 0xff;
  index = index + 4;

  // Return the buffers
  return buffers;
};
|
||||
|
||||
// Reserve and return the next request id from the module-wide counter
// (pre-increment: the returned id is consumed).
Query.getRequestId = function() {
  return ++_requestId;
};
|
||||
|
||||
/**************************************************************
 * GETMORE
 **************************************************************/
// Wire-level representation of an OP_GETMORE message requesting the next batch
// for an existing server-side cursor. `cursorId` is a BSON Long (toBin calls
// getLowBits()/getHighBits() on it).
var GetMore = function(bson, ns, cursorId, opts) {
  opts = opts || {};
  this.numberToReturn = opts.numberToReturn || 0;
  // NOTE(review): uses the raw post-increment counter, unlike Query which goes
  // through Query.getRequestId() — confirm the asymmetry is intentional.
  this.requestId = _requestId++;
  this.bson = bson;
  this.ns = ns;
  this.cursorId = cursorId;
};
|
||||
|
||||
//
// Uses a single allocated buffer for the process, avoiding multiple memory allocations
// Serializes this OP_GETMORE message into one Buffer. All multi-byte fields are
// written little-endian by hand (lowest byte at the lowest index).
GetMore.prototype.toBin = function() {
  // zero(4) + ns + NUL(1) + numberToReturn(4) + cursorId(8) + message header(4 * 4)
  var length = 4 + Buffer.byteLength(this.ns) + 1 + 4 + 8 + 4 * 4;
  // Create command buffer
  var index = 0;
  // Allocate buffer
  var _buffer = Buffer.alloc(length);

  // Write header information
  // index = write32bit(index, _buffer, length);
  _buffer[index + 3] = (length >> 24) & 0xff;
  _buffer[index + 2] = (length >> 16) & 0xff;
  _buffer[index + 1] = (length >> 8) & 0xff;
  _buffer[index] = length & 0xff;
  index = index + 4;

  // index = write32bit(index, _buffer, requestId);
  _buffer[index + 3] = (this.requestId >> 24) & 0xff;
  _buffer[index + 2] = (this.requestId >> 16) & 0xff;
  _buffer[index + 1] = (this.requestId >> 8) & 0xff;
  _buffer[index] = this.requestId & 0xff;
  index = index + 4;

  // responseTo is always 0 for a request
  // index = write32bit(index, _buffer, 0);
  _buffer[index + 3] = (0 >> 24) & 0xff;
  _buffer[index + 2] = (0 >> 16) & 0xff;
  _buffer[index + 1] = (0 >> 8) & 0xff;
  _buffer[index] = 0 & 0xff;
  index = index + 4;

  // index = write32bit(index, _buffer, OP_GETMORE);
  _buffer[index + 3] = (opcodes.OP_GETMORE >> 24) & 0xff;
  _buffer[index + 2] = (opcodes.OP_GETMORE >> 16) & 0xff;
  _buffer[index + 1] = (opcodes.OP_GETMORE >> 8) & 0xff;
  _buffer[index] = opcodes.OP_GETMORE & 0xff;
  index = index + 4;

  // reserved ZERO field of OP_GETMORE
  // index = write32bit(index, _buffer, 0);
  _buffer[index + 3] = (0 >> 24) & 0xff;
  _buffer[index + 2] = (0 >> 16) & 0xff;
  _buffer[index + 1] = (0 >> 8) & 0xff;
  _buffer[index] = 0 & 0xff;
  index = index + 4;

  // Write collection name (NUL-terminated cstring)
  index = index + _buffer.write(this.ns, index, 'utf8') + 1;
  _buffer[index - 1] = 0;

  // Write batch size
  // index = write32bit(index, _buffer, numberToReturn);
  _buffer[index + 3] = (this.numberToReturn >> 24) & 0xff;
  _buffer[index + 2] = (this.numberToReturn >> 16) & 0xff;
  _buffer[index + 1] = (this.numberToReturn >> 8) & 0xff;
  _buffer[index] = this.numberToReturn & 0xff;
  index = index + 4;

  // Write cursor id (64-bit Long: low word first, then high word)
  // index = write32bit(index, _buffer, cursorId.getLowBits());
  _buffer[index + 3] = (this.cursorId.getLowBits() >> 24) & 0xff;
  _buffer[index + 2] = (this.cursorId.getLowBits() >> 16) & 0xff;
  _buffer[index + 1] = (this.cursorId.getLowBits() >> 8) & 0xff;
  _buffer[index] = this.cursorId.getLowBits() & 0xff;
  index = index + 4;

  // index = write32bit(index, _buffer, cursorId.getHighBits());
  _buffer[index + 3] = (this.cursorId.getHighBits() >> 24) & 0xff;
  _buffer[index + 2] = (this.cursorId.getHighBits() >> 16) & 0xff;
  _buffer[index + 1] = (this.cursorId.getHighBits() >> 8) & 0xff;
  _buffer[index] = this.cursorId.getHighBits() & 0xff;
  index = index + 4;

  // Return buffer
  return _buffer;
};
|
||||
|
||||
/**************************************************************
 * KILLCURSOR
 **************************************************************/
// Wire-level representation of an OP_KILL_CURSORS message. `cursorIds` is an
// array of BSON Longs (toBin calls getLowBits()/getHighBits() on each).
var KillCursor = function(bson, ns, cursorIds) {
  this.ns = ns;
  // NOTE(review): post-increment counter, unlike Query.getRequestId() — see GetMore.
  this.requestId = _requestId++;
  this.cursorIds = cursorIds;
};
|
||||
|
||||
//
// Uses a single allocated buffer for the process, avoiding multiple memory allocations
// Serializes this OP_KILL_CURSORS message into one Buffer. All multi-byte
// fields are written little-endian by hand (lowest byte at the lowest index).
KillCursor.prototype.toBin = function() {
  // zero(4) + numberOfCursorIds(4) + message header(4 * 4) + 8 bytes per cursor id
  var length = 4 + 4 + 4 * 4 + this.cursorIds.length * 8;

  // Create command buffer
  var index = 0;
  var _buffer = Buffer.alloc(length);

  // Write header information
  // index = write32bit(index, _buffer, length);
  _buffer[index + 3] = (length >> 24) & 0xff;
  _buffer[index + 2] = (length >> 16) & 0xff;
  _buffer[index + 1] = (length >> 8) & 0xff;
  _buffer[index] = length & 0xff;
  index = index + 4;

  // index = write32bit(index, _buffer, requestId);
  _buffer[index + 3] = (this.requestId >> 24) & 0xff;
  _buffer[index + 2] = (this.requestId >> 16) & 0xff;
  _buffer[index + 1] = (this.requestId >> 8) & 0xff;
  _buffer[index] = this.requestId & 0xff;
  index = index + 4;

  // responseTo is always 0 for a request
  // index = write32bit(index, _buffer, 0);
  _buffer[index + 3] = (0 >> 24) & 0xff;
  _buffer[index + 2] = (0 >> 16) & 0xff;
  _buffer[index + 1] = (0 >> 8) & 0xff;
  _buffer[index] = 0 & 0xff;
  index = index + 4;

  // index = write32bit(index, _buffer, OP_KILL_CURSORS);
  _buffer[index + 3] = (opcodes.OP_KILL_CURSORS >> 24) & 0xff;
  _buffer[index + 2] = (opcodes.OP_KILL_CURSORS >> 16) & 0xff;
  _buffer[index + 1] = (opcodes.OP_KILL_CURSORS >> 8) & 0xff;
  _buffer[index] = opcodes.OP_KILL_CURSORS & 0xff;
  index = index + 4;

  // reserved ZERO field of OP_KILL_CURSORS
  // index = write32bit(index, _buffer, 0);
  _buffer[index + 3] = (0 >> 24) & 0xff;
  _buffer[index + 2] = (0 >> 16) & 0xff;
  _buffer[index + 1] = (0 >> 8) & 0xff;
  _buffer[index] = 0 & 0xff;
  index = index + 4;

  // Write number of cursor ids
  // index = write32bit(index, _buffer, this.cursorIds.length);
  _buffer[index + 3] = (this.cursorIds.length >> 24) & 0xff;
  _buffer[index + 2] = (this.cursorIds.length >> 16) & 0xff;
  _buffer[index + 1] = (this.cursorIds.length >> 8) & 0xff;
  _buffer[index] = this.cursorIds.length & 0xff;
  index = index + 4;

  // Write all the cursor ids into the array
  for (var i = 0; i < this.cursorIds.length; i++) {
    // Write cursor id (64-bit Long: low word first, then high word)
    // index = write32bit(index, _buffer, cursorIds[i].getLowBits());
    _buffer[index + 3] = (this.cursorIds[i].getLowBits() >> 24) & 0xff;
    _buffer[index + 2] = (this.cursorIds[i].getLowBits() >> 16) & 0xff;
    _buffer[index + 1] = (this.cursorIds[i].getLowBits() >> 8) & 0xff;
    _buffer[index] = this.cursorIds[i].getLowBits() & 0xff;
    index = index + 4;

    // index = write32bit(index, _buffer, cursorIds[i].getHighBits());
    _buffer[index + 3] = (this.cursorIds[i].getHighBits() >> 24) & 0xff;
    _buffer[index + 2] = (this.cursorIds[i].getHighBits() >> 16) & 0xff;
    _buffer[index + 1] = (this.cursorIds[i].getHighBits() >> 8) & 0xff;
    _buffer[index] = this.cursorIds[i].getHighBits() & 0xff;
    index = index + 4;
  }

  // Return buffer
  return _buffer;
};
|
||||
|
||||
// Parsed view over a received OP_REPLY message. The header fields come from
// `msgHeader`; fixed-offset body fields are read immediately, while the BSON
// documents are deserialized lazily by Response.prototype.parse().
var Response = function(bson, message, msgHeader, msgBody, opts) {
  opts = opts || { promoteLongs: true, promoteValues: true, promoteBuffers: false };
  this.parsed = false;
  this.raw = message;
  this.data = msgBody;
  this.bson = bson;
  this.opts = opts;

  // Read the message header
  this.length = msgHeader.length;
  this.requestId = msgHeader.requestId;
  this.responseTo = msgHeader.responseTo;
  this.opCode = msgHeader.opCode;
  this.fromCompressed = msgHeader.fromCompressed;

  // Read the message body (fixed little-endian offsets: flags@0, cursorId@4-11,
  // startingFrom@12, numberReturned@16)
  this.responseFlags = msgBody.readInt32LE(0);
  this.cursorId = new Long(msgBody.readInt32LE(4), msgBody.readInt32LE(8));
  this.startingFrom = msgBody.readInt32LE(12);
  this.numberReturned = msgBody.readInt32LE(16);

  // Preallocate document array (filled in by parse())
  this.documents = new Array(this.numberReturned);

  // Flag values
  this.cursorNotFound = (this.responseFlags & CURSOR_NOT_FOUND) !== 0;
  this.queryFailure = (this.responseFlags & QUERY_FAILURE) !== 0;
  this.shardConfigStale = (this.responseFlags & SHARD_CONFIG_STALE) !== 0;
  this.awaitCapable = (this.responseFlags & AWAIT_CAPABLE) !== 0;
  this.promoteLongs = typeof opts.promoteLongs === 'boolean' ? opts.promoteLongs : true;
  this.promoteValues = typeof opts.promoteValues === 'boolean' ? opts.promoteValues : true;
  this.promoteBuffers = typeof opts.promoteBuffers === 'boolean' ? opts.promoteBuffers : false;
};
|
||||
|
||||
// Whether parse() has already materialized `documents` from the raw body.
Response.prototype.isParsed = function() {
  const parsed = this.parsed;
  return parsed;
};
|
||||
|
||||
// Deserialize the BSON documents of this OP_REPLY into `this.documents`.
// Idempotent: a second call is a no-op. With `options.raw` the documents are
// kept as Buffer slices instead of deserialized objects; `documentsReturnedIn`
// names a field to keep raw when a single wrapped document is returned.
Response.prototype.parse = function(options) {
  // Don't parse again if not needed
  if (this.parsed) return;
  options = options || {};

  // Allow the return of raw documents instead of parsing
  var raw = options.raw || false;
  var documentsReturnedIn = options.documentsReturnedIn || null;
  // Per-call promote* options override the ones captured at construction time.
  var promoteLongs =
    typeof options.promoteLongs === 'boolean' ? options.promoteLongs : this.opts.promoteLongs;
  var promoteValues =
    typeof options.promoteValues === 'boolean' ? options.promoteValues : this.opts.promoteValues;
  var promoteBuffers =
    typeof options.promoteBuffers === 'boolean' ? options.promoteBuffers : this.opts.promoteBuffers;
  var bsonSize, _options;

  // Set up the options
  _options = {
    promoteLongs: promoteLongs,
    promoteValues: promoteValues,
    promoteBuffers: promoteBuffers
  };

  // Position within OP_REPLY at which documents start
  // (See https://docs.mongodb.com/manual/reference/mongodb-wire-protocol/#wire-op-reply)
  this.index = 20;

  //
  // Parse Body
  //
  for (var i = 0; i < this.numberReturned; i++) {
    // Each BSON document starts with its own little-endian int32 length.
    bsonSize =
      this.data[this.index] |
      (this.data[this.index + 1] << 8) |
      (this.data[this.index + 2] << 16) |
      (this.data[this.index + 3] << 24);

    // If we have raw results specified slice the return document
    if (raw) {
      this.documents[i] = this.data.slice(this.index, this.index + bsonSize);
    } else {
      this.documents[i] = this.bson.deserialize(
        this.data.slice(this.index, this.index + bsonSize),
        _options
      );
    }

    // Adjust the index
    this.index = this.index + bsonSize;
  }

  // Single raw document with a named sub-field to keep raw: deserialize the
  // envelope but leave that field as raw BSON via `fieldsAsRaw`.
  if (this.documents.length === 1 && documentsReturnedIn != null && raw) {
    const fieldsAsRaw = {};
    fieldsAsRaw[documentsReturnedIn] = true;
    _options.fieldsAsRaw = fieldsAsRaw;

    const doc = this.bson.deserialize(this.documents[0], _options);
    this.documents = [doc];
  }

  // Set parsed
  this.parsed = true;
};
|
||||
|
||||
// Public API: the legacy wire protocol message types.
module.exports = {
  Query: Query,
  GetMore: GetMore,
  Response: Response,
  KillCursor: KillCursor
};
|
||||
352
node_modules/mongodb/lib/core/connection/connect.js
generated
vendored
Normal file
352
node_modules/mongodb/lib/core/connection/connect.js
generated
vendored
Normal file
@@ -0,0 +1,352 @@
|
||||
'use strict';
|
||||
const net = require('net');
|
||||
const tls = require('tls');
|
||||
const Connection = require('./connection');
|
||||
const MongoError = require('../error').MongoError;
|
||||
const MongoNetworkError = require('../error').MongoNetworkError;
|
||||
const MongoNetworkTimeoutError = require('../error').MongoNetworkTimeoutError;
|
||||
const defaultAuthProviders = require('../auth/defaultAuthProviders').defaultAuthProviders;
|
||||
const AuthContext = require('../auth/auth_provider').AuthContext;
|
||||
const WIRE_CONSTANTS = require('../wireprotocol/constants');
|
||||
const makeClientMetadata = require('../utils').makeClientMetadata;
|
||||
const MAX_SUPPORTED_WIRE_VERSION = WIRE_CONSTANTS.MAX_SUPPORTED_WIRE_VERSION;
|
||||
const MAX_SUPPORTED_SERVER_VERSION = WIRE_CONSTANTS.MAX_SUPPORTED_SERVER_VERSION;
|
||||
const MIN_SUPPORTED_WIRE_VERSION = WIRE_CONSTANTS.MIN_SUPPORTED_WIRE_VERSION;
|
||||
const MIN_SUPPORTED_SERVER_VERSION = WIRE_CONSTANTS.MIN_SUPPORTED_SERVER_VERSION;
|
||||
let AUTH_PROVIDERS;
|
||||
|
||||
/**
 * Establish a socket to a MongoDB server and perform the initial handshake,
 * yielding a ready-to-use connection.
 *
 * @param {Object} options connection options (host/port/family, bson, credentials, connectionType, ...)
 * @param {*} [cancellationToken] optional token forwarded to makeConnection to abort the attempt
 * @param {Function} callback invoked with (err, connection)
 */
function connect(options, cancellationToken, callback) {
  // Support the two-argument form connect(options, callback).
  if (typeof cancellationToken === 'function') {
    callback = cancellationToken;
    cancellationToken = undefined;
  }

  // Callers may supply a custom connection class; default to the legacy Connection.
  const ConnectionType = options && options.connectionType ? options.connectionType : Connection;
  // Lazily build the auth provider map on first use (needs the caller's BSON instance).
  if (AUTH_PROVIDERS == null) {
    AUTH_PROVIDERS = defaultAuthProviders(options.bson);
  }

  // IP family; 0 lets Node's resolver choose between IPv4 and IPv6.
  const family = options.family !== void 0 ? options.family : 0;
  makeConnection(family, options, cancellationToken, (err, socket) => {
    if (err) {
      callback(err, socket); // in the error case, `socket` is the originating error event name
      return;
    }

    performInitialHandshake(new ConnectionType(socket, options), options, callback);
  });
}
|
||||
|
||||
// A connection is considered "modern" when it is not an instance of the legacy
// `Connection` class.
function isModernConnectionType(conn) {
  const isLegacy = conn instanceof Connection;
  return !isLegacy;
}
|
||||
|
||||
// Validate that the server's advertised wire-version window overlaps the
// window this driver supports. Returns null when compatible, otherwise a
// MongoError describing the mismatch.
function checkSupportedServer(ismaster, options) {
  const serverVersionHighEnough =
    ismaster &&
    typeof ismaster.maxWireVersion === 'number' &&
    ismaster.maxWireVersion >= MIN_SUPPORTED_WIRE_VERSION;

  if (!serverVersionHighEnough) {
    // Server is too old for this driver (or reported no usable maxWireVersion).
    return new MongoError(
      `Server at ${options.host}:${options.port} reports maximum wire version ${ismaster.maxWireVersion ||
        0}, but this version of the Node.js Driver requires at least ${MIN_SUPPORTED_WIRE_VERSION} (MongoDB ${MIN_SUPPORTED_SERVER_VERSION})`
    );
  }

  const serverVersionLowEnough =
    ismaster &&
    typeof ismaster.minWireVersion === 'number' &&
    ismaster.minWireVersion <= MAX_SUPPORTED_WIRE_VERSION;

  if (serverVersionLowEnough) {
    return null;
  }

  // Server is too new for this driver.
  return new MongoError(
    `Server at ${options.host}:${options.port} reports minimum wire version ${ismaster.minWireVersion}, but this version of the Node.js Driver requires at most ${MAX_SUPPORTED_WIRE_VERSION} (MongoDB ${MAX_SUPPORTED_SERVER_VERSION})`
  );
}
|
||||
|
||||
/**
 * Performs the initial `ismaster` handshake on a newly established connection
 * and, when credentials were supplied, runs the authentication conversation.
 *
 * @param {Connection} conn The freshly constructed connection to handshake
 * @param {object} options Connection options (credentials, timeouts, compression, ...)
 * @param {Function} _callback Invoked with (err) or (undefined, conn) once handshake/auth completes
 */
function performInitialHandshake(conn, options, _callback) {
  // Ensure any handshake failure tears the connection down before the error
  // is surfaced to the caller.
  const callback = function(err, ret) {
    if (err && conn) {
      conn.destroy();
    }
    _callback(err, ret);
  };

  const credentials = options.credentials;
  if (credentials) {
    // Reject unknown auth mechanisms up front; "DEFAULT" is resolved later,
    // once the server's ismaster response tells us what it supports.
    if (!credentials.mechanism.match(/DEFAULT/i) && !AUTH_PROVIDERS[credentials.mechanism]) {
      callback(new MongoError(`authMechanism '${credentials.mechanism}' not supported`));
      return;
    }
  }

  const authContext = new AuthContext(conn, credentials, options);
  prepareHandshakeDocument(authContext, (err, handshakeDoc) => {
    if (err) {
      return callback(err);
    }

    const handshakeOptions = Object.assign({}, options);
    if (options.connectTimeoutMS || options.connectionTimeout) {
      // The handshake technically is a monitoring check, so its socket timeout should be connectTimeoutMS
      handshakeOptions.socketTimeout = options.connectTimeoutMS || options.connectionTimeout;
    }

    // Timestamp the round trip so lastIsMasterMS can be recorded below.
    const start = new Date().getTime();
    conn.command('admin.$cmd', handshakeDoc, handshakeOptions, (err, result) => {
      if (err) {
        callback(err);
        return;
      }

      const response = result.result;
      if (response.ok === 0) {
        callback(new MongoError(response));
        return;
      }

      // Fail fast when the server's wire version range does not overlap ours.
      const supportedServerErr = checkSupportedServer(response, options);
      if (supportedServerErr) {
        callback(supportedServerErr);
        return;
      }

      if (!isModernConnectionType(conn)) {
        // resolve compression: pick the first compressor we offered that the
        // server also supports (legacy connections only).
        if (response.compression) {
          const agreedCompressors = handshakeDoc.compression.filter(
            compressor => response.compression.indexOf(compressor) !== -1
          );

          if (agreedCompressors.length) {
            conn.agreedCompressor = agreedCompressors[0];
          }

          if (options.compression && options.compression.zlibCompressionLevel) {
            conn.zlibCompressionLevel = options.compression.zlibCompressionLevel;
          }
        }
      }

      // NOTE: This is metadata attached to the connection while porting away from
      // handshake being done in the `Server` class. Likely, it should be
      // relocated, or at very least restructured.
      conn.ismaster = response;
      conn.lastIsMasterMS = new Date().getTime() - start;

      // Arbiters do not authenticate; everything else runs the auth flow when
      // credentials are present.
      if (!response.arbiterOnly && credentials) {
        // store the response on auth context
        Object.assign(authContext, { response });

        // Resolve "DEFAULT" (or validate the requested mechanism) against the
        // server's advertised capabilities, then run the auth conversation.
        const resolvedCredentials = credentials.resolveAuthMechanism(response);
        const authProvider = AUTH_PROVIDERS[resolvedCredentials.mechanism];
        authProvider.auth(authContext, err => {
          if (err) return callback(err);
          callback(undefined, conn);
        });

        return;
      }

      callback(undefined, conn);
    });
  });
}
|
||||
|
||||
/**
 * Builds the `ismaster` handshake command document, delegating to the auth
 * provider so mechanism-specific fields can be added before the command is sent.
 *
 * @param {AuthContext} authContext Holds the connection, credentials and options
 * @param {Function} callback Invoked with (err) or (undefined, handshakeDoc)
 */
function prepareHandshakeDocument(authContext, callback) {
  const options = authContext.options;
  const compressors =
    options.compression && options.compression.compressors ? options.compression.compressors : [];

  const handshakeDoc = {
    ismaster: true,
    client: options.metadata || makeClientMetadata(options),
    compression: compressors
  };

  const credentials = authContext.credentials;
  if (credentials) {
    if (credentials.mechanism.match(/DEFAULT/i) && credentials.username) {
      // For the "DEFAULT" mechanism, ask the server which SASL mechanisms the
      // user supports; SCRAM-SHA-256's prepare step handles the rest.
      Object.assign(handshakeDoc, {
        saslSupportedMechs: `${credentials.source}.${credentials.username}`
      });

      AUTH_PROVIDERS['scram-sha-256'].prepare(handshakeDoc, authContext, callback);
      return;
    }

    // Explicit mechanism: let its provider amend the handshake document.
    const authProvider = AUTH_PROVIDERS[credentials.mechanism];
    authProvider.prepare(handshakeDoc, authContext, callback);
    return;
  }

  callback(undefined, handshakeDoc);
}
|
||||
|
||||
// TLS socket options that may be copied verbatim from the driver's connection
// options into the object handed to `tls.connect` (see parseSslOptions).
const LEGAL_SSL_SOCKET_OPTIONS = [
  'pfx',
  'key',
  'passphrase',
  'cert',
  'ca',
  'ciphers',
  'NPNProtocols',
  'ALPNProtocols',
  'servername',
  'ecdhCurve',
  'secureProtocol',
  'secureContext',
  'session',
  'minDHSize',
  'crl',
  'rejectUnauthorized'
];
|
||||
|
||||
/**
 * Builds the options object handed to `net.createConnection`.
 *
 * A host containing a `/` is treated as a UNIX domain socket path; otherwise
 * a TCP host/port pair (defaulting to localhost:27017) is produced.
 *
 * @param {number} family IP stack family preference (0, 4 or 6)
 * @param {object} options Connection options (`host`, `port`)
 * @returns {object} Either `{ path }` for domain sockets, or `{ family, host, port, rejectUnauthorized }`
 */
function parseConnectOptions(family, options) {
  const host = typeof options.host === 'string' ? options.host : 'localhost';

  // A slash in the host means a UNIX domain socket path, not a TCP address.
  if (host.includes('/')) {
    return { path: host };
  }

  const port = typeof options.port === 'number' ? options.port : 27017;
  return {
    family,
    host,
    port,
    rejectUnauthorized: false
  };
}
|
||||
|
||||
/**
 * Builds the options object handed to `tls.connect`.
 *
 * Starts from the plain TCP connect options and layers on any whitelisted TLS
 * socket option present in `options`, plus `checkServerIdentity` and SNI
 * `servername` handling.
 *
 * @param {number} family IP stack family preference (0, 4 or 6)
 * @param {object} options Connection options, possibly containing TLS settings
 * @returns {object} Options suitable for `tls.connect`
 */
function parseSslOptions(family, options) {
  const result = parseConnectOptions(family, options);

  // Merge in valid SSL options (idiom fix: `includes` instead of `indexOf !== -1`)
  for (const name in options) {
    if (options[name] != null && LEGAL_SSL_SOCKET_OPTIONS.includes(name)) {
      result[name] = options[name];
    }
  }

  // Override checkServerIdentity behavior
  if (options.checkServerIdentity === false) {
    // Skip the identity check by returning undefined as per node documents
    // https://nodejs.org/api/tls.html#tls_tls_connect_options_callback
    result.checkServerIdentity = function() {
      return undefined;
    };
  } else if (typeof options.checkServerIdentity === 'function') {
    result.checkServerIdentity = options.checkServerIdentity;
  }

  // Set default SNI servername to be the same as host
  if (result.servername == null) {
    result.servername = result.host;
  }

  return result;
}
|
||||
|
||||
// Socket events that signal failure while establishing the connection.
const SOCKET_ERROR_EVENTS = new Set(['error', 'close', 'timeout', 'parseError']);

/**
 * Creates a plain TCP, TLS, or UNIX-domain socket for the given options and
 * hands it to `_callback` once connected (after the TLS handshake when SSL is on).
 *
 * @param {number} family IP stack family preference (0, 4 or 6)
 * @param {object} options Connection options (ssl, keepAlive, timeouts, ...)
 * @param {EventEmitter} cancellationToken Optional emitter whose 'cancel' event aborts the attempt
 * @param {Function} _callback Invoked with (err) on failure or (null, socket) on success
 */
function makeConnection(family, options, cancellationToken, _callback) {
  const useSsl = typeof options.ssl === 'boolean' ? options.ssl : false;
  const keepAlive = typeof options.keepAlive === 'boolean' ? options.keepAlive : true;
  let keepAliveInitialDelay =
    typeof options.keepAliveInitialDelay === 'number' ? options.keepAliveInitialDelay : 120000;
  const noDelay = typeof options.noDelay === 'boolean' ? options.noDelay : true;
  // `connectionTimeout` wins over `connectTimeoutMS`; default 30s.
  const connectionTimeout =
    typeof options.connectionTimeout === 'number'
      ? options.connectionTimeout
      : typeof options.connectTimeoutMS === 'number'
      ? options.connectTimeoutMS
      : 30000;
  const socketTimeout = typeof options.socketTimeout === 'number' ? options.socketTimeout : 360000;
  const rejectUnauthorized =
    typeof options.rejectUnauthorized === 'boolean' ? options.rejectUnauthorized : true;

  // A keep-alive probe scheduled after the socket would already have timed
  // out is useless; clamp it to half the socket timeout.
  if (keepAliveInitialDelay > socketTimeout) {
    keepAliveInitialDelay = Math.round(socketTimeout / 2);
  }

  let socket;
  // Destroy the socket on any failure before reporting it to the caller.
  const callback = function(err, ret) {
    if (err && socket) {
      socket.destroy();
    }

    _callback(err, ret);
  };

  try {
    if (useSsl) {
      socket = tls.connect(parseSslOptions(family, options));
      if (typeof socket.disableRenegotiation === 'function') {
        socket.disableRenegotiation();
      }
    } else {
      socket = net.createConnection(parseConnectOptions(family, options));
    }
  } catch (err) {
    return callback(err);
  }

  socket.setKeepAlive(keepAlive, keepAliveInitialDelay);
  // Use the shorter connect timeout until the socket is established; widened
  // to `socketTimeout` in connectHandler below.
  socket.setTimeout(connectionTimeout);
  socket.setNoDelay(noDelay);

  const connectEvent = useSsl ? 'secureConnect' : 'connect';
  let cancellationHandler;
  function errorHandler(eventName) {
    return err => {
      // Detach every listener first so a late-firing event cannot trigger a
      // second callback invocation.
      SOCKET_ERROR_EVENTS.forEach(event => socket.removeAllListeners(event));
      if (cancellationHandler) {
        cancellationToken.removeListener('cancel', cancellationHandler);
      }

      socket.removeListener(connectEvent, connectHandler);
      callback(connectionFailureError(eventName, err));
    };
  }

  function connectHandler() {
    SOCKET_ERROR_EVENTS.forEach(event => socket.removeAllListeners(event));
    if (cancellationHandler) {
      cancellationToken.removeListener('cancel', cancellationHandler);
    }

    // TLS sockets surface certificate validation failure here rather than as
    // an 'error' event; honor it unless rejectUnauthorized was disabled.
    if (socket.authorizationError && rejectUnauthorized) {
      return callback(socket.authorizationError);
    }

    socket.setTimeout(socketTimeout);
    callback(null, socket);
  }

  SOCKET_ERROR_EVENTS.forEach(event => socket.once(event, errorHandler(event)));
  if (cancellationToken) {
    cancellationHandler = errorHandler('cancel');
    cancellationToken.once('cancel', cancellationHandler);
  }

  socket.once(connectEvent, connectHandler);
}
|
||||
|
||||
/**
 * Maps a socket failure event name to an appropriate Mongo error instance.
 *
 * @param {string} type The originating socket event name ('error', 'timeout', 'close', 'cancel')
 * @param {Error} err The underlying error; only used for the 'error' case
 * @returns {MongoNetworkError|MongoNetworkTimeoutError}
 */
function connectionFailureError(type, err) {
  if (type === 'error') {
    return new MongoNetworkError(err);
  }

  if (type === 'timeout') {
    return new MongoNetworkTimeoutError(`connection timed out`);
  }

  if (type === 'close') {
    return new MongoNetworkError(`connection closed`);
  }

  if (type === 'cancel') {
    return new MongoNetworkError(`connection establishment was cancelled`);
  }

  return new MongoNetworkError(`unknown network error`);
}
|
||||
|
||||
// Sole export: the `connect` entry point defined earlier in this file.
module.exports = connect;
|
||||
712
node_modules/mongodb/lib/core/connection/connection.js
generated
vendored
Normal file
712
node_modules/mongodb/lib/core/connection/connection.js
generated
vendored
Normal file
@@ -0,0 +1,712 @@
|
||||
'use strict';
|
||||
|
||||
const EventEmitter = require('events').EventEmitter;
|
||||
const crypto = require('crypto');
|
||||
const debugOptions = require('./utils').debugOptions;
|
||||
const parseHeader = require('../wireprotocol/shared').parseHeader;
|
||||
const decompress = require('../wireprotocol/compression').decompress;
|
||||
const Response = require('./commands').Response;
|
||||
const BinMsg = require('./msg').BinMsg;
|
||||
const MongoNetworkError = require('../error').MongoNetworkError;
|
||||
const MongoNetworkTimeoutError = require('../error').MongoNetworkTimeoutError;
|
||||
const MongoError = require('../error').MongoError;
|
||||
const Logger = require('./logger');
|
||||
const OP_COMPRESSED = require('../wireprotocol/shared').opcodes.OP_COMPRESSED;
|
||||
const OP_MSG = require('../wireprotocol/shared').opcodes.OP_MSG;
|
||||
const MESSAGE_HEADER_SIZE = require('../wireprotocol/shared').MESSAGE_HEADER_SIZE;
|
||||
const Buffer = require('safe-buffer').Buffer;
|
||||
const Query = require('./commands').Query;
|
||||
const CommandResult = require('./command_result');
|
||||
|
||||
// Monotonically increasing id assigned to each Connection instance.
let _id = 0;

// Default cap on the size of a single wire message (64 MiB); can be
// overridden via options.maxBsonMessageSize.
const DEFAULT_MAX_BSON_MESSAGE_SIZE = 1024 * 1024 * 16 * 4;

// Option fields included in the debug log line emitted when a connection is created.
const DEBUG_FIELDS = [
  'host',
  'port',
  'size',
  'keepAlive',
  'keepAliveInitialDelay',
  'noDelay',
  'connectionTimeout',
  'socketTimeout',
  'ssl',
  'ca',
  'crl',
  'cert',
  'rejectUnauthorized',
  'promoteLongs',
  'promoteValues',
  'promoteBuffers',
  'checkServerIdentity'
];

// Optional test hook notified whenever a connection is added or removed.
let connectionAccountingSpy = undefined;
// When true, every live connection is tracked in `connections` (test support).
let connectionAccounting = false;
// Registry of live connections keyed by id, used only when accounting is on.
let connections = {};
|
||||
|
||||
/**
|
||||
* A class representing a single connection to a MongoDB server
|
||||
*
|
||||
* @fires Connection#connect
|
||||
* @fires Connection#close
|
||||
* @fires Connection#error
|
||||
* @fires Connection#timeout
|
||||
* @fires Connection#parseError
|
||||
* @fires Connection#message
|
||||
*/
|
||||
class Connection extends EventEmitter {
  /**
   * Creates a new Connection instance
   *
   * **NOTE**: Internal class, do not instantiate directly
   *
   * @param {Socket} socket The socket this connection wraps
   * @param {Object} options Various settings
   * @param {object} options.bson An implementation of bson serialize and deserialize
   * @param {string} [options.host='localhost'] The host the socket is connected to
   * @param {number} [options.port=27017] The port used for the socket connection
   * @param {boolean} [options.keepAlive=true] TCP Connection keep alive enabled
   * @param {number} [options.keepAliveInitialDelay=120000] Initial delay before TCP keep alive enabled
   * @param {number} [options.connectionTimeout=30000] TCP Connection timeout setting
   * @param {number} [options.socketTimeout=360000] TCP Socket timeout setting
   * @param {boolean} [options.promoteLongs] Convert Long values from the db into Numbers if they fit into 53 bits
   * @param {boolean} [options.promoteValues] Promotes BSON values to native types where possible, set to false to only receive wrapper types.
   * @param {boolean} [options.promoteBuffers] Promotes Binary BSON values to native Node Buffers.
   * @param {number} [options.maxBsonMessageSize=0x4000000] Largest possible size of a BSON message (for legacy purposes)
   */
  constructor(socket, options) {
    super();

    options = options || {};
    if (!options.bson) {
      throw new TypeError('must pass in valid bson parser');
    }

    this.id = _id++;
    this.options = options;
    this.logger = Logger('Connection', options);
    this.bson = options.bson;
    this.tag = options.tag;
    this.maxBsonMessageSize = options.maxBsonMessageSize || DEFAULT_MAX_BSON_MESSAGE_SIZE;

    this.port = options.port || 27017;
    this.host = options.host || 'localhost';
    this.socketTimeout = typeof options.socketTimeout === 'number' ? options.socketTimeout : 360000;

    // These values are inspected directly in tests, but maybe not necessary to keep around
    this.keepAlive = typeof options.keepAlive === 'boolean' ? options.keepAlive : true;
    this.keepAliveInitialDelay =
      typeof options.keepAliveInitialDelay === 'number' ? options.keepAliveInitialDelay : 120000;
    this.connectionTimeout =
      typeof options.connectionTimeout === 'number' ? options.connectionTimeout : 30000;
    // Keep-alive probes scheduled beyond the socket timeout are useless; clamp.
    if (this.keepAliveInitialDelay > this.socketTimeout) {
      this.keepAliveInitialDelay = Math.round(this.socketTimeout / 2);
    }

    // Debug information
    if (this.logger.isDebug()) {
      this.logger.debug(
        `creating connection ${this.id} with options [${JSON.stringify(
          debugOptions(DEBUG_FIELDS, options)
        )}]`
      );
    }

    // Response options: passed through to the BSON deserializer for replies.
    this.responseOptions = {
      promoteLongs: typeof options.promoteLongs === 'boolean' ? options.promoteLongs : true,
      promoteValues: typeof options.promoteValues === 'boolean' ? options.promoteValues : true,
      promoteBuffers: typeof options.promoteBuffers === 'boolean' ? options.promoteBuffers : false
    };

    // Flushing
    this.flushing = false;
    this.queue = [];

    // Internal state
    this.writeStream = null;
    this.destroyed = false;
    this.timedOut = false;

    // Create hash method: stable sha1 of "host:port", used as a name for this endpoint.
    const hash = crypto.createHash('sha1');
    hash.update(this.address);
    this.hashedName = hash.digest('hex');

    // All operations in flight on the connection
    this.workItems = [];

    // setup socket: single-shot failure handlers, streaming data handler.
    this.socket = socket;
    this.socket.once('error', errorHandler(this));
    this.socket.once('timeout', timeoutHandler(this));
    this.socket.once('close', closeHandler(this));
    this.socket.on('data', dataHandler(this));

    if (connectionAccounting) {
      addConnection(this.id, this);
    }
  }

  // Set an explicit inactivity timeout (ms) on the underlying socket.
  setSocketTimeout(value) {
    if (this.socket) {
      this.socket.setTimeout(value);
    }
  }

  // Restore the socket's inactivity timeout to the configured default.
  resetSocketTimeout() {
    if (this.socket) {
      this.socket.setTimeout(this.socketTimeout);
    }
  }

  // Test support: begin tracking all connections (optionally mirroring into `spy`).
  static enableConnectionAccounting(spy) {
    if (spy) {
      connectionAccountingSpy = spy;
    }

    connectionAccounting = true;
    connections = {};
  }

  // Test support: stop tracking connections and drop the spy.
  static disableConnectionAccounting() {
    connectionAccounting = false;
    connectionAccountingSpy = undefined;
  }

  // Test support: the registry of currently tracked connections.
  static connections() {
    return connections;
  }

  // "host:port" identifier for this connection's endpoint.
  get address() {
    return `${this.host}:${this.port}`;
  }

  /**
   * Unref this connection
   * @method
   * @return {boolean}
   */
  unref() {
    if (this.socket == null) {
      // No socket yet; defer the unref until the connection is established.
      this.once('connect', () => this.socket.unref());
      return;
    }

    this.socket.unref();
  }

  /**
   * Flush all work Items on this connection
   *
   * @param {*} err The error to propagate to the flushed work items
   */
  flush(err) {
    while (this.workItems.length > 0) {
      const workItem = this.workItems.shift();
      if (workItem.cb) {
        workItem.cb(err);
      }
    }
  }

  /**
   * Destroy connection
   * @method
   */
  destroy(options, callback) {
    if (typeof options === 'function') {
      callback = options;
      options = {};
    }

    options = Object.assign({ force: false }, options);

    if (connectionAccounting) {
      deleteConnection(this.id);
    }

    if (this.socket == null) {
      this.destroyed = true;
      return;
    }

    // `force` (or a previous timeout) destroys immediately; otherwise end gracefully.
    if (options.force || this.timedOut) {
      this.socket.destroy();
      this.destroyed = true;
      if (typeof callback === 'function') callback(null, null);
      return;
    }

    // NOTE(review): net.Socket#end's callback takes no error argument, so
    // `err` here is presumably always undefined — confirm before relying on it.
    this.socket.end(err => {
      this.destroyed = true;
      if (typeof callback === 'function') callback(err, null);
    });
  }

  /**
   * Write to connection
   * @method
   * @param {Command} command Command to write out need to implement toBin and toBinUnified
   */
  write(buffer) {
    // Debug Log
    if (this.logger.isDebug()) {
      if (!Array.isArray(buffer)) {
        this.logger.debug(`writing buffer [${buffer.toString('hex')}] to ${this.address}`);
      } else {
        for (let i = 0; i < buffer.length; i++)
          this.logger.debug(`writing buffer [${buffer[i].toString('hex')}] to ${this.address}`);
      }
    }

    // Double check that the connection is not destroyed
    if (this.socket.destroyed === false) {
      // Write out the command
      if (!Array.isArray(buffer)) {
        this.socket.write(buffer, 'binary');
        return true;
      }

      // Iterate over all buffers and write them in order to the socket
      for (let i = 0; i < buffer.length; i++) {
        this.socket.write(buffer[i], 'binary');
      }

      return true;
    }

    // Connection is destroyed return write failed
    return false;
  }

  /**
   * Return id of connection as a string
   * @method
   * @return {string}
   */
  toString() {
    return '' + this.id;
  }

  /**
   * Return json object of connection
   * @method
   * @return {object}
   */
  toJSON() {
    return { id: this.id, host: this.host, port: this.port };
  }

  /**
   * Is the connection connected
   * @method
   * @return {boolean}
   */
  isConnected() {
    if (this.destroyed) return false;
    return !this.socket.destroyed && this.socket.writable;
  }

  /**
   * Runs a single command on this connection and invokes `callback` with the
   * parsed reply. Temporarily widens the socket timeout for the command and
   * restores it afterward.
   * @ignore
   */
  command(ns, command, options, callback) {
    if (typeof options === 'function') (callback = options), (options = {});

    const conn = this;
    const socketTimeout =
      typeof options.socketTimeout === 'number' ? options.socketTimeout : 360000;
    const bson = conn.options.bson;
    const query = new Query(bson, ns, command, {
      numberToSkip: 0,
      numberToReturn: 1
    });

    // Guard against double invocation: after the first call, callback is a no-op.
    const noop = () => {};
    function _callback(err, result) {
      callback(err, result);
      callback = noop;
    }

    function errorHandler(err) {
      conn.resetSocketTimeout();
      CONNECTION_ERROR_EVENTS.forEach(eventName => conn.removeListener(eventName, errorHandler));
      conn.removeListener('message', messageHandler);

      if (err == null) {
        err = new MongoError(`runCommand failed for connection to '${conn.address}'`);
      }

      // ignore all future errors
      conn.on('error', noop);
      _callback(err);
    }

    function messageHandler(msg) {
      // Only react to the reply for our own request; other in-flight replies pass through.
      if (msg.responseTo !== query.requestId) {
        return;
      }

      conn.resetSocketTimeout();
      CONNECTION_ERROR_EVENTS.forEach(eventName => conn.removeListener(eventName, errorHandler));
      conn.removeListener('message', messageHandler);

      msg.parse({ promoteValues: true });

      const response = msg.documents[0];
      if (response.ok === 0 || response.$err || response.errmsg || response.code) {
        _callback(new MongoError(response));
        return;
      }

      _callback(undefined, new CommandResult(response, this, msg));
    }

    conn.setSocketTimeout(socketTimeout);
    CONNECTION_ERROR_EVENTS.forEach(eventName => conn.once(eventName, errorHandler));
    conn.on('message', messageHandler);
    conn.write(query.toBin());
  }
}
|
||||
|
||||
// Connection-level events that terminate an in-flight command with an error.
const CONNECTION_ERROR_EVENTS = ['error', 'close', 'timeout', 'parseError'];
|
||||
|
||||
/**
 * Removes a connection from the global accounting registry and notifies the
 * accounting spy (test support) when one is installed.
 *
 * @param {number} id The id of the connection to remove
 */
function deleteConnection(id) {
  delete connections[id];

  if (connectionAccountingSpy) {
    connectionAccountingSpy.deleteConnection(id);
  }
}
|
||||
|
||||
/**
 * Registers a connection in the global accounting registry and notifies the
 * accounting spy (test support) when one is installed.
 *
 * @param {number} id The id of the connection being added
 * @param {Connection} connection The connection instance itself
 */
function addConnection(id, connection) {
  connections[id] = connection;

  if (connectionAccountingSpy) {
    connectionAccountingSpy.addConnection(id, connection);
  }
}
|
||||
|
||||
//
|
||||
// Connection handlers
|
||||
/**
 * Creates the 'error' listener for a connection's underlying socket. Emits a
 * wrapped MongoNetworkError on the connection itself.
 *
 * @param {Connection} conn The owning connection
 * @returns {Function} Socket 'error' event handler
 */
function errorHandler(conn) {
  return function(err) {
    if (connectionAccounting) deleteConnection(conn.id);

    // Debug information
    if (conn.logger.isDebug()) {
      const details = JSON.stringify(err);
      conn.logger.debug(
        `connection ${conn.id} for [${conn.address}] errored out with [${details}]`
      );
    }

    conn.emit('error', new MongoNetworkError(err), conn);
  };
}
|
||||
|
||||
/**
 * Creates the 'timeout' listener for a connection's underlying socket. Marks
 * the connection timed out and emits a MongoNetworkTimeoutError on it.
 *
 * @param {Connection} conn The owning connection
 * @returns {Function} Socket 'timeout' event handler
 */
function timeoutHandler(conn) {
  return function() {
    if (connectionAccounting) deleteConnection(conn.id);

    if (conn.logger.isDebug()) {
      conn.logger.debug(`connection ${conn.id} for [${conn.address}] timed out`);
    }

    conn.timedOut = true;
    const timeoutError = new MongoNetworkTimeoutError(
      `connection ${conn.id} to ${conn.address} timed out`,
      { beforeHandshake: conn.ismaster == null }
    );
    conn.emit('timeout', timeoutError, conn);
  };
}
|
||||
|
||||
/**
 * Creates the 'close' listener for a connection's underlying socket. Emits a
 * MongoNetworkError on the connection unless the close followed an error
 * (in which case errorHandler already reported it).
 *
 * @param {Connection} conn The owning connection
 * @returns {Function} Socket 'close' event handler
 */
function closeHandler(conn) {
  return function(hadError) {
    if (connectionAccounting) deleteConnection(conn.id);

    if (conn.logger.isDebug()) {
      conn.logger.debug(`connection ${conn.id} with for [${conn.address}] closed`);
    }

    if (hadError) {
      return;
    }

    const closeError = new MongoNetworkError(`connection ${conn.id} to ${conn.address} closed`);
    conn.emit('close', closeError, conn);
  };
}
|
||||
|
||||
// Handle a message once it is received
|
||||
// Handle a message once it is received
/**
 * Parses one complete wire protocol message, transparently decompressing
 * OP_COMPRESSED payloads, and emits the resulting Response/BinMsg as a
 * 'message' event on the connection.
 *
 * @param {Connection} conn The connection the message arrived on
 * @param {Buffer} message One complete wire message, header included
 */
function processMessage(conn, message) {
  const msgHeader = parseHeader(message);
  if (msgHeader.opCode !== OP_COMPRESSED) {
    const ResponseConstructor = msgHeader.opCode === OP_MSG ? BinMsg : Response;
    conn.emit(
      'message',
      new ResponseConstructor(
        conn.bson,
        message,
        msgHeader,
        message.slice(MESSAGE_HEADER_SIZE),
        conn.responseOptions
      ),
      conn
    );

    return;
  }

  // OP_COMPRESSED: the body begins with the original opcode (int32), the
  // uncompressed length (int32) and a one-byte compressor id, followed by the
  // compressed payload.
  msgHeader.fromCompressed = true;
  let index = MESSAGE_HEADER_SIZE;
  msgHeader.opCode = message.readInt32LE(index);
  index += 4;
  msgHeader.length = message.readInt32LE(index);
  index += 4;
  const compressorID = message[index];
  index++;

  decompress(compressorID, message.slice(index), (err, decompressedMsgBody) => {
    if (err) {
      conn.emit('error', err);
      return;
    }

    // The decompressed size must match the advertised uncompressed length,
    // otherwise the message is corrupt.
    if (decompressedMsgBody.length !== msgHeader.length) {
      conn.emit(
        'error',
        new MongoError(
          'Decompressing a compressed message from the server failed. The message is corrupt.'
        )
      );

      return;
    }

    const ResponseConstructor = msgHeader.opCode === OP_MSG ? BinMsg : Response;
    conn.emit(
      'message',
      new ResponseConstructor(
        conn.bson,
        message,
        msgHeader,
        decompressedMsgBody,
        conn.responseOptions
      ),
      conn
    );
  });
}
|
||||
|
||||
/**
 * Creates the socket 'data' listener: a streaming parser that reassembles
 * complete wire protocol messages from arbitrary TCP chunk boundaries.
 *
 * Parser state lives on the connection itself: `buffer` (partial message),
 * `sizeOfMessage`, `bytesRead`, and `stubBuffer` (held when fewer than 4
 * bytes arrived, i.e. not enough to read the length prefix). Each complete
 * message is handed to processMessage.
 *
 * @param {Connection} conn The owning connection
 * @returns {Function} Socket 'data' event handler
 */
function dataHandler(conn) {
  return function(data) {
    // Parse until we are done with the data
    while (data.length > 0) {
      // If we still have bytes to read on the current message
      if (conn.bytesRead > 0 && conn.sizeOfMessage > 0) {
        // Calculate the amount of remaining bytes
        const remainingBytesToRead = conn.sizeOfMessage - conn.bytesRead;
        // Check if the current chunk contains the rest of the message
        if (remainingBytesToRead > data.length) {
          // Copy the new data into the exiting buffer (should have been allocated when we know the message size)
          data.copy(conn.buffer, conn.bytesRead);
          // Adjust the number of bytes read so it point to the correct index in the buffer
          conn.bytesRead = conn.bytesRead + data.length;

          // Reset state of buffer
          data = Buffer.alloc(0);
        } else {
          // Copy the missing part of the data into our current buffer
          data.copy(conn.buffer, conn.bytesRead, 0, remainingBytesToRead);
          // Slice the overflow into a new buffer that we will then re-parse
          data = data.slice(remainingBytesToRead);

          // Emit current complete message
          const emitBuffer = conn.buffer;
          // Reset state of buffer
          conn.buffer = null;
          conn.sizeOfMessage = 0;
          conn.bytesRead = 0;
          conn.stubBuffer = null;

          processMessage(conn, emitBuffer);
        }
      } else {
        // Stub buffer is kept in case we don't get enough bytes to determine the
        // size of the message (< 4 bytes)
        if (conn.stubBuffer != null && conn.stubBuffer.length > 0) {
          // If we have enough bytes to determine the message size let's do it
          if (conn.stubBuffer.length + data.length > 4) {
            // Prepad the data
            const newData = Buffer.alloc(conn.stubBuffer.length + data.length);
            conn.stubBuffer.copy(newData, 0);
            data.copy(newData, conn.stubBuffer.length);
            // Reassign for parsing
            data = newData;

            // Reset state of buffer
            conn.buffer = null;
            conn.sizeOfMessage = 0;
            conn.bytesRead = 0;
            conn.stubBuffer = null;
          } else {
            // Add the the bytes to the stub buffer
            const newStubBuffer = Buffer.alloc(conn.stubBuffer.length + data.length);
            // Copy existing stub buffer
            conn.stubBuffer.copy(newStubBuffer, 0);
            // Copy missing part of the data
            data.copy(newStubBuffer, conn.stubBuffer.length);
            // Exit parsing loop
            data = Buffer.alloc(0);
          }
        } else {
          if (data.length > 4) {
            // Retrieve the message size (little-endian int32 length prefix)
            const sizeOfMessage = data[0] | (data[1] << 8) | (data[2] << 16) | (data[3] << 24);
            // If we have a negative sizeOfMessage emit error and return
            if (sizeOfMessage < 0 || sizeOfMessage > conn.maxBsonMessageSize) {
              const errorObject = {
                err: 'socketHandler',
                trace: '',
                bin: conn.buffer,
                parseState: {
                  sizeOfMessage: sizeOfMessage,
                  bytesRead: conn.bytesRead,
                  stubBuffer: conn.stubBuffer
                }
              };
              // We got a parse Error fire it off then keep going
              conn.emit('parseError', errorObject, conn);
              return;
            }

            // Ensure that the size of message is larger than 0 and less than the max allowed
            if (
              sizeOfMessage > 4 &&
              sizeOfMessage < conn.maxBsonMessageSize &&
              sizeOfMessage > data.length
            ) {
              // Message is larger than this chunk: allocate and start filling.
              conn.buffer = Buffer.alloc(sizeOfMessage);
              // Copy all the data into the buffer
              data.copy(conn.buffer, 0);
              // Update bytes read
              conn.bytesRead = data.length;
              // Update sizeOfMessage
              conn.sizeOfMessage = sizeOfMessage;
              // Ensure stub buffer is null
              conn.stubBuffer = null;
              // Exit parsing loop
              data = Buffer.alloc(0);
            } else if (
              sizeOfMessage > 4 &&
              sizeOfMessage < conn.maxBsonMessageSize &&
              sizeOfMessage === data.length
            ) {
              // Chunk is exactly one complete message.
              const emitBuffer = data;
              // Reset state of buffer
              conn.buffer = null;
              conn.sizeOfMessage = 0;
              conn.bytesRead = 0;
              conn.stubBuffer = null;
              // Exit parsing loop
              data = Buffer.alloc(0);
              // Emit the message
              processMessage(conn, emitBuffer);
            } else if (sizeOfMessage <= 4 || sizeOfMessage > conn.maxBsonMessageSize) {
              const errorObject = {
                err: 'socketHandler',
                trace: null,
                bin: data,
                parseState: {
                  sizeOfMessage: sizeOfMessage,
                  bytesRead: 0,
                  buffer: null,
                  stubBuffer: null
                }
              };
              // We got a parse Error fire it off then keep going
              conn.emit('parseError', errorObject, conn);

              // Clear out the state of the parser
              conn.buffer = null;
              conn.sizeOfMessage = 0;
              conn.bytesRead = 0;
              conn.stubBuffer = null;
              // Exit parsing loop
              data = Buffer.alloc(0);
            } else {
              // Chunk contains one complete message plus the start of the next.
              const emitBuffer = data.slice(0, sizeOfMessage);
              // Reset state of buffer
              conn.buffer = null;
              conn.sizeOfMessage = 0;
              conn.bytesRead = 0;
              conn.stubBuffer = null;
              // Copy rest of message
              data = data.slice(sizeOfMessage);
              // Emit the message
              processMessage(conn, emitBuffer);
            }
          } else {
            // Create a buffer that contains the space for the non-complete message
            conn.stubBuffer = Buffer.alloc(data.length);
            // Copy the data to the stub buffer
            data.copy(conn.stubBuffer, 0);
            // Exit parsing loop
            data = Buffer.alloc(0);
          }
        }
      }
    }
  };
}
|
||||
|
||||
/**
|
||||
* A server connect event, used to verify that the connection is up and running
|
||||
*
|
||||
* @event Connection#connect
|
||||
* @type {Connection}
|
||||
*/
|
||||
|
||||
/**
|
||||
* The server connection closed, all pool connections closed
|
||||
*
|
||||
* @event Connection#close
|
||||
* @type {Connection}
|
||||
*/
|
||||
|
||||
/**
|
||||
* The server connection caused an error, all pool connections closed
|
||||
*
|
||||
* @event Connection#error
|
||||
* @type {Connection}
|
||||
*/
|
||||
|
||||
/**
|
||||
* The server connection timed out, all pool connections closed
|
||||
*
|
||||
* @event Connection#timeout
|
||||
* @type {Connection}
|
||||
*/
|
||||
|
||||
/**
|
||||
* The driver experienced an invalid message, all pool connections closed
|
||||
*
|
||||
* @event Connection#parseError
|
||||
* @type {Connection}
|
||||
*/
|
||||
|
||||
/**
|
||||
* An event emitted each time the connection receives a parsed message from the wire
|
||||
*
|
||||
* @event Connection#message
|
||||
* @type {Connection}
|
||||
*/
|
||||
|
||||
// Sole export: the legacy core Connection class.
module.exports = Connection;
|
||||
251
node_modules/mongodb/lib/core/connection/logger.js
generated
vendored
Normal file
251
node_modules/mongodb/lib/core/connection/logger.js
generated
vendored
Normal file
@@ -0,0 +1,251 @@
|
||||
'use strict';

var f = require('util').format,
  MongoError = require('../error').MongoError;

// Class names registered by Logger constructors; these log by default.
var classFilters = {};
// Explicit class allow-list installed via Logger.filter('class', [...]).
// When non-empty it takes precedence over classFilters.
var filteredClasses = {};
// Module-wide logging level shared by every Logger instance.
var level = null;
// Cache the process id once for use in formatted messages.
var pid = process.pid;
// The active logging sink; defaults to console.log on first construction.
var currentLogger = null;

/**
 * @callback Logger~loggerCallback
 * @param {string} msg message being logged
 * @param {object} state an object containing more metadata about the logging message
 */

// Decide whether messages for `className` pass the active class filters.
function isClassEnabled(className) {
  return Object.keys(filteredClasses).length > 0
    ? filteredClasses[className]
    : classFilters[className];
}

// Format a single log entry and hand it to the current sink.
function dispatch(logger, type, label, message, object) {
  var dateTime = new Date().getTime();
  var msg = f('[%s-%s:%s] %s %s', label, logger.className, pid, dateTime, message);
  var state = {
    type: type,
    message: message,
    className: logger.className,
    pid: pid,
    date: dateTime
  };

  if (object) state.meta = object;
  currentLogger(msg, state);
}

/**
 * Creates a new Logger instance
 * @class
 * @param {string} className The Class name associated with the logging instance
 * @param {object} [options=null] Optional settings.
 * @param {Logger~loggerCallback} [options.logger=null] Custom logger function;
 * @param {string} [options.loggerLevel=error] Override default global log level.
 */
var Logger = function(className, options) {
  if (!(this instanceof Logger)) return new Logger(className, options);
  options = options || {};

  this.className = className;

  // A custom sink replaces the shared logger; otherwise install console.log once.
  if (options.logger) {
    currentLogger = options.logger;
  } else if (currentLogger == null) {
    currentLogger = console.log;
  }

  // Constructing any Logger with loggerLevel changes the shared level.
  if (options.loggerLevel) {
    level = options.loggerLevel;
  }

  // Register this class name so it logs by default.
  // NOTE(review): the guard reads filteredClasses but writes classFilters —
  // preserved as-is from the original; confirm upstream before changing.
  if (filteredClasses[this.className] == null) classFilters[this.className] = true;
};

/**
 * Log a message at the debug level
 * @method
 * @param {string} message The message to log
 * @param {object} object additional meta data to log
 * @return {null}
 */
Logger.prototype.debug = function(message, object) {
  if (this.isDebug() && isClassEnabled(this.className)) {
    dispatch(this, 'debug', 'DEBUG', message, object);
  }
};

/**
 * Log a message at the warn level
 * @method
 * @param {string} message The message to log
 * @param {object} object additional meta data to log
 * @return {null}
 */
Logger.prototype.warn = function(message, object) {
  if (this.isWarn() && isClassEnabled(this.className)) {
    dispatch(this, 'warn', 'WARN', message, object);
  }
};

/**
 * Log a message at the info level
 * @method
 * @param {string} message The message to log
 * @param {object} object additional meta data to log
 * @return {null}
 */
Logger.prototype.info = function(message, object) {
  if (this.isInfo() && isClassEnabled(this.className)) {
    dispatch(this, 'info', 'INFO', message, object);
  }
};

/**
 * Log a message at the error level
 * @method
 * @param {string} message The message to log
 * @param {object} object additional meta data to log
 * @return {null}
 */
Logger.prototype.error = function(message, object) {
  if (this.isError() && isClassEnabled(this.className)) {
    dispatch(this, 'error', 'ERROR', message, object);
  }
};

/**
 * Is the logger set at info level
 * @method
 * @return {boolean}
 */
Logger.prototype.isInfo = function() {
  return level === 'info' || level === 'debug';
};

/**
 * Is the logger set at error level
 * @method
 * @return {boolean}
 */
Logger.prototype.isError = function() {
  // NOTE(review): 'warn' is intentionally absent here in the original code.
  return level === 'error' || level === 'info' || level === 'debug';
};

/**
 * Is the logger set at warn level or above
 * @method
 * @return {boolean}
 */
Logger.prototype.isWarn = function() {
  return level === 'error' || level === 'warn' || level === 'info' || level === 'debug';
};

/**
 * Is the logger set at debug level
 * @method
 * @return {boolean}
 */
Logger.prototype.isDebug = function() {
  return level === 'debug';
};

/**
 * Resets the logger to default settings, error and no filtered classes
 * @method
 * @return {null}
 */
Logger.reset = function() {
  level = 'error';
  filteredClasses = {};
};

/**
 * Get the current logger function
 * @method
 * @return {Logger~loggerCallback}
 */
Logger.currentLogger = function() {
  return currentLogger;
};

/**
 * Set the current logger function
 * @method
 * @param {Logger~loggerCallback} logger Logger function.
 * @return {null}
 */
Logger.setCurrentLogger = function(logger) {
  if (typeof logger !== 'function') throw new MongoError('current logger must be a function');
  currentLogger = logger;
};

/**
 * Set what classes to log.
 * @method
 * @param {string} type The type of filter (currently only class)
 * @param {string[]} values The filters to apply
 * @return {null}
 */
Logger.filter = function(type, values) {
  if (type === 'class' && Array.isArray(values)) {
    filteredClasses = {};

    values.forEach(function(x) {
      filteredClasses[x] = true;
    });
  }
};

/**
 * Set the current log level
 * @method
 * @param {string} _level Set current log level (debug, warn, info, error)
 * @return {null}
 */
Logger.setLevel = function(_level) {
  if (_level !== 'info' && _level !== 'error' && _level !== 'debug' && _level !== 'warn') {
    throw new Error(f('%s is an illegal logging level', _level));
  }

  level = _level;
};

module.exports = Logger;
|
||||
222
node_modules/mongodb/lib/core/connection/msg.js
generated
vendored
Normal file
222
node_modules/mongodb/lib/core/connection/msg.js
generated
vendored
Normal file
@@ -0,0 +1,222 @@
|
||||
'use strict';
|
||||
|
||||
// Implementation of OP_MSG spec:
|
||||
// https://github.com/mongodb/specifications/blob/master/source/message/OP_MSG.rst
|
||||
//
|
||||
// struct Section {
|
||||
// uint8 payloadType;
|
||||
// union payload {
|
||||
// document document; // payloadType == 0
|
||||
// struct sequence { // payloadType == 1
|
||||
// int32 size;
|
||||
// cstring identifier;
|
||||
// document* documents;
|
||||
// };
|
||||
// };
|
||||
// };
|
||||
|
||||
// struct OP_MSG {
|
||||
// struct MsgHeader {
|
||||
// int32 messageLength;
|
||||
// int32 requestID;
|
||||
// int32 responseTo;
|
||||
// int32 opCode = 2013;
|
||||
// };
|
||||
// uint32 flagBits;
|
||||
// Section+ sections;
|
||||
// [uint32 checksum;]
|
||||
// };
|
||||
|
||||
const Buffer = require('safe-buffer').Buffer;
|
||||
const opcodes = require('../wireprotocol/shared').opcodes;
|
||||
const databaseNamespace = require('../wireprotocol/shared').databaseNamespace;
|
||||
const ReadPreference = require('../topologies/read_preference');
|
||||
|
||||
// Incrementing request id
|
||||
let _requestId = 0;
|
||||
|
||||
// Msg Flags
|
||||
const OPTS_CHECKSUM_PRESENT = 1;
|
||||
const OPTS_MORE_TO_COME = 2;
|
||||
const OPTS_EXHAUST_ALLOWED = 1 << 16;
|
||||
|
||||
/**
 * A wire-protocol OP_MSG request.
 *
 * Wraps a command document (mutated in place to carry the derived `$db` and,
 * when applicable, `$readPreference` fields) and serializes it into the list
 * of buffers that make up one OP_MSG message.
 */
class Msg {
  /**
   * @param {object} bson BSON serializer (must provide `serialize`)
   * @param {string} ns Fully qualified namespace (`<db>.<collection>`)
   * @param {object} command The command document to send; mutated to add `$db`/`$readPreference`
   * @param {object} [options] Optional settings (requestId, serialization flags, moreToCome, ...)
   * @throws {Error} when `command` is null/undefined
   */
  constructor(bson, ns, command, options) {
    // Basic options needed to be passed in
    if (command == null) throw new Error('query must be specified for query');

    // Fix: default `options` BEFORE it is dereferenced. Previously the
    // `options || {}` fallback only ran after `options.readPreference` had
    // already been read, so constructing a Msg without options threw a
    // TypeError and the fallback was dead code.
    options = options || {};

    // Basic options
    this.bson = bson;
    this.ns = ns;
    this.command = command;
    this.command.$db = databaseNamespace(ns);

    // Attach a read preference unless it is (or defaults to) primary.
    if (options.readPreference && options.readPreference.mode !== ReadPreference.PRIMARY) {
      this.command.$readPreference = options.readPreference.toJSON();
    }

    // Ensure empty options
    this.options = options;

    // Use the caller-supplied request id, or allocate the next one.
    this.requestId = options.requestId ? options.requestId : Msg.getRequestId();

    // Serialization options
    this.serializeFunctions =
      typeof options.serializeFunctions === 'boolean' ? options.serializeFunctions : false;
    this.ignoreUndefined =
      typeof options.ignoreUndefined === 'boolean' ? options.ignoreUndefined : false;
    this.checkKeys = typeof options.checkKeys === 'boolean' ? options.checkKeys : false;
    this.maxBsonSize = options.maxBsonSize || 1024 * 1024 * 16;

    // OP_MSG flag bits
    this.checksumPresent = false;
    this.moreToCome = options.moreToCome || false;
    this.exhaustAllowed =
      typeof options.exhaustAllowed === 'boolean' ? options.exhaustAllowed : false;
  }

  /**
   * Serialize this message.
   *
   * The message is returned as a list of buffers rather than one contiguous
   * buffer: [MsgHeader + flagBits, payload-type byte, command BSON].
   * @return {Buffer[]} the buffers composing the full message
   */
  toBin() {
    const buffers = [];
    let flags = 0;

    if (this.checksumPresent) {
      flags |= OPTS_CHECKSUM_PRESENT;
    }

    if (this.moreToCome) {
      flags |= OPTS_MORE_TO_COME;
    }

    if (this.exhaustAllowed) {
      flags |= OPTS_EXHAUST_ALLOWED;
    }

    // MsgHeader (4 int32s) plus the uint32 flagBits.
    const header = Buffer.alloc(
      4 * 4 + // Header
        4 // Flags
    );

    buffers.push(header);

    let totalLength = header.length;
    const command = this.command;
    totalLength += this.makeDocumentSegment(buffers, command);

    // Backfill the header now that the total length is known.
    header.writeInt32LE(totalLength, 0); // messageLength
    header.writeInt32LE(this.requestId, 4); // requestID
    header.writeInt32LE(0, 8); // responseTo
    header.writeInt32LE(opcodes.OP_MSG, 12); // opCode
    header.writeUInt32LE(flags, 16); // flags
    return buffers;
  }

  /**
   * Append a payload-type-0 section (a single BSON document) to `buffers`.
   * @param {Buffer[]} buffers output buffer list, appended to in place
   * @param {object} document the document to serialize
   * @return {number} number of bytes appended
   */
  makeDocumentSegment(buffers, document) {
    const payloadTypeBuffer = Buffer.alloc(1);
    payloadTypeBuffer[0] = 0; // payloadType 0: single document

    const documentBuffer = this.serializeBson(document);
    buffers.push(payloadTypeBuffer);
    buffers.push(documentBuffer);

    return payloadTypeBuffer.length + documentBuffer.length;
  }

  // Serialize a document using this message's BSON serialization settings.
  serializeBson(document) {
    return this.bson.serialize(document, {
      checkKeys: this.checkKeys,
      serializeFunctions: this.serializeFunctions,
      ignoreUndefined: this.ignoreUndefined
    });
  }
}
|
||||
|
||||
// Allocate the next monotonically increasing request id, wrapping within the
// positive int32 range so it always fits the wire protocol's int32 requestID.
Msg.getRequestId = function() {
  return (_requestId = (_requestId + 1) & 0x7fffffff);
};
|
||||
|
||||
/**
 * Parsed representation of an inbound OP_MSG reply.
 *
 * Stores the raw message bytes plus the already-decoded MsgHeader fields,
 * decodes the response flag bits immediately, and defers BSON
 * deserialization of the body documents until parse() is called.
 */
class BinMsg {
  /**
   * @param {object} bson BSON deserializer (must provide `deserialize`)
   * @param {Buffer} message the complete raw message
   * @param {object} msgHeader decoded header (length, requestId, responseTo, opCode, fromCompressed)
   * @param {Buffer} msgBody message body, starting at the flagBits uint32
   * @param {object} [opts] default promotion options applied when parse() gets no overrides
   */
  constructor(bson, message, msgHeader, msgBody, opts) {
    opts = opts || { promoteLongs: true, promoteValues: true, promoteBuffers: false };
    this.parsed = false;
    this.raw = message;
    this.data = msgBody;
    this.bson = bson;
    this.opts = opts;

    // Read the message header
    this.length = msgHeader.length;
    this.requestId = msgHeader.requestId;
    this.responseTo = msgHeader.responseTo;
    this.opCode = msgHeader.opCode;
    this.fromCompressed = msgHeader.fromCompressed;

    // Read response flags (first int32 of the body)
    this.responseFlags = msgBody.readInt32LE(0);
    this.checksumPresent = (this.responseFlags & OPTS_CHECKSUM_PRESENT) !== 0;
    this.moreToCome = (this.responseFlags & OPTS_MORE_TO_COME) !== 0;
    this.exhaustAllowed = (this.responseFlags & OPTS_EXHAUST_ALLOWED) !== 0;
    this.promoteLongs = typeof opts.promoteLongs === 'boolean' ? opts.promoteLongs : true;
    this.promoteValues = typeof opts.promoteValues === 'boolean' ? opts.promoteValues : true;
    this.promoteBuffers = typeof opts.promoteBuffers === 'boolean' ? opts.promoteBuffers : false;

    // Deserialized body documents; populated by parse().
    this.documents = [];
  }

  // Whether parse() has already run for this message.
  isParsed() {
    return this.parsed;
  }

  /**
   * Deserialize the BSON documents contained in the message body into
   * `this.documents`. Idempotent: subsequent calls are no-ops.
   * @param {object} [options] per-call overrides
   * @param {boolean} [options.raw=false] push raw BSON buffers instead of deserializing
   * @param {string} [options.documentsReturnedIn] with `raw`, the field whose
   *   contents should remain raw BSON while the envelope is deserialized
   * @param {boolean} [options.promoteLongs] override constructor default
   * @param {boolean} [options.promoteValues] override constructor default
   * @param {boolean} [options.promoteBuffers] override constructor default
   */
  parse(options) {
    // Don't parse again if not needed
    if (this.parsed) return;
    options = options || {};

    // Skip the 4 flag-bit bytes; sections start immediately after them.
    this.index = 4;
    // Allow the return of raw documents instead of parsing
    const raw = options.raw || false;
    const documentsReturnedIn = options.documentsReturnedIn || null;
    const promoteLongs =
      typeof options.promoteLongs === 'boolean' ? options.promoteLongs : this.opts.promoteLongs;
    const promoteValues =
      typeof options.promoteValues === 'boolean' ? options.promoteValues : this.opts.promoteValues;
    const promoteBuffers =
      typeof options.promoteBuffers === 'boolean'
        ? options.promoteBuffers
        : this.opts.promoteBuffers;

    // Set up the options
    const _options = {
      promoteLongs: promoteLongs,
      promoteValues: promoteValues,
      promoteBuffers: promoteBuffers
    };

    // Walk each section of the body.
    while (this.index < this.data.length) {
      const payloadType = this.data.readUInt8(this.index++);
      if (payloadType === 1) {
        // NOTE(review): document-sequence sections (payloadType 1) are not
        // implemented — this branch only logs to stderr and does not advance
        // past the section, so a real type-1 section would derail parsing.
        // Confirm servers never send them to this driver version.
        console.error('TYPE 1');
      } else if (payloadType === 0) {
        // A single BSON document; its own first int32 is its total size.
        const bsonSize = this.data.readUInt32LE(this.index);
        const bin = this.data.slice(this.index, this.index + bsonSize);
        this.documents.push(raw ? bin : this.bson.deserialize(bin, _options));

        this.index += bsonSize;
      }
    }

    // When exactly one raw document was collected and a result field was
    // named, deserialize the envelope but keep that field's contents raw.
    if (this.documents.length === 1 && documentsReturnedIn != null && raw) {
      const fieldsAsRaw = {};
      fieldsAsRaw[documentsReturnedIn] = true;
      _options.fieldsAsRaw = fieldsAsRaw;

      const doc = this.bson.deserialize(this.documents[0], _options);
      this.documents = [doc];
    }

    this.parsed = true;
  }
}

module.exports = { Msg, BinMsg };
|
||||
1281
node_modules/mongodb/lib/core/connection/pool.js
generated
vendored
Normal file
1281
node_modules/mongodb/lib/core/connection/pool.js
generated
vendored
Normal file
File diff suppressed because it is too large
Load Diff
57
node_modules/mongodb/lib/core/connection/utils.js
generated
vendored
Normal file
57
node_modules/mongodb/lib/core/connection/utils.js
generated
vendored
Normal file
@@ -0,0 +1,57 @@
|
||||
'use strict';
|
||||
|
||||
const require_optional = require('require_optional');
|
||||
|
||||
/**
 * Build a shallow copy of `options` restricted to the keys listed in
 * `debugFields` (used to produce redacted option dumps for debug logging).
 * Keys absent from `options` still appear in the result with value
 * `undefined`, mirroring the requested field list.
 * @param {string[]} debugFields names of the option fields to copy
 * @param {object} options source options object
 * @return {object} object containing only the requested fields
 */
function debugOptions(debugFields, options) {
  const picked = {};
  for (const field of debugFields) {
    picked[field] = options[field];
  }

  return picked;
}
|
||||
|
||||
/**
 * Load a BSON implementation, preferring the native `bson-ext` addon when it
 * is installed and falling back to the pure-JS `bson` package otherwise.
 * The returned module is tagged with a `native` boolean so callers can tell
 * which implementation they received.
 * @return {object} a BSON module
 */
function retrieveBSON() {
  const jsBSON = require('bson');
  jsBSON.native = false;

  try {
    const nativeBSON = require_optional('bson-ext');
    if (nativeBSON) {
      nativeBSON.native = true;
      return nativeBSON;
    }
  } catch (err) {} // eslint-disable-line

  return jsBSON;
}
|
||||
|
||||
// Throw an error if an attempt to use Snappy is made when Snappy is not installed
|
||||
/**
 * Stub substituted for the Snappy API when the optional `snappy` module is
 * not installed: any attempted use fails with an explanatory error.
 * @throws {Error} always
 */
function noSnappyWarning() {
  const message =
    'Attempted to use Snappy compression, but Snappy is not installed. Install or disable Snappy compression and try again.';
  throw new Error(message);
}
|
||||
|
||||
// Facilitate loading Snappy optionally
|
||||
/**
 * Optionally load the native Snappy compression bindings.
 * When the `snappy` module is unavailable, return a stub exposing the same
 * four functions, each of which throws an informative error on use.
 * @return {object} the snappy module, or a throwing stub of identical shape
 */
function retrieveSnappy() {
  let snappy = null;

  try {
    snappy = require_optional('snappy');
  } catch (error) {} // eslint-disable-line

  if (snappy) return snappy;

  // Stub out the full Snappy surface so any attempted use fails loudly.
  return {
    compress: noSnappyWarning,
    uncompress: noSnappyWarning,
    compressSync: noSnappyWarning,
    uncompressSync: noSnappyWarning
  };
}
|
||||
|
||||
// Shared connection-layer helpers: option redaction for debug output and
// optional-dependency loaders for BSON and Snappy.
module.exports = {
  debugOptions,
  retrieveBSON,
  retrieveSnappy
};
|
||||
Reference in New Issue
Block a user