diff --git a/.gitignore b/.gitignore
index 9610afd15bc..7a71ba0b970 100644
--- a/.gitignore
+++ b/.gitignore
@@ -95,3 +95,7 @@ node-artifacts
# AWS SAM generated
test/lambda/.aws-sam
test/lambda/env.json
+
+!encryption/lib
+!encryption/test
+!encryption/test/types
diff --git a/encryption/lib/autoEncrypter.js b/encryption/lib/autoEncrypter.js
new file mode 100644
index 00000000000..f4e3dc081b7
--- /dev/null
+++ b/encryption/lib/autoEncrypter.js
@@ -0,0 +1,440 @@
+'use strict';
+
+module.exports = function (modules) {
+ const mc = require('bindings')('mongocrypt');
+ const common = require('./common');
+ const databaseNamespace = common.databaseNamespace;
+ const StateMachine = modules.stateMachine.StateMachine;
+ const MongocryptdManager = require('./mongocryptdManager').MongocryptdManager;
+ const MongoClient = modules.mongodb.MongoClient;
+ const MongoError = modules.mongodb.MongoError;
+ const BSON = modules.mongodb.BSON;
+ const { loadCredentials } = require('./providers/index');
+ const cryptoCallbacks = require('./cryptoCallbacks');
+
+ /**
+ * Configuration options for automatic client encryption.
+ *
+ * @typedef {Object} AutoEncrypter~AutoEncryptionOptions
+ * @property {MongoClient} [keyVaultClient] A `MongoClient` used to fetch keys from a key vault
+ * @property {string} [keyVaultNamespace] The namespace where keys are stored in the key vault
+ * @property {KMSProviders} [kmsProviders] Configuration options that are used by specific KMS providers during key generation, encryption, and decryption.
+ * @property {object} [schemaMap] A map of namespaces to a local JSON schema for encryption
+ * @property {boolean} [bypassAutoEncryption] Allows the user to bypass auto encryption, maintaining implicit decryption
+ * @property {AutoEncrypter~logger} [logger] An optional hook to catch logging messages from the underlying encryption engine
+ * @property {AutoEncrypter~AutoEncryptionExtraOptions} [extraOptions] Extra options related to the mongocryptd process
+ */
+
+ /**
+ * Extra options related to the mongocryptd process
+ * \* _Available in MongoDB 6.0 or higher._
+ * @typedef {object} AutoEncrypter~AutoEncryptionExtraOptions
+ * @property {string} [mongocryptdURI] A local process the driver communicates with to determine how to encrypt values in a command. Defaults to "mongodb://%2Fvar%2Fmongocryptd.sock" if domain sockets are available or "mongodb://localhost:27020" otherwise
+ * @property {boolean} [mongocryptdBypassSpawn=false] If true, autoEncryption will not attempt to spawn a mongocryptd before connecting
+ * @property {string} [mongocryptdSpawnPath] The path to the mongocryptd executable on the system
+ * @property {string[]} [mongocryptdSpawnArgs] Command line arguments to use when auto-spawning a mongocryptd
+ * @property {string} [cryptSharedLibPath] Full path to a MongoDB Crypt shared library on the system. If specified, autoEncryption will not attempt to spawn a mongocryptd, but makes use of the shared library file specified. Note that the path must point to the shared library file itself, not the folder which contains it \*
+ * @property {boolean} [cryptSharedLibRequired] If true, never use mongocryptd and fail when the MongoDB Crypt shared library cannot be loaded. Defaults to true if [cryptSharedLibPath] is specified and false otherwise \*
+ */
+
+ /**
+ * @callback AutoEncrypter~logger
+ * @description A callback that is invoked with logging information from
+ * the underlying C++ Bindings.
+ * @param {AutoEncrypter~logLevel} level The level of logging.
+ * @param {string} message The message to log
+ */
+
+ /**
+ * @name AutoEncrypter~logLevel
+ * @enum {number}
+ * @description
+ * The level of severity of the log message
+ *
+ * | Value | Level |
+ * |-------|-------|
+ * | 0 | Fatal Error |
+ * | 1 | Error |
+ * | 2 | Warning |
+ * | 3 | Info |
+ * | 4 | Trace |
+ */
+
+ /**
+ * @classdesc An internal class to be used by the driver for auto encryption
+ * **NOTE**: Not meant to be instantiated directly, this is for internal use only.
+ */
+ class AutoEncrypter {
+ /**
+ * Create an AutoEncrypter
+ *
+ * **Note**: Do not instantiate this class directly. Rather, supply the relevant options to a MongoClient
+ *
+ * **Note**: Supplying `options.schemaMap` provides more security than relying on JSON Schemas obtained from the server.
+ * It protects against a malicious server advertising a false JSON Schema, which could trick the client into sending unencrypted data that should be encrypted.
+ * Schemas supplied in the schemaMap only apply to configuring automatic encryption for Client-Side Field Level Encryption.
+ * Other validation rules in the JSON schema will not be enforced by the driver and will result in an error.
+ * @param {MongoClient} client The client autoEncryption is enabled on
+ * @param {AutoEncrypter~AutoEncryptionOptions} [options] Optional settings
+ *
+ * @example
+ * Create an AutoEncrypter that makes use of mongocryptd
+ * // Enabling autoEncryption via a MongoClient using mongocryptd
+ * const { MongoClient } = require('mongodb');
+ * const client = new MongoClient(URL, {
+ * autoEncryption: {
+ * kmsProviders: {
+ * aws: {
+ * accessKeyId: AWS_ACCESS_KEY,
+ * secretAccessKey: AWS_SECRET_KEY
+ * }
+ * }
+ * }
+ * });
+ *
+ * await client.connect();
+ * // From here on, the client will be encrypting / decrypting automatically
+ * @example Create an AutoEncrypter that makes use of libmongocrypt's CSFLE shared library
+ * // Enabling autoEncryption via a MongoClient using CSFLE shared library
+ * const { MongoClient } = require('mongodb');
+ * const client = new MongoClient(URL, {
+ * autoEncryption: {
+ * kmsProviders: {
+ * aws: {}
+ * },
+ * extraOptions: {
+ * cryptSharedLibPath: '/path/to/local/crypt/shared/lib',
+ * cryptSharedLibRequired: true
+ * }
+ * }
+ * });
+ *
+ * await client.connect();
+ * // From here on, the client will be encrypting / decrypting automatically
+ */
+ constructor(client, options) {
+ this._client = client;
+ this._bson = options.bson || BSON || client.topology.bson;
+ this._bypassEncryption = options.bypassAutoEncryption === true;
+
+ this._keyVaultNamespace = options.keyVaultNamespace || 'admin.datakeys';
+ this._keyVaultClient = options.keyVaultClient || client;
+ this._metaDataClient = options.metadataClient || client;
+ this._proxyOptions = options.proxyOptions || {};
+ this._tlsOptions = options.tlsOptions || {};
+ this._onKmsProviderRefresh = options.onKmsProviderRefresh;
+ this._kmsProviders = options.kmsProviders || {};
+
+ const mongoCryptOptions = {};
+ if (options.schemaMap) {
+ mongoCryptOptions.schemaMap = Buffer.isBuffer(options.schemaMap)
+ ? options.schemaMap
+ : this._bson.serialize(options.schemaMap);
+ }
+
+ if (options.encryptedFieldsMap) {
+ mongoCryptOptions.encryptedFieldsMap = Buffer.isBuffer(options.encryptedFieldsMap)
+ ? options.encryptedFieldsMap
+ : this._bson.serialize(options.encryptedFieldsMap);
+ }
+
+ mongoCryptOptions.kmsProviders = !Buffer.isBuffer(this._kmsProviders)
+ ? this._bson.serialize(this._kmsProviders)
+ : this._kmsProviders;
+
+ if (options.logger) {
+ mongoCryptOptions.logger = options.logger;
+ }
+
+ if (options.extraOptions && options.extraOptions.cryptSharedLibPath) {
+ mongoCryptOptions.cryptSharedLibPath = options.extraOptions.cryptSharedLibPath;
+ }
+
+ if (options.bypassQueryAnalysis) {
+ mongoCryptOptions.bypassQueryAnalysis = options.bypassQueryAnalysis;
+ }
+
+ this._bypassMongocryptdAndCryptShared = this._bypassEncryption || options.bypassQueryAnalysis;
+
+ if (options.extraOptions && options.extraOptions.cryptSharedLibSearchPaths) {
+ // Only for driver testing
+ mongoCryptOptions.cryptSharedLibSearchPaths =
+ options.extraOptions.cryptSharedLibSearchPaths;
+ } else if (!this._bypassMongocryptdAndCryptShared) {
+ mongoCryptOptions.cryptSharedLibSearchPaths = ['$SYSTEM'];
+ }
+
+ Object.assign(mongoCryptOptions, { cryptoCallbacks });
+ this._mongocrypt = new mc.MongoCrypt(mongoCryptOptions);
+ this._contextCounter = 0;
+
+ if (
+ options.extraOptions &&
+ options.extraOptions.cryptSharedLibRequired &&
+ !this.cryptSharedLibVersionInfo
+ ) {
+ throw new MongoError('`cryptSharedLibRequired` set but no crypt_shared library loaded');
+ }
+
+ // Only instantiate mongocryptd manager/client once we know for sure
+ // that we are not using the CSFLE shared library.
+ if (!this._bypassMongocryptdAndCryptShared && !this.cryptSharedLibVersionInfo) {
+ this._mongocryptdManager = new MongocryptdManager(options.extraOptions);
+ const clientOptions = {
+ useNewUrlParser: true,
+ useUnifiedTopology: true,
+ serverSelectionTimeoutMS: 10000
+ };
+
+ if (
+ options.extraOptions == null ||
+ typeof options.extraOptions.mongocryptdURI !== 'string'
+ ) {
+ clientOptions.family = 4;
+ }
+
+ this._mongocryptdClient = new MongoClient(this._mongocryptdManager.uri, clientOptions);
+ }
+ }
+
+ /**
+ * @ignore
+ * @param {Function} callback Invoked when the mongocryptd client either successfully connects or errors
+ */
+ init(callback) {
+ if (this._bypassMongocryptdAndCryptShared || this.cryptSharedLibVersionInfo) {
+ return callback();
+ }
+ const _callback = (err, res) => {
+ if (
+ err &&
+ err.message &&
+ (err.message.match(/timed out after/) || err.message.match(/ENOTFOUND/))
+ ) {
+ callback(
+ new MongoError(
+ 'Unable to connect to `mongocryptd`, please make sure it is running or in your PATH for auto-spawn'
+ )
+ );
+ return;
+ }
+
+ callback(err, res);
+ };
+
+ if (this._mongocryptdManager.bypassSpawn) {
+ return this._mongocryptdClient.connect().then(
+ result => {
+ return _callback(null, result);
+ },
+ error => {
+ _callback(error, null);
+ }
+ );
+ }
+
+ this._mongocryptdManager.spawn(() => {
+ this._mongocryptdClient.connect().then(
+ result => {
+ return _callback(null, result);
+ },
+ error => {
+ _callback(error, null);
+ }
+ );
+ });
+ }
+
+ /**
+ * @ignore
+ * @param {Function} callback Invoked when the mongocryptd client either successfully disconnects or errors
+ */
+ teardown(force, callback) {
+ if (this._mongocryptdClient) {
+ this._mongocryptdClient.close(force).then(
+ result => {
+ return callback(null, result);
+ },
+ error => {
+ callback(error);
+ }
+ );
+ } else {
+ callback();
+ }
+ }
+
+ /**
+ * @ignore
+ * Encrypt a command for a given namespace.
+ *
+ * @param {string} ns The namespace for this encryption context
+ * @param {object} cmd The command to encrypt
+ * @param {Function} callback
+ */
+ encrypt(ns, cmd, options, callback) {
+ if (typeof ns !== 'string') {
+ throw new TypeError('Parameter `ns` must be a string');
+ }
+
+ if (typeof cmd !== 'object') {
+ throw new TypeError('Parameter `cmd` must be an object');
+ }
+
+ if (typeof options === 'function' && callback == null) {
+ callback = options;
+ options = {};
+ }
+
+ // If `bypassAutoEncryption` has been specified, don't encrypt
+ if (this._bypassEncryption) {
+ callback(undefined, cmd);
+ return;
+ }
+
+ const bson = this._bson;
+ const commandBuffer = Buffer.isBuffer(cmd) ? cmd : bson.serialize(cmd, options);
+
+ let context;
+ try {
+ context = this._mongocrypt.makeEncryptionContext(databaseNamespace(ns), commandBuffer);
+ } catch (err) {
+ callback(err, null);
+ return;
+ }
+
+ // TODO: should these be accessors from the addon?
+ context.id = this._contextCounter++;
+ context.ns = ns;
+ context.document = cmd;
+
+ const stateMachine = new StateMachine({
+ bson,
+ ...options,
+ promoteValues: false,
+ promoteLongs: false,
+ proxyOptions: this._proxyOptions,
+ tlsOptions: this._tlsOptions
+ });
+ stateMachine.execute(this, context, callback);
+ }
+
+ /**
+ * @ignore
+ * Decrypt a command response
+ *
+ * @param {Buffer} buffer
+ * @param {Function} callback
+ */
+ decrypt(response, options, callback) {
+ if (typeof options === 'function' && callback == null) {
+ callback = options;
+ options = {};
+ }
+
+ const bson = this._bson;
+ const buffer = Buffer.isBuffer(response) ? response : bson.serialize(response, options);
+
+ let context;
+ try {
+ context = this._mongocrypt.makeDecryptionContext(buffer);
+ } catch (err) {
+ callback(err, null);
+ return;
+ }
+
+ // TODO: should this be an accessor from the addon?
+ context.id = this._contextCounter++;
+
+ const stateMachine = new StateMachine({
+ bson,
+ ...options,
+ proxyOptions: this._proxyOptions,
+ tlsOptions: this._tlsOptions
+ });
+
+ const decorateResult = this[Symbol.for('@@mdb.decorateDecryptionResult')];
+ stateMachine.execute(this, context, function (err, result) {
+ // Only for testing/internal usage
+ if (!err && result && decorateResult) {
+ err = decorateDecryptionResult(result, response, bson);
+ if (err) return callback(err);
+ }
+ callback(err, result);
+ });
+ }
+
+ /**
+ * Ask the user for KMS credentials.
+ *
+ * This returns anything that looks like the kmsProviders original input
+ * option. It can be empty, and any provider specified here will override
+ * the original ones.
+ */
+ async askForKMSCredentials() {
+ return this._onKmsProviderRefresh
+ ? this._onKmsProviderRefresh()
+ : loadCredentials(this._kmsProviders);
+ }
+
+ /**
+ * Return the current libmongocrypt's CSFLE shared library version
+ * as `{ version: bigint, versionStr: string }`, or `null` if no CSFLE
+ * shared library was loaded.
+ */
+ get cryptSharedLibVersionInfo() {
+ return this._mongocrypt.cryptSharedLibVersionInfo;
+ }
+
+ static get libmongocryptVersion() {
+ return mc.MongoCrypt.libmongocryptVersion;
+ }
+ }
+
+ return { AutoEncrypter };
+};
+
+/**
+ * Recurse through the (identically-shaped) `decrypted` and `original`
+ * objects and attach a `decryptedKeys` property on each sub-object that
+ * contained encrypted fields. Because we only call this on BSON responses,
+ * we do not need to worry about circular references.
+ *
+ * @internal
+ * @ignore
+ */
+function decorateDecryptionResult(decrypted, original, bson, isTopLevelDecorateCall = true) {
+ const decryptedKeys = Symbol.for('@@mdb.decryptedKeys');
+ if (isTopLevelDecorateCall) {
+ // The original value could have been either a JS object or a BSON buffer
+ if (Buffer.isBuffer(original)) {
+ original = bson.deserialize(original);
+ }
+ if (Buffer.isBuffer(decrypted)) {
+ return new Error('Expected result of decryption to be deserialized BSON object');
+ }
+ }
+
+ if (!decrypted || typeof decrypted !== 'object') return;
+ for (const k of Object.keys(decrypted)) {
+ const originalValue = original[k];
+
+ // An object was decrypted by libmongocrypt if and only if it was
+ // a BSON Binary object with subtype 6.
+ if (originalValue && originalValue._bsontype === 'Binary' && originalValue.sub_type === 6) {
+ if (!decrypted[decryptedKeys]) {
+ Object.defineProperty(decrypted, decryptedKeys, {
+ value: [],
+ configurable: true,
+ enumerable: false,
+ writable: false
+ });
+ }
+ decrypted[decryptedKeys].push(k);
+ // Do not recurse into this decrypted value. It could be a subdocument/array,
+ // in which case there is no original value associated with its subfields.
+ continue;
+ }
+
+ decorateDecryptionResult(decrypted[k], originalValue, bson, false);
+ }
+}
diff --git a/encryption/lib/buffer_pool.js b/encryption/lib/buffer_pool.js
new file mode 100644
index 00000000000..23a21fed774
--- /dev/null
+++ b/encryption/lib/buffer_pool.js
@@ -0,0 +1,123 @@
+'use strict';
+
+/**
+ * @internal
+ * @ignore
+ * */
+const kBuffers = Symbol('buffers');
+/**
+ * @internal
+ * @ignore
+ *
+ * */
+const kLength = Symbol('length');
+
+/**
+ * A pool of Buffers which allow you to read them as if they were one
+ * @internal
+ * @ignore
+ */
+class BufferPool {
+ // [kBuffers]: Buffer[];
+ // [kLength]: number;
+
+ constructor() {
+ this[kBuffers] = [];
+ this[kLength] = 0;
+ }
+
+ get length() {
+ return this[kLength];
+ }
+
+ /**
+ * Adds a buffer to the internal buffer pool list
+ * @param {Buffer} buffer - buffer to append to the pool
+ * @returns {void}
+ */
+ append(buffer) {
+ this[kBuffers].push(buffer);
+ this[kLength] += buffer.length;
+ }
+
+ /**
+ * Returns the requested number of bytes without consuming them
+ * @param {number} size - the number of bytes to return from the head of the pool
+ * @returns {Buffer}
+ */
+ peek(size) {
+ return this.read(size, false);
+ }
+
+ /**
+ * Reads the requested number of bytes, optionally consuming them
+ * @param {number} size - the number of bytes to return from the head of the pool
+ * @param {boolean} [consume] - whether the bytes returned should be removed, defaults to true
+ * @returns {Buffer}
+ */
+ read(size, consume = true) {
+ if (typeof size !== 'number' || size < 0) {
+ throw new Error('Argument "size" must be a non-negative number');
+ }
+
+ if (size > this[kLength]) {
+ return Buffer.alloc(0);
+ }
+
+ let result;
+
+ // read the whole buffer
+ if (size === this.length) {
+ result = Buffer.concat(this[kBuffers]);
+
+ if (consume) {
+ this[kBuffers] = [];
+ this[kLength] = 0;
+ }
+ }
+
+ // size is within first buffer, no need to concat
+ else if (size <= this[kBuffers][0].length) {
+ result = this[kBuffers][0].slice(0, size);
+ if (consume) {
+ this[kBuffers][0] = this[kBuffers][0].slice(size);
+ this[kLength] -= size;
+ }
+ }
+
+ // size is beyond first buffer, need to track and copy
+ else {
+ result = Buffer.allocUnsafe(size);
+
+ let idx;
+ let offset = 0;
+ let bytesToCopy = size;
+ for (idx = 0; idx < this[kBuffers].length; ++idx) {
+ let bytesCopied;
+ if (bytesToCopy > this[kBuffers][idx].length) {
+ bytesCopied = this[kBuffers][idx].copy(result, offset, 0);
+ offset += bytesCopied;
+ } else {
+ bytesCopied = this[kBuffers][idx].copy(result, offset, 0, bytesToCopy);
+ if (consume) {
+ this[kBuffers][idx] = this[kBuffers][idx].slice(bytesCopied);
+ }
+ offset += bytesCopied;
+ break;
+ }
+
+ bytesToCopy -= bytesCopied;
+ }
+
+ // compact the internal buffer array
+ if (consume) {
+ this[kBuffers] = this[kBuffers].slice(idx);
+ this[kLength] -= size;
+ }
+ }
+
+ return result;
+ }
+}
+
+module.exports = { BufferPool };
diff --git a/encryption/lib/clientEncryption.js b/encryption/lib/clientEncryption.js
new file mode 100644
index 00000000000..652df755c16
--- /dev/null
+++ b/encryption/lib/clientEncryption.js
@@ -0,0 +1,819 @@
+'use strict';
+
+module.exports = function (modules) {
+ const mc = require('bindings')('mongocrypt');
+ const common = require('./common');
+ const databaseNamespace = common.databaseNamespace;
+ const collectionNamespace = common.collectionNamespace;
+ const promiseOrCallback = common.promiseOrCallback;
+ const maybeCallback = common.maybeCallback;
+ const StateMachine = modules.stateMachine.StateMachine;
+ const BSON = modules.mongodb.BSON;
+ const {
+ MongoCryptCreateEncryptedCollectionError,
+ MongoCryptCreateDataKeyError
+ } = require('./errors');
+ const { loadCredentials } = require('./providers/index');
+ const cryptoCallbacks = require('./cryptoCallbacks');
+ const { promisify } = require('util');
+
+ /** @typedef {*} BSONValue - any serializable BSON value */
+ /** @typedef {BSON.Long} Long A 64 bit integer, represented by the js-bson Long type.*/
+
+ /**
+ * @typedef {object} KMSProviders Configuration options that are used by specific KMS providers during key generation, encryption, and decryption.
+ * @property {object} [aws] Configuration options for using 'aws' as your KMS provider
+ * @property {string} [aws.accessKeyId] The access key used for the AWS KMS provider
+ * @property {string} [aws.secretAccessKey] The secret access key used for the AWS KMS provider
+ * @property {object} [local] Configuration options for using 'local' as your KMS provider
+ * @property {Buffer} [local.key] The master key used to encrypt/decrypt data keys. A 96-byte long Buffer.
+ * @property {object} [azure] Configuration options for using 'azure' as your KMS provider
+ * @property {string} [azure.tenantId] The tenant ID identifies the organization for the account
+ * @property {string} [azure.clientId] The client ID to authenticate a registered application
+ * @property {string} [azure.clientSecret] The client secret to authenticate a registered application
+ * @property {string} [azure.identityPlatformEndpoint] If present, a host with optional port. E.g. "example.com" or "example.com:443". This is optional, and only needed if customer is using a non-commercial Azure instance (e.g. a government or China account, which use different URLs). Defaults to "login.microsoftonline.com"
+ * @property {object} [gcp] Configuration options for using 'gcp' as your KMS provider
+ * @property {string} [gcp.email] The service account email to authenticate
+ * @property {string|Binary} [gcp.privateKey] A PKCS#8 encrypted key. This can either be a base64 string or a binary representation
+ * @property {string} [gcp.endpoint] If present, a host with optional port. E.g. "example.com" or "example.com:443". Defaults to "oauth2.googleapis.com"
+ */
+
+ /**
+ * @typedef {object} DataKey A data key as stored in the database.
+ * @property {UUID} _id A unique identifier for the key.
+ * @property {number} version A numeric identifier for the schema version of this document. Implicitly 0 if unset.
+ * @property {string[]} [keyAltNames] Alternate names to search for keys by. Used for a per-document key scenario in support of GDPR scenarios.
+ * @property {Binary} keyMaterial Encrypted data key material, BinData type General.
+ * @property {Date} creationDate The datetime the wrapped data key material was imported into the Key Database.
+ * @property {Date} updateDate The datetime the wrapped data key material was last modified. On initial import, this value will be set to creationDate.
+ * @property {number} status 0 = enabled, 1 = disabled
+ * @property {object} masterKey the encrypted master key
+ */
+
+ /**
+ * @typedef {string} KmsProvider A string containing the name of a kms provider. Valid options are 'aws', 'azure', 'gcp', 'kmip', or 'local'
+ */
+
+ /**
+ * @typedef {object} ClientSession The ClientSession class from the MongoDB Node driver (see https://mongodb.github.io/node-mongodb-native/4.8/classes/ClientSession.html)
+ */
+
+ /**
+ * @typedef {object} DeleteResult The result of a delete operation from the MongoDB Node driver (see https://mongodb.github.io/node-mongodb-native/4.8/interfaces/DeleteResult.html)
+ * @property {boolean} acknowledged Indicates whether this write result was acknowledged. If not, then all other members of this result will be undefined.
+ * @property {number} deletedCount The number of documents that were deleted
+ */
+
+ /**
+ * @typedef {object} BulkWriteResult The BulkWriteResult class from the MongoDB Node driver (https://mongodb.github.io/node-mongodb-native/4.8/classes/BulkWriteResult.html)
+ */
+
+ /**
+ * @typedef {object} FindCursor The FindCursor class from the MongoDB Node driver (see https://mongodb.github.io/node-mongodb-native/4.8/classes/FindCursor.html)
+ */
+
+ /**
+ * The public interface for explicit in-use encryption
+ */
+ class ClientEncryption {
+ /**
+ * Create a new encryption instance
+ *
+ * @param {MongoClient} client The client used for encryption
+ * @param {object} options Additional settings
+ * @param {string} options.keyVaultNamespace The namespace of the key vault, used to store encryption keys
+ * @param {object} options.tlsOptions An object that maps KMS provider names to TLS options.
+ * @param {MongoClient} [options.keyVaultClient] A `MongoClient` used to fetch keys from a key vault. Defaults to `client`
+ * @param {KMSProviders} [options.kmsProviders] options for specific KMS providers to use
+ *
+ * @example
+ * new ClientEncryption(mongoClient, {
+ * keyVaultNamespace: 'client.encryption',
+ * kmsProviders: {
+ * local: {
+ * key: masterKey // The master key used for encryption/decryption. A 96-byte long Buffer
+ * }
+ * }
+ * });
+ *
+ * @example
+ * new ClientEncryption(mongoClient, {
+ * keyVaultNamespace: 'client.encryption',
+ * kmsProviders: {
+ * aws: {
+ * accessKeyId: AWS_ACCESS_KEY,
+ * secretAccessKey: AWS_SECRET_KEY
+ * }
+ * }
+ * });
+ */
+ constructor(client, options) {
+ this._client = client;
+ this._bson = options.bson || BSON || client.topology.bson;
+ this._proxyOptions = options.proxyOptions;
+ this._tlsOptions = options.tlsOptions;
+ this._kmsProviders = options.kmsProviders || {};
+
+ if (options.keyVaultNamespace == null) {
+ throw new TypeError('Missing required option `keyVaultNamespace`');
+ }
+
+ const mongoCryptOptions = { ...options, cryptoCallbacks };
+
+ mongoCryptOptions.kmsProviders = !Buffer.isBuffer(this._kmsProviders)
+ ? this._bson.serialize(this._kmsProviders)
+ : this._kmsProviders;
+
+ this._onKmsProviderRefresh = options.onKmsProviderRefresh;
+ this._keyVaultNamespace = options.keyVaultNamespace;
+ this._keyVaultClient = options.keyVaultClient || client;
+ this._mongoCrypt = new mc.MongoCrypt(mongoCryptOptions);
+ }
+
+ /**
+ * @typedef {Binary} ClientEncryptionDataKeyId
+ * The id of an existing dataKey. Is a bson Binary value.
+ * Can be used for {@link ClientEncryption.encrypt}, and can be used to directly
+ * query for the data key itself against the key vault namespace.
+ */
+
+ /**
+ * @callback ClientEncryptionCreateDataKeyCallback
+ * @param {Error} [error] If present, indicates an error that occurred in the creation of the data key
+ * @param {ClientEncryption~dataKeyId} [dataKeyId] If present, returns the id of the created data key
+ */
+
+ /**
+ * @typedef {object} AWSEncryptionKeyOptions Configuration options for making an AWS encryption key
+ * @property {string} region The AWS region of the KMS
+ * @property {string} key The Amazon Resource Name (ARN) to the AWS customer master key (CMK)
+ * @property {string} [endpoint] An alternate host to send KMS requests to. May include port number
+ */
+
+ /**
+ * @typedef {object} GCPEncryptionKeyOptions Configuration options for making a GCP encryption key
+ * @property {string} projectId GCP project id
+ * @property {string} location Location name (e.g. "global")
+ * @property {string} keyRing Key ring name
+ * @property {string} keyName Key name
+ * @property {string} [keyVersion] Key version
+ * @property {string} [endpoint] KMS URL, defaults to `https://www.googleapis.com/auth/cloudkms`
+ */
+
+ /**
+ * @typedef {object} AzureEncryptionKeyOptions Configuration options for making an Azure encryption key
+ * @property {string} keyName Key name
+ * @property {string} keyVaultEndpoint Key vault URL, typically `.vault.azure.net`
+ * @property {string} [keyVersion] Key version
+ */
+
+ /**
+ * Creates a data key used for explicit encryption and inserts it into the key vault namespace
+ *
+ * @param {string} provider The KMS provider used for this data key. Must be `'aws'`, `'azure'`, `'gcp'`, or `'local'`
+ * @param {object} [options] Options for creating the data key
+ * @param {AWSEncryptionKeyOptions|AzureEncryptionKeyOptions|GCPEncryptionKeyOptions} [options.masterKey] Identifies a new KMS-specific key used to encrypt the new data key
+ * @param {string[]} [options.keyAltNames] An optional list of string alternate names used to reference a key. If a key is created with alternate names, then encryption may refer to the key by the unique alternate name instead of by _id.
+ * @param {ClientEncryptionCreateDataKeyCallback} [callback] Optional callback to invoke when key is created
+ * @returns {Promise|void} If no callback is provided, returns a Promise that either resolves with {@link ClientEncryption~dataKeyId the id of the created data key}, or rejects with an error. If a callback is provided, returns nothing.
+ * @example
+ * // Using callbacks to create a local key
+ * clientEncryption.createDataKey('local', (err, dataKey) => {
+ * if (err) {
+ * // This means creating the key failed.
+ * } else {
+ * // key creation succeeded
+ * }
+ * });
+ *
+ * @example
+ * // Using async/await to create a local key
+ * const dataKeyId = await clientEncryption.createDataKey('local');
+ *
+ * @example
+ * // Using async/await to create an aws key
+ * const dataKeyId = await clientEncryption.createDataKey('aws', {
+ * masterKey: {
+ * region: 'us-east-1',
+ * key: 'xxxxxxxxxxxxxx' // CMK ARN here
+ * }
+ * });
+ *
+ * @example
+ * // Using async/await to create an aws key with a keyAltName
+ * const dataKeyId = await clientEncryption.createDataKey('aws', {
+ * masterKey: {
+ * region: 'us-east-1',
+ * key: 'xxxxxxxxxxxxxx' // CMK ARN here
+ * },
+ * keyAltNames: [ 'mySpecialKey' ]
+ * });
+ */
+ createDataKey(provider, options, callback) {
+ if (typeof options === 'function') {
+ callback = options;
+ options = {};
+ }
+ if (options == null) {
+ options = {};
+ }
+
+ const bson = this._bson;
+
+ const dataKey = Object.assign({ provider }, options.masterKey);
+
+ if (options.keyAltNames && !Array.isArray(options.keyAltNames)) {
+ throw new TypeError(
+ `Option "keyAltNames" must be an array of strings, but was of type ${typeof options.keyAltNames}.`
+ );
+ }
+
+ let keyAltNames = undefined;
+ if (options.keyAltNames && options.keyAltNames.length > 0) {
+ keyAltNames = options.keyAltNames.map((keyAltName, i) => {
+ if (typeof keyAltName !== 'string') {
+ throw new TypeError(
+ `Option "keyAltNames" must be an array of strings, but item at index ${i} was of type ${typeof keyAltName}`
+ );
+ }
+
+ return bson.serialize({ keyAltName });
+ });
+ }
+
+ let keyMaterial = undefined;
+ if (options.keyMaterial) {
+ keyMaterial = bson.serialize({ keyMaterial: options.keyMaterial });
+ }
+
+ const dataKeyBson = bson.serialize(dataKey);
+ const context = this._mongoCrypt.makeDataKeyContext(dataKeyBson, {
+ keyAltNames,
+ keyMaterial
+ });
+ const stateMachine = new StateMachine({
+ bson,
+ proxyOptions: this._proxyOptions,
+ tlsOptions: this._tlsOptions
+ });
+
+ return promiseOrCallback(callback, cb => {
+ stateMachine.execute(this, context, (err, dataKey) => {
+ if (err) {
+ cb(err, null);
+ return;
+ }
+
+ const dbName = databaseNamespace(this._keyVaultNamespace);
+ const collectionName = collectionNamespace(this._keyVaultNamespace);
+
+ this._keyVaultClient
+ .db(dbName)
+ .collection(collectionName)
+ .insertOne(dataKey, { writeConcern: { w: 'majority' } })
+ .then(
+ result => {
+ return cb(null, result.insertedId);
+ },
+ err => {
+ cb(err, null);
+ }
+ );
+ });
+ });
+ }
+
+ /**
+ * @typedef {object} RewrapManyDataKeyResult
+ * @property {BulkWriteResult} [bulkWriteResult] An optional BulkWriteResult, if any keys were matched and attempted to be re-wrapped.
+ */
+
+ /**
+ * Searches the keyvault for any data keys matching the provided filter. If there are matches, rewrapManyDataKey then attempts to re-wrap the data keys using the provided options.
+ *
+ * If no matches are found, then no bulk write is performed.
+ *
+ * @param {object} filter A valid MongoDB filter. Any documents matching this filter will be re-wrapped.
+ * @param {object} [options]
+ * @param {KmsProvider} options.provider The KMS provider to use when re-wrapping the data keys.
+ * @param {AWSEncryptionKeyOptions | AzureEncryptionKeyOptions | GCPEncryptionKeyOptions} [options.masterKey]
+ * @returns {Promise}
+ *
+ * @example
+ * // rewrapping all data keys (using a filter that matches all documents)
+ * const filter = {};
+ *
+ * const result = await clientEncryption.rewrapManyDataKey(filter);
+ * if (result.bulkWriteResult != null) {
+ * // keys were re-wrapped, results will be available in the bulkWrite object.
+ * }
+ *
+ * @example
+ * // attempting to rewrap all data keys with no matches
+ * const filter = { _id: new Binary() } // assume _id matches no documents in the database
+ * const result = await clientEncryption.rewrapManyDataKey(filter);
+ *
+ * if (result.bulkWriteResult == null) {
+ * // no keys matched, `bulkWriteResult` does not exist on the result object
+ * }
+ */
+ async rewrapManyDataKey(filter, options) {
+ const bson = this._bson;
+
+ let keyEncryptionKeyBson = undefined;
+ if (options) {
+ const keyEncryptionKey = Object.assign({ provider: options.provider }, options.masterKey);
+ keyEncryptionKeyBson = bson.serialize(keyEncryptionKey);
+ } else {
+ // Always make sure `options` is an object below.
+ options = {};
+ }
+ const filterBson = bson.serialize(filter);
+ const context = this._mongoCrypt.makeRewrapManyDataKeyContext(
+ filterBson,
+ keyEncryptionKeyBson
+ );
+ const stateMachine = new StateMachine({
+ bson,
+ proxyOptions: this._proxyOptions,
+ tlsOptions: this._tlsOptions
+ });
+
+ const execute = promisify(stateMachine.execute.bind(stateMachine));
+
+ const dataKey = await execute(this, context);
+ if (!dataKey || dataKey.v.length === 0) {
+ return {};
+ }
+
+ const dbName = databaseNamespace(this._keyVaultNamespace);
+ const collectionName = collectionNamespace(this._keyVaultNamespace);
+ const replacements = dataKey.v.map(key => ({
+ updateOne: {
+ filter: { _id: key._id },
+ update: {
+ $set: {
+ masterKey: key.masterKey,
+ keyMaterial: key.keyMaterial
+ },
+ $currentDate: {
+ updateDate: true
+ }
+ }
+ }
+ }));
+
+ const result = await this._keyVaultClient
+ .db(dbName)
+ .collection(collectionName)
+ .bulkWrite(replacements, {
+ writeConcern: { w: 'majority' }
+ });
+
+ return { bulkWriteResult: result };
+ }
+
+ /**
+ * Deletes the key with the provided id from the keyvault, if it exists.
+ *
+ * @param {ClientEncryptionDataKeyId} _id - the id of the document to delete.
+ * @returns {Promise} Returns a promise that either resolves to a {@link DeleteResult} or rejects with an error.
+ *
+ * @example
+ * // delete a key by _id
+ * const id = new Binary(); // id is a bson binary subtype 4 object
+ * const { deletedCount } = await clientEncryption.deleteKey(id);
+ *
+ * if (deletedCount != null && deletedCount > 0) {
+ * // successful deletion
+ * }
+ *
+ */
+ async deleteKey(_id) {
+ const dbName = databaseNamespace(this._keyVaultNamespace);
+ const collectionName = collectionNamespace(this._keyVaultNamespace);
+ return await this._keyVaultClient
+ .db(dbName)
+ .collection(collectionName)
+ .deleteOne({ _id }, { writeConcern: { w: 'majority' } });
+ }
+
+ /**
+ * Finds all the keys currently stored in the keyvault.
+ *
+ * This method will not throw.
+ *
+ * @returns {FindCursor} a FindCursor over all keys in the keyvault.
+ * @example
+ * // fetching all keys
+ * const keys = await clientEncryption.getKeys().toArray();
+ */
+ getKeys() {
+ const dbName = databaseNamespace(this._keyVaultNamespace);
+ const collectionName = collectionNamespace(this._keyVaultNamespace);
+ return this._keyVaultClient
+ .db(dbName)
+ .collection(collectionName)
+ .find({}, { readConcern: { level: 'majority' } });
+ }
+
+ /**
+ * Finds a key in the keyvault with the specified _id.
+ *
+ * @param {ClientEncryptionDataKeyId} _id - the id of the document to find.
+ * @returns {Promise} Returns a promise that either resolves to a {@link DataKey} if a document matches the key or null if no documents
+ * match the id. The promise rejects with an error if an error is thrown.
+ * @example
+ * // getting a key by id
+ * const id = new Binary(); // id is a bson binary subtype 4 object
+ * const key = await clientEncryption.getKey(id);
+ * if (!key) {
+ * // key is null if there was no matching key
+ * }
+ */
+ async getKey(_id) {
+ const dbName = databaseNamespace(this._keyVaultNamespace);
+ const collectionName = collectionNamespace(this._keyVaultNamespace);
+ return await this._keyVaultClient
+ .db(dbName)
+ .collection(collectionName)
+ .findOne({ _id }, { readConcern: { level: 'majority' } });
+ }
+
+ /**
+ * Finds a key in the keyvault which has the specified keyAltName.
+ *
+ * @param {string} keyAltName - a keyAltName to search for a key
+ * @returns {Promise} Returns a promise that either resolves to a {@link DataKey} if a document matches the key or null if no documents
+ * match the keyAltName. The promise rejects with an error if an error is thrown.
+ * @example
+ * // get a key by alt name
+ * const keyAltName = 'keyAltName';
+ * const key = await clientEncryption.getKeyByAltName(keyAltName);
+ * if (!key) {
+ * // key is null if there is no matching key
+ * }
+ */
+ async getKeyByAltName(keyAltName) {
+ const dbName = databaseNamespace(this._keyVaultNamespace);
+ const collectionName = collectionNamespace(this._keyVaultNamespace);
+ return await this._keyVaultClient
+ .db(dbName)
+ .collection(collectionName)
+ .findOne({ keyAltNames: keyAltName }, { readConcern: { level: 'majority' } });
+ }
+
+ /**
+ * Adds a keyAltName to a key identified by the provided _id.
+ *
+ * This method resolves to/returns the *old* key value (prior to adding the new keyAltName).
+ *
+ * @param {ClientEncryptionDataKeyId} _id The id of the document to update.
+ * @param {string} keyAltName - the keyAltName to add to the matched key
+ * @returns {Promise} Returns a promise that either resolves to a {@link DataKey} if a document matches the key or null if no documents
+ * match the id. The promise rejects with an error if an error is thrown.
+ * @example
+ * // adding a keyAltName to a data key
+ * const id = new Binary(); // id is a bson binary subtype 4 object
+ * const keyAltName = 'keyAltName';
+ * const oldKey = await clientEncryption.addKeyAltName(id, keyAltName);
+ * if (!oldKey) {
+ * // null is returned if there is no matching document with an id matching the supplied id
+ * }
+ */
+ async addKeyAltName(_id, keyAltName) {
+ const dbName = databaseNamespace(this._keyVaultNamespace);
+ const collectionName = collectionNamespace(this._keyVaultNamespace);
+ const { value } = await this._keyVaultClient
+ .db(dbName)
+ .collection(collectionName)
+ .findOneAndUpdate(
+ { _id },
+ { $addToSet: { keyAltNames: keyAltName } },
+ { writeConcern: { w: 'majority' }, returnDocument: 'before' }
+ );
+
+ return value;
+ }
+
+ /**
+ * Removes a keyAltName from a key identified by the provided _id.
+ *
+ * This method resolves to/returns the *old* key value (prior to removing the keyAltName).
+ *
+ * If the removed keyAltName is the last keyAltName for that key, the `keyAltNames` property is unset from the document.
+ *
+ * @param {ClientEncryptionDataKeyId} _id The id of the document to update.
+ * @param {string} keyAltName - the keyAltName to remove from the matched key
+ * @returns {Promise} Returns a promise that either resolves to a {@link DataKey} if a document matches the key or null if no documents
+ * match the id. The promise rejects with an error if an error is thrown.
+ * @example
+ * // removing a key alt name from a data key
+ * const id = new Binary(); // id is a bson binary subtype 4 object
+ * const keyAltName = 'keyAltName';
+ * const oldKey = await clientEncryption.removeKeyAltName(id, keyAltName);
+ *
+ * if (!oldKey) {
+ * // null is returned if there is no matching document with an id matching the supplied id
+ * }
+ */
+ async removeKeyAltName(_id, keyAltName) {
+ const dbName = databaseNamespace(this._keyVaultNamespace);
+ const collectionName = collectionNamespace(this._keyVaultNamespace);
+ const pipeline = [
+ {
+ $set: {
+ keyAltNames: {
+ $cond: [
+ {
+ $eq: ['$keyAltNames', [keyAltName]]
+ },
+ '$$REMOVE',
+ {
+ $filter: {
+ input: '$keyAltNames',
+ cond: {
+ $ne: ['$$this', keyAltName]
+ }
+ }
+ }
+ ]
+ }
+ }
+ }
+ ];
+ const { value } = await this._keyVaultClient
+ .db(dbName)
+ .collection(collectionName)
+ .findOneAndUpdate({ _id }, pipeline, {
+ writeConcern: { w: 'majority' },
+ returnDocument: 'before'
+ });
+
+ return value;
+ }
+
+ /**
+ * A convenience method for creating an encrypted collection.
+ * This method will create data keys for any encryptedFields that do not have a `keyId` defined
+ * and then create a new collection with the full set of encryptedFields.
+ *
+ * @template TSchema - Schema for the collection being created
+ * @param {Db} db - A Node.js driver Db object with which to create the collection
+ * @param {string} name - The name of the collection to be created
+ * @param {object} options - Options for createDataKey and for createCollection
+ * @param {string} options.provider - KMS provider name
+ * @param {AWSEncryptionKeyOptions | AzureEncryptionKeyOptions | GCPEncryptionKeyOptions} [options.masterKey] - masterKey to pass to createDataKey
+ * @param {CreateCollectionOptions} options.createCollectionOptions - options to pass to createCollection, must include `encryptedFields`
+ * @returns {Promise<{ collection: Collection, encryptedFields: Document }>} - created collection and generated encryptedFields
+ * @throws {MongoCryptCreateDataKeyError} - If part way through the process a createDataKey invocation fails, an error will be rejected that has the partial `encryptedFields` that were created.
+ * @throws {MongoCryptCreateEncryptedCollectionError} - If creating the collection fails, an error will be rejected that has the entire `encryptedFields` that were created.
+ */
+ async createEncryptedCollection(db, name, options) {
+ const {
+ provider,
+ masterKey,
+ createCollectionOptions: {
+ encryptedFields: { ...encryptedFields },
+ ...createCollectionOptions
+ }
+ } = options;
+
+ if (Array.isArray(encryptedFields.fields)) {
+ const createDataKeyPromises = encryptedFields.fields.map(async field =>
+ field == null || typeof field !== 'object' || field.keyId != null
+ ? field
+ : {
+ ...field,
+ keyId: await this.createDataKey(provider, { masterKey })
+ }
+ );
+
+ const createDataKeyResolutions = await Promise.allSettled(createDataKeyPromises);
+
+ encryptedFields.fields = createDataKeyResolutions.map((resolution, index) =>
+ resolution.status === 'fulfilled' ? resolution.value : encryptedFields.fields[index]
+ );
+
+ const rejection = createDataKeyResolutions.find(({ status }) => status === 'rejected');
+ if (rejection != null) {
+ throw new MongoCryptCreateDataKeyError({ encryptedFields, cause: rejection.reason });
+ }
+ }
+
+ try {
+ const collection = await db.createCollection(name, {
+ ...createCollectionOptions,
+ encryptedFields
+ });
+ return { collection, encryptedFields };
+ } catch (cause) {
+ throw new MongoCryptCreateEncryptedCollectionError({ encryptedFields, cause });
+ }
+ }
+
+ /**
+ * @callback ClientEncryptionEncryptCallback
+ * @param {Error} [err] If present, indicates an error that occurred in the process of encryption
+ * @param {Buffer} [result] If present, is the encrypted result
+ */
+
+ /**
+ * @typedef {object} RangeOptions
+ * min, max, sparsity, and range must match the values set in the encryptedFields of the destination collection.
+ * For double and decimal128, min/max/precision must all be set, or all be unset.
+ * @property {BSONValue} min is required if precision is set.
+ * @property {BSONValue} max is required if precision is set.
+ * @property {BSON.Long} sparsity
+ * @property {number | undefined} precision (may only be set for double or decimal128).
+ */
+
+ /**
+ * @typedef {object} EncryptOptions Options to provide when encrypting data.
+ * @property {ClientEncryptionDataKeyId} [keyId] The id of the Binary dataKey to use for encryption.
+ * @property {string} [keyAltName] A unique string name corresponding to an already existing dataKey.
+ * @property {string} [algorithm] The algorithm to use for encryption. Must be either `'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'`, `'AEAD_AES_256_CBC_HMAC_SHA_512-Random'`, `'Indexed'` or `'Unindexed'`
+ * @property {bigint | number} [contentionFactor] - the contention factor.
+ * @property {'equality' | 'rangePreview'} [queryType] - the query type supported. only the query type `equality` is stable at this time. queryType `rangePreview` is experimental.
+ * @property {RangeOptions} [rangeOptions] (experimental) The index options for a Queryable Encryption field supporting "rangePreview" queries.
+ */
+
+ /**
+ * Explicitly encrypt a provided value. Note that either `options.keyId` or `options.keyAltName` must
+ * be specified. Specifying both `options.keyId` and `options.keyAltName` is considered an error.
+ *
+ * @param {*} value The value that you wish to serialize. Must be of a type that can be serialized into BSON
+ * @param {EncryptOptions} options
+ * @param {ClientEncryptionEncryptCallback} [callback] Optional callback to invoke when value is encrypted
+ * @returns {Promise|void} If no callback is provided, returns a Promise that either resolves with the encrypted value, or rejects with an error. If a callback is provided, returns nothing.
+ *
+ * @example
+ * // Encryption with callback API
+ * function encryptMyData(value, callback) {
+ * clientEncryption.createDataKey('local', (err, keyId) => {
+ * if (err) {
+ * return callback(err);
+ * }
+ * clientEncryption.encrypt(value, { keyId, algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' }, callback);
+ * });
+ * }
+ *
+ * @example
+ * // Encryption with async/await api
+ * async function encryptMyData(value) {
+ * const keyId = await clientEncryption.createDataKey('local');
+ * return clientEncryption.encrypt(value, { keyId, algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' });
+ * }
+ *
+ * @example
+ * // Encryption using a keyAltName
+ * async function encryptMyData(value) {
+ * await clientEncryption.createDataKey('local', { keyAltNames: 'mySpecialKey' });
+ * return clientEncryption.encrypt(value, { keyAltName: 'mySpecialKey', algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic' });
+ * }
+ */
+ encrypt(value, options, callback) {
+ return maybeCallback(() => this._encrypt(value, false, options), callback);
+ }
+
+ /**
+ * Encrypts a Match Expression or Aggregate Expression to query a range index.
+ *
+ * Only supported when queryType is "rangePreview" and algorithm is "RangePreview".
+ *
+ * @experimental The Range algorithm is experimental only. It is not intended for production use. It is subject to breaking changes.
+ *
+ * @param {object} expression a BSON document of one of the following forms:
+ * 1. A Match Expression of this form:
+ * `{$and: [{<field>: {$gt: <value1>}}, {<field>: {$lt: <value2>}}]}`
+ * 2. An Aggregate Expression of this form:
+ * `{$and: [{$gt: [<fieldpath>, <value1>]}, {$lt: [<fieldpath>, <value2>]}]}`
+ *
+ * `$gt` may also be `$gte`. `$lt` may also be `$lte`.
+ *
+ * @param {EncryptOptions} options
+ * @returns {Promise} Returns a Promise that either resolves with the encrypted value or rejects with an error.
+ */
+ async encryptExpression(expression, options) {
+ return this._encrypt(expression, true, options);
+ }
+
+ /**
+ * @callback ClientEncryption~decryptCallback
+ * @param {Error} [err] If present, indicates an error that occurred in the process of decryption
+ * @param {object} [result] If present, is the decrypted result
+ */
+
+ /**
+ * Explicitly decrypt a provided encrypted value
+ *
+ * @param {Buffer | Binary} value An encrypted value
+ * @param {ClientEncryption~decryptCallback} [callback] Optional callback to invoke when value is decrypted
+ * @returns {Promise|void} If no callback is provided, returns a Promise that either resolves with the decrypted value, or rejects with an error. If a callback is provided, returns nothing.
+ *
+ * @example
+ * // Decrypting value with callback API
+ * function decryptMyValue(value, callback) {
+ * clientEncryption.decrypt(value, callback);
+ * }
+ *
+ * @example
+ * // Decrypting value with async/await API
+ * async function decryptMyValue(value) {
+ * return clientEncryption.decrypt(value);
+ * }
+ */
+ decrypt(value, callback) {
+ const bson = this._bson;
+ const valueBuffer = bson.serialize({ v: value });
+ const context = this._mongoCrypt.makeExplicitDecryptionContext(valueBuffer);
+
+ const stateMachine = new StateMachine({
+ bson,
+ proxyOptions: this._proxyOptions,
+ tlsOptions: this._tlsOptions
+ });
+
+ return promiseOrCallback(callback, cb => {
+ stateMachine.execute(this, context, (err, result) => {
+ if (err) {
+ cb(err, null);
+ return;
+ }
+
+ cb(null, result.v);
+ });
+ });
+ }
+
+ /**
+ * Ask the user for KMS credentials.
+ *
+ * This returns anything that looks like the kmsProviders original input
+ * option. It can be empty, and any provider specified here will override
+ * the original ones.
+ */
+ async askForKMSCredentials() {
+ return this._onKmsProviderRefresh
+ ? this._onKmsProviderRefresh()
+ : loadCredentials(this._kmsProviders);
+ }
+
+ static get libmongocryptVersion() {
+ return mc.MongoCrypt.libmongocryptVersion;
+ }
+
+ /**
+ * A helper that perform explicit encryption of values and expressions.
+ * Explicitly encrypt a provided value. Note that either `options.keyId` or `options.keyAltName` must
+ * be specified. Specifying both `options.keyId` and `options.keyAltName` is considered an error.
+ *
+ * @param {*} value The value that you wish to encrypt. Must be of a type that can be serialized into BSON
+ * @param {boolean} expressionMode - a boolean that indicates whether or not to encrypt the value as an expression
+ * @param {EncryptOptions} options
+ * @returns the raw result of the call to stateMachine.execute(). When expressionMode is set to true, the return
+ * value will be a bson document. When false, the value will be a BSON Binary.
+ *
+ * @ignore
+ *
+ */
+ async _encrypt(value, expressionMode, options) {
+ const bson = this._bson;
+ const valueBuffer = bson.serialize({ v: value });
+ const contextOptions = Object.assign({}, options, { expressionMode });
+ if (options.keyId) {
+ contextOptions.keyId = options.keyId.buffer;
+ }
+ if (options.keyAltName) {
+ const keyAltName = options.keyAltName;
+ if (options.keyId) {
+ throw new TypeError(`"options" cannot contain both "keyId" and "keyAltName"`);
+ }
+ const keyAltNameType = typeof keyAltName;
+ if (keyAltNameType !== 'string') {
+ throw new TypeError(
+ `"options.keyAltName" must be of type string, but was of type ${keyAltNameType}`
+ );
+ }
+
+ contextOptions.keyAltName = bson.serialize({ keyAltName });
+ }
+
+ if ('rangeOptions' in options) {
+ contextOptions.rangeOptions = bson.serialize(options.rangeOptions);
+ }
+
+ const stateMachine = new StateMachine({
+ bson,
+ proxyOptions: this._proxyOptions,
+ tlsOptions: this._tlsOptions
+ });
+ const context = this._mongoCrypt.makeExplicitEncryptionContext(valueBuffer, contextOptions);
+
+ const result = await stateMachine.executeAsync(this, context);
+ return result.v;
+ }
+ }
+
+ return { ClientEncryption };
+};
diff --git a/encryption/lib/common.js b/encryption/lib/common.js
new file mode 100644
index 00000000000..468b8c05082
--- /dev/null
+++ b/encryption/lib/common.js
@@ -0,0 +1,98 @@
+'use strict';
+
+/**
+ * @ignore
+ * Helper function for logging. Enabled by setting the environment flag MONGODB_CRYPT_DEBUG.
+ * @param {*} msg Anything you want to be logged.
+ */
+function debug(msg) {
+ if (process.env.MONGODB_CRYPT_DEBUG) {
+ // eslint-disable-next-line no-console
+ console.error(msg);
+ }
+}
+
+/**
+ * @ignore
+ * Gets the database portion of a namespace string
+ * @param {string} ns A string in the format of a namespace (database.collection)
+ * @returns {string} The database portion of the namespace
+ */
+function databaseNamespace(ns) {
+ return ns.split('.')[0];
+}
+/**
+ * @ignore
+ * Gets the collection portion of a namespace string
+ * @param {string} ns A string in the format of a namespace (database.collection)
+ * @returns {string} The collection portion of the namespace
+ */
+function collectionNamespace(ns) {
+ return ns.split('.').slice(1).join('.');
+}
+
+function maybeCallback(promiseFn, callback) {
+ const promise = promiseFn();
+ if (callback == null) {
+ return promise;
+ }
+
+ promise.then(
+ result => process.nextTick(callback, undefined, result),
+ error => process.nextTick(callback, error)
+ );
+ return;
+}
+
+/**
+ * @ignore
+ * A helper function. Invokes a function that takes a callback as the final
+ * parameter. If a callback is supplied, then it is passed to the function.
+ * If not, a Promise is returned that resolves/rejects with the result of the
+ * callback
+ * @param {Function} [callback] an optional callback.
+ * @param {Function} fn A function that takes a callback
+ * @returns {Promise|void} Returns nothing if a callback is supplied, else returns a Promise.
+ */
+function promiseOrCallback(callback, fn) {
+ if (typeof callback === 'function') {
+ fn(function (err) {
+ if (err != null) {
+ try {
+ callback(err);
+ } catch (error) {
+ return process.nextTick(() => {
+ throw error;
+ });
+ }
+ return;
+ }
+
+ callback.apply(this, arguments);
+ });
+
+ return;
+ }
+
+ return new Promise((resolve, reject) => {
+ fn(function (err, res) {
+ if (err != null) {
+ return reject(err);
+ }
+
+ if (arguments.length > 2) {
+ return resolve(Array.prototype.slice.call(arguments, 1));
+ }
+
+ resolve(res);
+ });
+ });
+}
+
+module.exports = {
+ debug,
+ databaseNamespace,
+ collectionNamespace,
+ promiseOrCallback,
+ maybeCallback
+};
diff --git a/encryption/lib/cryptoCallbacks.js b/encryption/lib/cryptoCallbacks.js
new file mode 100644
index 00000000000..a036ff80821
--- /dev/null
+++ b/encryption/lib/cryptoCallbacks.js
@@ -0,0 +1,87 @@
+'use strict';
+const crypto = require('crypto');
+
+function makeAES256Hook(method, mode) {
+ return function (key, iv, input, output) {
+ let result;
+
+ try {
+ let cipher = crypto[method](mode, key, iv);
+ cipher.setAutoPadding(false);
+ result = cipher.update(input);
+ const final = cipher.final();
+ if (final.length > 0) {
+ result = Buffer.concat([result, final]);
+ }
+ } catch (e) {
+ return e;
+ }
+
+ result.copy(output);
+ return result.length;
+ };
+}
+
+function randomHook(buffer, count) {
+ try {
+ crypto.randomFillSync(buffer, 0, count);
+ } catch (e) {
+ return e;
+ }
+ return count;
+}
+
+function sha256Hook(input, output) {
+ let result;
+ try {
+ result = crypto.createHash('sha256').update(input).digest();
+ } catch (e) {
+ return e;
+ }
+
+ result.copy(output);
+ return result.length;
+}
+
+function makeHmacHook(algorithm) {
+ return (key, input, output) => {
+ let result;
+ try {
+ result = crypto.createHmac(algorithm, key).update(input).digest();
+ } catch (e) {
+ return e;
+ }
+
+ result.copy(output);
+ return result.length;
+ };
+}
+
+function signRsaSha256Hook(key, input, output) {
+ let result;
+ try {
+ const signer = crypto.createSign('sha256WithRSAEncryption');
+ const privateKey = Buffer.from(
+ `-----BEGIN PRIVATE KEY-----\n${key.toString('base64')}\n-----END PRIVATE KEY-----\n`
+ );
+
+ result = signer.update(input).end().sign(privateKey);
+ } catch (e) {
+ return e;
+ }
+
+ result.copy(output);
+ return result.length;
+}
+
+module.exports = {
+ aes256CbcEncryptHook: makeAES256Hook('createCipheriv', 'aes-256-cbc'),
+ aes256CbcDecryptHook: makeAES256Hook('createDecipheriv', 'aes-256-cbc'),
+ aes256CtrEncryptHook: makeAES256Hook('createCipheriv', 'aes-256-ctr'),
+ aes256CtrDecryptHook: makeAES256Hook('createDecipheriv', 'aes-256-ctr'),
+ randomHook,
+ hmacSha512Hook: makeHmacHook('sha512'),
+ hmacSha256Hook: makeHmacHook('sha256'),
+ sha256Hook,
+ signRsaSha256Hook
+};
diff --git a/encryption/lib/errors.js b/encryption/lib/errors.js
new file mode 100644
index 00000000000..577930aae59
--- /dev/null
+++ b/encryption/lib/errors.js
@@ -0,0 +1,73 @@
+'use strict';
+
+/**
+ * @class
+ * An error indicating that something went wrong specifically with MongoDB Client Encryption
+ */
+class MongoCryptError extends Error {
+ constructor(message, options = {}) {
+ super(message);
+ if (options.cause != null) {
+ this.cause = options.cause;
+ }
+ }
+
+ get name() {
+ return 'MongoCryptError';
+ }
+}
+
+/**
+ * @class
+ * An error indicating that `ClientEncryption.createEncryptedCollection()` failed to create data keys
+ */
+class MongoCryptCreateDataKeyError extends MongoCryptError {
+ constructor({ encryptedFields, cause }) {
+ super(`Unable to complete creating data keys: ${cause.message}`, { cause });
+ this.encryptedFields = encryptedFields;
+ }
+
+ get name() {
+ return 'MongoCryptCreateDataKeyError';
+ }
+}
+
+/**
+ * @class
+ * An error indicating that `ClientEncryption.createEncryptedCollection()` failed to create a collection
+ */
+class MongoCryptCreateEncryptedCollectionError extends MongoCryptError {
+ constructor({ encryptedFields, cause }) {
+ super(`Unable to create collection: ${cause.message}`, { cause });
+ this.encryptedFields = encryptedFields;
+ }
+
+ get name() {
+ return 'MongoCryptCreateEncryptedCollectionError';
+ }
+}
+
+/**
+ * @class
+ * An error indicating that mongodb-client-encryption failed to auto-refresh Azure KMS credentials.
+ */
+class MongoCryptAzureKMSRequestError extends MongoCryptError {
+ /**
+ * @param {string} message
+ * @param {object | undefined} body
+ */
+ constructor(message, body) {
+ super(message);
+ this.body = body;
+ }
+}
+
+class MongoCryptKMSRequestNetworkTimeoutError extends MongoCryptError {}
+
+module.exports = {
+ MongoCryptError,
+ MongoCryptKMSRequestNetworkTimeoutError,
+ MongoCryptAzureKMSRequestError,
+ MongoCryptCreateDataKeyError,
+ MongoCryptCreateEncryptedCollectionError
+};
diff --git a/encryption/lib/index.js b/encryption/lib/index.js
new file mode 100644
index 00000000000..b2f760b113f
--- /dev/null
+++ b/encryption/lib/index.js
@@ -0,0 +1,73 @@
+'use strict';
+
+let defaultModule;
+function loadDefaultModule() {
+ if (!defaultModule) {
+ defaultModule = extension(require('mongodb'));
+ }
+
+ return defaultModule;
+}
+
+const {
+ MongoCryptError,
+ MongoCryptCreateEncryptedCollectionError,
+ MongoCryptCreateDataKeyError,
+ MongoCryptAzureKMSRequestError,
+ MongoCryptKMSRequestNetworkTimeoutError
+} = require('./errors');
+
+const { fetchAzureKMSToken } = require('./providers/index');
+
+function extension(mongodb) {
+ const modules = { mongodb };
+
+ modules.stateMachine = require('./stateMachine')(modules);
+ modules.autoEncrypter = require('./autoEncrypter')(modules);
+ modules.clientEncryption = require('./clientEncryption')(modules);
+
+ const exports = {
+ AutoEncrypter: modules.autoEncrypter.AutoEncrypter,
+ ClientEncryption: modules.clientEncryption.ClientEncryption,
+ MongoCryptError,
+ MongoCryptCreateEncryptedCollectionError,
+ MongoCryptCreateDataKeyError,
+ MongoCryptAzureKMSRequestError,
+ MongoCryptKMSRequestNetworkTimeoutError
+ };
+
+ Object.defineProperty(exports, '___azureKMSProseTestExports', {
+ enumerable: false,
+ configurable: false,
+ value: fetchAzureKMSToken
+ });
+
+ return exports;
+}
+
+module.exports = {
+ extension,
+ MongoCryptError,
+ MongoCryptCreateEncryptedCollectionError,
+ MongoCryptCreateDataKeyError,
+ MongoCryptAzureKMSRequestError,
+ MongoCryptKMSRequestNetworkTimeoutError,
+ get AutoEncrypter() {
+ const m = loadDefaultModule();
+ delete module.exports.AutoEncrypter;
+ module.exports.AutoEncrypter = m.AutoEncrypter;
+ return m.AutoEncrypter;
+ },
+ get ClientEncryption() {
+ const m = loadDefaultModule();
+ delete module.exports.ClientEncryption;
+ module.exports.ClientEncryption = m.ClientEncryption;
+ return m.ClientEncryption;
+ }
+};
+
+Object.defineProperty(module.exports, '___azureKMSProseTestExports', {
+ enumerable: false,
+ configurable: false,
+ value: fetchAzureKMSToken
+});
diff --git a/encryption/lib/mongocryptdManager.js b/encryption/lib/mongocryptdManager.js
new file mode 100644
index 00000000000..076c584c9b7
--- /dev/null
+++ b/encryption/lib/mongocryptdManager.js
@@ -0,0 +1,66 @@
+'use strict';
+
+const spawn = require('child_process').spawn;
+
+/**
+ * @ignore
+ * An internal class that handles spawning a mongocryptd.
+ */
+class MongocryptdManager {
+ /**
+ * @ignore
+ * Creates a new Mongocryptd Manager
+ * @param {AutoEncrypter~AutoEncryptionExtraOptions} [extraOptions] extra options that determine how/when to spawn a mongocryptd
+ */
+ constructor(extraOptions) {
+ extraOptions = extraOptions || {};
+
+ this.uri =
+ typeof extraOptions.mongocryptdURI === 'string' && extraOptions.mongocryptdURI.length > 0
+ ? extraOptions.mongocryptdURI
+ : MongocryptdManager.DEFAULT_MONGOCRYPTD_URI;
+
+ this.bypassSpawn = !!extraOptions.mongocryptdBypassSpawn;
+
+ this.spawnPath = extraOptions.mongocryptdSpawnPath || '';
+ this.spawnArgs = [];
+ if (Array.isArray(extraOptions.mongocryptdSpawnArgs)) {
+ this.spawnArgs = this.spawnArgs.concat(extraOptions.mongocryptdSpawnArgs);
+ }
+ if (
+ this.spawnArgs
+ .filter(arg => typeof arg === 'string')
+ .every(arg => arg.indexOf('--idleShutdownTimeoutSecs') < 0)
+ ) {
+ this.spawnArgs.push('--idleShutdownTimeoutSecs', 60);
+ }
+ }
+
+ /**
+ * @ignore
+ * Will check to see if a mongocryptd is up. If it is not up, it will attempt
+ * to spawn a mongocryptd in a detached process, and then wait for it to be up.
+ * @param {Function} callback Invoked when we think a mongocryptd is up
+ */
+ spawn(callback) {
+ const cmdName = this.spawnPath || 'mongocryptd';
+
+ // Spawned with stdio: ignore and detached:true
+ // to ensure child can outlive parent.
+ this._child = spawn(cmdName, this.spawnArgs, {
+ stdio: 'ignore',
+ detached: true
+ });
+
+ this._child.on('error', () => {});
+
+ // unref child to remove handle from event loop
+ this._child.unref();
+
+ process.nextTick(callback);
+ }
+}
+
+MongocryptdManager.DEFAULT_MONGOCRYPTD_URI = 'mongodb://localhost:27020';
+
+module.exports = { MongocryptdManager };
diff --git a/encryption/lib/providers/aws.js b/encryption/lib/providers/aws.js
new file mode 100644
index 00000000000..b71c2b1d96d
--- /dev/null
+++ b/encryption/lib/providers/aws.js
@@ -0,0 +1,26 @@
+'use strict';
+
+let awsCredentialProviders = null;
+/** @ignore */
+async function loadAWSCredentials(kmsProviders) {
+ if (awsCredentialProviders == null) {
+ try {
+ // Ensure you always wrap an optional require in the try block NODE-3199
+ awsCredentialProviders = require('@aws-sdk/credential-providers');
+ // eslint-disable-next-line no-empty
+ } catch {}
+ }
+
+ if (awsCredentialProviders != null) {
+ const { fromNodeProviderChain } = awsCredentialProviders;
+ const provider = fromNodeProviderChain();
+ // The state machine is the only place calling this so it will
+ // catch if there is a rejection here.
+ const aws = await provider();
+ return { ...kmsProviders, aws };
+ }
+
+ return kmsProviders;
+}
+
+module.exports = { loadAWSCredentials };
diff --git a/encryption/lib/providers/azure.js b/encryption/lib/providers/azure.js
new file mode 100644
index 00000000000..bd5225643a8
--- /dev/null
+++ b/encryption/lib/providers/azure.js
@@ -0,0 +1,178 @@
+'use strict';
+
+const {
+ MongoCryptAzureKMSRequestError,
+ MongoCryptKMSRequestNetworkTimeoutError
+} = require('../errors');
+const utils = require('./utils');
+
+const MINIMUM_TOKEN_REFRESH_IN_MILLISECONDS = 6000;
+
+/**
+ * @class
+ * @ignore
+ */
+class AzureCredentialCache {
+ constructor() {
+ /**
+ * @type { { accessToken: string, expiresOnTimestamp: number } | null}
+ */
+ this.cachedToken = null;
+ }
+
+ async getToken() {
+ if (this.needsRefresh(this.cachedToken)) {
+ this.cachedToken = await this._getToken();
+ }
+
+ return { accessToken: this.cachedToken.accessToken };
+ }
+
+ needsRefresh(token) {
+ if (token == null) {
+ return true;
+ }
+ const timeUntilExpirationMS = token.expiresOnTimestamp - Date.now();
+ return timeUntilExpirationMS <= MINIMUM_TOKEN_REFRESH_IN_MILLISECONDS;
+ }
+
+ /**
+ * exposed for testing
+ * @ignore
+ */
+ resetCache() {
+ this.cachedToken = null;
+ }
+
+ /**
+ * exposed for testing
+ * @ignore
+ */
+ _getToken() {
+ return fetchAzureKMSToken();
+ }
+}
+/**
+ * @type{ AzureCredentialCache }
+ * @ignore
+ */
+let tokenCache = new AzureCredentialCache();
+
+/**
+ * @typedef {object} KmsRequestResponsePayload
+ * @property {string | undefined} access_token
+ * @property {string | undefined} expires_in
+ *
+ * @ignore
+ */
+
+/**
+ * @param { {body: string, status: number }} response
+ * @returns { Promise<{ accessToken: string, expiresOnTimestamp: number } >}
+ * @ignore
+ */
+async function parseResponse(response) {
+ const { status, body: rawBody } = response;
+
+ /**
+ * @type { KmsRequestResponsePayload }
+ */
+ const body = (() => {
+ try {
+ return JSON.parse(rawBody);
+ } catch {
+ throw new MongoCryptAzureKMSRequestError('Malformed JSON body in GET request.');
+ }
+ })();
+
+ if (status !== 200) {
+ throw new MongoCryptAzureKMSRequestError('Unable to complete request.', body);
+ }
+
+ if (!body.access_token) {
+ throw new MongoCryptAzureKMSRequestError(
+ 'Malformed response body - missing field `access_token`.'
+ );
+ }
+
+ if (!body.expires_in) {
+ throw new MongoCryptAzureKMSRequestError(
+ 'Malformed response body - missing field `expires_in`.'
+ );
+ }
+
+ const expiresInMS = Number(body.expires_in) * 1000;
+ if (Number.isNaN(expiresInMS)) {
+ throw new MongoCryptAzureKMSRequestError(
+ 'Malformed response body - unable to parse int from `expires_in` field.'
+ );
+ }
+
+ return {
+ accessToken: body.access_token,
+ expiresOnTimestamp: Date.now() + expiresInMS
+ };
+}
+
+/**
+ * @param {object} options
+ * @param {object | undefined} [options.headers]
+ * @param {URL | undefined} [options.url]
+ *
+ * @ignore
+ */
+function prepareRequest(options) {
+ const url =
+ options.url == null
+ ? new URL('http://169.254.169.254/metadata/identity/oauth2/token')
+ : new URL(options.url);
+
+ url.searchParams.append('api-version', '2018-02-01');
+ url.searchParams.append('resource', 'https://vault.azure.net');
+
+ const headers = { ...options.headers, 'Content-Type': 'application/json', Metadata: true };
+ return { headers, url };
+}
+
+/**
+ * @typedef {object} AzureKMSRequestOptions
+ * @property {object | undefined} headers
+ * @property {URL | undefined} url
+ * @ignore
+ */
+
+/**
+ * @typedef {object} AzureKMSRequestResponse
+ * @property {string} accessToken
+ * @property {number} expiresOnTimestamp
+ * @ignore
+ */
+
+/**
+ * exported only for testing purposes in the driver
+ *
+ * @param {AzureKMSRequestOptions} options
+ * @returns {Promise}
+ *
+ * @ignore
+ */
+async function fetchAzureKMSToken(options = {}) {
+ const { headers, url } = prepareRequest(options);
+ const response = await utils.get(url, { headers }).catch(error => {
+ if (error instanceof MongoCryptKMSRequestNetworkTimeoutError) {
+ throw new MongoCryptAzureKMSRequestError(`[Azure KMS] ${error.message}`);
+ }
+ throw error;
+ });
+ return parseResponse(response);
+}
+
+/**
+ * @ignore
+ */
+async function loadAzureCredentials(kmsProviders) {
+ const azure = await tokenCache.getToken();
+ return { ...kmsProviders, azure };
+}
+
+module.exports = { loadAzureCredentials, AzureCredentialCache, fetchAzureKMSToken, tokenCache };
diff --git a/encryption/lib/providers/gcp.js b/encryption/lib/providers/gcp.js
new file mode 100644
index 00000000000..01edcfdd147
--- /dev/null
+++ b/encryption/lib/providers/gcp.js
@@ -0,0 +1,24 @@
+'use strict';
+
// Lazily-loaded optional dependency; stays null when `gcp-metadata` is absent.
let gcpMetadata = null;

/**
 * Fetches a GCP access token from the metadata server when the optional
 * `gcp-metadata` package is installed; otherwise returns the providers
 * unchanged.
 *
 * @ignore
 */
async function loadGCPCredentials(kmsProviders) {
  if (gcpMetadata == null) {
    try {
      // Ensure you always wrap an optional require in the try block NODE-3199
      gcpMetadata = require('gcp-metadata');
      // eslint-disable-next-line no-empty
    } catch {}
  }

  // Without the optional dependency there is nothing to fetch.
  if (gcpMetadata == null) {
    return kmsProviders;
  }

  const tokenResponse = await gcpMetadata.instance({
    property: 'service-accounts/default/token'
  });
  return { ...kmsProviders, gcp: { accessToken: tokenResponse.access_token } };
}
+
+module.exports = { loadGCPCredentials };
diff --git a/encryption/lib/providers/index.js b/encryption/lib/providers/index.js
new file mode 100644
index 00000000000..33847a57b5e
--- /dev/null
+++ b/encryption/lib/providers/index.js
@@ -0,0 +1,54 @@
+'use strict';
+
+const { loadAWSCredentials } = require('./aws');
+const { loadAzureCredentials, fetchAzureKMSToken } = require('./azure');
+const { loadGCPCredentials } = require('./gcp');
+
/**
 * Auto credential fetching should only occur when the provider is defined on the kmsProviders map
 * and the settings are an empty object.
 *
 * This is distinct from a nullish provider key.
 *
 * @param {'aws' | 'gcp' | 'azure'} provider
 * @param {object} kmsProviders
 *
 * @ignore
 */
function isEmptyCredentials(provider, kmsProviders) {
  if (!(provider in kmsProviders)) {
    return false;
  }
  const settings = kmsProviders[provider];
  if (settings == null || typeof settings !== 'object') {
    return false;
  }
  return Object.keys(settings).length === 0;
}
+
/**
 * Load cloud provider credentials for the user provided KMS providers.
 * Credentials will only attempt to get loaded if they do not exist
 * and no existing credentials will get overwritten.
 *
 * @param {object} kmsProviders - The user provided KMS providers.
 * @returns {object} The new kms providers.
 *
 * @ignore
 */
async function loadCredentials(kmsProviders) {
  // Eligibility is decided against the ORIGINAL map, while each loader's
  // result is threaded into the next so earlier additions are preserved.
  const loaders = [
    ['aws', loadAWSCredentials],
    ['gcp', loadGCPCredentials],
    ['azure', loadAzureCredentials]
  ];

  let finalKMSProviders = kmsProviders;
  for (const [provider, loader] of loaders) {
    if (isEmptyCredentials(provider, kmsProviders)) {
      finalKMSProviders = await loader(finalKMSProviders);
    }
  }
  return finalKMSProviders;
}
+
+module.exports = { loadCredentials, isEmptyCredentials, fetchAzureKMSToken };
diff --git a/encryption/lib/providers/utils.js b/encryption/lib/providers/utils.js
new file mode 100644
index 00000000000..844c1369690
--- /dev/null
+++ b/encryption/lib/providers/utils.js
@@ -0,0 +1,39 @@
+'use strict';
+
+const { MongoCryptKMSRequestNetworkTimeoutError } = require('../errors');
+const http = require('http');
+
/**
 * Performs an HTTP GET and buffers the full response body as UTF-8 text.
 * Destroys the request with a timeout error if it takes longer than 10s.
 *
 * @param {URL | string} url
 * @param {http.RequestOptions} options
 *
 * @returns { Promise<{ body: string, status: number }> }
 * @ignore
 */
function get(url, options = {}) {
  return new Promise((resolve, reject) => {
    let timeoutId;

    const onResponse = response => {
      response.setEncoding('utf8');
      const chunks = [];
      response.on('data', chunk => chunks.push(chunk));
      response.on('end', () => {
        clearTimeout(timeoutId);
        resolve({ status: response.statusCode, body: chunks.join('') });
      });
    };

    const request = http
      .get(url, options, onResponse)
      .on('error', error => {
        clearTimeout(timeoutId);
        reject(error);
      })
      .end();

    // Abort the request if no complete response arrives within 10 seconds;
    // destroying with an error makes the 'error' handler reject the promise.
    timeoutId = setTimeout(() => {
      request.destroy(
        new MongoCryptKMSRequestNetworkTimeoutError(`request timed out after 10 seconds`)
      );
    }, 10000);
  });
}
+
+module.exports = { get };
diff --git a/encryption/lib/stateMachine.js b/encryption/lib/stateMachine.js
new file mode 100644
index 00000000000..30215c3ecf2
--- /dev/null
+++ b/encryption/lib/stateMachine.js
@@ -0,0 +1,492 @@
+'use strict';
+
+const { promisify } = require('util');
+
+module.exports = function (modules) {
+ const tls = require('tls');
+ const net = require('net');
+ const fs = require('fs');
+ const { once } = require('events');
+ const { SocksClient } = require('socks');
+
+ // Try first to import 4.x name, fallback to 3.x name
+ const MongoNetworkTimeoutError =
+ modules.mongodb.MongoNetworkTimeoutError || modules.mongodb.MongoTimeoutError;
+
+ const common = require('./common');
+ const debug = common.debug;
+ const databaseNamespace = common.databaseNamespace;
+ const collectionNamespace = common.collectionNamespace;
+ const { MongoCryptError } = require('./errors');
+ const { BufferPool } = require('./buffer_pool');
+
+ // libmongocrypt states
+ const MONGOCRYPT_CTX_ERROR = 0;
+ const MONGOCRYPT_CTX_NEED_MONGO_COLLINFO = 1;
+ const MONGOCRYPT_CTX_NEED_MONGO_MARKINGS = 2;
+ const MONGOCRYPT_CTX_NEED_MONGO_KEYS = 3;
+ const MONGOCRYPT_CTX_NEED_KMS_CREDENTIALS = 7;
+ const MONGOCRYPT_CTX_NEED_KMS = 4;
+ const MONGOCRYPT_CTX_READY = 5;
+ const MONGOCRYPT_CTX_DONE = 6;
+
+ const HTTPS_PORT = 443;
+
+ const stateToString = new Map([
+ [MONGOCRYPT_CTX_ERROR, 'MONGOCRYPT_CTX_ERROR'],
+ [MONGOCRYPT_CTX_NEED_MONGO_COLLINFO, 'MONGOCRYPT_CTX_NEED_MONGO_COLLINFO'],
+ [MONGOCRYPT_CTX_NEED_MONGO_MARKINGS, 'MONGOCRYPT_CTX_NEED_MONGO_MARKINGS'],
+ [MONGOCRYPT_CTX_NEED_MONGO_KEYS, 'MONGOCRYPT_CTX_NEED_MONGO_KEYS'],
+ [MONGOCRYPT_CTX_NEED_KMS_CREDENTIALS, 'MONGOCRYPT_CTX_NEED_KMS_CREDENTIALS'],
+ [MONGOCRYPT_CTX_NEED_KMS, 'MONGOCRYPT_CTX_NEED_KMS'],
+ [MONGOCRYPT_CTX_READY, 'MONGOCRYPT_CTX_READY'],
+ [MONGOCRYPT_CTX_DONE, 'MONGOCRYPT_CTX_DONE']
+ ]);
+
+ const INSECURE_TLS_OPTIONS = [
+ 'tlsInsecure',
+ 'tlsAllowInvalidCertificates',
+ 'tlsAllowInvalidHostnames',
+ 'tlsDisableOCSPEndpointCheck',
+ 'tlsDisableCertificateRevocationCheck'
+ ];
+
+ /**
+ * @ignore
+ * @callback StateMachine~executeCallback
+ * @param {Error} [err] If present, indicates that the execute call failed with the given error
+ * @param {object} [result] If present, is the result of executing the state machine.
+ * @returns {void}
+ */
+
+ /**
+ * @ignore
+ * @callback StateMachine~fetchCollectionInfoCallback
+ * @param {Error} [err] If present, indicates that fetching the collection info failed with the given error
+ * @param {object} [result] If present, is the fetched collection info for the first collection to match the given filter
+ * @returns {void}
+ */
+
+ /**
+ * @ignore
+ * @callback StateMachine~markCommandCallback
+ * @param {Error} [err] If present, indicates that marking the command failed with the given error
+ * @param {Buffer} [result] If present, is the marked command serialized into bson
+ * @returns {void}
+ */
+
+ /**
+ * @ignore
+ * @callback StateMachine~fetchKeysCallback
+ * @param {Error} [err] If present, indicates that fetching the keys failed with the given error
+ * @param {object[]} [result] If present, is all the keys from the keyVault collection that matched the given filter
+ */
+
+ /**
+ * @ignore
+ * An internal class that executes across a MongoCryptContext until either
+ * a finishing state or an error is reached. Do not instantiate directly.
+ * @class StateMachine
+ */
+ class StateMachine {
+ constructor(options) {
+ this.options = options || {};
+ this.bson = options.bson;
+
+ this.executeAsync = promisify((autoEncrypter, context, callback) =>
+ this.execute(autoEncrypter, context, callback)
+ );
+ }
+
    /**
     * @ignore
     * Executes the state machine according to the specification
     * @param {AutoEncrypter|ClientEncryption} autoEncrypter The JS encryption object
     * @param {object} context The C++ context object returned from the bindings
     * @param {StateMachine~executeCallback} callback Invoked with the result/error of executing the state machine
     * @returns {void}
     */
    execute(autoEncrypter, context, callback) {
      const bson = this.bson;
      const keyVaultNamespace = autoEncrypter._keyVaultNamespace;
      const keyVaultClient = autoEncrypter._keyVaultClient;
      const metaDataClient = autoEncrypter._metaDataClient;
      const mongocryptdClient = autoEncrypter._mongocryptdClient;
      const mongocryptdManager = autoEncrypter._mongocryptdManager;

      debug(`[context#${context.id}] ${stateToString.get(context.state) || context.state}`);
      switch (context.state) {
        // libmongocrypt wants collection metadata: run listCollections, feed
        // the first matching document back in, then re-enter the machine.
        case MONGOCRYPT_CTX_NEED_MONGO_COLLINFO: {
          const filter = bson.deserialize(context.nextMongoOperation());
          this.fetchCollectionInfo(metaDataClient, context.ns, filter, (err, collInfo) => {
            if (err) {
              return callback(err, null);
            }

            // A missing collection is not an error; libmongocrypt simply
            // receives no response document.
            if (collInfo) {
              context.addMongoOperationResponse(collInfo);
            }

            context.finishMongoOperation();
            this.execute(autoEncrypter, context, callback);
          });

          return;
        }

        // libmongocrypt wants the command marked for encryption by mongocryptd.
        case MONGOCRYPT_CTX_NEED_MONGO_MARKINGS: {
          const command = context.nextMongoOperation();
          this.markCommand(mongocryptdClient, context.ns, command, (err, markedCommand) => {
            if (err) {
              // If we are not bypassing spawning, then we should retry once on a MongoTimeoutError (server selection error)
              if (
                err instanceof MongoNetworkTimeoutError &&
                mongocryptdManager &&
                !mongocryptdManager.bypassSpawn
              ) {
                mongocryptdManager.spawn(() => {
                  // TODO: should we be shadowing the variables here?
                  this.markCommand(mongocryptdClient, context.ns, command, (err, markedCommand) => {
                    if (err) return callback(err, null);

                    context.addMongoOperationResponse(markedCommand);
                    context.finishMongoOperation();

                    this.execute(autoEncrypter, context, callback);
                  });
                });
                return;
              }
              return callback(err, null);
            }
            context.addMongoOperationResponse(markedCommand);
            context.finishMongoOperation();

            this.execute(autoEncrypter, context, callback);
          });

          return;
        }

        // libmongocrypt wants data keys fetched from the key vault collection.
        case MONGOCRYPT_CTX_NEED_MONGO_KEYS: {
          const filter = context.nextMongoOperation();
          this.fetchKeys(keyVaultClient, keyVaultNamespace, filter, (err, keys) => {
            if (err) return callback(err, null);
            keys.forEach(key => {
              context.addMongoOperationResponse(bson.serialize(key));
            });

            context.finishMongoOperation();
            this.execute(autoEncrypter, context, callback);
          });

          return;
        }

        // libmongocrypt wants on-demand KMS credentials (e.g. from the
        // environment or an onKmsProviderRefresh hook).
        case MONGOCRYPT_CTX_NEED_KMS_CREDENTIALS: {
          autoEncrypter
            .askForKMSCredentials()
            .then(kmsProviders => {
              context.provideKMSProviders(
                !Buffer.isBuffer(kmsProviders) ? bson.serialize(kmsProviders) : kmsProviders
              );
              this.execute(autoEncrypter, context, callback);
            })
            .catch(err => {
              callback(err, null);
            });

          return;
        }

        // libmongocrypt has one or more KMS requests to perform over TLS;
        // run them all in parallel before re-entering the machine.
        case MONGOCRYPT_CTX_NEED_KMS: {
          const promises = [];

          let request;
          while ((request = context.nextKMSRequest())) {
            promises.push(this.kmsRequest(request));
          }

          Promise.all(promises)
            .then(() => {
              context.finishKMSRequests();
              this.execute(autoEncrypter, context, callback);
            })
            .catch(err => {
              callback(err, null);
            });

          return;
        }

        // terminal states
        case MONGOCRYPT_CTX_READY: {
          const finalizedContext = context.finalize();
          // TODO: Maybe rework the logic here so that instead of doing
          // the callback here, finalize stores the result, and then
          // we wait to MONGOCRYPT_CTX_DONE to do the callback
          if (context.state === MONGOCRYPT_CTX_ERROR) {
            const message = context.status.message || 'Finalization error';
            callback(new MongoCryptError(message));
            return;
          }
          callback(null, bson.deserialize(finalizedContext, this.options));
          return;
        }
        case MONGOCRYPT_CTX_ERROR: {
          const message = context.status.message;
          callback(new MongoCryptError(message));
          return;
        }

        case MONGOCRYPT_CTX_DONE:
          callback();
          return;

        default:
          callback(new MongoCryptError(`Unknown state: ${context.state}`));
          return;
      }
    }
+
+ /**
+ * @ignore
+ * Handles the request to the KMS service. Exposed for testing purposes. Do not directly invoke.
+ * @param {*} kmsContext A C++ KMS context returned from the bindings
+ * @returns {Promise} A promise that resolves when the KMS reply has be fully parsed
+ */
+ kmsRequest(request) {
+ const parsedUrl = request.endpoint.split(':');
+ const port = parsedUrl[1] != null ? Number.parseInt(parsedUrl[1], 10) : HTTPS_PORT;
+ const options = { host: parsedUrl[0], servername: parsedUrl[0], port };
+ const message = request.message;
+
+ // TODO(NODE-3959): We can adopt `for-await on(socket, 'data')` with logic to control abort
+ // eslint-disable-next-line no-async-promise-executor
+ return new Promise(async (resolve, reject) => {
+ const buffer = new BufferPool();
+
+ let socket;
+ let rawSocket;
+
+ function destroySockets() {
+ for (const sock of [socket, rawSocket]) {
+ if (sock) {
+ sock.removeAllListeners();
+ sock.destroy();
+ }
+ }
+ }
+
+ function ontimeout() {
+ destroySockets();
+ reject(new MongoCryptError('KMS request timed out'));
+ }
+
+ function onerror(err) {
+ destroySockets();
+ const mcError = new MongoCryptError('KMS request failed');
+ mcError.originalError = err;
+ reject(mcError);
+ }
+
+ if (this.options.proxyOptions && this.options.proxyOptions.proxyHost) {
+ rawSocket = net.connect({
+ host: this.options.proxyOptions.proxyHost,
+ port: this.options.proxyOptions.proxyPort || 1080
+ });
+
+ rawSocket.on('timeout', ontimeout);
+ rawSocket.on('error', onerror);
+ try {
+ await once(rawSocket, 'connect');
+ options.socket = (
+ await SocksClient.createConnection({
+ existing_socket: rawSocket,
+ command: 'connect',
+ destination: { host: options.host, port: options.port },
+ proxy: {
+ // host and port are ignored because we pass existing_socket
+ host: 'iLoveJavaScript',
+ port: 0,
+ type: 5,
+ userId: this.options.proxyOptions.proxyUsername,
+ password: this.options.proxyOptions.proxyPassword
+ }
+ })
+ ).socket;
+ } catch (err) {
+ return onerror(err);
+ }
+ }
+
+ const tlsOptions = this.options.tlsOptions;
+ if (tlsOptions) {
+ const kmsProvider = request.kmsProvider;
+ const providerTlsOptions = tlsOptions[kmsProvider];
+ if (providerTlsOptions) {
+ const error = this.validateTlsOptions(kmsProvider, providerTlsOptions);
+ if (error) reject(error);
+ this.setTlsOptions(providerTlsOptions, options);
+ }
+ }
+ socket = tls.connect(options, () => {
+ socket.write(message);
+ });
+
+ socket.once('timeout', ontimeout);
+ socket.once('error', onerror);
+
+ socket.on('data', data => {
+ buffer.append(data);
+ while (request.bytesNeeded > 0 && buffer.length) {
+ const bytesNeeded = Math.min(request.bytesNeeded, buffer.length);
+ request.addResponse(buffer.read(bytesNeeded));
+ }
+
+ if (request.bytesNeeded <= 0) {
+ // There's no need for any more activity on this socket at this point.
+ destroySockets();
+ resolve();
+ }
+ });
+ });
+ }
+
+ /**
+ * @ignore
+ * Validates the provided TLS options are secure.
+ *
+ * @param {string} kmsProvider The KMS provider name.
+ * @param {ClientEncryptionTLSOptions} tlsOptions The client TLS options for the provider.
+ *
+ * @returns {Error} If any option is invalid.
+ */
+ validateTlsOptions(kmsProvider, tlsOptions) {
+ const tlsOptionNames = Object.keys(tlsOptions);
+ for (const option of INSECURE_TLS_OPTIONS) {
+ if (tlsOptionNames.includes(option)) {
+ return new MongoCryptError(
+ `Insecure TLS options prohibited for ${kmsProvider}: ${option}`
+ );
+ }
+ }
+ }
+
+ /**
+ * @ignore
+ * Sets only the valid secure TLS options.
+ *
+ * @param {ClientEncryptionTLSOptions} tlsOptions The client TLS options for the provider.
+ * @param {Object} options The existing connection options.
+ */
+ setTlsOptions(tlsOptions, options) {
+ if (tlsOptions.tlsCertificateKeyFile) {
+ const cert = fs.readFileSync(tlsOptions.tlsCertificateKeyFile);
+ options.cert = options.key = cert;
+ }
+ if (tlsOptions.tlsCAFile) {
+ options.ca = fs.readFileSync(tlsOptions.tlsCAFile);
+ }
+ if (tlsOptions.tlsCertificateKeyFilePassword) {
+ options.passphrase = tlsOptions.tlsCertificateKeyFilePassword;
+ }
+ }
+
+ /**
+ * @ignore
+ * Fetches collection info for a provided namespace, when libmongocrypt
+ * enters the `MONGOCRYPT_CTX_NEED_MONGO_COLLINFO` state. The result is
+ * used to inform libmongocrypt of the schema associated with this
+ * namespace. Exposed for testing purposes. Do not directly invoke.
+ *
+ * @param {MongoClient} client A MongoClient connected to the topology
+ * @param {string} ns The namespace to list collections from
+ * @param {object} filter A filter for the listCollections command
+ * @param {StateMachine~fetchCollectionInfoCallback} callback Invoked with the info of the requested collection, or with an error
+ */
+ fetchCollectionInfo(client, ns, filter, callback) {
+ const bson = this.bson;
+ const dbName = databaseNamespace(ns);
+
+ client
+ .db(dbName)
+ .listCollections(filter, {
+ promoteLongs: false,
+ promoteValues: false
+ })
+ .toArray()
+ .then(
+ collections => {
+ const info = collections.length > 0 ? bson.serialize(collections[0]) : null;
+ return callback(null, info);
+ },
+ err => {
+ callback(err, null);
+ }
+ );
+ }
+
+ /**
+ * @ignore
+ * Calls to the mongocryptd to provide markings for a command.
+ * Exposed for testing purposes. Do not directly invoke.
+ * @param {MongoClient} client A MongoClient connected to a mongocryptd
+ * @param {string} ns The namespace (database.collection) the command is being executed on
+ * @param {object} command The command to execute.
+ * @param {StateMachine~markCommandCallback} callback Invoked with the serialized and marked bson command, or with an error
+ * @returns {void}
+ */
+ markCommand(client, ns, command, callback) {
+ const bson = this.bson;
+ const options = { promoteLongs: false, promoteValues: false };
+ const dbName = databaseNamespace(ns);
+ const rawCommand = bson.deserialize(command, options);
+
+ client
+ .db(dbName)
+ .command(rawCommand, options)
+ .then(
+ response => {
+ return callback(null, bson.serialize(response, this.options));
+ },
+ err => {
+ callback(err, null);
+ }
+ );
+ }
+
+ /**
+ * @ignore
+ * Requests keys from the keyVault collection on the topology.
+ * Exposed for testing purposes. Do not directly invoke.
+ * @param {MongoClient} client A MongoClient connected to the topology
+ * @param {string} keyVaultNamespace The namespace (database.collection) of the keyVault Collection
+ * @param {object} filter The filter for the find query against the keyVault Collection
+ * @param {StateMachine~fetchKeysCallback} callback Invoked with the found keys, or with an error
+ * @returns {void}
+ */
+ fetchKeys(client, keyVaultNamespace, filter, callback) {
+ const bson = this.bson;
+ const dbName = databaseNamespace(keyVaultNamespace);
+ const collectionName = collectionNamespace(keyVaultNamespace);
+ filter = bson.deserialize(filter);
+
+ client
+ .db(dbName)
+ .collection(collectionName, { readConcern: { level: 'majority' } })
+ .find(filter)
+ .toArray()
+ .then(
+ keys => {
+ return callback(null, keys);
+ },
+ err => {
+ callback(err, null);
+ }
+ );
+ }
+ }
+
+ return { StateMachine };
+};
diff --git a/encryption/test/autoEncrypter.test.js b/encryption/test/autoEncrypter.test.js
new file mode 100644
index 00000000000..dbff3111fe6
--- /dev/null
+++ b/encryption/test/autoEncrypter.test.js
@@ -0,0 +1,950 @@
+'use strict';
+
+const fs = require('fs');
+const path = require('path');
+const sinon = require('sinon');
+const mongodb = require('mongodb');
+const BSON = mongodb.BSON;
+const EJSON = BSON.EJSON;
+const requirements = require('./requirements.helper');
+const MongoNetworkTimeoutError = mongodb.MongoNetworkTimeoutError || mongodb.MongoTimeoutError;
+const MongoError = mongodb.MongoError;
+const stateMachine = require('../lib/stateMachine')({ mongodb });
+const StateMachine = stateMachine.StateMachine;
+const MongocryptdManager = require('../lib/mongocryptdManager').MongocryptdManager;
+
+const { expect } = require('chai');
+
+const sharedLibrarySuffix =
+ process.platform === 'win32' ? 'dll' : process.platform === 'darwin' ? 'dylib' : 'so';
+let sharedLibraryStub = path.resolve(
+ __dirname,
+ '..',
+ '..',
+ '..',
+ `mongo_crypt_v1.${sharedLibrarySuffix}`
+);
+if (!fs.existsSync(sharedLibraryStub)) {
+ sharedLibraryStub = path.resolve(
+ __dirname,
+ '..',
+ 'deps',
+ 'tmp',
+ 'libmongocrypt-build',
+ ...(process.platform === 'win32' ? ['RelWithDebInfo'] : []),
+ `mongo_crypt_v1.${sharedLibrarySuffix}`
+ );
+}
+
// Reads an extended-JSON fixture from disk and returns it serialized as BSON.
function readExtendedJsonToBuffer(path) {
  const document = EJSON.parse(fs.readFileSync(path, 'utf8'));
  return BSON.serialize(document);
}
+
// Reads a raw HTTP response fixture, normalizing LF line endings to CRLF.
function readHttpResponse(path) {
  const data = fs.readFileSync(path, 'utf8');
  return Buffer.from(data.replace(/\n/g, '\r\n'), 'utf8');
}
+
+const TEST_COMMAND = JSON.parse(fs.readFileSync(`${__dirname}/data/cmd.json`));
+const MOCK_COLLINFO_RESPONSE = readExtendedJsonToBuffer(`${__dirname}/data/collection-info.json`);
+const MOCK_MONGOCRYPTD_RESPONSE = readExtendedJsonToBuffer(
+ `${__dirname}/data/mongocryptd-reply.json`
+);
+const MOCK_KEYDOCUMENT_RESPONSE = readExtendedJsonToBuffer(`${__dirname}/data/key-document.json`);
+const MOCK_KMS_DECRYPT_REPLY = readHttpResponse(`${__dirname}/data/kms-decrypt-reply.txt`);
+
// Minimal stand-in for a MongoClient: exposes only the `topology.bson`
// property that the pre-4.9 AutoEncrypter constructor path reads.
class MockClient {
  constructor() {
    this.topology = {
      bson: BSON
    };
  }
}
+
+const originalAccessKeyId = process.env.AWS_ACCESS_KEY_ID;
+const originalSecretAccessKey = process.env.AWS_SECRET_ACCESS_KEY;
+
+const AutoEncrypter = require('../lib/autoEncrypter')({ mongodb, stateMachine }).AutoEncrypter;
+describe('AutoEncrypter', function () {
+ this.timeout(12000);
+ let ENABLE_LOG_TEST = false;
+ let sandbox = sinon.createSandbox();
+ beforeEach(() => {
+ sandbox.restore();
+ sandbox.stub(StateMachine.prototype, 'kmsRequest').callsFake(request => {
+ request.addResponse(MOCK_KMS_DECRYPT_REPLY);
+ return Promise.resolve();
+ });
+
+ sandbox
+ .stub(StateMachine.prototype, 'fetchCollectionInfo')
+ .callsFake((client, ns, filter, callback) => {
+ callback(null, MOCK_COLLINFO_RESPONSE);
+ });
+
+ sandbox
+ .stub(StateMachine.prototype, 'markCommand')
+ .callsFake((client, ns, command, callback) => {
+ if (ENABLE_LOG_TEST) {
+ const response = BSON.deserialize(MOCK_MONGOCRYPTD_RESPONSE);
+ response.schemaRequiresEncryption = false;
+
+ ENABLE_LOG_TEST = false; // disable test after run
+ callback(null, BSON.serialize(response));
+ return;
+ }
+
+ callback(null, MOCK_MONGOCRYPTD_RESPONSE);
+ });
+
+ sandbox.stub(StateMachine.prototype, 'fetchKeys').callsFake((client, ns, filter, callback) => {
+ // mock data is already serialized, our action deals with the result of a cursor
+ const deserializedKey = BSON.deserialize(MOCK_KEYDOCUMENT_RESPONSE);
+ callback(null, [deserializedKey]);
+ });
+ });
+
+ afterEach(() => {
+ sandbox.restore();
+ });
+
+ describe('#constructor', function () {
+ context('when mongodb exports BSON (driver >= 4.9.0)', function () {
+ context('when a bson option is provided', function () {
+ const bson = Object.assign({}, BSON);
+ const encrypter = new AutoEncrypter(
+ {},
+ {
+ bson: bson,
+ kmsProviders: {
+ local: { key: Buffer.alloc(96) }
+ }
+ }
+ );
+
+ it('uses the bson option', function () {
+ expect(encrypter._bson).to.equal(bson);
+ });
+ });
+
+ context('when a bson option is not provided', function () {
+ const encrypter = new AutoEncrypter(
+ {},
+ {
+ kmsProviders: {
+ local: { key: Buffer.alloc(96) }
+ }
+ }
+ );
+
+ it('uses the mongodb exported BSON', function () {
+ expect(encrypter._bson).to.equal(BSON);
+ });
+ });
+
+ it('never uses bson from the topology', function () {
+ expect(() => {
+ new AutoEncrypter(
+ {},
+ {
+ kmsProviders: {
+ local: { key: Buffer.alloc(96) }
+ }
+ }
+ );
+ }).not.to.throw();
+ });
+ });
+
+ context('when mongodb does not export BSON (driver < 4.9.0)', function () {
+ context('when a bson option is provided', function () {
+ const bson = Object.assign({}, BSON);
+ const encrypter = new AutoEncrypter(
+ {},
+ {
+ bson: bson,
+ kmsProviders: {
+ local: { key: Buffer.alloc(96) }
+ }
+ }
+ );
+
+ it('uses the bson option', function () {
+ expect(encrypter._bson).to.equal(bson);
+ });
+ });
+
+ context('when a bson option is not provided', function () {
+ const mongoNoBson = { ...mongodb, BSON: undefined };
+ const AutoEncrypterNoBson = require('../lib/autoEncrypter')({
+ mongodb: mongoNoBson,
+ stateMachine
+ }).AutoEncrypter;
+
+ context('when the client has a topology', function () {
+ const client = new MockClient();
+ const encrypter = new AutoEncrypterNoBson(client, {
+ kmsProviders: {
+ local: { key: Buffer.alloc(96) }
+ }
+ });
+
+ it('uses the bson on the topology', function () {
+ expect(encrypter._bson).to.equal(client.topology.bson);
+ });
+ });
+
+ context('when the client does not have a topology', function () {
+ it('raises an error', function () {
+ expect(() => {
+ new AutoEncrypterNoBson({}, {});
+ }).to.throw(/bson/);
+ });
+ });
+ });
+ });
+
+ context('when using mongocryptd', function () {
+ const client = new MockClient();
+ const autoEncrypterOptions = {
+ mongocryptdBypassSpawn: true,
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: { accessKeyId: 'example', secretAccessKey: 'example' },
+ local: { key: Buffer.alloc(96) }
+ }
+ };
+ const autoEncrypter = new AutoEncrypter(client, autoEncrypterOptions);
+
+ it('instantiates a mongo client on the auto encrypter', function () {
+ expect(autoEncrypter)
+ .to.have.property('_mongocryptdClient')
+ .to.be.instanceOf(mongodb.MongoClient);
+ });
+
+ it('sets the 3x legacy client options on the mongo client', function () {
+ expect(autoEncrypter).to.have.nested.property('_mongocryptdClient.s.options');
+ const options = autoEncrypter._mongocryptdClient.s.options;
+ expect(options).to.have.property('useUnifiedTopology', true);
+ expect(options).to.have.property('useNewUrlParser', true);
+ });
+
+ it('sets serverSelectionTimeoutMS to 10000ms', function () {
+ expect(autoEncrypter).to.have.nested.property('_mongocryptdClient.s.options');
+ const options = autoEncrypter._mongocryptdClient.s.options;
+ expect(options).to.have.property('serverSelectionTimeoutMS', 10000);
+ });
+
+ context('when mongocryptdURI is not specified', () => {
+ it('sets the ip address family to ipv4', function () {
+ expect(autoEncrypter).to.have.nested.property('_mongocryptdClient.s.options');
+ const options = autoEncrypter._mongocryptdClient.s.options;
+ expect(options).to.have.property('family', 4);
+ });
+ });
+
+ context('when mongocryptdURI is specified', () => {
+ it('does not set the ip address family to ipv4', function () {
+ const autoEncrypter = new AutoEncrypter(client, {
+ ...autoEncrypterOptions,
+ extraOptions: { mongocryptdURI: MongocryptdManager.DEFAULT_MONGOCRYPTD_URI }
+ });
+
+ expect(autoEncrypter).to.have.nested.property('_mongocryptdClient.s.options');
+ const options = autoEncrypter._mongocryptdClient.s.options;
+ expect(options).not.to.have.property('family', 4);
+ });
+ });
+ });
+ });
+
+ it('should support `bypassAutoEncryption`', function (done) {
+ const client = new MockClient();
+ const autoEncrypter = new AutoEncrypter(client, {
+ bypassAutoEncryption: true,
+ mongocryptdBypassSpawn: true,
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: { accessKeyId: 'example', secretAccessKey: 'example' },
+ local: { key: Buffer.alloc(96) }
+ }
+ });
+
+ autoEncrypter.encrypt('test.test', { test: 'command' }, (err, encrypted) => {
+ expect(err).to.not.exist;
+ expect(encrypted).to.eql({ test: 'command' });
+ done();
+ });
+ });
+
+ describe('state machine', function () {
+ it('should decrypt mock data', function (done) {
+ const input = readExtendedJsonToBuffer(`${__dirname}/data/encrypted-document.json`);
+ const client = new MockClient();
+ const mc = new AutoEncrypter(client, {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: { accessKeyId: 'example', secretAccessKey: 'example' },
+ local: { key: Buffer.alloc(96) }
+ }
+ });
+ mc.decrypt(input, (err, decrypted) => {
+ if (err) return done(err);
+ expect(decrypted).to.eql({ filter: { find: 'test', ssn: '457-55-5462' } });
+ expect(decrypted).to.not.have.property(Symbol.for('@@mdb.decryptedKeys'));
+ expect(decrypted.filter).to.not.have.property(Symbol.for('@@mdb.decryptedKeys'));
+ done();
+ });
+ });
+
+ it('should decrypt mock data and mark decrypted items if enabled for testing', function (done) {
+ const input = readExtendedJsonToBuffer(`${__dirname}/data/encrypted-document.json`);
+ const nestedInput = readExtendedJsonToBuffer(
+ `${__dirname}/data/encrypted-document-nested.json`
+ );
+ const client = new MockClient();
+ const mc = new AutoEncrypter(client, {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: { accessKeyId: 'example', secretAccessKey: 'example' },
+ local: { key: Buffer.alloc(96) }
+ }
+ });
+ mc[Symbol.for('@@mdb.decorateDecryptionResult')] = true;
+ mc.decrypt(input, (err, decrypted) => {
+ if (err) return done(err);
+ expect(decrypted).to.eql({ filter: { find: 'test', ssn: '457-55-5462' } });
+ expect(decrypted).to.not.have.property(Symbol.for('@@mdb.decryptedKeys'));
+ expect(decrypted.filter[Symbol.for('@@mdb.decryptedKeys')]).to.eql(['ssn']);
+
+ // The same, but with an object containing different data types as the input
+ mc.decrypt({ a: [null, 1, { c: new BSON.Binary('foo', 1) }] }, (err, decrypted) => {
+ if (err) return done(err);
+ expect(decrypted).to.eql({ a: [null, 1, { c: new BSON.Binary('foo', 1) }] });
+ expect(decrypted).to.not.have.property(Symbol.for('@@mdb.decryptedKeys'));
+
+ // The same, but with nested data inside the decrypted input
+ mc.decrypt(nestedInput, (err, decrypted) => {
+ if (err) return done(err);
+ expect(decrypted).to.eql({ nested: { x: { y: 1234 } } });
+ expect(decrypted[Symbol.for('@@mdb.decryptedKeys')]).to.eql(['nested']);
+ expect(decrypted.nested).to.not.have.property(Symbol.for('@@mdb.decryptedKeys'));
+ expect(decrypted.nested.x).to.not.have.property(Symbol.for('@@mdb.decryptedKeys'));
+ expect(decrypted.nested.x.y).to.not.have.property(Symbol.for('@@mdb.decryptedKeys'));
+ done();
+ });
+ });
+ });
+ });
+
+ it('should decrypt mock data with per-context KMS credentials', function (done) {
+ const input = readExtendedJsonToBuffer(`${__dirname}/data/encrypted-document.json`);
+ const client = new MockClient();
+ const mc = new AutoEncrypter(client, {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: {}
+ },
+ async onKmsProviderRefresh() {
+ return { aws: { accessKeyId: 'example', secretAccessKey: 'example' } };
+ }
+ });
+ mc.decrypt(input, (err, decrypted) => {
+ if (err) return done(err);
+ expect(decrypted).to.eql({ filter: { find: 'test', ssn: '457-55-5462' } });
+ done();
+ });
+ });
+
+ context('when no refresh function is provided', function () {
+ const accessKey = 'example';
+ const secretKey = 'example';
+
+ before(function () {
+ if (!requirements.credentialProvidersInstalled.aws) {
+ this.currentTest.skipReason = 'Cannot refresh credentials without sdk provider';
+ this.currentTest.skip();
+ return;
+ }
+ // After the entire suite runs, set the env back for the rest of the test run.
+ process.env.AWS_ACCESS_KEY_ID = accessKey;
+ process.env.AWS_SECRET_ACCESS_KEY = secretKey;
+ });
+
+ after(function () {
+ // After the entire suite runs, set the env back for the rest of the test run.
+ process.env.AWS_ACCESS_KEY_ID = originalAccessKeyId;
+ process.env.AWS_SECRET_ACCESS_KEY = originalSecretAccessKey;
+ });
+
+ it('should decrypt mock data with KMS credentials from the environment', function (done) {
+ const input = readExtendedJsonToBuffer(`${__dirname}/data/encrypted-document.json`);
+ const client = new MockClient();
+ const mc = new AutoEncrypter(client, {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: {}
+ }
+ });
+ mc.decrypt(input, (err, decrypted) => {
+ if (err) return done(err);
+ expect(decrypted).to.eql({ filter: { find: 'test', ssn: '457-55-5462' } });
+ done();
+ });
+ });
+ });
+
+ context('when no refresh function is provided and no optional sdk', function () {
+ const accessKey = 'example';
+ const secretKey = 'example';
+
+ before(function () {
+ if (requirements.credentialProvidersInstalled.aws) {
+ this.currentTest.skipReason = 'With optional sdk installed credentials would be loaded.';
+ this.currentTest.skip();
+ return;
+ }
+ // After the entire suite runs, set the env back for the rest of the test run.
+ process.env.AWS_ACCESS_KEY_ID = accessKey;
+ process.env.AWS_SECRET_ACCESS_KEY = secretKey;
+ });
+
+ after(function () {
+ // After the entire suite runs, set the env back for the rest of the test run.
+ process.env.AWS_ACCESS_KEY_ID = originalAccessKeyId;
+ process.env.AWS_SECRET_ACCESS_KEY = originalSecretAccessKey;
+ });
+
+ it('errors without the optional sdk credential provider', function (done) {
+ const input = readExtendedJsonToBuffer(`${__dirname}/data/encrypted-document.json`);
+ const client = new MockClient();
+ const mc = new AutoEncrypter(client, {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: {}
+ }
+ });
+ mc.decrypt(input, err => {
+ expect(err.message).to.equal(
+ 'client not configured with KMS provider necessary to decrypt'
+ );
+ done();
+ });
+ });
+ });
+
+ it('should encrypt mock data', function (done) {
+ const client = new MockClient();
+ const mc = new AutoEncrypter(client, {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: { accessKeyId: 'example', secretAccessKey: 'example' },
+ local: { key: Buffer.alloc(96) }
+ }
+ });
+
+ mc.encrypt('test.test', TEST_COMMAND, (err, encrypted) => {
+ if (err) return done(err);
+ const expected = EJSON.parse(
+ JSON.stringify({
+ find: 'test',
+ filter: {
+ ssn: {
+ $binary: {
+ base64:
+ 'AWFhYWFhYWFhYWFhYWFhYWECRTOW9yZzNDn5dGwuqsrJQNLtgMEKaujhs9aRWRp+7Yo3JK8N8jC8P0Xjll6C1CwLsE/iP5wjOMhVv1KMMyOCSCrHorXRsb2IKPtzl2lKTqQ=',
+ subType: '6'
+ }
+ }
+ }
+ })
+ );
+
+ expect(encrypted).to.containSubset(expected);
+ done();
+ });
+ });
+
+ it('should encrypt mock data with per-context KMS credentials', function (done) {
+ const client = new MockClient();
+ const mc = new AutoEncrypter(client, {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: {}
+ },
+ async onKmsProviderRefresh() {
+ return { aws: { accessKeyId: 'example', secretAccessKey: 'example' } };
+ }
+ });
+
+ mc.encrypt('test.test', TEST_COMMAND, (err, encrypted) => {
+ if (err) return done(err);
+ const expected = EJSON.parse(
+ JSON.stringify({
+ find: 'test',
+ filter: {
+ ssn: {
+ $binary: {
+ base64:
+ 'AWFhYWFhYWFhYWFhYWFhYWECRTOW9yZzNDn5dGwuqsrJQNLtgMEKaujhs9aRWRp+7Yo3JK8N8jC8P0Xjll6C1CwLsE/iP5wjOMhVv1KMMyOCSCrHorXRsb2IKPtzl2lKTqQ=',
+ subType: '6'
+ }
+ }
+ }
+ })
+ );
+
+ expect(encrypted).to.containSubset(expected);
+ done();
+ });
+ });
+
+ // TODO(NODE-4089): Enable test once https://github.com/mongodb/libmongocrypt/pull/263 is done
+ it.skip('should encrypt mock data when using the crypt_shared library', function (done) {
+ const client = new MockClient();
+ const mc = new AutoEncrypter(client, {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: {}
+ },
+ async onKmsProviderRefresh() {
+ return { aws: { accessKeyId: 'example', secretAccessKey: 'example' } };
+ },
+ extraOptions: {
+ cryptSharedLibPath: sharedLibraryStub
+ }
+ });
+
+ expect(mc).to.not.have.property('_mongocryptdManager');
+ expect(mc).to.not.have.property('_mongocryptdClient');
+
+ mc.encrypt('test.test', TEST_COMMAND, (err, encrypted) => {
+ if (err) return done(err);
+ const expected = EJSON.parse(
+ JSON.stringify({
+ find: 'test',
+ filter: {
+ ssn: {
+ $binary: {
+ base64:
+ 'AWFhYWFhYWFhYWFhYWFhYWECRTOW9yZzNDn5dGwuqsrJQNLtgMEKaujhs9aRWRp+7Yo3JK8N8jC8P0Xjll6C1CwLsE/iP5wjOMhVv1KMMyOCSCrHorXRsb2IKPtzl2lKTqQ=',
+ subType: '6'
+ }
+ }
+ }
+ })
+ );
+
+ expect(encrypted).to.containSubset(expected);
+ done();
+ });
+ });
+ });
+
+  describe('logging', function () {
+    it('should allow registration of a log handler', function (done) {
+      // Presumably enables the native logging path so the handler below
+      // receives messages; declared elsewhere in this file -- TODO confirm.
+      ENABLE_LOG_TEST = true;
+
+      // Only assert on the first message delivered; later messages are
+      // ignored so the expectations run at most once.
+      let loggerCalled = false;
+      const logger = (level, message) => {
+        if (loggerCalled) return;
+
+        loggerCalled = true;
+        // Accepted numeric log levels for the first message; exact semantics
+        // come from the underlying encryption engine -- TODO confirm.
+        expect(level).to.be.oneOf([2, 3]);
+        expect(message).to.not.be.empty;
+      };
+
+      const client = new MockClient();
+      const mc = new AutoEncrypter(client, {
+        logger,
+        kmsProviders: {
+          aws: { accessKeyId: 'example', secretAccessKey: 'example' },
+          local: { key: Buffer.alloc(96) }
+        }
+      });
+
+      // Encryption must still succeed while the log handler is attached.
+      mc.encrypt('test.test', TEST_COMMAND, (err, encrypted) => {
+        if (err) return done(err);
+        const expected = EJSON.parse(
+          JSON.stringify({
+            find: 'test',
+            filter: {
+              ssn: '457-55-5462'
+            }
+          })
+        );
+
+        expect(encrypted).to.containSubset(expected);
+        done();
+      });
+    });
+  });
+
+ describe('autoSpawn', function () {
+ beforeEach(function () {
+ if (requirements.SKIP_LIVE_TESTS) {
+ this.currentTest.skipReason = `requirements.SKIP_LIVE_TESTS=${requirements.SKIP_LIVE_TESTS}`;
+ this.currentTest.skip();
+ return;
+ }
+ });
+ afterEach(function (done) {
+ if (this.mc) {
+ this.mc.teardown(false, err => {
+ this.mc = undefined;
+ done(err);
+ });
+ } else {
+ done();
+ }
+ });
+
+ it('should autoSpawn a mongocryptd on init by default', function (done) {
+ const client = new MockClient();
+ this.mc = new AutoEncrypter(client, {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: { accessKeyId: 'example', secretAccessKey: 'example' },
+ local: { key: Buffer.alloc(96) }
+ }
+ });
+
+ expect(this.mc).to.have.property('cryptSharedLibVersionInfo', null);
+
+ const localMcdm = this.mc._mongocryptdManager;
+ sandbox.spy(localMcdm, 'spawn');
+
+ this.mc.init(err => {
+ if (err) return done(err);
+ expect(localMcdm.spawn).to.have.been.calledOnce;
+ done();
+ });
+ });
+
+ it('should not attempt to kick off mongocryptd on a normal error', function (done) {
+ let called = false;
+ StateMachine.prototype.markCommand.callsFake((client, ns, filter, callback) => {
+ if (!called) {
+ called = true;
+ callback(new Error('msg'));
+ return;
+ }
+
+ callback(null, MOCK_MONGOCRYPTD_RESPONSE);
+ });
+
+ const client = new MockClient();
+ this.mc = new AutoEncrypter(client, {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: { accessKeyId: 'example', secretAccessKey: 'example' },
+ local: { key: Buffer.alloc(96) }
+ }
+ });
+ expect(this.mc).to.have.property('cryptSharedLibVersionInfo', null);
+
+ const localMcdm = this.mc._mongocryptdManager;
+ this.mc.init(err => {
+ if (err) return done(err);
+
+ sandbox.spy(localMcdm, 'spawn');
+
+ this.mc.encrypt('test.test', TEST_COMMAND, err => {
+ expect(localMcdm.spawn).to.not.have.been.called;
+ expect(err).to.be.an.instanceOf(Error);
+ done();
+ });
+ });
+ });
+
+ it('should restore the mongocryptd and retry once if a MongoNetworkTimeoutError is experienced', function (done) {
+ let called = false;
+ StateMachine.prototype.markCommand.callsFake((client, ns, filter, callback) => {
+ if (!called) {
+ called = true;
+ callback(new MongoNetworkTimeoutError('msg'));
+ return;
+ }
+
+ callback(null, MOCK_MONGOCRYPTD_RESPONSE);
+ });
+
+ const client = new MockClient();
+ this.mc = new AutoEncrypter(client, {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: { accessKeyId: 'example', secretAccessKey: 'example' },
+ local: { key: Buffer.alloc(96) }
+ }
+ });
+ expect(this.mc).to.have.property('cryptSharedLibVersionInfo', null);
+
+ const localMcdm = this.mc._mongocryptdManager;
+ this.mc.init(err => {
+ if (err) return done(err);
+
+ sandbox.spy(localMcdm, 'spawn');
+
+ this.mc.encrypt('test.test', TEST_COMMAND, err => {
+ expect(localMcdm.spawn).to.have.been.calledOnce;
+ expect(err).to.not.exist;
+ done();
+ });
+ });
+ });
+
+ it('should propagate error if MongoNetworkTimeoutError is experienced twice in a row', function (done) {
+ let counter = 2;
+ StateMachine.prototype.markCommand.callsFake((client, ns, filter, callback) => {
+ if (counter) {
+ counter -= 1;
+ callback(new MongoNetworkTimeoutError('msg'));
+ return;
+ }
+
+ callback(null, MOCK_MONGOCRYPTD_RESPONSE);
+ });
+
+ const client = new MockClient();
+ this.mc = new AutoEncrypter(client, {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: { accessKeyId: 'example', secretAccessKey: 'example' },
+ local: { key: Buffer.alloc(96) }
+ }
+ });
+ expect(this.mc).to.have.property('cryptSharedLibVersionInfo', null);
+
+ const localMcdm = this.mc._mongocryptdManager;
+ this.mc.init(err => {
+ if (err) return done(err);
+
+ sandbox.spy(localMcdm, 'spawn');
+
+ this.mc.encrypt('test.test', TEST_COMMAND, err => {
+ expect(localMcdm.spawn).to.have.been.calledOnce;
+ expect(err).to.be.an.instanceof(MongoNetworkTimeoutError);
+ done();
+ });
+ });
+ });
+
+ it('should return a useful message if mongocryptd fails to autospawn', function (done) {
+ const client = new MockClient();
+ this.mc = new AutoEncrypter(client, {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: { accessKeyId: 'example', secretAccessKey: 'example' },
+ local: { key: Buffer.alloc(96) }
+ },
+ extraOptions: {
+ mongocryptdURI: 'mongodb://something.invalid:27020/'
+ }
+ });
+ expect(this.mc).to.have.property('cryptSharedLibVersionInfo', null);
+
+ sandbox.stub(MongocryptdManager.prototype, 'spawn').callsFake(callback => {
+ callback();
+ });
+
+ this.mc.init(err => {
+ expect(err).to.exist;
+ expect(err).to.be.instanceOf(MongoError);
+ done();
+ });
+ });
+ });
+
+ describe('noAutoSpawn', function () {
+ beforeEach('start MongocryptdManager', function (done) {
+ if (requirements.SKIP_LIVE_TESTS) {
+ this.currentTest.skipReason = `requirements.SKIP_LIVE_TESTS=${requirements.SKIP_LIVE_TESTS}`;
+ this.skip();
+ }
+
+ this.mcdm = new MongocryptdManager({});
+ this.mcdm.spawn(done);
+ });
+
+ afterEach(function (done) {
+ if (this.mc) {
+ this.mc.teardown(false, err => {
+ this.mc = undefined;
+ done(err);
+ });
+ } else {
+ done();
+ }
+ });
+
+ ['mongocryptdBypassSpawn', 'bypassAutoEncryption', 'bypassQueryAnalysis'].forEach(opt => {
+ const encryptionOptions = {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: { accessKeyId: 'example', secretAccessKey: 'example' },
+ local: { key: Buffer.alloc(96) }
+ },
+ extraOptions: {
+ mongocryptdBypassSpawn: opt === 'mongocryptdBypassSpawn'
+ },
+ bypassAutoEncryption: opt === 'bypassAutoEncryption',
+ bypassQueryAnalysis: opt === 'bypassQueryAnalysis'
+ };
+
+ it(`should not spawn mongocryptd on startup if ${opt} is true`, function (done) {
+ const client = new MockClient();
+ this.mc = new AutoEncrypter(client, encryptionOptions);
+
+ const localMcdm = this.mc._mongocryptdManager || { spawn: () => {} };
+ sandbox.spy(localMcdm, 'spawn');
+
+ this.mc.init(err => {
+ expect(err).to.not.exist;
+ expect(localMcdm.spawn).to.have.a.callCount(0);
+ done();
+ });
+ });
+ });
+
+ it('should not spawn a mongocryptd or retry on a server selection error if mongocryptdBypassSpawn: true', function (done) {
+ let called = false;
+ const timeoutError = new MongoNetworkTimeoutError('msg');
+ StateMachine.prototype.markCommand.callsFake((client, ns, filter, callback) => {
+ if (!called) {
+ called = true;
+ callback(timeoutError);
+ return;
+ }
+
+ callback(null, MOCK_MONGOCRYPTD_RESPONSE);
+ });
+
+ const client = new MockClient();
+ this.mc = new AutoEncrypter(client, {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: { accessKeyId: 'example', secretAccessKey: 'example' },
+ local: { key: Buffer.alloc(96) }
+ },
+ extraOptions: {
+ mongocryptdBypassSpawn: true
+ }
+ });
+
+ const localMcdm = this.mc._mongocryptdManager;
+ sandbox.spy(localMcdm, 'spawn');
+
+ this.mc.init(err => {
+ expect(err).to.not.exist;
+ expect(localMcdm.spawn).to.not.have.been.called;
+
+ this.mc.encrypt('test.test', TEST_COMMAND, (err, response) => {
+ expect(localMcdm.spawn).to.not.have.been.called;
+ expect(response).to.not.exist;
+ expect(err).to.equal(timeoutError);
+ done();
+ });
+ });
+ });
+ });
+
+ describe('crypt_shared library', function () {
+ it('should fail if no library can be found in the search path and cryptSharedLibRequired is set', function () {
+ // NB: This test has to be run before the tests/without having previously
+ // loaded a CSFLE shared library below to get the right error path.
+ const client = new MockClient();
+ try {
+ new AutoEncrypter(client, {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: { accessKeyId: 'example', secretAccessKey: 'example' },
+ local: { key: Buffer.alloc(96) }
+ },
+ extraOptions: {
+ cryptSharedLibSearchPaths: ['/nonexistent'],
+ cryptSharedLibRequired: true
+ }
+ });
+ expect.fail('missed exception');
+ } catch (err) {
+ expect(err.message).to.include(
+ '`cryptSharedLibRequired` set but no crypt_shared library loaded'
+ );
+ }
+ });
+
+ it('should load a shared library by specifying its path', function (done) {
+ const client = new MockClient();
+ this.mc = new AutoEncrypter(client, {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: { accessKeyId: 'example', secretAccessKey: 'example' },
+ local: { key: Buffer.alloc(96) }
+ },
+ extraOptions: {
+ cryptSharedLibPath: sharedLibraryStub
+ }
+ });
+
+ expect(this.mc).to.not.have.property('_mongocryptdManager');
+ expect(this.mc).to.not.have.property('_mongocryptdClient');
+ expect(this.mc).to.have.deep.property('cryptSharedLibVersionInfo', {
+ // eslint-disable-next-line no-undef
+ version: BigInt(0x000600020001000),
+ versionStr: 'stubbed-crypt_shared'
+ });
+
+ this.mc.teardown(true, done);
+ });
+
+ it('should load a shared library by specifying a search path', function (done) {
+ const client = new MockClient();
+ this.mc = new AutoEncrypter(client, {
+ keyVaultNamespace: 'admin.datakeys',
+ logger: () => {},
+ kmsProviders: {
+ aws: { accessKeyId: 'example', secretAccessKey: 'example' },
+ local: { key: Buffer.alloc(96) }
+ },
+ extraOptions: {
+ cryptSharedLibSearchPaths: [path.dirname(sharedLibraryStub)]
+ }
+ });
+
+ expect(this.mc).to.not.have.property('_mongocryptdManager');
+ expect(this.mc).to.not.have.property('_mongocryptdClient');
+ expect(this.mc).to.have.deep.property('cryptSharedLibVersionInfo', {
+ // eslint-disable-next-line no-undef
+ version: BigInt(0x000600020001000),
+ versionStr: 'stubbed-crypt_shared'
+ });
+
+ this.mc.teardown(true, done);
+ });
+ });
+
+  it('should provide the libmongocrypt version', function () {
+    // Static property exposed by the AutoEncrypter class; assert only that it
+    // is a string, not any specific version value.
+    expect(AutoEncrypter.libmongocryptVersion).to.be.a('string');
+  });
+});
diff --git a/encryption/test/buffer_pool.test.js b/encryption/test/buffer_pool.test.js
new file mode 100644
index 00000000000..973e4f74e6b
--- /dev/null
+++ b/encryption/test/buffer_pool.test.js
@@ -0,0 +1,91 @@
+'use strict';
+
+const { BufferPool } = require('../lib/buffer_pool');
+const { expect } = require('chai');
+
+describe('new BufferPool()', function () {
+ it('should report the correct length', function () {
+ const buffer = new BufferPool();
+ buffer.append(Buffer.from([0, 1]));
+ buffer.append(Buffer.from([2, 3]));
+ buffer.append(Buffer.from([2, 3]));
+ expect(buffer).property('length').to.equal(6);
+ });
+
+ it('return an empty buffer if too many bytes requested', function () {
+ const buffer = new BufferPool();
+ buffer.append(Buffer.from([0, 1, 2, 3]));
+ const data = buffer.read(6);
+ expect(data).to.have.length(0);
+ expect(buffer).property('length').to.equal(4);
+ });
+
+ context('peek', function () {
+ it('exact size', function () {
+ const buffer = new BufferPool();
+ buffer.append(Buffer.from([0, 1]));
+ const data = buffer.peek(2);
+ expect(data).to.eql(Buffer.from([0, 1]));
+ expect(buffer).property('length').to.equal(2);
+ });
+
+ it('within first buffer', function () {
+ const buffer = new BufferPool();
+ buffer.append(Buffer.from([0, 1, 2, 3]));
+ const data = buffer.peek(2);
+ expect(data).to.eql(Buffer.from([0, 1]));
+ expect(buffer).property('length').to.equal(4);
+ });
+
+ it('across multiple buffers', function () {
+ const buffer = new BufferPool();
+ buffer.append(Buffer.from([0, 1]));
+ buffer.append(Buffer.from([2, 3]));
+ buffer.append(Buffer.from([4, 5]));
+ expect(buffer).property('length').to.equal(6);
+ const data = buffer.peek(5);
+ expect(data).to.eql(Buffer.from([0, 1, 2, 3, 4]));
+ expect(buffer).property('length').to.equal(6);
+ });
+ });
+
+ context('read', function () {
+ it('should throw an error if a negative size is requested', function () {
+ const buffer = new BufferPool();
+ expect(() => buffer.read(-1)).to.throw(/Argument "size" must be a non-negative number/);
+ });
+
+ it('should throw an error if a non-number size is requested', function () {
+ const buffer = new BufferPool();
+ expect(() => buffer.read('256')).to.throw(/Argument "size" must be a non-negative number/);
+ });
+
+ it('exact size', function () {
+ const buffer = new BufferPool();
+ buffer.append(Buffer.from([0, 1]));
+ const data = buffer.read(2);
+ expect(data).to.eql(Buffer.from([0, 1]));
+ expect(buffer).property('length').to.equal(0);
+ });
+
+ it('within first buffer', function () {
+ const buffer = new BufferPool();
+ buffer.append(Buffer.from([0, 1, 2, 3]));
+ const data = buffer.read(2);
+ expect(data).to.eql(Buffer.from([0, 1]));
+ expect(buffer).property('length').to.equal(2);
+ });
+
+ it('across multiple buffers', function () {
+ const buffer = new BufferPool();
+ buffer.append(Buffer.from([0, 1]));
+ buffer.append(Buffer.from([2, 3]));
+ buffer.append(Buffer.from([4, 5]));
+ expect(buffer).property('length').to.equal(6);
+ const data = buffer.read(5);
+ expect(data).to.eql(Buffer.from([0, 1, 2, 3, 4]));
+ expect(buffer).property('length').to.equal(1);
+ expect(buffer.read(1)).to.eql(Buffer.from([5]));
+ });
+ });
+});
diff --git a/encryption/test/clientEncryption.test.js b/encryption/test/clientEncryption.test.js
new file mode 100644
index 00000000000..837bb85d491
--- /dev/null
+++ b/encryption/test/clientEncryption.test.js
@@ -0,0 +1,1093 @@
+'use strict';
+const fs = require('fs');
+const { expect } = require('chai');
+const sinon = require('sinon');
+const mongodb = require('mongodb');
+const BSON = mongodb.BSON;
+const MongoClient = mongodb.MongoClient;
+const cryptoCallbacks = require('../lib/cryptoCallbacks');
+const stateMachine = require('../lib/stateMachine')({ mongodb });
+const StateMachine = stateMachine.StateMachine;
+const { Binary, EJSON, deserialize } = BSON;
+const {
+ MongoCryptCreateEncryptedCollectionError,
+ MongoCryptCreateDataKeyError
+} = require('../lib/errors');
+
+function readHttpResponse(path) {
+ let data = fs.readFileSync(path, 'utf8').toString();
+ data = data.split('\n').join('\r\n');
+ return Buffer.from(data, 'utf8');
+}
+
+const ClientEncryption = require('../lib/clientEncryption')({
+ mongodb,
+ stateMachine
+}).ClientEncryption;
+
+/**
+ * Minimal stand-in for a MongoClient used by these unit tests.
+ * Exposes just enough surface for ClientEncryption: a `topology.bson`
+ * holder and a `db()` accessor whose `createCollection` echoes back the
+ * namespace and options instead of talking to a server.
+ */
+class MockClient {
+  constructor() {
+    // Mirrors the shape used by drivers that expose BSON via the topology
+    // (the "driver < 4.9.0" constructor tests below rely on this).
+    this.topology = {
+      bson: BSON
+    };
+  }
+  db(dbName) {
+    return {
+      // Resolves with the would-be namespace and options; no I/O is performed.
+      async createCollection(name, options) {
+        return { namespace: `${dbName}.${name}`, options };
+      }
+    };
+  }
+}
+
+const requirements = require('./requirements.helper');
+
+describe('ClientEncryption', function () {
+ this.timeout(12000);
+ /** @type {MongoClient} */
+ let client;
+
+ function throwIfNotNsNotFoundError(err) {
+ if (!err.message.match(/ns not found/)) {
+ throw err;
+ }
+ }
+
+ async function setup() {
+ client = new MongoClient(process.env.MONGODB_URI || 'mongodb://localhost:27017/test');
+ await client.connect();
+ try {
+ await client.db('client').collection('encryption').drop();
+ } catch (err) {
+ throwIfNotNsNotFoundError(err);
+ }
+ }
+
+ function teardown() {
+ if (requirements.SKIP_LIVE_TESTS) {
+ return Promise.resolve();
+ }
+
+ return client.close();
+ }
+
+ describe('#constructor', function () {
+ context('when mongodb exports BSON (driver >= 4.9.0)', function () {
+ context('when a bson option is provided', function () {
+ const bson = Object.assign({}, BSON);
+ const encrypter = new ClientEncryption(
+ {},
+ {
+ bson: bson,
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: {
+ local: { key: Buffer.alloc(96) }
+ }
+ }
+ );
+
+ it('uses the bson option', function () {
+ expect(encrypter._bson).to.equal(bson);
+ });
+ });
+
+ context('when a bson option is not provided', function () {
+ const encrypter = new ClientEncryption(
+ {},
+ {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: {
+ local: { key: Buffer.alloc(96) }
+ }
+ }
+ );
+
+ it('uses the mongodb exported BSON', function () {
+ expect(encrypter._bson).to.equal(BSON);
+ });
+ });
+
+ it('never uses bson from the topology', function () {
+ expect(() => {
+ new ClientEncryption(
+ {},
+ {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: {
+ local: { key: Buffer.alloc(96) }
+ }
+ }
+ );
+ }).not.to.throw();
+ });
+ });
+
+ context('when mongodb does not export BSON (driver < 4.9.0)', function () {
+ context('when a bson option is provided', function () {
+ const bson = Object.assign({}, BSON);
+ const encrypter = new ClientEncryption(
+ {},
+ {
+ bson: bson,
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: {
+ local: { key: Buffer.alloc(96) }
+ }
+ }
+ );
+
+ it('uses the bson option', function () {
+ expect(encrypter._bson).to.equal(bson);
+ });
+ });
+
+ context('when a bson option is not provided', function () {
+ const mongoNoBson = { ...mongodb, BSON: undefined };
+ const ClientEncryptionNoBson = require('../lib/clientEncryption')({
+ mongodb: mongoNoBson,
+ stateMachine
+ }).ClientEncryption;
+
+ context('when the client has a topology', function () {
+ const client = new MockClient();
+ const encrypter = new ClientEncryptionNoBson(client, {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: {
+ local: { key: Buffer.alloc(96) }
+ }
+ });
+
+ it('uses the bson on the topology', function () {
+ expect(encrypter._bson).to.equal(client.topology.bson);
+ });
+ });
+
+ context('when the client does not have a topology', function () {
+ it('raises an error', function () {
+ expect(() => {
+ new ClientEncryptionNoBson({}, {});
+ }).to.throw(/bson/);
+ });
+ });
+ });
+ });
+ });
+
+ describe('stubbed stateMachine', function () {
+ let sandbox = sinon.createSandbox();
+
+ after(() => sandbox.restore());
+ before(() => {
+ // stubbed out for AWS unit testing below
+ const MOCK_KMS_ENCRYPT_REPLY = readHttpResponse(`${__dirname}/data/kms-encrypt-reply.txt`);
+ sandbox.stub(StateMachine.prototype, 'kmsRequest').callsFake(request => {
+ request.addResponse(MOCK_KMS_ENCRYPT_REPLY);
+ return Promise.resolve();
+ });
+ });
+
+ beforeEach(function () {
+ if (requirements.SKIP_LIVE_TESTS) {
+ this.currentTest.skipReason = `requirements.SKIP_LIVE_TESTS=${requirements.SKIP_LIVE_TESTS}`;
+ this.test.skip();
+ return;
+ }
+
+ return setup();
+ });
+
+ afterEach(function () {
+ return teardown();
+ });
+
+ [
+ {
+ name: 'local',
+ kmsProviders: { local: { key: Buffer.alloc(96) } }
+ },
+ {
+ name: 'aws',
+ kmsProviders: { aws: { accessKeyId: 'example', secretAccessKey: 'example' } },
+ options: { masterKey: { region: 'region', key: 'cmk' } }
+ }
+ ].forEach(providerTest => {
+ it(`should create a data key with the "${providerTest.name}" KMS provider`, async function () {
+ const providerName = providerTest.name;
+ const encryption = new ClientEncryption(client, {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: providerTest.kmsProviders
+ });
+
+ const dataKeyOptions = providerTest.options || {};
+
+ const dataKey = await encryption.createDataKey(providerName, dataKeyOptions);
+ expect(dataKey).property('_bsontype', 'Binary');
+
+ const doc = await client.db('client').collection('encryption').findOne({ _id: dataKey });
+ expect(doc).to.have.property('masterKey');
+ expect(doc.masterKey).property('provider', providerName);
+ });
+
+ it(`should create a data key with the "${providerTest.name}" KMS provider (fixed key material)`, async function () {
+ const providerName = providerTest.name;
+ const encryption = new ClientEncryption(client, {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: providerTest.kmsProviders
+ });
+
+ const dataKeyOptions = {
+ ...providerTest.options,
+ keyMaterial: new BSON.Binary(Buffer.alloc(96))
+ };
+
+ const dataKey = await encryption.createDataKey(providerName, dataKeyOptions);
+ expect(dataKey).property('_bsontype', 'Binary');
+
+ const doc = await client.db('client').collection('encryption').findOne({ _id: dataKey });
+ expect(doc).to.have.property('masterKey');
+ expect(doc.masterKey).property('provider', providerName);
+ });
+ });
+
+ it(`should create a data key with the local KMS provider (fixed key material, fixed key UUID)`, async function () {
+ // 'Custom Key Material Test' prose spec test:
+ const keyVaultColl = client.db('client').collection('encryption');
+ const encryption = new ClientEncryption(client, {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: {
+ local: {
+ key: 'A'.repeat(128) // the value here is not actually relevant
+ }
+ }
+ });
+
+ const dataKeyOptions = {
+ keyMaterial: new BSON.Binary(
+ Buffer.from(
+ 'xPTAjBRG5JiPm+d3fj6XLi2q5DMXUS/f1f+SMAlhhwkhDRL0kr8r9GDLIGTAGlvC+HVjSIgdL+RKwZCvpXSyxTICWSXTUYsWYPyu3IoHbuBZdmw2faM3WhcRIgbMReU5',
+ 'base64'
+ )
+ )
+ };
+ const dataKey = await encryption.createDataKey('local', dataKeyOptions);
+ expect(dataKey._bsontype).to.equal('Binary');
+
+ // Remove and re-insert with a fixed UUID to guarantee consistent output
+ const doc = (
+ await keyVaultColl.findOneAndDelete({ _id: dataKey }, { writeConcern: { w: 'majority' } })
+ ).value;
+ doc._id = new BSON.Binary(Buffer.alloc(16), 4);
+ await keyVaultColl.insertOne(doc, { writeConcern: { w: 'majority' } });
+
+ const encrypted = await encryption.encrypt('test', {
+ keyId: doc._id,
+ algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'
+ });
+ expect(encrypted._bsontype).to.equal('Binary');
+ expect(encrypted.toString('base64')).to.equal(
+ 'AQAAAAAAAAAAAAAAAAAAAAACz0ZOLuuhEYi807ZXTdhbqhLaS2/t9wLifJnnNYwiw79d75QYIZ6M/aYC1h9nCzCjZ7pGUpAuNnkUhnIXM3PjrA=='
+ );
+ });
+
+ it('should fail to create a data key if keyMaterial is wrong', function (done) {
+ const encryption = new ClientEncryption(client, {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: { local: { key: 'A'.repeat(128) } }
+ });
+
+ const dataKeyOptions = {
+ keyMaterial: new BSON.Binary(Buffer.alloc(97))
+ };
+ try {
+ encryption.createDataKey('local', dataKeyOptions);
+ expect.fail('missed exception');
+ } catch (err) {
+ expect(err.message).to.equal('keyMaterial should have length 96, but has length 97');
+ done();
+ }
+ });
+
+ it('should explicitly encrypt and decrypt with the "local" KMS provider', function (done) {
+ const encryption = new ClientEncryption(client, {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: { local: { key: Buffer.alloc(96) } }
+ });
+
+ encryption.createDataKey('local', (err, dataKey) => {
+ expect(err).to.not.exist;
+
+ const encryptOptions = {
+ keyId: dataKey,
+ algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'
+ };
+
+ encryption.encrypt('hello', encryptOptions, (err, encrypted) => {
+ expect(err).to.not.exist;
+ expect(encrypted._bsontype).to.equal('Binary');
+ expect(encrypted.sub_type).to.equal(6);
+
+ encryption.decrypt(encrypted, (err, decrypted) => {
+ expect(err).to.not.exist;
+ expect(decrypted).to.equal('hello');
+ done();
+ });
+ });
+ });
+ });
+
+ it('should explicitly encrypt and decrypt with the "local" KMS provider (promise)', function () {
+ const encryption = new ClientEncryption(client, {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: { local: { key: Buffer.alloc(96) } }
+ });
+
+ return encryption
+ .createDataKey('local')
+ .then(dataKey => {
+ const encryptOptions = {
+ keyId: dataKey,
+ algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'
+ };
+
+ return encryption.encrypt('hello', encryptOptions);
+ })
+ .then(encrypted => {
+ expect(encrypted._bsontype).to.equal('Binary');
+ expect(encrypted.sub_type).to.equal(6);
+
+ return encryption.decrypt(encrypted);
+ })
+ .then(decrypted => {
+ expect(decrypted).to.equal('hello');
+ });
+ });
+
+ it('should explicitly encrypt and decrypt with a re-wrapped local key', function () {
+ // Create new ClientEncryption instances to make sure
+ // that we are actually using the rewrapped keys and not
+ // something that has been cached.
+ const newClientEncryption = () =>
+ new ClientEncryption(client, {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: { local: { key: 'A'.repeat(128) } }
+ });
+ let encrypted;
+
+ return newClientEncryption()
+ .createDataKey('local')
+ .then(dataKey => {
+ const encryptOptions = {
+ keyId: dataKey,
+ algorithm: 'Indexed',
+ contentionFactor: 0
+ };
+
+ return newClientEncryption().encrypt('hello', encryptOptions);
+ })
+ .then(_encrypted => {
+ encrypted = _encrypted;
+ expect(encrypted._bsontype).to.equal('Binary');
+ expect(encrypted.sub_type).to.equal(6);
+ })
+ .then(() => {
+ return newClientEncryption().rewrapManyDataKey({});
+ })
+ .then(rewrapManyDataKeyResult => {
+ expect(rewrapManyDataKeyResult.bulkWriteResult.result.nModified).to.equal(1);
+ return newClientEncryption().decrypt(encrypted);
+ })
+ .then(decrypted => {
+ expect(decrypted).to.equal('hello');
+ });
+ });
+
+ it('should not perform updates if no keys match', function () {
+ const clientEncryption = new ClientEncryption(client, {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: { local: { key: 'A'.repeat(128) } }
+ });
+
+ return clientEncryption.rewrapManyDataKey({ _id: 12345 }).then(rewrapManyDataKeyResult => {
+ expect(rewrapManyDataKeyResult.bulkWriteResult).to.equal(undefined);
+ });
+ });
+
+ it.skip('should explicitly encrypt and decrypt with a re-wrapped local key (explicit session/transaction)', function () {
+ const encryption = new ClientEncryption(client, {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: { local: { key: 'A'.repeat(128) } }
+ });
+ let encrypted;
+ let rewrapManyDataKeyResult;
+
+ return encryption
+ .createDataKey('local')
+ .then(dataKey => {
+ const encryptOptions = {
+ keyId: dataKey,
+ algorithm: 'Indexed',
+ contentionFactor: 0
+ };
+
+ return encryption.encrypt('hello', encryptOptions);
+ })
+ .then(_encrypted => {
+ encrypted = _encrypted;
+ })
+ .then(() => {
+ // withSession does not forward the callback's return value, hence
+ // the slightly awkward 'rewrapManyDataKeyResult' passing here
+ return client.withSession(session => {
+ return session.withTransaction(() => {
+ expect(session.transaction.isStarting).to.equal(true);
+ expect(session.transaction.isActive).to.equal(true);
+ rewrapManyDataKeyResult = encryption.rewrapManyDataKey(
+ {},
+ { provider: 'local', session }
+ );
+ return rewrapManyDataKeyResult.then(() => {
+ // Verify that the 'session' argument was actually used
+ expect(session.transaction.isStarting).to.equal(false);
+ expect(session.transaction.isActive).to.equal(true);
+ });
+ });
+ });
+ })
+ .then(() => {
+ return rewrapManyDataKeyResult;
+ })
+ .then(rewrapManyDataKeyResult => {
+ expect(rewrapManyDataKeyResult.bulkWriteResult.result.nModified).to.equal(1);
+ return encryption.decrypt(encrypted);
+ })
+ .then(decrypted => {
+ expect(decrypted).to.equal('hello');
+ });
+ }).skipReason = 'TODO(DRIVERS-2389): add explicit session support to key management API';
+
+ // TODO(NODE-3371): resolve KMS JSON response does not include string 'Plaintext'. HTTP status=200 error
+ it.skip('should explicitly encrypt and decrypt with the "aws" KMS provider', function (done) {
+ const encryption = new ClientEncryption(client, {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: { aws: { accessKeyId: 'example', secretAccessKey: 'example' } }
+ });
+
+ const dataKeyOptions = {
+ masterKey: { region: 'region', key: 'cmk' }
+ };
+
+ encryption.createDataKey('aws', dataKeyOptions, (err, dataKey) => {
+ expect(err).to.not.exist;
+
+ const encryptOptions = {
+ keyId: dataKey,
+ algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'
+ };
+
+ encryption.encrypt('hello', encryptOptions, (err, encrypted) => {
+ expect(err).to.not.exist;
+ expect(encrypted).to.have.property('v');
+ expect(encrypted.v._bsontype).to.equal('Binary');
+ expect(encrypted.v.sub_type).to.equal(6);
+
+ encryption.decrypt(encrypted, (err, decrypted) => {
+ expect(err).to.not.exist;
+ expect(decrypted).to.equal('hello');
+ done();
+ });
+ });
+ });
+ }).skipReason =
+ "TODO(NODE-3371): resolve KMS JSON response does not include string 'Plaintext'. HTTP status=200 error";
+ });
+
+ describe('ClientEncryptionKeyAltNames', function () {
+ const kmsProviders = requirements.awsKmsProviders;
+ const dataKeyOptions = requirements.awsDataKeyOptions;
+ beforeEach(function () {
+ if (requirements.SKIP_AWS_TESTS) {
+ this.currentTest.skipReason = `requirements.SKIP_AWS_TESTS=${requirements.SKIP_AWS_TESTS}`;
+ this.currentTest.skip();
+ return;
+ }
+
+ return setup().then(() => {
+ this.client = client;
+ this.collection = client.db('client').collection('encryption');
+ this.encryption = new ClientEncryption(this.client, {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders
+ });
+ });
+ });
+
+ afterEach(function () {
+ return teardown().then(() => {
+ this.encryption = undefined;
+ this.collection = undefined;
+ this.client = undefined;
+ });
+ });
+
+ function makeOptions(keyAltNames) {
+ expect(dataKeyOptions.masterKey).to.be.an('object');
+ expect(dataKeyOptions.masterKey.key).to.be.a('string');
+ expect(dataKeyOptions.masterKey.region).to.be.a('string');
+
+ return {
+ masterKey: {
+ key: dataKeyOptions.masterKey.key,
+ region: dataKeyOptions.masterKey.region
+ },
+ keyAltNames
+ };
+ }
+
+ describe('errors', function () {
+ [42, 'hello', { keyAltNames: 'foobar' }, /foobar/].forEach(val => {
+ it(`should fail if typeof keyAltNames = ${typeof val}`, function () {
+ const options = makeOptions(val);
+ expect(() => this.encryption.createDataKey('aws', options, () => undefined)).to.throw(
+ TypeError
+ );
+ });
+ });
+
+ [undefined, null, 42, { keyAltNames: 'foobar' }, ['foobar'], /foobar/].forEach(val => {
+ it(`should fail if typeof keyAltNames[x] = ${typeof val}`, function () {
+ const options = makeOptions([val]);
+ expect(() => this.encryption.createDataKey('aws', options, () => undefined)).to.throw(
+ TypeError
+ );
+ });
+ });
+ });
+
+ it('should create a key with keyAltNames', function () {
+ let dataKey;
+ const options = makeOptions(['foobar']);
+ return this.encryption
+ .createDataKey('aws', options)
+ .then(_dataKey => (dataKey = _dataKey))
+ .then(() => this.collection.findOne({ keyAltNames: 'foobar' }))
+ .then(document => {
+ expect(document).to.be.an('object');
+ expect(document).to.have.property('keyAltNames').that.includes.members(['foobar']);
+ expect(document).to.have.property('_id').that.deep.equals(dataKey);
+ });
+ });
+
+ it('should create a key with multiple keyAltNames', function () {
+ let dataKey;
+ return this.encryption
+ .createDataKey('aws', makeOptions(['foobar', 'fizzbuzz']))
+ .then(_dataKey => (dataKey = _dataKey))
+ .then(() =>
+ Promise.all([
+ this.collection.findOne({ keyAltNames: 'foobar' }),
+ this.collection.findOne({ keyAltNames: 'fizzbuzz' })
+ ])
+ )
+ .then(docs => {
+ expect(docs).to.have.lengthOf(2);
+ const doc1 = docs[0];
+ const doc2 = docs[1];
+ expect(doc1).to.be.an('object');
+ expect(doc2).to.be.an('object');
+ expect(doc1)
+ .to.have.property('keyAltNames')
+ .that.includes.members(['foobar', 'fizzbuzz']);
+ expect(doc1).to.have.property('_id').that.deep.equals(dataKey);
+ expect(doc2)
+ .to.have.property('keyAltNames')
+ .that.includes.members(['foobar', 'fizzbuzz']);
+ expect(doc2).to.have.property('_id').that.deep.equals(dataKey);
+ });
+ });
+
+ it('should be able to reference a key with `keyAltName` during encryption', function () {
+ let keyId;
+ const keyAltName = 'mySpecialKey';
+ const algorithm = 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic';
+
+ const valueToEncrypt = 'foobar';
+
+ return this.encryption
+ .createDataKey('aws', makeOptions([keyAltName]))
+ .then(_dataKey => (keyId = _dataKey))
+ .then(() => this.encryption.encrypt(valueToEncrypt, { keyId, algorithm }))
+ .then(encryptedValue => {
+ return this.encryption
+ .encrypt(valueToEncrypt, { keyAltName, algorithm })
+ .then(encryptedValue2 => {
+ expect(encryptedValue).to.deep.equal(encryptedValue2);
+ });
+ });
+ });
+ });
+
+ context('with stubbed key material and fixed random source', function () {
+ let sandbox = sinon.createSandbox();
+
+ afterEach(() => {
+ sandbox.restore();
+ });
+ beforeEach(() => {
+ const rndData = Buffer.from(
+ '\x4d\x06\x95\x64\xf5\xa0\x5e\x9e\x35\x23\xb9\x8f\x57\x5a\xcb\x15',
+ 'latin1'
+ );
+ let rndPos = 0;
+ sandbox.stub(cryptoCallbacks, 'randomHook').callsFake((buffer, count) => {
+ if (rndPos + count > rndData.length) {
+ return new Error('Out of fake random data');
+ }
+ buffer.set(rndData.subarray(rndPos, rndPos + count));
+ rndPos += count;
+ return count;
+ });
+
+ // stubbed out for AWS unit testing below
+ sandbox.stub(StateMachine.prototype, 'fetchKeys').callsFake((client, ns, filter, cb) => {
+ filter = deserialize(filter);
+ const keyIds = filter.$or[0]._id.$in.map(key => key.toString('hex'));
+ const fileNames = keyIds.map(
+ keyId => `${__dirname}/../../../test/data/keys/${keyId.toUpperCase()}-local-document.json`
+ );
+ const contents = fileNames.map(filename => EJSON.parse(fs.readFileSync(filename)));
+ cb(null, contents);
+ });
+ });
+
+ // This exactly matches _test_encrypt_fle2_explicit from the C tests
+ it('should explicitly encrypt and decrypt with the "local" KMS provider (FLE2, exact result)', function () {
+ const encryption = new ClientEncryption(new MockClient(), {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: { local: { key: Buffer.alloc(96) } }
+ });
+
+ const encryptOptions = {
+ keyId: new Binary(Buffer.from('ABCDEFAB123498761234123456789012', 'hex'), 4),
+ algorithm: 'Unindexed'
+ };
+
+ return encryption
+ .encrypt('value123', encryptOptions)
+ .then(encrypted => {
+ expect(encrypted._bsontype).to.equal('Binary');
+ expect(encrypted.sub_type).to.equal(6);
+ return encryption.decrypt(encrypted);
+ })
+ .then(decrypted => {
+ expect(decrypted).to.equal('value123');
+ });
+ });
+ });
+
+ describe('encrypt()', function () {
+ let clientEncryption;
+ let completeOptions;
+ let dataKey;
+
+ beforeEach(async function () {
+ if (requirements.SKIP_LIVE_TESTS) {
+ this.currentTest.skipReason = `requirements.SKIP_LIVE_TESTS=${requirements.SKIP_LIVE_TESTS}`;
+ this.test.skip();
+ return;
+ }
+
+ await setup();
+ clientEncryption = new ClientEncryption(client, {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: { local: { key: Buffer.alloc(96) } }
+ });
+
+ dataKey = await clientEncryption.createDataKey('local', {
+ name: 'local',
+ kmsProviders: { local: { key: Buffer.alloc(96) } }
+ });
+
+ completeOptions = {
+ algorithm: 'RangePreview',
+ contentionFactor: 0,
+ rangeOptions: {
+ min: new BSON.Long(0),
+ max: new BSON.Long(10),
+ sparsity: new BSON.Long(1)
+ },
+ keyId: dataKey
+ };
+ });
+
+ afterEach(() => teardown());
+
+ context('when expressionMode is incorrectly provided as an argument', function () {
+ it('overrides the provided option with the correct value for expression mode', async function () {
+ const optionsWithExpressionMode = { ...completeOptions, expressionMode: true };
+ const result = await clientEncryption.encrypt(
+ new mongodb.Long(0),
+ optionsWithExpressionMode
+ );
+
+ expect(result).to.be.instanceof(Binary);
+ });
+ });
+ });
+
+ describe('encryptExpression()', function () {
+ let clientEncryption;
+ let completeOptions;
+ let dataKey;
+ const expression = {
+ $and: [{ someField: { $gt: 1 } }]
+ };
+
+ beforeEach(async function () {
+ if (requirements.SKIP_LIVE_TESTS) {
+ this.currentTest.skipReason = `requirements.SKIP_LIVE_TESTS=${requirements.SKIP_LIVE_TESTS}`;
+ this.test.skip();
+ return;
+ }
+
+ await setup();
+ clientEncryption = new ClientEncryption(client, {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: { local: { key: Buffer.alloc(96) } }
+ });
+
+ dataKey = await clientEncryption.createDataKey('local', {
+ name: 'local',
+ kmsProviders: { local: { key: Buffer.alloc(96) } }
+ });
+
+ completeOptions = {
+ algorithm: 'RangePreview',
+ queryType: 'rangePreview',
+ contentionFactor: 0,
+ rangeOptions: {
+ min: new BSON.Int32(0),
+ max: new BSON.Int32(10),
+ sparsity: new BSON.Long(1)
+ },
+ keyId: dataKey
+ };
+ });
+
+ afterEach(() => teardown());
+
+ it('throws if rangeOptions is not provided', async function () {
+ expect(delete completeOptions.rangeOptions).to.be.true;
+ const errorOrResult = await clientEncryption
+ .encryptExpression(expression, completeOptions)
+ .catch(e => e);
+
+ expect(errorOrResult).to.be.instanceof(TypeError);
+ });
+
+ it('throws if algorithm is not provided', async function () {
+ expect(delete completeOptions.algorithm).to.be.true;
+ const errorOrResult = await clientEncryption
+ .encryptExpression(expression, completeOptions)
+ .catch(e => e);
+
+ expect(errorOrResult).to.be.instanceof(TypeError);
+ });
+
+ it(`throws if algorithm does not equal 'rangePreview'`, async function () {
+ completeOptions['algorithm'] = 'equality';
+ const errorOrResult = await clientEncryption
+ .encryptExpression(expression, completeOptions)
+ .catch(e => e);
+
+ expect(errorOrResult).to.be.instanceof(TypeError);
+ });
+
+ it(`does not throw if algorithm has different casing than 'rangePreview'`, async function () {
+ completeOptions['algorithm'] = 'rAnGePrEvIeW';
+ const errorOrResult = await clientEncryption
+ .encryptExpression(expression, completeOptions)
+ .catch(e => e);
+
+ expect(errorOrResult).not.to.be.instanceof(Error);
+ });
+
+ context('when expressionMode is incorrectly provided as an argument', function () {
+ it('overrides the provided option with the correct value for expression mode', async function () {
+ const optionsWithExpressionMode = { ...completeOptions, expressionMode: false };
+ const result = await clientEncryption.encryptExpression(
+ expression,
+ optionsWithExpressionMode
+ );
+
+ expect(result).not.to.be.instanceof(Binary);
+ });
+ });
+ });
+
+ it('should provide the libmongocrypt version', function () {
+ expect(ClientEncryption.libmongocryptVersion).to.be.a('string');
+ });
+
+ describe('createEncryptedCollection()', () => {
+ /** @type {InstanceType} */
+ let clientEncryption;
+ const client = new MockClient();
+ let db;
+ const collectionName = 'secure';
+
+ beforeEach(async function () {
+ clientEncryption = new ClientEncryption(client, {
+ keyVaultNamespace: 'client.encryption',
+ kmsProviders: { local: { key: Buffer.alloc(96, 0) } }
+ });
+
+ db = client.db('createEncryptedCollectionDb');
+ });
+
+ afterEach(async () => {
+ sinon.restore();
+ });
+
+ context('validates input', () => {
+ it('throws TypeError if options are omitted', async () => {
+ const error = await clientEncryption
+ .createEncryptedCollection(db, collectionName)
+ .catch(error => error);
+ expect(error).to.be.instanceOf(TypeError, /provider/);
+ });
+
+ it('throws TypeError if options.createCollectionOptions are omitted', async () => {
+ const error = await clientEncryption
+ .createEncryptedCollection(db, collectionName, {})
+ .catch(error => error);
+ expect(error).to.be.instanceOf(TypeError, /encryptedFields/);
+ });
+
+ it('throws TypeError if options.createCollectionOptions.encryptedFields are omitted', async () => {
+ const error = await clientEncryption
+ .createEncryptedCollection(db, collectionName, { createCollectionOptions: {} })
+ .catch(error => error);
+ expect(error).to.be.instanceOf(TypeError, /Cannot read properties/);
+ });
+ });
+
+ context('when options.encryptedFields.fields is not an array', () => {
+ it('does not generate any encryption keys', async () => {
+ const createCollectionSpy = sinon.spy(db, 'createCollection');
+ const createDataKeySpy = sinon.spy(clientEncryption, 'createDataKey');
+ await clientEncryption.createEncryptedCollection(db, collectionName, {
+ createCollectionOptions: { encryptedFields: { fields: 'not an array' } }
+ });
+
+ expect(createDataKeySpy.callCount).to.equal(0);
+ const options = createCollectionSpy.getCall(0).args[1];
+ expect(options).to.deep.equal({ encryptedFields: { fields: 'not an array' } });
+ });
+ });
+
+ context('when options.encryptedFields.fields elements are not objects', () => {
+ it('they are passed along to createCollection', async () => {
+ const createCollectionSpy = sinon.spy(db, 'createCollection');
+ const keyId = new Binary(Buffer.alloc(16, 0));
+ const createDataKeyStub = sinon.stub(clientEncryption, 'createDataKey').resolves(keyId);
+ await clientEncryption.createEncryptedCollection(db, collectionName, {
+ createCollectionOptions: {
+ encryptedFields: { fields: ['not an array', { keyId: null }, { keyId: {} }] }
+ }
+ });
+
+ expect(createDataKeyStub.callCount).to.equal(1);
+ const options = createCollectionSpy.getCall(0).args[1];
+ expect(options).to.deep.equal({
+ encryptedFields: { fields: ['not an array', { keyId: keyId }, { keyId: {} }] }
+ });
+ });
+ });
+
+ it('only passes options.masterKey to createDataKey', async () => {
+ const masterKey = Symbol('key');
+ const createDataKey = sinon
+ .stub(clientEncryption, 'createDataKey')
+ .resolves(new Binary(Buffer.alloc(16, 0)));
+ const result = await clientEncryption.createEncryptedCollection(db, collectionName, {
+ provider: 'aws',
+ createCollectionOptions: { encryptedFields: { fields: [{}] } },
+ masterKey
+ });
+ expect(result).to.have.property('collection');
+ expect(createDataKey).to.have.been.calledOnceWithExactly('aws', { masterKey });
+ });
+
+ context('when createDataKey rejects', () => {
+ const customErrorEvil = new Error('evil!');
+ const customErrorGood = new Error('good!');
+ const keyId = new Binary(Buffer.alloc(16, 0), 4);
+ const createCollectionOptions = {
+ encryptedFields: { fields: [{}, {}, { keyId: 'cool id!' }, {}] }
+ };
+ const createDataKeyRejection = async () => {
+ const stub = sinon.stub(clientEncryption, 'createDataKey');
+ stub.onCall(0).resolves(keyId);
+ stub.onCall(1).rejects(customErrorEvil);
+ stub.onCall(2).rejects(customErrorGood);
+ stub.onCall(4).resolves(keyId); // NOTE(review): fields has 4 entries (calls 0-3), so onCall(4) never fires — confirm onCall(3) was intended
+
+ const error = await clientEncryption
+ .createEncryptedCollection(db, collectionName, {
+ provider: 'local',
+ createCollectionOptions
+ })
+ .catch(error => error);
+
+ // At least make sure the function did not succeed
+ expect(error).to.be.instanceOf(Error);
+
+ return error;
+ };
+
+ it('throws MongoCryptCreateDataKeyError', async () => {
+ const error = await createDataKeyRejection();
+ expect(error).to.be.instanceOf(MongoCryptCreateDataKeyError);
+ });
+
+ it('thrown error has a cause set to the first error that was thrown from createDataKey', async () => {
+ const error = await createDataKeyRejection();
+ expect(error.cause).to.equal(customErrorEvil);
+ expect(error.message).to.include(customErrorEvil.message);
+ });
+
+ it('thrown error contains partially filled encryptedFields.fields', async () => {
+ const error = await createDataKeyRejection();
+ expect(error.encryptedFields).property('fields').that.is.an('array');
+ expect(error.encryptedFields.fields).to.have.lengthOf(
+ createCollectionOptions.encryptedFields.fields.length
+ );
+ expect(error.encryptedFields.fields).to.have.nested.property('[0].keyId', keyId);
+ expect(error.encryptedFields.fields).to.not.have.nested.property('[1].keyId');
+ expect(error.encryptedFields.fields).to.have.nested.property('[2].keyId', 'cool id!');
+ });
+ });
+
+ context('when createCollection rejects', () => {
+ const customError = new Error('evil!');
+ const keyId = new Binary(Buffer.alloc(16, 0), 4);
+ const createCollectionRejection = async () => {
+ const stubCreateDataKey = sinon.stub(clientEncryption, 'createDataKey');
+ stubCreateDataKey.onCall(0).resolves(keyId);
+ stubCreateDataKey.onCall(1).resolves(keyId);
+ stubCreateDataKey.onCall(2).resolves(keyId);
+
+ sinon.stub(db, 'createCollection').rejects(customError);
+
+ const createCollectionOptions = {
+ encryptedFields: { fields: [{}, {}, { keyId: 'cool id!' }] }
+ };
+ const error = await clientEncryption
+ .createEncryptedCollection(db, collectionName, {
+ provider: 'local',
+ createCollectionOptions
+ })
+ .catch(error => error);
+
+ // At least make sure the function did not succeed
+ expect(error).to.be.instanceOf(Error);
+
+ return error;
+ };
+
+ it('throws MongoCryptCreateEncryptedCollectionError', async () => {
+ const error = await createCollectionRejection();
+ expect(error).to.be.instanceOf(MongoCryptCreateEncryptedCollectionError);
+ });
+
+ it('thrown error has a cause set to the error that was thrown from createCollection', async () => {
+ const error = await createCollectionRejection();
+ expect(error.cause).to.equal(customError);
+ expect(error.message).to.include(customError.message);
+ });
+
+ it('thrown error contains filled encryptedFields.fields', async () => {
+ const error = await createCollectionRejection();
+ expect(error.encryptedFields).property('fields').that.is.an('array');
+ expect(error.encryptedFields.fields).to.have.nested.property('[0].keyId', keyId);
+ expect(error.encryptedFields.fields).to.have.nested.property('[1].keyId', keyId);
+ expect(error.encryptedFields.fields).to.have.nested.property('[2].keyId', 'cool id!');
+ });
+ });
+
+ context('when there are nullish keyIds in the encryptedFields.fields array', function () {
+ it('does not mutate the input fields array when generating data keys', async () => {
+ const encryptedFields = Object.freeze({
+ escCollection: 'esc',
+ eccCollection: 'ecc',
+ ecocCollection: 'ecoc',
+ fields: Object.freeze([
+ Object.freeze({ keyId: false }),
+ Object.freeze({
+ keyId: null,
+ path: 'name',
+ bsonType: 'int',
+ queries: Object.freeze({ contentionFactor: 0 })
+ }),
+ null
+ ])
+ });
+
+ const keyId = new Binary(Buffer.alloc(16, 0), 4);
+ sinon.stub(clientEncryption, 'createDataKey').resolves(keyId);
+
+ const { collection, encryptedFields: resultEncryptedFields } =
+ await clientEncryption.createEncryptedCollection(db, collectionName, {
+ provider: 'local',
+ createCollectionOptions: {
+ encryptedFields
+ }
+ });
+
+ expect(collection).to.have.property('namespace', 'createEncryptedCollectionDb.secure');
+ expect(encryptedFields, 'original encryptedFields should be unmodified').nested.property(
+ 'fields[0].keyId',
+ false
+ );
+ expect(
+ resultEncryptedFields,
+ 'encryptedFields created by helper should have replaced nullish keyId'
+ ).nested.property('fields[1].keyId', keyId);
+ expect(encryptedFields, 'original encryptedFields should be unmodified').nested.property(
+ 'fields[2]',
+ null
+ );
+ });
+
+ it('generates dataKeys for all null keyIds in the fields array', async () => {
+ const encryptedFields = Object.freeze({
+ escCollection: 'esc',
+ eccCollection: 'ecc',
+ ecocCollection: 'ecoc',
+ fields: Object.freeze([
+ Object.freeze({ keyId: null }),
+ Object.freeze({ keyId: null }),
+ Object.freeze({ keyId: null })
+ ])
+ });
+
+ const keyId = new Binary(Buffer.alloc(16, 0), 4);
+ sinon.stub(clientEncryption, 'createDataKey').resolves(keyId);
+
+ const { collection, encryptedFields: resultEncryptedFields } =
+ await clientEncryption.createEncryptedCollection(db, collectionName, {
+ provider: 'local',
+ createCollectionOptions: {
+ encryptedFields
+ }
+ });
+
+ expect(collection).to.have.property('namespace', 'createEncryptedCollectionDb.secure');
+ expect(resultEncryptedFields.fields).to.have.lengthOf(3);
+ expect(resultEncryptedFields.fields.filter(({ keyId }) => keyId === null)).to.have.lengthOf(
+ 0
+ );
+ });
+ });
+ });
+});
diff --git a/encryption/test/common.test.js b/encryption/test/common.test.js
new file mode 100644
index 00000000000..7b1b7dadd67
--- /dev/null
+++ b/encryption/test/common.test.js
@@ -0,0 +1,94 @@
+'use strict';
+
+const { expect } = require('chai');
+const maybeCallback = require('../lib/common').maybeCallback;
+
+describe('maybeCallback()', () => {
+ it('should accept two arguments', () => {
+ expect(maybeCallback).to.have.lengthOf(2);
+ });
+
+ describe('when handling an error case', () => {
+ it('should pass the error to the callback provided', done => {
+ const superPromiseRejection = Promise.reject(new Error('fail'));
+ const result = maybeCallback(
+ () => superPromiseRejection,
+ (error, result) => {
+ try {
+ expect(result).to.not.exist;
+ expect(error).to.be.instanceOf(Error);
+ return done();
+ } catch (assertionError) {
+ return done(assertionError);
+ }
+ }
+ );
+ expect(result).to.be.undefined;
+ });
+
+ it('should return the rejected promise to the caller when no callback is provided', async () => {
+ const superPromiseRejection = Promise.reject(new Error('fail'));
+ const returnedPromise = maybeCallback(() => superPromiseRejection, undefined);
+ expect(returnedPromise).to.equal(superPromiseRejection);
+ // @ts-expect-error: There is no overload to change the return type not be nullish,
+ // and we do not want to add one in fear of making it too easy to neglect adding the callback argument
+ const thrownError = await returnedPromise.catch(error => error);
+ expect(thrownError).to.be.instanceOf(Error);
+ });
+
+ it('should not modify a rejection error promise', async () => {
+ class MyError extends Error {}
+ const driverError = Object.freeze(new MyError());
+ const rejection = Promise.reject(driverError);
+ // @ts-expect-error: There is no overload to change the return type not be nullish,
+ // and we do not want to add one in fear of making it too easy to neglect adding the callback argument
+ const thrownError = await maybeCallback(() => rejection, undefined).catch(error => error);
+ expect(thrownError).to.be.equal(driverError);
+ });
+
+ it('should not modify a rejection error when passed to callback', done => {
+ class MyError extends Error {}
+ const driverError = Object.freeze(new MyError());
+ const rejection = Promise.reject(driverError);
+ maybeCallback(
+ () => rejection,
+ error => {
+ try {
+ expect(error).to.exist;
+ expect(error).to.equal(driverError);
+ done();
+ } catch (assertionError) {
+ done(assertionError);
+ }
+ }
+ );
+ });
+ });
+
+ describe('when handling a success case', () => {
+ it('should pass the result and undefined error to the callback provided', done => {
+ const superPromiseSuccess = Promise.resolve(2);
+
+ const result = maybeCallback(
+ () => superPromiseSuccess,
+ (error, result) => {
+ try {
+ expect(error).to.be.undefined;
+ expect(result).to.equal(2);
+ done();
+ } catch (assertionError) {
+ done(assertionError);
+ }
+ }
+ );
+ expect(result).to.be.undefined;
+ });
+
+ it('should return the resolved promise to the caller when no callback is provided', async () => {
+ const superPromiseSuccess = Promise.resolve(2);
+ const result = maybeCallback(() => superPromiseSuccess);
+ expect(result).to.equal(superPromiseSuccess);
+ expect(await result).to.equal(2);
+ });
+ });
+});
diff --git a/encryption/test/cryptoCallbacks.test.js b/encryption/test/cryptoCallbacks.test.js
new file mode 100644
index 00000000000..941dfbaf967
--- /dev/null
+++ b/encryption/test/cryptoCallbacks.test.js
@@ -0,0 +1,240 @@
+'use strict';
+
+const sinon = require('sinon');
+const { expect } = require('chai');
+const mongodb = require('mongodb');
+const MongoClient = mongodb.MongoClient;
+const stateMachine = require('../lib/stateMachine')({ mongodb });
+const cryptoCallbacks = require('../lib/cryptoCallbacks');
+const ClientEncryption = require('../lib/clientEncryption')({
+ mongodb,
+ stateMachine
+}).ClientEncryption;
+
+const requirements = require('./requirements.helper');
+
+// Data Key Stuff
+const kmsProviders = Object.assign({}, requirements.awsKmsProviders);
+const dataKeyOptions = Object.assign({}, requirements.awsDataKeyOptions);
+
+describe('cryptoCallbacks', function () {
+ before(function () {
+ if (requirements.SKIP_AWS_TESTS) {
+ console.error('Skipping crypto callback tests');
+ return;
+ }
+ this.sinon = sinon.createSandbox();
+ });
+
+ beforeEach(function () {
+ if (requirements.SKIP_AWS_TESTS) {
+ this.currentTest.skipReason = `requirements.SKIP_AWS_TESTS=${requirements.SKIP_AWS_TESTS}`;
+ this.test.skip();
+ return;
+ }
+ this.sinon.restore();
+ this.client = new MongoClient('mongodb://localhost:27017/', {
+ useUnifiedTopology: true,
+ useNewUrlParser: true
+ });
+
+ return this.client.connect();
+ });
+
+ afterEach(function () {
+ if (requirements.SKIP_AWS_TESTS) {
+ return;
+ }
+ this.sinon.restore();
+ let p = Promise.resolve();
+ if (this.client) {
+ p = p.then(() => this.client.close()).then(() => (this.client = undefined));
+ }
+
+ return p;
+ });
+
+ after(function () {
+ this.sinon = undefined;
+ });
+
+ // TODO(NODE-3370): fix key formatting error "asn1_check_tlen:wrong tag"
+ it.skip('should support crypto callback for signing RSA-SHA256', function () {
+ const input = Buffer.from('data to sign');
+ const pemFileData =
+ '-----BEGIN PRIVATE KEY-----\n' +
+ 'MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC4JOyv5z05cL18ztpknRC7CFY2gYol4DAKerdVUoDJxCTmFMf39dVUEqD0WDiw/qcRtSO1/FRut08PlSPmvbyKetsLoxlpS8lukSzEFpFK7+L+R4miFOl6HvECyg7lbC1H/WGAhIz9yZRlXhRo9qmO/fB6PV9IeYtU+1xYuXicjCDPp36uuxBAnCz7JfvxJ3mdVc0vpSkbSb141nWuKNYR1mgyvvL6KzxO6mYsCo4hRAdhuizD9C4jDHk0V2gDCFBk0h8SLEdzStX8L0jG90/Og4y7J1b/cPo/kbYokkYisxe8cPlsvGBf+rZex7XPxc1yWaP080qeABJb+S88O//LAgMBAAECggEBAKVxP1m3FzHBUe2NZ3fYCc0Qa2zjK7xl1KPFp2u4CU+9sy0oZJUqQHUdm5CMprqWwIHPTftWboFenmCwrSXFOFzujljBO7Z3yc1WD3NJl1ZNepLcsRJ3WWFH5V+NLJ8Bdxlj1DMEZCwr7PC5+vpnCuYWzvT0qOPTl9RNVaW9VVjHouJ9Fg+s2DrShXDegFabl1iZEDdI4xScHoYBob06A5lw0WOCTayzw0Naf37lM8Y4psRAmI46XLiF/Vbuorna4hcChxDePlNLEfMipICcuxTcei1RBSlBa2t1tcnvoTy6cuYDqqImRYjp1KnMKlKQBnQ1NjS2TsRGm+F0FbreVCECgYEA4IDJlm8q/hVyNcPe4OzIcL1rsdYN3bNm2Y2O/YtRPIkQ446ItyxD06d9VuXsQpFp9jNACAPfCMSyHpPApqlxdc8z/xATlgHkcGezEOd1r4E7NdTpGg8y6Rj9b8kVlED6v4grbRhKcU6moyKUQT3+1B6ENZTOKyxuyDEgTwZHtFECgYEA0fqdv9h9s77d6eWmIioP7FSymq93pC4umxf6TVicpjpMErdD2ZfJGulN37dq8FOsOFnSmFYJdICj/PbJm6p1i8O21lsFCltEqVoVabJ7/0alPfdG2U76OeBqI8ZubL4BMnWXAB/VVEYbyWCNpQSDTjHQYs54qa2I0dJB7OgJt1sCgYEArctFQ02/7H5Rscl1yo3DBXO94SeiCFSPdC8f2Kt3MfOxvVdkAtkjkMACSbkoUsgbTVqTYSEOEc2jTgR3iQ13JgpHaFbbsq64V0QP3TAxbLIQUjYGVgQaF1UfLOBv8hrzgj45z/ST/G80lOl595+0nCUbmBcgG1AEWrmdF0/3RmECgYAKvIzKXXB3+19vcT2ga5Qq2l3TiPtOGsppRb2XrNs9qKdxIYvHmXo/9QP1V3SRW0XoD7ez8FpFabp42cmPOxUNk3FK3paQZABLxH5pzCWI9PzIAVfPDrm+sdnbgG7vAnwfL2IMMJSA3aDYGCbF9EgefG+STcpfqq7fQ6f5TBgLFwKBgCd7gn1xYL696SaKVSm7VngpXlczHVEpz3kStWR5gfzriPBxXgMVcWmcbajRser7ARpCEfbxM1UJyv6oAYZWVSNErNzNVb4POqLYcCNySuC6xKhs9FrEQnyKjyk8wI4VnrEMGrQ8e+qYSwYk9Gh6dKGoRMAPYVXQAO0fIsHF/T0a\n' +
+ '-----END PRIVATE KEY-----';
+ const key = Buffer.from(pemFileData);
+ const output = Buffer.alloc(256);
+ const expectedOutput = Buffer.from(
+ 'VocBRhpMmQ2XCzVehWSqheQLnU889gf3dhU4AnVnQTJjsKx/CM23qKDPkZDd2A/BnQsp99SN7ksIX5Raj0TPwyN5OCN/YrNFNGoOFlTsGhgP/hyE8X3Duiq6sNO0SMvRYNPFFGlJFsp1Fw3Z94eYMg4/Wpw5s4+Jo5Zm/qY7aTJIqDKDQ3CNHLeJgcMUOc9sz01/GzoUYKDVODHSxrYEk5ireFJFz9vP8P7Ha+VDUZuQIQdXer9NBbGFtYmWprY3nn4D3Dw93Sn0V0dIqYeIo91oKyslvMebmUM95S2PyIJdEpPb2DJDxjvX/0LLwSWlSXRWy9gapWoBkb4ynqZBsg==',
+ 'base64'
+ );
+
+ const { signRsaSha256Hook } = cryptoCallbacks;
+ const err = signRsaSha256Hook(key, input, output);
+ if (err instanceof Error) {
+ expect(err).to.not.exist;
+ }
+
+ expect(output).to.deep.equal(expectedOutput);
+ }).skipReason = 'TODO(NODE-3370): fix key formatting error "asn1_check_tlen:wrong tag"';
+
+ const hookNames = new Set([
+ 'aes256CbcEncryptHook',
+ 'aes256CbcDecryptHook',
+ 'randomHook',
+ 'hmacSha512Hook',
+ 'hmacSha256Hook',
+ 'sha256Hook'
+ ]);
+
+ it('should invoke crypto callbacks when doing encryption', function (done) {
+ for (const name of hookNames) {
+ this.sinon.spy(cryptoCallbacks, name);
+ }
+
+ function assertCertainHooksCalled(expectedSet) {
+ expectedSet = expectedSet || new Set([]);
+ for (const name of hookNames) {
+ const hook = cryptoCallbacks[name];
+ if (expectedSet.has(name)) {
+ expect(hook).to.have.been.called;
+ } else {
+ expect(hook).to.not.have.been.called;
+ }
+
+ hook.resetHistory();
+ }
+ }
+
+ const encryption = new ClientEncryption(this.client, {
+ keyVaultNamespace: 'test.encryption',
+ kmsProviders
+ });
+
+ try {
+ assertCertainHooksCalled();
+ } catch (e) {
+ return done(e);
+ }
+
+ encryption.createDataKey('aws', dataKeyOptions, (err, dataKey) => {
+ try {
+ expect(err).to.not.exist;
+ assertCertainHooksCalled(new Set(['hmacSha256Hook', 'sha256Hook', 'randomHook']));
+ } catch (e) {
+ return done(e);
+ }
+
+ const encryptOptions = {
+ keyId: dataKey,
+ algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'
+ };
+
+ encryption.encrypt('hello', encryptOptions, (err, encryptedValue) => {
+ try {
+ expect(err).to.not.exist;
+ assertCertainHooksCalled(
+ new Set(['aes256CbcEncryptHook', 'hmacSha512Hook', 'hmacSha256Hook', 'sha256Hook'])
+ );
+ } catch (e) {
+ return done(e);
+ }
+ encryption.decrypt(encryptedValue, err => {
+ try {
+ expect(err).to.not.exist;
+ assertCertainHooksCalled(new Set(['aes256CbcDecryptHook', 'hmacSha512Hook']));
+ } catch (e) {
+ return done(e);
+ }
+ done();
+ });
+ });
+ });
+ });
+
+ describe('error testing', function () {
+ ['aes256CbcEncryptHook', 'aes256CbcDecryptHook', 'hmacSha512Hook'].forEach(hookName => {
+ it(`should properly propagate an error when ${hookName} fails`, function (done) {
+ const error = new Error('some random error text');
+ this.sinon.stub(cryptoCallbacks, hookName).returns(error);
+
+ const encryption = new ClientEncryption(this.client, {
+ keyVaultNamespace: 'test.encryption',
+ kmsProviders
+ });
+
+ function finish(err) {
+ try {
+ expect(err, 'Expected an error to exist').to.exist;
+ expect(err).to.have.property('message', error.message);
+ done();
+ } catch (e) {
+ done(e);
+ }
+ }
+
+ try {
+ encryption.createDataKey('aws', dataKeyOptions, (err, dataKey) => {
+ if (err) return finish(err);
+
+ const encryptOptions = {
+ keyId: dataKey,
+ algorithm: 'AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic'
+ };
+
+ encryption.encrypt('hello', encryptOptions, (err, encryptedValue) => {
+ if (err) return finish(err);
+ encryption.decrypt(encryptedValue, err => finish(err));
+ });
+ });
+ } catch (e) {
+ done(new Error('We should not be here'));
+ }
+ });
+ });
+
+ // These ones will fail with an error, but that error will get overridden
+ // with "failed to create KMS message" in mongocrypt-kms-ctx.c
+ ['hmacSha256Hook', 'sha256Hook'].forEach(hookName => {
+ it(`should error with a specific kms error when ${hookName} fails`, function () {
+ const error = new Error('some random error text');
+ this.sinon.stub(cryptoCallbacks, hookName).returns(error);
+
+ const encryption = new ClientEncryption(this.client, {
+ keyVaultNamespace: 'test.encryption',
+ kmsProviders
+ });
+
+ expect(() => encryption.createDataKey('aws', dataKeyOptions, () => undefined)).to.throw(
+ 'failed to create KMS message'
+ );
+ });
+ });
+
+ it('should error synchronously with error when randomHook fails', function (done) {
+ const error = new Error('some random error text');
+ this.sinon.stub(cryptoCallbacks, 'randomHook').returns(error);
+
+ const encryption = new ClientEncryption(this.client, {
+ keyVaultNamespace: 'test.encryption',
+ kmsProviders
+ });
+
+ try {
+ encryption.createDataKey('aws', dataKeyOptions, () => {
+ done(new Error('We should not be here'));
+ });
+ } catch (err) {
+ try {
+ expect(err).to.have.property('message', 'some random error text');
+ done();
+ } catch (e) {
+ done(e);
+ }
+ }
+ });
+ });
+});
diff --git a/encryption/test/data/README.md b/encryption/test/data/README.md
new file mode 100644
index 00000000000..c7f47ca8b40
--- /dev/null
+++ b/encryption/test/data/README.md
@@ -0,0 +1,5 @@
+# libmongocrypt example data #
+
+This directory contains a simple example of mocked responses to test libmongocrypt and driver wrappers. Data for other scenarios and edge cases is in the `data` directory.
+
+The HTTP reply file, kms-decrypt-reply.txt, has regular newline endings \n that MUST be replaced by \r\n endings when reading the file for testing.
\ No newline at end of file
diff --git a/encryption/test/data/cmd.json b/encryption/test/data/cmd.json
new file mode 100644
index 00000000000..0fa3fee5562
--- /dev/null
+++ b/encryption/test/data/cmd.json
@@ -0,0 +1,6 @@
+{
+ "find": "test",
+ "filter": {
+ "ssn": "457-55-5462"
+ }
+}
diff --git a/encryption/test/data/collection-info.json b/encryption/test/data/collection-info.json
new file mode 100644
index 00000000000..063e32ba160
--- /dev/null
+++ b/encryption/test/data/collection-info.json
@@ -0,0 +1,37 @@
+{
+ "type": "collection",
+ "name": "test",
+ "idIndex": {
+ "ns": "test.test",
+ "name": "_id_",
+ "key": {
+ "_id": {
+ "$numberInt": "1"
+ }
+ },
+ "v": {
+ "$numberInt": "2"
+ }
+ },
+ "options": {
+ "validator": {
+ "$jsonSchema": {
+ "properties": {
+ "ssn": {
+ "encrypt": {
+ "keyId": {
+ "$binary": {
+ "base64": "YWFhYWFhYWFhYWFhYWFhYQ==",
+ "subType": "04"
+ }
+ },
+ "type": "string",
+ "algorithm": "AEAD_AES_256_CBC_HMAC_SHA_512-Deterministic"
+ }
+ }
+ },
+ "bsonType": "object"
+ }
+ }
+ }
+}
diff --git a/encryption/test/data/encrypted-document-nested.json b/encryption/test/data/encrypted-document-nested.json
new file mode 100644
index 00000000000..bd02d07b020
--- /dev/null
+++ b/encryption/test/data/encrypted-document-nested.json
@@ -0,0 +1,8 @@
+{
+ "nested": {
+ "$binary": {
+ "base64": "AmFhYWFhYWFhYWFhYWFhYWEDW89+etsVGIufAfsiEwR62ce6+lry79sJJBUyJ6hH89wBkhpRkzFLz26Nu6jXQRe8ESYAF5cAa5wg9rsq95IBHuaIaLEQLW2jLjGo1fp69jg=",
+ "subType": "06"
+ }
+ }
+}
diff --git a/encryption/test/data/encrypted-document.json b/encryption/test/data/encrypted-document.json
new file mode 100644
index 00000000000..98b2c5b101c
--- /dev/null
+++ b/encryption/test/data/encrypted-document.json
@@ -0,0 +1,11 @@
+{
+ "filter": {
+ "find": "test",
+ "ssn": {
+ "$binary": {
+ "base64": "AWFhYWFhYWFhYWFhYWFhYWECRTOW9yZzNDn5dGwuqsrJQNLtgMEKaujhs9aRWRp+7Yo3JK8N8jC8P0Xjll6C1CwLsE/iP5wjOMhVv1KMMyOCSCrHorXRsb2IKPtzl2lKTqQ=",
+ "subType": "06"
+ }
+ }
+ }
+}
diff --git a/encryption/test/data/encryptedFields.json b/encryption/test/data/encryptedFields.json
new file mode 100644
index 00000000000..07a29ede1ee
--- /dev/null
+++ b/encryption/test/data/encryptedFields.json
@@ -0,0 +1,30 @@
+{
+ "fields": [
+ {
+ "keyId": {
+ "$binary": {
+ "base64": "EjRWeBI0mHYSNBI0VniQEg==",
+ "subType": "04"
+ }
+ },
+ "path": "encryptedIndexed",
+ "bsonType": "string",
+ "queries": {
+ "queryType": "equality",
+ "contention": {
+ "$numberLong": "0"
+ }
+ }
+ },
+ {
+ "keyId": {
+ "$binary": {
+ "base64": "q83vqxI0mHYSNBI0VniQEg==",
+ "subType": "04"
+ }
+ },
+ "path": "encryptedUnindexed",
+ "bsonType": "string"
+ }
+ ]
+}
diff --git a/encryption/test/data/key-document.json b/encryption/test/data/key-document.json
new file mode 100644
index 00000000000..647963f1a61
--- /dev/null
+++ b/encryption/test/data/key-document.json
@@ -0,0 +1,32 @@
+{
+ "status": {
+ "$numberInt": "1"
+ },
+ "_id": {
+ "$binary": {
+ "base64": "YWFhYWFhYWFhYWFhYWFhYQ==",
+ "subType": "04"
+ }
+ },
+ "masterKey": {
+ "region": "us-east-1",
+ "key": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0",
+ "provider": "aws"
+ },
+ "updateDate": {
+ "$date": {
+ "$numberLong": "1557827033449"
+ }
+ },
+ "keyMaterial": {
+ "$binary": {
+ "base64": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gEqnsxXlR51T5EbEVezUqqKAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDHa4jo6yp0Z18KgbUgIBEIB74sKxWtV8/YHje5lv5THTl0HIbhSwM6EqRlmBiFFatmEWaeMk4tO4xBX65eq670I5TWPSLMzpp8ncGHMmvHqRajNBnmFtbYxN3E3/WjxmdbOOe+OXpnGJPcGsftc7cB2shRfA4lICPnE26+oVNXT6p0Lo20nY5XC7jyCO",
+ "subType": "00"
+ }
+ },
+ "creationDate": {
+ "$date": {
+ "$numberLong": "1557827033449"
+ }
+ }
+}
diff --git a/encryption/test/data/key1-document.json b/encryption/test/data/key1-document.json
new file mode 100644
index 00000000000..566b56c354f
--- /dev/null
+++ b/encryption/test/data/key1-document.json
@@ -0,0 +1,30 @@
+{
+ "_id": {
+ "$binary": {
+ "base64": "EjRWeBI0mHYSNBI0VniQEg==",
+ "subType": "04"
+ }
+ },
+ "keyMaterial": {
+ "$binary": {
+ "base64": "sHe0kz57YW7v8g9VP9sf/+K1ex4JqKc5rf/URX3n3p8XdZ6+15uXPaSayC6adWbNxkFskuMCOifDoTT+rkqMtFkDclOy884RuGGtUysq3X7zkAWYTKi8QAfKkajvVbZl2y23UqgVasdQu3OVBQCrH/xY00nNAs/52e958nVjBuzQkSb1T8pKJAyjZsHJ60+FtnfafDZSTAIBJYn7UWBCwQ==",
+ "subType": "00"
+ }
+ },
+ "creationDate": {
+ "$date": {
+ "$numberLong": "1648914851981"
+ }
+ },
+ "updateDate": {
+ "$date": {
+ "$numberLong": "1648914851981"
+ }
+ },
+ "status": {
+ "$numberInt": "0"
+ },
+ "masterKey": {
+ "provider": "local"
+ }
+}
diff --git a/encryption/test/data/kms-decrypt-reply.txt b/encryption/test/data/kms-decrypt-reply.txt
new file mode 100644
index 00000000000..c2c52e38413
--- /dev/null
+++ b/encryption/test/data/kms-decrypt-reply.txt
@@ -0,0 +1,6 @@
+HTTP/1.1 200 OK
+x-amzn-RequestId: deeb35e5-4ecb-4bf1-9af5-84a54ff0af0e
+Content-Type: application/x-amz-json-1.1
+Content-Length: 233
+
+{"KeyId": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", "Plaintext": "TqhXy3tKckECjy4/ZNykMWG8amBF46isVPzeOgeusKrwheBmYaU8TMG5AHR/NeUDKukqo8hBGgogiQOVpLPkqBQHD8YkLsNbDmHoGOill5QAHnniF/Lz405bGucB5TfR"}
\ No newline at end of file
diff --git a/encryption/test/data/kms-encrypt-reply.txt b/encryption/test/data/kms-encrypt-reply.txt
new file mode 100644
index 00000000000..8b5ab33ca4e
--- /dev/null
+++ b/encryption/test/data/kms-encrypt-reply.txt
@@ -0,0 +1,7 @@
+HTTP/1.1 200 OK
+x-amzn-RequestId: deeb35e5-4ecb-4bf1-9af5-84a54ff0af0e
+Content-Type: application/x-amz-json-1.1
+Content-Length: 446
+Connection: close
+
+{"KeyId": "arn:aws:kms:us-east-1:579766882180:key/89fcc2c4-08b0-4bd9-9f25-e30687b580d0", "CiphertextBlob": "AQICAHhQNmWG2CzOm1dq3kWLM+iDUZhEqnhJwH9wZVpuZ94A8gHCPOT4UQIpMTvAVABLqnXlAAAAwjCBvwYJKoZIhvcNAQcGoIGxMIGuAgEAMIGoBgkqhkiG9w0BBwEwHgYJYIZIAWUDBAEuMBEEDLxAm0nO3rccdoWA6AIBEIB7HUe6+aPvgNu/4sLEXBQVDIJVBueI3q7zdOMBSkRKkgZWqEuQgA6iDuEZbhHhOVCUXPBaLX6QWRwyMmjvIy/2Bg5q+TmwnfRo6QKdw2vee1W32/FdPWIoQy1yKOoIhNy6XMWldS3JuWK8ffQOYkssEqx0V4LW6PKuFv7D"}
\ No newline at end of file
diff --git a/encryption/test/data/mongocryptd-reply.json b/encryption/test/data/mongocryptd-reply.json
new file mode 100644
index 00000000000..9d03814974b
--- /dev/null
+++ b/encryption/test/data/mongocryptd-reply.json
@@ -0,0 +1,18 @@
+{
+ "schemaRequiresEncryption": true,
+ "ok": {
+ "$numberInt": "1"
+ },
+ "result": {
+ "filter": {
+ "ssn": {
+ "$binary": {
+ "base64": "ADgAAAAQYQABAAAABWtpABAAAAAEYWFhYWFhYWFhYWFhYWFhYQJ2AAwAAAA0NTctNTUtNTQ2MgAA",
+ "subType": "06"
+ }
+ }
+ },
+ "find": "test"
+ },
+ "hasEncryptedPlaceholders": true
+}
diff --git a/encryption/test/index.test.js b/encryption/test/index.test.js
new file mode 100644
index 00000000000..188a236e3e0
--- /dev/null
+++ b/encryption/test/index.test.js
@@ -0,0 +1,45 @@
+'use strict';
+
+const { expect } = require('chai');
+const mongodbClientEncryption = require('../lib/index');
+const { fetchAzureKMSToken } = require('../lib/providers');
+
+// Update this list as you add exports; it helps double-check that we don't
+// accidentally remove something, since not all tests import from the root public export
+const EXPECTED_EXPORTS = [
+ 'extension',
+ 'MongoCryptError',
+ 'MongoCryptCreateEncryptedCollectionError',
+ 'MongoCryptCreateDataKeyError',
+ 'MongoCryptAzureKMSRequestError',
+ 'MongoCryptKMSRequestNetworkTimeoutError',
+ 'AutoEncrypter',
+ 'ClientEncryption'
+];
+
+describe('mongodb-client-encryption entrypoint', () => {
+ it('should export all and only the expected keys in expected_exports', () => {
+ expect(mongodbClientEncryption).to.have.all.keys(EXPECTED_EXPORTS);
+ });
+
+ it('extension returns an object equal in shape to the default except for extension', () => {
+ const extensionResult = mongodbClientEncryption.extension(require('mongodb'));
+ const expectedExports = EXPECTED_EXPORTS.filter(exp => exp !== 'extension');
+ const exportsDefault = Object.keys(mongodbClientEncryption).filter(exp => exp !== 'extension');
+ expect(extensionResult).to.have.all.keys(expectedExports);
+ expect(extensionResult).to.have.all.keys(exportsDefault);
+ });
+
+ context('exports for driver testing', () => {
+ it('exports `fetchAzureKMSToken` in a symbol property', () => {
+ expect(mongodbClientEncryption).to.have.property(
+ '___azureKMSProseTestExports',
+ fetchAzureKMSToken
+ );
+ });
+ it('extension exports `fetchAzureKMSToken` in a symbol property', () => {
+ const extensionResult = mongodbClientEncryption.extension(require('mongodb'));
+ expect(extensionResult).to.have.property('___azureKMSProseTestExports', fetchAzureKMSToken);
+ });
+ });
+});
diff --git a/encryption/test/mongocryptdManager.test.js b/encryption/test/mongocryptdManager.test.js
new file mode 100644
index 00000000000..17f86879286
--- /dev/null
+++ b/encryption/test/mongocryptdManager.test.js
@@ -0,0 +1,48 @@
+'use strict';
+
+const MongocryptdManager = require('../lib/mongocryptdManager').MongocryptdManager;
+const { expect } = require('chai');
+
+describe('MongocryptdManager', function () {
+ it('should default to having spawnArgs of --idleShutdownTimeoutSecs=60', function () {
+ const mcdm = new MongocryptdManager();
+ expect(mcdm.spawnArgs).to.deep.equal(['--idleShutdownTimeoutSecs', 60]);
+ });
+
+ it('should concat --idleShutdownTimeoutSecs=60 to provided args', function () {
+ const mcdm = new MongocryptdManager({ mongocryptdSpawnArgs: ['foo', 12] });
+ expect(mcdm.spawnArgs).to.deep.equal(['foo', 12, '--idleShutdownTimeoutSecs', 60]);
+ });
+
+ it('should not override `idleShutdownTimeoutSecs` if the user sets it using `key value` form', function () {
+ const mcdm = new MongocryptdManager({
+ mongocryptdSpawnArgs: ['--idleShutdownTimeoutSecs', 12]
+ });
+
+ expect(mcdm.spawnArgs).to.deep.equal(['--idleShutdownTimeoutSecs', 12]);
+ });
+
+ it('should not override `idleShutdownTimeoutSecs` if the user sets it using `key=value` form', function () {
+ const mcdm = new MongocryptdManager({
+ mongocryptdSpawnArgs: ['--idleShutdownTimeoutSecs=12']
+ });
+
+ expect(mcdm.spawnArgs).to.deep.equal(['--idleShutdownTimeoutSecs=12']);
+ });
+
+ it('should support construction with options', function () {
+ const mcdm = new MongocryptdManager({
+ mongocryptdURI: 'some-uri',
+ mongocryptdBypassSpawn: true,
+ mongocryptdSpawnPath: 'some-spawn-path',
+ mongocryptdSpawnArgs: ['--idleShutdownTimeoutSecs=12']
+ });
+
+ expect(mcdm).to.eql({
+ uri: 'some-uri',
+ bypassSpawn: true,
+ spawnPath: 'some-spawn-path',
+ spawnArgs: ['--idleShutdownTimeoutSecs=12']
+ });
+ });
+});
diff --git a/encryption/test/providers/credentialsProvider.test.js b/encryption/test/providers/credentialsProvider.test.js
new file mode 100644
index 00000000000..abd15d15445
--- /dev/null
+++ b/encryption/test/providers/credentialsProvider.test.js
@@ -0,0 +1,555 @@
+'use strict';
+
+const { expect } = require('chai');
+const http = require('http');
+const requirements = require('../requirements.helper');
+const { loadCredentials, isEmptyCredentials } = require('../../lib/providers');
+const { tokenCache, fetchAzureKMSToken } = require('../../lib/providers/azure');
+const sinon = require('sinon');
+const utils = require('../../lib/providers/utils');
+const {
+ MongoCryptKMSRequestNetworkTimeoutError,
+ MongoCryptAzureKMSRequestError
+} = require('../../lib/errors');
+
+const originalAccessKeyId = process.env.AWS_ACCESS_KEY_ID;
+const originalSecretAccessKey = process.env.AWS_SECRET_ACCESS_KEY;
+const originalSessionToken = process.env.AWS_SESSION_TOKEN;
+
+describe('#loadCredentials', function () {
+ context('isEmptyCredentials()', () => {
+ it('returns true for an empty object', () => {
+ expect(isEmptyCredentials('rainyCloud', { rainyCloud: {} })).to.be.true;
+ });
+
+ it('returns false for an object with keys', () => {
+ expect(isEmptyCredentials('rainyCloud', { rainyCloud: { password: 'secret' } })).to.be.false;
+ });
+
+ it('returns false for nullish credentials', () => {
+ expect(isEmptyCredentials('rainyCloud', { rainyCloud: null })).to.be.false;
+ expect(isEmptyCredentials('rainyCloud', { rainyCloud: undefined })).to.be.false;
+ expect(isEmptyCredentials('rainyCloud', {})).to.be.false;
+ });
+
+ it('returns false for non object credentials', () => {
+ expect(isEmptyCredentials('rainyCloud', { rainyCloud: 0 })).to.be.false;
+ expect(isEmptyCredentials('rainyCloud', { rainyCloud: false })).to.be.false;
+ expect(isEmptyCredentials('rainyCloud', { rainyCloud: Symbol('secret') })).to.be.false;
+ });
+ });
+
+ context('when using aws', () => {
+ const accessKey = 'example';
+ const secretKey = 'example';
+ const sessionToken = 'example';
+
+ after(function () {
+ // After the entire suite runs, set the env back for the rest of the test run.
+ process.env.AWS_ACCESS_KEY_ID = originalAccessKeyId;
+ process.env.AWS_SECRET_ACCESS_KEY = originalSecretAccessKey;
+ process.env.AWS_SESSION_TOKEN = originalSessionToken;
+ });
+
+ context('when the credential provider finds credentials', function () {
+ before(function () {
+ process.env.AWS_ACCESS_KEY_ID = accessKey;
+ process.env.AWS_SECRET_ACCESS_KEY = secretKey;
+ process.env.AWS_SESSION_TOKEN = sessionToken;
+ });
+
+ context('when the credentials are empty', function () {
+ const kmsProviders = { aws: {} };
+
+ before(function () {
+ if (!requirements.credentialProvidersInstalled.aws) {
+ this.currentTest.skipReason = 'Cannot refresh credentials without sdk provider';
+ this.currentTest.skip();
+ return;
+ }
+ });
+
+ it('refreshes the aws credentials', async function () {
+ const providers = await loadCredentials(kmsProviders);
+ expect(providers).to.deep.equal({
+ aws: {
+ accessKeyId: accessKey,
+ secretAccessKey: secretKey,
+ sessionToken: sessionToken
+ }
+ });
+ });
+ });
+
+ context('when the credentials are not empty', function () {
+ context('when aws is empty', function () {
+ const kmsProviders = {
+ local: {
+ key: Buffer.alloc(96)
+ },
+ aws: {}
+ };
+
+ before(function () {
+ if (!requirements.credentialProvidersInstalled.aws) {
+ this.currentTest.skipReason = 'Cannot refresh credentials without sdk provider';
+ this.currentTest.skip();
+ return;
+ }
+ });
+
+ it('refreshes only the aws credentials', async function () {
+ const providers = await loadCredentials(kmsProviders);
+ expect(providers).to.deep.equal({
+ local: {
+ key: Buffer.alloc(96)
+ },
+ aws: {
+ accessKeyId: accessKey,
+ secretAccessKey: secretKey,
+ sessionToken: sessionToken
+ }
+ });
+ });
+ });
+
+ context('when aws is not empty', function () {
+ const kmsProviders = {
+ local: {
+ key: Buffer.alloc(96)
+ },
+ aws: {
+ accessKeyId: 'example'
+ }
+ };
+
+ before(function () {
+ if (!requirements.credentialProvidersInstalled.aws) {
+ this.currentTest.skipReason = 'Cannot refresh credentials without sdk provider';
+ this.currentTest.skip();
+ return;
+ }
+ });
+
+ it('does not refresh credentials', async function () {
+ const providers = await loadCredentials(kmsProviders);
+ expect(providers).to.deep.equal(kmsProviders);
+ });
+ });
+ });
+ });
+
+ context('when the sdk is not installed', function () {
+ const kmsProviders = {
+ local: {
+ key: Buffer.alloc(96)
+ },
+ aws: {}
+ };
+
+ before(function () {
+ if (requirements.credentialProvidersInstalled.aws) {
+ this.currentTest.skipReason = 'Credentials will be loaded when sdk present';
+ this.currentTest.skip();
+ return;
+ }
+ });
+
+ it('does not refresh credentials', async function () {
+ const providers = await loadCredentials(kmsProviders);
+ expect(providers).to.deep.equal(kmsProviders);
+ });
+ });
+ });
+
+ context('when using gcp', () => {
+ const setupHttpServer = status => {
+ let httpServer;
+ before(() => {
+ httpServer = http
+ .createServer((_, res) => {
+ if (status === 200) {
+ res.writeHead(200, {
+ 'Content-Type': 'application/json',
+ 'Metadata-Flavor': 'Google'
+ });
+ res.end(JSON.stringify({ access_token: 'abc' }));
+ } else {
+ res.writeHead(401, {
+ 'Content-Type': 'application/json',
+ 'Metadata-Flavor': 'Google'
+ });
+ res.end('{}');
+ }
+ })
+ .listen(5001);
+ process.env.GCE_METADATA_HOST = 'http://127.0.0.1:5001';
+ });
+
+ after(() => {
+ httpServer.close();
+ delete process.env.GCE_METADATA_HOST;
+ });
+ };
+
+ context('and gcp-metadata is installed', () => {
+ beforeEach(function () {
+ if (!requirements.credentialProvidersInstalled.gcp) {
+ this.currentTest.skipReason = 'Tests require gcp-metadata to be installed';
+ this.currentTest.skip();
+ return;
+ }
+ });
+
+ context('when metadata http response is 200 ok', () => {
+ setupHttpServer(200);
+ context('when the credentials are empty', function () {
+ const kmsProviders = { gcp: {} };
+
+ it('refreshes the gcp credentials', async function () {
+ const providers = await loadCredentials(kmsProviders);
+ expect(providers).to.deep.equal({
+ gcp: {
+ accessToken: 'abc'
+ }
+ });
+ });
+ });
+ });
+
+ context('when metadata http response is 401 bad', () => {
+ setupHttpServer(401);
+ context('when the credentials are empty', function () {
+ const kmsProviders = { gcp: {} };
+
+ it('surfaces error from server', async function () {
+ const error = await loadCredentials(kmsProviders).catch(error => error);
+ expect(error).to.be.instanceOf(Error);
+ });
+ });
+ });
+ });
+
+ context('and gcp-metadata is not installed', () => {
+ beforeEach(function () {
+ if (requirements.credentialProvidersInstalled.gcp) {
+ this.currentTest.skipReason = 'Tests require gcp-metadata to not be installed';
+ this.currentTest.skip();
+ return;
+ }
+ });
+
+ context('when the credentials are empty', function () {
+ const kmsProviders = { gcp: {} };
+
+ it('does not modify the gcp credentials', async function () {
+ const providers = await loadCredentials(kmsProviders);
+ expect(providers).to.deep.equal({ gcp: {} });
+ });
+ });
+ });
+ });
+
+ context('when using azure', () => {
+ afterEach(() => tokenCache.resetCache());
+ afterEach(() => sinon.restore());
+ context('credential caching', () => {
+ const cache = tokenCache;
+
+ beforeEach(() => {
+ cache.resetCache();
+ });
+
+ context('when there is no cached token', () => {
+ let mockToken = {
+ accessToken: 'mock token',
+ expiresOnTimestamp: Date.now()
+ };
+
+ let token;
+
+ beforeEach(async () => {
+ sinon.stub(cache, '_getToken').returns(mockToken);
+ token = await cache.getToken();
+ });
+ it('fetches a token', async () => {
+ expect(token).to.have.property('accessToken', mockToken.accessToken);
+ });
+ it('caches the token on the class', async () => {
+ expect(cache.cachedToken).to.equal(mockToken);
+ });
+ });
+
+ context('when there is a cached token', () => {
+ context('when the cached token expires <= 1 minute from the current time', () => {
+ let mockToken = {
+ accessToken: 'mock token',
+ expiresOnTimestamp: Date.now()
+ };
+
+ let token;
+
+ beforeEach(async () => {
+ cache.cachedToken = {
+ accessToken: 'a new key',
+ expiresOnTimestamp: Date.now() + 3000
+ };
+ sinon.stub(cache, '_getToken').returns(mockToken);
+ token = await cache.getToken();
+ });
+
+ it('fetches a token', () => {
+ expect(token).to.have.property('accessToken', mockToken.accessToken);
+ });
+ it('caches the token on the class', () => {
+ expect(cache.cachedToken).to.equal(mockToken);
+ });
+ });
+
+ context('when the cached token expires > 1 minute from the current time', () => {
+ let expiredToken = {
+ token: 'mock token',
+ expiresOnTimestamp: Date.now()
+ };
+
+ let expectedMockToken = {
+ accessToken: 'a new key',
+ expiresOnTimestamp: Date.now() + 10000
+ };
+
+ let token;
+
+ beforeEach(async () => {
+ cache.cachedToken = expiredToken;
+ sinon.stub(cache, '_getToken').returns(expectedMockToken);
+ token = await cache.getToken();
+ });
+ it('returns the cached token', () => {
+ expect(token).to.have.property('accessToken', expectedMockToken.accessToken);
+ });
+ });
+ });
+ });
+
+ context('request configuration', () => {
+ const mockResponse = {
+ status: 200,
+ body: '{ "access_token": "token", "expires_in": "10000" }'
+ };
+
+ let httpSpy;
+
+ beforeEach(async () => {
+ httpSpy = sinon.stub(utils, 'get');
+ httpSpy.resolves(mockResponse);
+
+ await loadCredentials({ azure: {} });
+ });
+
+ it('sets the `api-version` param to 2018-02-01', () => {
+ const url = httpSpy.args[0][0];
+ expect(url).to.be.instanceof(URL);
+ expect(url.searchParams.get('api-version'), '2018-02-01');
+ });
+
+ it('sets the `resource` param to `https://vault.azure.net`', () => {
+ const url = httpSpy.args[0][0];
+ expect(url).to.be.instanceof(URL);
+ expect(url.searchParams.get('resource'), 'https://vault.azure.net');
+ });
+
+ it('sends the request to `http://169.254.169.254/metadata/identity/oauth2/token`', () => {
+ const url = httpSpy.args[0][0];
+ expect(url).to.be.instanceof(URL);
+ expect(url.toString()).to.include('http://169.254.169.254/metadata/identity/oauth2/token');
+ });
+
+ it('sets the Metadata header to true', () => {
+ const options = httpSpy.args[0][1];
+ expect(options).to.have.property('headers').to.have.property('Metadata', true);
+ });
+
+ it('sets the Content-Type header to application/json', () => {
+ const options = httpSpy.args[0][1];
+ expect(options)
+ .to.have.property('headers')
+ .to.have.property('Content-Type', 'application/json');
+ });
+
+ context('prose test specific requirements', () => {
+ /**
+ * the driver prose tests require the ability to set custom URL endpoints
+ * for the IMDS call and set custom headers
+ */
+ const url = new URL('http://customentpoint.com');
+
+ beforeEach(async () => {
+ sinon.restore();
+ httpSpy = sinon.stub(utils, 'get');
+ httpSpy.resolves(mockResponse);
+ await fetchAzureKMSToken({
+ url,
+ headers: {
+ customHeader1: 'value1',
+ customHeader2: 'value2'
+ }
+ });
+ });
+
+ it('allows a custom URL to be specified', () => {
+ const url = httpSpy.args[0][0];
+ expect(url).to.be.instanceof(URL);
+ expect(url.toString()).to.include('http://customentpoint.com');
+ });
+
+ it('deep copies the provided url', () => {
+ const spiedUrl = httpSpy.args[0][0];
+ expect(spiedUrl).to.be.instanceof(URL);
+ expect(spiedUrl).to.not.equal(url);
+ });
+
+ it('allows custom headers to be specified', () => {
+ const options = httpSpy.args[0][1];
+ expect(options).to.have.property('headers').to.have.property('customHeader1', 'value1');
+ expect(options).to.have.property('headers').to.have.property('customHeader2', 'value2');
+ });
+ });
+ });
+
+ context('error handling', () => {
+ afterEach(() => sinon.restore());
+ context('when the request times out', () => {
+ before(() => {
+ sinon.stub(utils, 'get').rejects(new MongoCryptKMSRequestNetworkTimeoutError());
+ });
+
+ it('throws a MongoCryptKMSRequestError', async () => {
+ const error = await loadCredentials({ azure: {} }).catch(e => e);
+ expect(error).to.be.instanceOf(MongoCryptAzureKMSRequestError);
+ });
+ });
+
+ context('when the request returns a non-200 error', () => {
+ context('when the request has no body', () => {
+ before(() => {
+ sinon.stub(utils, 'get').resolves({ status: 400 });
+ });
+
+ it('throws a MongoCryptKMSRequestError', async () => {
+ const error = await loadCredentials({ azure: {} }).catch(e => e);
+ expect(error).to.be.instanceOf(MongoCryptAzureKMSRequestError);
+ expect(error).to.match(/Malformed JSON body in GET request/);
+ });
+ });
+
+ context('when the request has a non-json body', () => {
+ before(() => {
+ sinon.stub(utils, 'get').resolves({ status: 400, body: 'non-json body' });
+ });
+
+ it('throws a MongoCryptKMSRequestError', async () => {
+ const error = await loadCredentials({ azure: {} }).catch(e => e);
+ expect(error).to.be.instanceOf(MongoCryptAzureKMSRequestError);
+ expect(error).to.match(/Malformed JSON body in GET request/);
+ });
+ });
+
+ context('when the request has a json body', () => {
+ beforeEach(() => {
+ sinon
+ .stub(utils, 'get')
+ .resolves({ status: 400, body: '{ "error": "something went wrong" }' });
+ });
+
+ it('throws a MongoCryptKMSRequestError', async () => {
+ const error = await loadCredentials({ azure: {} }).catch(e => e);
+ expect(error).to.be.instanceOf(MongoCryptAzureKMSRequestError);
+ });
+
+ it('attaches the body to the error', async () => {
+ const error = await loadCredentials({ azure: {} }).catch(e => e);
+ expect(error).to.have.property('body').to.deep.equal({ error: 'something went wrong' });
+ });
+ });
+ });
+
+ context('when the request returns a 200 response', () => {
+ context('when the request has no body', () => {
+ before(() => {
+ sinon.stub(utils, 'get').resolves({ status: 200 });
+ });
+
+ it('throws a MongoCryptKMSRequestError', async () => {
+ const error = await loadCredentials({ azure: {} }).catch(e => e);
+ expect(error).to.be.instanceOf(MongoCryptAzureKMSRequestError);
+ expect(error).to.match(/Malformed JSON body in GET request/);
+ });
+ });
+
+ context('when the request has a non-json body', () => {
+ before(() => {
+ sinon.stub(utils, 'get').resolves({ status: 200, body: 'non-json body' });
+ });
+
+ it('throws a MongoCryptKMSRequestError', async () => {
+ const error = await loadCredentials({ azure: {} }).catch(e => e);
+ expect(error).to.be.instanceOf(MongoCryptAzureKMSRequestError);
+ expect(error).to.match(/Malformed JSON body in GET request/);
+ });
+ });
+
+ context('when the body has no access_token', () => {
+ beforeEach(() => {
+ sinon.stub(utils, 'get').resolves({ status: 200, body: '{ "expires_in": "10000" }' });
+ });
+
+ it('throws a MongoCryptKMSRequestError', async () => {
+ const error = await loadCredentials({ azure: {} }).catch(e => e);
+ expect(error).to.be.instanceOf(MongoCryptAzureKMSRequestError);
+ expect(error).to.match(/missing field `access_token/);
+ });
+ });
+
+ context('when the body has no expires_in', () => {
+ beforeEach(() => {
+ sinon.stub(utils, 'get').resolves({ status: 200, body: '{ "access_token": "token" }' });
+ });
+
+ it('throws a MongoCryptKMSRequestError', async () => {
+ const error = await loadCredentials({ azure: {} }).catch(e => e);
+ expect(error).to.be.instanceOf(MongoCryptAzureKMSRequestError);
+ expect(error).to.match(/missing field `expires_in/);
+ });
+ });
+
+ context('when expires_in cannot be parsed into a number', () => {
+ beforeEach(() => {
+ sinon.stub(utils, 'get').resolves({
+ status: 200,
+ body: '{ "access_token": "token", "expires_in": "foo" }'
+ });
+ });
+
+ it('throws a MongoCryptKMSRequestError', async () => {
+ const error = await loadCredentials({ azure: {} }).catch(e => e);
+ expect(error).to.be.instanceOf(MongoCryptAzureKMSRequestError);
+ expect(error).to.match(/unable to parse int from `expires_in` field/);
+ });
+ });
+ });
+
+ context('when a valid token was returned', () => {
+ beforeEach(() => {
+ sinon
+ .stub(utils, 'get')
+ .resolves({ status: 200, body: '{ "access_token": "token", "expires_in": "10000" }' });
+ });
+
+ it('returns the token in the `azure` field of the kms providers', async () => {
+ const kmsProviders = await loadCredentials({ azure: {} });
+ expect(kmsProviders).to.have.property('azure').to.deep.equal({ accessToken: 'token' });
+ });
+ });
+ });
+ });
+});
diff --git a/encryption/test/release.test.js b/encryption/test/release.test.js
new file mode 100644
index 00000000000..caaecf544cb
--- /dev/null
+++ b/encryption/test/release.test.js
@@ -0,0 +1,66 @@
+'use strict';
+const { expect } = require('chai');
+const tar = require('tar');
+const cp = require('child_process');
+const fs = require('fs');
+const pkg = require('../package.json');
+
+const packFile = `mongodb-client-encryption-${pkg.version}.tgz`;
+
+const REQUIRED_FILES = [
+ 'package/binding.gyp',
+ 'package/CHANGELOG.md',
+ 'package/index.d.ts',
+ 'package/lib/index.js',
+ 'package/lib/autoEncrypter.js',
+ 'package/lib/buffer_pool.js',
+ 'package/lib/clientEncryption.js',
+ 'package/lib/common.js',
+ 'package/lib/providers/index.js',
+ 'package/lib/providers/gcp.js',
+ 'package/lib/providers/aws.js',
+ 'package/lib/providers/azure.js',
+ 'package/lib/providers/utils.js',
+ 'package/lib/cryptoCallbacks.js',
+ 'package/lib/errors.js',
+ 'package/lib/mongocryptdManager.js',
+ 'package/lib/stateMachine.js',
+ 'package/LICENSE',
+ 'package/package.json',
+ 'package/README.md',
+ 'package/src/mongocrypt.cc',
+ 'package/src/mongocrypt.h'
+];
+
+describe(`Release ${packFile}`, function () {
+ this.timeout(5000);
+
+ let tarFileList;
+ before(() => {
+ expect(fs.existsSync(packFile)).to.equal(false);
+ cp.execSync('npm pack', { stdio: 'ignore' });
+ tarFileList = [];
+ tar.list({
+ file: packFile,
+ sync: true,
+ onentry(entry) {
+ tarFileList.push(entry.path);
+ }
+ });
+ });
+
+ after(() => {
+ fs.unlinkSync(packFile);
+ });
+
+ for (const requiredFile of REQUIRED_FILES) {
+ it(`should contain ${requiredFile}`, () => {
+ expect(tarFileList).to.includes(requiredFile);
+ });
+ }
+
+ it('should not have extraneous files', () => {
+ const unexpectedFileList = tarFileList.filter(f => !REQUIRED_FILES.some(r => r === f));
+ expect(unexpectedFileList).to.have.lengthOf(0, `Extra files: ${unexpectedFileList.join(', ')}`);
+ });
+});
diff --git a/encryption/test/requirements.helper.js b/encryption/test/requirements.helper.js
new file mode 100644
index 00000000000..9dc5711c16f
--- /dev/null
+++ b/encryption/test/requirements.helper.js
@@ -0,0 +1,51 @@
+'use strict';
+
+// Data Key Stuff
+const AWS_ACCESS_KEY_ID = process.env.AWS_ACCESS_KEY_ID;
+const AWS_SECRET_ACCESS_KEY = process.env.AWS_SECRET_ACCESS_KEY;
+const AWS_REGION = process.env.AWS_REGION;
+const AWS_CMK_ID = process.env.AWS_CMK_ID;
+
+const awsKmsProviders = {
+ aws: { accessKeyId: AWS_ACCESS_KEY_ID, secretAccessKey: AWS_SECRET_ACCESS_KEY }
+};
+const awsDataKeyOptions = { masterKey: { key: AWS_CMK_ID, region: AWS_REGION } };
+
+const SKIP_LIVE_TESTS = !!process.env.MONGODB_NODE_SKIP_LIVE_TESTS;
+const SKIP_AWS_TESTS =
+ SKIP_LIVE_TESTS || !AWS_ACCESS_KEY_ID || !AWS_SECRET_ACCESS_KEY || !AWS_REGION || !AWS_CMK_ID;
+
+function isAWSCredentialProviderInstalled() {
+ try {
+ require.resolve('@aws-sdk/credential-providers');
+ return true;
+ } catch {
+ return false;
+ }
+}
+
+function isGCPCredentialProviderInstalled() {
+ try {
+ require.resolve('gcp-metadata');
+ return true;
+ } catch {
+ return false;
+ }
+}
+
+module.exports = {
+ SKIP_LIVE_TESTS,
+ SKIP_AWS_TESTS,
+ KEYS: {
+ AWS_ACCESS_KEY_ID,
+ AWS_SECRET_ACCESS_KEY,
+ AWS_REGION,
+ AWS_CMK_ID
+ },
+ awsKmsProviders,
+ awsDataKeyOptions,
+ credentialProvidersInstalled: {
+ aws: isAWSCredentialProviderInstalled(),
+ gcp: isGCPCredentialProviderInstalled()
+ }
+};
diff --git a/encryption/test/stateMachine.test.js b/encryption/test/stateMachine.test.js
new file mode 100644
index 00000000000..31d96ba68ae
--- /dev/null
+++ b/encryption/test/stateMachine.test.js
@@ -0,0 +1,331 @@
+'use strict';
+
+const { EventEmitter, once } = require('events');
+const net = require('net');
+const tls = require('tls');
+const fs = require('fs');
+const { expect } = require('chai');
+const sinon = require('sinon');
+const mongodb = require('mongodb');
+const BSON = mongodb.BSON;
+const StateMachine = require('../lib/stateMachine')({ mongodb }).StateMachine;
+
+describe('StateMachine', function () {
// Stand-in for the KMS request object produced by mongocrypt: tracks how many
// response bytes are still outstanding and exposes a fixed endpoint/provider.
class MockRequest {
  constructor(message, bytesNeeded) {
    this._message = message;
    // Default to 1024 outstanding bytes unless an explicit count is supplied.
    this._bytesNeeded = typeof bytesNeeded === 'number' ? bytesNeeded : 1024;
    this._kmsProvider = 'aws';
    this.endpoint = 'some.fake.host.com';
  }

  get kmsProvider() {
    return this._kmsProvider;
  }

  get bytesNeeded() {
    return this._bytesNeeded;
  }

  get message() {
    return this._message;
  }

  // Each chunk of KMS response data reduces the outstanding byte count.
  addResponse(buffer) {
    this._bytesNeeded -= buffer.length;
  }
}
+
+ // Unit tests for StateMachine#markCommand: the command forwarded to the
+ // marking client must disable value promotion so BSON numeric wrapper
+ // types (Long, Int32) round-trip unchanged.
+ describe('#markCommand', function () {
+ let runCommandStub;
+ let dbStub;
+ let clientStub;
+
+ beforeEach(function () {
+ this.sinon = sinon.createSandbox();
+ runCommandStub = this.sinon.stub().resolves({});
+ dbStub = this.sinon.createStubInstance(mongodb.Db, {
+ command: runCommandStub
+ });
+ clientStub = this.sinon.createStubInstance(mongodb.MongoClient, {
+ db: dbStub
+ });
+ });
+
+ const command = {
+ encryptedFields: {},
+ a: new BSON.Long('0'),
+ b: new BSON.Int32(0)
+ };
+ // markCommand is expected to pass exactly these options through to db.command.
+ const options = { promoteLongs: false, promoteValues: false };
+ const serializedCommand = BSON.serialize(command);
+ const stateMachine = new StateMachine({ bson: BSON });
+ const callback = () => {};
+
+ context('when executing the command', function () {
+ it('does not promote values', function () {
+ stateMachine.markCommand(clientStub, 'test.coll', serializedCommand, callback);
+ // The deserialized command plus the no-promotion options must reach db.command.
+ expect(runCommandStub.calledWith(command, options)).to.be.true;
+ });
+ });
+ });
+
+ describe('kmsRequest', function () {
+ // Socket double handed back by the stubbed tls.connect: fires `callback`
+ // when 'connect' is emitted, swallows writes, and completes end() on the
+ // microtask queue like a real socket would complete asynchronously.
+ class MockSocket extends EventEmitter {
+ constructor(callback) {
+ super();
+ this.on('connect', callback);
+ }
+ write() {}
+ destroy() {}
+ end(callback) {
+ Promise.resolve().then(callback);
+ }
+ }
+
+ before(function () {
+ this.sinon = sinon.createSandbox();
+ });
+
+ context('when handling standard kms requests', function () {
+ beforeEach(function () {
+ this.fakeSocket = undefined;
+ // Capture the socket kmsRequest creates so the test can drive it manually.
+ this.sinon.stub(tls, 'connect').callsFake((options, callback) => {
+ this.fakeSocket = new MockSocket(callback);
+ return this.fakeSocket;
+ });
+ });
+
+ it('should only resolve once bytesNeeded drops to zero', function (done) {
+ const stateMachine = new StateMachine({ bson: BSON });
+ const request = new MockRequest(Buffer.from('foobar'), 500);
+ let status = 'pending';
+ stateMachine
+ .kmsRequest(request)
+ .then(
+ () => (status = 'resolved'),
+ () => (status = 'rejected')
+ )
+ .catch(() => {});
+
+ this.fakeSocket.emit('connect');
+ // Nested setTimeouts let promise callbacks flush between 'data' emissions,
+ // so each assertion observes the settled state after that chunk.
+ setTimeout(() => {
+ expect(status).to.equal('pending');
+ expect(request.bytesNeeded).to.equal(500);
+ expect(request.kmsProvider).to.equal('aws');
+ this.fakeSocket.emit('data', Buffer.alloc(300));
+ setTimeout(() => {
+ expect(status).to.equal('pending');
+ expect(request.bytesNeeded).to.equal(200);
+ this.fakeSocket.emit('data', Buffer.alloc(200));
+ setTimeout(() => {
+ expect(status).to.equal('resolved');
+ expect(request.bytesNeeded).to.equal(0);
+ done();
+ });
+ });
+ });
+ });
+ });
+
+ context('when tls options are provided', function () {
+ context('when the options are insecure', function () {
+ // Each insecure flag must cause rejection before any connection attempt.
+ [
+ 'tlsInsecure',
+ 'tlsAllowInvalidCertificates',
+ 'tlsAllowInvalidHostnames',
+ 'tlsDisableOCSPEndpointCheck',
+ 'tlsDisableCertificateRevocationCheck'
+ ].forEach(function (option) {
+ context(`when the option is ${option}`, function () {
+ const stateMachine = new StateMachine({
+ bson: BSON,
+ tlsOptions: { aws: { [option]: true } }
+ });
+ const request = new MockRequest(Buffer.from('foobar'), 500);
+
+ it('rejects with the validation error', function (done) {
+ stateMachine.kmsRequest(request).catch(err => {
+ expect(err.message).to.equal(`Insecure TLS options prohibited for aws: ${option}`);
+ done();
+ });
+ });
+ });
+ });
+ });
+
+ context('when the options are secure', function () {
+ context('when providing tlsCertificateKeyFile', function () {
+ const stateMachine = new StateMachine({
+ bson: BSON,
+ tlsOptions: { aws: { tlsCertificateKeyFile: 'test.pem' } }
+ });
+ // bytesNeeded of -1 means the very first 'data' event satisfies the request.
+ const request = new MockRequest(Buffer.from('foobar'), -1);
+ const buffer = Buffer.from('foobar');
+ let connectOptions;
+
+ it('sets the cert and key options in the tls connect options', function (done) {
+ this.sinon.stub(fs, 'readFileSync').callsFake(fileName => {
+ expect(fileName).to.equal('test.pem');
+ return buffer;
+ });
+ this.sinon.stub(tls, 'connect').callsFake((options, callback) => {
+ connectOptions = options;
+ this.fakeSocket = new MockSocket(callback);
+ return this.fakeSocket;
+ });
+ stateMachine.kmsRequest(request).then(function () {
+ expect(connectOptions.cert).to.equal(buffer);
+ expect(connectOptions.key).to.equal(buffer);
+ done();
+ });
+ this.fakeSocket.emit('data', Buffer.alloc(0));
+ });
+ });
+
+ context('when providing tlsCAFile', function () {
+ const stateMachine = new StateMachine({
+ bson: BSON,
+ tlsOptions: { aws: { tlsCAFile: 'test.pem' } }
+ });
+ const request = new MockRequest(Buffer.from('foobar'), -1);
+ const buffer = Buffer.from('foobar');
+ let connectOptions;
+
+ it('sets the ca options in the tls connect options', function (done) {
+ this.sinon.stub(fs, 'readFileSync').callsFake(fileName => {
+ expect(fileName).to.equal('test.pem');
+ return buffer;
+ });
+ this.sinon.stub(tls, 'connect').callsFake((options, callback) => {
+ connectOptions = options;
+ this.fakeSocket = new MockSocket(callback);
+ return this.fakeSocket;
+ });
+ stateMachine.kmsRequest(request).then(function () {
+ expect(connectOptions.ca).to.equal(buffer);
+ done();
+ });
+ this.fakeSocket.emit('data', Buffer.alloc(0));
+ });
+ });
+
+ context('when providing tlsCertificateKeyFilePassword', function () {
+ const stateMachine = new StateMachine({
+ bson: BSON,
+ tlsOptions: { aws: { tlsCertificateKeyFilePassword: 'test' } }
+ });
+ const request = new MockRequest(Buffer.from('foobar'), -1);
+ let connectOptions;
+
+ it('sets the passphrase option in the tls connect options', function (done) {
+ this.sinon.stub(tls, 'connect').callsFake((options, callback) => {
+ connectOptions = options;
+ this.fakeSocket = new MockSocket(callback);
+ return this.fakeSocket;
+ });
+ stateMachine.kmsRequest(request).then(function () {
+ expect(connectOptions.passphrase).to.equal('test');
+ done();
+ });
+ this.fakeSocket.emit('data', Buffer.alloc(0));
+ });
+ });
+ });
+ });
+
+ afterEach(function () {
+ this.sinon.restore();
+ });
+ });
+
+ describe('Socks5 support', function () {
+ let socks5srv;
+ let hasTlsConnection;
+ let withUsernamePassword;
+
+ beforeEach(async () => {
+ hasTlsConnection = false;
+ // In-process SOCKS5 server scripting the exact byte exchange, then
+ // dropping the connection once the client starts the TLS handshake.
+ socks5srv = net.createServer(async socket => {
+ if (withUsernamePassword) {
+ // Greeting offers no-auth and username/password (05 02 00 02);
+ // server selects username/password (05 02) and accepts foo/bar (01 00).
+ expect(await once(socket, 'data')).to.deep.equal([Buffer.from('05020002', 'hex')]);
+ socket.write(Buffer.from('0502', 'hex'));
+ expect(await once(socket, 'data')).to.deep.equal([
+ Buffer.concat([
+ Buffer.from('0103', 'hex'),
+ Buffer.from('foo'),
+ Buffer.from('03', 'hex'),
+ Buffer.from('bar')
+ ])
+ ]);
+ socket.write(Buffer.from('0100', 'hex'));
+ } else {
+ // Greeting offers no-auth only (05 01 00); server accepts it (05 00).
+ expect(await once(socket, 'data')).to.deep.equal([Buffer.from('050100', 'hex')]);
+ socket.write(Buffer.from('0500', 'hex'));
+ }
+ // CONNECT request by domain name (atyp 03, length 0x12) for
+ // some.fake.host.com on port 0x01bb = 443.
+ expect(await once(socket, 'data')).to.deep.equal([
+ Buffer.concat([
+ Buffer.from('0501000312', 'hex'),
+ Buffer.from('some.fake.host.com'),
+ Buffer.from('01bb', 'hex')
+ ])
+ ]);
+ // SOCKS5 success reply followed by bound address/port bytes.
+ socket.write(Buffer.from('0500007f0000010100', 'hex'));
+ expect((await once(socket, 'data'))[0][1]).to.equal(3); // TLS handshake version byte
+ hasTlsConnection = true;
+ socket.end();
+ });
+ socks5srv.listen(0);
+ await once(socks5srv, 'listening');
+ });
+
+ afterEach(() => {
+ socks5srv.close();
+ });
+
+ it('should create HTTPS connections through a Socks5 proxy (no proxy auth)', async function () {
+ const stateMachine = new StateMachine({
+ bson: BSON,
+ proxyOptions: {
+ proxyHost: 'localhost',
+ proxyPort: socks5srv.address().port
+ }
+ });
+
+ const request = new MockRequest(Buffer.from('foobar'), 500);
+ try {
+ await stateMachine.kmsRequest(request);
+ } catch (err) {
+ // The proxy closing the socket mid-handshake surfaces as ECONNRESET;
+ // hasTlsConnection proves the request actually went through the proxy.
+ expect(err.name).to.equal('MongoCryptError');
+ expect(err.originalError.code).to.equal('ECONNRESET');
+ expect(hasTlsConnection).to.equal(true);
+ return;
+ }
+ expect.fail('missed exception');
+ });
+
+ it('should create HTTPS connections through a Socks5 proxy (username/password auth)', async function () {
+ withUsernamePassword = true;
+ const stateMachine = new StateMachine({
+ bson: BSON,
+ proxyOptions: {
+ proxyHost: 'localhost',
+ proxyPort: socks5srv.address().port,
+ proxyUsername: 'foo',
+ proxyPassword: 'bar'
+ }
+ });
+
+ const request = new MockRequest(Buffer.from('foobar'), 500);
+ try {
+ await stateMachine.kmsRequest(request);
+ } catch (err) {
+ expect(err.name).to.equal('MongoCryptError');
+ expect(err.originalError.code).to.equal('ECONNRESET');
+ expect(hasTlsConnection).to.equal(true);
+ return;
+ }
+ expect.fail('missed exception');
+ });
+ });
+});
diff --git a/encryption/test/tools/chai-addons.js b/encryption/test/tools/chai-addons.js
new file mode 100644
index 00000000000..68dd475d0ad
--- /dev/null
+++ b/encryption/test/tools/chai-addons.js
@@ -0,0 +1,8 @@
'use strict';

// Shared chai configuration for the encryption test suite.
const chai = require('chai');

// Register the assertion plugins used across the tests.
chai.use(require('sinon-chai'));
chai.use(require('chai-subset'));

// Show full values in assertion-failure diffs instead of truncating them.
chai.config.truncateThreshold = 0;
diff --git a/encryption/test/tools/mongodb_reporter.js b/encryption/test/tools/mongodb_reporter.js
new file mode 100644
index 00000000000..9e8461c1f15
--- /dev/null
+++ b/encryption/test/tools/mongodb_reporter.js
@@ -0,0 +1,325 @@
+//@ts-check
+'use strict';
+const mocha = require('mocha');
+const chalk = require('chalk');
+
+chalk.level = 3;
+
+const {
+ EVENT_RUN_BEGIN,
+ EVENT_RUN_END,
+ EVENT_TEST_FAIL,
+ EVENT_TEST_PASS,
+ EVENT_SUITE_BEGIN,
+ EVENT_SUITE_END,
+ EVENT_TEST_PENDING,
+ EVENT_TEST_BEGIN,
+ EVENT_TEST_END
+} = mocha.Runner.constants;
+
+const fs = require('fs');
+const os = require('os');
+
+/**
+ * @typedef {object} MongoMochaSuiteExtension
+ * @property {Date} timestamp - suite start date
+ * @property {string} stdout - capture of stdout
+ * @property {string} stderr - capture of stderr
+ * @property {MongoMochaTest} test - the suite's currently running test (if any)
+ * @typedef {object} MongoMochaTestExtension
+ * @property {Date} startTime - test start date
+ * @property {Date} endTime - test end date
+ * @property {number} elapsedTime - difference between end and start
+ * @property {Error} [error] - The possible error from a test
+ * @property {true} [skipped] - Set if test was skipped
+ * @typedef {MongoMochaSuiteExtension & Mocha.Suite} MongoMochaSuite
+ * @typedef {MongoMochaTestExtension & Mocha.Test} MongoMochaTest
+ */
+
+// Turn this on if you have to debug this custom reporter!
+let REPORT_TO_STDIO = false;
+
/**
 * Hooks `stream.write` so that everything written is accumulated as text
 * while still being forwarded to the real implementation.
 *
 * @param {NodeJS.WritableStream} stream - stream whose `write` is intercepted
 * @returns {{ unhook: () => string, captured: () => string }} `captured()`
 *   returns the text collected so far; `unhook()` restores the original
 *   `write` and returns the final capture.
 */
function captureStream(stream) {
  const originalWrite = stream.write;
  let captured = '';

  stream.write = function (chunk) {
    captured += chunk.toString(); // chunk is a String or Buffer
    originalWrite.apply(stream, arguments);
  };

  return {
    unhook() {
      stream.write = originalWrite;
      return captured;
    },
    captured() {
      return captured;
    }
  };
}
+
+/**
+ * Spec-style mocha reporter that additionally records per-test timing and
+ * per-suite stdout/stderr captures, then writes a JUnit-style `xunit.xml`
+ * at the end of the run (or on SIGINT).
+ *
+ * @param {Mocha.Runner} runner
+ * @this {any}
+ */
+class MongoDBMochaReporter extends mocha.reporters.Spec {
+ constructor(runner) {
+ super(runner);
+ /** @type {Map} keyed by suite.fullTitle(); values are { suite, stdout, stderr } entries */
+ this.suites = new Map();
+ this.xunitWritten = false;
+ runner.on(EVENT_RUN_BEGIN, () => this.start());
+ runner.on(EVENT_RUN_END, () => this.end());
+ runner.on(EVENT_SUITE_BEGIN, suite => this.onSuite(suite));
+ runner.on(EVENT_TEST_BEGIN, test => this.onTest(test));
+ runner.on(EVENT_TEST_PASS, test => this.pass(test));
+ runner.on(EVENT_TEST_FAIL, (test, error) => this.fail(test, error));
+ runner.on(EVENT_TEST_PENDING, test => this.pending(test));
+ runner.on(EVENT_SUITE_END, suite => this.suiteEnd(suite));
+ runner.on(EVENT_TEST_END, test => this.testEnd(test));
+
+ // Ctrl-C still produces a (partial) report before exiting.
+ process.on('SIGINT', () => this.end(true));
+ }
+ start() {}
+
+ /**
+ * Assembles the collected suite/test data and writes xunit.xml once.
+ * @param {boolean} [ctrlC] - set when invoked from the SIGINT handler
+ */
+ end(ctrlC) {
+ try {
+ if (ctrlC) console.log('emergency exit!');
+ const output = { testSuites: [] };
+
+ // Map iteration order gives each suite a stable numeric id.
+ for (const [id, [className, { suite }]] of [...this.suites.entries()].entries()) {
+ let totalSuiteTime = 0;
+ let testCases = [];
+ let failureCount = 0;
+
+ const tests = /** @type {MongoMochaTest[]}*/ (suite.tests);
+ for (const test of tests) {
+ let time = test.elapsedTime / 1000;
+ time = Number.isNaN(time) ? 0 : time;
+
+ totalSuiteTime += time;
+ failureCount += test.state === 'failed' ? 1 : 0;
+
+ /** @type {string | Date | number} */
+ let startTime = test.startTime;
+ startTime = startTime ? startTime.toISOString() : 0;
+
+ /** @type {string | Date | number} */
+ let endTime = test.endTime;
+ endTime = endTime ? endTime.toISOString() : 0;
+
+ let error = test.error;
+ let failure = error
+ ? {
+ type: error.constructor.name,
+ message: error.message,
+ stack: error.stack
+ }
+ : undefined;
+
+ let skipped = !!test.skipped;
+
+ testCases.push({
+ name: test.title,
+ className,
+ time,
+ startTime,
+ endTime,
+ skipped,
+ failure
+ });
+ }
+
+ /** @type {string | Date | number} */
+ let timestamp = suite.timestamp;
+ timestamp = timestamp ? timestamp.toISOString().split('.')[0] : '';
+
+ output.testSuites.push({
+ package: suite.file.includes('integration') ? 'Integration' : 'Unit',
+ id,
+ name: className,
+ timestamp,
+ hostname: os.hostname(),
+ tests: suite.tests.length,
+ failures: failureCount,
+ errors: '0',
+ time: totalSuiteTime,
+ testCases,
+ stdout: suite.stdout,
+ stderr: suite.stderr
+ });
+ }
+
+ if (!this.xunitWritten) {
+ fs.writeFileSync('xunit.xml', outputToXML(output), { encoding: 'utf8' });
+ }
+ this.xunitWritten = true;
+ // NOTE(review): this log sits outside the `if` above, so it prints even
+ // when the file was already written by an earlier call — confirm intended.
+ console.log(chalk.bold('wrote xunit.xml'));
+ } catch (error) {
+ console.error(chalk.red(`Failed to output xunit report! ${error}`));
+ } finally {
+ if (ctrlC) process.exit(1);
+ }
+ }
+
+ /**
+ * Starts stdout/stderr capture for a newly started (non-root) suite.
+ * @param {MongoMochaSuite} suite
+ */
+ onSuite(suite) {
+ if (suite.root) return;
+ if (!this.suites.has(suite.fullTitle())) {
+ suite.timestamp = new Date();
+ this.suites.set(suite.fullTitle(), {
+ suite,
+ stdout: captureStream(process.stdout),
+ stderr: captureStream(process.stderr)
+ });
+ } else {
+ console.warn(`${chalk.yellow('WARNING:')} ${suite.fullTitle()} started twice`);
+ }
+ }
+
+ /**
+ * Stops capture for the suite and stores the collected output on it.
+ * @param {MongoMochaSuite} suite
+ */
+ suiteEnd(suite) {
+ if (suite.root) return;
+ const currentSuite = this.suites.get(suite.fullTitle());
+ if (!currentSuite) {
+ console.error('Suite never started >:(');
+ process.exit(1);
+ }
+ if (currentSuite.stdout || currentSuite.stderr) {
+ suite.stdout = currentSuite.stdout.unhook();
+ suite.stderr = currentSuite.stderr.unhook();
+ delete currentSuite.stdout;
+ delete currentSuite.stderr;
+ }
+ }
+
+ /**
+ * Records the wall-clock start time of a test.
+ * @param {MongoMochaTest} test
+ */
+ onTest(test) {
+ test.startTime = new Date();
+ }
+
+ /**
+ * Records the end time and derives elapsed milliseconds.
+ * @param {MongoMochaTest} test
+ */
+ testEnd(test) {
+ test.endTime = new Date();
+ test.elapsedTime = Number(test.endTime) - Number(test.startTime);
+ }
+
+ /**
+ * @param {MongoMochaTest} test
+ */
+ pass(test) {
+ if (REPORT_TO_STDIO) console.log(chalk.green(`✔ ${test.fullTitle()}`));
+ }
+
+ /**
+ * Stores the failure on the test so end() can emit a <failure> element.
+ * @param {MongoMochaTest} test
+ * @param {Error} error
+ */
+ fail(test, error) {
+ if (REPORT_TO_STDIO) console.log(chalk.red(`⨯ ${test.fullTitle()} -- ${error.message}`));
+ test.error = error;
+ }
+
+ /**
+ * Marks the test skipped and surfaces an optional skipReason to the console.
+ * @param {MongoMochaTest & {skipReason?: string}} test
+ */
+ pending(test) {
+ if (REPORT_TO_STDIO) console.log(chalk.cyan(`↬ ${test.fullTitle()}`));
+ if (typeof test.skipReason === 'string') {
+ console.log(chalk.cyan(`${' '.repeat(test.titlePath().length + 1)}↬ ${test.skipReason}`));
+ }
+ test.skipped = true;
+ }
+}
+
+module.exports = MongoDBMochaReporter;
+
/**
 * Replaces characters that are illegal inside XML attribute values.
 *
 * Rather than entity-escaping, each character is swapped for a visually
 * similar fullwidth/small-form Unicode character so the attribute text stays
 * human-readable in report viewers.
 *
 * @param {any} string - value to sanitize; coerced with String()
 * @returns {string} the sanitized text
 */
function replaceIllegalXMLCharacters(string) {
  // prettier-ignore
  return String(string)
    .split('"').join('＂')
    .split('<').join('﹤')
    .split('>').join('﹥')
    .split('&').join('﹠');
}

// Matches ANSI terminal escape sequences (colors, cursor movement, …) so they
// can be stripped from captured stdout/stderr before embedding in the XML.
const ANSI_ESCAPE_REGEX =
  // eslint-disable-next-line no-control-regex
  /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g;

/**
 * Serializes the reporter's collected suite data into JUnit-style xunit XML.
 *
 * @param {{ testSuites: object[] }} output - suites assembled by MongoDBMochaReporter#end
 * @returns {string} a complete XML document
 */
function outputToXML(output) {
  // Wraps text in a CDATA section: strips ANSI escapes and escapes any
  // literal ']]>' so the section cannot be terminated early.
  function cdata(str) {
    return `<![CDATA[${String(str).split(ANSI_ESCAPE_REGEX).join('').split(']]>')
      .join('\\]\\]\\>')}]]>`;
  }

  // Renders an XML tag. With selfClose it yields '<name attrs/>'; with
  // content it yields '<name attrs>content</name>'; otherwise it returns the
  // unclosed opening tag so the caller can append children and close it.
  function makeTag(name, attributes, selfClose, content) {
    const attributesString = Object.entries(attributes || {})
      .map(([k, v]) => `${k}="${replaceIllegalXMLCharacters(v)}"`)
      .join(' ');
    let tag = `<${name}${attributesString ? ' ' + attributesString : ''}`;
    if (selfClose) return tag + '/>\n';
    else tag += '>';
    if (content) return tag + content + `</${name}>`;
    return tag;
  }

  let s = '<?xml version="1.0" encoding="UTF-8"?>\n<testsuites>\n\n';

  for (const suite of output.testSuites) {
    s += makeTag('testsuite', {
      package: suite.package,
      id: suite.id,
      name: suite.name,
      timestamp: suite.timestamp,
      hostname: suite.hostname,
      tests: suite.tests,
      failures: suite.failures,
      errors: suite.errors,
      time: suite.time
    });
    s += '\n\t' + makeTag('properties') + '</properties>\n'; // can put metadata here?
    for (const test of suite.testCases) {
      s +=
        '\t' +
        makeTag(
          'testcase',
          {
            name: test.name,
            classname: test.className,
            time: test.time,
            start: test.startTime,
            end: test.endTime
          },
          // Self-close only when there is no nested <failure>/<skipped> child.
          !test.failure && !test.skipped
        );
      if (test.failure) {
        s +=
          '\n\t\t' +
          makeTag('failure', { type: test.failure.type }, false, cdata(test.failure.stack)) +
          '\n';
        s += `\t</testcase>\n`;
      }
      if (test.skipped) {
        s += makeTag('skipped', {}, true);
        s += `\t</testcase>\n`;
      }
    }
    s += '\t' + makeTag('system-out', {}, false, cdata(suite.stdout)) + '\n';
    s += '\t' + makeTag('system-err', {}, false, cdata(suite.stderr)) + '\n';
    s += `</testsuite>\n`;
  }

  return s + '</testsuites>\n';
}
diff --git a/encryption/test/types/index.test-d.ts b/encryption/test/types/index.test-d.ts
new file mode 100644
index 00000000000..ae0b6b92a8e
--- /dev/null
+++ b/encryption/test/types/index.test-d.ts
@@ -0,0 +1,63 @@
+import { expectAssignable, expectError, expectType, expectNotType, expectNotAssignable } from 'tsd';
+import { RangeOptions, AWSEncryptionKeyOptions, AzureEncryptionKeyOptions, ClientEncryption, GCPEncryptionKeyOptions, ClientEncryptionEncryptOptions, KMSProviders } from '../..';
+
+type RequiredCreateEncryptedCollectionSettings = Parameters<
+ ClientEncryption['createEncryptedCollection']
+>[2];
+
+expectError({});
+expectError({
+ provider: 'blah!',
+ createCollectionOptions: { encryptedFields: {} }
+});
+expectError({
+ provider: 'aws',
+ createCollectionOptions: {}
+});
+expectError({
+ provider: 'aws',
+ createCollectionOptions: { encryptedFields: null }
+});
+
+expectAssignable({
+ provider: 'aws',
+ createCollectionOptions: { encryptedFields: {} }
+});
+expectAssignable({
+ provider: 'aws',
+ createCollectionOptions: { encryptedFields: {} },
+ masterKey: { } as AWSEncryptionKeyOptions | AzureEncryptionKeyOptions | GCPEncryptionKeyOptions
+});
+
+{
+ // NODE-5041 - incorrect spelling of rangeOpts in typescript definitions
+ const options = {} as ClientEncryptionEncryptOptions;
+ expectType(options.rangeOptions)
+}
+
+{
+ // KMSProviders
+ // aws
+ expectAssignable({ accessKeyId: '', secretAccessKey: '' });
+ expectAssignable({ accessKeyId: '', secretAccessKey: '', sessionToken: undefined });
+ expectAssignable({ accessKeyId: '', secretAccessKey: '', sessionToken: '' });
+ // automatic
+ expectAssignable({});
+
+ // azure
+ expectAssignable({ tenantId: 'a', clientId: 'a', clientSecret: 'a' });
+ expectAssignable({ tenantId: 'a', clientId: 'a', clientSecret: 'a' });
+ expectAssignable({ tenantId: 'a', clientId: 'a', clientSecret: 'a', identityPlatformEndpoint: undefined });
+ expectAssignable({ tenantId: 'a', clientId: 'a', clientSecret: 'a', identityPlatformEndpoint: '' });
+ expectAssignable({ accessToken: 'a' });
+ expectAssignable({});
+
+ // gcp
+ expectAssignable({ email: 'a', privateKey: 'a' });
+ expectAssignable({ email: 'a', privateKey: 'a', endpoint: undefined });
+ expectAssignable({ email: 'a', privateKey: 'a', endpoint: 'a' });
+ expectAssignable({ accessToken: 'a' });
+ // automatic
+ expectAssignable({});
+
+}