diff --git a/src/database/mongo/hash.js b/src/database/mongo/hash.js
index 0829bbf8af..67daab7b16 100644
--- a/src/database/mongo/hash.js
+++ b/src/database/mongo/hash.js
@@ -14,6 +14,9 @@ module.exports = function (module) {
 		}
 
 		const writeData = helpers.serializeData(data);
+		if (!Object.keys(writeData).length) {
+			return;
+		}
 		try {
 			if (isArray) {
 				const bulk = module.client.collection('objects').initializeUnorderedBulkOp();
@@ -39,9 +42,18 @@ module.exports = function (module) {
 
 		const writeData = data.map(helpers.serializeData);
 		try {
-			const bulk = module.client.collection('objects').initializeUnorderedBulkOp();
-			keys.forEach((key, i) => bulk.find({ _key: key }).upsert().updateOne({ $set: writeData[i] }));
-			await bulk.execute();
+			let bulk;
+			keys.forEach((key, i) => {
+				if (Object.keys(writeData[i]).length) {
+					if (!bulk) {
+						bulk = module.client.collection('objects').initializeUnorderedBulkOp();
+					}
+					bulk.find({ _key: key }).upsert().updateOne({ $set: writeData[i] });
+				}
+			});
+			if (bulk) {
+				await bulk.execute();
+			}
 		} catch (err) {
 			if (err && err.message.startsWith('E11000 duplicate key error')) {
 				return await module.setObjectBulk(keys, data);
diff --git a/src/database/postgres/hash.js b/src/database/postgres/hash.js
index 1a733a3518..04b38713c2 100644
--- a/src/database/postgres/hash.js
+++ b/src/database/postgres/hash.js
@@ -11,7 +11,9 @@ module.exports = function (module) {
 		if (data.hasOwnProperty('')) {
 			delete data[''];
 		}
-
+		if (!Object.keys(data).length) {
+			return;
+		}
 		await module.transaction(async (client) => {
 			const dataString = JSON.stringify(data);
 			async function setOne(key) {
diff --git a/src/database/redis/hash.js b/src/database/redis/hash.js
index 103cbc4a81..966a36eddd 100644
--- a/src/database/redis/hash.js
+++ b/src/database/redis/hash.js
@@ -41,7 +41,11 @@ module.exports = function (module) {
 			return;
 		}
 		const batch = module.client.batch();
-		keys.forEach((k, i) => batch.hmset(k, data[i]));
+		keys.forEach((k, i) => {
+			if (Object.keys(data[i]).length) {
+				batch.hmset(k, data[i]);
+			}
+		});
 		await helpers.execBatch(batch);
 		cache.del(keys);
 	};
diff --git a/test/database/hash.js b/test/database/hash.js
index 39c4e39624..650afae8bf 100644
--- a/test/database/hash.js
+++ b/test/database/hash.js
@@ -72,7 +72,7 @@ describe('Hash methods', () => {
 			});
 		});
 
-		it('should set multiple keys to different okjects', async () => {
+		it('should set multiple keys to different objects', async () => {
 			const keys = ['bulkKey1', 'bulkKey2'];
 			const data = [{ foo: '1' }, { baz: 'baz' }];
 
@@ -80,6 +80,39 @@ describe('Hash methods', () => {
 			const result = await db.getObjects(keys);
 			assert.deepStrictEqual(result, data);
 		});
+
+		it('should not error if object is empty', async () => {
+			const keys = ['bulkKey3', 'bulkKey4'];
+			const data = [{ foo: '1' }, { }];
+
+			await db.setObjectBulk(keys, data);
+			const result = await db.getObjects(keys);
+			assert.deepStrictEqual(result, [{ foo: '1' }, null]);
+		});
+
+		it('should not error if object is empty', async () => {
+			const keys = ['bulkKey5'];
+			const data = [{ }];
+
+			await db.setObjectBulk(keys, data);
+			const result = await db.getObjects(keys);
+			assert.deepStrictEqual(result, [null]);
+		});
+
+		it('should not error if object is empty', async () => {
+			const keys = ['bulkKey6', 'bulkKey7'];
+			const data = {};
+
+			await db.setObject(keys, data);
+			const result = await db.getObjects(keys);
+			assert.deepStrictEqual(result, [null, null]);
+		});
+
+		it('should not error if object is empty', async () => {
+			await db.setObject('emptykey', {});
+			const result = await db.getObject('emptykey');
+			assert.deepStrictEqual(result, null);
+		});
 	});
 
 	describe('setObjectField()', () => {