refactor: setObjectBulk to match sortedSetAddBulk

isekai-main
Barış Soner Uşaklı 3 years ago
parent 1a85aaad23
commit 8379c11b22

@ -121,8 +121,7 @@ module.exports = function (Categories) {
);
await db.setObjectBulk(
childrenCids.map(cid => `category:${cid}`),
childrenCids.map((cid, index) => ({ order: index + 1 }))
childrenCids.map((cid, index) => [`category:${cid}`, { order: index + 1 }])
);
cache.del([

@ -35,20 +35,26 @@ module.exports = function (module) {
cache.del(key);
};
module.setObjectBulk = async function (keys, data) {
if (!keys.length || !data.length) {
module.setObjectBulk = async function (...args) {
let data = args[0];
if (!Array.isArray(data) || !data.length) {
return;
}
if (Array.isArray(args[1])) {
console.warn('[deprecated] db.setObjectBulk(keys, data) usage is deprecated, please use db.setObjectBulk(data)');
// convert old format to new format for backwards compatibility
data = args[0].map((key, i) => [key, args[1][i]]);
}
const writeData = data.map(helpers.serializeData);
try {
let bulk;
keys.forEach((key, i) => {
if (Object.keys(writeData[i]).length) {
data.forEach((item) => {
const writeData = helpers.serializeData(item[1]);
if (Object.keys(writeData).length) {
if (!bulk) {
bulk = module.client.collection('objects').initializeUnorderedBulkOp();
}
bulk.find({ _key: key }).upsert().updateOne({ $set: writeData[i] });
bulk.find({ _key: item[0] }).upsert().updateOne({ $set: writeData });
}
});
if (bulk) {
@ -56,12 +62,12 @@ module.exports = function (module) {
}
} catch (err) {
if (err && err.message.startsWith('E11000 duplicate key error')) {
return await module.setObjectBulk(keys, data);
return await module.setObjectBulk(data);
}
throw err;
}
cache.del(keys);
cache.del(data.map(item => item[0]));
};
module.setObjectField = async function (key, field, value) {

@ -44,29 +44,30 @@ module.exports = function (module) {
});
};
module.setObjectBulk = async function (keys, data) {
if (!keys.length || !data.length) {
module.setObjectBulk = async function (...args) {
let data = args[0];
if (!Array.isArray(data) || !data.length) {
return;
}
if (Array.isArray(args[1])) {
console.warn('[deprecated] db.setObjectBulk(keys, data) usage is deprecated, please use db.setObjectBulk(data)');
// convert old format to new format for backwards compatibility
data = args[0].map((key, i) => [key, args[1][i]]);
}
await module.transaction(async (client) => {
keys = keys.slice();
data = data.filter((d, i) => {
if (d.hasOwnProperty('')) {
delete d[''];
}
const keep = !!Object.keys(d).length;
if (!keep) {
keys.splice(i, 1);
data = data.filter((item) => {
if (item[1].hasOwnProperty('')) {
delete item[1][''];
}
return keep;
return !!Object.keys(item[1]).length;
});
const keys = data.map(item => item[0]);
if (!keys.length) {
return;
}
await helpers.ensureLegacyObjectsType(client, keys, 'hash');
const dataStrings = data.map(JSON.stringify);
const dataStrings = data.map(item => JSON.stringify(item[1]));
await client.query({
name: 'setObjectBulk',
text: `

@ -36,18 +36,25 @@ module.exports = function (module) {
cache.del(key);
};
module.setObjectBulk = async function (keys, data) {
if (!keys.length || !data.length) {
module.setObjectBulk = async function (...args) {
let data = args[0];
if (!Array.isArray(data) || !data.length) {
return;
}
if (Array.isArray(args[1])) {
console.warn('[deprecated] db.setObjectBulk(keys, data) usage is deprecated, please use db.setObjectBulk(data)');
// convert old format to new format for backwards compatibility
data = args[0].map((key, i) => [key, args[1][i]]);
}
const batch = module.client.batch();
keys.forEach((k, i) => {
if (Object.keys(data[i]).length) {
batch.hmset(k, data[i]);
data.forEach((item) => {
if (Object.keys(item[1]).length) {
batch.hmset(item[0], item[1]);
}
});
await helpers.execBatch(batch);
cache.del(keys);
cache.del(data.map(item => item[0]));
};
module.setObjectField = async function (key, field, value) {

@ -70,19 +70,18 @@ Settings.set = async function (hash, values, quiet) {
}));
const sortedSetData = [];
const objectData = { keys: [], data: [] };
const objectData = [];
sortedLists.forEach((list) => {
const arr = sortedListData[list];
arr.forEach((data, order) => {
sortedSetData.push([`settings:${hash}:sorted-list:${list}`, order, order]);
objectData.keys.push(`settings:${hash}:sorted-list:${list}:${order}`);
objectData.data.push(data);
objectData.push([`settings:${hash}:sorted-list:${list}:${order}`, data]);
});
});
await Promise.all([
db.sortedSetAddBulk(sortedSetData),
db.setObjectBulk(objectData.keys, objectData.data),
db.setObjectBulk(objectData),
]);
}

@ -343,8 +343,7 @@ module.exports = function (Posts) {
post.data.tid = newTid;
});
await db.setObjectBulk(
postData.map(p => `post:queue:${p.id}`),
postData.map(p => ({ data: JSON.stringify(p.data) }))
postData.map(p => [`post:queue:${p.id}`, { data: JSON.stringify(p.data) }]),
);
cache.del('post-queue');
}

@ -124,5 +124,5 @@ async function updateUserLastposttimes(uids, topicsData) {
async function shiftPostTimes(tid, timestamp) {
const pids = (await posts.getPidsFromSet(`tid:${tid}:posts`, 0, -1, false));
// Leaving other related score values intact, since they reflect post order correctly, and it seems that's good enough
return db.setObjectBulk(pids.map(pid => `post:${pid}`), pids.map((_, idx) => ({ timestamp: timestamp + idx + 1 })));
return db.setObjectBulk(pids.map((pid, idx) => [`post:${pid}`, { timestamp: timestamp + idx + 1 }]));
}

@ -155,8 +155,7 @@ module.exports = function (Topics) {
}
});
await db.setObjectBulk(
topicData.map(t => `topic:${t.tid}`),
topicData.map(t => ({ tags: t.tags.join(',') }))
topicData.map(t => [`topic:${t.tid}`, { tags: t.tags.join(',') }]),
);
}, {});
await Topics.deleteTag(tag);
@ -335,14 +334,11 @@ module.exports = function (Topics) {
topicTags.push(tag);
}
});
bulkSet.push({ tags: topicTags.join(',') });
bulkSet.push([`topic:${t.tid}`, { tags: topicTags.join(',') }]);
});
await Promise.all([
db.sortedSetAddBulk(bulkAdd),
db.setObjectBulk(
topicData.map(t => `topic:${t.tid}`),
bulkSet,
),
db.setObjectBulk(bulkSet),
]);
await Promise.all(tags.map(updateTagCount));
@ -363,14 +359,11 @@ module.exports = function (Topics) {
topicTags.splice(topicTags.indexOf(tag), 1);
}
});
bulkSet.push({ tags: topicTags.join(',') });
bulkSet.push([`topic:${t.tid}`, { tags: topicTags.join(',') }]);
});
await Promise.all([
db.sortedSetRemoveBulk(bulkRemove),
db.setObjectBulk(
topicData.map(t => `topic:${t.tid}`),
bulkSet,
),
db.setObjectBulk(bulkSet),
]);
await Promise.all(tags.map(updateTagCount));

@ -15,15 +15,13 @@ module.exports = {
const keys = tids.map(tid => `tid:${tid}:posters`);
await db.sortedSetsRemoveRangeByScore(keys, '-inf', 0);
const counts = await db.sortedSetsCard(keys);
const setKeys = [];
const data = [];
const bulkSet = [];
for (let i = 0; i < tids.length; i++) {
if (counts[i] > 0) {
setKeys.push(`topic:${tids[i]}`);
data.push({ postercount: counts[i] });
bulkSet.push([`topic:${tids[i]}`, { postercount: counts[i] }]);
}
}
await db.setObjectBulk(setKeys, data);
await db.setObjectBulk(bulkSet);
}, {
progress: progress,
batchSize: 500,

@ -16,8 +16,7 @@ module.exports = {
const tidToCount = _.zipObject(tids, counts);
const tidsWithThumbs = tids.filter((t, i) => counts[i] > 0);
await db.setObjectBulk(
tidsWithThumbs.map(tid => `topic:${tid}`),
tidsWithThumbs.map(tid => ({ numThumbs: tidToCount[tid] }))
tidsWithThumbs.map(tid => [`topic:${tid}`, { numThumbs: tidToCount[tid] }]),
);
progress.incr(tids.length);

@ -25,8 +25,7 @@ module.exports = {
}).filter(t => t && t.tags.length);
await db.setObjectBulk(
topicsWithTags.map(t => `topic:${t.tid}`),
topicsWithTags.map(t => ({ tags: t.tags.join(',') }))
topicsWithTags.map(t => [`topic:${t.tid}`, { tags: t.tags.join(',') }]),
);
await db.deleteAll(tids.map(tid => `topic:${tid}:tags`));
progress.incr(tids.length);

@ -177,13 +177,9 @@ widgets.setAreas = async function (areas) {
templates[area.template][area.location] = JSON.stringify(area.widgets);
});
const keys = [];
const data = [];
Object.keys(templates).forEach((tpl) => {
keys.push(`widgets:${tpl}`);
data.push(templates[tpl]);
});
await db.setObjectBulk(keys, data);
await db.setObjectBulk(
Object.keys(templates).map(tpl => [`widgets:${tpl}`, templates[tpl]])
);
};
widgets.reset = async function () {

@ -73,36 +73,33 @@ describe('Hash methods', () => {
});
it('should set multiple keys to different objects', async () => {
const keys = ['bulkKey1', 'bulkKey2'];
const data = [{ foo: '1' }, { baz: 'baz' }];
await db.setObjectBulk(keys, data);
const result = await db.getObjects(keys);
assert.deepStrictEqual(result, data);
await db.setObjectBulk([
['bulkKey1', { foo: '1' }],
['bulkKey2', { baz: 'baz' }],
]);
const result = await db.getObjects(['bulkKey1', 'bulkKey2']);
assert.deepStrictEqual(result, [{ foo: '1' }, { baz: 'baz' }]);
});
it('should not error if object is empty', async () => {
const keys = ['bulkKey3', 'bulkKey4'];
const data = [{ foo: '1' }, { }];
await db.setObjectBulk(keys, data);
const result = await db.getObjects(keys);
await db.setObjectBulk([
['bulkKey3', { foo: '1' }],
['bulkKey4', { }],
]);
const result = await db.getObjects(['bulkKey3', 'bulkKey4']);
assert.deepStrictEqual(result, [{ foo: '1' }, null]);
});
it('should update existing object on second call', async () => {
await db.setObjectBulk(['bulkKey3.5'], [{ foo: '1' }]);
await db.setObjectBulk(['bulkKey3.5'], [{ baz: '2' }]);
await db.setObjectBulk([['bulkKey3.5', { foo: '1' }]]);
await db.setObjectBulk([['bulkKey3.5', { baz: '2' }]]);
const result = await db.getObject('bulkKey3.5');
assert.deepStrictEqual(result, { foo: '1', baz: '2' });
});
it('should not error if object is empty', async () => {
const keys = ['bulkKey5'];
const data = [{ }];
await db.setObjectBulk(keys, data);
const result = await db.getObjects(keys);
await db.setObjectBulk([['bulkKey5', {}]]);
const result = await db.getObjects(['bulkKey5']);
assert.deepStrictEqual(result, [null]);
});

Loading…
Cancel
Save