
Commit c1226a2

feat: CommonDao.deleteByQuery stream option
1 parent: b155085

5 files changed: +234 −216 lines changed

.github/workflows/default.yml

Lines changed: 0 additions & 5 deletions
@@ -25,8 +25,3 @@ jobs:
           ./cc-test-reporter before-build
           yarn test-ci
           ./cc-test-reporter after-build -t lcov
-
-      - uses: actions/upload-artifact@v2
-        with: { name: 'unit.xml', path: 'tmp/jest/unit.xml' }
-      - uses: actions/upload-artifact@v2
-        with: { name: 'coverage-unit', path: 'coverage/lcov-report' }

src/commondao/common.dao.test.ts

Lines changed: 2 additions & 0 deletions
@@ -68,6 +68,8 @@ test('common', async () => {
 
   expect(await dao.deleteById(undefined)).toBe(0)
   expect(await dao.deleteById('123')).toBe(0)
+  expect(await dao.deleteByQuery(dao.query())).toBe(0)
+  expect(await dao.deleteByQuery(dao.query(), { stream: true })).toBe(0)
 
   expect(dao.anyToDBM(undefined)).toBeUndefined()
   expect(dao.anyToDBM({}, { skipValidation: true })).toMatchObject({})

src/commondao/common.dao.ts

Lines changed: 43 additions & 3 deletions
@@ -30,6 +30,7 @@ import {
   transformTap,
   writableVoid,
   _pipeline,
+  transformBuffer,
 } from '@naturalcycles/nodejs-lib'
 import { DBLibError } from '../cnst'
 import { DBModelType, RunQueryResult } from '../db.model'

@@ -719,14 +720,53 @@ export class CommonDao<
     return deletedIds
   }
 
-  async deleteByQuery(q: DBQuery<DBM>, opt: CommonDaoOptions = {}): Promise<number> {
+  /**
+   * Pass the `stream: true` option to use streaming: it will stream the query, batch by 500,
+   * and execute `deleteByIds` for each batch concurrently (infinite concurrency).
+   * This is expected to be a more memory-efficient way of deleting large numbers of rows.
+   */
+  async deleteByQuery(
+    q: DBQuery<DBM>,
+    opt: CommonDaoStreamForEachOptions<DBM> & { stream?: boolean } = {},
+  ): Promise<number> {
     this.requireWriteAccess()
     q.table = opt.table || q.table
     const op = `deleteByQuery(${q.pretty()})`
     const started = this.logStarted(op, q.table)
-    const ids = await this.cfg.db.deleteByQuery(q, opt)
+    let deleted = 0
+
+    if (opt.stream) {
+      const batchSize = 500
+
+      await _pipeline([
+        this.cfg.db.streamQuery<DBM>(q.select(['id']), opt),
+        transformMapSimple<ObjectWithId, string>(objectWithId => objectWithId.id, {
+          errorMode: ErrorMode.SUPPRESS,
+        }),
+        transformBuffer<string>({ batchSize }),
+        transformMap<string[], void>(
+          async ids => {
+            deleted += await this.cfg.db.deleteByIds(q.table, ids, opt)
+          },
+          {
+            predicate: _passthroughPredicate,
+          },
+        ),
+        // LogProgress should be AFTER the mapper, to be able to report correct stats
+        transformLogProgress({
+          metric: q.table,
+          logEvery: 2, // 500 * 2 === 1000
+          batchSize,
+          ...opt,
+        }),
+        writableVoid(),
+      ])
+    } else {
+      deleted = await this.cfg.db.deleteByQuery(q, opt)
+    }
+
     this.logSaveResult(started, op, q.table)
-    return ids
+    return deleted
   }
 
   // CONVERSIONS
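
A minimal usage sketch of the new option (based on the added test; `dao` is assumed to be a CommonDao instance as in common.dao.test.ts above):

// Default mode: delegates the delete to CommonDB.deleteByQuery in a single call.
const deleted = await dao.deleteByQuery(dao.query())

// Stream mode: streams only the `id` column of the matching rows, buffers the ids
// in batches of 500, and deletes each batch via CommonDB.deleteByIds.
const deletedStreamed = await dao.deleteByQuery(dao.query(), { stream: true })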

src/query/dbQuery.ts

Lines changed: 3 additions & 1 deletion
@@ -272,7 +272,9 @@ export class RunnableDBQuery<
     await this.dao.streamQueryIdsForEach(this, mapper, opt)
   }
 
-  async deleteByQuery(opt?: CommonDaoOptions): Promise<number> {
+  async deleteByQuery(
+    opt?: CommonDaoStreamForEachOptions<DBM> & { stream?: boolean },
+  ): Promise<number> {
     return await this.dao.deleteByQuery(this, opt)
   }
 }
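
Because RunnableDBQuery.deleteByQuery forwards its options to CommonDao.deleteByQuery, the same option can also be passed through the fluent query API. A sketch under assumptions (the `filterEq` call and the field shown are illustrative, not part of this commit):

// Hypothetical fluent usage; the filter is for illustration only.
const deletedViaQuery = await dao
  .query()
  .filterEq('type', 'obsolete')
  .deleteByQuery({ stream: true })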

0 commit comments
