Batched rel references and added a test for the batch delete processing

This commit is contained in:
Gani Georgiev
2022-12-12 19:21:41 +02:00
parent 0eeae9de80
commit be3dd42eac
6 changed files with 111 additions and 159 deletions
+5 -7
View File
@@ -111,7 +111,7 @@ func (dao *Dao) RunInTransaction(fn func(txDao *Dao) error) error {
case *dbx.Tx:
// nested transactions are not supported by default
// so execute the function within the current transaction
// ---
// create a new dao with the same hooks to avoid semaphore deadlock when nesting
txDao := New(txOrDB)
txDao.BeforeCreateFunc = dao.BeforeCreateFunc
@@ -320,12 +320,10 @@ Retry:
if attempts == 2 {
// assign new Dao without the before hooks to avoid triggering
// the already fired before callbacks multiple times
retryDao = &Dao{
db: dao.db,
AfterCreateFunc: dao.AfterCreateFunc,
AfterUpdateFunc: dao.AfterUpdateFunc,
AfterDeleteFunc: dao.AfterDeleteFunc,
}
retryDao = New(dao.db)
retryDao.AfterCreateFunc = dao.AfterCreateFunc
retryDao.AfterUpdateFunc = dao.AfterUpdateFunc
retryDao.AfterDeleteFunc = dao.AfterDeleteFunc
}
// execute
+8 -8
View File
@@ -433,21 +433,21 @@ func (dao *Dao) cascadeRecordDelete(mainRecord *models.Record, refs map[*models.
break
}
perPage := 200
pages := int(math.Ceil(float64(total) / float64(perPage)))
perWorkers := 50
workers := int(math.Ceil(float64(total) / float64(perWorkers)))
batchErr := func() error {
ch := make(chan error)
defer close(ch)
for i := 0; i < pages; i++ {
for i := 0; i < workers; i++ {
var chunks []dbx.NullStringMap
if len(rows) <= perPage {
chunks = rows[0:]
if len(rows) <= perWorkers {
chunks = rows
rows = nil
} else {
chunks = rows[0:perPage]
rows = rows[perPage:]
chunks = rows[:perWorkers]
rows = rows[perWorkers:]
}
go func() {
@@ -456,7 +456,7 @@ func (dao *Dao) cascadeRecordDelete(mainRecord *models.Record, refs map[*models.
}()
}
for i := 0; i < pages; i++ {
for i := 0; i < workers; i++ {
if err := <-ch; err != nil {
return err
}