feat: migration can skip existing docs via configurable bulk operation

This commit is contained in:
hardy 2024-02-05 10:09:56 +08:00
parent a338af5c9a
commit 855c4849cb
No known key found for this signature in database
GPG Key ID: D1ED7F7A9ED520C3
2 changed files with 12 additions and 10 deletions

View File

@ -200,15 +200,16 @@ func (p *processor) splitMajorMigrationTask(taskItem *task.Task) error {
IdleTimeoutInSeconds: clusterMigrationTask.Settings.Bulk.IdleTimeoutInSeconds,
SliceSize: clusterMigrationTask.Settings.Bulk.SliceSize,
Compress: clusterMigrationTask.Settings.Bulk.Compress,
Operation: clusterMigrationTask.Settings.Bulk.Operation,
},
}
if index.Partition != nil {
partitionQ := &elastic.PartitionQuery{
IndexName: index.Source.Name,
FieldName: index.Partition.FieldName,
FieldType: index.Partition.FieldType,
Step: index.Partition.Step,
IndexName: index.Source.Name,
FieldName: index.Partition.FieldName,
FieldType: index.Partition.FieldType,
Step: index.Partition.Step,
UseEvenStrategy: index.Partition.UseEvenStrategy,
}
if source.QueryDSL != nil {

View File

@ -23,12 +23,13 @@ type IndexDiffConfig struct {
// tunable `bulk_indexing` configurations
// BulkIndexingConfig holds the tunable `bulk_indexing` settings used when
// splitting and running a migration task's bulk phase.
//
// NOTE(review): the diff residue duplicated every field (pre- and post-change
// lines were fused); this is the reconstructed post-change struct, which adds
// the Operation field.
type BulkIndexingConfig struct {
	Docs                 int  `json:"docs"`                    // max docs per bulk request
	StoreSizeInMB        int  `json:"store_size_in_mb"`        // max payload size per bulk request, in MB
	MaxWorkerSize        int  `json:"max_worker_size"`         // upper bound on concurrent bulk workers
	IdleTimeoutInSeconds int  `json:"idle_timeout_in_seconds"` // worker idle timeout before shutdown
	SliceSize            int  `json:"slice_size"`              // number of slices for sliced scroll/bulk
	Compress             bool `json:"compress"`                // whether to compress bulk request bodies
	// Operation selects the bulk action (e.g. "index" vs "create") — "create"
	// lets the migration skip documents that already exist in the target.
	Operation string `json:"operation"`
}
type PipelineTaskLoggingConfig struct {