ddl: refine job_type of admin show ddl jobs (#47699) #48002

1 change: 1 addition & 0 deletions pkg/ddl/ddl_worker.go
@@ -117,6 +117,7 @@ type JobContext struct {
 	tp string
 
 	resourceGroupName string
+	cloudStorageURI string
 }
 
 // NewJobContext returns a new ddl job context.
10 changes: 9 additions & 1 deletion pkg/ddl/index.go
@@ -644,6 +644,7 @@ SwitchIndexState:
 			}
 			return ver, err
 		}
+		loadCloudStorageURI(w, job)
 		if reorgTp.NeedMergeProcess() {
 			// Increase telemetryAddIndexIngestUsage
 			telemetryAddIndexIngestUsage.Inc()
@@ -791,6 +792,12 @@ func pickBackfillType(ctx context.Context, job *model.Job, unique bool, d *ddlCt
 	return model.ReorgTypeTxnMerge, nil
 }
 
+func loadCloudStorageURI(w *worker, job *model.Job) {
+	jc := w.jobContext(job.ID, job.ReorgMeta)
+	jc.cloudStorageURI = variable.CloudStorageURI.Load()
+	job.ReorgMeta.UseCloudStorage = len(jc.cloudStorageURI) > 0
+}
+
 // cleanupSortPath is used to clean up the temp data of the previous jobs.
 // Because we don't remove all the files after the support of checkpoint,
 // there maybe some stale files in the sort path if TiDB is killed during the backfill process.
@@ -2111,11 +2118,12 @@ func (w *worker) executeDistGlobalTask(reorgInfo *reorgInfo) error {
 		elemIDs = append(elemIDs, elem.ID)
 	}
 
+	job := reorgInfo.Job
 	taskMeta := &BackfillGlobalMeta{
 		Job: *reorgInfo.Job.Clone(),
 		EleIDs: elemIDs,
 		EleTypeKey: reorgInfo.currElement.TypeKey,
-		CloudStorageURI: variable.CloudStorageURI.Load(),
+		CloudStorageURI: w.jobContext(job.ID, job.ReorgMeta).cloudStorageURI,
 	}
 
 	metaData, err := json.Marshal(taskMeta)
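
The helper added above snapshots the global variable.CloudStorageURI into the per-job context when the add-index job enters its reorg phase, and executeDistGlobalTask now reads that snapshot instead of calling variable.CloudStorageURI.Load() again, so the URI handed to the distributed backfill task stays consistent with the UseCloudStorage flag recorded in the job's reorg meta. Below is a minimal standalone sketch of that snapshot-once pattern; the types and the atomic global are simplified stand-ins, not the actual TiDB definitions.

package main

import (
	"fmt"
	"sync/atomic"
)

// Stand-in for the global variable.CloudStorageURI setting.
var cloudStorageURI atomic.Value

type jobContext struct {
	cloudStorageURI string // per-job snapshot of the global setting
}

type reorgMeta struct {
	UseCloudStorage bool
}

// loadCloudStorageURI mirrors the idea above: read the global URI once, cache
// it in the job context, and record in the reorg meta whether cloud storage is
// in use, so later stages reuse the snapshot rather than re-reading the global.
func loadCloudStorageURI(jc *jobContext, meta *reorgMeta) {
	uri, _ := cloudStorageURI.Load().(string)
	jc.cloudStorageURI = uri
	meta.UseCloudStorage = len(uri) > 0
}

func main() {
	cloudStorageURI.Store("s3://bucket/prefix") // hypothetical URI
	jc, meta := &jobContext{}, &reorgMeta{}
	loadCloudStorageURI(jc, meta)
	fmt.Println(jc.cloudStorageURI, meta.UseCloudStorage)
}
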
1 change: 1 addition & 0 deletions pkg/ddl/multi_schema_change.go
@@ -198,6 +198,7 @@ func appendToSubJobs(m *model.MultiSchemaInfo, job *model.Job) error {
 		Revertible: true,
 		CtxVars: job.CtxVars,
 		ReorgTp: reorgTp,
+		UseCloud: false,
 	})
 	return nil
 }
18 changes: 16 additions & 2 deletions pkg/executor/executor.go
@@ -520,21 +520,35 @@ func (e *DDLJobRetriever) appendJobToChunk(req *chunk.Chunk, job *model.Job, che
 func showAddIdxReorgTp(job *model.Job) string {
 	if job.Type == model.ActionAddIndex || job.Type == model.ActionAddPrimaryKey {
 		if job.ReorgMeta != nil {
+			sb := strings.Builder{}
 			tp := job.ReorgMeta.ReorgTp.String()
 			if len(tp) > 0 {
-				return " /* " + tp + " */"
+				sb.WriteString(" /* ")
+				sb.WriteString(tp)
+				if job.ReorgMeta.UseCloudStorage {
+					sb.WriteString(" cloud")
+				}
+				sb.WriteString(" */")
 			}
+			return sb.String()
 		}
 	}
 	return ""
 }

 func showAddIdxReorgTpInSubJob(subJob *model.SubJob) string {
 	if subJob.Type == model.ActionAddIndex || subJob.Type == model.ActionAddPrimaryKey {
+		sb := strings.Builder{}
 		tp := subJob.ReorgTp.String()
 		if len(tp) > 0 {
-			return " /* " + tp + " */"
+			sb.WriteString(" /* ")
+			sb.WriteString(tp)
+			if subJob.UseCloud {
+				sb.WriteString(" cloud")
+			}
+			sb.WriteString(" */")
 		}
+		return sb.String()
 	}
 	return ""
 }
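
With the executor change, the JOB_TYPE column of ADMIN SHOW DDL JOBS (and its sub-job rows) carries a " cloud" marker inside the reorg-type comment whenever the backfill used cloud storage. Here is a self-contained sketch of the suffix construction and roughly the output to expect; reorgSuffix below is illustrative, not the TiDB function itself.

package main

import (
	"fmt"
	"strings"
)

// reorgSuffix rebuilds the comment suffix the same way showAddIdxReorgTp does:
// the reorg type, plus " cloud" when the job's reorg meta marks cloud storage.
func reorgSuffix(reorgTp string, useCloud bool) string {
	if len(reorgTp) == 0 {
		return ""
	}
	sb := strings.Builder{}
	sb.WriteString(" /* ")
	sb.WriteString(reorgTp)
	if useCloud {
		sb.WriteString(" cloud")
	}
	sb.WriteString(" */")
	return sb.String()
}

func main() {
	// Roughly what the JOB_TYPE column shows for an ingest-backed ADD INDEX job:
	fmt.Println("add index" + reorgSuffix("ingest", true))  // add index /* ingest cloud */
	fmt.Println("add index" + reorgSuffix("ingest", false)) // add index /* ingest */
}
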
2 changes: 2 additions & 0 deletions pkg/parser/model/ddl.go
@@ -301,6 +301,7 @@ type SubJob struct {
 	CtxVars []interface{} `json:"-"`
 	SchemaVer int64 `json:"schema_version"`
 	ReorgTp ReorgType `json:"reorg_tp"`
+	UseCloud bool `json:"use_cloud"`
 }
 
 // IsNormal returns true if the sub-job is normally running.
@@ -369,6 +370,7 @@ func (sub *SubJob) FromProxyJob(proxyJob *Job, ver int64) {
 	sub.RowCount = proxyJob.RowCount
 	sub.SchemaVer = ver
 	sub.ReorgTp = proxyJob.ReorgMeta.ReorgTp
+	sub.UseCloud = proxyJob.ReorgMeta.UseCloudStorage
 }
 
 // JobMeta is meta info of Job.
1 change: 1 addition & 0 deletions pkg/parser/model/reorg.go
@@ -29,6 +29,7 @@ type DDLReorgMeta struct {
 	Location *TimeZoneLocation `json:"location"`
 	ReorgTp ReorgType `json:"reorg_tp"`
 	IsDistReorg bool `json:"is_dist_reorg"`
+	UseCloudStorage bool `json:"use_cloud_storage"`
 	ResourceGroupName string `json:"resource_group_name"`
 	Version int64 `json:"version"`
 }
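
The new use_cloud_storage and use_cloud JSON fields are additive: reorg meta serialized before this change simply unmarshals with the flag at Go's zero value, false. A quick sketch of that behavior, using a cut-down stand-in for model.DDLReorgMeta.

package main

import (
	"encoding/json"
	"fmt"
)

// Cut-down stand-in for model.DDLReorgMeta with only the new flag.
type reorgMeta struct {
	UseCloudStorage bool `json:"use_cloud_storage"`
}

func main() {
	// Meta written before this change has no "use_cloud_storage" key, so the
	// flag decodes to the zero value, false.
	var m reorgMeta
	_ = json.Unmarshal([]byte(`{}`), &m)
	fmt.Println(m.UseCloudStorage) // false

	// Meta written after this change round-trips the flag explicitly.
	out, _ := json.Marshal(reorgMeta{UseCloudStorage: true})
	fmt.Println(string(out)) // {"use_cloud_storage":true}
}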