Skip to content

Commit

Permalink
fixed issue introduced after #865 where we can't use `create_remote --diff-from-remote…
Browse files Browse the repository at this point in the history
…` for `remote_storage: custom`, affected versions 2.5.0, 2.5.1, fix #900
  • Loading branch information
Slach committed Apr 23, 2024
1 parent 22791b9 commit 686aba6
Show file tree
Hide file tree
Showing 4 changed files with 39 additions and 6 deletions.
4 changes: 4 additions & 0 deletions ChangeLog.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,7 @@
# v2.5.2
BUG FIXES
- fixed issue introduced after [865](https://github.com/Altinity/clickhouse-backup/pull/865) where `create_remote --diff-from-remote` can't be used for `remote_storage: custom`; affected versions 2.5.0 and 2.5.1, fix [900](https://github.com/Altinity/clickhouse-backup/pull/900)

# v2.5.1
BUG FIXES
- fixed issue where, when both `AWS_ROLE_ARN` and `S3_ASSUME_ROLE_ARN` are set, `S3_ASSUME_ROLE_ARN` takes priority over `AWS_ROLE_ARN`, fix [898](https://github.com/Altinity/clickhouse-backup/issues/898)
Expand Down
4 changes: 2 additions & 2 deletions pkg/backup/create.go
Original file line number Diff line number Diff line change
Expand Up @@ -216,7 +216,7 @@ func (b *Backuper) createBackupLocal(ctx context.Context, backupName, diffFromRe
}
}
}
if isObjectDiskContainsTables || diffFromRemote != "" {
if isObjectDiskContainsTables || (diffFromRemote != "" && b.cfg.General.RemoteStorage != "custom") {
var err error
if err = config.ValidateObjectDiskConfig(b.cfg); err != nil {
return err
Expand All @@ -235,7 +235,7 @@ func (b *Backuper) createBackupLocal(ctx context.Context, backupName, diffFromRe
}()
}
var tablesDiffFromRemote map[metadata.TableTitle]metadata.TableMetadata
if diffFromRemote != "" {
if diffFromRemote != "" && b.cfg.General.RemoteStorage != "custom" {
var diffFromRemoteErr error
tablesDiffFromRemote, diffFromRemoteErr = b.getTablesDiffFromRemote(ctx, diffFromRemote, tablePattern)
if diffFromRemoteErr != nil {
Expand Down
25 changes: 24 additions & 1 deletion pkg/backup/watch.go
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,30 @@ func (b *Backuper) Watch(watchInterval, fullInterval, watchBackupNameTemplate, t
} else {
createRemoteErr = b.CreateToRemote(backupName, false, "", diffFromRemote, tablePattern, partitions, schemaOnly, backupRBAC, false, backupConfigs, false, skipCheckPartsColumns, false, version, commandId)
if createRemoteErr != nil {
log.Errorf("create_remote %s return error: %v", backupName, createRemoteErr)
cmd := "create_remote"
if diffFromRemote != "" {
cmd += " --diff-from-remote=" + diffFromRemote
}
if tablePattern != "" {
cmd += " --tables=" + tablePattern
}
if len(partitions) > 0 {
cmd += " --partition=" + strings.Join(partitions, ",")
}
if schemaOnly {
cmd += " --schema"
}
if backupRBAC {
cmd += " --rbac"
}
if backupConfigs {
cmd += " --configs"
}
if skipCheckPartsColumns {
cmd += " --skip-check-parts-columns"
}
cmd += " " + backupName
log.Errorf("%s return error: %v", cmd, createRemoteErr)
createRemoteErrCount += 1
} else {
createRemoteErrCount = 0
Expand Down
12 changes: 9 additions & 3 deletions test/integration/integration_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -2197,6 +2197,7 @@ func runMainIntegrationScenario(t *testing.T, remoteStorageType, backupConfig st
// main test scenario
testBackupName := fmt.Sprintf("%s_full_%d", t.Name(), rand.Int())
incrementBackupName := fmt.Sprintf("%s_increment_%d", t.Name(), rand.Int())
incrementBackupName2 := fmt.Sprintf("%s_increment2_%d", t.Name(), rand.Int())
databaseList := []string{dbNameOrdinary, dbNameAtomic, dbNameMySQL, dbNamePostgreSQL, Issue331Atomic, Issue331Ordinary}
tablesPattern := fmt.Sprintf("*_%s.*", t.Name())
log.Info("Clean before start")
Expand All @@ -2206,18 +2207,23 @@ func runMainIntegrationScenario(t *testing.T, remoteStorageType, backupConfig st
testData := generateTestData(t, r, ch, remoteStorageType, defaultTestData)

r.NoError(dockerExec("minio", "mc", "ls", "local/clickhouse/disk_s3"))

log.Info("Create backup")
r.NoError(dockerExec("clickhouse-backup", "clickhouse-backup", "-c", "/etc/clickhouse-backup/"+backupConfig, "create", "--tables", tablesPattern, testBackupName))
generateIncrementTestData(t, ch, r, defaultIncrementData)

generateIncrementTestData(t, ch, r, defaultIncrementData)
r.NoError(dockerExec("clickhouse-backup", "clickhouse-backup", "-c", "/etc/clickhouse-backup/"+backupConfig, "create", "--tables", tablesPattern, incrementBackupName))

log.Info("create --diff-from-remote backup")
r.NoError(dockerExec("clickhouse-backup", "clickhouse-backup", "-c", "/etc/clickhouse-backup/"+backupConfig, "create", "--diff-from-remote", testBackupName, "--tables", tablesPattern, incrementBackupName2))
r.NoError(dockerExec("clickhouse-backup", "clickhouse-backup", "-c", "/etc/clickhouse-backup/"+backupConfig, "delete", "local", incrementBackupName2))

log.Info("Upload")
uploadCmd := fmt.Sprintf("%s_COMPRESSION_FORMAT=zstd CLICKHOUSE_BACKUP_CONFIG=/etc/clickhouse-backup/%s clickhouse-backup upload --resume %s", remoteStorageType, backupConfig, testBackupName)
checkResumeAlreadyProcessed(uploadCmd, testBackupName, "upload", r, remoteStorageType)

diffFrom := "--diff-from-remote"
uploadCmd = fmt.Sprintf("clickhouse-backup -c /etc/clickhouse-backup/%s upload %s %s %s --resume", backupConfig, incrementBackupName, diffFrom, testBackupName)
log.Info("Upload increment")
uploadCmd = fmt.Sprintf("clickhouse-backup -c /etc/clickhouse-backup/%s upload %s --diff-from-remote %s --resume", backupConfig, incrementBackupName, testBackupName)
checkResumeAlreadyProcessed(uploadCmd, incrementBackupName, "upload", r, remoteStorageType)

backupDir := "/var/lib/clickhouse/backup"
Expand Down

0 comments on commit 686aba6

Please sign in to comment.