39 changes: 29 additions & 10 deletions fe/fe-core/src/main/java/org/apache/doris/backup/BackupJobInfo.java
@@ -35,6 +35,7 @@
import org.apache.doris.common.FeConstants;
import org.apache.doris.common.io.Text;
import org.apache.doris.common.io.Writable;
import org.apache.doris.persist.gson.GsonUtils;

import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
@@ -59,8 +60,6 @@
import java.util.Set;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import org.glassfish.jersey.internal.guava.Sets;

@@ -72,6 +71,7 @@
public class BackupJobInfo implements Writable {
private static final Logger LOG = LogManager.getLogger(BackupJobInfo.class);

@SerializedName("name")
public String name;
@SerializedName("database")
public String dbName;
@@ -87,7 +87,7 @@ public class BackupJobInfo {
// include other objects: view, external table
@SerializedName("new_backup_objects")
public NewBackupObjects newBackupObjects = new NewBackupObjects();
public boolean success = true;
public boolean success;
@SerializedName("backup_result")
public String successJson = "succeed";

@@ -96,7 +96,6 @@ public class BackupJobInfo {

// This map is used to save the table alias mapping info when processing a restore job.
// origin -> alias
@Expose(serialize = false, deserialize = false)
public Map<String, String> tblAlias = Maps.newHashMap();

public void initBackupJobInfoAfterDeserialize() {
@@ -274,10 +273,13 @@ public String getOrginNameByAlias(String alias) {
}

public static class BriefBackupJobInfo {
@SerializedName("name")
public String name;
@SerializedName("database")
public String database;
@SerializedName("backup_time")
public long backupTime;
@SerializedName("content")
public BackupContent content;
@SerializedName("olap_table_list")
public List<BriefBackupOlapTable> olapTableList = Lists.newArrayList();
@@ -309,12 +311,14 @@ public static BriefBackupJobInfo fromBackupJobInfo(BackupJobInfo backupJobInfo)
}

public static class BriefBackupOlapTable {
@SerializedName("name")
public String name;
@SerializedName("partition_names")
public List<String> partitionNames;
}

public static class NewBackupObjects {
@SerializedName("views")
public List<BackupViewInfo> views = Lists.newArrayList();
@SerializedName("odbc_tables")
public List<BackupOdbcTableInfo> odbcTables = Lists.newArrayList();
@@ -323,7 +327,9 @@ public static class NewBackupObjects {
}

public static class BackupOlapTableInfo {
@SerializedName("id")
public long id;
@SerializedName("partitions")
public Map<String, BackupPartitionInfo> partitions = Maps.newHashMap();

public boolean containsPart(String partName) {
@@ -349,10 +355,13 @@ public void retainPartitions(Collection<String> partNames) {
}

public static class BackupPartitionInfo {
@SerializedName("id")
public long id;
@SerializedName("version")
public long version;
@SerializedName("version_hash")
public long versionHash;
@SerializedName("indexes")
public Map<String, BackupIndexInfo> indexes = Maps.newHashMap();

public BackupIndexInfo getIdx(String idxName) {
@@ -361,13 +370,14 @@ public BackupIndexInfo getIdx(String idxName) {
}

public static class BackupIndexInfo {
@SerializedName("id")
public long id;
@SerializedName("schema_hash")
public int schemaHash;
@SerializedName("tablets")
public Map<Long, List<String>> tablets = Maps.newHashMap();
@SerializedName("tablets_order")
public List<Long> tabletsOrder = Lists.newArrayList();
@Expose(serialize = false, deserialize = false)
public List<BackupTabletInfo> sortedTabletInfoList = Lists.newArrayList();

public List<String> getTabletFiles(long tabletId) {
@@ -388,7 +398,9 @@ private List<Long> getSortedTabletIds() {
}

public static class BackupTabletInfo {
@SerializedName("id")
public long id;
@SerializedName("files")
public List<String> files;

public BackupTabletInfo(long id, List<String> files) {
@@ -398,11 +410,14 @@ public BackupTabletInfo(long id, List<String> files) {
}

public static class BackupViewInfo {
@SerializedName("id")
public long id;
@SerializedName("name")
public String name;
}

public static class BackupOdbcTableInfo {
@SerializedName("id")
public long id;
@SerializedName("doris_table_name")
public String dorisTableName;
@@ -412,15 +427,20 @@ public static class BackupOdbcTableInfo {
public String linkedOdbcTableName;
@SerializedName("resource_name")
public String resourceName;
@SerializedName("host")
public String host;
@SerializedName("port")
public String port;
@SerializedName("user")
public String user;
@SerializedName("driver")
public String driver;
@SerializedName("odbc_type")
public String odbcType;
}

public static class BackupOdbcResourceInfo {
@SerializedName("name")
public String name;
}

@@ -624,8 +644,7 @@ private static BackupJobInfo genFromJson(String json) {
* }
* }
*/
Gson gson = new Gson();
BackupJobInfo jobInfo = gson.fromJson(json, BackupJobInfo.class);
BackupJobInfo jobInfo = GsonUtils.GSON.fromJson(json, BackupJobInfo.class);
jobInfo.initBackupJobInfoAfterDeserialize();
return jobInfo;
}
@@ -644,16 +663,16 @@ public void writeToFile(File jobInfoFile) throws FileNotFoundException {
// Only return basic info, table and partitions
public String getBrief() {
BriefBackupJobInfo briefBackupJobInfo = BriefBackupJobInfo.fromBackupJobInfo(this);
Gson gson = new GsonBuilder().setPrettyPrinting().create();
Gson gson = GsonUtils.GSON_PRETTY_PRINTING;
return gson.toJson(briefBackupJobInfo);
}

public String toJson(boolean prettyPrinting) {
Gson gson;
if (prettyPrinting) {
gson = new GsonBuilder().setPrettyPrinting().create();
gson = GsonUtils.GSON_PRETTY_PRINTING;
} else {
gson = new Gson();
gson = GsonUtils.GSON;
}
return gson.toJson(this);
}
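
The Gson changes in this file hang together: the ad-hoc `new Gson()` and `new GsonBuilder().setPrettyPrinting().create()` calls are replaced by the shared `GsonUtils.GSON` and `GsonUtils.GSON_PRETTY_PRINTING` instances, whose builder (per the GsonUtils comment further down) ignores any field that lacks a `@SerializedName` annotation. That is why `@SerializedName` is added across the nested classes and why the `@Expose(serialize = false, deserialize = false)` markers on `tblAlias` and `sortedTabletInfoList` can go away. A minimal, self-contained sketch of that exclusion-strategy pattern (illustrative only — not the actual Doris builder chain; the class and field names below are made up):

```java
import java.util.HashMap;
import java.util.Map;

import com.google.gson.ExclusionStrategy;
import com.google.gson.FieldAttributes;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.annotations.SerializedName;

public class SerializedNameOnlyDemo {

    // Skip every field that lacks @SerializedName, mirroring the behaviour
    // described in the GsonUtils comment block in the next file.
    private static final ExclusionStrategy ONLY_ANNOTATED = new ExclusionStrategy() {
        @Override
        public boolean shouldSkipField(FieldAttributes f) {
            return f.getAnnotation(SerializedName.class) == null;
        }

        @Override
        public boolean shouldSkipClass(Class<?> clazz) {
            return false;
        }
    };

    // Hypothetical stand-in for BackupJobInfo: one annotated field, one not.
    static class Sample {
        @SerializedName("name")
        String name = "backup_label_1";

        // No annotation: dropped from the JSON, like the tblAlias helper map.
        Map<String, String> tblAlias = new HashMap<>();
    }

    public static void main(String[] args) {
        Gson gson = new GsonBuilder()
                .addSerializationExclusionStrategy(ONLY_ANNOTATED)
                .addDeserializationExclusionStrategy(ONLY_ANNOTATED)
                .create();
        System.out.println(gson.toJson(new Sample())); // prints {"name":"backup_label_1"}
    }
}
```
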
fe/fe-core/src/main/java/org/apache/doris/persist/gson/GsonUtils.java
@@ -132,9 +132,13 @@ public class GsonUtils {
.registerTypeAdapter(ImmutableMap.class, new ImmutableMapDeserializer())
.registerTypeAdapter(AtomicBoolean.class, new AtomicBooleanAdapter());

private static final GsonBuilder GSON_BUILDER_PRETTY_PRINTING = GSON_BUILDER.setPrettyPrinting();

// this instance is thread-safe.
public static final Gson GSON = GSON_BUILDER.create();

public static final Gson GSON_PRETTY_PRINTING = GSON_BUILDER_PRETTY_PRINTING.create();

/*
* The exclusion strategy of GSON serialization.
* Any fields without the "@SerializedName" annotation will be ignored with
fe/fe-core/src/test/java/org/apache/doris/backup/BackupJobInfoTest.java
@@ -17,11 +17,6 @@

package org.apache.doris.backup;

import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
@@ -31,6 +26,11 @@
import java.io.IOException;
import java.io.PrintWriter;

import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

public class BackupJobInfoTest {

private static String fileName = "job_info.txt";
@@ -169,6 +169,7 @@ public void testReadWrite() {
Assert.assertEquals("view1", jobInfo.newBackupObjects.views.get(0).name);

File tmpFile = new File("./tmp");
File tmpFile1 = new File("./tmp1");
try {
DataOutputStream out = new DataOutputStream(new FileOutputStream(tmpFile));
jobInfo.write(out);
@@ -186,11 +187,28 @@
Assert.assertEquals(jobInfo.newBackupObjects.views.size(), newInfo.newBackupObjects.views.size());
Assert.assertEquals("view1", newInfo.newBackupObjects.views.get(0).name);

out = new DataOutputStream(new FileOutputStream(tmpFile1));
newInfo.write(out);
out.flush();
out.close();

in = new DataInputStream(new FileInputStream(tmpFile1));
BackupJobInfo newInfo1 = BackupJobInfo.read(in);
in.close();

Assert.assertEquals(
newInfo.backupOlapTableObjects.get("table2").getPartInfo("partition1")
.indexes.get("table2").sortedTabletInfoList.size(),
newInfo1.backupOlapTableObjects.get("table2").getPartInfo("partition1")
.indexes.get("table2").sortedTabletInfoList.size());

} catch (IOException e) {
e.printStackTrace();
Assert.fail();
} finally {
tmpFile.delete();
tmpFile1.delete();
}

}
}
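
The new assertions exercise a second serialize/deserialize cycle: `sortedTabletInfoList` carries no `@SerializedName` annotation, so under the shared GsonUtils exclusion strategy it is not written to JSON and has to be rebuilt by `initBackupJobInfoAfterDeserialize()`; the second round trip checks that this derived state comes back the same. A hedged sketch of that pattern, written as it might sit inside `BackupJobInfoTest` (the `roundTrip` helper and the `jobInfo` fixture are hypothetical stand-ins for the write/read sequence and test data above):

```java
// Hypothetical helper: one write(out) / BackupJobInfo.read(in) cycle via a temp file.
private static BackupJobInfo roundTrip(BackupJobInfo info) throws IOException {
    File tmp = File.createTempFile("backup_job_info", ".dat");
    try (DataOutputStream out = new DataOutputStream(new FileOutputStream(tmp))) {
        info.write(out);
    }
    try (DataInputStream in = new DataInputStream(new FileInputStream(tmp))) {
        return BackupJobInfo.read(in);
    } finally {
        tmp.delete();
    }
}

@Test
public void testDerivedStateSurvivesSecondRoundTrip() throws IOException {
    // "jobInfo" is assumed to be the same fixture built in testReadWrite().
    BackupJobInfo first = roundTrip(jobInfo);
    BackupJobInfo second = roundTrip(first);
    // The derived tablet list is rebuilt after each deserialization, so both
    // passes should agree on its size.
    Assert.assertEquals(
            first.backupOlapTableObjects.get("table2").getPartInfo("partition1")
                    .indexes.get("table2").sortedTabletInfoList.size(),
            second.backupOlapTableObjects.get("table2").getPartInfo("partition1")
                    .indexes.get("table2").sortedTabletInfoList.size());
}
```
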