diff --git a/base/dsfs/compute_fields.go b/base/dsfs/compute_fields.go
index aa3e205c3..46945ef42 100644
--- a/base/dsfs/compute_fields.go
+++ b/base/dsfs/compute_fields.go
@@ -168,8 +168,12 @@ func (cff *computeFieldsFile) handleRows(ctx context.Context) {
 	}
 
 	cff.Lock()
-	// assign timestamp early. saving process on large files can take many minutes
-	cff.ds.Commit.Timestamp = Timestamp()
+	if cff.ds.Commit.Timestamp.IsZero() {
+		// assign timestamp early. saving process on large files can take many minutes
+		cff.ds.Commit.Timestamp = Timestamp()
+	} else {
+		cff.ds.Commit.Timestamp = cff.ds.Commit.Timestamp.In(time.UTC)
+	}
 	cff.acc = dsstats.NewAccumulator(st)
 	cff.Unlock()
 
diff --git a/base/dsfs/dataset.go b/base/dsfs/dataset.go
index d6f07dca0..4b388932f 100644
--- a/base/dsfs/dataset.go
+++ b/base/dsfs/dataset.go
@@ -111,6 +111,8 @@ func DerefDataset(ctx context.Context, store qfs.Filesystem, ds *dataset.Dataset
 
 // SaveSwitches represents options for saving a dataset
 type SaveSwitches struct {
+	// Use a custom timestamp, defaults to time.Now if unset
+	Time time.Time
 	// Replace is whether the save is a full replacement or a set of patches to previous
 	Replace bool
 	// Pin is whether the dataset should be pinned
diff --git a/base/dsfs/dataset_test.go b/base/dsfs/dataset_test.go
index 4226f2749..19c073a2f 100644
--- a/base/dsfs/dataset_test.go
+++ b/base/dsfs/dataset_test.go
@@ -112,7 +112,6 @@ func TestLoadDataset(t *testing.T) {
 			continue
 		}
 	}
-
 }
 
 func TestCreateDataset(t *testing.T) {
@@ -297,6 +296,37 @@ func TestCreateDataset(t *testing.T) {
 	// case: previous dataset isn't valid
 }
 
+func TestDatasetSaveCustomTimestamp(t *testing.T) {
+	ctx := context.Background()
+	fs := qfs.NewMemFS()
+	privKey := testPeers.GetTestPeerInfo(10).PrivKey
+
+	// use a custom timestamp in local zone. should be converted to UTC for saving
+	ts := time.Date(2100, 1, 2, 3, 4, 5, 6, time.Local)
+
+	ds := &dataset.Dataset{
+		Commit: &dataset.Commit{
+			Timestamp: ts,
+		},
+		Structure: &dataset.Structure{Format: "json", Schema: dataset.BaseSchemaArray},
+	}
+	ds.SetBodyFile(qfs.NewMemfileBytes("/body.json", []byte(`[]`)))
+
+	path, err := CreateDataset(ctx, fs, fs, ds, nil, privKey, SaveSwitches{})
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	got, err := LoadDataset(ctx, fs, path)
+	if err != nil {
+		t.Fatal(err)
+	}
+
+	if !ts.In(time.UTC).Equal(got.Commit.Timestamp) {
+		t.Errorf("result timestamp mismatch.\nwant: %q\ngot: %q", ts.In(time.UTC), got.Commit.Timestamp)
+	}
+}
+
 // BaseTabularSchema is the base schema for tabular data
 // NOTE: Do not use if possible, prefer github.com/qri-io/dataset/tabular
 // TODO(dustmop): Possibly move this to tabular package
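
For anyone trying the change out from outside the dsfs package, here is a minimal sketch of how a caller could pin a commit to a custom timestamp, based on the test above: a non-zero Commit.Timestamp is preserved and normalized to UTC, while a zero value still falls back to Timestamp() (time.Now). The new SaveSwitches.Time field presumably feeds the same code path, but that wiring is not part of this excerpt, so the sketch sets Commit.Timestamp directly. The helper name, the key generation, and the import paths are assumptions, not part of this change.

// sketch: pinning a commit to a caller-supplied timestamp (assumed usage, not from this diff)
package main

import (
	"context"
	"fmt"
	"time"

	crypto "github.com/libp2p/go-libp2p-core/crypto"
	"github.com/qri-io/dataset"
	"github.com/qri-io/qfs"
	"github.com/qri-io/qri/base/dsfs"
)

// saveWithCustomTimestamp writes a small dataset whose commit uses a
// caller-supplied time instead of time.Now; dsfs stores it in UTC.
func saveWithCustomTimestamp(ctx context.Context, fs qfs.Filesystem, pk crypto.PrivKey, ts time.Time) (string, error) {
	ds := &dataset.Dataset{
		Commit: &dataset.Commit{
			// non-zero timestamps are kept; a zero value falls back to Timestamp()
			Timestamp: ts,
		},
		Structure: &dataset.Structure{Format: "json", Schema: dataset.BaseSchemaArray},
	}
	ds.SetBodyFile(qfs.NewMemfileBytes("/body.json", []byte(`[]`)))

	return dsfs.CreateDataset(ctx, fs, fs, ds, nil, pk, dsfs.SaveSwitches{})
}

func main() {
	ctx := context.Background()
	fs := qfs.NewMemFS()

	// hypothetical key setup: any libp2p private key works for signing here
	pk, _, err := crypto.GenerateKeyPair(crypto.Ed25519, -1)
	if err != nil {
		panic(err)
	}

	path, err := saveWithCustomTimestamp(ctx, fs, pk, time.Date(2021, 6, 1, 12, 0, 0, 0, time.Local))
	if err != nil {
		panic(err)
	}
	fmt.Println("saved at", path)
}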