feat(bigquery): switch all timestamp representations to int64 usec #9368

Merged · 4 commits · Feb 5, 2024
2 changes: 2 additions & 0 deletions bigquery/iterator.go
@@ -280,6 +280,7 @@ func fetchTableResultPage(ctx context.Context, src *rowSource, schema Schema, st
         }()
     }
     call := src.t.c.bqs.Tabledata.List(src.t.ProjectID, src.t.DatasetID, src.t.TableID)
+    call = call.FormatOptionsUseInt64Timestamp(true)
     setClientHeader(call.Header())
     if pageToken != "" {
         call.PageToken(pageToken)
@@ -317,6 +318,7 @@ func fetchJobResultPage(ctx context.Context, src *rowSource, schema Schema, star
     // reduce data transfered by leveraging api projections
     projectedFields := []googleapi.Field{"rows", "pageToken", "totalRows"}
     call := src.j.c.bqs.Jobs.GetQueryResults(src.j.projectID, src.j.jobID).Location(src.j.location).Context(ctx)
+    call = call.FormatOptionsUseInt64Timestamp(true)
    if schema == nil {
        // only project schema if we weren't supplied one.
        projectedFields = append(projectedFields, "schema")
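As a side note, here is a minimal standalone sketch of the same option applied to a raw tabledata.list call (not part of this PR; the project, dataset, and table identifiers are placeholders, and it assumes a google.golang.org/api/bigquery/v2 version that exposes FormatOptionsUseInt64Timestamp, as the vendored client above does):

```go
package main

import (
	"context"
	"fmt"
	"log"

	bq "google.golang.org/api/bigquery/v2"
)

func main() {
	ctx := context.Background()
	svc, err := bq.NewService(ctx) // uses Application Default Credentials
	if err != nil {
		log.Fatal(err)
	}
	// Placeholder identifiers; replace with a real project/dataset/table.
	call := svc.Tabledata.List("my-project", "my_dataset", "my_table")
	// Ask the API to encode TIMESTAMP cells as int64 microseconds rather than
	// floating-point seconds, matching what convertBasicType now expects.
	call = call.FormatOptionsUseInt64Timestamp(true)
	resp, err := call.Context(ctx).Do()
	if err != nil {
		log.Fatal(err)
	}
	for _, row := range resp.Rows {
		for _, cell := range row.F {
			fmt.Println(cell.V)
		}
	}
}
```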
1 change: 1 addition & 0 deletions bigquery/job.go
@@ -353,6 +353,7 @@ func (j *Job) read(ctx context.Context, waitForQuery func(context.Context, strin
 func (j *Job) waitForQuery(ctx context.Context, projectID string) (Schema, uint64, error) {
     // Use GetQueryResults only to wait for completion, not to read results.
     call := j.c.bqs.Jobs.GetQueryResults(projectID, j.jobID).Location(j.location).Context(ctx).MaxResults(0)
+    call = call.FormatOptionsUseInt64Timestamp(true)
     setClientHeader(call.Header())
     backoff := gax.Backoff{
         Initial: 1 * time.Second,
3 changes: 3 additions & 0 deletions bigquery/query.go
@@ -470,6 +470,9 @@ func (q *Query) probeFastPath() (*bq.QueryRequest, error) {
         MaximumBytesBilled: q.QueryConfig.MaxBytesBilled,
         RequestId: uid.NewSpace("request", nil).New(),
         Labels: q.Labels,
+        FormatOptions: &bq.DataFormatOptions{
+            UseInt64Timestamp: true,
+        },
     }
     if q.QueryConfig.DisableQueryCache {
         qRequest.UseQueryCache = &pfalse
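For comparison, a short sketch of the request-body form of the same option, which is what the fast-path change above sets on bq.QueryRequest (not part of this PR; the project ID and query are placeholders):

```go
package main

import (
	"context"
	"fmt"
	"log"

	bq "google.golang.org/api/bigquery/v2"
)

func main() {
	ctx := context.Background()
	svc, err := bq.NewService(ctx) // Application Default Credentials
	if err != nil {
		log.Fatal(err)
	}
	pfalse := false
	req := &bq.QueryRequest{
		Query:        "SELECT CURRENT_TIMESTAMP()",
		UseLegacySql: &pfalse,
		// Same option probeFastPath now sets: TIMESTAMP values come back as
		// int64 microseconds instead of floating-point seconds.
		FormatOptions: &bq.DataFormatOptions{UseInt64Timestamp: true},
	}
	// Placeholder project ID; rows may be empty if the job is still running.
	resp, err := svc.Jobs.Query("my-project", req).Context(ctx).Do()
	if err != nil {
		log.Fatal(err)
	}
	for _, row := range resp.Rows {
		fmt.Println(row.F[0].V)
	}
}
```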
6 changes: 6 additions & 0 deletions bigquery/query_test.go
@@ -439,6 +439,9 @@ func TestProbeFastPath(t *testing.T) {
             wantReq: &bq.QueryRequest{
                 Query: "foo",
                 UseLegacySql: &pfalse,
+                FormatOptions: &bq.DataFormatOptions{
+                    UseInt64Timestamp: true,
+                },
             },
         },
         {
@@ -473,6 +476,9 @@ func TestProbeFastPath(t *testing.T) {
                 },
             },
             UseQueryCache: &pfalse,
+            FormatOptions: &bq.DataFormatOptions{
+                UseInt64Timestamp: true,
+            },
         },
     },
     {
9 changes: 2 additions & 7 deletions bigquery/value.go
@@ -18,7 +18,6 @@ import (
     "encoding/base64"
     "errors"
     "fmt"
-    "math"
     "math/big"
     "reflect"
     "strconv"
@@ -955,15 +954,11 @@ func convertBasicType(val string, typ FieldType) (Value, error) {
     case BooleanFieldType:
         return strconv.ParseBool(val)
     case TimestampFieldType:
-        f, err := strconv.ParseFloat(val, 64)
+        i, err := strconv.ParseInt(val, 10, 64)
         if err != nil {
             return nil, err
         }
-        secs := math.Trunc(f)
-        // Timestamps in BigQuery have microsecond precision, so we must
-        // return a round number of microseconds.
-        micros := math.Trunc((f-secs)*1e6 + 0.5)
-        return Value(time.Unix(int64(secs), int64(micros)*1000).UTC()), nil
+        return time.UnixMicro(i), nil
     case DateFieldType:
         return civil.ParseDate(val)
     case TimeFieldType:
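A quick standalone check of the new conversion path (not part of the PR): the cell value is now a string of int64 microseconds since the Unix epoch, so time.UnixMicro yields the same instant the old seconds-plus-rounded-microseconds construction produced. The literal below is the microsecond form of the first value from the removed TestConvertTimePrecision cases.

```go
package main

import (
	"fmt"
	"strconv"
	"time"
)

func main() {
	// Microsecond form of the removed test case value 1555593697.154358 seconds.
	i, err := strconv.ParseInt("1555593697154358", 10, 64)
	if err != nil {
		panic(err)
	}
	got := time.UnixMicro(i).UTC()

	// Same instant built from whole seconds plus nanoseconds, as the old
	// float-based code path (and its test expectation) did.
	want := time.Unix(1555593697, 154358*1000).UTC()

	fmt.Println(got.Equal(want)) // true
}
```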
45 changes: 2 additions & 43 deletions bigquery/value_test.go
@@ -78,7 +78,7 @@ func TestConvertTime(t *testing.T) {
     ts := testTimestamp.Round(time.Millisecond)
     row := &bq.TableRow{
         F: []*bq.TableCell{
-            {V: fmt.Sprintf("%.10f", float64(ts.UnixNano())/1e9)},
+            {V: fmt.Sprint(ts.UnixMicro())},
             {V: testDate.String()},
             {V: testTime.String()},
             {V: testDateTime.String()},
@@ -95,15 +95,12 @@ func TestConvertTime(t *testing.T) {
             t.Errorf("#%d: got:\n%v\nwant:\n%v", i, g, w)
         }
     }
-    if got[0].(time.Time).Location() != time.UTC {
-        t.Errorf("expected time zone UTC: got:\n%v", got)
-    }
 }

 func TestConvertSmallTimes(t *testing.T) {
     for _, year := range []int{1600, 1066, 1} {
         want := time.Date(year, time.January, 1, 0, 0, 0, 0, time.UTC)
-        s := fmt.Sprintf("%.10f", float64(want.Unix()))
+        s := fmt.Sprint(time.Date(year, time.January, 1, 0, 0, 0, 0, time.UTC).UnixMicro())
         got, err := convertBasicType(s, TimestampFieldType)
         if err != nil {
             t.Fatal(err)
@@ -114,44 +111,6 @@ func TestConvertSmallTimes(t *testing.T) {
         }
     }
 }

-func TestConvertTimePrecision(t *testing.T) {
-    tcs := []struct {
-        // Internally, BigQuery stores timestamps as microsecond-precision
-        // floats.
-        bq   float64
-        want time.Time
-    }{
-        {
-            bq:   1555593697.154358,
-            want: time.Unix(1555593697, 154358*1000),
-        },
-        {
-            bq:   1555593697.154359,
-            want: time.Unix(1555593697, 154359*1000),
-        },
-        {
-            bq:   1555593697.154360,
-            want: time.Unix(1555593697, 154360*1000),
-        },
-    }
-    for _, tc := range tcs {
-        bqS := fmt.Sprintf("%.6f", tc.bq)
-        t.Run(bqS, func(t *testing.T) {
-            got, err := convertBasicType(bqS, TimestampFieldType)
-            if err != nil {
-                t.Fatalf("convertBasicType failed: %v", err)
-            }
-            gotT, ok := got.(time.Time)
-            if !ok {
-                t.Fatalf("got a %T from convertBasicType, want a time.Time; got = %v", got, got)
-            }
-            if !gotT.Equal(tc.want) {
-                t.Errorf("got %v from convertBasicType, want %v", gotT, tc.want)
-            }
-        })
-    }
-}
-
 func TestConvertNullValues(t *testing.T) {
     schema := Schema{{Type: StringFieldType}}
     row := &bq.TableRow{