diff --git a/pkg/cmd/generate-binary/main.go b/pkg/cmd/generate-binary/main.go
index 7bd218561f28..982dd4728a8e 100644
--- a/pkg/cmd/generate-binary/main.go
+++ b/pkg/cmd/generate-binary/main.go
@@ -277,6 +277,11 @@ var inputs = map[string][]string{
 		"hello123",
 	},
 
+	`'%s'::char(8) COLLATE "en_US"`: {
+		"hello",
+		"hello123",
+	},
+
 	"'%s'::timestamp": {
 		"1999-01-08 04:05:06+00",
 		"1999-01-08 04:05:06+00:00",
@@ -510,5 +515,15 @@ var inputs = map[string][]string{
 		`'name'::NAME`,
 		`'false'::JSONB`,
 		`'{"a": []}'::JSONB`,
+		`1::int4`,
+		`1::int2`,
+		`1::char(2)`,
+		`1::char(1)`,
+		`1::varchar(4)`,
+		`1::text`,
+		`1::char(2) COLLATE "en_US"`,
+		`1::char(1) COLLATE "en_US"`,
+		`1::varchar(4) COLLATE "en_US"`,
+		`1::text COLLATE "en_US"`,
 	},
 }
diff --git a/pkg/sql/pgwire/encoding_test.go b/pkg/sql/pgwire/encoding_test.go
index 0602c62ae8e6..a750c0b7b76f 100644
--- a/pkg/sql/pgwire/encoding_test.go
+++ b/pkg/sql/pgwire/encoding_test.go
@@ -99,6 +99,13 @@ func readEncodingTests(t testing.TB) []*encodingTest {
 			// The width of a bpchar type is fixed and equal to the length of the
 			// Text string returned by postgres.
 			tc.T.InternalType.Width = int32(len(tc.Text))
+		case oid.T_record:
+			tupleExpr := te.(*tree.Tuple)
+			typs := make([]*types.T, len(tupleExpr.Exprs))
+			for i := range tupleExpr.Exprs {
+				typs[i] = tupleExpr.Exprs[i].(tree.TypedExpr).ResolvedType()
+			}
+			tc.T = types.MakeTuple(typs)
 		}
 	}
 
@@ -177,6 +184,11 @@ func TestEncodings(t *testing.T) {
 			case *tree.DTuple:
 				// Unsupported.
 				continue
+			case *tree.DCollatedString:
+				// Decoding collated strings is unsupported by this test. The encoded
+				// value is the same as a normal string, so decoding it turns it into
+				// a DString.
+				continue
 			}
 			for code, value := range map[pgwirebase.FormatCode][]byte{
 				pgwirebase.FormatText:   tc.TextAsBinary,
diff --git a/pkg/sql/pgwire/testdata/encodings.json b/pkg/sql/pgwire/testdata/encodings.json
index dde959771f71..708ef742aa9b 100644
--- a/pkg/sql/pgwire/testdata/encodings.json
+++ b/pkg/sql/pgwire/testdata/encodings.json
@@ -55,6 +55,20 @@
     "TextAsBinary": [104, 101, 108, 108, 111, 49, 50, 51],
     "Binary": [104, 101, 108, 108, 111, 49, 50, 51]
   },
+  {
+    "SQL": "'hello'::char(8) COLLATE \"en_US\"",
+    "Oid": 1042,
+    "Text": "hello   ",
+    "TextAsBinary": [104, 101, 108, 108, 111, 32, 32, 32],
+    "Binary": [104, 101, 108, 108, 111, 32, 32, 32]
+  },
+  {
+    "SQL": "'hello123'::char(8) COLLATE \"en_US\"",
+    "Oid": 1042,
+    "Text": "hello123",
+    "TextAsBinary": [104, 101, 108, 108, 111, 49, 50, 51],
+    "Binary": [104, 101, 108, 108, 111, 49, 50, 51]
+  },
   {
     "SQL": "'1999-01-08'::date",
     "Oid": 1082,
@@ -1686,6 +1700,76 @@
     "TextAsBinary": [40, 34, 123, 34, 34, 97, 34, 34, 58, 32, 91, 93, 125, 34, 44, 41],
     "Binary": [0, 0, 0, 2, 0, 0, 14, 218, 0, 0, 0, 10, 1, 123, 34, 97, 34, 58, 32, 91, 93, 125, 0, 0, 2, 193, 255, 255, 255, 255]
   },
+  {
+    "SQL": "(1::int4,null)",
+    "Oid": 2249,
+    "Text": "(1,)",
+    "TextAsBinary": [40, 49, 44, 41],
+    "Binary": [0, 0, 0, 2, 0, 0, 0, 23, 0, 0, 0, 4, 0, 0, 0, 1, 0, 0, 2, 193, 255, 255, 255, 255]
+  },
+  {
+    "SQL": "(1::int2,null)",
+    "Oid": 2249,
+    "Text": "(1,)",
+    "TextAsBinary": [40, 49, 44, 41],
+    "Binary": [0, 0, 0, 2, 0, 0, 0, 21, 0, 0, 0, 2, 0, 1, 0, 0, 2, 193, 255, 255, 255, 255]
+  },
+  {
+    "SQL": "(1::char(2),null)",
+    "Oid": 2249,
+    "Text": "(\"1 \",)",
+    "TextAsBinary": [40, 34, 49, 32, 34, 44, 41],
+    "Binary": [0, 0, 0, 2, 0, 0, 4, 18, 0, 0, 0, 2, 49, 32, 0, 0, 2, 193, 255, 255, 255, 255]
+  },
+  {
+    "SQL": "(1::char(1),null)",
+    "Oid": 2249,
+    "Text": "(1,)",
+    "TextAsBinary": [40, 49, 44, 41],
"Binary": [0, 0, 0, 2, 0, 0, 4, 18, 0, 0, 0, 1, 49, 0, 0, 2, 193, 255, 255, 255, 255] + }, + { + "SQL": "(1::varchar(4),null)", + "Oid": 2249, + "Text": "(1,)", + "TextAsBinary": [40, 49, 44, 41], + "Binary": [0, 0, 0, 2, 0, 0, 4, 19, 0, 0, 0, 1, 49, 0, 0, 2, 193, 255, 255, 255, 255] + }, + { + "SQL": "(1::text,null)", + "Oid": 2249, + "Text": "(1,)", + "TextAsBinary": [40, 49, 44, 41], + "Binary": [0, 0, 0, 2, 0, 0, 0, 25, 0, 0, 0, 1, 49, 0, 0, 2, 193, 255, 255, 255, 255] + }, + { + "SQL": "(1::char(2) COLLATE \"en_US\",null)", + "Oid": 2249, + "Text": "(\"1 \",)", + "TextAsBinary": [40, 34, 49, 32, 34, 44, 41], + "Binary": [0, 0, 0, 2, 0, 0, 4, 18, 0, 0, 0, 2, 49, 32, 0, 0, 2, 193, 255, 255, 255, 255] + }, + { + "SQL": "(1::char(1) COLLATE \"en_US\",null)", + "Oid": 2249, + "Text": "(1,)", + "TextAsBinary": [40, 49, 44, 41], + "Binary": [0, 0, 0, 2, 0, 0, 4, 18, 0, 0, 0, 1, 49, 0, 0, 2, 193, 255, 255, 255, 255] + }, + { + "SQL": "(1::varchar(4) COLLATE \"en_US\",null)", + "Oid": 2249, + "Text": "(1,)", + "TextAsBinary": [40, 49, 44, 41], + "Binary": [0, 0, 0, 2, 0, 0, 4, 19, 0, 0, 0, 1, 49, 0, 0, 2, 193, 255, 255, 255, 255] + }, + { + "SQL": "(1::text COLLATE \"en_US\",null)", + "Oid": 2249, + "Text": "(1,)", + "TextAsBinary": [40, 49, 44, 41], + "Binary": [0, 0, 0, 2, 0, 0, 0, 25, 0, 0, 0, 1, 49, 0, 0, 2, 193, 255, 255, 255, 255] + }, { "SQL": "B''", "Oid": 1560, diff --git a/pkg/sql/pgwire/testdata/pgtest/errors b/pkg/sql/pgwire/testdata/pgtest/errors index d5041e3f9198..e8ce4fdc0581 100644 --- a/pkg/sql/pgwire/testdata/pgtest/errors +++ b/pkg/sql/pgwire/testdata/pgtest/errors @@ -23,7 +23,7 @@ ReadyForQuery {"Type":"ReadyForQuery","TxStatus":"I"} send -Query {"String": "CREATE TABLE child (c INT PRIMARY KEY, p INT REFERENCES parent(p))"} +Query {"String": "CREATE TABLE child (c INT PRIMARY KEY, p INT)"} ---- until @@ -32,6 +32,16 @@ ReadyForQuery {"Type":"CommandComplete","CommandTag":"CREATE TABLE"} {"Type":"ReadyForQuery","TxStatus":"I"} +send +Query {"String": "ALTER TABLE child ADD CONSTRAINT fk_p_ref_parent FOREIGN KEY (p) REFERENCES parent(p)"} +---- + +until +ReadyForQuery +---- +{"Type":"CommandComplete","CommandTag":"ALTER TABLE"} +{"Type":"ReadyForQuery","TxStatus":"I"} + send Query {"String": "INSERT INTO child VALUES (1,1)"} ---- @@ -47,13 +57,13 @@ send Query {"String": "ALTER TABLE child DROP CONSTRAINT fk_p_ref_parent"} ---- -# Test with a constraint name that requires quoting. until ReadyForQuery ---- {"Type":"CommandComplete","CommandTag":"ALTER TABLE"} {"Type":"ReadyForQuery","TxStatus":"I"} +# Test with a constraint name that requires quoting. 
send Query {"String": "ALTER TABLE child ADD CONSTRAINT \"foo bar\" FOREIGN KEY (p) REFERENCES parent(p)"} ---- diff --git a/pkg/sql/pgwire/testdata/pgtest/timezone b/pkg/sql/pgwire/testdata/pgtest/timezone index 2ef774c8cb2d..312abe77f21e 100644 --- a/pkg/sql/pgwire/testdata/pgtest/timezone +++ b/pkg/sql/pgwire/testdata/pgtest/timezone @@ -1,11 +1,22 @@ send -Query {"String": "SELECT '00:00:00+01:01'::\"timetz\""} +Query {"String": "SET TIME ZONE \"UTC\""} ---- until ReadyForQuery ---- -{"Type":"RowDescription","Fields":[{"Name":"timetz","TableOID":0,"TableAttributeNumber":0,"DataTypeOID":1266,"DataTypeSize":16,"TypeModifier":-1,"Format":0}]} +{"Type":"ParameterStatus","Name":"TimeZone","Value":"UTC"} +{"Type":"CommandComplete","CommandTag":"SET"} +{"Type":"ReadyForQuery","TxStatus":"I"} + +send +Query {"String": "SELECT '00:00:00+01:01'::\"timetz\""} +---- + +until ignore_data_type_sizes +ReadyForQuery +---- +{"Type":"RowDescription","Fields":[{"Name":"timetz","TableOID":0,"TableAttributeNumber":0,"DataTypeOID":1266,"DataTypeSize":0,"TypeModifier":-1,"Format":0}]} {"Type":"DataRow","Values":[{"text":"00:00:00+01:01"}]} {"Type":"CommandComplete","CommandTag":"SELECT 1"} {"Type":"ReadyForQuery","TxStatus":"I"} @@ -14,22 +25,23 @@ send Query {"String": "SELECT '00:00:00+01:01:03'::\"timetz\""} ---- -until +until ignore_data_type_sizes ReadyForQuery ---- -{"Type":"RowDescription","Fields":[{"Name":"timetz","TableOID":0,"TableAttributeNumber":0,"DataTypeOID":1266,"DataTypeSize":16,"TypeModifier":-1,"Format":0}]} +{"Type":"RowDescription","Fields":[{"Name":"timetz","TableOID":0,"TableAttributeNumber":0,"DataTypeOID":1266,"DataTypeSize":0,"TypeModifier":-1,"Format":0}]} {"Type":"DataRow","Values":[{"text":"00:00:00+01:01:03"}]} {"Type":"CommandComplete","CommandTag":"SELECT 1"} {"Type":"ReadyForQuery","TxStatus":"I"} -send +# PostgreSQL does not display seconds offset here, but CockroachDB does. 
+send crdb_only
 Query {"String": "SELECT '1882-05-23T00:00:00'::\"timestamptz\""}
 ----
 
-until
+until crdb_only ignore_data_type_sizes
 ReadyForQuery
 ----
-{"Type":"RowDescription","Fields":[{"Name":"timestamptz","TableOID":0,"TableAttributeNumber":0,"DataTypeOID":1184,"DataTypeSize":24,"TypeModifier":-1,"Format":0}]}
+{"Type":"RowDescription","Fields":[{"Name":"timestamptz","TableOID":0,"TableAttributeNumber":0,"DataTypeOID":1184,"DataTypeSize":0,"TypeModifier":-1,"Format":0}]}
 {"Type":"DataRow","Values":[{"text":"1882-05-23 00:00:00+00:00"}]}
 {"Type":"CommandComplete","CommandTag":"SELECT 1"}
 {"Type":"ReadyForQuery","TxStatus":"I"}
@@ -49,10 +61,10 @@ send
 Query {"String": "SELECT '1882-05-23T00:00:00-05:51'::\"timestamptz\""}
 ----
 
-until
+until ignore_data_type_sizes
 ReadyForQuery
 ----
-{"Type":"RowDescription","Fields":[{"Name":"timestamptz","TableOID":0,"TableAttributeNumber":0,"DataTypeOID":1184,"DataTypeSize":24,"TypeModifier":-1,"Format":0}]}
+{"Type":"RowDescription","Fields":[{"Name":"timestamptz","TableOID":0,"TableAttributeNumber":0,"DataTypeOID":1184,"DataTypeSize":0,"TypeModifier":-1,"Format":0}]}
 {"Type":"DataRow","Values":[{"text":"1882-05-23 00:00:24-05:50:36"}]}
 {"Type":"CommandComplete","CommandTag":"SELECT 1"}
 {"Type":"ReadyForQuery","TxStatus":"I"}
diff --git a/pkg/sql/pgwire/testdata/pgtest/tuple b/pkg/sql/pgwire/testdata/pgtest/tuple
new file mode 100644
index 000000000000..8fc783b3a88a
--- /dev/null
+++ b/pkg/sql/pgwire/testdata/pgtest/tuple
@@ -0,0 +1,66 @@
+# 80 = ASCII 'P' for Portal
+# ResultFormatCodes [1] = FormatBinary
+send
+Parse {"Name": "s1", "Query": "SELECT (1::int2, 2::int4, 3::int8, null) AS row"}
+Bind {"DestinationPortal": "p1", "PreparedStatement": "s1", "ResultFormatCodes": [1]}
+Describe {"ObjectType": 80, "Name": "p1"}
+Execute {"Portal": "p1"}
+Sync
+----
+
+# PostgreSQL reports a DataTypeSize of -1 for tuples, whereas CockroachDB
+# computes the size of the tuple.
+until ignore_data_type_sizes
+ReadyForQuery
+----
+{"Type":"ParseComplete"}
+{"Type":"BindComplete"}
+{"Type":"RowDescription","Fields":[{"Name":"row","TableOID":0,"TableAttributeNumber":0,"DataTypeOID":2249,"DataTypeSize":0,"TypeModifier":-1,"Format":1}]}
+{"Type":"DataRow","Values":[{"binary":"000000040000001500000002000100000017000000040000000200000014000000080000000000000003000002c1ffffffff"}]}
+{"Type":"CommandComplete","CommandTag":"SELECT 1"}
+{"Type":"ReadyForQuery","TxStatus":"I"}
+
+
+# 80 = ASCII 'P' for Portal
+# ResultFormatCodes [1] = FormatBinary
+send
+Parse {"Name": "s2", "Query": "SELECT ('a'::text, 'b'::varchar(4), 'c'::char(1), 'd'::char(2), 'e'::\"char\", 'f'::char(3) COLLATE \"en_US\") AS row"}
+Bind {"DestinationPortal": "p2", "PreparedStatement": "s2", "ResultFormatCodes": [1]}
+Describe {"ObjectType": 80, "Name": "p2"}
+Execute {"Portal": "p2"}
+Sync
+----
+
+# PostgreSQL reports a DataTypeSize of -1 for tuples, whereas CockroachDB
+# computes the size of the tuple.
+until ignore_data_type_sizes
+ReadyForQuery
+----
+{"Type":"ParseComplete"}
+{"Type":"BindComplete"}
+{"Type":"RowDescription","Fields":[{"Name":"row","TableOID":0,"TableAttributeNumber":0,"DataTypeOID":2249,"DataTypeSize":0,"TypeModifier":-1,"Format":1}]}
+{"Type":"DataRow","Values":[{"binary":"00000006000000190000000161000004130000000162000004120000000163000004120000000264200000001200000001650000041200000003662020"}]}
+{"Type":"CommandComplete","CommandTag":"SELECT 1"}
+{"Type":"ReadyForQuery","TxStatus":"I"}
+
+# 80 = ASCII 'P' for Portal
+# ResultFormatCodes [0] = FormatText
+send
+Parse {"Name": "s3", "Query": "SELECT ('a'::text, 'b'::varchar(4), 'c'::char(1), 'd'::char(2), 'e'::\"char\", 'f'::char(3) COLLATE \"en_US\") AS row"}
+Bind {"DestinationPortal": "p3", "PreparedStatement": "s3", "ResultFormatCodes": [0]}
+Describe {"ObjectType": 80, "Name": "p3"}
+Execute {"Portal": "p3"}
+Sync
+----
+
+# PostgreSQL reports a DataTypeSize of -1 for tuples, whereas CockroachDB
+# computes the size of the tuple.
+until ignore_data_type_sizes
+ReadyForQuery
+----
+{"Type":"ParseComplete"}
+{"Type":"BindComplete"}
+{"Type":"RowDescription","Fields":[{"Name":"row","TableOID":0,"TableAttributeNumber":0,"DataTypeOID":2249,"DataTypeSize":0,"TypeModifier":-1,"Format":0}]}
+{"Type":"DataRow","Values":[{"text":"(a,b,c,\"d \",e,\"f  \")"}]}
+{"Type":"CommandComplete","CommandTag":"SELECT 1"}
+{"Type":"ReadyForQuery","TxStatus":"I"}
diff --git a/pkg/sql/pgwire/types.go b/pkg/sql/pgwire/types.go
index 572c0153ee77..d46d102edf97 100644
--- a/pkg/sql/pgwire/types.go
+++ b/pkg/sql/pgwire/types.go
@@ -13,7 +13,6 @@ package pgwire
 import (
 	"context"
 	"encoding/binary"
-	"fmt"
 	"math"
 	"math/big"
 	"net"
@@ -65,17 +64,6 @@ func pgTypeForParserType(t *types.T) pgType {
 	}
 }
 
-// resolveBlankPaddedChar pads the given string with spaces if blank padding is
-// required or returns the string unmodified otherwise.
-func resolveBlankPaddedChar(s string, t *types.T) string {
-	if t.Oid() == oid.T_bpchar {
-		// Pad spaces on the right of the string to make it of length specified in
-		// the type t.
-		return fmt.Sprintf("%-*v", t.Width(), s)
-	}
-	return s
-}
-
 // writeTextDatum writes d to the buffer. Type t must be specified for types
 // that have various width encodings and therefore need padding (chars).
 // It is ignored (and can be nil) for types which do not need padding.
@@ -135,10 +123,10 @@ func (b *writeBuffer) writeTextDatum(
 		b.writeLengthPrefixedString(v.IPAddr.String())
 
 	case *tree.DString:
-		b.writeLengthPrefixedString(resolveBlankPaddedChar(string(*v), t))
+		b.writeLengthPrefixedString(tree.ResolveBlankPaddedChar(string(*v), t))
 
 	case *tree.DCollatedString:
-		b.writeLengthPrefixedString(v.Contents)
+		b.writeLengthPrefixedString(tree.ResolveBlankPaddedChar(v.Contents, t))
 
 	case *tree.DDate:
 		b.textFormatter.FormatNode(v)
@@ -422,10 +410,10 @@ func (b *writeBuffer) writeBinaryDatum(
 		b.writeLengthPrefixedString(v.LogicalRep)
 
 	case *tree.DString:
-		b.writeLengthPrefixedString(resolveBlankPaddedChar(string(*v), t))
+		b.writeLengthPrefixedString(tree.ResolveBlankPaddedChar(string(*v), t))
 
 	case *tree.DCollatedString:
-		b.writeLengthPrefixedString(v.Contents)
+		b.writeLengthPrefixedString(tree.ResolveBlankPaddedChar(v.Contents, t))
 
 	case *tree.DTimestamp:
 		b.putInt32(8)
@@ -459,10 +447,11 @@ func (b *writeBuffer) writeBinaryDatum(
 		subWriter := newWriteBuffer(nil /* bytecount */)
 		// Put the number of datums.
 		subWriter.putInt32(int32(len(v.D)))
-		for _, elem := range v.D {
-			oid := elem.ResolvedType().Oid()
+		tupleTypes := t.TupleContents()
+		for i, elem := range v.D {
+			oid := tupleTypes[i].Oid()
 			subWriter.putInt32(int32(oid))
-			subWriter.writeBinaryDatum(ctx, elem, sessionLoc, elem.ResolvedType())
+			subWriter.writeBinaryDatum(ctx, elem, sessionLoc, tupleTypes[i])
 		}
 		b.writeLengthPrefixedBuffer(&subWriter.wrapped)
diff --git a/pkg/sql/rowenc/testutils.go b/pkg/sql/rowenc/testutils.go
index b54bbd51ff52..ba0ec75fd38a 100644
--- a/pkg/sql/rowenc/testutils.go
+++ b/pkg/sql/rowenc/testutils.go
@@ -178,6 +178,9 @@ func RandDatumWithNullChance(rng *rand.Rand, typ *types.T, nullChance int) tree.
 		for i := range typ.TupleContents() {
 			tuple.D[i] = RandDatum(rng, typ.TupleContents()[i], true)
 		}
+		// Calling ResolvedType causes the internal TupleContents types to be
+		// populated.
+		tuple.ResolvedType()
 		return &tuple
 	case types.BitFamily:
 		width := typ.Width()
diff --git a/pkg/sql/sem/tree/pgwire_encode.go b/pkg/sql/sem/tree/pgwire_encode.go
index bff327414a05..77df8cd2633a 100644
--- a/pkg/sql/sem/tree/pgwire_encode.go
+++ b/pkg/sql/sem/tree/pgwire_encode.go
@@ -12,11 +12,24 @@ package tree
 
 import (
 	"bytes"
+	"fmt"
 	"unicode/utf8"
 
+	"github.com/cockroachdb/cockroach/pkg/sql/types"
 	"github.com/lib/pq/oid"
 )
 
+// ResolveBlankPaddedChar pads the given string with spaces if blank padding is
+// required or returns the string unmodified otherwise.
+func ResolveBlankPaddedChar(s string, t *types.T) string {
+	if t.Oid() == oid.T_bpchar {
+		// Pad spaces on the right of the string to make it of length specified in
+		// the type t.
+		return fmt.Sprintf("%-*v", t.Width(), s)
+	}
+	return s
+}
+
 func (d *DTuple) pgwireFormat(ctx *FmtCtx) {
 	// When converting a tuple to text in "postgres mode" there is
 	// special behavior: values are printed in "postgres mode" then the
@@ -31,14 +44,17 @@ func (d *DTuple) pgwireFormat(ctx *FmtCtx) {
 	// string printer called pgwireFormatStringInTuple().
 	ctx.WriteByte('(')
 	comma := ""
-	for _, v := range d.D {
+	for i, v := range d.D {
 		ctx.WriteString(comma)
+		t := d.ResolvedType().TupleContents()[i]
 		switch dv := UnwrapDatum(nil, v).(type) {
 		case dNull:
 		case *DString:
-			pgwireFormatStringInTuple(&ctx.Buffer, string(*dv))
+			s := ResolveBlankPaddedChar(string(*dv), t)
+			pgwireFormatStringInTuple(&ctx.Buffer, s)
 		case *DCollatedString:
-			pgwireFormatStringInTuple(&ctx.Buffer, dv.Contents)
+			s := ResolveBlankPaddedChar(dv.Contents, t)
+			pgwireFormatStringInTuple(&ctx.Buffer, s)
 		// Bytes cannot use the default case because they will be incorrectly
 		// double escaped.
 		case *DBytes:
diff --git a/pkg/sql/sem/tree/type_check.go b/pkg/sql/sem/tree/type_check.go
index 147a207ba023..017b7e3138e1 100644
--- a/pkg/sql/sem/tree/type_check.go
+++ b/pkg/sql/sem/tree/type_check.go
@@ -590,7 +590,11 @@ func (expr *CollateExpr) TypeCheck(
 		return nil, err
 	}
 	t := subExpr.ResolvedType()
-	if types.IsStringType(t) || t.Family() == types.UnknownFamily {
+	if types.IsStringType(t) {
+		expr.Expr = subExpr
+		expr.typ = types.MakeCollatedString(t, expr.Locale)
+		return expr, nil
+	} else if t.Family() == types.UnknownFamily {
 		expr.Expr = subExpr
 		expr.typ = types.MakeCollatedString(types.String, expr.Locale)
 		return expr, nil
diff --git a/pkg/testutils/pgtest/datadriven.go b/pkg/testutils/pgtest/datadriven.go
index bd9c733b0e41..e4f59cd3454b 100644
--- a/pkg/testutils/pgtest/datadriven.go
+++ b/pkg/testutils/pgtest/datadriven.go
@@ -182,6 +182,14 @@ func msgsToJSONWithIgnore(msgs []pgproto3.BackendMessage, args *datadriven.TestD
 				}
 			}
 		}
+	case "ignore_data_type_sizes":
+		for _, msg := range msgs {
+			if m, ok := msg.(*pgproto3.RowDescription); ok {
+				for i := range m.Fields {
+					m.Fields[i].DataTypeSize = 0
+				}
+			}
+		}
 	case "ignore":
 		for _, typ := range arg.Vals {
 			ignore[fmt.Sprintf("*pgproto3.%s", typ)] = true