Patch json binary decoder to return trailing 0
mtaner committed Nov 21, 2024
1 parent 08b4172 commit 41f382c
Showing 3 changed files with 64 additions and 25 deletions.
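Context: the substantive change in this commit is to the JSON binary decoder used when replaying binlog row events; that file is the third, generated file GitHub does not render below. Previously a JSON double with a zero fractional part (for example 42.0) was re-serialized without its trailing ".0", silently turning a float into an integer on the target. The two rendered test files pin down the corrected behaviour. As a rough illustration only, with hypothetical names and no claim to match the actual patch, the fix amounts to keeping the ".0" when a decoded double formats as a whole number:

package main

import (
	"fmt"
	"strconv"
	"strings"
)

// formatJSONDouble sketches how a float64 decoded from MySQL's binary JSON
// payload could be rendered as text. strconv.FormatFloat drops the
// fractional part of whole-valued floats ("52.0" becomes "52"), which
// changes the JSON value's type on re-insert; re-appending ".0" keeps it a
// double. Illustrative only; not the actual patched decoder.
func formatJSONDouble(f float64) string {
	s := strconv.FormatFloat(f, 'g', -1, 64)
	if !strings.ContainsAny(s, ".eE") {
		s += ".0"
	}
	return s
}

func main() {
	fmt.Println(formatJSONDouble(52.0))  // 52.0 rather than 52
	fmt.Println(formatJSONDouble(52.13)) // 52.13
}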
17 changes: 14 additions & 3 deletions test/go/dml_events_test.go
@@ -29,7 +29,7 @@ func (this *DMLEventsTestSuite) SetupTest() {

columns := []schema.TableColumn{
{Name: "col1"},
{Name: "col2"},
{Name: "col2", Type: schema.TYPE_JSON},
{Name: "col3"},
}

@@ -62,12 +62,13 @@ func (this *DMLEventsTestSuite) TestBinlogInsertEventGeneratesInsertQuery() {
Rows: [][]interface{}{
{1000, []byte("val1"), true},
{1001, []byte("val2"), false},
{1002, "{\"val\": 42.0}", false},
},
}

dmlEvents, err := ghostferry.NewBinlogInsertEvents(this.eventBase, rowsEvent)
this.Require().Nil(err)
this.Require().Equal(2, len(dmlEvents))
this.Require().Equal(3, len(dmlEvents))

q1, err := dmlEvents[0].AsSQLString(this.targetTable.Schema, this.targetTable.Name)
this.Require().Nil(err)
@@ -76,6 +77,10 @@ func (this *DMLEventsTestSuite) TestBinlogInsertEventGeneratesInsertQuery() {
q2, err := dmlEvents[1].AsSQLString(this.targetTable.Schema, this.targetTable.Name)
this.Require().Nil(err)
this.Require().Equal("INSERT IGNORE INTO `target_schema`.`target_table` (`col1`,`col2`,`col3`) VALUES (1001,_binary'val2',0)", q2)

q3, err := dmlEvents[2].AsSQLString(this.targetTable.Schema, this.targetTable.Name)
this.Require().Nil(err)
this.Require().Equal("INSERT IGNORE INTO `target_schema`.`target_table` (`col1`,`col2`,`col3`) VALUES (1002,CAST('{\"val\": 42.0}' AS JSON),0)", q3)
}

func (this *DMLEventsTestSuite) TestBinlogInsertEventWithWrongColumnsReturnsError() {
@@ -117,12 +122,14 @@ func (this *DMLEventsTestSuite) TestBinlogUpdateEventGeneratesUpdateQuery() {
{1000, []byte("val2"), false},
{1001, []byte("val3"), false},
{1001, []byte("val4"), true},
{1002, "{\"val\": 42.0}", false},
{1002, "{\"val\": 43.0}", false},
},
}

dmlEvents, err := ghostferry.NewBinlogUpdateEvents(this.eventBase, rowsEvent)
this.Require().Nil(err)
this.Require().Equal(2, len(dmlEvents))
this.Require().Equal(3, len(dmlEvents))

q1, err := dmlEvents[0].AsSQLString(this.targetTable.Schema, this.targetTable.Name)
this.Require().Nil(err)
@@ -131,6 +138,10 @@ func (this *DMLEventsTestSuite) TestBinlogUpdateEventGeneratesUpdateQuery() {
q2, err := dmlEvents[1].AsSQLString(this.targetTable.Schema, this.targetTable.Name)
this.Require().Nil(err)
this.Require().Equal("UPDATE `target_schema`.`target_table` SET `col1`=1001,`col2`=_binary'val4',`col3`=1 WHERE `col1`=1001 AND `col2`=_binary'val3' AND `col3`=0", q2)

q3, err := dmlEvents[2].AsSQLString(this.targetTable.Schema, this.targetTable.Name)
this.Require().Nil(err)
this.Require().Equal("UPDATE `target_schema`.`target_table` SET `col1`=1002,`col2`=CAST('{\"val\": 43.0}' AS JSON),`col3`=0 WHERE `col1`=1002 AND `col2`=CAST('{\"val\": 42.0}' AS JSON) AND `col3`=0", q3)
}

func (this *DMLEventsTestSuite) TestBinlogUpdateEventWithWrongColumnsReturnsError() {
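The expected statements above also document how values are quoted per column type: plain byte-string values become _binary'...' literals, booleans become 1 or 0, and a column declared schema.TYPE_JSON has its string payload wrapped in CAST('...' AS JSON) so MySQL parses it as a JSON document. Below is a self-contained sketch of that formatting rule; the helper name and the simple quote-doubling escape are assumptions for illustration, not Ghostferry's actual implementation:

package main

import (
	"fmt"
	"strings"
)

// quoteValue sketches the per-type quoting rule implied by the expected SQL
// strings in the tests above. Hypothetical helper, not Ghostferry's API; the
// real escaping rules may differ.
func quoteValue(value interface{}, isJSONColumn bool) string {
	switch v := value.(type) {
	case string:
		escaped := strings.ReplaceAll(v, "'", "''")
		if isJSONColumn {
			return "CAST('" + escaped + "' AS JSON)"
		}
		return "'" + escaped + "'"
	case []byte:
		return "_binary'" + strings.ReplaceAll(string(v), "'", "''") + "'"
	case bool:
		if v {
			return "1"
		}
		return "0"
	default:
		return fmt.Sprint(v)
	}
}

func main() {
	fmt.Println(quoteValue(`{"val": 42.0}`, true)) // CAST('{"val": 42.0}' AS JSON)
	fmt.Println(quoteValue([]byte("val2"), false)) // _binary'val2'
	fmt.Println(quoteValue(false, false))          // 0
}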
51 changes: 30 additions & 21 deletions test/integration/types_test.rb
@@ -1,13 +1,15 @@
require "test_helper"

class TypesTest < GhostferryTestCase
JSON_OBJ = '{"data": {"quote": "\\\'", "value": [1]}}'
JSON_OBJ = '{"data": {"float": 32.0, "quote": "\\\'", "value": [1, 12.13]}}'
EMPTY_JSON = '{}'
JSON_ARRAY = '[\"test_data\", \"test_data_2\"]'
JSON_NULL = 'null'
JSON_TRUE = 'true'
JSON_FALSE = 'false'
JSON_NUMBER = '42'
JSON_FLOATING_POINT_WITH_ZERO_FRACTIONAL_PART = '52.0'
JSON_FLOATING_POINT_WITH_NON_ZERO_FRACTIONAL_PART = '52.13'

def test_json_colum_not_null_with_no_default_is_invalid_this_is_fine
# See: https://bugs.mysql.com/bug.php?id=98496
@@ -103,26 +105,29 @@ def test_json_data_insert
# with a JSON column is broken on 5.7.
# See: https://bugs.mysql.com/bug.php?id=87847
res = target_db.query("SELECT COUNT(*) AS cnt FROM #{DEFAULT_FULL_TABLE_NAME}")
assert_equal 16, res.first["cnt"]
assert_equal 20, res.first["cnt"]

expected = [
{"id"=>1, "data"=>"{\"data\": {\"quote\": \"'\", \"value\": [1]}}"},
{"id"=>1, "data"=>"{\"data\": {\"float\": 32.0, \"quote\": \"'\", \"value\": [1, 12.13]}}"},
{"id"=>2, "data"=>"[\"test_data\", \"test_data_2\"]"},
{"id"=>3, "data"=>"{}"},
{"id"=>4, "data"=>nil},
{"id"=>5, "data"=>"null"},
{"id"=>6, "data"=>"true"},
{"id"=>7, "data"=>"false"},
{"id"=>8, "data"=>"42"},

{"id"=>9, "data"=>"{\"data\": {\"quote\": \"'\", \"value\": [1]}}"},
{"id"=>10, "data"=>"[\"test_data\", \"test_data_2\"]"},
{"id"=>11, "data"=>"{}"},
{"id"=>12, "data"=>nil},
{"id"=>13, "data"=>"null"},
{"id"=>14, "data"=>"true"},
{"id"=>15, "data"=>"false"},
{"id"=>16, "data"=>"42"},
{"id"=>9, "data"=>"52.0"},
{"id"=>10, "data"=>"52.13"},
{"id"=>11, "data"=>"{\"data\": {\"float\": 32.0, \"quote\": \"'\", \"value\": [1, 12.13]}}"},
{"id"=>12, "data"=>"[\"test_data\", \"test_data_2\"]"},
{"id"=>13, "data"=>"{}"},
{"id"=>14, "data"=>nil},
{"id"=>15, "data"=>"null"},
{"id"=>16, "data"=>"true"},
{"id"=>17, "data"=>"false"},
{"id"=>18, "data"=>"42"},
{"id"=>19, "data"=>"52.0"},
{"id"=>20, "data"=>"52.13"},
]

res = target_db.query("SELECT * FROM #{DEFAULT_FULL_TABLE_NAME} ORDER BY id ASC")
@@ -194,15 +199,17 @@ def test_json_data_update
loop do
sleep 0.1
res = target_db.query("SELECT COUNT(*) AS cnt FROM #{DEFAULT_FULL_TABLE_NAME}")
if res.first["cnt"] == 8
if res.first["cnt"] == 10
source_db.query("UPDATE #{DEFAULT_FULL_TABLE_NAME} SET data = '#{EMPTY_JSON}' WHERE id = 1")
source_db.query("UPDATE #{DEFAULT_FULL_TABLE_NAME} SET data = '#{JSON_ARRAY}' WHERE id = 2")
source_db.query("UPDATE #{DEFAULT_FULL_TABLE_NAME} SET data = NULL WHERE id = 3")
source_db.query("UPDATE #{DEFAULT_FULL_TABLE_NAME} SET data = '#{JSON_OBJ}' WHERE id = 4")
source_db.query("UPDATE #{DEFAULT_FULL_TABLE_NAME} SET data = '#{JSON_TRUE}' WHERE id = 5")
source_db.query("UPDATE #{DEFAULT_FULL_TABLE_NAME} SET data = '#{JSON_FALSE}' WHERE id = 6")
source_db.query("UPDATE #{DEFAULT_FULL_TABLE_NAME} SET data = '#{JSON_NUMBER}' WHERE id = 7")
source_db.query("UPDATE #{DEFAULT_FULL_TABLE_NAME} SET data = '#{JSON_FLOATING_POINT_WITH_ZERO_FRACTIONAL_PART}' WHERE id = 7")
source_db.query("UPDATE #{DEFAULT_FULL_TABLE_NAME} SET data = '#{JSON_NULL}' WHERE id = 8")
source_db.query("UPDATE #{DEFAULT_FULL_TABLE_NAME} SET data = '#{JSON_NUMBER}' WHERE id = 9")
source_db.query("UPDATE #{DEFAULT_FULL_TABLE_NAME} SET data = '#{JSON_FLOATING_POINT_WITH_NON_ZERO_FRACTIONAL_PART}' WHERE id = 10")
break
end

@@ -217,17 +224,19 @@ def test_json_data_update
refute timedout, "failed due to time out while waiting for the 4 insert binlogs to be written to the target"

res = target_db.query("SELECT COUNT(*) AS cnt FROM #{DEFAULT_FULL_TABLE_NAME}")
assert_equal 8, res.first["cnt"]
assert_equal 10, res.first["cnt"]

expected = [
{"id"=>1, "data"=>"{}"},
{"id"=>2, "data"=>"[\"test_data\", \"test_data_2\"]"},
{"id"=>3, "data"=>nil},
{"id"=>4, "data"=>"{\"data\": {\"quote\": \"'\", \"value\": [1]}}"},
{"id"=>4, "data"=>"{\"data\": {\"float\": 32.0, \"quote\": \"'\", \"value\": [1, 12.13]}}"},
{"id"=>5, "data"=>"true"},
{"id"=>6, "data"=>"false"},
{"id"=>7, "data"=>"42"},
{"id"=>7, "data"=>"52.0"},
{"id"=>8, "data"=>"null"},
{"id"=>9, "data"=>"42"},
{"id"=>10, "data"=>"52.13"},
]

res = target_db.query("SELECT * FROM #{DEFAULT_FULL_TABLE_NAME} ORDER BY id ASC")
@@ -319,7 +328,7 @@ def test_copy_data_in_fixed_size_binary_column

def test_copy_data_in_fixed_size_binary_column__value_completely_filled
# Also see: https://github.com/Shopify/ghostferry/pull/159#issuecomment-597769258
#
#
# NOTE: This test is interesting (beyond what is covered above already),
# because it seems the server strips the trailing 0-bytes before sending
# them to the binlog even when the trailing 0-bytes are inserted by the user.
@@ -334,7 +343,7 @@ def test_copy_data_in_fixed_size_binary_column__value_completely_filled

def test_copy_data_in_fixed_size_binary_column__value_is_empty_and_length_is_1
# Also see: https://github.com/Shopify/ghostferry/pull/159#issuecomment-597769258
#
#
# slight variation to cover the corner-case where there is no data in the
# column at all and the entire value is 0-padded (here, only 1 byte)
execute_copy_data_in_fixed_size_binary_column(
Expand Down Expand Up @@ -393,8 +402,6 @@ def test_decimal
end
end



private

def insert_json_on_source
@@ -406,6 +413,8 @@ def insert_json_on_source
source_db.query("INSERT INTO #{DEFAULT_FULL_TABLE_NAME} (data) VALUES ('#{JSON_TRUE}')")
source_db.query("INSERT INTO #{DEFAULT_FULL_TABLE_NAME} (data) VALUES ('#{JSON_FALSE}')")
source_db.query("INSERT INTO #{DEFAULT_FULL_TABLE_NAME} (data) VALUES ('#{JSON_NUMBER}')")
source_db.query("INSERT INTO #{DEFAULT_FULL_TABLE_NAME} (data) VALUES ('#{JSON_FLOATING_POINT_WITH_ZERO_FRACTIONAL_PART}')")
source_db.query("INSERT INTO #{DEFAULT_FULL_TABLE_NAME} (data) VALUES ('#{JSON_FLOATING_POINT_WITH_NON_ZERO_FRACTIONAL_PART}')")
end

def execute_copy_data_in_fixed_size_binary_column(column_size:, inserted_data:, expected_inserted_data:, updated_data:)
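The two new floating-point fixtures in the integration test are deliberately split: 52.13 exercises the ordinary path, while 52.0 is the one shape where a naive re-encoding changes the text, and therefore the JSON type, which is exactly the regression this commit guards against. The collapse is easy to reproduce with Go's standard library (a plain demonstration of the formatting behaviour, not Ghostferry code):

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// A whole-valued float loses its ".0" under default encoding, so a
	// decoder that round-trips JSON text through such formatting would turn
	// the stored double 52.0 into the integer 52 on the target.
	a, _ := json.Marshal(52.0)
	b, _ := json.Marshal(52.13)
	fmt.Println(string(a)) // 52
	fmt.Println(string(b)) // 52.13
}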

Some generated files are not rendered by default.
