diff --git a/README.md b/README.md index 969ec43..90375bd 100644 --- a/README.md +++ b/README.md @@ -157,18 +157,24 @@ then you can use the _update_duplicates_ option. Set this option to true is found the row will be updated with your new values. Default value for this option is false. +You can optionally declare a specific column list for the update duplicates +statement. Use the _update_columns_ option; then only these columns will be +updated. By default, if the _update_columns_ option is not passed, the column +list from the _column_names_ option is used. + ```ruby destination_columns = [:title, :author] +update_columns = [:title] # Update duplicate rows (MySQL) -Book.bulk_insert(*destination_columns, update_duplicates: true) do |worker| +Book.bulk_insert(*destination_columns, update_duplicates: true, update_columns: update_columns) do |worker| worker.add(...) worker.add(...) # ... end # Update duplicate rows (PostgreSQL) -Book.bulk_insert(*destination_columns, update_duplicates: %w[title]) do |worker| +Book.bulk_insert(*destination_columns, update_duplicates: %w[title], update_columns: update_columns) do |worker| worker.add(...) # ... end diff --git a/lib/bulk_insert.rb b/lib/bulk_insert.rb index 7907aff..0d6d077 100644 --- a/lib/bulk_insert.rb +++ b/lib/bulk_insert.rb @@ -4,9 +4,9 @@ module BulkInsert extend ActiveSupport::Concern module ClassMethods - def bulk_insert(*columns, values: nil, set_size:500, ignore: false, update_duplicates: false, return_primary_keys: false) + def bulk_insert(*columns, values: nil, set_size:500, ignore: false, update_duplicates: false, return_primary_keys: false, update_columns: nil) columns = default_bulk_columns if columns.empty? - worker = BulkInsert::Worker.new(connection, table_name, primary_key, columns, set_size, ignore, update_duplicates, return_primary_keys) + worker = BulkInsert::Worker.new(connection, table_name, primary_key, columns, set_size, ignore, update_duplicates, return_primary_keys, update_columns) if values.present? 
transaction do diff --git a/lib/bulk_insert/worker.rb b/lib/bulk_insert/worker.rb index a16a938..636029c 100644 --- a/lib/bulk_insert/worker.rb +++ b/lib/bulk_insert/worker.rb @@ -7,7 +7,7 @@ class Worker attr_accessor :adapter_name attr_reader :ignore, :update_duplicates, :result_sets - def initialize(connection, table_name, primary_key, column_names, set_size=500, ignore=false, update_duplicates=false, return_primary_keys=false) + def initialize(connection, table_name, primary_key, column_names, set_size=500, ignore=false, update_duplicates=false, return_primary_keys=false, update_columns=nil) @connection = connection @set_size = set_size @@ -24,6 +24,7 @@ def initialize(connection, table_name, primary_key, column_names, set_size=500, @columns = column_names.map { |name| column_map[name.to_s] } @table_name = connection.quote_table_name(table_name) @column_names = column_names.map { |name| connection.quote_column_name(name) }.join(",") + @update_columns = update_columns.is_a?(Array) ? update_columns.map { |name| column_map[name.to_s] } : @columns @before_save_callback = nil @after_save_callback = nil @@ -154,12 +155,12 @@ def on_conflict_statement if is_postgres && ignore ' ON CONFLICT DO NOTHING' elsif is_postgres && update_duplicates - update_values = @columns.map do |column| + update_values = @update_columns.map do |column| "#{column.name}=EXCLUDED.#{column.name}" end.join(', ') ' ON CONFLICT(' + update_duplicates.join(', ') + ') DO UPDATE SET ' + update_values elsif adapter_name =~ /^mysql/i && update_duplicates - update_values = @columns.map do |column| + update_values = @update_columns.map do |column| "`#{column.name}`=VALUES(`#{column.name}`)" end.join(', ') ' ON DUPLICATE KEY UPDATE ' + update_values diff --git a/test/bulk_insert/worker_test.rb b/test/bulk_insert/worker_test.rb index 735972b..1aa9d7c 100644 --- a/test/bulk_insert/worker_test.rb +++ b/test/bulk_insert/worker_test.rb @@ -342,7 +342,7 @@ class BulkInsertWorkerTest < ActiveSupport::TestCase 
assert_equal pgsql_worker.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse') ON CONFLICT DO NOTHING RETURNING id" end - test "adapter dependent postgresql methods (with update_duplicates)" do + test "adapter dependent postgresql methods (with update_duplicates, without update_columns)" do pgsql_worker = BulkInsert::Worker.new( Testing.connection, Testing.table_name, @@ -359,6 +359,24 @@ class BulkInsertWorkerTest < ActiveSupport::TestCase assert_equal pgsql_worker.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse') ON CONFLICT(greeting, age, happy) DO UPDATE SET greeting=EXCLUDED.greeting, age=EXCLUDED.age, happy=EXCLUDED.happy, created_at=EXCLUDED.created_at, updated_at=EXCLUDED.updated_at, color=EXCLUDED.color RETURNING id" end + test "adapter dependent postgresql methods (with update_duplicates, with update_columns)" do + pgsql_worker = BulkInsert::Worker.new( + Testing.connection, + Testing.table_name, + 'id', + %w(greeting age happy created_at updated_at color), + 500, # batch size + false, # ignore + %w(greeting age happy), # update duplicates + true, # return primary keys, + %w(greeting age happy updated_at) # update column names + ) + pgsql_worker.adapter_name = 'PostgreSQL' + pgsql_worker.add ["Yo", 15, false, nil, nil] + + assert_equal pgsql_worker.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse') ON CONFLICT(greeting, age, happy) DO UPDATE SET greeting=EXCLUDED.greeting, age=EXCLUDED.age, happy=EXCLUDED.happy, updated_at=EXCLUDED.updated_at RETURNING id" + end + test "adapter dependent PostGIS methods" do pgsql_worker = BulkInsert::Worker.new( Testing.connection, @@ -418,4 +436,22 @@ class BulkInsertWorkerTest < 
ActiveSupport::TestCase assert_equal mysql_worker.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse') ON DUPLICATE KEY UPDATE `greeting`=VALUES(`greeting`), `age`=VALUES(`age`), `happy`=VALUES(`happy`), `created_at`=VALUES(`created_at`), `updated_at`=VALUES(`updated_at`), `color`=VALUES(`color`)" end + + test "mysql adapter can update duplicates (with update_columns)" do + mysql_worker = BulkInsert::Worker.new( + Testing.connection, + Testing.table_name, + 'id', + %w(greeting age happy created_at updated_at color), + 500, # batch size + false, # ignore + true, # update_duplicates, + false, # return primary keys + %w(greeting age happy updated_at) + ) + mysql_worker.adapter_name = 'MySQL' + mysql_worker.add ["Yo", 15, false, nil, nil] + + assert_equal mysql_worker.compose_insert_query, "INSERT INTO \"testings\" (\"greeting\",\"age\",\"happy\",\"created_at\",\"updated_at\",\"color\") VALUES ('Yo',15,0,NULL,NULL,'chartreuse') ON DUPLICATE KEY UPDATE `greeting`=VALUES(`greeting`), `age`=VALUES(`age`), `happy`=VALUES(`happy`), `updated_at`=VALUES(`updated_at`)" + end end