Fix Bulk Imports #199

Merged, 7 commits, May 12, 2018
57 changes: 54 additions & 3 deletions spec/granite_orm/fields/timestamps_spec.cr
@@ -1,15 +1,14 @@
require "../../spec_helper"

# TODO sqlite support for timestamps
# Can run this spec for sqlite after https://www.sqlite.org/draft/releaselog/3_24_0.html is released.
{% for adapter in ["pg", "mysql"] %}
module {{adapter.capitalize.id}}
{%
avoid_macro_bug = 1 # https://github.com/crystal-lang/crystal/issues/5724

# TODO mysql timestamp support should work better
if adapter == "pg"
time_kind_on_read = "Time::Kind::Utc".id
elsif adapter == "mysql"
else
time_kind_on_read = "Time::Kind::Unspecified".id
end
%}
@@ -56,6 +55,58 @@ module {{adapter.capitalize.id}}

original_timestamp.epoch.should eq read_timestamp.epoch
end

context "bulk imports" do
it "timestamps are returned correctly with bulk imports" do
to_import = [
Parent.new(name: "ParentImport1"),
Parent.new(name: "ParentImport2"),
Parent.new(name: "ParentImport3"),
]

grandma = Parent.new(name: "grandma").tap(&.save)
found_grandma = Parent.find! grandma.id
Parent.import(to_import)

parents = Parent.all("WHERE name LIKE ?", ["ParentImport%"])

parents.size.should eq 3

parents.each do |parent|
parent.updated_at.not_nil!.kind.should eq {{ time_kind_on_read }}
parent.created_at.not_nil!.kind.should eq {{ time_kind_on_read }}
found_grandma.updated_at.not_nil!.epoch.should eq parent.updated_at.not_nil!.epoch
found_grandma.created_at.not_nil!.epoch.should eq parent.created_at.not_nil!.epoch
end
end

it "created_at and updated_at are correctly handled" do
to_import = [
Parent.new(name: "ParentOne"),
]

Parent.import(to_import)
import_time = Time.now

parent1 = Parent.find_by! :name, "ParentOne"
parent1.name.should eq "ParentOne"
parent1.created_at.not_nil!.epoch.should eq import_time.epoch
parent1.updated_at.not_nil!.epoch.should eq import_time.epoch

to_update = Parent.all("WHERE name = ?", ["ParentOne"])
to_update.each { |parent| parent.name = "ParentOneEdited" }

sleep 1

Parent.import(to_update, update_on_duplicate: true, columns: ["name"])
update_time = Time.now

parent1_edited = Parent.find_by! :name, "ParentOneEdited"
parent1_edited.name.should eq "ParentOneEdited"
parent1_edited.created_at.not_nil!.epoch.should eq import_time.epoch
parent1_edited.updated_at.not_nil!.epoch.should eq update_time.epoch
end
end
end
end
{% end %}
211 changes: 164 additions & 47 deletions spec/granite_orm/transactions/import_spec.cr
@@ -3,69 +3,186 @@ require "../../spec_helper"
{% for adapter in GraniteExample::ADAPTERS %}
module {{adapter.capitalize.id}}
describe "{{ adapter.id }} .import" do
it "should import 3 new objects" do
to_import = [
Parent.new(name: "ImportParent1"),
Parent.new(name: "ImportParent2"),
Parent.new(name: "ImportParent3"),
]
Parent.import(to_import)
Parent.all("WHERE name LIKE ?", ["Import%"]).size.should eq 3
end
describe "using the defualt primary key" do
context "with an AUTO INCREMENT PK" do
it "should import 3 new objects" do
to_import = [
Parent.new(name: "ImportParent1"),
Parent.new(name: "ImportParent2"),
Parent.new(name: "ImportParent3"),
]
Parent.import(to_import)
Parent.all("WHERE name LIKE ?", ["ImportParent%"]).size.should eq 3
end

it "should work with batch_size" do
to_import = [
Book.new(name: "ImportBatchBook1"),
Book.new(name: "ImportBatchBook2"),
Book.new(name: "ImportBatchBook3"),
Book.new(name: "ImportBatchBook4"),
]

Book.import(to_import, batch_size: 2)
Book.all("WHERE name LIKE ?", ["ImportBatch%"]).size.should eq 4
end

it "should be able to update existing records" do
to_import = [
Review.new(name: "ImportReview1", published: false, upvotes: 0.to_i64),
Review.new(name: "ImportReview2", published: false, upvotes: 0.to_i64),
Review.new(name: "ImportReview3", published: false, upvotes: 0.to_i64),
Review.new(name: "ImportReview4", published: false, upvotes: 0.to_i64),
]

it "should work with on_duplicate_key_update" do
to_import = [
Parent.new(id: 111, name: "ImportParent1"),
Parent.new(id: 112, name: "ImportParent2"),
Parent.new(id: 113, name: "ImportParent3"),
]
Review.import(to_import)

Parent.import(to_import)
reviews = Review.all("WHERE name LIKE ?", ["ImportReview%"])
reviews.size.should eq 4
reviews.none? { |r| r.published }.should be_true
reviews.all? { |r| r.upvotes == 0 }.should be_true

to_import = [
Parent.new(id: 112, name: "ImportParent112"),
]
reviews.each { |r| r.published = true; r.upvotes = 1.to_i64 }

Parent.import(to_import, update_on_duplicate: true, columns: ["name"])
Review.import(reviews, update_on_duplicate: true, columns: ["published", "upvotes"])

if parent = Parent.find 112
parent.name.should be "ImportParent112"
parent.id.should eq 112
reviews = Review.all("WHERE name LIKE ?", ["ImportReview%"])

reviews.size.should eq 4
reviews.all? { |r| r.published }.should be_true
reviews.all? { |r| r.upvotes == 1 }.should be_true
end
end
end
context "with non AUTO INCREMENT PK" do
it "should work with on_duplicate_key_update" do
to_import = [
NonAutoDefaultPK.new(id: 1.to_i64, name: "NonAutoDefaultPK1"),
NonAutoDefaultPK.new(id: 2.to_i64, name: "NonAutoDefaultPK2"),
NonAutoDefaultPK.new(id: 3.to_i64, name: "NonAutoDefaultPK3"),
]

NonAutoDefaultPK.import(to_import)

it "should work with on_duplicate_key_ignore" do
to_import = [
Parent.new(id: 111, name: "ImportParent1"),
Parent.new(id: 112, name: "ImportParent2"),
Parent.new(id: 113, name: "ImportParent3"),
]
to_import = [
NonAutoDefaultPK.new(id: 3.to_i64, name: "NonAutoDefaultPK3"),
]

Parent.import(to_import)
NonAutoDefaultPK.import(to_import, update_on_duplicate: true, columns: ["name"])

to_import = [
Parent.new(id: 113, name: "ImportParent113"),
]
record = NonAutoDefaultPK.find! 3.to_i64
record.name.should eq "NonAutoDefaultPK3"
record.id.should eq 3.to_i64
end

Parent.import(to_import, ignore_on_duplicate: true)
it "should work with on_duplicate_key_ignore" do
to_import = [
NonAutoDefaultPK.new(id: 4.to_i64, name: "NonAutoDefaultPK4"),
NonAutoDefaultPK.new(id: 5.to_i64, name: "NonAutoDefaultPK5"),
NonAutoDefaultPK.new(id: 6.to_i64, name: "NonAutoDefaultPK6"),
]

if parent = Parent.find 113
parent.name.should be "ImportParent3"
parent.id.should eq 113
NonAutoDefaultPK.import(to_import)

to_import = [
NonAutoDefaultPK.new(id: 6.to_i64, name: "NonAutoDefaultPK6"),
]

NonAutoDefaultPK.import(to_import, ignore_on_duplicate: true)

record = NonAutoDefaultPK.find! 6.to_i64
record.name.should eq "NonAutoDefaultPK6"
record.id.should eq 6.to_i64
end
end
end
describe "using a custom primary key" do
context "with an AUTO INCREMENT PK" do
it "should import 3 new objects" do
to_import = [
School.new(name: "ImportBasicSchool1"),
School.new(name: "ImportBasicSchool2"),
School.new(name: "ImportBasicSchool3"),
]
School.import(to_import)
School.all("WHERE name LIKE ?", ["ImportBasicSchool%"]).size.should eq 3
end

it "should work with batch_size" do
to_import = [
School.new(name: "ImportBatchSchool1"),
School.new(name: "ImportBatchSchool2"),
School.new(name: "ImportBatchSchool3"),
School.new(name: "ImportBatchSchool4"),
]

School.import(to_import, batch_size: 2)
School.all("WHERE name LIKE ?", ["ImportBatchSchool%"]).size.should eq 4
end

it "should be able to update existing records" do
to_import = [
School.new(name: "ImportExistingSchool"),
School.new(name: "ImportExistingSchool"),
School.new(name: "ImportExistingSchool"),
School.new(name: "ImportExistingSchool"),
]

it "should work with batch_size" do
to_import = [
Book.new(id: 111, name: "ImportBook1"),
Book.new(id: 112, name: "ImportBook2"),
Book.new(id: 113, name: "ImportBook3"),
Book.new(id: 114, name: "ImportBook4"),
]
School.import(to_import)

Book.import(to_import, batch_size: 2)
schools = School.all("WHERE name = ?", ["ImportExistingSchool"])
schools.size.should eq 4
schools.all? { |s| s.name == "ImportExistingSchool" }.should be_true

Book.all("WHERE name LIKE ?", ["Import%"]).size.should eq 4
schools.each { |s| s.name = "ImportExistingSchoolEdited" }

School.import(schools, update_on_duplicate: true, columns: ["name"])

schools = School.all("WHERE name LIKE ?", ["ImportExistingSchool%"])
schools.size.should eq 4
schools.all? { |s| s.name == "ImportExistingSchoolEdited" }.should be_true
end
end
context "with non AUTO INCREMENT PK" do
it "should work with on_duplicate_key_update" do
to_import = [
NonAutoCustomPK.new(custom_id: 1.to_i64, name: "NonAutoCustomPK1"),
NonAutoCustomPK.new(custom_id: 2.to_i64, name: "NonAutoCustomPK2"),
NonAutoCustomPK.new(custom_id: 3.to_i64, name: "NonAutoCustomPK3"),
]

NonAutoCustomPK.import(to_import)

to_import = [
NonAutoCustomPK.new(custom_id: 3.to_i64, name: "NonAutoCustomPK3"),
]

NonAutoCustomPK.import(to_import, update_on_duplicate: true, columns: ["name"])

record = NonAutoCustomPK.find! 3.to_i64
record.name.should eq "NonAutoCustomPK3"
record.custom_id.should eq 3.to_i64
end

it "should work with on_duplicate_key_ignore" do
to_import = [
NonAutoCustomPK.new(custom_id: 4.to_i64, name: "NonAutoCustomPK4"),
NonAutoCustomPK.new(custom_id: 5.to_i64, name: "NonAutoCustomPK5"),
NonAutoCustomPK.new(custom_id: 6.to_i64, name: "NonAutoCustomPK6"),
]

NonAutoCustomPK.import(to_import)

to_import = [
NonAutoCustomPK.new(custom_id: 6.to_i64, name: "NonAutoCustomPK6"),
]

NonAutoCustomPK.import(to_import, ignore_on_duplicate: true)

record = NonAutoCustomPK.find! 6.to_i64
record.name.should eq "NonAutoCustomPK6"
record.custom_id.should eq 6.to_i64
end
end
end
end
end
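Taken together, the new specs above exercise the bulk-import API roughly as sketched below. This is a summary inferred from the tests in this diff (the model classes come from spec_models.cr, and the array variables are placeholders), not an authoritative reference:

  # Plain bulk INSERT of new records, optionally in batches
  Parent.import([Parent.new(name: "ImportParent1"), Parent.new(name: "ImportParent2")])
  Book.import(books, batch_size: 2)

  # Upsert: when the primary key already exists, update only the listed columns
  # (updated_at is refreshed automatically for models with timestamps)
  Review.import(reviews, update_on_duplicate: true, columns: ["published", "upvotes"])

  # Skip rows whose primary key already exists
  NonAutoDefaultPK.import(records, ignore_on_duplicate: true)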
18 changes: 18 additions & 0 deletions spec/spec_models.cr
@@ -195,6 +195,22 @@ require "uuid"
end
end

class NonAutoDefaultPK < Granite::ORM::Base
adapter {{ adapter_literal }}
table_name non_auto_default_pk

primary id : Int64, auto: false
field name : String
end

class NonAutoCustomPK < Granite::ORM::Base
adapter {{ adapter_literal }}
table_name non_auto_custom_pk

primary custom_id : Int64, auto: false
field name : String
end

Parent.migrator.drop_and_create
Teacher.migrator.drop_and_create
Student.migrator.drop_and_create
@@ -213,5 +229,7 @@
Book.migrator.drop_and_create
BookReview.migrator.drop_and_create
Item.migrator.drop_and_create
NonAutoDefaultPK.migrator.drop_and_create
NonAutoCustomPK.migrator.drop_and_create
end
{% end %}
2 changes: 1 addition & 1 deletion src/adapter/base.cr
@@ -50,7 +50,7 @@ abstract class Granite::Adapter::Base
abstract def insert(table_name, fields, params, lastval) : Int64

# This will insert an array of models as one insert statement
abstract def import(table_name : String, primary_name : String, fields, model_array, **options)
abstract def import(table_name : String, primary_name : String, auto : String, fields, model_array, **options)

# This will update a row in the database.
abstract def update(table_name, primary_name, fields, params)
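For orientation: the abstract import signature above gains an auto argument, passed as the string "true" or "false" to indicate whether the primary key auto-increments (the Pg adapter below checks auto == "true"). A hypothetical call from the model layer (the actual call site is not part of this diff) might look like:

  # Illustrative only; argument values are assumptions, not taken from this diff
  adapter.import("parents", "id", "true", fields, models,
                 update_on_duplicate: true, columns: ["name"])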
6 changes: 3 additions & 3 deletions src/adapter/mysql.cr
@@ -85,10 +85,9 @@ class Granite::Adapter::Mysql < Granite::Adapter::Base
end
end

def import(table_name : String, primary_name : String, fields, model_array, **options)
def import(table_name : String, primary_name : String, auto : String, fields, model_array, **options)
params = [] of DB::Any
now = Time.now.to_utc
fields.reject! { |field| field === "id" } if primary_name === "id"
now = Time.utc_now

statement = String.build do |stmt|
stmt << "INSERT"
@@ -111,6 +110,7 @@ class Granite::Adapter::Mysql < Granite::Adapter::Base
if options["update_on_duplicate"]?
if columns = options["columns"]?
statement += " ON DUPLICATE KEY UPDATE "
columns << "updated_at" if fields.includes? "updated_at"
columns.each do |key|
statement += "#{quote(key)}=VALUES(#{quote(key)}), "
end
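As a sketch of what the MySQL path above generates for an upsert import: the hidden lines build a multi-row INSERT, and the visible code appends an ON DUPLICATE KEY UPDATE clause, adding updated_at to the updated columns when the model has timestamp fields. Roughly (table and column names are illustrative; the trailing comma is trimmed by code not shown in this hunk):

  # Parent.import(parents, update_on_duplicate: true, columns: ["name"])
  # would emit something along the lines of:
  #
  #   INSERT INTO `parents` (`name`, `created_at`, `updated_at`)
  #   VALUES (?, ?, ?), (?, ?, ?), ...
  #   ON DUPLICATE KEY UPDATE `name`=VALUES(`name`), `updated_at`=VALUES(`updated_at`)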
9 changes: 6 additions & 3 deletions src/adapter/pg.cr
@@ -85,10 +85,12 @@ class Granite::Adapter::Pg < Granite::Adapter::Base
end
end

def import(table_name : String, primary_name : String, fields, model_array, **options)
def import(table_name : String, primary_name : String, auto : String, fields, model_array, **options)
params = [] of DB::Any
now = Time.now.to_utc
fields.reject! { |field| field === "id" } if primary_name === "id"
now = Time.utc_now
# PG fails when NULL is inserted into an AUTO INCREMENT PK field.
# If the PK is AUTO INCREMENT and every model's PK is nil, remove the PK from the fields list so AUTO INCREMENT works properly.
fields.reject! { |field| field == primary_name } if model_array.all? { |m| m.to_h[primary_name].nil? } && auto == "true"
index = 0

statement = String.build do |stmt|
@@ -112,6 +114,7 @@ class Granite::Adapter::Pg < Granite::Adapter::Base
if options["update_on_duplicate"]?
if columns = options["columns"]?
statement += " ON CONFLICT (#{quote(primary_name)}) DO UPDATE SET "
columns << "updated_at" if fields.includes? "updated_at"
columns.each do |key|
statement += "#{quote(key)}=EXCLUDED.#{quote(key)}, "
end
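The Postgres path mirrors this with ON CONFLICT ... DO UPDATE keyed on the primary column and reading EXCLUDED values, and additionally drops an auto-increment primary key from the field list when every imported model's PK is nil, so the sequence can assign ids. An illustrative statement (identifier quoting assumed; trailing comma trimmed by code not shown):

  # Parent.import(parents, update_on_duplicate: true, columns: ["name"])
  # would emit roughly:
  #
  #   INSERT INTO "parents" ("name", "created_at", "updated_at")
  #   VALUES ($1, $2, $3), ($4, $5, $6), ...
  #   ON CONFLICT ("id") DO UPDATE SET "name"=EXCLUDED."name", "updated_at"=EXCLUDED."updated_at"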