Skip to content

Commit

Permalink
Merge branch 'main' into xxh/add_some_metrics
Browse files Browse the repository at this point in the history
  • Loading branch information
mergify[bot] authored Aug 1, 2022
2 parents c566a3c + fb2b33a commit 93eb0cb
Show file tree
Hide file tree
Showing 45 changed files with 1,955 additions and 373 deletions.
505 changes: 403 additions & 102 deletions Cargo.lock

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions codecov.yml
Original file line number Diff line number Diff line change
Expand Up @@ -15,3 +15,4 @@ coverage:
threshold: 0.1% # allow the coverage to drop by 0.1% and posting a success status
ignore:
- "src/risedevtool"
- "src/bench/s3_bench"
10 changes: 5 additions & 5 deletions e2e_test/batch/aggregate/string_agg.slt.part
Original file line number Diff line number Diff line change
Expand Up @@ -8,19 +8,19 @@ statement ok
insert into t values ('aaa', 1, 1), ('bbb', 0, 2), ('ccc', 0, 5), ('ddd', 1, 4)

query T
select string_agg(v1 order by v1) from t
select string_agg(v1, '' order by v1) from t
----
aaabbbcccddd

query T
select string_agg(v1 order by v3) from t
select string_agg(v1, ',' order by v3) from t
----
aaabbbdddccc
aaa,bbb,ddd,ccc

query T
select string_agg(v1 order by v2 asc, v3 desc) from t
select string_agg(v1, v3::varchar order by v2 asc, v3 desc) from t
----
cccbbbdddaaa
ccc2bbb4ddd1aaa

statement ok
drop table t
3 changes: 2 additions & 1 deletion proto/hummock.proto
Original file line number Diff line number Diff line change
Expand Up @@ -210,7 +210,8 @@ message GetCompactionTasksResponse {
}

message ReportCompactionTasksRequest {
CompactTask compact_task = 1;
uint32 context_id = 1;
CompactTask compact_task = 2;
}

message ReportCompactionTasksResponse {
Expand Down
12 changes: 5 additions & 7 deletions proto/stream_plan.proto
Original file line number Diff line number Diff line change
Expand Up @@ -132,9 +132,9 @@ message FilterNode {
message MaterializeNode {
uint32 table_id = 1;
// Column indexes and orders of primary key
repeated plan_common.ColumnOrder column_orders = 3;
repeated plan_common.ColumnOrder column_orders = 2;
// Used for internal table states.
catalog.Table table = 4;
catalog.Table table = 3;
}

// Remark by Yanghao: for both local and global we use the same node in the protobuf.
Expand Down Expand Up @@ -289,14 +289,12 @@ message ArrangementInfo {
// Special node for shared state, which will only be produced in fragmenter. ArrangeNode will
// produce a special Materialize executor, which materializes data for downstream to query.
message ArrangeNode {
// Table Id of the arrangement
uint32 table_id = 2;
// Info about the arrangement
ArrangementInfo table_info = 3;
ArrangementInfo table_info = 1;
// Hash key of the materialize node, which is a subset of pk.
repeated uint32 distribution_key = 4;
repeated uint32 distribution_key = 2;
// Used for internal table states.
catalog.Table table = 5;
catalog.Table table = 3;
}

// Special node for shared state. LookupNode will join an arrangement with a stream.
Expand Down
1 change: 1 addition & 0 deletions proto/stream_service.proto
Original file line number Diff line number Diff line change
Expand Up @@ -91,6 +91,7 @@ message BarrierCompleteResponse {
hummock.SstableInfo sst = 2;
}
repeated GroupedSstableInfo synced_sstables = 4;
uint32 worker_id = 5;
}

// Before starting streaming, the leader node broadcast the actor-host table to needed workers.
Expand Down
10 changes: 10 additions & 0 deletions src/bench/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,10 @@ edition = "2021"

[dependencies]
async-trait = "0.1"

aws-config = { version = "0.15", default-features = false, features = ["rt-tokio", "native-tls"] }
aws-sdk-s3 = { version = "0.15", default-features = false, features = ["rt-tokio", "native-tls"] }
aws-smithy-http = "0.45"
bytes = "1"
bytesize = { version = "1", features = ["serde"] }
clap = { version = "3", features = ["derive"] }
Expand Down Expand Up @@ -36,10 +40,16 @@ tokio = { version = "=0.2.0-alpha.5", package = "madsim-tokio", features = [
"time",
"signal",
] }
tokio-stream = "0.1"
toml = "0.5"
workspace-hack = { version = "0.1", path = "../workspace-hack" }


[[bin]]
name = "file-cache-bench"
path = "file_cache_bench/main.rs"

[[bin]]
name = "s3-bench"
path = "s3_bench/main.rs"

209 changes: 209 additions & 0 deletions src/bench/s3_bench/cases.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,209 @@
[[case]]
type = "Put"
[case.args]
name = "s3 - put - 1MiB"
obj = "1MiB"
size = "1MiB"

[[case]]
type = "Put"
[case.args]
name = "s3 - put - 8MiB"
obj = "8MiB"
size = "8MiB"

[[case]]
type = "Put"
[case.args]
name = "s3 - put - 64MiB"
obj = "64MiB"
size = "64MiB"

[[case]]
type = "Put"
[case.args]
name = "s3 - put - 128MiB"
obj = "128MiB"
size = "128MiB"

[[case]]
type = "Put"
[case.args]
name = "s3 - put - 256MiB"
obj = "256MiB"
size = "256MiB"

[[case]]
type = "MultiPartUpload"
[case.args]
name = "s3 - multi part upload - 1MiB-p8MiB"
obj = "1MiB-p8MiB"
size = "1MiB"
part = "8MiB"

[[case]]
type = "MultiPartUpload"
[case.args]
name = "s3 - multi part upload - 8MiB-p8MiB"
obj = "8MiB-p8MiB"
size = "8MiB"
part = "8MiB"

[[case]]
type = "MultiPartUpload"
[case.args]
name = "s3 - multi part upload - 64MiB-p8MiB"
obj = "64MiB-p8MiB"
size = "64MiB"
part = "8MiB"

[[case]]
type = "MultiPartUpload"
[case.args]
name = "s3 - multi part upload - 128MiB-p8MiB"
obj = "128MiB-p8MiB"
size = "128MiB"
part = "8MiB"

[[case]]
type = "MultiPartUpload"
[case.args]
name = "s3 - multi part upload - 256MiB-p8MiB"
obj = "256MiB-p8MiB"
size = "256MiB"
part = "8MiB"

[[case]]
type = "Get"
[case.args]
name = "s3 - get - 1MiB"
obj = "1MiB"

[[case]]
type = "Get"
[case.args]
name = "s3 - get - 8MiB"
obj = "8MiB"

[[case]]
type = "Get"
[case.args]
name = "s3 - get - 64MiB"
obj = "64MiB"

[[case]]
type = "Get"
[case.args]
name = "s3 - get - 128MiB"
obj = "128MiB"

[[case]]
type = "Get"
[case.args]
name = "s3 - get (parted) - 1MiB-p8MiB"
obj = "1MiB"

[[case]]
type = "Get"
[case.args]
name = "s3 - get (parted) - 8MiB-p8MiB"
obj = "8MiB"

[[case]]
type = "Get"
[case.args]
name = "s3 - get (parted) - 64MiB-p8MiB"
obj = "64MiB"

[[case]]
type = "Get"
[case.args]
name = "s3 - get (parted) - 128MiB-p8MiB"
obj = "128MiB"

[[case]]
type = "Get"
[case.args]
name = "s3 - get (parted) - 256MiB-p8MiB"
obj = "256MiB"

[[case]]
type = "MultiPartGet"
[case.args]
name = "s3 - multi part get (all) - 1MiB-p8MiB"
obj = "1MiB-p8MiB"
part = [1]

[[case]]
type = "MultiPartGet"
[case.args]
name = "s3 - multi part get (all) - 8MiB-p8MiB"
obj = "8MiB-p8MiB"
part = [1]

[[case]]
type = "MultiPartGet"
[case.args]
name = "s3 - multi part get (all) - 64MiB-p8MiB"
obj = "64MiB-p8MiB"
part = [1, 2, 3, 4, 5, 6, 7, 8]

[[case]]
type = "MultiPartGet"
[case.args]
name = "s3 - multi part get (all) - 128MiB-p8MiB"
obj = "128MiB-p8MiB"
part = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16]

[[case]]
type = "MultiPartGet"
[case.args]
name = "s3 - multi part get (all) - 256MiB-p8MiB"
obj = "256MiB-p8MiB"
part = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32]

[[case]]
type = "MultiPartGet"
[case.args]
name = "s3 - multi part get (3 4) - 64MiB-p8MiB"
obj = "64MiB-p8MiB"
part = [3, 4]

[[case]]
type = "MultiPartGet"
[case.args]
name = "s3 - multi part get (3 4) - 128MiB-p8MiB"
obj = "128MiB-p8MiB"
part = [3, 4]

[[case]]
type = "ByteRangeGet"
[case.args]
name = "s3 - byte range get (fit 2 parts) - 64MiB-p8MiB"
obj = "64MiB-p8MiB"
start = 16777216 # 16MiB
end = 33554431 # 32MiB - 1

[[case]]
type = "ByteRangeGet"
[case.args]
name = "s3 - byte range get (fit 2 parts) - 128MiB-p8MiB"
obj = "128MiB-p8MiB"
start = 16777216 # 16MiB
end = 33554431 # 32MiB - 1

[[case]]
type = "ByteRangeGet"
[case.args]
name = "s3 - byte range get (unfit 2 parts) - 64MiB-p8MiB"
obj = "64MiB-p8MiB"
start = 19922944 # 19MiB
end = 36700159 # 35MiB - 1

[[case]]
type = "ByteRangeGet"
[case.args]
name = "s3 - byte range get (unfit 2 parts) - 128MiB-p8MiB"
obj = "128MiB-p8MiB"
start = 19922944 # 19MiB
end = 36700159 # 35MiB - 1
Loading

0 comments on commit 93eb0cb

Please sign in to comment.