Skip to content

Commit dbb34d5

Browse files
authored
Merge branch 'main' into feature/add-string-split-support
2 parents 5f54043 + d47f196 commit dbb34d5

File tree

13 files changed

+297
-252
lines changed

13 files changed

+297
-252
lines changed
Lines changed: 49 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,49 @@
1+
# Licensed to the Apache Software Foundation (ASF) under one
2+
# or more contributor license agreements. See the NOTICE file
3+
# distributed with this work for additional information
4+
# regarding copyright ownership. The ASF licenses this file
5+
# to you under the Apache License, Version 2.0 (the
6+
# "License"); you may not use this file except in compliance
7+
# with the License. You may obtain a copy of the License at
8+
#
9+
# http://www.apache.org/licenses/LICENSE-2.0
10+
#
11+
# Unless required by applicable law or agreed to in writing,
12+
# software distributed under the License is distributed on an
13+
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14+
# KIND, either express or implied. See the License for the
15+
# specific language governing permissions and limitations
16+
# under the License.
17+
18+
name: Check Markdown Formatting
19+
20+
concurrency:
21+
group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }}
22+
cancel-in-progress: true
23+
24+
on:
25+
pull_request:
26+
paths:
27+
- '**.md'
28+
29+
jobs:
30+
prettier-check:
31+
runs-on: ubuntu-latest
32+
steps:
33+
- uses: actions/checkout@v5
34+
35+
- name: Setup Node.js
36+
uses: actions/setup-node@v6
37+
with:
38+
node-version: '24'
39+
40+
- name: Install prettier
41+
run: npm install -g prettier
42+
43+
- name: Check markdown formatting
44+
run: |
45+
# if you encounter error, run prettier locally and commit changes using instructions at:
46+
#
47+
# https://datafusion.apache.org/comet/contributor-guide/development.html#how-to-format-md-document
48+
#
49+
prettier --check "**/*.md"

common/src/main/scala/org/apache/comet/CometConf.scala

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -666,14 +666,6 @@ object CometConf extends ShimCometConf {
666666
.booleanConf
667667
.createWithDefault(false)
668668

669-
val COMET_EXPR_ALLOW_INCOMPATIBLE: ConfigEntry[Boolean] =
670-
conf("spark.comet.expression.allowIncompatible")
671-
.category(CATEGORY_EXEC)
672-
.doc("Comet is not currently fully compatible with Spark for all expressions. " +
673-
s"Set this config to true to allow them anyway. $COMPAT_GUIDE.")
674-
.booleanConf
675-
.createWithDefault(false)
676-
677669
val COMET_EXEC_STRICT_FLOATING_POINT: ConfigEntry[Boolean] =
678670
conf("spark.comet.exec.strictFloatingPoint")
679671
.category(CATEGORY_EXEC)

dev/benchmarks/comet-tpcds.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@ $SPARK_HOME/bin/spark-submit \
4040
--conf spark.executor.extraClassPath=$COMET_JAR \
4141
--conf spark.plugins=org.apache.spark.CometPlugin \
4242
--conf spark.shuffle.manager=org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager \
43-
--conf spark.comet.expression.allowIncompatible=true \
43+
--conf spark.comet.expression.Cast.allowIncompatible=true \
4444
--conf spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem \
4545
--conf spark.hadoop.fs.s3a.aws.credentials.provider=com.amazonaws.auth.DefaultAWSCredentialsProviderChain \
4646
tpcbench.py \

dev/benchmarks/comet-tpch.sh

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ $SPARK_HOME/bin/spark-submit \
4141
--conf spark.plugins=org.apache.spark.CometPlugin \
4242
--conf spark.shuffle.manager=org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager \
4343
--conf spark.comet.exec.replaceSortMergeJoin=true \
44-
--conf spark.comet.expression.allowIncompatible=true \
44+
--conf spark.comet.expression.Cast.allowIncompatible=true \
4545
--conf spark.hadoop.fs.s3a.impl=org.apache.hadoop.fs.s3a.S3AFileSystem \
4646
--conf spark.hadoop.fs.s3a.aws.credentials.provider=com.amazonaws.auth.DefaultAWSCredentialsProviderChain \
4747
tpcbench.py \

docs/source/user-guide/latest/compatibility.md

Lines changed: 91 additions & 93 deletions
Large diffs are not rendered by default.

docs/source/user-guide/latest/configs.md

Lines changed: 126 additions & 127 deletions
Large diffs are not rendered by default.

docs/source/user-guide/latest/expressions.md

Lines changed: 0 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -31,9 +31,6 @@ of expressions that can be disabled.
3131
Expressions that are not Spark-compatible will fall back to Spark by default and can be enabled by setting
3232
`spark.comet.expression.EXPRNAME.allowIncompatible=true`.
3333

34-
It is also possible to specify `spark.comet.expression.allowIncompatible=true` to enable all
35-
incompatible expressions.
36-
3734
## Conditional Expressions
3835

3936
| Expression | SQL | Spark-Compatible? |

docs/source/user-guide/latest/kubernetes.md

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -79,7 +79,6 @@ spec:
7979
"spark.plugins": "org.apache.spark.CometPlugin"
8080
"spark.comet.enabled": "true"
8181
"spark.comet.exec.enabled": "true"
82-
"spark.comet.expression.allowIncompatible": "true"
8382
"spark.comet.exec.shuffle.enabled": "true"
8483
"spark.comet.exec.shuffle.mode": "auto"
8584
"spark.shuffle.manager": "org.apache.spark.sql.comet.execution.shuffle.CometShuffleManager"

spark/src/main/scala/org/apache/comet/GenerateDocs.scala

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -52,6 +52,7 @@ object GenerateDocs {
5252
w.write(s"${line.stripTrailing()}\n".getBytes)
5353
line match {
5454
case pattern(category) =>
55+
w.write("<!-- prettier-ignore-start -->\n".getBytes)
5556
w.write("| Config | Description | Default Value |\n".getBytes)
5657
w.write("|--------|-------------|---------------|\n".getBytes)
5758
category match {
@@ -61,12 +62,14 @@ object GenerateDocs {
6162
w.write(
6263
s"| `$config` | Enable Comet acceleration for `$expr` | true |\n".getBytes)
6364
}
65+
w.write("<!-- prettier-ignore-end -->\n".getBytes)
6466
case "enable_agg_expr" =>
6567
for (expr <- QueryPlanSerde.aggrSerdeMap.keys.map(_.getSimpleName).toList.sorted) {
6668
val config = s"spark.comet.expression.$expr.enabled"
6769
w.write(
6870
s"| `$config` | Enable Comet acceleration for `$expr` | true |\n".getBytes)
6971
}
72+
w.write("<!-- prettier-ignore-end -->\n".getBytes)
7073
case _ =>
7174
val urlPattern = """Comet\s+(Compatibility|Tuning|Tracing)\s+Guide\s+\(""".r
7275
val confs = publicConfigs.filter(_.category == category).toList.sortBy(_.key)
@@ -93,6 +96,7 @@ object GenerateDocs {
9396
}
9497
}
9598
}
99+
w.write("<!-- prettier-ignore-end -->\n".getBytes)
96100
}
97101
case _ =>
98102
}
@@ -106,6 +110,7 @@ object GenerateDocs {
106110
for (line <- lines) {
107111
w.write(s"${line.stripTrailing()}\n".getBytes)
108112
if (line.trim == "<!--BEGIN:COMPAT_CAST_TABLE-->") {
113+
w.write("<!-- prettier-ignore-start -->\n".getBytes)
109114
w.write("| From Type | To Type | Notes |\n".getBytes)
110115
w.write("|-|-|-|\n".getBytes)
111116
for (fromType <- CometCast.supportedTypes) {
@@ -123,7 +128,9 @@ object GenerateDocs {
123128
}
124129
}
125130
}
131+
w.write("<!-- prettier-ignore-end -->\n".getBytes)
126132
} else if (line.trim == "<!--BEGIN:INCOMPAT_CAST_TABLE-->") {
133+
w.write("<!-- prettier-ignore-start -->\n".getBytes)
127134
w.write("| From Type | To Type | Notes |\n".getBytes)
128135
w.write("|-|-|-|\n".getBytes)
129136
for (fromType <- CometCast.supportedTypes) {
@@ -140,6 +147,7 @@ object GenerateDocs {
140147
}
141148
}
142149
}
150+
w.write("<!-- prettier-ignore-end -->\n".getBytes)
143151
}
144152
}
145153
w.close()

spark/src/main/scala/org/apache/comet/expressions/CometCast.scala

Lines changed: 3 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -93,10 +93,9 @@ object CometCast extends CometExpressionSerde[Cast] with CometExprShim {
9393
castBuilder.setDatatype(dataType)
9494
castBuilder.setEvalMode(evalModeToProto(evalMode))
9595
castBuilder.setAllowIncompat(
96-
CometConf.COMET_EXPR_ALLOW_INCOMPATIBLE.get() ||
97-
SQLConf.get
98-
.getConfString(CometConf.getExprAllowIncompatConfigKey(classOf[Cast]), "false")
99-
.toBoolean)
96+
SQLConf.get
97+
.getConfString(CometConf.getExprAllowIncompatConfigKey(classOf[Cast]), "false")
98+
.toBoolean)
10099
castBuilder.setTimezone(timeZoneId.getOrElse("UTC"))
101100
Some(
102101
ExprOuterClass.Expr

0 commit comments

Comments (0)