Skip to content

Commit 7cc9c95

Browse files
Authored merge of branch 'master' into SPARK-28098
Parents: 72eae96 + b6bb24c — merge commit: 7cc9c95

File tree

38 files changed

+1498
-299
lines changed

38 files changed

+1498
-299
lines changed

.github/workflows/notify_test_workflow.yml

Lines changed: 55 additions & 26 deletions
Original file line numberDiff line numberDiff line change
@@ -57,34 +57,63 @@ jobs:
5757
await new Promise(r => setTimeout(r, 3000))
5858
5959
const runs = await github.request(endpoint, params)
60-
const runID = runs.data.workflow_runs[0].id
61-
// TODO: If no workflows were found, it's likely GitHub Actions was not enabled
62-
63-
if (runs.data.workflow_runs[0].head_sha != context.payload.pull_request.head.sha) {
64-
throw new Error('There was a new unsynced commit pushed. Please retrigger the workflow.');
65-
}
66-
67-
const runUrl = 'https://github.com/'
68-
+ context.payload.pull_request.head.repo.full_name
69-
+ '/actions/runs/'
70-
+ runID
7160
7261
const name = 'Build and test'
7362
const head_sha = context.payload.pull_request.head.sha
74-
const status = 'queued'
63+
let status = 'queued'
64+
65+
if (runs.data.workflow_runs.length === 0) {
66+
status = 'completed'
67+
const conclusion = 'action_required'
68+
69+
github.checks.create({
70+
owner: context.repo.owner,
71+
repo: context.repo.repo,
72+
name: name,
73+
head_sha: head_sha,
74+
status: status,
75+
conclusion: conclusion,
76+
output: {
77+
title: 'Workflow run detection failed',
78+
summary: `
79+
Unable to detect the workflow run for testing the changes in your PR.
7580
76-
github.checks.create({
77-
...context.repo,
78-
name,
79-
head_sha,
80-
status,
81-
output: {
82-
title: 'Test results',
83-
summary: runUrl,
84-
text: JSON.stringify({
85-
owner: context.payload.pull_request.head.repo.owner.login,
86-
repo: context.payload.pull_request.head.repo.name,
87-
run_id: runID
88-
})
81+
1. If you did not enable GitHub Actions in your forked repository, please enable it. See also [Disabling or limiting GitHub Actions for a repository](https://docs.github.com/en/github/administering-a-repository/disabling-or-limiting-github-actions-for-a-repository) for more details.
82+
2. It is possible your branch is based on the old \`master\` branch in Apache Spark, please sync your branch to the latest master branch. For example as below:
83+
\`\`\`bash
84+
git fetch upstream
85+
git rebase upstream/master
86+
git push origin YOUR_BRANCH --force
87+
\`\`\``
88+
}
89+
})
90+
} else {
91+
const runID = runs.data.workflow_runs[0].id
92+
93+
if (runs.data.workflow_runs[0].head_sha != context.payload.pull_request.head.sha) {
94+
throw new Error('There was a new unsynced commit pushed. Please retrigger the workflow.');
8995
}
90-
})
96+
97+
const runUrl = 'https://github.com/'
98+
+ context.payload.pull_request.head.repo.full_name
99+
+ '/actions/runs/'
100+
+ runID
101+
102+
github.checks.create({
103+
owner: context.repo.owner,
104+
repo: context.repo.repo,
105+
name: name,
106+
head_sha: head_sha,
107+
status: status,
108+
output: {
109+
title: 'Test results',
110+
summary: '[See test results](' + runUrl + ')',
111+
text: JSON.stringify({
112+
owner: context.payload.pull_request.head.repo.owner.login,
113+
repo: context.payload.pull_request.head.repo.name,
114+
run_id: runID
115+
})
116+
},
117+
details_url: runUrl,
118+
})
119+
}

.github/workflows/update_build_status.yml

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ jobs:
5858
5959
// Iterator GitHub Checks in the PR
6060
for await (const cr of checkRuns.data.check_runs) {
61-
if (cr.name == 'Build and test') {
61+
if (cr.name == 'Build and test' && cr.conclusion != "action_required") {
6262
// text contains parameters to make request in JSON.
6363
const params = JSON.parse(cr.output.text)
6464
@@ -74,7 +74,8 @@ jobs:
7474
check_run_id: cr.id,
7575
output: cr.output,
7676
status: run.data.status,
77-
conclusion: run.data.conclusion
77+
conclusion: run.data.conclusion,
78+
details_url: run.data.details_url
7879
})
7980
} else {
8081
console.log(' Run ' + cr.id + ': set status (' + run.data.status + ')')
@@ -84,6 +85,7 @@ jobs:
8485
check_run_id: cr.id,
8586
output: cr.output,
8687
status: run.data.status,
88+
details_url: run.data.details_url
8789
})
8890
}
8991

core/src/main/resources/org/apache/spark/ui/static/webui.css

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -113,7 +113,7 @@ table.sortable td {
113113
box-shadow: inset 1px 0 0 rgba(0,0,0,.15), inset 0 -1px 0 rgba(0,0,0,.15);
114114
}
115115

116-
.progress.progress-started {
116+
.progress .progress-bar.progress-started {
117117
background-color: #A0DFFF;
118118
background-image: -moz-linear-gradient(top, #A4EDFF, #94DDFF);
119119
background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#A4EDFF), to(#94DDFF));
@@ -124,7 +124,7 @@ table.sortable td {
124124
filter: progid:dximagetransform.microsoft.gradient(startColorstr='#FFA4EDFF', endColorstr='#FF94DDFF', GradientType=0);
125125
}
126126

127-
.progress .progress-bar {
127+
.progress .progress-bar.progress-completed {
128128
background-color: #3EC0FF;
129129
background-image: -moz-linear-gradient(top, #44CBFF, #34B0EE);
130130
background-image: -webkit-gradient(linear, 0 0, 0 100%, from(#44CBFF), to(#34B0EE));

core/src/main/scala/org/apache/spark/ui/UIUtils.scala

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -461,13 +461,14 @@ private[spark] object UIUtils extends Logging {
461461
skipped: Int,
462462
reasonToNumKilled: Map[String, Int],
463463
total: Int): Seq[Node] = {
464-
val ratio = if (total == 0) 100.0 else (completed.toDouble/total)*100
464+
val ratio = if (total == 0) 100.0 else (completed.toDouble / total) * 100
465465
val completeWidth = "width: %s%%".format(ratio)
466466
// started + completed can be > total when there are speculative tasks
467467
val boundedStarted = math.min(started, total - completed)
468-
val startWidth = "width: %s%%".format((boundedStarted.toDouble/total)*100)
468+
val startRatio = if (total == 0) 0.0 else (boundedStarted.toDouble / total) * 100
469+
val startWidth = "width: %s%%".format(startRatio)
469470

470-
<div class={ if (started > 0) s"progress progress-started" else s"progress" }>
471+
<div class="progress">
471472
<span style="text-align:center; position:absolute; width:100%;">
472473
{completed}/{total}
473474
{ if (failed == 0 && skipped == 0 && started > 0) s"($started running)" }
@@ -478,7 +479,8 @@ private[spark] object UIUtils extends Logging {
478479
}
479480
}
480481
</span>
481-
<div class="progress-bar" style={completeWidth}></div>
482+
<div class="progress-bar progress-completed" style={completeWidth}></div>
483+
<div class="progress-bar progress-started" style={startWidth}></div>
482484
</div>
483485
}
484486

Lines changed: 58 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,58 @@
1+
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.util.collection

/** Shared failure message for every mutator that [[ImmutableBitSet]] disables. */
private object ErrorMessage {
  final val msg: String = "mutable operation is not supported"
}

/**
 * A [[BitSet]] whose contents are fixed at construction time.
 *
 * The bits listed in `bitsToSet` are turned on once in the constructor; every
 * inherited mutating operation is overridden to throw
 * `UnsupportedOperationException`.
 *
 * @param numBits   capacity (number of addressable bits) of this bit set
 * @param bitsToSet indices of the bits that should be set
 */
class ImmutableBitSet(val numBits: Int, val bitsToSet: Int*) extends BitSet(numBits) {

  // Populate the set bits exactly once, going through the superclass mutator
  // (the override below would throw).
  bitsToSet.foreach(bit => super.set(bit))

  override def clear(): Unit =
    throw new UnsupportedOperationException(ErrorMessage.msg)

  override def clearUntil(bitIndex: Int): Unit =
    throw new UnsupportedOperationException(ErrorMessage.msg)

  override def set(index: Int): Unit =
    throw new UnsupportedOperationException(ErrorMessage.msg)

  override def setUntil(bitIndex: Int): Unit =
    throw new UnsupportedOperationException(ErrorMessage.msg)

  override def unset(index: Int): Unit =
    throw new UnsupportedOperationException(ErrorMessage.msg)

  override def union(other: BitSet): Unit =
    throw new UnsupportedOperationException(ErrorMessage.msg)
}

core/src/test/scala/org/apache/spark/ui/UIUtilsSuite.scala

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -113,7 +113,8 @@ class UIUtilsSuite extends SparkFunSuite {
113113
test("SPARK-11906: Progress bar should not overflow because of speculative tasks") {
114114
val generated = makeProgressBar(2, 3, 0, 0, Map.empty, 4).head.child.filter(_.label == "div")
115115
val expected = Seq(
116-
<div class="progress-bar" style="width: 75.0%"></div>
116+
<div class="progress-bar progress-completed" style="width: 75.0%"></div>,
117+
<div class="progress-bar progress-started" style="width: 25.0%"></div>
117118
)
118119
assert(generated.sameElements(expected),
119120
s"\nRunning progress bar should round down\n\nExpected:\n$expected\nGenerated:\n$generated")
Lines changed: 140 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,140 @@
1+
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.spark.util.collection

import org.apache.spark.SparkFunSuite

/** Tests for [[ImmutableBitSet]]: read operations work, mutators throw. */
class ImmutableBitSetSuite extends SparkFunSuite {

  // Asserts that nextSetBit, probed at each `from` index, yields `expected`.
  private def checkNextSetBits(bits: BitSet, probes: Seq[(Int, Int)]): Unit = {
    for ((from, expected) <- probes) {
      assert(bits.nextSetBit(from) === expected)
    }
  }

  test("basic get") {
    val setBits = Seq(0, 9, 1, 10, 90, 96)
    val bitset = new ImmutableBitSet(100, setBits: _*)
    (0 until 100).foreach { i =>
      // Exactly the constructor-listed bits are set.
      assert(bitset.get(i) === setBits.contains(i))
    }
    assert(bitset.cardinality() === setBits.size)
  }

  test("nextSetBit") {
    val bitset = new ImmutableBitSet(100, 0, 9, 1, 10, 90, 96)
    checkNextSetBits(bitset, Seq(
      0 -> 0, 1 -> 1, 2 -> 9, 9 -> 9, 10 -> 10,
      11 -> 90, 80 -> 90, 91 -> 96, 96 -> 96, 97 -> -1))
  }

  test("xor len(bitsetX) < len(bitsetY)") {
    val bitsetX = new ImmutableBitSet(60, 0, 2, 3, 37, 41)
    val bitsetY = new ImmutableBitSet(100, 0, 1, 3, 37, 38, 41, 85)

    val bitsetXor = bitsetX ^ bitsetY

    checkNextSetBits(bitsetXor, Seq(
      0 -> 1, 1 -> 1, 2 -> 2, 3 -> 38, 38 -> 38,
      39 -> 85, 42 -> 85, 85 -> 85, 86 -> -1))
  }

  test("xor len(bitsetX) > len(bitsetY)") {
    val bitsetX = new ImmutableBitSet(100, 0, 1, 3, 37, 38, 41, 85)
    val bitsetY = new ImmutableBitSet(60, 0, 2, 3, 37, 41)

    val bitsetXor = bitsetX ^ bitsetY

    // Symmetric difference is order-independent: same probes as the case above.
    checkNextSetBits(bitsetXor, Seq(
      0 -> 1, 1 -> 1, 2 -> 2, 3 -> 38, 38 -> 38,
      39 -> 85, 42 -> 85, 85 -> 85, 86 -> -1))
  }

  test("andNot len(bitsetX) < len(bitsetY)") {
    val bitsetX = new ImmutableBitSet(60, 0, 2, 3, 37, 41, 48)
    val bitsetY = new ImmutableBitSet(100, 0, 1, 3, 37, 38, 41, 85)

    val bitsetDiff = bitsetX.andNot(bitsetY)

    checkNextSetBits(bitsetDiff, Seq(
      0 -> 2, 1 -> 2, 2 -> 2, 3 -> 48, 48 -> 48, 49 -> -1, 65 -> -1))
  }

  test("andNot len(bitsetX) > len(bitsetY)") {
    val bitsetX = new ImmutableBitSet(100, 0, 1, 3, 37, 38, 41, 85)
    val bitsetY = new ImmutableBitSet(60, 0, 2, 3, 37, 41, 48)

    val bitsetDiff = bitsetX.andNot(bitsetY)

    checkNextSetBits(bitsetDiff, Seq(
      0 -> 1, 1 -> 1, 2 -> 38, 3 -> 38, 38 -> 38,
      39 -> 85, 85 -> 85, 86 -> -1))
  }

  test("immutability") {
    val bitset = new ImmutableBitSet(100)
    // Every inherited mutator must reject modification.
    val mutators: Seq[() => Unit] = Seq(
      () => bitset.set(1),
      () => bitset.setUntil(10),
      () => bitset.unset(1),
      () => bitset.clear(),
      () => bitset.clearUntil(10),
      () => bitset.union(new ImmutableBitSet(100)))
    mutators.foreach { op =>
      intercept[UnsupportedOperationException] {
        op()
      }
    }
  }
}

docs/core-migration-guide.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -34,6 +34,8 @@ license: |
3434

3535
- In Spark 3.2, support for Apache Mesos as a resource manager is deprecated and will be removed in a future version.
3636

37+
- In Spark 3.2, Spark will delete K8s driver service resource when the application terminates by itself. To restore the behavior before Spark 3.2, you can set `spark.kubernetes.driver.service.deleteOnTermination` to `false`.
38+
3739
## Upgrading from Core 3.0 to 3.1
3840

3941
- In Spark 3.0 and below, `SparkContext` can be created in executors. Since Spark 3.1, an exception will be thrown when creating `SparkContext` in executors. You can allow it by setting the configuration `spark.executor.allowSparkContext` when creating `SparkContext` in executors.

project/SparkBuild.scala

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -689,7 +689,9 @@ object ExcludedDependencies {
689689
excludeDependencies ++= Seq(
690690
ExclusionRule(organization = "com.sun.jersey"),
691691
ExclusionRule("javax.servlet", "javax.servlet-api"),
692-
ExclusionRule("javax.ws.rs", "jsr311-api"))
692+
ExclusionRule("javax.ws.rs", "jsr311-api"),
693+
ExclusionRule("io.netty", "netty-handler"),
694+
ExclusionRule("io.netty", "netty-transport-native-epoll"))
693695
)
694696
}
695697

0 commit comments

Comments
 (0)