Commit d14209c

zhengruifeng authored and cloud-fan committed
[SPARK-54115][TESTS][FOLLOW-UP] Refine org.apache.spark.util.UtilsSuite
### What changes were proposed in this pull request?

Refine `org.apache.spark.util.UtilsSuite`: stub the `ThreadInfo` mocks with `when(...).thenReturn(...)` instead of `doReturn(...).when(...)`, and widen `Utils.threadInfoOrdering` to `private[spark]` so the test no longer needs `PrivateMethodTester`.

### Why are the changes needed?

The Mockito documentation recommends `when` as the default stubbing style:

> Use doReturn() in those rare occasions when you cannot use when(Object)

`when` is preferred in the official doc: https://javadoc.io/static/org.mockito/mockito-core/5.12.0/org/mockito/Mockito.html#when(T)

### Does this PR introduce _any_ user-facing change?

No, test-only.

### How was this patch tested?

CI.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #53192 from zhengruifeng/spark_54115_test_followup.

Authored-by: Ruifeng Zheng <ruifengz@apache.org>
Signed-off-by: Wenchen Fan <wenchen@databricks.com>
1 parent 0ba9a9b commit d14209c
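As context for the `doReturn` → `when` switch in the diff below, here is a minimal sketch of the two Mockito stubbing styles on a `ThreadInfo` mock like the ones in the suite. The object name and the stand-alone setup are illustrative only, not part of this patch.

```scala
import java.lang.management.ThreadInfo

import org.mockito.Mockito.{doReturn, when}
import org.scalatestplus.mockito.MockitoSugar.mock

// Illustrative only: a stand-alone object, not code from this patch.
object MockitoStubbingStyles {
  def demo(): Unit = {
    val ti = mock[ThreadInfo]

    // Style used after this patch: the stubbed call is written as a normal
    // method call, so the compiler checks that 11L matches the return type.
    when(ti.getThreadId).thenReturn(11L)

    // Style used before this patch: the return value is not type-checked at
    // compile time; Mockito reserves doReturn for the rare cases where when()
    // cannot be used, e.g. re-stubbing spies.
    doReturn(12L).when(ti).getThreadId
  }
}
```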

2 files changed: +25 additions, -28 deletions

core/src/main/scala/org/apache/spark/util/Utils.scala

Lines changed: 1 addition & 1 deletion

@@ -2079,7 +2079,7 @@ private[spark] object Utils
 
   val CONNECT_EXECUTE_THREAD_PREFIX = "SparkConnectExecuteThread"
 
-  private val threadInfoOrdering = Ordering.fromLessThan {
+  private[spark] val threadInfoOrdering = Ordering.fromLessThan {
     (threadTrace1: ThreadInfo, threadTrace2: ThreadInfo) => {
       def priority(ti: ThreadInfo): Int = ti.getThreadName match {
         case name if name.startsWith(TASK_THREAD_NAME_PREFIX) => 100
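The widened visibility applies to the prefix-priority ordering whose head is visible in the hunk above. The sketch below only illustrates the `Ordering.fromLessThan` pattern: apart from the two thread-name prefixes and the task-thread priority of 100, the priority values and the thread-id tie-break are assumptions, not the actual body of `Utils.threadInfoOrdering`.

```scala
import java.lang.management.ThreadInfo

// Sketch of the prefix-priority idea; priorities other than 100 and the
// tie-break rule are assumed for illustration.
object ThreadInfoOrderingSketch {
  val TASK_THREAD_NAME_PREFIX = "Executor task launch worker"
  val CONNECT_EXECUTE_THREAD_PREFIX = "SparkConnectExecuteThread"

  val ordering: Ordering[ThreadInfo] = Ordering.fromLessThan {
    (t1: ThreadInfo, t2: ThreadInfo) => {
      def priority(ti: ThreadInfo): Int = ti.getThreadName match {
        case name if name.startsWith(TASK_THREAD_NAME_PREFIX) => 100
        case name if name.startsWith(CONNECT_EXECUTE_THREAD_PREFIX) => 90 // assumed value
        case _ => 0 // assumed value
      }
      val (p1, p2) = (priority(t1), priority(t2))
      // Higher priority sorts first; fall back to thread id within a group.
      if (p1 != p2) p1 > p2 else t1.getThreadId < t2.getThreadId
    }
  }
}
```

Under these assumptions, sorting the four mocks from the test puts the task threads first (thread ids 11, 12) followed by the connect execute threads (21, 22), which is what the assertion at the end of the test below checks.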

core/src/test/scala/org/apache/spark/util/UtilsSuite.scala

Lines changed: 24 additions & 27 deletions

@@ -38,8 +38,7 @@ import org.apache.hadoop.fs.Path
 import org.apache.hadoop.fs.audit.CommonAuditContext.currentAuditContext
 import org.apache.hadoop.ipc.{CallerContext => HadoopCallerContext}
 import org.apache.logging.log4j.Level
-import org.mockito.Mockito.doReturn
-import org.scalatest.PrivateMethodTester
+import org.mockito.Mockito.when
 import org.scalatestplus.mockito.MockitoSugar.mock
 
 import org.apache.spark.{SparkConf, SparkException, SparkFunSuite, TaskContext}
@@ -51,7 +50,7 @@ import org.apache.spark.scheduler.SparkListener
 import org.apache.spark.util.collection.Utils.createArray
 import org.apache.spark.util.io.ChunkedByteBufferInputStream
 
-class UtilsSuite extends SparkFunSuite with ResetSystemProperties with PrivateMethodTester {
+class UtilsSuite extends SparkFunSuite with ResetSystemProperties {
 
   test("timeConversion") {
     // Test -1
@@ -1132,37 +1131,35 @@ class UtilsSuite extends SparkFunSuite with ResetSystemProperties with PrivateMe
 
   test("ThreadInfoOrdering") {
     val task1T = mock[ThreadInfo]
-    doReturn(11L).when(task1T).getThreadId
-    doReturn("Executor task launch worker for task 1.0 in stage 1.0 (TID 11)")
-      .when(task1T).getThreadName
-    doReturn("Executor task launch worker for task 1.0 in stage 1.0 (TID 11)")
-      .when(task1T).toString
+    when(task1T.getThreadId).thenReturn(11L)
+    when(task1T.getThreadName)
+      .thenReturn("Executor task launch worker for task 1.0 in stage 1.0 (TID 11)")
+    when(task1T.toString)
+      .thenReturn("Executor task launch worker for task 1.0 in stage 1.0 (TID 11)")
 
     val task2T = mock[ThreadInfo]
-    doReturn(12L).when(task2T).getThreadId
-    doReturn("Executor task launch worker for task 2.0 in stage 1.0 (TID 22)")
-      .when(task2T).getThreadName
-    doReturn("Executor task launch worker for task 2.0 in stage 1.0 (TID 22)")
-      .when(task2T).toString
+    when(task2T.getThreadId).thenReturn(12L)
+    when(task2T.getThreadName)
+      .thenReturn("Executor task launch worker for task 2.0 in stage 1.0 (TID 22)")
+    when(task2T.toString)
+      .thenReturn("Executor task launch worker for task 2.0 in stage 1.0 (TID 22)")
 
     val connectExecuteOp1T = mock[ThreadInfo]
-    doReturn(21L).when(connectExecuteOp1T).getThreadId
-    doReturn("SparkConnectExecuteThread_opId=16148fb4-4189-43c3-b8d4-8b3b6ddd41c7")
-      .when(connectExecuteOp1T).getThreadName
-    doReturn("SparkConnectExecuteThread_opId=16148fb4-4189-43c3-b8d4-8b3b6ddd41c7")
-      .when(connectExecuteOp1T).toString
+    when(connectExecuteOp1T.getThreadId).thenReturn(21L)
+    when(connectExecuteOp1T.getThreadName)
+      .thenReturn("SparkConnectExecuteThread_opId=16148fb4-4189-43c3-b8d4-8b3b6ddd41c7")
+    when(connectExecuteOp1T.toString)
+      .thenReturn("SparkConnectExecuteThread_opId=16148fb4-4189-43c3-b8d4-8b3b6ddd41c7")
 
     val connectExecuteOp2T = mock[ThreadInfo]
-    doReturn(22L).when(connectExecuteOp2T).getThreadId
-    doReturn("SparkConnectExecuteThread_opId=4e4d1cac-ffde-46c1-b7c2-808b726cb47e")
-      .when(connectExecuteOp2T).getThreadName
-    doReturn("SparkConnectExecuteThread_opId=4e4d1cac-ffde-46c1-b7c2-808b726cb47e")
-      .when(connectExecuteOp2T).toString
-
-    val threadInfoOrderingMethod =
-      PrivateMethod[Ordering[ThreadInfo]](Symbol("threadInfoOrdering"))
+    when(connectExecuteOp2T.getThreadId).thenReturn(22L)
+    when(connectExecuteOp2T.getThreadName)
+      .thenReturn("SparkConnectExecuteThread_opId=4e4d1cac-ffde-46c1-b7c2-808b726cb47e")
+    when(connectExecuteOp2T.toString)
+      .thenReturn("SparkConnectExecuteThread_opId=4e4d1cac-ffde-46c1-b7c2-808b726cb47e")
+
     val sorted = Seq(connectExecuteOp1T, connectExecuteOp2T, task1T, task2T)
-      .sorted(Utils.invokePrivate(threadInfoOrderingMethod()))
+      .sorted(Utils.threadInfoOrdering)
     assert(sorted === Seq(task1T, task2T, connectExecuteOp1T, connectExecuteOp2T))
   }
 
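In other words, widening `threadInfoOrdering` to `private[spark]` is what lets the suite drop `PrivateMethodTester`. A minimal before/after sketch of the access pattern: the class and object names are hypothetical, and each half only compiles against the corresponding version of `Utils`.

```scala
package org.apache.spark.util

import java.lang.management.ThreadInfo

import org.scalatest.PrivateMethodTester

// Before the patch: threadInfoOrdering was private, so the suite mixed in
// PrivateMethodTester and reached the member reflectively.
class BeforeStyleSketch extends PrivateMethodTester {
  private val threadInfoOrderingMethod =
    PrivateMethod[Ordering[ThreadInfo]](Symbol("threadInfoOrdering"))
  val ordering: Ordering[ThreadInfo] = Utils.invokePrivate(threadInfoOrderingMethod())
}

// After the patch: private[spark] makes the member visible to any code under
// the org.apache.spark package, including the test suite.
object AfterStyleSketch {
  val ordering: Ordering[ThreadInfo] = Utils.threadInfoOrdering
}
```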
