Update scalafmt-core to 3.8.4 #415

Open
wants to merge 3 commits into base: main
2 changes: 2 additions & 0 deletions .git-blame-ignore-revs
@@ -0,0 +1,2 @@
+# Scala Steward: Reformat with scalafmt 3.8.4
+9550d47e90d98cd6d60369855d0154800035a797
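GitHub's blame view automatically skips commits listed in a .git-blame-ignore-revs file at the repository root, so the reformatting commit above will not clutter blame on github.com. Local git has to be pointed at the file explicitly; a typical one-time setup (a suggestion, not something this PR configures) is:

git config blame.ignoreRevsFile .git-blame-ignore-revs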
2 changes: 1 addition & 1 deletion .scalafmt.conf
@@ -1,4 +1,4 @@
version = "3.7.17"
version = "3.8.4"

align.preset = more
maxColumn = 100
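For context, the two settings shown after the version line are untouched by this PR: maxColumn = 100 caps formatted lines at 100 characters, and align.preset = more enables scalafmt's wider token alignment. A minimal sketch of the kind of vertical alignment that preset typically produces (hypothetical Scala, not code from this repository):

object AlignExample {
  // With align.preset = more, scalafmt lines up the "->" tokens
  // across consecutive entries at the same indentation level.
  val sparkConf = Map(
    "spark.master"  -> "local[*]",
    "spark.ui.port" -> "4040"
  )
}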
@@ -88,7 +88,9 @@ final class StageElem(
| <div class="progress-bar" role="progressbar" style="background-color: blue; width: $donePct%; ${extraStyle.mkString(
"; "
)}; color: white" aria-valuenow="$donePct" aria-valuemin="0" aria-valuemax="100">
| $doneTasks0${if (diff == 0) "" else s" + $diff"} / $numTasks
| $doneTasks0${
if (diff == 0) "" else s" + $diff"
} / $numTasks
| </div>
| <div class="progress-bar" role="progressbar" style="background-color: red; width: $startedPct%" aria-valuenow="$startedPct" aria-valuemin="0" aria-valuemax="100"></div>
|</div>
@@ -22,7 +22,7 @@ class ProgressBarTests(sparkVersion: String, master: String, conf: (String, Stri

val tests = Tests {

"dummy test" - {
"dummy test" -
sparkSession(
if (interactive)
"""
@@ -47,7 +47,6 @@ class ProgressBarTests(sparkVersion: String, master: String, conf: (String, Stri
@ val rdd = spark.sparkContext.makeRDD(1 to 10000, 200)
"""
)
}

}

34 changes: 11 additions & 23 deletions modules/tests/src/main/scala/ammonite/spark/SparkReplTests.scala
@@ -66,7 +66,7 @@ class SparkReplTests(
// Beware that indentation of the session snippets is super sensitive.
// All snippets should have the exact same indentation.

"simple foreach with accumulator" - {
"simple foreach with accumulator" -
sparkSession(
"""
@ val accum = sc.longAccumulator
@@ -78,9 +78,8 @@
res: java.lang.Long = 55L
"""
)
}

"external vars" - {
"external vars" -
sparkSession(
"""
@ var v = 7
@@ -95,9 +94,8 @@
res2: Int = 100
"""
)
}

"external classes" - {
"external classes" -
sparkSession(
"""
@ class C {
@@ -109,9 +107,8 @@
res: Int = 50
"""
)
}

"external functions" - {
"external functions" -
sparkSession(
"""
@ def double(x: Int) = x + x
@@ -121,9 +118,8 @@
res: Int = 110
"""
)
}

"external functions that access vars" - {
"external functions that access vars" -
sparkSession(
"""
@ var v = 7
@@ -141,7 +137,6 @@
res2: Int = 100
"""
)
}

def hasBroadcastIssue =
master == "local" || master.startsWith("local[")
@@ -219,7 +214,7 @@ class SparkReplTests(
)
}

"SPARK-2452 compound statements" - {
"SPARK-2452 compound statements" -
sparkSession(
"""
@ val x = 4 ; def f() = x
@@ -230,7 +225,6 @@
resFoo: Int = 4
"""
)
}

"SPARK-2576 importing implicits" - {
val fieldNamePart = if (is212) "" else "value = "
@@ -252,7 +246,7 @@
)
}

"Datasets and encoders" - {
"Datasets and encoders" -
sparkSession(
"""
@ import spark.implicits._
@@ -286,7 +280,6 @@
res: Array[Int] = Array(10)
"""
)
}

"SPARK-2632 importing a method from non serializable class and not using it" - {
val fieldNamePart = if (is212) "" else "value = "
@@ -361,8 +354,7 @@
)
}

"replicating blocks of object with class defined in repl" - {

"replicating blocks of object with class defined in repl" -
// FIXME The actual test also does https://github.com/apache/spark/blob/ab18b02e66fd04bc8f1a4fb7b6a7f2773902a494/repl/src/test/scala/org/apache/spark/repl/SingletonReplSuite.scala#L353-L359

sparkSession(
@@ -387,7 +379,6 @@
res: Int = 10
"""
)
}

"should clone and clean line object in ClosureCleaner" - {
inputUrlOpt match {
@@ -430,7 +421,7 @@
}
}

"newProductSeqEncoder with REPL defined class" - {
"newProductSeqEncoder with REPL defined class" -
sparkSession(
"""
@ case class Click(id: Int)
@@ -440,7 +431,6 @@
res: Boolean = true
"""
)
}

// Adapted from https://github.com/apache/spark/blob/3d5c61e5fd24f07302e39b5d61294da79aa0c2f9/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala#L193-L208
"line wrapper only initialized once when used as encoder outer scope" - {
@@ -488,7 +478,7 @@
}

// Adapted from https://github.com/apache/spark/blob/3d5c61e5fd24f07302e39b5d61294da79aa0c2f9/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala#L230-L238
"spark-shell should find imported types in class constructors and extends clause" - {
"spark-shell should find imported types in class constructors and extends clause" -
sparkSession(
"""
@ import org.apache.spark.Partition
@@ -501,10 +491,9 @@
defined class P
"""
)
}

// https://github.com/apache/spark/blob/3d5c61e5fd24f07302e39b5d61294da79aa0c2f9/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala#L240-L259
"spark-shell should shadow val/def definitions correctly" - {
"spark-shell should shadow val/def definitions correctly" -
sparkSession(
"""
@ def myMethod() = "first definition"
@@ -539,7 +528,6 @@
res: String = "!!2!!"
"""
)
}

// tests below are custom ones
