State: optionally apply indents to alt mod
kitbellew committed Nov 23, 2024
1 parent 9fae106 commit edfc005
Showing 6 changed files with 72 additions and 69 deletions.
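For orientation, a minimal self-contained sketch of the idea behind this commit (simplified stand-ins, not the actual scalafmt sources; only the names that also appear in the diff below are real): a ModExt can carry an alternative modification in altOpt, and the new noAltIndent flag lets a caller request that, when the engine falls back to that alternative, the original modification's indents are not applied to it.

object AltIndentSketch {
  final case class Indent(width: Int)

  final case class ModExt(
      isNL: Boolean,
      length: Int = 0, // rendered width of the modification itself
      indents: Seq[Indent] = Nil,
      altOpt: Option[ModExt] = None,
      noAltIndent: Boolean = false, // the field added by this commit
  ) {
    // New signature: `alt` is by-name and the flag rides along.
    def withAlt(alt: => ModExt, noAltIndent: Boolean = false): ModExt =
      copy(altOpt = Some(alt), noAltIndent = noAltIndent)

    def withAltIf(ok: Boolean)(
        alt: => ModExt,
        noAltIndent: Boolean = false,
    ): ModExt = if (ok) withAlt(alt, noAltIndent = noAltIndent) else this
  }

  // Roughly what State does when it considers switching to the alternative:
  // with noAltIndent set, the alt keeps only its own indents.
  def resolveAlt(cur: ModExt): Option[ModExt] = cur.altOpt.map { alt =>
    if (cur.noAltIndent) alt
    else alt.copy(indents = alt.indents ++ cur.indents)
  }

  def main(args: Array[String]): Unit = {
    val newline = ModExt(isNL = true, indents = Seq(Indent(2)))
    val space = ModExt(isNL = false, length = 1)
    println(resolveAlt(newline.withAlt(space))) // space inherits Indent(2)
    println(resolveAlt(newline.withAlt(space, noAltIndent = true))) // space unchanged
  }
}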
@@ -13,6 +13,7 @@ case class ModExt(
mod: Modification,
indents: Seq[Indent] = Nil,
altOpt: Option[ModExt] = None,
+noAltIndent: Boolean = false,
) {
@inline
def isNL: Boolean = mod.isNL
@@ -27,18 +28,20 @@ case class ModExt(

altOpt match {
case None => res("")
-case Some(x) => x.toString(res("|"), "+")
+case Some(x) => x.toString(res("|"), if (noAltIndent) "" else "+")
}
}

override def toString: String = toString("", "")

-def withAlt(alt: ModExt): ModExt =
-if (altOpt.contains(alt)) this else copy(altOpt = Some(alt))
+def withAlt(alt: => ModExt, noAltIndent: Boolean = false): ModExt =
+copy(altOpt = Some(alt), noAltIndent = noAltIndent)

@inline
-def withAltIf(ok: Boolean)(alt: => ModExt): ModExt =
-if (ok) withAlt(alt) else this
+def withAltIf(
+ok: Boolean,
+)(alt: => ModExt, noAltIndent: Boolean = false): ModExt =
+if (ok) withAlt(alt, noAltIndent = noAltIndent) else this

def orMod(flag: Boolean, mod: => ModExt): ModExt = if (flag) this else mod

@@ -1951,7 +1951,10 @@ class Router(formatOps: FormatOps) {
case Newlines.fold =>
def nlSplitBase(cost: Int, policy: Policy = NoPolicy)(implicit
fileLine: FileLine,
-) = Split(Newline.withAlt(modSpace), cost, policy = policy)
+) = {
+val nlMod = Newline.withAlt(modSpace, noAltIndent = true)
+Split(nlMod, cost, policy = policy)
+}
if (nextDotIfSig.isEmpty)
if (nlOnly) Seq(nlSplitBase(0))
else {
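In fold mode, the select-chain newline split above now tags its space alternative with noAltIndent = true: if the optimizer later substitutes the space for the newline, the space should not also inherit the chain indents that only make sense after a line break. A sketch of the call shape, using stand-ins rather than the real Router types:

object RouterCallSketch {
  // Stand-ins for the real scalafmt types, for illustration only.
  final case class ModExt(
      isNL: Boolean,
      altOpt: Option[ModExt] = None,
      noAltIndent: Boolean = false,
  ) {
    def withAlt(alt: => ModExt, noAltIndent: Boolean = false): ModExt =
      copy(altOpt = Some(alt), noAltIndent = noAltIndent)
  }
  final case class Split(mod: ModExt, cost: Int)

  private val Newline = ModExt(isNL = true)
  private val modSpace = ModExt(isNL = false)

  // Mirrors the new nlSplitBase: the newline's space alternative opts out
  // of indent inheritance.
  def nlSplitBase(cost: Int): Split = {
    val nlMod = Newline.withAlt(modSpace, noAltIndent = true)
    Split(nlMod, cost)
  }
}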
@@ -85,6 +85,7 @@ final class State(
initialModExt.altOpt.flatMap { alt =>
if (tok.left.is[T.Comment]) None
else if (nextIndent < alt.mod.length + column) None
+else if (initialModExt.noAltIndent) Some(alt)
else Some(alt.withIndents(initialModExt.indents))
}.fold((initialNextSplit, nextIndent, nextPushes)) { alt =>
val altPushes = getUnexpired(alt, pushes)
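The State side is the consumer of the flag: the alternative is dropped after a comment or when it would not fit on the line, taken verbatim when noAltIndent is set, and otherwise given the original modification's indents in addition to its own. A self-contained sketch of that guard chain (simplified types; withIndents is approximated by concatenation):

object StateAltSketch {
  final case class Indent(width: Int)
  final case class ModExt(
      length: Int, // rendered width of the modification itself
      indents: Seq[Indent] = Nil,
      altOpt: Option[ModExt] = None,
      noAltIndent: Boolean = false,
  )

  // Mirrors the guard chain above; None means keep the original split.
  def altToConsider(
      initial: ModExt,
      afterComment: Boolean, // stands in for tok.left.is[T.Comment]
      nextIndent: Int,
      column: Int,
  ): Option[ModExt] = initial.altOpt.flatMap { alt =>
    if (afterComment) None // never substitute right after a comment
    else if (nextIndent < alt.length + column) None // alt would not fit
    else if (initial.noAltIndent) Some(alt) // new: take the alt as-is
    else Some(alt.copy(indents = alt.indents ++ initial.indents)) // old default
  }
}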
108 changes: 52 additions & 56 deletions scalafmt-tests/shared/src/test/resources/newlines/source_fold.stat
@@ -8430,7 +8430,7 @@ object a {
)
)
}
->>> { stateVisits = 4769, stateVisits2 = 4769 }
+>>> { stateVisits = 3882, stateVisits2 = 3882 }
object a {
div(cls := "cover")(
div(cls := "doc")(bodyContents),
@@ -9558,61 +9558,57 @@ object a {
.map(_.filterNot(_.getCanonicalPath.contains("SSLOptions")))
}
}
->>> { stateVisits = 3668, stateVisits2 = 3668 }
+>>> { stateVisits = 52849, stateVisits2 = 52849 }
object a {
private def ignoreUndocumentedPackages(
packages: Seq[Seq[File]]
): Seq[Seq[File]] = {
-packages.map(_.filterNot(_.getName.contains("$")))
-.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/deploy")))
-.map(_.filterNot(
-_.getCanonicalPath.contains("org/apache/spark/examples")
-)).map(_.filterNot(
-_.getCanonicalPath.contains("org/apache/spark/internal")
-))
-.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/memory")))
-.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/network")))
-.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/rpc")))
-.map(_.filterNot(f =>
-f.getCanonicalPath.contains("org/apache/spark/shuffle") &&
-!f.getCanonicalPath.contains("org/apache/spark/shuffle/api")
-)).map(_.filterNot(
-_.getCanonicalPath.contains("org/apache/spark/executor")
-)).map(_.filterNot(
-_.getCanonicalPath.contains("org/apache/spark/ExecutorAllocationClient")
-)).map(_.filterNot(_.getCanonicalPath.contains(
-"org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend"
-))).map(_.filterNot(f =>
-f.getCanonicalPath.contains("org/apache/spark/unsafe") &&
-!f.getCanonicalPath
-.contains("org/apache/spark/unsafe/types/CalendarInterval")
-)).map(_.filterNot(_.getCanonicalPath.contains("python")))
-.map(_.filterNot(
-_.getCanonicalPath.contains("org/apache/spark/util/collection")
-))
-.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/util/io")))
-.map(_.filterNot(
-_.getCanonicalPath.contains("org/apache/spark/util/kvstore")
-)).map(_.filterNot(
-_.getCanonicalPath.contains("org/apache/spark/sql/catalyst")
-)).map(_.filterNot(
-_.getCanonicalPath.contains("org/apache/spark/sql/connect/")
-)).map(_.filterNot(
-_.getCanonicalPath.contains("org/apache/spark/sql/execution")
-)).map(_.filterNot(
-_.getCanonicalPath.contains("org/apache/spark/sql/internal")
-)).map(_.filterNot(
-_.getCanonicalPath.contains("org/apache/spark/sql/hive")
-)).map(_.filterNot(
-_.getCanonicalPath.contains("org/apache/spark/sql/catalog/v2/utils")
-))
-.map(_.filterNot(_.getCanonicalPath.contains("org.apache.spark.errors")))
-.map(_.filterNot(
-_.getCanonicalPath.contains("org.apache.spark.sql.errors")
-)).map(_.filterNot(_.getCanonicalPath.contains("org/apache/hive")))
-.map(_.filterNot(
-_.getCanonicalPath.contains("org/apache/spark/sql/v2/avro")
-)).map(_.filterNot(_.getCanonicalPath.contains("SSLOptions")))
+packages.map(_.filterNot(_.getName.contains("$"))).map(_.filterNot(
+_.getCanonicalPath.contains("org/apache/spark/deploy")
+)).map(_.filterNot(
+_.getCanonicalPath.contains("org/apache/spark/examples")
+))
+.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/internal")))
+.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/memory")))
+.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/network")))
+.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/rpc")))
+.map(_.filterNot(f =>
+f.getCanonicalPath.contains("org/apache/spark/shuffle") &&
+!f.getCanonicalPath.contains("org/apache/spark/shuffle/api")
+))
+.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/executor")))
+.map(_.filterNot(
+_.getCanonicalPath.contains("org/apache/spark/ExecutorAllocationClient")
+)).map(_.filterNot(_.getCanonicalPath.contains(
+"org/apache/spark/scheduler/cluster/CoarseGrainedSchedulerBackend"
+))).map(_.filterNot(f =>
+f.getCanonicalPath.contains("org/apache/spark/unsafe") &&
+!f.getCanonicalPath
+.contains("org/apache/spark/unsafe/types/CalendarInterval")
+)).map(_.filterNot(_.getCanonicalPath.contains("python"))).map(_.filterNot(
+_.getCanonicalPath.contains("org/apache/spark/util/collection")
+)).map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/util/io")))
+.map(_.filterNot(
+_.getCanonicalPath.contains("org/apache/spark/util/kvstore")
+)).map(_.filterNot(
+_.getCanonicalPath.contains("org/apache/spark/sql/catalyst")
+)).map(_.filterNot(
+_.getCanonicalPath.contains("org/apache/spark/sql/connect/")
+)).map(_.filterNot(
+_.getCanonicalPath.contains("org/apache/spark/sql/execution")
+)).map(_.filterNot(
+_.getCanonicalPath.contains("org/apache/spark/sql/internal")
+))
+.map(_.filterNot(_.getCanonicalPath.contains("org/apache/spark/sql/hive")))
+.map(_.filterNot(
+_.getCanonicalPath.contains("org/apache/spark/sql/catalog/v2/utils")
+)).map(_.filterNot(_.getCanonicalPath.contains("org.apache.spark.errors")))
+.map(_.filterNot(
+_.getCanonicalPath.contains("org.apache.spark.sql.errors")
+)).map(_.filterNot(_.getCanonicalPath.contains("org/apache/hive")))
+.map(_.filterNot(
+_.getCanonicalPath.contains("org/apache/spark/sql/v2/avro")
+)).map(_.filterNot(_.getCanonicalPath.contains("SSLOptions")))
}
}
<<< #4133 spark: massive chain of applies with function calls
@@ -10069,7 +10065,7 @@ system.dynamicAccess.createInstanceFor[Serializer](fqn, Nil).recoverWith {
}
}
}
->>> { stateVisits = 4820, stateVisits2 = 4820 }
+>>> { stateVisits = 3689, stateVisits2 = 3689 }
system.dynamicAccess.createInstanceFor[Serializer](fqn, Nil).recoverWith {
case _: NoSuchMethodException => system.dynamicAccess
.createInstanceFor[Serializer](
@@ -10512,9 +10508,9 @@ object a {
// c1
baz
}.qux { x =>
-// c2
-quux(x)
-}
+// c2
+quux(x)
+}
}
<<< chained maps with multi-line blocks, with braces-to-parens not OK
object a {
@@ -6702,12 +6702,12 @@ object a {
x8: Any, x9: Any, x10: Any, x11: Any, x12: Any, x13: Any,
x14: Any, x15: Any, x16: Any, x17: Any, x18: Any) =>
g.asInstanceOf[
-Tuple18[
-_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _,
-_] => Any].apply(
-(
-x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13,
-x14, x15, x16, x17, x18))).asInstanceOf[F])
+Tuple18[
+_, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _, _,
+_] => Any].apply(
+(
+x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13,
+x14, x15, x16, x17, x18))).asInstanceOf[F])
}
<<< #4133 binpack overflow assignment with selects, attributes, !dangling
indent.callSite = 4
@@ -144,7 +144,7 @@ class FormatTests extends FunSuite with CanRunTests with FormatAssertions {
val explored = Debug.explored.get()
logger.debug(s"Total explored: $explored")
if (!onlyUnit && !onlyManual)
-assertEquals(explored, 1089468, "total explored")
+assertEquals(explored, 1183844, "total explored")
val results = debugResults.result()
// TODO(olafur) don't block printing out test results.
// I don't want to deal with scalaz's Tasks :'(
