[GLUTEN-7110][VL][DELTA] support IncrementMetric in gluten (#7111)
* [GLUTEN-7110][VL][DELTA] support IncrementMetric in gluten

* init DeltaFilterExecTransformer

* init DeltaProjectExecTransformer

* update metric

* add FilterTransformerRegistor

* fix scala 213 compile error

* add ProjectTransformerRegister

* remove redundant function

* update supportedDelta

* update veloxSparkPlanExecApi

* use createUnsafe for ProjectExec

* init ReplaceDeltaTransformer

* use delta write rule

* fix error

* use copy-resources to reduce redundant resources
dcoliversun authored Oct 15, 2024
1 parent 3dc1980 commit 27f30ad
Showing 14 changed files with 415 additions and 54 deletions.
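
In short: Delta wraps expressions in IncrementMetric(child, metric) so that every row that evaluates the expression also bumps a SQLMetric. The offloaded (native) operators never evaluate that wrapper, so the new Delta transformers below strip it before Substrait conversion and keep the (name, metric) pairs aside, and the metrics updaters later credit the operator's output row count to those metrics. A minimal, self-contained sketch of that pattern, using toy stand-ins rather than the real Delta/Gluten types:

// Toy stand-ins, for illustration only; none of these are the real Delta or Gluten classes.
final class ToyMetric(val name: String) { var value = 0L; def add(v: Long): Unit = value += v }

sealed trait Expr
case class Attr(name: String) extends Expr
case class GreaterThan(left: Expr, right: Expr) extends Expr
case class IncrementMetric(child: Expr, metric: ToyMetric) extends Expr

object IncrementMetricSketch extends App {
  // What the transformer does in getRelNode: unwrap IncrementMetric and remember its metric.
  def unwrap(cond: Expr, extra: collection.mutable.Buffer[(String, ToyMetric)]): Expr = cond match {
    case IncrementMetric(child, metric) =>
      extra += ("increment_metric" -> metric)
      child
    case other => other
  }

  // What the metrics updater does afterwards: credit the operator's output rows to each metric.
  def updateExtra(extra: Seq[(String, ToyMetric)], outputRows: Long): Unit = extra.foreach {
    case ("increment_metric", m) => m.add(outputRows)
    case _ => // other kinds of extra metrics: nothing to do
  }

  val numDeletedRows = new ToyMetric("numDeletedRows")
  val condition = IncrementMetric(GreaterThan(Attr("age"), Attr("lit_18")), numDeletedRows)
  val extra = collection.mutable.Buffer.empty[(String, ToyMetric)]
  val nativeCondition = unwrap(condition, extra) // the native filter only sees age > lit_18
  updateExtra(extra.toSeq, outputRows = 42L)     // the native operator reported 42 output rows
  println(s"${numDeletedRows.name} = ${numDeletedRows.value}") // prints 42
}
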
@@ -165,7 +165,9 @@ class CHMetricsApi extends MetricsApi with Logging with LogLevelUtil {
"totalTime" -> SQLMetrics.createTimingMetric(sparkContext, "time")
)

override def genFilterTransformerMetricsUpdater(metrics: Map[String, SQLMetric]): MetricsUpdater =
override def genFilterTransformerMetricsUpdater(
metrics: Map[String, SQLMetric],
extraMetrics: Seq[(String, SQLMetric)] = Seq.empty): MetricsUpdater =
new FilterMetricsUpdater(metrics)

override def genProjectTransformerMetrics(sparkContext: SparkContext): Map[String, SQLMetric] =
@@ -182,7 +184,9 @@ class CHMetricsApi extends MetricsApi with Logging with LogLevelUtil {
)

override def genProjectTransformerMetricsUpdater(
metrics: Map[String, SQLMetric]): MetricsUpdater = new ProjectMetricsUpdater(metrics)
metrics: Map[String, SQLMetric],
extraMetrics: Seq[(String, SQLMetric)] = Seq.empty): MetricsUpdater =
new ProjectMetricsUpdater(metrics)

override def genHashAggregateTransformerMetrics(
sparkContext: SparkContext): Map[String, SQLMetric] =
@@ -175,8 +175,10 @@ class VeloxMetricsApi extends MetricsApi with Logging {
"number of memory allocations")
)

override def genFilterTransformerMetricsUpdater(metrics: Map[String, SQLMetric]): MetricsUpdater =
new FilterMetricsUpdater(metrics)
override def genFilterTransformerMetricsUpdater(
metrics: Map[String, SQLMetric],
extraMetrics: Seq[(String, SQLMetric)] = Seq.empty): MetricsUpdater =
new FilterMetricsUpdater(metrics, extraMetrics)

override def genProjectTransformerMetrics(sparkContext: SparkContext): Map[String, SQLMetric] =
Map(
@@ -192,7 +194,9 @@
)

override def genProjectTransformerMetricsUpdater(
metrics: Map[String, SQLMetric]): MetricsUpdater = new ProjectMetricsUpdater(metrics)
metrics: Map[String, SQLMetric],
extraMetrics: Seq[(String, SQLMetric)] = Seq.empty): MetricsUpdater =
new ProjectMetricsUpdater(metrics, extraMetrics)

override def genHashAggregateTransformerMetrics(
sparkContext: SparkContext): Map[String, SQLMetric] =
@@ -16,30 +16,11 @@
*/
package org.apache.gluten.execution

import org.apache.spark.sql.catalyst.expressions.{And, Expression}
import org.apache.spark.sql.catalyst.expressions.Expression
import org.apache.spark.sql.execution.SparkPlan

case class FilterExecTransformer(condition: Expression, child: SparkPlan)
extends FilterExecTransformerBase(condition, child) {
// FIXME: Should use field "condition" to store the actual executed filter expressions.
// To make optimization easier (like to remove filter when it actually does nothing)
override protected def getRemainingCondition: Expression = {
val scanFilters = child match {
// Get the filters including the manually pushed down ones.
case basicScanExecTransformer: BasicScanExecTransformer =>
basicScanExecTransformer.filterExprs()
// For fallback scan, we need to keep original filter.
case _ =>
Seq.empty[Expression]
}
if (scanFilters.isEmpty) {
condition
} else {
val remainingFilters =
FilterHandler.getRemainingFilters(scanFilters, splitConjunctivePredicates(condition))
remainingFilters.reduceLeftOption(And).orNull
}
}

override protected def withNewChildInternal(newChild: SparkPlan): FilterExecTransformer =
copy(child = newChild)
@@ -18,7 +18,10 @@ package org.apache.gluten.metrics

import org.apache.spark.sql.execution.metric.SQLMetric

class FilterMetricsUpdater(val metrics: Map[String, SQLMetric]) extends MetricsUpdater {
class FilterMetricsUpdater(
val metrics: Map[String, SQLMetric],
val extraMetrics: Seq[(String, SQLMetric)])
extends MetricsUpdater {

override def updateNativeMetrics(opMetrics: IOperatorMetrics): Unit = {
if (opMetrics != null) {
@@ -30,6 +33,13 @@ class FilterMetricsUpdater(val metrics: Map[String, SQLMetric]) extends MetricsU
metrics("wallNanos") += operatorMetrics.wallNanos
metrics("peakMemoryBytes") += operatorMetrics.peakMemoryBytes
metrics("numMemoryAllocations") += operatorMetrics.numMemoryAllocations
extraMetrics.foreach {
case (name, metric) =>
name match {
case "increment_metric" => metric += operatorMetrics.outputRows
case _ => // do nothing
}
}
}
}
}
@@ -18,7 +18,10 @@ package org.apache.gluten.metrics

import org.apache.spark.sql.execution.metric.SQLMetric

class ProjectMetricsUpdater(val metrics: Map[String, SQLMetric]) extends MetricsUpdater {
class ProjectMetricsUpdater(
val metrics: Map[String, SQLMetric],
val extraMetrics: Seq[(String, SQLMetric)])
extends MetricsUpdater {

override def updateNativeMetrics(opMetrics: IOperatorMetrics): Unit = {
if (opMetrics != null) {
@@ -30,6 +33,13 @@ class ProjectMetricsUpdater(val metrics: Map[String, SQLMetric]) extends Metrics
metrics("wallNanos") += operatorMetrics.wallNanos
metrics("peakMemoryBytes") += operatorMetrics.peakMemoryBytes
metrics("numMemoryAllocations") += operatorMetrics.numMemoryAllocations
extraMetrics.foreach {
case (name, metric) =>
name match {
case "increment_metric" => metric += operatorMetrics.outputRows
case _ => // do nothing
}
}
}
}
}
31 changes: 31 additions & 0 deletions gluten-delta/pom.xml
@@ -130,6 +130,19 @@
<plugin>
<groupId>com.diffplug.spotless</groupId>
<artifactId>spotless-maven-plugin</artifactId>
<configuration>
<scala>
<scalafmt>
<file>${project.basedir}/../.scalafmt.conf</file>
</scalafmt>
<includes>
<include>src/main/scala/**/*.scala</include>
<include>src/test/scala/**/*.scala</include>
<include>src/main/delta-${delta.binary.version}/**/*.scala</include>
<include>src/test/delta-${delta.binary.version}/**/*.scala</include>
</includes>
</scala>
</configuration>
</plugin>
<plugin>
<groupId>org.scalatest</groupId>
@@ -154,6 +167,24 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<executions>
<execution>
<id>copy-resources</id>
<phase>generate-sources</phase>
<goals>
<goal>copy-resources</goal>
</goals>
<configuration>
<outputDirectory>src/main/scala/org/apache/gluten/execution</outputDirectory>
<resources>
<resource>
<directory>src/main/delta-${delta.binary.version}/org/apache/gluten/execution</directory>
</resource>
</resources>
<overwrite>true</overwrite>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
</build>
@@ -0,0 +1,81 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gluten.execution

import org.apache.gluten.backendsapi.BackendsApiManager
import org.apache.gluten.expression.{ConverterUtils, ExpressionConverter}
import org.apache.gluten.metrics.MetricsUpdater
import org.apache.gluten.substrait.`type`.TypeBuilder
import org.apache.gluten.substrait.SubstraitContext
import org.apache.gluten.substrait.extensions.ExtensionBuilder
import org.apache.gluten.substrait.rel.{RelBuilder, RelNode}

import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression}
import org.apache.spark.sql.delta.metric.IncrementMetric
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.execution.metric.SQLMetric

import scala.collection.JavaConverters._

case class DeltaFilterExecTransformer(condition: Expression, child: SparkPlan)
extends FilterExecTransformerBase(condition, child) {

private var extraMetrics: Seq[(String, SQLMetric)] = Seq.empty

override def metricsUpdater(): MetricsUpdater =
BackendsApiManager.getMetricsApiInstance.genFilterTransformerMetricsUpdater(
metrics,
extraMetrics.toSeq)

override def getRelNode(
context: SubstraitContext,
condExpr: Expression,
originalInputAttributes: Seq[Attribute],
operatorId: Long,
input: RelNode,
validation: Boolean): RelNode = {
assert(condExpr != null)
val args = context.registeredFunction
val condExprNode = condExpr match {
case IncrementMetric(child, metric) =>
extraMetrics :+= (condExpr.prettyName, metric)
ExpressionConverter
.replaceWithExpressionTransformer(child, attributeSeq = originalInputAttributes)
.doTransform(args)
case _ =>
ExpressionConverter
.replaceWithExpressionTransformer(condExpr, attributeSeq = originalInputAttributes)
.doTransform(args)
}

if (!validation) {
RelBuilder.makeFilterRel(input, condExprNode, context, operatorId)
} else {
// Use an extension node to send the input types through Substrait plan for validation.
val inputTypeNodeList = originalInputAttributes
.map(attr => ConverterUtils.getTypeNode(attr.dataType, attr.nullable))
.asJava
val extensionNode = ExtensionBuilder.makeAdvancedExtension(
BackendsApiManager.getTransformerApiInstance.packPBMessage(
TypeBuilder.makeStruct(false, inputTypeNodeList).toProtobuf))
RelBuilder.makeFilterRel(input, condExprNode, extensionNode, context, operatorId)
}
}

override protected def withNewChildInternal(newChild: SparkPlan): DeltaFilterExecTransformer =
copy(child = newChild)
}
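
The wiring that actually substitutes these transformers into the plan lives in files not expanded in this view (ReplaceDeltaTransformer and the FilterTransformerRegistor/ProjectTransformerRegister hooks mentioned in the commit messages). Purely as a hypothetical sketch of the kind of pattern match such a rule performs (this is not the actual Gluten registration API), it could look roughly like this:

import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.delta.metric.IncrementMetric
import org.apache.spark.sql.execution.{FilterExec, SparkPlan}

// Hypothetical sketch only: route FilterExec nodes whose condition carries IncrementMetric
// to the Delta-aware transformer, so the wrapped metric is recorded instead of being dropped.
object ReplaceDeltaFilterSketch extends Rule[SparkPlan] {
  override def apply(plan: SparkPlan): SparkPlan = plan.transformUp {
    case filter: FilterExec if filter.condition.find(_.isInstanceOf[IncrementMetric]).isDefined =>
      DeltaFilterExecTransformer(filter.condition, filter.child)
  }
}
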
@@ -0,0 +1,116 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gluten.execution

import org.apache.gluten.backendsapi.BackendsApiManager
import org.apache.gluten.expression.{ConverterUtils, ExpressionConverter, ExpressionTransformer}
import org.apache.gluten.metrics.MetricsUpdater
import org.apache.gluten.substrait.`type`.TypeBuilder
import org.apache.gluten.substrait.SubstraitContext
import org.apache.gluten.substrait.extensions.ExtensionBuilder
import org.apache.gluten.substrait.rel.{RelBuilder, RelNode}

import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, CaseWhen, NamedExpression}
import org.apache.spark.sql.delta.metric.IncrementMetric
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.execution.metric.SQLMetric

import scala.collection.JavaConverters._
import scala.collection.mutable

case class DeltaProjectExecTransformer(projectList: Seq[NamedExpression], child: SparkPlan)
extends ProjectExecTransformerBase(projectList, child) {

private var extraMetrics = mutable.Seq.empty[(String, SQLMetric)]

override def metricsUpdater(): MetricsUpdater =
BackendsApiManager.getMetricsApiInstance.genProjectTransformerMetricsUpdater(
metrics,
extraMetrics.toSeq)

override def getRelNode(
context: SubstraitContext,
projectList: Seq[NamedExpression],
originalInputAttributes: Seq[Attribute],
operatorId: Long,
input: RelNode,
validation: Boolean): RelNode = {
val args = context.registeredFunction
val newProjectList = genNewProjectList(projectList)
val columnarProjExprs: Seq[ExpressionTransformer] = ExpressionConverter
.replaceWithExpressionTransformer(newProjectList, attributeSeq = originalInputAttributes)
val projExprNodeList = columnarProjExprs.map(_.doTransform(args)).asJava
val emitStartIndex = originalInputAttributes.size
if (!validation) {
RelBuilder.makeProjectRel(input, projExprNodeList, context, operatorId, emitStartIndex)
} else {
// Use an extension node to send the input types through Substrait plan for validation.
val inputTypeNodeList = originalInputAttributes
.map(attr => ConverterUtils.getTypeNode(attr.dataType, attr.nullable))
.asJava
val extensionNode = ExtensionBuilder.makeAdvancedExtension(
BackendsApiManager.getTransformerApiInstance.packPBMessage(
TypeBuilder.makeStruct(false, inputTypeNodeList).toProtobuf))
RelBuilder.makeProjectRel(
input,
projExprNodeList,
extensionNode,
context,
operatorId,
emitStartIndex)
}
}

override protected def withNewChildInternal(newChild: SparkPlan): DeltaProjectExecTransformer =
copy(child = newChild)

def genNewProjectList(projectList: Seq[NamedExpression]): Seq[NamedExpression] = {
projectList.map {
case alias: Alias =>
alias.child match {
case IncrementMetric(child, metric) =>
extraMetrics :+= (alias.child.prettyName, metric)
Alias(child = child, name = alias.name)()

case CaseWhen(branches, elseValue) =>
val newBranches = branches.map {
case (expr1, expr2: IncrementMetric) =>
extraMetrics :+= (expr2.prettyName, expr2.metric)
(expr1, expr2.child)
case other => other
}

val newElseValue = elseValue match {
case Some(IncrementMetric(child: IncrementMetric, metric)) =>
extraMetrics :+= (child.prettyName, metric)
extraMetrics :+= (child.prettyName, child.metric)
Some(child.child)
case _ => elseValue
}

Alias(
child = CaseWhen(newBranches, newElseValue),
name = alias.name
)(alias.exprId)

case _ =>
alias
}
case other => other
}
}
}
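
For context on why genNewProjectList special-cases Alias over IncrementMetric and CaseWhen: Delta's row-level commands (MERGE, UPDATE, DELETE) emit projections whose branch values are wrapped in IncrementMetric so that each branch bumps its own counter during row-at-a-time execution. A hand-built example of that shape, with made-up metric and attribute names rather than anything captured from a real plan:

import org.apache.spark.sql.catalyst.expressions.{Alias, AttributeReference, CaseWhen, Literal}
import org.apache.spark.sql.delta.metric.IncrementMetric
import org.apache.spark.sql.execution.metric.SQLMetric
import org.apache.spark.sql.types.BooleanType

object DeltaProjectionShapeSketch {
  // Hypothetical example; metrics are normally created via SQLMetrics.createMetric(sparkContext, ...).
  val numUpdatedRows = new SQLMetric("sum")
  val numCopiedRows = new SQLMetric("sum")
  val numTouchedRows = new SQLMetric("sum")
  val isMatched = AttributeReference("_row_matched_", BooleanType)()

  // Matched rows bump numUpdatedRows; the else value is doubly wrapped so two counters advance.
  val rowAction = Alias(
    CaseWhen(
      Seq(isMatched -> IncrementMetric(Literal(1), numUpdatedRows)),
      Some(IncrementMetric(IncrementMetric(Literal(0), numCopiedRows), numTouchedRows))),
    "rowAction")()

  // genNewProjectList(Seq(rowAction)) strips every wrapper, leaving
  // CASE WHEN _row_matched_ THEN 1 ELSE 0 END AS rowAction, and queues
  // ("increment_metric", numUpdatedRows), ("increment_metric", numTouchedRows) and
  // ("increment_metric", numCopiedRows) in extraMetrics, so the native project operator's
  // output row count is later credited to each of them by ProjectMetricsUpdater.
}
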