Fix databricks build for AQE support (NVIDIA#560)
Signed-off-by: Thomas Graves <tgraves@nvidia.com>
tgravescs authored Aug 14, 2020
1 parent 9f7a475 commit f760e36
Showing 3 changed files with 34 additions and 2 deletions.
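The change is about adaptive query execution (AQE): once AQE is enabled, broadcast exchanges show up wrapped in BroadcastQueryStageExec nodes, which is what the spark300db shim below is updated to handle. As rough context for the diffs, a minimal sketch of the configuration under which these code paths are exercised; the property names are standard Spark and RAPIDS Accelerator settings, not something introduced by this commit:

// Sketch only: with these settings, AQE wraps broadcast exchanges in query stages.
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder()
  .config("spark.plugins", "com.nvidia.spark.SQLPlugin") // RAPIDS Accelerator plugin
  .config("spark.sql.adaptive.enabled", "true")          // turn on adaptive query execution
  .getOrCreate()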
@@ -0,0 +1,29 @@
/*
* Copyright (c) 2020, NVIDIA CORPORATION.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.nvidia.spark.rapids.shims.spark300db

import org.apache.spark.sql.catalyst.plans.physical.BroadcastMode
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.rapids.execution.{GpuBroadcastExchangeExec, GpuBroadcastExchangeExecBase}

case class GpuBroadcastExchangeExec(
    mode: BroadcastMode,
    child: SparkPlan) extends GpuBroadcastExchangeExecBase(mode, child) {

  override def doCanonicalize(): SparkPlan = {
    GpuBroadcastExchangeExec(mode.canonicalized, child.canonicalized)
  }
}
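The doCanonicalize override ties into exchange reuse under AQE: Spark considers two exchanges interchangeable when their canonicalized plans are equal, and the override makes the shim's exchange canonicalize to a GpuBroadcastExchangeExec with a canonicalized mode and child. A minimal sketch of that comparison, using only the class defined above (the helper name canBeReused is illustrative, not part of the commit):

// Sketch: exchange reuse compares canonicalized plans, the same check Spark's
// sameResult performs; `canonicalized` invokes the doCanonicalize override above.
def canBeReused(a: GpuBroadcastExchangeExec, b: GpuBroadcastExchangeExec): Boolean =
  a.canonicalized == b.canonicalized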
@@ -26,10 +26,11 @@ import org.apache.spark.sql.catalyst.optimizer.{BuildLeft, BuildRight, BuildSide
import org.apache.spark.sql.catalyst.plans.JoinType
import org.apache.spark.sql.catalyst.plans.physical.{BroadcastDistribution, Distribution, UnspecifiedDistribution}
import org.apache.spark.sql.execution.{BinaryExecNode, SparkPlan}
import org.apache.spark.sql.execution.adaptive.BroadcastQueryStageExec
import org.apache.spark.sql.execution.exchange.ReusedExchangeExec
import org.apache.spark.sql.execution.joins.{BroadcastHashJoinExec, HashedRelationBroadcastMode}
import org.apache.spark.sql.execution.metric.{SQLMetric, SQLMetrics}
import org.apache.spark.sql.rapids.execution.{GpuBroadcastExchangeExec, SerializeConcatHostBuffersDeserializeBatch}
import org.apache.spark.sql.rapids.execution.SerializeConcatHostBuffersDeserializeBatch
import org.apache.spark.sql.vectorized.ColumnarBatch

/**
@@ -113,6 +114,9 @@ case class GpuBroadcastHashJoinExec(
  }

  def broadcastExchange: GpuBroadcastExchangeExec = buildPlan match {
    case BroadcastQueryStageExec(_, gpu: GpuBroadcastExchangeExec, _) => gpu
    case BroadcastQueryStageExec(_, reused: ReusedExchangeExec, _) =>
      reused.child.asInstanceOf[GpuBroadcastExchangeExec]
    case gpu: GpuBroadcastExchangeExec => gpu
    case reused: ReusedExchangeExec => reused.child.asInstanceOf[GpuBroadcastExchangeExec]
  }
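The two BroadcastQueryStageExec cases are the AQE-facing part of the fix: with adaptive execution on, the Databricks runtime hands the join a query-stage wrapper (the three-argument pattern above) whose payload is either the GPU broadcast exchange itself or a ReusedExchangeExec pointing at one, so the join has to look through both layers. The same unwrapping can be read as a small standalone helper; this is a hedged sketch, with the name unwrapGpuBroadcast and the failure branch being illustrative rather than part of the commit:

  // Sketch: peel AQE's query-stage and exchange-reuse wrappers to reach the GPU exchange.
  private def unwrapGpuBroadcast(plan: SparkPlan): GpuBroadcastExchangeExec = plan match {
    case BroadcastQueryStageExec(_, wrapped, _) => unwrapGpuBroadcast(wrapped) // AQE stage wrapper
    case ReusedExchangeExec(_, gpu: GpuBroadcastExchangeExec) => gpu           // reused broadcast
    case gpu: GpuBroadcastExchangeExec => gpu                                  // direct GPU exchange
    case other => throw new IllegalStateException(s"unexpected broadcast plan: $other")
  }

The commit keeps the four explicit cases instead, which matches how the pre-existing non-AQE cases were already written and avoids the recursion.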
@@ -31,7 +31,6 @@ import org.apache.spark.sql.execution.joins.{BroadcastHashJoinExec, BroadcastNes
import org.apache.spark.sql.execution.joins.ShuffledHashJoinExec
import org.apache.spark.sql.rapids.{GpuFileSourceScanExec, GpuTimeSub}
import org.apache.spark.sql.rapids.execution.GpuBroadcastNestedLoopJoinExecBase
import org.apache.spark.sql.rapids.shims.spark300db._
import org.apache.spark.sql.types._
import org.apache.spark.storage.{BlockId, BlockManagerId}

