
Make shims.v2.ParquetCachedBatchSerializer as protected (#4560)
* Change ParquetCachedBatchSerializer and other classes to protected.

Signed-off-by: Niranjan Artal <nartal@nvidia.com>
nartal1 authored Jan 21, 2022
1 parent d6e5eb7 commit a409d0e
Showing 2 changed files with 4 additions and 4 deletions.
@@ -259,7 +259,7 @@ private case class CloseableColumnBatchIterator(iter: Iterator[ColumnarBatch]) e
 /**
  * This class assumes, the data is Columnar and the plugin is on
  */
-class ParquetCachedBatchSerializer extends GpuCachedBatchSerializer with Arm {
+protected class ParquetCachedBatchSerializer extends GpuCachedBatchSerializer with Arm {

   override def supportsColumnarInput(schema: Seq[Attribute]): Boolean = true
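The hunk above removes `ParquetCachedBatchSerializer` from the plugin's public surface: once the class is marked `protected` its visibility is restricted, so callers outside its own scope are no longer expected to reference it directly, and the test file in the next hunk correspondingly moves into the same `shims.v2` package. As a rough illustration of the mechanism, here is a minimal, hypothetical sketch of package-scoped visibility in Scala; none of these names come from the spark-rapids codebase:

```scala
// Minimal, hypothetical sketch of package-scoped visibility in Scala.
// None of these names come from the spark-rapids codebase.
package com.example.cache {

  // Visible anywhere under com.example, but not from unrelated packages.
  private[example] class CachedBatchWriter {
    def write(values: Seq[Int]): Int = values.sum
  }
}

package com.example.app {

  object Demo {
    def main(args: Array[String]): Unit = {
      // Compiles here because com.example.app sits inside com.example.
      val writer = new com.example.cache.CachedBatchWriter
      println(writer.write(Seq(1, 2, 3))) // prints 6
    }
  }
}

// The same reference from an unrelated package (say org.other) would fail to
// compile, because CachedBatchWriter is not visible outside com.example.
```

The commit itself uses `protected` rather than a `private[...]` qualifier, but the intent reads the same: the serializer stops being a class that user code constructs directly.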
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2021, NVIDIA CORPORATION.
+ * Copyright (c) 2021-2022, NVIDIA CORPORATION.
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -14,12 +14,12 @@
  * limitations under the License.
  */

-package com.nvidia.spark.rapids
+package com.nvidia.spark.rapids.shims.v2

 import scala.collection.mutable

 import ai.rapids.cudf.{ColumnVector, DType, Table, TableWriter}
-import com.nvidia.spark.rapids.shims.v2.{ParquetCachedBatchSerializer, ParquetOutputFileFormat}
+import com.nvidia.spark.rapids._
 import org.apache.hadoop.mapreduce.{RecordWriter, TaskAttemptContext}
 import org.mockito.ArgumentMatchers._
 import org.mockito.Mockito._
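The second changed file appears to be a test suite (it imports Mockito). It moves from `com.nvidia.spark.rapids` into `com.nvidia.spark.rapids.shims.v2`, so `ParquetCachedBatchSerializer` and `ParquetOutputFileFormat` now resolve from within their own package, and the explicit cross-package import can be replaced by a wildcard import of the remaining `com.nvidia.spark.rapids` types. A minimal sketch of the same pattern, reusing the hypothetical `CachedBatchWriter` from above (ScalaTest assumed; this is not the actual test file):

```scala
// Hypothetical sketch, not the actual suite: a test that shares the package of a
// package-restricted class can use it without any cross-package import.
package com.example.cache

import org.scalatest.funsuite.AnyFunSuite

class CachedBatchWriterSuite extends AnyFunSuite {
  test("sums the values it writes") {
    // Resolves directly because the suite lives in com.example.cache itself.
    val writer = new CachedBatchWriter
    assert(writer.write(Seq(1, 2, 3)) == 6)
  }
}
```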
