Skip to content

Commit d57ee7c

Browse files
committed
Address review feedback: rename ExtractV2Relation to ExtractV2ScanInfo, collapse single-line match
1 parent ea06939 commit d57ee7c

5 files changed

Lines changed: 9 additions & 10 deletions

File tree

sql/catalyst/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Relation.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -290,7 +290,7 @@ object ExtractV2Scan {
290290
Some(scanRelation.scan)
291291
}
292292

293-
object ExtractV2Relation {
293+
object ExtractV2ScanInfo {
294294
def unapply(scanRelation: DataSourceV2ScanRelation)
295295
: Option[(DataSourceV2Relation, Scan, Seq[AttributeReference])] =
296296
Some((scanRelation.relation, scanRelation.scan, scanRelation.output))

sql/core/src/main/scala/org/apache/spark/sql/classic/Dataset.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -60,7 +60,7 @@ import org.apache.spark.sql.execution.aggregate.TypedAggregateExpression
6060
import org.apache.spark.sql.execution.arrow.{ArrowBatchStreamWriter, ArrowConverters}
6161
import org.apache.spark.sql.execution.command._
6262
import org.apache.spark.sql.execution.datasources.LogicalRelationWithTable
63-
import org.apache.spark.sql.execution.datasources.v2.{ExtractV2Relation, ExtractV2Table, FileTable}
63+
import org.apache.spark.sql.execution.datasources.v2.{ExtractV2ScanInfo, ExtractV2Table, FileTable}
6464
import org.apache.spark.sql.execution.python.EvaluatePython
6565
import org.apache.spark.sql.execution.stat.StatFunctions
6666
import org.apache.spark.sql.internal.SQLConf
@@ -1732,7 +1732,7 @@ class Dataset[T] private[sql](
17321732
fr.inputFiles
17331733
case r: HiveTableRelation =>
17341734
r.tableMeta.storage.locationUri.map(_.toString).toArray
1735-
case ExtractV2Relation(ExtractV2Table(table: FileTable), _, _) =>
1735+
case ExtractV2ScanInfo(ExtractV2Table(table: FileTable), _, _) =>
17361736
table.fileIndex.inputFiles
17371737
}.flatten
17381738
files.toSet.toArray

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DataSourceV2Strategy.scala

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -119,7 +119,7 @@ class DataSourceV2Strategy(session: SparkSession) extends Strategy with Predicat
119119
}
120120

121121
override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
122-
case PhysicalOperation(project, filters, ExtractV2Relation(
122+
case PhysicalOperation(project, filters, ExtractV2ScanInfo(
123123
v2Relation, V1ScanWrapper(scan, pushed, pushedDownOperators), output)) =>
124124
val v1Relation = scan.toV1TableScan[BaseRelation with TableScan](session.sqlContext)
125125
if (v1Relation.schema != scan.readSchema()) {
@@ -146,7 +146,7 @@ class DataSourceV2Strategy(session: SparkSession) extends Strategy with Predicat
146146
project, filters, dsScan, needsUnsafeConversion = false) :: Nil
147147

148148
case PhysicalOperation(project, filters,
149-
ExtractV2Relation(_, scan: LocalScan, output)) =>
149+
ExtractV2ScanInfo(_, scan: LocalScan, output)) =>
150150
val localScanExec = LocalTableScanExec(output, scan.rows().toImmutableArraySeq, None)
151151
DataSourceV2Strategy.withProjectAndFilter(
152152
project, filters, localScanExec, needsUnsafeConversion = false) :: Nil
@@ -346,7 +346,7 @@ class DataSourceV2Strategy(session: SparkSession) extends Strategy with Predicat
346346

347347
case DeleteFromTable(relation, condition) =>
348348
relation match {
349-
case ExtractV2Relation(r, _, output) =>
349+
case ExtractV2ScanInfo(r, _, output) =>
350350
val table = r.table
351351
if (SubqueryExpression.hasSubquery(condition)) {
352352
throw QueryCompilationErrors.unsupportedDeleteByConditionWithSubqueryError(condition)

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/V2ScanPartitioningAndOrdering.scala

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ object V2ScanPartitioningAndOrdering extends Rule[LogicalPlan] with Logging {
4141
}
4242

4343
private def partitioning(plan: LogicalPlan) = plan.transformDown {
44-
case d @ ExtractV2Relation(relation, scan: SupportsReportPartitioning, _)
44+
case d @ ExtractV2ScanInfo(relation, scan: SupportsReportPartitioning, _)
4545
if d.keyGroupedPartitioning.isEmpty =>
4646
val catalystPartitioning = scan.outputPartitioning() match {
4747
case kgp: KeyGroupedPartitioning =>
@@ -69,7 +69,7 @@ object V2ScanPartitioningAndOrdering extends Rule[LogicalPlan] with Logging {
6969
}
7070

7171
private def ordering(plan: LogicalPlan) = plan.transformDown {
72-
case d @ ExtractV2Relation(relation, scan: SupportsReportOrdering, _) =>
72+
case d @ ExtractV2ScanInfo(relation, scan: SupportsReportOrdering, _) =>
7373
val ordering =
7474
V2ExpressionUtils.toCatalystOrdering(scan.outputOrdering(), relation, relation.funCatalog)
7575
d.copy(ordering = Some(ordering))

sql/core/src/test/scala/org/apache/spark/sql/collation/CollatedFilterPushDownToParquetSuite.scala

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -257,8 +257,7 @@ class CollatedFilterPushDownToParquetV2Suite extends CollatedFilterPushDownToPar
257257

258258
override def getPushedDownFilters(query: DataFrame): Seq[Filter] = {
259259
query.queryExecution.optimizedPlan.collectFirst {
260-
case PhysicalOperation(_, _,
261-
ExtractV2Scan(scan: ParquetScan)) =>
260+
case PhysicalOperation(_, _, ExtractV2Scan(scan: ParquetScan)) =>
262261
scan.pushedFilters.toSeq
263262
}.getOrElse(Seq.empty)
264263
}

0 commit comments

Comments (0)