
Commit 9895738

[SPARK-51012][SQL] Remove SparkStrategy from Connect Shims
### What changes were proposed in this pull request?

This PR removes SparkStrategy from the Connect shims.

### Why are the changes needed?

SparkStrategy in the Connect shims is causing some headaches for Scala reflection-based tests in Catalyst (see apache#48818). This was the smallest change that fixes this particular issue. Another approach would have been to split the shims project into a sql-shims and a core-shims project.

### Does this PR introduce _any_ user-facing change?

No.

### How was this patch tested?

Existing tests. I manually executed `org.apache.spark.sql.catalyst.encoders.EncoderResolutionSuite` in both SBT and Maven.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes apache#49701 from hvanhovell/SPARK-51012.

Authored-by: Herman van Hovell <[email protected]>
Signed-off-by: Herman van Hovell <[email protected]>
1 parent dd51f0e commit 9895738

15 files changed (+29, -24 lines)

sql/api/src/main/scala/org/apache/spark/sql/package.scala

Lines changed: 0 additions & 13 deletions
@@ -16,9 +16,6 @@
  */
 package org.apache.spark
 
-import org.apache.spark.annotation.{DeveloperApi, Unstable}
-import org.apache.spark.sql.execution.SparkStrategy
-
 /**
  * Allows the execution of relational queries, including those expressed in SQL using Spark.
  *
@@ -33,16 +30,6 @@ import org.apache.spark.sql.execution.SparkStrategy
  */
 package object sql {
 
-  /**
-   * Converts a logical plan into zero or more SparkPlans. This API is exposed for experimenting
-   * with the query planner and is not designed to be stable across spark releases. Developers
-   * writing libraries should instead consider using the stable APIs provided in
-   * [[org.apache.spark.sql.sources]]
-   */
-  @DeveloperApi
-  @Unstable
-  type Strategy = SparkStrategy
-
   type DataFrame = Dataset[Row]
 
   /**
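For downstream code, the visible effect of this move is a one-line import change. A minimal before/after sketch (assuming the code compiles against the classic, non-Connect Spark SQL implementation):

// Before this change, the alias was exposed from the sql package object:
// import org.apache.spark.sql.Strategy

// After this change, it lives in the classic package object instead:
import org.apache.spark.sql.classic.Strategy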

sql/connect/shims/src/main/scala/org/apache/spark/shims.scala

Lines changed: 0 additions & 1 deletion
@@ -35,7 +35,6 @@ package sql {
 
   package execution {
     class QueryExecution
-    class SparkStrategy
   }
   package internal {
     class SharedState

sql/core/src/main/scala/org/apache/spark/sql/ExperimentalMethods.scala

Lines changed: 1 addition & 0 deletions
@@ -20,6 +20,7 @@ package org.apache.spark.sql
 import org.apache.spark.annotation.{Experimental, Unstable}
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.rules.Rule
+import org.apache.spark.sql.classic.Strategy
 
 /**
  * :: Experimental ::
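`ExperimentalMethods.extraStrategies` is a `Seq[Strategy]`, so custom planner strategies now compile against the relocated alias. A minimal sketch, with an illustrative do-nothing strategy (the name `NoopStrategy` is ours, not part of the patch):

import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.classic.Strategy
import org.apache.spark.sql.execution.SparkPlan

// Illustrative strategy: returning Nil tells the planner to fall
// through to the next strategy in its list.
object NoopStrategy extends Strategy {
  override def apply(plan: LogicalPlan): Seq[SparkPlan] = Nil
}

Such an object could then be appended to `spark.experimental.extraStrategies` on a classic session.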

sql/core/src/main/scala/org/apache/spark/sql/SparkSessionExtensions.scala

Lines changed: 1 addition & 0 deletions
@@ -28,6 +28,7 @@ import org.apache.spark.sql.catalyst.expressions.ExpressionInfo
 import org.apache.spark.sql.catalyst.parser.ParserInterface
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.rules.Rule
+import org.apache.spark.sql.classic.Strategy
 import org.apache.spark.sql.execution.{ColumnarRule, SparkPlan}
 
 /**
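The same relocation applies to strategies injected through the extension API. A hedged sketch of a provider class (the name `MyExtensions` is hypothetical; such a class would be registered via the `spark.sql.extensions` configuration key):

import org.apache.spark.sql.SparkSessionExtensions
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.classic.Strategy
import org.apache.spark.sql.execution.SparkPlan

// Hypothetical provider that wires a do-nothing strategy into the planner.
class MyExtensions extends (SparkSessionExtensions => Unit) {
  override def apply(extensions: SparkSessionExtensions): Unit = {
    // injectPlannerStrategy expects a builder from a session to a Strategy;
    // this illustrative strategy matches nothing and falls through.
    extensions.injectPlannerStrategy(_ => new Strategy {
      override def apply(plan: LogicalPlan): Seq[SparkPlan] = Nil
    })
  }
}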

sql/core/src/main/scala/org/apache/spark/sql/classic/package.scala

Lines changed: 13 additions & 0 deletions
@@ -17,6 +17,9 @@
 
 package org.apache.spark.sql
 
+import org.apache.spark.annotation.{DeveloperApi, Unstable}
+import org.apache.spark.sql.execution.SparkStrategy
+
 /**
  * Allows the execution of relational queries, including those expressed in SQL using Spark.
  *
@@ -30,4 +33,14 @@ package org.apache.spark.sql
  */
 package object classic {
   type DataFrame = Dataset[Row]
+
+  /**
+   * Converts a logical plan into zero or more SparkPlans. This API is exposed for experimenting
+   * with the query planner and is not designed to be stable across spark releases. Developers
+   * writing libraries should instead consider using the stable APIs provided in
+   * [[org.apache.spark.sql.sources]]
+   */
+  @DeveloperApi
+  @Unstable
+  type Strategy = SparkStrategy
 }

sql/core/src/main/scala/org/apache/spark/sql/execution/adaptive/LogicalQueryStageStrategy.scala

Lines changed: 1 addition & 1 deletion
@@ -17,11 +17,11 @@
 
 package org.apache.spark.sql.execution.adaptive
 
-import org.apache.spark.sql.Strategy
 import org.apache.spark.sql.catalyst.optimizer.{BuildLeft, BuildRight}
 import org.apache.spark.sql.catalyst.planning.{ExtractEquiJoinKeys, ExtractSingleColumnNullAwareAntiJoin}
 import org.apache.spark.sql.catalyst.plans.LeftAnti
 import org.apache.spark.sql.catalyst.plans.logical.{Join, LogicalPlan}
+import org.apache.spark.sql.classic.Strategy
 import org.apache.spark.sql.execution.{joins, SparkPlan}
 import org.apache.spark.sql.execution.joins.{BroadcastHashJoinExec, BroadcastNestedLoopJoinExec}
 

sql/core/src/main/scala/org/apache/spark/sql/execution/command/v2/V2CommandStrategy.scala

Lines changed: 1 addition & 1 deletion
@@ -17,10 +17,10 @@
 
 package org.apache.spark.sql.execution.command.v2
 
-import org.apache.spark.sql.Strategy
 import org.apache.spark.sql.catalyst.analysis.ResolvedIdentifier
 import org.apache.spark.sql.catalyst.expressions.VariableReference
 import org.apache.spark.sql.catalyst.plans.logical._
+import org.apache.spark.sql.classic.Strategy
 import org.apache.spark.sql.execution.SparkPlan
 
 object V2CommandStrategy extends Strategy {

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DataSourceStrategy.scala

Lines changed: 2 additions & 2 deletions
@@ -27,7 +27,7 @@ import org.apache.hadoop.fs.Path
 import org.apache.spark.internal.{Logging, MDC}
 import org.apache.spark.internal.LogKeys.PREDICATES
 import org.apache.spark.rdd.RDD
-import org.apache.spark.sql.{Row, SaveMode, Strategy}
+import org.apache.spark.sql.{Row, SaveMode}
 import org.apache.spark.sql.catalyst.{expressions, CatalystTypeConverters, InternalRow, QualifiedTableName, SQLConfHelper}
 import org.apache.spark.sql.catalyst.CatalystTypeConverters.convertToScala
 import org.apache.spark.sql.catalyst.analysis._
@@ -41,7 +41,7 @@ import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.catalyst.streaming.StreamingRelationV2
 import org.apache.spark.sql.catalyst.types.DataTypeUtils
 import org.apache.spark.sql.catalyst.util.{GeneratedColumn, IdentityColumn, ResolveDefaultColumns, V2ExpressionBuilder}
-import org.apache.spark.sql.classic.SparkSession
+import org.apache.spark.sql.classic.{SparkSession, Strategy}
 import org.apache.spark.sql.connector.catalog.{SupportsRead, V1Table}
 import org.apache.spark.sql.connector.catalog.TableCapability._
 import org.apache.spark.sql.connector.expressions.{Expression => V2Expression, NullOrdering, SortDirection, SortOrder => V2SortOrder, SortValue}

sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/FileSourceStrategy.scala

Lines changed: 1 addition & 0 deletions
@@ -31,6 +31,7 @@ import org.apache.spark.sql.catalyst.planning.ScanOperation
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.trees.TreePattern.{PLAN_EXPRESSION, SCALAR_SUBQUERY}
 import org.apache.spark.sql.catalyst.types.DataTypeUtils
+import org.apache.spark.sql.classic.Strategy
 import org.apache.spark.sql.execution.{FileSourceScanExec, SparkPlan}
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.{DoubleType, FloatType, StructType}

sql/core/src/main/scala/org/apache/spark/sql/internal/BaseSessionStateBuilder.scala

Lines changed: 2 additions & 2 deletions
@@ -17,7 +17,7 @@
 package org.apache.spark.sql.internal
 
 import org.apache.spark.annotation.Unstable
-import org.apache.spark.sql.{DataSourceRegistration, ExperimentalMethods, SparkSessionExtensions, Strategy, UDTFRegistration}
+import org.apache.spark.sql.{DataSourceRegistration, ExperimentalMethods, SparkSessionExtensions, UDTFRegistration}
 import org.apache.spark.sql.artifact.ArtifactManager
 import org.apache.spark.sql.catalyst.analysis.{Analyzer, EvalSubqueriesForTimeTravel, FunctionRegistry, InvokeProcedures, ReplaceCharWithVarchar, ResolveDataSource, ResolveSessionCatalog, ResolveTranspose, TableFunctionRegistry}
 import org.apache.spark.sql.catalyst.analysis.resolver.ResolverExtension
@@ -27,7 +27,7 @@ import org.apache.spark.sql.catalyst.optimizer.Optimizer
 import org.apache.spark.sql.catalyst.parser.ParserInterface
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.rules.Rule
-import org.apache.spark.sql.classic.{SparkSession, StreamingQueryManager, UDFRegistration}
+import org.apache.spark.sql.classic.{SparkSession, Strategy, StreamingQueryManager, UDFRegistration}
 import org.apache.spark.sql.connector.catalog.CatalogManager
 import org.apache.spark.sql.errors.QueryCompilationErrors
 import org.apache.spark.sql.execution.{ColumnarRule, CommandExecutionMode, QueryExecution, SparkOptimizer, SparkPlanner, SparkSqlParser}
