
Commit b8657a9

pan3793 authored and dongjoon-hyun committed
[SPARK-53523][SQL] Named parameters respect spark.sql.caseSensitive
### What changes were proposed in this pull request?

As the title: make named parameters respect `spark.sql.caseSensitive`.

### Why are the changes needed?

The issue was originally found during apache/iceberg#13106.

I don't see any special reason that named parameters should always be case-sensitive (correct me if I'm wrong). I tested PostgreSQL, and its named parameters are case-insensitive by default:

```
psql (17.6 (Debian 17.6-1.pgdg13+1))
Type "help" for help.

postgres=# CREATE FUNCTION concat_lower_or_upper(a text, b text, uppercase boolean DEFAULT false)
           RETURNS text AS
           $$ SELECT CASE WHEN $3 THEN UPPER($1 || ' ' || $2) ELSE LOWER($1 || ' ' || $2) END; $$
           LANGUAGE SQL IMMUTABLE STRICT;
CREATE FUNCTION
postgres=# SELECT concat_lower_or_upper('Hello', 'World', true);
 concat_lower_or_upper
-----------------------
 HELLO WORLD
(1 row)

postgres=# SELECT concat_lower_or_upper(a => 'Hello', b => 'World');
 concat_lower_or_upper
-----------------------
 hello world
(1 row)

postgres=# SELECT concat_lower_or_upper(A => 'Hello', b => 'World');
 concat_lower_or_upper
-----------------------
 hello world
(1 row)

postgres=#
```

### Does this PR introduce _any_ user-facing change?

Yes, named parameters used by functions and procedures now respect `spark.sql.caseSensitive` instead of always being case-sensitive.

### How was this patch tested?

Added UT.

### Was this patch authored or co-authored using generative AI tooling?

No.

Closes #52269 from pan3793/SPARK-53523.

Authored-by: Cheng Pan <[email protected]>
Signed-off-by: Dongjoon Hyun <[email protected]>
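For illustration only (not part of the commit message): a minimal Scala sketch of the user-facing behavior, assuming a Spark build that includes this change and using the built-in `mask` function, which declares named parameters such as `upperChar` and `lowerChar`. With `spark.sql.caseSensitive=false` (the default), a differently-cased argument name should now resolve to the declared parameter.

```scala
import org.apache.spark.sql.SparkSession

object NamedParamCaseSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical illustration: run against a build containing this change.
    val spark = SparkSession.builder().master("local[1]").getOrCreate()
    // false is the default; shown explicitly for clarity.
    spark.conf.set("spark.sql.caseSensitive", "false")
    // `mask` declares `upperChar`; under case-insensitive analysis the
    // differently-cased `UpperChar` named argument should now match it.
    spark.sql("SELECT mask('AbCD123-@$#', UpperChar => 'Q', lowerChar => 'q')").show()
    spark.stop()
  }
}
```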
1 parent c0acf45 · commit b8657a9

File tree

6 files changed: +214 −73 lines changed


sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/Analyzer.scala

Lines changed: 2 additions & 1 deletion
```diff
@@ -2360,7 +2360,8 @@ class Analyzer(override val catalogManager: CatalogManager) extends RuleExecutor
         val inputType = extractInputType(args)
         val bound = unbound.bind(inputType)
         validateParameterModes(bound)
-        val rearrangedArgs = NamedParametersSupport.defaultRearrange(bound, args)
+        val rearrangedArgs =
+          NamedParametersSupport.defaultRearrange(bound, args, SQLConf.get.resolver)
         Call(ResolvedProcedure(catalog, ident, bound), rearrangedArgs, execute)
       }
```
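For context, `SQLConf.get.resolver` is Spark's name-equality test of type `Resolver = (String, String) => Boolean`, which switches between case-sensitive and case-insensitive comparison depending on `spark.sql.caseSensitive`. A self-contained sketch of that behavior (plain Scala stand-in, not the Spark code):

```scala
// Minimal stand-in for the semantics of org.apache.spark.sql.catalyst.analysis.Resolver.
object ResolverSketch {
  type Resolver = (String, String) => Boolean

  val caseSensitiveResolution: Resolver = (a, b) => a == b
  val caseInsensitiveResolution: Resolver = (a, b) => a.equalsIgnoreCase(b)

  // Mirrors the idea behind SQLConf.get.resolver: pick the comparison
  // according to the case-sensitivity setting.
  def resolver(caseSensitive: Boolean): Resolver =
    if (caseSensitive) caseSensitiveResolution else caseInsensitiveResolution

  def main(args: Array[String]): Unit = {
    println(resolver(caseSensitive = true)("upperChar", "UpperChar"))  // false
    println(resolver(caseSensitive = false)("upperChar", "UpperChar")) // true
  }
}
```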

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/FunctionRegistry.scala

Lines changed: 3 additions & 2 deletions
```diff
@@ -35,6 +35,7 @@ import org.apache.spark.sql.catalyst.expressions.xml._
 import org.apache.spark.sql.catalyst.plans.logical.{FunctionBuilderBase, Generate, LogicalPlan, OneRowRelation, Range}
 import org.apache.spark.sql.catalyst.trees.TreeNodeTag
 import org.apache.spark.sql.errors.QueryCompilationErrors
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
 import org.apache.spark.util.ArrayImplicits._
@@ -1024,9 +1025,9 @@
       name: String,
       builder: T,
       expressions: Seq[Expression]) : Seq[Expression] = {
-    val rearrangedExpressions = if (!builder.functionSignature.isEmpty) {
+    val rearrangedExpressions = if (builder.functionSignature.isDefined) {
       val functionSignature = builder.functionSignature.get
-      builder.rearrange(functionSignature, expressions, name)
+      builder.rearrange(functionSignature, expressions, name, SQLConf.get.resolver)
     } else {
       expressions
     }
```

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala

Lines changed: 1 addition & 1 deletion
```diff
@@ -1936,7 +1936,7 @@ class SessionCatalog(
     }
 
     NamedParametersSupport.defaultRearrange(
-      FunctionSignature(paramNames), expressions, functionName)
+      FunctionSignature(paramNames), expressions, functionName, SQLConf.get.resolver)
   }
 
   /**
```
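The `SessionCatalog` path covers SQL user-defined functions. A hypothetical illustration (the `area` UDF and its parameters are made up), assuming a Spark version whose `CREATE FUNCTION ... RETURN` SQL UDFs accept named arguments:

```scala
import org.apache.spark.sql.SparkSession

object SqlUdfNamedParamSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().master("local[1]").getOrCreate()
    // Define a SQL UDF with two declared parameters.
    spark.sql(
      "CREATE TEMPORARY FUNCTION area(width DOUBLE, height DOUBLE) RETURNS DOUBLE RETURN width * height")
    // With spark.sql.caseSensitive=false, `Width` should resolve to the declared `width`.
    spark.sql("SELECT area(Width => 2.0, height => 3.0)").show()
    spark.stop()
  }
}
```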

sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/FunctionBuilderBase.scala

Lines changed: 25 additions & 15 deletions
```diff
@@ -16,6 +16,7 @@
  */
 package org.apache.spark.sql.catalyst.plans.logical
 
+import org.apache.spark.sql.catalyst.analysis.Resolver
 import org.apache.spark.sql.catalyst.expressions.{Expression, NamedArgumentExpression}
 import org.apache.spark.sql.catalyst.util.ResolveDefaultColumns
 import org.apache.spark.sql.connector.catalog.procedures.{BoundProcedure, ProcedureParameter}
@@ -67,8 +68,10 @@
   def rearrange(
       expectedSignature: FunctionSignature,
       providedArguments: Seq[Expression],
-      functionName: String) : Seq[Expression] = {
-    NamedParametersSupport.defaultRearrange(expectedSignature, providedArguments, functionName)
+      functionName: String,
+      resolver: Resolver) : Seq[Expression] = {
+    NamedParametersSupport.defaultRearrange(
+      expectedSignature, providedArguments, functionName, resolver)
   }
 
   def build(funcName: String, expressions: Seq[Expression]): T
@@ -89,15 +92,17 @@ object NamedParametersSupport {
    */
   def splitAndCheckNamedArguments(
       args: Seq[Expression],
-      functionName: String): (Seq[Expression], Seq[NamedArgumentExpression]) = {
+      functionName: String,
+      resolver: Resolver):
+    (Seq[Expression], Seq[NamedArgumentExpression]) = {
     val (positionalArgs, namedArgs) = args.span(!_.isInstanceOf[NamedArgumentExpression])
 
     val namedParametersSet = collection.mutable.Set[String]()
 
     (positionalArgs,
       namedArgs.zipWithIndex.map {
         case (namedArg @ NamedArgumentExpression(parameterName, _), _) =>
-          if (namedParametersSet.contains(parameterName)) {
+          if (namedParametersSet.exists(resolver(_, parameterName))) {
            throw QueryCompilationErrors.doubleNamedArgumentReference(
              functionName, parameterName)
          }
```
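The check above now treats two named arguments as duplicates whenever the resolver considers their names equal. A small stand-alone sketch of that idea (not the Spark code):

```scala
// Returns the first argument name that duplicates an earlier one under the resolver.
object DuplicateCheckSketch {
  type Resolver = (String, String) => Boolean

  def firstDuplicate(argNames: Seq[String], resolver: Resolver): Option[String] = {
    val seen = collection.mutable.ArrayBuffer.empty[String]
    argNames.find { name =>
      val dup = seen.exists(resolver(_, name))
      seen += name
      dup
    }
  }

  def main(args: Array[String]): Unit = {
    val caseInsensitive: Resolver = (a, b) => a.equalsIgnoreCase(b)
    println(firstDuplicate(Seq("a", "b", "A"), caseInsensitive)) // Some(A)
    println(firstDuplicate(Seq("a", "b", "A"), _ == _))          // None
  }
}
```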
```diff
@@ -123,15 +128,20 @@
   final def defaultRearrange(
       functionSignature: FunctionSignature,
       args: Seq[Expression],
-      functionName: String): Seq[Expression] = {
-    defaultRearrange(functionName, functionSignature.parameters, args)
+      functionName: String,
+      resolver: Resolver): Seq[Expression] = {
+    defaultRearrange(functionName, functionSignature.parameters, args, resolver)
   }
 
-  final def defaultRearrange(procedure: BoundProcedure, args: Seq[Expression]): Seq[Expression] = {
+  final def defaultRearrange(
+      procedure: BoundProcedure,
+      args: Seq[Expression],
+      resolver: Resolver): Seq[Expression] = {
     defaultRearrange(
       procedure.name,
       procedure.parameters.map(toInputParameter).toSeq,
-      args)
+      args,
+      resolver)
   }
 
   private def toInputParameter(param: ProcedureParameter): InputParameter = {
@@ -144,12 +154,13 @@
   private def defaultRearrange(
       routineName: String,
       parameters: Seq[InputParameter],
-      args: Seq[Expression]): Seq[Expression] = {
+      args: Seq[Expression],
+      resolver: Resolver): Seq[Expression] = {
     if (parameters.dropWhile(_.default.isEmpty).exists(_.default.isEmpty)) {
       throw QueryCompilationErrors.unexpectedRequiredParameter(routineName, parameters)
     }
 
-    val (positionalArgs, namedArgs) = splitAndCheckNamedArguments(args, routineName)
+    val (positionalArgs, namedArgs) = splitAndCheckNamedArguments(args, routineName, resolver)
     val namedParameters: Seq[InputParameter] = parameters.drop(positionalArgs.size)
 
     // The following loop checks for the following:
@@ -161,11 +172,11 @@
 
     namedArgs.foreach { namedArg =>
       val parameterName = namedArg.key
-      if (!parameterNamesSet.contains(parameterName)) {
+      if (!parameterNamesSet.exists(resolver(_, parameterName))) {
         throw QueryCompilationErrors.unrecognizedParameterName(routineName, namedArg.key,
           parameterNamesSet.toSeq)
       }
-      if (positionalParametersSet.contains(parameterName)) {
+      if (positionalParametersSet.exists(resolver(_, parameterName))) {
         throw QueryCompilationErrors.positionalAndNamedArgumentDoubleReference(
           routineName, namedArg.key)
       }
@@ -187,14 +198,13 @@
     // We rearrange named arguments to match their positional order.
     val rearrangedNamedArgs: Seq[Expression] = namedParameters.zipWithIndex.map {
       case (param, index) =>
-        namedArgMap.getOrElse(
-          param.name,
+        namedArgMap.view.filterKeys(resolver(_, param.name)).headOption.map(_._2).getOrElse {
           if (param.default.isEmpty) {
             throw QueryCompilationErrors.requiredParameterNotFound(routineName, param.name, index)
           } else {
             param.default.get
           }
-        )
+        }
     }
     val rearrangedArgs = positionalArgs ++ rearrangedNamedArgs
     assert(rearrangedArgs.size == parameters.size)
```
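Taken together, the rearrangement now looks up each declared parameter among the named arguments through the resolver and falls back to the parameter's default when no argument matches. A self-contained sketch of that logic (simplified; not the Spark implementation):

```scala
// Simplified, resolver-aware named-argument rearrangement.
object RearrangeSketch {
  type Resolver = (String, String) => Boolean
  final case class Param(name: String, default: Option[String] = None)

  def rearrange(params: Seq[Param], namedArgs: Map[String, String], resolver: Resolver): Seq[String] =
    params.map { p =>
      namedArgs.collectFirst { case (k, v) if resolver(k, p.name) => v } // resolver-aware lookup
        .orElse(p.default)                                               // fall back to the default
        .getOrElse(sys.error(s"required parameter ${p.name} not found"))
    }

  def main(args: Array[String]): Unit = {
    val params = Seq(Param("a"), Param("b"), Param("uppercase", Some("false")))
    val caseInsensitive: Resolver = (a, b) => a.equalsIgnoreCase(b)
    // "A" resolves to the declared parameter "a" under case-insensitive analysis.
    println(rearrange(params, Map("A" -> "Hello", "b" -> "World"), caseInsensitive))
    // List(Hello, World, false)
  }
}
```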
