From fbd02378383517dcaec4f6616c1b10af6801eaaf Mon Sep 17 00:00:00 2001 From: Teodor Djelic <130703036+TeodorDjelic@users.noreply.github.com> Date: Wed, 10 Sep 2025 16:01:45 +0200 Subject: [PATCH 01/10] Removed exception throwing for continue handlers, and added tests; --- .../sql/catalyst/parser/AstBuilder.scala | 8 +- .../parser/SqlScriptingParserSuite.scala | 320 ++++++++++++++++-- .../SqlScriptingInterpreterSuite.scala | 139 +++++++- 3 files changed, 441 insertions(+), 26 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala index 4e930280381c1..6e285e3dd01bc 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala @@ -282,11 +282,13 @@ class AstBuilder extends DataTypeAstBuilder parsingCtx: SqlScriptingParsingContext): ExceptionHandler = { val exceptionHandlerTriggers = visitConditionValuesImpl(ctx.conditionValues()) - if (Option(ctx.CONTINUE()).isDefined) { - throw SqlScriptingErrors.continueHandlerNotSupported(CurrentOrigin.get) + val handlerType = if (Option(ctx.CONTINUE()).isDefined) { + ExceptionHandlerType.CONTINUE + } + else { + ExceptionHandlerType.EXIT } - val handlerType = ExceptionHandlerType.EXIT val body = if (Option(ctx.beginEndCompoundBlock()).isDefined) { visitBeginEndCompoundBlockImpl( ctx.beginEndCompoundBlock(), diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala index 9902374ce8e9b..dd273b312a53f 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala @@ -2842,12 +2842,12 @@ class 
SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { assert(exception.origin.line.contains(6)) } - test("continue handler not supported") { + test("declare exit handler for qualified condition name that is not supported") { val sqlScript = """ |BEGIN | DECLARE OR REPLACE flag INT = -1; - | DECLARE CONTINUE HANDLER FOR SQLSTATE '22012' + | DECLARE EXIT HANDLER FOR qualified.condition.name | BEGIN | SET flag = 1; | END; @@ -2859,16 +2859,16 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { exception = intercept[SqlScriptingException] { parsePlan(sqlScript) }, - condition = "UNSUPPORTED_FEATURE.CONTINUE_EXCEPTION_HANDLER", - parameters = Map.empty) + condition = "INVALID_HANDLER_DECLARATION.CONDITION_NOT_FOUND", + parameters = Map("condition" -> "QUALIFIED.CONDITION.NAME")) } - test("declare handler for qualified condition name that is not supported") { + test("declare continue handler for qualified condition name that is not supported") { val sqlScript = """ |BEGIN | DECLARE OR REPLACE flag INT = -1; - | DECLARE EXIT HANDLER FOR qualified.condition.name + | DECLARE CONTINUE HANDLER FOR qualified.condition.name | BEGIN | SET flag = 1; | END; @@ -2884,7 +2884,7 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { parameters = Map("condition" -> "QUALIFIED.CONDITION.NAME")) } - test("declare handler for undefined condition") { + test("declare exit handler for undefined condition") { val sqlScriptText = """ |BEGIN @@ -2901,7 +2901,24 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { assert(exception.origin.line.contains(2)) } - test("declare handler in wrong place") { + test("declare continue handler for undefined condition") { + val sqlScriptText = + """ + |BEGIN + | DECLARE CONTINUE HANDLER FOR undefined_condition BEGIN SELECT 1; END; + | SELECT 1; + |END""".stripMargin + val exception = intercept[SqlScriptingException] { + parsePlan(sqlScriptText) + } + checkError( + exception = exception, + 
condition = "INVALID_HANDLER_DECLARATION.CONDITION_NOT_FOUND", + parameters = Map("condition" -> "UNDEFINED_CONDITION")) + assert(exception.origin.line.contains(2)) + } + + test("declare exit handler in wrong place") { val sqlScriptText = """ |BEGIN @@ -2918,7 +2935,24 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { assert(exception.origin.line.contains(2)) } - test("duplicate condition in handler declaration") { + test("declare continue handler in wrong place") { + val sqlScriptText = + """ + |BEGIN + | SELECT 1; + | DECLARE CONTINUE HANDLER FOR DIVIDE_BY_ZERO BEGIN SELECT 1; END; + |END""".stripMargin + val exception = intercept[SqlScriptingException] { + parsePlan(sqlScriptText) + } + checkError( + exception = exception, + condition = "INVALID_HANDLER_DECLARATION.WRONG_PLACE_OF_DECLARATION", + parameters = Map.empty) + assert(exception.origin.line.contains(2)) + } + + test("duplicate condition in exit handler declaration") { val sqlScript = """ |BEGIN @@ -2940,7 +2974,29 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { parameters = Map("condition" -> "DUPLICATE_CONDITION")) } - test("duplicate sqlState in handler declaration") { + test("duplicate condition in continue handler declaration") { + val sqlScript = + """ + |BEGIN + | DECLARE OR REPLACE flag INT = -1; + | DECLARE DUPLICATE_CONDITION CONDITION FOR SQLSTATE '12345'; + | DECLARE CONTINUE HANDLER FOR duplicate_condition, duplicate_condition + | BEGIN + | SET flag = 1; + | END; + | SELECT 1/0; + | SELECT flag; + |END + |""".stripMargin + checkError( + exception = intercept[SqlScriptingException] { + parsePlan(sqlScript) + }, + condition = "INVALID_HANDLER_DECLARATION.DUPLICATE_CONDITION_IN_HANDLER_DECLARATION", + parameters = Map("condition" -> "DUPLICATE_CONDITION")) + } + + test("duplicate sqlState in exit handler declaration") { val sqlScript = """ |BEGIN @@ -2961,7 +3017,28 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { parameters = 
Map("sqlState" -> "12345")) } - test("invalid condition combination in handler declaration") { + test("duplicate sqlState in continue handler declaration") { + val sqlScript = + """ + |BEGIN + | DECLARE OR REPLACE flag INT = -1; + | DECLARE CONTINUE HANDLER FOR SQLSTATE '12345', SQLSTATE '12345' + | BEGIN + | SET flag = 1; + | END; + | SELECT 1/0; + | SELECT flag; + |END + |""".stripMargin + checkError( + exception = intercept[SqlScriptingException] { + parsePlan(sqlScript) + }, + condition = "INVALID_HANDLER_DECLARATION.DUPLICATE_SQLSTATE_IN_HANDLER_DECLARATION", + parameters = Map("sqlState" -> "12345")) + } + + test("invalid condition combination in exit handler declaration") { val sqlScript = """ |BEGIN @@ -2982,7 +3059,28 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { parameters = Map.empty) } - test("declare handler with compound body") { + test("invalid condition combination in continue handler declaration") { + val sqlScript = + """ + |BEGIN + | DECLARE OR REPLACE flag INT = -1; + | DECLARE CONTINUE HANDLER FOR SQLEXCEPTION, SQLSTATE '12345' + | BEGIN + | SET flag = 1; + | END; + | SELECT 1/0; + | SELECT flag; + |END + |""".stripMargin + checkError( + exception = intercept[SqlScriptingException] { + parsePlan(sqlScript) + }, + condition = "INVALID_HANDLER_DECLARATION.INVALID_CONDITION_COMBINATION", + parameters = Map.empty) + } + + test("declare exit handler with compound body") { val sqlScriptText = """ |BEGIN @@ -2999,9 +3097,26 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { .parsedPlan.isInstanceOf[Project]) } + test("declare continue handler with compound body") { + val sqlScriptText = + """ + |BEGIN + | DECLARE CONTINUE HANDLER FOR DIVIDE_BY_ZERO BEGIN SELECT 1; END; + |END""".stripMargin + val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody] + assert(tree.handlers.length == 1) + assert(tree.handlers.head.isInstanceOf[ExceptionHandler]) + 
assert(tree.handlers.head.exceptionHandlerTriggers.conditions.size == 1) + assert(tree.handlers.head.exceptionHandlerTriggers.conditions.contains("DIVIDE_BY_ZERO")) + assert(tree.handlers.head.body.collection.size == 1) + assert(tree.handlers.head.body.collection.head.isInstanceOf[SingleStatement]) + assert(tree.handlers.head.body.collection.head.asInstanceOf[SingleStatement] + .parsedPlan.isInstanceOf[Project]) + } + // This test works because END is not keyword here but a part of the statement. // It represents the name of the column in returned dataframe. - test("declare handler single statement with END") { + test("declare exit handler single statement with END") { val sqlScriptText = """ |BEGIN @@ -3018,7 +3133,24 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { .parsedPlan.isInstanceOf[Project]) } - test("declare handler single statement") { + test("declare continue handler single statement with END") { + val sqlScriptText = + """ + |BEGIN + | DECLARE CONTINUE HANDLER FOR DIVIDE_BY_ZERO SELECT 1 END; + |END""".stripMargin + val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody] + assert(tree.handlers.length == 1) + assert(tree.handlers.head.isInstanceOf[ExceptionHandler]) + assert(tree.handlers.head.exceptionHandlerTriggers.conditions.size == 1) + assert(tree.handlers.head.exceptionHandlerTriggers.conditions.contains("DIVIDE_BY_ZERO")) + assert(tree.handlers.head.body.collection.size == 1) + assert(tree.handlers.head.body.collection.head.isInstanceOf[SingleStatement]) + assert(tree.handlers.head.body.collection.head.asInstanceOf[SingleStatement] + .parsedPlan.isInstanceOf[Project]) + } + + test("declare exit handler single statement") { val sqlScriptText = """ |BEGIN @@ -3035,7 +3167,24 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { .parsedPlan.isInstanceOf[Project]) } - test("declare handler set statement") { + test("declare continue handler single statement") { + val sqlScriptText = + """ + |BEGIN + | 
DECLARE CONTINUE HANDLER FOR DIVIDE_BY_ZERO SELECT 1; + |END""".stripMargin + val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody] + assert(tree.handlers.length == 1) + assert(tree.handlers.head.isInstanceOf[ExceptionHandler]) + assert(tree.handlers.head.exceptionHandlerTriggers.conditions.size == 1) + assert(tree.handlers.head.exceptionHandlerTriggers.conditions.contains("DIVIDE_BY_ZERO")) + assert(tree.handlers.head.body.collection.size == 1) + assert(tree.handlers.head.body.collection.head.isInstanceOf[SingleStatement]) + assert(tree.handlers.head.body.collection.head.asInstanceOf[SingleStatement] + .parsedPlan.isInstanceOf[Project]) + } + + test("declare exit handler set statement") { val sqlScriptText = """ |BEGIN @@ -3052,7 +3201,24 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { .parsedPlan.isInstanceOf[SetVariable]) } - test("declare handler with multiple conditions/sqlstates") { + test("declare continue handler set statement") { + val sqlScriptText = + """ + |BEGIN + | DECLARE CONTINUE HANDLER FOR DIVIDE_BY_ZERO SET test_var = 1; + |END""".stripMargin + val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody] + assert(tree.handlers.length == 1) + assert(tree.handlers.head.isInstanceOf[ExceptionHandler]) + assert(tree.handlers.head.exceptionHandlerTriggers.conditions.size == 1) + assert(tree.handlers.head.exceptionHandlerTriggers.conditions.contains("DIVIDE_BY_ZERO")) + assert(tree.handlers.head.body.collection.size == 1) + assert(tree.handlers.head.body.collection.head.isInstanceOf[SingleStatement]) + assert(tree.handlers.head.body.collection.head.asInstanceOf[SingleStatement] + .parsedPlan.isInstanceOf[SetVariable]) + } + + test("declare exit handler with multiple conditions/sqlstates") { val sqlScriptText = """ |BEGIN @@ -3075,7 +3241,30 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { .parsedPlan.isInstanceOf[SetVariable]) } - test("declare handler for SQLEXCEPTION") { + test("declare 
continue handler with multiple conditions/sqlstates") { + val sqlScriptText = + """ + |BEGIN + | DECLARE TEST_CONDITION_1 CONDITION FOR SQLSTATE '12345'; + | DECLARE TEST_CONDITION_2 CONDITION FOR SQLSTATE '54321'; + | DECLARE CONTINUE HANDLER FOR SQLSTATE '22012', TEST_CONDITION_1, test_condition_2 + | SET test_var = 1; + |END""".stripMargin + val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody] + assert(tree.handlers.length == 1) + assert(tree.handlers.head.isInstanceOf[ExceptionHandler]) + assert(tree.handlers.head.exceptionHandlerTriggers.conditions.size == 2) + assert(tree.handlers.head.exceptionHandlerTriggers.conditions.contains("TEST_CONDITION_1")) + assert(tree.handlers.head.exceptionHandlerTriggers.conditions.contains("TEST_CONDITION_2")) + assert(tree.handlers.head.exceptionHandlerTriggers.sqlStates.size == 1) + assert(tree.handlers.head.exceptionHandlerTriggers.sqlStates.contains("22012")) + assert(tree.handlers.head.body.collection.size == 1) + assert(tree.handlers.head.body.collection.head.isInstanceOf[SingleStatement]) + assert(tree.handlers.head.body.collection.head.asInstanceOf[SingleStatement] + .parsedPlan.isInstanceOf[SetVariable]) + } + + test("declare exit handler for SQLEXCEPTION") { val sqlScriptText = """ |BEGIN @@ -3091,7 +3280,23 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { assert(tree.handlers.head.body.collection.size == 1) } - test("declare handler for NOT FOUND") { + test("declare continue handler for SQLEXCEPTION") { + val sqlScriptText = + """ + |BEGIN + | DECLARE CONTINUE HANDLER FOR SQLEXCEPTION SET test_var = 1; + |END""".stripMargin + val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody] + assert(tree.handlers.length == 1) + assert(tree.handlers.head.isInstanceOf[ExceptionHandler]) + assert(tree.handlers.head.exceptionHandlerTriggers.conditions.isEmpty) + assert(tree.handlers.head.exceptionHandlerTriggers.sqlStates.isEmpty) + 
assert(tree.handlers.head.exceptionHandlerTriggers.sqlException) // true + assert(!tree.handlers.head.exceptionHandlerTriggers.notFound) // false + assert(tree.handlers.head.body.collection.size == 1) + } + + test("declare exit handler for NOT FOUND") { val sqlScriptText = """ |BEGIN @@ -3107,7 +3312,23 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { assert(tree.handlers.head.body.collection.size == 1) } - test("declare handler with condition and sqlstate with same value") { + test("declare continue handler for NOT FOUND") { + val sqlScriptText = + """ + |BEGIN + | DECLARE CONTINUE HANDLER FOR NOT FOUND SET test_var = 1; + |END""".stripMargin + val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody] + assert(tree.handlers.length == 1) + assert(tree.handlers.head.isInstanceOf[ExceptionHandler]) + assert(tree.handlers.head.exceptionHandlerTriggers.conditions.isEmpty) + assert(tree.handlers.head.exceptionHandlerTriggers.sqlStates.isEmpty) + assert(!tree.handlers.head.exceptionHandlerTriggers.sqlException) // false + assert(tree.handlers.head.exceptionHandlerTriggers.notFound) // true + assert(tree.handlers.head.body.collection.size == 1) + } + + test("declare exit handler with condition and sqlstate with same value") { val sqlScriptText = """ |BEGIN @@ -3126,7 +3347,26 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { assert(tree.handlers.head.body.collection.size == 1) } - test("declare handler for condition in parent scope") { + test("declare continue handler with condition and sqlstate with same value") { + val sqlScriptText = + """ + |BEGIN + | DECLARE K2000 CONDITION FOR SQLSTATE '12345'; + | DECLARE CONTINUE HANDLER FOR K2000, SQLSTATE VALUE 'K2000' SET test_var = 1; + |END""".stripMargin + val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody] + assert(tree.handlers.length == 1) + assert(tree.handlers.head.isInstanceOf[ExceptionHandler]) + 
assert(tree.handlers.head.exceptionHandlerTriggers.conditions.size == 1) + assert(tree.handlers.head.exceptionHandlerTriggers.conditions.contains("K2000")) + assert(tree.handlers.head.exceptionHandlerTriggers.sqlStates.size == 1) + assert(tree.handlers.head.exceptionHandlerTriggers.sqlStates.contains("K2000")) + assert(!tree.handlers.head.exceptionHandlerTriggers.sqlException) // false + assert(!tree.handlers.head.exceptionHandlerTriggers.notFound) // false + assert(tree.handlers.head.body.collection.size == 1) + } + + test("declare exit handler for condition in parent scope") { val sqlScriptText = """ |BEGIN @@ -3144,7 +3384,25 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { assert(handlerBody.handlers.head.body.collection.size == 1) } - test("declare nested handler for condition in parent scope of parent handler") { + test("declare continue handler for condition in parent scope") { + val sqlScriptText = + """ + |BEGIN + | DECLARE TEST_CONDITION CONDITION FOR SQLSTATE '12345'; + | BEGIN + | DECLARE CONTINUE HANDLER FOR TEST_CONDITION SET test_var = 1; + | END; + |END""".stripMargin + val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody] + val handlerBody = tree.collection.head.asInstanceOf[CompoundBody] + assert(handlerBody.handlers.length == 1) + assert(handlerBody.handlers.head.isInstanceOf[ExceptionHandler]) + assert(handlerBody.handlers.head.exceptionHandlerTriggers.conditions.size == 1) + assert(handlerBody.handlers.head.exceptionHandlerTriggers.conditions.contains("TEST_CONDITION")) + assert(handlerBody.handlers.head.body.collection.size == 1) + } + + test("declare nested exit handler for condition in parent scope of parent handler") { val sqlScriptText = """ |BEGIN @@ -3164,6 +3422,26 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { assert(handlerBody.exceptionHandlerTriggers.conditions.contains("TEST_CONDITION")) } + test("declare nested continue handler for condition in parent scope of parent 
handler") { + val sqlScriptText = + """ + |BEGIN + | DECLARE TEST_CONDITION CONDITION FOR SQLSTATE '12345'; + | BEGIN + | DECLARE CONTINUE HANDLER FOR DIVIDE_BY_ZERO + | BEGIN + | DECLARE CONTINUE HANDLER FOR TEST_CONDITION SET test_var = 1; + | END; + | END; + |END""".stripMargin + val tree = parsePlan(sqlScriptText).asInstanceOf[CompoundBody] + val handlerBody = tree + .collection.head.asInstanceOf[CompoundBody] + .handlers.head.body.asInstanceOf[CompoundBody] + .handlers.head + assert(handlerBody.exceptionHandlerTriggers.conditions.contains("TEST_CONDITION")) + } + // Helper methods def cleanupStatementString(statementStr: String): String = { statementStr diff --git a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala index 684a5a72e6d83..c46bb26168876 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala @@ -3467,7 +3467,7 @@ class SqlScriptingInterpreterSuite } } - test("Duplicate SQLEXCEPTION Handler") { + test("Duplicate SQLEXCEPTION EXIT/EXIT Handler") { val sqlScript = """ |BEGIN @@ -3490,7 +3490,7 @@ class SqlScriptingInterpreterSuite ) } - test("Duplicate NOT FOUND Handler") { + test("Duplicate NOT FOUND EXIT/EXIT Handler") { val sqlScript = """ |BEGIN @@ -3511,4 +3511,139 @@ class SqlScriptingInterpreterSuite parameters = Map("condition" -> "NOT FOUND") ) } + + test("Duplicate SQLEXCEPTION CONTINUE/CONTINUE Handler") { + val sqlScript = + """ + |BEGIN + | DECLARE CONTINUE HANDLER FOR SQLEXCEPTION + | BEGIN + | SELECT 1; + | END; + | DECLARE CONTINUE HANDLER FOR SQLEXCEPTION + | BEGIN + | SELECT 2; + | END; + | + |END""".stripMargin + checkError( + exception = intercept[SqlScriptingException] { + runSqlScript(sqlScript) + }, + condition = "DUPLICATE_EXCEPTION_HANDLER.CONDITION", + 
parameters = Map("condition" -> "SQLEXCEPTION") + ) + } + + test("Duplicate NOT FOUND CONTINUE/CONTINUE Handler") { + val sqlScript = + """ + |BEGIN + | DECLARE CONTINUE HANDLER FOR NOT FOUND + | BEGIN + | SELECT 1; + | END; + | DECLARE CONTINUE HANDLER FOR NOT FOUND + | BEGIN + | SELECT 2; + | END; + |END""".stripMargin + checkError( + exception = intercept[SqlScriptingException] { + runSqlScript(sqlScript) + }, + condition = "DUPLICATE_EXCEPTION_HANDLER.CONDITION", + parameters = Map("condition" -> "NOT FOUND") + ) + } + + test("Duplicate SQLEXCEPTION EXIT/CONTINUE Handler") { + val sqlScript = + """ + |BEGIN + | DECLARE EXIT HANDLER FOR SQLEXCEPTION + | BEGIN + | SELECT 1; + | END; + | DECLARE CONTINUE HANDLER FOR SQLEXCEPTION + | BEGIN + | SELECT 2; + | END; + | + |END""".stripMargin + checkError( + exception = intercept[SqlScriptingException] { + runSqlScript(sqlScript) + }, + condition = "DUPLICATE_EXCEPTION_HANDLER.CONDITION", + parameters = Map("condition" -> "SQLEXCEPTION") + ) + } + + test("Duplicate NOT FOUND EXIT/CONTINUE Handler") { + val sqlScript = + """ + |BEGIN + | DECLARE EXIT HANDLER FOR NOT FOUND + | BEGIN + | SELECT 1; + | END; + | DECLARE CONTINUE HANDLER FOR NOT FOUND + | BEGIN + | SELECT 2; + | END; + |END""".stripMargin + checkError( + exception = intercept[SqlScriptingException] { + runSqlScript(sqlScript) + }, + condition = "DUPLICATE_EXCEPTION_HANDLER.CONDITION", + parameters = Map("condition" -> "NOT FOUND") + ) + } + + test("Duplicate SQLEXCEPTION CONTINUE/EXIT Handler") { + val sqlScript = + """ + |BEGIN + | DECLARE CONTINUE HANDLER FOR SQLEXCEPTION + | BEGIN + | SELECT 1; + | END; + | DECLARE EXIT HANDLER FOR SQLEXCEPTION + | BEGIN + | SELECT 2; + | END; + | + |END""".stripMargin + checkError( + exception = intercept[SqlScriptingException] { + runSqlScript(sqlScript) + }, + condition = "DUPLICATE_EXCEPTION_HANDLER.CONDITION", + parameters = Map("condition" -> "SQLEXCEPTION") + ) + } + + test("Duplicate NOT FOUND CONTINUE/EXIT 
Handler") { + val sqlScript = + """ + |BEGIN + | DECLARE CONTINUE HANDLER FOR NOT FOUND + | BEGIN + | SELECT 1; + | END; + | DECLARE EXIT HANDLER FOR NOT FOUND + | BEGIN + | SELECT 2; + | END; + |END""".stripMargin + checkError( + exception = intercept[SqlScriptingException] { + runSqlScript(sqlScript) + }, + condition = "DUPLICATE_EXCEPTION_HANDLER.CONDITION", + parameters = Map("condition" -> "NOT FOUND") + ) + } } From 4e5ae21f43bce9a4b0c40851c2c004b5cbd9568a Mon Sep 17 00:00:00 2001 From: Teodor Djelic <130703036+TeodorDjelic@users.noreply.github.com> Date: Wed, 10 Sep 2025 18:36:37 +0200 Subject: [PATCH 02/10] Fixed an extra newline before else; --- .../org/apache/spark/sql/catalyst/parser/AstBuilder.scala | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala index 6e285e3dd01bc..9c14f3defcd7e 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala @@ -284,8 +284,7 @@ class AstBuilder extends DataTypeAstBuilder val handlerType = if (Option(ctx.CONTINUE()).isDefined) { ExceptionHandlerType.CONTINUE - } - else { + } else { ExceptionHandlerType.EXIT } From be0b3f2ae0a3d814498e800d98f5bae4463d0bdb Mon Sep 17 00:00:00 2001 From: Teodor Djelic <130703036+TeodorDjelic@users.noreply.github.com> Date: Thu, 11 Sep 2025 16:19:57 +0200 Subject: [PATCH 03/10] Added a new SQLConf entry spark.sql.scripting.continueHandlerEnabled, as a continue handler feature switch; --- .../apache/spark/sql/catalyst/parser/AstBuilder.scala | 3 +++ .../scala/org/apache/spark/sql/internal/SQLConf.scala | 9 +++++++++ .../sql/catalyst/parser/SqlScriptingParserSuite.scala | 11 +++++++++++ .../sql/scripting/SqlScriptingInterpreterSuite.scala | 10 ++++++++++ 4 files changed, 33 insertions(+) diff 
--git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala index 9c14f3defcd7e..415add48f23f5 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/parser/AstBuilder.scala @@ -283,6 +283,9 @@ class AstBuilder extends DataTypeAstBuilder val exceptionHandlerTriggers = visitConditionValuesImpl(ctx.conditionValues()) val handlerType = if (Option(ctx.CONTINUE()).isDefined) { + if (!conf.getConf(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED)) { + throw SqlScriptingErrors.continueHandlerNotSupported(CurrentOrigin.get) + } ExceptionHandlerType.CONTINUE } else { ExceptionHandlerType.EXIT diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala index 5a99814c8cc85..de950b7503714 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala @@ -4144,6 +4144,15 @@ object SQLConf { .booleanConf .createWithDefault(true) + val SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED = + buildConf("spark.sql.scripting.continueHandlerEnabled") + .doc("SQL Scripting CONTINUE HANDLER feature is under development and its use should be " + + "done under this feature flag. SQL Scripting CONTINUE HANDLER enables users to use " + + "CONTINUE HANDLER syntax inside SQL Scripts.") + .version("4.0.0") + .booleanConf + .createWithDefault(false) + val CONCAT_BINARY_AS_STRING = buildConf("spark.sql.function.concatBinaryAsString") .doc("When this option is set to false and all inputs are binary, `functions.concat` returns " + "an output as binary. 
Otherwise, it returns as a string.") diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala index dd273b312a53f..76f29e6c3fcd5 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala @@ -23,10 +23,21 @@ import org.apache.spark.sql.catalyst.plans.SQLHelper import org.apache.spark.sql.catalyst.plans.logical.{CompoundBody, CreateVariable, ExceptionHandler, ForStatement, IfElseStatement, IterateStatement, LeaveStatement, LoopStatement, Project, RepeatStatement, SearchedCaseStatement, SetVariable, SimpleCaseStatement, SingleStatement, WhileStatement} import org.apache.spark.sql.errors.DataTypeErrors.toSQLId import org.apache.spark.sql.exceptions.SqlScriptingException +import org.apache.spark.sql.internal.SQLConf class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { import CatalystSqlParser._ + protected override def beforeAll(): Unit = { + super.beforeAll() + conf.setConfString(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED.key, "true") + } + + protected override def afterAll(): Unit = { + conf.unsetConf(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED.key) + super.afterAll() + } + // Tests test("single select") { val sqlScriptText = "SELECT 1;" diff --git a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala index c46bb26168876..dee458da8405c 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala @@ -38,6 +38,16 @@ class SqlScriptingInterpreterSuite with SharedSparkSession with 
SqlScriptingTestUtils { + protected override def beforeAll(): Unit = { + super.beforeAll() + conf.setConfString(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED.key, "true") + } + + protected override def afterAll(): Unit = { + conf.unsetConf(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED.key) + super.afterAll() + } + // Helpers private def runSqlScript( sqlText: String, From d6d7858cac94df2adf4c79c5c73e1ca014b55872 Mon Sep 17 00:00:00 2001 From: Teodor Djelic <130703036+TeodorDjelic@users.noreply.github.com> Date: Thu, 11 Sep 2025 17:11:20 +0200 Subject: [PATCH 04/10] Changed spark.sql.scripting.continueHandlerEnabled feature switch description; --- .../main/scala/org/apache/spark/sql/internal/SQLConf.scala | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala index de950b7503714..61f51a1d68350 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala @@ -4146,9 +4146,10 @@ object SQLConf { val SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED = buildConf("spark.sql.scripting.continueHandlerEnabled") - .doc("SQL Scripting CONTINUE HANDLER feature is under development and its use should be " + - "done under this feature flag. SQL Scripting CONTINUE HANDLER enables users to use " + - "CONTINUE HANDLER syntax inside SQL Scripts.") + .doc("EXPERIMENTAL FEATURE/WORK IN PROGRESS: SQL Scripting CONTINUE HANDLER feature " + + "is under development and still not working as intended. This feature switch is intended " + + "to be used internally for development and testing, not by end users. 
" + + "YOU ARE ADVISED AGAINST USING THIS FEATURE AS ITS NOT FINISHED.") .version("4.0.0") .booleanConf .createWithDefault(false) From 59637428ad0b7260533c119a097aba1d5bec6c03 Mon Sep 17 00:00:00 2001 From: Teodor Djelic <130703036+TeodorDjelic@users.noreply.github.com> Date: Fri, 12 Sep 2025 14:24:29 +0200 Subject: [PATCH 05/10] Continue handler not supported test readded; --- .../parser/SqlScriptingParserSuite.scala | 24 +++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala index 76f29e6c3fcd5..4e2bda311ae1b 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala @@ -2853,6 +2853,30 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { assert(exception.origin.line.contains(6)) } + test("continue handler not supported") { + val sqlScript = + """ + |BEGIN + | DECLARE OR REPLACE flag INT = -1; + | DECLARE CONTINUE HANDLER FOR SQLSTATE '22012' + | BEGIN + | SET flag = 1; + | END; + | SELECT 1/0; + | SELECT flag; + |END + |""".stripMargin + + conf.unsetConf(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED.key) + checkError( + exception = intercept[SqlScriptingException] { + parsePlan(sqlScript) + }, + condition = "UNSUPPORTED_FEATURE.CONTINUE_EXCEPTION_HANDLER", + parameters = Map.empty) + conf.setConfString(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED.key, "true") + } + test("declare exit handler for qualified condition name that is not supported") { val sqlScript = """ From 50f23c7775e91d59a78ae83fc58d6af54cb073b8 Mon Sep 17 00:00:00 2001 From: Teodor Djelic <130703036+TeodorDjelic@users.noreply.github.com> Date: Fri, 12 Sep 2025 15:33:28 +0200 Subject: [PATCH 06/10] Update 
sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala Co-authored-by: Wenchen Fan --- .../src/main/scala/org/apache/spark/sql/internal/SQLConf.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala index 61f51a1d68350..a69155b3b3ff2 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala @@ -4150,7 +4150,7 @@ object SQLConf { "is under development and still not working as intended. This feature switch is intended " + "to be used internally for development and testing, not by end users. " + "YOU ARE ADVISED AGAINST USING THIS FEATURE AS ITS NOT FINISHED.") - .version("4.0.0") + .version("4.1.0") .booleanConf .createWithDefault(false) From 5cdd546261a4db2729c68b1720899d37a253f60e Mon Sep 17 00:00:00 2001 From: Teodor Djelic <130703036+TeodorDjelic@users.noreply.github.com> Date: Fri, 12 Sep 2025 15:33:41 +0200 Subject: [PATCH 07/10] Update sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala Co-authored-by: Wenchen Fan --- .../src/main/scala/org/apache/spark/sql/internal/SQLConf.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala index a69155b3b3ff2..416f2dff6f37d 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/internal/SQLConf.scala @@ -4146,6 +4146,7 @@ object SQLConf { val SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED = buildConf("spark.sql.scripting.continueHandlerEnabled") + .internal() .doc("EXPERIMENTAL FEATURE/WORK IN PROGRESS: SQL Scripting CONTINUE HANDLER feature " + "is under development and still not working as intended. 
This feature switch is intended " + "to be used internally for development and testing, not by end users. " + From 37322c20f151654ca3b054c6a5bd5761bfd688f9 Mon Sep 17 00:00:00 2001 From: Teodor Djelic <130703036+TeodorDjelic@users.noreply.github.com> Date: Fri, 12 Sep 2025 15:34:00 +0200 Subject: [PATCH 08/10] Update sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala Co-authored-by: Wenchen Fan --- .../spark/sql/catalyst/parser/SqlScriptingParserSuite.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala index 4e2bda311ae1b..51339dde81449 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala @@ -30,7 +30,7 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { protected override def beforeAll(): Unit = { super.beforeAll() - conf.setConfString(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED.key, "true") + conf.setConf(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED, true) } protected override def afterAll(): Unit = { From a4047f98d6826afc0936e74b679b393020abf91e Mon Sep 17 00:00:00 2001 From: Teodor Djelic <130703036+TeodorDjelic@users.noreply.github.com> Date: Fri, 12 Sep 2025 15:34:16 +0200 Subject: [PATCH 09/10] Update sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala Co-authored-by: Wenchen Fan --- .../spark/sql/scripting/SqlScriptingInterpreterSuite.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala index 
dee458da8405c..6671b52381c21 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/scripting/SqlScriptingInterpreterSuite.scala @@ -40,7 +40,7 @@ class SqlScriptingInterpreterSuite protected override def beforeAll(): Unit = { super.beforeAll() - conf.setConfString(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED.key, "true") + conf.setConf(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED, true) } protected override def afterAll(): Unit = { From 6db58f07723c811a7ec8459436cf1b659892a601 Mon Sep 17 00:00:00 2001 From: Teodor Djelic <130703036+TeodorDjelic@users.noreply.github.com> Date: Fri, 12 Sep 2025 16:23:56 +0200 Subject: [PATCH 10/10] Fixing a test; --- .../parser/SqlScriptingParserSuite.scala | 42 +++++++++---------- 1 file changed, 21 insertions(+), 21 deletions(-) diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala index 51339dde81449..298329db1ee30 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/parser/SqlScriptingParserSuite.scala @@ -2854,27 +2854,27 @@ class SqlScriptingParserSuite extends SparkFunSuite with SQLHelper { } test("continue handler not supported") { - val sqlScript = - """ - |BEGIN - | DECLARE OR REPLACE flag INT = -1; - | DECLARE CONTINUE HANDLER FOR SQLSTATE '22012' - | BEGIN - | SET flag = 1; - | END; - | SELECT 1/0; - | SELECT flag; - |END - |""".stripMargin - - conf.unsetConf(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED.key) - checkError( - exception = intercept[SqlScriptingException] { - parsePlan(sqlScript) - }, - condition = "UNSUPPORTED_FEATURE.CONTINUE_EXCEPTION_HANDLER", - parameters = Map.empty) - 
conf.setConfString(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED.key, "true") + withSQLConf(SQLConf.SQL_SCRIPTING_CONTINUE_HANDLER_ENABLED.key -> "false") { + val sqlScript = + """ + |BEGIN + | DECLARE OR REPLACE flag INT = -1; + | DECLARE CONTINUE HANDLER FOR SQLSTATE '22012' + | BEGIN + | SET flag = 1; + | END; + | SELECT 1/0; + | SELECT flag; + |END + |""".stripMargin + + checkError( + exception = intercept[SqlScriptingException] { + parsePlan(sqlScript) + }, + condition = "UNSUPPORTED_FEATURE.CONTINUE_EXCEPTION_HANDLER", + parameters = Map.empty) + } } test("declare exit handler for qualified condition name that is not supported") {