From f5d8eeae9a1ca9dda0125366bc60125c9bbf0783 Mon Sep 17 00:00:00 2001 From: B Vadlamani Date: Mon, 29 Dec 2025 10:50:45 -0800 Subject: [PATCH 1/5] ansi_suport_benchmarking --- .../benchmark/CometArithmeticBenchmark.scala | 4 ++-- .../sql/benchmark/CometBenchmarkBase.scala | 19 +++++++++++++++---- .../sql/benchmark/CometCastBenchmark.scala | 4 ++-- .../CometConditionalExpressionBenchmark.scala | 4 ++-- .../CometDatetimeExpressionBenchmark.scala | 4 ++-- .../CometJsonExpressionBenchmark.scala | 7 ++++++- .../CometPredicateExpressionBenchmark.scala | 2 +- .../CometStringExpressionBenchmark.scala | 7 ++++++- 8 files changed, 36 insertions(+), 15 deletions(-) diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometArithmeticBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometArithmeticBenchmark.scala index a513aa1a77..888d490085 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometArithmeticBenchmark.scala +++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometArithmeticBenchmark.scala @@ -44,7 +44,7 @@ object CometArithmeticBenchmark extends CometBenchmarkBase { val name = s"Binary op ${dataType.sql}, dictionary = $useDictionary" val query = s"SELECT c1 ${op.sig} c2 FROM $table" - runExpressionBenchmark(name, values, query) + runExpressionBenchmark(name, values, query, isANSIEnabled = false) } } } @@ -64,7 +64,7 @@ object CometArithmeticBenchmark extends CometBenchmarkBase { val name = s"Binary op ${dataType.sql}, dictionary = $useDictionary" val query = s"SELECT c1 ${op.sig} c2 FROM $table" - runExpressionBenchmark(name, values, query) + runExpressionBenchmark(name, values, query, isANSIEnabled = false) } } } diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometBenchmarkBase.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometBenchmarkBase.scala index 8d56cefa05..c6a63e1edd 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometBenchmarkBase.scala +++ 
b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometBenchmarkBase.scala @@ -23,7 +23,7 @@ import java.io.File import java.nio.charset.StandardCharsets import java.util.Base64 -import scala.util.Random +import scala.util.{Random, Try} import org.apache.parquet.crypto.DecryptionPropertiesFactory import org.apache.parquet.crypto.keytools.{KeyToolkit, PropertiesDrivenCryptoFactory} @@ -127,7 +127,8 @@ trait CometBenchmarkBase extends SqlBasedBenchmark { name: String, cardinality: Long, query: String, - extraCometConfigs: Map[String, String] = Map.empty): Unit = { + extraCometConfigs: Map[String, String] = Map.empty, + isANSIEnabled: Boolean): Unit = { val benchmark = new Benchmark(name, cardinality, output = output) benchmark.addCase("Spark") { _ => @@ -140,7 +141,7 @@ trait CometBenchmarkBase extends SqlBasedBenchmark { withSQLConf( CometConf.COMET_ENABLED.key -> "true", CometConf.COMET_EXEC_ENABLED.key -> "false") { - spark.sql(query).noop() + runSparkCommand(spark, query, isANSIEnabled) } } @@ -151,13 +152,23 @@ trait CometBenchmarkBase extends SqlBasedBenchmark { benchmark.addCase("Comet (Scan + Exec)") { _ => withSQLConf(cometExecConfigs.toSeq: _*) { - spark.sql(query).noop() + runSparkCommand(spark, query, isANSIEnabled) } } benchmark.run() } + private def runSparkCommand(spark: SparkSession, query: String, isANSIMode: Boolean): Unit = { + if (isANSIMode) { + Try { + spark.sql(query).noop() + } + } else { + spark.sql(query).noop() + } + } + protected def prepareTable(dir: File, df: DataFrame, partition: Option[String] = None): Unit = { val testDf = if (partition.isDefined) { df.write.partitionBy(partition.get) diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometCastBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometCastBenchmark.scala index 975abd632f..799a6cc0c1 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometCastBenchmark.scala +++ 
b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometCastBenchmark.scala @@ -84,11 +84,11 @@ object CometCastBenchmark extends CometBenchmarkBase { val functionSQL = castExprSQL(toDataType, "value") val query = s"SELECT $functionSQL FROM parquetV1Table" val name = - s"Cast function to : ${toDataType} , ansi mode enabled : ${isAnsiMode}" + s"Cast function from : ${fromDataType} to : ${toDataType} , ansi mode enabled : ${isAnsiMode}" val extraConfigs = Map(SQLConf.ANSI_ENABLED.key -> isAnsiMode.toString) - runExpressionBenchmark(name, values, query, extraConfigs) + runExpressionBenchmark(name, values, query, extraConfigs, isAnsiMode) } } } diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometConditionalExpressionBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometConditionalExpressionBenchmark.scala index c5eb9ea390..9354b190b6 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometConditionalExpressionBenchmark.scala +++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometConditionalExpressionBenchmark.scala @@ -35,7 +35,7 @@ object CometConditionalExpressionBenchmark extends CometBenchmarkBase { val query = "select CASE WHEN c1 < 0 THEN '<0' WHEN c1 = 0 THEN '=0' ELSE '>0' END from parquetV1Table" - runExpressionBenchmark("Case When Expr", values, query) + runExpressionBenchmark("Case When Expr", values, query, isANSIEnabled = false) } } } @@ -47,7 +47,7 @@ object CometConditionalExpressionBenchmark extends CometBenchmarkBase { val query = "select IF (c1 < 0, '<0', '>=0') from parquetV1Table" - runExpressionBenchmark("If Expr", values, query) + runExpressionBenchmark("If Expr", values, query, isANSIEnabled = false) } } } diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometDatetimeExpressionBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometDatetimeExpressionBenchmark.scala index 47eff41bbd..db265b35c4 100644 --- 
a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometDatetimeExpressionBenchmark.scala +++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometDatetimeExpressionBenchmark.scala @@ -41,7 +41,7 @@ object CometDatetimeExpressionBenchmark extends CometBenchmarkBase { val isDictionary = if (useDictionary) "(Dictionary)" else "" val name = s"Date Truncate $isDictionary - $level" val query = s"select trunc(dt, '$level') from parquetV1Table" - runExpressionBenchmark(name, values, query) + runExpressionBenchmark(name, values, query, isANSIEnabled = false) } } } @@ -70,7 +70,7 @@ object CometDatetimeExpressionBenchmark extends CometBenchmarkBase { val isDictionary = if (useDictionary) "(Dictionary)" else "" val name = s"Timestamp Truncate $isDictionary - $level" val query = s"select date_trunc('$level', ts) from parquetV1Table" - runExpressionBenchmark(name, values, query) + runExpressionBenchmark(name, values, query, isANSIEnabled = false) } } } diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometJsonExpressionBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometJsonExpressionBenchmark.scala index 5b4741ba68..df66b11b3c 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometJsonExpressionBenchmark.scala +++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometJsonExpressionBenchmark.scala @@ -120,7 +120,12 @@ object CometJsonExpressionBenchmark extends CometBenchmarkBase { CometConf.getExprAllowIncompatConfigKey( classOf[JsonToStructs]) -> "true") ++ config.extraCometConfigs - runExpressionBenchmark(config.name, values, config.query, extraConfigs) + runExpressionBenchmark( + config.name, + values, + config.query, + extraConfigs, + isANSIEnabled = false) } } } diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometPredicateExpressionBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometPredicateExpressionBenchmark.scala index 6506c5665d..1a5f22cdcc 100644 --- 
a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometPredicateExpressionBenchmark.scala +++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometPredicateExpressionBenchmark.scala @@ -38,7 +38,7 @@ object CometPredicateExpressionBenchmark extends CometBenchmarkBase { val query = "select * from parquetV1Table where c1 in ('positive', 'zero')" - runExpressionBenchmark("in Expr", values, query) + runExpressionBenchmark("in Expr", values, query, isANSIEnabled = false) } } } diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometStringExpressionBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometStringExpressionBenchmark.scala index 41eabb8513..c2759d2f40 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometStringExpressionBenchmark.scala +++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometStringExpressionBenchmark.scala @@ -55,7 +55,12 @@ object CometStringExpressionBenchmark extends CometBenchmarkBase { val extraConfigs = Map(CometConf.COMET_CASE_CONVERSION_ENABLED.key -> "true") ++ config.extraCometConfigs - runExpressionBenchmark(config.name, values, config.query, extraConfigs) + runExpressionBenchmark( + config.name, + values, + config.query, + extraConfigs, + isANSIEnabled = false) } } } From 0aed80702d5cb13775d83696df9cf1fd5b9354fd Mon Sep 17 00:00:00 2001 From: B Vadlamani Date: Mon, 29 Dec 2025 11:11:01 -0800 Subject: [PATCH 2/5] ansi_suport_benchmarking --- .../benchmark/CometArithmeticBenchmark.scala | 4 +- .../sql/benchmark/CometBenchmarkBase.scala | 43 ++++++------------- .../sql/benchmark/CometCastBenchmark.scala | 2 +- .../CometConditionalExpressionBenchmark.scala | 4 +- .../CometDatetimeExpressionBenchmark.scala | 4 +- .../CometJsonExpressionBenchmark.scala | 2 +- .../CometPredicateExpressionBenchmark.scala | 2 +- .../CometStringExpressionBenchmark.scala | 2 +- 8 files changed, 23 insertions(+), 40 deletions(-) diff --git 
a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometArithmeticBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometArithmeticBenchmark.scala index 888d490085..911de69cbf 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometArithmeticBenchmark.scala +++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometArithmeticBenchmark.scala @@ -44,7 +44,7 @@ object CometArithmeticBenchmark extends CometBenchmarkBase { val name = s"Binary op ${dataType.sql}, dictionary = $useDictionary" val query = s"SELECT c1 ${op.sig} c2 FROM $table" - runExpressionBenchmark(name, values, query, isANSIEnabled = false) + runExpressionBenchmark(name, values, query, isAnsiMode = false) } } } @@ -64,7 +64,7 @@ object CometArithmeticBenchmark extends CometBenchmarkBase { val name = s"Binary op ${dataType.sql}, dictionary = $useDictionary" val query = s"SELECT c1 ${op.sig} c2 FROM $table" - runExpressionBenchmark(name, values, query, isANSIEnabled = false) + runExpressionBenchmark(name, values, query, isAnsiMode = false) } } } diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometBenchmarkBase.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometBenchmarkBase.scala index c6a63e1edd..b9cf70e4c4 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometBenchmarkBase.scala +++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometBenchmarkBase.scala @@ -88,28 +88,6 @@ trait CometBenchmarkBase extends SqlBasedBenchmark { } } - /** Runs function `f` with Comet on and off. 
*/ - final def runWithComet(name: String, cardinality: Long)(f: => Unit): Unit = { - val benchmark = new Benchmark(name, cardinality, output = output) - - benchmark.addCase(s"$name - Spark ") { _ => - withSQLConf(CometConf.COMET_ENABLED.key -> "false") { - f - } - } - - benchmark.addCase(s"$name - Comet") { _ => - withSQLConf( - CometConf.COMET_ENABLED.key -> "true", - CometConf.COMET_EXEC_ENABLED.key -> "true", - SQLConf.ANSI_ENABLED.key -> "false") { - f - } - } - - benchmark.run() - } - /** * Runs an expression benchmark with standard cases: Spark, Comet (Scan), Comet (Scan + Exec). * This provides a consistent benchmark structure for expression evaluation. @@ -127,32 +105,37 @@ trait CometBenchmarkBase extends SqlBasedBenchmark { name: String, cardinality: Long, query: String, - extraCometConfigs: Map[String, String] = Map.empty, - isANSIEnabled: Boolean): Unit = { + isAnsiMode: Boolean, + extraCometConfigs: Map[String, String] = Map.empty): Unit = { + val benchmark = new Benchmark(name, cardinality, output = output) benchmark.addCase("Spark") { _ => - withSQLConf(CometConf.COMET_ENABLED.key -> "false") { - spark.sql(query).noop() + withSQLConf( + CometConf.COMET_ENABLED.key -> "false", + SQLConf.ANSI_ENABLED.key -> isAnsiMode.toString) { + runSparkCommand(spark, query, isAnsiMode) } } benchmark.addCase("Comet (Scan)") { _ => withSQLConf( CometConf.COMET_ENABLED.key -> "true", - CometConf.COMET_EXEC_ENABLED.key -> "false") { - runSparkCommand(spark, query, isANSIEnabled) + CometConf.COMET_EXEC_ENABLED.key -> "false", + SQLConf.ANSI_ENABLED.key -> isAnsiMode.toString) { + runSparkCommand(spark, query, isAnsiMode) } } val cometExecConfigs = Map( CometConf.COMET_ENABLED.key -> "true", CometConf.COMET_EXEC_ENABLED.key -> "true", - "spark.sql.optimizer.constantFolding.enabled" -> "false") ++ extraCometConfigs + "spark.sql.optimizer.constantFolding.enabled" -> "false", + SQLConf.ANSI_ENABLED.key -> isAnsiMode.toString) ++ extraCometConfigs benchmark.addCase("Comet 
(Scan + Exec)") { _ => withSQLConf(cometExecConfigs.toSeq: _*) { - runSparkCommand(spark, query, isANSIEnabled) + runSparkCommand(spark, query, isAnsiMode) } } diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometCastBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometCastBenchmark.scala index 799a6cc0c1..c4bfeb28ba 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometCastBenchmark.scala +++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometCastBenchmark.scala @@ -88,7 +88,7 @@ object CometCastBenchmark extends CometBenchmarkBase { val extraConfigs = Map(SQLConf.ANSI_ENABLED.key -> isAnsiMode.toString) - runExpressionBenchmark(name, values, query, extraConfigs, isAnsiMode) + runExpressionBenchmark(name, values, query, isAnsiMode) } } } diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometConditionalExpressionBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometConditionalExpressionBenchmark.scala index 9354b190b6..b710552d44 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometConditionalExpressionBenchmark.scala +++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometConditionalExpressionBenchmark.scala @@ -35,7 +35,7 @@ object CometConditionalExpressionBenchmark extends CometBenchmarkBase { val query = "select CASE WHEN c1 < 0 THEN '<0' WHEN c1 = 0 THEN '=0' ELSE '>0' END from parquetV1Table" - runExpressionBenchmark("Case When Expr", values, query, isANSIEnabled = false) + runExpressionBenchmark("Case When Expr", values, query, isAnsiMode = false) } } } @@ -47,7 +47,7 @@ object CometConditionalExpressionBenchmark extends CometBenchmarkBase { val query = "select IF (c1 < 0, '<0', '>=0') from parquetV1Table" - runExpressionBenchmark("If Expr", values, query, isANSIEnabled = false) + runExpressionBenchmark("If Expr", values, query, isAnsiMode = false) } } } diff --git 
a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometDatetimeExpressionBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometDatetimeExpressionBenchmark.scala index db265b35c4..abcae06e7d 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometDatetimeExpressionBenchmark.scala +++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometDatetimeExpressionBenchmark.scala @@ -41,7 +41,7 @@ object CometDatetimeExpressionBenchmark extends CometBenchmarkBase { val isDictionary = if (useDictionary) "(Dictionary)" else "" val name = s"Date Truncate $isDictionary - $level" val query = s"select trunc(dt, '$level') from parquetV1Table" - runExpressionBenchmark(name, values, query, isANSIEnabled = false) + runExpressionBenchmark(name, values, query, isAnsiMode = false) } } } @@ -70,7 +70,7 @@ object CometDatetimeExpressionBenchmark extends CometBenchmarkBase { val isDictionary = if (useDictionary) "(Dictionary)" else "" val name = s"Timestamp Truncate $isDictionary - $level" val query = s"select date_trunc('$level', ts) from parquetV1Table" - runExpressionBenchmark(name, values, query, isANSIEnabled = false) + runExpressionBenchmark(name, values, query, isAnsiMode = false) } } } diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometJsonExpressionBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometJsonExpressionBenchmark.scala index df66b11b3c..57b1c982e6 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometJsonExpressionBenchmark.scala +++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometJsonExpressionBenchmark.scala @@ -125,7 +125,7 @@ object CometJsonExpressionBenchmark extends CometBenchmarkBase { values, config.query, extraConfigs, - isANSIEnabled = false) + isAnsiMode = false) } } } diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometPredicateExpressionBenchmark.scala 
b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometPredicateExpressionBenchmark.scala index 1a5f22cdcc..db68e2db40 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometPredicateExpressionBenchmark.scala +++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometPredicateExpressionBenchmark.scala @@ -38,7 +38,7 @@ object CometPredicateExpressionBenchmark extends CometBenchmarkBase { val query = "select * from parquetV1Table where c1 in ('positive', 'zero')" - runExpressionBenchmark("in Expr", values, query, isANSIEnabled = false) + runExpressionBenchmark("in Expr", values, query, isAnsiMode = false) } } } diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometStringExpressionBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometStringExpressionBenchmark.scala index c2759d2f40..0b73773bb5 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometStringExpressionBenchmark.scala +++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometStringExpressionBenchmark.scala @@ -60,7 +60,7 @@ object CometStringExpressionBenchmark extends CometBenchmarkBase { values, config.query, extraConfigs, - isANSIEnabled = false) + isAnsiMode = false) } } } From a8b065bae461e46becfe9edd6011a440c214a757 Mon Sep 17 00:00:00 2001 From: B Vadlamani Date: Mon, 29 Dec 2025 11:29:24 -0800 Subject: [PATCH 3/5] ansi_suport_benchmarking --- .../spark/sql/benchmark/CometJsonExpressionBenchmark.scala | 4 ++-- .../spark/sql/benchmark/CometStringExpressionBenchmark.scala | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometJsonExpressionBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometJsonExpressionBenchmark.scala index 57b1c982e6..f52d4900d2 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometJsonExpressionBenchmark.scala +++ 
b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometJsonExpressionBenchmark.scala @@ -124,8 +124,8 @@ object CometJsonExpressionBenchmark extends CometBenchmarkBase { config.name, values, config.query, - extraConfigs, - isAnsiMode = false) + isAnsiMode = false, + extraConfigs) } } } diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometStringExpressionBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometStringExpressionBenchmark.scala index 0b73773bb5..7f27cd593b 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometStringExpressionBenchmark.scala +++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometStringExpressionBenchmark.scala @@ -59,8 +59,8 @@ object CometStringExpressionBenchmark extends CometBenchmarkBase { config.name, values, config.query, - extraConfigs, - isAnsiMode = false) + isAnsiMode = false, + extraConfigs) } } } From 8cf710c12dcb552f7fac44ddf5a06895a8055041 Mon Sep 17 00:00:00 2001 From: B Vadlamani Date: Mon, 29 Dec 2025 11:32:23 -0800 Subject: [PATCH 4/5] ansi_suport_benchmarking --- .../org/apache/spark/sql/benchmark/CometCastBenchmark.scala | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometCastBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometCastBenchmark.scala index c4bfeb28ba..1868685559 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometCastBenchmark.scala +++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometCastBenchmark.scala @@ -85,9 +85,7 @@ object CometCastBenchmark extends CometBenchmarkBase { val query = s"SELECT $functionSQL FROM parquetV1Table" val name = s"Cast function from : ${fromDataType} to : ${toDataType} , ansi mode enabled : ${isAnsiMode}" - - val extraConfigs = Map(SQLConf.ANSI_ENABLED.key -> isAnsiMode.toString) - + runExpressionBenchmark(name, values, query, isAnsiMode) } } From 593b7889d9cabdf3af045443a6f8696e91259eff Mon 
Sep 17 00:00:00 2001 From: B Vadlamani Date: Mon, 29 Dec 2025 11:41:45 -0800 Subject: [PATCH 5/5] ansi_support_benchmarking_fmt --- .../org/apache/spark/sql/benchmark/CometCastBenchmark.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometCastBenchmark.scala b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometCastBenchmark.scala index 1868685559..b4a3c13ad0 100644 --- a/spark/src/test/scala/org/apache/spark/sql/benchmark/CometCastBenchmark.scala +++ b/spark/src/test/scala/org/apache/spark/sql/benchmark/CometCastBenchmark.scala @@ -85,7 +85,7 @@ object CometCastBenchmark extends CometBenchmarkBase { val query = s"SELECT $functionSQL FROM parquetV1Table" val name = s"Cast function from : ${fromDataType} to : ${toDataType} , ansi mode enabled : ${isAnsiMode}" - + runExpressionBenchmark(name, values, query, isAnsiMode) } }