From 246fcf3eb42d8a60b4204457fcab6d5c7e1f91a6 Mon Sep 17 00:00:00 2001
From: Vitalii Li
Date: Mon, 20 Nov 2023 08:16:32 +0000
Subject: [PATCH] [SPARK] Companion change to SPARK-45828 to remove use of
 deprecated methods

- [x] Spark
- [ ] Standalone
- [ ] Flink
- [ ] Kernel
- [ ] Other (fill in here)

This is a companion change for https://github.com/apache/spark/pull/43708.
It replaces uses of the deprecated methods that take a `Symbol` parameter
with the equivalent methods that take a `String` parameter.

Existing unit tests

No

Closes delta-io/delta#2303

GitOrigin-RevId: a3cf851bcb1a8fb163d5748473b731b9e6857cbe
---
 .../sql/delta/ConvertToDeltaSuiteBase.scala   |  2 +-
 .../spark/sql/delta/DeltaSinkSuite.scala      |  6 ++---
 .../apache/spark/sql/delta/DeltaSuite.scala   | 24 +++++++++----------
 .../delta/ShowDeltaTableColumnsSuite.scala    |  2 +-
 .../optimize/OptimizeCompactionSuite.scala    |  6 ++---
 .../delta/optimize/OptimizeZOrderSuite.scala  |  8 +++----
 6 files changed, 24 insertions(+), 24 deletions(-)

diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/ConvertToDeltaSuiteBase.scala b/spark/src/test/scala/org/apache/spark/sql/delta/ConvertToDeltaSuiteBase.scala
index 97add2d4dea..ad20154d321 100644
--- a/spark/src/test/scala/org/apache/spark/sql/delta/ConvertToDeltaSuiteBase.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/delta/ConvertToDeltaSuiteBase.scala
@@ -419,7 +419,7 @@ trait ConvertToDeltaSuiteBase extends ConvertToDeltaSuiteBaseCommons
       q.stop()
 
       // Add non-streaming data: this should not be ignored in conversion.
-      spark.range(11, 21).select('id.cast("int") as 'col1)
+      spark.range(11, 21).select('id.cast("int") as "col1")
         .write.mode("append").parquet(dataLocation)
 
       withSQLConf(("spark.databricks.delta.convert.useMetadataLog", "false")) {
diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSinkSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSinkSuite.scala
index 98647f88ea8..22d8b563207 100644
--- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSinkSuite.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSinkSuite.scala
@@ -358,7 +358,7 @@ class DeltaSinkSuite
 
     val e = intercept[AnalysisException] {
       spark.range(100)
-        .select('id.cast("integer"), 'id % 4 as 'by4, 'id.cast("integer") * 1000 as 'value)
+        .select('id.cast("integer"), 'id % 4 as "by4", 'id.cast("integer") * 1000 as "value")
         .write
         .format("delta")
         .partitionBy("id", "by4")
@@ -393,7 +393,7 @@ class DeltaSinkSuite
     }
 
     val e = intercept[AnalysisException] {
-      spark.range(100).select('id, ('id * 3).cast("string") as 'value)
+      spark.range(100).select('id, ('id * 3).cast("string") as "value")
         .write
         .partitionBy("id")
         .format("delta")
@@ -417,7 +417,7 @@ class DeltaSinkSuite
       .writeStream
       .option("checkpointLocation", checkpointDir.getCanonicalPath)
       .format("delta")
-    spark.range(100).select('id, ('id * 3).cast("string") as 'value)
+    spark.range(100).select('id, ('id * 3).cast("string") as "value")
       .write
      .format("delta")
       .mode("append")
diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSuite.scala
index 1945b5ff0de..717cd1fbc5f 100644
--- a/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSuite.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/delta/DeltaSuite.scala
@@ -1193,7 +1193,7 @@ class DeltaSuite extends QueryTest
         assert(tempDir.delete())
       }
 
-      spark.range(100).select('id, 'id % 4 as 'by4, 'id % 8 as 'by8)
+      spark.range(100).select('id, 'id % 4 as "by4", 'id % 8 as "by8")
         .write
         .format("delta")
         .partitionBy("by4", "by8")
@@ -1213,7 +1213,7 @@ class DeltaSuite extends QueryTest
         assert(tempDir.delete())
       }
 
-      spark.range(100).select('id, 'id % 4 as 'by4)
+      spark.range(100).select('id, 'id % 4 as "by4")
         .write
         .format("delta")
         .partitionBy("by4")
@@ -1266,14 +1266,14 @@ class DeltaSuite extends QueryTest
         assert(tempDir.delete())
       }
 
-      spark.range(100).select('id, 'id % 4 as 'by4, 'id % 8 as 'by8)
+      spark.range(100).select('id, 'id % 4 as "by4", 'id % 8 as "by8")
         .write
         .format("delta")
         .partitionBy("by4", "by8")
         .save(tempDir.toString)
 
       val e = intercept[AnalysisException] {
-        spark.range(100).select('id, 'id % 4 as 'by4)
+        spark.range(100).select('id, 'id % 4 as "by4")
           .write
           .format("delta")
           .partitionBy("by4")
@@ -1290,13 +1290,13 @@ class DeltaSuite extends QueryTest
         assert(tempDir.delete())
       }
 
-      spark.range(100).select('id, ('id * 3).cast("string") as 'value)
+      spark.range(100).select('id, ('id * 3).cast("string") as "value")
         .write
         .format("delta")
         .save(tempDir.toString)
 
       val e = intercept[AnalysisException] {
-        spark.range(100).select('id, 'id * 3 as 'value)
+        spark.range(100).select('id, 'id * 3 as "value")
           .write
           .format("delta")
           .mode("append")
@@ -1312,7 +1312,7 @@ class DeltaSuite extends QueryTest
         assert(tempDir.delete())
       }
 
-      spark.range(100).select('id, 'id % 4 as 'by4)
+      spark.range(100).select('id, 'id % 4 as "by4")
         .write
         .format("delta")
         .partitionBy("by4")
@@ -1323,7 +1323,7 @@ class DeltaSuite extends QueryTest
       val deltaLog = loadDeltaLog(tempDir.getAbsolutePath)
       assertPartitionExists("by4", deltaLog, files)
 
-      spark.range(101, 200).select('id, 'id % 4 as 'by4, 'id % 8 as 'by8)
+      spark.range(101, 200).select('id, 'id % 4 as "by4", 'id % 8 as "by8")
         .write
         .format("delta")
         .option(DeltaOptions.MERGE_SCHEMA_OPTION, "true")
@@ -1332,7 +1332,7 @@ class DeltaSuite extends QueryTest
 
       checkAnswer(
         spark.read.format("delta").load(tempDir.toString),
-        spark.range(101, 200).select('id, 'id % 4 as 'by4, 'id % 8 as 'by8))
+        spark.range(101, 200).select('id, 'id % 4 as "by4", 'id % 8 as "by8"))
     }
   }
@@ -1342,7 +1342,7 @@ class DeltaSuite extends QueryTest
         assert(tempDir.delete())
       }
 
-      spark.range(100).select('id, 'id % 4 as 'by4)
+      spark.range(100).select('id, 'id % 4 as "by4")
         .write
         .format("delta")
         .partitionBy("by4")
@@ -1354,7 +1354,7 @@ class DeltaSuite extends QueryTest
       assertPartitionExists("by4", deltaLog, files)
 
       val e = intercept[AnalysisException] {
-        spark.range(101, 200).select('id, 'id % 4 as 'by4, 'id % 8 as 'by8)
+        spark.range(101, 200).select('id, 'id % 4 as "by4", 'id % 8 as "by8")
           .write
           .format("delta")
           .partitionBy("by4", "by8")
@@ -1374,7 +1374,7 @@ class DeltaSuite extends QueryTest
       }
 
      val e = intercept[AnalysisException] {
-        spark.range(100).select('id, 'id % 4 as 'by4)
+        spark.range(100).select('id, 'id % 4 as "by4")
          .write
          .format("delta")
          .partitionBy("by4", "id")
diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/ShowDeltaTableColumnsSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/ShowDeltaTableColumnsSuite.scala
index 479210590a9..f291e616d38 100644
--- a/spark/src/test/scala/org/apache/spark/sql/delta/ShowDeltaTableColumnsSuite.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/delta/ShowDeltaTableColumnsSuite.scala
@@ -164,7 +164,7 @@ class ShowDeltaTableColumnsSuite extends QueryTest
     withTempDir { tempDir =>
       (70.to(79).seq ++ 75.to(79).seq)
         .toDF("id")
-        .withColumn("nested", struct(struct('id + 2 as 'b, 'id + 3 as 'c) as 'sub))
+        .withColumn("nested", struct(struct('id + 2 as "b", 'id + 3 as "c") as "sub"))
         .write
         .format("delta")
         .save(tempDir.toString)
diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/optimize/OptimizeCompactionSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/optimize/OptimizeCompactionSuite.scala
index 56237659a24..63c551326c9 100644
--- a/spark/src/test/scala/org/apache/spark/sql/delta/optimize/OptimizeCompactionSuite.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/delta/optimize/OptimizeCompactionSuite.scala
@@ -411,7 +411,7 @@ trait OptimizeCompactionSuiteBase extends QueryTest
     Seq(10, 100).foreach { count =>
       appendToDeltaTable(
         spark.range(count)
-          .select('id, lit("2017-10-10").cast("date") as 'date, 'id % 5 as 'part),
+          .select('id, lit("2017-10-10").cast("date") as "date", 'id % 5 as "part"),
         path,
         Some(partitionColumns))
     }
@@ -486,7 +486,7 @@ trait OptimizeCompactionSuiteBase extends QueryTest
   test("optimize command with multiple partition predicates") {
     withTempDir { tempDir =>
       def writeData(count: Int): Unit = {
-        spark.range(count).select('id, lit("2017-10-10").cast("date") as 'date, 'id % 5 as 'part)
+        spark.range(count).select('id, lit("2017-10-10").cast("date") as "date", 'id % 5 as "part")
           .write
           .partitionBy("date", "part")
           .format("delta")
@@ -511,7 +511,7 @@ trait OptimizeCompactionSuiteBase extends QueryTest
   test("optimize command with multiple partition predicates with multiple where") {
     withTempDir { tempDir =>
       def writeData(count: Int): Unit = {
-        spark.range(count).select('id, lit("2017-10-10").cast("date") as 'date, 'id % 5 as 'part)
+        spark.range(count).select('id, lit("2017-10-10").cast("date") as "date", 'id % 5 as "part")
           .write
           .partitionBy("date", "part")
           .format("delta")
diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/optimize/OptimizeZOrderSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/optimize/OptimizeZOrderSuite.scala
index ce8da97586c..2482eccc69a 100644
--- a/spark/src/test/scala/org/apache/spark/sql/delta/optimize/OptimizeZOrderSuite.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/delta/optimize/OptimizeZOrderSuite.scala
@@ -54,7 +54,7 @@ trait OptimizeZOrderSuiteBase extends OptimizePartitionTableHelper
   test("optimize command: checks existence of interleaving columns") {
     withTempDir { tempDir =>
       Seq(1, 2, 3).toDF("value")
-        .select('value, 'value % 2 as 'id, 'value % 3 as 'id2)
+        .select('value, 'value % 2 as "id", 'value % 3 as "id2")
         .write
         .format("delta")
         .save(tempDir.toString)
@@ -68,7 +68,7 @@ trait OptimizeZOrderSuiteBase extends OptimizePartitionTableHelper
   test("optimize command: interleaving columns can't be partitioning columns") {
     withTempDir { tempDir =>
       Seq(1, 2, 3).toDF("value")
-        .select('value, 'value % 2 as 'id, 'value % 3 as 'id2)
+        .select('value, 'value % 2 as "id", 'value % 3 as "id2")
         .write
         .format("delta")
         .partitionBy("id")
@@ -143,7 +143,7 @@ trait OptimizeZOrderSuiteBase extends OptimizePartitionTableHelper
     withTempDir { tempDir =>
       (0.to(79).seq ++ 40.to(79).seq ++ 60.to(79).seq ++ 70.to(79).seq ++ 75.to(79).seq)
         .toDF("id")
-        .withColumn("nested", struct(struct('id + 2 as 'b, 'id + 3 as 'c) as 'sub))
+        .withColumn("nested", struct(struct('id + 2 as "b", 'id + 3 as "c") as "sub"))
         .write
         .format("delta")
         .save(tempDir.toString)
@@ -164,7 +164,7 @@ trait OptimizeZOrderSuiteBase extends OptimizePartitionTableHelper
         "1", DELTA_OPTIMIZE_ZORDER_COL_STAT_CHECK.key -> "true") {
       val data = Seq(1, 2, 3).toDF("id")
       data.withColumn("nested",
-        struct(struct('id + 1 as 'p1, 'id + 2 as 'p2) as 'a, 'id + 3 as 'b))
+        struct(struct('id + 1 as "p1", 'id + 2 as "p2") as "a", 'id + 3 as "b"))
        .write
        .format("delta")
        .save(tempDir.getAbsolutePath)