[SPARK] Companion change to SPARK-45828 to remove use of deprecated methods

Which Delta project/connector is this regarding?

- [x] Spark
- [ ] Standalone
- [ ] Flink
- [ ] Kernel
- [ ] Other (fill in here)

Description

This is a companion change for apache/spark#43708. It replaces uses of deprecated methods that take a `Symbol` parameter with the equivalent methods that take a `String` parameter.
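As a minimal sketch of the substitution applied throughout this diff (based on the first hunk below; the object wrapper, local-mode session, and `show()` call are illustrative additions, not part of the change):

```scala
import org.apache.spark.sql.SparkSession

object SymbolToStringAlias extends App {
  // Illustrative local session; the suites in this diff obtain `spark`
  // from their test harness instead.
  val spark = SparkSession.builder()
    .master("local[*]")
    .appName("symbol-to-string-alias")
    .getOrCreate()
  import spark.implicits._ // enables the 'id Symbol-to-Column shorthand

  // Before: aliasing via the deprecated Symbol-parameter method that
  // SPARK-45828 (apache/spark#43708) targets:
  //   spark.range(11, 21).select('id.cast("int") as 'col1)

  // After: the String-parameter overload this commit switches to:
  spark.range(11, 21).select('id.cast("int") as "col1").show()

  spark.stop()
}
```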

How was this patch tested?

Existing unit tests.

Does this PR introduce any user-facing changes?

No.

Closes delta-io#2303

GitOrigin-RevId: a3cf851bcb1a8fb163d5748473b731b9e6857cbe
vitaliili-db authored and allisonport-db committed Nov 20, 2023
1 parent 74965a3 commit 246fcf3
Showing 6 changed files with 24 additions and 24 deletions.
@@ -419,7 +419,7 @@ trait ConvertToDeltaSuiteBase extends ConvertToDeltaSuiteBaseCommons
       q.stop()
 
       // Add non-streaming data: this should not be ignored in conversion.
-      spark.range(11, 21).select('id.cast("int") as 'col1)
+      spark.range(11, 21).select('id.cast("int") as "col1")
         .write.mode("append").parquet(dataLocation)
 
       withSQLConf(("spark.databricks.delta.convert.useMetadataLog", "false")) {
@@ -358,7 +358,7 @@ class DeltaSinkSuite
 
     val e = intercept[AnalysisException] {
       spark.range(100)
-        .select('id.cast("integer"), 'id % 4 as 'by4, 'id.cast("integer") * 1000 as 'value)
+        .select('id.cast("integer"), 'id % 4 as "by4", 'id.cast("integer") * 1000 as "value")
         .write
         .format("delta")
         .partitionBy("id", "by4")
@@ -393,7 +393,7 @@ class DeltaSinkSuite
     }
 
     val e = intercept[AnalysisException] {
-      spark.range(100).select('id, ('id * 3).cast("string") as 'value)
+      spark.range(100).select('id, ('id * 3).cast("string") as "value")
         .write
         .partitionBy("id")
         .format("delta")
@@ -417,7 +417,7 @@ class DeltaSinkSuite
       .writeStream
       .option("checkpointLocation", checkpointDir.getCanonicalPath)
       .format("delta")
-    spark.range(100).select('id, ('id * 3).cast("string") as 'value)
+    spark.range(100).select('id, ('id * 3).cast("string") as "value")
       .write
      .format("delta")
      .mode("append")
24 changes: 12 additions & 12 deletions spark/src/test/scala/org/apache/spark/sql/delta/DeltaSuite.scala
@@ -1193,7 +1193,7 @@ class DeltaSuite extends QueryTest
       assert(tempDir.delete())
     }
 
-    spark.range(100).select('id, 'id % 4 as 'by4, 'id % 8 as 'by8)
+    spark.range(100).select('id, 'id % 4 as "by4", 'id % 8 as "by8")
       .write
       .format("delta")
       .partitionBy("by4", "by8")
@@ -1213,7 +1213,7 @@ class DeltaSuite extends QueryTest
       assert(tempDir.delete())
     }
 
-    spark.range(100).select('id, 'id % 4 as 'by4)
+    spark.range(100).select('id, 'id % 4 as "by4")
      .write
      .format("delta")
      .partitionBy("by4")
@@ -1266,14 +1266,14 @@ class DeltaSuite extends QueryTest
       assert(tempDir.delete())
     }
 
-    spark.range(100).select('id, 'id % 4 as 'by4, 'id % 8 as 'by8)
+    spark.range(100).select('id, 'id % 4 as "by4", 'id % 8 as "by8")
       .write
       .format("delta")
       .partitionBy("by4", "by8")
       .save(tempDir.toString)
 
     val e = intercept[AnalysisException] {
-      spark.range(100).select('id, 'id % 4 as 'by4)
+      spark.range(100).select('id, 'id % 4 as "by4")
         .write
         .format("delta")
         .partitionBy("by4")
@@ -1290,13 +1290,13 @@ class DeltaSuite extends QueryTest
       assert(tempDir.delete())
     }
 
-    spark.range(100).select('id, ('id * 3).cast("string") as 'value)
+    spark.range(100).select('id, ('id * 3).cast("string") as "value")
       .write
       .format("delta")
       .save(tempDir.toString)
 
     val e = intercept[AnalysisException] {
-      spark.range(100).select('id, 'id * 3 as 'value)
+      spark.range(100).select('id, 'id * 3 as "value")
         .write
         .format("delta")
         .mode("append")
@@ -1312,7 +1312,7 @@ class DeltaSuite extends QueryTest
       assert(tempDir.delete())
     }
 
-    spark.range(100).select('id, 'id % 4 as 'by4)
+    spark.range(100).select('id, 'id % 4 as "by4")
       .write
       .format("delta")
       .partitionBy("by4")
@@ -1323,7 +1323,7 @@ class DeltaSuite extends QueryTest
     val deltaLog = loadDeltaLog(tempDir.getAbsolutePath)
     assertPartitionExists("by4", deltaLog, files)
 
-    spark.range(101, 200).select('id, 'id % 4 as 'by4, 'id % 8 as 'by8)
+    spark.range(101, 200).select('id, 'id % 4 as "by4", 'id % 8 as "by8")
       .write
       .format("delta")
       .option(DeltaOptions.MERGE_SCHEMA_OPTION, "true")
@@ -1332,7 +1332,7 @@ class DeltaSuite extends QueryTest
 
     checkAnswer(
       spark.read.format("delta").load(tempDir.toString),
-      spark.range(101, 200).select('id, 'id % 4 as 'by4, 'id % 8 as 'by8))
+      spark.range(101, 200).select('id, 'id % 4 as "by4", 'id % 8 as "by8"))
   }
 }
 
@@ -1342,7 +1342,7 @@ class DeltaSuite extends QueryTest
       assert(tempDir.delete())
     }
 
-    spark.range(100).select('id, 'id % 4 as 'by4)
+    spark.range(100).select('id, 'id % 4 as "by4")
       .write
       .format("delta")
       .partitionBy("by4")
@@ -1354,7 +1354,7 @@ class DeltaSuite extends QueryTest
     assertPartitionExists("by4", deltaLog, files)
 
     val e = intercept[AnalysisException] {
-      spark.range(101, 200).select('id, 'id % 4 as 'by4, 'id % 8 as 'by8)
+      spark.range(101, 200).select('id, 'id % 4 as "by4", 'id % 8 as "by8")
         .write
         .format("delta")
         .partitionBy("by4", "by8")
@@ -1374,7 +1374,7 @@ class DeltaSuite extends QueryTest
     }
 
     val e = intercept[AnalysisException] {
-      spark.range(100).select('id, 'id % 4 as 'by4)
+      spark.range(100).select('id, 'id % 4 as "by4")
        .write
        .format("delta")
        .partitionBy("by4", "id")
@@ -164,7 +164,7 @@ class ShowDeltaTableColumnsSuite extends QueryTest
     withTempDir { tempDir =>
       (70.to(79).seq ++ 75.to(79).seq)
         .toDF("id")
-        .withColumn("nested", struct(struct('id + 2 as 'b, 'id + 3 as 'c) as 'sub))
+        .withColumn("nested", struct(struct('id + 2 as "b", 'id + 3 as "c") as "sub"))
         .write
         .format("delta")
         .save(tempDir.toString)
@@ -411,7 +411,7 @@ trait OptimizeCompactionSuiteBase extends QueryTest
     Seq(10, 100).foreach { count =>
       appendToDeltaTable(
         spark.range(count)
-          .select('id, lit("2017-10-10").cast("date") as 'date, 'id % 5 as 'part),
+          .select('id, lit("2017-10-10").cast("date") as "date", 'id % 5 as "part"),
         path,
         Some(partitionColumns))
     }
@@ -486,7 +486,7 @@ trait OptimizeCompactionSuiteBase extends QueryTest
   test("optimize command with multiple partition predicates") {
     withTempDir { tempDir =>
       def writeData(count: Int): Unit = {
-        spark.range(count).select('id, lit("2017-10-10").cast("date") as 'date, 'id % 5 as 'part)
+        spark.range(count).select('id, lit("2017-10-10").cast("date") as "date", 'id % 5 as "part")
          .write
          .partitionBy("date", "part")
          .format("delta")
@@ -511,7 +511,7 @@ trait OptimizeCompactionSuiteBase extends QueryTest
   test("optimize command with multiple partition predicates with multiple where") {
     withTempDir { tempDir =>
       def writeData(count: Int): Unit = {
-        spark.range(count).select('id, lit("2017-10-10").cast("date") as 'date, 'id % 5 as 'part)
+        spark.range(count).select('id, lit("2017-10-10").cast("date") as "date", 'id % 5 as "part")
          .write
          .partitionBy("date", "part")
          .format("delta")
@@ -54,7 +54,7 @@ trait OptimizeZOrderSuiteBase extends OptimizePartitionTableHelper
   test("optimize command: checks existence of interleaving columns") {
     withTempDir { tempDir =>
       Seq(1, 2, 3).toDF("value")
-        .select('value, 'value % 2 as 'id, 'value % 3 as 'id2)
+        .select('value, 'value % 2 as "id", 'value % 3 as "id2")
         .write
         .format("delta")
         .save(tempDir.toString)
@@ -68,7 +68,7 @@ trait OptimizeZOrderSuiteBase extends OptimizePartitionTableHelper
   test("optimize command: interleaving columns can't be partitioning columns") {
     withTempDir { tempDir =>
       Seq(1, 2, 3).toDF("value")
-        .select('value, 'value % 2 as 'id, 'value % 3 as 'id2)
+        .select('value, 'value % 2 as "id", 'value % 3 as "id2")
         .write
         .format("delta")
         .partitionBy("id")
@@ -143,7 +143,7 @@ trait OptimizeZOrderSuiteBase extends OptimizePartitionTableHelper
     withTempDir { tempDir =>
       (0.to(79).seq ++ 40.to(79).seq ++ 60.to(79).seq ++ 70.to(79).seq ++ 75.to(79).seq)
         .toDF("id")
-        .withColumn("nested", struct(struct('id + 2 as 'b, 'id + 3 as 'c) as 'sub))
+        .withColumn("nested", struct(struct('id + 2 as "b", 'id + 3 as "c") as "sub"))
         .write
         .format("delta")
         .save(tempDir.toString)
@@ -164,7 +164,7 @@ trait OptimizeZOrderSuiteBase extends OptimizePartitionTableHelper
         "1", DELTA_OPTIMIZE_ZORDER_COL_STAT_CHECK.key -> "true") {
       val data = Seq(1, 2, 3).toDF("id")
       data.withColumn("nested",
-        struct(struct('id + 1 as 'p1, 'id + 2 as 'p2) as 'a, 'id + 3 as 'b))
+        struct(struct('id + 1 as "p1", 'id + 2 as "p2") as "a", 'id + 3 as "b"))
         .write
         .format("delta")
         .save(tempDir.getAbsolutePath)
