From 95aff65fab185e297d4eda94c015eaae0322df6c Mon Sep 17 00:00:00 2001
From: Zhipeng Mao
Date: Mon, 19 Aug 2024 19:32:07 +0200
Subject: [PATCH] [SPARK] Refactor IdentityColumnTestUtils (#3568)

#### Which Delta project/connector is this regarding?

- [x] Spark
- [ ] Standalone
- [ ] Flink
- [ ] Kernel
- [ ] Other (fill in here)

## Description

This PR is part of https://github.com/delta-io/delta/issues/1959. It refactors `IdentityColumnTestUtils` to reuse `createTableWithIdColAndIntValueCol` for table creation and to unify the column names used in identity column tests (see the sketch after the diff below).

## How was this patch tested?

This is a test-only change.

## Does this PR introduce _any_ user-facing changes?

No.
---
 .../spark/sql/delta/IdentityColumnSuite.scala    |  8 ++++----
 .../sql/delta/IdentityColumnTestUtils.scala      | 18 ++++++------------
 2 files changed, 10 insertions(+), 16 deletions(-)

diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/IdentityColumnSuite.scala b/spark/src/test/scala/org/apache/spark/sql/delta/IdentityColumnSuite.scala
index 9b005b483c7..e3bbd0bc955 100644
--- a/spark/src/test/scala/org/apache/spark/sql/delta/IdentityColumnSuite.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/delta/IdentityColumnSuite.scala
@@ -384,9 +384,9 @@ trait IdentityColumnSuiteBase extends IdentityColumnTestUtils {
     withTable(tableName) {
       generateTableWithIdentityColumn(tableName)
       sql(s"RESTORE TABLE $tableName TO VERSION AS OF 3")
-      sql(s"INSERT INTO $tableName (val) VALUES (6)")
+      sql(s"INSERT INTO $tableName (value) VALUES (6)")
       checkAnswer(
-        sql(s"SELECT key, val FROM $tableName ORDER BY val ASC"),
+        sql(s"SELECT id, value FROM $tableName ORDER BY value ASC"),
         Seq(Row(0, 0), Row(1, 1), Row(2, 2), Row(6, 6))
       )
     }
@@ -397,9 +397,9 @@ trait IdentityColumnSuiteBase extends IdentityColumnTestUtils {
     withTable(tableName) {
       generateTableWithIdentityColumn(tableName, step = -1)
       sql(s"RESTORE TABLE $tableName TO VERSION AS OF 3")
-      sql(s"INSERT INTO $tableName (val) VALUES (6)")
+      sql(s"INSERT INTO $tableName (value) VALUES (6)")
       checkAnswer(
-        sql(s"SELECT key, val FROM $tableName ORDER BY val ASC"),
+        sql(s"SELECT id, value FROM $tableName ORDER BY value ASC"),
         Seq(Row(0, 0), Row(-1, 1), Row(-2, 2), Row(-6, 6))
       )
     }
diff --git a/spark/src/test/scala/org/apache/spark/sql/delta/IdentityColumnTestUtils.scala b/spark/src/test/scala/org/apache/spark/sql/delta/IdentityColumnTestUtils.scala
index 8c1205da987..7dce6fa782a 100644
--- a/spark/src/test/scala/org/apache/spark/sql/delta/IdentityColumnTestUtils.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/delta/IdentityColumnTestUtils.scala
@@ -78,26 +78,20 @@ trait IdentityColumnTestUtils
   }
 
   protected def generateTableWithIdentityColumn(tableName: String, step: Long = 1): Unit = {
-    createTable(
+    createTableWithIdColAndIntValueCol(
       tableName,
-      Seq(
-        IdentityColumnSpec(
-          GeneratedAlways,
-          startsWith = Some(0),
-          incrementBy = Some(step),
-          colName = "key"
-        ),
-        TestColumnSpec(colName = "val", dataType = LongType)
-      )
+      GeneratedAlways,
+      startsWith = Some(0),
+      incrementBy = Some(step)
     )
     // Insert numRows and make sure they assigned sequential IDs
     val numRows = 6
     for (i <- 0 until numRows) {
-      sql(s"INSERT INTO $tableName (val) VALUES ($i)")
+      sql(s"INSERT INTO $tableName (value) VALUES ($i)")
     }
     val expectedAnswer = for (i <- 0 until numRows) yield Row(i * step, i)
-    checkAnswer(sql(s"SELECT * FROM $tableName ORDER BY val ASC"), expectedAnswer)
+    checkAnswer(sql(s"SELECT * FROM $tableName ORDER BY value ASC"), expectedAnswer)
   }
 
   /**
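For readers outside the test codebase: `createTableWithIdColAndIntValueCol` is reused here but not defined in this patch. Below is a minimal sketch of the shape it is assumed to have, inferred from the removed `createTable` call and the call sites above; the parameter names, the `GeneratedAsIdentityType` parameter type, and the `IntegerType` value column (suggested by the helper's name) are assumptions, not the actual implementation.

```scala
import org.apache.spark.sql.types.IntegerType

// Sketch only: inferred from the call sites in this patch, not the real helper.
protected def createTableWithIdColAndIntValueCol(
    tableName: String,
    generatedAsIdentityType: GeneratedAsIdentityType,
    startsWith: Option[Long],
    incrementBy: Option[Long]): Unit = {
  createTable(
    tableName,
    Seq(
      // Identity column unified to "id" (previously "key" in these tests).
      IdentityColumnSpec(
        generatedAsIdentityType,
        startsWith = startsWith,
        incrementBy = incrementBy,
        colName = "id"
      ),
      // Value column unified to "value" (previously "val"); IntegerType is
      // assumed from the helper's name, whereas the old spec used LongType.
      TestColumnSpec(colName = "value", dataType = IntegerType)
    )
  )
}
```

Centralizing table creation this way means each identity-column test exercises the same `id`/`value` schema, so assertions like `SELECT id, value FROM $tableName ORDER BY value ASC` read uniformly across the suite.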