Skip to content

Commit

Permalink
[SPARK-48659][SQL][TESTS] Unify v1 and v2 ALTER TABLE .. SET TBLPROPE…
Browse files Browse the repository at this point in the history
…RTIES tests

### What changes were proposed in this pull request?
- Move parser tests from `o.a.s.s.c.p.DDLParserSuite` and `o.a.s.s.e.c.DDLParserSuite` to `AlterTableSetTblPropertiesParserSuite`.
- Port and refactor DS v1 tests from `DDLSuite` and `HiveDDLSuite` to `v1.AlterTableSetTblPropertiesBase` and to `v1.AlterTableSetTblPropertiesSuite`.
- Add a test for DSv2 `ALTER TABLE .. SET TBLPROPERTIES` to `v2.AlterTableSetTblPropertiesSuite`.

### Why are the changes needed?
- To improve test coverage.
- Align with other similar tests, e.g., `AlterTableRename*`.

### Does this PR introduce _any_ user-facing change?
No.

### How was this patch tested?
- Add new UTs & update existing UTs.
-  By running new test suites:
    ```
    $ build/sbt -Phive -Phive-thriftserver "test:testOnly *AlterTableSetTblPropertiesSuite"
    $ build/sbt -Phive -Phive-thriftserver "test:testOnly *AlterTableSetTblPropertiesParserSuite"
    ```

### Was this patch authored or co-authored using generative AI tooling?
No.

Closes apache#47018 from panbingkun/SPARK-48659.

Authored-by: panbingkun <[email protected]>
Signed-off-by: Wenchen Fan <[email protected]>
  • Loading branch information
panbingkun authored and cloud-fan committed Jun 21, 2024
1 parent 3469ec6 commit cd8bf11
Show file tree
Hide file tree
Showing 9 changed files with 263 additions and 62 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -1075,19 +1075,11 @@ class DDLParserSuite extends AnalysisTest {
ifExists = true))
}

// ALTER TABLE table_name SET TBLPROPERTIES ('comment' = new_comment);
// ALTER TABLE table_name UNSET TBLPROPERTIES [IF EXISTS] ('comment', 'key');
test("alter table: alter table properties") {
val sql1_table = "ALTER TABLE table_name SET TBLPROPERTIES ('test' = 'test', " +
"'comment' = 'new_comment')"
val sql2_table = "ALTER TABLE table_name UNSET TBLPROPERTIES ('comment', 'test')"
val sql3_table = "ALTER TABLE table_name UNSET TBLPROPERTIES IF EXISTS ('comment', 'test')"

comparePlans(
parsePlan(sql1_table),
SetTableProperties(
UnresolvedTable(Seq("table_name"), "ALTER TABLE ... SET TBLPROPERTIES", true),
Map("test" -> "test", "comment" -> "new_comment")))
comparePlans(
parsePlan(sql2_table),
UnsetTableProperties(
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.execution.command

import org.apache.spark.SparkThrowable
import org.apache.spark.sql.catalyst.analysis.{AnalysisTest, UnresolvedTable}
import org.apache.spark.sql.catalyst.parser.CatalystSqlParser.parsePlan
import org.apache.spark.sql.catalyst.parser.ParseException
import org.apache.spark.sql.catalyst.plans.logical.SetTableProperties
import org.apache.spark.sql.test.SharedSparkSession

class AlterTableSetTblPropertiesParserSuite extends AnalysisTest with SharedSparkSession {

  /** Runs `sqlText` end-to-end and returns the `ParseException` it is expected to raise. */
  private def parseException(sqlText: String): SparkThrowable = {
    intercept[ParseException](sql(sqlText).collect())
  }

  // ALTER TABLE table_name SET TBLPROPERTIES ('comment' = new_comment);
  test("alter table: alter table properties") {
    val setPropsSql = "ALTER TABLE table_name SET TBLPROPERTIES ('test' = 'test', " +
      "'comment' = 'new_comment')"
    // The command should resolve to a SetTableProperties node over an unresolved table.
    val expectedPlan = SetTableProperties(
      UnresolvedTable(Seq("table_name"), "ALTER TABLE ... SET TBLPROPERTIES", true),
      Map("test" -> "test", "comment" -> "new_comment"))
    comparePlans(parsePlan(setPropsSql), expectedPlan)
  }

  test("alter table - property values must be set") {
    // A key without a value is a parse-time error for SET TBLPROPERTIES.
    val sqlText = "ALTER TABLE my_tab SET TBLPROPERTIES('key_without_value', 'key_with_value'='x')"
    checkError(
      exception = parseException(sqlText),
      errorClass = "_LEGACY_ERROR_TEMP_0035",
      parameters = Map("message" -> "Values must be specified for key(s): [key_without_value]"),
      context = ExpectedContext(fragment = sqlText, start = 0, stop = 78))
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,69 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.execution.command

import org.apache.spark.sql.{AnalysisException, QueryTest}
import org.apache.spark.sql.catalyst.TableIdentifier

/**
 * This base suite contains unified tests for the `ALTER TABLE .. SET TBLPROPERTIES`
 * command that check V1 and V2 table catalogs. The tests that cannot run for all supported
 * catalogs are located in more specific test suites:
 *
 *   - V2 table catalog tests:
 *     `org.apache.spark.sql.execution.command.v2.AlterTableSetTblPropertiesSuite`
 *   - V1 table catalog tests:
 *     `org.apache.spark.sql.execution.command.v1.AlterTableSetTblPropertiesSuiteBase`
 *   - V1 In-Memory catalog:
 *     `org.apache.spark.sql.execution.command.v1.AlterTableSetTblPropertiesSuite`
 *   - V1 Hive External catalog:
 *     `org.apache.spark.sql.hive.execution.command.AlterTableSetTblPropertiesSuite`
 */
trait AlterTableSetTblPropertiesSuiteBase extends QueryTest with DDLCommandTestUtils {
  override val command = "ALTER TABLE .. SET TBLPROPERTIES"

  /** Asserts that the table identified by `tableIdent` has exactly `expectedTblProps`. */
  def checkTblProps(tableIdent: TableIdentifier, expectedTblProps: Map[String, String]): Unit

  test("alter table set tblproperties") {
    withNamespaceAndTable("ns", "tbl") { t =>
      sql(s"CREATE TABLE $t (col1 int, col2 string, a int, b int) $defaultUsing")
      val tableIdent = TableIdentifier("tbl", Some("ns"), Some(catalog))

      // A freshly created table carries no user-set properties.
      checkTblProps(tableIdent, Map.empty[String, String])

      // Set properties through the fully qualified table name.
      val threeProps = Map("k1" -> "v1", "k2" -> "v2", "k3" -> "v3")
      sql(s"ALTER TABLE $t SET TBLPROPERTIES ('k1' = 'v1', 'k2' = 'v2', 'k3' = 'v3')")
      checkTblProps(tableIdent, threeProps)

      // The bare table name resolves against the current namespace after USE.
      sql(s"USE $catalog.ns")
      sql(s"ALTER TABLE tbl SET TBLPROPERTIES ('k1' = 'v1', 'k2' = 'v2', 'k3' = 'v3')")
      checkTblProps(tableIdent, threeProps)

      // Re-setting a subset overwrites those keys and leaves the rest untouched.
      sql(s"ALTER TABLE $t SET TBLPROPERTIES ('k1' = 'v1', 'k2' = 'v8')")
      checkTblProps(tableIdent, threeProps + ("k2" -> "v8"))

      // Altering a non-existent table is an analysis-time error.
      checkError(
        exception = intercept[AnalysisException] {
          sql("ALTER TABLE does_not_exist SET TBLPROPERTIES ('winner' = 'loser')")
        },
        errorClass = "TABLE_OR_VIEW_NOT_FOUND",
        parameters = Map("relationName" -> "`does_not_exist`"),
        context = ExpectedContext(fragment = "does_not_exist", start = 12, stop = 25))
    }
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -107,18 +107,6 @@ class DDLParserSuite extends AnalysisTest with SharedSparkSession {
stop = 98))
}

test("alter table - property values must be set") {
val sql = "ALTER TABLE my_tab SET TBLPROPERTIES('key_without_value', 'key_with_value'='x')"
checkError(
exception = parseException(sql),
errorClass = "_LEGACY_ERROR_TEMP_0035",
parameters = Map("message" -> "Values must be specified for key(s): [key_without_value]"),
context = ExpectedContext(
fragment = sql,
start = 0,
stop = 78))
}

test("alter table unset properties - property values must NOT be set") {
val sql = "ALTER TABLE my_tab UNSET TBLPROPERTIES('key_without_value', 'key_with_value'='x')"
checkError(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -327,10 +327,6 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {

protected val reversedProperties = Seq(PROP_OWNER)

test("alter table: set properties (datasource table)") {
testSetProperties(isDatasourceTable = true)
}

test("alter table: unset properties (datasource table)") {
testUnsetProperties(isDatasourceTable = true)
}
Expand Down Expand Up @@ -1117,40 +1113,6 @@ abstract class DDLSuite extends QueryTest with DDLSuiteBase {
)
}

protected def testSetProperties(isDatasourceTable: Boolean): Unit = {
if (!isUsingHiveMetastore) {
assert(isDatasourceTable, "InMemoryCatalog only supports data source tables")
}
val catalog = spark.sessionState.catalog
val tableIdent = TableIdentifier("tab1", Some("dbx"))
createDatabase(catalog, "dbx")
createTable(catalog, tableIdent, isDatasourceTable)
def getProps: Map[String, String] = {
if (isUsingHiveMetastore) {
normalizeCatalogTable(catalog.getTableMetadata(tableIdent)).properties
} else {
catalog.getTableMetadata(tableIdent).properties
}
}
assert(getProps.isEmpty)
// set table properties
sql("ALTER TABLE dbx.tab1 SET TBLPROPERTIES ('andrew' = 'or14', 'kor' = 'bel')")
assert(getProps == Map("andrew" -> "or14", "kor" -> "bel"))
// set table properties without explicitly specifying database
catalog.setCurrentDatabase("dbx")
sql("ALTER TABLE tab1 SET TBLPROPERTIES ('kor' = 'belle', 'kar' = 'bol')")
assert(getProps == Map("andrew" -> "or14", "kor" -> "belle", "kar" -> "bol"))
// table to alter does not exist
checkError(
exception = intercept[AnalysisException] {
sql("ALTER TABLE does_not_exist SET TBLPROPERTIES ('winner' = 'loser')")
},
errorClass = "TABLE_OR_VIEW_NOT_FOUND",
parameters = Map("relationName" -> "`does_not_exist`"),
context = ExpectedContext(fragment = "does_not_exist", start = 12, stop = 25)
)
}

protected def testUnsetProperties(isDatasourceTable: Boolean): Unit = {
if (!isUsingHiveMetastore) {
assert(isDatasourceTable, "InMemoryCatalog only supports data source tables")
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.execution.command.v1

import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.execution.command
import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION

/**
 * This base suite contains unified tests for the `ALTER TABLE .. SET TBLPROPERTIES`
 * command that check V1 table catalogs. The tests that cannot run for all V1 catalogs
 * are located in more specific test suites:
 *
 *   - V1 In-Memory catalog:
 *     `org.apache.spark.sql.execution.command.v1.AlterTableSetTblPropertiesSuite`
 *   - V1 Hive External catalog:
 *     `org.apache.spark.sql.hive.execution.command.AlterTableSetTblPropertiesSuite`
 */
trait AlterTableSetTblPropertiesSuiteBase extends command.AlterTableSetTblPropertiesSuiteBase {

  private[sql] lazy val sessionCatalog = spark.sessionState.catalog

  // Bookkeeping keys the Hive metastore adds on its own; the tests must not be
  // sensitive to them when comparing property maps.
  private val metastoreManagedKeys = Seq("transient_lastDdlTime")

  private def isUsingHiveMetastore: Boolean =
    spark.sparkContext.conf.get(CATALOG_IMPLEMENTATION) == "hive"

  /** Drops metastore-managed keys so only user-set properties are compared. */
  private def normalizeTblProps(props: Map[String, String]): Map[String, String] =
    props.filterNot { case (key, _) => metastoreManagedKeys.contains(key) }

  private def getTableProperties(tableIdent: TableIdentifier): Map[String, String] =
    sessionCatalog.getTableMetadata(tableIdent).properties

  override def checkTblProps(tableIdent: TableIdentifier,
      expectedTblProps: Map[String, String]): Unit = {
    val actualTblProps = getTableProperties(tableIdent)
    // Only the Hive metastore injects extra properties that need normalizing.
    val comparableProps =
      if (isUsingHiveMetastore) normalizeTblProps(actualTblProps) else actualTblProps
    assert(comparableProps == expectedTblProps)
  }
}

/**
 * The class contains tests for the `ALTER TABLE .. SET TBLPROPERTIES` command to check
 * the V1 In-Memory table catalog; all test cases are inherited from
 * `AlterTableSetTblPropertiesSuiteBase`.
 */
class AlterTableSetTblPropertiesSuite
  extends AlterTableSetTblPropertiesSuiteBase with CommandSuiteBase
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.execution.command.v2

import scala.jdk.CollectionConverters._

import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.connector.catalog.{Identifier, Table}
import org.apache.spark.sql.connector.catalog.CatalogV2Implicits.CatalogHelper
import org.apache.spark.sql.execution.command

/**
 * The class contains tests for the `ALTER TABLE .. SET TBLPROPERTIES` command to
 * check V2 table catalogs.
 */
class AlterTableSetTblPropertiesSuite
  extends command.AlterTableSetTblPropertiesSuiteBase with CommandSuiteBase {

  // V2 catalogs may surface reserved properties ("provider", "owner") that the
  // unified tests never set; strip them before comparing.
  private def normalizeTblProps(props: Map[String, String]): Map[String, String] = {
    props.filterNot(p => Seq("provider", "owner").contains(p._1))
  }

  /**
   * Loads the V2 [[Table]] for `tableIdent` by resolving its catalog and namespace
   * from the identifier's name parts.
   *
   * Note: the parameter was previously misspelled `tableIndent`; renamed to
   * `tableIdent` (private method, no external callers).
   */
  private def getTableMetadata(tableIdent: TableIdentifier): Table = {
    val nameParts = tableIdent.nameParts
    val v2Catalog = spark.sessionState.catalogManager.catalog(nameParts.head).asTableCatalog
    val namespace = nameParts.drop(1).init.toArray
    v2Catalog.loadTable(Identifier.of(namespace, nameParts.last))
  }

  override def checkTblProps(tableIdent: TableIdentifier,
      expectedTblProps: Map[String, String]): Unit = {
    val actualTblProps = getTableMetadata(tableIdent).properties.asScala.toMap
    assert(normalizeTblProps(actualTblProps) === expectedTblProps)
  }
}
Original file line number Diff line number Diff line change
Expand Up @@ -154,10 +154,6 @@ class HiveDDLSuite
fs.exists(filesystemPath)
}

test("alter table: set properties") {
testSetProperties(isDatasourceTable = false)
}

test("alter table: unset properties") {
testUnsetProperties(isDatasourceTable = false)
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.spark.sql.hive.execution.command

import org.apache.spark.sql.execution.command.v1

/**
 * The class contains tests for the `ALTER TABLE .. SET TBLPROPERTIES` command to check
 * V1 Hive external table catalog. All test cases are inherited from
 * `v1.AlterTableSetTblPropertiesSuiteBase`; `CommandSuiteBase` presumably configures the
 * Hive external catalog for this package — confirm against its definition.
 */
class AlterTableSetTblPropertiesSuite
  extends v1.AlterTableSetTblPropertiesSuiteBase with CommandSuiteBase

0 comments on commit cd8bf11

Please sign in to comment.