diff --git a/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/analysis/RewriteViewCommands.scala b/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/analysis/RewriteViewCommands.scala
index a025ec952f7e..2796bba88ff0 100644
--- a/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/analysis/RewriteViewCommands.scala
+++ b/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/analysis/RewriteViewCommands.scala
@@ -26,9 +26,11 @@ import org.apache.spark.sql.catalyst.analysis.ViewUtil.loadView
 import org.apache.spark.sql.catalyst.plans.logical.CreateView
 import org.apache.spark.sql.catalyst.plans.logical.DropView
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.catalyst.plans.logical.ShowViews
 import org.apache.spark.sql.catalyst.plans.logical.views.CreateIcebergView
 import org.apache.spark.sql.catalyst.plans.logical.views.DropIcebergView
 import org.apache.spark.sql.catalyst.plans.logical.views.ResolvedV2View
+import org.apache.spark.sql.catalyst.plans.logical.views.ShowIcebergViews
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.connector.catalog.CatalogManager
 import org.apache.spark.sql.connector.catalog.LookupCatalog
@@ -54,6 +56,10 @@ case class RewriteViewCommands(spark: SparkSession) extends Rule[LogicalPlan] wi
       loadView(resolved.catalog, resolved.identifier)
         .map(v => ResolvedV2View(resolved.catalog.asViewCatalog, resolved.identifier, v))
         .getOrElse(u)
+
+    case ShowViews(UnresolvedNamespace(ns), pattern, output)
+        if isViewCatalog(spark.sessionState.catalogManager.currentCatalog) =>
+      ShowIcebergViews(UnresolvedNamespace(ns), pattern, output)
   }
 
   private def isTempView(nameParts: Seq[String]): Boolean = {
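Note (illustrative, not part of the patch): the rule above only fires when the current catalog is an Iceberg view catalog, so `SHOW VIEWS` keeps its stock Spark behavior everywhere else. A minimal session sketch; the catalog name `local` and namespace `db` are assumptions:

```scala
// Hypothetical spark-shell session, assuming a catalog named "local" that is
// configured with the Iceberg SQL extensions. The analyzer rewrites Spark's
// ShowViews node into ShowIcebergViews before planning, so views stored in
// the Iceberg view catalog are listed alongside temp views.
spark.sql("USE local.db")
spark.sql("SHOW VIEWS").show()
```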
diff --git a/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/views/ShowIcebergViews.scala b/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/views/ShowIcebergViews.scala
new file mode 100644
index 000000000000..b09c27acdc16
--- /dev/null
+++ b/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/views/ShowIcebergViews.scala
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.spark.sql.catalyst.plans.logical.views
+
+import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.catalyst.plans.logical.ShowViews
+import org.apache.spark.sql.catalyst.plans.logical.UnaryCommand
+
+case class ShowIcebergViews(
+    namespace: LogicalPlan,
+    pattern: Option[String],
+    override val output: Seq[Attribute] = ShowViews.getOutputAttrs) extends UnaryCommand {
+  override def child: LogicalPlan = namespace
+
+  override protected def withNewChildInternal(newChild: LogicalPlan): ShowIcebergViews =
+    copy(namespace = newChild)
+}
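Note (illustrative, not part of the patch): `ShowIcebergViews` is a thin `UnaryCommand` whose only child is the namespace plan, which lets Spark's analyzer resolve `UnresolvedNamespace` into `ResolvedNamespace` before the strategy below pattern-matches it. A minimal sketch of the contract such a node must satisfy:

```scala
import org.apache.spark.sql.catalyst.expressions.Attribute
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, UnaryCommand}

// Sketch only (not from the patch): Catalyst transformations rebuild trees by
// copying nodes around new children, so withNewChildInternal must return a
// copy wired to the replacement child, or analyzer rules cannot rewrite the
// namespace child in place.
case class ExampleViewCommand(namespace: LogicalPlan) extends UnaryCommand {
  override def output: Seq[Attribute] = Nil
  override def child: LogicalPlan = namespace
  override protected def withNewChildInternal(newChild: LogicalPlan): ExampleViewCommand =
    copy(namespace = newChild)
}
```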
diff --git a/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeV2ViewExec.scala b/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeV2ViewExec.scala
new file mode 100644
index 000000000000..ab8835aa64a0
--- /dev/null
+++ b/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeV2ViewExec.scala
@@ -0,0 +1,78 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.spark.sql.execution.datasources.v2
+
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.catalyst.util.escapeSingleQuotedString
+import org.apache.spark.sql.connector.catalog.View
+import org.apache.spark.sql.connector.catalog.ViewCatalog
+import org.apache.spark.sql.execution.LeafExecNode
+import scala.collection.JavaConverters._
+
+case class DescribeV2ViewExec(
+    output: Seq[Attribute],
+    view: View,
+    isExtended: Boolean) extends V2CommandExec with LeafExecNode {
+
+  import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
+
+  override protected def run(): Seq[InternalRow] = {
+    if (isExtended) {
+      (describeSchema :+ emptyRow) ++ describeExtended
+    } else {
+      describeSchema
+    }
+  }
+
+  private def describeSchema: Seq[InternalRow] =
+    view.schema().map { column =>
+      toCatalystRow(
+        column.name,
+        column.dataType.simpleString,
+        column.getComment().getOrElse(""))
+    }
+
+  private def emptyRow: InternalRow = toCatalystRow("", "", "")
+
+  private def describeExtended: Seq[InternalRow] = {
+    val outputColumns = view.queryColumnNames.mkString("[", ", ", "]")
+    val properties: Map[String, String] =
+      view.properties.asScala.toMap -- ViewCatalog.RESERVED_PROPERTIES.asScala
+    val viewCatalogAndNamespace: Seq[String] = view.currentCatalog +: view.currentNamespace.toSeq
+    val viewProperties = properties.toSeq.sortBy(_._1).map {
+      case (key, value) =>
+        s"'${escapeSingleQuotedString(key)}' = '${escapeSingleQuotedString(value)}'"
+    }.mkString("[", ", ", "]")
+
+    toCatalystRow("# Detailed View Information", "", "") ::
+      toCatalystRow("Comment", view.properties.getOrDefault(ViewCatalog.PROP_COMMENT, ""), "") ::
+      toCatalystRow("View Text", view.query, "") ::
+      toCatalystRow("View Catalog and Namespace", viewCatalogAndNamespace.quoted, "") ::
+      toCatalystRow("View Query Output Columns", outputColumns, "") ::
+      toCatalystRow("View Properties", viewProperties, "") ::
+      toCatalystRow("Created By", view.properties.getOrDefault(ViewCatalog.PROP_CREATE_ENGINE_VERSION, ""), "") ::
+      Nil
+  }
+
+  override def simpleString(maxFields: Int): String = {
+    "DescribeV2ViewExec"
+  }
+}
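Note (illustrative, not part of the patch): `run()` emits one row per view column, a blank separator row, and then the extended block, mirroring the layout Spark uses for `DESCRIBE EXTENDED` on tables. A hypothetical invocation; the view and catalog names are assumptions:

```scala
// The three-column rows line up with DescribeRelation's
// col_name / data_type / comment output attributes.
spark.sql("DESCRIBE EXTENDED local.db.event_view").show(truncate = false)
// id      int     ""
// data    string  ""
//                          <- emptyRow separator
// # Detailed View Information
// Comment, View Text, View Catalog and Namespace, View Query Output Columns,
// View Properties, Created By
```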
diff --git a/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ExtendedDataSourceV2Strategy.scala b/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ExtendedDataSourceV2Strategy.scala
index b3dd8943919f..0fc9fe23a63c 100644
--- a/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ExtendedDataSourceV2Strategy.scala
+++ b/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ExtendedDataSourceV2Strategy.scala
@@ -27,6 +27,7 @@ import org.apache.spark.sql.SparkSession
 import org.apache.spark.sql.Strategy
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.analysis.ResolvedIdentifier
+import org.apache.spark.sql.catalyst.analysis.ResolvedNamespace
 import org.apache.spark.sql.catalyst.expressions.Expression
 import org.apache.spark.sql.catalyst.expressions.GenericInternalRow
 import org.apache.spark.sql.catalyst.expressions.PredicateHelper
@@ -35,6 +36,7 @@ import org.apache.spark.sql.catalyst.plans.logical.AlterViewAs
 import org.apache.spark.sql.catalyst.plans.logical.Call
 import org.apache.spark.sql.catalyst.plans.logical.CreateOrReplaceBranch
 import org.apache.spark.sql.catalyst.plans.logical.CreateOrReplaceTag
+import org.apache.spark.sql.catalyst.plans.logical.DescribeRelation
 import org.apache.spark.sql.catalyst.plans.logical.DropBranch
 import org.apache.spark.sql.catalyst.plans.logical.DropIdentifierFields
 import org.apache.spark.sql.catalyst.plans.logical.DropPartitionField
@@ -46,10 +48,13 @@ import org.apache.spark.sql.catalyst.plans.logical.ReplacePartitionField
 import org.apache.spark.sql.catalyst.plans.logical.SetIdentifierFields
 import org.apache.spark.sql.catalyst.plans.logical.SetViewProperties
 import org.apache.spark.sql.catalyst.plans.logical.SetWriteDistributionAndOrdering
+import org.apache.spark.sql.catalyst.plans.logical.ShowCreateTable
+import org.apache.spark.sql.catalyst.plans.logical.ShowTableProperties
 import org.apache.spark.sql.catalyst.plans.logical.UnsetViewProperties
 import org.apache.spark.sql.catalyst.plans.logical.views.CreateIcebergView
 import org.apache.spark.sql.catalyst.plans.logical.views.DropIcebergView
 import org.apache.spark.sql.catalyst.plans.logical.views.ResolvedV2View
+import org.apache.spark.sql.catalyst.plans.logical.views.ShowIcebergViews
 import org.apache.spark.sql.connector.catalog.Identifier
 import org.apache.spark.sql.connector.catalog.TableCatalog
 import org.apache.spark.sql.connector.catalog.ViewCatalog
@@ -152,6 +157,18 @@ case class ExtendedDataSourceV2Strategy(spark: SparkSession) extends Strategy wi
       val changes = propertyKeys.map(ViewChange.removeProperty)
       AlterV2ViewExec(catalog, ident, changes) :: Nil
 
+    case d @ DescribeRelation(ResolvedV2View(_, _, view), _, isExtended, _) =>
+      DescribeV2ViewExec(d.output, view, isExtended) :: Nil
+
+    case show @ ShowTableProperties(ResolvedV2View(_, _, view), propertyKey, _) =>
+      ShowV2ViewPropertiesExec(show.output, view, propertyKey) :: Nil
+
+    case show @ ShowIcebergViews(ResolvedNamespace(catalog: ViewCatalog, namespace), pattern, _) =>
+      ShowV2ViewsExec(show.output, catalog, namespace, pattern) :: Nil
+
+    case show @ ShowCreateTable(ResolvedV2View(_, _, view), _, _) =>
+      ShowCreateV2ViewExec(show.output, view) :: Nil
+
     case _ => Nil
   }
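Note (illustrative, not part of the patch): all four new cases match on plans whose view child was already resolved during analysis (a `ResolvedV2View`, or a `ResolvedNamespace` backed by a `ViewCatalog` for `SHOW VIEWS`), so each physical node receives a loaded `View` rather than a raw identifier. A minimal sketch of the `Strategy` contract these cases rely on:

```scala
import org.apache.spark.sql.Strategy
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.SparkPlan

// Sketch only (not from the patch): a Strategy returns physical candidates
// for logical nodes it recognizes, or Nil so the planner falls through to
// the next registered strategy.
object ExampleStrategy extends Strategy {
  override def apply(plan: LogicalPlan): Seq[SparkPlan] = plan match {
    case _ => Nil // a real strategy matches specific logical nodes here
  }
}
```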
diff --git a/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateV2ViewExec.scala b/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateV2ViewExec.scala
new file mode 100644
index 000000000000..210771fed91e
--- /dev/null
+++ b/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateV2ViewExec.scala
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.spark.sql.execution.datasources.v2
+
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.catalyst.util.escapeSingleQuotedString
+import org.apache.spark.sql.connector.catalog.View
+import org.apache.spark.sql.connector.catalog.ViewCatalog
+import org.apache.spark.sql.execution.LeafExecNode
+import scala.collection.JavaConverters._
+
+case class ShowCreateV2ViewExec(output: Seq[Attribute], view: View)
+  extends V2CommandExec with LeafExecNode {
+
+  override protected def run(): Seq[InternalRow] = {
+    val builder = new StringBuilder
+    builder ++= s"CREATE VIEW ${view.name} "
+    showColumns(view, builder)
+    showComment(view, builder)
+    showProperties(view, builder)
+    builder ++= s"AS\n${view.query}\n"
+
+    Seq(toCatalystRow(builder.toString))
+  }
+
+  private def showColumns(view: View, builder: StringBuilder): Unit = {
+    val columns = view.schema().fields
+      .map(x => s"${x.name}${x.getComment().map(c => s" COMMENT '$c'").getOrElse("")}")
+      .mkString("(", ", ", ")")
+    builder ++= columns
+  }
+
+  private def showComment(view: View, builder: StringBuilder): Unit = {
+    Option(view.properties.get(ViewCatalog.PROP_COMMENT))
+      .map(" COMMENT '" + escapeSingleQuotedString(_) + "'\n")
+      .foreach(builder.append)
+  }
+
+  private def showProperties(view: View, builder: StringBuilder): Unit = {
+    val showProps = view.properties.asScala.toMap -- ViewCatalog.RESERVED_PROPERTIES.asScala
+    if (showProps.nonEmpty) {
+      val props = conf.redactOptions(showProps).toSeq.sortBy(_._1).map {
+        case (key, value) =>
+          s"'${escapeSingleQuotedString(key)}' = '${escapeSingleQuotedString(value)}'"
+      }
+
+      builder ++= " TBLPROPERTIES "
+      builder ++= concatByMultiLines(props)
+    }
+  }
+
+  private def concatByMultiLines(iter: Iterable[String]): String = {
+    iter.mkString("(\n  ", ",\n  ", ")\n")
+  }
+
+  override def simpleString(maxFields: Int): String = {
+    "ShowCreateV2ViewExec"
+  }
+}
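Note (illustrative, not part of the patch): the builder assembles `CREATE VIEW <name> (<columns>) [COMMENT ...] [TBLPROPERTIES (...)] AS <query>`, with reserved properties filtered out and the remainder run through `conf.redactOptions` so sensitive values do not leak into the DDL. A hypothetical session; names are assumptions:

```scala
// The single returned row holds the full re-runnable DDL assembled by run().
spark.sql("SHOW CREATE TABLE local.db.event_view").show(truncate = false)
// CREATE VIEW local.db.event_view (id, data) TBLPROPERTIES (
//   'provider' = 'iceberg')
// AS
// SELECT id, data FROM db.events
```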
diff --git a/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowV2ViewPropertiesExec.scala b/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowV2ViewPropertiesExec.scala
new file mode 100644
index 000000000000..89fafe99efc8
--- /dev/null
+++ b/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowV2ViewPropertiesExec.scala
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.spark.sql.execution.datasources.v2
+
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.connector.catalog.View
+import org.apache.spark.sql.connector.catalog.ViewCatalog
+import org.apache.spark.sql.execution.LeafExecNode
+import scala.collection.JavaConverters._
+
+case class ShowV2ViewPropertiesExec(
+    output: Seq[Attribute],
+    view: View,
+    propertyKey: Option[String]) extends V2CommandExec with LeafExecNode {
+
+  override protected def run(): Seq[InternalRow] = {
+    propertyKey match {
+      case Some(p) =>
+        val propValue =
+          properties.getOrElse(p, s"View ${view.name()} does not have property: $p")
+        Seq(toCatalystRow(p, propValue))
+      case None =>
+        properties.map { case (k, v) => toCatalystRow(k, v) }.toSeq
+    }
+  }
+
+  private def properties = {
+    view.properties.asScala.toMap -- ViewCatalog.RESERVED_PROPERTIES.asScala
+  }
+
+  override def simpleString(maxFields: Int): String = {
+    "ShowV2ViewPropertiesExec"
+  }
+}
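Note (illustrative, not part of the patch): when a specific key is requested but absent, the exec returns a placeholder row instead of failing, matching Spark's behavior for tables. A hypothetical session; names are assumptions:

```scala
spark.sql("SHOW TBLPROPERTIES local.db.event_view (provider)").show()
// +--------+-------+
// |     key|  value|
// +--------+-------+
// |provider|iceberg|
// +--------+-------+

spark.sql("SHOW TBLPROPERTIES local.db.event_view (no.such.key)").show(truncate = false)
// no.such.key | View local.db.event_view does not have property: no.such.key
```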
diff --git a/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowV2ViewsExec.scala b/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowV2ViewsExec.scala
new file mode 100644
index 000000000000..3aa85c3db54a
--- /dev/null
+++ b/spark/v3.5/spark-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowV2ViewsExec.scala
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.spark.sql.execution.datasources.v2
+
+import org.apache.spark.sql.catalyst.InternalRow
+import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.catalyst.util.StringUtils
+import org.apache.spark.sql.connector.catalog.ViewCatalog
+import org.apache.spark.sql.execution.LeafExecNode
+import scala.collection.mutable.ArrayBuffer
+
+case class ShowV2ViewsExec(
+    output: Seq[Attribute],
+    catalog: ViewCatalog,
+    namespace: Seq[String],
+    pattern: Option[String]) extends V2CommandExec with LeafExecNode {
+
+  import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
+
+  override protected def run(): Seq[InternalRow] = {
+    val rows = new ArrayBuffer[InternalRow]()
+
+    // handle GLOBAL VIEWS
+    val globalTemp = session.sessionState.catalog.globalTempViewManager.database
+    if (namespace.nonEmpty && globalTemp == namespace.head) {
+      pattern.map(p => session.sessionState.catalog.globalTempViewManager.listViewNames(p))
+        .getOrElse(session.sessionState.catalog.globalTempViewManager.listViewNames("*"))
+        .foreach(name => rows += toCatalystRow(globalTemp, name, true))
+    } else {
+      val views = catalog.listViews(namespace: _*)
+      views.foreach { view =>
+        if (pattern.forall(StringUtils.filterPattern(Seq(view.name()), _).nonEmpty)) {
+          rows += toCatalystRow(view.namespace().quoted, view.name(), false)
+        }
+      }
+    }
+
+    // include TEMP VIEWS
+    pattern.map(p => session.sessionState.catalog.listLocalTempViews(p))
+      .getOrElse(session.sessionState.catalog.listLocalTempViews("*"))
+      .foreach(v => rows += toCatalystRow(v.database.toArray.quoted, v.table, true))
+
+    rows.toSeq
+  }
+
+  override def simpleString(maxFields: Int): String = {
+    "ShowV2ViewsExec"
+  }
+}
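Note (illustrative, not part of the patch): `run()` draws from up to three sources in the order implemented above: global temp views replace the catalog listing when the target namespace is the global-temp database, otherwise the Iceberg view catalog is listed (with the `LIKE` pattern applied via `StringUtils.filterPattern`), and session-local temp views are always appended. A hypothetical listing; names are assumptions:

```scala
spark.sql("SHOW VIEWS IN local.db LIKE 'event*'").show()
// +---------+----------+-----------+
// |namespace|  viewName|isTemporary|
// +---------+----------+-----------+
// |       db|event_view|      false|
// +---------+----------+-----------+
```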
diff --git a/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestViews.java b/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestViews.java
index b3b6a3d4ca2b..6c2e7eb1d178 100644
--- a/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestViews.java
+++ b/spark/v3.5/spark-extensions/src/test/java/org/apache/iceberg/spark/extensions/TestViews.java
@@ -1033,6 +1033,148 @@ public void alterViewUnsetUnknownProperty() {
         () -> sql("ALTER VIEW %s UNSET TBLPROPERTIES IF EXISTS ('unknown-key')", viewName));
   }
 
+  @Test
+  public void describeView() {
+    String viewName = "describeView";
+
+    sql("CREATE VIEW %s AS SELECT id, data FROM %s WHERE id <= 3", viewName, tableName);
+    assertThat(sql("DESCRIBE %s", viewName))
+        .containsExactlyInAnyOrder(row("id", "int", ""), row("data", "string", ""));
+  }
+
+  @Test
+  public void describeExtendedView() {
+    String viewName = "describeExtendedView";
+    String sql = String.format("SELECT id, data FROM %s WHERE id <= 3", tableName);
+
+    sql(
+        "CREATE VIEW %s (new_id COMMENT 'ID', new_data COMMENT 'DATA') COMMENT 'view comment' AS %s",
+        viewName, sql);
+    assertThat(sql("DESCRIBE EXTENDED %s", viewName))
+        .contains(
+            row("new_id", "int", "ID"),
+            row("new_data", "string", "DATA"),
+            row("Comment", "view comment", ""),
+            row("View Text", sql, ""),
+            row("View Catalog and Namespace", String.format("%s.%s", catalogName, NAMESPACE), ""),
+            row("View Query Output Columns", "[id, data]", ""),
+            row(
+                "View Properties",
+                String.format(
+                    "['format-version' = '1', 'location' = '/%s/%s', 'provider' = 'iceberg']",
+                    NAMESPACE, viewName),
+                ""));
+  }
+
+  @Test
+  public void showViewProperties() {
+    String viewName = "showViewProps";
+
+    sql(
+        "CREATE VIEW %s TBLPROPERTIES ('key1'='val1', 'key2'='val2') AS SELECT id, data FROM %s WHERE id <= 3",
+        viewName, tableName);
+    assertThat(sql("SHOW TBLPROPERTIES %s", viewName))
+        .contains(row("key1", "val1"), row("key2", "val2"));
+  }
+
+  @Test
+  public void showViewPropertiesByKey() {
+    String viewName = "showViewPropsByKey";
+
+    sql("CREATE VIEW %s AS SELECT id, data FROM %s WHERE id <= 3", viewName, tableName);
+    assertThat(sql("SHOW TBLPROPERTIES %s", viewName)).contains(row("provider", "iceberg"));
+
+    assertThat(sql("SHOW TBLPROPERTIES %s (provider)", viewName))
+        .contains(row("provider", "iceberg"));
+
+    assertThat(sql("SHOW TBLPROPERTIES %s (non.existing)", viewName))
+        .contains(
+            row(
+                "non.existing",
+                String.format(
+                    "View %s.%s.%s does not have property: non.existing",
+                    catalogName, NAMESPACE, viewName)));
+  }
+
+  @Test
+  public void showViews() throws NoSuchTableException {
+    insertRows(6);
+    String sql = String.format("SELECT * from %s", tableName);
+    sql("CREATE VIEW v1 AS %s", sql);
+    sql("CREATE VIEW prefixV2 AS %s", sql);
+    sql("CREATE VIEW prefixV3 AS %s", sql);
+    sql("CREATE GLOBAL TEMPORARY VIEW globalViewForListing AS %s", sql);
+    sql("CREATE TEMPORARY VIEW tempViewForListing AS %s", sql);
+
+    assertThat(sql("SHOW VIEWS"))
+        .contains(
+            row(NAMESPACE.toString(), "v1", false),
+            row(NAMESPACE.toString(), "prefixV2", false),
+            row(NAMESPACE.toString(), "prefixV3", false),
+            // Spark stores temp view names case-insensitively by default
+            row("", "tempviewforlisting", true));
+
+    assertThat(sql("SHOW VIEWS IN %s", catalogName))
+        .contains(
+            row(NAMESPACE.toString(), "v1", false),
+            row(NAMESPACE.toString(), "prefixV2", false),
+            row(NAMESPACE.toString(), "prefixV3", false));
+
+    assertThat(sql("SHOW VIEWS LIKE 'pref*'"))
+        .contains(
+            row(NAMESPACE.toString(), "prefixV2", false),
+            row(NAMESPACE.toString(), "prefixV3", false));
+
+    assertThat(sql("SHOW VIEWS LIKE 'non-existing'")).isEmpty();
+
+    assertThat(sql("SHOW VIEWS IN global_temp"))
+        .contains(
+            // Spark stores temp view names case-insensitively by default
+            row("global_temp", "globalviewforlisting", true),
+            row("", "tempviewforlisting", true));
+  }
+
+  @Test
+  public void showCreateSimpleView() {
+    String viewName = "showCreateSimpleView";
+    String sql = String.format("SELECT id, data FROM %s WHERE id <= 3", tableName);
+
+    sql("CREATE VIEW %s AS %s", viewName, sql);
+
+    String expected =
+        String.format(
+            "CREATE VIEW %s.%s.%s (id, data) TBLPROPERTIES (\n"
+                + "  'format-version' = '1',\n"
+                + "  'location' = '/%s/%s',\n"
+                + "  'provider' = 'iceberg')\n"
+                + "AS\n%s\n",
+            catalogName, NAMESPACE, viewName, NAMESPACE, viewName, sql);
+    assertThat(sql("SHOW CREATE TABLE %s", viewName)).containsExactly(row(expected));
+  }
+
+  @Test
+  public void showCreateComplexView() {
+    String viewName = "showCreateComplexView";
+    String sql = String.format("SELECT id, data FROM %s WHERE id <= 3", tableName);
+
+    sql(
+        "CREATE VIEW %s (new_id COMMENT 'ID', new_data COMMENT 'DATA')"
+            + "COMMENT 'view comment' TBLPROPERTIES ('key1'='val1', 'key2'='val2') AS %s",
+        viewName, sql);
+
+    String expected =
+        String.format(
+            "CREATE VIEW %s.%s.%s (new_id COMMENT 'ID', new_data COMMENT 'DATA') COMMENT 'view comment'\n"
+                + " TBLPROPERTIES (\n"
+                + "  'format-version' = '1',\n"
+                + "  'key1' = 'val1',\n"
+                + "  'key2' = 'val2',\n"
+                + "  'location' = '/%s/%s',\n"
+                + "  'provider' = 'iceberg')\n"
+                + "AS\n%s\n",
+            catalogName, NAMESPACE, viewName, NAMESPACE, viewName, sql);
+    assertThat(sql("SHOW CREATE TABLE %s", viewName)).containsExactly(row(expected));
+  }
+
   private void insertRows(int numRows) throws NoSuchTableException {
     List records = Lists.newArrayListWithCapacity(numRows);
     for (int i = 1; i <= numRows; i++) {
diff --git a/spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/SparkCatalog.java b/spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/SparkCatalog.java
index 088025cb68e2..2126e3283cce 100644
--- a/spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/SparkCatalog.java
+++ b/spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/SparkCatalog.java
@@ -26,6 +26,7 @@ import java.util.Map;
 import java.util.Objects;
 import java.util.Set;
+import java.util.StringJoiner;
 import java.util.TreeMap;
 import java.util.concurrent.TimeUnit;
 import java.util.regex.Matcher;
@@ -539,8 +540,13 @@ public boolean dropNamespace(String[] namespace, boolean cascade)
   @Override
   public Identifier[] listViews(String... namespace) {
-    throw new UnsupportedOperationException(
-        "Listing views is not supported by catalog: " + catalogName);
+    if (null != asViewCatalog) {
+      return asViewCatalog.listViews(Namespace.of(namespace)).stream()
+          .map(ident -> Identifier.of(ident.namespace().levels(), ident.name()))
+          .toArray(Identifier[]::new);
+    }
+
+    return new Identifier[0];
   }
 
   @Override
@@ -572,11 +578,13 @@ public View createView(
     if (null != asViewCatalog) {
       Schema icebergSchema = SparkSchemaUtil.convert(schema);
+      StringJoiner joiner = new StringJoiner(", ");
+      Arrays.stream(queryColumnNames).forEach(joiner::add);
 
       try {
         Map<String, String> props = ImmutableMap.<String, String>builder()
             .putAll(Spark3Util.rebuildCreateProperties(properties))
-            .put("queryColumnNames", Arrays.toString(queryColumnNames))
+            .put("queryColumnNames", joiner.toString())
            .build();
 
         org.apache.iceberg.view.View view =
             asViewCatalog
diff --git a/spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/source/SparkView.java b/spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/source/SparkView.java
index 3a4a92ce390b..d7a13562c408 100644
--- a/spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/source/SparkView.java
+++ b/spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/source/SparkView.java
@@ -87,8 +87,8 @@ public StructType schema() {
 
   @Override
   public String[] queryColumnNames() {
-    return properties().containsKey(QUERY_COLUMN_NAMES)
-        ? properties().get(QUERY_COLUMN_NAMES).split(", ")
+    return icebergView.properties().containsKey(QUERY_COLUMN_NAMES)
+        ? icebergView.properties().get(QUERY_COLUMN_NAMES).split(", ")
         : new String[0];
   }
 
@@ -112,11 +112,6 @@ public Map<String, String> properties() {
     propsBuilder.put("provider", "iceberg");
     propsBuilder.put("location", icebergView.location());
 
-    if (icebergView.properties().containsKey(QUERY_COLUMN_NAMES)) {
-      String queryColumnNames =
-          icebergView.properties().get(QUERY_COLUMN_NAMES).replace("[", "").replace("]", "");
-      propsBuilder.put(QUERY_COLUMN_NAMES, queryColumnNames);
-    }
 
     if (icebergView instanceof BaseView) {
       ViewOperations ops = ((BaseView) icebergView).operations();
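Note (illustrative, not part of the patch): the `SparkCatalog`/`SparkView` changes fix how `queryColumnNames` round-trips through a view property. `Arrays.toString` wrapped the list in brackets (`"[id, data]"`), which then had to be scrubbed on read; joining with `", "` up front makes the split on read exact, so the scrubbing block in `properties()` can be deleted. A self-contained sketch of the round trip:

```scala
// Before: Arrays.toString(Array("id", "data")) produced "[id, data]", so
// splitting on ", " yielded "[id" and "data]" unless brackets were stripped.
// After: join and split are symmetric.
val stored = Seq("id", "data").mkString(", ") // "id, data"
val restored = stored.split(", ").toSeq       // Seq("id", "data")
assert(restored == Seq("id", "data"))
```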