Repository: spark Updated Branches: refs/heads/branch-2.0 2aae220b5 -> ef206ace2
[SPARK-18947][SQL] SQLContext.tableNames should not call Catalog.listTables ## What changes were proposed in this pull request? It's a huge waste to call `Catalog.listTables` in `SQLContext.tableNames`, which only needs the table names, while `Catalog.listTables` will get the table metadata for each table name. ## How was this patch tested? N/A Author: Wenchen Fan <wenc...@databricks.com> Closes #16352 from cloud-fan/minor. (cherry picked from commit b7650f11c7afbdffc6f5caaafb5dcfd54f7a25ff) Signed-off-by: Wenchen Fan <wenc...@databricks.com> Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ef206ace Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ef206ace Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ef206ace Branch: refs/heads/branch-2.0 Commit: ef206ace24d8588782fd9bd4fdf120f20fbfe841 Parents: 2aae220 Author: Wenchen Fan <wenc...@databricks.com> Authored: Wed Dec 21 19:39:00 2016 +0800 Committer: Wenchen Fan <wenc...@databricks.com> Committed: Wed Dec 21 19:40:15 2016 +0800 ---------------------------------------------------------------------- .../src/main/scala/org/apache/spark/sql/SQLContext.scala | 4 ++-- .../main/scala/org/apache/spark/sql/api/r/SQLUtils.scala | 9 +++++---- 2 files changed, 7 insertions(+), 6 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/spark/blob/ef206ace/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala ---------------------------------------------------------------------- diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala index e7627ac..6013c01 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala @@ -730,7 +730,7 @@ class SQLContext private[sql](val sparkSession: SparkSession) * @since 1.3.0 
*/ def tableNames(): Array[String] = { - sparkSession.catalog.listTables().collect().map(_.name) + tableNames(sparkSession.catalog.currentDatabase) } /** @@ -740,7 +740,7 @@ class SQLContext private[sql](val sparkSession: SparkSession) * @since 1.3.0 */ def tableNames(databaseName: String): Array[String] = { - sparkSession.catalog.listTables(databaseName).collect().map(_.name) + sessionState.catalog.listTables(databaseName).map(_.table).toArray } /** http://git-wip-us.apache.org/repos/asf/spark/blob/ef206ace/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala ---------------------------------------------------------------------- diff --git a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala index 08f8ded..8bc7f67 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/api/r/SQLUtils.scala @@ -276,11 +276,12 @@ private[sql] object SQLUtils extends Logging { } def getTableNames(sparkSession: SparkSession, databaseName: String): Array[String] = { - databaseName match { - case n: String if n != null && n.trim.nonEmpty => - sparkSession.catalog.listTables(n).collect().map(_.name) + val db = databaseName match { + case _ if databaseName != null && databaseName.trim.nonEmpty => + databaseName case _ => - sparkSession.catalog.listTables().collect().map(_.name) + sparkSession.catalog.currentDatabase } + sparkSession.sessionState.catalog.listTables(db).map(_.table).toArray } } --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org