Github user marmbrus commented on a diff in the pull request:

    https://github.com/apache/spark/pull/6627#discussion_r32382879

    --- Diff: sql/hive/src/main/scala/org/apache/spark/sql/hive/client/package.scala ---
    @@ -19,15 +19,27 @@ package org.apache.spark.sql.hive

     /** Support for interacting with different versions of the HiveMetastoreClient */
     package object client {
    -  private[client] abstract class HiveVersion(val fullVersion: String, val hasBuiltinsJar: Boolean)
    +  private[client] abstract class HiveVersion(
    +      val fullVersion: String,
    +      val extraDeps: Seq[String] = Nil,
    +      val exclusions: Seq[String] = Nil)

       // scalastyle:off
       private[client] object hive {
    -    case object v10 extends HiveVersion("0.10.0", true)
    -    case object v11 extends HiveVersion("0.11.0", false)
    -    case object v12 extends HiveVersion("0.12.0", false)
    -    case object v13 extends HiveVersion("0.13.1", false)
    +    case object v12 extends HiveVersion("0.12.0")
    +    case object v13 extends HiveVersion("0.13.1")
    +
    +    // Hive 0.14 depends on calcite 0.9.2-incubating-SNAPSHOT which does not exist in
    +    // maven central anymore, so override those with a version that exists.
    +    //
    +    // org.pentaho:pentaho-aggdesigner-algorithm is also nowhere to be found, so exclude
    +    // it explicitly. If it's needed by the metastore client, users will have to dig it
    +    // out of somewhere and use configuration to point Spark at the correct jars.
    --- End diff --

    The JIRA I was thinking of would ask them to make sure that future versions of Hive can be used as a library simply by depending on them in Maven. Seems like they should be able to ensure future releases don't depend on SNAPSHOT dependencies and whatnot.
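    For context, the diff shown above cuts off before the 0.14 entry itself. A minimal sketch of how the new extraDeps/exclusions parameters could be used for such an entry follows; the calcite artifact coordinates are assumptions for illustration, and only the pentaho exclusion is actually named in the diff comment.

        // Hypothetical illustration of the new HiveVersion constructor in use.
        // The calcite versions below are assumed replacements for the
        // 0.9.2-incubating-SNAPSHOT dependency, not taken from the patch.
        case object v14 extends HiveVersion("0.14.0",
          extraDeps = Seq(
            "org.apache.calcite:calcite-core:1.3.0-incubating",
            "org.apache.calcite:calcite-avatica:1.3.0-incubating"),
          exclusions = Seq("org.pentaho:pentaho-aggdesigner-algorithm"))

    With this shape, older versions that need no overrides (v12, v13) can keep the single-argument form, while versions with broken upstream dependency metadata carry their fixes alongside the version string.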