Github user SparkQA commented on the issue:

    https://github.com/apache/spark/pull/14311
  
    **[Test build #63551 has 
finished](https://amplab.cs.berkeley.edu/jenkins/job/SparkPullRequestBuilder/63551/consoleFull)**
 for PR 14311 at commit 
[`9435f26`](https://github.com/apache/spark/commit/9435f265efc6aa96919112ff31b6bf0560b311ab).
     * This patch **fails Spark unit tests**.
     * This patch merges cleanly.
     * This patch adds the following public classes _(experimental)_:
      * `public class ShuffleIndexInformation `
      * `public class ShuffleIndexRecord `
      * `class SparkConf(loadDefaults: Boolean) extends Cloneable with Logging 
with Serializable `
      * `case class MonotonicallyIncreasingID() extends LeafExpression with 
Nondeterministic `
      * `case class SparkPartitionID() extends LeafExpression with 
Nondeterministic `
      * `case class AggregateExpression(`
      * `case class Least(children: Seq[Expression]) extends Expression `
      * `case class Greatest(children: Seq[Expression]) extends Expression `
      * `case class CurrentDatabase() extends LeafExpression with Unevaluable `
      * `class GenericInternalRow(val values: Array[Any]) extends 
BaseGenericInternalRow `
      * `class AbstractScalaRowIterator[T] extends Iterator[T] `
      * `case class CachedData(plan: LogicalPlan, cachedRepresentation: 
InMemoryRelation)`
      * `class CacheManager extends Logging `
      * `trait DataSourceScanExec extends LeafExecNode with CodegenSupport `
      * `case class RowDataSourceScanExec(`
      * `case class FileSourceScanExec(`
      * `case class ExternalRDD[T](`
      * `case class ExternalRDDScanExec[T](`
      * `case class LogicalRDD(`
      * `case class RDDScanExec(`
      * `trait FileRelation `
      * `case class LocalTableScanExec(`
      * `abstract class RowIterator `
      * `trait LeafExecNode extends SparkPlan `
      * `trait UnaryExecNode extends SparkPlan `
      * `trait BinaryExecNode extends SparkPlan `
      * `case class PlanLater(plan: LogicalPlan) extends LeafExecNode `
      * `abstract class SparkStrategies extends QueryPlanner[SparkPlan] `
      * `class UnsafeRowSerializer(`
      * `class TypedSumDouble[IN](val f: IN => Double) extends Aggregator[IN, 
Double, Double] `
      * `class TypedSumLong[IN](val f: IN => Long) extends Aggregator[IN, Long, 
Long] `
      * `class TypedCount[IN](val f: IN => Any) extends Aggregator[IN, Long, 
Long] `
      * `class TypedAverage[IN](val f: IN => Double) extends Aggregator[IN, 
(Double, Long), Double] `
      * `case class ScalaUDAF(`
      * `case class InMemoryRelation(`
      * `case class InMemoryTableScanExec(`
      * `trait RunnableCommand extends LogicalPlan with logical.Command `
      * `case class ExecutedCommandExec(cmd: RunnableCommand) extends SparkPlan 
`
      * `case class AlterTableRecoverPartitionsCommand(`
      * `case class DataSourceAnalysis(conf: CatalystConf) extends 
Rule[LogicalPlan] `
      * `class FindDataSourceTable(sparkSession: SparkSession) extends 
Rule[LogicalPlan] `
      * `case class InsertIntoDataSourceCommand(`
      * `case class InsertIntoHadoopFsRelationCommand(`
      * `case class PartitionDirectory(values: InternalRow, path: Path)`
      * `case class PartitionSpec(`
      * `case class CreateTable(tableDesc: CatalogTable, mode: SaveMode, query: 
Option[LogicalPlan])`
      * `case class JDBCPartition(whereClause: String, idx: Int) extends 
Partition `
      * `class ResolveDataSource(sparkSession: SparkSession) extends 
Rule[LogicalPlan] `
      * `case class PreprocessDDL(conf: SQLConf) extends Rule[LogicalPlan] `
      * `case class PreprocessTableInsertion(conf: SQLConf) extends 
Rule[LogicalPlan] `
      * `case class PreWriteCheck(conf: SQLConf, catalog: SessionCatalog)`
      * `  case class DebugExec(child: SparkPlan) extends UnaryExecNode with 
CodegenSupport `
      * `class ExchangeCoordinator(`
      * `case class MapPartitionsRWrapper(`
      * `class IncrementalExecution(`
      * `class ExecutionPage(parent: SQLTab) extends WebUIPage(\"execution\") 
with Logging `
      * `class SQLHistoryListenerFactory extends SparkHistoryListenerFactory `
      * `class SQLListener(conf: SparkConf) extends SparkListener with Logging `
      * `class SQLHistoryListener(conf: SparkConf, sparkUI: SparkUI)`
      * `class SQLTab(val listener: SQLListener, sparkUI: SparkUI)`
      * `case class SparkPlanGraph(`
      * `  implicit class SchemaAttribute(f: StructField) `


---
If your project is set up for it, you can reply to this email and have your
reply appear on GitHub as well. If your project does not have this feature
enabled and wishes to enable it, or if the feature is enabled but not working,
please contact infrastructure at infrastructure@apache.org or file a JIRA
ticket with INFRA.
---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to