Repository: spark
Updated Branches:
  refs/heads/master 12de34833 -> 907df2fce


[SQL] [MINOR] remove unnecessary class

This class is identical to `org.apache.spark.sql.execution.datasources.jdbc.DefaultSource` and is not needed.

Author: Wenchen Fan <cloud0...@outlook.com>

Closes #8334 from cloud-fan/minor.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/907df2fc
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/907df2fc
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/907df2fc

Branch: refs/heads/master
Commit: 907df2fce00d2cbc9fae371344f05f800e0d2726
Parents: 12de348
Author: Wenchen Fan <cloud0...@outlook.com>
Authored: Thu Aug 20 13:51:54 2015 -0700
Committer: Reynold Xin <r...@databricks.com>
Committed: Thu Aug 20 13:51:54 2015 -0700

----------------------------------------------------------------------
 .../execution/datasources/DefaultSource.scala   | 64 --------------------
 1 file changed, 64 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/907df2fc/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DefaultSource.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DefaultSource.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DefaultSource.scala
deleted file mode 100644
index 6e4cc4d..0000000
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/DefaultSource.scala
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
-* Licensed to the Apache Software Foundation (ASF) under one or more
-* contributor license agreements.  See the NOTICE file distributed with
-* this work for additional information regarding copyright ownership.
-* The ASF licenses this file to You under the Apache License, Version 2.0
-* (the "License"); you may not use this file except in compliance with
-* the License.  You may obtain a copy of the License at
-*
-*    http://www.apache.org/licenses/LICENSE-2.0
-*
-* Unless required by applicable law or agreed to in writing, software
-* distributed under the License is distributed on an "AS IS" BASIS,
-* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-* See the License for the specific language governing permissions and
-* limitations under the License.
-*/
-
-package org.apache.spark.sql.execution.datasources
-
-import java.util.Properties
-
-import org.apache.spark.sql.SQLContext
-import org.apache.spark.sql.execution.datasources.jdbc.{JDBCRelation, JDBCPartitioningInfo, DriverRegistry}
-import org.apache.spark.sql.sources.{BaseRelation, DataSourceRegister, RelationProvider}
-
-
-class DefaultSource extends RelationProvider with DataSourceRegister {
-
-  override def shortName(): String = "jdbc"
-
-  /** Returns a new base relation with the given parameters. */
-  override def createRelation(
-      sqlContext: SQLContext,
-      parameters: Map[String, String]): BaseRelation = {
-    val url = parameters.getOrElse("url", sys.error("Option 'url' not specified"))
-    val driver = parameters.getOrElse("driver", null)
-    val table = parameters.getOrElse("dbtable", sys.error("Option 'dbtable' not specified"))
-    val partitionColumn = parameters.getOrElse("partitionColumn", null)
-    val lowerBound = parameters.getOrElse("lowerBound", null)
-    val upperBound = parameters.getOrElse("upperBound", null)
-    val numPartitions = parameters.getOrElse("numPartitions", null)
-
-    if (driver != null) DriverRegistry.register(driver)
-
-    if (partitionColumn != null
-      && (lowerBound == null || upperBound == null || numPartitions == null)) {
-      sys.error("Partitioning incompletely specified")
-    }
-
-    val partitionInfo = if (partitionColumn == null) {
-      null
-    } else {
-      JDBCPartitioningInfo(
-        partitionColumn,
-        lowerBound.toLong,
-        upperBound.toLong,
-        numPartitions.toInt)
-    }
-    val parts = JDBCRelation.columnPartition(partitionInfo)
-    val properties = new Properties() // Additional properties that we will pass to getConnection
-    parameters.foreach(kv => properties.setProperty(kv._1, kv._2))
-    JDBCRelation(url, table, parts, properties)(sqlContext)
-  }
-}


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to