Hi all!
I am trying to read from a HANA database using Spark's JdbcRDD.
Here is my code:
import java.sql.{DriverManager, ResultSet}

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.rdd.JdbcRDD

def readFromHana(): Unit = {
  val conf = new SparkConf().setAppName("test").setMaster("local")
  val sc = new SparkContext(conf)

  val rdd = new JdbcRDD(
    sc,
    () => {
      // Register the SAP HANA JDBC driver and open a connection
      Class.forName("com.sap.db.jdbc.Driver")
      DriverManager.getConnection(
        "jdbc:sap://54.69.200.113:30015/?currentschema=LIVE2",
        "mujadid", "786Xyz123")
    },
    "SELECT * FROM MEMBERS LIMIT ? OFFSET ?",
    0, 100, 1,                     // lowerBound, upperBound, numPartitions
    (r: ResultSet) => convert(r))

  println(rdd.count())
  sc.stop()
}
// Concatenate all columns of the current row into a comma-separated string
def convert(rs: ResultSet): String = {
  val rsmd = rs.getMetaData()
  val numberOfColumns = rsmd.getColumnCount()
  val row = new StringBuilder
  var i = 1
  while (i <= numberOfColumns) {
    row.append(rs.getString(i) + ",")
    i += 1
  }
  row.toString()
}
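
For reference, my understanding from the JdbcRDD scaladoc is that the query must contain two ? placeholders, which Spark fills with each partition's lower and upper bound (its example bounds a numeric id column); I adapted that pattern to LIMIT/OFFSET instead. A minimal sketch of the documented pattern, assuming a hypothetical numeric ID column on MEMBERS (that column name is my assumption, just for illustration) and reusing the connection code above, would be:

// Sketch of the scaladoc pattern: the two ? placeholders bound a
// partitioning column; ID is a hypothetical column here.
val boundedRdd = new JdbcRDD(
  sc,
  () => {
    Class.forName("com.sap.db.jdbc.Driver")
    DriverManager.getConnection(
      "jdbc:sap://54.69.200.113:30015/?currentschema=LIVE2",
      "mujadid", "786Xyz123")
  },
  "SELECT * FROM MEMBERS WHERE ? <= ID AND ID <= ?",
  1, 100, 1,                     // lowerBound, upperBound, numPartitions
  (r: ResultSet) => convert(r))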

The resulting count is 0.

Any suggestions?

Thanks


