Could anyone help me with using JDBC from Spark/Scala to get data? The
JDBC driver is built on our home-brewed C++ JNI library. I use sbt to
compile and package the app into a jar, and spark-submit to submit the
jar to standalone Spark. The run ultimately fails with:

    Exception in thread "main" java.lang.UnsatisfiedLinkError

Lots of googling and reading, but I still haven't figured out a
solution. The JDBC driver has been tested and works in a non-Spark
environment.
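
To be concrete, a standalone check roughly like the following is what
works for me outside Spark (a sketch of my non-Spark test; same library
path, URL, and credentials as the Spark app below):

import java.sql.DriverManager

// Minimal non-Spark check (sketch): map the C++ JNI library into this
// JVM, register the driver, and open a plain JDBC connection.
object PlainJdbcCheck {
  def main(args: Array[String]): Unit = {
    System.load("/home/exeray/jaguar/lib/libJaguarClient.so")
    Class.forName("com.jaguar.jdbc.JaguarDriver")
    val conn = DriverManager.getConnection(
      "jdbc:jaguar://127.0.0.1:8888/test", "test", "test")
    println("connected: " + !conn.isClosed)
    conn.close()
  }
}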
Here are my source code, environment variables, and scripts:

TestJDBC.scala  (src/main/scala/TestJDBC.scala in the sbt project):
--------------------------------------------------------------------
import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import com.jaguar.jdbc.internal.jaguar._
import com.jaguar.jdbc.JaguarDataSource

object TestJDBC {
  def main(args: Array[String]) {
    System.load("/home/exeray/jaguar/lib/libJaguarClient.so")
    Class.forName("com.jaguar.jdbc.JaguarDriver")
    val sparkConf = new SparkConf().setAppName("TestJDBC")
    val sc = new SparkContext(sparkConf)
    val sqlContext = new org.apache.spark.sql.SQLContext(sc)

    val url = "jdbc:jaguar://127.0.0.1:8888/test"
    val prop = new java.util.Properties
    prop.setProperty("driver", "com.jaguar.jdbc.JaguarDriver")
    prop.setProperty("url", url)
    prop.setProperty("user", "test")
    prop.setProperty("password", "test")
    prop.setProperty("dbtable", "t1")
    prop.setProperty("partitionColumn", "uid")
    prop.setProperty("lowerBound", "2")
    prop.setProperty("upperBound", "200")
    prop.setProperty("numPartitions", "3")

    val people = sqlContext.read.jdbc(url, "t1", prop)
  }
}
--------------------------------------------------------------------
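
One thing I am unsure about: as I understand it, JNI binds a native
library to the classloader that called System.load, and spark-submit
may load the JDBC jar (from --driver-class-path) with a different
classloader than my application jar. A small diagnostic along these
lines (hypothetical, not part of the app above) might confirm or rule
that out:

// Hypothetical diagnostic: if JaguarClient and TestJDBC are served by
// different classloaders, the System.load in main() would not make
// the native methods visible to JaguarClient.
object LoaderCheck {
  def main(args: Array[String]): Unit = {
    System.load("/home/exeray/jaguar/lib/libJaguarClient.so")
    val jniClass = Class.forName("com.jaguar.jdbc.internal.jaguar.JaguarClient")
    println("JaguarClient loaded by: " + jniClass.getClassLoader)
    println("LoaderCheck loaded by:  " + getClass.getClassLoader)
  }
}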
build.sbt:

lazy val root = (project in file(".")).
  settings(
    name := "TestJDBC",
    version := "1.0",
    scalaVersion := "2.10.4"
  )

libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-sql" % "1.4.1",
  "org.apache.spark" %% "spark-core" % "1.4.1"
)

--------------------------------------------------------------------
submit.sh:

#!/bin/bash
export LD_LIBRARY_PATH=$HOME/jaguar/lib

spark-submit --class TestJDBC --master local[4] \
    --driver-library-path $LD_LIBRARY_PATH \
    --driver-class-path $HOME/jaguar/lib/jaguar-jdbc-2.0.jar \
    --conf spark.executor.extraClassPath=$LD_LIBRARY_PATH \
    target/scala-2.10/testjdbc_2.10-1.0.jar
--------------------------------------------------------------------
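
Since this runs with --master local[4], I assume the driver and the
executors share one JVM, so the executor settings may not even come
into play yet. On a real cluster I would expect to additionally have
to force-load the library on each executor, something like this
sketch (assumes the .so exists at the same path on every worker):

// Sketch: make every executor JVM map the native library up front,
// before any JDBC partition is read.
sc.parallelize(1 to sc.defaultParallelism, sc.defaultParallelism)
  .foreachPartition { _ =>
    System.load("/home/exeray/jaguar/lib/libJaguarClient.so")
  }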
$ ./submit.sh

Fri Sep  4 15:05:09 PDT 2015
Spark Command: /usr/lib/java/jdk1.7.0_75/bin/java -cp /home/exeray/jaguar/lib/jaguar-jdbc-2.0.jar:/home/exeray/opt/spark/conf/:/home/exeray/opt/spark/lib/spark-assembly-1.4.1-hadoop2.4.0.jar:/home/exeray/opt/spark/lib/datanucleus-rdbms-3.2.9.jar:/home/exeray/opt/spark/lib/datanucleus-api-jdo-3.2.6.jar:/home/exeray/opt/spark/lib/datanucleus-core-3.2.10.jar:/home/exeray/opt/hadoop/etc/hadoop/ -Xms512m -Xmx512m -XX:MaxPermSize=256m org.apache.spark.deploy.SparkSubmit --master local[4] --conf spark.driver.extraClassPath=/home/exeray/jaguar/lib/jaguar-jdbc-2.0.jar --conf spark.driver.extraLibraryPath=/home/exeray/jaguar/lib --conf spark.executor.extraClassPath=/home/exeray/jaguar/lib --class TestJDBC target/scala-2.10/testjdbc_2.10-1.0.jar
========================================
15/09/04 15:05:10 INFO spark.SparkContext: Running Spark version 1.4.1
15/09/04 15:05:10 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
15/09/04 15:05:10 INFO spark.SecurityManager: Changing view acls to: exeray
15/09/04 15:05:10 INFO spark.SecurityManager: Changing modify acls to: exeray
15/09/04 15:05:10 INFO spark.SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(exeray); users with modify permissions: Set(exeray)
15/09/04 15:05:11 INFO slf4j.Slf4jLogger: Slf4jLogger started
15/09/04 15:05:12 INFO Remoting: Starting remoting
15/09/04 15:05:12 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriver@192.168.1.109:59107]
15/09/04 15:05:12 INFO util.Utils: Successfully started service 'sparkDriver' on port 59107.
15/09/04 15:05:12 INFO spark.SparkEnv: Registering MapOutputTracker
15/09/04 15:05:12 INFO spark.SparkEnv: Registering BlockManagerMaster
15/09/04 15:05:12 INFO storage.DiskBlockManager: Created local directory at /tmp/spark-d63e3b69-e5e7-4ebb-9a98-b28baad7244b/blockmgr-ecec3d57-110e-457a-af7c-37d6aa2c1835
15/09/04 15:05:12 INFO storage.MemoryStore: MemoryStore started with capacity 265.4 MB
15/09/04 15:05:12 INFO spark.HttpFileServer: HTTP File server directory is /tmp/spark-d63e3b69-e5e7-4ebb-9a98-b28baad7244b/httpd-27e4bd29-8df2-409d-989f-03331b1a4a95
15/09/04 15:05:12 INFO spark.HttpServer: Starting HTTP Server
15/09/04 15:05:12 INFO server.Server: jetty-8.y.z-SNAPSHOT
15/09/04 15:05:12 INFO server.AbstractConnector: Started SocketConnector@0.0.0.0:50223
15/09/04 15:05:12 INFO util.Utils: Successfully started service 'HTTP file server' on port 50223.
15/09/04 15:05:12 INFO spark.SparkEnv: Registering OutputCommitCoordinator
15/09/04 15:05:12 INFO server.Server: jetty-8.y.z-SNAPSHOT
15/09/04 15:05:12 INFO server.AbstractConnector: Started SelectChannelConnector@0.0.0.0:4040
15/09/04 15:05:12 INFO util.Utils: Successfully started service 'SparkUI' on port 4040.
15/09/04 15:05:12 INFO ui.SparkUI: Started SparkUI at http://192.168.1.109:4040
15/09/04 15:05:12 INFO spark.SparkContext: Added JAR file:/home/exeray/jyue/TestJDBC/target/scala-2.10/testjdbc_2.10-1.0.jar at http://192.168.1.109:50223/jars/testjdbc_2.10-1.0.jar with timestamp 1441404312873
15/09/04 15:05:12 INFO executor.Executor: Starting executor ID driver on host localhost
15/09/04 15:05:13 INFO util.Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 37337.
15/09/04 15:05:13 INFO netty.NettyBlockTransferService: Server created on 37337
15/09/04 15:05:13 INFO storage.BlockManagerMaster: Trying to register BlockManager
15/09/04 15:05:13 INFO storage.BlockManagerMasterEndpoint: Registering block manager localhost:37337 with 265.4 MB RAM, BlockManagerId(driver, localhost, 37337)
15/09/04 15:05:13 INFO storage.BlockManagerMaster: Registered BlockManager
Exception in thread "main" java.lang.UnsatisfiedLinkError: com.jaguar.jdbc.internal.jaguar.JaguarClient.adbMakeObject()V
        at com.jaguar.jdbc.internal.jaguar.JaguarClient.adbMakeObject(Native Method)
        at com.jaguar.jdbc.internal.jaguar.JaguarClient.<init>(JaguarClient.java:10)
        at com.jaguar.jdbc.internal.jaguar.JaguarProtocol.<init>(JaguarProtocol.java:120)
        at com.jaguar.jdbc.JaguarDriver.connect(JaguarDriver.java:89)
        at java.sql.DriverManager.getConnection(DriverManager.java:571)
        at java.sql.DriverManager.getConnection(DriverManager.java:187)
        at org.apache.spark.sql.jdbc.JDBCRDD$.resolveTable(JDBCRDD.scala:118)
        at org.apache.spark.sql.jdbc.JDBCRelation.<init>(JDBCRelation.scala:128)
        at org.apache.spark.sql.DataFrameReader.jdbc(DataFrameReader.scala:200)
        at org.apache.spark.sql.DataFrameReader.jdbc(DataFrameReader.scala:130)
        at TestJDBC$.main(TestJDBC.scala:50)
        at TestJDBC.main(TestJDBC.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:665)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:170)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:193)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:112)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
15/09/04 15:05:13 INFO spark.SparkContext: Invoking stop() from shutdown hook
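
If I read the trace correctly, the error is thrown in the driver's
main thread inside JDBCRDD$.resolveTable, i.e. while Spark resolves
the table schema on the driver, before any executor is involved. So it
looks like the driver JVM is the one that cannot link adbMakeObject,
even though my System.load call succeeded. To narrow it down I could
try reproducing roughly the connection call Spark makes, outside of
spark-submit but with the same -cp as the Spark Command above
(hypothetical repro):

import java.sql.DriverManager
import java.util.Properties

// Hypothetical repro of roughly what resolveTable does:
// DriverManager.getConnection(url, properties). If this fails the
// same way under the same -cp, the problem is the classpath or
// classloader setup rather than Spark itself.
object ResolveTableRepro {
  def main(args: Array[String]): Unit = {
    System.load("/home/exeray/jaguar/lib/libJaguarClient.so")
    Class.forName("com.jaguar.jdbc.JaguarDriver")
    val props = new Properties
    props.setProperty("user", "test")
    props.setProperty("password", "test")
    val conn = DriverManager.getConnection("jdbc:jaguar://127.0.0.1:8888/test", props)
    println("connection ok: " + !conn.isClosed)
    conn.close()
  }
}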
--------------------------------------------------------------------
conf/spark-env.sh:

export JAVA_HOME=/usr/lib/java/jdk1.7.0_75
export SCALA_HOME=/home/exeray/opt/scala
export SPARK_MASTER_IP=HD3
export HADOOP_CONF_DIR=/home/exeray/opt/hadoop/etc/hadoop
export LD_LIBRARY_PATH=/home/exeray/jaguar/lib
export SPARK_PRINT_LAUNCH_COMMAND=1
export SPARK_LIBRARY_PATH=$LD_LIBRARY_PATH
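
One more thing I would like to verify is whether these exports
actually reach the driver JVM; a two-line check (sketch) dropped into
main() should show it:

// Sketch: print the library/search paths the driver JVM actually sees.
println("java.library.path = " + System.getProperty("java.library.path"))
println("LD_LIBRARY_PATH   = " + System.getenv("LD_LIBRARY_PATH"))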
--------------------------------------------------------------------
THANKS !!!

Jon
