Hi All,

Please help me fix a Spark Cassandra Connector issue; the details are below.

*Command:*

dse spark-submit --master spark://10.246.43.15:7077 --class HelloWorld --jars /home/missingmerch/postgresql-9.4-1201.jdbc41.jar /home/missingmerch/etl-0.0.1-SNAPSHOT.jar


*Error:*


WARN  2015-08-10 06:33:35 org.apache.spark.util.Utils: Service 'SparkUI' could not bind on port 4040. Attempting port 4041.

Exception in thread "main" java.lang.NoSuchMethodError: com.datastax.spark.connector.package$.toRDDFunctions(Lorg/apache/spark/rdd/RDD;Lscala/reflect/ClassTag;)Lcom/datastax/spark/connector/RDDFunctions;
        at HelloWorld$.main(HelloWorld.scala:29)
        at HelloWorld.main(HelloWorld.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.deploy.SparkSubmit$.launch(SparkSubmit.scala:358)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:75)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)


*Code:*

import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._
import org.apache.spark.SparkConf
import org.apache.spark.rdd.JdbcRDD
import com.datastax.spark.connector._
import com.datastax.spark.connector.cql.CassandraConnector
import com.datastax.bdp.spark.DseSparkConfHelper._
import java.sql.{Connection, DriverManager, ResultSet, PreparedStatement, SQLException, Statement}

object HelloWorld {

  def main(args: Array[String]) {

    def createSparkContext() = {
      // Locate the jar containing this class so it can be shipped to executors
      val myJar = getClass.getProtectionDomain.getCodeSource.getLocation.getPath

      val conf = new SparkConf()
        .set("spark.cassandra.connection.host", "10.246.43.15")
        .setAppName("First Spark App")
        .setMaster("local")
        .setJars(Array(myJar))
        .set("cassandra.username", "username")
        .set("cassandra.password", "password")
        .forDse

      new SparkContext(conf)
    }

    val sc = createSparkContext()
    val user = "hkonak0"
    val pass = "Winter18"

    // Load the PostgreSQL JDBC driver
    Class.forName("org.postgresql.Driver").newInstance

    val url = "jdbc:postgresql://gptester:5432/db_test"

    // Read from PostgreSQL; JdbcRDD binds the LIMIT ?/OFFSET ? placeholders
    // from the bounds (5, 0) over 1 partition
    val myRDD27 = new JdbcRDD(
      sc,
      () => DriverManager.getConnection(url, user, pass),
      "select * from wmax_vmax.arm_typ_txt LIMIT ? OFFSET ?",
      5, 0, 1,
      (r: ResultSet) => (
        r.getInt("alarm_type_code"),
        r.getString("language_code"),
        r.getString("alrm_type_cd_desc")))

    // Write the tuples to Cassandra (this is line 29 in the stack trace)
    myRDD27.saveToCassandra("keyspace", "arm_typ_txt",
      SomeColumns("alarm_type_code", "language_code", "alrm_type_cd_desc"))

    println(myRDD27.count())
    println(myRDD27.first)

    sc.stop()
    sys.exit()
  }
}
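
One thing I am not sure about is the authentication keys above; the open-source connector documents spark.cassandra.auth.username/password, and I have not verified whether forDse also picks up the plain cassandra.username/cassandra.password keys. A minimal sketch of the documented variant, inside createSparkContext():

import org.apache.spark.SparkConf

// Sketch only: spark.cassandra.auth.* are the property names documented by
// the open-source connector; the cassandra.* keys used above may or may not
// be mapped by DSE's forDse helper.
val conf = new SparkConf()
  .set("spark.cassandra.connection.host", "10.246.43.15")
  .set("spark.cassandra.auth.username", "username")
  .set("spark.cassandra.auth.password", "password")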



*POM XML:*


<dependencies>
       <dependency>
              <groupId>org.apache.spark</groupId>
              <artifactId>spark-core_2.10</artifactId>
              <version>1.2.2</version>
       </dependency>
       <dependency>
              <groupId>org.apache.hadoop</groupId>
              <artifactId>hadoop-client</artifactId>
              <version>1.2.1</version>
       </dependency>
       <dependency>
              <groupId>org.scala-lang</groupId>
              <artifactId>scala-library</artifactId>
              <version>2.10.5</version>
       </dependency>
       <dependency>
              <groupId>junit</groupId>
              <artifactId>junit</artifactId>
              <version>3.8.1</version>
              <scope>test</scope>
       </dependency>
       <dependency>
              <groupId>com.datastax.dse</groupId>
              <artifactId>dse</artifactId>
              <version>4.7.2</version>
              <scope>system</scope>
              <systemPath>C:\workspace\etl\lib\dse.jar</systemPath>
       </dependency>
       <dependency>
              <groupId>com.datastax.spark</groupId>
              <artifactId>spark-cassandra-connector-java_2.10</artifactId>
              <version>1.1.1</version>
       </dependency>
</dependencies>
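
Could this be a classpath/version mismatch? dse spark-submit runs against the Spark and connector jars bundled with DSE 4.7.2, while the pom compiles against spark-cassandra-connector-java_2.10 1.1.1 and spark-core 1.2.2. If the bundled connector's toRDDFunctions signature differs from the 1.1.1 one, that would match the NoSuchMethodError above. Should these dependencies be aligned to the DSE-bundled versions, or marked <scope>provided</scope> so only the runtime copies are used?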


Please let me know if any further details are required to analyze the issue.


Regards,

Satish Chandra
