See the instructions in the Spark documentation:
https://spark.apache.org/docs/latest/building-spark.html#building-for-scala-211
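
The stack trace is the usual symptom of mixing Scala binary versions: the pom sets
scala.version to 2.11.7 but pulls in the _2.10 builds of the Spark artifacts, so Spark
classes compiled against the Scala 2.10 library cannot find what they expect in the
2.11 scala-library on the classpath. If you want to stay on Scala 2.11, a minimal
sketch of the aligned sections, assuming you use the Spark 1.6.1 artifacts published
for Scala 2.11 on Maven Central, would be:

    <properties>
        <!-- other properties unchanged -->
        <!-- the binary-version suffix must match scala.version -->
        <scala.tools.version>2.11</scala.tools.version>
        <scala.version>2.11.7</scala.version>
    </properties>

    <!-- replaces the corresponding _2.10 entries in the posted pom -->
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.11</artifactId>
        <version>1.6.1</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-streaming_2.11</artifactId>
        <version>1.6.1</version>
    </dependency>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-sql_2.11</artifactId>
        <version>1.6.1</version>
    </dependency>

The alternative is to drop back to Scala 2.10.x and keep the _2.10 artifacts, but then
you lose case classes with more than 22 fields (that limit was only removed in 2.11).
Either way, the Scala library version and the _2.xx suffix on every Spark dependency
have to agree.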

On Wed, Mar 16, 2016 at 7:05 PM satyajit vegesna <satyajit.apas...@gmail.com>
wrote:

>
>
> Hi,
>
> Scala version: 2.11.7 (I had to upgrade the Scala version to enable case
> classes to accept more than 22 parameters.)
>
> Spark version: 1.6.1
>
> Please find the pom.xml below.
>
> I am getting the error below when trying to set up Spark in the IntelliJ IDE:
>
> 16/03/16 18:36:44 INFO spark.SparkContext: Running Spark version 1.6.1
> Exception in thread "main" java.lang.NoClassDefFoundError: scala/collection/GenTraversableOnce$class
>     at org.apache.spark.util.TimeStampedWeakValueHashMap.<init>(TimeStampedWeakValueHashMap.scala:42)
>     at org.apache.spark.SparkContext.<init>(SparkContext.scala:298)
>     at com.examples.testSparkPost$.main(testSparkPost.scala:27)
>     at com.examples.testSparkPost.main(testSparkPost.scala)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:606)
>     at com.intellij.rt.execution.application.AppMain.main(AppMain.java:140)
> Caused by: java.lang.ClassNotFoundException: scala.collection.GenTraversableOnce$class
>     at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
>     at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
>     at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
>     at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
>     at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
>     ... 9 more
>
> pom.xml:
>
> <project xmlns="http://maven.apache.org/POM/4.0.0"; 
> xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
>          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
> http://maven.apache.org/maven-v4_0_0.xsd";>
>     <modelVersion>4.0.0</modelVersion>
>     <groupId>StreamProcess</groupId>
>     <artifactId>StreamProcess</artifactId>
>     <version>0.0.1-SNAPSHOT</version>
>     <name>${project.artifactId}</name>
>     <description>This is a boilerplate maven project to start using Spark in 
> Scala</description>
>     <inceptionYear>2010</inceptionYear>
>
>     <properties>
>         <maven.compiler.source>1.6</maven.compiler.source>
>         <maven.compiler.target>1.6</maven.compiler.target>
>         <encoding>UTF-8</encoding>
>         <scala.tools.version>2.10</scala.tools.version>
>         <!-- Put the Scala version of the cluster -->
>         <scala.version>2.11.7</scala.version>
>     </properties>
>
>     <!-- repository to add org.apache.spark -->
>     <repositories>
>         <repository>
>             <id>cloudera-repo-releases</id>
>             <url>https://repository.cloudera.com/artifactory/repo/</url>
>         </repository>
>     </repositories>
>
>     <build>
>         <sourceDirectory>src/main/scala</sourceDirectory>
>         <testSourceDirectory>src/test/scala</testSourceDirectory>
>         <plugins>
>                 <!-- any other plugins -->
>                 <plugin>
>                     <artifactId>maven-assembly-plugin</artifactId>
>                     <executions>
>                         <execution>
>                             <phase>package</phase>
>                             <goals>
>                                 <goal>single</goal>
>                             </goals>
>                         </execution>
>                     </executions>
>                     <configuration>
>                         <descriptorRefs>
>                             <descriptorRef>jar-with-dependencies</descriptorRef>
>                         </descriptorRefs>
>                     </configuration>
>                 </plugin>
>             <plugin>
>                 <!-- see http://davidb.github.com/scala-maven-plugin -->
>                 <groupId>net.alchim31.maven</groupId>
>                 <artifactId>scala-maven-plugin</artifactId>
>                 <version>3.2.2</version>
>                 <executions>
>                     <execution>
>                         <goals>
>                             <goal>compile</goal>
>                             <goal>testCompile</goal>
>                         </goals>
>                         <configuration>
>                             <args>
>                                 <!--<arg>-make:transitive</arg>-->
>                                 <arg>-dependencyfile</arg>
>                                 <arg>${project.build.directory}/.scala_dependencies</arg>
>                             </args>
>                         </configuration>
>                     </execution>
>                 </executions>
>             </plugin>
>
>             <!-- "package" command plugin -->
>             <plugin>
>                 <artifactId>maven-assembly-plugin</artifactId>
>                 <version>2.4.1</version>
>                 <configuration>
>                     <descriptorRefs>
>                         <descriptorRef>jar-with-dependencies</descriptorRef>
>                     </descriptorRefs>
>                 </configuration>
>                 <executions>
>                     <execution>
>                         <id>make-assembly</id>
>                         <phase>package</phase>
>                         <goals>
>                             <goal>single</goal>
>                         </goals>
>                     </execution>
>                 </executions>
>             </plugin>
>         </plugins>
>     </build>
>     <dependencies>
>         <dependency>
>             <groupId>org.scala-lang</groupId>
>             <artifactId>scala-library</artifactId>
>             <version>${scala.version}</version>
>         </dependency>
>         <dependency>
>             <groupId>org.mongodb.mongo-hadoop</groupId>
>             <artifactId>mongo-hadoop-core</artifactId>
>             <version>1.4.2</version>
>             <exclusions>
>                 <exclusion>
>                     <groupId>javax.servlet</groupId>
>                     <artifactId>servlet-api</artifactId>
>                 </exclusion>
>             </exclusions>
>         </dependency>
>         <dependency>
>                 <groupId>org.mongodb</groupId>
>                 <artifactId>mongodb-driver</artifactId>
>                 <version>3.2.2</version>
>             <exclusions>
>                 <exclusion>
>                     <groupId>javax.servlet</groupId>
>                     <artifactId>servlet-api</artifactId>
>                 </exclusion>
>             </exclusions>
>         </dependency>
>         <dependency>
>             <groupId>org.apache.spark</groupId>
>             <artifactId>spark-streaming_2.10</artifactId>
>             <version>1.6.1</version>
>         </dependency>
>         <dependency>
>             <groupId>org.apache.spark</groupId>
>             <artifactId>spark-core_2.10</artifactId>
>             <version>1.6.1</version>
>         </dependency>
>         <dependency>
>             <groupId>org.apache.spark</groupId>
>             <artifactId>spark-sql_2.10</artifactId>
>             <version>1.6.1</version>
>         </dependency>
>         <dependency>
>             <groupId>org.apache.hadoop</groupId>
>             <artifactId>hadoop-hdfs</artifactId>
>             <version>2.6.0</version>
>         </dependency>
>         <dependency>
>             <groupId>org.apache.hadoop</groupId>
>             <artifactId>hadoop-auth</artifactId>
>             <version>2.6.0</version>
>         </dependency>
>         <dependency>
>             <groupId>org.apache.hadoop</groupId>
>             <artifactId>hadoop-common</artifactId>
>             <version>2.6.0</version>
>         </dependency>
>         <dependency>
>             <groupId>org.apache.hadoop</groupId>
>             <artifactId>hadoop-core</artifactId>
>             <version>1.2.1</version>
>         </dependency>
>     </dependencies>
> </project>
>
> I would like to know what needs to be changed in the pom to get things going.
>
> Regards,
> Satyajit.
>
>
