You should add spark-mllib_2.10 as a dependency of your own project
instead of declaring it as your project's artifactId. Also, always use
the same version for spark-core and spark-mllib: you used 1.3.0 for
spark-core but 1.4.0 for spark-mllib, and mixing versions is not
guaranteed to work. If you set the scope to "provided", the mllib jar
won't be included in your application's runtime dependencies, which
means you need to use spark-submit from Spark to launch your
application. Please read the user guide:
http://spark.apache.org/docs/latest/submitting-applications.html.
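
For reference, a minimal dependencies section along these lines might
look like the sketch below (the 1.4.0 version and the _2.10 Scala
suffix are assumptions based on the POMs you posted; adjust them to
match your Spark installation):

    <dependencies>
        <!-- Spark core and MLlib: same version for both, and both
             marked "provided" so spark-submit supplies them at run time -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.10</artifactId>
            <version>1.4.0</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-mllib_2.10</artifactId>
            <version>1.4.0</version>
            <scope>provided</scope>
        </dependency>
    </dependencies>

With both scopes set to "provided", you would then launch the packaged
jar with bin/spark-submit rather than plain java, so that the Spark
classes are on the classpath.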
-Xiangrui

On Sun, Jun 14, 2015 at 11:39 PM, masoom alam <masoom.a...@wanclouds.net> wrote:
> Even the following POM is not working:
>
> <project xmlns="http://maven.apache.org/POM/4.0.0"
> xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
> xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
> http://maven.apache.org/maven-v4_0_0.xsd">
>     <parent>
>         <artifactId>spark-parent_2.10</artifactId>
>         <groupId>org.apache.spark</groupId>
>         <version>1.4.0</version>
>     </parent>
>     <modelVersion>4.0.0</modelVersion>
>     <groupId>org.apache.spark</groupId>
>     <artifactId>spark-mllib_2.10</artifactId>
>     <name>Spark Project ML Library</name>
>     <url>http://spark.apache.org/</url>
>     <build>
>         <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory>
>         <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory>
>     </build>
>     <profiles>
>         <profile>
>             <id>netlib-lgpl</id>
>             <dependencies>
>                 <dependency>
>                     <groupId>com.github.fommil.netlib</groupId>
>                     <artifactId>all</artifactId>
>                     <version>${netlib.java.version}</version>
>                     <type>pom</type>
>                 </dependency>
>             </dependencies>
>         </profile>
>     </profiles>
>     <dependencies>
>         <dependency>
>             <groupId>org.apache.spark</groupId>
>             <artifactId>spark-core_2.10</artifactId>
>             <version>1.4.0</version>
>             <scope>compile</scope>
>         </dependency>
>         <dependency>
>             <groupId>org.apache.spark</groupId>
>             <artifactId>spark-streaming_2.10</artifactId>
>             <version>1.4.0</version>
>             <scope>compile</scope>
>         </dependency>
>         <dependency>
>             <groupId>org.apache.spark</groupId>
>             <artifactId>spark-sql_2.10</artifactId>
>             <version>1.4.0</version>
>             <scope>compile</scope>
>         </dependency>
>         <dependency>
>             <groupId>org.apache.spark</groupId>
>             <artifactId>spark-graphx_2.10</artifactId>
>             <version>1.4.0</version>
>             <scope>compile</scope>
>         </dependency>
>         <dependency>
>             <groupId>org.jblas</groupId>
>             <artifactId>jblas</artifactId>
>             <version>1.2.4</version>
>             <scope>test</scope>
>         </dependency>
>         <dependency>
>             <groupId>org.scalanlp</groupId>
>             <artifactId>breeze_2.10</artifactId>
>             <version>0.11.2</version>
>             <scope>compile</scope>
>             <exclusions>
>                 <exclusion>
>                     <artifactId>junit</artifactId>
>                     <groupId>junit</groupId>
>                 </exclusion>
>                 <exclusion>
>                     <artifactId>commons-math3</artifactId>
>                     <groupId>org.apache.commons</groupId>
>                 </exclusion>
>             </exclusions>
>         </dependency>
>         <dependency>
>             <groupId>org.apache.commons</groupId>
>             <artifactId>commons-math3</artifactId>
>             <version>3.4.1</version>
>             <scope>compile</scope>
>         </dependency>
>         <dependency>
>             <groupId>org.scalacheck</groupId>
>             <artifactId>scalacheck_2.10</artifactId>
>             <version>1.11.3</version>
>             <scope>test</scope>
>             <exclusions>
>                 <exclusion>
>                     <artifactId>test-interface</artifactId>
>                     <groupId>org.scala-sbt</groupId>
>                 </exclusion>
>             </exclusions>
>         </dependency>
>         <dependency>
>             <groupId>junit</groupId>
>             <artifactId>junit</artifactId>
>             <version>4.10</version>
>             <scope>test</scope>
>             <exclusions>
>                 <exclusion>
>                     <artifactId>hamcrest-core</artifactId>
>                     <groupId>org.hamcrest</groupId>
>                 </exclusion>
>             </exclusions>
>         </dependency>
>         <dependency>
>             <groupId>com.novocode</groupId>
>             <artifactId>junit-interface</artifactId>
>             <version>0.10</version>
>             <scope>test</scope>
>             <exclusions>
>                 <exclusion>
>                     <artifactId>junit-dep</artifactId>
>                     <groupId>junit</groupId>
>                 </exclusion>
>                 <exclusion>
>                     <artifactId>test-interface</artifactId>
>                     <groupId>org.scala-tools.testing</groupId>
>                 </exclusion>
>             </exclusions>
>         </dependency>
>         <dependency>
>             <groupId>org.mockito</groupId>
>             <artifactId>mockito-all</artifactId>
>             <version>1.9.5</version>
>             <scope>test</scope>
>         </dependency>
>         <dependency>
>             <groupId>org.apache.spark</groupId>
>             <artifactId>spark-streaming_2.10</artifactId>
>             <version>1.4.0</version>
>             <type>test-jar</type>
>             <scope>test</scope>
>         </dependency>
>         <dependency>
>             <groupId>org.jpmml</groupId>
>             <artifactId>pmml-model</artifactId>
>             <version>1.1.15</version>
>             <scope>compile</scope>
>             <exclusions>
>                 <exclusion>
>                     <artifactId>FastInfoset</artifactId>
>                     <groupId>com.sun.xml.fastinfoset</groupId>
>                 </exclusion>
>                 <exclusion>
>                     <artifactId>istack-commons-runtime</artifactId>
>                     <groupId>com.sun.istack</groupId>
>                 </exclusion>
>             </exclusions>
>         </dependency>
>         <dependency>
>             <groupId>org.codehaus.groovy</groupId>
>             <artifactId>groovy-all</artifactId>
>             <version>2.3.7</version>
>             <scope>provided</scope>
>         </dependency>
>         <dependency>
>             <groupId>org.scalatest</groupId>
>             <artifactId>scalatest_2.10</artifactId>
>             <version>2.2.1</version>
>             <scope>test</scope>
>         </dependency>
>     </dependencies>
>     <properties>
>         <sbt.project.name>mllib</sbt.project.name>
>     </properties>
> </project>
>
> Any clues?
>
> On Sun, Jun 14, 2015 at 8:20 PM, masoom alam <masoom.a...@wanclouds.net>
> wrote:
>>
>> Getting the following error:
>>
>> [INFO]
>> [INFO]
>> ------------------------------------------------------------------------
>> [INFO] Building example 0.0.1
>> [INFO]
>> ------------------------------------------------------------------------
>> Downloading:
>> http://repo.maven.apache.org/maven2/org/apache/spark/spark-mllib_2.10/1.4.0/spark-mllib_2.10-1.4.0.pom
>> [INFO]
>> ------------------------------------------------------------------------
>> [INFO] BUILD FAILURE
>> [INFO]
>> ------------------------------------------------------------------------
>> [INFO] Total time: 41.561s
>> [INFO] Finished at: Mon Jun 15 08:17:43 PKT 2015
>> [INFO] Final Memory: 6M/16M
>> [INFO]
>> ------------------------------------------------------------------------
>> [ERROR] Failed to execute goal on project learning-spark-mini-example:
>> Could not resolve dependencies for project
>> com.oreilly.learningsparkexamples.mini:learning-spark-mini-example:jar:0.0.1:
>> Failed to collect dependencies for
>> [org.apache.spark:spark-core_2.10:jar:1.3.0 (provided),
>> org.apache.spark:spark-mllib_2.10:jar:1.4.0 (compile)]: Failed to read
>> artifact descriptor for org.apache.spark:spark-mllib_2.10:jar:1.4.0: Could
>> not transfer artifact org.apache.spark:spark-mllib_2.10:pom:1.4.0 from/to
>> central (http://repo.maven.apache.org/maven2): repo.maven.apache.org:
>> Unknown host repo.maven.apache.org -> [Help 1]
>> [ERROR]
>> [ERROR] To see the full stack trace of the errors, re-run Maven with the
>> -e switch.
>> [ERROR] Re-run Maven using the -X switch to enable full debug logging.
>> [ERROR]
>> [ERROR] For more information about the errors and possible solutions,
>> please read the following articles:
>> [ERROR] [Help 1]
>> http://cwiki.apache.org/confluence/display/MAVEN/DependencyResolutionException
>>
>>
>>
>>
>> _____________________________________________________________________________________________
>>
>> My POM file is as follows:-
>>
>> <?xml version="1.0" encoding="UTF-8"?>
>> <!--<project xmlns="http://maven.apache.org/POM/4.0.0"
>>          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
>>          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
>> http://maven.apache.org/xsd/maven-4.0.0.xsd">
>>     <modelVersion>4.0.0</modelVersion>
>>
>>     <groupId>com.oreilly.learningsparkexamples.mini</groupId>
>>     <artifactId>learning-spark-mini-example</artifactId>
>>     <version>1.0-SNAPSHOT</version>
>>
>>
>> </project>
>>  -->
>>
>> <project>
>>     <groupId>com.oreilly.learningsparkexamples.mini</groupId>
>>     <artifactId>learning-spark-mini-example</artifactId>
>>     <modelVersion>4.0.0</modelVersion>
>>     <name>example</name>
>>     <packaging>jar</packaging>
>>     <version>0.0.1</version>
>>     <dependencies>
>>         <dependency> <!-- Spark dependency -->
>>             <groupId>org.apache.spark</groupId>
>>             <artifactId>spark-core_2.10</artifactId>
>>             <version>1.3.0</version>
>>             <scope>provided</scope>
>>         </dependency>
>>         <dependency> <!-- Spark dependency -->
>>             <groupId>org.apache.spark</groupId>
>>             <artifactId>spark-mllib_2.10</artifactId>
>>             <version>1.4.0</version>
>>             <scope>provided</scope>
>>         </dependency>
>>     </dependencies>
>>     <properties>
>>         <java.version>1.7</java.version>
>>     </properties>
>>     <build>
>>         <pluginManagement>
>>             <plugins>
>>                 <plugin> <groupId>org.apache.maven.plugins</groupId>
>>                     <artifactId>maven-compiler-plugin</artifactId>
>>                     <version>3.1</version>
>>                     <configuration>
>>                         <source>${java.version}</source>
>>                         <target>${java.version}</target>
>>                     </configuration>
>>                 </plugin>
>>             </plugins>
>>         </pluginManagement>
>>     </build>
>> </project>
>>
>> ___________________________________________________________________________________________________
>>
>> I have noticed that it tries to download the following file:
>> http://repo.maven.apache.org/maven2/org/apache/spark/spark-mllib_2.10/1.4.0/spark-mllib_2.10-1.4.0.pom
>> which is available at that URL.
>>
>> Any pointers?
>>
>> Thanks for the help.
>>
>>
>>
>>
>> On Sun, Jun 14, 2015 at 5:24 AM, masoom alam <masoom.a...@wanclouds.net>
>> wrote:
>>>
>>> Thanks a lot. Will try in a while and update.
>>>
>>> Thanks again
>>>
>>> On Jun 14, 2015 5:13 PM, "Sonal Goyal" <sonalgoy...@gmail.com> wrote:
>>>>
>>>> Try with spark-mllib_2.10 as the artifactId
>>>>
>>>> On Jun 14, 2015 12:02 AM, "masoom alam" <masoom.a...@wanclouds.net>
>>>> wrote:
>>>>>
>>>>> This is not working:
>>>>>
>>>>>  <dependency> <!-- Spark dependency -->
>>>>>             <groupId>org.apache.spark.mlib</groupId>
>>>>>             <artifactId>spark-mlib</artifactId>
>>>>>             <!--<version>1.3.0</version> -->
>>>>>             <scope>provided</scope>
>>>>>         </dependency>
>>>>>
>>>>>
>>>>>
>>>>> On Sat, Jun 13, 2015 at 11:56 PM, masoom alam
>>>>> <masoom.a...@wanclouds.net> wrote:
>>>>>>
>>>>>> These two imports are missing and thus FP-growth is not compiling...
>>>>>>
>>>>>> import org.apache.spark.mllib.fpm.FPGrowth;
>>>>>> import org.apache.spark.mllib.fpm.FPGrowthModel;
>>>>>>
>>>>>> How do I include the dependency in the POM file?
>>>>>>
>>>>>> On Sat, Jun 13, 2015 at 4:26 AM, masoom alam
>>>>>> <masoom.a...@wanclouds.net> wrote:
>>>>>>>
>>>>>>> Thanks for the answer. Any example?
>>>>>>>
>>>>>>> On Jun 13, 2015 2:13 PM, "Sonal Goyal" <sonalgoy...@gmail.com> wrote:
>>>>>>>>
>>>>>>>> I think you need to add a dependency on spark-mllib too.
>>>>>>>>
>>>>>>>> On Jun 13, 2015 11:10 AM, "masoom alam" <masoom.a...@wanclouds.net>
>>>>>>>> wrote:
>>>>>>>>>
>>>>>>>>> Hi everyone,
>>>>>>>>>
>>>>>>>>> I am trying to run the FP-growth example. I have tried to build with
>>>>>>>>> the following POM file:
>>>>>>>>>
>>>>>>>>> <project>
>>>>>>>>>     <groupId>com.oreilly.learningsparkexamples.mini</groupId>
>>>>>>>>>     <artifactId>learning-spark-mini-example</artifactId>
>>>>>>>>>     <modelVersion>4.0.0</modelVersion>
>>>>>>>>>     <name>example</name>
>>>>>>>>>     <packaging>jar</packaging>
>>>>>>>>>     <version>0.0.1</version>
>>>>>>>>>     <dependencies>
>>>>>>>>>         <dependency> <!-- Spark dependency -->
>>>>>>>>>             <groupId>org.apache.spark</groupId>
>>>>>>>>>             <artifactId>spark-core_2.10</artifactId>
>>>>>>>>>             <version>1.3.0</version>
>>>>>>>>>             <scope>provided</scope>
>>>>>>>>>         </dependency>
>>>>>>>>>     </dependencies>
>>>>>>>>>     <properties>
>>>>>>>>>         <java.version>1.7</java.version>
>>>>>>>>>     </properties>
>>>>>>>>>     <build>
>>>>>>>>>         <pluginManagement>
>>>>>>>>>             <plugins>
>>>>>>>>>                 <plugin>
>>>>>>>>> <groupId>org.apache.maven.plugins</groupId>
>>>>>>>>>                     <artifactId>maven-compiler-plugin</artifactId>
>>>>>>>>>                     <version>3.1</version>
>>>>>>>>>                     <configuration>
>>>>>>>>>                         <source>${java.version}</source>
>>>>>>>>>                         <target>${java.version}</target>
>>>>>>>>>                     </configuration>
>>>>>>>>>                 </plugin>
>>>>>>>>>         </plugins>
>>>>>>>>>     </pluginManagement>
>>>>>>>>> </build>
>>>>>>>>> </project>
>>>>>>>>>
>>>>>>>>> It successfully builds the project, but the IDE is complaining that:
>>>>>>>>> Error:(29, 34) java: package org.apache.spark.mllib.fpm does not exist
>>>>>>>>>
>>>>>>>>> Just as a side note, I downloaded version 1.3 of Spark, so the
>>>>>>>>> FP-growth algorithm should be part of it?
>>>>>>>>>
>>>>>>>>> Thanks.
>>>>>>
>>>>>>
>>>>>
>>
>

---------------------------------------------------------------------
To unsubscribe, e-mail: user-unsubscr...@spark.apache.org
For additional commands, e-mail: user-h...@spark.apache.org
