Michael Armbrust created SPARK-4462:
---------------------------------------

             Summary: flume-sink build broken in SBT
                 Key: SPARK-4462
                 URL: https://issues.apache.org/jira/browse/SPARK-4462
             Project: Spark
          Issue Type: Bug
          Components: Streaming
            Reporter: Michael Armbrust
            Assignee: Tathagata Das


{code}
$ sbt streaming-flume-sink/compile
Using /Library/Java/JavaVirtualMachines/jdk1.7.0_45.jdk/Contents/Home as default JAVA_HOME.
Note, this will be overridden by -java-home if it is set.
[info] Loading project definition from /Users/marmbrus/workspace/spark/project/project
[info] Loading project definition from /Users/marmbrus/.sbt/0.13/staging/ad8e8574a5bcb2d22d23/sbt-pom-reader/project
[warn] Multiple resolvers having different access mechanism configured with same name 'sbt-plugin-releases'. To avoid conflict, Remove duplicate project resolvers (`resolvers`) or rename publishing resolver (`publishTo`).
[warn] There may be incompatibilities among your library dependencies.
[warn] Here are some of the libraries that were evicted:
[warn]  * com.typesafe.sbt:sbt-git:0.6.1 -> 0.6.2
[warn]  * com.typesafe.sbt:sbt-site:0.7.0 -> 0.7.1
[warn] Run 'evicted' to see detailed eviction warnings
[info] Loading project definition from /Users/marmbrus/workspace/spark/project
[warn] There may be incompatibilities among your library dependencies.
[warn] Here are some of the libraries that were evicted:
[warn]  * org.apache.maven.wagon:wagon-provider-api:1.0-beta-6 -> 2.2
[warn] Run 'evicted' to see detailed eviction warnings
NOTE: SPARK_HIVE is deprecated, please use -Phive and -Phive-thriftserver flags.
Enabled default scala profile
[info] Set current project to spark-parent (in build file:/Users/marmbrus/workspace/spark/)
[warn] There may be incompatibilities among your library dependencies.
[warn] Here are some of the libraries that were evicted:
[warn]  * com.google.guava:guava:10.0.1 -> 14.0.1
[warn] Run 'evicted' to see detailed eviction warnings
[info] Compiling 5 Scala sources and 3 Java sources to /Users/marmbrus/workspace/spark/external/flume-sink/target/scala-2.10/classes...
[error] /Users/marmbrus/workspace/spark/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/Logging.scala:19: object slf4j is not a member of package org
[error] import org.slf4j.{Logger, LoggerFactory}
[error]            ^
[error] /Users/marmbrus/workspace/spark/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/Logging.scala:29: not found: type Logger
[error]   @transient private var log_ : Logger = null
[error]                                 ^
[error] /Users/marmbrus/workspace/spark/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/Logging.scala:32: not found: type Logger
[error]   protected def log: Logger = {
[error]                      ^
[error] /Users/marmbrus/workspace/spark/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/Logging.scala:40: not found: value LoggerFactory
[error]       log_ = LoggerFactory.getLogger(className)
[error]              ^
[error] /Users/marmbrus/workspace/spark/external/flume-sink/target/scala-2.10/src_managed/main/compiled_avro/org/apache/spark/streaming/flume/sink/EventBatch.java:9: object specific is not a member of package org.apache.avro
[error] public class EventBatch extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
[error]                                                 ^
[error] /Users/marmbrus/workspace/spark/external/flume-sink/target/scala-2.10/src_managed/main/compiled_avro/org/apache/spark/streaming/flume/sink/EventBatch.java:9: object specific is not a member of package org.apache.avro
[error] public class EventBatch extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
[error]                                                                                                  ^
[error] /Users/marmbrus/workspace/spark/external/flume-sink/target/scala-2.10/src_managed/main/compiled_avro/org/apache/spark/streaming/flume/sink/SparkSinkEvent.java:9: object specific is not a member of package org.apache.avro
[error] public class SparkSinkEvent extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
[error]                                                     ^
[error] /Users/marmbrus/workspace/spark/external/flume-sink/target/scala-2.10/src_managed/main/compiled_avro/org/apache/spark/streaming/flume/sink/SparkSinkEvent.java:9: object specific is not a member of package org.apache.avro
[error] public class SparkSinkEvent extends org.apache.avro.specific.SpecificRecordBase implements org.apache.avro.specific.SpecificRecord {
[error]                                                                                                      ^
[error] /Users/marmbrus/workspace/spark/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/SparkSink.scala:22: object ipc is not a member of package org.apache.avro
[error] import org.apache.avro.ipc.NettyServer
[error]                        ^
[error] /Users/marmbrus/workspace/spark/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/SparkSink.scala:23: object ipc is not a member of package org.apache.avro
[error] import org.apache.avro.ipc.specific.SpecificResponder
[error]                        ^
[error] /Users/marmbrus/workspace/spark/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/SparkSink.scala:72: not found: type NettyServer
[error]   private var serverOpt: Option[NettyServer] = None
[error]                                 ^
[error] /Users/marmbrus/workspace/spark/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/SparkSink.scala:86: not found: type SpecificResponder
[error]     val responder = new SpecificResponder(classOf[SparkFlumeProtocol], handler.get)
[error]                         ^
[error] /Users/marmbrus/workspace/spark/external/flume-sink/src/main/scala/org/apache/spark/streaming/flume/sink/SparkSink.scala:90: not found: type NettyServer
[error]     serverOpt = Option(new NettyServer(responder, new InetSocketAddress(hostname, port)))
[error]                            ^
[error] 13 errors found
[error] (streaming-flume-sink/compile:compile) Compilation failed
[error] Total time: 3 s, completed Nov 17, 2014 3:13:54 PM
{code}
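
All 13 errors are missing-classpath symptoms: the slf4j and Avro (avro / avro-ipc) artifacts that the Maven build supplies to this module are evidently not reaching the streaming-flume-sink compile classpath when the build is driven through SBT. As a rough sketch only (coordinates and versions below are illustrative assumptions, not taken from Spark's pom), the module effectively needs something equivalent to:

{code}
// Hypothetical sketch: the dependencies streaming-flume-sink would need on its
// compile classpath for the errors above to go away. Versions are assumptions.
libraryDependencies ++= Seq(
  "org.slf4j"       % "slf4j-api" % "1.7.5",
  "org.apache.avro" % "avro"      % "1.7.7",
  "org.apache.avro" % "avro-ipc"  % "1.7.7"
)
{code}

Since the summary says only the SBT path is broken, the actual problem is presumably in how the pom's dependencies for this module get translated into the SBT build, rather than in the pom itself.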


