Quantcast

failed to compile spark because of the missing packages

Previous Topic Next Topic
 
classic Classic list List threaded Threaded
4 messages Options
Reply | Threaded
Open this post in threaded view
|  
Report Content as Inappropriate

failed to compile spark because of the missing packages

Nan Zhu
Hi, all

I just downloaded Spark 0.8.1, made some modifications, and compiled it on my laptop; everything works fine

I synced the source code directory with my desktop via GitHub (ignoring all .jars and target), and then I copied the lib_managed directory to my desktop

I tried to compile with sbt. It throws out the following errors:

Can anyone tell me what the reason for these errors might be?

Thank you very much!


[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:12: object jline is not a member of package tools
[error] import scala.tools.jline.console.completer._
[error]                    ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala:11: object jline is not a member of package tools
[error] import scala.tools.jline._
[error]                    ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala:819: type mismatch;
[error]  found   : org.apache.spark.repl.SparkJLineReader
[error]  required: scala.tools.nsc.interpreter.InteractiveReader
[error]     else try SparkJLineReader(
[error]                              ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala:1012: type mismatch;
[error]  found   : org.apache.spark.repl.SparkJLineReader
[error]  required: scala.tools.nsc.interpreter.InteractiveReader
[error]     repl.in = SparkJLineReader(repl)
[error]                               ^
[error] /home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala:258: not found: value kafka
[error]     kafkaStream[String, kafka.serializer.StringDecoder](kafkaParams, topics, storageLevel)
[error]                         ^
[error] /home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala:269: not found: value kafka
[error]   def kafkaStream[T: ClassManifest, D <: kafka.serializer.Decoder[_]: Manifest](
[error]                                          ^
[error] /home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala:27: not found: object kafka
[error] import kafka.consumer._
[error]        ^
[error] /home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala:274: ambiguous implicit values:
[error]  both method fallbackStringCanBuildFrom in class LowPriorityImplicits of type [T]=> scala.collection.generic.CanBuildFrom[String,T,scala.collection.immutable.IndexedSeq[T]]
[error]  and value evidence$5 of type Manifest[D]
[error]  match expected type <error>
[error]     val inputStream = new KafkaInputDStream[T, D](this, kafkaParams, topics, storageLevel)
[error]                       ^
[warn] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala:32: type <error> in type pattern <error> is unchecked since it is eliminated by erasure
[warn]     catch { case _: MissingRequirementError => None }
[warn]                     ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala:290: value executionFor is not a member of object SparkJLineCompletion.this.ids
[error]       (ids executionFor parsed) orElse
[error]            ^
[warn] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala:373: type <error> in type pattern <error> is unchecked since it is eliminated by erasure
[warn]         case ex: Exception =>
[warn]                  ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:11: object jline is not a member of package tools
[error] import scala.tools.jline.console.ConsoleReader
[error]                    ^
[warn] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:24: type <error> in type pattern <error> is unchecked since it is eliminated by erasure
[warn]     catch { case _: Exception => Nil }
[warn]                     ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:39: class file needed by ConsoleReaderHelper is missing.
[error] reference value jline of package tools refers to nonexisting symbol.
[error]   class JLineConsoleReader extends ConsoleReader with ConsoleReaderHelper {
[error]                                                       ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:26: value getTerminal is not a member of SparkJLineReader.this.JLineConsoleReader
[error]   private def term = consoleReader.getTerminal()
[error]                                    ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:33: recursive value x$1 needs type
[error]       val Candidates(newCursor, newCandidates) = tc.complete(buf, cursor)
[error]                      ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:43: value print is not a member of SparkJLineReader.this.JLineConsoleReader
[error]       this.print(prompt)
[error]            ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:44: value flush is not a member of SparkJLineReader.this.JLineConsoleReader
[error]       this.flush()
[error]            ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:45: value readVirtualKey is not a member of SparkJLineReader.this.JLineConsoleReader
[error]       this.readVirtualKey()
[error]            ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:47: value resetPromptLine is not a member of SparkJLineReader.this.JLineConsoleReader
[error]     def eraseLine() = consoleReader.resetPromptLine("", "", 0)
[error]                                     ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:50: value setBellEnabled is not a member of SparkJLineReader.this.JLineConsoleReader
[error]     this setBellEnabled false
[error]          ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:52: value setHistory is not a member of SparkJLineReader.this.JLineConsoleReader
[error]       this setHistory history
[error]            ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:59: value addCompleter is not a member of SparkJLineReader.this.JLineConsoleReader
[error]       this addCompleter argCompletor
[error]            ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:60: value setAutoprintThreshold is not a member of SparkJLineReader.this.JLineConsoleReader
[error]       this setAutoprintThreshold 400 / max completion candidates without warning
[error]            ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:66: value getCursorBuffer is not a member of SparkJLineReader.this.JLineConsoleReader
[error]   def currentLine: String = consoleReader.getCursorBuffer.buffer.toString
[error]                                           ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:69: value delete is not a member of SparkJLineReader.this.JLineConsoleReader
[error]     while (consoleReader.delete()) { }
[error]                          ^
[error] /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:72: value readLine is not a member of SparkJLineReader.this.JLineConsoleReader
[error]   def readOneLine(prompt: String) = consoleReader readLine prompt
[error]                                                   ^
[warn] three warnings found
[error] 20 errors found
[error] /home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala:181: not found: value kafka
[error]   def kafkaStream[T, D <: kafka.serializer.Decoder[_]](
[error]                           ^
[error] /home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala:49: could not find implicit value for evidence parameter of type ClassManifest[T]
[error] class KafkaInputDStream[T: ClassManifest, D <: Decoder[_]: Manifest](
[error]                          ^
[error] /home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala:58: ambiguous implicit values:
[error]  both method stringCanBuildFrom in object Predef of type => scala.collection.generic.CanBuildFrom[String,Char,String]
[error]  and method conforms in object Predef of type [A]=> <:<[A,A]
[error]  match expected type <error>
[error]     new KafkaReceiver[T, D](kafkaParams, topics, storageLevel)
[error]     ^
[error] /home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala:64: could not find implicit value for evidence parameter of type ClassManifest[<error>]
[error] class KafkaReceiver[T: ClassManifest, D <: Decoder[_]: Manifest](
[error]                      ^
[error] 8 errors found
[error] (streaming/compile:compile) Compilation failed
[error] (repl/compile:compile) Compilation failed
[error] Total time: 18 s, completed 23-Dec-2013 11:29:46 AM

Reply | Threaded
Open this post in threaded view
|  
Report Content as Inappropriate

Re: failed to compile spark because of the missing packages

Patrick Wendell
Hey Nan,

You shouldn't copy lib_managed manually. SBT will deal with that. Try
just using the same .gitignore settings that we have in the spark
github. Seems like you are accidentally including some files that
cause this to get messed up.

- Patrick

On Mon, Dec 23, 2013 at 8:37 AM, Nan Zhu <[hidden email]> wrote:

> Hi, all
>
> I just downloaded spark 0.8.1, made some modification, and compile in my
> laptop, everything works fine
>
> I sync the source code directory with my desktop via github (ignore all
> .jars and target), and then I copied lib-managed directory to my desktop
>
> I tried to compile with sbt. It throws out the following errors:
>
> Can any one tell me what can be the reason of these errors?
>
> Thank you very much!
>
>
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:12:
> object jline is not a member of package tools
> [error] import scala.tools.jline.console.completer._
> [error]                    ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala:11:
> object jline is not a member of package tools
> [error] import scala.tools.jline._
> [error]                    ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala:819:
> type mismatch;
> [error]  found   : org.apache.spark.repl.SparkJLineReader
> [error]  required: scala.tools.nsc.interpreter.InteractiveReader
> [error]     else try SparkJLineReader(
> [error]                              ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala:1012:
> type mismatch;
> [error]  found   : org.apache.spark.repl.SparkJLineReader
> [error]  required: scala.tools.nsc.interpreter.InteractiveReader
> [error]     repl.in = SparkJLineReader(repl)
> [error]                               ^
> [error]
> /home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala:258:
> not found: value kafka
> [error]     kafkaStream[String, kafka.serializer.StringDecoder](kafkaParams,
> topics, storageLevel)
> [error]                         ^
> [error]
> /home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala:269:
> not found: value kafka
> [error]   def kafkaStream[T: ClassManifest, D <:
> kafka.serializer.Decoder[_]: Manifest](
> [error]                                          ^
> [error]
> /home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala:27:
> not found: object kafka
> [error] import kafka.consumer._
> [error]        ^
> [error]
> /home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala:274:
> ambiguous implicit values:
> [error]  both method fallbackStringCanBuildFrom in class
> LowPriorityImplicits of type [T]=>
> scala.collection.generic.CanBuildFrom[String,T,scala.collection.immutable.IndexedSeq[T]]
> [error]  and value evidence$5 of type Manifest[D]
> [error]  match expected type <error>
> [error]     val inputStream = new KafkaInputDStream[T, D](this, kafkaParams,
> topics, storageLevel)
> [error]                       ^
> [warn]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala:32:
> type <error> in type pattern <error> is unchecked since it is eliminated by
> erasure
> [warn]     catch { case _: MissingRequirementError => None }
> [warn]                     ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala:290:
> value executionFor is not a member of object SparkJLineCompletion.this.ids
> [error]       (ids executionFor parsed) orElse
> [error]            ^
> [warn]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala:373:
> type <error> in type pattern <error> is unchecked since it is eliminated by
> erasure
> [warn]         case ex: Exception =>
> [warn]                  ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:11:
> object jline is not a member of package tools
> [error] import scala.tools.jline.console.ConsoleReader
> [error]                    ^
> [warn]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:24:
> type <error> in type pattern <error> is unchecked since it is eliminated by
> erasure
> [warn]     catch { case _: Exception => Nil }
> [warn]                     ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:39:
> class file needed by ConsoleReaderHelper is missing.
> [error] reference value jline of package tools refers to nonexisting symbol.
> [error]   class JLineConsoleReader extends ConsoleReader with
> ConsoleReaderHelper {
> [error]                                                       ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:26:
> value getTerminal is not a member of
> SparkJLineReader.this.JLineConsoleReader
> [error]   private def term = consoleReader.getTerminal()
> [error]                                    ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:33:
> recursive value x$1 needs type
> [error]       val Candidates(newCursor, newCandidates) = tc.complete(buf,
> cursor)
> [error]                      ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:43:
> value print is not a member of SparkJLineReader.this.JLineConsoleReader
> [error]       this.print(prompt)
> [error]            ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:44:
> value flush is not a member of SparkJLineReader.this.JLineConsoleReader
> [error]       this.flush()
> [error]            ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:45:
> value readVirtualKey is not a member of
> SparkJLineReader.this.JLineConsoleReader
> [error]       this.readVirtualKey()
> [error]            ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:47:
> value resetPromptLine is not a member of
> SparkJLineReader.this.JLineConsoleReader
> [error]     def eraseLine() = consoleReader.resetPromptLine("", "", 0)
> [error]                                     ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:50:
> value setBellEnabled is not a member of
> SparkJLineReader.this.JLineConsoleReader
> [error]     this setBellEnabled false
> [error]          ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:52:
> value setHistory is not a member of SparkJLineReader.this.JLineConsoleReader
> [error]       this setHistory history
> [error]            ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:59:
> value addCompleter is not a member of
> SparkJLineReader.this.JLineConsoleReader
> [error]       this addCompleter argCompletor
> [error]            ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:60:
> value setAutoprintThreshold is not a member of
> SparkJLineReader.this.JLineConsoleReader
> [error]       this setAutoprintThreshold 400 / max completion candidates
> without warning
> [error]            ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:66:
> value getCursorBuffer is not a member of
> SparkJLineReader.this.JLineConsoleReader
> [error]   def currentLine: String =
> consoleReader.getCursorBuffer.buffer.toString
> [error]                                           ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:69:
> value delete is not a member of SparkJLineReader.this.JLineConsoleReader
> [error]     while (consoleReader.delete()) { }
> [error]                          ^
> [error]
> /home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:72:
> value readLine is not a member of SparkJLineReader.this.JLineConsoleReader
> [error]   def readOneLine(prompt: String) = consoleReader readLine prompt
> [error]                                                   ^
> [warn] three warnings found
> [error] 20 errors found
> [error]
> /home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala:181:
> not found: value kafka
> [error]   def kafkaStream[T, D <: kafka.serializer.Decoder[_]](
> [error]                           ^
> [error]
> /home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala:49:
> could not find implicit value for evidence parameter of type
> ClassManifest[T]
> [error] class KafkaInputDStream[T: ClassManifest, D <: Decoder[_]:
> Manifest](
> [error]                          ^
> [error]
> /home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala:58:
> ambiguous implicit values:
> [error]  both method stringCanBuildFrom in object Predef of type =>
> scala.collection.generic.CanBuildFrom[String,Char,String]
> [error]  and method conforms in object Predef of type [A]=> <:<[A,A]
> [error]  match expected type <error>
> [error]     new KafkaReceiver[T, D](kafkaParams, topics, storageLevel)
> [error]     ^
> [error]
> /home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala:64:
> could not find implicit value for evidence parameter of type
> ClassManifest[<error>]
> [error] class KafkaReceiver[T: ClassManifest, D <: Decoder[_]: Manifest](
> [error]                      ^
> [error] 8 errors found
> [error] (streaming/compile:compile) Compilation failed
> [error] (repl/compile:compile) Compilation failed
> [error] Total time: 18 s, completed 23-Dec-2013 11:29:46 AM
>
Reply | Threaded
Open this post in threaded view
|  
Report Content as Inappropriate

Re: failed to compile spark because of the missing packages

Nan Zhu
Hi, Patrick

Thanks for the reply

I still failed to compile the code, even though I made the following attempts

1. download spark-0.8.1.tgz, 

2. decompress, and copy the files to the github local repo directory (.gitignore is just copied from https://github.com/apache/incubator-spark/blob/master/.gitignore)

3. push files to git repo

4. pull files in the desktop 

5. sbt/sbt assembly/assembly, failed with the same error as my last email

any further comments?

Best,

-- 
Nan Zhu

On Monday, December 23, 2013 at 12:22 PM, Patrick Wendell wrote:

Hey Nan,

You shouldn't copy lib_managed manually. SBT will deal with that. Try
just using the same .gitignore settings that we have in the spark
github. Seems like you are accidentally including some files that
cause this to get messed up.

- Patrick

On Mon, Dec 23, 2013 at 8:37 AM, Nan Zhu <[hidden email]> wrote:
Hi, all

I just downloaded spark 0.8.1, made some modification, and compile in my
laptop, everything works fine

I sync the source code directory with my desktop via github (ignore all
.jars and target), and then I copied lib-managed directory to my desktop

I tried to compile with sbt. It throws out the following errors:

Can any one tell me what can be the reason of these errors?

Thank you very much!


[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:12:
object jline is not a member of package tools
[error] import scala.tools.jline.console.completer._
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala:11:
object jline is not a member of package tools
[error] import scala.tools.jline._
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala:819:
type mismatch;
[error] found : org.apache.spark.repl.SparkJLineReader
[error] required: scala.tools.nsc.interpreter.InteractiveReader
[error] else try SparkJLineReader(
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala:1012:
type mismatch;
[error] found : org.apache.spark.repl.SparkJLineReader
[error] required: scala.tools.nsc.interpreter.InteractiveReader
[error] repl.in = SparkJLineReader(repl)
[error] ^
[error]
/home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala:258:
not found: value kafka
[error] kafkaStream[String, kafka.serializer.StringDecoder](kafkaParams,
topics, storageLevel)
[error] ^
[error]
/home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala:269:
not found: value kafka
[error] def kafkaStream[T: ClassManifest, D <:
kafka.serializer.Decoder[_]: Manifest](
[error] ^
[error]
/home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala:27:
not found: object kafka
[error] import kafka.consumer._
[error] ^
[error]
/home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala:274:
ambiguous implicit values:
[error] both method fallbackStringCanBuildFrom in class
LowPriorityImplicits of type [T]=>
scala.collection.generic.CanBuildFrom[String,T,scala.collection.immutable.IndexedSeq[T]]
[error] and value evidence$5 of type Manifest[D]
[error] match expected type <error>
[error] val inputStream = new KafkaInputDStream[T, D](this, kafkaParams,
topics, storageLevel)
[error] ^
[warn]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala:32:
type <error> in type pattern <error> is unchecked since it is eliminated by
erasure
[warn] catch { case _: MissingRequirementError => None }
[warn] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala:290:
value executionFor is not a member of object SparkJLineCompletion.this.ids
[error] (ids executionFor parsed) orElse
[error] ^
[warn]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala:373:
type <error> in type pattern <error> is unchecked since it is eliminated by
erasure
[warn] case ex: Exception =>
[warn] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:11:
object jline is not a member of package tools
[error] import scala.tools.jline.console.ConsoleReader
[error] ^
[warn]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:24:
type <error> in type pattern <error> is unchecked since it is eliminated by
erasure
[warn] catch { case _: Exception => Nil }
[warn] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:39:
class file needed by ConsoleReaderHelper is missing.
[error] reference value jline of package tools refers to nonexisting symbol.
[error] class JLineConsoleReader extends ConsoleReader with
ConsoleReaderHelper {
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:26:
value getTerminal is not a member of
SparkJLineReader.this.JLineConsoleReader
[error] private def term = consoleReader.getTerminal()
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:33:
recursive value x$1 needs type
[error] val Candidates(newCursor, newCandidates) = tc.complete(buf,
cursor)
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:43:
value print is not a member of SparkJLineReader.this.JLineConsoleReader
[error] this.print(prompt)
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:44:
value flush is not a member of SparkJLineReader.this.JLineConsoleReader
[error] this.flush()
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:45:
value readVirtualKey is not a member of
SparkJLineReader.this.JLineConsoleReader
[error] this.readVirtualKey()
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:47:
value resetPromptLine is not a member of
SparkJLineReader.this.JLineConsoleReader
[error] def eraseLine() = consoleReader.resetPromptLine("", "", 0)
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:50:
value setBellEnabled is not a member of
SparkJLineReader.this.JLineConsoleReader
[error] this setBellEnabled false
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:52:
value setHistory is not a member of SparkJLineReader.this.JLineConsoleReader
[error] this setHistory history
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:59:
value addCompleter is not a member of
SparkJLineReader.this.JLineConsoleReader
[error] this addCompleter argCompletor
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:60:
value setAutoprintThreshold is not a member of
SparkJLineReader.this.JLineConsoleReader
[error] this setAutoprintThreshold 400 / max completion candidates
without warning
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:66:
value getCursorBuffer is not a member of
SparkJLineReader.this.JLineConsoleReader
[error] def currentLine: String =
consoleReader.getCursorBuffer.buffer.toString
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:69:
value delete is not a member of SparkJLineReader.this.JLineConsoleReader
[error] while (consoleReader.delete()) { }
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:72:
value readLine is not a member of SparkJLineReader.this.JLineConsoleReader
[error] def readOneLine(prompt: String) = consoleReader readLine prompt
[error] ^
[warn] three warnings found
[error] 20 errors found
[error]
/home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala:181:
not found: value kafka
[error] def kafkaStream[T, D <: kafka.serializer.Decoder[_]](
[error] ^
[error]
/home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala:49:
could not find implicit value for evidence parameter of type
ClassManifest[T]
[error] class KafkaInputDStream[T: ClassManifest, D <: Decoder[_]:
Manifest](
[error] ^
[error]
/home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala:58:
ambiguous implicit values:
[error] both method stringCanBuildFrom in object Predef of type =>
scala.collection.generic.CanBuildFrom[String,Char,String]
[error] and method conforms in object Predef of type [A]=> <:<[A,A]
[error] match expected type <error>
[error] new KafkaReceiver[T, D](kafkaParams, topics, storageLevel)
[error] ^
[error]
/home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala:64:
could not find implicit value for evidence parameter of type
ClassManifest[<error>]
[error] class KafkaReceiver[T: ClassManifest, D <: Decoder[_]: Manifest](
[error] ^
[error] 8 errors found
[error] (streaming/compile:compile) Compilation failed
[error] (repl/compile:compile) Compilation failed
[error] Total time: 18 s, completed 23-Dec-2013 11:29:46 AM

Reply | Threaded
Open this post in threaded view
|  
Report Content as Inappropriate

Re: failed to compile spark because of the missing packages

Nan Zhu
I finally solved the issue manually

I found that when I compile with sbt, the lib/ directories under streaming/ and repl/ are missing,

The reason is that the official .gitignore is intended to ignore "lib/", while in the distributed tgz files these two lib/ directories are included.

Best,

-- 
Nan Zhu

On Monday, December 23, 2013 at 4:12 PM, Nan Zhu wrote:

Hi, Patrick

Thanks for the reply

I still failed to compile the code, even I made the following attempts

1. download spark-0.8.1.tgz, 

2. decompress, and copy the files to the github local repo directory (.gitignore is just copied from https://github.com/apache/incubator-spark/blob/master/.gitignore)

3. push files to git repo

4. pull files in the desktop 

5. sbt/sbt assembly/assembly, failed with the same error as my last email

any further comments?

Best,

-- 
Nan Zhu

On Monday, December 23, 2013 at 12:22 PM, Patrick Wendell wrote:

Hey Nan,

You shouldn't copy lib_managed manually. SBT will deal with that. Try
just using the same .gitignore settings that we have in the spark
github. Seems like you are accidentally including some files that
cause this to get messed up.

- Patrick

On Mon, Dec 23, 2013 at 8:37 AM, Nan Zhu <[hidden email]> wrote:
Hi, all

I just downloaded spark 0.8.1, made some modification, and compile in my
laptop, everything works fine

I sync the source code directory with my desktop via github (ignore all
.jars and target), and then I copied lib-managed directory to my desktop

I tried to compile with sbt. It throws out the following errors:

Can any one tell me what can be the reason of these errors?

Thank you very much!


[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:12:
object jline is not a member of package tools
[error] import scala.tools.jline.console.completer._
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala:11:
object jline is not a member of package tools
[error] import scala.tools.jline._
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala:819:
type mismatch;
[error] found : org.apache.spark.repl.SparkJLineReader
[error] required: scala.tools.nsc.interpreter.InteractiveReader
[error] else try SparkJLineReader(
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkILoop.scala:1012:
type mismatch;
[error] found : org.apache.spark.repl.SparkJLineReader
[error] required: scala.tools.nsc.interpreter.InteractiveReader
[error] repl.in = SparkJLineReader(repl)
[error] ^
[error]
/home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala:258:
not found: value kafka
[error] kafkaStream[String, kafka.serializer.StringDecoder](kafkaParams,
topics, storageLevel)
[error] ^
[error]
/home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala:269:
not found: value kafka
[error] def kafkaStream[T: ClassManifest, D <:
kafka.serializer.Decoder[_]: Manifest](
[error] ^
[error]
/home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala:27:
not found: object kafka
[error] import kafka.consumer._
[error] ^
[error]
/home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/StreamingContext.scala:274:
ambiguous implicit values:
[error] both method fallbackStringCanBuildFrom in class
LowPriorityImplicits of type [T]=>
scala.collection.generic.CanBuildFrom[String,T,scala.collection.immutable.IndexedSeq[T]]
[error] and value evidence$5 of type Manifest[D]
[error] match expected type <error>
[error] val inputStream = new KafkaInputDStream[T, D](this, kafkaParams,
topics, storageLevel)
[error] ^
[warn]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala:32:
type <error> in type pattern <error> is unchecked since it is eliminated by
erasure
[warn] catch { case _: MissingRequirementError => None }
[warn] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala:290:
value executionFor is not a member of object SparkJLineCompletion.this.ids
[error] (ids executionFor parsed) orElse
[error] ^
[warn]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineCompletion.scala:373:
type <error> in type pattern <error> is unchecked since it is eliminated by
erasure
[warn] case ex: Exception =>
[warn] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:11:
object jline is not a member of package tools
[error] import scala.tools.jline.console.ConsoleReader
[error] ^
[warn]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:24:
type <error> in type pattern <error> is unchecked since it is eliminated by
erasure
[warn] catch { case _: Exception => Nil }
[warn] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:39:
class file needed by ConsoleReaderHelper is missing.
[error] reference value jline of package tools refers to nonexisting symbol.
[error] class JLineConsoleReader extends ConsoleReader with
ConsoleReaderHelper {
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:26:
value getTerminal is not a member of
SparkJLineReader.this.JLineConsoleReader
[error] private def term = consoleReader.getTerminal()
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:33:
recursive value x$1 needs type
[error] val Candidates(newCursor, newCandidates) = tc.complete(buf,
cursor)
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:43:
value print is not a member of SparkJLineReader.this.JLineConsoleReader
[error] this.print(prompt)
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:44:
value flush is not a member of SparkJLineReader.this.JLineConsoleReader
[error] this.flush()
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:45:
value readVirtualKey is not a member of
SparkJLineReader.this.JLineConsoleReader
[error] this.readVirtualKey()
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:47:
value resetPromptLine is not a member of
SparkJLineReader.this.JLineConsoleReader
[error] def eraseLine() = consoleReader.resetPromptLine("", "", 0)
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:50:
value setBellEnabled is not a member of
SparkJLineReader.this.JLineConsoleReader
[error] this setBellEnabled false
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:52:
value setHistory is not a member of SparkJLineReader.this.JLineConsoleReader
[error] this setHistory history
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:59:
value addCompleter is not a member of
SparkJLineReader.this.JLineConsoleReader
[error] this addCompleter argCompletor
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:60:
value setAutoprintThreshold is not a member of
SparkJLineReader.this.JLineConsoleReader
[error] this setAutoprintThreshold 400 / max completion candidates
without warning
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:66:
value getCursorBuffer is not a member of
SparkJLineReader.this.JLineConsoleReader
[error] def currentLine: String =
consoleReader.getCursorBuffer.buffer.toString
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:69:
value delete is not a member of SparkJLineReader.this.JLineConsoleReader
[error] while (consoleReader.delete()) { }
[error] ^
[error]
/home/zhunan/spark/repl/src/main/scala/org/apache/spark/repl/SparkJLineReader.scala:72:
value readLine is not a member of SparkJLineReader.this.JLineConsoleReader
[error] def readOneLine(prompt: String) = consoleReader readLine prompt
[error] ^
[warn] three warnings found
[error] 20 errors found
[error]
/home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/api/java/JavaStreamingContext.scala:181:
not found: value kafka
[error] def kafkaStream[T, D <: kafka.serializer.Decoder[_]](
[error] ^
[error]
/home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala:49:
could not find implicit value for evidence parameter of type
ClassManifest[T]
[error] class KafkaInputDStream[T: ClassManifest, D <: Decoder[_]:
Manifest](
[error] ^
[error]
/home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala:58:
ambiguous implicit values:
[error] both method stringCanBuildFrom in object Predef of type =>
scala.collection.generic.CanBuildFrom[String,Char,String]
[error] and method conforms in object Predef of type [A]=> <:<[A,A]
[error] match expected type <error>
[error] new KafkaReceiver[T, D](kafkaParams, topics, storageLevel)
[error] ^
[error]
/home/zhunan/spark/streaming/src/main/scala/org/apache/spark/streaming/dstream/KafkaInputDStream.scala:64:
could not find implicit value for evidence parameter of type
ClassManifest[<error>]
[error] class KafkaReceiver[T: ClassManifest, D <: Decoder[_]: Manifest](
[error] ^
[error] 8 errors found
[error] (streaming/compile:compile) Compilation failed
[error] (repl/compile:compile) Compilation failed
[error] Total time: 18 s, completed 23-Dec-2013 11:29:46 AM


Loading...