Spark: java.io.InvalidClassException: scala.Option; local class incompatible: stream classdesc serialVersionUID local class serialVersionUID

The following exception is thrown when launching a Spark program with “sbt run”.

The root cause is a serialization mismatch: “sbt run” puts your project’s Scala library on the classpath, which may differ from the Scala version Spark itself was compiled against, so the serialVersionUID of core classes like scala.Option no longer matches. Instead, launch the program with “bin/spark-submit” (which uses Spark’s own classpath) and the exception will go away.

 

Exception

Caused by: java.io.InvalidClassException: scala.Option; local class incompatible: stream classdesc serialVersionUID = -2062608324514658839, local class serialVersionUID = -114498752079829388
 at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:621)
 at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1623)
 at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1518)
 at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1623)
 at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1518)
 at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1774)
 at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
 at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
 at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
 at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
 at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
 at java.io.ObjectInputStream.readArray(ObjectInputStream.java:1707)
 at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1345)
 at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
 at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
 at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
 at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
 at java.io.ObjectInputStream.readArray(ObjectInputStream.java:1707)
 at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1345)
 at java.io.ObjectInputStream.readObject(ObjectInputStream.java:371)
 at org.apache.spark.util.Utils$.deserialize(Utils.scala:104)
 at org.apache.spark.SparkContext$$anonfun$objectFile$1$$anonfun$apply$12.apply(SparkContext.scala:1214)
 at org.apache.spark.SparkContext$$anonfun$objectFile$1$$anonfun$apply$12.apply(SparkContext.scala:1214)
 at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:396)
 at scala.collection.Iterator$$anon$10.hasNext(Iterator.scala:350)
 at scala.collection.Iterator$class.foreach(Iterator.scala:742)
 at scala.collection.AbstractIterator.foreach(Iterator.scala:1194)
 at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59)
 at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104)
 at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48)
 at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:308)
 at scala.collection.AbstractIterator.to(Iterator.scala:1194)
 at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:300)
 at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1194)
 at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:287)
 at scala.collection.AbstractIterator.toArray(Iterator.scala:1194)
 at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$28.apply(RDD.scala:1298)
 at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$28.apply(RDD.scala:1298)
 at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1839)
 at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1839)
 at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)
 at org.apache.spark.scheduler.Task.run(Task.scala:88)
 at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214)
 at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
 at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
 at java.lang.Thread.run(Thread.java:745)

 

About Neil Rubens

see http://ActiveIntelligence.org

This entry was posted in Uncategorized. Bookmark the permalink.

Leave a Reply

Your email address will not be published. Required fields are marked *

*