The repository for the G.A. codebase for Calliope. For the E.A. codebase, request early access to the development repo at http://tuplejump.github.io/calliope/
Apache License 2.0
48 stars · 11 forks
PersistDStream example raises org.apache.hadoop.conf.Configuration - java.io.IOException #18
Hi,
I just tried the PersistDStream example and I don't know how to deal with the following exception: org.apache.hadoop.conf.Configuration - java.io.IOException: config()
I'm using the following releases:
"org.apache.spark" %% "spark-streaming" % "1.1.0",
"com.tuplejump" %% "calliope-core" % "1.1.0-CTP-U2"
Did I forget something?
Thanks,
Oliver
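
For reference, a minimal build.sbt consolidating the releases quoted above might look like the sketch below. The Scala version is my assumption (Spark 1.1.x was published for Scala 2.10), and depending on where the Calliope CTP artifacts are hosted you may also need the Tuplejump resolver from the project page linked above:

// build.sbt -- a minimal sketch, assuming Scala 2.10
scalaVersion := "2.10.4"

libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-streaming" % "1.1.0",
  "com.tuplejump"    %% "calliope-core"   % "1.1.0-CTP-U2"
)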
11:59:45.074 [sparkDriver-akka.actor.default-dispatcher-15] DEBUG o.a.spark.scheduler.local.LocalActor - [actor] received message StatusUpdate(37,RUNNING,java.nio.HeapByteBuffer[pos=0 lim=0 cap=0]) from Actor[akka://sparkDriver/deadLetters]
11:59:45.075 [Executor task launch worker-1] DEBUG o.apache.spark.storage.BlockManager - Getting local block broadcast_19
11:59:45.075 [Executor task launch worker-1] DEBUG o.apache.spark.storage.BlockManager - Level for block broadcast_19 is StorageLevel(true, true, false, true, 1)
11:59:45.075 [Executor task launch worker-1] DEBUG o.apache.spark.storage.BlockManager - Getting block broadcast_19 from memory
11:59:45.075 [Executor task launch worker-1] DEBUG org.apache.spark.executor.Executor - Task 37's epoch is 1
11:59:45.075 [sparkDriver-akka.actor.default-dispatcher-15] DEBUG o.a.spark.scheduler.local.LocalActor - [actor] handled message (0.151476 ms) StatusUpdate(37,RUNNING,java.nio.HeapByteBuffer[pos=0 lim=0 cap=0]) from Actor[akka://sparkDriver/deadLetters]
11:59:45.078 [Executor task launch worker-1] DEBUG org.apache.hadoop.conf.Configuration - java.io.IOException: config()
at org.apache.hadoop.conf.Configuration.&lt;init&gt;(Configuration.java:227)
at org.apache.hadoop.conf.Configuration.&lt;init&gt;(Configuration.java:214)
at org.apache.spark.SerializableWritable.readObject(SerializableWritable.scala:41)
at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017)
at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:62)
at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:87)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:57)
at org.apache.spark.scheduler.Task.run(Task.scala:54)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:177)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
11:59:45.078 [Executor task launch worker-1] DEBUG org.apache.hadoop.conf.Configuration - java.io.IOException: config()
at org.apache.hadoop.conf.Configuration.&lt;init&gt;(Configuration.java:227)
at org.apache.hadoop.conf.Configuration.&lt;init&gt;(Configuration.java:214)
at org.apache.hadoop.mapred.JobConf.&lt;init&gt;(JobConf.java:321)
at sun.reflect.GeneratedConstructorAccessor12.newInstance(Unknown Source)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:113)
at org.apache.hadoop.io.WritableFactories.newInstance(WritableFactories.java:53)
at org.apache.hadoop.io.ObjectWritable.readObject(ObjectWritable.java:236)
at org.apache.hadoop.io.ObjectWritable.readFields(ObjectWritable.java:66)
at org.apache.spark.SerializableWritable.readObject(SerializableWritable.scala:42)
at sun.reflect.GeneratedMethodAccessor11.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1017)
at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1893)
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:62)
at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:87)
at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:57)
at org.apache.spark.scheduler.Task.run(Task.scala:54)
at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:177)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
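
Both traces bottom out in org.apache.spark.SerializableWritable.readObject, i.e. the executor deserializing the Hadoop JobConf that was shipped with the task; as the DEBUG level suggests, the Hadoop Configuration constructor logs a synthetic "config()" IOException to record its creation site, so these entries are debug traces rather than thrown exceptions. A self-contained sketch of that round trip, for illustration only (ConfigRoundTrip and the buffer names are hypothetical, not part of the PersistDStream example):

import java.io.{ByteArrayInputStream, ByteArrayOutputStream, ObjectInputStream, ObjectOutputStream}
import org.apache.hadoop.mapred.JobConf
import org.apache.spark.SerializableWritable

object ConfigRoundTrip extends App {
  // Driver side: Spark wraps the JobConf in SerializableWritable and
  // serializes it along with the task.
  val buffer = new ByteArrayOutputStream()
  val out = new ObjectOutputStream(buffer)
  out.writeObject(new SerializableWritable(new JobConf()))
  out.close()

  // Executor side: readObject invokes SerializableWritable.readObject, which
  // constructs a fresh Configuration (SerializableWritable.scala:41 in the
  // first trace) and rebuilds the JobConf through WritableFactories (the
  // second trace). Each Configuration constructor emits the DEBUG line seen
  // above when the org.apache.hadoop.conf.Configuration logger is at DEBUG.
  val in = new ObjectInputStream(new ByteArrayInputStream(buffer.toByteArray))
  val restored = in.readObject().asInstanceOf[SerializableWritable[JobConf]]
  println(restored.value)
}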