cerndb / dist-keras

Distributed Deep Learning, with a focus on distributed training, using Keras and Apache Spark.
http://joerihermans.com/work/distributed-keras/
GNU General Public License v3.0

File "/home/hadoop/anaconda2/lib/python2.7/site-packages/scipy/ndimage/filters.py", line 36, in <module> from . import _ni_support ImportError: cannot import name _ni_support #82

Open · hager123 opened this issue 5 years ago

hager123 commented 5 years ago

When I run this code:

```python
trainer = SingleTrainer(keras_model=model, worker_optimizer=optimizer,
                        loss=loss, features_col="features_normalized",
                        label_col="label", num_epoch=1, batch_size=32)
trained_model = trainer.train(training_set)
```
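For context, the snippet assumes that `model`, `optimizer`, `loss`, and `training_set` were defined in earlier cells. A minimal sketch of what that setup presumably looks like, modeled on the dist-keras example notebooks (the layer sizes and data shapes here are illustrative assumptions, not taken from this report):

```python
# Hypothetical setup, modeled on the dist-keras examples;
# layer sizes and the number of classes are assumptions.
from keras.models import Sequential
from keras.layers import Dense

model = Sequential()
model.add(Dense(64, input_dim=10, activation='relu'))  # 10 input features (assumed)
model.add(Dense(2, activation='softmax'))              # 2 output classes (assumed)

optimizer = 'adagrad'                # worker-side optimizer, passed by name
loss = 'categorical_crossentropy'

# training_set: a Spark DataFrame holding a 'features_normalized' vector
# column and a 'label' column, e.g. built with pyspark.ml transformers.
```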

The following error is displayed:

```
Py4JJavaError                             Traceback (most recent call last)
<ipython-input-...> in <module>()
      3                         loss=loss, features_col="features_normalized",
      4                         label_col="label", num_epoch=1, batch_size=32)
----> 5 trained_model = trainer.train(training_set)

/home/hadoop/anaconda2/lib/python2.7/site-packages/distkeras/trainers.pyc in train(self, dataframe, shuffle)

/usr/local/spark/python/pyspark/rdd.pyc in collect(self)
    822         """
    823         with SCCallSiteSync(self.context) as css:
--> 824             port = self.ctx._jvm.PythonRDD.collectAndServe(self._jrdd.rdd())
    825         return list(_load_from_socket(port, self._jrdd_deserializer))
    826

/usr/local/spark/python/lib/py4j-0.10.6-src.zip/py4j/java_gateway.py in __call__(self, *args)
   1158         answer = self.gateway_client.send_command(command)
   1159         return_value = get_return_value(
-> 1160             answer, self.gateway_client, self.target_id, self.name)
   1161
   1162         for temp_arg in temp_args:

/usr/local/spark/python/pyspark/sql/utils.pyc in deco(*a, **kw)
     61     def deco(*a, **kw):
     62         try:
---> 63             return f(*a, **kw)
     64         except py4j.protocol.Py4JJavaError as e:
     65             s = e.java_exception.toString()

/usr/local/spark/python/lib/py4j-0.10.6-src.zip/py4j/protocol.py in get_return_value(answer, gateway_client, target_id, name)
    318                 raise Py4JJavaError(
    319                     "An error occurred while calling {0}{1}{2}.\n".
--> 320                     format(target_id, ".", name), value)
    321             else:
    322                 raise Py4JError(

Py4JJavaError: An error occurred while calling z:org.apache.spark.api.python.PythonRDD.collectAndServe.
: org.apache.spark.SparkException: Job aborted due to stage failure: Task 0 in stage 33.0 failed 1 times, most recent failure: Lost task 0.0 in stage 33.0 (TID 77, localhost, executor driver): org.apache.spark.api.python.PythonException: Traceback (most recent call last):
  File "/usr/local/spark/python/lib/pyspark.zip/pyspark/worker.py", line 216, in main
    func, profiler, deserializer, serializer = read_command(pickleSer, infile)
  File "/usr/local/spark/python/lib/pyspark.zip/pyspark/worker.py", line 60, in read_command
    command = serializer.loads(command.value)
  File "/usr/local/spark/python/lib/pyspark.zip/pyspark/serializers.py", line 562, in loads
    return pickle.loads(obj)
  File "/tmp/spark-aca404a0-a60e-43d2-a913-acc7ed93a7a0/userFiles-59dd8a1c-344e-4b9d-8fb0-e7982aae3d8c/distkeras/utils.py", line 5, in <module>
    from keras import backend as K
  File "/home/hadoop/anaconda2/lib/python2.7/site-packages/keras/__init__.py", line 7, in <module>
    from . import datasets
  File "/home/hadoop/anaconda2/lib/python2.7/site-packages/keras/datasets/__init__.py", line 4, in <module>
    from . import imdb
  File "/home/hadoop/anaconda2/lib/python2.7/site-packages/keras/datasets/imdb.py", line 8, in <module>
    from ..preprocessing.sequence import _remove_long_seq
  File "/home/hadoop/anaconda2/lib/python2.7/site-packages/keras/preprocessing/__init__.py", line 12, in <module>
    from . import image
  File "/home/hadoop/anaconda2/lib/python2.7/site-packages/keras/preprocessing/image.py", line 7, in <module>
    from keras_preprocessing import image
  File "/home/hadoop/anaconda2/lib/python2.7/site-packages/keras_preprocessing/image.py", line 10, in <module>
    import scipy.ndimage as ndi
  File "/home/hadoop/anaconda2/lib/python2.7/site-packages/scipy/ndimage/__init__.py", line 161, in <module>
    from .filters import *
  File "/home/hadoop/anaconda2/lib/python2.7/site-packages/scipy/ndimage/filters.py", line 36, in <module>
    from . import _ni_support
ImportError: cannot import name _ni_support

	at org.apache.spark.api.python.BasePythonRunner$ReaderIterator.handlePythonException(PythonRunner.scala:298)
	at org.apache.spark.api.python.PythonRunner$$anon$1.read(PythonRunner.scala:438)
	at org.apache.spark.api.python.PythonRunner$$anon$1.read(PythonRunner.scala:421)
	at org.apache.spark.api.python.BasePythonRunner$ReaderIterator.hasNext(PythonRunner.scala:252)
	at org.apache.spark.InterruptibleIterator.hasNext(InterruptibleIterator.scala:37)
	at scala.collection.Iterator$class.foreach(Iterator.scala:893)
	at org.apache.spark.InterruptibleIterator.foreach(InterruptibleIterator.scala:28)
	at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59)
	at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104)
	at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48)
	at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:310)
	at org.apache.spark.InterruptibleIterator.to(InterruptibleIterator.scala:28)
	at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:302)
	at org.apache.spark.InterruptibleIterator.toBuffer(InterruptibleIterator.scala:28)
	at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:289)
	at org.apache.spark.InterruptibleIterator.toArray(InterruptibleIterator.scala:28)
	at org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$12.apply(RDD.scala:939)
	at org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$12.apply(RDD.scala:939)
	at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2067)
	at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2067)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
	at org.apache.spark.scheduler.Task.run(Task.scala:109)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)

Driver stacktrace:
	at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1599)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1587)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1586)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
	at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1586)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:831)
	at scala.Option.foreach(Option.scala:257)
	at org.apache.spark.scheduler.DAGScheduler.handleTaskSetFailed(DAGScheduler.scala:831)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1820)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1769)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1758)
	at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
	at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:642)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2027)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2048)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2067)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:2092)
	at org.apache.spark.rdd.RDD$$anonfun$collect$1.apply(RDD.scala:939)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:112)
	at org.apache.spark.rdd.RDD.withScope(RDD.scala:363)
	at org.apache.spark.rdd.RDD.collect(RDD.scala:938)
	at org.apache.spark.api.python.PythonRDD$.collectAndServe(PythonRDD.scala:153)
	at org.apache.spark.api.python.PythonRDD.collectAndServe(PythonRDD.scala)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:244)
	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
	at py4j.Gateway.invoke(Gateway.java:282)
	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
	at py4j.commands.CallCommand.execute(CallCommand.java:79)
	at py4j.GatewayConnection.run(GatewayConnection.java:214)
	at java.lang.Thread.run(Thread.java:748)
Caused by: org.apache.spark.api.python.PythonException: Traceback (most recent call last):
  File "/usr/local/spark/python/lib/pyspark.zip/pyspark/worker.py", line 216, in main
    func, profiler, deserializer, serializer = read_command(pickleSer, infile)
  File "/usr/local/spark/python/lib/pyspark.zip/pyspark/worker.py", line 60, in read_command
    command = serializer.loads(command.value)
  File "/usr/local/spark/python/lib/pyspark.zip/pyspark/serializers.py", line 562, in loads
    return pickle.loads(obj)
  File "/tmp/spark-aca404a0-a60e-43d2-a913-acc7ed93a7a0/userFiles-59dd8a1c-344e-4b9d-8fb0-e7982aae3d8c/distkeras/utils.py", line 5, in <module>
    from keras import backend as K
  File "/home/hadoop/anaconda2/lib/python2.7/site-packages/keras/__init__.py", line 7, in <module>
    from . import datasets
  File "/home/hadoop/anaconda2/lib/python2.7/site-packages/keras/datasets/__init__.py", line 4, in <module>
    from . import imdb
  File "/home/hadoop/anaconda2/lib/python2.7/site-packages/keras/datasets/imdb.py", line 8, in <module>
    from ..preprocessing.sequence import _remove_long_seq
  File "/home/hadoop/anaconda2/lib/python2.7/site-packages/keras/preprocessing/__init__.py", line 12, in <module>
    from . import image
  File "/home/hadoop/anaconda2/lib/python2.7/site-packages/keras/preprocessing/image.py", line 7, in <module>
    from keras_preprocessing import image
  File "/home/hadoop/anaconda2/lib/python2.7/site-packages/keras_preprocessing/image.py", line 10, in <module>
    import scipy.ndimage as ndi
  File "/home/hadoop/anaconda2/lib/python2.7/site-packages/scipy/ndimage/__init__.py", line 161, in <module>
    from .filters import *
  File "/home/hadoop/anaconda2/lib/python2.7/site-packages/scipy/ndimage/filters.py", line 36, in <module>
    from . import _ni_support
ImportError: cannot import name _ni_support

	at org.apache.spark.api.python.BasePythonRunner$ReaderIterator.handlePythonException(PythonRunner.scala:298)
	at org.apache.spark.api.python.PythonRunner$$anon$1.read(PythonRunner.scala:438)
	at org.apache.spark.api.python.PythonRunner$$anon$1.read(PythonRunner.scala:421)
	at org.apache.spark.api.python.BasePythonRunner$ReaderIterator.hasNext(PythonRunner.scala:252)
	at org.apache.spark.InterruptibleIterator.hasNext(InterruptibleIterator.scala:37)
	at scala.collection.Iterator$class.foreach(Iterator.scala:893)
	at org.apache.spark.InterruptibleIterator.foreach(InterruptibleIterator.scala:28)
	at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59)
	at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104)
	at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48)
	at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:310)
	at org.apache.spark.InterruptibleIterator.to(InterruptibleIterator.scala:28)
	at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:302)
	at org.apache.spark.InterruptibleIterator.toBuffer(InterruptibleIterator.scala:28)
	at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:289)
	at org.apache.spark.InterruptibleIterator.toArray(InterruptibleIterator.scala:28)
	at org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$12.apply(RDD.scala:939)
	at org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$12.apply(RDD.scala:939)
	at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2067)
	at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:2067)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
	at org.apache.spark.scheduler.Task.run(Task.scala:109)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:345)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	... 1 more
```
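The failing import happens while the Spark worker unpickles the training command: distkeras/utils.py imports keras, which pulls in keras_preprocessing, which imports scipy.ndimage, and scipy's own internal `from . import _ni_support` then fails. That pattern usually points at a broken or mismatched scipy installation in the Python environment the executors run, for example a partially upgraded scipy, or the executors resolving a different interpreter than the driver. As a sketch (not from this report: `probe_env` is a made-up name, and `sc` is assumed to be the live SparkContext of the pyspark session), one way to check what each executor actually sees:

```python
import sys

def probe_env(_):
    # Re-run the failing import chain directly on the executor; if its
    # environment is broken this raises the same ImportError as above.
    import scipy
    import scipy.ndimage
    return [(sys.executable, scipy.__version__)]

# One probe per partition; distinct() collapses identical environments.
print(sc.parallelize(range(4), 4).mapPartitions(probe_env).distinct().collect())
```

If the probe fails only on the executors, the usual first steps are pointing `PYSPARK_PYTHON` at the same Anaconda interpreter the driver uses, or reinstalling scipy in that environment (for example `pip install --force-reinstall scipy`).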