Open · bruceforlearn opened this issue 6 years ago
Hey,
I haven't seen this issue before, but I haven't tried to run this in a while... Could be that a more recent version of spark notebook (container) is missing something.
The only thing I could find is: https://community.cloudera.com/t5/Web-UI-Hue-Beeswax/HUE-Spark-notebook/td-p/40500
So you could try to get into the spark notebook container, find desktop.conf (or a similar config file), add the "use_new_editor" parameter to it, then restart the container and see what happens.
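For example, something along these lines (a sketch only; the container name and the config path are assumptions, so adjust them to whatever `docker ps` and `find` report on your host):

```sh
# Open a shell inside the spark notebook container (name is an assumption)
docker exec -it sparknotebook /bin/bash

# Look for a Hue-style config file, if the image ships one
find / -name 'hue.ini' -o -name 'desktop.conf' 2>/dev/null

# Append the flag to whichever file turns up (the path below is hypothetical),
# then leave the container and restart it
printf '\n[desktop]\nuse_new_editor=true\n' >> /path/to/hue.ini
exit
docker restart sparknotebook
```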
root@sparknotebook:/var/run# find / -name hue.ini
root@sparknotebook:/var/run# find / -name desktop
root@sparknotebook:/var/run# find / -name conf
/opt/docker/conf
/proc/sys/net/ipv4/conf
/proc/sys/net/ipv6/conf
root@sparknotebook:/opt/docker/conf# ls
application.conf  clusters  data  demo  profiles  scripts
root@sparknotebook:/opt/docker/conf# cat application.conf
application.secret = "nTnOIy6^yFM5o[Z_T6jBriIYm7id43TSeLJC1U?bxt?PhfMJeCYX@s;RcNqX]xeA"
application.langs = "en"
# my.application.Router
# conf/my.application.routes
logger.root = ERROR
logger.play = INFO
logger.application = DEBUG
manager {
notebooks {
# Server dir (containing notebook files)
dir = ./notebooks
dir = ${?NOTEBOOKS_DIR}
###
# Default custom configuration for all **CREATED** notebooks
#
# custom {
# localRepo = "/tmp/repo",
# repos = ["m" % "default" % "http://repo.maven.apache.org/maven2/" % "maven"],
# deps = ["junit % junit % 4.12"],
# imports = ["import scala.util._"],
# sparkConf = {
# spark.number.works.too : "test",
# spark.boh: 2
# }
# }
###
# Override custom configuration for **ALL** notebooks
# TO USE WITH CARE → could break full reproducibility
#
# override {
# localRepo = "/tmp/repo",
# repos = ["m" % "default" % "http://repo.maven.apache.org/maven2/" % "maven"],
# deps = ["junit % junit % 4.12"],
# imports = ["import scala.util._"],
# sparkConf = {
# spark.number.works.too : "test",
# spark.boh: 2
# }
# }
}
name = "Spark Notebook"
# maxBytesInFlight = 5M
kernel {
# Uncomment to kill kernel after inactivity timeout
# killTimeout = 60 minute
###
# Uncomment to prevent the kernel (spark-context) from starting automatically when a notebook is opened
## autostartOnNotebookOpen = false
###
# Uncomment to enable remote vm debugging on the provided port
#
#debug.port=9090
###
# Change the level of debug in the logs/sn-session-$kernelId-$notebookPath.log file
#
#log.level=debug
###
# Add vmArgs to the remote process
#vmArgs=["-XX:+PrintGCDetails", "-Dsun.io.serialization.extendedDebugInfo=true"]
###
# Working directory for kernel VMs
#dir=.
###
# List of URLs of kernel init scripts (to be run when a kernel first starts).
#init=[]
###
# Kernel VM memory settings
#heap=4g
#stack=-1 #default XSS
permGen = 1024m
#reservedCodeCache=-1 #default
###
# Classpath for kernel VMs (defaults to server VM classpath)
#classpath=[]
###
# REPL compiler options: Use the -deprecation warning by default for more
# useful feedback about obsolete functions, etc. Use the -feature warning
# for more explicit warnings about "optional" language features that
# should be enabled explicitly. One of those that's used by Spark Notebook
# itself is "reflective calls".
compilerArgs=["-deprecation", "-feature", "-language:reflectiveCalls"]
}
clusters {
#file=./conf/clusters
}
# if `enabled` is absent or set to false, tachyon support will be disabled
# if `url` is set, then the local in-memory tachyon won't start
tachyon {
enabled = false # false if commented out / not present
# baseDir = "/spark-storage-dir" # default to '/share'
}
}
notebook-server {
akka {
loggers = ["akka.event.slf4j.Slf4jLogger"]
loglevel = "DEBUG"
stdout-loglevel = "DEBUG"
log-config-on-start = off
daemonic = true
debug {
## enable function of LoggingReceive, which is to log any received message at DEBUG level
# receive = on
## enable DEBUG logging of all AutoReceiveMessages (Kill, PoisonPill and the like)
# autoreceive = on
## enable DEBUG logging of actor lifecycle changes
# lifecycle = on
}
actor {
provider = "akka.remote.RemoteActorRefProvider"
default-stash-dispatcher {
mailbox-type = "akka.dispatch.UnboundedDequeBasedMailbox"
}
}
remote {
## Debugging:
# log-sent-messages = on
# log-received-messages = on
enabled-transports = ["akka.remote.netty.tcp"]
# transport = "akka.remote.netty.NettyRemoteTransport"
## see (http://doc.akka.io/docs/akka/snapshot/scala/remoting.html)
# These configuration entries help to avoid akka timeouts, especially:
# → threshold (12 for instance; it's referred to as a good choice on EC2)
# → heartbeat-interval (10 s, to reduce the communication between the server and the notebook backend)
# → acceptable-heartbeat-pause (90 s, to reduce the communication between the server and the notebook backend)
#
#transport-failure-detector.heartbeat-interval = 4 s
#transport-failure-detector.threshold = 7.0 # raise it to 12 for instance on EC2/Mesos/Yarn/...
#transport-failure-detector.max-sample-size = 100
#transport-failure-detector.min-std-deviation = 100 ms
#transport-failure-detector.acceptable-heartbeat-pause = 10 s
#watch-failure-detector.heartbeat-interval = 1 s
#watch-failure-detector.threshold = 10.0 # raise it to 12 for instance on EC2/Mesos/Yarn/...
#watch-failure-detector.max-sample-size = 200
#watch-failure-detector.min-std-deviation = 100 ms
#watch-failure-detector.acceptable-heartbeat-pause = 10 s
#watch-failure-detector.unreachable-nodes-reaper-interval = 1s
#watch-failure-detector.expected-response-after = 3 s
netty.tcp {
hostname = "127.0.0.1"
port = 0
maximum-frame-size = "1 GiB"
}
}
}
}
remote-repos {
proxy {
# "host" = ...,
# "port" = ...,
# "username" = ...,
# "password" = ...,
# "nonProxyHosts" = ...
}
}

[desktop]
use_new_editor=true
414033cdbb18 andypetrella/spark-notebook:0.6.3-scala-2.10.5-spark-1.6.1-hadoop-2.7.1-with-hive-with-parquet "bin/spark-notebook" 18 hours ago Up 2 seconds 0.0.0.0:4040->4040/tcp, 0.0.0.0:9000->9000/tcp, 9443/tcp sparknotebook
It doesn't work and shows the same error.
Please help me.
Thank you.
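Worth noting here (an observation, not a confirmed fix): `[desktop]` / `use_new_editor=true` is Hue ini syntax, but above it was appended to spark-notebook's `application.conf`, a HOCON file that Hue never reads. The ImportError is raised by the UI on :8088, which appears to be served by the Hue-based `hdfsfb` container (see the `docker ps -a` output further down), so a hedged next step would be to repeat the search there:

```sh
# Assumption: the :8088 UI is Hue running inside the hdfsfb container,
# not inside sparknotebook; look for its ini file there instead
docker exec -it hdfsfb find / -name 'hue.ini' 2>/dev/null
```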
@marosmars Hi,
do you have a PNDA 3.2 Dockerfile?
As you know, it was abandoned as of the PNDA 3.3 release.
Thank you
@bruceforlearn I'm not sure which Dockerfile you mean; can you give me some pointers?
Hello @marosmars
After I ran all the containers and opened http://:8088/home, an ImportError occurred:
ImportError: cannot import name USE_NEW_EDITOR
However, http://:9000 (Spark notebook) and http://:8080 (Spark master) work normally.
Do you have any idea?
Thank you
root@uocv2:/home/pnda/pnda-quickstart# docker ps -a
CONTAINER ID   IMAGE                                                           NAMES
47be77162a05   gobblin/gobblin-standalone:ubuntu-gobblin-latest                gobblin
316dae974f37   bde2020/hdfs-filebrowser:3.9                                    hdfsfb
414033cdbb18   andypetrella/spark-notebook:0.6.3-scala-2.10.5-spark-1.6.1-hp   sparknotebook
aebf6a521b36   earthquakesan/hadoop-spark-worker:1.0.0                         pndaquickstart_spark-worker_1
a5930e0da798   earthquakesan/hadoop-spark-master:1.0.0                         spark-master
c69fa0cbac18   bde2020/hadoop-datanode:1.0.0                                   datanode2
acb090ffc86b   bde2020/hadoop-namenode:1.0.0                                   namenode
e69c7ed3f862   bde2020/hadoop-datanode:1.0.0                                   datanode1
3dd25b2ef1d5   spotify/kafka                                                   kafka
Traceback (most recent call last):
  File "/opt/hue/build/env/lib/python2.7/site-packages/Django-1.6.10-py2.7.egg/django/contrib/staticfiles/handlers.py", line 67, in __call__
    return self.application(environ, start_response)
  File "/opt/hue/build/env/lib/python2.7/site-packages/Django-1.6.10-py2.7.egg/django/core/handlers/wsgi.py", line 206, in __call__
    response = self.get_response(request)
  File "/opt/hue/build/env/lib/python2.7/site-packages/Django-1.6.10-py2.7.egg/django/core/handlers/base.py", line 194, in get_response
    response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
  File "/opt/hue/build/env/lib/python2.7/site-packages/Django-1.6.10-py2.7.egg/django/core/handlers/base.py", line 229, in handle_uncaught_exception
    return debug.technical_500_response(request, *exc_info)
  File "/opt/hue/build/env/lib/python2.7/site-packages/django_extensions-1.5.0-py2.7.egg/django_extensions/management/technical_response.py", line 5, in null_technical_500_response
    six.reraise(exc_type, exc_value, tb)
  File "/opt/hue/build/env/lib/python2.7/site-packages/Django-1.6.10-py2.7.egg/django/core/handlers/base.py", line 112, in get_response
    response = wrapped_callback(request, *callback_args, **callback_kwargs)
  File "/opt/hue/build/env/lib/python2.7/site-packages/Django-1.6.10-py2.7.egg/django/db/transaction.py", line 371, in inner
    return func(*args, **kwargs)
  File "/opt/hue/desktop/core/src/desktop/auth/views.py", line 154, in dt_login
    'active_directory': is_active_directory
  File "/opt/hue/desktop/core/src/desktop/lib/django_util.py", line 227, in render
    **kwargs)
  File "/opt/hue/desktop/core/src/desktop/lib/django_util.py", line 148, in _render_to_response
    return django_mako.render_to_response(template, *args, **kwargs)
  File "/opt/hue/desktop/core/src/desktop/lib/django_mako.py", line 125, in render_to_response
    return HttpResponse(render_to_string(template_name, data_dictionary), **kwargs)
  File "/opt/hue/desktop/core/src/desktop/lib/django_mako.py", line 114, in render_to_string_normal
    result = template.render(**data_dict)
  File "/opt/hue/build/env/lib/python2.7/site-packages/Mako-0.8.1-py2.7.egg/mako/template.py", line 443, in render
    return runtime._render(self, self.callable_, args, data)
  File "/opt/hue/build/env/lib/python2.7/site-packages/Mako-0.8.1-py2.7.egg/mako/runtime.py", line 786, in _render
    **_kwargs_for_callable(callable_, data))
  File "/opt/hue/build/env/lib/python2.7/site-packages/Mako-0.8.1-py2.7.egg/mako/runtime.py", line 818, in _render_context
    _exec_template(inherit, lclcontext, args=args, kwargs=kwargs)
  File "/opt/hue/build/env/lib/python2.7/site-packages/Mako-0.8.1-py2.7.egg/mako/runtime.py", line 844, in _exec_template
    callable_(context, *args, **kwargs)
  File "/tmp/tmpHIDfar/desktop/login.mako.py", line 47, in render_body
    __M_writer(unicode( commonheader("Welcome to Hue", "login", user, "50px") ))
  File "/opt/hue/desktop/core/src/desktop/views.py", line 384, in commonheader
    'is_ldap_setup': 'desktop.auth.backend.LdapBackend' in desktop.conf.AUTH.BACKEND.get()
  File "/opt/hue/desktop/core/src/desktop/lib/django_mako.py", line 112, in render_to_string_normal
    template = lookup.get_template(template_name)
  File "/opt/hue/desktop/core/src/desktop/lib/django_mako.py", line 89, in get_template
    return real_loader.get_template(uri)
  File "/opt/hue/build/env/lib/python2.7/site-packages/Mako-0.8.1-py2.7.egg/mako/lookup.py", line 245, in get_template
    return self._load(srcfile, uri)
  File "/opt/hue/build/env/lib/python2.7/site-packages/Mako-0.8.1-py2.7.egg/mako/lookup.py", line 311, in _load
    **self.template_args)
  File "/opt/hue/build/env/lib/python2.7/site-packages/Mako-0.8.1-py2.7.egg/mako/template.py", line 321, in __init__
    module = self._compile_from_file(path, filename)
  File "/opt/hue/build/env/lib/python2.7/site-packages/Mako-0.8.1-py2.7.egg/mako/template.py", line 379, in _compile_from_file
    module = compat.load_module(self.module_id, path)
  File "/opt/hue/build/env/lib/python2.7/site-packages/Mako-0.8.1-py2.7.egg/mako/compat.py", line 55, in load_module
    return imp.load_source(module_id, path, fp)
  File "/tmp/tmpHIDfar/desktop/common_header.mako.py", line 26, in <module>
    from desktop.conf import USE_NEW_EDITOR
ImportError: cannot import name USE_NEW_EDITOR
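One check that would narrow this down (a sketch; the `hdfsfb` container name is taken from the `docker ps -a` listing above and `/opt/hue` from the traceback): verify whether the Hue build behind :8088 defines `USE_NEW_EDITOR` at all.

```sh
# If this prints nothing, desktop/conf.py predates the USE_NEW_EDITOR flag,
# so the import in the compiled template
# (/tmp/tmpHIDfar/desktop/common_header.mako.py) can never succeed,
# no matter what hue.ini says
docker exec hdfsfb grep -n "USE_NEW_EDITOR" /opt/hue/desktop/core/src/desktop/conf.py
```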