NullPointerException: The Spark SQL phase planning failed with an internal error. Please, fill a bug report in, and provide the full stack trace. #112
Hi there, I am able to load data using the CDM connector (spark_cdm_connector_1_1_19_3.jar):
val df = spark.read.format("com.microsoft.cdm")
  .option("storage", "storage details")
  .option("manifestPath", "synapse/model.json")
  .option("sasToken", "<>")
  .option("entity", "mis_resourceavailability")
  .option("mode", "permissive")
  .load()

df.show()
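A minimal check that the entity schema resolves (a sketch using only the df defined above; it inspects the analyzed schema and does not start a job or trigger physical planning):

// Print the resolved schema and field names without materializing rows.
df.printSchema()
println(df.schema.fieldNames.mkString(", "))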
However, when I try to view the data, I get the error below. I also tried df.select("*").show(), but it fails with the same error.
[INTERNAL_ERROR] The Spark SQL phase planning failed with an internal error. Please, fill a bug report in, and provide the full stack trace.
Caused by: NullPointerException:
at org.apache.spark.sql.execution.QueryExecution$.toInternalError(QueryExecution.scala:764)
at org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:776)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:349)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:973)
at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:346)
at org.apache.spark.sql.execution.QueryExecution.sparkPlan$lzycompute(QueryExecution.scala:297)
at org.apache.spark.sql.execution.QueryExecution.sparkPlan(QueryExecution.scala:290)
at org.apache.spark.sql.execution.QueryExecution.$anonfun$executedPlan$1(QueryExecution.scala:309)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:973)
at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:309)
at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:304)
at org.apache.spark.sql.execution.QueryExecution.simpleString(QueryExecution.scala:394)
at org.apache.spark.sql.execution.QueryExecution.org$apache$spark$sql$execution$QueryExecution$$explainString(QueryExecution.scala:463)
at org.apache.spark.sql.execution.QueryExecution.explainStringLocal(QueryExecution.scala:425)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withCustomExecutionEnv$8(SQLExecution.scala:203)
at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:389)
at org.apache.spark.sql.execution.SQLExecution$.$anonfun$withCustomExecutionEnv$1(SQLExecution.scala:187)
at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:973)
at org.apache.spark.sql.execution.SQLExecution$.withCustomExecutionEnv(SQLExecution.scala:142)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:339)
at org.apache.spark.sql.Dataset.withAction(Dataset.scala:4286)
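The trace above ends at Spark's internal-error wrapper, so the frames where the NullPointerException actually originates (likely during physical planning of the connector's scan) are not included in the paste. A rough diagnostic sketch to print the full cause chain, assuming the same df as above (nothing here is specific to the CDM connector):

try {
  df.show()
} catch {
  case e: Throwable =>
    // Walk the cause chain down to the innermost exception and print its
    // full stack trace, which should include the originating frames.
    var cause: Throwable = e
    while (cause.getCause != null) cause = cause.getCause
    println(s"Root cause: $cause")
    cause.printStackTrace()
}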