25/01/20 10:32:50 ERROR Client: Application diagnostics message: User class threw exception: java.lang.NoClassDefFoundError: org/apache/spark/sql/catalyst/plans/logical/SupportsNonDeterministicExpression
    at java.base/java.lang.ClassLoader.defineClass1(Native Method)
    at java.base/java.lang.ClassLoader.defineClass(ClassLoader.java:1022)
    at java.base/java.security.SecureClassLoader.defineClass(SecureClassLoader.java:174)
    at java.base/java.net.URLClassLoader.defineClass(URLClassLoader.java:555)
    at java.base/java.net.URLClassLoader$1.run(URLClassLoader.java:458)
    at java.base/java.net.URLClassLoader$1.run(URLClassLoader.java:452)
    at java.base/java.security.AccessController.doPrivileged(Native Method)
    at java.base/java.net.URLClassLoader.findClass(URLClassLoader.java:451)
    at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:594)
    at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:527)
    at java.base/java.lang.ClassLoader.defineClass1(Native Method)
    at java.base/java.lang.ClassLoader.defineClass(ClassLoader.java:1022)
    at java.base/java.security.SecureClassLoader.defineClass(SecureClassLoader.java:174)
    at java.base/java.net.URLClassLoader.defineClass(URLClassLoader.java:555)
    at java.base/java.net.URLClassLoader$1.run(URLClassLoader.java:458)
    at java.base/java.net.URLClassLoader$1.run(URLClassLoader.java:452)
    at java.base/java.security.AccessController.doPrivileged(Native Method)
    at java.base/java.net.URLClassLoader.findClass(URLClassLoader.java:451)
    at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:594)
    at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:527)
    at io.delta.sql.DeltaSparkSessionExtension.$anonfun$apply$9(DeltaSparkSessionExtension.scala:119)
    at org.apache.spark.sql.SparkSessionExtensions.$anonfun$buildPostHocResolutionRules$1(SparkSessionExtensions.scala:211)
    at scala.collection.TraversableLike.$anonfun$map$1(TraversableLike.scala:286)
    at scala.collection.mutable.ResizableArray.foreach(ResizableArray.scala:62)
    at scala.collection.mutable.ResizableArray.foreach$(ResizableArray.scala:55)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:49)
    at scala.collection.TraversableLike.map(TraversableLike.scala:286)
    at scala.collection.TraversableLike.map$(TraversableLike.scala:279)
    at scala.collection.AbstractTraversable.map(Traversable.scala:108)
    at org.apache.spark.sql.SparkSessionExtensions.buildPostHocResolutionRules(SparkSessionExtensions.scala:211)
    at org.apache.spark.sql.internal.BaseSessionStateBuilder.customPostHocResolutionRules(BaseSessionStateBuilder.scala:232)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder$$anon$1.<init>(HiveSessionStateBuilder.scala:108)
    at org.apache.spark.sql.hive.HiveSessionStateBuilder.analyzer(HiveSessionStateBuilder.scala:85)
    at org.apache.spark.sql.internal.BaseSessionStateBuilder.$anonfun$build$2(BaseSessionStateBuilder.scala:374)
    at org.apache.spark.sql.internal.SessionState.analyzer$lzycompute(SessionState.scala:92)
    at org.apache.spark.sql.internal.SessionState.analyzer(SessionState.scala:92)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$analyzed$1(QueryExecution.scala:77)
    at org.apache.spark.sql.catalyst.QueryPlanningTracker.measurePhase(QueryPlanningTracker.scala:138)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$2(QueryExecution.scala:219)
    at org.apache.spark.sql.execution.QueryExecution$.withInternalError(QueryExecution.scala:547)
    at org.apache.spark.sql.execution.QueryExecution.$anonfun$executePhase$1(QueryExecution.scala:219)
    at org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:900)
    at org.apache.spark.sql.execution.QueryExecution.executePhase(QueryExecution.scala:218)
    at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:77)
    at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:74)
    at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:66)
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:206)
    at org.apache.spark.sql.Dataset.<init>(Dataset.scala:212)
    at org.apache.spark.sql.Dataset$.apply(Dataset.scala:76)
    at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:486)
    at org.apache.spark.sql.SQLContext.createDataset(SQLContext.scala:354)
    at org.apache.spark.sql.SQLImplicits.localSeqToDatasetHolder(SQLImplicits.scala:244)
    at com.walmart.luminate.cperf.audit.AuditOps$DataframeFromAuditRecord.toDF(AuditOps.scala:331)
    at com.walmart.luminate.cperf.audit.AuditOps$.insertAuditRecord(AuditOps.scala:102)
    at com.walmart.luminate.cperf.OmsPOWorkflowController$.$anonfun$main$1(OmsPOWorkflowController.scala:72)
    at com.walmart.luminate.cperf.OmsPOWorkflowController$.$anonfun$main$1$adapted(OmsPOWorkflowController.scala:60)
    at scala.Option.foreach(Option.scala:407)
    at com.walmart.luminate.cperf.OmsPOWorkflowController$.main(OmsPOWorkflowController.scala:60)
    at com.walmart.luminate.cperf.OmsPOWorkflowController.main(OmsPOWorkflowController.scala)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at java.base/jdk.internal.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at java.base/jdk.internal.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.base/java.lang.reflect.Method.invoke(Method.java:566)
    at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$2.run(ApplicationMaster.scala:738)
Caused by: java.lang.ClassNotFoundException: org.apache.spark.sql.catalyst.plans.logical.SupportsNonDeterministicExpression
    at java.base/java.net.URLClassLoader.findClass(URLClassLoader.java:476)
    at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:594)
    at java.base/java.lang.ClassLoader.loadClass(ClassLoader.java:527)
    ... 64 more
Exception in thread "main" org.apache.spark.SparkException: Application application_1737008118714_4049 finished with failed status
    at org.apache.spark.deploy.yarn.Client.run(Client.scala:1309)
    at org.apache.spark.deploy.yarn.YarnClusterApplication.start(Client.scala:1742)
    at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:1029)
    at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:194)
    at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:217)
    at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:91)
    at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:1120)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:1129)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
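
The NoClassDefFoundError is raised while Delta's DeltaSparkSessionExtension is being wired into the session, which usually points to a Delta Lake jar built against a newer Spark release than the Spark runtime on the cluster: the extension references a Catalyst class (SupportsNonDeterministicExpression) that the cluster's Spark jars do not contain. The usual remedy is to pin the delta-spark artifact to the release that matches the cluster's Spark minor version; Delta's release notes publish a Spark compatibility matrix (https://docs.delta.io/latest/releases.html). The sketch below assumes the job is built with sbt, and the version numbers are illustrative examples, not values taken from this log.

    // build.sbt -- minimal sketch; sparkVersion and deltaVersion are example values,
    // not derived from the log above. The point is that delta-spark must be published
    // for the same Spark minor version the cluster actually runs.
    val sparkVersion = "3.5.1"   // assumed: must match the cluster's Spark runtime
    val deltaVersion = "3.2.0"   // assumed: a Delta release built for that Spark line

    libraryDependencies ++= Seq(
      // Spark itself is supplied by the cluster, so it is Provided rather than bundled
      "org.apache.spark" %% "spark-sql"   % sparkVersion % Provided,
      // delta-spark (renamed from delta-core as of Delta 3.x) ships with the application
      "io.delta"         %% "delta-spark" % deltaVersion
    )

The same alignment can be expressed at submit time with spark-submit --packages io.delta:delta-spark_2.12:<version matching the cluster's Spark>; if a stale delta-core or mismatched delta-spark jar is also on the driver classpath, it needs to be removed as part of the fix.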