運行Spark 任務出現的錯誤

Exception in thread "main" java.lang.StackOverflowErrorjava

at sun.util.calendar.AbstractCalendar.getTime(AbstractCalendar.java:175)apache

at java.util.Date.normalize(Date.java:1257)spa

at java.util.Date.normalize(Date.java:1204).net

at java.util.Date.getTimeImpl(Date.java:890)scala

at java.util.Date.<init>(Date.java:256)orm

at java.util.zip.ZipUtils.dosToJavaTime(ZipUtils.java:74)ip

at java.util.zip.ZipFile.getZipEntry(ZipFile.java:570)get

at java.util.zip.ZipFile.getEntry(ZipFile.java:313)it

at java.util.jar.JarFile.getEntry(JarFile.java:240)spark

at java.util.jar.JarFile.getJarEntry(JarFile.java:223)

at sun.misc.URLClassPath$JarLoader.getResource(URLClassPath.java:1005)

at sun.misc.URLClassPath$JarLoader.findResource(URLClassPath.java:983)

at sun.misc.URLClassPath.findResource(URLClassPath.java:188)

at java.net.URLClassLoader$2.run(URLClassLoader.java:569)

at java.net.URLClassLoader$2.run(URLClassLoader.java:567)

at java.security.AccessController.doPrivileged(Native Method)

at java.net.URLClassLoader.findResource(URLClassLoader.java:566)

at java.lang.ClassLoader.getResource(ClassLoader.java:1093)

at org.apache.spark.Logging$class.initializeLogging(Logging.scala:119)

at org.apache.spark.Logging$class.initializeIfNecessary(Logging.scala:106)

at org.apache.spark.Logging$class.log(Logging.scala:45)

at org.apache.spark.SecurityManager.log(SecurityManager.scala:134)

at org.apache.spark.Logging$class.initializeLogging(Logging.scala:122)

at org.apache.spark.Logging$class.initializeIfNecessary(Logging.scala:106)

at org.apache.spark.Logging$class.log(Logging.scala:45)

at org.apache.spark.SecurityManager.log(SecurityManager.scala:134)

at org.apache.spark.Logging$class.initializeLogging(Logging.scala:122)

at org.apache.spark.Logging$class.initializeIfNecessary(Logging.scala:106)

at org.apache.spark.Logging$class.log(Logging.scala:45)

at org.apache.spark.SecurityManager.log(SecurityManager.scala:134)

at org.apache.spark.Logging$class.initializeLogging(Logging.scala:122)

at org.apache.spark.Logging$class.initializeIfNecessary(Logging.scala:106)

at org.apache.spark.Logging$class.log(Logging.scala:45)

at org.apache.spark.SecurityManager.log(SecurityManager.scala:134)

at org.apache.spark.Logging$class.initializeLogging(Logging.scala:122)

at org.apache.spark.Logging$class.initializeIfNecessary(Logging.scala:106)

at org.apache.spark.Logging$class.log(Logging.scala:45)

at org.apache.spark.SecurityManager.log(SecurityManager.scala:134)

at org.apache.spark.Logging$class.initializeLogging(Logging.scala:122)

at org.apache.spark.Logging$class.initializeIfNecessary(Logging.scala:106)

at org.apache.spark.Logging$class.log(Logging.scala:45)

at org.apache.spark.SecurityManager.log(SecurityManager.scala:134)

at org.apache.spark.Logging$class.initializeLogging(Logging.scala:122)

at org.apache.spark.Logging$class.initializeIfNecessary(Logging.scala:106)

at org.apache.spark.Logging$class.log(Logging.scala:45)

at org.apache.spark.SecurityManager.log(SecurityManager.scala:134)

at org.apache.spark.Logging$class.initializeLogging(Logging.scala:122)

at org.apache.spark.Logging$class.initializeIfNecessary(Logging.scala:106)

at org.apache.spark.Logging$class.log(Logging.scala:45)

at org.apache.spark.SecurityManager.log(SecurityManager.scala:134)

at org.apache.spark.Logging$class.initializeLogging(Logging.scala:122)

at org.apache.spark.Logging$class.initializeIfNecessary(Logging.scala:106)

at org.apache.spark.Logging$class.log(Logging.scala:45)

at org.apache.spark.SecurityManager.log(SecurityManager.scala:134)

at org.apache.spark.Logging$class.initializeLogging(Logging.scala:122)

at org.apache.spark.Logging$class.initializeIfNecessary(Logging.scala:106)

at org.apache.spark.Logging$class.log(Logging.scala:45)

at org.apache.spark.SecurityManager.log(SecurityManager.scala:134)

at org.apache.spark.Logging$class.initializeLogging(Logging.scala:122)


解決辦法:

檢查 classpath 中引入的 log4j 依賴包是否有重複,刪除重複的包即可。原因是重複的 log4j 包導致 Spark 的 `Logging.initializeLogging` 初始化日誌時反覆重入(從上面堆棧中 `initializeLogging → initializeIfNecessary → log → initializeLogging` 的無限循環即可看出),最終耗盡線程棧拋出 StackOverflowError。

相關文章
相關標籤/搜索