How to fix a 500 error when accessing the job history page from the Ambari resource manager?

Problem description

I am practicing with the HDP 2.6.5 sandbox loaded in VirtualBox. After running an MR job, I wanted to check the logs. [screenshot: Hadoop Ambari screen] When I click the URL for the job history log, I get this error:

Sorry, got error 500

In the Job History logs (http://localhost:19888/logs/), I can see:

2021-06-08 15:57:56,127 ERROR webapp.Dispatcher (Dispatcher.java:service(182)) - error handling URI: /jobhistory/job/job_1623150185167_0004
java.lang.reflect.InvocationTargetException
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.yarn.webapp.Dispatcher.service(Dispatcher.java:173)
    at javax.servlet.http.HttpServlet.service(HttpServlet.java:820)
    at com.google.inject.servlet.ServletDefinition.doService(ServletDefinition.java:263)
    at com.google.inject.servlet.ServletDefinition.service(ServletDefinition.java:178)
    at com.google.inject.servlet.ManagedServletPipeline.service(ManagedServletPipeline.java:91)
    at com.google.inject.servlet.FilterChainInvocation.doFilter(FilterChainInvocation.java:62)
    at com.sun.jersey.spi.container.servlet.ServletContainer.doFilter(ServletContainer.java:900)
    at com.sun.jersey.spi.container.servlet.ServletContainer.doFilter(ServletContainer.java:834)
    at com.sun.jersey.spi.container.servlet.ServletContainer.doFilter(ServletContainer.java:795)
    at com.google.inject.servlet.FilterDefinition.doFilter(FilterDefinition.java:163)
    at com.google.inject.servlet.FilterChainInvocation.doFilter(FilterChainInvocation.java:58)
    at com.google.inject.servlet.ManagedFilterPipeline.dispatch(ManagedFilterPipeline.java:118)
    at com.google.inject.servlet.GuiceFilter.doFilter(GuiceFilter.java:113)
    at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1212)
    at org.apache.hadoop.security.http.XFrameOptionsFilter.doFilter(XFrameOptionsFilter.java:57)
    at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1212)
    at org.apache.hadoop.http.lib.StaticUserWebFilter$StaticUserFilter.doFilter(StaticUserWebFilter.java:109)
    at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1212)
    at org.apache.hadoop.http.HttpServer2$QuotingInputFilter.doFilter(HttpServer2.java:1400)
    at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1212)
    at org.apache.hadoop.http.NoCacheFilter.doFilter(NoCacheFilter.java:45)
    at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1212)
    at org.apache.hadoop.http.NoCacheFilter.doFilter(NoCacheFilter.java:45)
    at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1212)
    at org.mortbay.jetty.servlet.ServletHandler.handle(ServletHandler.java:399)
    at org.mortbay.jetty.security.SecurityHandler.handle(SecurityHandler.java:216)
    at org.mortbay.jetty.servlet.SessionHandler.handle(SessionHandler.java:182)
    at org.mortbay.jetty.handler.ContextHandler.handle(ContextHandler.java:766)
    at org.mortbay.jetty.webapp.WebAppContext.handle(WebAppContext.java:450)
    at org.mortbay.jetty.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:230)
    at org.mortbay.jetty.handler.HandlerWrapper.handle(HandlerWrapper.java:152)
    at org.mortbay.jetty.Server.handle(Server.java:326)
    at org.mortbay.jetty.HttpConnection.handleRequest(HttpConnection.java:542)
    at org.mortbay.jetty.HttpConnection$RequestHandler.headerComplete(HttpConnection.java:928)
    at org.mortbay.jetty.HttpParser.parseNext(HttpParser.java:549)
    at org.mortbay.jetty.HttpParser.parseAvailable(HttpParser.java:212)
    at org.mortbay.jetty.HttpConnection.handle(HttpConnection.java:404)
    at org.mortbay.io.nio.SelectChannelEndPoint.run(SelectChannelEndPoint.java:410)
    at org.mortbay.thread.QueuedThreadPool$PoolThread.run(QueuedThreadPool.java:582)
Caused by: com.google.common.util.concurrent.ExecutionError: java.lang.NoClassDefFoundError: Could not initialize class java.net.NetworkInterface
    at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2232)
    at com.google.common.cache.LocalCache.get(LocalCache.java:3965)
    at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3969)
    at com.google.common.cache.LocalCache$LocalManualCache.get(LocalCache.java:4829)
    at com.google.common.cache.LocalCache$LocalManualCache.getUnchecked(LocalCache.java:4834)
    at org.apache.hadoop.mapreduce.v2.hs.CachedHistoryStorage.getFullJob(CachedHistoryStorage.java:193)
    at org.apache.hadoop.mapreduce.v2.hs.JobHistory.getJob(JobHistory.java:217)
    at org.apache.hadoop.mapreduce.v2.app.webapp.AppController.requireJob(AppController.java:381)
    at org.apache.hadoop.mapreduce.v2.app.webapp.AppController.job(AppController.java:108)
    at org.apache.hadoop.mapreduce.v2.hs.webapp.HsController.job(HsController.java:104)
    ... 43 more
Caused by: java.lang.NoClassDefFoundError: Could not initialize class java.net.NetworkInterface
    at org.apache.hadoop.net.NetUtils.isLocalAddress(NetUtils.java:691)
    at org.apache.hadoop.hdfs.DFSClient.isLocalAddress(DFSClient.java:1102)
    at org.apache.hadoop.hdfs.shortcircuit.DomainSocketFactory.getPathInfo(DomainSocketFactory.java:149)
    at org.apache.hadoop.hdfs.BlockReaderFactory.getBlockReaderLocal(BlockReaderFactory.java:417)
    at org.apache.hadoop.hdfs.BlockReaderFactory.build(BlockReaderFactory.java:334)
    at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:669)
    at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:888)
    at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:945)
    at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:741)
    at java.io.DataInputStream.readLine(DataInputStream.java:513)
    at org.apache.hadoop.mapreduce.jobhistory.EventReader.<init>(EventReader.java:68)
    at org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.parse(JobHistoryParser.java:139)
    at org.apache.hadoop.mapreduce.v2.hs.CompletedJob.loadFullHistoryData(CompletedJob.java:347)
    at org.apache.hadoop.mapreduce.v2.hs.CompletedJob.<init>(CompletedJob.java:101)
    at org.apache.hadoop.mapreduce.v2.hs.HistoryFileManager$HistoryFileInfo.loadJob(HistoryFileManager.java:463)
    at org.apache.hadoop.mapreduce.v2.hs.CachedHistoryStorage.loadJob(CachedHistoryStorage.java:180)
    at org.apache.hadoop.mapreduce.v2.hs.CachedHistoryStorage.access$000(CachedHistoryStorage.java:52)
    at org.apache.hadoop.mapreduce.v2.hs.CachedHistoryStorage$1.load(CachedHistoryStorage.java:103)
    at org.apache.hadoop.mapreduce.v2.hs.CachedHistoryStorage$1.load(CachedHistoryStorage.java:100)
    at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3568)
    at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2350)
    at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2313)
    at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2228)
    ... 52 more

How can I avoid the 500 error?

Also, if a Python MR job fails, can I see the Python error in these logs? In the SSH terminal I can only see errors from Java. I'm new to all of this.

Tags: hadoop, mapreduce, hadoop-yarn

Solution
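
The 500 is coming from the history server itself, not from your job. The root cause at the bottom of the stack trace is:

java.lang.NoClassDefFoundError: Could not initialize class java.net.NetworkInterface

On the JVM, "Could not initialize class" for a core JDK class means that the class's static initializer already failed once earlier in this process (for example because the process ran out of a native resource such as file descriptors), and the JVM caches that failure: every later use of the class throws this error. So the JobHistory Server JVM is in a bad state; the history files on HDFS are most likely fine.

Two things usually clear this up in the sandbox:

1. Restart the JobHistory Server from Ambari (Services > MapReduce2, then restart the History Server component), or simply restart the whole sandbox VM. A fresh JVM re-runs the static initializer and the page loads again.
2. If the error keeps coming back, check whether the history server process is exhausting its open-file limit; "too many open files" is a common way for a static initializer like NetworkInterface's to fail in a long-running JVM.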

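A rough way to check point 2 from an SSH session inside the sandbox. This is only a sketch; it assumes a Linux guest, the JDK's jps tool on the PATH, and enough privileges (e.g. root) to read the history server's /proc entries:

    import os
    import subprocess

    # Find the JobHistoryServer JVM with jps (ships with the JDK).
    out = subprocess.check_output(["jps"]).decode()
    pid = next(line.split()[0] for line in out.splitlines()
               if "JobHistoryServer" in line)

    # Count its open file descriptors and read the soft limit from /proc.
    open_fds = len(os.listdir("/proc/%s/fd" % pid))
    with open("/proc/%s/limits" % pid) as f:
        limit_line = next(l for l in f if l.startswith("Max open files"))

    print("JobHistoryServer pid=%s, open fds=%d" % (pid, open_fds))
    print(limit_line.strip())

If the fd count is close to the limit, raising the limit for the service user (or restarting the service periodically) is the pragmatic fix on a practice sandbox.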

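As for the second question: yes. With Hadoop Streaming, whatever the Python process writes to stderr is captured in that task attempt's container log, so Python tracebacks do end up in the logs; they just never reach the SSH terminal where you submitted the job. Once the history page works again, open the failed task attempt and follow its stderr link, or pull all container logs from the command line with yarn logs -applicationId <application id> (for your job job_1623150185167_0004 the application id is application_1623150185167_0004).

It also helps to print the traceback yourself and exit non-zero, so failures are easy to spot. A minimal streaming mapper sketch, where the word-count logic is only an illustration:

    #!/usr/bin/env python
    # Hadoop Streaming mapper: reads lines on stdin, emits key<TAB>value on stdout.
    import sys
    import traceback

    def main():
        for line in sys.stdin:
            for word in line.split():
                print("%s\t1" % word)

    if __name__ == "__main__":
        try:
            main()
        except Exception:
            # stderr is captured into the task attempt's container log,
            # which is what the job history UI and `yarn logs` show you.
            traceback.print_exc(file=sys.stderr)
            sys.exit(1)  # a non-zero exit marks the attempt as failed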