This article covers two kinds of problems encountered while using HUE on the CDH platform. In the end neither problem was properly solved; only a workaround is provided. Readers who know a real fix are welcome to chime in.
## **NO 1: Running a jar package from HUE**
> Error type 1
```
org.apache.oozie.action.hadoop.LauncherException: IO error
	at org.apache.oozie.action.hadoop.LauncherAM.setRecoveryId(LauncherAM.java:487)
	at org.apache.oozie.action.hadoop.LauncherAM.runActionMain(LauncherAM.java:403)
	at org.apache.oozie.action.hadoop.LauncherAM.access$300(LauncherAM.java:55)
	at org.apache.oozie.action.hadoop.LauncherAM$2.run(LauncherAM.java:223)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1692)
	at org.apache.oozie.action.hadoop.LauncherAM.run(LauncherAM.java:217)
	at org.apache.oozie.action.hadoop.LauncherAM$1.run(LauncherAM.java:153)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1692)
	at org.apache.oozie.action.hadoop.LauncherAM.main(LauncherAM.java:141)
Caused by: java.lang.NumberFormatException: For input string: "30s"
	at java.lang.NumberFormatException.forInputString(NumberFormatException.java:65)
	at java.lang.Long.parseLong(Long.java:589)
	at java.lang.Long.parseLong(Long.java:631)
	at org.apache.hadoop.conf.Configuration.getLong(Configuration.java:1267)
	at org.apache.hadoop.hdfs.DFSClient$Conf.<init>(DFSClient.java:491)
	at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:620)
	at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:604)
	at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:148)
	at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2598)
	at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:91)
	at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2632)
	at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2614)
	at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:370)
	at org.apache.oozie.action.hadoop.HdfsOperations.fileExists(HdfsOperations.java:76)
	at org.apache.oozie.action.hadoop.LauncherAM.setRecoveryId(LauncherAM.java:473)
	... 12 more
```
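The bottom of the trace is the real issue: the launcher's HDFS client reads a configuration value with `Configuration.getLong`, but the value is the suffixed string `"30s"` that newer CDH configuration files use, and `Long.parseLong` cannot digest the `s`. That usually means an older hadoop-common/hadoop-hdfs is bundled inside the job jar and shadows the cluster's own client classes, which read such values with `getTimeDuration` instead. Below is a minimal sketch of the mismatch (assuming hadoop-common is on the classpath; the exact property key is not visible in the trace, `dfs.client.datanode-restart.timeout` is used here only as a typical stand-in):

```java
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.conf.Configuration;

public class TimeSuffixDemo {
    public static void main(String[] args) {
        // Stand-in key for whichever property carries the suffixed "30s" value.
        String key = "dfs.client.datanode-restart.timeout";

        Configuration conf = new Configuration(false);
        conf.set(key, "30s"); // newer configs allow a time-unit suffix

        // Newer client code reads time-valued properties with getTimeDuration,
        // which understands suffixes such as "s", "m", "h".
        long seconds = conf.getTimeDuration(key, 30, TimeUnit.SECONDS);
        System.out.println("getTimeDuration -> " + seconds + "s");

        // An old client class still calls getLong, which ends in
        // Long.parseLong("30s") and the NumberFormatException seen above.
        try {
            conf.getLong(key, 30);
        } catch (NumberFormatException e) {
            System.out.println("getLong -> " + e);
        }
    }
}
```

If that is indeed the cause, keeping hadoop-common/hadoop-hdfs out of the fat jar (e.g. `provided` scope) so that the cluster's own client classes are used is a common first thing to try.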
> Error type 2
```
java.net.ConnectException: Connection timed out: no further information
	at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
	at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
	at org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:3090)
	at org.apache.hadoop.hdfs.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:778)
	at org.apache.hadoop.hdfs.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:693)
	at org.apache.hadoop.hdfs.BlockReaderFactory.build(BlockReaderFactory.java:354)
	at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:617)
	at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:841)
	at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:889)
	at java.io.DataInputStream.read(DataInputStream.java:100)
	at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:78)
	at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:52)
	at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:112)
	at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:366)
	at org.apache.hadoop.yarn.util.FSDownload.copy(FSDownload.java:265)
	at org.apache.hadoop.yarn.util.FSDownload.access$000(FSDownload.java:61)
	at org.apache.hadoop.yarn.util.FSDownload$2.run(FSDownload.java:359)
	at org.apache.hadoop.yarn.util.FSDownload$2.run(FSDownload.java:357)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1692)
	at org.apache.hadoop.yarn.util.FSDownload.call(FSDownload.java:356)
	at org.apache.hadoop.yarn.util.FSDownload.call(FSDownload.java:60)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
2020-10-14 10:37:32,204 WARN [org.apache.hadoop.hdfs.DFSClient] - Failed to connect to /172.18.0.116:9866 for block, add to deadNodes and continue. java.net.ConnectException: Connection timed out: no further information
java.net.ConnectException: Connection timed out: no further information
	at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
	at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
	at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
	at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
	at org.apache.hadoop.hdfs.DFSClient.newConnectedPeer(DFSClient.java:3090)
	at org.apache.hadoop.hdfs.BlockReaderFactory.nextTcpPeer(BlockReaderFactory.java:778)
	at org.apache.hadoop.hdfs.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:693)
	at org.apache.hadoop.hdfs.BlockReaderFactory.build(BlockReaderFactory.java:354)
	at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:617)
	at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:841)
	at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:889)
	at java.io.DataInputStream.read(DataInputStream.java:100)
	at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:78)
	at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:52)
	at org.apache.hadoop.io.IOUtils.copyBytes(IOUtils.java:112)
	at org.apache.hadoop.fs.FileUtil.copy(FileUtil.java:366)
	at org.apache.hadoop.yarn.util.FSDownload.copy(FSDownload.java:265)
	at org.apache.hadoop.yarn.util.FSDownload.access$000(FSDownload.java:61)
	at org.apache.hadoop.yarn.util.FSDownload$2.run(FSDownload.java:359)
	at org.apache.hadoop.yarn.util.FSDownload$2.run(FSDownload.java:357)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:422)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1692)
	at org.apache.hadoop.yarn.util.FSDownload.call(FSDownload.java:356)
	at org.apache.hadoop.yarn.util.FSDownload.call(FSDownload.java:60)
	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
	at java.lang.Thread.run(Thread.java:748)
```
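The WARN line names the real failure: while localizing the job's resources, the node cannot open a TCP connection to the DataNode transfer port (`172.18.0.116:9866` here, the default data-transfer port in Hadoop 3.x). Before blaming HUE or Oozie, it is worth confirming plain reachability of that address from the failing host; a minimal probe sketch (host and port copied from the WARN line above, adjust for your cluster):

```java
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Socket;

public class DataNodePortProbe {
    public static void main(String[] args) {
        // Address taken from the WARN line; replace with the DataNode being tested.
        String host = "172.18.0.116";
        int port = 9866;

        try (Socket socket = new Socket()) {
            socket.connect(new InetSocketAddress(host, port), 5_000);
            System.out.println(host + ":" + port + " is reachable");
        } catch (IOException e) {
            // Same symptom as the trace: firewall, routing, or a DataNode
            // address advertised by the NameNode that this host cannot reach.
            System.out.println(host + ":" + port + " is NOT reachable: " + e);
        }
    }
}
```

If the probe also times out, the problem is at the network level (firewall rules, routing, or the DataNode advertising an address the client cannot reach) rather than anything specific to HUE or Oozie.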
> Error type 3

