连接不上hdfs

连接不上hdfs

Java操作hdfs报错

public class HdfsOp {
   public static void main(String[] args) throws Exception {

       Configuration conf=new Configuration();
       conf.set("fs.defaultFS", "hdfs://192.168.44.129:9000");
       FileSystem fileSystem=FileSystem.get(conf);
       FileInputStream fis=new FileInputStream("D:\\user.txt");
       FSDataOutputStream fos=fileSystem.create(new Path("/user.txt"));
       IOUtils.copyBytes(fis,fos,1024,true);

   }
}

Exception in thread "main" java.net.ConnectException: Call From DESKTOP-T446KK7/192.168.137.1 to liyixin:9000 failed on connection exception: java.net.ConnectException: Connection refused: no further information; For more details see:  http://wiki.apache.org/hadoop/ConnectionRefused

at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)

at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)

at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)

at java.lang.reflect.Constructor.newInstance(Constructor.java:423)

at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:831)

at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:755)

at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1515)

at org.apache.hadoop.ipc.Client.call(Client.java:1457)

at org.apache.hadoop.ipc.Client.call(Client.java:1367)

at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)

at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)

at com.sun.proxy.$Proxy9.create(Unknown Source)

at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:365)

at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)

at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

at java.lang.reflect.Method.invoke(Method.java:498)

at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)

at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)

at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)

at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)

at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)

at com.sun.proxy.$Proxy10.create(Unknown Source)

at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:276)

at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1216)

at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1195)

at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1133)

at org.apache.hadoop.hdfs.DistributedFileSystem$8.doCall(DistributedFileSystem.java:536)

at org.apache.hadoop.hdfs.DistributedFileSystem$8.doCall(DistributedFileSystem.java:533)

at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)

at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:547)

at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:474)

at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:1118)

at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:1098)

at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:987)

at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:975)

at hdfs.HdfsOp.main(HdfsOp.java:26)

Caused by: java.net.ConnectException: Connection refused: no further information

at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)

at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)

at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)

at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:531)

at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:690)

at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:794)

at org.apache.hadoop.ipc.Client$Connection.access$3700(Client.java:411)

at org.apache.hadoop.ipc.Client.getConnection(Client.java:1572)

at org.apache.hadoop.ipc.Client.call(Client.java:1403)

... 29 more


正在回答

登录购买课程后可参与讨论,去登录

1回答

错误信息提示的是连不上hdfs的9000端口
可能有以下问题
1.确认hadoop集群是否正常启动
2.确认集群防火墙是否关闭
3.在windows机器上打开cmd窗口,执行telnet命令看一下能不能连通集群的9000端口
命令如下:
telnet 集群主节点IP 9000

  • 夜星坠 提问者 #1
    集群正常启动,防火墙是关着的,telnet命令连不通
    2020-11-10 21:53:17
  • 徐老师 回复 提问者 夜星坠 #2
    如果集群正常,防火墙也关闭了,telnet却不通,唯一的可能应该是你现在指定的ip可能有问题了,你在windows的cmd命令行里面ping一下这个ip,看看通不通。另外注意报错里实际连接的是 liyixin:9000 而不是你配置的 IP,可能是 Windows 的 hosts 文件里 liyixin 映射到了别的地址,建议也检查确认一下
    2020-11-10 21:57:05
  • 夜星坠 提问者 回复 徐老师 #3
    可以ping通
    2020-11-10 21:58:06
问题已解决,确定采纳
还有疑问,暂不采纳

恭喜解决一个难题,获得1积分~

来为老师/同学的回答评分吧

0 星

相似问题

登录后可查看更多问答,登录/注册

请稍等 ...
意见反馈 帮助中心 APP下载
官方微信

在线咨询

领取优惠

免费试听

领取大纲

扫描二维码,添加
你的专属老师