Run the following on the master node (host2):

hadoop jar hadoop-test-1.1.2.jar DFSCIOTest -write -nrFiles 12 -fileSize 10240 -resFile test
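
With -nrFiles 12 and -fileSize 10240, the write phase tries to create twelve 10 GB files (roughly 120 GB before replication), so the cluster needs at least that much free HDFS capacity. If you only want to sanity-check the cluster first, a much smaller run plus a cleanup pass looks roughly like this — the numbers are made up for illustration, and the -clean flag follows the TestDFSIO-style usage that DFSCIOTest prints, so verify it against the tool's own usage message:

    # hypothetical smaller smoke test: 4 files of 100 MB each
    hadoop jar hadoop-test-1.1.2.jar DFSCIOTest -write -nrFiles 4 -fileSize 100 -resFile test_small

    # remove the /benchmarks/TestDFSIO data when finished
    hadoop jar hadoop-test-1.1.2.jar DFSCIOTest -clean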

The job eventually failed. To find out why, I checked the logs:

    org.apache.hadoop.ipc.RemoteException: java.io.IOException: File /benchmarks/TestDFSIO/io_data/test_io_0 could only be replicated to 0 nodes, instead of 1
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:1639)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.addBlock(NameNode.java:736)
        at sun.reflect.GeneratedMethodAccessor10.invoke(Unknown Source)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:578)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1393)
        at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:1389)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:415)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1149)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1387)

        at org.apache.hadoop.ipc.Client.call(Client.java:1107)
        at org.apache.hadoop.ipc.RPC$Invoker.invoke(RPC.java:229)
        at com.sun.proxy.$Proxy2.addBlock(Unknown Source)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:85)
        at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:62)
        at com.sun.proxy.$Proxy2.addBlock(Unknown Source)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.locateFollowingBlock(DFSClient.java:3686)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.nextBlockOutputStream(DFSClient.java:3546)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream.access$2600(DFSClient.java:2749)
        at org.apache.hadoop.hdfs.DFSClient$DFSOutputStream$DataStreamer.run(DFSClient.java:2989)

The same RemoteException and stack trace are logged two more times as the DFSClient retries allocating the block.

attempt_201607141305_0001_m_000003_2: log4j:WARN No appenders could be found for logger (org.apache.hadoop.hdfs.DFSClient).

attempt_201607141305_0001_m_000003_2: log4j:WARN Please initialize the log4j system properly.
16/07/14 15:14:04 INFO mapred.JobClient: Job complete: job_201607141305_0001
16/07/14 15:14:04 INFO mapred.JobClient: Counters: 7
16/07/14 15:14:04 INFO mapred.JobClient: Job Counters
16/07/14 15:14:04 INFO mapred.JobClient: SLOTS_MILLIS_MAPS=7002226
16/07/14 15:14:04 INFO mapred.JobClient: Total time spent by all reduces waiting after reserving slots (ms)=0
16/07/14 15:14:04 INFO mapred.JobClient: Total time spent by all maps waiting after reserving slots (ms)=0
16/07/14 15:14:04 INFO mapred.JobClient: Launched map tasks=20
16/07/14 15:14:04 INFO mapred.JobClient: Data-local map tasks=20
16/07/14 15:14:04 INFO mapred.JobClient: SLOTS_MILLIS_REDUCES=0
16/07/14 15:14:04 INFO mapred.JobClient: Failed map tasks=1
16/07/14 15:14:04 INFO mapred.JobClient: Job Failed: # of failed Map Tasks exceeded allowed limit. FailedCount: 1. LastFailedTask: task_201607141305_0001_m_000002
java.io.IOException: Job failed!
at org.apache.hadoop.mapred.JobClient.runJob(JobClient.java:1327)
at org.apache.hadoop.fs.TestDFSIO.runIOTest(TestDFSIO.java:257)
at org.apache.hadoop.fs.TestDFSIO.writeTest(TestDFSIO.java:237)
at org.apache.hadoop.fs.TestDFSIO.run(TestDFSIO.java:457)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:65)
at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:79)
at org.apache.hadoop.fs.DFSCIOTest.main(DFSCIOTest.java:403)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.util.ProgramDriver$ProgramDescription.invoke(ProgramDriver.java:68)
at org.apache.hadoop.util.ProgramDriver.driver(ProgramDriver.java:139)
at org.apache.hadoop.test.AllTestDriver.main(AllTestDriver.java:81)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at org.apache.hadoop.util.RunJar.main(RunJar.java:156)

I searched around online quite a bit; the summary below, from one post, matched my situation:

When the exception occurs
This is the error thrown when uploading a file to Hadoop, i.e. the exception produced by "hadoop fs -put [local path] [HDFS path]".

There are plenty of answers for this problem online; they boil down to the following checks (a command sketch for each item follows the list):
1. Does the system / HDFS have enough free space? (In the quoted author's case, the exception was caused by running out of disk space.)
2. Are the datanodes up and reporting in?
3. Is the NameNode still in safe mode?
4. Is the firewall turned off, so the datanodes can be reached?
5. As a last resort: stop Hadoop, reformat the NameNode, and restart Hadoop.
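
A minimal way to walk through that checklist on a Hadoop 1.x cluster like this one — the iptables commands assume a CentOS/RHEL-style host, so adapt them to your distribution:

    # 1. free space on the local disks and on HDFS
    df -h
    hadoop dfsadmin -report          # Configured Capacity / DFS Remaining per datanode

    # 2. datanode processes and live-node count
    jps                              # run on each slave; a DataNode process should be listed
    hadoop dfsadmin -report | grep "Datanodes available"

    # 3. safe mode
    hadoop dfsadmin -safemode get
    hadoop dfsadmin -safemode leave  # only if the NameNode is stuck in safe mode

    # 4. firewall (CentOS/RHEL example)
    service iptables status
    service iptables stop

    # 5. last resort: stop, reformat the NameNode (this destroys all HDFS data!), restart
    stop-all.sh
    hadoop namenode -format
    start-all.sh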

In my case I think it really was a disk-space problem: I had put all of Hadoop's data under the home directory, that partition was close to 97% full, and that is what made the write fail.
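
One way out, if another partition has more room, is to point the datanode's storage directory (dfs.data.dir in conf/hdfs-site.xml on Hadoop 1.x) at the bigger disk and move the existing blocks over. The paths below are made up for illustration:

    # see which partition is full and where the data currently lives
    df -h
    du -sh /home/hadoop/dfs/data                        # hypothetical current dfs.data.dir

    # stop HDFS, move the block storage to a larger partition, fix ownership
    stop-dfs.sh
    mv /home/hadoop/dfs/data /data1/hadoop/dfs/data     # /data1 is an assumed larger mount
    chown -R hadoop:hadoop /data1/hadoop/dfs/data

    # then set dfs.data.dir to /data1/hadoop/dfs/data in conf/hdfs-site.xml
    # on every datanode and restart
    start-dfs.sh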

Solution references:

http://blog.csdn.net/zuiaituantuan/article/details/6533867

http://blog.csdn.net/wanghai__/article/details/5744158

http://www.docin.com/p-629030380.html

http://www.cnblogs.com/linjiqin/archive/2013/03/13/2957310.html

http://f.dataguru.cn/thread-32858-1-1.html
