Exception in thread "main" java.io.IOException: Mkdirs failed to create /user/centos/hbase-staging (exists=false, cwd=file:/home/centos)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:440)
at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:426)
at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:906)
at org.apache.hadoop.io.SequenceFile$Writer.<init>(SequenceFile.java:1071)
at org.apache.hadoop.io.SequenceFile$RecordCompressWriter.<init>(SequenceFile.java:1371)
at org.apache.hadoop.io.SequenceFile.createWriter(SequenceFile.java:272)
at org.apache.hadoop.io.SequenceFile.createWriter(SequenceFile.java:294)
at org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.writePartitions(HFileOutputFormat2.java:335)
at org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.configurePartitioner(HFileOutputFormat2.java:596)
at org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.configureIncrementalLoad(HFileOutputFormat2.java:440)
at org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.configureIncrementalLoad(HFileOutputFormat2.java:405)
at org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2.configureIncrementalLoad(HFileOutputFormat2.java:386)
at bulkloadExample.BulkloadData.main(BulkloadData.java:88)
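
The failure comes from HFileOutputFormat2.configurePartitioner(), which writes a partitions SequenceFile under hbase.fs.tmp.dir (by default /user/${user.name}/hbase-staging). Because the job configuration below forces fs.defaultFS to file:///, that path is resolved against the local filesystem (hence cwd=file:/home/centos in the message), where the centos user cannot create /user, so Mkdirs fails. A minimal sketch of two possible workarounds, with placeholder paths and namenode address:

Configuration conf = HBaseConfiguration.create();
// Workaround 1 (sketch): point the bulk-load staging dir at a locally writable location (placeholder path)
conf.set("hbase.fs.tmp.dir", "/tmp/hbase-staging");
// Workaround 2 (sketch): keep the default filesystem on HDFS instead of forcing file:///
// conf.set("fs.defaultFS", "hdfs://namenode:8020");   // placeholder namenode address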

package bulkloadExample;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.HFileOutputFormat2;
import org.apache.hadoop.hbase.mapreduce.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.mapreduce.PutSortReducer;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

import java.io.IOException;
import java.net.URI;

public class BulkloadData {

    static final String JOBNAME = "BulkLoad";
    static final String TABLENAME = "wqbin:duowan_user_bulkLoad";
    static final String PATH_IN = "/datain/duowan_user.txt";   // input path
    static final String PATH_OUT = "/dataout";                 // output path for the generated HFiles
    static final String SEPARATOR = "\t";

    static final byte[] ColumnFamily = "f".getBytes();         // column family
//    static final byte[] row_id = "id".getBytes();            // column qualifier
    static final byte[] name = "name".getBytes();
    static final byte[] pass = "pass".getBytes();
    static final byte[] mail = "mail".getBytes();
    static final byte[] nickname = "nickname".getBytes();

    public static class Map extends Mapper<LongWritable, Text, ImmutableBytesWritable, Put> {

        @Override
        protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
            String[] strArr = value.toString().split(SEPARATOR);
            String row = strArr[0];
            Put put = new Put(Bytes.toBytes(row));              // rowkey
            put.addColumn(ColumnFamily, name, Bytes.toBytes(strArr[1]));
            put.addColumn(ColumnFamily, pass, Bytes.toBytes(strArr[2]));
            put.addColumn(ColumnFamily, mail, Bytes.toBytes(strArr[3]));
            put.addColumn(ColumnFamily, nickname, Bytes.toBytes(strArr[3]));
            // The map output key must be the rowkey bytes (not the whole input line),
            // so that HFileOutputFormat2 can sort and partition the Puts correctly.
            context.write(new ImmutableBytesWritable(Bytes.toBytes(row)), put);
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        Connection conn = ConnectionFactory.createConnection(conf);
//        conf.set("hbase.zookeeper.quorum", "xx,xx,xx");
        conf.set("fs.defaultFS", "file:///");
        System.setProperty("HADOOP_USER_NAME", "centos");

        Job job = Job.getInstance(conf, JOBNAME);
        job.setJarByClass(BulkloadData.class);
        job.setMapOutputKeyClass(ImmutableBytesWritable.class);
        job.setMapOutputValueClass(Put.class);
        TableMapReduceUtil.addDependencyJars(job);
        job.setMapperClass(Map.class);
        // The sort reducer (KeyValueSortReducer or PutSortReducer) does not have to be set here,
        // because configureIncrementalLoad() already picks one based on the map output value class.
        job.setReducerClass(PutSortReducer.class);
        job.setOutputFormatClass(HFileOutputFormat2.class);

        FileSystem fs = FileSystem.get(URI.create("/"), conf);
        Path outPath = new Path(PATH_OUT);
//        if (fs.exists(outPath)) fs.delete(outPath, true);
        FileOutputFormat.setOutputPath(job, outPath);
        FileInputFormat.setInputPaths(job, new Path(PATH_IN));

//        HTable table = new HTable(conf, TABLENAME);
        HFileOutputFormat2.configureIncrementalLoad(job, conn.getTable(TableName.valueOf(TABLENAME)),
                conn.getRegionLocator(TableName.valueOf(TABLENAME)));
        System.out.println("configureIncrementalLoad done");

        if (job.waitForCompletion(true)) {
            LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
//            loader.doBulkLoad(table);
        }
        System.exit(0);
    }
}
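
If the job finishes successfully, the generated HFiles still have to be handed over to HBase; the commented-out loader.doBulkLoad(...) call above is where that would happen. Below is a hedged sketch of how it could be wired up, reusing the conf, conn, job, PATH_OUT and TABLENAME already defined in main, and assuming the HBase 1.x-style doBulkLoad(Path, Admin, Table, RegionLocator) overload (Admin, Table and RegionLocator come from org.apache.hadoop.hbase.client):

        if (job.waitForCompletion(true)) {
            LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
            // open the table handles from the existing connection and complete the bulk load
            try (Admin admin = conn.getAdmin();
                 Table table = conn.getTable(TableName.valueOf(TABLENAME));
                 RegionLocator locator = conn.getRegionLocator(TableName.valueOf(TABLENAME))) {
                loader.doBulkLoad(new Path(PATH_OUT), admin, table, locator);
            }
        }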

"main" java.io.IOException: Mkdirs failed to create /user/centos/hbase-staging (exists=false, cwd=file:/home/centos)的更多相关文章

  1. Exception in thread "main" java.io.IOException: Mkdirs failed to create /var/folders/q0/1wg8sw1x0dg08cmm5m59sy8r0000gn/T/hadoop-unjar6090005653875084137/META-INF/license at org.apache.hadoop.util.Run

    在使用hadoop运行jar时出现. 解决方法 zip -d Test.jar LICENSE zip -d Test.jar META-INF/LICENSE 完美解决.

  2. nutch爬取时Exception in thread “main” java.io.IOException: Job failed!

    用cygwin运行nutch 1.2爬取提示IOException: $ bin/nutch crawl urls -dir crawl -depth 3 -topN 10 crawl started ...

  3. Exception in thread "main" java.io.IOException: Failed to set permissions of path

    在跑BuildForest的时候,编写了下面的程序: package test.breiman; import org.apache.mahout.classifier.df.mapreduce.Bu ...

  4. windows下eclipse远程连接hadoop错误“Exception in thread"main"java.io.IOException: Call to Master.Hadoop/172.20.145.22:9000 failed ”

    在VMware虚拟机下搭建了hadoop集群,ubuntu-12.04,一台master,三台slave.hadoop-0.20.2版本.在 master机器上利用eclipse-3.3连接hadoo ...

  5. github提交失败并报错java.io.IOException: Authentication failed:

    一.概述 我最近在写一个android的项目. 软件:android studio.Android studio VCS integration(插件) Android studio VCS inte ...

  6. 解决方案--java执行cmd命令ProcessBuilder--出错Exception in thread "main" java.io.IOException: Cannot run program "dir d:\": CreateProcess error=2(xjl456852原创)

    当我尝试在java中通过ProcessBuilder运行window的cmd命令时出现错误: public static void main(String [] args) throws IOExce ...

  7. android java.io.IOException: open failed: EBUSY (Device or resource busy)

    今天遇到一个奇怪的问题, 测试在程序的下载界面,下载一个文件第一次下载成功,删除后再下载结果下载报错, 程序:file.createNewFile(); 报错:java.io.IOException: ...

  8. java.io.IOException: open failed: EACCES (Permission denied)问题解决

    1.  问题描述:在Android中,用程序访问Sdcard时,有时出现“java.io.IOException: open failed: EACCES (Permission denied)&qu ...

  9. hadoop错误Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@17bda0f2 java.io.IOException Spill failed

    1.错误    Ignoring exception during close for org.apache.hadoop.mapred.MapTask$NewOutputCollector@17bd ...

随机推荐

  1. 自然语言处理NLP学习笔记二:NLP实战-开源工具tensorflow与jiagu使用

    前言: NLP工具有人推荐使用spacy,有人推荐使用tensorflow. tensorflow:中文译作:张量(超过3维的叫张量)详细资料参考:http://www.tensorfly.cn/ J ...

  2. 2 基于梯度的攻击——PGD

    PGD攻击原论文地址——https://arxiv.org/pdf/1706.06083.pdf 1.PGD攻击的原理 PGD(Project Gradient Descent)攻击是一种迭代攻击,可 ...

  3. Python globals()和locals()比较

    Python的两个内置函数,globals()和locals() ,它们提供了基于字典的访问局部和全局变量的方式. globals()是可写的,即,可修改该字典中的键值,可新增和删除键值对. 而loc ...

  4. MSF魔鬼训练营-3.4.2网络漏洞扫描-OpenVAS(待补完)PS:在虚拟机里面运行OpenVAS扫描的进度真的是超慢啊...

     由于新版的kali中没有预装OpenVAS.所以在虚拟机中安装花了非常多的时间. 安装过程参考:http://www.cnblogs.com/zlslch/p/6872559.html过程写的非常详 ...

  5. Python全栈开发之3、深浅拷贝、变量和函数、递归、函数式编程、内置函数

    一.深浅拷贝 1.数字和字符串 对于 数字 和 字符串 而言,赋值.浅拷贝和深拷贝无意义,因为其永远指向同一个内存地址. import copy # 定义变量 数字.字符串 # n1 = 123 n1 ...

  6. hive_UDTF函数

    hive的UDTF函数是可以输入一行数据然后输出多行多列(可以是单行/单列)的函数 public class Tex extends GenericUDTF { /** * 对传入的参数进行初始化 * ...

  7. java crc32

      原文链接:http://blog.csdn.net/tbkken/article/details/8210952 在优化关系数据库的关联查询的时候,ON的字段选择为数值型,必须是会比字符型的数据快 ...

  8. python_0基础开始_day06

    第六节 1.小数据池 ==,is,id ==:查看等号两边的值是否一样 a = 9b = 9print(a == b) # 返回Truec = "dog"d = "dog ...

  9. mysql-1.1基础

    笔记内容:mysql基础,创建数据库,创建表,操作数据表,操作数据,简单查询,条件查询,排序,分组,聚合,连接查询(等值连接,内连接,外链接),子查询 自己提示:脑图笔记存于网盘中  右键:新标签页打 ...

  10. Codeforces 1194B. Yet Another Crosses Problem

    传送门 直接枚举填满哪一行,然后看看这一行填满以后哪一列最小 这个预处理一下 $cnt[i]$ 表示初始时第 $i$ 列有几个位置填满就可以做到 $O(m)$ 对于所有情况取个 $min$ 就是答案, ...