Traversing HDFS with the Layui_Tree Module
Note: please credit the author if you repost this article.
1. Entity class
package com.ebd.application.common.Base;

import java.util.List;

public class HDFSDir {

    private String id;                 // node id
    private String pid;                // parent node id
    private String name;               // current directory name
    private String alias;              // directory alias (optional)
    private String dir;                // full path below the "/" root
    private boolean spread;            // whether the node is expanded (true/false)
    private List<HDFSDir> children;    // child directories

    public String getId() {
        return id;
    }
    public void setId(String id) {
        this.id = id;
    }
    public String getPid() {
        return pid;
    }
    public void setPid(String pid) {
        this.pid = pid;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getAlias() {
        return alias;
    }
    public void setAlias(String alias) {
        this.alias = alias;
    }
    public String getDir() {
        return dir;
    }
    public void setDir(String dir) {
        this.dir = dir;
    }
    public boolean isSpread() {
        return spread;
    }
    public void setSpread(boolean spread) {
        this.spread = spread;
    }
    public List<HDFSDir> getChildren() {
        return children;
    }
    public void setChildren(List<HDFSDir> children) {
        this.children = children;
    }
}
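These fields line up with the node attributes the Layui tree component reads (name, spread, children), so the bean can be handed straight to a JSON serializer. As a quick sanity check, here is a minimal sketch — assuming the net.sf.json library used later in this post — of how a single node serializes:

import com.ebd.application.common.Base.HDFSDir;

import net.sf.json.JSONObject;

public class HDFSDirJsonDemo {
    public static void main(String[] args) {
        HDFSDir node = new HDFSDir();
        node.setId("1");
        node.setName("user");
        node.setAlias("user");
        node.setDir("/user");
        node.setSpread(false);
        // Prints a flat JSON object along the lines of
        // {"alias":"user","dir":"/user","id":"1","name":"user","spread":false}
        // (field order and the rendering of null fields depend on the net.sf.json version).
        System.out.println(JSONObject.fromObject(node).toString());
    }
}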
2. Utility class
package hdfstest;

import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;

import com.ebd.application.common.Base.HDFSDir;
import com.ebd.application.common.utils.Identities;

import net.sf.json.JSONObject;

public class HdfsListTest {

    // HDFS access address
    private static final String HDFS = "hdfs://bigdata.hadoop.com:9000";

    // HDFS path
    private String hdfsPath;
    // Hadoop configuration
    private Configuration conf;

    public HdfsListTest(Configuration conf) {
        this(HDFS, conf);
    }

    public HdfsListTest(String hdfs, Configuration conf) {
        this.hdfsPath = hdfs;
        this.conf = conf;
    }

    // Entry point
    public static void main(String[] args) throws IOException {
        JobConf conf = config();
        // System.out.println(conf.get("hadoop.http.staticuser.user"));
        // System.out.println(System.getenv("HADOOP_HOME"));
        HdfsListTest hdfs = new HdfsListTest(conf);
        // hdfs.mkdirs("/testput");
        // hdfs.copyFile("C:\\Users\\Administrator\\Desktop\\testput", "/testput/testput2");
        // hdfs.catFile("/testput/testput");
        // hdfs.download("/testput/testput", "E:\\");
        // hdfs.ls("hdfs://bigdata.hadoop.com:9000/user");
        // hdfs.rmr("/testput");
        // List<String> fileList = hdfs.getTree("/", "/", "|-");
        List<HDFSDir> kk = new ArrayList<HDFSDir>();
        HDFSDir ds1 = new HDFSDir();
        HDFSDir ds2 = new HDFSDir();
        HDFSDir ds3 = new HDFSDir();
        ds1.setId(Identities.uuid());
        ds1.setDir("/testput");
        ds2.setId(Identities.uuid());
        ds2.setDir("/user");
        ds3.setId(Identities.uuid());
        ds3.setDir("/tmp");
        // kk.add(ds1);
        // kk.add(ds2);
        // kk.add(ds3);
        HDFSDir ds = new HDFSDir();
        ds.setId(Identities.uuid());
        ds.setDir("/");
        kk.add(ds);
        // List<HDFSDir> fileList = hdfs.getListTree("/", "/user", 0);
        HDFSDir hdfss = hdfs.getChildNode(ds);
        JSONObject object = JSONObject.fromObject(hdfss);
        System.out.println(dirJsonFunc(object.toString()));
    }

    // Load the Hadoop configuration files
    public static JobConf config() {
        JobConf conf = new JobConf(HdfsListTest.class);
        conf.setJobName("HdfsDAO");
        conf.addResource("hadoop/core-site.xml");
        conf.addResource("hadoop/hdfs-site.xml");
        conf.addResource("hadoop/mapred-site.xml");
        return conf;
    }

    // Create a directory under the root
    public void mkdirs(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        if (!fs.exists(path)) {
            fs.mkdirs(path);
            System.out.println("Create: " + folder);
        }
        fs.close();
    }

    // List the files in a directory
    public FileStatus[] ls(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        FileStatus[] list = fs.listStatus(path);
        System.out.println("ls: " + folder);
        System.out.println("==========================================================");
        if (list != null)
            for (FileStatus f : list) {
                System.out.printf("name: %s, folder: %s, size: %d\n", f.getPath(), f.isDir(), f.getLen());
                // System.out.printf("%s, folder: %s, size: %dK\n", f.getPath().getName(), (f.isDir() ? "directory" : "file"), f.getLen() / 1024);
            }
        System.out.println("==========================================================");
        fs.close();
        return list;
    }

    // Upload a local file to HDFS
    public void copyFile(String local, String remote) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        // remote --- /user/<file or directory under that user>
        fs.copyFromLocalFile(new Path(local), new Path(remote));
        System.out.println("copy from: " + local + " to " + remote);
        fs.close();
    }

    // Print the first kilobyte of a file
    public void catFile(String remote) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        Path path = new Path(remote);
        if (fs.isFile(path)) {
            FSDataInputStream instream = fs.open(path);
            byte[] b = new byte[1024];
            instream.read(b);
            System.out.println(new String(b, "utf-8"));
            instream.close();
            fs.close();
        }
    }

    List<String> treeList = new ArrayList<String>();

    // Recursively print the directory tree, extending the prefix at each level
    public List<String> getTree(String top, String remote, String prefix) throws IOException {
        Path path = new Path(remote);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        FileStatus[] list = fs.listStatus(path);
        if (list != null)
            for (FileStatus f : list) {
                // System.out.printf("name: %s, folder: %s, size: %d\n", f.getPath(), f.isDir(), f.getLen());
                System.out.println(prefix + f.getPath().getName());
                top += f.getPath().getName();
                treeList.add(top);
                if (fs.isDirectory(f.getPath())) {
                    getTree(top, f.getPath().toString(), prefix + "-");
                }
            }
        return treeList;
    }

    int id = 0;
    static int pid = 0;
    List<HDFSDir> dirList = new ArrayList<HDFSDir>();
    HDFSDir hdfsDir = null;

    // Collect the immediate subdirectories of remote as HDFSDir beans (flat, single level)
    private List<HDFSDir> getListTree(String top, String remote, int pid) throws IOException {
        Path path = new Path(remote);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        FileStatus[] list = fs.listStatus(path);
        if (list != null)
            for (FileStatus f : list) {
                if (f.isDirectory()) {
                    hdfsDir = new HDFSDir();
                    // hdfsDir.setId(id++);
                    // hdfsDir.setPid(pid);
                    hdfsDir.setName(f.getPath().getName());
                    hdfsDir.setAlias(f.getPath().getName());
                    hdfsDir.setDir(f.getPath().toString().substring(HDFS.length()));
                    hdfsDir.setSpread(false);
                    System.out.println(f.getPath().getName() + "=" + f.getPath().toString().substring(HDFS.length()));
                    dirList.add(hdfsDir);
                }
                // System.out.printf("name: %s, folder: %s, size: %d\n", f.getPath(), f.isDir(), f.getLen());
                // top += f.getPath().getName();
                // if (fs.isDirectory(f.getPath())) {
                //     getListTree(top, f.getPath().toString(), pid++);
                // }
            }
        return dirList;
    }

    List<HDFSDir> cDirList = null;

    // Recursively attach the subdirectory tree below pDir; every child node gets a
    // fresh id and its parent's id as pid, so the front end can rebuild the hierarchy.
    public HDFSDir getChildNode(HDFSDir pDir) throws IOException {
        Path path = null;
        if (pDir.getChildren() != null && pDir.getChildren().size() >= 1) {
            for (HDFSDir p : pDir.getChildren()) {
                path = new Path(p.getDir());
                FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
                FileStatus[] list = fs.listStatus(path);
                if (list != null) {
                    cDirList = new ArrayList<HDFSDir>();
                    for (FileStatus f : list) {
                        if (f.isDirectory()) {
                            hdfsDir = new HDFSDir();
                            hdfsDir.setId(Identities.uuid());
                            hdfsDir.setPid(p.getId());
                            hdfsDir.setName(f.getPath().getName());
                            hdfsDir.setAlias(f.getPath().getName());
                            hdfsDir.setDir(f.getPath().toString().substring(HDFS.length()));
                            hdfsDir.setSpread(false);
                            cDirList.add(hdfsDir);
                        }
                    }
                    p.setChildren(cDirList);
                    for (HDFSDir pp : cDirList) {
                        getChildNode(pp);
                    }
                }
            }
        } else {
            path = new Path(pDir.getDir());
            FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
            FileStatus[] list = fs.listStatus(path);
            if (list != null) {
                cDirList = new ArrayList<HDFSDir>();
                for (FileStatus f : list) {
                    if (f.isDirectory()) {
                        hdfsDir = new HDFSDir();
                        hdfsDir.setId(Identities.uuid());
                        hdfsDir.setPid(pDir.getId());
                        hdfsDir.setName(f.getPath().getName().equals("") ? "/" : f.getPath().getName());
                        hdfsDir.setAlias(f.getPath().getName().equals("") ? "/" : f.getPath().getName());
                        hdfsDir.setDir(f.getPath().toString().substring(HDFS.length()));
                        hdfsDir.setSpread(false);
                        cDirList.add(hdfsDir);
                    }
                }
                pDir.setChildren(cDirList);
                for (HDFSDir pp : cDirList) {
                    getChildNode(pp);
                }
            }
        }
        return pDir;
    }

    // Rewrite the double-quoted JSON emitted by net.sf.json into the
    // single-quoted, bare-key form consumed by the Layui tree
    public static String dirJsonFunc(String jsonStr) {
        if (StringUtils.isNotBlank(jsonStr)) {
            String[] reg_array = {"([\"])", "(,['])", "([']:)"};
            String[] rpa_array = {"'", ",", ":"};
            for (int i = 0; i < reg_array.length; i++) {
                jsonStr = jsonStr.replaceAll(reg_array[i], rpa_array[i]);
            }
            jsonStr = jsonStr.replace("{'", "{");
            jsonStr = jsonStr.replace("'}", "}");
        }
        return jsonStr;
    }

    // Delete a file or directory
    public void rmr(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.deleteOnExit(path);
        System.out.println("Delete: " + folder);
        fs.close();
    }

    // Download a file to the local file system
    public void download(String remote, String local) throws IOException {
        Path path = new Path(remote);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.copyToLocalFile(path, new Path(local));
        System.out.println("download: from " + remote + " to " + local);
        fs.close();
    }
}
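The Identities.uuid() helper used above is project-internal code that this post does not include. A minimal stand-in — an assumption on my part, not the original implementation — would simply wrap java.util.UUID:

package com.ebd.application.common.utils;

import java.util.UUID;

// Hypothetical stand-in for the project's Identities helper (not shown in this post).
public class Identities {
    // A random UUID without hyphens serves as a unique node id for the tree.
    public static String uuid() {
        return UUID.randomUUID().toString().replace("-", "");
    }
}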
3. Conversion utility class
package test;

import org.apache.commons.lang3.StringUtils;

import com.ebd.application.common.Base.HDFSDir;
import com.ebd.application.common.utils.Identities;

import net.sf.json.JSONArray;

public class TestObjectToJson {

    public static void main(String[] args) {
        HDFSDir ds = new HDFSDir();
        ds.setId(Identities.uuid());
        ds.setDir("/testput");
        JSONArray js = JSONArray.fromObject(ds);
        // System.out.println(js.toString());
        String jsonStr = js.toString();
        // String reg_1 = "([\"])";   // double quotes -> single quotes
        // String reg_2 = "(,['])";   // drop the single quote after a comma
        // String reg_3 = "([']:)";   // drop the single quote before a colon
        // String reg_4 = "('{'['])"; // drop the single quote after an opening brace
        // Pattern pattern = Pattern.compile(regEx);
        // jsonStr = jsonStr.replaceAll(reg_1, "'");
        // jsonStr = jsonStr.replaceAll(reg_2, ",");
        // jsonStr = jsonStr.replaceAll(reg_3, ":");
        // jsonStr = jsonStr.replaceAll("{'", "{");
        // System.out.println(jsonStr);
    }

    // Rewrite the double-quoted JSON emitted by net.sf.json into the
    // single-quoted, bare-key form consumed by the Layui tree
    public static String dirJsonFunc(String jsonStr) {
        if (StringUtils.isNotBlank(jsonStr)) {
            String[] reg_array = {"([\"])", "(,['])", "([']:)"};
            String[] rpa_array = {"'", ",", ":"};
            for (int i = 0; i < reg_array.length; i++) {
                jsonStr = jsonStr.replaceAll(reg_array[i], rpa_array[i]);
            }
            jsonStr = jsonStr.replace("{'", "{");
            jsonStr = jsonStr.replace("'}", "}");
        }
        return jsonStr;
    }
}
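To make the regex pipeline concrete, here is a small worked example (the input string is a hypothetical serialized node, not output captured from a cluster). Every double quote first becomes a single quote, the quotes around keys are then stripped via the comma and colon patterns, and finally the quotes hugging the braces are removed:

package test;

public class DirJsonFuncDemo {
    public static void main(String[] args) {
        // Double-quoted JSON as emitted by net.sf.json...
        String in = "[{\"dir\":\"/testput\",\"name\":\"testput\",\"spread\":false}]";
        // ...comes out with bare keys and single-quoted string values:
        // [{dir:'/testput',name:'testput',spread:false}]
        System.out.println(TestObjectToJson.dirJsonFunc(in));
    }
}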
4. Console test utility class
package hdfstest;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.mapred.JobConf;

import com.ebd.application.common.utils.CreateFileUtil;

public class HdfsDirConsoleTest {

    // HDFS access address
    private static final String HDFS = "hdfs://bigdata.hadoop.com:9000";

    // HDFS path
    private String hdfsPath;
    // Hadoop configuration
    private Configuration conf;

    public HdfsDirConsoleTest(Configuration conf) {
        this(HDFS, conf);
    }

    public HdfsDirConsoleTest(String hdfs, Configuration conf) {
        this.hdfsPath = hdfs;
        this.conf = conf;
    }

    // Entry point
    public static void main(String[] args) throws IOException {
        JobConf conf = config();
        System.out.println(conf.get("hadoop.http.staticuser.user"));
        System.out.println(System.getenv("HADOOP_HOME"));
        HdfsDirConsoleTest hdfs = new HdfsDirConsoleTest(conf);
        // hdfs.mkdirs("/testput");
        // hdfs.copyFile("C:\\Users\\Administrator\\Desktop\\testput", "/testput/testput2");
        // hdfs.catFile("/testput/testput");
        hdfs.download("/testput/testput", "D:/ss/ss", conf);
        // hdfs.ls("hdfs://bigdata.hadoop.com:9000/");
        // hdfs.rmr("/testput");
        // List<String> fileList = hdfs.getTree("/", "/", "|-");
        // for (int i = 0; i < fileList.size(); i++) {
        //     System.out.println(fileList.get(i));
        // }
        System.out.println("success!");
    }

    // Load the Hadoop configuration files
    public static JobConf config() {
        JobConf conf = new JobConf(HdfsDirConsoleTest.class);
        conf.setJobName("HdfsDAO");
        conf.addResource("hadoop/core-site.xml");
        conf.addResource("hadoop/hdfs-site.xml");
        conf.addResource("hadoop/mapred-site.xml");
        return conf;
    }

    // Create a directory under the root
    public void mkdirs(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        if (!fs.exists(path)) {
            fs.mkdirs(path);
            System.out.println("Create: " + folder);
        }
        fs.close();
    }

    // List the files in a directory, with owner/block/permission details
    public FileStatus[] ls(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        FileStatus[] list = fs.listStatus(path);
        System.out.println("ls: " + folder);
        System.out.println("==========================================================");
        if (list != null)
            for (FileStatus f : list) {
                System.out.printf("name: %s, folder: %s, size: %d\n", f.getPath(), f.isDir(), f.getLen());
                System.out.println(f.getOwner() + "==" + f.getBlockSize() + "=" + f.getModificationTime() + "--" + f.getPermission() + "=" + f.getReplication());
                // System.out.printf("%s, folder: %s, size: %dK\n", f.getPath().getName(), (f.isDir() ? "directory" : "file"), f.getLen() / 1024);
            }
        System.out.println("==========================================================");
        fs.close();
        return list;
    }

    // Upload a local file to HDFS
    public void copyFile(String local, String remote) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        // remote --- /user/<file or directory under that user>
        fs.copyFromLocalFile(new Path(local), new Path(remote));
        System.out.println("copy from: " + local + " to " + remote);
        fs.close();
    }

    // Print the first kilobyte of a file
    public void catFile(String remote) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        Path path = new Path(remote);
        if (fs.isFile(path)) {
            FSDataInputStream instream = fs.open(path);
            byte[] b = new byte[1024];
            instream.read(b);
            System.out.println(new String(b, "utf-8"));
            instream.close();
            fs.close();
        }
    }

    List<String> treeList = new ArrayList<String>();

    // Recursively print the directory tree, extending the prefix at each level
    public List<String> getTree(String top, String remote, String prefix) throws IOException {
        Path path = new Path(remote);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        FileStatus[] list = fs.listStatus(path);
        if (list != null)
            for (FileStatus f : list) {
                // System.out.printf("name: %s, folder: %s, size: %d\n", f.getPath(), f.isDir(), f.getLen());
                System.out.println(prefix + f.getPath().getName());
                top += f.getPath().getName();
                treeList.add(top);
                if (fs.isDirectory(f.getPath())) {
                    getTree(top, f.getPath().toString(), prefix + "-");
                }
            }
        return treeList;
    }

    // Delete a file or directory
    public void rmr(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.deleteOnExit(path);
        System.out.println("Delete: " + folder);
        fs.close();
    }

    // Download a file to the local file system, creating the target directory first
    public void download(String remote, String local, JobConf conf) throws IOException {
        // Path path = new Path(remote);
        // FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        // fs.copyToLocalFile(path, new Path(local));
        // System.out.println("download: from " + remote + " to " + local);
        // fs.close();
        FileSystem fs = FileSystem.get(URI.create(remote), conf);
        FSDataInputStream fsdi = fs.open(new Path(remote));
        if (CreateFileUtil.createDir(local)) {
            OutputStream output = new FileOutputStream(local + remote.substring(remote.lastIndexOf("/")));
            IOUtils.copyBytes(fsdi, output, 4096, true);
        }
    }

    // Create the parent directories of a local path, e.g. "E:/a/aa/", if they do not exist
    public static void makdir(String path) {
        File file = new File(path);
        File fileParent = file.getParentFile();
        if (fileParent != null && !fileParent.exists()) {
            fileParent.mkdirs();
        }
    }
}
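Likewise, CreateFileUtil.createDir, called in download above, is a project helper this post does not show. Judging from the call site, it must create the local target directory and report success; a plausible sketch — again an assumption, not the original code — is:

package com.ebd.application.common.utils;

import java.io.File;

// Hypothetical stand-in for the project's CreateFileUtil helper (not shown in this post).
public class CreateFileUtil {
    // Returns true if the directory already exists or was created successfully.
    public static boolean createDir(String destDirName) {
        File dir = new File(destDirName);
        return dir.exists() || dir.mkdirs();
    }
}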