Traversing HDFS with the Layui Tree Module
Note: please credit the source when reposting.
1. Entity
package com.ebd.application.common.Base;

import java.util.List;

public class HDFSDir {

    private String id;                // node id
    private String pid;               // parent id
    private String name;              // current directory name
    private String alias;             // directory alias (optional)
    private String dir;               // full path below the root "/"
    private boolean spread;           // whether the node is expanded (true/false)
    private List<HDFSDir> children;   // child directories

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getPid() {
        return pid;
    }

    public void setPid(String pid) {
        this.pid = pid;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getAlias() {
        return alias;
    }

    public void setAlias(String alias) {
        this.alias = alias;
    }

    public String getDir() {
        return dir;
    }

    public void setDir(String dir) {
        this.dir = dir;
    }

    public boolean isSpread() {
        return spread;
    }

    public void setSpread(boolean spread) {
        this.spread = spread;
    }

    public List<HDFSDir> getChildren() {
        return children;
    }

    public void setChildren(List<HDFSDir> children) {
        this.children = children;
    }
}
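Before wiring this entity to HDFS, it helps to see the shape the Layui tree consumes: nested objects with name, spread, and children keys, which is exactly what this bean serializes to. A minimal sketch, built by hand (the ids are hard-coded placeholders rather than Identities.uuid() values; json-lib is the same library the utility class below uses):

package hdfstest;

import java.util.Arrays;

import com.ebd.application.common.Base.HDFSDir;
import net.sf.json.JSONObject;

// Minimal sketch: build a two-level tree by hand and serialize it with
// json-lib. The ids here are placeholders, not Identities.uuid() values.
public class HDFSDirJsonSketch {
    public static void main(String[] args) {
        HDFSDir root = new HDFSDir();
        root.setId("1");
        root.setName("/");
        root.setDir("/");
        root.setSpread(true);

        HDFSDir child = new HDFSDir();
        child.setId("2");
        child.setPid("1");
        child.setName("testput");
        child.setDir("/testput");

        root.setChildren(Arrays.asList(child));

        // Prints nested JSON with name/spread/children keys -- the node
        // shape a layui tree takes as its data source.
        System.out.println(JSONObject.fromObject(root).toString());
    }
}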
2. Utility class
package hdfstest;

import java.io.IOException;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;

import com.ebd.application.common.Base.HDFSDir;
import com.ebd.application.common.utils.Identities;

import net.sf.json.JSONObject;

public class HdfsListTest {

    // HDFS access address
    private static final String HDFS = "hdfs://bigdata.hadoop.com:9000";

    // HDFS path
    private String hdfsPath;
    // Hadoop configuration
    private Configuration conf;

    public HdfsListTest(Configuration conf) {
        this(HDFS, conf);
    }

    public HdfsListTest(String hdfs, Configuration conf) {
        this.hdfsPath = hdfs;
        this.conf = conf;
    }

    // Entry point
    public static void main(String[] args) throws IOException {
        JobConf conf = config();
        // System.out.println(conf.get("hadoop.http.staticuser.user"));
        // System.out.println(System.getenv("HADOOP_HOME"));
        HdfsListTest hdfs = new HdfsListTest(conf);
        // hdfs.mkdirs("/testput");
        // hdfs.copyFile("C:\\Users\\Administrator\\Desktop\\testput", "/testput/testput2");
        // hdfs.catFile("/testput/testput");
        // hdfs.download("/testput/testput", "E:\\");
        // hdfs.ls("hdfs://bigdata.hadoop.com:9000/user");
        // hdfs.rmr("/testput");
        // List<String> fileList = hdfs.getTree("/", "/", "|-");
        List<HDFSDir> kk = new ArrayList<HDFSDir>();
        HDFSDir ds1 = new HDFSDir();
        HDFSDir ds2 = new HDFSDir();
        HDFSDir ds3 = new HDFSDir();
        ds1.setId(Identities.uuid());
        ds1.setDir("/testput");
        ds2.setId(Identities.uuid());
        ds2.setDir("/user");
        ds3.setId(Identities.uuid());
        ds3.setDir("/tmp");
        // kk.add(ds1);
        // kk.add(ds2);
        // kk.add(ds3);
        HDFSDir ds = new HDFSDir();
        ds.setId(Identities.uuid());
        ds.setDir("/");
        kk.add(ds);
        // List<HDFSDir> fileList = hdfs.getListTree("/", "/user", 0);
        HDFSDir hdfss = hdfs.getChildNode(ds);
        JSONObject object = JSONObject.fromObject(hdfss);
        System.out.println(dirJsonFunc(object.toString()));
    }

    // Load the Hadoop configuration files
    public static JobConf config() {
        JobConf conf = new JobConf(HdfsListTest.class);
        conf.setJobName("HdfsDAO");
        conf.addResource("hadoop/core-site.xml");
        conf.addResource("hadoop/hdfs-site.xml");
        conf.addResource("hadoop/mapred-site.xml");
        return conf;
    }

    // Create a directory under the root
    public void mkdirs(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        if (!fs.exists(path)) {
            fs.mkdirs(path);
            System.out.println("Create: " + folder);
        }
        fs.close();
    }

    // List the files under a directory
    public FileStatus[] ls(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        FileStatus[] list = fs.listStatus(path);
        System.out.println("ls: " + folder);
        System.out.println("==========================================================");
        if (list != null)
            for (FileStatus f : list) {
                System.out.printf("name: %s, folder: %s, size: %d\n", f.getPath(), f.isDir(), f.getLen());
                // System.out.printf("%s, folder: %s, size: %dK\n", f.getPath().getName(), (f.isDir() ? "dir" : "file"), f.getLen() / 1024);
            }
        System.out.println("==========================================================");
        fs.close();
        return list;
    }

    // Copy a local file or directory to HDFS; remote is a path such as /user/<name>
    public void copyFile(String local, String remote) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.copyFromLocalFile(new Path(local), new Path(remote));
        System.out.println("copy from: " + local + " to " + remote);
        fs.close();
    }

    // Print the first kilobyte of a remote file
    public void catFile(String remote) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        Path path = new Path(remote);
        if (fs.isFile(path)) {
            FSDataInputStream instream = fs.open(path);
            byte[] b = new byte[1024];
            instream.read(b);
            System.out.println(new String(b, "utf-8"));
            instream.close();
            fs.close();
        }
    }

    List<String> treeList = new ArrayList<String>();

    // Walk the tree under remote, printing an indented console listing
    public List<String> getTree(String top, String remote, String prefix) throws IOException {
        Path path = new Path(remote);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        FileStatus[] list = fs.listStatus(path);
        if (list != null)
            for (FileStatus f : list) {
                System.out.println(prefix + f.getPath().getName());
                top += f.getPath().getName();
                treeList.add(top);
                if (fs.isDirectory(f.getPath())) {
                    getTree(top, f.getPath().toString(), prefix + "-");
                }
            }
        return treeList;
    }

    int id = 0;
    static int pid = 0;
    List<HDFSDir> dirList = new ArrayList<HDFSDir>();
    HDFSDir hdfsDir = null;

    // Single-level listing of the directories under remote as HDFSDir beans
    private List<HDFSDir> getListTree(String top, String remote, int pid) throws IOException {
        Path path = new Path(remote);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        FileStatus[] list = fs.listStatus(path);
        if (list != null)
            for (FileStatus f : list) {
                if (f.isDirectory()) {
                    hdfsDir = new HDFSDir();
                    // hdfsDir.setId(id++);
                    // hdfsDir.setPid(pid);
                    hdfsDir.setName(f.getPath().getName());
                    hdfsDir.setAlias(f.getPath().getName());
                    hdfsDir.setDir(f.getPath().toString().substring(HDFS.length()));
                    hdfsDir.setSpread(false);
                    System.out.println(f.getPath().getName() + "=" + f.getPath().toString().substring(HDFS.length()));
                    dirList.add(hdfsDir);
                }
                // if (fs.isDirectory(f.getPath())) {
                //     getListTree(top, f.getPath().toString(), pid++);
                // }
            }
        return dirList;
    }

    List<HDFSDir> cDirList = null;

    // Recursively attach every subdirectory of pDir as HDFSDir children
    public HDFSDir getChildNode(HDFSDir pDir) throws IOException {
        Path path = null;
        if (pDir.getChildren() != null && pDir.getChildren().size() >= 1) {
            for (HDFSDir p : pDir.getChildren()) {
                path = new Path(p.getDir());
                FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
                FileStatus[] list = fs.listStatus(path);
                if (list != null) {
                    cDirList = new ArrayList<HDFSDir>();
                    for (FileStatus f : list) {
                        if (f.isDirectory()) {
                            hdfsDir = new HDFSDir();
                            hdfsDir.setId(Identities.uuid());
                            hdfsDir.setPid(p.getId());
                            hdfsDir.setName(f.getPath().getName());
                            hdfsDir.setAlias(f.getPath().getName());
                            hdfsDir.setDir(f.getPath().toString().substring(HDFS.length()));
                            hdfsDir.setSpread(false);
                            cDirList.add(hdfsDir);
                        }
                    }
                    p.setChildren(cDirList);
                    for (HDFSDir pp : cDirList) {
                        getChildNode(pp);
                    }
                }
            }
        } else {
            path = new Path(pDir.getDir());
            FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
            FileStatus[] list = fs.listStatus(path);
            if (list != null) {
                cDirList = new ArrayList<HDFSDir>();
                for (FileStatus f : list) {
                    if (f.isDirectory()) {
                        hdfsDir = new HDFSDir();
                        hdfsDir.setId(Identities.uuid());
                        hdfsDir.setPid(pDir.getId());
                        // guard against an empty name (e.g. the root), shown as "/"
                        hdfsDir.setName(f.getPath().getName().equals("") ? "/" : f.getPath().getName());
                        hdfsDir.setAlias(f.getPath().getName().equals("") ? "/" : f.getPath().getName());
                        hdfsDir.setDir(f.getPath().toString().substring(HDFS.length()));
                        hdfsDir.setSpread(false);
                        cDirList.add(hdfsDir);
                    }
                }
                pDir.setChildren(cDirList);
                for (HDFSDir pp : cDirList) {
                    getChildNode(pp);
                }
            }
        }
        return pDir;
    }

    // Rewrite json-lib output into the unquoted-key, single-quote style used in layui page scripts
    public static String dirJsonFunc(String jsonStr) {
        if (StringUtils.isNotBlank(jsonStr)) {
            String[] reg_array = { "([\"])", "(,['])", "([']:)" };
            String[] rpa_array = { "'", ",", ":" };
            for (int i = 0; i < reg_array.length; i++) {
                jsonStr = jsonStr.replaceAll(reg_array[i], rpa_array[i]);
            }
            jsonStr = jsonStr.replace("{'", "{");
            jsonStr = jsonStr.replace("'}", "}");
        }
        return jsonStr;
    }

    // Delete a file or directory
    public void rmr(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.deleteOnExit(path);
        System.out.println("Delete: " + folder);
        fs.close();
    }

    // Download a file to the local file system
    public void download(String remote, String local) throws IOException {
        Path path = new Path(remote);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.copyToLocalFile(path, new Path(local));
        System.out.println("download: from " + remote + " to " + local);
        fs.close();
    }
}
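The listing relies on Identities.uuid(), which the post never shows. A plausible stand-in — my assumption, not the author's code — is a dash-stripped java.util.UUID, which matches how the ids are used (opaque unique strings for the tree nodes):

package com.ebd.application.common.utils;

import java.util.UUID;

// Hypothetical reconstruction of the missing Identities helper, just
// enough for HdfsListTest to compile: a random UUID without dashes.
public class Identities {
    public static String uuid() {
        return UUID.randomUUID().toString().replace("-", "");
    }
}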
Conversion utility class
package test;

import org.apache.commons.lang3.StringUtils;

import com.ebd.application.common.Base.HDFSDir;
import com.ebd.application.common.utils.Identities;

import net.sf.json.JSONArray;

public class TestObjectToJson {

    public static void main(String[] args) {
        HDFSDir ds = new HDFSDir();
        ds.setId(Identities.uuid());
        ds.setDir("/testput");
        JSONArray js = JSONArray.fromObject(ds);
        // System.out.println(js.toString());
        String jsonStr = js.toString();
        // String reg_1 = "([\"])";    // double quotes to single quotes
        // String reg_2 = "(,['])";    // drop the single quote after a comma
        // String reg_3 = "([']:)";    // drop the single quote before a colon
        // String reg_4 = "('{'['])";  // drop the single quote after the opening brace
        // Pattern pattern = Pattern.compile(regEx);
        // jsonStr = jsonStr.replaceAll(reg_1, "'");
        // jsonStr = jsonStr.replaceAll(reg_2, ",");
        // jsonStr = jsonStr.replaceAll(reg_3, ":");
        // jsonStr = jsonStr.replaceAll("{'", "{");
        System.out.println(dirJsonFunc(jsonStr));
    }

    // Rewrite json-lib output into the unquoted-key, single-quote style used in layui page scripts
    public static String dirJsonFunc(String jsonStr) {
        if (StringUtils.isNotBlank(jsonStr)) {
            String[] reg_array = { "([\"])", "(,['])", "([']:)" };
            String[] rpa_array = { "'", ",", ":" };
            for (int i = 0; i < reg_array.length; i++) {
                jsonStr = jsonStr.replaceAll(reg_array[i], rpa_array[i]);
            }
            jsonStr = jsonStr.replace("{'", "{");
            jsonStr = jsonStr.replace("'}", "}");
        }
        return jsonStr;
    }
}
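To make the conversion concrete, here is a small check with a hand-written input string; the expected output in the comment is my reading of the regexes above, not output captured from a cluster:

package test;

// Feed dirJsonFunc a string shaped like json-lib output for one node.
// Expected (by my reading of the regexes):
//   {id:'a1',name:'testput',spread:false}
// i.e. unquoted keys and single-quoted values, the literal-object style
// that can be pasted straight into a layui page script.
public class DirJsonFuncDemo {
    public static void main(String[] args) {
        String raw = "{\"id\":\"a1\",\"name\":\"testput\",\"spread\":false}";
        System.out.println(TestObjectToJson.dirJsonFunc(raw));
    }
}

Note that the approach is regex-based and will mangle values that themselves contain quotes, commas, or colons; it works here because HDFS directory names rarely do.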
Console utility class
package hdfstest;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.mapred.JobConf;

import com.ebd.application.common.utils.CreateFileUtil;

public class HdfsDirConsoleTest {

    // HDFS access address
    private static final String HDFS = "hdfs://bigdata.hadoop.com:9000";

    // HDFS path
    private String hdfsPath;
    // Hadoop configuration
    private Configuration conf;

    public HdfsDirConsoleTest(Configuration conf) {
        this(HDFS, conf);
    }

    public HdfsDirConsoleTest(String hdfs, Configuration conf) {
        this.hdfsPath = hdfs;
        this.conf = conf;
    }

    // Entry point
    public static void main(String[] args) throws IOException {
        JobConf conf = config();
        System.out.println(conf.get("hadoop.http.staticuser.user"));
        System.out.println(System.getenv("HADOOP_HOME"));
        HdfsDirConsoleTest hdfs = new HdfsDirConsoleTest(conf);
        // hdfs.mkdirs("/testput");
        // hdfs.copyFile("C:\\Users\\Administrator\\Desktop\\testput", "/testput/testput2");
        // hdfs.catFile("/testput/testput");
        hdfs.download("/testput/testput", "D:/ss/ss", conf);
        // hdfs.ls("hdfs://bigdata.hadoop.com:9000/");
        // hdfs.rmr("/testput");
        // List<String> fileList = hdfs.getTree("/", "/", "|-");
        // for (int i = 0; i < fileList.size(); i++) {
        //     System.out.println(fileList.get(i));
        // }
        System.out.println("success!");
    }

    // Load the Hadoop configuration files
    public static JobConf config() {
        JobConf conf = new JobConf(HdfsDirConsoleTest.class);
        conf.setJobName("HdfsDAO");
        conf.addResource("hadoop/core-site.xml");
        conf.addResource("hadoop/hdfs-site.xml");
        conf.addResource("hadoop/mapred-site.xml");
        return conf;
    }

    // Create a directory under the root
    public void mkdirs(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        if (!fs.exists(path)) {
            fs.mkdirs(path);
            System.out.println("Create: " + folder);
        }
        fs.close();
    }

    // List the files under a directory
    public FileStatus[] ls(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        FileStatus[] list = fs.listStatus(path);
        System.out.println("ls: " + folder);
        System.out.println("==========================================================");
        if (list != null)
            for (FileStatus f : list) {
                System.out.printf("name: %s, folder: %s, size: %d\n", f.getPath(), f.isDir(), f.getLen());
                System.out.println(f.getOwner() + "==" + f.getBlockSize() + "=" + f.getModificationTime() + "--" + f.getPermission() + "=" + f.getReplication());
                // System.out.printf("%s, folder: %s, size: %dK\n", f.getPath().getName(), (f.isDir() ? "dir" : "file"), f.getLen() / 1024);
            }
        System.out.println("==========================================================");
        fs.close();
        return list;
    }

    // Copy a local file or directory to HDFS; remote is a path such as /user/<name>
    public void copyFile(String local, String remote) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.copyFromLocalFile(new Path(local), new Path(remote));
        System.out.println("copy from: " + local + " to " + remote);
        fs.close();
    }

    // Print the first kilobyte of a remote file
    public void catFile(String remote) throws IOException {
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        Path path = new Path(remote);
        if (fs.isFile(path)) {
            FSDataInputStream instream = fs.open(path);
            byte[] b = new byte[1024];
            instream.read(b);
            System.out.println(new String(b, "utf-8"));
            instream.close();
            fs.close();
        }
    }

    List<String> treeList = new ArrayList<String>();

    // Walk the tree under remote, printing an indented console listing
    public List<String> getTree(String top, String remote, String prefix) throws IOException {
        Path path = new Path(remote);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        FileStatus[] list = fs.listStatus(path);
        if (list != null)
            for (FileStatus f : list) {
                System.out.println(prefix + f.getPath().getName());
                top += f.getPath().getName();
                treeList.add(top);
                if (fs.isDirectory(f.getPath())) {
                    getTree(top, f.getPath().toString(), prefix + "-");
                }
            }
        return treeList;
    }

    // Delete a file or directory
    public void rmr(String folder) throws IOException {
        Path path = new Path(folder);
        FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        fs.deleteOnExit(path);
        System.out.println("Delete: " + folder);
        fs.close();
    }

    // Download a file to the local file system, streaming it by hand
    public void download(String remote, String local, JobConf conf) throws IOException {
        // Path path = new Path(remote);
        // FileSystem fs = FileSystem.get(URI.create(hdfsPath), conf);
        // fs.copyToLocalFile(path, new Path(local));
        // System.out.println("download: from " + remote + " to " + local);
        // fs.close();
        FileSystem fs = FileSystem.get(URI.create(remote), conf);
        FSDataInputStream fsdi = fs.open(new Path(remote));
        if (CreateFileUtil.createDir(local)) {
            OutputStream output = new FileOutputStream(local + remote.substring(remote.lastIndexOf("/")));
            IOUtils.copyBytes(fsdi, output, 4096, true); // closes both streams when done
        }
    }

    // Create the parent directories of a local path if they are missing
    public static void makdir(String path) {
        File file = new File(path);
        File fileParent = file.getParentFile();
        if (fileParent != null && !fileParent.exists()) {
            fileParent.mkdirs();
        }
    }
}
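download() depends on CreateFileUtil.createDir, which is also not shown in the post. A minimal guess at it — an assumption, labeled as such — returns true when the target directory already exists or could be created:

package com.ebd.application.common.utils;

import java.io.File;

// Hypothetical reconstruction of the missing CreateFileUtil, just enough
// for HdfsDirConsoleTest.download to run: createDir reports whether the
// directory exists or was successfully created.
public class CreateFileUtil {
    public static boolean createDir(String dirPath) {
        File dir = new File(dirPath);
        return dir.exists() || dir.mkdirs();
    }
}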
由于项目需要,需开发同一套汽车.火车.皮带采样机的上位机软件. 看过之前的上位机软件,老版本都是DelPhi.VB开发,稍微新语言开发的是采用winform开发.要不就是使用组态软件. Delphi语 ...