1. Windows client preparation

<1> Double-click to install the Hadoop dependency

<2> Place it in a path with no Chinese characters, then double-click winutils.exe to install

If the installation fails, install the Microsoft runtime libraries (Visual C++ Redistributable) first.

<3> Create a Java Maven project

<4> Maven coordinates
<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>3.1.3</version>
    </dependency>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.12</version>
    </dependency>
    <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>slf4j-log4j12</artifactId>
        <version>1.7.30</version>
    </dependency>
</dependencies>
<5> Create a log4j.properties configuration file under src/main/resources
log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n
log4j.appender.logfile=org.apache.log4j.FileAppender
log4j.appender.logfile.File=target/spring.log
log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
log4j.appender.logfile.layout.ConversionPattern=%d %p [%c] - %m%n
<6> Create a new class HdfsClient

2. API operations

<1> Create a directory
package com.demo.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * HDFS client operations:
 * 1. Obtain a client object
 * 2. Perform the operations
 * 3. Close the resource
 */
public class HdfsClient {

    private FileSystem fs;

    @Before
    public void init() throws URISyntaxException, InterruptedException, IOException {
        // NameNode address of the cluster
        URI uri = new URI("hdfs://hadoop102:8020");
        // Create a configuration object
        Configuration configuration = new Configuration();
        // User to act as
        String user = "root";
        // Get the client object
        fs = FileSystem.get(uri, configuration, user);
    }

    @After
    public void close() throws IOException {
        // Release the resource
        fs.close();
    }

    @Test
    public void testMkdir() throws IOException {
        // Create a directory
        fs.mkdirs(new Path("/apipath1"));
    }
}
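To confirm the directory was actually created, FileSystem.exists can be checked right after the call. A minimal sketch reusing the fs field above (the test name testMkdirAndCheck is new here):

    @Test
    public void testMkdirAndCheck() throws IOException {
        fs.mkdirs(new Path("/apipath1"));
        // exists() returns true once the directory is visible in the namespace
        System.out.println(fs.exists(new Path("/apipath1"))); // expected: true
    }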
<2> Upload
// Upload
@Test
public void testPut() throws IOException {
    // Arg 1: whether to delete the source file
    // Arg 2: whether to allow overwriting
    // Arg 3: source path (local)
    // Arg 4: destination path (HDFS)
    fs.copyFromLocalFile(false, true,
            new Path("E:\\study\\BigData\\Hadoop\\脚本\\myhadoop.sh"),
            new Path("hdfs://hadoop102/code"));
}
<3> Download
// Download
@Test
public void testGet() throws IOException {
    // Arg 1: whether to delete the source file
    // Arg 2: source path (HDFS)
    // Arg 3: destination path (local)
    // Arg 4: whether to use the raw local file system;
    //        false = write a local .crc checksum file, true = skip the checksum
    fs.copyToLocalFile(false,
            new Path("hdfs://hadoop102/code/myhadoop.sh"),
            new Path("E:\\"), true);
}
The .xxx.crc file is a checksum used to verify file integrity.
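The checksum HDFS keeps for a file can also be fetched through the client API via FileSystem.getFileChecksum. A minimal sketch (the test name testChecksum is new here; the path reuses the file uploaded above):

    // requires: import org.apache.hadoop.fs.FileChecksum;
    @Test
    public void testChecksum() throws IOException {
        // For HDFS this is typically a composite MD5-of-MD5-of-CRC checksum;
        // getFileChecksum may return null on file systems without checksum support
        FileChecksum checksum = fs.getFileChecksum(new Path("hdfs://hadoop102/code/myhadoop.sh"));
        if (checksum != null) {
            System.out.println(checksum.getAlgorithmName());
        }
    }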

<4> Delete
// Delete
@Test
public void testRm() throws IOException {
    // Arg 1: target path
    // Arg 2: whether to delete recursively
    // 1. Delete a file
    // fs.delete(new Path("hdfs://hadoop102/code/myhadoop.sh"), true);
    // 2. Delete an empty directory
    // fs.delete(new Path("hdfs://hadoop102/code/code1"), true);
    // 3. Delete a non-empty directory; without recursive deletion this throws an exception
    fs.delete(new Path("hdfs://hadoop102/code/code2"), true);
}
<5> Rename and move files
// Rename and move files
@Test
public void testMv() throws IOException {
    // Arg 1: source path
    // Arg 2: destination path
    // 1. Rename a file
    // fs.rename(new Path("hdfs://hadoop102/code/myhadoop.sh"),
    //         new Path("hdfs://hadoop102/code/myhadoop_rename.sh"));
    // 2. Move a file and rename it at the same time
    // fs.rename(new Path("hdfs://hadoop102/code/myhadoop_rename.sh"),
    //         new Path("hdfs://hadoop102/myhadoop_remove_and_rename.sh"));
    // 3. Rename a directory
    fs.rename(new Path("hdfs://hadoop102/code"),
            new Path("hdfs://hadoop102/code_rename"));
}
<6> View file details

View the file name, permission, length, and block information.
// View file details
// requires: import org.apache.hadoop.fs.LocatedFileStatus;
//           import org.apache.hadoop.fs.RemoteIterator;
@Test
public void testFileDetail() throws IOException {
    // Arg 1: path to start from
    // Arg 2: whether to recurse into subdirectories
    // Get an iterator over all files
    RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("hdfs://hadoop102/"), true);
    // Iterate over the file information
    while (listFiles.hasNext()) {
        LocatedFileStatus fileStatus = listFiles.next();
        System.out.println("-----------------------------------------");
        System.out.println(fileStatus.toString());
        System.out.println("-----------------------------------------");
    }
}
The toString method:
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(this.getClass().getSimpleName());
    sb.append("{");
    sb.append("path=" + this.path);
    sb.append("; isDirectory=" + this.isdir);
    if (!this.isDirectory()) {
        sb.append("; length=" + this.length);
        sb.append("; replication=" + this.block_replication);
        sb.append("; blocksize=" + this.blocksize);
    }
    sb.append("; modification_time=" + this.modification_time);
    sb.append("; access_time=" + this.access_time);
    sb.append("; owner=" + this.owner);
    sb.append("; group=" + this.group);
    sb.append("; permission=" + this.permission);
    sb.append("; isSymlink=" + this.isSymlink());
    if (this.isSymlink()) {
        try {
            sb.append("; symlink=" + this.getSymlink());
        } catch (IOException var3) {
            throw new RuntimeException("Unexpected exception", var3);
        }
    }
    sb.append("; hasAcl=" + this.hasAcl());
    sb.append("; isEncrypted=" + this.isEncrypted());
    sb.append("; isErasureCoded=" + this.isErasureCoded());
    sb.append("}");
    return sb.toString();
}
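toString prints most fields at once; to read them as typed values, including the block information the heading mentions, the getters on LocatedFileStatus can be used directly. A minimal sketch of an alternative loop body for testFileDetail above (Arrays needs import java.util.Arrays):

    while (listFiles.hasNext()) {
        LocatedFileStatus fileStatus = listFiles.next();
        System.out.println("name:       " + fileStatus.getPath().getName());
        System.out.println("permission: " + fileStatus.getPermission());
        System.out.println("length:     " + fileStatus.getLen());
        // offset, length, and DataNode hosts of every block of the file
        System.out.println("blocks:     " + Arrays.toString(fileStatus.getBlockLocations()));
    }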
<7> Distinguish files from directories
// Determine whether each entry is a file or a directory
// requires: import org.apache.hadoop.fs.FileStatus;
@Test
public void testFile() throws IOException {
    // Get the statuses of all entries under the root path
    FileStatus[] fileStatuses = fs.listStatus(new Path("hdfs://hadoop102/"));
    // Iterate over the statuses
    for (FileStatus fileStatus : fileStatuses) {
        if (fileStatus.isFile()) {
            System.out.println("this is file--" + fileStatus.toString());
            System.out.println("-----------------------------------------");
        } else {
            System.out.println("this is path--" + fileStatus.toString());
            System.out.println("-----------------------------------------");
        }
    }
}
