1. Windows client preparation
(1) Double-click to install the Hadoop dependency package.
(2) Place the dependency folder under a path that contains no Chinese characters, then double-click winutils.exe to install it.
If the installation fails, install the Microsoft Visual C++ runtime first.
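If configuring the HADOOP_HOME environment variable is inconvenient, the Hadoop client also honors the hadoop.home.dir system property. The line below is a minimal sketch (not from the original); D:\hadoop-3.1.3 is a hypothetical directory containing bin\winutils.exe, and the statement would go before the FileSystem is created in the test class shown later.

// Sketch: alternative to the HADOOP_HOME environment variable
// (assumption: D:\hadoop-3.1.3\bin contains winutils.exe)
System.setProperty("hadoop.home.dir", "D:\\hadoop-3.1.3");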
(3) Create a new Java Maven project.
(4) Add the Maven coordinates.
<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>3.1.3</version>
    </dependency>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.12</version>
    </dependency>
    <dependency>
        <groupId>org.slf4j</groupId>
        <artifactId>slf4j-log4j12</artifactId>
        <version>1.7.30</version>
    </dependency>
</dependencies>
(5) Create a log4j.properties configuration file under src/main/resources.
log4j.rootLogger=INFO, stdout
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n
log4j.appender.logfile=org.apache.log4j.FileAppender
log4j.appender.logfile.File=target/spring.log
log4j.appender.logfile.layout=org.apache.log4j.PatternLayout
log4j.appender.logfile.layout.ConversionPattern=%d %p [%c] - %m%n
(6) Create a new class HdfsClient.
2. API operations
(1) Create a folder
package com.demo.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * Client code pattern:
 * 1. Get a client object
 * 2. Perform the operation
 * 3. Close the resource
 */
public class HdfsClient {

    private FileSystem fs;

    @Before
    public void init() throws URISyntaxException, InterruptedException, IOException {
        // NameNode address of the cluster
        URI uri = new URI("hdfs://hadoop102:8020");
        // Create a configuration object
        Configuration configuration = new Configuration();
        // User to operate as
        String user = "root";
        // Get the client object
        fs = FileSystem.get(uri, configuration, user);
    }

    @After
    public void close() throws IOException {
        // Close the resource
        fs.close();
    }

    @Test
    public void testMkdir() throws IOException {
        // Create a folder
        fs.mkdirs(new Path("/apipath1"));
    }
}
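As a quick sanity check (a sketch, not part of the original test), FileSystem#exists can confirm that the directory was actually created:

// Sketch: verify that /apipath1 now exists (uses the same fs field as above)
@Test
public void testMkdirAndCheck() throws IOException {
    fs.mkdirs(new Path("/apipath1"));
    System.out.println("exists: " + fs.exists(new Path("/apipath1")));
}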
(2) Upload
// Upload
@Test
public void testPut() throws IOException {
    // Parameter 1: whether to delete the local source file
    // Parameter 2: whether to overwrite the destination if it already exists
    // Parameter 3: source (local) path
    // Parameter 4: destination (HDFS) path
    fs.copyFromLocalFile(false, true,
            new Path("E:\\study\\BigData\\Hadoop\\script\\myhadoop.sh"),
            new Path("hdfs://hadoop102/code"));
}
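If deleting the source and controlling overwrites are not needed, the two-argument overload of copyFromLocalFile can be used instead; a minimal sketch with the same paths as above:

// Sketch: simpler overload, keeps the local source file
fs.copyFromLocalFile(new Path("E:\\study\\BigData\\Hadoop\\script\\myhadoop.sh"),
        new Path("hdfs://hadoop102/code"));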
(3) Download
// Download
@Test
public void testGet() throws IOException {
    // Parameter 1: whether to delete the source file on HDFS
    // Parameter 2: source path on HDFS
    // Parameter 3: destination path on the local file system
    // Parameter 4: whether to skip local checksum verification;
    //              false enables verification (a .crc file is written), true disables it
    fs.copyToLocalFile(false,
            new Path("hdfs://hadoop102/code/myhadoop.sh"),
            new Path("E:\\"),
            true);
}
The .xxx.crc file is the checksum file used for this local verification.
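For example, a sketch with the same paths as above but with the last argument set to false, so the client performs checksum verification and writes the .crc file next to the download (the exact file name follows the checksum naming convention and is stated here as an assumption):

// Sketch: download with local checksum verification enabled
fs.copyToLocalFile(false,
        new Path("hdfs://hadoop102/code/myhadoop.sh"),
        new Path("E:\\"),
        false);
// a hidden .myhadoop.sh.crc file should appear next to E:\myhadoop.sh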
(4) Delete
// Delete
@Test
public void testRm() throws IOException {
    // Parameter 1: path to delete
    // Parameter 2: whether to delete recursively

    // 1. Delete a file
    //fs.delete(new Path("hdfs://hadoop102/code/myhadoop.sh"), true);

    // 2. Delete an empty directory
    //fs.delete(new Path("hdfs://hadoop102/code/code1"), true);

    // 3. Delete a non-empty directory; without recursive deletion an error is reported
    fs.delete(new Path("hdfs://hadoop102/code/code2"), true);
}
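delete returns a boolean rather than throwing when nothing is removed, so a hedged variant of the same call can log the result:

// Sketch: check whether the path existed and was actually deleted
boolean removed = fs.delete(new Path("hdfs://hadoop102/code/code2"), true);
System.out.println("removed: " + removed);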
(5) Rename and move files
// Rename and move files
@Test
public void testMv() throws IOException {
    // Parameter 1: original path
    // Parameter 2: destination path

    // 1. Rename a file
    //fs.rename(new Path("hdfs://hadoop102/code/myhadoop.sh"),
    //        new Path("hdfs://hadoop102/code/myhadoop_rename.sh"));

    // 2. Move a file and rename it at the same time
    //fs.rename(new Path("hdfs://hadoop102/code/myhadoop_rename.sh"),
    //        new Path("hdfs://hadoop102/myhadoop_remove_and_rename.sh"));

    // 3. Rename a directory
    fs.rename(new Path("hdfs://hadoop102/code"), new Path("hdfs://hadoop102/code_rename"));
}
(6) View file details
View the file name, permission, length, and block information.
// View file details
// (requires import org.apache.hadoop.fs.RemoteIterator and org.apache.hadoop.fs.LocatedFileStatus)
@Test
public void testFileDetail() throws IOException {
    // Parameter 1: path to list
    // Parameter 2: whether to list recursively
    // Get an iterator over all files
    RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("hdfs://hadoop102/"), true);
    // Traverse the file information
    while (listFiles.hasNext()) {
        LocatedFileStatus fileStatus = listFiles.next();
        System.out.println("-----------------------------------------");
        System.out.println(fileStatus.toString());
        System.out.println("-----------------------------------------");
    }
}
The toString method of FileStatus, for reference:
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append(this.getClass().getSimpleName());
    sb.append("{");
    sb.append("path=" + this.path);
    sb.append("; isDirectory=" + this.isdir);
    if (!this.isDirectory()) {
        sb.append("; length=" + this.length);
        sb.append("; replication=" + this.block_replication);
        sb.append("; blocksize=" + this.blocksize);
    }
    sb.append("; modification_time=" + this.modification_time);
    sb.append("; access_time=" + this.access_time);
    sb.append("; owner=" + this.owner);
    sb.append("; group=" + this.group);
    sb.append("; permission=" + this.permission);
    sb.append("; isSymlink=" + this.isSymlink());
    if (this.isSymlink()) {
        try {
            sb.append("; symlink=" + this.getSymlink());
        } catch (IOException var3) {
            throw new RuntimeException("Unexpected exception", var3);
        }
    }
    sb.append("; hasAcl=" + this.hasAcl());
    sb.append("; isEncrypted=" + this.isEncrypted());
    sb.append("; isErasureCoded=" + this.isErasureCoded());
    sb.append("}");
    return sb.toString();
}
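To print the individual fields named at the start of this section (name, permission, length, block information) rather than the whole toString output, a minimal sketch that fits inside the while loop above (getBlockLocations is available because listFiles returns LocatedFileStatus; requires org.apache.hadoop.fs.BlockLocation and java.util.Arrays):

// Sketch: read individual fields of the LocatedFileStatus inside the loop
System.out.println("name:       " + fileStatus.getPath().getName());
System.out.println("permission: " + fileStatus.getPermission());
System.out.println("owner:      " + fileStatus.getOwner());
System.out.println("length:     " + fileStatus.getLen());
// Block locations: which DataNodes hold each block of the file
BlockLocation[] blockLocations = fileStatus.getBlockLocations();
System.out.println("blocks:     " + Arrays.toString(blockLocations));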
(7) File and folder judgment
// Determine whether each entry is a folder or a file
// (requires import org.apache.hadoop.fs.FileStatus)
@Test
public void testFile() throws IOException {
    // Get the status of every entry under the path
    FileStatus[] fileStatuses = fs.listStatus(new Path("hdfs://hadoop102/"));
    // Traverse the entries
    for (FileStatus fileStatus : fileStatuses) {
        if (fileStatus.isFile()) {
            System.out.println("this is file--" + fileStatus.toString());
            System.out.println("-----------------------------------------");
        } else {
            System.out.println("this is path--" + fileStatus.toString());
            System.out.println("-----------------------------------------");
        }
    }
}