// ===== Basic operations =====
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
@RunWith(JUnit4.class)
public class HadoopClientTest {

    private FileSystem fileSystem = null;

    /**
     * Builds the HDFS client before every test.
     *
     * <p>Fix: the original used JUnit 5's {@code @BeforeEach}, which the JUnit 4
     * runner ({@code @RunWith(JUnit4.class)}) never invokes — {@code fileSystem}
     * stayed {@code null} and every test failed with a NullPointerException.
     * {@code @Before} is the JUnit 4 lifecycle annotation. The equally inert
     * JUnit 5 {@code @DisplayName} on the class was dropped for the same reason.
     */
    @Before
    public void init() throws URISyntaxException, IOException, InterruptedException {
        Configuration configuration = new Configuration();
        // Single replica and 64 MB blocks — sized for a small test cluster.
        configuration.set("dfs.replication", "1");
        configuration.set("dfs.blocksize", "64m");
        // Connect to the NameNode at hd-even-01:9000 as user "root".
        fileSystem = FileSystem.get(new URI("hdfs://hd-even-01:9000"), configuration, "root");
    }

    /**
     * Copies a local file up to HDFS.
     *
     * <p>On Windows, closing the client reports an error unless the compiled
     * winutils binary is available in the Hadoop bin directory.
     *
     * @throws IOException if the transfer fails
     */
    @Test
    public void copyFileFromLocal() throws URISyntaxException, IOException, InterruptedException {
        // try-with-resources closes the client even when the copy throws
        // (the original leaked the connection on failure).
        try (FileSystem fs = fileSystem) {
            fs.copyFromLocalFile(new Path("C:\\Users\\Administrator\\Desktop\\win10 Activate .txt"), new Path("/even1"));
        }
    }

    /**
     * Downloads a file from HDFS to the local disk. Requires a local Hadoop
     * environment with winutils in the bin directory on Windows.
     *
     * @throws IOException if the transfer fails
     */
    @Test
    public void copyFileToLocal() throws URISyntaxException, IOException, InterruptedException {
        try (FileSystem fs = fileSystem) {
            fs.copyToLocalFile(new Path("/win10 Activate .txt"), new Path("E:/"));
        }
    }

    /**
     * Creates a folder on HDFS.
     *
     * @throws IOException if the directory cannot be created
     */
    @Test
    public void hdfsMkdir() throws IOException {
        try (FileSystem fs = fileSystem) {
            fs.mkdirs(new Path("/even1"));
        }
    }

    /**
     * Moves a file / renames a file.
     *
     * <p>Deliberately left without {@code @Test}: the source and target paths
     * are empty placeholders, so running it as-is would fail.
     *
     * @throws IOException if the rename fails
     */
    public void hdfsRename() throws IOException {
        try (FileSystem fs = fileSystem) {
            fs.rename(new Path(""), new Path(""));
        }
    }

    /**
     * Deletes a file or folder. (Path is an empty placeholder to fill in.)
     *
     * @throws IOException if the delete fails
     */
    @Test
    public void hdfsRm() throws IOException {
        try (FileSystem fs = fileSystem) {
            // The second argument enables recursive deletion of directories.
            fs.delete(new Path(""), true);
        }
    }

    /**
     * Recursively lists the files under an HDFS directory and prints their
     * metadata (path, block size, length, replication, block locations).
     *
     * @throws IOException if the listing fails
     */
    @Test
    public void hdfsLs() throws IOException {
        try (FileSystem fs = fileSystem) {
            // Second argument = true: recurse into sub-directories.
            RemoteIterator<LocatedFileStatus> listFiles = fs.listFiles(new Path("/"), true);
            while (listFiles.hasNext()) {
                LocatedFileStatus locatedFileStatus = listFiles.next();
                System.out.println(" File path: " + locatedFileStatus.getPath());
                System.out.println(" Block size: " + locatedFileStatus.getBlockSize());
                System.out.println(" File length: " + locatedFileStatus.getLen());
                System.out.println(" Number of copies: " + locatedFileStatus.getReplication());
                System.out.println(" Block information: " + Arrays.toString(locatedFileStatus.getBlockLocations()));
            }
        }
    }

    /**
     * Prints, for each entry directly under the root, whether it is a file or
     * a folder.
     *
     * @throws IOException if the listing fails
     */
    @Test
    public void findHdfs() throws IOException {
        try (FileSystem fs = fileSystem) {
            // 1. Fetch the status entries of the root directory.
            FileStatus[] listStatus = fs.listStatus(new Path("/"));
            // 2. Classify each entry.
            for (FileStatus fileStatus : listStatus) {
                if (fileStatus.isFile()) {
                    System.out.println(" Is a file: " + fileStatus.getPath().getName());
                } else if (fileStatus.isDirectory()) {
                    System.out.println(" Is a folder: " + fileStatus.getPath().getName());
                }
            }
        }
    }
}
// ===== File reading and writing =====
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Before;
import org.junit.Test;
import org.junit.jupiter.api.DisplayName;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
@RunWith(JUnit4.class)
public class HadoopReadWriteTest {

    FileSystem fileSystem = null;
    Configuration configuration = null;

    /**
     * Builds the HDFS client before every test. (The JUnit 5 {@code @DisplayName}
     * annotation was dropped: it is inert under the JUnit 4 runner.)
     */
    @Before
    public void init() throws URISyntaxException, IOException, InterruptedException {
        // 1. Load the configuration.
        configuration = new Configuration();
        // 2. Build the client, connecting as user "root".
        fileSystem = FileSystem.get(new URI("hdfs://hd-even-01:9000/"), configuration, "root");
    }

    /**
     * Reads an HDFS file as raw bytes and prints each chunk.
     *
     * <p>Fix: the original ignored the count returned by {@code read(byte[])}
     * and printed the whole 1024-byte buffer, so a short final read echoed
     * stale bytes left over from the previous chunk. Only the bytes actually
     * read are printed now, and all streams are closed even on failure.
     */
    @Test
    public void testReadData() throws IOException {
        try (FileSystem fs = fileSystem;
             FSDataInputStream open = fs.open(new Path("/win10 Activate .txt"))) {
            // Read up to 1024 bytes per iteration.
            byte[] bytes = new byte[1024];
            int read;
            while ((read = open.read(bytes)) != -1) {
                System.out.println(Arrays.toString(Arrays.copyOf(bytes, read)));
            }
        }
    }

    /**
     * Reads a text file line by line through a buffered reader — faster than
     * unbuffered byte reads.
     */
    @Test
    public void testReadData1() throws IOException {
        try (FileSystem fs = fileSystem;
             FSDataInputStream open = fs.open(new Path("/win10 Activate .txt"));
             BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(open, StandardCharsets.UTF_8))) {
            String line;
            while ((line = bufferedReader.readLine()) != null) {
                System.out.println(line);
            }
        }
    }

    /**
     * Seeks to a byte offset and reads from there to the end in 5-byte chunks.
     *
     * <p>Fix: decode only the bytes returned by each read, with an explicit
     * charset — the original converted the entire buffer with the platform
     * default charset, printing leftover bytes on a short read.
     */
    @Test
    public void readSomeData() throws IOException {
        try (FileSystem fs = fileSystem;
             FSDataInputStream open = fs.open(new Path("/win10 Activate .txt"))) {
            // Start reading at byte offset 14.
            open.seek(14);
            byte[] bytes = new byte[5];
            int read;
            while ((read = open.read(bytes)) != -1) {
                System.out.println(new String(bytes, 0, read, StandardCharsets.UTF_8));
            }
        }
    }

    /**
     * Streams a local file into a new HDFS file ({@code create(..., false)}
     * fails if the target already exists).
     *
     * @throws IOException if either side of the copy fails
     */
    @Test
    public void writeData() throws IOException {
        try (FileSystem fs = fileSystem;
             FSDataOutputStream out = fs.create(new Path("/win11.txt"), false);
             FileInputStream in = new FileInputStream(new File("C:\\Users\\Administrator\\Desktop\\xixi.txt"))) {
            byte[] b = new byte[1024];
            int read;
            // Write only the bytes actually read in each chunk.
            while ((read = in.read(b)) != -1) {
                out.write(b, 0, read);
            }
        }
    }

    /**
     * Writes a string directly to a new HDFS file.
     */
    @Test
    public void writeData1() throws IOException {
        try (FileSystem fs = fileSystem;
             FSDataOutputStream out = fs.create(new Path("/aibaobao.txt"), false)) {
            // Explicit charset: bare getBytes() depends on the platform default.
            out.write("wochaoaibaobao".getBytes(StandardCharsets.UTF_8));
        }
    }

    /**
     * Uploads a local file using {@link IOUtils#copyBytes}.
     *
     * @throws IOException if the upload fails
     */
    @Test
    public void putToHdfs() throws IOException {
        try (FileSystem fs = fileSystem;
             // 1. Local input stream.
             FileInputStream in = new FileInputStream(new File("C:\\Users\\Administrator\\Desktop\\xixi.txt"));
             // 2. HDFS output stream.
             FSDataOutputStream out = fs.create(new Path("/haddopPut.txt"), false)) {
            // 3. Copy; try-with-resources replaces the manual closeStream calls.
            IOUtils.copyBytes(in, out, configuration);
        }
    }

    /**
     * Downloads an HDFS file using {@link IOUtils#copyBytes}.
     *
     * @throws IOException if the download fails
     */
    @Test
    public void getFromHdfs() throws IOException {
        try (FileSystem fs = fileSystem;
             // 1. HDFS input stream.
             FSDataInputStream open = fs.open(new Path("/haddopPut.txt"));
             // 2. Local output stream.
             FileOutputStream out = new FileOutputStream(new File("C:\\Users\\Administrator\\Desktop\\haddopPut.txt"))) {
            // 3. Copy; streams are closed automatically, even on failure.
            IOUtils.copyBytes(open, out, configuration);
        }
    }
}