import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
public class Test {

    private static Configuration configuration;
    private static FileSystem fileSystem;
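
    // Obtain a FileSystem handle for the cluster whose NameNode listens at hdfs://Master:9000.
    // Adjust the URI if the target cluster's fs.defaultFS setting differs.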
    public static FileSystem getFileSystem() throws IOException, URISyntaxException {
        configuration = new Configuration();
        URI uri = new URI("hdfs://Master:9000");
        fileSystem = FileSystem.get(uri, configuration);
        return fileSystem;
    }
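
    // Note: each helper below closes the shared FileSystem when it finishes,
    // so call getFileSystem() again before invoking another operation in the same run.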

    // Create a directory on HDFS
    public static void mkdir(String creatStringPath) throws IllegalArgumentException, IOException {
        Path creatPath = new Path(creatStringPath);
        fileSystem.mkdirs(creatPath);
        System.out.println("Directory created successfully");
        fileSystem.close();
    }

    // Upload a local file to HDFS
    public static void copyToHdfs(String work, String aim) throws IOException {
        Path workPath = new Path(work);
        Path aimPath = new Path(aim);
        fileSystem.copyFromLocalFile(workPath, aimPath);
        fileSystem.close();
    }

    // Download a file from HDFS to the local file system
    public static void getFile(String work, String aim) throws IOException {
        Path workPath = new Path(work);
        Path aimPath = new Path(aim);
        fileSystem.copyToLocalFile(workPath, aimPath);
        fileSystem.close();
    }

    // List all files and directories under the given path
    public static void listAllFile(String aim) throws FileNotFoundException, IOException {
        Path aimPath = new Path(aim);
        FileStatus[] status = fileSystem.listStatus(aimPath);
        Path[] listedPaths = FileUtil.stat2Paths(status);
        for (int i = 0; i < listedPaths.length; i++) {
            System.out.println(listedPaths[i]);
        }
        fileSystem.close();
    }

    // Print the block locations of the given file
    public static void findLocal(String work) throws IOException {
        Path workPath = new Path(work);
        FileStatus fileStatus = fileSystem.getFileStatus(workPath);
        BlockLocation[] blockLocations = fileSystem.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
        for (int i = 0; i < blockLocations.length; i++) {
            // Print only the first replica's host for each block
            String[] hosts = blockLocations[i].getHosts();
            System.out.println(i + "-" + hosts[0]);
        }
        fileSystem.close();
    }

    // Delete a file or directory (recursive delete)
    public static void delFile(String work) throws IOException {
        Path workPath = new Path(work);
        fileSystem.delete(workPath, true);
        fileSystem.close();
    }
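
    // The cast below succeeds only when the FileSystem was obtained with an hdfs:// URI;
    // for any other scheme it would throw a ClassCastException.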

    // Print the DataNodes of the HDFS cluster
    public static void getHDFSNodes() throws IOException {
        DistributedFileSystem distributedFileSystem = (DistributedFileSystem) fileSystem;
        DatanodeInfo[] datanodeInfos = distributedFileSystem.getDataNodeStats();
        for (DatanodeInfo datanodeInfo : datanodeInfos) {
            System.out.println(datanodeInfo.getHostName());
        }
        fileSystem.close();
    }

    public static void main(String[] args) throws IOException, URISyntaxException {
        getFileSystem();
        // Available operations
        // 1 ******** Create a directory
        // mkdir("/javaAPITest");
        // 2 ******** Upload a file to HDFS
        // copyToHdfs("/Users/hadoop/Downloads/duet.app", "/javaAPITest");
        // 3 ******** Download a file from HDFS
        // getFile("/javaAPITest/duet.app", "/Users/hadoop/Documents");
        // 4 ******** List all files under a directory
        // listAllFile("/");
        // 5 ******** Get the block locations of a given file
        // findLocal("/javaAPITest/duet.app/Contents/Info.plist");
        // 6 ******** Delete a file or directory
        // delFile("/javaAPITest");
        // 7 ******** Get HDFS cluster node information
        getHDFSNodes();
    }
}
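
// To compile and run (a sketch, assuming the hadoop command and client libraries are installed locally):
//   javac -cp $(hadoop classpath) Test.java
//   java -cp $(hadoop classpath):. Test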