In this post we'll walk through how to operate on HDFS from Java: creating a directory, then uploading, downloading and deleting a file through Hadoop's FileSystem API. Hopefully you'll come away from the example with something useful.
package hdfs;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

/**
 * Basic HDFS operations from Java: create a directory, then upload, download and delete a file.
 * @author 林
 */
public class App2 {

    public static final String HDFS_PATH = "hdfs://hadoop:9000";
    public static final String DRI_PATH = "/d1000";
    public static final String FILE_PATH = "/d1000/f1000";

    public static void main(String[] args) throws Exception {
        FileSystem fileSystem = FileSystem.get(new URI(HDFS_PATH), new Configuration());
        // Create the directory
        mkDri(fileSystem);
        // Upload a local file
        uploadDate(fileSystem);
        // Download the file to the local disk
        downloadFile(fileSystem);
        // Delete the file
        deleteFile(fileSystem);
        // Release the connection to HDFS
        fileSystem.close();
    }

    // Delete the file; the second argument enables recursive deletion for directories
    public static void deleteFile(FileSystem fileSystem) throws IOException {
        fileSystem.delete(new Path(FILE_PATH), true);
    }

    // Download the file to the local disk
    public static void downloadFile(FileSystem fileSystem) throws IOException {
        FSDataInputStream in = fileSystem.open(new Path(FILE_PATH));
        FileOutputStream out = new FileOutputStream(new File("D:/copy.txt"));
        // IOUtils.copyBytes(in, System.out, 2048, true); // write to the console instead
        IOUtils.copyBytes(in, out, 2048, true); // the final "true" closes both streams
    }

    // Upload a local file to HDFS
    public static void uploadDate(FileSystem fileSystem) throws IOException {
        FSDataOutputStream out = fileSystem.create(new Path(FILE_PATH));
        FileInputStream in = new FileInputStream("D:/hadoop安装.txt");
        IOUtils.copyBytes(in, out, 1024, true); // the final "true" closes both streams
    }

    // Create the directory
    public static void mkDri(FileSystem fileSystem) throws IOException {
        fileSystem.mkdirs(new Path(DRI_PATH));
    }
}
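The listing above only works with a hard-coded path. As a small complementary sketch (reusing the same hdfs://hadoop:9000 address and /d1000 directory; the class name ListDemo is my own addition for illustration, not part of the original code), the snippet below checks whether the directory exists and lists its contents with FileSystem.exists and FileSystem.listStatus before you would, for example, delete it:

package hdfs;

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ListDemo {

    public static void main(String[] args) throws Exception {
        // Same connection setup as App2 above (hypothetical standalone demo class)
        FileSystem fileSystem = FileSystem.get(new URI("hdfs://hadoop:9000"), new Configuration());
        Path dir = new Path("/d1000");
        // Only list the directory if it actually exists
        if (fileSystem.exists(dir)) {
            // listStatus returns one FileStatus entry per child of the directory
            for (FileStatus status : fileSystem.listStatus(dir)) {
                System.out.println((status.isDirectory() ? "d " : "- ")
                        + status.getLen() + " " + status.getPath());
            }
        }
        fileSystem.close();
    }
}

Checking with exists first simply avoids surprises when the path has not been created yet; delete and listStatus themselves will otherwise report the missing path in their own ways.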
That covers the basic HDFS operations from Java. If you have run into a similar question, feel free to use the example above as a reference. For more on related topics, follow the 天达云 industry news channel.