日韩性视频-久久久蜜桃-www中文字幕-在线中文字幕av-亚洲欧美一区二区三区四区-撸久久-香蕉视频一区-久久无码精品丰满人妻-国产高潮av-激情福利社-日韩av网址大全-国产精品久久999-日本五十路在线-性欧美在线-久久99精品波多结衣一区-男女午夜免费视频-黑人极品ⅴideos精品欧美棵-人人妻人人澡人人爽精品欧美一区-日韩一区在线看-欧美a级在线免费观看

歡迎訪問 生活随笔!

生活随笔

當前位置: 首頁 > 运维知识 > windows >内容正文

windows

Java操作HDFS文件系统

發布時間:2024/9/16 windows 46 豆豆
生活随笔 收集整理的這篇文章主要介紹了 Java操作HDFS文件系统 小編覺得挺不錯的,現在分享給大家,幫大家做個參考.

對于操作HDFS文件系統,需要有一個入口,對于Hadoop來說,編程入口就是FileSystem。
例如我們要使用API創建一個文件夾:

/*** @author vincent* @time 2019-10-14 22:39* 使用Java API操作HDFS文件系統* 關鍵點:* 1. 創建Configuration* 2. 獲取FileSystem* 3. HDFS API操作*/ public class HDFSApp {public static void main(String[] args) throws IOException, URISyntaxException, InterruptedException {// hadoop 鏈接Configuration configuration = new Configuration();FileSystem fileSystem = FileSystem.get(new URI("hdfs://vmware-ubuntu-1:9000"),configuration,"duandingyang");Path path = new Path("/hdfsapi/test");boolean result = fileSystem.mkdirs(path);System.out.println(result);} }

FileSystem.get(new URI("hdfs://vmware-ubuntu-1:9000"), configuration, "duandingyang"); 第三個參數是用戶名,必須要寫(需與代碼中實際使用、且在HDFS上有權限的用戶一致),否則提示沒有權限

常用的API操作

package hadoop.hdfs;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.*;
import java.net.URI;
import java.net.URISyntaxException;

/**
 * Exercises common HDFS file-system operations through the Java API.
 *
 * Key steps for every operation:
 *   1) create a Configuration
 *   2) obtain a FileSystem
 *   3) call the HDFS API
 */
public class HDFSAPP {
    public static final String HDFS_PATH = "hdfs://swarm-worker1:9000";

    FileSystem fileSystem = null;
    Configuration configuration = null;

    /**
     * Builds a client for the target HDFS cluster.
     * First argument: HDFS URI; second: client configuration;
     * third: identity (user name) to act as.
     *
     * @throws URISyntaxException if the HDFS URI is malformed
     */
    @Before
    public void setUp() throws URISyntaxException, IOException, InterruptedException {
        System.out.println("--------setUp---------");
        configuration = new Configuration();
        // Single replica — suitable for the small test cluster used here.
        configuration.set("dfs.replication", "1");
        // Reuse the declared constant instead of duplicating the URI string.
        fileSystem = FileSystem.get(new URI(HDFS_PATH), configuration, "iie4bu");
    }

    /**
     * Creates a directory tree on HDFS.
     *
     * @throws IOException on communication failure
     */
    @Test
    public void mkdir() throws IOException {
        Path path = new Path("/hdfsapi/test/myDir");
        boolean mkdirs = fileSystem.mkdirs(path);
        System.out.println(mkdirs);
    }

    /**
     * Prints the content of an HDFS file to stdout.
     * try-with-resources guarantees the stream closes even if the copy throws
     * (the original closed it only on the success path).
     */
    @Test
    public void text() throws IOException {
        try (FSDataInputStream in = fileSystem.open(new Path("/hdfsapi/test/a.txt"))) {
            IOUtils.copyBytes(in, System.out, 1024);
        }
    }

    /**
     * Creates a small file on HDFS and writes a UTF string into it.
     *
     * @throws Exception on failure
     */
    @Test
    public void create() throws Exception {
        try (FSDataOutputStream out = fileSystem.create(new Path("/hdfsapi/test/b.txt"))) {
            out.writeUTF("hello world b");
            out.flush();
        }
    }

    /** Shows the effective replication factor set in setUp(). */
    @Test
    public void testReplication() {
        System.out.println(configuration.get("dfs.replication"));
    }

    /**
     * Renames (moves) a file within HDFS.
     *
     * @throws Exception on failure
     */
    @Test
    public void rename() throws Exception {
        Path src = new Path("/hdfsapi/test/b.txt");
        Path dst = new Path("/hdfsapi/test/c.txt");
        boolean rename = fileSystem.rename(src, dst);
        System.out.println(rename);
    }

    /**
     * Small file: copies a local file to HDFS
     * (E:/test/uid_person.txt -> /hdfsapi/test/myDir/).
     *
     * @throws Exception on failure
     */
    @Test
    public void copyFromLocalFile() throws Exception {
        Path src = new Path("E:/test/uid_person.txt");
        Path dst = new Path("/hdfsapi/test/myDir/");
        fileSystem.copyFromLocalFile(src, dst);
    }

    /**
     * Large file: copies a local file to HDFS with a progress callback
     * (a dot is printed per progress tick).
     * Both streams are now closed via try-with-resources — the original
     * leaked the local input stream and the HDFS output stream.
     *
     * @throws Exception on failure
     */
    @Test
    public void copyFromLocalBigFile() throws Exception {
        try (InputStream in = new BufferedInputStream(
                     new FileInputStream(new File("E:/tools/linux/jdk-8u101-linux-x64.tar.gz")));
             FSDataOutputStream out = fileSystem.create(
                     new Path("/hdfsapi/test/jdk.tar.gz"),
                     new Progressable() {
                         public void progress() {
                             System.out.print(".");
                         }
                     })) {
            IOUtils.copyBytes(in, out, 4096);
        }
    }

    /**
     * Downloads an HDFS file to the local file system.
     *
     * @throws Exception on failure
     */
    @Test
    public void copyToLocalFile() throws Exception {
        Path src = new Path("/hdfsapi/test/c.txt");
        Path dst = new Path("E:/test/a.txt");
        fileSystem.copyToLocalFile(src, dst);
    }

    /**
     * Lists the direct children of a directory (non-recursive).
     *
     * @throws Exception on failure
     */
    @Test
    public void listFile() throws Exception {
        Path path = new Path("/hdfsapi/test/");
        FileStatus[] fileStatuses = fileSystem.listStatus(path);
        for (FileStatus file : fileStatuses) {
            String isDir = file.isDirectory() ? "文件夾" : "文件";
            String permission = file.getPermission().toString();
            short replication = file.getReplication();
            long len = file.getLen();
            String stringPath = file.getPath().toString();
            System.out.println("isDir:" + isDir + ", " + "permission: " + permission + ", "
                    + "replication: " + replication + " , len: " + len + ", stringPath" + stringPath);
        }
    }

    /**
     * Recursively lists all files under a directory.
     *
     * @throws Exception on failure
     */
    @Test
    public void listFileRecursive() throws Exception {
        Path path = new Path("/hdfsapi/test/");
        RemoteIterator<LocatedFileStatus> files = fileSystem.listFiles(path, true);
        while (files.hasNext()) {
            LocatedFileStatus file = files.next();
            String isDir = file.isDirectory() ? "文件夾" : "文件";
            String permission = file.getPermission().toString();
            short replication = file.getReplication();
            long len = file.getLen();
            String stringPath = file.getPath().toString();
            System.out.println("isDir:" + isDir + ", " + "permission: " + permission + ", "
                    + "replication: " + replication + " , len: " + len + ", stringPath" + stringPath);
        }
    }

    /**
     * Prints the block locations of a file (datanode name, offset, length
     * per replica of each block).
     *
     * @throws Exception on failure
     */
    @Test
    public void getFileBlockLocations() throws Exception {
        Path path = new Path("/hdfsapi/test/jdk.tar.gz");
        FileStatus fileStatus = fileSystem.getFileStatus(path);
        BlockLocation[] blocks = fileSystem.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
        for (BlockLocation block : blocks) {
            for (String name : block.getNames()) {
                System.out.println(name + ": " + block.getOffset() + ": " + block.getLength());
            }
        }
    }

    /**
     * Deletes a path (recursively, per the second argument).
     *
     * @throws Exception on failure
     */
    @Test
    public void delete() throws Exception {
        boolean delete = fileSystem.delete(new Path("/hdfsapi/test/a.txt"), true);
        System.out.println(delete);
    }

    /** Releases the client. The original nulled the handle without closing it. */
    @After
    public void tearDown() {
        if (fileSystem != null) {
            try {
                fileSystem.close();
            } catch (IOException e) {
                // best-effort cleanup in test teardown; report but don't fail
                e.printStackTrace();
            }
        }
        configuration = null;
        fileSystem = null;
        System.out.println("--------tearDown---------");
    }
}

總結

以上是生活随笔為你收集整理的Java操作HDFS文件系统的全部內容,希望文章能夠幫你解決所遇到的問題。

如果覺得生活随笔網站內容還不錯,歡迎將生活随笔推薦給好友。