日韩性视频-久久久蜜桃-www中文字幕-在线中文字幕av-亚洲欧美一区二区三区四区-撸久久-香蕉视频一区-久久无码精品丰满人妻-国产高潮av-激情福利社-日韩av网址大全-国产精品久久999-日本五十路在线-性欧美在线-久久99精品波多结衣一区-男女午夜免费视频-黑人极品ⅴideos精品欧美棵-人人妻人人澡人人爽精品欧美一区-日韩一区在线看-欧美a级在线免费观看

歡迎訪問 生活随笔!

生活随笔

當前位置: 首頁 > 编程资源 > 编程问答 > 内容正文

编程问答

【Hadoop】HDFS客户端开发示例

發布時間:2025/5/22 编程问答 27 豆豆
生活随笔 收集整理的這篇文章主要介紹了 【Hadoop】HDFS客户端开发示例 小編覺得挺不錯的,現在分享給大家,幫大家做個參考.

1、原理、步驟

?

2、HDFS客戶端示例代碼

package com.ares.hadoop.hdfs;import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.net.URISyntaxException;import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.LocatedFileStatus; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RemoteIterator; import org.apache.log4j.Logger; import org.junit.After; import org.junit.Before; import org.junit.Test;public class HDFSTest {private static final Logger LOGGER = Logger.getLogger(HDFSTest.class);private FileSystem fs = null;@Beforepublic void setUp() throws IOException, URISyntaxException, InterruptedException {Configuration conf = new Configuration(); // conf.set("fs.defaultFS", "hdfs://HADOOP-NODE1:9000"); // URI uri = new URI("hdfs://HADOOP-NODE1:9000"); // this.fs = FileSystem.get(uri, conf, "HADOOP");this.fs = FileSystem.get(conf);}@Afterpublic void tearDown() throws IOException {// TODO Auto-generated method stubthis.fs.close();}@Testpublic void testGET() throws IOException {// TODO Auto-generated method stub LOGGER.debug("HDFSTest: GET FILE TEST");Path path = new Path("hdfs://HADOOP-NODE1:9000/jdk-7u60-linux-x64.tar.gz");FSDataInputStream fsDataInputStream = this.fs.open(path);FileOutputStream fileOutputStream = new FileOutputStream("./testdata/get-test-jdk.tar.gz");IOUtils.copy(fsDataInputStream, fileOutputStream);}@Testpublic void testPUT() throws IOException {// TODO Auto-generated method stub LOGGER.debug("HDFSTest: PUT FILE TEST");Path path = new Path("hdfs://HADOOP-NODE1:9000/put-test-jdk.tar.gz");FSDataOutputStream fsDataOutputStream = this.fs.create(path);FileInputStream fileInputStream = new FileInputStream("./testdata/test-jdk.tar.gz");IOUtils.copy(fileInputStream, fsDataOutputStream);}@Testpublic void testGET_NEW() 
throws IOException {// TODO Auto-generated method stub LOGGER.debug("HDFSTest: GET_NEW FILE TEST");Path src = new Path("hdfs://HADOOP-NODE1:9000/jdk-7u60-linux-x64.tar.gz");Path dst = new Path("./testdata/get-test-new-jdk.tar.gz");this.fs.copyToLocalFile(src, dst);}@Testpublic void testPUT_NEW() throws IOException {// TODO Auto-generated method stub LOGGER.debug("HDFSTest: PUT_NEW FILE TEST");Path src = new Path("./testdata/test-jdk.tar.gz");Path dst = new Path("hdfs://HADOOP-NODE1:9000/put-test-new-jdk.tar.gz");this.fs.copyFromLocalFile(src , dst);}@Testpublic void testMKDIR() throws IOException {// TODO Auto-generated method stub LOGGER.debug("HDFSTest: MKDIR TEST");Path f = new Path("/mkdir-test/testa/testb");this.fs.mkdirs(f);}@Testpublic void testRM() throws IOException {// TODO Auto-generated method stub LOGGER.debug("HDFSTest: RM TEST");Path f = new Path("/mkdir-test");this.fs.delete(f, true);}@Testpublic void testLIST() throws IOException {// TODO Auto-generated method stub LOGGER.debug("HDFSTest: LIST TEST");Path f = new Path("/");//LIST FILESRemoteIterator<LocatedFileStatus> files = this.fs.listFiles(f, true);while (files.hasNext()) {LocatedFileStatus file = files.next();LOGGER.debug(file.getPath());LOGGER.debug(file.getPath().getName());}//LIST DIRSFileStatus[] files2 = this.fs.listStatus(f); // for (int i = 0; i < files2.length; i++) { // LOGGER.debug(files2[i].getPath().getName()); // }for (FileStatus fileStatus : files2) {LOGGER.debug(fileStatus.getPath().getName());LOGGER.debug(fileStatus.isDirectory());}} }

?

總結

以上是生活随笔為你收集整理的【Hadoop】HDFS客户端开发示例的全部內容,希望文章能夠幫你解決所遇到的問題。

如果覺得生活随笔網站內容還不錯,歡迎將生活随笔推薦給好友。