
MapReduce Basics, Part 8: HDFS File CRUD Operations


Basic code for HDFS file operations: the HdfsCRUD class below wraps the common shell commands (ls, cat, mkdir, put, rename, rm) with the Hadoop FileSystem API.

package com.hive;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

public class HdfsCRUD {

    public static void main(String[] args) {
        // Submit to the cluster with: yarn jar <jar> com.hive.HdfsCRUD
        // ls
        //String srcPath = "/tmp/fjs/dpi1/";
        //HdfsCRUD.list(srcPath);
        // cat
        //String file = "/tmp/fjs/in/test.txt";
        //HdfsCRUD.readFile(file);
        //HdfsCRUD.getModificationTime(file);
        //HdfsCRUD.getBlockLocations(file);
        //HdfsCRUD.getHostnames();
        String dir = "/tmp/fjs/in/hdfs";
        HdfsCRUD.mkdir(dir);
    }

    /** Equivalent of the hadoop fs -ls command. */
    public static void list(String srcPath) {
        Configuration conf = new Configuration();
        try {
            FileSystem fs = FileSystem.get(conf);
            RemoteIterator<LocatedFileStatus> rmIterator = fs.listLocatedStatus(new Path(srcPath));
            while (rmIterator.hasNext()) {
                LocatedFileStatus status = rmIterator.next();
                if (status.isDirectory()) {
                    System.out.println("-----------DirectoryName: " + status.getPath().getName());
                } else if (status.isFile()) {
                    System.out.println("-----------FileName: " + status.getPath().getName());
                }
            }
        } catch (IOException e) {
            System.out.println(e.getMessage());
        }
    }

    /** Equivalent of the hadoop fs -cat command: copies an HDFS file to /tmp on the local disk. */
    public static void readFile(String file) {
        Configuration conf = new Configuration();
        try {
            FileSystem fs = FileSystem.get(conf);
            Path path = new Path(file);
            if (!fs.exists(path)) {
                System.out.println("file '" + file + "' doesn't exist!");
                return;
            }
            FSDataInputStream in = fs.open(path);
            String filename = file.substring(file.lastIndexOf('/') + 1);
            OutputStream out = new BufferedOutputStream(
                    new FileOutputStream(new File("/tmp/" + filename)));
            byte[] b = new byte[1024];
            int numBytes;
            while ((numBytes = in.read(b)) > 0) {
                out.write(b, 0, numBytes);
            }
            in.close();
            out.close();
            fs.close();
        } catch (IOException e) {
            System.out.println(e.getMessage());
        }
    }

    /** Prints the modification time of a file. */
    public static void getModificationTime(String source) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            Path srcPath = new Path(source);
            // Check if the file exists.
            if (!fs.exists(srcPath)) {
                System.out.println("No such destination " + srcPath);
                return;
            }
            // Get the filename out of the file path.
            String filename = source.substring(source.lastIndexOf('/') + 1);
            FileStatus fileStatus = fs.getFileStatus(srcPath);
            // getModificationTime() returns epoch milliseconds; %t formats them as a time.
            long modificationTime = fileStatus.getModificationTime();
            System.out.format("File %s; Modification time: %2$tI:%2$tM:%2$tS%n",
                    filename, modificationTime);
        } catch (IOException e) {
            System.out.println(e.getMessage());
        }
    }

    /** Prints the block locations of a file. */
    public static void getBlockLocations(String source) {
        try {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            Path srcPath = new Path(source);
            if (!fs.exists(srcPath)) {
                System.out.println("No such destination " + srcPath);
                return;
            }
            String filename = source.substring(source.lastIndexOf('/') + 1);
            FileStatus fileStatus = fs.getFileStatus(srcPath);
            BlockLocation[] blkLocations = fs.getFileBlockLocations(fileStatus, 0, fileStatus.getLen());
            System.out.println("File " + filename + " stored at:");
            for (int i = 0; i < blkLocations.length; i++) {
                String[] hosts = blkLocations[i].getHosts();
                System.out.format("Block %d on hosts: %s%n", i, Arrays.toString(hosts));
            }
        } catch (IOException e) {
            System.out.println(e.getMessage());
        }
    }

    /** Gets the DNS hostnames of the datanodes in the Hadoop cluster. */
    public static void getHostnames() {
        try {
            Configuration config = new Configuration();
            FileSystem fs = FileSystem.get(config);
            DistributedFileSystem hdfs = (DistributedFileSystem) fs;
            DatanodeInfo[] dataNodeStats = hdfs.getDataNodeStats();
            for (DatanodeInfo dataNode : dataNodeStats) {
                System.out.println("datanode hostname: " + dataNode.getHostName());
            }
        } catch (IOException e) {
            System.out.println(e.getMessage());
        }
    }

    /** Equivalent of the hadoop fs -mkdir command. */
    public static void mkdir(String dir) {
        Configuration conf = new Configuration();
        FileSystem fs = null;
        try {
            fs = FileSystem.get(conf);
            Path path = new Path(dir);
            if (!fs.exists(path)) {
                fs.mkdirs(path);
                System.out.println("create directory '" + dir + "' successfully!");
            } else {
                System.out.println("directory '" + dir + "' exists!");
            }
        } catch (IOException e) {
            System.out.println("FileSystem.get(conf) failed with an error");
            e.printStackTrace();
        } finally {
            if (fs != null) {
                try {
                    fs.close();
                } catch (IOException e) {
                    System.out.println(e.getMessage());
                    throw new RuntimeException(e);
                }
            }
        }
    }

    /** Uploads a local file to HDFS; equivalent of the hadoop fs -put command. */
    public void copyFromLocal(String source, String dest) {
        Configuration conf = new Configuration();
        FileSystem fs = null;
        try {
            fs = FileSystem.get(conf);
            Path srcPath = new Path(source);
            Path dstPath = new Path(dest);
            // The destination directory must already exist.
            if (!fs.exists(dstPath)) {
                System.out.println("No such destination " + dstPath);
                return;
            }
            // Get the filename out of the file path.
            String filename = source.substring(source.lastIndexOf('/') + 1);
            try {
                // copyFromLocalFile(srcPath, dstPath) throws if the destination file exists;
                // copyFromLocalFile(boolean delSrc, boolean overwrite, Path src, Path dst)
                // keeps the local source (delSrc=false) and overwrites the destination.
                fs.copyFromLocalFile(false, true, srcPath, dstPath);
                System.out.println("File " + filename + " copied to " + dest);
            } catch (Exception e) {
                System.out.println(e.getMessage());
                throw new RuntimeException(e);
            } finally {
                fs.close();
            }
        } catch (IOException e1) {
            System.out.println(e1.getMessage());
            throw new RuntimeException(e1);
        }
    }

    /** Creates a file under the given directory and streams local data into it. */
    public void addFile(String source, String dest) {
        // The Configuration object reads the HDFS configuration parameters.
        Configuration conf = new Configuration();
        FileSystem fs = null;
        try {
            fs = FileSystem.get(conf);
            String filename = source.substring(source.lastIndexOf('/') + 1);
            // Build the destination path including the filename.
            if (dest.charAt(dest.length() - 1) != '/') {
                dest = dest + "/" + filename;
            } else {
                dest = dest + filename;
            }
            // Check if the file already exists.
            Path path = new Path(dest);
            if (fs.exists(path)) {
                System.out.println("File " + dest + " already exists");
                return;
            }
            // Create a new file and copy the local data into it.
            FSDataOutputStream out = fs.create(path);
            InputStream in = new BufferedInputStream(new FileInputStream(new File(source)));
            byte[] b = new byte[1024];
            int numBytes;
            while ((numBytes = in.read(b)) > 0) {
                out.write(b, 0, numBytes);
            }
            in.close();
            out.close();
            fs.close();
        } catch (IOException e) {
            System.out.println(e.getMessage());
            throw new RuntimeException(e);
        }
    }

    /** Renames a file in HDFS. */
    public void renameFile(String fromthis, String tothis) {
        Configuration conf = new Configuration();
        FileSystem fs = null;
        try {
            fs = FileSystem.get(conf);
            Path fromPath = new Path(fromthis);
            Path toPath = new Path(tothis);
            if (!fs.exists(fromPath)) {
                System.out.println("No such destination " + fromPath);
                return;
            }
            if (fs.exists(toPath)) {
                System.out.println("Already exists! " + toPath);
                return;
            }
            try {
                boolean isRenamed = fs.rename(fromPath, toPath);
                if (isRenamed) {
                    System.out.println("Renamed from " + fromthis + " to " + tothis);
                }
            } catch (Exception e) {
                System.out.println(e.getMessage());
                throw new RuntimeException(e);
            } finally {
                fs.close();
            }
        } catch (IOException e1) {
            System.out.println(e1.getMessage());
            throw new RuntimeException(e1);
        }
    }

    /** Deletes a file or directory recursively; equivalent of the hadoop fs -rm -r command. */
    public void deleteFile(String file) {
        Configuration conf = new Configuration();
        FileSystem fs = null;
        try {
            fs = FileSystem.get(conf);
            Path path = new Path(file);
            if (!fs.exists(path)) {
                System.out.println("File " + file + " does not exist");
                return;
            }
            // The second argument makes the delete recursive for directories.
            // Use deleteOnExit(Path) instead to defer deletion until the FileSystem closes.
            fs.delete(path, true);
            fs.close();
        } catch (IOException e) {
            System.out.println(e.getMessage());
            throw new RuntimeException(e);
        }
    }
}
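As a side note, FileSystem implements java.io.Closeable, so on Java 7 or later the manual close()/finally handling above can be replaced with try-with-resources. A minimal sketch under that assumption, using the mkdir case (HdfsMkdirDemo is a hypothetical class name, not part of the original code):

package com.hive;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsMkdirDemo {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        // try-with-resources closes the FileSystem even if mkdirs throws.
        try (FileSystem fs = FileSystem.get(conf)) {
            Path path = new Path("/tmp/fjs/in/hdfs");
            if (!fs.exists(path)) {
                fs.mkdirs(path);
            }
        }
    }
}

To run any of these methods against the cluster, package the class into a jar and submit it from a cluster node, e.g. yarn jar hdfs-crud.jar com.hive.HdfsCRUD (jar name assumed), so that Configuration picks up the cluster's core-site.xml and hdfs-site.xml from the classpath.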

