

Java Code for Hadoop HDFS File Operations

 melodyjian 2017-05-05
1. Create a directory
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class MakeDir {
	public static void main(String[] args) throws IOException {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(conf);
		Path path = new Path("/user/hadoop/data/20130709");
		// mkdirs() creates the directory and any missing parents;
		// create() would produce an empty file instead.
		fs.mkdirs(path);
		fs.close();
	}
}
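A note on configuration: new Configuration() loads core-site.xml and hdfs-site.xml from the classpath, so FileSystem.get(conf) connects to whichever cluster those files describe. If the classpath is not set up, the NameNode address can be given explicitly. A minimal sketch; the address hdfs://namenode:9000 is a placeholder you would replace with your own:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class ExplicitConf {
	public static void main(String[] args) throws IOException {
		Configuration conf = new Configuration();
		// "hdfs://namenode:9000" is a placeholder NameNode address.
		conf.set("fs.defaultFS", "hdfs://namenode:9000");
		FileSystem fs = FileSystem.get(conf);
		// Print the URI of the filesystem we actually connected to.
		System.out.println(fs.getUri());
		fs.close();
	}
}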


2. Delete a directory

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteDir {
	public static void main(String[] args) throws IOException {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(conf);
		
		Path path = new Path("/user/hadoop/data/20130710");
		// 'true' deletes the directory and its contents recursively;
		// the single-argument delete() is deprecated.
		fs.delete(path, true);
		fs.close();
	}
}


3. Write a file

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WriteFile {
	public static void main(String[] args) throws IOException {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(conf);
		Path path = new Path("/user/hadoop/data/write.txt");
		FSDataOutputStream out = fs.create(path);
		// writeUTF() prefixes the string with a two-byte length field.
		out.writeUTF("da jia hao,cai shi zhen de hao!");
		// Close the stream so the data is flushed to HDFS before the
		// FileSystem handle is released.
		out.close();
		fs.close();
	}
}
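To add data to an existing file instead of overwriting it, FileSystem.append() can be used, provided the cluster allows appends (Hadoop 2.x does by default). A minimal sketch reusing the file written above:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class AppendFile {
	public static void main(String[] args) throws IOException {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(conf);
		Path path = new Path("/user/hadoop/data/write.txt");
		// append() opens an existing file for writing at its end.
		FSDataOutputStream out = fs.append(path);
		out.writeUTF("another line");
		out.close();
		fs.close();
	}
}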


4. Read a file

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ReadFile {
	public static void main(String[] args) throws IOException {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(conf);
		Path path = new Path("/user/hadoop/data/write.txt");
		
		if (fs.exists(path)) {
			FSDataInputStream is = fs.open(path);
			FileStatus status = fs.getFileStatus(path);
			// getLen() returns a long; a cast suffices for files that fit in memory.
			byte[] buffer = new byte[(int) status.getLen()];
			is.readFully(0, buffer);
			is.close();
			fs.close();
			// Decode the bytes into text; calling toString() on a byte[]
			// would only print the array's object reference.
			System.out.println(new String(buffer));
		}
	}
}
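Reading a whole file into one byte array only works for files that fit in memory. For larger files, Hadoop's IOUtils.copyBytes() streams the contents to an output stream in fixed-size chunks; a minimal sketch:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

public class StreamFile {
	public static void main(String[] args) throws IOException {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(conf);
		FSDataInputStream is = fs.open(new Path("/user/hadoop/data/write.txt"));
		// Copy in 4 KB chunks; the final 'true' closes both streams when done.
		IOUtils.copyBytes(is, System.out, 4096, true);
		fs.close();
	}
}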


5. Upload a local file to HDFS

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyFromLocalFile {

	public static void main(String[] args) throws IOException {
		
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(conf);
		// Copy the local file into the HDFS directory; the source file
		// is left in place (unlike moveFromLocalFile()).
		Path src = new Path("/home/hadoop/word.txt");
		Path dst = new Path("/user/hadoop/data/");
		fs.copyFromLocalFile(src, dst);
		fs.close();
	}
}
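The reverse operation, downloading a file from HDFS to the local filesystem, is the symmetric call copyToLocalFile(). A minimal sketch; the paths reuse this article's examples:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CopyToLocalFile {
	public static void main(String[] args) throws IOException {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(conf);
		Path src = new Path("/user/hadoop/data/word.txt");
		Path dst = new Path("/home/hadoop/word_copy.txt");
		// Copies the HDFS file down to the local filesystem.
		fs.copyToLocalFile(src, dst);
		fs.close();
	}
}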


6. Delete a file

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class DeleteFile {

	public static void main(String[] args) throws IOException {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(conf);
		
		Path path = new Path("/user/hadoop/data/word.txt");
		// 'false' disallows recursive deletion, the safe choice for a single file.
		fs.delete(path, false);
		fs.close();
	}
}


7. List all subdirectories and files under a given directory

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class GetAllChildFile {
	static Configuration conf = new Configuration();
	
	
	public static void main(String[] args) throws IOException {
		FileSystem fs = FileSystem.get(conf);
		Path path = new Path("/user/hadoop");
		getFile(path,fs);
		//fs.close();
	}
	
	public static void getFile(Path path, FileSystem fs) throws IOException {
		FileStatus[] fileStatus = fs.listStatus(path);
		for (FileStatus status : fileStatus) {
			if (status.isDirectory()) {
				// Recurse into subdirectories; isDir() is the deprecated name.
				getFile(status.getPath(), fs);
			} else {
				System.out.println(status.getPath().toString());
			}
		}
	}

}
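Hand-written recursion works, but the FileSystem API also offers a built-in recursive listing, listFiles(path, true). It returns only files, not the directories themselves; a minimal sketch:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocatedFileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RemoteIterator;

public class ListAllFiles {
	public static void main(String[] args) throws IOException {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(conf);
		// 'true' makes the listing recursive; directories are descended
		// into but not returned as entries themselves.
		RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path("/user/hadoop"), true);
		while (it.hasNext()) {
			System.out.println(it.next().getPath().toString());
		}
		fs.close();
	}
}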


8. Find a file's block locations in the HDFS cluster

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class FindFile {
	
	public static void main(String[] args) throws IOException {	
		getFileLocal();
	}
	
	/**
	 * Prints the block locations of a file in the HDFS cluster.
	 */
	public static void getFileLocal() throws IOException{
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(conf);
		Path path = new Path("/user/hadoop/data/write.txt");
		
		FileStatus status = fs.getFileStatus(path);
		BlockLocation[] locations = fs.getFileBlockLocations(status, 0, status.getLen());
		
		for (int i = 0; i < locations.length; i++) {
			// A block can live on several hosts (one per replica); print each.
			for (String host : locations[i].getHosts()) {
				System.out.println("block_" + i + "_location:" + host);
			}
		}
		fs.close();
	}
	
}


9. List the names of all nodes in the HDFS cluster

package com.hadoop.file;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

public class GetHDFSNode {
	
	public static void main(String[] args) throws IOException {	
		getHDFSNode();
	}
	
	/**
	 * Prints the hostname of every DataNode in the HDFS cluster.
	 */
	public static void getHDFSNode() throws IOException{
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(conf);

		// FileSystem.get() returns a DistributedFileSystem only when the
		// default filesystem is HDFS; otherwise this cast fails at runtime.
		DistributedFileSystem dfs = (DistributedFileSystem) fs;
		DatanodeInfo[] dataNodeStats = dfs.getDataNodeStats();
		
		for(int i=0;i<dataNodeStats.length;i++){
			System.out.println("DataNode_" + i + "_Node:" + dataNodeStats[i].getHostName());
		}
		
	}
	
	
}
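As noted above, the cast assumes fs.defaultFS points at an HDFS cluster; with the out-of-the-box local filesystem it throws ClassCastException. A minimal defensive variant (the class name SafeGetHDFSNode is just for illustration):

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;

public class SafeGetHDFSNode {
	public static void main(String[] args) throws IOException {
		Configuration conf = new Configuration();
		FileSystem fs = FileSystem.get(conf);
		// Only HDFS exposes DataNode information; fail clearly otherwise.
		if (!(fs instanceof DistributedFileSystem)) {
			System.err.println("fs.defaultFS does not point at HDFS: " + fs.getUri());
			return;
		}
		DistributedFileSystem dfs = (DistributedFileSystem) fs;
		for (DatanodeInfo node : dfs.getDataNodeStats()) {
			System.out.println(node.getHostName());
		}
		fs.close();
	}
}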


  
