/**
*
*/
package com.hadoop.mn;
import java.io.InputStream;
import java.net.URL;
import org.apache.hadoop.fs.FsUrlStreamHandlerFactory;
import org.apache.hadoop.io.IOUtils;
/**
 * Reads data from a Hadoop URL by opening a stream with a
 * {@link java.net.URL} object and copying it to standard output.
 *
 * <p>Note: {@link URL#setURLStreamHandlerFactory} may be invoked at most once
 * per JVM, so this technique cannot be used if some other component of the
 * application has already registered a {@code URLStreamHandlerFactory}.
 *
 * @author Chase_Sun
 */
public class URLCat {

    // Register Hadoop's stream handler factory so java.net.URL can resolve
    // hdfs:// URLs. Per-JVM one-shot registration — see class Javadoc.
    static {
        URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
    }

    /**
     * Copies the file named by the first command-line argument to stdout.
     *
     * @param args args[0] is the network address of the file on the HDFS
     *             server, e.g. {@code hdfs://localhost/user/root/qualify.txt}
     * @throws Exception if the URL is malformed or the stream cannot be read
     */
    public static void main(String[] args) throws Exception {
        // try-with-resources guarantees the stream is closed even when
        // copyBytes throws, replacing the manual try/finally + closeStream.
        try (InputStream in = new URL(args[0]).openStream()) {
            IOUtils.copyBytes(in, System.out, 4096, false);
        }
    }
}
执行这个类:
hadoop com/hadoop/mn/URLCat hdfs://localhost/user/root/qualify.txt
运行结果:
He sits on the top of tree
but he dose not to get down.
I am so scared of that.