HDFS Global Status Information
Command: bin/hadoop dfsadmin -report
This gives us a global status report with basic information about the HDFS cluster, as well as some details for each machine.
The operations described so far were all performed locally against HDFS, that is, from an Ubuntu machine with a Hadoop environment configured. A client can also operate on HDFS remotely, for example from a Windows system, and the principle is essentially the same: as long as the IP and port that the cluster's NameNode exposes are reachable, HDFS can be accessed.
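For example, a summary similar to the dfsadmin -report output shown above can be fetched from a remote client using only that address. The following is a minimal sketch rather than part of the sample that follows: the class name ClusterReport is made up for illustration, the NameNode URI hdfs://192.168.1.11:9000 matches the one used in the code below, and FileSystem.getStatus() assumes a Hadoop 2.x or newer client library.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsStatus;

public class ClusterReport {
    public static void main(String[] args) throws Exception {
        // Only the NameNode's externally exposed IP and port are needed.
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create("hdfs://192.168.1.11:9000"), conf);
        FsStatus status = fs.getStatus();
        System.out.println("capacity : " + status.getCapacity());
        System.out.println("used     : " + status.getUsed());
        System.out.println("remaining: " + status.getRemaining());
        fs.close();
    }
}

The class below walks through the more common file operations (upload, read, append, delete, and directory listing) against the same NameNode.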
import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.util.Progressable;

/**
 * Operations on HDFS.
 * @author yujing
 */
public class Write {
    public static void main(String[] args) {
        try {
            uploadTohdfs();
            readHdfs();
            getDirectoryFromHdfs();
        } catch (FileNotFoundException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    /** Upload a local file to HDFS. */
    public static void uploadTohdfs() throws FileNotFoundException, IOException {
        String localSrc = "D:/qq.txt";
        String dst = "hdfs://192.168.1.11:9000/usr/yujing/test.txt";
        InputStream in = new BufferedInputStream(new FileInputStream(localSrc));
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        OutputStream out = fs.create(new Path(dst), new Progressable() {
            public void progress() {
                System.out.println(".");
            }
        });
        // The last argument tells copyBytes to close both streams when it is done.
        IOUtils.copyBytes(in, out, 4096, true);
        System.out.println("Upload finished");
    }
    /** Read a file from HDFS and save it to a local file. */
    private static void readHdfs() throws FileNotFoundException, IOException {
        String dst = "hdfs://192.168.1.11:9000/usr/yujing/test.txt";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        FSDataInputStream hdfsInStream = fs.open(new Path(dst));
        OutputStream out = new FileOutputStream("D:/qq-hdfs.txt");
        byte[] ioBuffer = new byte[1024];
        int readLen = hdfsInStream.read(ioBuffer);
        while (-1 != readLen) {
            out.write(ioBuffer, 0, readLen);
            readLen = hdfsInStream.read(ioBuffer);
        }
        System.out.println("Read finished");
        out.close();
        hdfsInStream.close();
        fs.close();
    }
    /**
     * Append content to the end of a file on HDFS. Note: for appends to work,
     * append support must be enabled in hdfs-site.xml by adding
     * <property><name>dfs.support.append</name><value>true</value></property>.
     */
    private static void appendToHdfs() throws FileNotFoundException,
            IOException {
        String dst = "hdfs://192.168.1.11:9000/usr/yujing/test.txt";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        FSDataOutputStream out = fs.append(new Path(dst));
        // Write the message bytes to the end of the file.
        byte[] data = "zhangzk add by hdfs java api".getBytes();
        out.write(data, 0, data.length);
        out.close();
        fs.close();
    }
    /** Delete a file or directory on HDFS. */
    private static void deleteFromHdfs() throws FileNotFoundException,
            IOException {
        String dst = "hdfs://192.168.1.11:9000/usr/yujing";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        // The path marked with deleteOnExit is removed when the FileSystem is closed.
        fs.deleteOnExit(new Path(dst));
        fs.close();
    }
    /** List the files and directories under an HDFS path. */
    private static void getDirectoryFromHdfs() throws FileNotFoundException,
            IOException {
        String dst = "hdfs://192.168.1.11:9000/usr/yujing";
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(URI.create(dst), conf);
        FileStatus fileList[] = fs.listStatus(new Path(dst));
        int size = fileList.length;
        for (int i = 0; i < size; i++) {
            System.out.println("name: " + fileList[i].getPath().getName()
                    + "\t\tsize: " + fileList[i].getLen());
        }
        fs.close();
    }
}
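Note that main() only exercises uploadTohdfs(), readHdfs(), and getDirectoryFromHdfs(); appendToHdfs() and deleteFromHdfs() follow the same pattern and can be called the same way, with append additionally requiring the dfs.support.append setting described in its comment to be enabled on the cluster.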
We can view the cluster's overall information in a browser through the NameNode web UI at http://<NameNode IP>:50070, including the files we uploaded to HDFS (port 50030 serves the MapReduce JobTracker UI).