Examples of Using the HDFS Java API (2)

(2) hdfs-site.xml: override the block replication factor (default 3; a value of 1 suits the single-DataNode setup used here).
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<!--
  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License. See accompanying LICENSE file.
-->

<!-- Put site-specific property overrides in this file. -->

<configuration>
    <property>
        <!-- default value 3 -->
        <name>dfs.replication</name>
        <value>1</value>
    </property>
</configuration>
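For reference, the same override can also be made programmatically instead of (or in addition to) hdfs-site.xml; a client-side setting applies to files written by that client. A minimal sketch (ReplicationDemo is a name invented for this example):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class ReplicationDemo {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // same effect as the dfs.replication property above, but only for this client
        conf.set("dfs.replication", "1");
        FileSystem fileSystem = FileSystem.get(conf);
        System.out.println("dfs.replication = " + fileSystem.getConf().get("dfs.replication"));
        fileSystem.close();
    }
}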

(3) The logging configuration file (log4j.properties): the default file is fine as-is. Hadoop needs it to print its log messages; if it is missing, the Eclipse console shows a log4j warning when the program runs.
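In case the default file is not available, a minimal log4j.properties placed on the project classpath might look like the following (an assumption for illustration, not the file from the original article):

# route all INFO-and-above log messages to the console
log4j.rootLogger=INFO, console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.target=System.err
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n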

4. Start the HDFS daemons and create a file in the HDFS file system (this file is read by the Java program in step 5).

[hadoop@hadoop01 hadoop]$ cd /opt/modules/hadoop-2.6.5/
[hadoop@hadoop01 hadoop-2.6.5]$ sbin/start-dfs.sh
17/06/21 22:59:32 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Starting namenodes on [hadoop01.zjl.com]
hadoop01.zjl.com: starting namenode, logging to /opt/modules/hadoop-2.6.5/logs/hadoop-hadoop-namenode-hadoop01.zjl.com.out
hadoop01.zjl.com: starting datanode, logging to /opt/modules/hadoop-2.6.5/logs/hadoop-hadoop-datanode-hadoop01.zjl.com.out
Starting secondary namenodes [0.0.0.0]
0.0.0.0: starting secondarynamenode, logging to /opt/modules/hadoop-2.6.5/logs/hadoop-hadoop-secondarynamenode-hadoop01.zjl.com.out
17/06/21 23:00:06 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
[hadoop@hadoop01 hadoop-2.6.5]$ jps
3987 NameNode
4377 Jps
4265 SecondaryNameNode
4076 DataNode
3135 org.eclipse.equinox.launcher_1.3.201.v20161025-1711.jar
[hadoop@hadoop01 hadoop-2.6.5]$ bin/hdfs dfs -mkdir -p /user/hadoop/mapreduce/wordcount/input
17/06/21 23:07:21 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
[hadoop@hadoop01 hadoop-2.6.5]$ cat wcinput/wc.input
hadoop yarn
hadoop mapreduce
hadoop hdfs
yarn nodemanager
hadoop resourcemanager
[hadoop@hadoop01 hadoop-2.6.5]$ bin/hdfs dfs -put wcinput/wc.input /user/hadoop/mapreduce/wordcount/input/
17/06/21 23:20:40 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
[hadoop@hadoop01 hadoop-2.6.5]$
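The -mkdir and -put commands above have direct Java API equivalents. A minimal sketch (HdfsSetup is a name invented for this example; it assumes core-site.xml/hdfs-site.xml are on the classpath, as in step 5 below):

package com.zjl.myhadoop;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsSetup {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        // equivalent of: bin/hdfs dfs -mkdir -p /user/hadoop/mapreduce/wordcount/input
        fs.mkdirs(new Path("/user/hadoop/mapreduce/wordcount/input"));
        // equivalent of: bin/hdfs dfs -put wcinput/wc.input /user/hadoop/mapreduce/wordcount/input/
        fs.copyFromLocalFile(new Path("/opt/modules/hadoop-2.6.5/wcinput/wc.input"),
                new Path("/user/hadoop/mapreduce/wordcount/input/"));
        fs.close();
    }
}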

5. Java code

package com.zjl.myhadoop;

import java.io.File;
import java.io.FileInputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

/**
 *
 * @author hadoop
 *
 */
public class HdfsApp {
   
    /**
     * get the file system
     * @return a FileSystem client for the configured hdfs
     * @throws Exception
     */
    public static FileSystem getFileSystem() throws Exception{
        //read configuration from the classpath:
        //core-default.xml, core-site.xml, hdfs-default.xml, hdfs-site.xml
        Configuration conf = new Configuration();
        //create file system
        FileSystem fileSystem = FileSystem.get(conf);
        return fileSystem;
    }
   
    /**
     * read a file from the hdfs file system and write it to the console
     * @param fileName path of the file in hdfs
     * @throws Exception
     */
    public static void read(String fileName) throws Exception {
        //read path
        Path readPath = new Path(fileName);
        //get file system
        FileSystem fileSystem = getFileSystem();
        //open file
        FSDataInputStream inStream = fileSystem.open(readPath);
        try{
            //read file
            IOUtils.copyBytes(inStream, System.out, 4096, false);
        }catch (Exception e) {
            e.printStackTrace();
        }finally {
            //io close
            IOUtils.closeStream(inStream);
        }
    }
   
    /**
     * upload a file from the local file system to the hdfs file system
     * @param inFileName path of the local source file
     * @param outFileName destination path in hdfs
     * @throws Exception
     */
    public static void upload(String inFileName, String outFileName) throws Exception {
       
        //file input stream,local file
        FileInputStream inStream = new FileInputStream(new File(inFileName));
       
        //get file system
        FileSystem fileSystem = getFileSystem();
        //write path,hdfs file system
        Path writePath = new Path(outFileName);
       
        //output stream
        FSDataOutputStream outStream = fileSystem.create(writePath);
        try{
            //write file
            IOUtils.copyBytes(inStream, outStream, 4096, false);
        }catch (Exception e) {
            e.printStackTrace();
        }finally {
            //io close
            IOUtils.closeStream(inStream);
            IOUtils.closeStream(outStream);
        }
    }
    public static void main(String[] args) throws Exception {
        //1.read file from hdfs to console
//        String fileName = "/user/hadoop/mapreduce/wordcount/input/wc.input";
//        read(fileName);
       
        //2.upload file from local file system to hdfs file system
        //file input stream,local file
        String inFileName = "/opt/modules/hadoop-2.6.5/wcinput/wc.input";
        String outFileName = "/user/hadoop/put-wc.input";
        upload(inFileName, outFileName);
    }
}

6. Call the method read(fileName) (uncomment the first block in main).
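With the file created in step 4, the console output should be the contents of wc.input (possibly preceded by the NativeCodeLoader warning seen earlier):

hadoop yarn
hadoop mapreduce
hadoop hdfs
yarn nodemanager
hadoop resourcemanager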

7. Browse the /user/hadoop directory of the HDFS file system (for example, through the NameNode web UI's file browser).

8. Call upload(inFileName, outFileName), then refresh the page from step 7: the file has been uploaded successfully. A programmatic check is sketched below.
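As an alternative to the web UI, the upload can be verified with FileSystem.listStatus. A small helper that could be added to the HdfsApp class above (verifyUpload is a name invented for this sketch; it needs an additional import of org.apache.hadoop.fs.FileStatus):

    /**
     * list /user/hadoop to confirm that put-wc.input arrived
     */
    public static void verifyUpload() throws Exception {
        FileSystem fileSystem = getFileSystem();
        FileStatus[] entries = fileSystem.listStatus(new Path("/user/hadoop"));
        for (FileStatus entry : entries) {
            System.out.println(entry.getPath() + "\t" + entry.getLen() + " bytes");
        }
    }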
