Github user xunzhang commented on a diff in the pull request:

    https://github.com/apache/incubator-hawq/pull/828#discussion_r72932215
  
    --- Diff: src/test/feature/lib/hdfs_config.h ---
    @@ -0,0 +1,172 @@
    +#ifndef HAWQ_SRC_TEST_FEATURE_LIB_HDFS_CONFIG_H_
    +#define HAWQ_SRC_TEST_FEATURE_LIB_HDFS_CONFIG_H_
    +
    +#include <string>
    +#include <vector>
    +
    +#include "psql.h"
    +#include "sql_util.h"
    +#include "xml_parser.h"
    +
    +namespace hawq {
    +namespace test {
    +
    +/**
    + * HdfsConfig common library. Gets detailed information about HDFS,
    + * including the state of namenodes and datanodes, and gets/sets parameter values.
    + * @author Chunling Wang
    + */
    +class HdfsConfig {
    +  public:
    +    /**
    +     * HdfsConfig constructor
    +     */
    +    HdfsConfig(): psql(HAWQ_DB, HAWQ_HOST, HAWQ_PORT, HAWQ_USER, HAWQ_PASSWORD) {}
    +
    +    /**
    +     * HdfsConfig destructor
    +     */
    +    ~HdfsConfig() {}
    +
    +    /**
    +     * whether HDFS is in HA mode
    +     * @return true if HDFS is HA
    +     */
    +    bool isHA();
    +
    +    /**
    +     * whether HDFS uses Kerberos
    +     * @return true if HDFS uses Kerberos
    +     */
    +    bool isKerbos();
    +
    +    /**
    +     * whether HDFS supports truncate operation
    +     * @return true if HDFS supports truncate operation
    +     */
    +    bool isTruncate();
    +
    +    /**
    +     * get HADOOP working directory
    +     * @return HADOOP working directory
    +     */
    +    std::string getHadoopHome();
    +
    +    /**
    +     * get HDFS active namenode's hostname and port information
    +     * @param activenamenode, active namenode hostname reference which will be set
    +     * @param port, active namenode port reference which will be set
    +     * @return true if getActiveNamenode succeeded
    +     */
    +    bool getActiveNamenode(std::string &activenamenode,
    +                           int &port);
    +
    +    /**
    +     * get HDFS standby namenode's hostname and port information
    +     * @param standbynamenode, standby namenode hostname reference which will be set
    +     * @param port, standby namenode port reference which will be set
    +     * @return true if getStandbyNamenode succeeded
    +     */
    +    bool getStandbyNamenode(std::string &standbynamenode,
    +                            int &port);
    +
    +    /**
    +     * get HDFS namenode(s) information
    +     * @param namenodes, namenodes' hostnames reference which will be set
    +     * @param port, namenodes' ports reference which will be set
    +     */
    +    void getNamenodes(std::vector<std::string> &namenodes,
    +                      std::vector<int> &port);
    +
    +    /**
    +     * get HDFS datanodes information
    +     * @param datanodelist, datanodes' hostnames reference which will be set
    +     * @param port, datanodes' ports reference which will be set
    +     */
    +    void getDatanodelist(std::vector<std::string> &datanodelist,
    +                         std::vector<int> &port);
    +
    +    /**
    +     * get HDFS active datanodes information
    +     * @param activedatanodes, active datanodes' hostnames reference which will be set
    +     * @param port, active datanodes' ports reference which will be set
    +     */
    +    void getActiveDatanodes(std::vector<std::string> &activedatanodes,
    +                            std::vector<int> &port);
    +
    +    /**
    +     * whether HDFS is in safe mode
    +     * @return true if HDFS is in safe mode
    +     */
    +    bool isSafemode();
    +
    +    /**
    +     * get parameter value from ./etc/hdfs-client.xml or ./etc/hadoop/hdfs-site.xml according to parameter name
    +     * @param parameterName, name of the parameter whose value will be returned
    +     * @param conftype, configuration type: 'hdfs' or 'HDFS' reads from ./etc/hdfs-client.xml, others read from ./etc/hadoop/hdfs-site.xml
    +     * @return parameter value
    +     */
    +    std::string getParameterValue(const std::string &parameterName, const std::string &conftype);
    +    
    +    /**
    +     * get parameter value from ./etc/hadoop/hdfs-site.xml according to parameter name
    +     * @param parameterName, used to get parameter value
    +     * @return parameter value
    +     */
    +    std::string getParameterValue(const std::string &parameterName);
    +
    +    /**
    +     * set parameter value in ./etc/hdfs-client.xml or ./etc/hadoop/hdfs-site.xml according to parameter name
    +     * @param parameterName, name of the parameter whose value will be set
    +     * @param parameterValue, parameter value to be set
    +     * @param conftype, configuration type: 'hdfs' or 'HDFS' writes to ./etc/hdfs-client.xml, others write to ./etc/hadoop/hdfs-site.xml
    +     * @return true if succeeded
    +     */
    +    bool setParameterValue(const std::string &parameterName, const std::string &parameterValue, const std::string &conftype);
    +
    +    /**
    +     * set parameter value in ./etc/hadoop/hdfs-site.xml according to parameter name
    +     * @param parameterName, name of the parameter whose value will be set
    +     * @param parameterValue, parameter value to be set
    +     * @return true if succeeded
    +     */
    +    bool setParameterValue(const std::string &parameterName, const std::string &parameterValue);
    +
    +  private:
    +    /**
    +     * @return yarn user
    --- End diff --
    
    TYPO: it should be `return hdfs user`. Actually, I don't think we should add doxygen-style comments for private interfaces.
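    
    For reference, a minimal usage sketch (not part of this patch) of how a feature test might call this class. It assumes the header above is on the include path and that the feature-test framework already defines the HAWQ_* connection settings used by the default constructor; the `dfs.replication` key below is only an illustrative example.
    
    ```cpp
    #include <iostream>
    #include <string>
    
    #include "hdfs_config.h"
    
    int main() {
      // Default constructor connects via the HAWQ_* settings assumed to be
      // provided by the feature-test framework.
      hawq::test::HdfsConfig hc;
    
      // Cluster-level checks exposed by the helper.
      std::cout << std::boolalpha
                << "HA mode:   " << hc.isHA() << "\n"
                << "safe mode: " << hc.isSafemode() << "\n";
    
      // In HA mode, look up the active namenode host and port.
      std::string host;
      int port = 0;
      if (hc.getActiveNamenode(host, port)) {
        std::cout << "active namenode: " << host << ":" << port << "\n";
      }
    
      // Read a value from ./etc/hadoop/hdfs-site.xml; "dfs.replication" is just
      // an example key, not something the class requires.
      std::cout << "dfs.replication = "
                << hc.getParameterValue("dfs.replication") << "\n";
      return 0;
    }
    ```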

