http://git-wip-us.apache.org/repos/asf/hadoop/blob/7a542fb3/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/CMakeLists.txt
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/CMakeLists.txt b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/CMakeLists.txt
deleted file mode 100644
index 3331935..0000000
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/CMakeLists.txt
+++ /dev/null
@@ -1,170 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Delegate some functionality to libhdfs, until libhdfspp is complete.
-set (LIBHDFS_SRC_DIR ../../libhdfs)
-set (LIBHDFS_TESTS_DIR ../../libhdfs-tests)
-set (LIBHDFSPP_SRC_DIR ..)
-set (LIBHDFSPP_LIB_DIR ${LIBHDFSPP_SRC_DIR}/lib)
-set (LIBHDFSPP_BINDING_C ${LIBHDFSPP_LIB_DIR}/bindings/c)
-set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-missing-field-initializers")
-set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-missing-field-initializers")
-
-include_directories(
-    ${GENERATED_JAVAH}
-    ${CMAKE_CURRENT_LIST_DIR}
-    ${CMAKE_BINARY_DIR}
-    ${JNI_INCLUDE_DIRS}
-    ${LIBHDFS_SRC_DIR}/include
-    ${LIBHDFS_SRC_DIR}/..
-    ${LIBHDFS_SRC_DIR}
-    ${OS_DIR}
-)
-add_library(test_common_obj OBJECT mock_connection.cc)
-add_library(test_common $<TARGET_OBJECTS:test_common_obj>)
-
-set(PROTOBUF_IMPORT_DIRS ${PROTO_HADOOP_TEST_DIR})
-
-protobuf_generate_cpp(PROTO_TEST_SRCS PROTO_TEST_HDRS
-  ${PROTO_HADOOP_TEST_DIR}/test.proto
-  ${PROTO_HADOOP_TEST_DIR}/test_rpc_service.proto
-)
-
-# Shamelessly stolen from
-#    http://stackoverflow.com/questions/9303711/how-do-i-make-ctest-run-a-program-with-valgrind-without-dart
-function(add_memcheck_test name binary)
-  add_test(${name} ${binary} ${ARGN})
-
-  if(NOT MEMORYCHECK_COMMAND MATCHES "MEMORYCHECK_COMMAND-NOTFOUND" AND NOT SKIP_VALGRIND)
-    set(memcheck_command "${MEMORYCHECK_COMMAND} ${MEMORYCHECK_COMMAND_OPTIONS}")
-    separate_arguments(memcheck_command)
-    add_test(memcheck_${name} ${memcheck_command} ./${binary} ${ARGN})
-  endif()
-endfunction(add_memcheck_test)
-
-#
-#
-#   UNIT TESTS - TEST SELECTED PARTS OF THE LIBRARY
-#
-#
-
-add_executable(uri_test uri_test.cc)
-target_link_libraries(uri_test common gmock_main ${CMAKE_THREAD_LIBS_INIT})
-add_memcheck_test(uri uri_test)
-
-add_executable(remote_block_reader_test remote_block_reader_test.cc)
-target_link_libraries(remote_block_reader_test test_common reader proto common connection ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} gmock_main ${CMAKE_THREAD_LIBS_INIT})
-add_memcheck_test(remote_block_reader remote_block_reader_test)
-
-add_executable(sasl_digest_md5_test sasl_digest_md5_test.cc)
-target_link_libraries(sasl_digest_md5_test common ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} gmock_main ${CMAKE_THREAD_LIBS_INIT})
-add_memcheck_test(sasl_digest_md5 sasl_digest_md5_test)
-
-add_executable(retry_policy_test retry_policy_test.cc)
-target_link_libraries(retry_policy_test common gmock_main ${CMAKE_THREAD_LIBS_INIT})
-add_memcheck_test(retry_policy retry_policy_test)
-
-include_directories(${CMAKE_CURRENT_BINARY_DIR})
-add_executable(rpc_engine_test rpc_engine_test.cc ${PROTO_TEST_SRCS} ${PROTO_TEST_HDRS})
-target_link_libraries(rpc_engine_test test_common rpc proto common ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} ${SASL_LIBRARIES} gmock_main ${CMAKE_THREAD_LIBS_INIT})
-add_memcheck_test(rpc_engine rpc_engine_test)
-
-add_executable(bad_datanode_test bad_datanode_test.cc)
-target_link_libraries(bad_datanode_test rpc reader proto fs bindings_c rpc proto common reader connection ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} ${SASL_LIBRARIES} gmock_main ${CMAKE_THREAD_LIBS_INIT})
-add_memcheck_test(bad_datanode bad_datanode_test)
-
-add_executable(node_exclusion_test node_exclusion_test.cc)
-target_link_libraries(node_exclusion_test fs gmock_main common ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} ${SASL_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
-add_memcheck_test(node_exclusion node_exclusion_test)
-
-add_executable(configuration_test configuration_test.cc)
-target_link_libraries(configuration_test common gmock_main ${CMAKE_THREAD_LIBS_INIT})
-add_memcheck_test(configuration configuration_test)
-
-add_executable(hdfs_configuration_test hdfs_configuration_test.cc)
-target_link_libraries(hdfs_configuration_test common gmock_main ${CMAKE_THREAD_LIBS_INIT})
-add_memcheck_test(hdfs_configuration hdfs_configuration_test)
-
-add_executable(hdfspp_errors_test hdfspp_errors.cc)
-target_link_libraries(hdfspp_errors_test common gmock_main bindings_c fs rpc proto common reader connection ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} ${SASL_LIBRARIES} gmock_main ${CMAKE_THREAD_LIBS_INIT})
-add_memcheck_test(hdfspp_errors hdfspp_errors_test)
-
-add_executable(hdfs_builder_test hdfs_builder_test.cc)
-target_link_libraries(hdfs_builder_test test_common gmock_main bindings_c fs rpc proto common reader connection ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} ${SASL_LIBRARIES} gmock_main ${CMAKE_THREAD_LIBS_INIT})
-add_memcheck_test(hdfs_builder_test hdfs_builder_test)
-
-add_executable(logging_test logging_test.cc)
-target_link_libraries(logging_test common gmock_main bindings_c fs rpc proto common reader connection ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} ${SASL_LIBRARIES} gmock_main ${CMAKE_THREAD_LIBS_INIT})
-add_memcheck_test(logging_test logging_test)
-
-add_executable(hdfs_ioservice_test hdfs_ioservice_test.cc)
-target_link_libraries(hdfs_ioservice_test fs gmock_main common ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} ${SASL_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
-add_memcheck_test(hdfs_ioservice hdfs_ioservice_test)
-
-add_executable(user_lock_test user_lock_test.cc)
-target_link_libraries(user_lock_test fs gmock_main common ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} ${SASL_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
-add_memcheck_test(user_lock user_lock_test)
-
-add_executable(hdfs_config_connect_bugs_test hdfs_config_connect_bugs.cc)
-target_link_libraries(hdfs_config_connect_bugs_test common gmock_main bindings_c fs rpc proto common reader connection ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} ${SASL_LIBRARIES} ${CMAKE_THREAD_LIBS_INIT})
-add_memcheck_test(hdfs_config_connect_bugs hdfs_config_connect_bugs_test)
-
-
-
-#
-#
-#   INTEGRATION TESTS - TESTS THE FULL LIBRARY AGAINST ACTUAL SERVERS
-#
-#
-# This test requires a great deal of Hadoop Java infrastructure to run.
-#
-if(HADOOP_BUILD)
-
-include_directories (
-  #TODO: Put this in a variable up top and pull it out here
-    ${CMAKE_CURRENT_SOURCE_DIR}/../../libhdfs-tests/
-)
-
-add_library(hdfspp_test_shim_static STATIC hdfs_shim.c libhdfs_wrapper.c libhdfspp_wrapper.cc ${LIBHDFSPP_BINDING_C}/hdfs.cc)
-
-# TODO: get all of the mini dfs library bits here in one place
-# add_library(hdfspp_mini_cluster     native_mini_dfs ${JAVA_JVM_LIBRARY} )
-
-#TODO: Link against full library rather than just parts
-
-build_libhdfs_test(libhdfs_threaded hdfspp_test_shim_static expect.c test_libhdfs_threaded.c ${OS_DIR}/thread.c)
-link_libhdfs_test(libhdfs_threaded hdfspp_test_shim_static fs reader rpc proto common connection ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} native_mini_dfs ${JAVA_JVM_LIBRARY} ${SASL_LIBRARIES} )
-add_libhdfs_test  (libhdfs_threaded hdfspp_test_shim_static)
-
-build_libhdfs_test(hdfspp_mini_dfs_smoke hdfspp_test_shim_static ${CMAKE_CURRENT_LIST_DIR}/hdfspp_mini_dfs_smoke.cc)
-link_libhdfs_test (hdfspp_mini_dfs_smoke hdfspp_test_shim_static fs reader rpc proto common connection gmock_main ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} native_mini_dfs ${JAVA_JVM_LIBRARY} ${SASL_LIBRARIES})
-add_libhdfs_test  (hdfspp_mini_dfs_smoke hdfspp_test_shim_static)
-
-build_libhdfs_test(libhdfs_mini_stress hdfspp_test_shim_static expect.c test_libhdfs_mini_stress.c ${OS_DIR}/thread.c)
-link_libhdfs_test(libhdfs_mini_stress hdfspp_test_shim_static fs reader rpc proto common connection ${PROTOBUF_LIBRARIES} ${OPENSSL_LIBRARIES} native_mini_dfs ${JAVA_JVM_LIBRARY} ${SASL_LIBRARIES})
-add_libhdfs_test(libhdfs_mini_stress hdfspp_test_shim_static)
-
-build_libhdfs_test(hdfs_ext hdfspp_test_shim_static ${CMAKE_CURRENT_LIST_DIR}/hdfs_ext_test.cc)
-link_libhdfs_test (hdfs_ext hdfspp_test_shim_static hdfspp_static gmock_main native_mini_dfs ${JAVA_JVM_LIBRARY}  ${SASL_LIBRARIES})
-add_libhdfs_test  (hdfs_ext hdfspp_test_shim_static)
-
-#build_libhdfs_test(hdfs_config_connect_bugs hdfspp_test_shim_static ${CMAKE_CURRENT_LIST_DIR}/hdfs_config_connect_bugs.cc)
-#link_libhdfs_test (hdfs_config_connect_bugs hdfspp_test_shim_static hdfspp_static gmock_main native_mini_dfs ${JAVA_JVM_LIBRARY}  ${SASL_LIBRARIES})
-#add_libhdfs_test  (hdfs_config_connect_bugs hdfspp_test_shim_static)
-
-endif(HADOOP_BUILD)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7a542fb3/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/bad_datanode_test.cc
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/bad_datanode_test.cc b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/bad_datanode_test.cc
deleted file mode 100644
index 00bbf3d..0000000
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/bad_datanode_test.cc
+++ /dev/null
@@ -1,330 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "fs/filesystem.h"
-#include "fs/bad_datanode_tracker.h"
-#include "common/libhdfs_events_impl.h"
-
-#include "common/util.h"
-
-#include <gmock/gmock.h>
-
-using hadoop::common::TokenProto;
-using hadoop::hdfs::DatanodeInfoProto;
-using hadoop::hdfs::DatanodeIDProto;
-using hadoop::hdfs::ExtendedBlockProto;
-using hadoop::hdfs::LocatedBlockProto;
-using hadoop::hdfs::LocatedBlocksProto;
-
-using ::testing::_;
-using ::testing::InvokeArgument;
-using ::testing::Return;
-
-using namespace hdfs;
-
-class MockReader : public BlockReader {
-public:
-  MOCK_METHOD2(
-      AsyncReadPacket,
-      void(const asio::mutable_buffers_1 &,
-           const std::function<void(const Status &, size_t transferred)> &));
-
-  MOCK_METHOD5(AsyncRequestBlock,
-               void(const std::string &client_name,
-                     const hadoop::hdfs::ExtendedBlockProto *block,
-                     uint64_t length, uint64_t offset,
-                     const std::function<void(Status)> &handler));
-
-  MOCK_METHOD5(AsyncReadBlock, void(
-    const std::string & client_name,
-    const hadoop::hdfs::LocatedBlockProto &block,
-    size_t offset,
-    const MutableBuffers &buffers,
-    const std::function<void(const Status &, size_t)> handler));
-
-  virtual void CancelOperation() override {
-    /* no-op, declared pure virtual */
-  }
-};
-
-class MockDNConnection : public DataNodeConnection, public std::enable_shared_from_this<MockDNConnection> {
-    void Connect(std::function<void(Status status, std::shared_ptr<DataNodeConnection> dn)> handler) override {
-      handler(Status::OK(), shared_from_this());
-    }
-
-  void async_read_some(const MutableBuffers &buf,
-        std::function<void (const asio::error_code & error,
-                               std::size_t bytes_transferred) > handler) override {
-      (void)buf;
-      handler(asio::error::fault, 0);
-  }
-
-  void async_write_some(const ConstBuffers &buf,
-            std::function<void (const asio::error_code & error,
-                                 std::size_t bytes_transferred) > handler) override {
-      (void)buf;
-      handler(asio::error::fault, 0);
-  }
-
-  virtual void Cancel() override {
-    /* no-op, declared pure virtual */
-  }
-};
-
-
-class PartialMockFileHandle : public FileHandleImpl {
-  using FileHandleImpl::FileHandleImpl;
-public:
-  std::shared_ptr<MockReader> mock_reader_ = std::make_shared<MockReader>();
-protected:
-  std::shared_ptr<BlockReader> CreateBlockReader(const BlockReaderOptions &options,
-                                                 std::shared_ptr<DataNodeConnection> dn,
-                                                 std::shared_ptr<hdfs::LibhdfsEvents> event_handlers) override
-  {
-      (void) options; (void) dn; (void) event_handlers;
-    assert(mock_reader_);
-    return mock_reader_;
-  }
-  std::shared_ptr<DataNodeConnection> CreateDataNodeConnection(
-      ::asio::io_service *io_service,
-      const ::hadoop::hdfs::DatanodeInfoProto & dn,
-      const hadoop::common::TokenProto * token) override {
-    (void) io_service; (void) dn; (void) token;
-    return std::make_shared<MockDNConnection>();
-  }
-
-
-};
-
-TEST(BadDataNodeTest, TestNoNodes) {
-  auto file_info = std::make_shared<struct FileInfo>();
-  file_info->file_length_ = 1; //To avoid running into EOF
-  file_info->blocks_.push_back(LocatedBlockProto());
-  LocatedBlockProto & block = file_info->blocks_[0];
-  ExtendedBlockProto *b = block.mutable_b();
-  b->set_poolid("");
-  b->set_blockid(1);
-  b->set_generationstamp(1);
-  b->set_numbytes(4096);
-
-  // Set up the one block to have one datanode holding it
-  DatanodeInfoProto *di = block.add_locs();
-  DatanodeIDProto *dnid = di->mutable_id();
-  dnid->set_datanodeuuid("foo");
-
-  char buf[4096] = {
-      0,
-  };
-  IoServiceImpl io_service;
-  auto bad_node_tracker = std::make_shared<BadDataNodeTracker>();
-  auto monitors = std::make_shared<LibhdfsEvents>();
-  bad_node_tracker->AddBadNode("foo");
-
-  PartialMockFileHandle is("cluster", "file", &io_service.io_service(), GetRandomClientName(), file_info, bad_node_tracker, monitors);
-  Status stat;
-  size_t read = 0;
-
-  // Exclude the one datanode with the data
-  is.AsyncPreadSome(0, asio::buffer(buf, sizeof(buf)), nullptr,
-      [&stat, &read](const Status &status, const std::string &, size_t transferred) {
-        stat = status;
-        read = transferred;
-      });
-
-  // Should fail with no resource available
-  ASSERT_EQ(static_cast<int>(std::errc::resource_unavailable_try_again), stat.code());
-  ASSERT_EQ(0UL, read);
-}
-
-TEST(BadDataNodeTest, NNEventCallback) {
-  auto file_info = std::make_shared<struct FileInfo>();
-  file_info->file_length_ = 1; //To avoid running into EOF
-  file_info->blocks_.push_back(LocatedBlockProto());
-  LocatedBlockProto & block = file_info->blocks_[0];
-  ExtendedBlockProto *b = block.mutable_b();
-  b->set_poolid("");
-  b->set_blockid(1);
-  b->set_generationstamp(1);
-  b->set_numbytes(4096);
-
-  // Set up the one block to have one datanode holding it
-  DatanodeInfoProto *di = block.add_locs();
-  DatanodeIDProto *dnid = di->mutable_id();
-  dnid->set_datanodeuuid("dn1");
-
-  char buf[4096] = {
-      0,
-  };
-  IoServiceImpl io_service;
-  auto tracker = std::make_shared<BadDataNodeTracker>();
-
-
-  // Set up event callbacks
-  int calls = 0;
-  std::vector<std::string> callbacks;
-  auto monitors = std::make_shared<LibhdfsEvents>();
-  monitors->set_file_callback([&calls, &callbacks] (const char * event,
-                    const char * cluster,
-                    const char * file,
-                    int64_t value) {
-    (void)cluster; (void) file; (void)value;
-    callbacks.push_back(event);
-
-    // Allow connect call to succeed but fail on read
-    if (calls++ == 1)
-      return event_response::test_err(Status::Error("Test"));
-
-    return event_response::make_ok();
-  });
-  PartialMockFileHandle is("cluster", "file", &io_service.io_service(), GetRandomClientName(),  file_info, tracker, monitors);
-  Status stat;
-  size_t read = 0;
-
-  EXPECT_CALL(*is.mock_reader_, AsyncReadBlock(_,_,_,_,_))
-      // Will return OK, but our callback will subvert it
-      .WillOnce(InvokeArgument<4>(
-          Status::OK(), 0));
-
-  is.AsyncPreadSome(
-      0, asio::buffer(buf, sizeof(buf)), nullptr,
-      [&stat, &read](const Status &status, const std::string &,
-                     size_t transferred) {
-        stat = status;
-        read = transferred;
-      });
-
-  ASSERT_FALSE(stat.ok());
-  ASSERT_EQ(2, callbacks.size());
-  ASSERT_EQ(FILE_DN_CONNECT_EVENT, callbacks[0]);
-  ASSERT_EQ(FILE_DN_READ_EVENT, callbacks[1]);
-}
-
-
-TEST(BadDataNodeTest, RecoverableError) {
-  auto file_info = std::make_shared<struct FileInfo>();
-  file_info->file_length_ = 1; //To avoid running into EOF
-  file_info->blocks_.push_back(LocatedBlockProto());
-  LocatedBlockProto & block = file_info->blocks_[0];
-  ExtendedBlockProto *b = block.mutable_b();
-  b->set_poolid("");
-  b->set_blockid(1);
-  b->set_generationstamp(1);
-  b->set_numbytes(4096);
-
-  // Set up the one block to have one datanode holding it
-  DatanodeInfoProto *di = block.add_locs();
-  DatanodeIDProto *dnid = di->mutable_id();
-  dnid->set_datanodeuuid("foo");
-
-  char buf[4096] = {
-      0,
-  };
-  IoServiceImpl io_service;
-  auto tracker = std::make_shared<BadDataNodeTracker>();
-  auto monitors = std::make_shared<LibhdfsEvents>();
-  PartialMockFileHandle is("cluster", "file", &io_service.io_service(), GetRandomClientName(),  file_info, tracker, monitors);
-  Status stat;
-  size_t read = 0;
-  EXPECT_CALL(*is.mock_reader_, AsyncReadBlock(_,_,_,_,_))
-      // resource unavailable error
-      .WillOnce(InvokeArgument<4>(
-          Status::ResourceUnavailable("Unable to get some resource, try again later"), 0));
-
-
-  is.AsyncPreadSome(
-      0, asio::buffer(buf, sizeof(buf)), nullptr,
-      [&stat, &read](const Status &status, const std::string &,
-                     size_t transferred) {
-        stat = status;
-        read = transferred;
-      });
-
-  ASSERT_FALSE(stat.ok());
-
-  std::string failing_dn = "id_of_bad_datanode";
-  if (!stat.ok()) {
-    if (FileHandle::ShouldExclude(stat)) {
-      tracker->AddBadNode(failing_dn);
-    }
-  }
-
-  ASSERT_FALSE(tracker->IsBadNode(failing_dn));
-}
-
-TEST(BadDataNodeTest, InternalError) {
-  auto file_info = std::make_shared<struct FileInfo>();
-  file_info->file_length_ = 1; //To avoid running into EOF
-  file_info->blocks_.push_back(LocatedBlockProto());
-  LocatedBlockProto & block = file_info->blocks_[0];
-  ExtendedBlockProto *b = block.mutable_b();
-  b->set_poolid("");
-  b->set_blockid(1);
-  b->set_generationstamp(1);
-  b->set_numbytes(4096);
-
-  // Set up the one block to have one datanode holding it
-  DatanodeInfoProto *di = block.add_locs();
-  DatanodeIDProto *dnid = di->mutable_id();
-  dnid->set_datanodeuuid("foo");
-
-  char buf[4096] = {
-      0,
-  };
-  IoServiceImpl io_service;
-  auto tracker = std::make_shared<BadDataNodeTracker>();
-  auto monitors = std::make_shared<LibhdfsEvents>();
-  PartialMockFileHandle is("cluster", "file", &io_service.io_service(), GetRandomClientName(),  file_info, tracker, monitors);
-  Status stat;
-  size_t read = 0;
-  EXPECT_CALL(*is.mock_reader_, AsyncReadBlock(_,_,_,_,_))
-      // internal server error
-      .WillOnce(InvokeArgument<4>(
-              Status::Exception("server_explosion_exception",
-                                "the server exploded"),
-                                sizeof(buf)));
-
-  is.AsyncPreadSome(
-      0, asio::buffer(buf, sizeof(buf)), nullptr,
-      [&stat, &read](const Status &status, const std::string &,
-                     size_t transferred) {
-        stat = status;
-        read = transferred;
-      });
-
-  ASSERT_FALSE(stat.ok());
-
-  std::string failing_dn = "id_of_bad_datanode";
-  if (!stat.ok()) {
-    if (FileHandle::ShouldExclude(stat)) {
-      tracker->AddBadNode(failing_dn);
-    }
-  }
-
-  ASSERT_TRUE(tracker->IsBadNode(failing_dn));
-}
-
-int main(int argc, char *argv[]) {
-  // The following line must be executed to initialize Google Mock
-  // (and Google Test) before running the tests.
-  ::testing::InitGoogleMock(&argc, argv);
-  int exit_code = RUN_ALL_TESTS();
-
-  // Clean up static data and prevent valgrind memory leaks
-  google::protobuf::ShutdownProtobufLibrary();
-  return exit_code;
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7a542fb3/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/configuration_test.cc
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/configuration_test.cc b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/configuration_test.cc
deleted file mode 100644
index 9534204..0000000
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/configuration_test.cc
+++ /dev/null
@@ -1,573 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "configuration_test.h"
-#include "common/configuration.h"
-#include "common/configuration_loader.h"
-#include <gmock/gmock.h>
-#include <cstdio>
-#include <fstream>
-
-using ::testing::_;
-
-using namespace hdfs;
-
-namespace hdfs {
-
-TEST(ConfigurationTest, TestDegenerateInputs) {
-  /* Completely empty stream */
-  {
-    std::stringstream stream;
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>("");
-    EXPECT_FALSE(config && "Empty stream");
-  }
-
-  /* No values */
-  {
-    std::string data = "<configuration></configuration>";
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(data);
-    EXPECT_TRUE(config && "Blank config");
-  }
-
-  /* Extraneous values */
-  {
-    std::string data = "<configuration><spam></spam></configuration>";
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(data);
-    EXPECT_TRUE(config && "Extraneous values");
-  }
-}
-
-TEST(ConfigurationTest, TestBasicOperations) {
-  /* Single value */
-  {
-    std::stringstream stream;
-    simpleConfigStream(stream, "key1", "value1");
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(stream.str());
-    EXPECT_TRUE(config && "Parse single value");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-  }
-
-  /* Multiple values */
-  {
-    optional<Configuration> config =
-        simpleConfig("key1", "value1", "key2", "value2");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-    EXPECT_EQ("value2", config->GetWithDefault("key2", ""));
-  }
-
-  /* Case-insensitive */
-  {
-    std::stringstream stream;
-    simpleConfigStream(stream, "key1", "value1");
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(stream.str());
-    EXPECT_TRUE(config && "Parse single value");
-    EXPECT_EQ("value1", config->GetWithDefault("KEY1", ""));
-  }
-
-  /* No defaults */
-  {
-    std::stringstream stream;
-    simpleConfigStream(stream, "key1", "value1");
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(stream.str());
-    EXPECT_TRUE(config && "Parse single value");
-    optional<std::string> value = config->Get("key1");
-    EXPECT_TRUE((bool)value);
-    EXPECT_EQ("value1", *value);
-    EXPECT_FALSE(config->Get("key2"));
-  }
-}
-
-TEST(ConfigurationTest, TestCompactValues) {
-  {
-    std::stringstream stream;
-    stream << "<configuration><property name=\"key1\" "
-              "value=\"value1\"/></configuration>";
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(stream.str());
-    EXPECT_TRUE(config && "Compact value parse");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-  }
-}
-
-TEST(ConfigurationTest, TestMultipleResources) {
-  /* Single value */
-  {
-    std::stringstream stream;
-    simpleConfigStream(stream, "key1", "value1");
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(stream.str());
-    EXPECT_TRUE(config && "Parse first stream");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-
-    std::stringstream stream2;
-    simpleConfigStream(stream2, "key2", "value2");
-    optional<Configuration> config2 =
-        ConfigurationLoader().OverlayResourceString(config.value(), stream2.str());
-    EXPECT_TRUE(config2 && "Parse second stream");
-    EXPECT_EQ("value1", config2->GetWithDefault("key1", ""));
-    EXPECT_EQ("value2", config2->GetWithDefault("key2", ""));
-  }
-}
-
-TEST(ConfigurationTest, TestStringResource) {
-  /* Single value */
-  {
-    std::stringstream stream;
-    simpleConfigStream(stream, "key1", "value1");
-    std::string str = stream.str();
-
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(stream.str());
-    EXPECT_TRUE(config && "Parse single value");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-  }
-}
-
-TEST(ConfigurationTest, TestValueOverlay) {
-  /* Incremental updates */
-  {
-    ConfigurationLoader loader;
-    std::stringstream stream;
-    stream << "<configuration>"
-            "<property><name>key1</name><value>value1</value><final>false</final></property>"
-            "<property><name>final2</name><value>value2</value><final>true</final></property>"
-            "</configuration>";
-    optional<Configuration> config = loader.Load<Configuration>(stream.str());
-    EXPECT_TRUE(config && "Parse single value");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-    EXPECT_EQ("value2", config->GetWithDefault("final2", ""));
-    config = loader.OverlayValue(config.value(), "key3", "value3");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-    EXPECT_EQ("value2", config->GetWithDefault("final2", ""));
-    EXPECT_EQ("value3", config->GetWithDefault("key3", ""));
-    config = loader.OverlayValue(config.value(), "final2", "value4");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-    EXPECT_EQ("value2", config->GetWithDefault("final2", ""));
-    EXPECT_EQ("value3", config->GetWithDefault("key3", ""));
-
-    // Case insensitive overlay
-    config = loader.OverlayValue(config.value(), "KEY3", "value3a");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-    EXPECT_EQ("value2", config->GetWithDefault("final2", ""));
-    EXPECT_EQ("value3a", config->GetWithDefault("key3", ""));
-  }
-}
-
-TEST(ConfigurationTest, TestFinal) {
-  {
-    /* Explicitly non-final non-compact value */
-    std::stringstream stream;
-    stream << "<configuration><property><name>key1</name><value>value1</"
-              "value><final>false</final></property></configuration>";
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(stream.str());
-    EXPECT_TRUE(config && "Parse first stream");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-
-    std::stringstream stream2;
-    simpleConfigStream(stream2, "key1", "value2");
-    optional<Configuration> config2 =
-        ConfigurationLoader().OverlayResourceString(config.value(), stream2.str());
-    EXPECT_TRUE(config2 && "Parse second stream");
-    EXPECT_EQ("value2", config2->GetWithDefault("key1", ""));
-  }
-  {
-    /* Explicitly final non-compact value */
-    std::stringstream stream;
-    stream << "<configuration><property><name>key1</name><value>value1</"
-              "value><final>true</final></property></configuration>";
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(stream.str());
-    EXPECT_TRUE(config && "Parse first stream");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-
-    std::stringstream stream2;
-    simpleConfigStream(stream2, "key1", "value2");
-    optional<Configuration> config2 =
-        ConfigurationLoader().OverlayResourceString(config.value(), stream2.str());
-    EXPECT_TRUE(config2 && "Parse second stream");
-    EXPECT_EQ("value1", config2->GetWithDefault("key1", ""));
-  }
-  {
-    /* Explicitly non-final compact value */
-    std::stringstream stream;
-    stream << "<configuration><property name=\"key1\" value=\"value1\" "
-              "final=\"false\"/></configuration>";
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(stream.str());
-    EXPECT_TRUE(config && "Parse first stream");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-
-    std::stringstream stream2;
-    simpleConfigStream(stream2, "key1", "value2");
-    optional<Configuration> config2 =
-        ConfigurationLoader().OverlayResourceString(config.value(), stream2.str());
-    EXPECT_TRUE(config2 && "Parse second stream");
-    EXPECT_EQ("value2", config2->GetWithDefault("key1", ""));
-  }
-  {
-    /* Explicitly final compact value */
-    std::stringstream stream;
-    stream << "<configuration><property name=\"key1\" value=\"value1\" "
-              "final=\"true\"/></configuration>";
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(stream.str());
-    EXPECT_TRUE(config && "Parse first stream");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-
-    std::stringstream stream2;
-    simpleConfigStream(stream2, "key1", "value2");
-    optional<Configuration> config2 =
-        ConfigurationLoader().OverlayResourceString(config.value(), stream2.str());
-    EXPECT_TRUE(config2 && "Parse second stream");
-    EXPECT_EQ("value1", config2->GetWithDefault("key1", ""));
-  }
-  {
-    /* Bogus final value */
-    std::stringstream stream;
-    stream << "<configuration><property><name>key1</name><value>value1</"
-              "value><final>spam</final></property></configuration>";
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(stream.str());
-    EXPECT_TRUE(config && "Parse first stream");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-
-    std::stringstream stream2;
-    simpleConfigStream(stream2, "key1", "value2");
-    optional<Configuration> config2 =
-        ConfigurationLoader().OverlayResourceString(config.value(), stream2.str());
-    EXPECT_TRUE(config2 && "Parse second stream");
-    EXPECT_EQ("value2", config2->GetWithDefault("key1", ""));
-  }
-  {
-    /* Blank final value */
-    std::stringstream stream;
-    stream << "<configuration><property><name>key1</name><value>value1</"
-              "value><final></final></property></configuration>";
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(stream.str());
-    EXPECT_TRUE(config && "Parse first stream");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-
-    std::stringstream stream2;
-    simpleConfigStream(stream2, "key1", "value2");
-    optional<Configuration> config2 =
-        ConfigurationLoader().OverlayResourceString(config.value(), stream2.str());
-    EXPECT_TRUE(config2 && "Parse second stream");
-    EXPECT_EQ("value2", config2->GetWithDefault("key1", ""));
-  }
-}
-
-TEST(ConfigurationTest, TestFileReads)
-{
-  // Single stream
-  {
-    TempFile tempFile;
-    writeSimpleConfig(tempFile.filename, "key1", "value1");
-
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.LoadFromFile<Configuration>(tempFile.filename);
-    EXPECT_TRUE(config && "Parse first stream");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-  }
-
-  // Multiple files
-  {
-    TempFile tempFile;
-    writeSimpleConfig(tempFile.filename, "key1", "value1");
-
-    ConfigurationLoader loader;
-    optional<Configuration> config = loader.LoadFromFile<Configuration>(tempFile.filename);
-    ASSERT_TRUE(config && "Parse first stream");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-
-    TempFile tempFile2;
-    writeSimpleConfig(tempFile2.filename, "key2", "value2");
-    optional<Configuration> config2 = loader.OverlayResourceFile(*config, tempFile2.filename);
-    ASSERT_TRUE(config2 && "Parse second stream");
-    EXPECT_EQ("value1", config2->GetWithDefault("key1", ""));
-    EXPECT_EQ("value2", config2->GetWithDefault("key2", ""));
-  }
-
-  // Try to add a directory
-  {
-    TempDir tempDir;
-
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.LoadFromFile<Configuration>(tempDir.path);
-    EXPECT_FALSE(config && "Add directory as file resource");
-  }
-
-
-  // Search path splitting
-  {
-    ConfigurationLoader loader;
-    loader.SetSearchPath("foo:/bar:baz/:/fioux/:/bar/bar/bong");
-
-    // Paths will have / appended to them if not already present
-    EXPECT_EQ("foo/:/bar/:baz/:/fioux/:/bar/bar/bong/", loader.GetSearchPath());
-  }
-
-  // Search path
-  {
-    TempDir tempDir1;
-    TempFile tempFile1(tempDir1.path + "/file1.xml");
-    writeSimpleConfig(tempFile1.filename, "key1", "value1");
-    TempDir tempDir2;
-    TempFile tempFile2(tempDir2.path + "/file2.xml");
-    writeSimpleConfig(tempFile2.filename, "key2", "value2");
-    TempDir tempDir3;
-    TempFile tempFile3(tempDir3.path + "/file3.xml");
-    writeSimpleConfig(tempFile3.filename, "key3", "value3");
-
-    ConfigurationLoader loader;
-    loader.SetSearchPath(tempDir1.path + ":" + tempDir2.path + ":" + tempDir3.path);
-    optional<Configuration> config1 = loader.LoadFromFile<Configuration>("file1.xml");
-    EXPECT_TRUE(config1 && "Parse first stream");
-    optional<Configuration> config2 = loader.OverlayResourceFile(*config1, "file2.xml");
-    EXPECT_TRUE(config2 && "Parse second stream");
-    optional<Configuration> config3 = loader.OverlayResourceFile(*config2, "file3.xml");
-    EXPECT_TRUE(config3 && "Parse third stream");
-    EXPECT_EQ("value1", config3->GetWithDefault("key1", ""));
-    EXPECT_EQ("value2", config3->GetWithDefault("key2", ""));
-    EXPECT_EQ("value3", config3->GetWithDefault("key3", ""));
-  }
-}
-
-TEST(ConfigurationTest, TestDefaultConfigs) {
-  // Search path
-  {
-    TempDir tempDir;
-    TempFile coreSite(tempDir.path + "/core-site.xml");
-    writeSimpleConfig(coreSite.filename, "key1", "value1");
-
-    ConfigurationLoader loader;
-    loader.SetSearchPath(tempDir.path);
-
-    optional<Configuration> config = loader.LoadDefaultResources<Configuration>();
-    EXPECT_TRUE(config && "Parse streams");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-  }
-}
-
-TEST(ConfigurationTest, TestIntConversions) {
-  /* No defaults */
-  {
-    std::stringstream stream;
-    simpleConfigStream(stream, "key1", "1");
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(stream.str());
-    EXPECT_TRUE(config && "Parse single value");
-    optional<int64_t> value = config->GetInt("key1");
-    EXPECT_TRUE((bool)value);
-    EXPECT_EQ(1, *value);
-    EXPECT_FALSE(config->GetInt("key2"));
-  }
-
-  {
-    optional<Configuration> config = simpleConfig("key1", "1");
-    EXPECT_EQ(1, config->GetIntWithDefault("key1", -1));
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", "-100");
-    EXPECT_EQ(-100, config->GetIntWithDefault("key1", -1));
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", " 1 ");
-    EXPECT_EQ(1, config->GetIntWithDefault("key1", -1));
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", "");
-    EXPECT_EQ(-1, config->GetIntWithDefault("key1", -1));
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", "spam");
-    EXPECT_EQ(-1, config->GetIntWithDefault("key1", -1));
-  }
-  {
-    optional<Configuration> config = simpleConfig("key2", "");
-    EXPECT_EQ(-1, config->GetIntWithDefault("key1", -1));
-  }
-}
-
-TEST(ConfigurationTest, TestDoubleConversions) {
-  /* No defaults */
-  {
-    std::stringstream stream;
-    simpleConfigStream(stream, "key1", "1");
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(stream.str());
-    EXPECT_TRUE(config && "Parse single value");
-    optional<double> value = config->GetDouble("key1");
-    EXPECT_TRUE((bool)value);
-    EXPECT_EQ(1, *value);
-    EXPECT_FALSE(config->GetDouble("key2"));
-  }
-
-  {
-    optional<Configuration> config = simpleConfig("key1", "1");
-    EXPECT_EQ(1, config->GetDoubleWithDefault("key1", -1));
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", "-100");
-    EXPECT_EQ(-100, config->GetDoubleWithDefault("key1", -1));
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", " 1 ");
-    EXPECT_EQ(1, config->GetDoubleWithDefault("key1", -1));
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", "");
-    EXPECT_EQ(-1, config->GetDoubleWithDefault("key1", -1));
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", "spam");
-    EXPECT_EQ(-1, config->GetDoubleWithDefault("key1", -1));
-  }
-  {
-    optional<Configuration> config = simpleConfig("key2", "");
-    EXPECT_EQ(-1, config->GetDoubleWithDefault("key1", -1));
-  }
-  { /* Out of range */
-    optional<Configuration> config = simpleConfig("key2", "1e9999");
-    EXPECT_EQ(-1, config->GetDoubleWithDefault("key1", -1));
-  }
-}
-
-TEST(ConfigurationTest, TestBoolConversions) {
-  /* No defaults */
-  {
-    std::stringstream stream;
-    simpleConfigStream(stream, "key1", "true");
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(stream.str());
-    EXPECT_TRUE(config && "Parse single value");
-    optional<bool> value = config->GetBool("key1");
-    EXPECT_TRUE((bool)value);
-    EXPECT_EQ(true, *value);
-    EXPECT_FALSE(config->GetBool("key2"));
-  }
-
-  {
-    optional<Configuration> config = simpleConfig("key1", "true");
-    EXPECT_EQ(true, config->GetBoolWithDefault("key1", false));
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", "tRuE");
-    EXPECT_EQ(true, config->GetBoolWithDefault("key1", false));
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", "false");
-    EXPECT_FALSE(config->GetBoolWithDefault("key1", true));
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", "FaLsE");
-    EXPECT_FALSE(config->GetBoolWithDefault("key1", true));
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", " FaLsE ");
-    EXPECT_FALSE(config->GetBoolWithDefault("key1", true));
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", "");
-    EXPECT_EQ(true, config->GetBoolWithDefault("key1", true));
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", "spam");
-    EXPECT_EQ(true, config->GetBoolWithDefault("key1", true));
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", "");
-    EXPECT_EQ(true, config->GetBoolWithDefault("key2", true));
-  }
-}
-
-TEST(ConfigurationTest, TestUriConversions) {
-  /* No defaults */
-  {
-    std::stringstream stream;
-    simpleConfigStream(stream, "key1", "hdfs:///");
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<Configuration> config = config_loader.Load<Configuration>(stream.str());
-    EXPECT_TRUE(config && "Parse single value");
-    optional<URI> value = config->GetUri("key1");
-    EXPECT_TRUE((bool)value);
-    EXPECT_EQ("hdfs:///", value->str());
-    EXPECT_FALSE(config->GetUri("key2"));
-  }
-
-  {
-    optional<Configuration> config = simpleConfig("key1", "hdfs:///");
-    EXPECT_EQ("hdfs:///", config->GetUriWithDefault("key1", "http:///").str());
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", " hdfs:/// ");
-    EXPECT_EQ("hdfs:///", config->GetUriWithDefault("key1", "http:///").str());
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", "");
-    EXPECT_EQ("", config->GetUriWithDefault("key1", "http:///").str());
-  }
-  {
-    optional<Configuration> config = simpleConfig("key1", "%%");  // invalid URI
-    EXPECT_EQ("http:///", config->GetUriWithDefault("key1", "http:///").str());
-  }
-  {
-    optional<Configuration> config = simpleConfig("key2", "hdfs:///");
-    EXPECT_EQ("http:///", config->GetUriWithDefault("key1", "http:///").str());
-  }
-}
-
-
-
-int main(int argc, char *argv[]) {
-  /*
-   *  The following line must be executed to initialize Google Mock
-   * (and Google Test) before running the tests.
-   */
-  ::testing::InitGoogleMock(&argc, argv);
-  return RUN_ALL_TESTS();
-}
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7a542fb3/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/configuration_test.h
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/configuration_test.h b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/configuration_test.h
deleted file mode 100644
index 9ad11b7..0000000
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/configuration_test.h
+++ /dev/null
@@ -1,158 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-#ifndef TESTS_CONFIGURATION_H_
-#define TESTS_CONFIGURATION_H_
-
-#include "hdfspp/config_parser.h"
-#include "common/configuration.h"
-#include "common/configuration_loader.h"
-#include <cstdio>
-#include <fstream>
-#include <istream>
-#include <ftw.h>
-#include <gmock/gmock.h>
-
-namespace hdfs {
-
-template <typename T, typename U>
-void simpleConfigStreamProperty(std::stringstream& out, T key, U value) {
-  out << "<property>"
-      << "<name>" << key << "</name>"
-      << "<value>" << value << "</value>"
-      << "</property>";
-}
-
-template <typename T, typename U, typename... Args>
-void simpleConfigStreamProperty(std::stringstream& out, T key, U value,
-                                Args... args) {
-  simpleConfigStreamProperty(out, key, value);
-  simpleConfigStreamProperty(out, args...);
-}
-
-template <typename... Args>
-void simpleConfigStream(std::stringstream& out, Args... args) {
-  out << "<configuration>";
-  simpleConfigStreamProperty(out, args...);
-  out << "</configuration>";
-}
-
-template <typename T, typename U>
-void damagedConfigStreamProperty(std::stringstream& out, T key, U value) {
-  out << "<propertyy>"
-      << "<name>" << key << "</name>"
-      << "<value>" << value << "</value>"
-      << "</property>";
-}
-
-template <typename T, typename U, typename... Args>
-void damagedConfigStreamProperty(std::stringstream& out, T key, U value,
-                                Args... args) {
-  damagedConfigStreamProperty(out, key, value);
-  damagedConfigStreamProperty(out, args...);
-}
-
-template <typename... Args>
-void damagedConfigStream(std::stringstream& out, Args... args) {
-  out << "<configuration>";
-  damagedConfigStreamProperty(out, args...);
-  out << "</configuration>";
-}
-
-template <typename... Args>
-optional<Configuration> simpleConfig(Args... args) {
-  std::stringstream stream;
-  simpleConfigStream(stream, args...);
-  ConfigurationLoader config_loader;
-  config_loader.ClearSearchPath();
-  optional<Configuration> parse = config_loader.Load<Configuration>(stream.str());
-  EXPECT_TRUE((bool)parse);
-
-  return parse;
-}
-
-template <typename... Args>
-void writeSimpleConfig(const std::string& filename, Args... args) {
-  std::stringstream stream;
-  simpleConfigStream(stream, args...);
-
-  std::ofstream out;
-  out.open(filename);
-  out << stream.rdbuf();
-}
-
-template <typename... Args>
-void writeDamagedConfig(const std::string& filename, Args... args) {
-  std::stringstream stream;
-  damagedConfigStream(stream, args...);
-
-  std::ofstream out;
-  out.open(filename);
-  out << stream.rdbuf();
-}
-
-// TempFile: is deleted on destruction
-class TempFile {
-public:
-  std::string filename;
-  char        fn_buffer[128];
-  int         tempFileHandle;
-  TempFile() : tempFileHandle(-1) {
-    strncpy(fn_buffer, "/tmp/test_XXXXXXXXXX", sizeof(fn_buffer));
-    tempFileHandle = mkstemp(fn_buffer);
-    EXPECT_NE(-1, tempFileHandle);
-    filename = fn_buffer;
-  }
-  TempFile(const std::string & fn) : filename(fn), tempFileHandle(-1) {
-    strncpy(fn_buffer, fn.c_str(), sizeof(fn_buffer));
-    fn_buffer[sizeof(fn_buffer)-1] = 0;
-  }
-  ~TempFile() { if(-1 != tempFileHandle) close(tempFileHandle); unlink(fn_buffer); }
-};
-
-
-// Callback to remove a directory in the nftw visitor
-int nftw_remove(const char *fpath, const struct stat *sb, int typeflag, struct FTW *ftwbuf)
-{
-  (void)sb; (void)typeflag; (void)ftwbuf;
-
-  int rv = remove(fpath);
-  EXPECT_EQ(0, rv);
-  return rv;
-}
-
-// TempDir: is created in ctor and recursively deletes in dtor
-class TempDir {
-public:
-  std::string path;
-  TempDir() {
-    char        fn_buffer[128];
-    strncpy(fn_buffer, "/tmp/test_dir_XXXXXXXXXX", sizeof(fn_buffer));
-    const char * returned_path = mkdtemp(fn_buffer);
-    EXPECT_NE(nullptr, returned_path);
-    path = returned_path;
-  }
-  ~TempDir() {
-    if(!path.empty())
-      nftw(path.c_str(), nftw_remove, 64, FTW_DEPTH | FTW_PHYS);
-  }
-};
-
-
-}
-
-#endif

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7a542fb3/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/hdfs_builder_test.cc
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/hdfs_builder_test.cc b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/hdfs_builder_test.cc
deleted file mode 100644
index 01db69d..0000000
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/hdfs_builder_test.cc
+++ /dev/null
@@ -1,137 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "hdfspp/hdfs_ext.h"
-#include "configuration_test.h"
-#include <gmock/gmock.h>
-#include <google/protobuf/stubs/common.h>
-
-using ::testing::_;
-
-using namespace hdfs;
-
-TEST(HdfsBuilderTest, TestStubBuilder) {
-  {
-    TempDir tempDir1;
-
-    hdfsBuilder * builder = hdfsNewBuilderFromDirectory(tempDir1.path.c_str());
-    hdfsFreeBuilder(builder);
-  }
-
-  {
-    hdfsBuilder * builder = hdfsNewBuilderFromDirectory("/this/path/does/not/exist");
-    hdfsFreeBuilder(builder);
-  }
-}
-
-TEST(HdfsBuilderTest, TestRead)
-{
-  // Reading string values
-  {
-    TempDir tempDir1;
-    TempFile tempFile1(tempDir1.path + "/core-site.xml");
-    writeSimpleConfig(tempFile1.filename, "key1", "value1");
-
-    hdfsBuilder * builder = hdfsNewBuilderFromDirectory(tempDir1.path.c_str());
-
-    char * readVal = nullptr;
-    int result = hdfsBuilderConfGetStr(builder, "key1", &readVal);
-    ASSERT_EQ(0, result);
-    ASSERT_NE(nullptr, readVal);
-    EXPECT_EQ("value1", std::string(readVal));
-    hdfsConfStrFree(readVal);
-
-    readVal = nullptr;
-    result = hdfsBuilderConfGetStr(builder, "key2", &readVal);
-    ASSERT_EQ(0, result);
-    EXPECT_EQ(nullptr, readVal);
-
-    hdfsFreeBuilder(builder);
-  }
-
-  // Reading int values
-  {
-    TempDir tempDir1;
-    TempFile tempFile1(tempDir1.path + "/core-site.xml");
-    writeSimpleConfig(tempFile1.filename, "key1", "100");
-
-    hdfsBuilder * builder = hdfsNewBuilderFromDirectory(tempDir1.path.c_str());
-
-    int readVal = -1;
-    int result = hdfsBuilderConfGetInt(builder, "key1", &readVal);
-    EXPECT_EQ(0, result);
-    EXPECT_EQ(100, readVal);
-
-    readVal = -1;
-    result = hdfsBuilderConfGetInt(builder, "key2", &readVal);
-    EXPECT_EQ(0, result);
-    EXPECT_EQ(-1, readVal);
-
-    hdfsFreeBuilder(builder);
-  }
-}
-
-TEST(HdfsBuilderTest, TestSet)
-{
-  {
-    // Setting values in an empty builder
-    // Don't use default, or it will load any data in /etc/hadoop
-    hdfsBuilder * builder = hdfsNewBuilderFromDirectory("/this/path/does/not/exist");
-
-    int result = hdfsBuilderConfSetStr(builder, "key1", "100");
-    EXPECT_EQ(0, result);
-
-    int readVal = -1;
-    result = hdfsBuilderConfGetInt(builder, "key1", &readVal);
-    EXPECT_EQ(0, result);
-    EXPECT_EQ(100, readVal);
-
-    // Set value in non-empty builder
-    result = hdfsBuilderConfSetStr(builder, "key2", "200");
-    EXPECT_EQ(0, result);
-
-    readVal = -1;
-    result = hdfsBuilderConfGetInt(builder, "key2", &readVal);
-    EXPECT_EQ(0, result);
-    EXPECT_EQ(200, readVal);
-
-    // Overwrite value
-    result = hdfsBuilderConfSetStr(builder, "key2", "300");
-    EXPECT_EQ(0, result);
-
-    readVal = -1;
-    result = hdfsBuilderConfGetInt(builder, "key2", &readVal);
-    EXPECT_EQ(0, result);
-    EXPECT_EQ(300, readVal);
-
-    hdfsFreeBuilder(builder);
-  }
-}
-
-int main(int argc, char *argv[]) {
-  /*
-   *  The following line must be executed to initialize Google Mock
-   * (and Google Test) before running the tests.
-   */
-  ::testing::InitGoogleMock(&argc, argv);
-  int exit_code = RUN_ALL_TESTS();
-
-  // Clean up static data and prevent valgrind memory leaks
-  google::protobuf::ShutdownProtobufLibrary();
-  return exit_code;
-}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7a542fb3/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/hdfs_config_connect_bugs.cc
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/hdfs_config_connect_bugs.cc b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/hdfs_config_connect_bugs.cc
deleted file mode 100644
index fc31227..0000000
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/hdfs_config_connect_bugs.cc
+++ /dev/null
@@ -1,136 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "hdfspp/hdfs_ext.h"
-
-#include "configuration_test.h"
-
-#include <google/protobuf/stubs/common.h>
-
-#include <cstring>
-#include <chrono>
-#include <exception>
-
-
-static const char *hdfs_11294_core_site_txt =
-"<configuration>\n"
-"  <property name=\"fs.defaultFS\" value=\"hdfs://NAMESERVICE1\"/>\n"
-"  <property name=\"hadoop.security.authentication\" value=\"simple\"/>\n"
-"  <property name=\"ipc.client.connect.retry.interval\" value=\"1\">\n"
-"</configuration>\n";
-
-static const char *hdfs_11294_hdfs_site_txt =
-"<configuration>\n"
-"  <property>\n"
-"    <name>dfs.nameservices</name>\n"
-"    <value>NAMESERVICE1</value>\n"
-"  </property>\n"
-"  <property>\n"
-"    <name>dfs.ha.namenodes.NAMESERVICE1</name>\n"
-"    <value>nn1, nn2</value>\n"
-"  </property>\n"
-"  <property>\n"
-"    <name>dfs.namenode.rpc-address.NAMESERVICE1.nn1</name>\n"
-"    <value>nonesuch1.apache.org:8020</value>\n"
-"  </property>\n"
-"  <property>\n"
-"    <name>dfs.namenode.servicerpc-address.NAMESERVICE1.nn1</name>\n"
-"    <value>nonesuch1.apache.org:8040</value>\n"
-"  </property>\n"
-"  <property>\n"
-"    <name>dfs.namenode.http-address.NAMESERVICE1.nn1</name>\n"
-"    <value>nonesuch1.apache.org:50070</value>\n"
-"  </property>\n"
-"  <property>\n"
-"    <name>dfs.namenode.rpc-address.NAMESERVICE1.nn2</name>\n"
-"    <value>nonesuch2.apache.org:8020</value>\n"
-"  </property>\n"
-"  <property>\n"
-"    <name>dfs.namenode.servicerpc-address.NAMESERVICE1.nn2</name>\n"
-"    <value>nonesuch2.apache.org:8040</value>\n"
-"  </property>\n"
-"  <property>\n"
-"    <name>dfs.namenode.http-address.NAMESERVICE1.nn2</name>\n"
-"    <value>nonesuch2.apache.org:50070</value>\n"
-"  </property>\n"
-"</configuration>\n";
-
-
-
-
-namespace hdfs {
-
-// Regression test for HDFS-11294: a connect attempt against unresolvable namenode addresses must fail cleanly instead of crashing.
-TEST(ConfigConnectBugs, Test_HDFS_11294) {
-  // Directory for hdfs config
-  TempDir td;
-
-  const std::string& tempDirPath = td.path;
-  const std::string coreSitePath = tempDirPath + "/core-site.xml";
-  const std::string hdfsSitePath = tempDirPath + "/hdfs-site.xml";
-
-  // Write configs
-  FILE *coreSite = fopen(coreSitePath.c_str(), "w");
-  EXPECT_NE(coreSite, nullptr);
-  const size_t coreSiteLength = strlen(hdfs_11294_core_site_txt);
-  size_t res = fwrite(hdfs_11294_core_site_txt, 1, coreSiteLength, coreSite);
-  EXPECT_EQ(res, coreSiteLength);
-  EXPECT_EQ(fclose(coreSite), 0);
-
-  FILE *hdfsSite = fopen(hdfsSitePath.c_str(), "w");
-  EXPECT_NE(hdfsSite, nullptr);
-  const size_t hdfsSiteLength = strlen(hdfs_11294_hdfs_site_txt);
-  res = fwrite(hdfs_11294_hdfs_site_txt, 1, hdfsSiteLength, hdfsSite);
-  EXPECT_EQ(res, hdfsSiteLength);
-  EXPECT_EQ(fclose(hdfsSite), 0);
-
-  // Load configs with new FS
-  hdfsBuilder *bld = hdfsNewBuilderFromDirectory(tempDirPath.c_str());
-  hdfsBuilderSetNameNode(bld, "NAMESERVICE1");
-
-  // In HDFS-11294, DNS could not resolve any of the namenode endpoints, and
-  // the RpcEngine then dereferenced a nonexistent element of a std::vector
-  // and crashed.  The test passes as long as this connect call does not crash.
-  hdfsFS fileSystem = hdfsBuilderConnect(bld);
-
-  // FS shouldn't be created if it can't connect.
-  EXPECT_EQ(fileSystem, nullptr);
-
-  // Verify it got to endpoint check
-  char errMsgBuf[100];
-  memset(errMsgBuf, 0, sizeof(errMsgBuf));
-  EXPECT_EQ(hdfsGetLastError(errMsgBuf, sizeof(errMsgBuf)), 0);
-  EXPECT_STREQ(errMsgBuf, "Exception:No endpoints found for namenode");
-
-
-  // remove config files
-  EXPECT_EQ(remove(coreSitePath.c_str()), 0);
-  EXPECT_EQ(remove(hdfsSitePath.c_str()), 0);
-}
-
-} // end namespace hdfs
-
-int main(int argc, char *argv[]) {
-  // The following line must be executed to initialize Google Mock
-  // (and Google Test) before running the tests.
-  ::testing::InitGoogleMock(&argc, argv);
-  int exit_code = RUN_ALL_TESTS();
-  google::protobuf::ShutdownProtobufLibrary();
-
-  return exit_code;
-}
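
One thing worth noting while this test is removed: the hdfs_11294_core_site_txt string above encodes its properties as attributes (<property name="..." value="..."/>), while hdfs_11294_hdfs_site_txt uses the conventional Hadoop layout with nested <name>/<value> elements. Whether libhdfspp's loader accepts the attribute shorthand is not shown here; for comparison, the same core-site settings in the conventional form would read roughly as below (illustrative only, written as a string constant in the style of the deleted test).

    static const char *core_site_conventional_txt =
      "<configuration>\n"
      "  <property>\n"
      "    <name>fs.defaultFS</name>\n"
      "    <value>hdfs://NAMESERVICE1</value>\n"
      "  </property>\n"
      "  <property>\n"
      "    <name>hadoop.security.authentication</name>\n"
      "    <value>simple</value>\n"
      "  </property>\n"
      "  <property>\n"
      "    <name>ipc.client.connect.retry.interval</name>\n"
      "    <value>1</value>\n"
      "  </property>\n"
      "</configuration>\n";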

http://git-wip-us.apache.org/repos/asf/hadoop/blob/7a542fb3/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/hdfs_configuration_test.cc
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/hdfs_configuration_test.cc b/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/hdfs_configuration_test.cc
deleted file mode 100644
index b21725c..0000000
--- a/hadoop-hdfs-project/hadoop-hdfs-native-client/src/main/native/libhdfspp/tests/hdfs_configuration_test.cc
+++ /dev/null
@@ -1,172 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include "common/hdfs_configuration.h"
-#include "configuration_test.h"
-#include <gmock/gmock.h>
-#include <iostream>
-
-using ::testing::_;
-
-using namespace hdfs;
-
-namespace hdfs
-{
-TEST(HdfsConfigurationTest, TestDefaultOptions)
-{
-  // Completely empty stream
-  {
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    HdfsConfiguration empty_config = config_loader.NewConfig<HdfsConfiguration>();
-    Options options = empty_config.GetOptions();
-    EXPECT_EQ(Options::kDefaultRpcTimeout, options.rpc_timeout);
-  }
-}
-
-TEST(HdfsConfigurationTest, TestSetOptions)
-{
-  // Completely empty stream
-  {
-    std::stringstream stream;
-    simpleConfigStream(stream,
-                       HdfsConfiguration::kFsDefaultFsKey, "/FDFK",
-                       HdfsConfiguration::kDfsClientSocketTimeoutKey, 100,
-                       HdfsConfiguration::kIpcClientConnectMaxRetriesKey, 101,
-                       HdfsConfiguration::kIpcClientConnectRetryIntervalKey, 102,
-                       HdfsConfiguration::kIpcClientConnectTimeoutKey, 103,
-                       HdfsConfiguration::kHadoopSecurityAuthenticationKey, HdfsConfiguration::kHadoopSecurityAuthentication_kerberos
-            );
-    ConfigurationLoader config_loader;
-    config_loader.ClearSearchPath();
-    optional<HdfsConfiguration> config = config_loader.Load<HdfsConfiguration>(stream.str());
-    EXPECT_TRUE(config && "Read stream");
-    Options options = config->GetOptions();
-
-    EXPECT_EQ("/FDFK", options.defaultFS.str());
-    EXPECT_EQ(100, options.rpc_timeout);
-    EXPECT_EQ(101, options.max_rpc_retries);
-    EXPECT_EQ(102, options.rpc_retry_delay_ms);
-    EXPECT_EQ(103, options.rpc_connect_timeout);
-    EXPECT_EQ(Options::kKerberos, options.authentication);
-  }
-}
-
-TEST(HdfsConfigurationTest, TestDefaultConfigs) {
-  // Search path
-  {
-    TempDir tempDir;
-    TempFile coreSite(tempDir.path + "/core-site.xml");
-    writeSimpleConfig(coreSite.filename, "key1", "value1");
-    TempFile hdfsSite(tempDir.path + "/hdfs-site.xml");
-    writeSimpleConfig(hdfsSite.filename, "key2", "value2");
-
-    ConfigurationLoader loader;
-    loader.SetSearchPath(tempDir.path);
-
-    optional<HdfsConfiguration> config = loader.LoadDefaultResources<HdfsConfiguration>();
-    EXPECT_TRUE(config && "Parse streams");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-    EXPECT_EQ("value2", config->GetWithDefault("key2", ""));
-  }
-
-  // Only core-site.xml available
-  {
-    TempDir tempDir;
-    TempFile coreSite(tempDir.path + "/core-site.xml");
-    writeSimpleConfig(coreSite.filename, "key1", "value1");
-
-    ConfigurationLoader loader;
-    loader.SetSearchPath(tempDir.path);
-
-    optional<HdfsConfiguration> config = loader.LoadDefaultResources<HdfsConfiguration>();
-    EXPECT_TRUE(config && "Parse streams");
-    EXPECT_EQ("value1", config->GetWithDefault("key1", ""));
-  }
-
-  // Only hdfs-site available
-  {
-    TempDir tempDir;
-    TempFile hdfsSite(tempDir.path + "/hdfs-site.xml");
-    writeSimpleConfig(hdfsSite.filename, "key2", "value2");
-
-    ConfigurationLoader loader;
-    loader.SetSearchPath(tempDir.path);
-
-    optional<HdfsConfiguration> config = loader.LoadDefaultResources<HdfsConfiguration>();
-    EXPECT_TRUE(config && "Parse streams");
-    EXPECT_EQ("value2", config->GetWithDefault("key2", ""));
-  }
-
-
-}
-
-TEST(HdfsConfigurationTest, TestConfigParserAPI) {
-  // Config parser API
-  {
-    TempDir tempDir;
-    TempFile coreSite(tempDir.path + "/core-site.xml");
-    writeSimpleConfig(coreSite.filename, "key1", "value1");
-    TempFile hdfsSite(tempDir.path + "/hdfs-site.xml");
-    writeSimpleConfig(hdfsSite.filename, "key2", "value2");
-
-    ConfigParser parser(tempDir.path);
-
-    EXPECT_EQ("value1", parser.get_string_or("key1", ""));
-    EXPECT_EQ("value2", parser.get_string_or("key2", ""));
-
-    auto stats = parser.ValidateResources();
-
-    EXPECT_EQ("core-site.xml", stats[0].first);
-    EXPECT_EQ("OK", stats[0].second.ToString());
-
-    EXPECT_EQ("hdfs-site.xml", stats[1].first);
-    EXPECT_EQ("OK", stats[1].second.ToString());
-  }
-
-  {
-    TempDir tempDir;
-    TempFile coreSite(tempDir.path + "/core-site.xml");
-    writeSimpleConfig(coreSite.filename, "key1", "value1");
-    TempFile hdfsSite(tempDir.path + "/hdfs-site.xml");
-    writeDamagedConfig(hdfsSite.filename, "key2", "value2");
-
-    ConfigParser parser(tempDir.path);
-
-    EXPECT_EQ("value1", parser.get_string_or("key1", ""));
-    EXPECT_EQ("", parser.get_string_or("key2", ""));
-
-    auto stats = parser.ValidateResources();
-
-    EXPECT_EQ("core-site.xml", stats[0].first);
-    EXPECT_EQ("OK", stats[0].second.ToString());
-
-    EXPECT_EQ("hdfs-site.xml", stats[1].first);
-    EXPECT_EQ("Exception:The configuration file has invalid xml around 
character 74", stats[1].second.ToString());
-  }
-}
-
-} // end namespace hdfs
-
-int main(int argc, char *argv[])
-{
-  // The following line must be executed to initialize Google Mock
-  // (and Google Test) before running the tests.
-  ::testing::InitGoogleMock(&argc, argv);
-  return RUN_ALL_TESTS();
-}
-
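
Taken together, the configuration tests above sketch the public surface of common/hdfs_configuration.h. Assembled into one place, and using only the names those tests exercised (ConfigurationLoader, SetSearchPath, LoadDefaultResources, HdfsConfiguration::GetOptions), loading a configuration directory into an Options object looks roughly like the sketch below; the helper name and the fallback to a default-constructed Options are assumptions made for illustration.

    #include "common/hdfs_configuration.h"
    #include <string>

    namespace hdfs {
    // Illustrative helper (name is made up): read core-site.xml / hdfs-site.xml
    // from conf_dir and fold them into an Options object, falling back to the
    // built-in defaults when the files are missing or unreadable.
    Options OptionsFromConfDir(const std::string &conf_dir) {
      ConfigurationLoader loader;
      loader.SetSearchPath(conf_dir);
      auto config = loader.LoadDefaultResources<HdfsConfiguration>();
      return config ? config->GetOptions() : Options();
    }
    } // end namespace hdfs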

