This is an automated email from the ASF dual-hosted git repository.
laiyingchun pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-pegasus.git
The following commit(s) were added to refs/heads/master by this push:
new b9fdb327a fix(shell): fix empty hashkey write error in copy_data
(#1004)
b9fdb327a is described below
commit b9fdb327a764bfa0a159a68fb94cbf27876e0712
Author: liguohao <[email protected]>
AuthorDate: Wed Jul 13 16:59:52 2022 +0800
fix(shell): fix empty hashkey write error in copy_data (#1004)
---
src/shell/command_helper.h | 55 +++++++--
src/test/function_test/run.sh | 2 +
src/test/function_test/test_copy.cpp | 231 +++++++++++++++++++++++++++++++++++
src/test/function_test/test_scan.cpp | 126 +------------------
src/test/function_test/utils.h | 124 +++++++++++++++++++
5 files changed, 401 insertions(+), 137 deletions(-)
diff --git a/src/shell/command_helper.h b/src/shell/command_helper.h
index 53062a3a2..04f326562 100644
--- a/src/shell/command_helper.h
+++ b/src/shell/command_helper.h
@@ -63,6 +63,7 @@ using namespace dsn::replication;
#endif
DEFINE_TASK_CODE(LPC_SCAN_DATA, TASK_PRIORITY_COMMON,
::dsn::THREAD_POOL_DEFAULT)
+
enum scan_data_operator
{
SCAN_COPY,
@@ -164,7 +165,7 @@ struct scan_data_context
pegasus::pegasus_client *client_,
pegasus::geo::geo_client *geoclient_,
std::atomic_bool *error_occurred_,
- int max_multi_set_concurrency = 100,
+ int max_multi_set_concurrency = 20,
bool stat_size_ = false,
std::shared_ptr<rocksdb::Statistics> statistics_ =
nullptr,
int top_count_ = 0,
@@ -328,18 +329,48 @@ inline void scan_multi_data_next(scan_data_context
*context)
int ttl_seconds = 0;
ttl_seconds = compute_ttl_seconds(expire_ts_seconds,
ts_expired);
if (!ts_expired) {
- context->data_count++;
- if (context->multi_kvs.find(hash_key) ==
context->multi_kvs.end()) {
- context->multi_kvs.emplace(hash_key,
- std::map<std::string,
std::string>());
- }
- if (context->multi_ttl_seconds < ttl_seconds ||
ttl_seconds == 0) {
- context->multi_ttl_seconds = ttl_seconds;
- }
-
context->multi_kvs[hash_key].emplace(std::move(sort_key), std::move(value));
+                        // an empty hash key cannot be batched into multi_kvs
+                        // for multi_set; write the record directly via async_set
+ if (hash_key == "") {
+                        // block until the number of in-flight async_set calls
+                        // drops below max_multi_set_concurrency
+ context->sema.wait();
+
+ auto callback = [context](
+ int err,
pegasus::pegasus_client::internal_info &&info) {
+ if (err != pegasus::PERR_OK) {
+ if
(!context->split_completed.exchange(true)) {
+ fprintf(stderr,
+ "ERROR: split[%d] async check
and set failed: %s\n",
+ context->split_id,
+
context->client->get_error_string(err));
+ context->error_occurred->store(true);
+ }
+ } else {
+ context->split_rows++;
+ }
+ context->sema.signal();
+ };
+
+ context->client->async_set(hash_key,
+ sort_key,
+ value,
+ std::move(callback),
+ context->timeout_ms,
+ ttl_seconds);
+ } else {
+ context->data_count++;
+ if (context->multi_kvs.find(hash_key) ==
context->multi_kvs.end()) {
+ context->multi_kvs.emplace(hash_key,
+
std::map<std::string, std::string>());
+ }
+ if (context->multi_ttl_seconds < ttl_seconds ||
ttl_seconds == 0) {
+ context->multi_ttl_seconds = ttl_seconds;
+ }
+
context->multi_kvs[hash_key].emplace(std::move(sort_key),
+
std::move(value));
- if (context->data_count >= context->max_batch_count) {
- batch_execute_multi_set(context);
+ if (context->data_count >=
context->max_batch_count) {
+ batch_execute_multi_set(context);
+ }
}
}
}
diff --git a/src/test/function_test/run.sh b/src/test/function_test/run.sh
index fd5aca27f..c4a9b1ba3 100755
--- a/src/test/function_test/run.sh
+++ b/src/test/function_test/run.sh
@@ -68,6 +68,8 @@ GTEST_OUTPUT="xml:$REPORT_DIR/recall.xml"
GTEST_FILTER="drop_and_recall.*" ./$te
exit_if_fail $? "run test recall failed: $test_case $config_file $table_name"
GTEST_OUTPUT="xml:$REPORT_DIR/batch_get.xml" GTEST_FILTER="batch_get.*"
./$test_case $config_file $table_name
exit_if_fail $? "run test batch_get failed: $test_case $config_file
$table_name"
+GTEST_OUTPUT="xml:$REPORT_DIR/copy_data.xml" GTEST_FILTER="copy_data_test.*"
./$test_case $config_file $table_name
+exit_if_fail $? "run test copy_data_test failed: $test_case $config_file
$table_name"
if [ $on_travis == "NO" ]; then
GTEST_OUTPUT="xml:$REPORT_DIR/restore.xml" GTEST_FILTER="restore_test.*"
./$test_case $config_file $table_name
exit_if_fail $? "run test restore_test failed: $test_case $config_file
$table_name"
diff --git a/src/test/function_test/test_copy.cpp
b/src/test/function_test/test_copy.cpp
new file mode 100644
index 000000000..4aae1bcb0
--- /dev/null
+++ b/src/test/function_test/test_copy.cpp
@@ -0,0 +1,231 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+#include <cstdlib>
+#include <string>
+#include <vector>
+#include <map>
+
+#include <dsn/dist/fmt_logging.h>
+#include <dsn/dist/replication/replication_ddl_client.h>
+#include <dsn/service_api_c.h>
+#include <gtest/gtest.h>
+#include <pegasus/client.h>
+#include <unistd.h>
+
+#include "base/pegasus_const.h"
+#include "base/pegasus_utils.h"
+#include "global_env.h"
+#include "shell/commands.h"
+#include "utils.h"
+
+using namespace ::pegasus;
+using std::map;
+using std::string;
+using std::vector;
+
+extern std::shared_ptr<dsn::replication::replication_ddl_client> ddl_client;
+static const char CCH[] =
"_0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ";
+static const int max_batch_count = 500;
+static const int timeout_ms = 5000;
+static const int max_multi_set_concurrency = 20;
+static const int default_partitions = 4;
+static const string empty_hash_key = "";
+static const string srouce_app_name = "copy_data_source_table";
+static const string destination_app_name = "copy_data_destination_table";
+static char buffer[256];
+static map<string, map<string, string>> base_data;
+static pegasus_client *srouce_client;
+static pegasus_client *destination_client;
+
+static void verify_data()
+{
+ pegasus_client::scan_options options;
+ vector<pegasus_client::pegasus_scanner *> scanners;
+ int ret = destination_client->get_unordered_scanners(INT_MAX, options,
scanners);
+ ASSERT_EQ(0, ret) << "Error occurred when getting scanner. error="
+ << destination_client->get_error_string(ret);
+
+ string hash_key;
+ string sort_key;
+ string value;
+ map<string, map<string, string>> data;
+ for (auto scanner : scanners) {
+ ASSERT_NE(nullptr, scanner);
+ while (PERR_OK == (ret = (scanner->next(hash_key, sort_key, value)))) {
+ check_and_put(data, hash_key, sort_key, value);
+ }
+ ASSERT_EQ(PERR_SCAN_COMPLETE, ret) << "Error occurred when scan.
error="
+ <<
destination_client->get_error_string(ret);
+ delete scanner;
+ }
+
+ compare(data, base_data);
+
+ ddebug_f("Data and base_data are the same.");
+}
+
+static void create_table_and_get_client()
+{
+ dsn::error_code err;
+ err = ddl_client->create_app(srouce_app_name, "pegasus",
default_partitions, 3, {}, false);
+ ASSERT_EQ(dsn::ERR_OK, err);
+
+ err = ddl_client->create_app(destination_app_name, "pegasus",
default_partitions, 3, {}, false);
+ ASSERT_EQ(dsn::ERR_OK, err);
+
+ srouce_client = pegasus_client_factory::get_client("mycluster",
srouce_app_name.c_str());
+ destination_client =
+ pegasus_client_factory::get_client("mycluster",
destination_app_name.c_str());
+}
+
+// REQUIRED: 'buffer' has been filled with random chars.
+static const string random_string()
+{
+ int pos = random() % sizeof(buffer);
+ buffer[pos] = CCH[random() % sizeof(CCH)];
+ unsigned int length = random() % sizeof(buffer) + 1;
+ if (pos + length < sizeof(buffer)) {
+ return string(buffer + pos, length);
+ } else {
+ return string(buffer + pos, sizeof(buffer) - pos) +
+ string(buffer, length + pos - sizeof(buffer));
+ }
+}
+
+static void fill_data()
+{
+ ddebug_f("FILLING_DATA...");
+
+ srandom((unsigned int)time(nullptr));
+ for (auto &c : buffer) {
+ c = CCH[random() % sizeof(CCH)];
+ }
+
+ string hash_key;
+ string sort_key;
+ string value;
+ while (base_data[empty_hash_key].size() < 1000) {
+ sort_key = random_string();
+ value = random_string();
+ int ret = srouce_client->set(empty_hash_key, sort_key, value);
+ ASSERT_EQ(PERR_OK, ret) << "Error occurred when set, hash_key=" <<
hash_key
+ << ", sort_key=" << sort_key
+ << ", error=" <<
srouce_client->get_error_string(ret);
+ base_data[empty_hash_key][sort_key] = value;
+ }
+
+ while (base_data.size() < 500) {
+ hash_key = random_string();
+ while (base_data[hash_key].size() < 10) {
+ sort_key = random_string();
+ value = random_string();
+ int ret = srouce_client->set(hash_key, sort_key, value);
+ ASSERT_EQ(PERR_OK, ret) << "Error occurred when set, hash_key=" <<
hash_key
+ << ", sort_key=" << sort_key
+ << ", error=" <<
srouce_client->get_error_string(ret);
+ base_data[hash_key][sort_key] = value;
+ }
+ }
+
+ ddebug_f("Data filled.");
+}
+
+class copy_data_test : public testing::Test
+{
+public:
+ static void SetUpTestCase()
+ {
+ ddebug_f("SetUp...");
+ create_table_and_get_client();
+ fill_data();
+ }
+
+ static void TearDownTestCase()
+ {
+ ddebug_f("TearDown...");
+ chdir(global_env::instance()._pegasus_root.c_str());
+ system("./run.sh clear_onebox");
+ system("./run.sh start_onebox -w");
+ chdir(global_env::instance()._working_dir.c_str());
+ }
+};
+
+TEST_F(copy_data_test, EMPTY_HASH_KEY_COPY)
+{
+ ddebug_f("TESTING_COPY_DATA, EMPTY HASH_KEY COPY ....");
+
+ pegasus_client::scan_options options;
+ options.return_expire_ts = true;
+ vector<pegasus::pegasus_client::pegasus_scanner *> raw_scanners;
+ int ret = srouce_client->get_unordered_scanners(INT_MAX, options,
raw_scanners);
+ ASSERT_EQ(pegasus::PERR_OK, ret) << "Error occurred when getting scanner.
error="
+ << srouce_client->get_error_string(ret);
+
+ ddebug_f("open source app scanner succeed, partition_count = {}",
raw_scanners.size());
+
+ vector<pegasus::pegasus_client::pegasus_scanner_wrapper> scanners;
+ for (auto raw_scanner : raw_scanners) {
+ ASSERT_NE(nullptr, raw_scanner);
+ scanners.push_back(raw_scanner->get_smart_wrapper());
+ }
+ raw_scanners.clear();
+
+ int split_count = scanners.size();
+ ddebug_f("prepare scanners succeed, split_count = {}", split_count);
+
+ std::atomic_bool error_occurred(false);
+ vector<std::unique_ptr<scan_data_context>> contexts;
+
+ for (int i = 0; i < split_count; i++) {
+ scan_data_context *context = new scan_data_context(SCAN_AND_MULTI_SET,
+ i,
+ max_batch_count,
+ timeout_ms,
+ scanners[i],
+ destination_client,
+ nullptr,
+ &error_occurred,
+
max_multi_set_concurrency);
+ contexts.emplace_back(context);
+ dsn::tasking::enqueue(LPC_SCAN_DATA, nullptr,
std::bind(scan_multi_data_next, context));
+ }
+
+    // wait (up to 120 seconds) for every split's scan task to mark itself completed
+ int sleep_seconds = 0;
+ while (sleep_seconds < 120) {
+ std::this_thread::sleep_for(std::chrono::seconds(1));
+ sleep_seconds++;
+ int completed_split_count = 0;
+ for (int i = 0; i < split_count; i++) {
+ if (contexts[i]->split_completed.load()) {
+ completed_split_count++;
+ }
+ }
+ if (completed_split_count == split_count) {
+ break;
+ }
+ }
+
+ ASSERT_FALSE(error_occurred.load()) << "error occurred, processing
terminated or timeout!";
+
+ verify_data();
+
+ ddebug_f("finished copy data test..");
+}
diff --git a/src/test/function_test/test_scan.cpp
b/src/test/function_test/test_scan.cpp
index c34cdc912..853a098c5 100644
--- a/src/test/function_test/test_scan.cpp
+++ b/src/test/function_test/test_scan.cpp
@@ -29,6 +29,7 @@
#include <gtest/gtest.h>
#include "base/pegasus_const.h"
#include "base/pegasus_utils.h"
+#include "utils.h"
using namespace ::pegasus;
@@ -54,131 +55,6 @@ static const std::string random_string()
std::string(buffer, length + pos - sizeof(buffer));
}
}
-
-static void
-check_and_put(std::map<std::string, std::map<std::string,
std::pair<std::string, uint32_t>>> &data,
- const std::string &hash_key,
- const std::string &sort_key,
- const std::string &value,
- uint32_t expire_ts_seconds)
-{
- auto it1 = data.find(hash_key);
- if (it1 != data.end()) {
- auto it2 = it1->second.find(sort_key);
- ASSERT_EQ(it1->second.end(), it2)
- << "Duplicate: hash_key=" << hash_key << ", sort_key=" << sort_key
- << ", old_value=" << it2->second.first << ", new_value=" << value
- << ", old_expire_ts_seconds=" << it2->second.second
- << ", new_expire_ts_seconds=" << expire_ts_seconds;
- }
- data[hash_key][sort_key] = std::pair<std::string, uint32_t>(value,
expire_ts_seconds);
-}
-
-static void check_and_put(std::map<std::string, std::map<std::string,
std::string>> &data,
- const std::string &hash_key,
- const std::string &sort_key,
- const std::string &value)
-{
- auto it1 = data.find(hash_key);
- if (it1 != data.end()) {
- auto it2 = it1->second.find(sort_key);
- ASSERT_EQ(it1->second.end(), it2)
- << "Duplicate: hash_key=" << hash_key << ", sort_key=" << sort_key
- << ", old_value=" << it2->second << ", new_value=" << value;
- }
- data[hash_key][sort_key] = value;
-}
-
-static void check_and_put(std::map<std::string, std::string> &data,
- const std::string &hash_key,
- const std::string &sort_key,
- const std::string &value)
-{
- auto it1 = data.find(sort_key);
- ASSERT_EQ(data.end(), it1) << "Duplicate: hash_key=" << hash_key << ",
sort_key=" << sort_key
- << ", old_value=" << it1->second << ",
new_value=" << value;
- data[sort_key] = value;
-}
-
-static void compare(const std::pair<std::string, uint32_t> &data,
- const std::pair<std::string, uint32_t> &base,
- const std::string &hash_key,
- const std::string sort_key)
-{
- ASSERT_EQ(base.first, data.first)
- << "Diff value: hash_key=" << hash_key << ", sort_key=" << sort_key
- << ", data_value=" << data.first << ", data_expire_ts_seconds=" <<
data.second
- << ", base_value=" << base.first << ", base_expire_ts_seconds=" <<
base.second;
-
- ASSERT_TRUE(data.second >= base.second && data.second - base.second <= 1)
- << "Diff expire_ts_seconds: hash_key=" << hash_key << ", sort_key=" <<
sort_key
- << ", data_value=" << data.first << ", data_expire_ts_seconds=" <<
data.second
- << ", base_value=" << base.first << ", base_expire_ts_seconds=" <<
base.second;
-}
-
-static void compare(const std::map<std::string, std::pair<std::string,
uint32_t>> &data,
- const std::map<std::string, std::pair<std::string,
uint32_t>> &base,
- const std::string &hash_key)
-{
- for (auto it1 = data.begin(), it2 = base.begin();; ++it1, ++it2) {
- if (it1 == data.end()) {
- ASSERT_EQ(base.end(), it2)
- << "Only in base: hash_key=" << hash_key << ", sort_key=" <<
it2->first
- << ", value=" << it2->second.first << ", expire_ts_seconds="
<< it2->second.second;
- break;
- }
- ASSERT_NE(base.end(), it2) << "Only in data: hash_key=" << hash_key
- << ", sort_key=" << it1->first << ",
value=" << it1->second.first
- << ", expire_ts_seconds=" <<
it1->second.second;
- ASSERT_EQ(it2->first, it1->first)
- << "Diff sort_key: hash_key=" << hash_key << ", data_sort_key=" <<
it1->first
- << ", data_value=" << it1->second.first
- << ", data_expire_ts_seconds=" << it1->second.second << ",
base_sort_key=" << it2->first
- << ", base_value=" << it2->second.first
- << ", base_expire_ts_seconds=" << it2->second.second;
- compare(it1->second, it2->second, hash_key, it1->first);
- }
-
- dinfo("Data and base are the same.");
-}
-
-static void compare(const std::map<std::string, std::string> &data,
- const std::map<std::string, std::string> &base,
- const std::string &hash_key)
-{
- for (auto it1 = data.begin(), it2 = base.begin();; ++it1, ++it2) {
- if (it1 == data.end()) {
- ASSERT_EQ(base.end(), it2) << "Only in base: hash_key=" << hash_key
- << ", sort_key=" << it2->first << ",
value=" << it2->second;
- break;
- }
- ASSERT_NE(base.end(), it2) << "Only in data: hash_key=" << hash_key
- << ", sort_key=" << it1->first << ",
value=" << it1->second;
- ASSERT_EQ(*it2, *it1) << "Diff: hash_key=" << hash_key << ",
data_sort_key=" << it1->first
- << ", data_value=" << it1->second << ",
base_sort_key=" << it2->first
- << ", base_value=" << it2->second;
- }
-
- dinfo("Data and base are the same.");
-}
-
-template <typename T, typename U>
-static void compare(const T &data, const U &base)
-{
- for (auto it1 = data.begin(), it2 = base.begin();; ++it1, ++it2) {
- if (it1 == data.end()) {
- ASSERT_EQ(base.end(), it2) << "Only in base: hash_key=" <<
it2->first;
- break;
- }
- ASSERT_NE(base.end(), it2) << "Only in data: hash_key=" << it1->first;
- ASSERT_EQ(it1->first, it2->first) << "Diff: data_hash_key=" <<
it1->first
- << ", base_hash_key=" << it2->first;
- compare(it1->second, it2->second, it1->first);
- }
-
- dinfo("Data and base are the same.");
-}
-
static void clear_database()
{
ddebug("CLEARING_DATABASE...");
diff --git a/src/test/function_test/utils.h b/src/test/function_test/utils.h
index c08e130c6..02d47a56d 100644
--- a/src/test/function_test/utils.h
+++ b/src/test/function_test/utils.h
@@ -80,3 +80,127 @@ generate_sortkey_value_map(const std::vector<std::string>
sortkeys,
}
return result;
}
+
+inline void
+check_and_put(std::map<std::string, std::map<std::string,
std::pair<std::string, uint32_t>>> &data,
+ const std::string &hash_key,
+ const std::string &sort_key,
+ const std::string &value,
+ uint32_t expire_ts_seconds)
+{
+ auto it1 = data.find(hash_key);
+ if (it1 != data.end()) {
+ auto it2 = it1->second.find(sort_key);
+ ASSERT_EQ(it1->second.end(), it2)
+ << "Duplicate: hash_key=" << hash_key << ", sort_key=" << sort_key
+ << ", old_value=" << it2->second.first << ", new_value=" << value
+ << ", old_expire_ts_seconds=" << it2->second.second
+ << ", new_expire_ts_seconds=" << expire_ts_seconds;
+ }
+ data[hash_key][sort_key] = std::pair<std::string, uint32_t>(value,
expire_ts_seconds);
+}
+
+inline void check_and_put(std::map<std::string, std::map<std::string,
std::string>> &data,
+ const std::string &hash_key,
+ const std::string &sort_key,
+ const std::string &value)
+{
+ auto it1 = data.find(hash_key);
+ if (it1 != data.end()) {
+ auto it2 = it1->second.find(sort_key);
+ ASSERT_EQ(it1->second.end(), it2)
+ << "Duplicate: hash_key=" << hash_key << ", sort_key=" << sort_key
+ << ", old_value=" << it2->second << ", new_value=" << value;
+ }
+ data[hash_key][sort_key] = value;
+}
+
+inline void check_and_put(std::map<std::string, std::string> &data,
+ const std::string &hash_key,
+ const std::string &sort_key,
+ const std::string &value)
+{
+ auto it1 = data.find(sort_key);
+ ASSERT_EQ(data.end(), it1) << "Duplicate: hash_key=" << hash_key << ",
sort_key=" << sort_key
+ << ", old_value=" << it1->second << ",
new_value=" << value;
+ data[sort_key] = value;
+}
+
+inline void compare(const std::pair<std::string, uint32_t> &data,
+ const std::pair<std::string, uint32_t> &base,
+ const std::string &hash_key,
+ const std::string sort_key)
+{
+ ASSERT_EQ(base.first, data.first)
+ << "Diff value: hash_key=" << hash_key << ", sort_key=" << sort_key
+ << ", data_value=" << data.first << ", data_expire_ts_seconds=" <<
data.second
+ << ", base_value=" << base.first << ", base_expire_ts_seconds=" <<
base.second;
+
+ ASSERT_TRUE(data.second >= base.second && data.second - base.second <= 1)
+ << "Diff expire_ts_seconds: hash_key=" << hash_key << ", sort_key=" <<
sort_key
+ << ", data_value=" << data.first << ", data_expire_ts_seconds=" <<
data.second
+ << ", base_value=" << base.first << ", base_expire_ts_seconds=" <<
base.second;
+}
+
+inline void compare(const std::map<std::string, std::pair<std::string,
uint32_t>> &data,
+ const std::map<std::string, std::pair<std::string,
uint32_t>> &base,
+ const std::string &hash_key)
+{
+ for (auto it1 = data.begin(), it2 = base.begin();; ++it1, ++it2) {
+ if (it1 == data.end()) {
+ ASSERT_EQ(base.end(), it2)
+ << "Only in base: hash_key=" << hash_key << ", sort_key=" <<
it2->first
+ << ", value=" << it2->second.first << ", expire_ts_seconds="
<< it2->second.second;
+ break;
+ }
+ ASSERT_NE(base.end(), it2) << "Only in data: hash_key=" << hash_key
+ << ", sort_key=" << it1->first << ",
value=" << it1->second.first
+ << ", expire_ts_seconds=" <<
it1->second.second;
+ ASSERT_EQ(it2->first, it1->first)
+ << "Diff sort_key: hash_key=" << hash_key << ", data_sort_key=" <<
it1->first
+ << ", data_value=" << it1->second.first
+ << ", data_expire_ts_seconds=" << it1->second.second << ",
base_sort_key=" << it2->first
+ << ", base_value=" << it2->second.first
+ << ", base_expire_ts_seconds=" << it2->second.second;
+ compare(it1->second, it2->second, hash_key, it1->first);
+ }
+
+ dinfo("Data and base are the same.");
+}
+
+inline void compare(const std::map<std::string, std::string> &data,
+ const std::map<std::string, std::string> &base,
+ const std::string &hash_key)
+{
+ for (auto it1 = data.begin(), it2 = base.begin();; ++it1, ++it2) {
+ if (it1 == data.end()) {
+ ASSERT_EQ(base.end(), it2) << "Only in base: hash_key=" << hash_key
+ << ", sort_key=" << it2->first << ",
value=" << it2->second;
+ break;
+ }
+ ASSERT_NE(base.end(), it2) << "Only in data: hash_key=" << hash_key
+ << ", sort_key=" << it1->first << ",
value=" << it1->second;
+ ASSERT_EQ(*it2, *it1) << "Diff: hash_key=" << hash_key << ",
data_sort_key=" << it1->first
+ << ", data_value=" << it1->second << ",
base_sort_key=" << it2->first
+ << ", base_value=" << it2->second;
+ }
+
+ dinfo("Data and base are the same.");
+}
+
+template <typename T, typename U>
+inline void compare(const T &data, const U &base)
+{
+ for (auto it1 = data.begin(), it2 = base.begin();; ++it1, ++it2) {
+ if (it1 == data.end()) {
+ ASSERT_EQ(base.end(), it2) << "Only in base: hash_key=" <<
it2->first;
+ break;
+ }
+ ASSERT_NE(base.end(), it2) << "Only in data: hash_key=" << it1->first;
+ ASSERT_EQ(it1->first, it2->first) << "Diff: data_hash_key=" <<
it1->first
+ << ", base_hash_key=" << it2->first;
+ compare(it1->second, it2->second, it1->first);
+ }
+
+ dinfo("Data and base are the same.");
+}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]