Browse Source

Added tests and an example.

git-svn-id: svn://bind10.isc.org/svn/bind10/branches/trac241@2490 e5f2f494-b856-4b98-b285-d166d9295462
JINMEI Tatuya 15 years ago
parent
commit
f358c98613

+ 3 - 0
configure.ac

@@ -405,6 +405,9 @@ AC_CONFIG_FILES([Makefile
                  src/bin/xfrout/tests/Makefile
                  src/bin/usermgr/Makefile
                  src/lib/Makefile
+                 src/lib/bench/Makefile
+                 src/lib/bench/example/Makefile
+                 src/lib/bench/tests/Makefile
                  src/lib/cc/Makefile
                  src/lib/python/Makefile
                  src/lib/python/isc/Makefile

+ 1 - 1
src/lib/Makefile.am

@@ -1,4 +1,4 @@
-SUBDIRS = exceptions dns cc config datasrc python
+SUBDIRS = exceptions dns cc config datasrc bench python
 if HAVE_BOOST_PYTHON
 SUBDIRS += xfr
 endif

+ 5 - 0
src/lib/bench/Makefile.am

@@ -1,4 +1,9 @@
+SUBDIRS = . tests example
+
 AM_CPPFLAGS = -I$(top_srcdir)/src/lib -I$(top_builddir)/src/lib
+AM_CXXFLAGS = $(B10_CXXFLAGS)
+
+CLEANFILES = *.gcno *.gcda
 
 lib_LTLIBRARIES = libbench.la
 libbench_la_SOURCES = benchmark_util.h benchmark_util.cc

+ 26 - 6
src/lib/bench/benchmark.h

@@ -28,8 +28,20 @@ namespace bench {
 template <typename T>
 class BenchMark {
 public:
-    BenchMark(const int iterations, T& target) :
-        iterations_(iterations), sub_iterations_(0), target_(target) {}
+    BenchMark(const int iterations, T target) :
+        iterations_(iterations), sub_iterations_(0), target_(target)
+    {
+        run();
+        printResult();
+    }
+    BenchMark(const int iterations, T& target, const bool immediate = true) :
+        iterations_(iterations), sub_iterations_(0), target_(target)
+    {
+        if (immediate) {
+            run();
+            printResult();
+        }
+    }
     void setUp() {}
     void tearDown() {}
     void run() {
@@ -43,7 +55,6 @@ public:
         gettimeofday(&end, NULL);
         tv_diff_ = tv_subtract(end, beg);
 
-        printResult();
         tearDown();
     }
     void printResult() const {
@@ -60,15 +71,24 @@ public:
                 static_cast<double>(tv_diff_.tv_usec) / ONE_MILLION);
     }
     double getAverageTime() const {
+        if (sub_iterations_ == 0) {
+            return (TIME_FAILURE);
+        }
         return ((tv_diff_.tv_sec +
                  static_cast<double>(tv_diff_.tv_usec) / ONE_MILLION ) /
                 sub_iterations_);
     }
     double getIterationPerSecond() const {
-        return (sub_iterations_ /
-                (tv_diff_.tv_sec +
-                 static_cast<double>(tv_diff_.tv_usec) / ONE_MILLION));
+        const double duration_sec = tv_diff_.tv_sec +
+            static_cast<double>(tv_diff_.tv_usec) / ONE_MILLION;
+        if (duration_sec == 0) {
+            return (ITERATION_FAILURE);
+        }
+        return (sub_iterations_ / duration_sec);
     }
+public:
+    static const int TIME_FAILURE = -1;
+    static const int ITERATION_FAILURE = -1;
 private:
     // return t1 - t2
     struct timeval tv_subtract(const struct timeval& t1,

+ 24 - 13
src/lib/bench/benchmark_util.cc

@@ -40,25 +40,32 @@ namespace isc {
 namespace bench {
 void
 loadQueryData(const char* const input_file, BenchQueries& queries,
-              const RRClass& qclass)
+              const RRClass& qclass, const bool strict)
 {
     ifstream ifs;
 
     ifs.open(input_file, ios_base::in);
     if ((ifs.rdstate() & istream::failbit) != 0) {
-        isc_throw(Exception, "failed to query data file: " +
+        isc_throw(BenchMarkError, "failed to load query data file: " +
                   string(input_file));
     }
+    loadQueryData(ifs, queries, qclass, strict);
+    ifs.close();
+}
 
+void
+loadQueryData(istream& input, BenchQueries& queries, const RRClass& qclass,
+              const bool strict)
+{
     string line;
     unsigned int linenum = 0;
     Message query_message(Message::RENDER);
     OutputBuffer buffer(128); // this should be sufficiently large
     MessageRenderer renderer(buffer);
-    while (getline(ifs, line), !ifs.eof()) {
+    while (getline(input, line), !input.eof()) {
         ++linenum;
-        if (ifs.bad() || ifs.fail()) {
-            isc_throw(Exception,
+        if (input.bad() || input.fail()) {
+            isc_throw(BenchMarkError,
                       "Unexpected line in query data file around line " <<
                       linenum);
         }
@@ -70,22 +77,23 @@ loadQueryData(const char* const input_file, BenchQueries& queries,
         string qname_string, qtype_string;
         iss >> qname_string >> qtype_string;
         if (iss.bad() || iss.fail()) {
-            cerr << "unexpected input around line " << linenum << " (ignored)"
-                 << endl;
+            if (strict) {
+                isc_throw(BenchMarkError,
+                          "load query: unexpected input around line " <<
+                          linenum);
+            }
             continue;
         }
 
         // We expect broken lines of data, which will be ignored with a
         // warning message.
         try {
-            Name qname(qname_string);
-            RRType qtype(qtype_string);
-
             query_message.clear(Message::RENDER);
             query_message.setQid(0);
             query_message.setOpcode(Opcode::QUERY());
             query_message.setRcode(Rcode::NOERROR());
-            query_message.addQuestion(Question(qname, qclass, qtype));
+            query_message.addQuestion(Question(Name(qname_string), qclass,
+                                               RRType(qtype_string)));
 
             renderer.clear();
             query_message.toWire(renderer);
@@ -95,8 +103,11 @@ loadQueryData(const char* const input_file, BenchQueries& queries,
                 buffer.getLength());
             queries.push_back(query_data);
         } catch (const Exception& error) {
-            cerr << "failed to parse/create query around line " << linenum <<
-                ": " << error.what() << " (ignored)" << endl;
+            if (strict) {
+                isc_throw(BenchMarkError,
+                          "failed to parse/create query around line " <<
+                          linenum);
+            }
             continue;
         }
     }

+ 16 - 1
src/lib/bench/benchmark_util.h

@@ -17,16 +17,31 @@
 #ifndef __BENCHMARK_UTIL_H
 #define __BENCHMARK_UTIL_H 1
 
+#include <istream>
 #include <vector>
 
+#include <exceptions/exceptions.h>
+
 namespace isc {
 namespace dns {
 class RRClass;
 }
+
 namespace bench {
+class BenchMarkError : public Exception {
+public:
+    BenchMarkError(const char* file, size_t line, const char* what) :
+        isc::Exception(file, line, what) {}
+};
+
 typedef std::vector<std::vector<unsigned char> > BenchQueries; 
+
+/// Exception guarantee: these functions offer only the basic guarantee;
+/// on exception, `queries` may be left with partially loaded entries.
 void loadQueryData(const char* input_file, BenchQueries& queries,
-                   const isc::dns::RRClass& qclass);
+                   const isc::dns::RRClass& qclass, const bool strict = false);
+void loadQueryData(std::istream& input, BenchQueries& queries,
+                   const isc::dns::RRClass& qclass, const bool strict = false);
 }
 }
 #endif  // __BENCHMARK_UTIL_H

+ 9 - 0
src/lib/bench/example/Makefile.am

@@ -0,0 +1,9 @@
+AM_CPPFLAGS = -I$(top_srcdir)/src/lib -I$(top_builddir)/src/lib
+
+CLEANFILES = *.gcno *.gcda
+
+noinst_PROGRAMS = search_bench
+search_bench_SOURCES = search_bench.cc
+
+search_bench_LDADD = $(top_builddir)/src/lib/exceptions/libexceptions.la
+search_bench_LDADD += $(top_builddir)/src/lib/bench/libbench.la

+ 144 - 0
src/lib/bench/example/search_bench.cc

@@ -0,0 +1,144 @@
+// Copyright (C) 2010  Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+// $Id$
+
+#include <unistd.h>             // for getpid
+
+#include <cstdlib>              // for rand
+#include <algorithm>
+#include <iostream>
+#include <vector>
+#include <set>
+
+#include <exceptions/exceptions.h>
+
+#include <bench/benchmark.h>
+
+using namespace std;
+using namespace isc::bench;
+
+namespace {
+template <bool Sorted>
+class VectorSearchBenchMark {
+public:
+    VectorSearchBenchMark(const vector<int>& data,
+                          const vector<int>& keys) :
+        data_(data), keys_(keys)
+    {}
+    unsigned int run() {
+        vector<int>::const_iterator iter;
+        vector<int>::const_iterator end_key = keys_.end();
+        for (iter = keys_.begin(); iter != end_key; ++iter) {
+            if (Sorted) {
+                binary_search(data_.begin(), data_.end(), *iter);
+            } else {
+                find(data_.begin(), data_.end(), *iter);
+            }
+        }
+        return (keys_.size());
+    }
+private:
+    const vector<int>& data_;
+    const vector<int>& keys_;
+};
+
+class SetSearchBenchMark {
+public:
+    SetSearchBenchMark(const set<int>& data, const vector<int>& keys) :
+        data_(data), keys_(keys)
+    {}
+    unsigned int run() {
+        vector<int>::const_iterator iter;
+        vector<int>::const_iterator end_key = keys_.end();
+        for (iter = keys_.begin(); iter != end_key; ++iter) {
+            data_.find(*iter);
+        }        
+        return (keys_.size());
+    }
+public:   // make it visible to the BenchMark class
+    const set<int>& data_;
+private:
+    const vector<int>& keys_;
+};
+}
+
+namespace isc {
+namespace bench {
+template<>
+void
+BenchMark<SetSearchBenchMark>::setUp() {
+    cout << "Benchmark for searching std::set (size="
+         << target_.data_.size() << ")" << endl;    
+}
+}
+}
+
+namespace {
+const int DEFAULT_ITERATION = 100;
+const int DEFAULT_SIZE = 10000;
+
+void
+usage() {
+    cerr << "Usage: search_bench [-n iterations] [-s data_size]" << endl;
+    exit (1);
+}
+}
+
+int
+main(int argc, char* argv[]) {
+    int ch;
+    int iteration = DEFAULT_ITERATION;
+    int size = DEFAULT_SIZE;
+    while ((ch = getopt(argc, argv, "n:s:")) != -1) {
+        switch (ch) {
+        case 'n':
+            iteration = atoi(optarg);
+            break;
+        case 's':
+            size = atoi(optarg);
+            break;
+        case '?':
+        default:
+            usage();
+        }
+    }
+    argc -= optind;
+    argv += optind;
+    if (argc != 0) {
+        usage();
+    }
+
+    srand(getpid());
+    vector<int> data_vector;
+    set<int> data_set;
+    vector<int> keys;
+    for (int i = 0; i < size; ++i) {
+        data_vector.push_back(i);
+        data_set.insert(i);
+        keys.push_back(rand() % size);
+    }
+
+    cout << "Benchmark for linear search" << endl;
+    BenchMark<VectorSearchBenchMark<false> >(iteration,
+                                             VectorSearchBenchMark<false>(
+                                                 data_vector, keys));
+    cout << "Benchmark for binary search" << endl;
+    BenchMark<VectorSearchBenchMark<true> >(iteration,
+                                             VectorSearchBenchMark<true>(
+                                                 data_vector, keys));
+    BenchMark<SetSearchBenchMark>(iteration,
+                                  SetSearchBenchMark(data_set, keys));
+    return (0);
+}

+ 22 - 0
src/lib/bench/tests/Makefile.am

@@ -0,0 +1,22 @@
+AM_CPPFLAGS = -I$(top_builddir)/src/lib -I$(top_srcdir)/src/lib
+AM_CPPFLAGS += -DTEST_DATA_DIR=\"$(srcdir)/testdata\"
+AM_CXXFLAGS = $(B10_CXXFLAGS)
+
+CLEANFILES = *.gcno *.gcda
+
+TESTS =
+if HAVE_GTEST
+TESTS += run_unittests
+run_unittests_SOURCES = run_unittests.cc
+run_unittests_SOURCES += benchmark_unittest.cc
+run_unittests_SOURCES += loadquery_unittest.cc
+
+run_unittests_CPPFLAGS = $(AM_CPPFLAGS) $(GTEST_INCLUDES)
+run_unittests_LDFLAGS = $(AM_LDFLAGS) $(GTEST_LDFLAGS)
+run_unittests_LDADD = $(top_builddir)/src/lib/exceptions/libexceptions.la
+run_unittests_LDADD += $(top_builddir)/src/lib/dns/libdns.la
+run_unittests_LDADD += $(top_builddir)/src/lib/bench/libbench.la
+run_unittests_LDADD += $(GTEST_LDADD)
+endif
+
+noinst_PROGRAMS = $(TESTS)

+ 114 - 0
src/lib/bench/tests/benchmark_unittest.cc

@@ -0,0 +1,114 @@
+// Copyright (C) 2010  Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+// $Id$
+
+#include <unistd.h>             // for usleep
+
+#include <bench/benchmark.h>
+
+#include <gtest/gtest.h>
+
+using namespace std;
+using namespace isc::bench;
+
+namespace {
+// Our "benchmark" simply sleeps for a short period, and reports a faked
+// number of iterations.
+class TestBenchMark {
+public:
+    TestBenchMark(const int sub_iterations, const int sleep_time) :
+        sub_iterations_(sub_iterations), sleep_time_(sleep_time),
+        setup_completed_(false), teardown_completed_(false)
+    {}
+    unsigned int run() {
+        usleep(sleep_time_);
+        return (sub_iterations_);
+    }
+    const int sub_iterations_;
+    const int sleep_time_;
+    bool setup_completed_;
+    bool teardown_completed_;
+};
+}
+
+namespace isc {
+namespace bench {
+// Specializations that record that setUp()/tearDown() actually ran,
+// so the test below can verify the BenchMark hooks are invoked.
+template <>
+void
+BenchMark<TestBenchMark>::setUp() {
+    target_.setup_completed_ = true;
+}
+
+template <>
+void
+BenchMark<TestBenchMark>::tearDown() {
+    target_.teardown_completed_ = true;
+}
+}
+}
+
+namespace {
+TEST(BenchMarkTest, run) {
+    // use some uncommon iterations for testing purpose:
+    const int sub_iterations = 23;
+    const int sleep_time = 50000; // will sleep for 50ms
+    // we cannot expect particular accuracy on the measured duration, so
+    // we'll include some conservative margin (20%) and perform range
+    // comparison below.
+    const int duration_margin = 10000; // 10ms
+    const int ONE_MILLION = 1000000;
+
+    TestBenchMark test_bench(sub_iterations, sleep_time);
+    BenchMark<TestBenchMark> bench(1, test_bench, false);
+    // Check pre-test conditions.
+    EXPECT_FALSE(test_bench.setup_completed_);
+    EXPECT_FALSE(test_bench.teardown_completed_);
+
+    bench.run();
+
+    // Check if specialized setup and teardown were performed.
+    EXPECT_TRUE(test_bench.setup_completed_);
+    EXPECT_TRUE(test_bench.teardown_completed_);
+
+    // Check accuracy of the measured statistics.
+    EXPECT_EQ(sub_iterations, bench.getIteration());
+    EXPECT_LT(sleep_time - duration_margin, bench.getDuration() * ONE_MILLION);
+    EXPECT_GT(sleep_time + duration_margin, bench.getDuration() * ONE_MILLION);
+    EXPECT_LT((sleep_time - duration_margin) /
+              static_cast<double>(sub_iterations),
+              bench.getAverageTime() * ONE_MILLION);
+    EXPECT_GT((sleep_time + duration_margin) /
+              static_cast<double>(sub_iterations),
+              bench.getAverageTime() * ONE_MILLION);
+    EXPECT_LT(static_cast<double>(sub_iterations) /
+              (sleep_time + duration_margin),
+              bench.getIterationPerSecond() / ONE_MILLION);
+    EXPECT_GT(static_cast<double>(sub_iterations) /
+              (sleep_time - duration_margin),
+              bench.getIterationPerSecond() / ONE_MILLION);
+}
+
+TEST(BenchMarkTest, runWithNoIteration) {
+    // We lie about the number of iterations (0).  The result will be
+    // meaningless, but at least it shouldn't crash.
+    TestBenchMark test_bench(0, 0);
+    BenchMark<TestBenchMark> bench(1, test_bench, false);
+    bench.run();
+    EXPECT_EQ(0, bench.getIteration());
+    // Since the reported iteration is 0, naive calculation of the average
+    // time would cause a division by 0 failure.
+    EXPECT_EQ(bench.TIME_FAILURE, bench.getAverageTime());
+}
+}

+ 198 - 0
src/lib/bench/tests/loadquery_unittest.cc

@@ -0,0 +1,198 @@
+// Copyright (C) 2010  Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+// $Id$
+
+#include <algorithm>
+#include <utility>
+#include <vector>
+#include <sstream>
+
+#include <dns/buffer.h>
+#include <dns/message.h>
+#include <dns/name.h>
+#include <dns/rrclass.h>
+#include <dns/rrtype.h>
+
+#include <bench/benchmark_util.h>
+
+#include <gtest/gtest.h>
+
+using namespace std;
+using namespace isc::bench;
+using namespace isc::dns;
+
+namespace {
+typedef pair<string, string> QueryParam;
+
+class LoadQueryTest : public ::testing::Test {
+protected:
+    LoadQueryTest() : query_rrclass(RRClass::IN()) {
+        queries.push_back(QueryParam("www.example.org", "AAAA"));
+        queries.push_back(QueryParam("www.example.com", "A"));
+        queries.push_back(QueryParam("test.example", "NS"));
+    }
+    RRClass query_rrclass;
+    BenchQueries result_queries;
+    vector<QueryParam> queries;
+    stringstream query_stream;
+    static const char* const DATA_DIR;
+};
+
+const char* const LoadQueryTest::DATA_DIR = TEST_DATA_DIR;
+
+class QueryInserter {
+public:
+    QueryInserter(stringstream& stream) : stream_(stream) {}
+    void operator()(const QueryParam& query) {
+        stream_ << query.first << " " << query.second << endl;
+    }
+private:
+    stringstream& stream_;
+};
+
+// Functor applied to each rendered query: parses the wire data back into
+// a Message and verifies the header, and (when expected data is given)
+// that the question section matches the original input.
+class QueryChecker {
+public:
+    QueryChecker(const vector<QueryParam>* expected, const RRClass& rrclass) :
+        expected_(expected), rrclass_(rrclass)
+    {
+        if (expected != NULL) {
+            iter_ = expected_->begin();
+        }
+    }
+    void operator()(const vector<unsigned char>& actual_data) {
+        InputBuffer buffer(&actual_data[0], actual_data.size());
+        Message message(Message::PARSE);
+        message.fromWire(buffer);
+
+        // Check if the header part indicates an expected standard query.
+        EXPECT_EQ(0, message.getQid());
+        EXPECT_EQ(Opcode::QUERY(), message.getOpcode());
+        EXPECT_EQ(Rcode::NOERROR(), message.getRcode());
+        // (header flags and section counts are checked below)
+        EXPECT_FALSE(message.getHeaderFlag(MessageFlag::QR()));
+        EXPECT_FALSE(message.getHeaderFlag(MessageFlag::AA()));
+        EXPECT_EQ(1, message.getRRCount(Section::QUESTION()));
+        EXPECT_EQ(0, message.getRRCount(Section::ANSWER()));
+        EXPECT_EQ(0, message.getRRCount(Section::AUTHORITY()));
+        EXPECT_EQ(0, message.getRRCount(Section::ADDITIONAL()));
+
+        // Check if the question matches our original data, if the expected
+        // data is given.
+        if (expected_ != NULL) {
+            ConstQuestionPtr question = *message.beginQuestion();
+            EXPECT_EQ(Name((*iter_).first), question->getName());
+            EXPECT_EQ(RRType((*iter_).second), question->getType());
+            EXPECT_EQ(rrclass_, question->getClass());
+
+            ++iter_;
+        }
+    }
+private:
+    const vector<QueryParam>* expected_;
+    vector<QueryParam>::const_iterator iter_;
+    const RRClass rrclass_;
+};
+
+TEST_F(LoadQueryTest, load) {
+    for_each(queries.begin(), queries.end(), QueryInserter(query_stream));
+
+    loadQueryData(query_stream, result_queries, query_rrclass);
+
+    EXPECT_EQ(queries.size(), result_queries.size());
+    for_each(result_queries.begin(), result_queries.end(),
+             QueryChecker(&queries, query_rrclass));
+}
+
+TEST_F(LoadQueryTest, loadForCHClass) {
+    for_each(queries.begin(), queries.end(), QueryInserter(query_stream));
+    query_rrclass = RRClass::CH();
+
+    loadQueryData(query_stream, result_queries, query_rrclass);
+
+    EXPECT_EQ(queries.size(), result_queries.size());
+    for_each(result_queries.begin(), result_queries.end(),
+             QueryChecker(&queries, query_rrclass));
+}
+
+TEST_F(LoadQueryTest, loadWithComment) {
+    for_each(queries.begin(), queries.end(), QueryInserter(query_stream));
+    // add a comment line.  this shouldn't change the result.
+    query_stream << "# this is a comment" << endl;
+    query_stream << endl;       // empty line.  should be ignored, too.
+
+    loadQueryData(query_stream, result_queries, query_rrclass);
+    EXPECT_EQ(queries.size(), result_queries.size());
+    for_each(result_queries.begin(), result_queries.end(),
+             QueryChecker(&queries, query_rrclass));
+}
+
+TEST_F(LoadQueryTest, loadWithIncompleteData) {
+    for_each(queries.begin(), queries.end(), QueryInserter(query_stream));
+    // RRType is missing.  It should be ignored by default.
+    query_stream << "type-is-missing" << endl;
+
+    loadQueryData(query_stream, result_queries, query_rrclass);
+    EXPECT_EQ(queries.size(), result_queries.size());
+    for_each(result_queries.begin(), result_queries.end(),
+             QueryChecker(&queries, query_rrclass));
+}
+
+TEST_F(LoadQueryTest, loadWithIncompleteDataToBeRejected) {
+    for_each(queries.begin(), queries.end(), QueryInserter(query_stream));
+    // RRType is missing.  We're going to specify the "strict" check, so
+    // we should receive an exception.
+    query_stream << "type-is-missing" << endl;
+    EXPECT_THROW(loadQueryData(query_stream, result_queries, query_rrclass,
+                               true), BenchMarkError);
+}
+
+TEST_F(LoadQueryTest, loadWithBadData) {
+    for_each(queries.begin(), queries.end(), QueryInserter(query_stream));
+    // invalid RRType.  It should be ignored by default.
+    query_stream << "www.example.com NOSUCHRRTYPE" << endl;
+
+    loadQueryData(query_stream, result_queries, query_rrclass);
+    EXPECT_EQ(queries.size(), result_queries.size());
+    for_each(result_queries.begin(), result_queries.end(),
+             QueryChecker(&queries, query_rrclass));
+}
+
+TEST_F(LoadQueryTest, loadWithBadDataToBeRejected) {
+    for_each(queries.begin(), queries.end(), QueryInserter(query_stream));
+    // invalid RRType, which should trigger an exception.
+    query_stream << "www.example.com NOSUCHRRTYPE" << endl;
+    EXPECT_THROW(loadQueryData(query_stream, result_queries, query_rrclass,
+                               true), BenchMarkError);
+}
+
+TEST_F(LoadQueryTest, loadFromFile) {
+    const string data_file = string(DATA_DIR) + string("/query.txt");
+    loadQueryData(data_file.c_str(), result_queries, query_rrclass);
+    EXPECT_LT(0, result_queries.size());
+
+    // We are going to skip matching the query data; we only check the header.
+    // We could check the data, too, but to do so we need to populate the
+    // expected data from the file (or prepare a consistent copy locally).
+    // Since the implementation is shared with the stringstream case, the
+    // additional setup wouldn't be worthwhile.
+    for_each(result_queries.begin(), result_queries.end(),
+             QueryChecker(NULL, query_rrclass));
+}
+
+TEST_F(LoadQueryTest, loadFromFileNotExist) {
+    EXPECT_THROW(loadQueryData("notexistent/query.data", result_queries,
+                               query_rrclass), BenchMarkError);
+}
+}

+ 24 - 0
src/lib/bench/tests/run_unittests.cc

@@ -0,0 +1,24 @@
+// Copyright (C) 2010  Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+// $Id$
+
+#include <gtest/gtest.h>
+
+int
+main(int argc, char* argv[]) {
+    ::testing::InitGoogleTest(&argc, argv);
+
+    return (RUN_ALL_TESTS());
+}

+ 6 - 0
src/lib/bench/tests/testdata/query.txt

@@ -0,0 +1,6 @@
+# This is sample query data for benchmark.
+# The format is the same as BIND 9's queryperf.
+
+www.example.com TXT
+www.example.org SOA
+ftp.example.org RRSIG