
merged branches/trac241, benchmark framework.

git-svn-id: svn://bind10.isc.org/svn/bind10/trunk@2664 e5f2f494-b856-4b98-b285-d166d9295462
JINMEI Tatuya, 14 years ago
commit 9a979bd5d0

+ 3 - 0
configure.ac

@@ -413,6 +413,9 @@ AC_CONFIG_FILES([Makefile
                  src/bin/xfrout/tests/Makefile
                  src/bin/usermgr/Makefile
                  src/lib/Makefile
+                 src/lib/bench/Makefile
+                 src/lib/bench/example/Makefile
+                 src/lib/bench/tests/Makefile
                  src/lib/cc/Makefile
                  src/lib/python/Makefile
                  src/lib/python/isc/Makefile

+ 1 - 1
doc/Doxyfile

@@ -568,7 +568,7 @@ WARN_LOGFILE           =
 # directories like "/usr/src/myproject". Separate the files or directories
 # with spaces.
 
-INPUT                  = ../src/lib/cc ../src/lib/config ../src/lib/dns ../src/lib/exceptions ../src/lib/datasrc ../src/bin/auth
+INPUT                  = ../src/lib/cc ../src/lib/config ../src/lib/dns ../src/lib/exceptions ../src/lib/datasrc ../src/bin/auth ../src/lib/bench
 
 # This tag can be used to specify the character encoding of the source files
 # that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is

+ 1 - 1
src/lib/Makefile.am

@@ -1 +1 @@
-SUBDIRS = exceptions dns cc config datasrc python xfr
+SUBDIRS = exceptions dns cc config datasrc python xfr bench

+ 9 - 0
src/lib/bench/Makefile.am

@@ -0,0 +1,9 @@
+SUBDIRS = . tests example
+
+AM_CPPFLAGS = -I$(top_srcdir)/src/lib -I$(top_builddir)/src/lib
+AM_CXXFLAGS = $(B10_CXXFLAGS)
+
+CLEANFILES = *.gcno *.gcda
+
+lib_LTLIBRARIES = libbench.la
+libbench_la_SOURCES = benchmark_util.h benchmark_util.cc

+ 395 - 0
src/lib/bench/benchmark.h

@@ -0,0 +1,395 @@
+// Copyright (C) 2010  Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+// $Id$
+
+#ifndef __BENCHMARK_H
+#define __BENCHMARK_H 1
+
+#include <sys/time.h>
+
+#include <iostream>
+#include <ios>
+
+namespace isc {
+namespace bench {
+
+/// \brief Templated micro benchmark framework.
+///
+/// "Premature optimization is the root of all evil."
+/// But programmers are often tempted to focus on performance optimization
+/// too early.
+/// Likewise, it's not uncommon for an engineer to introduce a minor
+/// optimization backed by a lot of complicated code logic that actually
+/// improves performance only marginally.
+/// Making benchmarking easier helps avoid such common pitfalls.
+/// Of course, it also helps when we really do need to optimize, by
+/// identifying where the bottleneck is and showing how much a particular
+/// optimization improves performance.
+///
+/// The BenchMark class template provides a simple framework for so-called
+/// "stopwatch" micro benchmarking.
+/// It just encapsulates the common operations for this type of benchmark:
+/// repeat a specified operation a given number of times,
+/// record the start and end times of the operations,
+/// and provide major accumulated results such as the number of iterations
+/// per second.
+/// The main goal of this class is to help developers write benchmark test
+/// cases with fewer strokes of typing.
+///
+/// The constructors of the \c BenchMark class take the number of
+/// iterations (referred to as \c niter below)
+/// and an object (or reference to it) of the type of the template
+/// parameter, \c T.  Class \c T implements the benchmark target code via
+/// its \c run() method, whose signature is as follows:
+/// \code  unsigned int T::run(); \endcode
+///
+/// A BenchMark class object, via its own \c run() method, calls \c T::run()
+/// \c niter times.
+/// In the simplest form \c T::run() would perform a single operation to
+/// be benchmarked and returns 1.
+/// In some cases, however, the operation is very lightweight (e.g. performing
+/// a binary search on an integer vector of moderate length), and it may be
+/// desirable to have internal iterations within \c T::run() to avoid
+/// the overhead of function calls to \c T::run().
+/// In that case, \c T::run() would return the number of internal iterations
+/// instead of 1.
+///
+/// The \c BenchMark::run() method records some statistics %data on the
+/// benchmarking, including the start and end times and the total number of
+/// iterations (which is the sum of the return values of \c T::run(), and
+/// is equal to \c niter in the simplest case where \c T::run() always
+/// returns 1).
+/// This %data can be retrieved via other methods of \c BenchMark, but in
+/// the primary intended use case the \c BenchMark object calls its
+/// \c run() method at the end of its construction and prints summarized
+/// statistics to the standard output.
+/// This way, the developer only needs to write a single line of code for
+/// everything other than the benchmark target code (see the example below).
+///
+/// \b Example
+///
+/// Suppose that we want to measure performance of the search (find)
+/// operation on STL set objects.  We'd first define the implementation
+/// class (to be the template parameter of the \c BenchMark class) as follows:
+///
+/// \code class SetSearchBenchMark {
+/// public:
+///    SetSearchBenchMark(const set<int>& data, const vector<int>& keys) :
+///        data_(data), keys_(keys)
+///    {}
+///    unsigned int run() {
+///        vector<int>::const_iterator iter;
+///        vector<int>::const_iterator end_key = keys_.end();
+///        for (iter = keys_.begin(); iter != end_key; ++iter) {
+///            data_.find(*iter);
+///        }        
+///        return (keys_.size());
+///    }
+///    const set<int>& data_;
+///    const vector<int>& keys_;
+/// }; \endcode
+///
+/// In its constructor the \c SetSearchBenchMark class takes a set of
+/// integers (\c %data) and a vector of integers (\c keys).  \c %data is
+/// the STL set to be searched, and \c keys stores the search keys.
+/// The constructor keeps local references to these objects.
+///
+/// The \c SetSearchBenchMark::run() method, which is called via
+/// \c BenchMark::run(), iterates over the key vector, and performs the
+/// \c find() method of the set class for each key.
+/// (This is only for performance measurement, so the result is ignored).
+/// Note that this \c run() method has its own internal iterations.
+/// This is because each invocation of \c find() would be pretty lightweight,
+/// so the function call overhead could be relatively significant.
+/// Due to the internal iterations, this method returns the number of
+/// \c find() calls, which is equal to the size of the key vector.
+///
+/// Next, we should prepare test %data.  In this simple example, let's assume
+/// we use a fixed size: a set of 10,000 integers (from 0 to 9999), and use
+/// the same number of search keys randomly chosen from that range:
+/// \code
+///    set<int> data_set;
+///    vector<int> keys;
+///    for (int i = 0; i < 10000; ++i) {
+///        data_set.insert(i);
+///        keys.push_back(rand() % 10000);
+///    } \endcode
+///
+/// Then construct a \c BenchMark<SetSearchBenchMark> object with the
+/// test %data:
+/// \code
+///    BenchMark<SetSearchBenchMark>(100, SetSearchBenchMark(data_set, keys));
+/// \endcode
+/// Here we specify 100 for the number of iterations, which would cause
+/// 1 million search attempts in total.
+///
+/// That's it.  If we put these in a C++ source file with the usual
+/// supporting code (such as \c %main()), compile it, and run the
+/// executable, we'll see something like this:
+///
+/// \code Performed 1000000 iterations in 0.180172s (5550251.98ips) \endcode
+///
+/// It should be obvious what this means (ips stands for "iterations
+/// per second").
+///
+/// A complete example program of this measurement scenario (with some
+/// additional test cases and configurable parameters) can be found in
+/// example/search_bench.cc.
+///
+/// \b Customization
+///
+/// The above simple usage should be sufficient in many benchmark cases,
+/// but the \c BenchMark class provides some customization points via
+/// template specialization of some of its public methods.
+/// For example, assume you want to customize the output of benchmark result.
+/// It can be done by specializing \c BenchMark::printResult():
+/// \code namespace isc {
+/// namespace bench {
+/// template<>
+/// void
+/// BenchMark<SetSearchBenchMark>::printResult() const {
+///     cout << "Searched for " << target_.keys_.size() << " keys "
+///         << getIteration() << " times in " << getDuration() << "s" << endl;
+/// }
+/// }
+/// } \endcode
+///
+/// Then the result would be something like this:
+///
+/// \code Searched for 10000 keys 1000000 times in 0.21s \endcode
+///
+/// Note that the specialization must be defined in the same namespace as
+/// that of the \c BenchMark class, that is, \c isc::bench.
+/// It should also be noted that the corresponding \c SetSearchBenchMark
+/// object can be accessed (through its public interfaces) via the \c target_
+/// member variable of \c BenchMark.
+///
+/// <b>Future Plans and Compatibility Notes</b>
+///
+/// Currently, benchmark developers need to write supplemental code that is
+/// not directly related to benchmarks (such as \c %main()) by hand.
+/// It would be better if we could minimize such development overhead.
+/// In future versions we may provide a common \c %main() function and
+/// option parsers, thereby allowing the developer to only write the benchmark
+/// classes and invoke them, just as various unit test frameworks do.
+///
+/// If and when we implement it, some existing benchmark cases may need to be
+/// adjusted.
+template <typename T>
+class BenchMark {
+    ///
+    /// \name Constructors
+    ///
+    /// Note: The copy constructor and the assignment operator are
+    /// intentionally declared private, making this class non-copyable.
+    /// We use the default destructor.
+    //@{
+private:
+    BenchMark(const BenchMark& source);
+    BenchMark& operator=(const BenchMark& source);
+public:
+    /// \brief Constructor for immediate run.
+    ///
+    /// This is the constructor that is expected to be used normally.
+    /// It runs the benchmark within the constructor and prints the result,
+    /// thereby making it possible to do everything with a single line of
+    /// code (see the above example).
+    ///
+    /// \param iterations The number of iterations.  The \c run() method will
+    /// be called this number of times.
+    /// \param target The templated class object that
+    /// implements the code to be benchmarked.
+    BenchMark(const int iterations, T target) :
+        iterations_(iterations), sub_iterations_(0), target_(target)
+    {
+        initialize(true);
+    }
+
+    /// \brief Constructor for finer-grained control.
+    ///
+    /// This constructor takes the third parameter, \c immediate, to control
+    /// whether to run the benchmark within the constructor.
+    /// It also takes a reference to the templated class object rather than
+    /// an object copy, so if the copy overhead isn't acceptable this
+    /// constructor must be used.
+    ///
+    /// \param iterations The number of iterations.  The \c run() method will
+    /// be called this number of times.
+    /// \param target A reference to the templated class object that
+    /// implements the code to be benchmarked.
+    /// \param immediate If \c true the benchmark will be performed within
+    /// the constructor; otherwise it only does initialization.
+    BenchMark(const int iterations, T& target, const bool immediate) :
+        iterations_(iterations), sub_iterations_(0), target_(target)
+    {
+        initialize(immediate);
+    }
+    //@}
+
+    /// \brief Hook to be called before starting the benchmark.
+    ///
+    /// This method will be called from \c run() before starting the benchmark.
+    /// By default it's empty, but can be customized via template
+    /// specialization.
+    void setUp() {}
+
+    /// \brief Hook to be called after the benchmark.
+    ///
+    /// This method will be called from \c run() when the benchmark completes.
+    /// By default it's empty, but can be customized via template
+    /// specialization.
+    void tearDown() {}
+
+    /// \brief Perform benchmark.
+    ///
+    /// This method first calls \c setUp().
+    /// It then records the current time, calls \c T::run() for the number
+    /// of times specified on construction, and records the time on completion.
+    /// Finally, it calls \c tearDown().
+    void run() {
+        setUp();
+
+        struct timeval beg, end;
+        gettimeofday(&beg, NULL);
+        for (int i = 0; i < iterations_; ++i) {
+            sub_iterations_ += target_.run();
+        }
+        gettimeofday(&end, NULL);
+        tv_diff_ = tv_subtract(end, beg);
+
+        tearDown();
+    }
+
+    /// \brief Print the benchmark result.
+    ///
+    /// This method prints the benchmark result in a common style to the
+    /// standard output.  The result contains the total number of iterations,
+    /// the duration of the test, and the number of iterations per second
+    /// calculated from the previous two parameters.
+    ///
+    /// A call to this method is only meaningful after the completion of
+    /// \c run().  The behavior is undefined in other cases.
+    void printResult() const {
+        std::cout.precision(6);
+        std::cout << "Performed " << getIteration() << " iterations in "
+                  << std::fixed << getDuration() << "s";
+        std::cout.precision(2);
+        std::cout << " (" << std::fixed << getIterationPerSecond() << "ips)"
+                  << std::endl;
+    }
+
+    /// \brief Return the number of iterations.
+    ///
+    /// It returns the total number of iterations of the benchmark, which is
+    /// the sum of the return values of \c T::run() over all calls to it
+    /// (note that it may not be equal to the number of calls to \c T::run(),
+    /// which was specified on construction of this class).
+    ///
+    /// A call to this method is only meaningful after the completion of
+    /// \c run().  The behavior is undefined in other cases.
+    unsigned int getIteration() const { return (sub_iterations_); }
+
+    /// \brief Return the duration of benchmark in seconds.
+    ///
+    /// The highest possible precision of this value is microseconds.
+    ///
+    /// A call to this method is only meaningful after the completion of
+    /// \c run().  The behavior is undefined in other cases.
+    double getDuration() const {
+        return (tv_diff_.tv_sec +
+                static_cast<double>(tv_diff_.tv_usec) / ONE_MILLION);
+    }
+
+    /// \brief Return the average duration per iteration in seconds.
+    ///
+    /// The highest possible precision of this value is microseconds.
+    /// The iteration count is the sum of the return values of \c T::run()
+    /// over all calls to it (note that it may not be equal to the number of
+    /// calls to \c T::run()).
+    ///
+    /// If it cannot calculate the average, it returns \c TIME_FAILURE.
+    ///
+    /// A call to this method is only meaningful after the completion of
+    /// \c run().  The behavior is undefined in other cases.
+    double getAverageTime() const {
+        if (sub_iterations_ == 0) {
+            return (TIME_FAILURE);
+        }
+        return ((tv_diff_.tv_sec +
+                 static_cast<double>(tv_diff_.tv_usec) / ONE_MILLION ) /
+                sub_iterations_);
+    }
+
+    /// \brief Return the number of possible iterations per second based on
+    /// the benchmark result.
+    ///
+    /// If it cannot calculate that number (e.g. because the duration is
+    /// too small) it returns \c ITERATION_FAILURE.
+    /// A call to this method is only meaningful after the completion of
+    /// \c run().  The behavior is undefined in other cases.
+    double getIterationPerSecond() const {
+        const double duration_sec = tv_diff_.tv_sec +
+            static_cast<double>(tv_diff_.tv_usec) / ONE_MILLION;
+        if (duration_sec == 0) {
+            return (ITERATION_FAILURE);
+        }
+        return (sub_iterations_ / duration_sec);
+    }
+public:
+    /// \brief A constant that indicates a failure in \c getAverageTime().
+    static const double TIME_FAILURE = -1;
+
+    /// \brief A constant that indicates a failure in
+    /// \c getIterationPerSecond().
+    static const double ITERATION_FAILURE = -1;
+private:
+    void initialize(const bool immediate) {
+        if (immediate) {
+            run();
+            printResult();
+        }
+    }
+private:
+    // return t1 - t2
+    struct timeval tv_subtract(const struct timeval& t1,
+                               const struct timeval& t2)
+    {
+        struct timeval result;
+
+        result.tv_sec = t1.tv_sec - t2.tv_sec;
+        if (t1.tv_usec >= t2.tv_usec) {
+            result.tv_usec = t1.tv_usec - t2.tv_usec;
+        } else {
+            result.tv_usec = ONE_MILLION + t1.tv_usec - t2.tv_usec;
+            --result.tv_sec;
+        }
+
+        return (result);
+    }
+private:
+    static const int ONE_MILLION = 1000000;
+    const unsigned int iterations_;
+    unsigned int sub_iterations_;
+    T& target_;
+    struct timeval tv_diff_;
+};
+
+}
+}
+#endif  // __BENCHMARK_H
+
+// Local Variables: 
+// mode: c++
+// End: 

+ 116 - 0
src/lib/bench/benchmark_util.cc

@@ -0,0 +1,116 @@
+// Copyright (C) 2010  Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+// $Id$
+
+#include <fstream>
+#include <sstream>
+#include <string>
+#include <vector>
+
+#include <exceptions/exceptions.h>
+
+#include <dns/buffer.h>
+#include <dns/exceptions.h>
+#include <dns/name.h>
+#include <dns/message.h>
+#include <dns/messagerenderer.h>
+#include <dns/rrtype.h>
+#include <dns/rrclass.h>
+#include <dns/question.h>
+
+#include <bench/benchmark_util.h>
+
+using namespace std;
+using namespace isc;
+using namespace isc::dns;
+
+namespace isc {
+namespace bench {
+void
+loadQueryData(const char* const input_file, BenchQueries& queries,
+              const RRClass& qclass, const bool strict)
+{
+    ifstream ifs;
+
+    ifs.open(input_file, ios_base::in);
+    if ((ifs.rdstate() & istream::failbit) != 0) {
+        isc_throw(BenchMarkError, "failed to load query data file: " +
+                  string(input_file));
+    }
+    loadQueryData(ifs, queries, qclass, strict);
+    ifs.close();
+}
+
+void
+loadQueryData(istream& input, BenchQueries& queries, const RRClass& qclass,
+              const bool strict)
+{
+    string line;
+    unsigned int linenum = 0;
+    Message query_message(Message::RENDER);
+    OutputBuffer buffer(128); // this should be sufficiently large
+    MessageRenderer renderer(buffer);
+    while (getline(input, line), !input.eof()) {
+        ++linenum;
+        if (input.bad() || input.fail()) {
+            isc_throw(BenchMarkError,
+                      "Unexpected line in query data file around line " <<
+                      linenum);
+        }
+        if (line.empty() || line[0] == '#') {
+            continue;           // skip comment and blank lines
+        }
+
+        istringstream iss(line);
+        string qname_string, qtype_string;
+        iss >> qname_string >> qtype_string;
+        if (iss.bad() || iss.fail()) {
+            if (strict) {
+                isc_throw(BenchMarkError,
+                          "load query: unexpected input around line " <<
+                          linenum);
+            }
+            continue;
+        }
+
+        // The data may contain broken lines; by default they are simply
+        // skipped, but in strict mode they trigger an exception.
+        try {
+            query_message.clear(Message::RENDER);
+            query_message.setQid(0);
+            query_message.setOpcode(Opcode::QUERY());
+            query_message.setRcode(Rcode::NOERROR());
+            query_message.addQuestion(Question(Name(qname_string), qclass,
+                                               RRType(qtype_string)));
+
+            renderer.clear();
+            query_message.toWire(renderer);
+            vector<unsigned char> query_data(
+                static_cast<const unsigned char*>(buffer.getData()),
+                static_cast<const unsigned char*>(buffer.getData()) +
+                buffer.getLength());
+            queries.push_back(query_data);
+        } catch (const Exception& error) {
+            if (strict) {
+                isc_throw(BenchMarkError,
+                          "failed to parse/create query around line " <<
+                          linenum);
+            }
+            continue;
+        }
+    }
+}
+}
+}

+ 149 - 0
src/lib/bench/benchmark_util.h

@@ -0,0 +1,149 @@
+// Copyright (C) 2010  Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+// $Id$
+
+#ifndef __BENCHMARK_UTIL_H
+#define __BENCHMARK_UTIL_H 1
+
+/// \file
+/// Utilities to help write benchmark cases.
+///
+/// The initial version of this library only contains utilities for very
+/// specific benchmark cases, that is, building DNS query data.
+/// It's not clear whether we will add more utilities, including
+/// scenario-independent ones, in the future, but we keep them here for now.
+/// If we find we only need utilities specific to individual benchmark
+/// scenarios, we may move them to more specific places.
+/// For example, the query generator may go to benchmarks for DNS server
+/// implementations.
+
+#include <istream>
+#include <vector>
+
+#include <exceptions/exceptions.h>
+
+namespace isc {
+namespace dns {
+class RRClass;
+}
+
+namespace bench {
+/// \brief An exception that is thrown if an error occurs within the benchmark
+/// module.
+class BenchMarkError : public Exception {
+public:
+    BenchMarkError(const char* file, size_t line, const char* what) :
+        isc::Exception(file, line, what) {}
+};
+
+/// \brief A convenient shortcut type to represent a sequence of query %data
+/// in wire format.
+typedef std::vector<std::vector<unsigned char> > BenchQueries; 
+
+/// \brief Load query %data from a file into a vector.
+///
+/// The format of the %data file is a sequence of tuples of query name and
+/// query type.  Each line specifies a single tuple.  Empty lines and
+/// lines beginning with a pound sign (#) are considered comments and will
+/// be ignored.  Example:
+/// \code
+/// # This is a comment line and will be ignored.  Same for the next line.
+///
+/// www.example.com AAAA
+/// ftp.example.org NS
+/// text.dot.example TXT \endcode
+///
+/// For those who are familiar with BIND 9's queryperf tool, this is the
+/// same as the simplest form of the input file for queryperf.
+///
+/// For each tuple, this function builds a wire-format non-recursive DNS
+/// query message and appends it to the given vector in the form of
+/// a vector of <code>unsigned char</code>.
+///
+/// The resulting vector can be used, e.g., for benchmarking query processing
+/// code without involving disk access or network I/O.
+/// It is handier than existing tools such as queryperf and can help
+/// measure the "bare" (or the best possible) performance of the query
+/// processing itself.
+///
+/// If this function fails to open the specified file to read the %data,
+/// an exception of class \c BenchMarkError will be thrown.
+/// If it fails to recognize an input line either as a comment or as
+/// a tuple of strings, an exception of class \c BenchMarkError will be
+/// thrown.
+///
+/// By default, this function does not require the strings to be a valid
+/// domain name or a valid textual representation of an RR type.
+/// This is because the input %data may be built from a packet dump of
+/// real query samples without validation, which may contain bogus values.
+/// It would make more sense to just ignore the bogus %data than to filter
+/// the sample beforehand.
+/// This behavior can be changed by setting the \c strict argument to
+/// \c true, in which case if this function fails to parse the query name
+/// or the type, it will throw an exception of class \c BenchMarkError.
+///
+/// If memory allocation fails during the processing, a corresponding standard
+/// exception will be thrown.
+///
+/// This function only offers the basic exception guarantee.  That is, if
+/// an exception is thrown from this function, it is not guaranteed that
+/// \c queries retains the content it had before the call.
+/// It is not so difficult to offer a stronger exception guarantee, but
+/// since this function has only limited uses, mainly for testing
+/// purposes, the benefit wouldn't outweigh the implementation complexity.
+///
+/// \param input_file A character string specifying the %data file name.
+/// \param queries A vector wherein the query %data is to be stored.
+/// \param qclass The RR class of the resulting queries.  The same RR class
+/// is used for all queries.
+/// \param strict If \c true, apply stricter validation on the query name and
+/// query RR types; otherwise invalid inputs will be ignored.
+void loadQueryData(const char* input_file, BenchQueries& queries,
+                   const isc::dns::RRClass& qclass, const bool strict = false);
+
+/// \brief Load query %data from an input stream into a vector.
+///
+/// This version of the function is the same as
+/// loadQueryData(const char*, BenchQueries&, const isc::dns::RRClass&, const bool)
+/// except that it reads the input query sequence from a specified input stream.
+///
+/// This version is useful for smaller-scale tests where the query %data is
+/// hardcoded in the benchmark source code.  For example, we could build
+/// a sequence of wire-format queries via the following code:
+/// \code
+///    BenchQueries queries;
+///    stringstream qstream;
+///    qstream << "www.example.com AAAA" << endl
+///            << "ftp.example.org NS" << endl
+///            << "text.dot.example TXT" << endl;
+///    loadQueryData(qstream, queries, RRClass::IN()); \endcode
+/// This will result in the same sequence of queries as the example using
+/// a %data file shown in the other version of the function.
+///
+/// \param input An input stream object that is to emit the query sequence.
+/// \param queries A vector wherein the query %data is to be stored.
+/// \param qclass The RR class of the resulting queries.  The same RR class
+/// is used for all queries.
+/// \param strict If \c true, apply stricter validation on the query name and
+/// query RR types; otherwise invalid inputs will be ignored.
+void loadQueryData(std::istream& input, BenchQueries& queries,
+                   const isc::dns::RRClass& qclass, const bool strict = false);
+}
+}
+#endif  // __BENCHMARK_UTIL_H
+
+// Local Variables: 
+// mode: c++
+// End: 

+ 9 - 0
src/lib/bench/example/Makefile.am

@@ -0,0 +1,9 @@
+AM_CPPFLAGS = -I$(top_srcdir)/src/lib -I$(top_builddir)/src/lib
+
+CLEANFILES = *.gcno *.gcda
+
+noinst_PROGRAMS = search_bench
+search_bench_SOURCES = search_bench.cc
+
+search_bench_LDADD = $(top_builddir)/src/lib/exceptions/libexceptions.la
+

+ 144 - 0
src/lib/bench/example/search_bench.cc

@@ -0,0 +1,144 @@
+// Copyright (C) 2010  Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+// $Id$
+
+#include <unistd.h>             // for getpid
+
+#include <cstdlib>              // for rand
+#include <algorithm>
+#include <iostream>
+#include <vector>
+#include <set>
+
+#include <exceptions/exceptions.h>
+
+#include <bench/benchmark.h>
+
+using namespace std;
+using namespace isc::bench;
+
+namespace {
+template <bool Sorted>
+class VectorSearchBenchMark {
+public:
+    VectorSearchBenchMark(const vector<int>& data,
+                          const vector<int>& keys) :
+        data_(data), keys_(keys)
+    {}
+    unsigned int run() {
+        vector<int>::const_iterator iter;
+        vector<int>::const_iterator end_key = keys_.end();
+        for (iter = keys_.begin(); iter != end_key; ++iter) {
+            if (Sorted) {
+                binary_search(data_.begin(), data_.end(), *iter);
+            } else {
+                find(data_.begin(), data_.end(), *iter);
+            }
+        }
+        return (keys_.size());
+    }
+private:
+    const vector<int>& data_;
+    const vector<int>& keys_;
+};
+
+class SetSearchBenchMark {
+public:
+    SetSearchBenchMark(const set<int>& data, const vector<int>& keys) :
+        data_(data), keys_(keys)
+    {}
+    unsigned int run() {
+        vector<int>::const_iterator iter;
+        vector<int>::const_iterator end_key = keys_.end();
+        for (iter = keys_.begin(); iter != end_key; ++iter) {
+            data_.find(*iter);
+        }        
+        return (keys_.size());
+    }
+public:   // make it visible to the BenchMark class
+    const set<int>& data_;
+private:
+    const vector<int>& keys_;
+};
+}
+
+namespace isc {
+namespace bench {
+template<>
+void
+BenchMark<SetSearchBenchMark>::setUp() {
+    cout << "Benchmark for searching std::set (size="
+         << target_.data_.size() << ")" << endl;    
+}
+}
+}
+
+namespace {
+const int DEFAULT_ITERATION = 100;
+const int DEFAULT_SIZE = 10000;
+
+void
+usage() {
+    cerr << "Usage: search_bench [-n iterations] [-s data_size]" << endl;
+    exit (1);
+}
+}
+
+int
+main(int argc, char* argv[]) {
+    int ch;
+    int iteration = DEFAULT_ITERATION;
+    int size = DEFAULT_SIZE;
+    while ((ch = getopt(argc, argv, "n:s:")) != -1) {
+        switch (ch) {
+        case 'n':
+            iteration = atoi(optarg);
+            break;
+        case 's':
+            size = atoi(optarg);
+            break;
+        case '?':
+        default:
+            usage();
+        }
+    }
+    argc -= optind;
+    argv += optind;
+    if (argc != 0) {
+        usage();
+    }
+
+    srand(getpid());
+    vector<int> data_vector;
+    set<int> data_set;
+    vector<int> keys;
+    for (int i = 0; i < size; ++i) {
+        data_vector.push_back(i);
+        data_set.insert(i);
+        keys.push_back(rand() % size);
+    }
+
+    cout << "Benchmark for linear search" << endl;
+    BenchMark<VectorSearchBenchMark<false> >(iteration,
+                                             VectorSearchBenchMark<false>(
+                                                 data_vector, keys));
+    cout << "Benchmark for binary search" << endl;
+    BenchMark<VectorSearchBenchMark<true> >(iteration,
+                                             VectorSearchBenchMark<true>(
+                                                 data_vector, keys));
+    BenchMark<SetSearchBenchMark>(iteration,
+                                  SetSearchBenchMark(data_set, keys));
+    return (0);
+}
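
For reference, the example program accepts -n to override the number of iterations (default 100) and -s to override the data size (default 10000). Each of the three benchmarks prints its heading (plus the customized setUp() output for the std::set case), followed by the standard result line in the "Performed N iterations in Xs (Yips)" format documented in benchmark.h; the actual figures will of course vary by machine.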

+ 22 - 0
src/lib/bench/tests/Makefile.am

@@ -0,0 +1,22 @@
+AM_CPPFLAGS = -I$(top_builddir)/src/lib -I$(top_srcdir)/src/lib
+AM_CPPFLAGS += -DTEST_DATA_DIR=\"$(srcdir)/testdata\"
+AM_CXXFLAGS = $(B10_CXXFLAGS)
+
+CLEANFILES = *.gcno *.gcda
+
+TESTS =
+if HAVE_GTEST
+TESTS += run_unittests
+run_unittests_SOURCES = run_unittests.cc
+run_unittests_SOURCES += benchmark_unittest.cc
+run_unittests_SOURCES += loadquery_unittest.cc
+
+run_unittests_CPPFLAGS = $(AM_CPPFLAGS) $(GTEST_INCLUDES)
+run_unittests_LDFLAGS = $(AM_LDFLAGS) $(GTEST_LDFLAGS)
+run_unittests_LDADD = $(top_builddir)/src/lib/exceptions/libexceptions.la
+run_unittests_LDADD += $(top_builddir)/src/lib/dns/libdns++.la
+run_unittests_LDADD += $(top_builddir)/src/lib/bench/libbench.la
+run_unittests_LDADD += $(GTEST_LDADD)
+endif
+
+noinst_PROGRAMS = $(TESTS)

+ 143 - 0
src/lib/bench/tests/benchmark_unittest.cc

@@ -0,0 +1,143 @@
+// Copyright (C) 2010  Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+// $Id$
+
+#include <unistd.h>             // for usleep
+
+#include <bench/benchmark.h>
+
+#include <gtest/gtest.h>
+
+using namespace std;
+using namespace isc::bench;
+
+namespace {
+// Our "benchmark" simply sleeps for a short period, and reports a faked
+// number of iterations.
+class TestBenchMark {
+public:
+    TestBenchMark(const int sub_iterations, const int sleep_time) :
+        sub_iterations_(sub_iterations), sleep_time_(sleep_time),
+        setup_completed_(false), teardown_completed_(false)
+    {}
+    unsigned int run() {
+        usleep(sleep_time_);
+        return (sub_iterations_);
+    }
+    const int sub_iterations_;
+    const int sleep_time_;
+    bool setup_completed_;
+    bool teardown_completed_;
+};
+}
+
+namespace isc {
+namespace bench {
+template <>
+void
+BenchMark<TestBenchMark>::setUp() {
+    target_.setup_completed_ = true;
+}
+
+template <>
+void
+BenchMark<TestBenchMark>::tearDown() {
+    target_.teardown_completed_ = true;
+}
+
+// XXX: some compilers cannot find class static constants used in
+// EXPECT_xxx macros, for which we need an explicit definition.
+template <typename T>
+const double BenchMark<T>::TIME_FAILURE;
+}
+}
+
+namespace {
+TEST(BenchMarkTest, run) {
+    // use some uncommon iterations for testing purposes:
+    const int sub_iterations = 23;
+    const int sleep_time = 50000; // will sleep for 50ms
+    // we cannot expect much accuracy in the measured duration, so
+    // we'll include some conservative margin (20%) and perform range
+    // comparison below.
+    const int duration_margin = 10000; // 10ms
+    const int ONE_MILLION = 1000000;
+
+    // Prerequisite check: since the tests in this case depend on subtle
+    // timing, they can produce false failures.  There are reportedly systems
+    // where usleep() doesn't work as this test expects.  So we check the
+    // conditions before the tests, and if the check fails we skip the tests
+    // at the risk of overlooking possible bugs.
+    struct timeval check_begin, check_end;
+    gettimeofday(&check_begin, NULL);
+    usleep(sleep_time);
+    gettimeofday(&check_end, NULL);
+    check_end.tv_sec -= check_begin.tv_sec;
+    if (check_end.tv_usec >= check_begin.tv_usec) {
+        check_end.tv_usec = check_end.tv_usec - check_begin.tv_usec;
+    } else {
+        check_end.tv_usec = ONE_MILLION + check_end.tv_usec -
+            check_begin.tv_usec;
+        --check_end.tv_sec;
+    }
+    if (check_end.tv_sec != 0 ||
+        sleep_time - duration_margin > check_end.tv_usec ||
+        sleep_time + duration_margin < check_end.tv_usec) {
+        cerr << "Prerequisite check failed.  skipping test" << endl;
+        return;
+    }
+
+    TestBenchMark test_bench(sub_iterations, sleep_time);
+    BenchMark<TestBenchMark> bench(1, test_bench, false);
+    // Check pre-test conditions.
+    EXPECT_FALSE(test_bench.setup_completed_);
+    EXPECT_FALSE(test_bench.teardown_completed_);
+
+    bench.run();
+
+    // Check if specialized setup and teardown were performed.
+    EXPECT_TRUE(test_bench.setup_completed_);
+    EXPECT_TRUE(test_bench.teardown_completed_);
+
+    // Check accuracy of the measured statistics.
+    EXPECT_EQ(sub_iterations, bench.getIteration());
+    EXPECT_LT(sleep_time - duration_margin, bench.getDuration() * ONE_MILLION);
+    EXPECT_GT(sleep_time + duration_margin, bench.getDuration() * ONE_MILLION);
+    EXPECT_LT((sleep_time - duration_margin) /
+              static_cast<double>(sub_iterations),
+              bench.getAverageTime() * ONE_MILLION);
+    EXPECT_GT((sleep_time + duration_margin) /
+              static_cast<double>(sub_iterations),
+              bench.getAverageTime() * ONE_MILLION);
+    EXPECT_LT(static_cast<double>(sub_iterations) /
+              (sleep_time + duration_margin),
+              bench.getIterationPerSecond() / ONE_MILLION);
+    EXPECT_GT(static_cast<double>(sub_iterations) /
+              (sleep_time - duration_margin),
+              bench.getIterationPerSecond() / ONE_MILLION);
+}
+
+TEST(BenchMarkTest, runWithNoIteration) {
+    // We'll lie about the number of iterations (0).  It will result in a
+    // meaningless result, but at least it shouldn't crash.
+    TestBenchMark test_bench(0, 0);
+    BenchMark<TestBenchMark> bench(1, test_bench, false);
+    bench.run();
+    EXPECT_EQ(0, bench.getIteration());
+    // Since the reported iteration is 0, naive calculation of the average
+    // time would cause a division by 0 failure.
+    EXPECT_EQ(bench.TIME_FAILURE, bench.getAverageTime());
+}
+}

+ 198 - 0
src/lib/bench/tests/loadquery_unittest.cc

@@ -0,0 +1,198 @@
+// Copyright (C) 2010  Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+// $Id$
+
+#include <algorithm>
+#include <utility>
+#include <vector>
+#include <sstream>
+
+#include <dns/buffer.h>
+#include <dns/message.h>
+#include <dns/name.h>
+#include <dns/rrclass.h>
+#include <dns/rrtype.h>
+
+#include <bench/benchmark_util.h>
+
+#include <gtest/gtest.h>
+
+using namespace std;
+using namespace isc::bench;
+using namespace isc::dns;
+
+namespace {
+typedef pair<string, string> QueryParam;
+
+class LoadQueryTest : public ::testing::Test {
+protected:
+    LoadQueryTest() : query_rrclass(RRClass::IN()) {
+        queries.push_back(QueryParam("www.example.org", "AAAA"));
+        queries.push_back(QueryParam("www.example.com", "A"));
+        queries.push_back(QueryParam("test.example", "NS"));
+    }
+    RRClass query_rrclass;
+    BenchQueries result_queries;
+    vector<QueryParam> queries;
+    stringstream query_stream;
+    static const char* const DATA_DIR;
+};
+
+const char* const LoadQueryTest::DATA_DIR = TEST_DATA_DIR;
+
+class QueryInserter {
+public:
+    QueryInserter(stringstream& stream) : stream_(stream) {}
+    void operator()(const QueryParam& query) {
+        stream_ << query.first << " " << query.second << endl;
+    }
+private:
+    stringstream& stream_;
+};
+
+class QueryChecker {
+public:
+    QueryChecker(const vector<QueryParam>* expected, const RRClass& rrclass) :
+        expected_(expected), rrclass_(rrclass)
+    {
+        if (expected != NULL) {
+            iter_ = expected_->begin();
+        }
+    }
+    void operator()(const vector<unsigned char>& actual_data) {
+        InputBuffer buffer(&actual_data[0], actual_data.size());
+        Message message(Message::PARSE);
+        message.fromWire(buffer);
+
+        // Check if the header part indicates an expected standard query.
+        EXPECT_EQ(0, message.getQid());
+        EXPECT_EQ(Opcode::QUERY(), message.getOpcode());
+        EXPECT_EQ(Rcode::NOERROR(), message.getRcode());
+        EXPECT_FALSE(message.getHeaderFlag(MessageFlag::QR()));
+        EXPECT_FALSE(message.getHeaderFlag(MessageFlag::AA()));
+        EXPECT_EQ(1, message.getRRCount(Section::QUESTION()));
+        EXPECT_EQ(0, message.getRRCount(Section::ANSWER()));
+        EXPECT_EQ(0, message.getRRCount(Section::AUTHORITY()));
+        EXPECT_EQ(0, message.getRRCount(Section::ADDITIONAL()));
+
+        // Check if the question matches our original data, if the expected
+        // data is given.
+        if (expected_ != NULL) {
+            ConstQuestionPtr question = *message.beginQuestion();
+            EXPECT_EQ(Name((*iter_).first), question->getName());
+            EXPECT_EQ(RRType((*iter_).second), question->getType());
+            EXPECT_EQ(rrclass_, question->getClass());
+        
+            ++iter_;
+        }
+    }
+private:
+    const vector<QueryParam>* expected_;
+    vector<QueryParam>::const_iterator iter_;
+    const RRClass rrclass_;
+};
+
+TEST_F(LoadQueryTest, load) {
+    for_each(queries.begin(), queries.end(), QueryInserter(query_stream));
+
+    loadQueryData(query_stream, result_queries, query_rrclass);
+
+    EXPECT_EQ(queries.size(), result_queries.size());
+    for_each(result_queries.begin(), result_queries.end(),
+             QueryChecker(&queries, query_rrclass));
+}
+
+TEST_F(LoadQueryTest, loadForCHClass) {
+    for_each(queries.begin(), queries.end(), QueryInserter(query_stream));
+    query_rrclass = RRClass::CH();
+
+    loadQueryData(query_stream, result_queries, query_rrclass);
+
+    EXPECT_EQ(queries.size(), result_queries.size());
+    for_each(result_queries.begin(), result_queries.end(),
+             QueryChecker(&queries, query_rrclass));
+}
+
+TEST_F(LoadQueryTest, loadWithComment) {
+    for_each(queries.begin(), queries.end(), QueryInserter(query_stream));
+    // add a comment line.  this shouldn't change the result.
+    query_stream << "# this is a comment" << endl;
+    query_stream << endl;       // empty line.  should be ignored, too.
+
+    loadQueryData(query_stream, result_queries, query_rrclass);
+    EXPECT_EQ(queries.size(), result_queries.size());
+    for_each(result_queries.begin(), result_queries.end(),
+             QueryChecker(&queries, query_rrclass));
+}
+
+TEST_F(LoadQueryTest, loadWithIncompleteData) {
+    for_each(queries.begin(), queries.end(), QueryInserter(query_stream));
+    // RRType is missing.  It should be ignored by default.
+    query_stream << "type-is-missing" << endl;
+
+    loadQueryData(query_stream, result_queries, query_rrclass);
+    EXPECT_EQ(queries.size(), result_queries.size());
+    for_each(result_queries.begin(), result_queries.end(),
+             QueryChecker(&queries, query_rrclass));
+}
+
+TEST_F(LoadQueryTest, loadWithIncompleteDataToBeRejected) {
+    for_each(queries.begin(), queries.end(), QueryInserter(query_stream));
+    // RRType is missing.  We're going to specify the "strict" check, so
+    // we should receive an exception.
+    query_stream << "type-is-missing" << endl;
+    EXPECT_THROW(loadQueryData(query_stream, result_queries, query_rrclass,
+                               true), BenchMarkError);
+}
+
+TEST_F(LoadQueryTest, loadWithBadData) {
+    for_each(queries.begin(), queries.end(), QueryInserter(query_stream));
+    // invalid RRType.  It should be ignored by default.
+    query_stream << "www.example.com NOSUCHRRTYPE" << endl;
+
+    loadQueryData(query_stream, result_queries, query_rrclass);
+    EXPECT_EQ(queries.size(), result_queries.size());
+    for_each(result_queries.begin(), result_queries.end(),
+             QueryChecker(&queries, query_rrclass));
+}
+
+TEST_F(LoadQueryTest, loadWithBadDataToBeRejected) {
+    for_each(queries.begin(), queries.end(), QueryInserter(query_stream));
+    // invalid RRType, which should trigger an exception.
+    query_stream << "www.example.com NOSUCHRRTYPE" << endl;
+    EXPECT_THROW(loadQueryData(query_stream, result_queries, query_rrclass,
+                               true), BenchMarkError);
+}
+
+TEST_F(LoadQueryTest, loadFromFile) {
+    const string data_file = string(DATA_DIR) + string("/query.txt");
+    loadQueryData(data_file.c_str(), result_queries, query_rrclass);
+    EXPECT_LT(0, result_queries.size());
+
+    // We are going to skip matching the query data; we only check the header.
+    // We could check the data, too, but to do so we need to populate the
+    // expected data from the file (or prepare a consistent copy locally).
+    // Since the implementation is shared with the stringstream case, the
+    // additional setup wouldn't be worthwhile.
+    for_each(result_queries.begin(), result_queries.end(),
+             QueryChecker(NULL, query_rrclass));
+}
+
+TEST_F(LoadQueryTest, loadFromFileNotExist) {
+    EXPECT_THROW(loadQueryData("notexistent/query.data", result_queries,
+                               query_rrclass), BenchMarkError);
+}
+}

+ 24 - 0
src/lib/bench/tests/run_unittests.cc

@@ -0,0 +1,24 @@
+// Copyright (C) 2010  Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+// $Id$
+
+#include <gtest/gtest.h>
+
+int
+main(int argc, char* argv[]) {
+    ::testing::InitGoogleTest(&argc, argv);
+
+    return (RUN_ALL_TESTS());
+}

+ 6 - 0
src/lib/bench/tests/testdata/query.txt

@@ -0,0 +1,6 @@
+# This is sample query data for benchmark.
+# The format is the same as BIND 9's queryperf.
+
+www.example.com TXT
+www.example.org SOA
+ftp.example.org RRSIG