benchmark_unittest.cc

// Copyright (C) 2010 Internet Systems Consortium, Inc. ("ISC")
//
// Permission to use, copy, modify, and/or distribute this software for any
// purpose with or without fee is hereby granted, provided that the above
// copyright notice and this permission notice appear in all copies.
//
// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
// AND FITNESS. IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
// PERFORMANCE OF THIS SOFTWARE.

// $Id$
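
// Tests for the BenchMark class template defined in bench/benchmark.h.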

#include <unistd.h>             // for usleep
#include <sys/time.h>           // for gettimeofday
#include <iostream>             // for cerr/endl

#include <bench/benchmark.h>

#include <gtest/gtest.h>

using namespace std;
using namespace isc::bench;

namespace {
// Our "benchmark" simply sleeps for a short period and reports a faked
// number of iterations.
class TestBenchMark {
public:
    TestBenchMark(const int sub_iterations, const int sleep_time) :
        sub_iterations_(sub_iterations), sleep_time_(sleep_time),
        setup_completed_(false), teardown_completed_(false)
    {}
    unsigned int run() {
        usleep(sleep_time_);
        return (sub_iterations_);
    }
    const int sub_iterations_;
    const int sleep_time_;
    bool setup_completed_;
    bool teardown_completed_;
};
}
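
// Explicit specializations of the BenchMark setUp() and tearDown() hooks
// for TestBenchMark.  They simply record that they were called, so the
// tests below can confirm that run() actually invokes them.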
namespace isc {
namespace bench {
template <>
void
BenchMark<TestBenchMark>::setUp() {
    target_.setup_completed_ = true;
}

template <>
void
BenchMark<TestBenchMark>::tearDown() {
    target_.teardown_completed_ = true;
}

// XXX: some compilers cannot find class static constants used in
// EXPECT_xxx macros, for which we need an explicit definition.
template <typename T>
const int BenchMark<T>::TIME_FAILURE;
}
}

namespace {
TEST(BenchMarkTest, run) {
    // Use some uncommon iteration counts for testing purposes:
    const int sub_iterations = 23;
    const int sleep_time = 50000; // will sleep for 50ms
    // We cannot expect particular accuracy in the measured duration, so
    // we'll allow a conservative margin (25%) and perform range
    // comparisons below.
    const int duration_margin = 12500; // 12.5ms
    const int ONE_MILLION = 1000000;

    // Prerequisite check: since the tests in this case depend on subtle
    // timing, they may result in false positives.  There are reportedly
    // systems where usleep() doesn't work as this test expects.  So we
    // check the condition before the tests and, if it fails, skip them at
    // the risk of overlooking possible bugs.
    struct timeval check_begin, check_end;
    gettimeofday(&check_begin, NULL);
    usleep(sleep_time);
    gettimeofday(&check_end, NULL);
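    // Compute the elapsed time (check_end - check_begin) in place,
    // borrowing from the seconds field when the microseconds subtraction
    // would underflow.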
    check_end.tv_sec -= check_begin.tv_sec;
    if (check_end.tv_usec >= check_begin.tv_usec) {
        check_end.tv_usec -= check_begin.tv_usec;
    } else {
        check_end.tv_usec = ONE_MILLION + check_end.tv_usec -
            check_begin.tv_usec;
        --check_end.tv_sec;
    }
    if (check_end.tv_sec != 0 ||
        sleep_time - duration_margin > check_end.tv_usec ||
        sleep_time + duration_margin < check_end.tv_usec) {
        cerr << "Prerequisite check failed. Skipping test." << endl;
        return;
    }
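
    // Construct the benchmark object.  The third constructor argument
    // (false) presumably tells BenchMark not to run the benchmark
    // immediately; the test relies on being able to inspect the pre-run
    // state before calling run() explicitly (see bench/benchmark.h for
    // the authoritative interface).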
    TestBenchMark test_bench(sub_iterations, sleep_time);
    BenchMark<TestBenchMark> bench(1, test_bench, false);

    // Check pre-test conditions.
    EXPECT_FALSE(test_bench.setup_completed_);
    EXPECT_FALSE(test_bench.teardown_completed_);

    bench.run();

    // Check if specialized setup and teardown were performed.
    EXPECT_TRUE(test_bench.setup_completed_);
    EXPECT_TRUE(test_bench.teardown_completed_);
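
    // Note on units: sleep_time and duration_margin are in microseconds,
    // while getDuration() and getAverageTime() appear to report seconds
    // and getIterationPerSecond() iterations per second, hence the
    // conversions by ONE_MILLION below.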
    // Check accuracy of the measured statistics.
    EXPECT_EQ(sub_iterations, bench.getIteration());
    EXPECT_LT(sleep_time - duration_margin, bench.getDuration() * ONE_MILLION);
    EXPECT_GT(sleep_time + duration_margin, bench.getDuration() * ONE_MILLION);
    EXPECT_LT((sleep_time - duration_margin) /
              static_cast<double>(sub_iterations),
              bench.getAverageTime() * ONE_MILLION);
    EXPECT_GT((sleep_time + duration_margin) /
              static_cast<double>(sub_iterations),
              bench.getAverageTime() * ONE_MILLION);
    EXPECT_LT(static_cast<double>(sub_iterations) /
              (sleep_time + duration_margin),
              bench.getIterationPerSecond() / ONE_MILLION);
    EXPECT_GT(static_cast<double>(sub_iterations) /
              (sleep_time - duration_margin),
              bench.getIterationPerSecond() / ONE_MILLION);
}

TEST(BenchMarkTest, runWithNoIteration) {
    // We'll lie about the number of iterations (0).  This will produce a
    // meaningless result, but at least it shouldn't crash.
    TestBenchMark test_bench(0, 0);
    BenchMark<TestBenchMark> bench(1, test_bench, false);
    bench.run();
    EXPECT_EQ(0, bench.getIteration());
    // Since the reported number of iterations is 0, a naive calculation of
    // the average time would cause a division-by-zero failure, so the
    // framework should return TIME_FAILURE instead.
    EXPECT_EQ(bench.TIME_FAILURE, bench.getAverageTime());
}
}