[4096] Merge branch 'master' into trac4096

    Eval library now supports actual parsing.  Need to use it
    in class definition's ExpressionParser.
Thomas Markwalder 9 years ago
parent commit 02f6c505d1
60 changed files with 8493 additions and 256 deletions
  1. ChangeLog  (+27 -0)
  2. configure.ac  (+50 -0)
  3. doc/Makefile.am  (+12 -10)
  4. doc/examples/kea4/hooks.json  (+43 -0)
  5. doc/examples/kea6/hooks.json  (+43 -0)
  6. doc/guide/admin.xml  (+21 -0)
  7. doc/guide/ddns.xml  (+7 -7)
  8. doc/guide/dhcp4-srv.xml  (+2 -5)
  9. doc/guide/dhcp6-srv.xml  (+2 -2)
  10. doc/guide/hooks.xml  (+26 -10)
  11. src/bin/dhcp4/dhcp4.spec  (+11 -3)
  12. src/bin/dhcp4/tests/config_parser_unittest.cc  (+1 -2)
  13. src/bin/dhcp6/dhcp6.spec  (+11 -3)
  14. src/bin/dhcp6/tests/config_parser_unittest.cc  (+4 -1)
  15. src/lib/dhcpsrv/client_class_def.h  (+1 -1)
  16. src/lib/dhcpsrv/csv_lease_file4.cc  (+16 -14)
  17. src/lib/dhcpsrv/csv_lease_file4.h  (+2 -2)
  18. src/lib/dhcpsrv/csv_lease_file6.cc  (+21 -18)
  19. src/lib/dhcpsrv/csv_lease_file6.h  (+2 -2)
  20. src/lib/dhcpsrv/dhcpsrv_messages.mes  (+22 -0)
  21. src/lib/dhcpsrv/lease_file_loader.h  (+11 -1)
  22. src/lib/dhcpsrv/memfile_lease_mgr.cc  (+88 -60)
  23. src/lib/dhcpsrv/memfile_lease_mgr.h  (+21 -2)
  24. src/lib/dhcpsrv/parsers/dhcp_parsers.cc  (+53 -5)
  25. src/lib/dhcpsrv/parsers/dhcp_parsers.h  (+23 -0)
  26. src/lib/dhcpsrv/tests/csv_lease_file4_unittest.cc  (+170 -23)
  27. src/lib/dhcpsrv/tests/csv_lease_file6_unittest.cc  (+198 -26)
  28. src/lib/dhcpsrv/tests/dhcp_parsers_unittest.cc  (+108 -14)
  29. src/lib/dhcpsrv/tests/memfile_lease_mgr_unittest.cc  (+133 -0)
  30. src/lib/eval/Makefile.am  (+44 -0)
  31. src/lib/eval/eval.dox  (+113 -3)
  32. src/lib/eval/eval_context.cc  (+52 -0)
  33. src/lib/eval/eval_context.h  (+96 -0)
  34. src/lib/eval/eval_context_decl.h  (+28 -0)
  35. src/lib/eval/eval_messages.mes  (+0 -5)
  36. src/lib/eval/evaluate.cc  (+41 -0)
  37. src/lib/eval/evaluate.h  (+38 -0)
  38. src/lib/eval/lexer.cc  (+2251 -0)
  39. src/lib/eval/lexer.ll  (+156 -0)
  40. src/lib/eval/location.hh  (+193 -0)
  41. src/lib/eval/parser.cc  (+1039 -0)
  42. src/lib/eval/parser.h  (+1058 -0)
  43. src/lib/eval/parser.yy  (+141 -0)
  44. src/lib/eval/position.hh  (+181 -0)
  45. src/lib/eval/stack.hh  (+158 -0)
  46. src/lib/eval/tests/Makefile.am  (+4 -1)
  47. src/lib/eval/tests/context_unittest.cc  (+310 -0)
  48. src/lib/eval/tests/evaluate_unittest.cc  (+246 -0)
  49. src/lib/eval/tests/token_unittest.cc  (+23 -16)
  50. src/lib/eval/token.cc  (+9 -7)
  51. src/lib/eval/token.h  (+22 -1)
  52. src/lib/hooks/hooks_user.dox  (+27 -7)
  53. src/lib/util/Makefile.am  (+1 -0)
  54. src/lib/util/csv_file.cc  (+8 -2)
  55. src/lib/util/csv_file.h  (+9 -3)
  56. src/lib/util/tests/Makefile.am  (+1 -0)
  57. src/lib/util/tests/csv_file_unittest.cc  (+29 -0)
  58. src/lib/util/tests/versioned_csv_file_unittest.cc  (+509 -0)
  59. src/lib/util/versioned_csv_file.cc  (+251 -0)
  60. src/lib/util/versioned_csv_file.h  (+326 -0)

+ 27 - 0
ChangeLog

@@ -1,3 +1,30 @@
+1050.	[doc]		tmark
+	Corrected the descriptions of ncr-protocol and ncr-format parameters
+	in the Kea Admin Guide.
+	(Trac #4117, git 034c1c95b57768d5abbc7fb40cc57d7cadad21dd)
+
+1049.	[build]		fdupont
+	Add a new --enable-generate-parser configuration parameter
+	(disabled by default) which uses flex and bison to regenerate
+	parser files.
+	(Trac #4125, git 18321bf85f93b24d720f1ab2d90b4f4da85bc471)
+
+1048.	[func]		fdupont,tomek
+	Implement expression parser for client classification.
+	(Trac #4088, git ac9eb312bfd1c6bf22a868ad789a0c049f33f637)
+
+1047.	[func]*		stephen
+	Change the way that hooks libraries are defined in the configuration
+	file in preparation for allowing the specification of library-specific
+	parameters in a future version of Kea.
+	(Trac #3259, git b2986b0b0299e691b13123922129bdbf8575afdb)
+
+1046.	[func]		tmark
+	Upon startup Kea servers will now detect memfile lease files
+	that need upgrading, and will launch an instance of the LFC
+	to convert them to the most current memfile schema version.
+	(Trac #3601, git ce4b0e42e8a01bbf3b58fdb1f505bbd6e2fad134)
+
 1045.	[func]		tmark
 	Added classes for storing client class definitions to libdhcpsrv.
 	(Trac #4095, git 1039a942450e2a45a1e1aa9924cae4fdbd1541fe)

+ 50 - 0
configure.ac

@@ -1212,6 +1212,51 @@ AC_SUBST(PERL)
 AC_PATH_PROGS(AWK, gawk awk)
 AC_SUBST(AWK)
 
+AC_ARG_ENABLE(generate_parser, [AC_HELP_STRING([--enable-generate-parser],
+  [indicates that the parsers will be regenerated. This implies that
+   bison and flex are required [default=no]])],
+   enable_generate_parser=$enableval, enable_generate_parser=no)
+
+# Check if flex is available. Flex is not needed for building Kea sources,
+# unless you want to regenerate the grammar in src/lib/eval.
+AC_PROG_LEX
+
+# Check if bison is available. Bison is not needed for building Kea sources,
+# unless you want to regenerate the grammar in src/lib/eval.
+AC_PROG_YACC
+
+if test "x$enable_generate_parser" != "xno"; then
+
+    if test "x$LEX" = "x"; then
+       AC_MSG_ERROR([Flex is required for enable-generate-parser, but was not found])
+    fi
+
+    if test "x$YACC" = "x"; then
+       AC_MSG_ERROR([Bison is required for enable-generate-parser, but was not found])
+    fi
+
+# OK, let's check that we have at least version 3.0.0 of bison. The code used
+# to generate the src/lib/eval parser is roughly based on bison 3.0 examples.
+   cat > bisontest.y << EOF
+%require "3.0.0"
+%token X
+%%
+%start Y;
+Y: X;
+EOF
+# Try to generate the parser.
+    $YACC bisontest.y -o bisontest.cc
+
+    if test $? -ne 0 ; then
+        $YACC -V
+        $RM -f bisontest.y bisontest.cc
+        AC_MSG_ERROR("Error with $YACC. Possibly an incorrect version? At least version 3.0.0 is required.")
+    fi
+    $RM -f bisontest.y bisontest.cc
+fi
+
+AM_CONDITIONAL([GENERATE_PARSER], [test x$enable_generate_parser != xno])
+
 AC_ARG_ENABLE(generate_docs, [AC_HELP_STRING([--enable-generate-docs],
   [regenerate documentation using Docbook [default=no]])],
   enable_generate_docs=$enableval, enable_generate_docs=no)
@@ -1527,6 +1572,10 @@ Log4cplus:
 
 Kea config backend:
   CONFIG_BACKEND:  ${CONFIG_BACKEND}
+
+Flex/bison:
+  FLEX:  ${LEX}
+  BISON: ${YACC}
 END
 
 # Avoid confusion on DNS/DHCP and only mention MySQL if it
@@ -1587,6 +1636,7 @@ Developer:
   C++ Code Coverage: $USE_LCOV
   Logger checks: $enable_logger_checks
   Generate Documentation: $enable_generate_docs
+  Parser Generation: $enable_generate_parser
 
 END
 

+ 12 - 10
doc/Makefile.am

@@ -8,20 +8,22 @@ EXTRA_DIST += devel/qa.dox
 
 EXTRA_DIST += images/isc-logo.png
 
-nobase_dist_doc_DATA  = examples/kea4/single-subnet.json
-nobase_dist_doc_DATA += examples/kea4/several-subnets.json
+nobase_dist_doc_DATA  = examples/ddns/sample1.json
+nobase_dist_doc_DATA += examples/ddns/template.json
+nobase_dist_doc_DATA += examples/kea4/hooks.json
+nobase_dist_doc_DATA += examples/kea4/leases-expiration.json
 nobase_dist_doc_DATA += examples/kea4/multiple-options.json
 nobase_dist_doc_DATA += examples/kea4/reservations.json
-nobase_dist_doc_DATA += examples/kea4/leases-expiration.json
-nobase_dist_doc_DATA += examples/kea6/simple.json
-nobase_dist_doc_DATA += examples/kea6/several-subnets.json
-nobase_dist_doc_DATA += examples/kea6/multiple-options.json
+nobase_dist_doc_DATA += examples/kea4/several-subnets.json
+nobase_dist_doc_DATA += examples/kea4/single-subnet.json
 nobase_dist_doc_DATA += examples/kea6/advanced.json
-nobase_dist_doc_DATA += examples/kea6/stateless.json
-nobase_dist_doc_DATA += examples/kea6/reservations.json
+nobase_dist_doc_DATA += examples/kea6/hooks.json
 nobase_dist_doc_DATA += examples/kea6/leases-expiration.json
-nobase_dist_doc_DATA += examples/ddns/sample1.json
-nobase_dist_doc_DATA += examples/ddns/template.json
+nobase_dist_doc_DATA += examples/kea6/multiple-options.json
+nobase_dist_doc_DATA += examples/kea6/reservations.json
+nobase_dist_doc_DATA += examples/kea6/several-subnets.json
+nobase_dist_doc_DATA += examples/kea6/simple.json
+nobase_dist_doc_DATA += examples/kea6/stateless.json
 
 devel:
 	mkdir -p html

+ 43 - 0
doc/examples/kea4/hooks.json

@@ -0,0 +1,43 @@
+# This is an example configuration file for the DHCPv4 server in Kea
+# illustrating the configuration of hooks libraries.  It uses a basic scenario
+# of one IPv4 subnet configured with the default values for all parameters.
+
+{"Dhcp4":
+
+{
+# Kea is told to listen on the ethX interface only.
+  "interfaces-config": {
+    "interfaces": [ "ethX" ]
+  },
+
+# Set up the storage for leases.
+  "lease-database": {
+    "type": "memfile"
+  },
+
+# Define a single subnet.
+  "subnet4": [
+    {
+      "pools": [ { "pool": "192.0.2.1 - 192.0.2.200" } ],
+      "subnet": "192.0.2.0/24",
+      "interface": "ethX"
+    }
+  ],
+
+# Set up the hooks libraries.  For this example, we assume that two libraries
+# are loaded, called "security" and "charging".  Note that order is important:
+# "security" is specified first so if both libraries supply a hook function
+# for a given hook, the function in "security" will be called before that in
+# "charging".
+
+  "hooks-libraries": [
+     {
+        "library": "/opt/lib/security.so"
+     },
+     {
+        "library": "/opt/lib/charging.so"
+     }
+  ]
+}
+
+}

+ 43 - 0
doc/examples/kea6/hooks.json

@@ -0,0 +1,43 @@
+# This is an example configuration file for the DHCPv6 server in Kea
+# illustrating the configuration of hooks libraries.  It uses a basic scenario
+# of one IPv6 subnet configured with the default values for all parameters.
+
+{"Dhcp6":
+
+{
+# Kea is told to listen on the ethX interface only.
+  "interfaces-config": {
+    "interfaces": [ "ethX" ]
+  },
+
+# Set up the storage for leases.
+  "lease-database": {
+    "type": "memfile"
+  },
+
+# Define a single subnet.
+  "subnet6": [
+    {
+      "pools": [ { "pool": "2001:db8:1::/80" } ],
+      "subnet": "2001:db8:1::/64",
+      "interface": "ethX"
+    }
+  ],
+
+# Set up the hooks libraries.  For this example, we assume that two libraries
+# are loaded, called "security" and "charging".  Note that order is important:
+# "security" is specified first so if both libraries supply a hook function
+# for a given hook, the function in "security" will be called before that in
+# "charging".
+
+  "hooks-libraries": [
+     {
+        "library": "/opt/lib/security.so"
+     },
+     {
+        "library": "/opt/lib/charging.so"
+     }
+  ]
+}
+
+}

+ 21 - 0
doc/guide/admin.xml

@@ -150,6 +150,27 @@
         will create an empty lease file if one is not
         present. Necessary disk write permission is required.
       </para>
+      <section id="memfile-upgrade">
+        <title>Upgrading Memfile Lease Files from an Earlier Version of Kea</title>
+        <para>
+        There are no special steps required to upgrade memfile lease files
+        from an earlier version of Kea to a new version of Kea.
+
+        During startup the servers will check the schema version of the lease
+        files against their own.  If there is a mismatch, the servers will
+        automatically launch the LFC process to convert the files to the
+        server's schema version.  While this mechanism is primarily meant to
+        ease the process of upgrading to newer versions of Kea, it can also
+        be used for downgrading should the need arise.  When upgrading, any
+        values not present in the original lease files will be assigned
+        appropriate default values.  When downgrading, any data present in
+        the files but not in the server's schema will be dropped.
+
+        If you wish to convert the files manually, prior to starting the
+        servers you may do so by running the LFC process yourself.
+        See <xref linkend="kea-lfc"/> for more information.
+        </para>
+      </section>
       <!-- @todo: document lease file upgrades once they are implemented in kea-admin -->
     </section>
 

+ 7 - 7
doc/guide/ddns.xml

@@ -152,7 +152,7 @@ strings <userinput>path</userinput>/kea-dhcp-ddns | sed -n 's/;;;; //p'
         If the file already exists and contains the PID of a live process,
         the server will issue a DHCP_DDNS_ALREADY_RUNNING log message and exit. It
         is possible, though unlikely, that the file is a remnant of a system crash
-        and the process to which the PID belongs is unrelated to Kea.  In such a 
+        and the process to which the PID belongs is unrelated to Kea.  In such a
         case it would be necessary to manually delete the PID file.
       </para>
 
@@ -233,15 +233,15 @@ strings <userinput>path</userinput>/kea-dhcp-ddns | sed -n 's/;;;; //p'
       </simpara></listitem>
 
       <listitem><simpara>
-      <command>ncr_protocol</command> - Packet format to use when sending requests to D2.
-      Currently only JSON format is supported.  Other formats may be available
-      in future releases.
+      <command>ncr_protocol</command> - Socket protocol to use when sending requests to D2.
+      Currently only UDP is supported.  TCP may be available in an upcoming
+      release.
       </simpara></listitem>
 
       <listitem><simpara>
-      <command>ncr_format</command> - Socket protocol to use when sending requests to D2.
-      Currently only UDP is supported.  TCP may be available in an upcoming
-      release.
+      <command>ncr_format</command> - Packet format to use when sending requests to D2.
+      Currently only JSON format is supported.  Other formats may be available
+      in future releases.
       </simpara></listitem>
 
       </itemizedlist>

+ 2 - 5
doc/guide/dhcp4-srv.xml

@@ -1769,18 +1769,15 @@ It is merely echoed by the server
       continue lease operations without running the risk that its memory usage
       grows without limit.  The default value is 1024.
       </simpara></listitem>
-
       <listitem><simpara>
-      <command>ncr-format</command> - socket protocol use when sending requests to D2.  Currently
+      <command>ncr-protocol</command> - socket protocol to use when sending requests to D2.  Currently
       only UDP is supported.  TCP may be available in an upcoming release.
       </simpara></listitem>
-
       <listitem><simpara>
-      <command>ncr-protocol</command> - packet format to use when sending requests to D2.
+      <command>ncr-format</command> - packet format to use when sending requests to D2.
       Currently only JSON format is supported.  Other formats may be available
       in future releases.
       </simpara></listitem>
-
       </itemizedlist>
       By default, kea-dhcp-ddns is assumed to be running on the same machine as kea-dhcp4, and
       all of the default values mentioned above should be sufficient.

+ 2 - 2
doc/guide/dhcp6-srv.xml

@@ -1751,11 +1751,11 @@ should include options from the isc option space:
       continue lease operations.  The default value is 1024.
       </simpara></listitem>
       <listitem><simpara>
-      <command>ncr-format</command> - Socket protocol use when sending requests to D2.  Currently
+      <command>ncr-protocol</command> - Socket protocol to use when sending requests to D2.  Currently
       only UDP is supported.  TCP may be available in an upcoming release.
       </simpara></listitem>
       <listitem><simpara>
-      <command>ncr-protocol</command> - Packet format to use when sending requests to D2.
+      <command>ncr-format</command> - Packet format to use when sending requests to D2.
       Currently only JSON format is supported.  Other formats may be available
       in future releases.
       </simpara></listitem>

+ 26 - 10
doc/guide/hooks.xml

@@ -46,25 +46,30 @@
       <command>hooks-libraries</command> keyword in the
       configuration for that process. (Note that
       the word "hooks" is plural).  The value of the keyword
-      is an array of strings, each string corresponding to a hooks library.
-      For example, to set up two hooks libraries for the DHCPv4 server, the
-      configuration would be:
+      is an array of map structures, each structure corresponding to a hooks
+      library.  For example, to set up two hooks libraries for the DHCPv4
+      server, the configuration would be:
 <screen>
 <userinput>"Dhcp4": {
     :
     "hooks-libraries": [
-       "/opt/charging.so",
-       "/opt/local/notification.so"
+        {
+            "library": "/opt/charging.so"
+        },
+        {
+            "library": "/opt/local/notification.so"
+        }
     ]
     :
 }</userinput>
 </screen>
       </para>
       <note><para>
-      At present, the libraries are specified as a simple list.  A future
-      version of Kea will support the capability of specifying a set of
-      parameters for each library. When that is added, it is likely
-      that the syntax for specifying hooks libraries will change.
+        This is a change to the syntax used in Kea 0.9.2 and earlier, where
+        hooks-libraries was a list of strings, each string being the name of
+        a library.  The change has been made in Kea 1.0 to facilitate the
+        specification of library-specific parameters, a feature that will be
+        added to a future version of Kea.
       </para></note>
       <para>
       Notes:
@@ -79,7 +84,18 @@
           <listitem><para>
           An empty list has the same effect as omitting the
          <command>hooks-libraries</command> configuration element altogether.
-          </para></listitem>
+          </para>
+          <note><para>
+          There is one case where this is not true: if Kea
+          is running with a configuration that contains a
+          <command>hooks-libraries</command> item, and that item is
+          removed and the configuration reloaded, the removal will be
+          ignored and the libraries will remain loaded.  As a workaround,
+          instead of removing the <command>hooks-libraries</command>
+          item, change it to an empty list.  This will be fixed in a
+          future version of Kea.
+          </para></note>
+          </listitem>
         </itemizedlist>
       </para>
       <para>

+ 11 - 3
src/bin/dhcp4/dhcp4.spec

@@ -10,10 +10,18 @@
         "item_default": [],
         "list_item_spec":
         {
-          "item_name": "hooks-library",
-          "item_type": "string",
+          "item_name": "hooks-library-spec",
+          "item_type": "map",
           "item_optional": false,
-          "item_default": ""
+          "item_default": {},
+          "map_item_spec": [
+             {
+                "item_name": "library",
+                "item_type": "string",
+                "item_optional": false,
+                "item_default": ""
+             }
+          ]
         }
       },
 

+ 1 - 2
src/bin/dhcp4/tests/config_parser_unittest.cc

@@ -2772,7 +2772,6 @@ TEST_F(Dhcp4ParserTest, vendorOptionsCsv) {
 // of hooks libraries.
 std::string
 buildHooksLibrariesConfig(const std::vector<std::string>& libraries) {
-    const string quote("\"");
 
     // Create the first part of the configuration string.
     string config =
@@ -2786,7 +2785,7 @@ buildHooksLibrariesConfig(const std::vector<std::string>& libraries) {
         if (i > 0) {
             config += string(", ");
         }
-        config += (quote + libraries[i] + quote);
+        config += (string("{ \"library\": \"") + libraries[i] + string("\" }"));
     }
 
     // Append the remainder of the configuration.

+ 11 - 3
src/bin/dhcp6/dhcp6.spec

@@ -10,10 +10,18 @@
         "item_default": [],
         "list_item_spec":
         {
-          "item_name": "hooks-library",
-          "item_type": "string",
+          "item_name": "hooks-library-spec",
+          "item_type": "map",
           "item_optional": false,
-          "item_default": ""
+          "item_default": {},
+          "map_item_spec": [
+             {
+                "item_name": "library",
+                "item_type": "string",
+                "item_optional": false,
+                "item_default": ""
+             }
+          ]
         }
       },
 

+ 4 - 1
src/bin/dhcp6/tests/config_parser_unittest.cc

@@ -2912,6 +2912,9 @@ TEST_F(Dhcp6ParserTest, DISABLED_stdOptionDataEncapsulate) {
 // of hooks libraries.
 std::string
 buildHooksLibrariesConfig(const std::vector<std::string>& libraries) {
+    const string lbrace("{");
+    const string rbrace("}");
+    const string liblabel("\"library\": ");
     const string quote("\"");
 
     // Create the first part of the configuration string.
@@ -2924,7 +2927,7 @@ buildHooksLibrariesConfig(const std::vector<std::string>& libraries) {
         if (i > 0) {
             config += string(", ");
         }
-        config += (quote + libraries[i] + quote);
+        config += (lbrace + liblabel + quote + libraries[i] + quote + rbrace);
     }
 
     // Append the remainder of the configuration.

+ 1 - 1
src/lib/dhcpsrv/client_class_def.h

@@ -132,7 +132,7 @@ typedef boost::shared_ptr<ClientClassDef> ClientClassDefPtr;
 /// @brief Defines a map of ClientClassDef's, keyed by the class name.
 typedef std::map<std::string,ClientClassDefPtr> ClientClassDefMap;
 
-/// @brief Defines a pointer to a ClientClassDictionary
+/// @brief Defines a pointer to a ClientClassDefMap
 typedef boost::shared_ptr<ClientClassDefMap> ClientClassDefMapPtr;
 
 /// @brief Defines a pair for working with a ClientClassDefMap

+ 16 - 14
src/lib/dhcpsrv/csv_lease_file4.cc

@@ -22,14 +22,14 @@ namespace isc {
 namespace dhcp {
 
 CSVLeaseFile4::CSVLeaseFile4(const std::string& filename)
-    : CSVFile(filename) {
+    : VersionedCSVFile(filename) {
     initColumns();
 }
 
 void
 CSVLeaseFile4::open(const bool seek_to_end) {
     // Call the base class to open the file
-    CSVFile::open(seek_to_end);
+    VersionedCSVFile::open(seek_to_end);
 
     // and clear any statistics we may have
     clearStatistics();
@@ -62,7 +62,7 @@ CSVLeaseFile4::append(const Lease4& lease) {
     row.writeAt(getColumnIndex("state"), lease.state_);
 
     try {
-        CSVFile::append(row);
+        VersionedCSVFile::append(row);
     } catch (const std::exception&) {
         // Catch any errors so we can bump the error counter then rethrow it
         ++write_errs_;
@@ -85,7 +85,7 @@ CSVLeaseFile4::next(Lease4Ptr& lease) {
     try {
         // Get the row of CSV values.
         CSVRow row;
-        CSVFile::next(row);
+        VersionedCSVFile::next(row);
         // The empty row signals EOF.
         if (row == CSVFile::EMPTY_ROW()) {
             lease.reset();
@@ -137,16 +137,18 @@ CSVLeaseFile4::next(Lease4Ptr& lease) {
 
 void
 CSVLeaseFile4::initColumns() {
-    addColumn("address");
-    addColumn("hwaddr");
-    addColumn("client_id");
-    addColumn("valid_lifetime");
-    addColumn("expire");
-    addColumn("subnet_id");
-    addColumn("fqdn_fwd");
-    addColumn("fqdn_rev");
-    addColumn("hostname");
-    addColumn("state");
+    addColumn("address", "1.0");
+    addColumn("hwaddr", "1.0");
+    addColumn("client_id", "1.0");
+    addColumn("valid_lifetime", "1.0");
+    addColumn("expire", "1.0");
+    addColumn("subnet_id", "1.0");
+    addColumn("fqdn_fwd", "1.0");
+    addColumn("fqdn_rev", "1.0");
+    addColumn("hostname", "1.0");
+    addColumn("state", "2.0", "0");
+    // Any file without at least the hostname column is invalid
+    setMinimumValidColumns("hostname");
 }
 
 IOAddress

+ 2 - 2
src/lib/dhcpsrv/csv_lease_file4.h

@@ -20,7 +20,7 @@
 #include <dhcpsrv/lease.h>
 #include <dhcpsrv/subnet.h>
 #include <dhcpsrv/lease_file_stats.h>
-#include <util/csv_file.h>
+#include <util/versioned_csv_file.h>
 #include <stdint.h>
 #include <string>
 #include <time.h>
@@ -39,7 +39,7 @@ namespace dhcp {
 /// validation (see http://kea.isc.org/ticket/2405). However, when #2405
 /// is implemented, the @c next function may need to be updated to use the
 /// validation capability of @c Lease4.
-class CSVLeaseFile4 : public isc::util::CSVFile, public LeaseFileStats {
+class CSVLeaseFile4 : public isc::util::VersionedCSVFile, public LeaseFileStats {
 public:
 
     /// @brief Constructor.

+ 21 - 18
src/lib/dhcpsrv/csv_lease_file6.cc

@@ -23,14 +23,14 @@ namespace isc {
 namespace dhcp {
 
 CSVLeaseFile6::CSVLeaseFile6(const std::string& filename)
-    : CSVFile(filename) {
+    : VersionedCSVFile(filename) {
     initColumns();
 }
 
 void
 CSVLeaseFile6::open(const bool seek_to_end) {
     // Call the base class to open the file
-    CSVFile::open(seek_to_end);
+    VersionedCSVFile::open(seek_to_end);
 
     // and clear any statistics we may have
     clearStatistics();
@@ -61,7 +61,7 @@ CSVLeaseFile6::append(const Lease6& lease) {
     }
     row.writeAt(getColumnIndex("state"), lease.state_);
     try {
-        CSVFile::append(row);
+        VersionedCSVFile::append(row);
     } catch (const std::exception&) {
         // Catch any errors so we can bump the error counter then rethrow it
         ++write_errs_;
@@ -84,7 +84,7 @@ CSVLeaseFile6::next(Lease6Ptr& lease) {
     try {
         // Get the row of CSV values.
         CSVRow row;
-        CSVFile::next(row);
+        VersionedCSVFile::next(row);
         // The empty row signals EOF.
         if (row == CSVFile::EMPTY_ROW()) {
             lease.reset();
@@ -122,20 +122,23 @@ CSVLeaseFile6::next(Lease6Ptr& lease) {
 
 void
 CSVLeaseFile6::initColumns() {
-    addColumn("address");
-    addColumn("duid");
-    addColumn("valid_lifetime");
-    addColumn("expire");
-    addColumn("subnet_id");
-    addColumn("pref_lifetime");
-    addColumn("lease_type");
-    addColumn("iaid");
-    addColumn("prefix_len");
-    addColumn("fqdn_fwd");
-    addColumn("fqdn_rev");
-    addColumn("hostname");
-    addColumn("hwaddr");
-    addColumn("state");
+    addColumn("address", "1.0");
+    addColumn("duid", "1.0");
+    addColumn("valid_lifetime", "1.0");
+    addColumn("expire", "1.0");
+    addColumn("subnet_id", "1.0");
+    addColumn("pref_lifetime", "1.0");
+    addColumn("lease_type", "1.0");
+    addColumn("iaid", "1.0");
+    addColumn("prefix_len", "1.0");
+    addColumn("fqdn_fwd", "1.0");
+    addColumn("fqdn_rev", "1.0");
+    addColumn("hostname", "1.0");
+    addColumn("hwaddr", "2.0");
+    addColumn("state", "3.0", "0");
+
+    // Any file without at least the hostname column is invalid
+    setMinimumValidColumns("hostname");
 }
 
 Lease::Type
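
For orientation, both lease file classes now follow the same pattern supplied by
the new VersionedCSVFile base class (added under src/lib/util by this commit):
each column is registered together with the schema version that introduced it,
plus an optional default value that is substituted when an older input file
lacks that column.  The short sketch below illustrates the pattern using only
calls visible in this diff (addColumn, setMinimumValidColumns, needsConversion,
getInputSchemaState); the WidgetCSVFile class and its column names are
hypothetical and not part of the commit.

// Hypothetical sketch: a versioned CSV file whose schema grew from 1.0 to 2.0.
// It mirrors the calls used by CSVLeaseFile4/6 above.
#include <util/versioned_csv_file.h>

#include <iostream>
#include <string>

class WidgetCSVFile : public isc::util::VersionedCSVFile {
public:
    WidgetCSVFile(const std::string& filename)
        : isc::util::VersionedCSVFile(filename) {
        // Columns present since the first schema version.
        addColumn("widget", "1.0");
        addColumn("color", "1.0");
        // Column added in schema 2.0; rows read from 1.0 files have this
        // column filled in with the default value "0".
        addColumn("count", "2.0", "0");
        // Input files that do not contain at least the "color" column
        // are rejected when opened.
        setMinimumValidColumns("color");
    }
};

int main() {
    WidgetCSVFile file("widgets.csv");
    file.open();
    if (file.needsConversion()) {
        // The input file was written with an older (or newer) schema and
        // should eventually be rewritten, as the servers now do by
        // launching LFC.
        std::cout << (file.getInputSchemaState() ==
                      isc::util::VersionedCSVFile::NEEDS_UPGRADE
                          ? "needs upgrade" : "needs downgrade")
                  << std::endl;
    }
    return (0);
}

In both CSVLeaseFile4 and CSVLeaseFile6 the minimum valid column is "hostname",
so a header that stops short of the hostname column will not open, while 1.0
and later files are read normally and merely flagged for conversion.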

+ 2 - 2
src/lib/dhcpsrv/csv_lease_file6.h

@@ -20,7 +20,7 @@
 #include <dhcpsrv/lease.h>
 #include <dhcpsrv/subnet.h>
 #include <dhcpsrv/lease_file_stats.h>
-#include <util/csv_file.h>
+#include <util/versioned_csv_file.h>
 #include <stdint.h>
 #include <string>
 
@@ -38,7 +38,7 @@ namespace dhcp {
 /// validation (see http://kea.isc.org/ticket/2405). However, when #2405
 /// is implemented, the @c next function may need to be updated to use the
 /// validation capability of @c Lease6.
-class CSVLeaseFile6 : public isc::util::CSVFile, public LeaseFileStats {
+class CSVLeaseFile6 : public isc::util::VersionedCSVFile, public LeaseFileStats {
 public:
 
     /// @brief Constructor.

+ 22 - 0
src/lib/dhcpsrv/dhcpsrv_messages.mes

@@ -220,6 +220,13 @@ with the specified address to the memory file backend database.
 The code has issued a commit call.  For the memory file database, this is
 a no-op.
 
+% DHCPSRV_MEMFILE_CONVERTING_LEASE_FILES running LFC now to convert lease files to the current schema: %1.%2
+A warning message issued when the server has detected lease files that need
+to be either upgraded or downgraded to match the server's schema, and that
+the server is automatically running the LFC process to perform the conversion.
+This should only occur the first time the server is launched following a Kea
+installation upgrade (or downgrade).
+
 % DHCPSRV_MEMFILE_DB opening memory file lease database: %1
 This informational message is logged when a DHCP server (either V4 or
 V6) is about to open a memory file lease database.  The parameters of
@@ -352,6 +359,21 @@ timer used for lease file cleanup scheduling. This is highly unlikely
 and indicates programming error. The message include the reason for this
 error.
 
+% DHCPSRV_MEMFILE_NEEDS_DOWNGRADING version of lease file: %1 schema is later than version %2
+A warning message issued when the schema of the lease file loaded by the server
+is newer than the memfile schema of the server.  The server converts the lease
+data from newer schemas to its schema as it is read, therefore the lease
+information in use by the server will be correct. Note, though, that any
+data stored in newer schema fields will be dropped.  What remains is for the
+file itself to be rewritten using the current schema.
+
+% DHCPSRV_MEMFILE_NEEDS_UPGRADING version of lease file: %1 schema is earlier than version %2
+A warning message issued when the schema of the lease file loaded by the server
+pre-dates the memfile schema of the server.  Note that the server converts the
+lease data from older schemas to the current schema as it is read, therefore
+the lease information in use by the server will be correct.  What remains is
+for the file itself to be rewritten using the current schema.
+
 % DHCPSRV_MEMFILE_NO_STORAGE running in non-persistent mode, leases will be lost after restart
 A warning message issued when writes of leases to disk have been disabled
 in the configuration. This mode is useful for some kinds of performance

+ 11 - 1
src/lib/dhcpsrv/lease_file_loader.h

@@ -17,7 +17,7 @@
 
 #include <dhcpsrv/dhcpsrv_log.h>
 #include <dhcpsrv/memfile_lease_storage.h>
-#include <util/csv_file.h>
+#include <util/versioned_csv_file.h>
 
 #include <boost/shared_ptr.hpp>
 
@@ -154,6 +154,16 @@ public:
             }
         }
 
+        if (lease_file.needsConversion()) {
+            LOG_WARN(dhcpsrv_logger,
+                     (lease_file.getInputSchemaState()
+                      == util::VersionedCSVFile::NEEDS_UPGRADE
+                      ?  DHCPSRV_MEMFILE_NEEDS_UPGRADING
+                      : DHCPSRV_MEMFILE_NEEDS_DOWNGRADING))
+                     .arg(lease_file.getFilename())
+                     .arg(lease_file.getSchemaVersion());
+        }
+
         if (close_file_on_exit) {
             lease_file.close();
         }

+ 88 - 60
src/lib/dhcpsrv/memfile_lease_mgr.cc

@@ -90,9 +90,13 @@ public:
     /// or NULL. If this is NULL, the @c lease_file6 must be non-null.
     /// @param lease_file6 A pointer to the DHCPv6 lease file to be cleaned up
     /// or NULL. If this is NULL, the @c lease_file4 must be non-null.
+    /// @param run_once_now A flag that causes LFC to be invoked immediately,
+    /// regardless of the value of lfc_interval.  This is primarily used to
+    /// cause lease file schema upgrades upon startup.
     void setup(const uint32_t lfc_interval,
                const boost::shared_ptr<CSVLeaseFile4>& lease_file4,
-               const boost::shared_ptr<CSVLeaseFile6>& lease_file6);
+               const boost::shared_ptr<CSVLeaseFile6>& lease_file6,
+               bool run_once_now = false);
 
     /// @brief Spawns a new process.
     void execute();
@@ -155,58 +159,67 @@ LFCSetup::~LFCSetup() {
 void
 LFCSetup::setup(const uint32_t lfc_interval,
                 const boost::shared_ptr<CSVLeaseFile4>& lease_file4,
-                const boost::shared_ptr<CSVLeaseFile6>& lease_file6) {
+                const boost::shared_ptr<CSVLeaseFile6>& lease_file6,
+                bool run_once_now) {
 
-    // If LFC is enabled, we have to setup the interval timer and prepare for
-    // executing the kea-lfc process.
-    if (lfc_interval > 0) {
-        std::string executable;
-        char* c_executable = getenv(KEA_LFC_EXECUTABLE_ENV_NAME);
-        if (c_executable == NULL) {
-            executable = KEA_LFC_EXECUTABLE;
+    // If there is nothing to do, punt.
+    if (lfc_interval == 0 && !run_once_now) {
+        return;
+    }
 
-        } else {
-            executable = c_executable;
-        }
+    // Start preparing the command line for kea-lfc.
+    std::string executable;
+    char* c_executable = getenv(KEA_LFC_EXECUTABLE_ENV_NAME);
+    if (c_executable == NULL) {
+        executable = KEA_LFC_EXECUTABLE;
+    } else {
+        executable = c_executable;
+    }
 
-        // Start preparing the command line for kea-lfc.
-
-        // Gather the base file name.
-        std::string lease_file = lease_file4 ? lease_file4->getFilename() :
-            lease_file6->getFilename();
-
-        // Create the other names by appending suffixes to the base name.
-        util::ProcessArgs args;
-        // Universe: v4 or v6.
-        args.push_back(lease_file4 ? "-4" : "-6");
-        // Previous file.
-        args.push_back("-x");
-        args.push_back(Memfile_LeaseMgr::appendSuffix(lease_file,
-                                                      Memfile_LeaseMgr::FILE_PREVIOUS));
-        // Input file.
-        args.push_back("-i");
-        args.push_back(Memfile_LeaseMgr::appendSuffix(lease_file,
-                                                      Memfile_LeaseMgr::FILE_INPUT));
-        // Output file.
-        args.push_back("-o");
-        args.push_back(Memfile_LeaseMgr::appendSuffix(lease_file,
-                                                      Memfile_LeaseMgr::FILE_OUTPUT));
-        // Finish file.
-        args.push_back("-f");
-        args.push_back(Memfile_LeaseMgr::appendSuffix(lease_file,
-                                                      Memfile_LeaseMgr::FILE_FINISH));
-        // PID file.
-        args.push_back("-p");
-        args.push_back(Memfile_LeaseMgr::appendSuffix(lease_file,
-                                                      Memfile_LeaseMgr::FILE_PID));
-
-        // The configuration file is currently unused.
-        args.push_back("-c");
-        args.push_back("ignored-path");
-
-        // Create the process (do not start it yet).
-        process_.reset(new util::ProcessSpawn(executable, args));
+    // Gather the base file name.
+    std::string lease_file = lease_file4 ? lease_file4->getFilename() :
+                                           lease_file6->getFilename();
+
+    // Create the other names by appending suffixes to the base name.
+    util::ProcessArgs args;
+    // Universe: v4 or v6.
+    args.push_back(lease_file4 ? "-4" : "-6");
+
+    // Previous file.
+    args.push_back("-x");
+    args.push_back(Memfile_LeaseMgr::appendSuffix(lease_file,
+                                                  Memfile_LeaseMgr::FILE_PREVIOUS));
+    // Input file.
+    args.push_back("-i");
+    args.push_back(Memfile_LeaseMgr::appendSuffix(lease_file,
+                                                  Memfile_LeaseMgr::FILE_INPUT));
+    // Output file.
+    args.push_back("-o");
+    args.push_back(Memfile_LeaseMgr::appendSuffix(lease_file,
+                                                  Memfile_LeaseMgr::FILE_OUTPUT));
+    // Finish file.
+    args.push_back("-f");
+    args.push_back(Memfile_LeaseMgr::appendSuffix(lease_file,
+                                                  Memfile_LeaseMgr::FILE_FINISH));
+    // PID file.
+    args.push_back("-p");
+    args.push_back(Memfile_LeaseMgr::appendSuffix(lease_file,
+                                                  Memfile_LeaseMgr::FILE_PID));
+
+    // The configuration file is currently unused.
+    args.push_back("-c");
+    args.push_back("ignored-path");
+
+    // Create the process (do not start it yet).
+    process_.reset(new util::ProcessSpawn(executable, args));
+
+    // If we've been told to run it once now, invoke the callback directly.
+    if (run_once_now) {
+        callback_();
+    }
 
+    // If it's supposed to run periodically, set that up now.
+    if (lfc_interval > 0) {
         // Set the timer to call callback function periodically.
         LOG_INFO(dhcpsrv_logger, DHCPSRV_MEMFILE_LFC_SETUP).arg(lfc_interval);
 
@@ -253,19 +266,25 @@ const int Memfile_LeaseMgr::MINOR_VERSION;
 Memfile_LeaseMgr::Memfile_LeaseMgr(const DatabaseConnection::ParameterMap& parameters)
     : LeaseMgr(), lfc_setup_(), conn_(parameters)
     {
+    bool conversion_needed = false;
+
     // Check the universe and use v4 file or v6 file.
     std::string universe = conn_.getParameter("universe");
     if (universe == "4") {
         std::string file4 = initLeaseFilePath(V4);
         if (!file4.empty()) {
-            loadLeasesFromFiles<Lease4, CSVLeaseFile4>(file4, lease_file4_,
-                                                       storage4_);
+            conversion_needed = loadLeasesFromFiles<Lease4,
+                                                 CSVLeaseFile4>(file4,
+                                                                lease_file4_,
+                                                                storage4_);
         }
     } else {
         std::string file6 = initLeaseFilePath(V6);
         if (!file6.empty()) {
-            loadLeasesFromFiles<Lease6, CSVLeaseFile6>(file6, lease_file6_,
-                                                       storage6_);
+            conversion_needed = loadLeasesFromFiles<Lease6,
+                                                 CSVLeaseFile6>(file6,
+                                                                lease_file6_,
+                                                                storage6_);
         }
     }
 
@@ -275,9 +294,12 @@ Memfile_LeaseMgr::Memfile_LeaseMgr(const DatabaseConnection::ParameterMap& param
     // operation.
    if (!persistLeases(V4) && !persistLeases(V6)) {
         LOG_WARN(dhcpsrv_logger, DHCPSRV_MEMFILE_NO_STORAGE);
-
     } else  {
-       lfcSetup();
+        if (conversion_needed) {
+            LOG_WARN(dhcpsrv_logger, DHCPSRV_MEMFILE_CONVERTING_LEASE_FILES)
+                    .arg(MAJOR_VERSION).arg(MINOR_VERSION);
+        }
+        lfcSetup(conversion_needed);
     }
 }
 
@@ -867,7 +889,7 @@ Memfile_LeaseMgr::initLeaseFilePath(Universe u) {
 }
 
 template<typename LeaseObjectType, typename LeaseFileType, typename StorageType>
-void Memfile_LeaseMgr::loadLeasesFromFiles(const std::string& filename,
+bool Memfile_LeaseMgr::loadLeasesFromFiles(const std::string& filename,
                                            boost::shared_ptr<LeaseFileType>& lease_file,
                                            StorageType& storage) {
     // Check if the instance of the LFC is running right now. If it is
@@ -885,11 +907,12 @@ void Memfile_LeaseMgr::loadLeasesFromFiles(const std::string& filename,
     storage.clear();
 
     // Load the leasefile.completed, if exists.
+    bool conversion_needed = false;
     lease_file.reset(new LeaseFileType(std::string(filename + ".completed")));
     if (lease_file->exists()) {
         LeaseFileLoader::load<LeaseObjectType>(*lease_file, storage,
                                                MAX_LEASE_ERRORS);
-
+        conversion_needed = conversion_needed || lease_file->needsConversion();
     } else {
         // If the leasefile.completed doesn't exist, let's load the leases
         // from leasefile.2 and leasefile.1, if they exist.
@@ -897,12 +920,14 @@ void Memfile_LeaseMgr::loadLeasesFromFiles(const std::string& filename,
         if (lease_file->exists()) {
             LeaseFileLoader::load<LeaseObjectType>(*lease_file, storage,
                                                    MAX_LEASE_ERRORS);
+            conversion_needed =  conversion_needed || lease_file->needsConversion();
         }
 
         lease_file.reset(new LeaseFileType(appendSuffix(filename, FILE_INPUT)));
         if (lease_file->exists()) {
             LeaseFileLoader::load<LeaseObjectType>(*lease_file, storage,
                                                    MAX_LEASE_ERRORS);
+            conversion_needed =  conversion_needed || lease_file->needsConversion();
         }
     }
 
@@ -915,6 +940,9 @@ void Memfile_LeaseMgr::loadLeasesFromFiles(const std::string& filename,
     lease_file.reset(new LeaseFileType(filename));
     LeaseFileLoader::load<LeaseObjectType>(*lease_file, storage,
                                            MAX_LEASE_ERRORS, false);
+    conversion_needed =  conversion_needed || lease_file->needsConversion();
+
+    return (conversion_needed);
 }
 
 
@@ -942,7 +970,7 @@ Memfile_LeaseMgr::lfcCallback() {
 }
 
 void
-Memfile_LeaseMgr::lfcSetup() {
+Memfile_LeaseMgr::lfcSetup(bool conversion_needed) {
     std::string lfc_interval_str = "0";
     try {
         lfc_interval_str = conn_.getParameter("lfc-interval");
@@ -958,9 +986,9 @@ Memfile_LeaseMgr::lfcSetup() {
                   << lfc_interval_str << " specified");
     }
 
-    if (lfc_interval > 0) {
+    if (lfc_interval > 0 || conversion_needed) {
         lfc_setup_.reset(new LFCSetup(boost::bind(&Memfile_LeaseMgr::lfcCallback, this)));
-        lfc_setup_->setup(lfc_interval, lease_file4_, lease_file6_);
+        lfc_setup_->setup(lfc_interval, lease_file4_, lease_file6_, conversion_needed);
     }
 }
 

+ 21 - 2
src/lib/dhcpsrv/memfile_lease_mgr.h

@@ -118,6 +118,18 @@ public:
 
     /// @brief The sole lease manager constructor
     ///
+    /// This method:
+    /// - Initializes the new instance based on the parameters given
+    /// - Loads (or creates) the appropriate lease file(s)
+    /// - Initiates the periodic scheduling of the LFC (if enabled)
+    ///
+    /// If any of the files loaded require conversion to the current schema
+    /// (upgrade or downgrade), @c lfcSetup() will be invoked with its
+    /// @c run_once_now parameter set to true.  This causes lfcSetup() to
+    /// invoke the LFC process immediately regardless of whether LFC is
+    /// enabled. This ensures that any files which need conversion are
+    /// converted automatically.
+    ///
     /// dbconfig is a generic way of passing parameters. Parameters
     /// are passed in the "name=value" format, separated by spaces.
     /// Values may be enclosed in double quotes, if needed.
@@ -549,11 +561,14 @@ private:
     /// @tparam LeaseFileType @c CSVLeaseFile4 or @c CSVLeaseFile6.
     /// @tparam StorageType @c Lease4Storage or @c Lease6Storage.
     ///
+    /// @return Returns true if any of the files loaded need conversion from
+    /// an older or newer schema.
+    ///
     /// @throw CSVFileError when parsing any of the lease files fails.
     /// @throw DbOpenError when it is found that the LFC is in progress.
     template<typename LeaseObjectType, typename LeaseFileType,
              typename StorageType>
-    void loadLeasesFromFiles(const std::string& filename,
+    bool loadLeasesFromFiles(const std::string& filename,
                              boost::shared_ptr<LeaseFileType>& lease_file,
                              StorageType& storage);
 
@@ -626,7 +641,11 @@ private:
     /// Kea build directory, the @c KEA_LFC_EXECUTABLE environmental
     /// variable should be set to hold an absolute path to the kea-lfc
    /// executable.
-    void lfcSetup();
+    /// @param conversion_needed flag indicating that the schema of the input
+    /// lease file(s) does not match the current schema (older or newer) and
+    /// that they need conversion. This value is passed through to
+    /// LFCSetup::setup() via its run_once_now parameter.
+    void lfcSetup(bool conversion_needed = false);
 
     /// @brief Performs a lease file cleanup for DHCPv4 or DHCPv6.
     ///
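
To make the constructor documentation above concrete, here is a small
hypothetical usage sketch.  It relies only on what is visible in this commit
(the DatabaseConnection::ParameterMap argument and the "universe" and
"lfc-interval" parameters read in memfile_lease_mgr.cc); the "name" parameter
and the file path are assumptions added for illustration.

// Hypothetical sketch: constructing a memfile lease manager directly.
// "universe" selects v4 or v6, "lfc-interval" (seconds) enables periodic
// LFC runs, and "name" (assumed here) points at the lease file.  If the
// file on disk uses a different schema version, the constructor flags it
// and launches LFC once to rewrite it.
#include <dhcpsrv/memfile_lease_mgr.h>

#include <string>

int main() {
    isc::dhcp::DatabaseConnection::ParameterMap params;
    params["universe"] = "4";
    params["name"] = "/tmp/kea-leases4.csv";
    params["lfc-interval"] = "3600";

    isc::dhcp::Memfile_LeaseMgr mgr(params);
    // ... use mgr through the LeaseMgr interface ...
    return (0);
}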

+ 53 - 5
src/lib/dhcpsrv/parsers/dhcp_parsers.cc

@@ -248,17 +248,65 @@ HooksLibrariesParser::HooksLibrariesParser(const std::string& param_name)
     }
 }
 
+// Parse the configuration.  As Kea has not yet implemented parameters, the
+// parsing code only checks that:
+//
+// 1. Each element in the hooks-libraries list is a map
+// 2. The map contains an element "library" whose value is a string: all
+//    other elements in the map are ignored.
 void
 HooksLibrariesParser::build(ConstElementPtr value) {
     // Initialize.
     libraries_.clear();
     changed_ = false;
 
-    // Extract the list of libraries.
-    BOOST_FOREACH(ConstElementPtr iface, value->listValue()) {
-        string libname = iface->str();
-        boost::erase_all(libname, "\"");
-        libraries_.push_back(libname);
+    // This is the new syntax.  Iterate through it and get each map.
+    BOOST_FOREACH(ConstElementPtr library_entry, value->listValue()) {
+        // Is it a map?
+        if (library_entry->getType() != Element::map) {
+            isc_throw(DhcpConfigError, "hooks library configuration error:"
+                " one or more entries in the hooks-libraries list is not"
+                " a map (" << library_entry->getPosition() << ")");
+        }
+
+        // Iterate through each element in the map.  We check
+        // whether we have found a library element.
+        bool lib_found = false;
+        BOOST_FOREACH(ConfigPair entry_item, library_entry->mapValue()) {
+            if (entry_item.first == "library") {
+                if (entry_item.second->getType() != Element::string) {
+                    isc_throw(DhcpConfigError, "hooks library configuration"
+                        " error: value of 'library' element is not a string"
+                        " giving the path to a hooks library (" <<
+                        entry_item.second->getPosition() << ")");
+                }
+
+                // Get the name of the library and add it to the list after
+                // removing quotes.
+                string libname = (entry_item.second)->stringValue();
+
+                // Remove leading/trailing quotes and any leading/trailing
+                // spaces.
+                boost::erase_all(libname, "\"");
+                libname = isc::util::str::trim(libname);
+                if (libname.empty()) {
+                    isc_throw(DhcpConfigError, "hooks library configuration"
+                        " error: value of 'library' element must not be"
+                        " blank (" <<
+                        entry_item.second->getPosition() << ")");
+                }
+                libraries_.push_back(libname);
+
+                // Note we have found the library name.
+                lib_found = true;
+            }
+        }
+        if (! lib_found) {
+            isc_throw(DhcpConfigError, "hooks library configuration error:"
+                " one or more hooks-libraries elements are missing the"
+                " name of the library"  <<
+                " (" << library_entry->getPosition() << ")");
+        }
     }
 
     // Check if the list of libraries has changed.  If not, nothing is done

+ 23 - 0
src/lib/dhcpsrv/parsers/dhcp_parsers.h

@@ -479,6 +479,29 @@ public:
     /// checks each of the libraries in the list for validity (they exist and
     /// have a "version" function that returns the correct value).
     ///
+    /// The syntax for specifying hooks libraries allows for library-specific
+    /// parameters to be specified along with the library, e.g.
+    ///
+    /// @code
+    ///      "hooks-libraries": [
+    ///          {
+    ///              "library": "hook-lib-1.so",
+    ///              "parameters": {
+    ///                  "alpha": "a string",
+    ///                  "beta": 42
+    ///              }
+    ///          },
+    ///          :
+    ///      ]
+    /// @endcode
+    ///
+    /// As Kea has not yet implemented parameters, the parsing code only checks
+    /// that:
+    ///
+    /// -# Each element in the hooks-libraries list is a map
+    /// -# The map contains an element "library" whose value is a string: all
+    ///    other elements in the map are ignored.
+    ///
     /// @param value pointer to the content of parsed values
     virtual void build(isc::data::ConstElementPtr value);
 

+ 170 - 23
src/lib/dhcpsrv/tests/csv_lease_file4_unittest.cc

@@ -18,8 +18,6 @@
 #include <dhcpsrv/csv_lease_file4.h>
 #include <dhcpsrv/lease.h>
 #include <dhcpsrv/tests/lease_file_io.h>
-#include <boost/scoped_ptr.hpp>
-#include <boost/shared_ptr.hpp>
 #include <gtest/gtest.h>
 #include <sstream>
 
@@ -125,22 +123,22 @@ TEST_F(CSVLeaseFile4Test, parse) {
     writeSampleFile();
 
     // Open the lease file.
-    boost::scoped_ptr<CSVLeaseFile4> lf(new CSVLeaseFile4(filename_));
-    ASSERT_NO_THROW(lf->open());
+    CSVLeaseFile4 lf(filename_);
+    ASSERT_NO_THROW(lf.open());
 
     // Verify the counters are cleared
     {
     SCOPED_TRACE("Check stats are empty");
-    checkStats(*lf, 0, 0, 0, 0, 0, 0);
+    checkStats(lf, 0, 0, 0, 0, 0, 0);
     }
 
     Lease4Ptr lease;
     // The first read should be successful.
     {
     SCOPED_TRACE("First lease valid");
-    EXPECT_TRUE(lf->next(lease));
+    EXPECT_TRUE(lf.next(lease));
     ASSERT_TRUE(lease);
-    checkStats(*lf, 1, 1, 0, 0, 0, 0);
+    checkStats(lf, 1, 1, 0, 0, 0, 0);
 
     // Verify that the lease attributes are correct.
     EXPECT_EQ("192.0.2.1", lease->addr_.toText());
@@ -159,17 +157,17 @@ TEST_F(CSVLeaseFile4Test, parse) {
     // Second lease is malformed - HW address is empty.
     {
     SCOPED_TRACE("Second lease malformed");
-    EXPECT_FALSE(lf->next(lease));
-    checkStats(*lf, 2, 1, 1, 0, 0, 0);
+    EXPECT_FALSE(lf.next(lease));
+    checkStats(lf, 2, 1, 1, 0, 0, 0);
     }
 
     // Even though parsing previous lease failed, reading the next lease should be
     // successful.
     {
     SCOPED_TRACE("Third lease valid");
-    EXPECT_TRUE(lf->next(lease));
+    EXPECT_TRUE(lf.next(lease));
     ASSERT_TRUE(lease);
-    checkStats(*lf, 3, 2, 1, 0, 0, 0);
+    checkStats(lf, 3, 2, 1, 0, 0, 0);
 
     // Verify that the third lease is correct.
     EXPECT_EQ("192.0.3.15", lease->addr_.toText());
@@ -190,28 +188,28 @@ TEST_F(CSVLeaseFile4Test, parse) {
     // lease pointer should be NULL.
     {
     SCOPED_TRACE("Fifth read empty");
-    EXPECT_TRUE(lf->next(lease));
+    EXPECT_TRUE(lf.next(lease));
     EXPECT_FALSE(lease);
-    checkStats(*lf, 4, 2, 1, 0, 0, 0);
+    checkStats(lf, 4, 2, 1, 0, 0, 0);
     }
 
     // We should be able to do it again.
     {
     SCOPED_TRACE("Sixth read empty");
-    EXPECT_TRUE(lf->next(lease));
+    EXPECT_TRUE(lf.next(lease));
     EXPECT_FALSE(lease);
-    checkStats(*lf, 5, 2, 1, 0, 0, 0);
+    checkStats(lf, 5, 2, 1, 0, 0, 0);
     }
 }
 
 // This test checks creation of the lease file and writing leases.
 TEST_F(CSVLeaseFile4Test, recreate) {
-    boost::scoped_ptr<CSVLeaseFile4> lf(new CSVLeaseFile4(filename_));
-    ASSERT_NO_THROW(lf->recreate());
+    CSVLeaseFile4 lf(filename_);
+    ASSERT_NO_THROW(lf.recreate());
     ASSERT_TRUE(io_.exists());
 
     // Verify the counters are cleared
-    checkStats(*lf, 0, 0, 0, 0, 0, 0);
+    checkStats(lf, 0, 0, 0, 0, 0, 0);
 
     // Create first lease, with NULL client id.
     Lease4Ptr lease(new Lease4(IOAddress("192.0.3.2"),
@@ -222,8 +220,8 @@ TEST_F(CSVLeaseFile4Test, recreate) {
     lease->state_ = Lease::STATE_EXPIRED_RECLAIMED;
     {
     SCOPED_TRACE("First write");
-    ASSERT_NO_THROW(lf->append(*lease));
-    checkStats(*lf, 0, 0, 0, 1, 1, 0);
+    ASSERT_NO_THROW(lf.append(*lease));
+    checkStats(lf, 0, 0, 0, 1, 1, 0);
     }
 
     // Create second lease, with non-NULL client id.
@@ -233,12 +231,12 @@ TEST_F(CSVLeaseFile4Test, recreate) {
                            100, 60, 90, 0, 7));
     {
     SCOPED_TRACE("Second write");
-    ASSERT_NO_THROW(lf->append(*lease));
-    checkStats(*lf, 0, 0, 0, 2, 2, 0);
+    ASSERT_NO_THROW(lf.append(*lease));
+    checkStats(lf, 0, 0, 0, 2, 2, 0);
     }
 
     // Close the lease file.
-    lf->close();
+    lf.close();
     // Check that the contents of the csv file are correct.
     EXPECT_EQ("address,hwaddr,client_id,valid_lifetime,expire,subnet_id,"
               "fqdn_fwd,fqdn_rev,hostname,state\n"
@@ -248,6 +246,155 @@ TEST_F(CSVLeaseFile4Test, recreate) {
               io_.readFile());
 }
 
+// Verifies that a schema 1.0 file with records from
+// schema 1.0 and 2.0 loads correctly.
+TEST_F(CSVLeaseFile4Test, mixedSchemaload) {
+    // Create mixed schema file
+    io_.writeFile(
+                  // schema 1.0 header
+                  "address,hwaddr,client_id,valid_lifetime,expire,subnet_id,"
+                  "fqdn_fwd,fqdn_rev,hostname\n"
+                  // schema 1.0 record
+                  "192.0.2.1,06:07:08:09:1a:bc,,200,200,8,1,1,"
+                  "one.example.com\n"
+                  // schema 2.0 record - has state
+                  "192.0.2.2,06:07:08:09:2a:bc,,200,200,8,1,1,"
+                  "two.example.com,1\n"
+                  // schema 2.0 record - has state
+                  "192.0.2.3,06:07:08:09:3a:bc,,200,200,8,1,1,"
+                  "three.example.com,2\n"
+                   );
+
+    // Open the lease file.
+    CSVLeaseFile4 lf(filename_);
+    ASSERT_NO_THROW(lf.open());
+
+    Lease4Ptr lease;
+
+    // The first read should be successful.
+    {
+    SCOPED_TRACE("First lease valid");
+    EXPECT_TRUE(lf.next(lease));
+    ASSERT_TRUE(lease);
+
+    // Verify that the lease attributes are correct.
+    EXPECT_EQ("192.0.2.1", lease->addr_.toText());
+    HWAddr hwaddr1(*lease->hwaddr_);
+    EXPECT_EQ("06:07:08:09:1a:bc", hwaddr1.toText(false));
+    EXPECT_FALSE(lease->client_id_);
+    EXPECT_EQ(200, lease->valid_lft_);
+    EXPECT_EQ(0, lease->cltt_);
+    EXPECT_EQ(8, lease->subnet_id_);
+    EXPECT_TRUE(lease->fqdn_fwd_);
+    EXPECT_TRUE(lease->fqdn_rev_);
+    EXPECT_EQ("one.example.com", lease->hostname_);
+    // Verify that added state is DEFAULT
+    EXPECT_EQ(Lease::STATE_DEFAULT, lease->state_);
+    }
+
+    {
+    SCOPED_TRACE("Second lease valid");
+    EXPECT_TRUE(lf.next(lease));
+    ASSERT_TRUE(lease);
+
+    // Verify that the lease attributes are correct.
+    EXPECT_EQ("192.0.2.2", lease->addr_.toText());
+    HWAddr hwaddr1(*lease->hwaddr_);
+    EXPECT_EQ("06:07:08:09:2a:bc", hwaddr1.toText(false));
+    EXPECT_FALSE(lease->client_id_);
+    EXPECT_EQ(200, lease->valid_lft_);
+    EXPECT_EQ(0, lease->cltt_);
+    EXPECT_EQ(8, lease->subnet_id_);
+    EXPECT_TRUE(lease->fqdn_fwd_);
+    EXPECT_TRUE(lease->fqdn_rev_);
+    EXPECT_EQ("two.example.com", lease->hostname_);
+    EXPECT_EQ(Lease::STATE_DECLINED, lease->state_);
+    }
+
+    {
+    SCOPED_TRACE("Third lease valid");
+    EXPECT_TRUE(lf.next(lease));
+    ASSERT_TRUE(lease);
+
+    // Verify that the third lease is correct.
+    EXPECT_EQ("192.0.2.3", lease->addr_.toText());
+    HWAddr hwaddr1(*lease->hwaddr_);
+    EXPECT_EQ("06:07:08:09:3a:bc", hwaddr1.toText(false));
+    EXPECT_FALSE(lease->client_id_);
+    EXPECT_EQ(200, lease->valid_lft_);
+    EXPECT_EQ(0, lease->cltt_);
+    EXPECT_EQ(8, lease->subnet_id_);
+    EXPECT_TRUE(lease->fqdn_fwd_);
+    EXPECT_TRUE(lease->fqdn_rev_);
+    EXPECT_EQ("three.example.com", lease->hostname_);
+    EXPECT_EQ(Lease::STATE_EXPIRED_RECLAIMED, lease->state_);
+    }
+}
+
+
+// Verifies that a lease file with fewer header columns than the
+// minimum allowed will not open.
+TEST_F(CSVLeaseFile4Test, tooFewHeaderColumns) {
+    // Create 1.0 file
+    io_.writeFile("address,hwaddr,client_id,valid_lifetime,expire,subnet_id,"
+                  "fqdn_fwd,fqdn_rev\n");
+
+    // Open the lease file.
+    CSVLeaseFile4 lf(filename_);
+    ASSERT_THROW(lf.open(), CSVFileError);
+}
+
+// Verifies that a lease file with an unrecognized column header
+// will not open.
+TEST_F(CSVLeaseFile4Test, invalidHeaderColumn) {
+    // Create 1.0 file
+    io_.writeFile("address,hwaddr,BOGUS,valid_lifetime,expire,subnet_id,"
+                  "fqdn_fwd,fqdn_rev,hostname,state\n");
+
+    // Open the lease file.
+    CSVLeaseFile4 lf(filename_);
+    ASSERT_THROW(lf.open(), CSVFileError);
+}
+
+// Verifies that a lease file with more header columns than defined
+// columns will downgrade.
+TEST_F(CSVLeaseFile4Test, downGrade) {
+    // Create a 2.0 file plus an extra future column
+    io_.writeFile("address,hwaddr,client_id,valid_lifetime,expire,subnet_id,"
+                  "fqdn_fwd,fqdn_rev,hostname,state,FUTURE_COL\n"
+
+                  "192.0.2.3,06:07:08:09:3a:bc,,200,200,8,1,1,"
+                  "three.example.com,2,BOGUS\n");
+
+    // Lease file should open and report as needing downgrade.
+    CSVLeaseFile4 lf(filename_);
+    ASSERT_NO_THROW(lf.open());
+    EXPECT_TRUE(lf.needsConversion());
+    EXPECT_EQ(util::VersionedCSVFile::NEEDS_DOWNGRADE,
+              lf.getInputSchemaState());
+    Lease4Ptr lease;
+
+    {
+    SCOPED_TRACE("First lease valid");
+    EXPECT_TRUE(lf.next(lease));
+    ASSERT_TRUE(lease);
+
+    // Verify that the lease is correct.
+    EXPECT_EQ("192.0.2.3", lease->addr_.toText());
+    HWAddr hwaddr1(*lease->hwaddr_);
+    EXPECT_EQ("06:07:08:09:3a:bc", hwaddr1.toText(false));
+    EXPECT_FALSE(lease->client_id_);
+    EXPECT_EQ(200, lease->valid_lft_);
+    EXPECT_EQ(0, lease->cltt_);
+    EXPECT_EQ(8, lease->subnet_id_);
+    EXPECT_TRUE(lease->fqdn_fwd_);
+    EXPECT_TRUE(lease->fqdn_rev_);
+    EXPECT_EQ("three.example.com", lease->hostname_);
+    EXPECT_EQ(Lease::STATE_EXPIRED_RECLAIMED, lease->state_);
+    }
+}
+
+
 /// @todo Currently we don't check invalid lease attributes, such as invalid
 /// lease type, invalid preferred lifetime vs valid lifetime etc. The Lease6
 /// should be extended with the function that validates lease attributes. Once

+ 198 - 26
src/lib/dhcpsrv/tests/csv_lease_file6_unittest.cc

@@ -18,8 +18,6 @@
 #include <dhcpsrv/csv_lease_file6.h>
 #include <dhcpsrv/lease.h>
 #include <dhcpsrv/tests/lease_file_io.h>
-#include <boost/scoped_ptr.hpp>
-#include <boost/shared_ptr.hpp>
 #include <gtest/gtest.h>
 #include <sstream>
 
@@ -126,22 +124,22 @@ TEST_F(CSVLeaseFile6Test, parse) {
     writeSampleFile();
 
     // Open the lease file.
-    boost::scoped_ptr<CSVLeaseFile6> lf(new CSVLeaseFile6(filename_));
-    ASSERT_NO_THROW(lf->open());
+    CSVLeaseFile6 lf(filename_);
+    ASSERT_NO_THROW(lf.open());
 
     // Verify the counters are cleared
     {
     SCOPED_TRACE("Check stats are empty");
-    checkStats(*lf, 0, 0, 0, 0, 0, 0);
+    checkStats(lf, 0, 0, 0, 0, 0, 0);
     }
 
     Lease6Ptr lease;
     // Reading first read should be successful.
     {
     SCOPED_TRACE("First lease valid");
-    EXPECT_TRUE(lf->next(lease));
+    EXPECT_TRUE(lf.next(lease));
     ASSERT_TRUE(lease);
-    checkStats(*lf, 1, 1, 0, 0, 0, 0);
+    checkStats(lf, 1, 1, 0, 0, 0, 0);
 
     // Verify that the lease attributes are correct.
     EXPECT_EQ("2001:db8:1::1", lease->addr_.toText());
@@ -162,17 +160,17 @@ TEST_F(CSVLeaseFile6Test, parse) {
     // Second lease is malformed - DUID is empty.
     {
     SCOPED_TRACE("Second lease malformed");
-    EXPECT_FALSE(lf->next(lease));
-    checkStats(*lf, 2, 1, 1, 0, 0, 0);
+    EXPECT_FALSE(lf.next(lease));
+    checkStats(lf, 2, 1, 1, 0, 0, 0);
     }
 
     // Even though parsing the previous lease failed, reading the next lease
     // should be successful.
     {
     SCOPED_TRACE("Third lease valid");
-    EXPECT_TRUE(lf->next(lease));
+    EXPECT_TRUE(lf.next(lease));
     ASSERT_TRUE(lease);
-    checkStats(*lf, 3, 2, 1, 0, 0, 0);
+    checkStats(lf, 3, 2, 1, 0, 0, 0);
 
     // Verify that the third lease is correct.
     EXPECT_EQ("2001:db8:2::10", lease->addr_.toText());
@@ -193,9 +191,9 @@ TEST_F(CSVLeaseFile6Test, parse) {
     // Reading the fourth lease should be successful.
     {
     SCOPED_TRACE("Fourth lease valid");
-    EXPECT_TRUE(lf->next(lease));
+    EXPECT_TRUE(lf.next(lease));
     ASSERT_TRUE(lease);
-    checkStats(*lf, 4, 3, 1, 0, 0, 0);
+    checkStats(lf, 4, 3, 1, 0, 0, 0);
 
     // Verify that the lease is correct.
     EXPECT_EQ("3000:1::", lease->addr_.toText());
@@ -217,30 +215,30 @@ TEST_F(CSVLeaseFile6Test, parse) {
     // lease pointer should be NULL.
     {
     SCOPED_TRACE("Fifth read empty");
-    EXPECT_TRUE(lf->next(lease));
+    EXPECT_TRUE(lf.next(lease));
     EXPECT_FALSE(lease);
-    checkStats(*lf, 5, 3, 1, 0, 0, 0);
+    checkStats(lf, 5, 3, 1, 0, 0, 0);
     }
 
     // We should be able to do it again.
     {
     SCOPED_TRACE("Sixth read empty");
-    EXPECT_TRUE(lf->next(lease));
+    EXPECT_TRUE(lf.next(lease));
     EXPECT_FALSE(lease);
-    checkStats(*lf, 6, 3, 1, 0, 0, 0);
+    checkStats(lf, 6, 3, 1, 0, 0, 0);
     }
 }
 
 // This test checks creation of the lease file and writing leases.
 TEST_F(CSVLeaseFile6Test, recreate) {
-    boost::scoped_ptr<CSVLeaseFile6> lf(new CSVLeaseFile6(filename_));
-    ASSERT_NO_THROW(lf->recreate());
+    CSVLeaseFile6 lf(filename_);
+    ASSERT_NO_THROW(lf.recreate());
     ASSERT_TRUE(io_.exists());
 
     // Verify the counters are cleared
     {
     SCOPED_TRACE("Check stats are empty");
-    checkStats(*lf, 0, 0, 0, 0, 0, 0);
+    checkStats(lf, 0, 0, 0, 0, 0, 0);
     }
 
     Lease6Ptr lease(new Lease6(Lease::TYPE_NA, IOAddress("2001:db8:1::1"),
@@ -250,8 +248,8 @@ TEST_F(CSVLeaseFile6Test, recreate) {
     lease->cltt_ = 0;
     {
     SCOPED_TRACE("First write");
-    ASSERT_NO_THROW(lf->append(*lease));
-    checkStats(*lf, 0, 0, 0, 1, 1, 0);
+    ASSERT_NO_THROW(lf.append(*lease));
+    checkStats(lf, 0, 0, 0, 1, 1, 0);
     }
 
     lease.reset(new Lease6(Lease::TYPE_NA, IOAddress("2001:db8:2::10"),
@@ -261,8 +259,8 @@ TEST_F(CSVLeaseFile6Test, recreate) {
     lease->cltt_ = 0;
     {
     SCOPED_TRACE("Second write");
-    ASSERT_NO_THROW(lf->append(*lease));
-    checkStats(*lf, 0, 0, 0, 2, 2, 0);
+    ASSERT_NO_THROW(lf.append(*lease));
+    checkStats(lf, 0, 0, 0, 2, 2, 0);
     }
 
     lease.reset(new Lease6(Lease::TYPE_PD, IOAddress("3000:1:1::"),
@@ -272,8 +270,8 @@ TEST_F(CSVLeaseFile6Test, recreate) {
     lease->cltt_ = 0;
     {
     SCOPED_TRACE("Third write");
-    ASSERT_NO_THROW(lf->append(*lease));
-    checkStats(*lf, 0, 0, 0, 3, 3, 0);
+    ASSERT_NO_THROW(lf.append(*lease));
+    checkStats(lf, 0, 0, 0, 3, 3, 0);
     }
 
     EXPECT_EQ("address,duid,valid_lifetime,expire,subnet_id,pref_lifetime,"
@@ -288,6 +286,180 @@ TEST_F(CSVLeaseFile6Test, recreate) {
               io_.readFile());
 }
 
+// Verifies that a lease file with a 1.0 schema header and records in the
+// 1.0, 2.0, and 3.0 formats loads correctly.
+TEST_F(CSVLeaseFile6Test, mixedSchemaLoad) {
+    // Create a mixed schema file
+    io_.writeFile(
+             // schema 1.0 header
+              "address,duid,valid_lifetime,expire,subnet_id,pref_lifetime,"
+              "lease_type,iaid,prefix_len,fqdn_fwd,fqdn_rev,hostname\n"
+              // schema 1.0 record
+              "2001:db8:1::1,00:01:02:03:04:05:06:0a:0b:0c:0d:0e:01,"
+              "200,200,8,100,0,7,0,1,1,one.example.com\n"
+
+              // schema 2.0 record - has hwaddr
+              "2001:db8:1::2,00:01:02:03:04:05:06:0a:0b:0c:0d:0e:02,"
+              "200,200,8,100,0,7,0,1,1,two.example.com,01:02:03:04:05\n"
+
+              // schema 3.0 record - has hwaddr and state
+              "2001:db8:1::3,00:01:02:03:04:05:06:0a:0b:0c:0d:0e:03,"
+              "200,200,8,100,0,7,0,1,1,three.example.com,0a:0b:0c:0d:0e,1\n");
+
+    // Open the lease file.
+    CSVLeaseFile6 lf(filename_);
+    ASSERT_NO_THROW(lf.open());
+
+    Lease6Ptr lease;
+    {
+    SCOPED_TRACE("First lease valid");
+    EXPECT_TRUE(lf.next(lease));
+    ASSERT_TRUE(lease);
+
+    // Verify that the lease attributes are correct.
+    EXPECT_EQ("2001:db8:1::1", lease->addr_.toText());
+    ASSERT_TRUE(lease->duid_);
+    EXPECT_EQ("00:01:02:03:04:05:06:0a:0b:0c:0d:0e:01", lease->duid_->toText());
+    EXPECT_EQ(200, lease->valid_lft_);
+    EXPECT_EQ(0, lease->cltt_);
+    EXPECT_EQ(8, lease->subnet_id_);
+    EXPECT_EQ(100, lease->preferred_lft_);
+    EXPECT_EQ(Lease::TYPE_NA, lease->type_);
+    EXPECT_EQ(7, lease->iaid_);
+    EXPECT_EQ(0, lease->prefixlen_);
+    EXPECT_TRUE(lease->fqdn_fwd_);
+    EXPECT_TRUE(lease->fqdn_rev_);
+    EXPECT_EQ("one.example.com", lease->hostname_);
+    // Verify that added HWaddr is empty
+    EXPECT_FALSE(lease->hwaddr_);
+    // Verify that added state is STATE_DEFAULT
+    EXPECT_EQ(Lease::STATE_DEFAULT, lease->state_);
+    }
+
+    {
+    SCOPED_TRACE("Second lease valid");
+    EXPECT_TRUE(lf.next(lease));
+    ASSERT_TRUE(lease);
+
+    // Verify that the lease attributes are correct.
+    EXPECT_EQ("2001:db8:1::2", lease->addr_.toText());
+    ASSERT_TRUE(lease->duid_);
+    EXPECT_EQ("00:01:02:03:04:05:06:0a:0b:0c:0d:0e:02", lease->duid_->toText());
+    EXPECT_EQ(200, lease->valid_lft_);
+    EXPECT_EQ(0, lease->cltt_);
+    EXPECT_EQ(8, lease->subnet_id_);
+    EXPECT_EQ(100, lease->preferred_lft_);
+    EXPECT_EQ(Lease::TYPE_NA, lease->type_);
+    EXPECT_EQ(7, lease->iaid_);
+    EXPECT_EQ(0, lease->prefixlen_);
+    EXPECT_TRUE(lease->fqdn_fwd_);
+    EXPECT_TRUE(lease->fqdn_rev_);
+    EXPECT_EQ("two.example.com", lease->hostname_);
+    ASSERT_TRUE(lease->hwaddr_);
+    EXPECT_EQ("01:02:03:04:05", lease->hwaddr_->toText(false));
+    // Verify that added state is STATE_DEFAULT
+    EXPECT_EQ(Lease::STATE_DEFAULT, lease->state_);
+    }
+
+    {
+    SCOPED_TRACE("Third lease valid");
+    EXPECT_TRUE(lf.next(lease));
+    ASSERT_TRUE(lease);
+
+    // Verify that the lease attributes are correct.
+    EXPECT_EQ("2001:db8:1::3", lease->addr_.toText());
+    ASSERT_TRUE(lease->duid_);
+    EXPECT_EQ("00:01:02:03:04:05:06:0a:0b:0c:0d:0e:03", lease->duid_->toText());
+    EXPECT_EQ(200, lease->valid_lft_);
+    EXPECT_EQ(0, lease->cltt_);
+    EXPECT_EQ(8, lease->subnet_id_);
+    EXPECT_EQ(100, lease->preferred_lft_);
+    EXPECT_EQ(Lease::TYPE_NA, lease->type_);
+    EXPECT_EQ(7, lease->iaid_);
+    EXPECT_EQ(0, lease->prefixlen_);
+    EXPECT_TRUE(lease->fqdn_fwd_);
+    EXPECT_TRUE(lease->fqdn_rev_);
+    EXPECT_EQ("three.example.com", lease->hostname_);
+    ASSERT_TRUE(lease->hwaddr_);
+    EXPECT_EQ("0a:0b:0c:0d:0e", lease->hwaddr_->toText(false));
+    EXPECT_EQ(Lease::STATE_DECLINED, lease->state_);
+    }
+
+}
+
+// Verifies that a lease file with fewer header columns than the
+// minimum allowed will not open.
+TEST_F(CSVLeaseFile6Test, tooFewHeaderColumns) {
+    io_.writeFile("address,duid,valid_lifetime,expire,subnet_id,pref_lifetime,"
+              "lease_type,iaid,prefix_len,fqdn_fwd,fqdn_rev\n");
+
+    // Open should fail.
+    CSVLeaseFile6 lf(filename_);
+    ASSERT_THROW(lf.open(), CSVFileError);
+}
+
+// Verifies that a lease file with an unrecognized column header
+// will not open.
+TEST_F(CSVLeaseFile6Test, invalidHeaderColumn) {
+    io_.writeFile("address,BOGUS,valid_lifetime,expire,subnet_id,pref_lifetime,"
+              "lease_type,iaid,prefix_len,fqdn_fwd,fqdn_rev,hostname,"
+              "hwaddr,state\n");
+
+    // Open should fail.
+    CSVLeaseFile6 lf(filename_);
+    ASSERT_THROW(lf.open(), CSVFileError);
+}
+
+// Verifies that a lease file with more header columns than defined
+// columns will open as needing a downgrade.
+TEST_F(CSVLeaseFile6Test, downGrade) {
+    // Create a 3.0 schema file with one extra (future) column
+    io_.writeFile(
+             // schema 3.0 header plus a future column
+              "address,duid,valid_lifetime,expire,subnet_id,pref_lifetime,"
+              "lease_type,iaid,prefix_len,fqdn_fwd,fqdn_rev,hostname,"
+              "hwaddr,state,FUTURE_COL\n"
+
+              // schema 3.0 record - has hwaddr and state
+              "2001:db8:1::3,00:01:02:03:04:05:06:0a:0b:0c:0d:0e:03,"
+              "200,200,8,100,0,7,0,1,1,three.example.com,0a:0b:0c:0d:0e,1,"
+              "BOGUS\n");
+
+    // Open should succeed in the event someone is downgrading.
+    CSVLeaseFile6 lf(filename_);
+    ASSERT_NO_THROW(lf.open());
+    EXPECT_TRUE(lf.needsConversion());
+    EXPECT_EQ(util::VersionedCSVFile::NEEDS_DOWNGRADE,
+              lf.getInputSchemaState());
+
+
+    Lease6Ptr lease;
+    {
+    SCOPED_TRACE("First lease valid");
+    EXPECT_TRUE(lf.next(lease));
+    ASSERT_TRUE(lease);
+
+    // Verify that the lease attributes are correct.
+    EXPECT_EQ("2001:db8:1::3", lease->addr_.toText());
+    ASSERT_TRUE(lease->duid_);
+    EXPECT_EQ("00:01:02:03:04:05:06:0a:0b:0c:0d:0e:03", lease->duid_->toText());
+    EXPECT_EQ(200, lease->valid_lft_);
+    EXPECT_EQ(0, lease->cltt_);
+    EXPECT_EQ(8, lease->subnet_id_);
+    EXPECT_EQ(100, lease->preferred_lft_);
+    EXPECT_EQ(Lease::TYPE_NA, lease->type_);
+    EXPECT_EQ(7, lease->iaid_);
+    EXPECT_EQ(0, lease->prefixlen_);
+    EXPECT_TRUE(lease->fqdn_fwd_);
+    EXPECT_TRUE(lease->fqdn_rev_);
+    EXPECT_EQ("three.example.com", lease->hostname_);
+    ASSERT_TRUE(lease->hwaddr_);
+    EXPECT_EQ("0a:0b:0c:0d:0e", lease->hwaddr_->toText(false));
+    EXPECT_EQ(Lease::STATE_DECLINED, lease->state_);
+    }
+}
+
+
 /// @todo Currently we don't check invalid lease attributes, such as invalid
 /// lease type, invalid preferred lifetime vs valid lifetime etc. The Lease6
 /// should be extended with the function that validates lease attributes. Once

+ 108 - 14
src/lib/dhcpsrv/tests/dhcp_parsers_unittest.cc

@@ -931,28 +931,48 @@ TEST_F(ParseConfigTest, emptyOptionData) {
     ASSERT_EQ(0, opt->getAddresses().size());
 }
 
-};  // Anonymous namespace
-
 /// The next set of tests check basic operation of the HooksLibrariesParser.
-
-
-// Utility function for setting up the "hooks-libraries" configuration.
 //
-// Returns a hooks-libraries configuration element that contains zero to
-// three libraries, depending on what arguments are supplied.
+// Convenience function to set a configuration of zero or more hooks
+// libraries:
+//
+// lib1 - No parameters
+// lib2 - Empty parameters statement
+// lib3 - Valid parameters
 std::string
 setHooksLibrariesConfig(const char* lib1 = NULL, const char* lib2 = NULL,
                         const char* lib3 = NULL) {
-    const std::string quote("\"");
-    const std::string comma_space(", ");
-
-    std::string config = std::string("{ \"hooks-libraries\": [");
+    const string lbrace("{");
+    const string rbrace("}");
+    const string quote("\"");
+    const string comma_space(", ");
+    const string library("\"library\": ");
+    const string parameters("\"parameters\": ");
+
+    string config = string("{ \"hooks-libraries\": [");
     if (lib1 != NULL) {
-        config += (quote + std::string(lib1) + quote);
+        // Library 1 has no parameters
+        config += lbrace;
+        config += library + quote + std::string(lib1) + quote;
+        config += rbrace;
+
         if (lib2 != NULL) {
-            config += (comma_space + quote + std::string(lib2) + quote);
+            // Library 2 has an empty parameters statement
+            config += comma_space + lbrace;
+            config += library + quote + std::string(lib2) + quote + comma_space;
+            config += string("\"parameters\": {}");
+            config += rbrace;
+
             if (lib3 != NULL) {
-                config += (comma_space + quote + std::string(lib3) + quote);
+                // Library 3 has valid parameters
+                config += comma_space + lbrace;
+                config += library + quote + std::string(lib3) + quote + comma_space;
+                config += string("\"parameters\": {");
+                config += string("    \"svalue\": \"string value\", ");
+                config += string("    \"ivalue\": 42, ");     // Integer value
+                config += string("    \"bvalue\": true");     // Boolean value
+                config += string("}");
+                config += rbrace;
             }
         }
     }
@@ -1242,6 +1262,78 @@ TEST_F(ParseConfigTest, reconfigureInvalidHooksLibraries) {
     EXPECT_EQ(CALLOUT_LIBRARY_1, hooks_libraries[0]);
 }
 
+// Check that if hooks-libraries contains invalid syntax, it is detected.
+TEST_F(ParseConfigTest, invalidSyntaxHooksLibraries) {
+
+    // Element holds a mixture of (valid) maps and non-maps.
+    string config1 = "{ \"hooks-libraries\": [ "
+        "{ \"library\": \"/opt/lib/lib1\" }, "
+        "\"/opt/lib/lib2\" "
+        "] }";
+    string error1 = "one or more entries in the hooks-libraries list is not"
+                    " a map";
+
+    int rcode = parseConfiguration(config1);
+    ASSERT_NE(0, rcode);
+    EXPECT_TRUE(error_text_.find(error1) != string::npos) <<
+        "Error text returned from parse failure is " << error_text_;
+
+    // Element holds valid maps, except one where the library element is not
+    // a string.
+    string config2 = "{ \"hooks-libraries\": [ "
+        "{ \"library\": \"/opt/lib/lib1\" }, "
+        "{ \"library\": 123 } "
+        "] }";
+    string error2 = "value of 'library' element is not a string giving"
+                    " the path to a hooks library";
+
+    rcode = parseConfiguration(config2);
+    ASSERT_NE(0, rcode);
+    EXPECT_TRUE(error_text_.find(error2) != string::npos) <<
+        "Error text returned from parse failure is " << error_text_;
+
+    // Element holds valid maps, except one where the library element is the
+    // empty string.
+    string config3 = "{ \"hooks-libraries\": [ "
+        "{ \"library\": \"/opt/lib/lib1\" }, "
+        "{ \"library\": \"\" } "
+        "] }";
+    string error3 = "value of 'library' element must not be blank";
+
+    rcode = parseConfiguration(config3);
+    ASSERT_NE(0, rcode);
+    EXPECT_TRUE(error_text_.find(error3) != string::npos) <<
+        "Error text returned from parse failure is " << error_text_;
+
+    // Element holds valid maps, except one where the library element is all
+    // spaces.
+    string config4 = "{ \"hooks-libraries\": [ "
+        "{ \"library\": \"/opt/lib/lib1\" }, "
+        "{ \"library\": \"      \" } "
+        "] }";
+    string error4 = "value of 'library' element must not be blank";
+
+    rcode = parseConfiguration(config4);
+    ASSERT_NE(0, rcode);
+    EXPECT_TRUE(error_text_.find(error4) != string::npos) <<
+        "Error text returned from parse failure is " << error_text_;
+
+    // Element holds valid maps, except one that does not contain a
+    // 'library' element.
+    string config5 = "{ \"hooks-libraries\": [ "
+        "{ \"library\": \"/opt/lib/lib1\" }, "
+        "{ \"parameters\": { \"alpha\": 123 } }, "
+        "{ \"library\": \"/opt/lib/lib2\" } "
+        "] }";
+    string error5 = "one or more hooks-libraries elements are missing the"
+                    " name of the library";
+
+    rcode = parseConfiguration(config5);
+    ASSERT_NE(0, rcode);
+    EXPECT_TRUE(error_text_.find(error5) != string::npos) <<
+        "Error text returned from parse failure is " << error_text_;
+}
+
 /// @brief Checks that a valid, enabled D2 client configuration works correctly.
 TEST_F(ParseConfigTest, validD2Config) {
 
@@ -2065,3 +2157,5 @@ TEST_F(ParseConfigTest, validRelayInfo6) {
 // There's no test for ControlSocketParser, as it is tested in the DHCPv4 code
 // (see CtrlDhcpv4SrvTest.commandSocketBasic in
 // src/bin/dhcp4/tests/ctrl_dhcp4_srv_unittest.cc).
+
+};  // Anonymous namespace

+ 133 - 0
src/lib/dhcpsrv/tests/memfile_lease_mgr_unittest.cc

@@ -1388,4 +1388,137 @@ TEST_F(MemfileLeaseMgrTest, load6LFCInProgress) {
     ASSERT_NO_THROW(lease_mgr.reset(new NakedMemfileLeaseMgr(pmap)));
 }
 
+// Verifies that LFC is automatically run during MemfileLeaseMgr construction
+// when the lease file(s) being loaded need to be upgraded.
+TEST_F(MemfileLeaseMgrTest, leaseUpgrade4) {
+    // Create header strings for each schema
+    std::string header_1_0 =
+        "address,hwaddr,client_id,valid_lifetime,expire,"
+        "subnet_id,fqdn_fwd,fqdn_rev,hostname\n";
+
+    std::string header_2_0 =
+        "address,hwaddr,client_id,valid_lifetime,expire,"
+        "subnet_id,fqdn_fwd,fqdn_rev,hostname,state\n";
+
+    // Create a 1.0 schema current lease file with two entries for
+    // the same lease
+    std::string current_file_contents = header_1_0 +
+        "192.0.2.2,02:02:02:02:02:02,,200,200,8,1,1,\n"
+        "192.0.2.2,02:02:02:02:02:02,,200,800,8,1,1,\n";
+    LeaseFileIO current_file(getLeaseFilePath("leasefile4_0.csv"));
+    current_file.writeFile(current_file_contents);
+
+    // Create a 1.0 schema previous lease file, with two entries for
+    // another lease
+    std::string previous_file_contents = header_1_0 +
+        "192.0.2.3,03:03:03:03:03:03,,200,200,8,1,1,\n"
+        "192.0.2.3,03:03:03:03:03:03,,200,800,8,1,1,\n";
+    LeaseFileIO previous_file(getLeaseFilePath("leasefile4_0.csv.2"));
+    previous_file.writeFile(previous_file_contents);
+
+    // Create the backend.
+    DatabaseConnection::ParameterMap pmap;
+    pmap["type"] = "memfile";
+    pmap["universe"] = "4";
+    pmap["name"] = getLeaseFilePath("leasefile4_0.csv");
+    pmap["lfc-interval"] = "0";
+    boost::scoped_ptr<NakedMemfileLeaseMgr> lease_mgr(new NakedMemfileLeaseMgr(pmap));
+
+    // Since lease files are loaded during lease manager
+    // constructor, LFC should get launched automatically.
+    // The new lease file should be 2.0 schema and have no entries
+    ASSERT_TRUE(current_file.exists());
+    EXPECT_EQ(header_2_0, current_file.readFile());
+
+    // Wait for the LFC process to complete and
+    // make sure it has returned an exit status of 0.
+    ASSERT_TRUE(waitForProcess(*lease_mgr, 2));
+    ASSERT_EQ(0, lease_mgr->getLFCExitStatus())
+        << "Executing the LFC process failed: make sure that"
+        " the kea-lfc program has been compiled.";
+
+    // The LFC should have created a 2.0 schema completion file with the
+    // one entry for each lease and moved it to leasefile4_0.csv.2
+    LeaseFileIO input_file(getLeaseFilePath("leasefile4_0.csv.2"), false);
+    ASSERT_TRUE(input_file.exists());
+
+    // Verify cleaned, converted contents
+    std::string result_file_contents = header_2_0 +
+        "192.0.2.2,02:02:02:02:02:02,,200,800,8,1,1,,0\n"
+        "192.0.2.3,03:03:03:03:03:03,,200,800,8,1,1,,0\n";
+    EXPECT_EQ(result_file_contents, input_file.readFile());
+}
+
+TEST_F(MemfileLeaseMgrTest, leaseUpgrade6) {
+    // Create header strings for all three schemas
+    std::string header_1_0 =
+        "address,duid,valid_lifetime,expire,subnet_id,"
+        "pref_lifetime,lease_type,iaid,prefix_len,fqdn_fwd,"
+        "fqdn_rev,hostname\n";
+
+    std::string header_2_0 =
+        "address,duid,valid_lifetime,expire,subnet_id,"
+        "pref_lifetime,lease_type,iaid,prefix_len,fqdn_fwd,"
+        "fqdn_rev,hostname,hwaddr\n";
+
+    std::string header_3_0 =
+        "address,duid,valid_lifetime,expire,subnet_id,"
+        "pref_lifetime,lease_type,iaid,prefix_len,fqdn_fwd,"
+        "fqdn_rev,hostname,hwaddr,state\n";
+
+    // The current lease file is schema 1.0 and has two entries for
+    // the same lease
+    std::string current_file_contents = header_1_0 +
+        "2001:db8:1::1,00:01:02:03:04:05:06:0a:0b:0c:0d:0e:0f,200,200,"
+        "8,100,0,7,0,1,1,,\n"
+        "2001:db8:1::1,00:01:02:03:04:05:06:0a:0b:0c:0d:0e:0f,200,800,"
+        "8,100,0,7,0,1,1,,\n";
+    LeaseFileIO current_file(getLeaseFilePath("leasefile6_0.csv"));
+    current_file.writeFile(current_file_contents);
+
+    // The previous lease file is schema 2.0 and has two entries for
+    // a different lease
+    std::string previous_file_contents = header_2_0 +
+        "2001:db8:1::2,01:01:01:01:01:01:01:01:01:01:01:01:01,200,200,"
+        "8,100,0,7,0,1,1,,11:22:33:44:55\n"
+        "2001:db8:1::2,01:01:01:01:01:01:01:01:01:01:01:01:01,200,800,"
+        "8,100,0,7,0,1,1,,11:22:33:44:55\n";
+    LeaseFileIO previous_file(getLeaseFilePath("leasefile6_0.csv.2"));
+    previous_file.writeFile(previous_file_contents);
+
+    // Create the backend.
+    DatabaseConnection::ParameterMap pmap;
+    pmap["type"] = "memfile";
+    pmap["universe"] = "6";
+    pmap["name"] = getLeaseFilePath("leasefile6_0.csv");
+    pmap["lfc-interval"] = "0";
+    boost::scoped_ptr<NakedMemfileLeaseMgr> lease_mgr(new NakedMemfileLeaseMgr(pmap));
+
+    // Since lease files are loaded during lease manager
+    // constructor, LFC should get launched automatically.
+    // The new lease file should be 3.0 schema and contain no leases.
+    ASSERT_TRUE(current_file.exists());
+    EXPECT_EQ(header_3_0, current_file.readFile());
+
+    // Wait for the LFC process to complete and
+    // make sure it has returned an exit status of 0.
+    ASSERT_TRUE(waitForProcess(*lease_mgr, 2));
+    ASSERT_EQ(0, lease_mgr->getLFCExitStatus())
+        << "Executing the LFC process failed: make sure that"
+        " the kea-lfc program has been compiled.";
+
+    // The LFC should have created a 3.0 schema cleaned file with one entry
+    // for each lease as leasefile6_0.csv.2
+    LeaseFileIO input_file(getLeaseFilePath("leasefile6_0.csv.2"), false);
+    ASSERT_TRUE(input_file.exists());
+
+    // Verify cleaned, converted contents
+    std::string result_file_contents = header_3_0 +
+        "2001:db8:1::1,00:01:02:03:04:05:06:0a:0b:0c:0d:0e:0f,200,800,"
+        "8,100,0,7,0,1,1,,,0\n"
+        "2001:db8:1::2,01:01:01:01:01:01:01:01:01:01:01:01:01,200,800,"
+        "8,100,0,7,0,1,1,,11:22:33:44:55,0\n";
+    EXPECT_EQ(result_file_contents, input_file.readFile());
+}
+
 }; // end of anonymous namespace

+ 44 - 0
src/lib/eval/Makefile.am

@@ -13,8 +13,14 @@ AM_CXXFLAGS += $(WARNING_NO_MISSING_FIELD_INITIALIZERS_CFLAG)
 lib_LTLIBRARIES = libkea-eval.la
 libkea_eval_la_SOURCES  =
 libkea_eval_la_SOURCES += eval_log.cc eval_log.h
+libkea_eval_la_SOURCES += evaluate.cc evaluate.h
 libkea_eval_la_SOURCES += token.cc token.h
 
+libkea_eval_la_SOURCES += parser.cc parser.h
+libkea_eval_la_SOURCES += lexer.cc
+libkea_eval_la_SOURCES += location.hh position.hh stack.hh
+libkea_eval_la_SOURCES += eval_context.cc eval_context.h eval_context_decl.h
+
 nodist_libkea_eval_la_SOURCES = eval_messages.h eval_messages.cc
 
 libkea_eval_la_CXXFLAGS = $(AM_CXXFLAGS)
@@ -30,6 +36,7 @@ libkea_eval_la_LDFLAGS += $(CRYPTO_LDFLAGS)
 
 EXTRA_DIST  = eval.dox
 EXTRA_DIST += eval_messages.mes
+EXTRA_DIST += lexer.ll parser.yy
 
 # Define rule to build logging source files from message file
 eval_messages.h eval_messages.cc: s-messages
@@ -48,3 +55,40 @@ s-messages: eval_messages.mes
 BUILT_SOURCES = eval_messages.h eval_messages.cc
 
 CLEANFILES = eval_messages.h eval_messages.cc s-messages
+
+# If we want to get rid of all flex/bison generated files, we need to use
+# make maintainer-clean. The proper way to introduce custom commands for
+# that operation is to define maintainer-clean-local target. However,
+# make maintainer-clean also removes Makefile, so running configure script
+# is required. To make it easy to rebuild flex/bison without going through
+# reconfigure, a new target parser-clean has been added.
+maintainer-clean-local:
+	rm -f location.hh lexer.cc parser.cc parser.h position.hh stack.hh
+
+# To regenerate flex/bison files, one can do:
+#
+# make parser-clean
+# make parser
+#
+# This is needed only when the lexer.ll or parser.yy files are modified.
+# Make sure you have both flex and bison installed.
+parser-clean: maintainer-clean-local
+
+if GENERATE_PARSER
+
+parser: lexer.cc location.hh position.hh stack.hh parser.cc parser.h
+	@echo "Flex/bison files regenerated"
+
+# --- Flex/Bison stuff below --------------------------------------------------
+location.hh position.hh stack.hh parser.cc parser.h: parser.yy
+	$(YACC) --defines=parser.h -o parser.cc parser.yy
+
+lexer.cc: lexer.ll
+	$(LEX) -o lexer.cc lexer.ll
+
+else
+
+parser location.hh position.hh stack.hh parser.cc parser.h lexer.cc:
+	@echo Parser generation disabled. Configure with --enable-generate-parser to enable it.
+
+endif

+ 113 - 3
src/lib/eval/eval.dox

@@ -13,8 +13,118 @@
 // PERFORMANCE OF THIS SOFTWARE.
 
 /**
-  @page dhcpEval Expression evaluation (client classification)
+  @page dhcpEval libeval - Expression evaluation and client classification
 
-  @todo: Document how the expression evaluation is implemented.
+  @section dhcpEvalIntroduction Introduction
 
- */
+  The core of the libeval library is a parser that is able to parse an
+  expression (e.g. option[123] == 'APC'). This is currently used for client
+  classification, but in the future it may also be used for other applications.
+
+  The external interface to the library is the @ref isc::eval::EvalContext
+  class.  Once instantiated, it offers one principal method:
+  @ref isc::eval::EvalContext::parseString, which parses the specified
+  string.  Once the expression is parsed, it is converted to a collection of
+  tokens that are stored in Reverse Polish Notation in
+  EvalContext::expression.
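+
+  For illustration, a minimal usage sketch (not taken from the Kea sources;
+  it merely exercises the interface described above and assumes the libeval
+  headers are on the include path) could look like this:
+
+@code
+#include <eval/eval_context.h>
+#include <iostream>
+
+int main() {
+    isc::eval::EvalContext eval;
+    try {
+        // On success the parsed tokens end up in eval.expression (RPN order).
+        eval.parseString("option[123] == 'APC'");
+        std::cout << "parsed " << eval.expression.size() << " tokens"
+                  << std::endl;
+    } catch (const isc::eval::EvalParseError& ex) {
+        // Thrown when the expression cannot be parsed.
+        std::cerr << ex.what() << std::endl;
+    }
+    return (0);
+}
+@endcode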
+
+  Internally, the parser code is generated by flex and bison. These two
+  tools convert lexer.ll and parser.yy files into a number of .cc and .hh files.
+  To avoid making the Kea build depend on the presence of flex and bison,
+  the generated files are checked into the git repository and are
+  distributed in the tarballs.
+
+  @section dhcpEvalLexer Lexer generation using flex
+
+  Flex is used to generate the lexer, a piece of code that converts input
+  data into a series of tokens. It contains a small number of directives,
+  but the majority of the code consists of the definitions of tokens. These
+  definitions are regular expressions that define various tokens, e.g. strings,
+  numbers, parentheses, etc. Once the expression is matched, the associated
+  action is executed. In the majority of the cases a generator method from
+  @ref isc::eval::EvalParser is called, which returns a newly created
+  bison token. The purpose of the lexer is to generate a stream
+  of tokens that are consumed by the parser.
+
+  lexer.cc and lexer.hh must not be edited. If there is a need
+  to introduce changes, lexer.ll must be updated and the .cc and .hh files
+  regenerated.
+
+  @section dhcpEvalParser Parser generation using bison
+
+  Bison is used to generate the parser, a piece of code that consumes a
+  stream of tokens and attempts to match it against a defined grammar.
+  The bison parser is created from parser.yy. It contains
+  a number of directives, but the two most important sections are:
+  a list of tokens (for each token defined here, bison will generate the
+  make_NAMEOFTOKEN method in the @ref isc::eval::EvalParser class) and
+  the grammar. The grammar is a tree-like structure with possible loops.
+
+  Here is an over-simplified version of the grammar:
+
+@code
+01. %start expression;
+02.
+03. expression : token EQUAL token
+04.            | token
+05.            ;
+06.
+07. token : STRING
+08.             {
+09.                 TokenPtr str(new TokenString($1));
+10.                 ctx.expression.push_back(str);
+11.             }
+12.       | HEXSTRING
+13.             {
+14.                 TokenPtr hex(new TokenHexString($1));
+15.                 ctx.expression.push_back(hex);
+16.             }
+17.       | OPTION '[' INTEGER ']'
+18.             {
+19.                 TokenPtr opt(new TokenOption($3));
+20.                 ctx.expression.push_back(opt);
+21.              }
+22.       ;
+@endcode
+
+This code determines that the grammar starts from expression (line 1).
+The actual definition of expression (lines 3-5) may either be a
+single token or an expression "token == token" (EQUAL has been defined as
+"==" elsewhere). Token is further
+defined in lines 7-22: it may either be a string (lines 7-11),
+a hex string (lines 12-16) or an option (lines 17-21).
+When the actual case is determined, the respective C++ action
+is executed. For example, if the token is a string, the TokenString class is
+instantiated with the appropriate value and put onto the expression vector.
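+
+As an illustration, the full grammar in parser.yy turns the expression
+option[123] == 'APC' into a three-token RPN sequence; building the same
+vector by hand (a sketch only, assuming "using namespace isc::dhcp") would
+look like this:
+
+@code
+Expression expr;
+expr.push_back(TokenPtr(new TokenOption(123)));    // pushed by the OPTION rule
+expr.push_back(TokenPtr(new TokenString("APC")));  // pushed by the STRING rule
+expr.push_back(TokenPtr(new TokenEqual()));        // pushed last by the "==" rule
+@endcode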
+
+@section dhcpEvalMakefile Generating parser files
+
+ In the general case, we want to avoid generating parser files, so an
+ average user interested in just compiling Kea would not need flex or
+ bison. Therefore the generated files are already included in the
+ git repository and will be included in the tarball releases.
+
+ However, there will be cases when a developer wants to tweak the
+ lexer.ll and parser.yy files and then regenerate the code. For this
+ purpose, two makefile targets are defined:
+ @code
+ make parser
+ @endcode
+ will generate the parsers and
+ @code
+ make parser-clean
+ @endcode
+ will remove the generated files. Removal of the generated files is also
+ hooked into the maintainer-clean target.
+
+@section dhcpEvalConfigure Configure options
+
+ Since the flex/bison tools are not necessary for a regular compilation,
+ checks for them are conducted during configure, but the lack of flex or
+ bison does not stop the configure process. There is a flag
+ (--enable-generate-parser) that tells the configure script that the
+ parser will be generated. With this flag, the checks for flex/bison
+ become mandatory. If either tool is missing or too old, the
+ configure process will terminate with an error.
+
+*/

+ 52 - 0
src/lib/eval/eval_context.cc

@@ -0,0 +1,52 @@
+// Copyright (C) 2015 Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+#include <eval/eval_context.h>
+#include <eval/parser.h>
+#include <exceptions/exceptions.h>
+#include <fstream>
+
+EvalContext::EvalContext()
+  : trace_scanning_(false), trace_parsing_(false)
+{
+}
+
+EvalContext::~EvalContext()
+{
+}
+
+bool
+EvalContext::parseString(const std::string& str)
+{
+    file_ = "<string>";
+    string_ = str;
+    scanStringBegin();
+    isc::eval::EvalParser parser(*this);
+    parser.set_debug_level(trace_parsing_);
+    int res = parser.parse();
+    scanStringEnd();
+    return (res == 0);
+}
+
+void
+EvalContext::error(const isc::eval::location& loc, const std::string& what)
+{
+    isc_throw(EvalParseError, loc << ": " << what);
+}
+
+void
+EvalContext::error (const std::string& what)
+{
+    isc_throw(EvalParseError, what);
+}

+ 96 - 0
src/lib/eval/eval_context.h

@@ -0,0 +1,96 @@
+// Copyright (C) 2015 Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+#ifndef EVAL_CONTEXT_H
+#define EVAL_CONTEXT_H
+#include <string>
+#include <map>
+#include <eval/parser.h>
+#include <eval/eval_context_decl.h>
+#include <exceptions/exceptions.h>
+
+// Tell Flex the lexer's prototype ...
+#define YY_DECL isc::eval::EvalParser::symbol_type yylex (EvalContext& driver)
+
+// ... and declare it for the parser's sake.
+YY_DECL;
+
+namespace isc {
+namespace eval {
+
+/// @brief Evaluation error exception raised when trying to parse an expression.
+class EvalParseError : public isc::Exception {
+public:
+    EvalParseError(const char* file, size_t line, const char* what) :
+        isc::Exception(file, line, what) { };
+};
+
+
+/// @brief Evaluation context, an interface to the expression evaluation.
+class EvalContext
+{
+public:
+    /// @brief Default constructor.
+    EvalContext();
+
+    /// @brief destructor
+    virtual ~EvalContext();
+
+    /// @brief Parsed expression (output tokens are stored here)
+    isc::dhcp::Expression expression;
+
+    /// @brief Method called before scanning starts on a string.
+    void scanStringBegin();
+
+    /// @brief Method called after the last tokens are scanned from a string.
+    void scanStringEnd();
+    
+    /// @brief Run the parser on the string specified.
+    ///
+    /// @param str string to be parsed
+    /// @return true on success.
+    bool parseString(const std::string& str);
+
+    /// @brief The name of the file being parsed.
+    /// Used later to pass the file name to the location tracker.
+    std::string file_;
+
+    /// @brief The string being parsed.
+    std::string string_;
+
+    /// @brief Error handler
+    ///
+    /// @param loc location within the parsed string where the problem occurred.
+    /// @param what string explaining the nature of the error.
+    void error(const isc::eval::location& loc, const std::string& what);
+
+    /// @brief Error handler
+    ///
+    /// This is a simplified error reporting tool for possible future
+    /// cases when the EvalParser is not able to handle the packet.
+    void error(const std::string& what);
+
+ private:
+    /// @brief Flag determining scanner debugging.
+    bool trace_scanning_;
+
+    /// @brief Flag determining parser debugging.
+    bool trace_parsing_;
+  
+};
+
+}; // end of isc::eval namespace
+}; // end of isc namespace
+
+#endif

+ 28 - 0
src/lib/eval/eval_context_decl.h

@@ -0,0 +1,28 @@
+// Copyright (C) 2015 Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+#ifndef EVAL_CONTEXT_DECL_H
+#define EVAL_CONTEXT_DECL_H
+
+/// @file eval_context_decl.h Forward declaration of the EvalContext class
+
+namespace isc {
+namespace eval {
+
+class EvalContext;
+
+}; // end of isc::eval namespace
+}; // end of isc namespace
+
+#endif

+ 0 - 5
src/lib/eval/eval_messages.mes

@@ -18,8 +18,3 @@ $NAMESPACE isc::dhcp
 This debug message indicates that the expression has been evaluated
 to said value. This message is mostly useful during debugging of the
 client classification expressions.
-
-% EVAL_SUBSTRING_BAD_PARAM_CONVERSION starting %1, length %2
-This debug message indicates that the parameter for the starting postion
-or length of the substring couldn't be converted to an integer.  In this
-case the substring routine returns an empty string.

+ 41 - 0
src/lib/eval/evaluate.cc

@@ -0,0 +1,41 @@
+// Copyright (C) 2015 Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+#include <eval/evaluate.h>
+
+namespace isc {
+namespace dhcp {
+
+bool evaluate(const Expression& expr, const Pkt& pkt) {
+    ValueStack values;
+    for (Expression::const_iterator it = expr.begin();
+         it != expr.end(); ++it) {
+        (*it)->evaluate(pkt, values);
+    }
+    if (values.size() != 1) {
+        isc_throw(EvalBadStack, "Incorrect stack order. Expected exactly "
+                  "1 value at the end of evaluatuion, got " << values.size());
+    }
+    if (values.top() == "false") {
+        return (false);
+    } else if (values.top() == "true") {
+        return (true);
+    } else {
+        isc_throw(EvalTypeError, "Incorrect evaluation type. Expected "
+                  "\"false\" or \"true\", got \"" << values.top() << "\"");
+    }
+}
+
+}; // end of isc::dhcp namespace
+}; // end of isc namespace

+ 38 - 0
src/lib/eval/evaluate.h

@@ -0,0 +1,38 @@
+// Copyright (C) 2015 Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+#ifndef EVALUATE_H
+#define EVALUATE_H
+
+#include <eval/token.h>
+
+namespace isc {
+namespace dhcp {
+
+/// @brief Evaluate an RPN expression for a v4 or v6 packet and return
+///        a true or false decision
+///
+/// @param expr the RPN expression, i.e., a vector of parsed tokens
+/// @param pkt  The v4 or v6 packet
+/// @return the boolean decision
+/// @throw EvalBadStack if there is not exactly one element on the value
+///        stack at the end of the evaluation
+/// @throw EvalTypeError if the value at the top of the stack at the
+///        end of the evaluation is not "false" or "true"
+bool evaluate(const Expression& expr, const Pkt& pkt);
+
+}; // end of isc::dhcp namespace
+}; // end of isc namespace
+
+#endif
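
A short sketch of how the pieces fit together (illustrative only, not part of
this commit; matchesClass is a hypothetical helper and the packet is assumed to
have been constructed elsewhere):

    #include <eval/eval_context.h>
    #include <eval/evaluate.h>
    #include <string>

    // Returns true when the packet matches the classification expression.
    // Throws EvalParseError on a syntax error, and EvalBadStack or
    // EvalTypeError if the evaluated token stack is malformed.
    bool matchesClass(const isc::dhcp::Pkt& pkt, const std::string& expr_str) {
        isc::eval::EvalContext eval;
        eval.parseString(expr_str);
        return (isc::dhcp::evaluate(eval.expression, pkt));
    }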

File diff suppressed because it is too large
+ 2251 - 0
src/lib/eval/lexer.cc


+ 156 - 0
src/lib/eval/lexer.ll

@@ -0,0 +1,156 @@
+/* Copyright (C) 2015 Internet Systems Consortium, Inc. ("ISC")
+
+   Permission to use, copy, modify, and/or distribute this software for any
+   purpose with or without fee is hereby granted, provided that the above
+   copyright notice and this permission notice appear in all copies.
+
+   THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+   REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+   AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+   INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+   LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+   OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+   PERFORMANCE OF THIS SOFTWARE. */
+
+%{ /* -*- C++ -*- */
+#include <cerrno>
+#include <climits>
+#include <cstdlib>
+#include <string>
+#include <eval/eval_context.h>
+#include <eval/parser.h>
+#include <boost/lexical_cast.hpp>
+
+// Work around an incompatibility in flex (at least versions
+// 2.5.31 through 2.5.33): it generates code that does
+// not conform to C89.  See Debian bug 333231
+// <http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=333231>.
+# undef yywrap
+# define yywrap() 1
+
+// The location of the current token. The lexer will keep updating it. This
+// variable will be useful for logging errors.
+static isc::eval::location loc;
+%}
+
+/* noyywrap disables automatic rewinding for the next file to parse. Since we
+   always parse only a single string, there's no need to do any wraps. And
+   using yywrap requires linking with -lfl, which provides the default yywrap
+   implementation that always returns 1 anyway. */
+%option noyywrap
+
+/* nounput simplifies the lexer, by removing support for putting a character
+   back into the input stream. We never use such capability anyway. */
+%option nounput
+
+/* batch means that we'll never use the generated lexer interactively. */
+%option batch
+
+/* Enables debug mode. To see the debug messages, one needs to also set
+   yy_flex_debug to 1, then the debug messages will be printed on stderr. */
+%option debug
+
+/* I have no idea what this option does, except it was specified in the bison
+   examples and Postgres folks added it to remove gcc 4.3 warnings. Let's
+   be on the safe side and keep it. */
+%option noinput
+
+/* This line tells flex to track the line numbers. It's not really that
+   useful for client classes, which typically are one-liners, but it may be
+   useful in more complex cases. */
+%option yylineno
+
+/* These are not token expressions yet, just convenience expressions that
+   can be used during actual token definitions. */
+int   \-?[0-9]+
+hex   [0-9a-fA-F]+
+blank [ \t]
+
+%{
+// This code is run each time a pattern is matched. It updates the location
+// by moving it ahead by yyleng bytes. yyleng specifies the length of the
+// currently matched token.
+#define YY_USER_ACTION  loc.columns(yyleng);
+%}
+
+%%
+
+%{
+    // Code run each time yylex is called.
+    loc.step();
+%}
+
+{blank}+   {
+    // Ok, we found white space. Let's ignore it and update the loc variable.
+    loc.step();
+}
+[\n]+      {
+    // Newline found. Let's update the location and continue.
+    loc.lines(yyleng);
+    loc.step();
+}
+
+\'[^\'\n]*\' {
+    // A string has been matched. It contains the actual string and single quotes.
+    // We need to get those quotes out of the way and just use its content, e.g.
+    // for 'foo' we should get foo
+    std::string tmp(yytext+1);
+    tmp.resize(tmp.size() - 1);
+
+    return isc::eval::EvalParser::make_STRING(tmp, loc);
+}
+
+0[xX]{hex} {
+    // A hex string has been matched. It contains the '0x' or '0X' header
+    // followed by at least one hexadecimal digit.
+    return isc::eval::EvalParser::make_HEXSTRING(yytext, loc);
+}
+
+{int} {
+    // An integer was found.
+    std::string tmp(yytext);
+
+    try {
+        static_cast<void>(boost::lexical_cast<int>(tmp));
+    } catch (const boost::bad_lexical_cast &) {
+        driver.error(loc, "Failed to convert " + tmp + " to an integer.");
+    }
+
+    // The parser needs the string form, as a double conversion is not lossless
+    return isc::eval::EvalParser::make_INTEGER(tmp, loc);
+}
+
+"=="        return isc::eval::EvalParser::make_EQUAL(loc);
+"option"    return isc::eval::EvalParser::make_OPTION(loc);
+"substring" return isc::eval::EvalParser::make_SUBSTRING(loc);
+"all"       return isc::eval::EvalParser::make_ALL(loc);
+"("         return isc::eval::EvalParser::make_LPAREN(loc);
+")"         return isc::eval::EvalParser::make_RPAREN(loc);
+"["         return isc::eval::EvalParser::make_LBRACKET(loc);
+"]"         return isc::eval::EvalParser::make_RBRACKET(loc);
+","         return isc::eval::EvalParser::make_COMA(loc);
+
+.          driver.error (loc, "Invalid character: " + std::string(yytext));
+<<EOF>>    return isc::eval::EvalParser::make_END(loc);
+%%
+
+using namespace isc::eval;
+
+void
+EvalContext::scanStringBegin()
+{
+    loc.initialize(&file_);
+    yy_flex_debug = trace_scanning_;
+    YY_BUFFER_STATE buffer;
+    buffer = yy_scan_bytes(string_.c_str(), string_.size());
+    if (!buffer) {
+        error("cannot scan string");
+        exit(EXIT_FAILURE);
+    }
+}
+
+void
+EvalContext::scanStringEnd()
+{
+    yy_delete_buffer(YY_CURRENT_BUFFER);
+}

+ 193 - 0
src/lib/eval/location.hh

@@ -0,0 +1,193 @@
+// Generated 2015115
+// A Bison parser, made by GNU Bison 3.0.4.
+
+// Locations for Bison parsers in C++
+
+// Copyright (C) 2002-2015 Free Software Foundation, Inc.
+
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU General Public License for more details.
+
+// You should have received a copy of the GNU General Public License
+// along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+// As a special exception, you may create a larger work that contains
+// part or all of the Bison parser skeleton and distribute that work
+// under terms of your choice, so long as that work isn't itself a
+// parser generator using the skeleton or a modified version thereof
+// as a parser skeleton.  Alternatively, if you modify or redistribute
+// the parser skeleton itself, you may (at your option) remove this
+// special exception, which will cause the skeleton and the resulting
+// Bison output files to be licensed under the GNU General Public
+// License without this special exception.
+
+// This special exception was added by the Free Software Foundation in
+// version 2.2 of Bison.
+
+/**
+ ** \file location.hh
+ ** Define the isc::eval::location class.
+ */
+
+#ifndef YY_YY_LOCATION_HH_INCLUDED
+# define YY_YY_LOCATION_HH_INCLUDED
+
+# include "position.hh"
+
+#line 21 "parser.yy" // location.cc:337
+namespace isc { namespace eval {
+#line 46 "location.hh" // location.cc:337
+  /// Abstract a location.
+  class location
+  {
+  public:
+
+    /// Construct a location from \a b to \a e.
+    location (const position& b, const position& e)
+      : begin (b)
+      , end (e)
+    {
+    }
+
+    /// Construct a 0-width location in \a p.
+    explicit location (const position& p = position ())
+      : begin (p)
+      , end (p)
+    {
+    }
+
+    /// Construct a 0-width location in \a f, \a l, \a c.
+    explicit location (std::string* f,
+                       unsigned int l = 1u,
+                       unsigned int c = 1u)
+      : begin (f, l, c)
+      , end (f, l, c)
+    {
+    }
+
+
+    /// Initialization.
+    void initialize (std::string* f = YY_NULLPTR,
+                     unsigned int l = 1u,
+                     unsigned int c = 1u)
+    {
+      begin.initialize (f, l, c);
+      end = begin;
+    }
+
+    /** \name Line and Column related manipulators
+     ** \{ */
+  public:
+    /// Reset initial location to final location.
+    void step ()
+    {
+      begin = end;
+    }
+
+    /// Extend the current location to the COUNT next columns.
+    void columns (int count = 1)
+    {
+      end += count;
+    }
+
+    /// Extend the current location to the COUNT next lines.
+    void lines (int count = 1)
+    {
+      end.lines (count);
+    }
+    /** \} */
+
+
+  public:
+    /// Beginning of the located region.
+    position begin;
+    /// End of the located region.
+    position end;
+  };
+
+  /// Join two locations, in place.
+  inline location& operator+= (location& res, const location& end)
+  {
+    res.end = end.end;
+    return res;
+  }
+
+  /// Join two locations.
+  inline location operator+ (location res, const location& end)
+  {
+    return res += end;
+  }
+
+  /// Add \a width columns to the end position, in place.
+  inline location& operator+= (location& res, int width)
+  {
+    res.columns (width);
+    return res;
+  }
+
+  /// Add \a width columns to the end position.
+  inline location operator+ (location res, int width)
+  {
+    return res += width;
+  }
+
+  /// Subtract \a width columns to the end position, in place.
+  inline location& operator-= (location& res, int width)
+  {
+    return res += -width;
+  }
+
+  /// Subtract \a width columns to the end position.
+  inline location operator- (location res, int width)
+  {
+    return res -= width;
+  }
+
+  /// Compare two location objects.
+  inline bool
+  operator== (const location& loc1, const location& loc2)
+  {
+    return loc1.begin == loc2.begin && loc1.end == loc2.end;
+  }
+
+  /// Compare two location objects.
+  inline bool
+  operator!= (const location& loc1, const location& loc2)
+  {
+    return !(loc1 == loc2);
+  }
+
+  /** \brief Intercept output stream redirection.
+   ** \param ostr the destination output stream
+   ** \param loc a reference to the location to redirect
+   **
+   ** Avoid duplicate information.
+   */
+  template <typename YYChar>
+  inline std::basic_ostream<YYChar>&
+  operator<< (std::basic_ostream<YYChar>& ostr, const location& loc)
+  {
+    unsigned int end_col = 0 < loc.end.column ? loc.end.column - 1 : 0;
+    ostr << loc.begin;
+    if (loc.end.filename
+        && (!loc.begin.filename
+            || *loc.begin.filename != *loc.end.filename))
+      ostr << '-' << loc.end.filename << ':' << loc.end.line << '.' << end_col;
+    else if (loc.begin.line < loc.end.line)
+      ostr << '-' << loc.end.line << '.' << end_col;
+    else if (loc.begin.column < end_col)
+      ostr << '-' << end_col;
+    return ostr;
+  }
+
+#line 21 "parser.yy" // location.cc:337
+} } // isc::eval
+#line 192 "location.hh" // location.cc:337
+#endif // !YY_YY_LOCATION_HH_INCLUDED

File diff suppressed because it is too large
+ 1039 - 0
src/lib/eval/parser.cc


File diff suppressed because it is too large
+ 1058 - 0
src/lib/eval/parser.h


+ 141 - 0
src/lib/eval/parser.yy

@@ -0,0 +1,141 @@
+/* Copyright (C) 2015 Internet Systems Consortium, Inc. ("ISC")
+
+   Permission to use, copy, modify, and/or distribute this software for any
+   purpose with or without fee is hereby granted, provided that the above
+   copyright notice and this permission notice appear in all copies.
+
+   THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+   REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+   AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+   INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+   LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+   OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+   PERFORMANCE OF THIS SOFTWARE. */
+
+%skeleton "lalr1.cc" /* -*- C++ -*- */
+%require "3.0.0"
+%defines
+%define parser_class_name {EvalParser}
+%define api.token.constructor
+%define api.value.type variant
+%define api.namespace {isc::eval}
+%define parse.assert
+%code requires
+{
+#include <string>
+#include <eval/token.h>
+#include <eval/eval_context_decl.h>
+#include <boost/lexical_cast.hpp>
+
+using namespace isc::dhcp;
+using namespace isc::eval;
+}
+// The parsing context.
+%param { EvalContext& ctx }
+%locations
+%define parse.trace
+%define parse.error verbose
+%code
+{
+# include "eval_context.h"
+}
+%define api.token.prefix {TOKEN_}
+%token
+  END  0  "end of file"
+  EQUAL "=="
+  OPTION "option"
+  SUBSTRING "substring"
+  ALL "all"
+  COMA ","
+  LPAREN  "("
+  RPAREN  ")"
+  LBRACKET "["
+  RBRACKET "]"
+;
+
+%token <std::string> STRING "constant string"
+%token <std::string> INTEGER "integer"
+%token <std::string> HEXSTRING "constant hexstring"
+%token <std::string> TOKEN
+
+%printer { yyoutput << $$; } <*>;
+%%
+
+// The whole grammar starts with an expression.
+%start expression;
+
+// Expression can either be a single token or a (something == something) expression
+
+expression : bool_expr
+           ;
+
+bool_expr : string_expr EQUAL string_expr
+                {
+                    TokenPtr eq(new TokenEqual());
+                    ctx.expression.push_back(eq);
+                }
+          ;
+
+string_expr : STRING
+                  {
+                      TokenPtr str(new TokenString($1));
+                      ctx.expression.push_back(str);
+                  }
+            | HEXSTRING
+                  {
+                      TokenPtr hex(new TokenHexString($1));
+                      ctx.expression.push_back(hex);
+                  }
+            | OPTION "[" INTEGER "]"
+                  {
+                      int n = 0;
+                      try {
+                          n  = boost::lexical_cast<int>($3);
+                      } catch (const boost::bad_lexical_cast &) {
+                          // This can't happen...
+                          ctx.error(@3,
+                                    "Option code has invalid value in " + $3);
+                      }
+                      if (n < 0 || n > 65535) {
+                          ctx.error(@3,
+                                    "Option code has invalid value in "
+                                    + $3 + ". Allowed range: 0..65535");
+                      }
+                      TokenPtr opt(new TokenOption(static_cast<uint16_t>(n)));
+                      ctx.expression.push_back(opt);
+                  }
+            | SUBSTRING "(" string_expr "," start_expr "," length_expr ")"
+                  {
+                      TokenPtr sub(new TokenSubstring());
+                      ctx.expression.push_back(sub);
+                  }
+            | TOKEN
+                // Temporary, unused token to avoid explicit but long errors
+            ;
+
+start_expr : INTEGER
+                 {
+                     TokenPtr str(new TokenString($1));
+                     ctx.expression.push_back(str);
+                 }
+           ;
+
+length_expr : INTEGER
+                  {
+                      TokenPtr str(new TokenString($1));
+                      ctx.expression.push_back(str);
+                  }
+            | ALL
+                 {
+                     TokenPtr str(new TokenString("all"));
+                     ctx.expression.push_back(str);
+                 }
+            ;
+
+%%
+void
+isc::eval::EvalParser::error(const location_type& loc,
+                             const std::string& what)
+{
+    ctx.error(loc, what);
+}
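
To illustrate what the grammar above produces: a successful parse leaves the tokens on ctx.expression in postfix order, operands first and operators (TokenEqual, TokenSubstring) after their arguments. Below is a minimal sketch of driving the parser, assuming the EvalContext::parseString() interface exercised by the unit tests later in this changeset; the main() wrapper itself is only illustrative.

@code
#include <eval/eval_context.h>
#include <eval/token.h>

using namespace isc::dhcp;
using namespace isc::eval;

int main() {
    // On success the parsed tokens are appended to ctx.expression in
    // postfix order: TokenOption(123), TokenString("MSFT"), TokenEqual.
    EvalContext ctx;
    bool parsed = ctx.parseString("option[123] == 'MSFT'");

    // The resulting expression can later be evaluated against a packet
    // with evaluate(ctx.expression, pkt) from evaluate.h.
    return (parsed ? 0 : 1);
}
@endcode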

+ 181 - 0
src/lib/eval/position.hh

@@ -0,0 +1,181 @@
+// Generated 2015115
+// A Bison parser, made by GNU Bison 3.0.4.
+
+// Positions for Bison parsers in C++
+
+// Copyright (C) 2002-2015 Free Software Foundation, Inc.
+
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU General Public License for more details.
+
+// You should have received a copy of the GNU General Public License
+// along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+// As a special exception, you may create a larger work that contains
+// part or all of the Bison parser skeleton and distribute that work
+// under terms of your choice, so long as that work isn't itself a
+// parser generator using the skeleton or a modified version thereof
+// as a parser skeleton.  Alternatively, if you modify or redistribute
+// the parser skeleton itself, you may (at your option) remove this
+// special exception, which will cause the skeleton and the resulting
+// Bison output files to be licensed under the GNU General Public
+// License without this special exception.
+
+// This special exception was added by the Free Software Foundation in
+// version 2.2 of Bison.
+
+/**
+ ** \file position.hh
+ ** Define the isc::eval::position class.
+ */
+
+#ifndef YY_YY_POSITION_HH_INCLUDED
+# define YY_YY_POSITION_HH_INCLUDED
+
+# include <algorithm> // std::max
+# include <iostream>
+# include <string>
+
+# ifndef YY_NULLPTR
+#  if defined __cplusplus && 201103L <= __cplusplus
+#   define YY_NULLPTR nullptr
+#  else
+#   define YY_NULLPTR 0
+#  endif
+# endif
+
+#line 21 "parser.yy" // location.cc:337
+namespace isc { namespace eval {
+#line 56 "position.hh" // location.cc:337
+  /// Abstract a position.
+  class position
+  {
+  public:
+    /// Construct a position.
+    explicit position (std::string* f = YY_NULLPTR,
+                       unsigned int l = 1u,
+                       unsigned int c = 1u)
+      : filename (f)
+      , line (l)
+      , column (c)
+    {
+    }
+
+
+    /// Initialization.
+    void initialize (std::string* fn = YY_NULLPTR,
+                     unsigned int l = 1u,
+                     unsigned int c = 1u)
+    {
+      filename = fn;
+      line = l;
+      column = c;
+    }
+
+    /** \name Line and Column related manipulators
+     ** \{ */
+    /// (line related) Advance to the COUNT next lines.
+    void lines (int count = 1)
+    {
+      if (count)
+        {
+          column = 1u;
+          line = add_ (line, count, 1);
+        }
+    }
+
+    /// (column related) Advance to the COUNT next columns.
+    void columns (int count = 1)
+    {
+      column = add_ (column, count, 1);
+    }
+    /** \} */
+
+    /// File name to which this position refers.
+    std::string* filename;
+    /// Current line number.
+    unsigned int line;
+    /// Current column number.
+    unsigned int column;
+
+  private:
+    /// Compute max(min, lhs+rhs) (provided min <= lhs).
+    static unsigned int add_ (unsigned int lhs, int rhs, unsigned int min)
+    {
+      return (0 < rhs || -static_cast<unsigned int>(rhs) < lhs
+              ? rhs + lhs
+              : min);
+    }
+  };
+
+  /// Add \a width columns, in place.
+  inline position&
+  operator+= (position& res, int width)
+  {
+    res.columns (width);
+    return res;
+  }
+
+  /// Add \a width columns.
+  inline position
+  operator+ (position res, int width)
+  {
+    return res += width;
+  }
+
+  /// Subtract \a width columns, in place.
+  inline position&
+  operator-= (position& res, int width)
+  {
+    return res += -width;
+  }
+
+  /// Subtract \a width columns.
+  inline position
+  operator- (position res, int width)
+  {
+    return res -= width;
+  }
+
+  /// Compare two position objects.
+  inline bool
+  operator== (const position& pos1, const position& pos2)
+  {
+    return (pos1.line == pos2.line
+            && pos1.column == pos2.column
+            && (pos1.filename == pos2.filename
+                || (pos1.filename && pos2.filename
+                    && *pos1.filename == *pos2.filename)));
+  }
+
+  /// Compare two position objects.
+  inline bool
+  operator!= (const position& pos1, const position& pos2)
+  {
+    return !(pos1 == pos2);
+  }
+
+  /** \brief Intercept output stream redirection.
+   ** \param ostr the destination output stream
+   ** \param pos a reference to the position to redirect
+   */
+  template <typename YYChar>
+  inline std::basic_ostream<YYChar>&
+  operator<< (std::basic_ostream<YYChar>& ostr, const position& pos)
+  {
+    if (pos.filename)
+      ostr << *pos.filename << ':';
+    return ostr << pos.line << '.' << pos.column;
+  }
+
+#line 21 "parser.yy" // location.cc:337
+} } // isc::eval
+#line 180 "position.hh" // location.cc:337
+#endif // !YY_YY_POSITION_HH_INCLUDED
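
The position class above is what renders the "line.column" coordinates seen in the parser's error messages (for example "<string>:1.8-9"). A small sketch of its behaviour, assuming the generated header is reachable as <eval/position.hh>:

@code
#include <eval/position.hh>

#include <iostream>
#include <string>

int main() {
    std::string file("<string>");
    isc::eval::position pos(&file);   // starts at line 1, column 1

    pos.columns(7);                   // advance seven columns
    std::cout << pos << "\n";         // prints "<string>:1.8"

    pos.lines();                      // next line, column resets to 1
    std::cout << pos << "\n";         // prints "<string>:2.1"
    return 0;
}
@endcode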

+ 158 - 0
src/lib/eval/stack.hh

@@ -0,0 +1,158 @@
+// Generated 2015115
+// A Bison parser, made by GNU Bison 3.0.4.
+
+// Stack handling for Bison parsers in C++
+
+// Copyright (C) 2002-2015 Free Software Foundation, Inc.
+
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU General Public License for more details.
+
+// You should have received a copy of the GNU General Public License
+// along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+// As a special exception, you may create a larger work that contains
+// part or all of the Bison parser skeleton and distribute that work
+// under terms of your choice, so long as that work isn't itself a
+// parser generator using the skeleton or a modified version thereof
+// as a parser skeleton.  Alternatively, if you modify or redistribute
+// the parser skeleton itself, you may (at your option) remove this
+// special exception, which will cause the skeleton and the resulting
+// Bison output files to be licensed under the GNU General Public
+// License without this special exception.
+
+// This special exception was added by the Free Software Foundation in
+// version 2.2 of Bison.
+
+/**
+ ** \file stack.hh
+ ** Define the isc::eval::stack class.
+ */
+
+#ifndef YY_YY_STACK_HH_INCLUDED
+# define YY_YY_STACK_HH_INCLUDED
+
+# include <vector>
+
+#line 21 "parser.yy" // stack.hh:151
+namespace isc { namespace eval {
+#line 46 "stack.hh" // stack.hh:151
+  template <class T, class S = std::vector<T> >
+  class stack
+  {
+  public:
+    // Hide our reversed order.
+    typedef typename S::reverse_iterator iterator;
+    typedef typename S::const_reverse_iterator const_iterator;
+
+    stack ()
+      : seq_ ()
+    {
+      seq_.reserve (200);
+    }
+
+    stack (unsigned int n)
+      : seq_ (n)
+    {}
+
+    inline
+    T&
+    operator[] (unsigned int i)
+    {
+      return seq_[seq_.size () - 1 - i];
+    }
+
+    inline
+    const T&
+    operator[] (unsigned int i) const
+    {
+      return seq_[seq_.size () - 1 - i];
+    }
+
+    /// Steal the contents of \a t.
+    ///
+    /// Close to move-semantics.
+    inline
+    void
+    push (T& t)
+    {
+      seq_.push_back (T());
+      operator[](0).move (t);
+    }
+
+    inline
+    void
+    pop (unsigned int n = 1)
+    {
+      for (; n; --n)
+        seq_.pop_back ();
+    }
+
+    void
+    clear ()
+    {
+      seq_.clear ();
+    }
+
+    inline
+    typename S::size_type
+    size () const
+    {
+      return seq_.size ();
+    }
+
+    inline
+    const_iterator
+    begin () const
+    {
+      return seq_.rbegin ();
+    }
+
+    inline
+    const_iterator
+    end () const
+    {
+      return seq_.rend ();
+    }
+
+  private:
+    stack (const stack&);
+    stack& operator= (const stack&);
+    /// The wrapped container.
+    S seq_;
+  };
+
+  /// Present a slice of the top of a stack.
+  template <class T, class S = stack<T> >
+  class slice
+  {
+  public:
+    slice (const S& stack, unsigned int range)
+      : stack_ (stack)
+      , range_ (range)
+    {}
+
+    inline
+    const T&
+    operator [] (unsigned int i) const
+    {
+      return stack_[range_ - i];
+    }
+
+  private:
+    const S& stack_;
+    unsigned int range_;
+  };
+
+#line 21 "parser.yy" // stack.hh:151
+} } // isc::eval
+#line 156 "stack.hh" // stack.hh:151
+
+#endif // !YY_YY_STACK_HH_INCLUDED

+ 4 - 1
src/lib/eval/tests/Makefile.am

@@ -26,7 +26,10 @@ if HAVE_GTEST
 
 TESTS += libeval_unittests
 
-libeval_unittests_SOURCES  = token_unittest.cc run_unittests.cc
+libeval_unittests_SOURCES  = context_unittest.cc
+libeval_unittests_SOURCES += evaluate_unittest.cc
+libeval_unittests_SOURCES += token_unittest.cc
+libeval_unittests_SOURCES += run_unittests.cc
 libeval_unittests_CXXFLAGS = $(AM_CXXFLAGS)
 libeval_unittests_CPPFLAGS = $(AM_CPPFLAGS) $(GTEST_INCLUDES)
 libeval_unittests_LDFLAGS  = $(AM_LDFLAGS) $(CRYPTO_LDFLAGS) $(GTEST_LDFLAGS)

+ 310 - 0
src/lib/eval/tests/context_unittest.cc

@@ -0,0 +1,310 @@
+// Copyright (C) 2015 Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+#include <config.h>
+#include <eval/token.h>
+#include <eval/eval_context.h>
+#include <dhcp/pkt4.h>
+
+#include <boost/shared_ptr.hpp>
+#include <boost/scoped_ptr.hpp>
+#include <gtest/gtest.h>
+
+using namespace std;
+using namespace isc::dhcp;
+
+namespace {
+
+/// @brief Test fixture for testing EvalContext, i.e., the parsing of client class expressions
+class EvalContextTest : public ::testing::Test {
+public:
+    /// @brief checks if the given token is a string with the expected value
+    void checkTokenString(const TokenPtr& token, const std::string& expected) {
+        ASSERT_TRUE(token);
+        boost::shared_ptr<TokenString> str =
+            boost::dynamic_pointer_cast<TokenString>(token);
+        ASSERT_TRUE(str);
+
+        Pkt4Ptr pkt4(new Pkt4(DHCPDISCOVER, 12345));
+        ValueStack values;
+
+        EXPECT_NO_THROW(token->evaluate(*pkt4, values));
+
+        ASSERT_EQ(1, values.size());
+
+        EXPECT_EQ(expected, values.top());
+    }
+
+    /// @brief checks if the given token is a hex string with the expected value
+    void checkTokenHexString(const TokenPtr& token,
+                             const std::string& expected) {
+        ASSERT_TRUE(token);
+        boost::shared_ptr<TokenHexString> hex =
+            boost::dynamic_pointer_cast<TokenHexString>(token);
+        ASSERT_TRUE(hex);
+
+        Pkt4Ptr pkt4(new Pkt4(DHCPDISCOVER, 12345));
+        ValueStack values;
+
+        EXPECT_NO_THROW(token->evaluate(*pkt4, values));
+
+        ASSERT_EQ(1, values.size());
+
+        EXPECT_EQ(expected, values.top());
+    }
+
+    /// @brief checks if the given token is an equal operator
+    void checkTokenEq(const TokenPtr& token) {
+        ASSERT_TRUE(token);
+        boost::shared_ptr<TokenEqual> eq =
+            boost::dynamic_pointer_cast<TokenEqual>(token);
+        EXPECT_TRUE(eq);
+    }
+
+    /// @brief checks if the given token is an option with the expected code
+    void checkTokenOption(const TokenPtr& token, uint16_t expected_code) {
+        ASSERT_TRUE(token);
+        boost::shared_ptr<TokenOption> opt =
+            boost::dynamic_pointer_cast<TokenOption>(token);
+        ASSERT_TRUE(opt);
+
+        EXPECT_EQ(expected_code, opt->getCode());
+    }
+
+    /// @brief checks if the given token is a substring operator
+    void checkTokenSubstring(const TokenPtr& token) {
+        ASSERT_TRUE(token);
+        boost::shared_ptr<TokenSubstring> sub =
+            boost::dynamic_pointer_cast<TokenSubstring>(token);
+        EXPECT_TRUE(sub);
+    }
+
+    /// @brief checks if the given expression raises the expected message
+    /// when it is parsed.
+    void checkError(const string& expr, const string& msg) {
+        EvalContext eval;
+        parsed_ = false;
+        try {
+            parsed_ = eval.parseString(expr);
+            FAIL() << "Expected EvalParseError but nothing was raised";
+        }
+        catch (const EvalParseError& ex) {
+            EXPECT_EQ(msg, ex.what());
+            EXPECT_FALSE(parsed_);
+        }
+        catch (...) {
+            FAIL() << "Expected EvalParseError but something else was raised";
+        }
+    }
+
+    bool parsed_; ///< Parsing status
+};
+
+// Test the parsing of a basic expression
+TEST_F(EvalContextTest, basic) {
+
+    EvalContext tmp;
+
+    EXPECT_NO_THROW(parsed_ = tmp.parseString("option[123] == 'MSFT'"));
+    EXPECT_TRUE(parsed_);
+}
+
+// Test the parsing of a string terminal
+TEST_F(EvalContextTest, string) {
+    EvalContext eval;
+
+    EXPECT_NO_THROW(parsed_ = eval.parseString("'foo' == 'bar'"));
+    EXPECT_TRUE(parsed_);
+
+    ASSERT_EQ(3, eval.expression.size());
+
+    TokenPtr tmp1  = eval.expression.at(0);
+    TokenPtr tmp2  = eval.expression.at(1);
+
+    checkTokenString(tmp1, "foo");
+    checkTokenString(tmp2, "bar");
+}
+
+// Test the parsing of a basic expression using integers
+TEST_F(EvalContextTest, integer) {
+
+    EvalContext eval;
+
+    EXPECT_NO_THROW(parsed_ =
+        eval.parseString("substring(option[123], 0, 2) == '42'"));
+    EXPECT_TRUE(parsed_);
+}
+
+// Test the parsing of a hexstring terminal
+TEST_F(EvalContextTest, hexstring) {
+    EvalContext eval;
+
+    EXPECT_NO_THROW(parsed_ = eval.parseString("0x666f6f == 'foo'"));
+    EXPECT_TRUE(parsed_);
+
+    ASSERT_EQ(3, eval.expression.size());
+
+    TokenPtr tmp = eval.expression.at(0);
+
+    checkTokenHexString(tmp, "foo");
+}
+
+// Test the parsing of a hexstring terminal with an odd number of
+// hexadecimal digits
+TEST_F(EvalContextTest, oddHexstring) {
+    EvalContext eval;
+
+    EXPECT_NO_THROW(parsed_ = eval.parseString("0X7 == 'foo'"));
+    EXPECT_TRUE(parsed_);
+
+    ASSERT_EQ(3, eval.expression.size());
+
+    TokenPtr tmp = eval.expression.at(0);
+
+    checkTokenHexString(tmp, "\a");
+}
+
+// Test the parsing of an equal expression
+TEST_F(EvalContextTest, equal) {
+    EvalContext eval;
+
+    EXPECT_NO_THROW(parsed_ = eval.parseString("'foo' == 'bar'"));
+    EXPECT_TRUE(parsed_);
+
+    ASSERT_EQ(3, eval.expression.size());
+
+    TokenPtr tmp1 = eval.expression.at(0);
+    TokenPtr tmp2 = eval.expression.at(1);
+    TokenPtr tmp3 = eval.expression.at(2);
+
+    checkTokenString(tmp1, "foo");
+    checkTokenString(tmp2, "bar");
+    checkTokenEq(tmp3);
+}
+
+// Test the parsing of an option terminal
+TEST_F(EvalContextTest, option) {
+    EvalContext eval;
+
+    EXPECT_NO_THROW(parsed_ = eval.parseString("option[123] == 'foo'"));
+    EXPECT_TRUE(parsed_);
+    ASSERT_EQ(3, eval.expression.size());
+    checkTokenOption(eval.expression.at(0), 123);
+}
+
+// Test the parsing of a substring expression
+TEST_F(EvalContextTest, substring) {
+    EvalContext eval;
+
+    EXPECT_NO_THROW(parsed_ =
+        eval.parseString("substring('foobar',2,all) == 'obar'"));
+    EXPECT_TRUE(parsed_);
+
+    ASSERT_EQ(6, eval.expression.size());
+
+    TokenPtr tmp1 = eval.expression.at(0);
+    TokenPtr tmp2 = eval.expression.at(1);
+    TokenPtr tmp3 = eval.expression.at(2);
+    TokenPtr tmp4 = eval.expression.at(3);
+
+    checkTokenString(tmp1, "foobar");
+    checkTokenString(tmp2, "2");
+    checkTokenString(tmp3, "all");
+    checkTokenSubstring(tmp4);
+}
+
+// Test some scanner error cases
+TEST_F(EvalContextTest, scanErrors) {
+    checkError("'", "<string>:1.1: Invalid character: '");
+    checkError("'\''", "<string>:1.3: Invalid character: '");
+    checkError("'\n'", "<string>:1.1: Invalid character: '");
+    checkError("0x123h", "<string>:1.6: Invalid character: h");
+    checkError("=", "<string>:1.1: Invalid character: =");
+    checkError("subtring", "<string>:1.1: Invalid character: s");
+    checkError("foo", "<string>:1.1: Invalid character: f");
+    checkError(" bar", "<string>:1.2: Invalid character: b");
+}
+
+// Tests some scanner/parser error cases
+TEST_F(EvalContextTest, scanParseErrors) {
+    checkError("", "<string>:1.1: syntax error, unexpected end of file");
+    checkError(" ", "<string>:1.2: syntax error, unexpected end of file");
+    checkError("0x", "<string>:1.1: syntax error, unexpected integer");
+    checkError("0abc",
+               "<string>:1.1: syntax error, unexpected integer");
+    checkError("===", "<string>:1.1-2: syntax error, unexpected ==");
+    checkError("option[-1]",
+               "<string>:1.8-9: Option code has invalid "
+               "value in -1. Allowed range: 0..65535");
+    checkError("option[65536]",
+               "<string>:1.8-12: Option code has invalid "
+               "value in 65536. Allowed range: 0..65535");
+    checkError("option[12345678901234567890]",
+               "<string>:1.8-27: Failed to convert 12345678901234567890 "
+               "to an integer.");
+    checkError("option[123] < 'foo'", "<string>:1.13: Invalid character: <");
+    checkError("substring('foo',12345678901234567890,1)",
+               "<string>:1.17-36: Failed to convert 12345678901234567890 "
+               "to an integer.");
+}
+
+// Tests some parser error cases
+TEST_F(EvalContextTest, parseErrors) {
+    checkError("'foo''bar'",
+               "<string>:1.6-10: syntax error, unexpected constant string, "
+               "expecting ==");
+    checkError("== 'ab'", "<string>:1.1-2: syntax error, unexpected ==");
+    checkError("'foo' ==",
+               "<string>:1.9: syntax error, unexpected end of file");
+    checkError("option 'ab'",
+               "<string>:1.8-11: syntax error, unexpected "
+               "constant string, expecting [");
+    checkError("option(10) == 'ab'",
+               "<string>:1.7: syntax error, "
+               "unexpected (, expecting [");
+    checkError("option['ab'] == 'foo'",
+               "<string>:1.8-11: syntax error, "
+               "unexpected constant string, "
+               "expecting integer");
+    checkError("option[0xa] == 'ab'",
+               "<string>:1.8-10: syntax error, "
+               "unexpected constant hexstring, "
+               "expecting integer");
+    checkError("substring('foobar') == 'f'",
+               "<string>:1.19: syntax error, "
+               "unexpected ), expecting \",\"");
+    checkError("substring('foobar',3) == 'bar'",
+               "<string>:1.21: syntax error, unexpected ), expecting \",\"");
+    checkError("substring('foobar','3',3) == 'bar'",
+               "<string>:1.20-22: syntax error, unexpected constant string, "
+               "expecting integer");
+    checkError("substring('foobar',1,a) == 'foo'",
+               "<string>:1.22: Invalid character: a");
+}
+
+// Tests some type error cases (caught only by the strongly typed parser)
+TEST_F(EvalContextTest, typeErrors) {
+    checkError("'foobar'",
+               "<string>:1.9: syntax error, unexpected end of file, "
+               "expecting ==");
+    checkError("substring('foobar',all,1) == 'foo'",
+               "<string>:1.20-22: syntax error, unexpected all, "
+               "expecting integer");
+    checkError("substring('foobar',0x32,1) == 'foo'",
+               "<string>:1.20-23: syntax error, unexpected constant "
+               "hexstring, expecting integer");
+}
+
+};

+ 246 - 0
src/lib/eval/tests/evaluate_unittest.cc

@@ -0,0 +1,246 @@
+// Copyright (C) 2015 Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+#include <config.h>
+#include <eval/evaluate.h>
+#include <eval/token.h>
+#include <dhcp/pkt4.h>
+#include <dhcp/pkt6.h>
+#include <dhcp/dhcp4.h>
+#include <dhcp/dhcp6.h>
+#include <dhcp/option_string.h>
+
+#include <boost/shared_ptr.hpp>
+#include <boost/scoped_ptr.hpp>
+#include <gtest/gtest.h>
+
+using namespace std;
+using namespace isc::dhcp;
+
+namespace {
+
+/// @brief Test fixture for testing evaluation.
+///
+/// This class provides several convenience objects to be used during testing
+/// of the evaluation of classification expressions.
+class EvaluateTest : public ::testing::Test {
+public:
+
+    /// @brief Initializes Pkt4,Pkt6 and options that can be useful for
+    ///        evaluation tests.
+    EvaluateTest() {
+        e_.clear();
+
+        pkt4_.reset(new Pkt4(DHCPDISCOVER, 12345));
+        pkt6_.reset(new Pkt6(DHCPV6_SOLICIT, 12345));
+
+        // Add options with easily identifiable strings in them
+        option_str4_.reset(new OptionString(Option::V4, 100, "hundred4"));
+        option_str6_.reset(new OptionString(Option::V6, 100, "hundred6"));
+
+        pkt4_->addOption(option_str4_);
+        pkt6_->addOption(option_str6_);
+    }
+
+    Expression e_; ///< An expression
+
+    bool result_; ///< A decision
+
+    Pkt4Ptr pkt4_; ///< A stub DHCPv4 packet
+    Pkt6Ptr pkt6_; ///< A stub DHCPv6 packet
+
+    OptionPtr option_str4_; ///< A string option for DHCPv4
+    OptionPtr option_str6_; ///< A string option for DHCPv6
+
+    /// @todo: Add more option types here
+};
+
+// This checks the empty expression: it should raise EvalBadStack
+// when evaluated with a Pkt4. (The actual packet is not used)
+TEST_F(EvaluateTest, empty4) {
+    ASSERT_THROW(evaluate(e_, *pkt4_), EvalBadStack);
+}
+
+// This checks the empty expression: it should raise EvalBadStack
+// when evaluated with a Pkt6. (The actual packet is not used)
+TEST_F(EvaluateTest, empty6) {
+    ASSERT_THROW(evaluate(e_, *pkt6_), EvalBadStack);
+}
+
+// This checks the { "false" } expression: it should return false
+// when evaluated with a Pkt4. (The actual packet is not used)
+TEST_F(EvaluateTest, false4) {
+    TokenPtr tfalse;
+    ASSERT_NO_THROW(tfalse.reset(new TokenString("false")));
+    e_.push_back(tfalse);
+    ASSERT_NO_THROW(result_ = evaluate(e_, *pkt4_));
+    EXPECT_FALSE(result_);
+}
+
+// This checks the { "false" } expression: it should return false
+// when evaluated with a Pkt6. (The actual packet is not used)
+TEST_F(EvaluateTest, false6) {
+    TokenPtr tfalse;
+    ASSERT_NO_THROW(tfalse.reset(new TokenString("false")));
+    e_.push_back(tfalse);
+    ASSERT_NO_THROW(result_ = evaluate(e_, *pkt6_));
+    EXPECT_FALSE(result_);
+}
+
+// This checks the { "true" } expression: it should return true
+// when evaluated with a Pkt4. (The actual packet is not used)
+TEST_F(EvaluateTest, true4) {
+    TokenPtr ttrue;
+    ASSERT_NO_THROW(ttrue.reset(new TokenString("true")));
+    e_.push_back(ttrue);
+    ASSERT_NO_THROW(result_ = evaluate(e_, *pkt4_));
+    EXPECT_TRUE(result_);
+}
+
+// This checks the { "true" } expression: it should return true
+// when evaluated with a Pkt6. (The actual packet is not used)
+TEST_F(EvaluateTest, true6) {
+    TokenPtr ttrue;
+    ASSERT_NO_THROW(ttrue.reset(new TokenString("true")));
+    e_.push_back(ttrue);
+    ASSERT_NO_THROW(result_ = evaluate(e_, *pkt6_));
+    EXPECT_TRUE(result_);
+}
+
+// This checks the evaluation must lead to "false" or "true"
+// with a Pkt4. (The actual packet is not used)
+TEST_F(EvaluateTest, bad4) {
+    TokenPtr bad;
+    ASSERT_NO_THROW(bad.reset(new TokenString("bad")));
+    e_.push_back(bad);
+    ASSERT_THROW(evaluate(e_, *pkt4_), EvalTypeError);
+}
+
+// This checks the evaluation must lead to "false" or "true"
+// with a Pkt6. (The actual packet is not used)
+TEST_F(EvaluateTest, bad6) {
+    TokenPtr bad;
+    ASSERT_NO_THROW(bad.reset(new TokenString("bad")));
+    e_.push_back(bad);
+    ASSERT_THROW(evaluate(e_, *pkt6_), EvalTypeError);
+}
+
+// This checks the evaluation must leave only one value on the stack
+// with a Pkt4. (The actual packet is not used)
+TEST_F(EvaluateTest, two4) {
+    TokenPtr ttrue;
+    ASSERT_NO_THROW(ttrue.reset(new TokenString("true")));
+    e_.push_back(ttrue);
+    e_.push_back(ttrue);
+    ASSERT_THROW(evaluate(e_, *pkt4_), EvalBadStack);
+}
+
+// This checks the evaluation must leave only one value on the stack
+// with a Pkt6. (The actual packet is not used)
+TEST_F(EvaluateTest, two6) {
+    TokenPtr ttrue;
+    ASSERT_NO_THROW(ttrue.reset(new TokenString("true")));
+    e_.push_back(ttrue);
+    e_.push_back(ttrue);
+    ASSERT_THROW(evaluate(e_, *pkt6_), EvalBadStack);
+}
+
+// A more complex test evaluated with a Pkt4. (The actual packet is not used)
+TEST_F(EvaluateTest, compare4) {
+    TokenPtr tfoo;
+    TokenPtr tbar;
+    TokenPtr tequal;
+
+    ASSERT_NO_THROW(tfoo.reset(new TokenString("foo")));
+    e_.push_back(tfoo);
+    ASSERT_NO_THROW(tbar.reset(new TokenString("bar")));
+    e_.push_back(tbar);
+    ASSERT_NO_THROW(tequal.reset(new TokenEqual()));
+    e_.push_back(tequal);
+
+    ASSERT_NO_THROW(result_ = evaluate(e_, *pkt4_));
+    EXPECT_FALSE(result_);
+}
+
+// A more complex test evaluated with a Pkt6. (The actual packet is not used)
+TEST_F(EvaluateTest, compare6) {
+    TokenPtr tfoo;
+    TokenPtr tbar;
+    TokenPtr tequal;
+
+    ASSERT_NO_THROW(tfoo.reset(new TokenString("foo")));
+    e_.push_back(tfoo);
+    ASSERT_NO_THROW(tbar.reset(new TokenString("bar")));
+    e_.push_back(tbar);
+    ASSERT_NO_THROW(tequal.reset(new TokenEqual()));
+    e_.push_back(tequal);
+
+    ASSERT_NO_THROW(result_ = evaluate(e_, *pkt6_));
+    EXPECT_FALSE(result_);
+}
+
+// A test using packets.
+TEST_F(EvaluateTest, packet) {
+    TokenPtr toption;
+    TokenPtr tstring;
+    TokenPtr tequal;
+
+    ASSERT_NO_THROW(toption.reset(new TokenOption(100)));
+    e_.push_back(toption);
+    ASSERT_NO_THROW(tstring.reset(new TokenString("hundred4")));
+    e_.push_back(tstring);
+    ASSERT_NO_THROW(tequal.reset(new TokenEqual()));
+    e_.push_back(tequal);
+
+    ASSERT_NO_THROW(result_ = evaluate(e_, *pkt4_));
+    EXPECT_TRUE(result_);
+    ASSERT_NO_THROW(result_ = evaluate(e_, *pkt6_));
+    EXPECT_FALSE(result_);
+}
+
+// A test using substring on an option.
+TEST_F(EvaluateTest, complex) {
+    TokenPtr toption;
+    TokenPtr tstart;
+    TokenPtr tlength;
+    TokenPtr tsubstring;
+    TokenPtr tstring;
+    TokenPtr tequal;
+
+    // Get the option, i.e., "hundred[46]"
+    ASSERT_NO_THROW(toption.reset(new TokenOption(100)));
+    e_.push_back(toption);
+
+    // Get substring("hundred[46]", 0, 7), i.e., "hundred"
+    ASSERT_NO_THROW(tstart.reset(new TokenString("0")));
+    e_.push_back(tstart);
+    ASSERT_NO_THROW(tlength.reset(new TokenString("7")));
+    e_.push_back(tlength);
+    ASSERT_NO_THROW(tsubstring.reset(new TokenSubstring()));
+    e_.push_back(tsubstring);
+
+    // Compare with "hundred"
+    ASSERT_NO_THROW(tstring.reset(new TokenString("hundred")));
+    e_.push_back(tstring);
+    ASSERT_NO_THROW(tequal.reset(new TokenEqual()));
+    e_.push_back(tequal);
+
+    // Should return true for v4 and v6 packets
+    ASSERT_NO_THROW(result_ = evaluate(e_, *pkt4_));
+    EXPECT_TRUE(result_);
+    ASSERT_NO_THROW(result_ = evaluate(e_, *pkt6_));
+    EXPECT_TRUE(result_);
+}
+
+};

+ 23 - 16
src/lib/eval/tests/token_unittest.cc

@@ -72,10 +72,12 @@ public:
     /// @param test_start The position to start when getting a substring
     /// @param test_length The length of the substring to get
     /// @param result_string The expected result of the eval
+    /// @param should_throw When true, the evaluation is expected to throw
     void verifySubstringEval(const std::string& test_string,
                              const std::string& test_start,
                              const std::string& test_length,
-                             const std::string& result_string) {
+                             const std::string& result_string,
+                             bool should_throw = false) {
 
         // create the token
         ASSERT_NO_THROW(t_.reset(new TokenSubstring()));
@@ -86,14 +88,19 @@ public:
         values_.push(test_length);
 
         // evaluate the token
-        EXPECT_NO_THROW(t_->evaluate(*pkt4_, values_));
-
-        // verify results
-        ASSERT_EQ(1, values_.size());
-        EXPECT_EQ(result_string, values_.top());
-
-        // remove result
-        values_.pop();
+        if (should_throw) {
+            EXPECT_THROW(t_->evaluate(*pkt4_, values_), EvalTypeError);
+            ASSERT_EQ(0, values_.size());
+        } else {
+            EXPECT_NO_THROW(t_->evaluate(*pkt4_, values_));
+
+            // verify results
+            ASSERT_EQ(1, values_.size());
+            EXPECT_EQ(result_string, values_.top());
+
+            // remove result
+            values_.pop();
+        }
     }
 
     /// @todo: Add more option types here
@@ -443,13 +450,13 @@ TEST_F(TokenTest, substringStartingPosition) {
 // Check what happens if we use strings that aren't numbers for start or length
 // We should return the empty string
 TEST_F(TokenTest, substringBadParams) {
-    verifySubstringEval("foobar", "0ick", "all", "");
-    verifySubstringEval("foobar", "ick0", "all", "");
-    verifySubstringEval("foobar", "ick", "all", "");
-    verifySubstringEval("foobar", "0", "ick", "");
-    verifySubstringEval("foobar", "0", "0ick", "");
-    verifySubstringEval("foobar", "0", "ick0", "");
-    verifySubstringEval("foobar", "0", "allaboard", "");
+    verifySubstringEval("foobar", "0ick", "all", "", true);
+    verifySubstringEval("foobar", "ick0", "all", "", true);
+    verifySubstringEval("foobar", "ick", "all", "", true);
+    verifySubstringEval("foobar", "0", "ick", "", true);
+    verifySubstringEval("foobar", "0", "0ick", "", true);
+    verifySubstringEval("foobar", "0", "ick0", "", true);
+    verifySubstringEval("foobar", "0", "allaboard", "", true);
 }
 
 // lastly check that we don't get anything if the string is empty or

+ 9 - 7
src/lib/eval/token.cc

@@ -121,19 +121,21 @@ TokenSubstring::evaluate(const Pkt& /*pkt*/, ValueStack& values) {
     int length;
     try {
         start_pos = boost::lexical_cast<int>(start_str);
+    } catch (const boost::bad_lexical_cast&) {
+        isc_throw(EvalTypeError, "the parameter '" << start_str
+                  << "' for the starting position of the substring "
+                  << "couldn't be converted to an integer.");
+    }
+    try {
         if (len_str == "all") {
             length = string_str.length();
         } else {
             length = boost::lexical_cast<int>(len_str);
         }
     } catch (const boost::bad_lexical_cast&) {
-        LOG_DEBUG(eval_logger, EVAL_DBG_TRACE,
-                  EVAL_SUBSTRING_BAD_PARAM_CONVERSION)
-            .arg(start_str)
-            .arg(len_str);
-
-        values.push("");
-        return;
+        isc_throw(EvalTypeError, "the parameter '" << len_str
+                  << "' for the length of the substring "
+                  << "couldn't be converted to an integer.");
     }
 
     const int string_length = string_str.length();
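
With this change a non-numeric start or length is no longer converted into an empty result; TokenSubstring::evaluate() now throws EvalTypeError. Below is a minimal sketch of the new behaviour, assuming the token and stack types shown elsewhere in this changeset; the demo function name is illustrative only.

@code
#include <eval/token.h>
#include <dhcp/pkt4.h>

using namespace isc::dhcp;

void substringDemo() {
    Pkt4 pkt(DHCPDISCOVER, 12345);
    ValueStack values;

    values.push("foobar");   // string to take the substring of
    values.push("1z");       // bad starting position
    values.push("all");      // length

    TokenSubstring substr;
    try {
        substr.evaluate(pkt, values);
    } catch (const EvalTypeError& ex) {
        // "the parameter '1z' for the starting position of the
        //  substring couldn't be converted to an integer."
    }
}
@endcode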

+ 22 - 1
src/lib/eval/token.h

@@ -40,7 +40,7 @@ typedef boost::shared_ptr<Expression> ExpressionPtr;
 /// Evaluated values are stored as a stack of strings
 typedef std::stack<std::string> ValueStack;
 
-/// @brief EvalStackError is thrown when more or less parameters are on the
+/// @brief EvalBadStack is thrown when more or less parameters are on the
 ///        stack than expected.
 class EvalBadStack : public Exception {
 public:
@@ -48,6 +48,15 @@ public:
         isc::Exception(file, line, what) { };
 };
 
+/// @brief EvalTypeError is thrown when a value on the stack has a content
+///        with an unexpected type.
+class EvalTypeError : public Exception {
+public:
+    EvalTypeError(const char* file, size_t line, const char* what) :
+        isc::Exception(file, line, what) { };
+};
+
+
 /// @brief Base class for all tokens
 ///
 /// It provides an interface for all tokens and storage for string representation
@@ -157,6 +166,16 @@ public:
     /// @param values value of the option will be pushed here (or "")
     void evaluate(const Pkt& pkt, ValueStack& values);
 
+    /// @brief Returns option-code
+    ///
+    /// This method is used in testing to determine if the parser had
+    /// instantiated TokenOption with correct parameters.
+    ///
+    /// @return option-code of the option this token expects to extract.
+    uint16_t getCode() const {
+        return (option_code_);
+    }
+
 private:
     uint16_t option_code_; ///< code of the option to be extracted
 };
@@ -233,6 +252,8 @@ public:
     /// - -1, -4  => "ooba"
     ///
     /// @throw EvalBadStack if there are less than 3 values on stack
+    /// @throw EvalTypeError if start is not a number or length a number or
+    ///        the special value "all".
     ///
     /// @param pkt (unused)
     /// @param values - stack of values (3 arguments will be popped, 1 result

+ 27 - 7
src/lib/hooks/hooks_user.dox

@@ -792,19 +792,32 @@ DHCPv4 module, it must be listed in the "hooks-libraries" element of the
 @code
 "Dhcp4": {
        :
-    "hooks-libraries": [ "/usr/local/lib/example.so" ]
-       :
+    "hooks-libraries": [
+        {
+            "library": "/usr/local/lib/example.so"
+        }
+    ]
+        :
 }
 @endcode
 (Note that "hooks" is plural.)
 
+Each entry in the "hooks-libraries" list is a structure (a "map" in JSON
+parlance) that holds the following element:
+- library - the name of the library to load.  This must be a string.
+
+@note The syntax of the hooks-libraries configuration element has changed
+since Kea 0.9.2 (in that version, "hooks-libraries" was just a list of
+libraries).  This change is in preparation for the introduction of
+library-specific parameters, which will be added to Kea in a version after 1.0.
+
 The DHCPv4 server will load the library and execute the callouts each time a
 request is received.
 
-@note The above assumes that the hooks library will be used with a version of
-Kea that is dynamically-linked.  For information regarding running
-hooks libraries against a statically-linked Kea, see
-@ref hooksdgStaticallyLinkedKea.
+@note All the above assumes that the hooks library will be used with a
+version of Kea that is dynamically-linked.  For information regarding
+running hooks libraries against a statically-linked Kea, see @ref
+hooksdgStaticallyLinkedKea.
 
 @section hooksdgAdvancedTopics Advanced Topics
 
@@ -1184,8 +1197,15 @@ as separate elements of the hooks-libraries configuration element, e.g.
 @code
 "Dhcp4": {
        :
-    "hooks-libraries": [ "/usr/lib/library1.so", "/opt/library2.so" ]
+    "hooks-libraries": [
+        {
+            "library": "/usr/lib/library1.so"
+        },
+        {
+            "library": "/opt/library2.so"
+        }
        :
+    ]
 }
 @endcode
 

+ 1 - 0
src/lib/util/Makefile.am

@@ -23,6 +23,7 @@ libkea_util_la_SOURCES += range_utilities.h
 libkea_util_la_SOURCES += signal_set.cc signal_set.h
 libkea_util_la_SOURCES += stopwatch.cc stopwatch.h
 libkea_util_la_SOURCES += stopwatch_impl.cc stopwatch_impl.h
+libkea_util_la_SOURCES += versioned_csv_file.h versioned_csv_file.cc
 libkea_util_la_SOURCES += watch_socket.cc watch_socket.h
 libkea_util_la_SOURCES += encode/base16_from_binary.h
 libkea_util_la_SOURCES += encode/base32hex.h encode/base64.h

+ 8 - 2
src/lib/util/csv_file.cc

@@ -65,6 +65,12 @@ CSVRow::writeAt(const size_t at, const char* value) {
     values_[at] = value;
 }
 
+void
+CSVRow::trim(const size_t count) {
+    checkIndex(count);
+    values_.resize(values_.size() - count);
+}
+
 std::ostream& operator<<(std::ostream& os, const CSVRow& row) {
     os << row.render();
     return (os);
@@ -296,9 +302,9 @@ CSVFile::open(const bool seek_to_end) {
 
             // Check the header against the columns specified for the CSV file.
             if (!validateHeader(header)) {
-
                 isc_throw(CSVFileError, "invalid header '" << header
-                          << "' in CSV file '" << filename_ << "'");
+                          << "' in CSV file '" << filename_ << "': "
+                          << getReadMsg());
             }
 
             // Everything is good, so if we haven't added any columns yet,

+ 9 - 3
src/lib/util/csv_file.h

@@ -117,6 +117,14 @@ public:
     /// @c CSVRow::getValuesCount.
     std::string readAt(const size_t at) const;
 
+    /// @brief Trims a given number of elements from the end of a row
+    ///
+    /// @param count Number of elements to trim from the end of the row
+    ///
+    /// @throw CSVFileError if the number to trim is larger than
+    /// the number of elements in the row
+    void trim(const size_t count);
+
     /// @brief Retrieves a value from the internal container.
     ///
     /// This method reads a value from the internal container and converts
@@ -404,7 +412,7 @@ public:
     /// Otherwise, this function will write the header to the file.
     /// In order to write rows to opened file, the @c append function
     /// should be called.
-    void recreate();
+    virtual void recreate();
 
     /// @brief Sets error message after row validation.
     ///
@@ -469,8 +477,6 @@ protected:
     /// This function is called internally by @ref CSVFile::open. Derived classes
     /// may add extra validation steps.
     ///
-    /// @todo There should be a support for optional columns (see ticket #3626).
-    ///
     /// @param header A row holding a header.
     /// @return true if header matches the columns; false otherwise.
     virtual bool validateHeader(const CSVRow& header);

+ 1 - 0
src/lib/util/tests/Makefile.am

@@ -50,6 +50,7 @@ run_unittests_SOURCES += time_utilities_unittest.cc
 run_unittests_SOURCES += range_utilities_unittest.cc
 run_unittests_SOURCES += signal_set_unittest.cc
 run_unittests_SOURCES += stopwatch_unittest.cc
+run_unittests_SOURCES += versioned_csv_file_unittest.cc
 run_unittests_SOURCES += watch_socket_unittests.cc
 
 

+ 29 - 0
src/lib/util/tests/csv_file_unittest.cc

@@ -109,6 +109,35 @@ TEST(CSVRow, append) {
     EXPECT_EQ("alpha,beta,gamma,delta,epsilon", text);
 }
 
+// This test checks that a row can be trimmed of
+// a given number of elements
+TEST(CSVRow, trim) {
+    CSVRow row("zero,one,two,three,four");
+    ASSERT_EQ(5, row.getValuesCount());
+    EXPECT_EQ("zero", row.readAt(0));
+    EXPECT_EQ("one", row.readAt(1));
+    EXPECT_EQ("two", row.readAt(2));
+    EXPECT_EQ("three", row.readAt(3));
+    EXPECT_EQ("four", row.readAt(4));
+
+    ASSERT_THROW(row.trim(10), CSVFileError);
+
+    // Verify that we can erase just one
+    ASSERT_NO_THROW(row.trim(1));
+    ASSERT_EQ(4, row.getValuesCount());
+    EXPECT_EQ("zero", row.readAt(0));
+    EXPECT_EQ("one", row.readAt(1));
+    EXPECT_EQ("two", row.readAt(2));
+    EXPECT_EQ("three", row.readAt(3));
+
+    // Verify we can trim more than one
+    ASSERT_NO_THROW(row.trim(2));
+    ASSERT_EQ(2, row.getValuesCount());
+    EXPECT_EQ("zero", row.readAt(0));
+    EXPECT_EQ("one", row.readAt(1));
+}
+
+
 /// @brief Test fixture class for testing operations on CSV file.
 ///
 /// It implements basic operations on files, such as reading writing

+ 509 - 0
src/lib/util/tests/versioned_csv_file_unittest.cc

@@ -0,0 +1,509 @@
+// Copyright (C) 2015 Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+#include <config.h>
+#include <util/versioned_csv_file.h>
+#include <boost/scoped_ptr.hpp>
+#include <gtest/gtest.h>
+#include <fstream>
+#include <sstream>
+#include <string>
+
+#include <boost/algorithm/string/classification.hpp>
+#include <boost/algorithm/string/constants.hpp>
+#include <boost/algorithm/string/split.hpp>
+
+namespace {
+
+using namespace isc::util;
+
+/// @brief Test fixture class for testing operations on VersionedCSVFile.
+///
+/// It implements basic operations on files, such as reading writing
+/// file removal and checking presence of the file. This is used by
+/// unit tests to verify correctness of the file created by the
+/// CSVFile class.
+class VersionedCSVFileTest : public ::testing::Test {
+public:
+
+    /// @brief Constructor.
+    ///
+    /// Sets the path to the CSV file used throughout the tests.
+    /// The name of the file is test.csv and it is located in the
+    /// current build folder.
+    ///
+    /// It also deletes any dangling files after previous tests.
+    VersionedCSVFileTest();
+
+    /// @brief Destructor.
+    ///
+    /// Deletes the test CSV file if any.
+    virtual ~VersionedCSVFileTest();
+
+    /// @brief Prepends the absolute path to the file specified
+    /// as an argument.
+    ///
+    /// @param filename Name of the file.
+    /// @return Absolute path to the test file.
+    static std::string absolutePath(const std::string& filename);
+
+    /// @brief Check if test file exists on disk.
+    bool exists() const;
+
+    /// @brief Reads whole CSV file.
+    ///
+    /// @return Contents of the file.
+    std::string readFile() const;
+
+    /// @brief Removes existing file (if any).
+    int removeFile() const;
+
+    /// @brief Creates file with contents.
+    ///
+    /// @param contents Contents of the file.
+    void writeFile(const std::string& contents) const;
+
+    /// @brief Absolute path to the file used in the tests.
+    std::string testfile_;
+
+};
+
+VersionedCSVFileTest::VersionedCSVFileTest()
+    : testfile_(absolutePath("test.csv")) {
+    static_cast<void>(removeFile());
+}
+
+VersionedCSVFileTest::~VersionedCSVFileTest() {
+    static_cast<void>(removeFile());
+}
+
+std::string
+VersionedCSVFileTest::absolutePath(const std::string& filename) {
+    std::ostringstream s;
+    s << TEST_DATA_BUILDDIR << "/" << filename;
+    return (s.str());
+}
+
+bool
+VersionedCSVFileTest::exists() const {
+    std::ifstream fs(testfile_.c_str());
+    bool ok = fs.good();
+    fs.close();
+    return (ok);
+}
+
+std::string
+VersionedCSVFileTest::readFile() const {
+    std::ifstream fs(testfile_.c_str());
+    if (!fs.is_open()) {
+        return ("");
+    }
+    std::string contents((std::istreambuf_iterator<char>(fs)),
+                         std::istreambuf_iterator<char>());
+    fs.close();
+    return (contents);
+}
+
+int
+VersionedCSVFileTest::removeFile() const {
+    return (remove(testfile_.c_str()));
+}
+
+void
+VersionedCSVFileTest::writeFile(const std::string& contents) const {
+    std::ofstream fs(testfile_.c_str(), std::ofstream::out);
+    if (fs.is_open()) {
+        fs << contents;
+        fs.close();
+    }
+}
+
+// This test checks that the function which is used to add columns of the
+// CSV file works as expected.
+TEST_F(VersionedCSVFileTest, addColumn) {
+    boost::scoped_ptr<VersionedCSVFile> csv(new VersionedCSVFile(testfile_));
+
+    // Verify that we're not allowed to open it without the schema
+    ASSERT_THROW(csv->open(), VersionedCSVFileError);
+
+    // Add two columns.
+    ASSERT_NO_THROW(csv->addColumn("animal", "1.0", ""));
+    ASSERT_NO_THROW(csv->addColumn("color", "2.0", "blue"));
+
+    // Make sure we can't add duplicates.
+    EXPECT_THROW(csv->addColumn("animal", "1.0", ""), CSVFileError);
+    EXPECT_THROW(csv->addColumn("color", "2.0", "blue"), CSVFileError);
+
+    // But we should still be able to add unique columns.
+    EXPECT_NO_THROW(csv->addColumn("age", "3.0", "21"));
+
+    // Assert that the file is opened, because the rest of the test relies
+    // on this.
+    ASSERT_NO_THROW(csv->recreate());
+    ASSERT_TRUE(exists());
+
+    // We should have 3 defined columns
+    // Input Header should match defined columns on new files
+    // Valid columns should match defined columns on new files
+    // Minimum valid columns wasn't set. (Remember, it's optional.)
+    EXPECT_EQ(3, csv->getColumnCount());
+    EXPECT_EQ(3, csv->getInputHeaderCount());
+    EXPECT_EQ(3, csv->getValidColumnCount());
+    EXPECT_EQ(0, csv->getMinimumValidColumns());
+
+    // Schema versions for new files should always match
+    EXPECT_EQ("3.0", csv->getInputSchemaVersion());
+    EXPECT_EQ("3.0", csv->getSchemaVersion());
+
+    // Input Schema State should be current for new files
+    EXPECT_EQ(VersionedCSVFile::CURRENT, csv->getInputSchemaState());
+    EXPECT_FALSE(csv->needsConversion());
+
+    // Make sure we can't add columns (even unique) when the file is open.
+    ASSERT_THROW(csv->addColumn("zoo", "3.0", ""), CSVFileError);
+
+    // Close the file.
+    ASSERT_NO_THROW(csv->close());
+    // And check that now it is possible to add the column.
+    EXPECT_NO_THROW(csv->addColumn("zoo", "3.0", ""));
+}
+
+// Verifies that a current schema version file loads correctly.
+TEST_F(VersionedCSVFileTest, currentSchemaTest) {
+
+    // Create our versioned file, with three columns
+    boost::scoped_ptr<VersionedCSVFile> csv(new VersionedCSVFile(testfile_));
+    ASSERT_NO_THROW(csv->addColumn("animal", "2.0", ""));
+    ASSERT_NO_THROW(csv->addColumn("color", "2.0", "grey"));
+    ASSERT_NO_THROW(csv->addColumn("age", "2.0", "0"));
+
+    // Write a file compliant with the current schema version.
+    writeFile("animal,color,age\n"
+              "cat,black,2\n"
+              "lion,yellow,17\n"
+              "dog,brown,5\n");
+
+    // Header should pass validation and allow the open to succeed.
+    ASSERT_NO_THROW(csv->open());
+
+    // For a schema-current file we should have:
+    // 3 defined columns
+    // 3 columns total found in the header
+    // 3 valid columns found in the header
+    // Minimum valid columns wasn't set. (Remember, it's optional.)
+    EXPECT_EQ(3, csv->getColumnCount());
+    EXPECT_EQ(3, csv->getInputHeaderCount());
+    EXPECT_EQ(3, csv->getValidColumnCount());
+    EXPECT_EQ(0, csv->getMinimumValidColumns());
+
+    // Input schema and current schema should both be 2.0
+    EXPECT_EQ("2.0", csv->getInputSchemaVersion());
+    EXPECT_EQ("2.0", csv->getSchemaVersion());
+
+    // Input Schema State should be CURRENT
+    EXPECT_EQ(VersionedCSVFile::CURRENT, csv->getInputSchemaState());
+    EXPECT_FALSE(csv->needsConversion());
+
+    // First row is correct.
+    CSVRow row;
+    ASSERT_TRUE(csv->next(row));
+    EXPECT_EQ("cat", row.readAt(0));
+    EXPECT_EQ("black", row.readAt(1));
+    EXPECT_EQ("2", row.readAt(2));
+
+    // Second row is correct.
+    ASSERT_TRUE(csv->next(row));
+    EXPECT_EQ("lion", row.readAt(0));
+    EXPECT_EQ("yellow", row.readAt(1));
+    EXPECT_EQ("17", row.readAt(2));
+
+    // Third row is correct.
+    ASSERT_TRUE(csv->next(row));
+    EXPECT_EQ("dog", row.readAt(0));
+    EXPECT_EQ("brown", row.readAt(1));
+    EXPECT_EQ("5", row.readAt(2));
+}
+
+
+// Verifies the basic ability to upgrade valid files.
+// It starts with a version 1.0 file and updates
+// it through two schema evolutions.
+TEST_F(VersionedCSVFileTest, upgradeOlderVersions) {
+
+    // Create version 1.0 schema  CSV file
+    writeFile("animal\n"
+              "cat\n"
+              "lion\n"
+              "dog\n");
+
+    // Create our versioned file, with two columns, one for each
+    // schema version
+    boost::scoped_ptr<VersionedCSVFile> csv(new VersionedCSVFile(testfile_));
+    ASSERT_NO_THROW(csv->addColumn("animal", "1.0", ""));
+    ASSERT_NO_THROW(csv->addColumn("color", "2.0", "blue"));
+
+    // Header should pass validation and allow the open to succeed.
+    ASSERT_NO_THROW(csv->open());
+
+    // We should have:
+    // 2 defined columns
+    // 1 column found in the header
+    // 1 valid column in the header
+    // Minimum valid columns wasn't set. (Remember, it's optional.)
+    EXPECT_EQ(2, csv->getColumnCount());
+    EXPECT_EQ(1, csv->getInputHeaderCount());
+    EXPECT_EQ(1, csv->getValidColumnCount());
+    EXPECT_EQ(0, csv->getMinimumValidColumns());
+
+    // Input schema should be 1.0, while our current schema should be 2.0
+    EXPECT_EQ("1.0", csv->getInputSchemaVersion());
+    EXPECT_EQ("2.0", csv->getSchemaVersion());
+
+    // Input Schema State should be NEEDS_UPGRADE
+    EXPECT_EQ(VersionedCSVFile::NEEDS_UPGRADE, csv->getInputSchemaState());
+    EXPECT_TRUE(csv->needsConversion());
+
+    // First row is correct.
+    CSVRow row;
+    ASSERT_TRUE(csv->next(row));
+    EXPECT_EQ("cat", row.readAt(0));
+    EXPECT_EQ("blue", row.readAt(1));
+
+    // Second row is correct.
+    ASSERT_TRUE(csv->next(row));
+    EXPECT_EQ("lion", row.readAt(0));
+    EXPECT_EQ("blue", row.readAt(1));
+
+    // Third row is correct.
+    ASSERT_TRUE(csv->next(row));
+    EXPECT_EQ("dog", row.readAt(0));
+    EXPECT_EQ("blue", row.readAt(1));
+
+    // Now, let's try to append something to this file.
+    CSVRow row_write(2);
+    row_write.writeAt(0, "bird");
+    row_write.writeAt(1, "yellow");
+    ASSERT_NO_THROW(csv->append(row_write));
+
+    // Close the file
+    ASSERT_NO_THROW(csv->flush());
+    ASSERT_NO_THROW(csv->close());
+
+
+    // Check that the file contents are correct.
+    EXPECT_EQ("animal\n"
+              "cat\n"
+              "lion\n"
+              "dog\n"
+              "bird,yellow\n",
+              readFile());
+
+    // Create a third schema by adding a column
+    ASSERT_NO_THROW(csv->addColumn("age", "3.0", "21"));
+    ASSERT_EQ(3, csv->getColumnCount());
+
+    // Header should pass validation and allow the open to succeed
+    ASSERT_NO_THROW(csv->open());
+
+    // We should have:
+    // 3 defined columns
+    // 1 column found in the header
+    // 1 valid column in the header
+    // Minimum valid columns wasn't set. (Remember, it's optional.)
+    EXPECT_EQ(3, csv->getColumnCount());
+    EXPECT_EQ(1, csv->getInputHeaderCount());
+    EXPECT_EQ(1, csv->getValidColumnCount());
+    EXPECT_EQ(0, csv->getMinimumValidColumns());
+
+    // Make sure schema versions are accurate
+    EXPECT_EQ("1.0", csv->getInputSchemaVersion());
+    EXPECT_EQ("3.0", csv->getSchemaVersion());
+
+    // Input Schema State should be NEEDS_UPGRADE
+    EXPECT_EQ(VersionedCSVFile::NEEDS_UPGRADE, csv->getInputSchemaState());
+    EXPECT_TRUE(csv->needsConversion());
+
+    // First row is correct.
+    ASSERT_TRUE(csv->next(row));
+    EXPECT_EQ("cat", row.readAt(0));
+    EXPECT_EQ("blue", row.readAt(1));
+    EXPECT_EQ("21", row.readAt(2));
+
+    // Second row is correct.
+    ASSERT_TRUE(csv->next(row));
+    EXPECT_EQ("lion", row.readAt(0));
+    EXPECT_EQ("blue", row.readAt(1));
+    EXPECT_EQ("21", row.readAt(2));
+
+    // Third row is correct.
+    ASSERT_TRUE(csv->next(row));
+    EXPECT_EQ("dog", row.readAt(0));
+    EXPECT_EQ("blue", row.readAt(1));
+    EXPECT_EQ("21", row.readAt(2));
+
+    // Fourth row is correct.
+    ASSERT_TRUE(csv->next(row));
+    EXPECT_EQ("bird", row.readAt(0));
+    EXPECT_EQ("yellow", row.readAt(1));
+    EXPECT_EQ("21", row.readAt(2));
+}
+
+TEST_F(VersionedCSVFileTest, minimumValidColumn) {
+    // Create version 1.0 schema  CSV file
+    writeFile("animal\n"
+              "cat\n"
+              "lion\n"
+              "dog\n");
+
+    // Create our versioned file, with three columns, one for each
+    // schema version
+    boost::scoped_ptr<VersionedCSVFile> csv(new VersionedCSVFile(testfile_));
+    ASSERT_NO_THROW(csv->addColumn("animal", "1.0", ""));
+    ASSERT_NO_THROW(csv->addColumn("color", "2.0", "blue"));
+    ASSERT_NO_THROW(csv->addColumn("age", "3.0", "21"));
+
+    // Verify we can't set minimum columns with a non-existent column
+    EXPECT_THROW(csv->setMinimumValidColumns("bogus"), VersionedCSVFileError);
+
+    // Set the minimum number of columns to "color"
+    csv->setMinimumValidColumns("color");
+    EXPECT_EQ(2, csv->getMinimumValidColumns());
+
+    // Header validation should fail, too few columns
+    ASSERT_THROW(csv->open(), CSVFileError);
+
+    // Set the minimum number of columns to 1.  File should parse now.
+    csv->setMinimumValidColumns("animal");
+    EXPECT_EQ(1, csv->getMinimumValidColumns());
+    ASSERT_NO_THROW(csv->open());
+
+    // First row is correct.
+    CSVRow row;
+    ASSERT_TRUE(csv->next(row));
+    EXPECT_EQ("cat", row.readAt(0));
+    EXPECT_EQ("blue", row.readAt(1));
+    EXPECT_EQ("21", row.readAt(2));
+
+    ASSERT_TRUE(csv->next(row));
+    EXPECT_EQ("lion", row.readAt(0));
+    EXPECT_EQ("blue", row.readAt(1));
+    EXPECT_EQ("21", row.readAt(2));
+
+    ASSERT_TRUE(csv->next(row));
+    EXPECT_EQ("dog", row.readAt(0));
+    EXPECT_EQ("blue", row.readAt(1));
+    EXPECT_EQ("21", row.readAt(2));
+}
+
+TEST_F(VersionedCSVFileTest, invalidHeaderColumn) {
+
+    // Create our version 2.0 schema file
+    boost::scoped_ptr<VersionedCSVFile> csv(new VersionedCSVFile(testfile_));
+    ASSERT_NO_THROW(csv->addColumn("animal", "1.0", ""));
+    ASSERT_NO_THROW(csv->addColumn("color", "2.0", "blue"));
+
+    // Create a file with the correct number of columns but a wrong column name
+    writeFile("animal,colour\n"
+              "cat,red\n"
+              "lion,green\n");
+
+    // Header validation should fail, we have an invalid column
+    ASSERT_THROW(csv->open(), CSVFileError);
+}
+
+TEST_F(VersionedCSVFileTest, downGrading) {
+    // Create our version 2.0 schema file
+    boost::scoped_ptr<VersionedCSVFile> csv(new VersionedCSVFile(testfile_));
+    ASSERT_NO_THROW(csv->addColumn("animal", "1.0", ""));
+    ASSERT_NO_THROW(csv->addColumn("color", "2.0", "blue"));
+
+    // Create schema 2.0 file PLUS an extra column
+    writeFile("animal,color,age\n"
+              "cat,red,5\n"
+              "lion,green,8\n");
+
+    // Header should validate and file should open.
+    ASSERT_NO_THROW(csv->open());
+
+    // We should have:
+    // 2 defined columns
+    // 3 columns found in the header
+    // 2 valid columns in the header
+    // Minimum valid columns wasn't set. (Remember, it's optional.)
+    EXPECT_EQ(2, csv->getColumnCount());
+    EXPECT_EQ(3, csv->getInputHeaderCount());
+    EXPECT_EQ(2, csv->getValidColumnCount());
+    EXPECT_EQ(0, csv->getMinimumValidColumns());
+
+    // Input schema and current schema should both be 2.0
+    EXPECT_EQ("2.0", csv->getInputSchemaVersion());
+    EXPECT_EQ("2.0", csv->getSchemaVersion());
+
+    // Input Schema State should be NEEDS_DOWNGRADE
+    EXPECT_EQ(VersionedCSVFile::NEEDS_DOWNGRADE, csv->getInputSchemaState());
+    EXPECT_TRUE(csv->needsConversion());
+
+    // First row is correct.
+    CSVRow row;
+    EXPECT_TRUE(csv->next(row));
+    EXPECT_EQ("cat", row.readAt(0));
+    EXPECT_EQ("red", row.readAt(1));
+
+    // No data beyond the second column
+    EXPECT_THROW(row.readAt(2), CSVFileError);
+
+    // Second row is correct.
+    ASSERT_TRUE(csv->next(row));
+    EXPECT_EQ("lion", row.readAt(0));
+    EXPECT_EQ("green", row.readAt(1));
+
+    // No data beyond the second column
+    EXPECT_THROW(row.readAt(2), CSVFileError);
+}
+
+
+TEST_F(VersionedCSVFileTest, rowChecking) {
+    // Create version 2.0 schema CSV file with a
+    // - valid header
+    // - row 0 has too many values
+    // - row 1 is valid
+    // - row 2 has too few values
+    writeFile("animal,color\n"
+              "cat,red,bogus_row_value\n"
+              "lion,green\n"
+              "too_few\n");
+
+    // Create our versioned file, with two columns, one for each
+    // schema version
+    boost::scoped_ptr<VersionedCSVFile> csv(new VersionedCSVFile(testfile_));
+    csv->addColumn("animal", "1.0", "");
+    csv->addColumn("color", "2.0", "blue");
+
+    // Header validation should pass, so we can open
+    ASSERT_NO_THROW(csv->open());
+
+    CSVRow row;
+    // First row has too many
+    EXPECT_FALSE(csv->next(row));
+
+    // Second row is valid
+    ASSERT_TRUE(csv->next(row));
+    EXPECT_EQ("lion", row.readAt(0));
+    EXPECT_EQ("green", row.readAt(1));
+
+    // Third row has too few
+    EXPECT_FALSE(csv->next(row));
+}
+
+} // end of anonymous namespace

+ 251 - 0
src/lib/util/versioned_csv_file.cc

@@ -0,0 +1,251 @@
+// Copyright (C) 2015 Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+#include <util/versioned_csv_file.h>
+
+namespace isc {
+namespace util {
+
+VersionedCSVFile::VersionedCSVFile(const std::string& filename)
+    : CSVFile(filename), columns_(0), valid_column_count_(0),
+      minimum_valid_columns_(0), input_header_count_(0),
+      input_schema_state_(CURRENT) {
+}
+
+VersionedCSVFile::~VersionedCSVFile() {
+}
+
+void
+VersionedCSVFile::addColumn(const std::string& name,
+                            const std::string& version,
+                            const std::string& default_value) {
+    CSVFile::addColumn(name);
+    columns_.push_back(VersionedColumnPtr(new VersionedColumn(name, version,
+                                                              default_value)));
+}
+
+void
+VersionedCSVFile::setMinimumValidColumns(const std::string& column_name) {
+    int index = getColumnIndex(column_name);
+    if (index <  0) {
+        isc_throw(VersionedCSVFileError,
+                  "setMinimumValidColumns: " << column_name
+                  << " is not defined");
+    }
+
+    minimum_valid_columns_ = index + 1;
+}
+
+size_t
+VersionedCSVFile::getMinimumValidColumns() const {
+    return (minimum_valid_columns_);
+}
+
+size_t
+VersionedCSVFile::getValidColumnCount() const {
+    return (valid_column_count_);
+}
+
+size_t
+VersionedCSVFile::getInputHeaderCount() const {
+    return (input_header_count_);
+}
+
+void
+VersionedCSVFile::open(const bool seek_to_end) {
+    if (getColumnCount() == 0) {
+        isc_throw(VersionedCSVFileError,
+                  "no schema has been defined, cannot open CSV file: "
+                  << getFilename());
+    }
+
+    CSVFile::open(seek_to_end);
+}
+
+void
+VersionedCSVFile::recreate() {
+    if (getColumnCount() == 0) {
+        isc_throw(VersionedCSVFileError,
+                  "no schema has been defined, cannot create CSV file: "
+                  << getFilename());
+    }
+
+    CSVFile::recreate();
+    // For new files they always match.
+    input_header_count_ = valid_column_count_ = getColumnCount();
+}
+
+VersionedCSVFile::InputSchemaState
+VersionedCSVFile::getInputSchemaState() const {
+    return (input_schema_state_);
+}
+
+bool
+VersionedCSVFile::needsConversion() const {
+    return (input_schema_state_ != CURRENT);
+}
+
+std::string
+VersionedCSVFile::getInputSchemaVersion() const {
+    if (getValidColumnCount() > 0) {
+        return (getVersionedColumn(getValidColumnCount() - 1)->version_);
+    }
+
+    return ("undefined");
+}
+
+std::string
+VersionedCSVFile::getSchemaVersion() const {
+    if (getColumnCount() > 0) {
+        return (getVersionedColumn(getColumnCount() - 1)->version_);
+    }
+
+    return ("undefined");
+}
+
+const VersionedColumnPtr&
+VersionedCSVFile::getVersionedColumn(const size_t index) const {
+    if (index >= getColumnCount()) {
+        isc_throw(isc::OutOfRange, "versioned column index " << index
+                  << " out of range; CSV file: " << getFilename()
+                  << " only has " << getColumnCount() << " columns");
+    }
+
+    return (columns_[index]);
+}
+
+bool
+VersionedCSVFile::next(CSVRow& row) {
+    setReadMsg("success");
+    // Use the base class to physically read the row, but skip its
+    // row validation.
+    CSVFile::next(row, true);
+    if (row == CSVFile::EMPTY_ROW()) {
+        return (true);
+    }
+    }
+
+    bool row_valid = true;
+    switch (getInputSchemaState()) {
+        case CURRENT:
+            // All rows must match the current schema
+            if (row.getValuesCount() != getColumnCount()) {
+                columnCountError(row, "must match current schema");
+                row_valid = false;
+            }
+            break;
+
+        case NEEDS_UPGRADE:
+            // The input header met the minimum column count but
+            // has fewer columns than the current schema, so:
+            // rows must not be shorter than the valid column count
+            // and not longer than the current schema
+            if (row.getValuesCount() < getValidColumnCount()) {
+                columnCountError(row, "too few columns to upgrade");
+                row_valid = false;
+            } else if (row.getValuesCount() > getColumnCount()) {
+                columnCountError(row, "too many columns to upgrade");
+                row_valid = false;
+            } else {
+                // Add any missing values
+                for (size_t index = row.getValuesCount();
+                     index < getColumnCount(); ++index) {
+                    row.append(columns_[index]->default_value_);
+                }
+            }
+            break;
+
+        case NEEDS_DOWNGRADE:
+            // The input header exceeded the current schema so:
+            // rows may be as long as the input header but must not be
+            // shorter than the current schema
+            if (row.getValuesCount() < getColumnCount()) {
+                columnCountError(row, "too few columns to downgrade");
+                row_valid = false;
+            } else if (row.getValuesCount() > getInputHeaderCount()) {
+                columnCountError(row, "too many columns to downgrade");
+                row_valid = false;
+            } else {
+                // Toss out any extra columns
+                row.trim(row.getValuesCount() - getColumnCount());
+            }
+            break;
+    }
+
+    return (row_valid);
+}
+
+void
+VersionedCSVFile::columnCountError(const CSVRow& row,
+                                   const std::string& reason) {
+    std::ostringstream s;
+    s << "Invalid number of columns: "
+      << row.getValuesCount() << " in row: '" << row
+      << "', file: '" << getFilename() << "' : " << reason;
+    setReadMsg(s.str());
+}
+
+bool
+VersionedCSVFile::validateHeader(const CSVRow& header) {
+    if (getColumnCount() == 0) {
+        isc_throw(VersionedCSVFileError,
+                  "cannot validate header, no schema has been defined");
+    }
+
+    input_header_count_ = header.getValuesCount();
+
+    // Iterate over the number of columns in the header, testing
+    // each against the defined column in the same position.
+    // If there is a mismatch, bail.
+    size_t i = 0;
+    for (  ; i < getInputHeaderCount() && i < getColumnCount(); ++i) {
+        if (getColumnName(i) != header.readAt(i)) {
+            std::ostringstream s;
+            s << " - header contains an invalid column: '"
+              << header.readAt(i) << "'";
+            setReadMsg(s.str());
+            return (false);
+        }
+    }
+
+    // If we found too few valid columns, then we cannot convert this
+    // file.  It's too old, too corrupt, or not a Kea file.
+    if (i < getMinimumValidColumns()) {
+        std::ostringstream s;
+        s << " - header has only " << i << " valid column(s), "
+          << "it must have at least " << getMinimumValidColumns();
+        setReadMsg(s.str());
+        return (false);
+    }
+
+    // Remember the number of valid columns we found.  When this number
+    // is less than the number of defined columns, then we have an older
+    // version of the lease file.  We'll need this value to validate
+    // and upgrade data rows.
+    valid_column_count_ = i;
+
+    if (getValidColumnCount() < getColumnCount()) {
+        input_schema_state_ = NEEDS_UPGRADE;
+    } else if (getInputHeaderCount() > getColumnCount()) {
+        // If there are more values in the header than defined columns,
+        // then we'll drop the extra.  This allows someone to attempt to
+        // downgrade if need be.
+        input_schema_state_ = NEEDS_DOWNGRADE;
+        std::ostringstream s;
+        s << " - header has " << getInputHeaderCount() - getColumnCount()
+          << " extra column(s), these will be ignored";
+        setReadMsg(s.str());
+    }
+
+    return (true);
+}
+
+} // end of isc::util namespace
+} // end of isc namespace

+ 326 - 0
src/lib/util/versioned_csv_file.h

@@ -0,0 +1,326 @@
+// Copyright (C) 2015 Internet Systems Consortium, Inc. ("ISC")
+//
+// Permission to use, copy, modify, and/or distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
+// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
+// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
+// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+// PERFORMANCE OF THIS SOFTWARE.
+
+#ifndef VERSIONED_CSV_FILE_H
+#define VERSIONED_CSV_FILE_H
+
+#include <util/csv_file.h>
+
+namespace isc {
+namespace util {
+
+/// @brief Exception thrown when an error occurs during CSV file processing.
+class VersionedCSVFileError : public Exception {
+public:
+    VersionedCSVFileError(const char* file, size_t line, const char* what) :
+        isc::Exception(file, line, what) { };
+};
+
+/// @brief Contains the metadata for a single column in a file.
+class VersionedColumn {
+public:
+    /// @brief Constructor
+    ///
+    /// @param name Name of the column.
+    /// @param version Text representation of the schema version in which
+    /// this column first appeared.
+    /// @param default_value The value the column should be assigned if it
+    /// is not present in a data row. It defaults to an empty string, ""
+    VersionedColumn(const std::string& name, const std::string& version,
+               const std::string& default_value = "")
+        : name_(name), version_(version), default_value_(default_value) {
+    };
+
+    /// @brief Destructor
+    virtual ~VersionedColumn(){};
+
+    /// @brief Name of the column.
+    std::string name_;
+
+    /// @brief Text representation of the schema version in which
+    /// this column first appeared.
+    std::string version_;
+
+    /// @brief The default value the column should be assigned if it
+    /// is not present in a data row.
+    std::string default_value_;
+};
+
+/// @brief Defines a smart pointer to VersionedColumn
+typedef boost::shared_ptr<VersionedColumn> VersionedColumnPtr;
+
+/// @brief Implements a CSV file that supports multiple versions of
+/// the file's "schema".  This allows files with older schemas to be
+/// upgraded to newer schemas as they are being read.  The file's schema
+/// is defined through a list of column descriptors, or @ref
+/// isc::util::VersionedColumn(s). Each descriptor contains metadata describing
+/// the column, consisting of the column's name, the version label in which
+/// the column was added to the schema, and a default value to be used if the
+/// column is missing from the file.  Note that the column descriptors are
+/// defined in the order they occur in the file, when reading a row from left
+/// to right.  This also assumes that when a new version of the schema
+/// evolves, all new columns are added at the end of the row.  In other
+/// words, the order of the columns reflects not only the order in which
+/// they occur in a row but also the order they were added to the schema.
+/// Conceptually, the entire list of columns defined constitutes the current
+/// schema.  Earlier schema versions are therefore subsets of this list.
+/// Creating the schema is done by calling VersionedCSVFile::addColumn()
+/// for each column.  Note that the schema must be defined prior to opening
+/// the file.
+///
+/// The first row of the file is always the header row and is a comma-separated
+/// list of the names of the columns in the file.  This row is used when
+/// opening the file via @ref VersionedCSVFile::open() to identify its schema
+/// version so that it may be read correctly.  This is done by comparing
+/// the columns found in the header to the columns defined in the schema. The
+/// columns must match both by name and the order in which they occur.
+///
+/// -# If there are fewer columns in the header than in the schema, the file
+/// is presumed to be an earlier schema version and will be upgraded as it is
+/// read.  There is an ability to mark a specific column as being the minimum
+/// column which must be present, see @ref VersionedCSVFile::
+/// setMinimumValidColumns().  If the header columns do not match up to this
+/// minimum column, the file is presumed to be too old to upgrade and the
+/// open will fail.  A valid, upgradable file will have an input schema
+/// state of VersionedCSVFile::NEEDS_UPGRADE.
+///
+/// -# If there is a mismatch between a found column name and the column name
+/// defined for that position in the row, the file is presumed to be invalid
+/// and the open will fail.
+///
+/// -# If the content of the header matches exactly the columns defined in
+/// the schema, the file is considered to match the schema exactly and the
+/// input schema state will be VersionedCSVFile::CURRENT.
+///
+/// -# If there are columns in the header beyond all of the columns defined in
+/// the schema (i.e. the schema is a subset of the header), then the file
+/// is presumed to be from a newer version of Kea and can be downgraded. The
+/// input schema state of the file will be set to
+/// VersionedCSVFile::NEEDS_DOWNGRADE.
+///
+/// After successfully opening a file, rows are read one at a time via
+/// @ref VersionedCSVFile::next() and handled according to the input schema
+/// state.  Each data row is expected to have at least the same number of
+/// columns as were found in the header.  Any row which has fewer values is
+/// discarded as invalid.  Similarly, any row which is found to have more
+/// values than were found in the header is discarded as invalid.
+///
+/// When upgrading a row, the value for each missing column is filled in
+/// with the default value specified by that column's descriptor.  When
+/// downgrading a row, extraneous values are dropped from the row.
+///
+/// It is important to note that upgrading or downgrading a file does NOT
+/// alter the physical file itself.  Rather the conversion occurs after the
+/// raw data has been read but before it is passed to the caller.
+///
+/// Also note that there is currently no support for writing out a file in
+/// anything other than the current schema.
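+///
+/// A minimal usage sketch (illustrative only; the file name, column names,
+/// versions and defaults below are hypothetical, not a real Kea schema):
+///
+/// @code
+///     VersionedCSVFile csv("animals.csv");
+///
+///     // Define the current schema: "animal" existed in 1.0, "color" was
+///     // added in 2.0 and defaults to "blue" when upgrading older files.
+///     csv.addColumn("animal", "1.0", "");
+///     csv.addColumn("color", "2.0", "blue");
+///
+///     // Files whose header does not reach "animal" cannot be upgraded.
+///     csv.setMinimumValidColumns("animal");
+///
+///     // Opening parses the header and determines the input schema state:
+///     // CURRENT, NEEDS_UPGRADE, or NEEDS_DOWNGRADE.
+///     csv.open();
+///     if (csv.needsConversion()) {
+///         // rows returned by next() are converted in memory; the
+///         // physical file itself is left untouched
+///     }
+/// @endcode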
+class VersionedCSVFile : public CSVFile {
+public:
+
+    /// @brief Possible input file schema states.
+    /// Used to categorize the input file's schema, relative to the defined
+    /// schema.
+    enum InputSchemaState {
+        CURRENT,
+        NEEDS_UPGRADE,
+        NEEDS_DOWNGRADE
+    };
+
+    /// @brief Constructor.
+    ///
+    /// @param filename CSV file name.
+    VersionedCSVFile(const std::string& filename);
+
+    /// @brief Destructor
+    virtual ~VersionedCSVFile();
+
+    /// @brief Adds metadata for a single column to the schema.
+    ///
+    /// This method appends a new column description to the file's schema.
+    /// Note this does not cause anything to be written to the physical file.
+    /// The name of the column will be placed in the CSV header when a new
+    /// file is created by calling the @c recreate or @c open function.
+    ///
+    /// @param name Name of the column.
+    /// @param version  Text representation of the schema version in which
+    /// this column first appeared.
+    /// @param default_value value the missing column should be given during
+    /// an upgrade.  It defaults to an empty string, ""
+    ///
+    /// @throw CSVFileError if a column with the specified name exists.
+    void addColumn(const std::string& name, const std::string& version,
+                   const std::string& default_value = "");
+
+    /// @brief Sets the minimum number of valid columns based on a given column
+    ///
+    /// @param column_name Name of the column which positionally represents
+    /// the minimum set of columns which must be present in a file for it
+    /// to be considered valid.
+    void setMinimumValidColumns(const std::string& column_name);
+
+    /// @brief Returns the minimum number of columns which must be present
+    /// for the file to be considered valid.
+    size_t getMinimumValidColumns() const;
+
+    /// @brief Returns the number of columns found in the input header
+    size_t getInputHeaderCount() const;
+
+    /// @brief Returns the number of valid columns found in the header
+    /// For newly created files this will always match the number of defined
+    /// columns (i.e. getColumnCount()).  For existing files, this will be
+    /// the number of columns in the header that match the defined columns.
+    /// When this number is less than getColumnCount() it means the input file
+    /// is from an earlier schema.  This value is zero until the file has
+    /// been opened.
+    size_t getValidColumnCount() const;
+
+    /// @brief Opens existing file or creates a new one.
+    ///
+    /// This function will try to open an existing file if the file has a
+    /// size greater than 0. If the file doesn't exist or has a size of 0,
+    /// the file is recreated. If the existing file has been opened, the
+    /// header is parsed and validated against the schema.
+    /// By default, the data pointer in the file is set to the beginning of
+    /// the first data row. In order to retrieve the row contents the @c next
+    /// function should be called. If the @c seek_to_end parameter is set to
+    /// true, the file will be opened and the internal pointer will be set
+    /// to the end of the file.
+    ///
+    /// @param seek_to_end A boolean value which indicates if the input and
+    /// output file pointer should be set at the end of the file.
+    ///
+    /// @throw VersionedCSVFileError if the schema has not been defined,
+    /// CSVFileError when an IO operation fails or the header fails to validate.
+    virtual void open(const bool seek_to_end = false);
+
+    /// @brief Creates a new CSV file.
+    ///
+    /// The file creation will fail if there are no columns specified.
+    /// Otherwise, this function will write the header to the file.
+    /// In order to write rows to opened file, the @c append function
+    /// should be called.
+    ///
+    /// @throw VersionedCSVFileError if the schema has not been defined,
+    /// CSVFileError if an IO operation fails
+    virtual void recreate();
+
+    /// @brief Reads the next row from the file.
+    ///
+    /// This function will return the @c CSVRow object representing a
+    /// parsed row if parsing is successful. If the end of file has been
+    /// reached, the empty row is returned (a row containing no values).
+    ///
+    /// 1. If the row has fewer values than were found in the header it is
+    /// discarded as invalid.
+    ///
+    /// 2. If the row is found to have more values than are defined in the
+    /// schema it is discarded as invalid.
+    ///
+    /// When a valid row has fewer than the defined number of columns, the
+    /// value for each missing column is filled in with the default value
+    /// specified by that column's descriptor.
+    ///
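+    /// A sketch of a typical read loop (illustrative only; it assumes a
+    /// VersionedCSVFile named @c csv whose schema has been defined and
+    /// which has been opened, and the base class @c getReadMsg() accessor
+    /// for the message set via @c setReadMsg()):
+    ///
+    /// @code
+    ///     CSVRow row;
+    ///     while (true) {
+    ///         if (csv.next(row)) {
+    ///             if (row == CSVFile::EMPTY_ROW()) {
+    ///                 break;    // end of file reached
+    ///             }
+    ///             // process the (possibly converted) row
+    ///         } else {
+    ///             // invalid row; the reason is available via csv.getReadMsg()
+    ///         }
+    ///     }
+    /// @endcode
+    ///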
+    /// @param [out] row Object receiving the parsed CSV row.
+    ///
+    /// @return true if the row has been read and validated; false if
+    /// validation failed.
+    bool next(CSVRow& row);
+
+    /// @brief Returns the schema version of the physical file
+    ///
+    /// @return text version of the schema found or string "undefined" if the
+    /// file has not been opened
+    std::string getInputSchemaVersion() const;
+
+    /// @brief Text version of the current schema supported by the file's
+    /// metadata
+    ///
+    /// @return text version info assigned to the last column in the list of
+    /// defined columns, or the string "undefined" if no columns have been
+    /// defined.
+    std::string getSchemaVersion() const;
+
+    /// @brief Fetch the column descriptor for a given index
+    ///
+    /// @param index index within the list of columns of the desired column
+    /// @return a pointer to the VersionedColumn at the given index
+    /// @throw OutOfRange exception if the index is invalid
+    const VersionedColumnPtr& getVersionedColumn(const size_t index) const;
+
+    /// @brief Fetches the state of the input file's schema
+    ///
+    /// Reflects the state of the input file's schema relative to the
+    /// defined schema as an enum, InputSchemaState.
+    ///
+    /// @return VersionedCSVFile::CURRENT if the input file schema matches
+    /// the defined schema, NEEDS_UPGRADE if the input file schema is older,
+    /// and NEEDS_DOWNGRADE if it is newer
+    enum InputSchemaState getInputSchemaState() const;
+
+    /// @brief Returns true if the input file schema state is not CURRENT
+    bool needsConversion() const;
+
+protected:
+
+    /// @brief Validates the header of a VersionedCSVFile
+    ///
+    /// This function is called internally when reading in an existing
+    /// file.  It parses the header row of the file, comparing each value
+    /// in succession against the defined list of columns.  If the header
+    /// contains too few matching columns (i.e. less than @c
+    /// minimum_valid_columns_) or too many (more than the number of defined
+    /// columns), the file is presumed to be either too old, too new, or too
+    /// corrupt to process.  Otherwise it retains the number of valid columns
+    /// found and deems the header valid.
+    ///
+    /// @param header A row holding a header.
+    /// @return true if header matches the columns; false otherwise.
+    virtual bool validateHeader(const CSVRow& header);
+
+    /// @brief Convenience method for adding an error message
+    ///
+    /// Constructs an error message indicating that the number of columns
+    /// in a given row is wrong and why, then stores it via @c setReadMsg().
+    ///
+    /// @param row The row in error
+    /// @param reason An explanation as to why the row column count is wrong
+    void columnCountError(const CSVRow& row, const std::string& reason);
+
+private:
+    /// @brief Holds the collection of column descriptors
+    std::vector<VersionedColumnPtr> columns_;
+
+    /// @brief Number of valid columns present in the input file. If this is
+    /// less than the number of columns defined, this implies the input file
+    /// is from an earlier version of the code.
+    size_t valid_column_count_;
+
+    /// @brief Minimum number of valid columns an input file must contain.
+    /// If an input file does not meet this number it cannot be upgraded.
+    size_t minimum_valid_columns_;
+
+    /// @brief The number of columns found in the input header row
+    /// This value represents the number of columns present in the header,
+    /// valid or otherwise.
+    size_t input_header_count_;
+
+    /// @brief The state of the input schema in relation to the current schema
+    enum InputSchemaState input_schema_state_;
+};
+
+
+} // namespace isc::util
+} // namespace isc
+
+#endif // VERSIONED_CSV_FILE_H