
[5076] Bison parser for Agent implemented.

Tomek Mrugalski 8 years ago
commit 3cf4f98119

+ 2 - 1
doc/Makefile.am

@@ -6,7 +6,8 @@ EXTRA_DIST += devel/contribute.dox
 EXTRA_DIST += devel/mainpage.dox
 EXTRA_DIST += devel/unit-tests.dox
 
-nobase_dist_doc_DATA  = examples/ddns/sample1.json
+nobase_dist_doc_DATA  = examples/agent/simple.json
+nobase_dist_doc_DATA += examples/ddns/sample1.json
 nobase_dist_doc_DATA += examples/ddns/template.json
 nobase_dist_doc_DATA += examples/kea4/advanced.json
 nobase_dist_doc_DATA += examples/kea4/backends.json

+ 80 - 0
doc/examples/agent/simple.json

@@ -0,0 +1,80 @@
+// This is a simple example of a configuration for the Control-Agent (CA), or simply
+// the Agent. This server provides a RESTful interface to all Kea servers.
+{
+    "Control-agent":
+    {
+	// We need to specify where the agent should listen for incoming HTTP
+	// queries. Note that the agent does not provide SSL or TLS protection
+	// on its own, so limiting the traffic to localhost is a good idea.
+	"http-host": "localhost",
+
+	// Another mandatory parameter is the HTTP port.
+	"http-port": 8000,
+
+	// This map specifies where the control channel of each server is
+	// configured to listen. See the 'control-socket' object in the respective
+	// servers. At this time the only supported socket type is "unix".
+	// Make sure that the Agent's and the respective servers' configurations
+	// match exactly, otherwise they won't be able to communicate.
+	"control-sockets":
+	{
+	    // This is how the Agent can communicate with the DHCPv4 server.
+	    "dhcp4-server":
+	    {
+		"socket-type": "unix",
+		"socket-name": "/path/to/the/unix/socket-v4"
+	    },
+
+	    // Location of the DHCPv6 command channel socket.
+	    "dhcp6-server":
+	    {
+		"socket-type": "unix",
+		"socket-name": "/path/to/the/unix/socket-v6"
+	    },
+
+	    // DHCP-DDNS (nicknamed D2) does not support the command
+	    // channel yet, but we hope this will change in the
+	    // future.
+	    "d2-server":
+	    {
+		"socket-type": "unix",
+		"socket-name": "/path/to/the/unix/socket-d2"
+	    }
+	},
+
+	// The CA is able to load hook libraries that augment its operation.
+	// The primary functionality is the ability to add new commands.
+	"hooks-libraries": [
+	    // Hook libraries list may contain more than one library.
+	    {
+		// The only necessary parameter is the library filename.
+		"library": "/opt/local/control-agent-commands.so",
+
+		// Some libraries may support parameters. Make sure you
+		// type this section carefully, as the CA does not validate
+		// it (because the format is library specific).
+		"parameters": {
+		    "param1": "foo"
+		}
+	    }
+	]
+
+    },
+
+    // Similar to other Kea components, CA also uses logging.
+    "Logging":
+    {
+	"loggers": [
+	    {
+		"name": "kea-ctrl-agent",
+		"output_options": [
+		    {
+			"output": "/var/log/kea-ctrl-agent.log"
+		    }
+		],
+		"severity": "INFO",
+		"debuglevel": 0
+	    }
+	]
+    }
+}
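
A minimal sketch of loading the example above through the parser introduced by this
commit (illustrative caller code, not part of the commit; the relative path is an
assumption):

    #include <agent/parser_context.h>
    #include <cc/data.h>
    #include <iostream>

    int main() {
        try {
            isc::agent::ParserContext ctx;
            // PARSER_AGENT expects the full configuration: an outer map with the
            // "Control-agent" object in it, exactly as in simple.json above.
            isc::data::ElementPtr cfg =
                ctx.parseFile("doc/examples/agent/simple.json",
                              isc::agent::ParserContext::PARSER_AGENT);
            std::cout << cfg->str() << std::endl;
        } catch (const std::exception& ex) {
            std::cerr << ex.what() << std::endl;
            return 1;
        }
        return 0;
    }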

+ 31 - 0
src/bin/agent/Makefile.am

@@ -46,6 +46,9 @@ libagent_la_SOURCES  = ctrl_agent_cfg_mgr.cc ctrl_agent_cfg_mgr.h
 libagent_la_SOURCES += ctrl_agent_controller.cc ctrl_agent_controller.h
 libagent_la_SOURCES += ctrl_agent_log.cc ctrl_agent_log.h
 libagent_la_SOURCES += ctrl_agent_process.cc ctrl_agent_process.h
+libagent_la_SOURCES += agent_parser.cc agent_parser.h
+libagent_la_SOURCES += parser_context.cc parser_context.h parser_context_decl.h
+libagent_la_SOURCES += agent_lexer.ll
 
 nodist_libagent_la_SOURCES = ctrl_agent_messages.h ctrl_agent_messages.cc
 EXTRA_DIST += ctrl_agent_messages.mes
@@ -76,3 +79,31 @@ kea_ctrl_agent_LDADD += $(LOG4CPLUS_LIBS) $(CRYPTO_LIBS) $(BOOST_LIBS)
 
 kea_ctrl_agent_LDFLAGS = $(AM_LDFLAGS) $(CRYPTO_LDFLAGS)
 
+
+
+if GENERATE_PARSER
+
+parser: agent_lexer.cc location.hh position.hh stack.hh agent_parser.cc agent_parser.h
+	@echo "Flex/bison files regenerated"
+
+# --- Flex/Bison stuff below --------------------------------------------------
+# When debugging grammar issues, it's useful to add -v to bison parameters.
+# bison will generate a parser.output file that explains the whole grammar.
+# It can be used to manually follow what's going on in the parser.
+# This is especially useful if yydebug_ is set to 1, as that variable
+# will cause the parser to print out its internal state.
+# Call flex with -s to check that the default rule can be suppressed.
+# Call bison with -W to get warnings such as unmarked empty rules.
+# Note that the 'register' keyword, deprecated in C++11, is still used by flex < 2.6.0.
+location.hh position.hh stack.hh agent_parser.cc agent_parser.h: agent_parser.yy
+	$(YACC) --defines=agent_parser.h --report=all --report-file=agent_parser.report -o agent_parser.cc agent_parser.yy
+
+agent_lexer.cc: agent_lexer.ll
+	$(LEX) --prefix parser6_ -o agent_lexer.cc agent_lexer.ll
+
+else
+
+parser location.hh position.hh stack.hh agent_parser.cc agent_parser.h agent_lexer.cc:
+	@echo Parser generation disabled. Configure with --enable-generate-parser to enable it.
+
+endif

+ 641 - 0
src/bin/agent/agent_lexer.ll

@@ -0,0 +1,641 @@
+/* Copyright (C) 2017 Internet Systems Consortium, Inc. ("ISC")
+
+   This Source Code Form is subject to the terms of the Mozilla Public
+   License, v. 2.0. If a copy of the MPL was not distributed with this
+   file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+%{ /* -*- C++ -*- */
+#include <cerrno>
+#include <climits>
+#include <cstdlib>
+#include <string>
+#include <agent/parser_context.h>
+#include <asiolink/io_address.h>
+#include <boost/lexical_cast.hpp>
+#include <exceptions/exceptions.h>
+#include <cc/dhcp_config_error.h>
+
+// Work around an incompatibility in flex (at least versions
+// 2.5.31 through 2.5.33): it generates code that does
+// not conform to C89.  See Debian bug 333231
+// <http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=333231>.
+# undef yywrap
+# define yywrap() 1
+
+namespace {
+
+bool start_token_flag = false;
+
+isc::agent::ParserContext::ParserType start_token_value;
+unsigned int comment_start_line = 0;
+
+using namespace isc;
+using isc::agent::AgentParser;
+
+};
+
+// To avoid the call to exit... oops!
+#define YY_FATAL_ERROR(msg) isc::agent::ParserContext::fatal(msg)
+%}
+
+/* noyywrap disables automatic rewinding for the next file to parse. Since we
+   always parse only a single string, there's no need to do any wraps. And
+   using yywrap requires linking with -lfl, which provides the default yywrap
+   implementation that always returns 1 anyway. */
+%option noyywrap
+
+/* nounput simplifies the lexer, by removing support for putting a character
+   back into the input stream. We never use such capability anyway. */
+%option nounput
+
+/* batch means that we'll never use the generated lexer interactively. */
+%option batch
+
+/* Avoid static global variables so that the generated lexer remains usable from C++. */
+/* As a last resort, use %option reentrant. */
+
+/* Enables debug mode. To see the debug messages, one needs to also set
+   yy_flex_debug to 1, then the debug messages will be printed on stderr. */
+%option debug
+
+/* I have no idea what this option does, except it was specified in the bison
+   examples and Postgres folks added it to remove gcc 4.3 warnings. Let's
+   be on the safe side and keep it. */
+%option noinput
+
+%x COMMENT
+%x DIR_ENTER DIR_INCLUDE DIR_EXIT
+
+/* These are not token expressions yet, just convenience expressions that
+   can be used during actual token definitions. Note some can match
+   incorrect inputs (e.g., IP addresses) which must be checked. */
+int   \-?[0-9]+
+blank [ \t\r]
+
+UnicodeEscapeSequence           u[0-9A-Fa-f]{4}
+JSONEscapeCharacter             ["\\/bfnrt]
+JSONEscapeSequence              {JSONEscapeCharacter}|{UnicodeEscapeSequence}
+JSONStandardCharacter           [^\x00-\x1f"\\]
+JSONStringCharacter             {JSONStandardCharacter}|\\{JSONEscapeSequence}
+JSONString                      \"{JSONStringCharacter}*\"
+
+/* for errors */
+
+BadUnicodeEscapeSequence        u[0-9A-Fa-f]{0,3}[^0-9A-Fa-f]
+BadJSONEscapeSequence           [^"\\/bfnrtu]|{BadUnicodeEscapeSequence}
+ControlCharacter                [\x00-\x1f]
+ControlCharacterFill            [^"\\]|\\{JSONEscapeSequence}
+
+%{
+// This code runs each time a pattern is matched. It updates the location
+// by moving it ahead by yyleng bytes. yyleng specifies the length of the
+// currently matched token.
+#define YY_USER_ACTION  driver.loc_.columns(yyleng);
+%}
+
+%%
+
+%{
+    // This part of the code is copied verbatim to the top
+    // of the generated yylex function. Explanation:
+    // http://www.gnu.org/software/bison/manual/html_node/Multiple-start_002dsymbols.html
+
+    // Code run each time yylex is called.
+    driver.loc_.step();
+
+    // We currently have 3 entry points defined:
+    // START_JSON - which expects any valid JSON
+    // START_AGENT - which expects the full configuration (with the outer map and the
+    //               Control-agent object in it).
+    // START_SUB_AGENT - which expects only the content of the Control-agent; this is
+    //                   primarily useful for testing.
+    if (start_token_flag) {
+        start_token_flag = false;
+        switch (start_token_value) {
+        case ParserContext::PARSER_JSON:
+        default:
+            return isc::agent::AgentParser::make_START_JSON(driver.loc_);
+        case ParserContext::PARSER_AGENT:
+            return isc::agent::AgentParser::make_START_AGENT(driver.loc_);
+        case ParserContext::PARSER_SUB_AGENT:
+            return isc::agent::AgentParser::make_START_SUB_AGENT(driver.loc_);
+        }
+    }
+%}
+
+#.* ;
+
+"//"(.*) ;
+
+"/*" {
+  BEGIN(COMMENT);
+  comment_start_line = driver.loc_.end.line;
+}
+
+<COMMENT>"*/" BEGIN(INITIAL);
+<COMMENT>. ;
+<COMMENT><<EOF>> {
+    isc_throw(ParseError, "Comment not closed. (/* in line " << comment_start_line);
+}
+
+"<?" BEGIN(DIR_ENTER);
+<DIR_ENTER>"include" BEGIN(DIR_INCLUDE);
+<DIR_INCLUDE>\"([^\"\n])+\" {
+    // Include directive.
+
+    // Extract the filename.
+    std::string tmp(yytext+1);
+    tmp.resize(tmp.size() - 1);
+
+    driver.includeFile(tmp);
+}
+<DIR_ENTER,DIR_INCLUDE,DIR_EXIT><<EOF>> {
+    isc_throw(ParseError, "Directive not closed.");
+}
+<DIR_EXIT>"?>" BEGIN(INITIAL);
+
+
+<*>{blank}+   {
+    // Ok, we found whitespace. Let's ignore it and update the loc variable.
+    driver.loc_.step();
+}
+
+<*>[\n]+      {
+    // Newline found. Let's update the location and continue.
+    driver.loc_.lines(yyleng);
+    driver.loc_.step();
+}
+
+
+\"Control-agent\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_CONTROL_AGENT(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("Control-agent", driver.loc_);
+    }
+}
+
+\"http-host\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_HTTP_HOST(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("http-host", driver.loc_);
+    }
+}
+
+\"http-port\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_HTTP_PORT(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("http-port", driver.loc_);
+    }
+}
+
+\"dhcp4-server\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_DHCP4_SERVER(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("dhcp4-server", driver.loc_);
+    }
+}
+
+\"dhcp6-server\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_DHCP6_SERVER(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("dhcp6-server", driver.loc_);
+    }
+}
+
+\"d2-server\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_D2_SERVER(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("d2-server", driver.loc_);
+    }
+}
+
+(\"unix\") {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_UNIX(driver.loc_);
+    }
+    std::string tmp(yytext+1);
+    tmp.resize(tmp.size() - 1);
+    return AgentParser::make_STRING(tmp, driver.loc_);
+}
+
+\"name\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_NAME(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("name", driver.loc_);
+    }
+}
+
+
+\"Logging\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_LOGGING(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("Logging", driver.loc_);
+    }
+}
+
+\"loggers\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_LOGGERS(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("loggers", driver.loc_);
+    }
+}
+
+\"output_options\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_OUTPUT_OPTIONS(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("output_options", driver.loc_);
+    }
+}
+
+\"output\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_OUTPUT(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("output", driver.loc_);
+    }
+}
+
+\"debuglevel\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_DEBUGLEVEL(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("debuglevel", driver.loc_);
+    }
+}
+
+\"severity\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_SEVERITY(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("severity", driver.loc_);
+    }
+}
+
+\"hooks-libraries\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_HOOKS_LIBRARIES(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("hooks-libraries", driver.loc_);
+    }
+}
+
+
+\"parameters\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_PARAMETERS(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("parameters", driver.loc_);
+    }
+}
+
+\"library\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_LIBRARY(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("library", driver.loc_);
+    }
+}
+
+
+\"control-sockets\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_CONTROL_SOCKETS(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("control-sockets", driver.loc_);
+    }
+}
+
+\"socket-type\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_SOCKET_TYPE(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("socket-type", driver.loc_);
+    }
+}
+
+\"socket-name\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_SOCKET_NAME(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("socket-name", driver.loc_);
+    }
+}
+
+\"Dhcp4\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_DHCP4(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("Dhcp4", driver.loc_);
+    }
+}
+
+\"Dhcp6\" {
+    if (driver.ctx_ != ParserContext::NO_KEYWORDS) {
+        return AgentParser::make_DHCP4(driver.loc_);
+    } else {
+        return AgentParser::make_STRING("Dhcp4", driver.loc_);
+    }
+}
+
+{JSONString} {
+    // A string has been matched. It contains the actual string and the surrounding
+    // double quotes. We need to get those quotes out of the way and just use the
+    // content, e.g. for "foo" we should get foo.
+    std::string raw(yytext+1);
+    size_t len = raw.size() - 1;
+    raw.resize(len);
+    std::string decoded;
+    decoded.reserve(len);
+    for (size_t pos = 0; pos < len; ++pos) {
+        int b = 0;
+        char c = raw[pos];
+        switch (c) {
+        case '"':
+            // impossible condition
+            driver.error(driver.loc_, "Bad quote in \"" + raw + "\"");
+        case '\\':
+            ++pos;
+            if (pos >= len) {
+                // impossible condition
+                driver.error(driver.loc_, "Overflow escape in \"" + raw + "\"");
+            }
+            c = raw[pos];
+            switch (c) {
+            case '"':
+            case '\\':
+            case '/':
+                decoded.push_back(c);
+                break;
+            case 'b':
+                decoded.push_back('\b');
+                break;
+            case 'f':
+                decoded.push_back('\f');
+                break;
+            case 'n':
+                decoded.push_back('\n');
+                break;
+            case 'r':
+                decoded.push_back('\r');
+                break;
+            case 't':
+                decoded.push_back('\t');
+                break;
+            case 'u':
+                // support only \u0000 to \u00ff
+                ++pos;
+                if (pos + 4 > len) {
+                    // impossible condition
+                    driver.error(driver.loc_,
+                                 "Overflow unicode escape in \"" + raw + "\"");
+                }
+                if ((raw[pos] != '0') || (raw[pos + 1] != '0')) {
+                    driver.error(driver.loc_, "Unsupported unicode escape in \"" + raw + "\"");
+                }
+                pos += 2;
+                c = raw[pos];
+                if ((c >= '0') && (c <= '9')) {
+                    b = (c - '0') << 4;
+                } else if ((c >= 'A') && (c <= 'F')) {
+                    b = (c - 'A' + 10) << 4;
+                } else if ((c >= 'a') && (c <= 'f')) {
+                    b = (c - 'a' + 10) << 4;
+                } else {
+                    // impossible condition
+                    driver.error(driver.loc_, "Not hexadecimal in unicode escape in \"" + raw + "\"");
+                }
+                pos++;
+                c = raw[pos];
+                if ((c >= '0') && (c <= '9')) {
+                    b |= c - '0';
+                } else if ((c >= 'A') && (c <= 'F')) {
+                    b |= c - 'A' + 10;
+                } else if ((c >= 'a') && (c <= 'f')) {
+                    b |= c - 'a' + 10;
+                } else {
+                    // impossible condition
+                    driver.error(driver.loc_, "Not hexadecimal in unicode escape in \"" + raw + "\"");
+                }
+                decoded.push_back(static_cast<char>(b & 0xff));
+                break;
+            default:
+                // impossible condition
+                driver.error(driver.loc_, "Bad escape in \"" + raw + "\"");
+            }
+            break;
+        default:
+            if ((c >= 0) && (c < 0x20)) {
+                // impossible condition
+                driver.error(driver.loc_, "Invalid control in \"" + raw + "\"");
+            }
+            decoded.push_back(c);
+        }
+    }
+
+    return AgentParser::make_STRING(decoded, driver.loc_);
+}
+
+\"{JSONStringCharacter}*{ControlCharacter}{ControlCharacterFill}*\" {
+    // Bad string with a forbidden control character inside
+    driver.error(driver.loc_, "Invalid control in " + std::string(yytext));
+}
+
+\"{JSONStringCharacter}*\\{BadJSONEscapeSequence}[^\x00-\x1f"]*\" {
+    // Bad string with a bad escape inside
+    driver.error(driver.loc_, "Bad escape in " + std::string(yytext));
+}
+
+\"{JSONStringCharacter}*\\\" {
+    // Bad string with an open escape at the end
+    driver.error(driver.loc_, "Overflow escape in " + std::string(yytext));
+}
+
+"["    { return AgentParser::make_LSQUARE_BRACKET(driver.loc_); }
+"]"    { return AgentParser::make_RSQUARE_BRACKET(driver.loc_); }
+"{"    { return AgentParser::make_LCURLY_BRACKET(driver.loc_); }
+"}"    { return AgentParser::make_RCURLY_BRACKET(driver.loc_); }
+","    { return AgentParser::make_COMMA(driver.loc_); }
+":"    { return AgentParser::make_COLON(driver.loc_); }
+
+{int} {
+    // An integer was found.
+    std::string tmp(yytext);
+    int64_t integer = 0;
+    try {
+        // In substring we want to use negative values (e.g. -1).
+        // In enterprise-id we need to use values up to 0xffffffff.
+        // To cover both of those use cases, we need at least
+        // int64_t.
+        integer = boost::lexical_cast<int64_t>(tmp);
+    } catch (const boost::bad_lexical_cast &) {
+        driver.error(driver.loc_, "Failed to convert " + tmp + " to an integer.");
+    }
+
+    // The parser needs the string form as double conversion is not lossless
+    return AgentParser::make_INTEGER(integer, driver.loc_);
+}
+
+[-+]?[0-9]*\.?[0-9]*([eE][-+]?[0-9]+)? {
+    // A floating point was found.
+    std::string tmp(yytext);
+    double fp = 0.0;
+    try {
+        fp = boost::lexical_cast<double>(tmp);
+    } catch (const boost::bad_lexical_cast &) {
+        driver.error(driver.loc_, "Failed to convert " + tmp + " to a floating point.");
+    }
+
+    return AgentParser::make_FLOAT(fp, driver.loc_);
+}
+
+true|false {
+    string tmp(yytext);
+    return AgentParser::make_BOOLEAN(tmp == "true", driver.loc_);
+}
+
+null {
+   return AgentParser::make_NULL_TYPE(driver.loc_);
+}
+
+(?i:true) driver.error (driver.loc_, "JSON true reserved keyword is lower case only");
+
+(?i:false) driver.error (driver.loc_, "JSON false reserved keyword is lower case only");
+
+(?i:null) driver.error (driver.loc_, "JSON null reserved keyword is lower case only");
+
+<*>.   driver.error (driver.loc_, "Invalid character: " + std::string(yytext));
+
+<<EOF>> {
+    if (driver.states_.empty()) {
+        return AgentParser::make_END(driver.loc_);
+    }
+    driver.loc_ = driver.locs_.back();
+    driver.locs_.pop_back();
+    driver.file_ = driver.files_.back();
+    driver.files_.pop_back();
+    if (driver.sfile_) {
+        fclose(driver.sfile_);
+        driver.sfile_ = 0;
+    }
+    if (!driver.sfiles_.empty()) {
+        driver.sfile_ = driver.sfiles_.back();
+        driver.sfiles_.pop_back();
+    }
+    parser6__delete_buffer(YY_CURRENT_BUFFER);
+    parser6__switch_to_buffer(driver.states_.back());
+    driver.states_.pop_back();
+
+    BEGIN(DIR_EXIT);
+}
+
+%%
+
+using namespace isc::dhcp;
+
+void
+ParserContext::scanStringBegin(const std::string& str, ParserType parser_type)
+{
+    start_token_flag = true;
+    start_token_value = parser_type;
+
+    file_ = "<string>";
+    sfile_ = 0;
+    loc_.initialize(&file_);
+    yy_flex_debug = trace_scanning_;
+    YY_BUFFER_STATE buffer;
+    buffer = parser6__scan_bytes(str.c_str(), str.size());
+    if (!buffer) {
+        fatal("cannot scan string");
+        // fatal() throws an exception so this can't be reached
+    }
+}
+
+void
+ParserContext::scanFileBegin(FILE * f,
+                              const std::string& filename,
+                              ParserType parser_type)
+{
+    start_token_flag = true;
+    start_token_value = parser_type;
+
+    file_ = filename;
+    sfile_ = f;
+    loc_.initialize(&file_);
+    yy_flex_debug = trace_scanning_;
+    YY_BUFFER_STATE buffer;
+
+    // See dhcp6_lexer.cc header for available definitions
+    buffer = parser6__create_buffer(f, 65536 /*buffer size*/);
+    if (!buffer) {
+        fatal("cannot scan file " + filename);
+    }
+    parser6__switch_to_buffer(buffer);
+}
+
+void
+ParserContext::scanEnd() {
+    if (sfile_)
+        fclose(sfile_);
+    sfile_ = 0;
+    static_cast<void>(parser6_lex_destroy());
+    // Close files
+    while (!sfiles_.empty()) {
+        FILE* f = sfiles_.back();
+        if (f) {
+            fclose(f);
+        }
+        sfiles_.pop_back();
+    }
+    // Delete states
+    while (!states_.empty()) {
+        parser6__delete_buffer(states_.back());
+        states_.pop_back();
+    }
+}
+
+void
+ParserContext::includeFile(const std::string& filename) {
+    if (states_.size() > 10) {
+        fatal("Too many nested include.");
+    }
+
+    FILE* f = fopen(filename.c_str(), "r");
+    if (!f) {
+        fatal("Can't open include file " + filename);
+    }
+    if (sfile_) {
+        sfiles_.push_back(sfile_);
+    }
+    sfile_ = f;
+    states_.push_back(YY_CURRENT_BUFFER);
+    YY_BUFFER_STATE buffer;
+    buffer = parser6__create_buffer(f, 65536 /*buffer size*/);
+    if (!buffer) {
+        fatal( "Can't scan include file " + filename);
+    }
+    parser6__switch_to_buffer(buffer);
+    files_.push_back(file_);
+    file_ = filename;
+    locs_.push_back(loc_);
+    loc_.initialize(&file_);
+
+    BEGIN(INITIAL);
+}
+
+namespace {
+/// To avoid unused function error
+class Dummy {
+    // cppcheck-suppress unusedPrivateFunction
+    void dummy() { yy_fatal_error("Fix me: how to disable its definition?"); }
+};
+}
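
The keyword rules above return keyword tokens only outside the NO_KEYWORDS context;
otherwise the same text comes back as a plain STRING. A short sketch of the practical
effect (illustrative caller code, not part of the commit), using the ParserContext
interface added later in this commit:

    #include <agent/parser_context.h>
    #include <cc/data.h>
    #include <iostream>
    #include <string>

    int main() {
        const std::string txt =
            "{ \"Control-agent\": { \"no-such-parameter\": 1 } }";

        // Under PARSER_JSON the lexer stays in NO_KEYWORDS mode, so every quoted
        // string (including "Control-agent") is just a STRING token and the text
        // is accepted as generic JSON.
        isc::agent::ParserContext json_ctx;
        isc::data::ElementPtr generic =
            json_ctx.parseString(txt, isc::agent::ParserContext::PARSER_JSON);
        std::cout << generic->str() << std::endl;

        // Under PARSER_AGENT keywords are recognized and the syntax-checking
        // grammar rejects the unknown parameter (via the unknown_map_entry rule).
        try {
            isc::agent::ParserContext agent_ctx;
            agent_ctx.parseString(txt, isc::agent::ParserContext::PARSER_AGENT);
        } catch (const std::exception& ex) {
            std::cerr << ex.what() << std::endl;
        }
        return 0;
    }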

+ 553 - 0
src/bin/agent/agent_parser.yy

@@ -0,0 +1,553 @@
+/* Copyright (C) 2017 Internet Systems Consortium, Inc. ("ISC")
+
+   This Source Code Form is subject to the terms of the Mozilla Public
+   License, v. 2.0. If a copy of the MPL was not distributed with this
+   file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+%skeleton "lalr1.cc" /* -*- C++ -*- */
+%require "3.0.0"
+%defines
+%define parser_class_name {AgentParser}
+%define api.prefix {agent_}
+%define api.token.constructor
+%define api.value.type variant
+%define api.namespace {isc::agent}
+%define parse.assert
+%code requires
+{
+#include <string>
+#include <cc/data.h>
+#include <boost/lexical_cast.hpp>
+#include <agent/parser_context_decl.h>
+
+using namespace isc::agent;
+using namespace isc::data;
+using namespace std;
+}
+// The parsing context.
+%param { isc::agent::ParserContext& ctx }
+%locations
+%define parse.trace
+%define parse.error verbose
+%code
+{
+#include <agent/parser_context.h>
+}
+
+
+%define api.token.prefix {TOKEN_}
+// Tokens in an order which makes sense and is related to the intended use.
+// Actual regexps for tokens are defined in agent_lexer.ll.
+%token
+  END  0  "end of file"
+  COMMA ","
+  COLON ":"
+  LSQUARE_BRACKET "["
+  RSQUARE_BRACKET "]"
+  LCURLY_BRACKET "{"
+  RCURLY_BRACKET "}"
+  NULL_TYPE "null"
+
+  CONTROL_AGENT "Control-agent"
+  CONTROL_SOCKETS "control-sockets"
+  HTTP_HOST "http-host"
+  HTTP_PORT "http-port"
+  DHCP4_SERVER "dhcp4-server"
+  DHCP6_SERVER "dhcp6-server"
+  D2_SERVER "d2-server"
+
+  HOOKS_LIBRARIES "hooks-libraries"
+  LIBRARY "library"
+  PARAMETERS "parameters"
+
+  SOCKET_TYPE "socket-type"
+  SOCKET_NAME "socket-name"
+
+  UNIX "unix"
+
+  LOGGING "Logging"
+  LOGGERS "loggers"
+  OUTPUT_OPTIONS "output_options"
+  OUTPUT "output"
+  DEBUGLEVEL "debuglevel"
+  SEVERITY "severity"
+  NAME "name"
+
+  DHCP4 "Dhcp4"
+  DHCP6 "Dhcp6"
+  DHCPDDNS "DhcpDdns"
+
+  // Not real tokens, just a way to signal what the parser is expected to
+  // parse. This defines the starting point. It can either be the full grammar
+  // (START_AGENT), the part of the grammar related to the Control-agent (START_SUB_AGENT)
+  // or any valid JSON (START_JSON).
+  START_JSON
+  START_AGENT
+  START_SUB_AGENT
+;
+
+%token <std::string> STRING "constant string"
+%token <int64_t> INTEGER "integer"
+%token <double> FLOAT "floating point"
+%token <bool> BOOLEAN "boolean"
+
+%type <ElementPtr> value
+%type <ElementPtr> socket_type_value
+
+%printer { yyoutput << $$; } <*>;
+
+%%
+
+// The whole grammar starts with a map, because the config file
+// consists of Control-agent, DhcpX, Logging and DhcpDdns entries in one big { }.
+%start start;
+
+// The starting token can be one of those listed below. Note these are
+// "fake" tokens. They're produced by the lexer before any input text
+// is parsed.
+start: START_JSON      { ctx.ctx_ = ctx.NO_KEYWORDS; } json
+     | START_AGENT     { ctx.ctx_ = ctx.KEYWORDS; } agent_syntax_map
+     | START_SUB_AGENT { ctx.ctx_ = ctx.KEYWORDS; } sub_agent 
+     ;
+
+// This rule defines a "shortcut". Instead of specifying the whole structure
+// expected by the full grammar, we can tell the parser to start from the content of
+// the Control-agent. This is very useful for unit-testing, so we don't need
+// to repeat the outer map and the "Control-agent" map. We can simply provide
+// the contents of that map.
+sub_agent: LCURLY_BRACKET {
+    // Parse the Control-agent map
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.push_back(m);
+} global_params RCURLY_BRACKET {
+    // parsing completed
+};
+
+// --- generic JSON parser -----------------------------------------------------
+
+// json expression can be a value. What value means is defined below.
+json: value {
+    // Push back the JSON value on the stack
+    ctx.stack_.push_back($1);
+};
+
+// Rules for value. This can be one of the primary types allowed in JSON.
+value: INTEGER { $$ = ElementPtr(new IntElement($1, ctx.loc2pos(@1))); }
+     | FLOAT { $$ = ElementPtr(new DoubleElement($1, ctx.loc2pos(@1))); }
+     | BOOLEAN { $$ = ElementPtr(new BoolElement($1, ctx.loc2pos(@1))); }
+     | STRING { $$ = ElementPtr(new StringElement($1, ctx.loc2pos(@1))); }
+     | NULL_TYPE { $$ = ElementPtr(new NullElement(ctx.loc2pos(@1))); }
+     | map { $$ = ctx.stack_.back(); ctx.stack_.pop_back(); }
+     | list_generic { $$ = ctx.stack_.back(); ctx.stack_.pop_back(); }
+     ;
+
+// Rule for map. It will start with {, have some content and will end with }.
+map: LCURLY_BRACKET {
+    // This code is executed when we're about to start parsing
+    // the content of the map
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.push_back(m);
+} map_content RCURLY_BRACKET {
+    // map parsing completed. If we ever want to do any wrap up
+    // (maybe some sanity checking), this would be the best place
+    // for it.
+};
+
+// Rule for map content. In some cases it is allowed to have an empty map,
+// so we should say that explicitly. In most cases, though, there will
+// be some actual content inside. That's defined by not_empty_map
+map_content: %empty // empty map
+           | not_empty_map
+           ;
+
+// Rule for content of the map. It can have one of two formats:
+// 1) string: value
+// 2) non_empty_map , string: value
+// The first case covers a single entry, while the second case
+// covers all longer lists recursively.
+not_empty_map: STRING COLON value {
+                  // map containing a single entry
+                  ctx.stack_.back()->set($1, $3);
+                  }
+             | not_empty_map COMMA STRING COLON value {
+                  // map consisting of a shorter map followed by
+                  // comma and string:value
+                  ctx.stack_.back()->set($3, $5);
+                  }
+             ;
+
+list_generic: LSQUARE_BRACKET {
+    ElementPtr l(new ListElement(ctx.loc2pos(@1)));
+    ctx.stack_.push_back(l);
+} list_content RSQUARE_BRACKET {
+};
+
+list_content: %empty // Empty list
+            | not_empty_list
+            ;
+
+not_empty_list: value {
+                  // List consisting of a single element.
+                  ctx.stack_.back()->add($1);
+                  }
+              | not_empty_list COMMA value {
+                  // List ending with , and a value.
+                  ctx.stack_.back()->add($3);
+                  }
+              ;
+
+// --- generic JSON parser ends here -------------------------------------------
+
+// --- syntax checking parser starts here --------------------------------------
+
+// Unknown keyword in a map. This clever rule can be added to any map
+// if you want to have a nice expression printed when unknown (mistyped?)
+// parameter is found.
+unknown_map_entry: STRING COLON {
+    const std::string& keyword = $1;
+    error(@1,
+          "got unexpected keyword \"" + keyword + "\" in map.");
+};
+
+// This defines the top-level { } that holds Control-agent, Dhcp6, Dhcp4,
+// DhcpDdns or Logging objects.
+agent_syntax_map: LCURLY_BRACKET {
+    // This code is executed when we're about to start parsing
+    // the content of the map
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.push_back(m);
+} global_objects RCURLY_BRACKET {
+    // map parsing completed. If we ever want to do any wrap up
+    // (maybe some sanity checking), this would be the best place
+    // for it.
+};
+
+// This represents top-level entries: Control-agent, Logging, possibly others
+global_objects: global_object
+              | global_objects COMMA global_object
+              ;
+
+// This represents a single top level entry, e.g. Control-agent, Dhcp6 or DhcpDdns.
+global_object: agent_object
+             | logging_object
+             | dhcp4_json_object
+             | dhcp6_json_object
+             | dhcpddns_json_object
+             | unknown_map_entry
+             ;
+
+// This defines the Control-agent object.
+agent_object: CONTROL_AGENT {
+    // Let's create a MapElement that will represent it, add it to the top level
+    // map (that's already on the stack) and put the new map on the stack as well,
+    // so child elements will be able to add themselves to it.
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("Control-agent", m);
+    ctx.stack_.push_back(m);
+
+    // And tell the lexer that we definitely want keywords to be recognized.
+    ctx.enter(ctx.KEYWORDS);
+} COLON LCURLY_BRACKET global_params RCURLY_BRACKET {
+    // Ok, we're done with parsing control-agent. Let's take the map off the stack.
+    ctx.stack_.pop_back();
+
+    // And tell the lexer to return to its previous state (probably KEYWORDS as well)
+    ctx.leave();
+};
+
+global_params: global_param
+             | global_params COMMA global_param
+             ;
+
+// These are the parameters that are allowed at the top level of
+// the Control-agent object.
+global_param: http_host
+            | http_port
+            | control_sockets
+            | hooks_libraries
+            | unknown_map_entry
+            ;
+
+http_host: HTTP_HOST COLON STRING {
+    ElementPtr host(new StringElement($3, ctx.loc2pos(@3)));
+    ctx.stack_.back()->set("http-host", host);
+};
+
+http_port: HTTP_PORT COLON INTEGER {
+    ElementPtr prf(new IntElement($3, ctx.loc2pos(@3)));
+    ctx.stack_.back()->set("http-port", prf);
+};
+
+// --- hooks-libraries ---------------------------------------------------------
+hooks_libraries: HOOKS_LIBRARIES {
+    ElementPtr l(new ListElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("hooks-libraries", l);
+    ctx.stack_.push_back(l);
+} COLON LSQUARE_BRACKET hooks_libraries_list RSQUARE_BRACKET {
+    ctx.stack_.pop_back();
+};
+
+hooks_libraries_list: %empty
+                    | not_empty_hooks_libraries_list
+                    ;
+
+not_empty_hooks_libraries_list: hooks_library
+    | not_empty_hooks_libraries_list COMMA hooks_library
+    ;
+
+hooks_library: LCURLY_BRACKET {
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->add(m);
+    ctx.stack_.push_back(m);
+} hooks_params RCURLY_BRACKET {
+    ctx.stack_.pop_back();
+};
+
+hooks_params: hooks_param
+            | hooks_params COMMA hooks_param
+            | unknown_map_entry
+            ;
+
+hooks_param: library
+           | parameters
+           ;
+
+library: LIBRARY {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON STRING {
+    ElementPtr lib(new StringElement($4, ctx.loc2pos(@4)));
+    ctx.stack_.back()->set("library", lib);
+    ctx.leave();
+};
+
+parameters: PARAMETERS {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON value {
+    ctx.stack_.back()->set("parameters", $4);
+    ctx.leave();
+};
+
+// --- hooks-libraries end here ------------------------------------------------
+
+// --- control-sockets starts here ---------------------------------------------
+control_sockets: CONTROL_SOCKETS COLON LCURLY_BRACKET {
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("control-sockets", m);
+    ctx.stack_.push_back(m);
+} control_sockets_params RCURLY_BRACKET {
+    ctx.stack_.pop_back();
+};
+
+// This defines what kind of control-sockets parameters we allow.
+// Note that an empty map is not allowed here, because at least one control socket
+// is required.
+control_sockets_params: control_socket
+               | control_sockets_params COMMA control_socket
+               | unknown_map_entry
+               ;
+
+// We currently support three types of sockets: DHCPv4, DHCPv6 and D2
+// (even though D2 socket support is not yet implemented).
+control_socket: dhcp4_server_socket
+              | dhcp6_server_socket
+              | d2_server_socket
+              ;
+
+// That's an entry for dhcp4-server socket.
+dhcp4_server_socket: DHCP4_SERVER {
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("dhcp4-server", m);
+    ctx.stack_.push_back(m);
+} COLON LCURLY_BRACKET control_socket_params RCURLY_BRACKET {
+    ctx.stack_.pop_back();
+};
+
+// That's an entry for dhcp6-server socket.
+dhcp6_server_socket: DHCP6_SERVER {
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("dhcp6-server", m);
+    ctx.stack_.push_back(m);
+} COLON LCURLY_BRACKET control_socket_params RCURLY_BRACKET {
+    ctx.stack_.pop_back();
+};
+
+// That's an entry for d2-server socket.
+d2_server_socket: D2_SERVER {
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("d2-server", m);
+    ctx.stack_.push_back(m);
+} COLON LCURLY_BRACKET control_socket_params RCURLY_BRACKET {
+    ctx.stack_.pop_back();
+};
+
+// Socket parameters consist of one or more parameters.
+control_socket_params: control_socket_param
+                     | control_socket_params COMMA control_socket_param
+                     ;
+
+// We currently support two socket parameters: type and name.
+control_socket_param: socket_type
+                    | socket_name
+                    ;
+
+// This rule specifies socket type.
+socket_type: SOCKET_TYPE {
+} COLON socket_type_value {
+    ctx.stack_.back()->set("socket-type", $4);
+};
+
+// We currently allow only unix domain sockets
+socket_type_value : UNIX { $$ = ElementPtr(new StringElement("unix", ctx.loc2pos(@1))); };
+
+// This rule defines socket-name parameter.
+socket_name: SOCKET_NAME {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON STRING {
+    ElementPtr name(new StringElement($4, ctx.loc2pos(@4)));
+    ctx.stack_.back()->set("socket-name", name);
+    ctx.leave();
+};
+
+// --- control-sockets end here ------------------------------------------------
+
+// JSON entries for other global objects (Dhcp4,Dhcp6 and DhcpDdns)
+dhcp4_json_object: DHCP4 {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON value {
+    ctx.stack_.back()->set("Dhcp4", $4);
+    ctx.leave();
+};
+
+dhcp6_json_object: DHCP6 {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON value {
+    ctx.stack_.back()->set("Dhcp6", $4);
+    ctx.leave();
+};
+
+dhcpddns_json_object: DHCPDDNS {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON value {
+    ctx.stack_.back()->set("DhcpDdns", $4);
+    ctx.leave();
+};
+
+// --- Logging starts here -----------------------------------------------------
+
+// This defines the top level "Logging" object. It parses
+// the following "Logging": { ... }. The ... is defined
+// by logging_params
+logging_object: LOGGING {
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("Logging", m);
+    ctx.stack_.push_back(m);
+} COLON LCURLY_BRACKET logging_params RCURLY_BRACKET {
+    ctx.stack_.pop_back();
+};
+
+// This defines the list of allowed parameters that may appear
+// in the top-level Logging object. It can either be a single
+// parameter or several parameters separated by commas.
+logging_params: logging_param
+              | logging_params COMMA logging_param
+              ;
+
+// There's currently only one parameter defined, which is "loggers".
+logging_param: loggers;
+
+// "loggers", the only parameter currently defined in "Logging" object,
+// is "Loggers": [ ... ].
+loggers: LOGGERS {
+    ElementPtr l(new ListElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("loggers", l);
+    ctx.stack_.push_back(l);
+}  COLON LSQUARE_BRACKET loggers_entries RSQUARE_BRACKET {
+    ctx.stack_.pop_back();
+};
+
+// These are the parameters allowed in loggers: either one logger
+// entry or multiple entries separated by commas.
+loggers_entries: logger_entry
+               | loggers_entries COMMA logger_entry
+               ;
+
+// This defines a single entry defined in loggers in Logging.
+logger_entry: LCURLY_BRACKET {
+    ElementPtr l(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->add(l);
+    ctx.stack_.push_back(l);
+} logger_params RCURLY_BRACKET {
+    ctx.stack_.pop_back();
+};
+
+logger_params: logger_param
+             | logger_params COMMA logger_param
+             ;
+
+logger_param: name
+            | output_options_list
+            | debuglevel
+            | severity
+            | unknown_map_entry
+            ;
+
+name: NAME {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON STRING {
+    ElementPtr name(new StringElement($4, ctx.loc2pos(@4)));
+    ctx.stack_.back()->set("name", name);
+    ctx.leave();
+};
+
+debuglevel: DEBUGLEVEL COLON INTEGER {
+    ElementPtr dl(new IntElement($3, ctx.loc2pos(@3)));
+    ctx.stack_.back()->set("debuglevel", dl);
+};
+severity: SEVERITY {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON STRING {
+    ElementPtr sev(new StringElement($4, ctx.loc2pos(@4)));
+    ctx.stack_.back()->set("severity", sev);
+    ctx.leave();
+};
+
+output_options_list: OUTPUT_OPTIONS {
+    ElementPtr l(new ListElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("output_options", l);
+    ctx.stack_.push_back(l);
+} COLON LSQUARE_BRACKET output_options_list_content RSQUARE_BRACKET {
+    ctx.stack_.pop_back();
+};
+
+output_options_list_content: output_entry
+                           | output_options_list_content COMMA output_entry
+                           ;
+
+output_entry: LCURLY_BRACKET {
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->add(m);
+    ctx.stack_.push_back(m);
+} output_params RCURLY_BRACKET {
+    ctx.stack_.pop_back();
+};
+
+output_params: output_param
+             | output_params COMMA output_param
+             ;
+
+output_param: OUTPUT {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON STRING {
+    ElementPtr sev(new StringElement($4, ctx.loc2pos(@4)));
+    ctx.stack_.back()->set("output", sev);
+    ctx.leave();
+};
+
+%%
+
+void
+isc::agent::AgentParser::error(const location_type& loc,
+                               const std::string& what)
+{
+    ctx.error(loc, what);
+}
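
The START_SUB_AGENT entry point defined above lets tests provide just the content of
the Control-agent map. A minimal sketch (illustrative code, not part of the commit):

    #include <agent/parser_context.h>
    #include <cc/data.h>
    #include <iostream>

    int main() {
        // With PARSER_SUB_AGENT we skip the outer map and the "Control-agent" key
        // and provide only the contents of that map.
        isc::agent::ParserContext ctx;
        isc::data::ElementPtr agent = ctx.parseString(
            "{ \"http-host\": \"localhost\", \"http-port\": 8000 }",
            isc::agent::ParserContext::PARSER_SUB_AGENT);
        std::cout << agent->str() << std::endl;
        return 0;
    }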

+ 128 - 0
src/bin/agent/parser_context.cc

@@ -0,0 +1,128 @@
+// Copyright (C) 2017 Internet Systems Consortium, Inc. ("ISC")
+//
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#include <agent/parser_context.h>
+#include <agent/agent_parser.h>
+#include <exceptions/exceptions.h>
+#include <cc/dhcp_config_error.h>
+#include <cc/data.h>
+#include <fstream>
+#include <limits>
+
+namespace isc {
+namespace agent {
+
+ParserContext::ParserContext()
+  : ctx_(NO_KEYWORDS), trace_scanning_(false), trace_parsing_(false)
+{
+}
+
+ParserContext::~ParserContext()
+{
+}
+
+isc::data::ElementPtr
+ParserContext::parseString(const std::string& str, ParserType parser_type)
+{
+    scanStringBegin(str, parser_type);
+    return (parseCommon());
+}
+
+isc::data::ElementPtr
+ParserContext::parseFile(const std::string& filename, ParserType parser_type) {
+    FILE* f = fopen(filename.c_str(), "r");
+    if (!f) {
+        isc_throw(ParseError, "Unable to open file " << filename);
+    }
+    scanFileBegin(f, filename, parser_type);
+    return (parseCommon());
+}
+
+isc::data::ElementPtr
+ParserContext::parseCommon() {
+    isc::agent::AgentParser parser(*this);
+    // Uncomment this to get detailed parser logs.
+    // trace_parsing_ = true;
+    parser.set_debug_level(trace_parsing_);
+    try {
+        int res = parser.parse();
+        if (res != 0) {
+            isc_throw(ParseError, "Parser abort");
+        }
+        scanEnd();
+    }
+    catch (...) {
+        scanEnd();
+        throw;
+    }
+    if (stack_.size() == 1) {
+        return (stack_[0]);
+    } else {
+        isc_throw(ParseError, "Expected exactly one terminal Element expected, found "
+                  << stack_.size());
+    }
+}
+
+
+void
+ParserContext::error(const isc::agent::location& loc, const std::string& what)
+{
+    isc_throw(ParseError, loc << ": " << what);
+}
+
+void
+ParserContext::error(const std::string& what)
+{
+    isc_throw(ParseError, what);
+}
+
+void
+ParserContext::fatal(const std::string& what)
+{
+    isc_throw(ParseError, what);
+}
+
+isc::data::Element::Position
+ParserContext::loc2pos(isc::agent::location& loc)
+{
+    const std::string& file = *loc.begin.filename;
+    const uint32_t line = loc.begin.line;
+    const uint32_t pos = loc.begin.column;
+    return (isc::data::Element::Position(file, line, pos));
+}
+
+void
+ParserContext::enter(const LexerContext& ctx)
+{
+    cstack_.push_back(ctx_);
+    ctx_ = ctx;
+}
+
+void
+ParserContext::leave()
+{
+    if (cstack_.empty()) {
+        fatal("unbalanced syntactic context");
+    }
+    ctx_ = cstack_.back();
+    cstack_.pop_back();
+}
+
+const std::string
+ParserContext::contextName()
+{
+    switch (ctx_) {
+    case NO_KEYWORDS:
+        return ("no keywords");
+    case KEYWORDS:
+        return ("keywords");
+    default:
+        return ("__unknown__");
+    }
+}
+
+};
+};
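
Parse errors raised through the error() handlers above carry the bison location, so
callers see a file, line and column prefix in the exception text. A small sketch
(illustrative caller code, not part of the commit; the quoted error text is only an
approximation):

    #include <agent/parser_context.h>
    #include <iostream>

    int main() {
        try {
            isc::agent::ParserContext ctx;
            // Missing value after the colon: the parser calls error() with the
            // location of the offending token.
            ctx.parseString("{ \"Control-agent\": { \"http-port\": } }",
                            isc::agent::ParserContext::PARSER_AGENT);
        } catch (const std::exception& ex) {
            // Prints something like "<string>:1.35: syntax error, unexpected }"
            // (the exact wording comes from bison's verbose error reporting).
            std::cerr << ex.what() << std::endl;
            return 1;
        }
        return 0;
    }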

+ 242 - 0
src/bin/agent/parser_context.h

@@ -0,0 +1,242 @@
+// Copyright (C) 2017 Internet Systems Consortium, Inc. ("ISC")
+//
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#ifndef PARSER_CONTEXT_H
+#define PARSER_CONTEXT_H
+#include <string>
+#include <map>
+#include <vector>
+#include <agent/agent_parser.h>
+#include <agent/parser_context_decl.h>
+#include <exceptions/exceptions.h>
+
+// Tell Flex the lexer's prototype ...
+#define YY_DECL isc::agent::AgentParser::symbol_type agent_lex (ParserContext& driver)
+
+// ... and declare it for the parser's sake.
+YY_DECL;
+
+namespace isc {
+namespace agent {
+
+/// @brief Parser context is a wrapper around flex/bison instances dedicated to
+///        Control-agent config file parser.
+///
+/// It follows the same principle as other components. The primary interfaces
+/// are the @ref parseString and @ref parseFile methods. All other methods are
+/// public for testing purposes only. This interface allows parsing the
+/// whole configuration with syntactic checking (which is by far the most
+/// frequent use), but it also allows parsing the input as generic JSON or
+/// parsing only the content of the Control-agent object, which is a subset
+/// of the full grammar (this is very useful for unit-tests, to avoid duplicating
+/// unnecessary parts of the config file).
+class ParserContext
+{
+public:
+
+    /// @brief Defines currently supported scopes
+    ///
+    /// AgentParser is able to parse several types of scope. Usually,
+    /// when it parses a config file, it expects the data to have a map
+    /// with Control-agent in it and all the parameters within that map.
+    /// However, sometimes the parser is expected to parse only a subset
+    /// of that information.
+    typedef enum {
+        /// This parser will parse the content as generic JSON.
+        PARSER_JSON,
+
+        /// This parser will expect the content as Control-agent config wrapped
+        /// in a map (that's the regular config file)
+        PARSER_AGENT,
+
+        /// This parser will expect only the content of Control-agent.
+        PARSER_SUB_AGENT
+    } ParserType;
+
+    /// @brief Default constructor.
+    ParserContext();
+
+    /// @brief destructor
+    virtual ~ParserContext();
+
+    /// @brief JSON elements being parsed.
+    std::vector<isc::data::ElementPtr> stack_;
+
+    /// @brief Method called before scanning starts on a string.
+    ///
+    /// @param str string to be parsed
+    /// @param type specifies expected content
+    void scanStringBegin(const std::string& str, ParserType type);
+
+    /// @brief Method called before scanning starts on a file.
+    ///
+    /// @param f stdio FILE pointer
+    /// @param filename file to be parsed
+    /// @param type specifies expected content
+    void scanFileBegin(FILE* f, const std::string& filename, ParserType type);
+
+    /// @brief Method called after the last tokens are scanned.
+    void scanEnd();
+
+    /// @brief Divert input to an include file.
+    ///
+    /// @param filename file to be included
+    void includeFile(const std::string& filename);
+
+    /// @brief Run the parser on the string specified.
+    ///
+    /// This method parses specified string. Depending on the value of
+    /// parser_type, parser may either check only that the input is valid
+    /// JSON, or may do more specific syntax checking. See @ref ParserType
+    /// for supported syntax checkers.
+    ///
+    /// @param str string to be parsed
+    /// @param parser_type specifies expected content (usually PARSER_AGENT or PARSER_JSON)
+    /// @return Element structure representing parsed text.
+    isc::data::ElementPtr parseString(const std::string& str,
+                                      ParserType parser_type);
+
+    /// @brief Run the parser on the file specified.
+    ///
+    /// This method parses specified file. Depending on the value of
+    /// parser_type, parser may either check only that the input is valid
+    /// JSON, or may do more specific syntax checking. See @ref ParserType
+    /// for supported syntax checkers.
+    ///
+    /// @param filename file to be parsed
+    /// @param parser_type specifies expected content (usually PARSER_AGENT or
+    ///                                                PARSER_JSON)
+    /// @return Element structure representing parsed text.
+    isc::data::ElementPtr parseFile(const std::string& filename,
+                                    ParserType parser_type);
+
+    /// @brief Error handler
+    ///
+    /// @param loc location within the parsed file when experienced a problem.
+    /// @param what string explaining the nature of the error.
+    /// @throw ParseError
+    void error(const isc::agent::location& loc, const std::string& what);
+
+    /// @brief Error handler
+    ///
+    /// This is a simplified error reporting tool for possible future
+    /// cases when the AgentParser is not able to handle the input.
+    ///
+    /// @param what string explaining the nature of the error.
+    /// @throw ParseError
+    void error(const std::string& what);
+
+    /// @brief Fatal error handler
+    ///
+    /// This is for errors that should not happen but are fatal.
+    /// Used by YY_FATAL_ERROR macro so required to be static.
+    ///
+    /// @param what string explaining the nature of the error.
+    /// @throw ParseError
+    static void fatal(const std::string& what);
+
+    /// @brief Converts bison's position to one understandable by isc::data::Element
+    ///
+    /// Convert a bison location into an element position
+    /// (take the begin, the end is lost)
+    ///
+    /// @param loc location in bison format
+    /// @return Position in format accepted by Element
+    isc::data::Element::Position loc2pos(isc::agent::location& loc);
+
+    /// @brief Defines syntactic contexts for lexical tie-ins
+    typedef enum {
+        ///< This one is used in pure JSON mode.
+        NO_KEYWORDS,
+
+        ///< Used while parsing the content of Control-agent.
+        KEYWORDS
+    } LexerContext;
+
+    /// @brief File name
+    std::string file_;
+
+    /// @brief File name stack
+    std::vector<std::string> files_;
+
+    /// @brief Location of the current token
+    ///
+    /// The lexer will keep updating it. This variable will be useful
+    /// for logging errors.
+    isc::agent::location loc_;
+
+    /// @brief Location stack
+    std::vector<isc::agent::location> locs_;
+
+    /// @brief Lexer state stack
+    std::vector<struct yy_buffer_state*> states_;
+
+    /// @brief sFile (aka FILE)
+    FILE* sfile_;
+
+    /// @brief sFile (aka FILE) stack
+    ///
+    /// This is a stack of files. Typically there's only one file (the
+    /// one being currently parsed), but there may be more if one
+    /// file includes another.
+    std::vector<FILE*> sfiles_;
+
+    /// @brief Current syntactic context
+    LexerContext ctx_;
+
+    /// @brief Enter a new syntactic context
+    ///
+    /// Entering a new syntactic context is useful in several ways.
+    /// First, it allows the parser to avoid conflicts. Second, it
+    /// allows the lexer to return different tokens depending on
+    /// context (e.g. if the "http-host" string is detected, the lexer
+    /// will return a STRING token in JSON mode or an HTTP_HOST token
+    /// when keywords are recognized). Finally, the syntactic context allows the
+    /// error message to be more descriptive if the input string
+    /// does not parse properly. The Control Agent parser uses simplified
+    /// contexts: either it recognizes keywords (value set to KEYWORDS)
+    /// or not (value set to NO_KEYWORDS).
+    ///
+    /// Make sure to call @ref leave() once the parsing of your
+    /// context is complete.
+    ///
+    /// @param ctx the syntactic context to enter into
+    void enter(const LexerContext& ctx);
+
+    /// @brief Leave a syntactic context
+    ///
+    /// @ref enter() must be called before (when entering a new scope
+    /// or context). Once you complete the parsing, this method
+    /// should be called.
+    ///
+    /// @throw ParseError if unbalanced (more leave() than enter() calls)
+    void leave();
+
+    /// @brief Get the syntactic context name
+    ///
+    /// @return printable name of the context.
+    const std::string contextName();
+
+ private:
+    /// @brief Flag determining scanner debugging.
+    bool trace_scanning_;
+
+    /// @brief Flag determining parser debugging.
+    bool trace_parsing_;
+
+    /// @brief Syntactic context stack
+    std::vector<LexerContext> cstack_;
+
+    /// @brief Common part of parseXXX
+    ///
+    /// @return Element structure representing parsed text.
+    isc::data::ElementPtr parseCommon();
+};
+
+}; // end of isc::agent namespace
+}; // end of isc namespace
+
+#endif
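
The includeFile() machinery declared above backs the <?include "..."?> directive
recognized by the lexer. A hedged sketch (the file names and their contents are made
up for illustration):

    #include <agent/parser_context.h>
    #include <cc/data.h>
    #include <iostream>

    int main() {
        // Suppose /tmp/logging.json contains:
        //   "Logging": { "loggers": [ { "name": "kea-ctrl-agent", "severity": "INFO" } ] }
        // and /tmp/agent.json contains:
        //   {
        //       "Control-agent": { "http-host": "localhost", "http-port": 8000 },
        //       <?include "/tmp/logging.json"?>
        //   }
        // The lexer switches buffers when it sees the directive, so the included
        // text is parsed as if it appeared in place (nesting depth is limited).
        isc::agent::ParserContext ctx;
        isc::data::ElementPtr cfg =
            ctx.parseFile("/tmp/agent.json", isc::agent::ParserContext::PARSER_AGENT);
        std::cout << cfg->str() << std::endl;
        return 0;
    }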

+ 20 - 0
src/bin/agent/parser_context_decl.h

@@ -0,0 +1,20 @@
+// Copyright (C) 2017 Internet Systems Consortium, Inc. ("ISC")
+//
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#ifndef AGENT_CONTEXT_DECL_H
+#define AGENT_CONTEXT_DECL_H
+
+/// @file agent/parser_context_decl.h Forward declaration of the ParserContext class
+
+namespace isc {
+namespace agent {
+
+class ParserContext;
+
+}; // end of isc::agent namespace
+}; // end of isc namespace
+
+#endif

+ 3 - 0
src/bin/agent/tests/Makefile.am

@@ -22,6 +22,7 @@ AM_CPPFLAGS += -I$(top_srcdir)/src/bin
 AM_CPPFLAGS += $(BOOST_INCLUDES)
 AM_CPPFLAGS += -DTEST_DATA_BUILDDIR=\"$(abs_top_builddir)/src/bin/agent/tests\"
 AM_CPPFLAGS += -DINSTALL_PROG=\"$(abs_top_srcdir)/install-sh\"
+AM_CPPFLAGS += -DCFG_EXAMPLES=\"$(abs_top_srcdir)/doc/examples/agent\"
 
 CLEANFILES = $(builddir)/interfaces.txt $(builddir)/logger_lockfile
 
@@ -43,6 +44,7 @@ TESTS += ctrl_agent_unittests
 
 ctrl_agent_unittests_SOURCES  = ctrl_agent_cfg_mgr_unittests.cc
 ctrl_agent_unittests_SOURCES += ctrl_agent_controller_unittests.cc
+ctrl_agent_unittests_SOURCES += parser_unittests.cc
 ctrl_agent_unittests_SOURCES += ctrl_agent_process_unittests.cc
 ctrl_agent_unittests_SOURCES += ctrl_agent_unittests.cc
 
@@ -66,6 +68,7 @@ ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/cryptolink/libkea-cryptoli
 ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/hooks/libkea-hooks.la
 ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/log/libkea-log.la
 ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/util/threads/libkea-threads.la
+ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/testutils/libkea-testutils.la
 ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/util/libkea-util.la
 ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/exceptions/libkea-exceptions.la
 ctrl_agent_unittests_LDADD += $(LOG4CPLUS_LIBS) $(CRYPTO_LIBS)

+ 591 - 0
src/bin/agent/tests/parser_unittests.cc

@@ -0,0 +1,591 @@
+// Copyright (C) 2017 Internet Systems Consortium, Inc. ("ISC")
+//
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#include <gtest/gtest.h>
+#include <cc/data.h>
+#include <agent/parser_context.h>
+#include <cc/dhcp_config_error.h>
+#include <testutils/io_utils.h>
+
+using namespace isc::data;
+using namespace std;
+
+namespace isc {
+namespace agent {
+namespace test {
+
+/// @brief Compares two JSON trees
+///
+/// If differences are discovered, a gtest failure is reported (using EXPECT_EQ).
+///
+/// @param a first tree to be compared
+/// @param b second tree to be compared
+void compareJSON(ConstElementPtr a, ConstElementPtr b) {
+    ASSERT_TRUE(a);
+    ASSERT_TRUE(b);
+    EXPECT_EQ(a->str(), b->str());
+}
+
+/// @brief Tests if the input string can be parsed with a specific parser
+///
+/// The input text is passed to the bison parser of the specified type.
+/// Then the same text is passed to the legacy JSON parser and the outputs
+/// of both parsers are compared. The legacy comparison can be disabled
+/// if the tested feature is not supported by the old parser (e.g. the
+/// new comment styles).
+///
+/// @param txt text to be parsed
+/// @param parser_type bison parser type to be instantiated
+/// @param compare whether to compare the output with legacy JSON parser
+void testParser(const std::string& txt, ParserContext::ParserType parser_type,
+    bool compare = true) {
+    ConstElementPtr test_json;
+
+    ASSERT_NO_THROW({
+            try {
+                ParserContext ctx;
+                test_json = ctx.parseString(txt, parser_type);
+            } catch (const std::exception &e) {
+                cout << "EXCEPTION: " << e.what() << endl;
+                throw;
+            }
+
+    });
+
+    if (!compare) {
+        return;
+    }
+
+    // Now compare if both representations are the same.
+    ElementPtr reference_json;
+    ASSERT_NO_THROW(reference_json = Element::fromJSON(txt, true));
+    compareJSON(reference_json, test_json);
+}
+
+TEST(ParserTest, mapInMap) {
+    string txt = "{ \"xyzzy\": { \"foo\": 123, \"baz\": 456 } }";
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+TEST(ParserTest, listInList) {
+    string txt = "[ [ \"Britain\", \"Wales\", \"Scotland\" ], "
+                 "[ \"Pomorze\", \"Wielkopolska\", \"Tatry\"] ]";
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+TEST(ParserTest, nestedMaps) {
+    string txt = "{ \"europe\": { \"UK\": { \"London\": { \"street\": \"221B Baker\" }}}}";
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+TEST(ParserTest, nestedLists) {
+    string txt = "[ \"half\", [ \"quarter\", [ \"eighth\", [ \"sixteenth\" ]]]]";
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+TEST(ParserTest, listsInMaps) {
+    string txt = "{ \"constellations\": { \"orion\": [ \"rigel\", \"betelguese\" ], "
+                    "\"cygnus\": [ \"deneb\", \"albireo\"] } }";
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+TEST(ParserTest, mapsInLists) {
+    string txt = "[ { \"body\": \"earth\", \"gravity\": 1.0 },"
+                 " { \"body\": \"mars\", \"gravity\": 0.376 } ]";
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+TEST(ParserTest, types) {
+    string txt = "{ \"string\": \"foo\","
+                   "\"integer\": 42,"
+                   "\"boolean\": true,"
+                   "\"map\": { \"foo\": \"bar\" },"
+                   "\"list\": [ 1, 2, 3 ],"
+                   "\"null\": null }";
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+TEST(ParserTest, keywordJSON) {
+    string txt = "{ \"name\": \"user\","
+                   "\"type\": \"password\","
+                   "\"user\": \"name\","
+                   "\"password\": \"type\" }";
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+// This test checks if a full config (with the top level map and Control-agent
+// object) can be parsed with syntactic checking (and as pure JSON).
+TEST(ParserTest, keywordAgent) {
+    string txt = "{ \"Control-agent\": {\n"
+        "    \"http-host\": \"localhost\",\n"
+        "    \"http-port\": 8000,\n"
+        "    \"control-sockets\": {"
+        "        \"dhcp4-server\": {"
+        "            \"socket-type\": \"unix\","
+        "            \"socket-name\": \"/path/to/the/unix/socket-v4\""
+        "        },"
+        "        \"dhcp6-server\": {"
+        "            \"socket-type\": \"unix\","
+        "            \"socket-name\": \"/path/to/the/unix/socket-v6\""
+        "        },"
+        "        \"d2-server\": {"
+        "            \"socket-type\": \"unix\","
+        "            \"socket-name\": \"/path/to/the/unix/socket-d2\""
+        "        }"
+        "    },"
+        "    \"hooks-libraries\": ["
+        "    {"
+        "        \"library\": \"/opt/local/control-agent-commands.so\","
+        "        \"parameters\": {"
+        "            \"param1\": \"foo\""
+        "        }"
+        "    }"
+        "   ]"
+        "} }";
+    // This is a full config, so we'll parse it as full config (PARSER_AGENT)
+    testParser(txt, ParserContext::PARSER_AGENT);
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+// This test checks if a simplified config (without the top level map and
+// Control-agent object) can be parsed with syntactic checking (and as pure JSON).
+TEST(ParserTest, keywordSubAgent) {
+
+    // This is similar to previous test, but note the lack of outer
+    // map and Control-agent.
+    string txt = "{\n"
+        "    \"http-host\": \"localhost\",\n"
+        "    \"http-port\": 8000,\n"
+        "    \"control-sockets\": {"
+        "        \"dhcp4-server\": {"
+        "            \"socket-type\": \"unix\","
+        "            \"socket-name\": \"/path/to/the/unix/socket-v4\""
+        "        },"
+        "        \"dhcp6-server\": {"
+        "            \"socket-type\": \"unix\","
+        "            \"socket-name\": \"/path/to/the/unix/socket-v6\""
+        "        },"
+        "        \"d2-server\": {"
+        "            \"socket-type\": \"unix\","
+        "            \"socket-name\": \"/path/to/the/unix/socket-d2\""
+        "        }"
+        "    },"
+        "    \"hooks-libraries\": ["
+        "    {"
+        "        \"library\": \"/opt/local/control-agent-commands.so\","
+        "        \"parameters\": {"
+        "            \"param1\": \"foo\""
+        "        }"
+        "    }"
+        "   ]"
+        "}";
+
+    // This is only a subset of full config, so we'll parse with PARSER_SUB_AGENT.
+    testParser(txt, ParserContext::PARSER_SUB_AGENT);
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+// Tests if bash (#) comments are supported. That's the only comment type that
+// was supported by the old parser.
+TEST(ParserTest, bashComments) {
+    string txt= "{ \"Control-agent\": {"
+                "  \"http-host\": \"localhost\","
+                "  \"http-port\": 9000,\n"
+                "  \"control-sockets\": {\n"
+                "    \"d2-server\": {\n"
+                "# this is a comment\n"
+                "\"socket-type\": \"unix\", \n"
+                "# This socket is mine. I can name it whatever\n"
+                "# I like, ok?\n"
+                "\"socket-name\": \"Hector\" \n"
+                "} } } }";
+    testParser(txt, ParserContext::PARSER_AGENT);
+}
+
+// Tests if C++ (//) comments can start anywhere, not just at the beginning of a line.
+TEST(ParserTest, cppComments) {
+    string txt= "{ \"Control-agent\": {"
+                "  \"http-host\": \"localhost\","
+                "  \"http-port\": 9001, // the level is over 9000!\n"
+                "  \"control-sockets\": {\n"
+                "    // Let's try talking to D2. Sadly, it never talks"
+                "    // to us back :( Maybe he doesn't like his name?\n"
+                "    \"d2-server\": {"
+                "\"socket-type\": \"unix\", \n"
+                "\"socket-name\": \"Hector\" \n"
+                "} } } }";
+
+    testParser(txt, ParserContext::PARSER_AGENT, false);
+}
+
+// Tests if bash (#) comments can start anywhere, not just at the beginning of a line.
+TEST(ParserTest, bashCommentsInline) {
+    string txt= "{ \"Control-agent\": {"
+                "  \"http-host\": \"localhost\","
+                "  \"http-port\": 9000,\n"
+                "  \"control-sockets\": {\n"
+                "    \"d2-server\": {"
+                "\"socket-type\": \"unix\", # Maybe Hector is not really a \n"
+                "\"socket-name\": \"Hector\" # Unix process?\n"
+                "# Oh no! He's a windows one and just pretending!\n"
+                "} } } }";
+    testParser(txt, ParserContext::PARSER_AGENT, false);
+}
+
+// Tests if multi-line C style comments are handled correctly.
+TEST(ParserTest, multilineComments) {
+    string txt= "{ \"Control-agent\": {"
+                "  \"http-host\": \"localhost\","
+                "  \"http-port\": 9000,\n"
+                "  \"control-sockets\": {\n"
+                "    \"dhcp4-server\": {\n"
+                "        \"socket-type\": \"unix\"\n"
+                "    }\n"
+                "  /* Ok, forget about it. If Hector doesn't want to talk,\n"
+                "     we won't talk to him either. We now have quiet days. */\n"
+                "  /* \"d2-server\": {"
+                "  \"socket-type\": \"unix\",\n"
+                "\"socket-name\": \"Hector\"\n"
+                "}*/ } } }";
+    testParser(txt, ParserContext::PARSER_AGENT, false);
+}
+
+/// @brief Loads the specified example config file
+///
+/// This test loads the specified example file twice: first using the legacy
+/// JSON parser and then a second time using the bison parser. The two
+/// resulting Element trees are then compared. The input is decommented
+/// before it is passed to the legacy parser (as its support for comments
+/// is very limited).
+///
+/// @param fname name of the file to be loaded
+void testFile(const std::string& fname) {
+    ElementPtr reference_json;
+    ConstElementPtr test_json;
+
+    string decommented = decommentJSONfile(fname);
+
+    cout << "Parsing file " << fname << "(" << decommented << ")" << endl;
+
+    EXPECT_NO_THROW(reference_json = Element::fromJSONFile(decommented, true));
+
+    // remove the temporary file
+    EXPECT_NO_THROW(::remove(decommented.c_str()));
+
+    EXPECT_NO_THROW(
+    try {
+        ParserContext ctx;
+        test_json = ctx.parseFile(fname, ParserContext::PARSER_AGENT);
+    } catch (const std::exception &x) {
+        cout << "EXCEPTION: " << x.what() << endl;
+        throw;
+    });
+
+    ASSERT_TRUE(reference_json);
+    ASSERT_TRUE(test_json);
+
+    compareJSON(reference_json, test_json);
+}
+
+// This test loads all available example config files. Each config is loaded
+// twice: first with the existing Element::fromJSONFile() and then a second
+// time with AgentParser. The two JSON trees are then compared.
+// Hopefully the list of example configs will grow over time.
+TEST(ParserTest, file) {
+    vector<string> configs;
+    configs.push_back("simple.json");
+
+    for (size_t i = 0; i < configs.size(); ++i) {
+        testFile(string(CFG_EXAMPLES) + "/" + configs[i]);
+    }
+}
+
+/// @brief Tests error conditions in AgentParser
+///
+/// @param txt text to be parsed
+/// @param parser_type type of the parser to be used in the test
+/// @param msg expected content of the exception
+void testError(const std::string& txt,
+               ParserContext::ParserType parser_type,
+               const std::string& msg)
+{
+    try {
+        ParserContext ctx;
+        ConstElementPtr parsed = ctx.parseString(txt, parser_type);
+        FAIL() << "Expected ParseError but nothing was raised (expected: "
+               << msg << ")";
+    }
+    catch (const ParseError& ex) {
+        EXPECT_EQ(msg, ex.what());
+    }
+    catch (...) {
+        FAIL() << "Expected ParseError but something else was raised";
+    }
+}
+
+// Verify that error conditions are handled correctly.
+TEST(ParserTest, errors) {
+    // no input
+    testError("", ParserContext::PARSER_JSON,
+              "<string>:1.1: syntax error, unexpected end of file");
+    testError(" ", ParserContext::PARSER_JSON,
+              "<string>:1.2: syntax error, unexpected end of file");
+    testError("\n", ParserContext::PARSER_JSON,
+              "<string>:2.1: syntax error, unexpected end of file");
+    testError("\t", ParserContext::PARSER_JSON,
+              "<string>:1.2: syntax error, unexpected end of file");
+    testError("\r", ParserContext::PARSER_JSON,
+              "<string>:1.2: syntax error, unexpected end of file");
+
+    // comments
+    testError("# nothing\n",
+              ParserContext::PARSER_JSON,
+              "<string>:2.1: syntax error, unexpected end of file");
+    testError(" #\n",
+              ParserContext::PARSER_JSON,
+              "<string>:2.1: syntax error, unexpected end of file");
+    testError("// nothing\n",
+              ParserContext::PARSER_JSON,
+              "<string>:2.1: syntax error, unexpected end of file");
+    testError("/* nothing */\n",
+              ParserContext::PARSER_JSON,
+              "<string>:2.1: syntax error, unexpected end of file");
+    testError("/* no\nthing */\n",
+              ParserContext::PARSER_JSON,
+              "<string>:3.1: syntax error, unexpected end of file");
+    testError("/* no\nthing */\n\n",
+              ParserContext::PARSER_JSON,
+              "<string>:4.1: syntax error, unexpected end of file");
+    testError("/* nothing\n",
+              ParserContext::PARSER_JSON,
+              "Comment not closed. (/* in line 1");
+    testError("\n\n\n/* nothing\n",
+              ParserContext::PARSER_JSON,
+              "Comment not closed. (/* in line 4");
+    testError("{ /* */*/ }\n",
+              ParserContext::PARSER_JSON,
+              "<string>:1.3-8: Invalid character: *");
+    testError("{ /* // *// }\n",
+              ParserContext::PARSER_JSON,
+              "<string>:1.3-11: Invalid character: /");
+    testError("{ /* // *///  }\n",
+              ParserContext::PARSER_JSON,
+              "<string>:2.1: syntax error, unexpected end of file, "
+              "expecting }");
+
+    // includes
+    testError("<?\n",
+              ParserContext::PARSER_JSON,
+              "Directive not closed.");
+    testError("<?include\n",
+              ParserContext::PARSER_JSON,
+              "Directive not closed.");
+    string file = string(CFG_EXAMPLES) + "/" + "simple.json";
+    testError("<?include \"" + file + "\"\n",
+              ParserContext::PARSER_JSON,
+              "Directive not closed.");
+    testError("<?include \"/foo/bar\" ?>/n",
+              ParserContext::PARSER_JSON,
+              "Can't open include file /foo/bar");
+
+    // JSON keywords
+    testError("{ \"foo\": True }",
+              ParserContext::PARSER_JSON,
+              "<string>:1.10-13: JSON true reserved keyword is lower case only");
+    testError("{ \"foo\": False }",
+              ParserContext::PARSER_JSON,
+              "<string>:1.10-14: JSON false reserved keyword is lower case only");
+    testError("{ \"foo\": NULL }",
+              ParserContext::PARSER_JSON,
+              "<string>:1.10-13: JSON null reserved keyword is lower case only");
+    testError("{ \"foo\": Tru }",
+              ParserContext::PARSER_JSON,
+              "<string>:1.10: Invalid character: T");
+    testError("{ \"foo\": nul }",
+              ParserContext::PARSER_JSON,
+              "<string>:1.10: Invalid character: n");
+
+    // numbers
+    testError("123",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.1-3: syntax error, unexpected integer, "
+              "expecting {");
+    testError("-456",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.1-4: syntax error, unexpected integer, "
+              "expecting {");
+    testError("-0001",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.1-5: syntax error, unexpected integer, "
+              "expecting {");
+    testError("1234567890123456789012345678901234567890",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1-40: Failed to convert "
+              "1234567890123456789012345678901234567890"
+              " to an integer.");
+    testError("-3.14e+0",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.1-8: syntax error, unexpected floating point, "
+              "expecting {");
+    testError("1e50000",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1-7: Failed to convert 1e50000 "
+              "to a floating point.");
+
+    // strings
+    testError("\"aabb\"",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.1-6: syntax error, unexpected constant string, "
+              "expecting {");
+    testError("{ \"aabb\"err",
+              ParserContext::PARSER_JSON,
+              "<string>:1.9: Invalid character: e");
+    testError("{ err\"aabb\"",
+              ParserContext::PARSER_JSON,
+              "<string>:1.3: Invalid character: e");
+    testError("\"a\n\tb\"",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1-6: Invalid control in \"a\n\tb\"");
+    testError("\"a\\n\\tb\"",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.1-8: syntax error, unexpected constant string, "
+              "expecting {");
+    testError("\"a\\x01b\"",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1-8: Bad escape in \"a\\x01b\"");
+    testError("\"a\\u0162\"",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1-9: Unsupported unicode escape in \"a\\u0162\"");
+    testError("\"a\\u062z\"",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1-9: Bad escape in \"a\\u062z\"");
+    testError("\"abc\\\"",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1-6: Overflow escape in \"abc\\\"");
+
+    // from data_unittest.c
+    testError("\\a",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1: Invalid character: \\");
+    testError("\\",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1: Invalid character: \\");
+    testError("\\\"\\\"",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1: Invalid character: \\");
+
+    // want a map
+    testError("[]\n",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.1: syntax error, unexpected [, "
+              "expecting {");
+    testError("[]\n",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.1: syntax error, unexpected [, "
+              "expecting {");
+    testError("{ 123 }\n",
+              ParserContext::PARSER_JSON,
+              "<string>:1.3-5: syntax error, unexpected integer, "
+              "expecting }");
+    testError("{ 123 }\n",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.3-5: syntax error, unexpected integer");
+    testError("{ \"foo\" }\n",
+              ParserContext::PARSER_JSON,
+              "<string>:1.9: syntax error, unexpected }, "
+              "expecting :");
+    testError("{ \"foo\" }\n",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.9: syntax error, unexpected }, expecting :");
+    testError("{ \"foo\":null }\n",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.3-7: got unexpected keyword "
+              "\"foo\" in map.");
+    testError("{ \"Control-agent\" }\n",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.11: syntax error, unexpected }, "
+              "expecting :");
+    testError("{ \"Control-agent\":[]\n",
+              ParserContext::PARSER_AGENT,
+              "<string>:2.1: syntax error, unexpected end of file, "
+              "expecting \",\" or }");
+    testError("{}{}\n",
+              ParserContext::PARSER_JSON,
+              "<string>:1.3: syntax error, unexpected {, "
+              "expecting end of file");
+
+    // bad commas
+    testError("{ , }\n",
+              ParserContext::PARSER_JSON,
+              "<string>:1.3: syntax error, unexpected \",\", "
+              "expecting }");
+    testError("{ , \"foo\":true }\n",
+              ParserContext::PARSER_JSON,
+              "<string>:1.3: syntax error, unexpected \",\", "
+              "expecting }");
+    testError("{ \"foo\":true, }\n",
+              ParserContext::PARSER_JSON,
+              "<string>:1.15: syntax error, unexpected }, "
+              "expecting constant string");
+
+    // bad type
+    testError("{ \"Control-agent\":{\n"
+              "  \"http-port\":false }}\n",
+              ParserContext::PARSER_AGENT,
+              "<string>:2.15-19: syntax error, unexpected boolean, "
+              "expecting integer");
+
+    // unknown keyword
+    testError("{ \"Control-agent\":{\n"
+              " \"topping\": \"Mozarella\" }}\n",
+              ParserContext::PARSER_AGENT,
+              "<string>:2.2-10: got unexpected keyword "
+              "\"topping\" in map.");
+}
+
+// Check unicode escapes
+TEST(ParserTest, unicodeEscapes) {
+    ConstElementPtr result;
+    string json;
+
+    // check we can reread output
+    for (char c = -128; c < 127; ++c) {
+        string ins(" ");
+        ins[1] = c;
+        ConstElementPtr e(new StringElement(ins));
+        json = e->str();
+        ASSERT_NO_THROW(
+        try {
+            ParserContext ctx;
+            result = ctx.parseString(json, ParserContext::PARSER_JSON);
+        } catch (const std::exception &x) {
+            cout << "EXCEPTION: " << x.what() << endl;
+            throw;
+        });
+        ASSERT_EQ(Element::string, result->getType());
+        EXPECT_EQ(ins, result->stringValue());
+    }
+}
+
+// This test checks that all representations of a slash are recognized properly.
+TEST(ParserTest, unicodeSlash) {
+    // check the 4 possible encodings of solidus '/'
+    ConstElementPtr result;
+    string json = "\"/\\/\\u002f\\u002F\"";
+    ASSERT_NO_THROW(
+    try {
+        ParserContext ctx;
+        result = ctx.parseString(json, ParserContext::PARSER_JSON);
+    } catch (const std::exception &x) {
+        cout << "EXCEPTION: " << x.what() << endl;
+        throw;
+    });
+    ASSERT_EQ(Element::string, result->getType());
+    EXPECT_EQ("////", result->stringValue());
+}
+
+}; // end of isc::agent::test namespace
+}; // end of isc::agent namespace
+}; // end of isc namespace
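
For anyone wiring the new parser into the agent itself, the tests above reduce to the following call pattern. This is a minimal sketch, not part of this patch: the configuration file path is an assumption, and errors are caught as std::exception to avoid relying on the exact exception hierarchy.

    #include <agent/parser_context.h>
    #include <cc/data.h>
    #include <iostream>

    int main() {
        try {
            isc::agent::ParserContext ctx;

            // Parse a full Control-agent configuration file, exactly as the
            // PARSER_AGENT test cases above do for in-memory strings.
            isc::data::ConstElementPtr cfg = ctx.parseFile(
                "/usr/local/etc/kea/kea-ctrl-agent.conf",   // assumed path
                isc::agent::ParserContext::PARSER_AGENT);

            // Dump the resulting Element tree.
            std::cout << cfg->str() << std::endl;
        } catch (const std::exception& ex) {
            std::cerr << "Parse failed: " << ex.what() << std::endl;
            return 1;
        }
        return 0;
    }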