
[master] Merge branch 'trac5076' (flex/bison for control agent)

Tomek Mrugalski 8 years ago
parent
commit
d99048aa5b

+ 2 - 1
doc/Makefile.am

@@ -6,7 +6,8 @@ EXTRA_DIST += devel/contribute.dox
 EXTRA_DIST += devel/mainpage.dox
 EXTRA_DIST += devel/unit-tests.dox
 
-nobase_dist_doc_DATA  = examples/ddns/sample1.json
+nobase_dist_doc_DATA  = examples/agent/simple.json
+nobase_dist_doc_DATA += examples/ddns/sample1.json
 nobase_dist_doc_DATA += examples/ddns/template.json
 nobase_dist_doc_DATA += examples/kea4/advanced.json
 nobase_dist_doc_DATA += examples/kea4/backends.json

+ 80 - 0
doc/examples/agent/simple.json

@@ -0,0 +1,80 @@
+// This is a simple example of a configuration for the Control-Agent (CA), or simply
+// the Agent. This server provides a RESTful interface for all Kea servers.
+{
+    "Control-agent":
+    {
+	// We need to specify where the agent should listen to incoming HTTP
+	// queries. Note that the agent does not provide SSL or TLS protection
+	// on its own, so limiting the traffic to localhost is a good idea.
+	"http-host": "localhost",
+
+	// Another mandatory parameter is the HTTP port.
+	"http-port": 8000,
+
+	// This map specifies where the control channel of each server is
+	// configured to listen. See the 'control-socket' object in the respective
+	// servers. At this time the only supported socket type is "unix".
+	// Make sure that the Agent's and the respective servers' configurations
+	// match exactly, otherwise they won't be able to communicate.
+	"control-sockets":
+	{
+	    // This is how the Agent can communicate with the DHCPv4 server.
+	    "dhcp4-server":
+	    {
+		"socket-type": "unix",
+		"socket-name": "/path/to/the/unix/socket-v4"
+	    },
+
+	    // Location of the DHCPv6 command channel socket.
+	    "dhcp6-server":
+	    {
+		"socket-type": "unix",
+		"socket-name": "/path/to/the/unix/socket-v6"
+	    },
+
+	    // Currently DHCP-DDNS (nicknamed D2) does not yet support the
+	    // command channel, but we hope this will change in the
+	    // future.
+	    "d2-server":
+	    {
+		"socket-type": "unix",
+		"socket-name": "/path/to/the/unix/socket-d2"
+	    }
+	},
+
+	// CA is able to load hook libraries that augment its operation.
+	// The primary functionality is the ability to add new commands.
+	"hooks-libraries": [
+	    // Hook libraries list may contain more than one library.
+	    {
+		// The only necessary parameter is the library filename.
+		"library": "/opt/local/control-agent-commands.so",
+
+		// Some libraries may support parameters. Make sure you
+		// type this section carefully, as the CA does not validate
+		// it (because the format is library specific).
+		"parameters": {
+		    "param1": "foo"
+		}
+	    }
+	]
+
+    },
+
+    // Similar to other Kea components, CA also uses logging.
+    "Logging":
+    {
+	"loggers": [
+	    {
+		"name": "kea-ctrl-agent",
+		"output_options": [
+		    {
+			"output": "/var/log/kea-ctrl-agent.log"
+		    }
+		],
+		"severity": "INFO",
+		"debuglevel": 0
+	    }
+	]
+    }
+}

+ 31 - 0
src/bin/agent/Makefile.am

@@ -46,6 +46,9 @@ libagent_la_SOURCES  = ctrl_agent_cfg_mgr.cc ctrl_agent_cfg_mgr.h
 libagent_la_SOURCES += ctrl_agent_controller.cc ctrl_agent_controller.h
 libagent_la_SOURCES += ctrl_agent_log.cc ctrl_agent_log.h
 libagent_la_SOURCES += ctrl_agent_process.cc ctrl_agent_process.h
+libagent_la_SOURCES += agent_parser.cc agent_parser.h
+libagent_la_SOURCES += parser_context.cc parser_context.h parser_context_decl.h
+libagent_la_SOURCES += agent_lexer.ll
 
 nodist_libagent_la_SOURCES = ctrl_agent_messages.h ctrl_agent_messages.cc
 EXTRA_DIST += ctrl_agent_messages.mes
@@ -76,3 +79,31 @@ kea_ctrl_agent_LDADD += $(LOG4CPLUS_LIBS) $(CRYPTO_LIBS) $(BOOST_LIBS)
 
 kea_ctrl_agent_LDFLAGS = $(AM_LDFLAGS) $(CRYPTO_LDFLAGS)
 
+
+
+if GENERATE_PARSER
+
+parser: agent_lexer.cc location.hh position.hh stack.hh agent_parser.cc agent_parser.h
+	@echo "Flex/bison files regenerated"
+
+# --- Flex/Bison stuff below --------------------------------------------------
+# When debugging grammar issues, it's useful to add -v to the bison parameters.
+# bison will then generate a parser.output file that explains the whole grammar.
+# It can be used to manually follow what's going on in the parser.
+# This is especially useful if yydebug_ is set to 1, as that variable
+# will cause the parser to print out its internal state.
+# Call flex with -s to check that the default rule can be suppressed.
+# Call bison with -W to get warnings like unmarked empty rules.
+# Note: the 'register' keyword, deprecated in C++11, is still used by flex < 2.6.0.
+location.hh position.hh stack.hh agent_parser.cc agent_parser.h: agent_parser.yy
+	$(YACC) --defines=agent_parser.h --report=all --report-file=agent_parser.report -o agent_parser.cc agent_parser.yy
+
+agent_lexer.cc: agent_lexer.ll
+	$(LEX) --prefix agent_ -o agent_lexer.cc agent_lexer.ll
+
+else
+
+parser location.hh position.hh stack.hh agent_parser.cc agent_parser.h agent_lexer.cc:
+	@echo Parser generation disabled. Configure with --enable-generate-parser to enable it.
+
+endif

File diff suppressed because it is too large
+ 3466 - 0
src/bin/agent/agent_lexer.cc


+ 668 - 0
src/bin/agent/agent_lexer.ll

@@ -0,0 +1,668 @@
+/* Copyright (C) 2017 Internet Systems Consortium, Inc. ("ISC")
+
+   This Source Code Form is subject to the terms of the Mozilla Public
+   License, v. 2.0. If a copy of the MPL was not distributed with this
+   file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+%{ /* -*- C++ -*- */
+#include <cerrno>
+#include <climits>
+#include <cstdlib>
+#include <string>
+#include <agent/parser_context.h>
+#include <asiolink/io_address.h>
+#include <boost/lexical_cast.hpp>
+#include <exceptions/exceptions.h>
+#include <cc/dhcp_config_error.h>
+
+// Work around an incompatibility in flex (at least versions
+// 2.5.31 through 2.5.33): it generates code that does
+// not conform to C89.  See Debian bug 333231
+// <http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=333231>.
+# undef yywrap
+# define yywrap() 1
+
+namespace {
+
+bool start_token_flag = false;
+
+isc::agent::ParserContext::ParserType start_token_value;
+unsigned int comment_start_line = 0;
+
+using namespace isc;
+using isc::agent::AgentParser;
+
+};
+
+// To avoid the call to exit... oops!
+#define YY_FATAL_ERROR(msg) isc::agent::ParserContext::fatal(msg)
+%}
+
+/* noyywrap disables automatic rewinding for the next file to parse. Since we
+   always parse only a single string, there's no need to do any wraps. And
+   using yywrap requires linking with -lfl, which provides the default yywrap
+   implementation that always returns 1 anyway. */
+%option noyywrap
+
+/* nounput simplifies the lexer, by removing support for putting a character
+   back into the input stream. We never use such capability anyway. */
+%option nounput
+
+/* batch means that we'll never use the generated lexer interactively. */
+%option batch
+
+/* Avoid static global variables in order to remain C++ friendly. */
+/* As a last resort, %option reentrant could be used. */
+
+/* Enables debug mode. To see the debug messages, one needs to also set
+   yy_flex_debug to 1, then the debug messages will be printed on stderr. */
+%option debug
+
+/* I have no idea what this option does, except it was specified in the bison
+   examples and Postgres folks added it to remove gcc 4.3 warnings. Let's
+   be on the safe side and keep it. */
+%option noinput
+
+%x COMMENT
+%x DIR_ENTER DIR_INCLUDE DIR_EXIT
+
+/* These are not token expressions yet, just convenience expressions that
+   can be used during actual token definitions. Note some can match
+   incorrect inputs (e.g., IP addresses) which must be checked. */
+int   \-?[0-9]+
+blank [ \t\r]
+
+UnicodeEscapeSequence           u[0-9A-Fa-f]{4}
+JSONEscapeCharacter             ["\\/bfnrt]
+JSONEscapeSequence              {JSONEscapeCharacter}|{UnicodeEscapeSequence}
+JSONStandardCharacter           [^\x00-\x1f"\\]
+JSONStringCharacter             {JSONStandardCharacter}|\\{JSONEscapeSequence}
+JSONString                      \"{JSONStringCharacter}*\"
+
+/* for errors */
+
+BadUnicodeEscapeSequence        u[0-9A-Fa-f]{0,3}[^0-9A-Fa-f]
+BadJSONEscapeSequence           [^"\\/bfnrtu]|{BadUnicodeEscapeSequence}
+ControlCharacter                [\x00-\x1f]
+ControlCharacterFill            [^"\\]|\\{JSONEscapeSequence}
+
+%{
+// This code is run each time a pattern is matched. It updates the location
+// by moving it ahead by yyleng bytes. yyleng specifies the length of the
+// currently matched token.
+#define YY_USER_ACTION  driver.loc_.columns(yyleng);
+%}
+
+%%
+
+%{
+    // This part of the code is copied verbatim to the top
+    // of the generated yylex function. Explanation:
+    // http://www.gnu.org/software/bison/manual/html_node/Multiple-start_002dsymbols.html
+
+    // Code run each time yylex is called.
+    driver.loc_.step();
+
+    // We currently have 3 entry points defined:
+    // START_JSON - which expects any valid JSON
+    // START_AGENT - which expects a full configuration (with the outer map and the
+    //               Control-agent object in it)
+    // START_SUB_AGENT - which expects only the content of the Control-agent; this is
+    //                   primarily useful for testing.
+    if (start_token_flag) {
+        start_token_flag = false;
+        switch (start_token_value) {
+        case ParserContext::PARSER_JSON:
+        default:
+            return isc::agent::AgentParser::make_START_JSON(driver.loc_);
+        case ParserContext::PARSER_AGENT:
+            return isc::agent::AgentParser::make_START_AGENT(driver.loc_);
+        case ParserContext::PARSER_SUB_AGENT:
+            return isc::agent::AgentParser::make_START_SUB_AGENT(driver.loc_);
+        }
+    }
+%}
+
+#.* ;
+
+"//"(.*) ;
+
+"/*" {
+  BEGIN(COMMENT);
+  comment_start_line = driver.loc_.end.line;
+}
+
+<COMMENT>"*/" BEGIN(INITIAL);
+<COMMENT>. ;
+<COMMENT><<EOF>> {
+    isc_throw(ParseError, "Comment not closed. (/* in line " << comment_start_line);
+}
+
+"<?" BEGIN(DIR_ENTER);
+<DIR_ENTER>"include" BEGIN(DIR_INCLUDE);
+<DIR_INCLUDE>\"([^\"\n])+\" {
+    // Include directive.
+
+    // Extract the filename.
+    std::string tmp(yytext+1);
+    tmp.resize(tmp.size() - 1);
+
+    driver.includeFile(tmp);
+}
+<DIR_ENTER,DIR_INCLUDE,DIR_EXIT><<EOF>> {
+    isc_throw(ParseError, "Directive not closed.");
+}
+<DIR_EXIT>"?>" BEGIN(INITIAL);
+
+
+<*>{blank}+   {
+    // Ok, we found whitespace. Let's ignore it and update the loc variable.
+    driver.loc_.step();
+}
+
+<*>[\n]+      {
+    // Newline found. Let's update the location and continue.
+    driver.loc_.lines(yyleng);
+    driver.loc_.step();
+}
+
+
+\"Control-agent\" {
+    switch(driver.ctx_) {
+    case ParserContext::CONFIG:
+        return AgentParser::make_CONTROL_AGENT(driver.loc_);
+    default:
+        return AgentParser::make_STRING("Control-agent", driver.loc_);
+    }
+}
+
+\"http-host\" {
+    switch(driver.ctx_) {
+    case ParserContext::AGENT:
+        return AgentParser::make_HTTP_HOST(driver.loc_);
+    default:
+        return AgentParser::make_STRING("http-host", driver.loc_);
+    }
+}
+
+\"http-port\" {
+    switch(driver.ctx_) {
+    case ParserContext::AGENT:
+        return AgentParser::make_HTTP_PORT(driver.loc_);
+    default:
+        return AgentParser::make_STRING("http-port", driver.loc_);
+    }
+}
+
+\"control-sockets\" {
+    switch(driver.ctx_) {
+    case ParserContext::AGENT:
+        return AgentParser::make_CONTROL_SOCKETS(driver.loc_);
+    default:
+        return AgentParser::make_STRING("control-sockets", driver.loc_);
+    }
+}
+
+\"dhcp4-server\" {
+    switch(driver.ctx_) {
+    case ParserContext::CONTROL_SOCKETS:
+        return AgentParser::make_DHCP4_SERVER(driver.loc_);
+    default:
+        return AgentParser::make_STRING("dhcp4-server", driver.loc_);
+    }
+}
+
+\"dhcp6-server\" {
+    switch(driver.ctx_) {
+    case ParserContext::CONTROL_SOCKETS:
+        return AgentParser::make_DHCP6_SERVER(driver.loc_);
+    default:
+        return AgentParser::make_STRING("dhcp6-server", driver.loc_);
+    }
+}
+
+\"d2-server\" {
+    switch(driver.ctx_) {
+    case ParserContext::CONTROL_SOCKETS:
+        return AgentParser::make_D2_SERVER(driver.loc_);
+    default:
+        return AgentParser::make_STRING("d2-server", driver.loc_);
+    }
+}
+
+\"socket-name\" {
+    switch(driver.ctx_) {
+    case ParserContext::SERVER:
+        return AgentParser::make_SOCKET_NAME(driver.loc_);
+    default:
+        return AgentParser::make_STRING("socket-name", driver.loc_);
+    }
+}
+
+\"socket-type\" {
+    switch(driver.ctx_) {
+    case ParserContext::SERVER:
+        return AgentParser::make_SOCKET_TYPE(driver.loc_);
+    default:
+        return AgentParser::make_STRING("socket-type", driver.loc_);
+    }
+}
+
+\"unix\" {
+    switch(driver.ctx_) {
+    case ParserContext::SOCKET_TYPE:
+        return AgentParser::make_UNIX(driver.loc_);
+    default:
+        return AgentParser::make_STRING("unix", driver.loc_);
+    }
+}
+
+\"hooks-libraries\" {
+    switch(driver.ctx_) {
+    case ParserContext::AGENT:
+        return AgentParser::make_HOOKS_LIBRARIES(driver.loc_);
+    default:
+        return AgentParser::make_STRING("hooks-libraries", driver.loc_);
+    }
+}
+
+\"library\" {
+    switch(driver.ctx_) {
+    case ParserContext::HOOKS_LIBRARIES:
+        return AgentParser::make_LIBRARY(driver.loc_);
+    default:
+        return AgentParser::make_STRING("library", driver.loc_);
+    }
+}
+
+\"parameters\" {
+    switch(driver.ctx_) {
+    case ParserContext::HOOKS_LIBRARIES:
+        return AgentParser::make_PARAMETERS(driver.loc_);
+    default:
+        return AgentParser::make_STRING("parameters", driver.loc_);
+    }
+}
+
+\"Logging\" {
+    switch(driver.ctx_) {
+    case ParserContext::CONFIG:
+        return AgentParser::make_LOGGING(driver.loc_);
+    default:
+        return AgentParser::make_STRING("Logging", driver.loc_);
+    }
+}
+
+\"loggers\" {
+    switch(driver.ctx_) {
+    case ParserContext::LOGGING:
+        return AgentParser::make_LOGGERS(driver.loc_);
+    default:
+        return AgentParser::make_STRING("loggers", driver.loc_);
+    }
+}
+
+\"name\" {
+    switch(driver.ctx_) {
+    case ParserContext::LOGGERS:
+        return AgentParser::make_NAME(driver.loc_);
+    default:
+        return AgentParser::make_STRING("name", driver.loc_);
+    }
+}
+
+\"output_options\" {
+    switch(driver.ctx_) {
+    case ParserContext::LOGGERS:
+        return AgentParser::make_OUTPUT_OPTIONS(driver.loc_);
+    default:
+        return AgentParser::make_STRING("output_options", driver.loc_);
+    }
+}
+
+\"output\" {
+    switch(driver.ctx_) {
+    case ParserContext::OUTPUT_OPTIONS:
+        return AgentParser::make_OUTPUT(driver.loc_);
+    default:
+        return AgentParser::make_STRING("output", driver.loc_);
+    }
+}
+
+\"debuglevel\" {
+    switch(driver.ctx_) {
+    case ParserContext::LOGGERS:
+        return AgentParser::make_DEBUGLEVEL(driver.loc_);
+    default:
+        return AgentParser::make_STRING("debuglevel", driver.loc_);
+    }
+}
+
+\"severity\" {
+    switch(driver.ctx_) {
+    case ParserContext::LOGGERS:
+        return AgentParser::make_SEVERITY(driver.loc_);
+    default:
+        return AgentParser::make_STRING("severity", driver.loc_);
+    }
+}
+
+\"Dhcp4\" {
+    switch(driver.ctx_) {
+    case ParserContext::CONFIG:
+        return AgentParser::make_DHCP4(driver.loc_);
+    default:
+        return AgentParser::make_STRING("Dhcp4", driver.loc_);
+    }
+}
+
+\"Dhcp6\" {
+    switch(driver.ctx_) {
+    case ParserContext::CONFIG:
+        return AgentParser::make_DHCP6(driver.loc_);
+    default:
+        return AgentParser::make_STRING("Dhcp6", driver.loc_);
+    }
+}
+
+\"DhcpDdns\" {
+    switch(driver.ctx_) {
+    case ParserContext::CONFIG:
+        return AgentParser::make_DHCPDDNS(driver.loc_);
+    default:
+        return AgentParser::make_STRING("DhcpDdns", driver.loc_);
+    }
+}
+
+{JSONString} {
+    // A string has been matched. It contains the actual string and the surrounding
+    // double quotes. We need to get those quotes out of the way and just use the
+    // content, e.g. for "foo" we should get foo.
+    std::string raw(yytext+1);
+    size_t len = raw.size() - 1;
+    raw.resize(len);
+    std::string decoded;
+    decoded.reserve(len);
+    for (size_t pos = 0; pos < len; ++pos) {
+        int b = 0;
+        char c = raw[pos];
+        switch (c) {
+        case '"':
+            // impossible condition
+            driver.error(driver.loc_, "Bad quote in \"" + raw + "\"");
+        case '\\':
+            ++pos;
+            if (pos >= len) {
+                // impossible condition
+                driver.error(driver.loc_, "Overflow escape in \"" + raw + "\"");
+            }
+            c = raw[pos];
+            switch (c) {
+            case '"':
+            case '\\':
+            case '/':
+                decoded.push_back(c);
+                break;
+            case 'b':
+                decoded.push_back('\b');
+                break;
+            case 'f':
+                decoded.push_back('\f');
+                break;
+            case 'n':
+                decoded.push_back('\n');
+                break;
+            case 'r':
+                decoded.push_back('\r');
+                break;
+            case 't':
+                decoded.push_back('\t');
+                break;
+            case 'u':
+                // support only \u0000 to \u00ff
+                ++pos;
+                if (pos + 4 > len) {
+                    // impossible condition
+                    driver.error(driver.loc_,
+                                 "Overflow unicode escape in \"" + raw + "\"");
+                }
+                if ((raw[pos] != '0') || (raw[pos + 1] != '0')) {
+                    driver.error(driver.loc_, "Unsupported unicode escape in \"" + raw + "\"");
+                }
+                pos += 2;
+                c = raw[pos];
+                if ((c >= '0') && (c <= '9')) {
+                    b = (c - '0') << 4;
+                } else if ((c >= 'A') && (c <= 'F')) {
+                    b = (c - 'A' + 10) << 4;
+                } else if ((c >= 'a') && (c <= 'f')) {
+                    b = (c - 'a' + 10) << 4;
+                } else {
+                    // impossible condition
+                    driver.error(driver.loc_, "Not hexadecimal in unicode escape in \"" + raw + "\"");
+                }
+                pos++;
+                c = raw[pos];
+                if ((c >= '0') && (c <= '9')) {
+                    b |= c - '0';
+                } else if ((c >= 'A') && (c <= 'F')) {
+                    b |= c - 'A' + 10;
+                } else if ((c >= 'a') && (c <= 'f')) {
+                    b |= c - 'a' + 10;
+                } else {
+                    // impossible condition
+                    driver.error(driver.loc_, "Not hexadecimal in unicode escape in \"" + raw + "\"");
+                }
+                decoded.push_back(static_cast<char>(b & 0xff));
+                break;
+            default:
+                // impossible condition
+                driver.error(driver.loc_, "Bad escape in \"" + raw + "\"");
+            }
+            break;
+        default:
+            if ((c >= 0) && (c < 0x20)) {
+                // impossible condition
+                driver.error(driver.loc_, "Invalid control in \"" + raw + "\"");
+            }
+            decoded.push_back(c);
+        }
+    }
+
+    return AgentParser::make_STRING(decoded, driver.loc_);
+}
+
+\"{JSONStringCharacter}*{ControlCharacter}{ControlCharacterFill}*\" {
+    // Bad string with a forbidden control character inside
+    driver.error(driver.loc_, "Invalid control in " + std::string(yytext));
+}
+
+\"{JSONStringCharacter}*\\{BadJSONEscapeSequence}[^\x00-\x1f"]*\" {
+    // Bad string with a bad escape inside
+    driver.error(driver.loc_, "Bad escape in " + std::string(yytext));
+}
+
+\"{JSONStringCharacter}*\\\" {
+    // Bad string with an open escape at the end
+    driver.error(driver.loc_, "Overflow escape in " + std::string(yytext));
+}
+
+"["    { return AgentParser::make_LSQUARE_BRACKET(driver.loc_); }
+"]"    { return AgentParser::make_RSQUARE_BRACKET(driver.loc_); }
+"{"    { return AgentParser::make_LCURLY_BRACKET(driver.loc_); }
+"}"    { return AgentParser::make_RCURLY_BRACKET(driver.loc_); }
+","    { return AgentParser::make_COMMA(driver.loc_); }
+":"    { return AgentParser::make_COLON(driver.loc_); }
+
+{int} {
+    // An integer was found.
+    std::string tmp(yytext);
+    int64_t integer = 0;
+    try {
+        // In substring we want to use negative values (e.g. -1).
+        // In enterprise-id we need to use values up to 0xffffffff.
+        // To cover both of those use cases, we need at least
+        // int64_t.
+        integer = boost::lexical_cast<int64_t>(tmp);
+    } catch (const boost::bad_lexical_cast &) {
+        driver.error(driver.loc_, "Failed to convert " + tmp + " to an integer.");
+    }
+
+    // The parser needs the string form as double conversion is not lossless.
+    return AgentParser::make_INTEGER(integer, driver.loc_);
+}
+
+[-+]?[0-9]*\.?[0-9]*([eE][-+]?[0-9]+)? {
+    // A floating point was found.
+    std::string tmp(yytext);
+    double fp = 0.0;
+    try {
+        fp = boost::lexical_cast<double>(tmp);
+    } catch (const boost::bad_lexical_cast &) {
+        driver.error(driver.loc_, "Failed to convert " + tmp + " to a floating point.");
+    }
+
+    return AgentParser::make_FLOAT(fp, driver.loc_);
+}
+
+true|false {
+    string tmp(yytext);
+    return AgentParser::make_BOOLEAN(tmp == "true", driver.loc_);
+}
+
+null {
+   return AgentParser::make_NULL_TYPE(driver.loc_);
+}
+
+(?i:true) driver.error (driver.loc_, "JSON true reserved keyword is lower case only");
+
+(?i:false) driver.error (driver.loc_, "JSON false reserved keyword is lower case only");
+
+(?i:null) driver.error (driver.loc_, "JSON null reserved keyword is lower case only");
+
+<*>.   driver.error (driver.loc_, "Invalid character: " + std::string(yytext));
+
+<<EOF>> {
+    if (driver.states_.empty()) {
+        return AgentParser::make_END(driver.loc_);
+    }
+    driver.loc_ = driver.locs_.back();
+    driver.locs_.pop_back();
+    driver.file_ = driver.files_.back();
+    driver.files_.pop_back();
+    if (driver.sfile_) {
+        fclose(driver.sfile_);
+        driver.sfile_ = 0;
+    }
+    if (!driver.sfiles_.empty()) {
+        driver.sfile_ = driver.sfiles_.back();
+        driver.sfiles_.pop_back();
+    }
+    agent__delete_buffer(YY_CURRENT_BUFFER);
+    agent__switch_to_buffer(driver.states_.back());
+    driver.states_.pop_back();
+
+    BEGIN(DIR_EXIT);
+}
+
+%%
+
+using namespace isc::dhcp;
+
+void
+ParserContext::scanStringBegin(const std::string& str, ParserType parser_type)
+{
+    start_token_flag = true;
+    start_token_value = parser_type;
+
+    file_ = "<string>";
+    sfile_ = 0;
+    loc_.initialize(&file_);
+    yy_flex_debug = trace_scanning_;
+    YY_BUFFER_STATE buffer;
+    buffer = agent__scan_bytes(str.c_str(), str.size());
+    if (!buffer) {
+        fatal("cannot scan string");
+        // fatal() throws an exception so this can't be reached
+    }
+}
+
+void
+ParserContext::scanFileBegin(FILE * f,
+                              const std::string& filename,
+                              ParserType parser_type)
+{
+    start_token_flag = true;
+    start_token_value = parser_type;
+
+    file_ = filename;
+    sfile_ = f;
+    loc_.initialize(&file_);
+    yy_flex_debug = trace_scanning_;
+    YY_BUFFER_STATE buffer;
+
+    // See dhcp6_lexer.cc header for available definitions
+    buffer = agent__create_buffer(f, 65536 /*buffer size*/);
+    if (!buffer) {
+        fatal("cannot scan file " + filename);
+    }
+    agent__switch_to_buffer(buffer);
+}
+
+void
+ParserContext::scanEnd() {
+    if (sfile_)
+        fclose(sfile_);
+    sfile_ = 0;
+    static_cast<void>(agent_lex_destroy());
+    // Close files
+    while (!sfiles_.empty()) {
+        FILE* f = sfiles_.back();
+        if (f) {
+            fclose(f);
+        }
+        sfiles_.pop_back();
+    }
+    // Delete states
+    while (!states_.empty()) {
+        agent__delete_buffer(states_.back());
+        states_.pop_back();
+    }
+}
+
+void
+ParserContext::includeFile(const std::string& filename) {
+    if (states_.size() > 10) {
+        fatal("Too many nested includes.");
+    }
+
+    FILE* f = fopen(filename.c_str(), "r");
+    if (!f) {
+        fatal("Can't open include file " + filename);
+    }
+    if (sfile_) {
+        sfiles_.push_back(sfile_);
+    }
+    sfile_ = f;
+    states_.push_back(YY_CURRENT_BUFFER);
+    YY_BUFFER_STATE buffer;
+    buffer = agent__create_buffer(f, 65536 /*buffer size*/);
+    if (!buffer) {
+        fatal("Can't scan include file " + filename);
+    }
+    agent__switch_to_buffer(buffer);
+    files_.push_back(file_);
+    file_ = filename;
+    locs_.push_back(loc_);
+    loc_.initialize(&file_);
+
+    BEGIN(INITIAL);
+}
+
+namespace {
+/// To avoid unused function error
+class Dummy {
+    // cppcheck-suppress unusedPrivateFunction
+    void dummy() { yy_fatal_error("Fix me: how to disable its definition?"); }
+};
+}

File diff suppressed because it is too large
+ 1831 - 0
src/bin/agent/agent_parser.cc


File diff suppressed because it is too large
+ 1437 - 0
src/bin/agent/agent_parser.h


+ 575 - 0
src/bin/agent/agent_parser.yy

@@ -0,0 +1,575 @@
+/* Copyright (C) 2017 Internet Systems Consortium, Inc. ("ISC")
+
+   This Source Code Form is subject to the terms of the Mozilla Public
+   License, v. 2.0. If a copy of the MPL was not distributed with this
+   file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+%skeleton "lalr1.cc" /* -*- C++ -*- */
+%require "3.0.0"
+%defines
+%define parser_class_name {AgentParser}
+%define api.prefix {agent_}
+%define api.token.constructor
+%define api.value.type variant
+%define api.namespace {isc::agent}
+%define parse.assert
+%code requires
+{
+#include <string>
+#include <cc/data.h>
+#include <boost/lexical_cast.hpp>
+#include <agent/parser_context_decl.h>
+
+using namespace isc::agent;
+using namespace isc::data;
+using namespace std;
+}
+// The parsing context.
+%param { isc::agent::ParserContext& ctx }
+%locations
+%define parse.trace
+%define parse.error verbose
+%code
+{
+#include <agent/parser_context.h>
+}
+
+
+%define api.token.prefix {TOKEN_}
+// Tokens in an order which makes sense and is related to the intended use.
+// Actual regexps for tokens are defined in agent_lexer.ll.
+%token
+  END  0  "end of file"
+  COMMA ","
+  COLON ":"
+  LSQUARE_BRACKET "["
+  RSQUARE_BRACKET "]"
+  LCURLY_BRACKET "{"
+  RCURLY_BRACKET "}"
+  NULL_TYPE "null"
+
+  CONTROL_AGENT "Control-agent"
+  HTTP_HOST "http-host"
+  HTTP_PORT "http-port"
+
+  CONTROL_SOCKETS "control-sockets"
+  DHCP4_SERVER "dhcp4-server"
+  DHCP6_SERVER "dhcp6-server"
+  D2_SERVER "d2-server"
+  SOCKET_NAME "socket-name"
+  SOCKET_TYPE "socket-type"
+  UNIX "unix"
+
+  HOOKS_LIBRARIES "hooks-libraries"
+  LIBRARY "library"
+  PARAMETERS "parameters"
+
+  LOGGING "Logging"
+  LOGGERS "loggers"
+  NAME "name"
+  OUTPUT_OPTIONS "output_options"
+  OUTPUT "output"
+  DEBUGLEVEL "debuglevel"
+  SEVERITY "severity"
+
+  DHCP4 "Dhcp4"
+  DHCP6 "Dhcp6"
+  DHCPDDNS "DhcpDdns"
+
+  // Not real tokens, just a way to signal what the parser is expected to
+  // parse. This defines the starting point. It can either be the full grammar
+  // (START_AGENT), the part of the grammar related to the Control-agent (START_SUB_AGENT)
+  // or any valid JSON (START_JSON).
+  START_JSON
+  START_AGENT
+  START_SUB_AGENT
+;
+
+%token <std::string> STRING "constant string"
+%token <int64_t> INTEGER "integer"
+%token <double> FLOAT "floating point"
+%token <bool> BOOLEAN "boolean"
+
+%type <ElementPtr> value
+%type <ElementPtr> socket_type_value
+
+%printer { yyoutput << $$; } <*>;
+
+%%
+
+// The whole grammar starts with a map, because the config file
+// consists of Control-agent, DhcpX, Logging and DhcpDdns entries in one big { }.
+%start start;
+
+// The starting token can be one of those listed below. Note these are
+// "fake" tokens. They're produced by the lexer before any input text
+// is parsed.
+start: START_JSON      { ctx.ctx_ = ctx.NO_KEYWORDS; } json
+     | START_AGENT     { ctx.ctx_ = ctx.CONFIG; } agent_syntax_map
+     | START_SUB_AGENT { ctx.ctx_ = ctx.AGENT; } sub_agent 
+     ;
+
+// This rule defines a "shortcut". Instead of specifying the whole structure
+// expected by the full grammar, we can tell the parser to start from the content
+// of the Control-agent. This is very useful for unit testing, so we don't need
+// to repeat the outer map and the "Control-agent" map. We can simply provide
+// the contents of that map.
+sub_agent: LCURLY_BRACKET {
+    // Parse the Control-agent map
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.push_back(m);
+} global_params RCURLY_BRACKET {
+    // parsing completed
+};
+
+// --- generic JSON parser -----------------------------------------------------
+
+// json expression can be a value. What value means is defined below.
+json: value {
+    // Push back the JSON value on the stack
+    ctx.stack_.push_back($1);
+};
+
+// Rules for value. This can be one of the primary types allowed in JSON.
+value: INTEGER { $$ = ElementPtr(new IntElement($1, ctx.loc2pos(@1))); }
+     | FLOAT { $$ = ElementPtr(new DoubleElement($1, ctx.loc2pos(@1))); }
+     | BOOLEAN { $$ = ElementPtr(new BoolElement($1, ctx.loc2pos(@1))); }
+     | STRING { $$ = ElementPtr(new StringElement($1, ctx.loc2pos(@1))); }
+     | NULL_TYPE { $$ = ElementPtr(new NullElement(ctx.loc2pos(@1))); }
+     | map { $$ = ctx.stack_.back(); ctx.stack_.pop_back(); }
+     | list_generic { $$ = ctx.stack_.back(); ctx.stack_.pop_back(); }
+     ;
+
+// Rule for map. It will start with {, have some content and will end with }.
+map: LCURLY_BRACKET {
+    // This code is executed when we're about to start parsing
+    // the content of the map
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.push_back(m);
+} map_content RCURLY_BRACKET {
+    // map parsing completed. If we ever want to do any wrap up
+    // (maybe some sanity checking), this would be the best place
+    // for it.
+};
+
+// Rule for map content. In some cases it is allowed to have an empty map,
+// so we should say that explicitly. In most cases, though, there will
+// be some actual content inside. That's defined by not_empty_map
+map_content: %empty // empty map
+           | not_empty_map
+           ;
+
+// Rule for content of the map. It can have one of two formats:
+// 1) string: value
+// 2) non_empty_map , string: value
+// The first case covers a single entry, while the second case
+// covers all longer lists recursively.
+not_empty_map: STRING COLON value {
+                  // map containing a single entry
+                  ctx.stack_.back()->set($1, $3);
+                  }
+             | not_empty_map COMMA STRING COLON value {
+                  // map consisting of a shorter map followed by
+                  // comma and string:value
+                  ctx.stack_.back()->set($3, $5);
+                  }
+             ;
+
+list_generic: LSQUARE_BRACKET {
+    ElementPtr l(new ListElement(ctx.loc2pos(@1)));
+    ctx.stack_.push_back(l);
+} list_content RSQUARE_BRACKET {
+};
+
+list_content: %empty // Empty list
+            | not_empty_list
+            ;
+
+not_empty_list: value {
+                  // List consisting of a single element.
+                  ctx.stack_.back()->add($1);
+                  }
+              | not_empty_list COMMA value {
+                  // List ending with , and a value.
+                  ctx.stack_.back()->add($3);
+                  }
+              ;
+
+// --- generic JSON parser ends here -------------------------------------------
+
+// --- syntax checking parser starts here --------------------------------------
+
+// Unknown keyword in a map. This clever rule can be added to any map
+// if you want to have a nice error message printed when an unknown (mistyped?)
+// parameter is found.
+unknown_map_entry: STRING COLON {
+    const std::string& where = ctx.contextName();
+    const std::string& keyword = $1;
+    error(@1,
+          "got unexpected keyword \"" + keyword + "\" in " + where + " map.");
+};
+
+// This defines the top-level { } that holds Control-agent, Dhcp6, Dhcp4,
+// DhcpDdns or Logging objects.
+agent_syntax_map: LCURLY_BRACKET {
+    // This code is executed when we're about to start parsing
+    // the content of the map
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.push_back(m);
+} global_objects RCURLY_BRACKET {
+    // map parsing completed. If we ever want to do any wrap up
+    // (maybe some sanity checking), this would be the best place
+    // for it.
+};
+
+// This represents top-level entries: Control-agent, Logging, possibly others
+global_objects: global_object
+              | global_objects COMMA global_object
+              ;
+
+// This represents a single top level entry, e.g. Control-agent, Dhcp6 or DhcpDdns.
+global_object: agent_object
+             | logging_object
+             | dhcp4_json_object
+             | dhcp6_json_object
+             | dhcpddns_json_object
+             | unknown_map_entry
+             ;
+
+// This defines the Control-agent object.
+agent_object: CONTROL_AGENT {
+
+    // Let's create a MapElement that will represent it, add it to the
+    // top level map (that's already on the stack) and put the new map
+    // on the stack as well, so child elements will be able to add
+    // themselves to it.
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("Control-agent", m);
+    ctx.stack_.push_back(m);
+    ctx.enter(ctx.AGENT);
+} COLON LCURLY_BRACKET global_params RCURLY_BRACKET {
+    // Ok, we're done with parsing control-agent. Let's take the map
+    // off the stack.
+    ctx.stack_.pop_back();
+    ctx.leave();
+};
+
+global_params: global_param
+             | global_params COMMA global_param
+             ;
+
+// These are the parameters that are allowed at the top level of
+// the Control-agent object.
+global_param: http_host
+            | http_port
+            | control_sockets
+            | hooks_libraries
+            | unknown_map_entry
+            ;
+
+http_host: HTTP_HOST {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON STRING {
+    ElementPtr host(new StringElement($4, ctx.loc2pos(@4)));
+    ctx.stack_.back()->set("http-host", host);
+    ctx.leave();
+};
+
+http_port: HTTP_PORT COLON INTEGER {
+    ElementPtr prf(new IntElement($3, ctx.loc2pos(@3)));
+    ctx.stack_.back()->set("http-port", prf);
+};
+
+// --- hooks-libraries ---------------------------------------------------------
+hooks_libraries: HOOKS_LIBRARIES {
+    ElementPtr l(new ListElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("hooks-libraries", l);
+    ctx.stack_.push_back(l);
+    ctx.enter(ctx.HOOKS_LIBRARIES);
+} COLON LSQUARE_BRACKET hooks_libraries_list RSQUARE_BRACKET {
+    ctx.stack_.pop_back();
+    ctx.leave();
+};
+
+hooks_libraries_list: %empty
+                    | not_empty_hooks_libraries_list
+                    ;
+
+not_empty_hooks_libraries_list: hooks_library
+    | not_empty_hooks_libraries_list COMMA hooks_library
+    ;
+
+hooks_library: LCURLY_BRACKET {
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->add(m);
+    ctx.stack_.push_back(m);
+} hooks_params RCURLY_BRACKET {
+    ctx.stack_.pop_back();
+};
+
+hooks_params: hooks_param
+            | hooks_params COMMA hooks_param
+            | unknown_map_entry
+            ;
+
+hooks_param: library
+           | parameters
+           ;
+
+library: LIBRARY {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON STRING {
+    ElementPtr lib(new StringElement($4, ctx.loc2pos(@4)));
+    ctx.stack_.back()->set("library", lib);
+    ctx.leave();
+};
+
+parameters: PARAMETERS {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON value {
+    ctx.stack_.back()->set("parameters", $4);
+    ctx.leave();
+};
+
+// --- hooks-libraries end here ------------------------------------------------
+
+// --- control-sockets starts here ---------------------------------------------
+control_sockets: CONTROL_SOCKETS COLON LCURLY_BRACKET {
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("control-sockets", m);
+    ctx.stack_.push_back(m);
+    ctx.enter(ctx.CONTROL_SOCKETS);
+} control_sockets_params RCURLY_BRACKET {
+    ctx.stack_.pop_back();
+    ctx.leave();
+};
+
+// This defines what kind of control-sockets parameters we allow.
+// Note that empty map is not allowed here, because at least one control socket
+// is required.
+control_sockets_params: control_socket
+                      | control_sockets_params COMMA control_socket
+                      ;
+
+// We currently support three types of sockets: DHCPv4, DHCPv6 and D2
+// (even though D2 socket support is not yet implemented).
+control_socket: dhcp4_server_socket
+              | dhcp6_server_socket
+              | d2_server_socket
+              | unknown_map_entry
+              ;
+
+// That's an entry for dhcp4-server socket.
+dhcp4_server_socket: DHCP4_SERVER {
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("dhcp4-server", m);
+    ctx.stack_.push_back(m);
+    ctx.enter(ctx.SERVER);
+} COLON LCURLY_BRACKET control_socket_params RCURLY_BRACKET {
+    ctx.stack_.pop_back();
+    ctx.leave();
+};
+
+// That's an entry for dhcp6-server socket.
+dhcp6_server_socket: DHCP6_SERVER {
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("dhcp6-server", m);
+    ctx.stack_.push_back(m);
+    ctx.enter(ctx.SERVER);
+} COLON LCURLY_BRACKET control_socket_params RCURLY_BRACKET {
+    ctx.stack_.pop_back();
+    ctx.leave();
+};
+
+// That's an entry for d2-server socket.
+d2_server_socket: D2_SERVER {
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("d2-server", m);
+    ctx.stack_.push_back(m);
+    ctx.enter(ctx.SERVER);
+} COLON LCURLY_BRACKET control_socket_params RCURLY_BRACKET {
+    ctx.stack_.pop_back();
+    ctx.leave();
+};
+
+// Socket parameters consist of one or more parameters.
+control_socket_params: control_socket_param
+                     | control_socket_params COMMA control_socket_param
+                     ;
+
+// We currently support two socket parameters: type and name.
+control_socket_param: socket_name
+                    | socket_type
+                    ;
+
+// This rule defines socket-name parameter.
+socket_name: SOCKET_NAME {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON STRING {
+    ElementPtr name(new StringElement($4, ctx.loc2pos(@4)));
+    ctx.stack_.back()->set("socket-name", name);
+    ctx.leave();
+};
+
+// This rule specifies socket type.
+socket_type: SOCKET_TYPE {
+    ctx.enter(ctx.SOCKET_TYPE);
+} COLON socket_type_value {
+    ctx.stack_.back()->set("socket-type", $4);
+    ctx.leave();
+};
+
+// We currently allow only unix domain sockets
+socket_type_value : UNIX { $$ = ElementPtr(new StringElement("unix", ctx.loc2pos(@1))); }
+                  ;
+
+// --- control-sockets end here ------------------------------------------------
+
+// JSON entries for other global objects (Dhcp4,Dhcp6 and DhcpDdns)
+dhcp4_json_object: DHCP4 {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON value {
+    ctx.stack_.back()->set("Dhcp4", $4);
+    ctx.leave();
+};
+
+dhcp6_json_object: DHCP6 {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON value {
+    ctx.stack_.back()->set("Dhcp6", $4);
+    ctx.leave();
+};
+
+dhcpddns_json_object: DHCPDDNS {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON value {
+    ctx.stack_.back()->set("DhcpDdns", $4);
+    ctx.leave();
+};
+
+// --- Logging starts here -----------------------------------------------------
+
+// This defines the top level "Logging" object. It parses
+// the following "Logging": { ... }. The ... is defined
+// by logging_params
+logging_object: LOGGING {
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("Logging", m);
+    ctx.stack_.push_back(m);
+    ctx.enter(ctx.LOGGING);
+} COLON LCURLY_BRACKET logging_params RCURLY_BRACKET {
+    ctx.stack_.pop_back();
+    ctx.leave();
+};
+
+// This defines the list of allowed parameters that may appear
+// in the top-level Logging object. It can either be a single
+// parameter or several parameters separated by commas.
+logging_params: logging_param
+              | logging_params COMMA logging_param
+              ;
+
+// There's currently only one parameter defined, which is "loggers".
+logging_param: loggers;
+
+// "loggers", the only parameter currently defined in "Logging" object,
+// is "Loggers": [ ... ].
+loggers: LOGGERS {
+    ElementPtr l(new ListElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("loggers", l);
+    ctx.stack_.push_back(l);
+    ctx.enter(ctx.LOGGERS);
+}  COLON LSQUARE_BRACKET loggers_entries RSQUARE_BRACKET {
+    ctx.stack_.pop_back();
+    ctx.leave();
+};
+
+// These are the parameters allowed in loggers: either one logger
+// entry or multiple entries separated by commas.
+loggers_entries: logger_entry
+               | loggers_entries COMMA logger_entry
+               ;
+
+// This defines a single logger entry in loggers in Logging.
+logger_entry: LCURLY_BRACKET {
+    ElementPtr l(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->add(l);
+    ctx.stack_.push_back(l);
+} logger_params RCURLY_BRACKET {
+    ctx.stack_.pop_back();
+};
+
+logger_params: logger_param
+             | logger_params COMMA logger_param
+             ;
+
+logger_param: name
+            | output_options_list
+            | debuglevel
+            | severity
+            | unknown_map_entry
+            ;
+
+name: NAME {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON STRING {
+    ElementPtr name(new StringElement($4, ctx.loc2pos(@4)));
+    ctx.stack_.back()->set("name", name);
+    ctx.leave();
+};
+
+debuglevel: DEBUGLEVEL COLON INTEGER {
+    ElementPtr dl(new IntElement($3, ctx.loc2pos(@3)));
+    ctx.stack_.back()->set("debuglevel", dl);
+};
+
+severity: SEVERITY {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON STRING {
+    ElementPtr sev(new StringElement($4, ctx.loc2pos(@4)));
+    ctx.stack_.back()->set("severity", sev);
+    ctx.leave();
+};
+
+output_options_list: OUTPUT_OPTIONS {
+    ElementPtr l(new ListElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->set("output_options", l);
+    ctx.stack_.push_back(l);
+    ctx.enter(ctx.OUTPUT_OPTIONS);
+} COLON LSQUARE_BRACKET output_options_list_content RSQUARE_BRACKET {
+    ctx.stack_.pop_back();
+    ctx.leave();
+};
+
+output_options_list_content: output_entry
+                           | output_options_list_content COMMA output_entry
+                           ;
+
+output_entry: LCURLY_BRACKET {
+    ElementPtr m(new MapElement(ctx.loc2pos(@1)));
+    ctx.stack_.back()->add(m);
+    ctx.stack_.push_back(m);
+} output_params RCURLY_BRACKET {
+    ctx.stack_.pop_back();
+};
+
+output_params: output_param
+             | output_params COMMA output_param
+             ;
+
+output_param: OUTPUT {
+    ctx.enter(ctx.NO_KEYWORDS);
+} COLON STRING {
+    ElementPtr sev(new StringElement($4, ctx.loc2pos(@4)));
+    ctx.stack_.back()->set("output", sev);
+    ctx.leave();
+};
+
+%%
+
+void
+isc::agent::AgentParser::error(const location_type& loc,
+                               const std::string& what)
+{
+    ctx.error(loc, what);
+}
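The three START_* tokens above are selected through ParserContext::ParserType, which is what makes the sub_agent shortcut usable from unit tests. As a minimal sketch (not part of this commit; a hypothetical test-style helper, assuming the agent headers are on the include path), parsing just the Control-agent contents could look like this:

    #include <agent/parser_context.h>
    #include <cc/data.h>
    #include <string>

    // Parse only the content of the Control-agent map, skipping the outer map.
    isc::data::ElementPtr
    parseSubAgent() {
        const std::string cfg =
            "{ \"http-host\": \"localhost\", \"http-port\": 8000 }";
        isc::agent::ParserContext ctx;
        // PARSER_SUB_AGENT corresponds to the START_SUB_AGENT entry point.
        return (ctx.parseString(cfg, isc::agent::ParserContext::PARSER_SUB_AGENT));
    }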

+ 193 - 0
src/bin/agent/location.hh

@@ -0,0 +1,193 @@
+// Generated 201702082310
+// A Bison parser, made by GNU Bison 3.0.4.
+
+// Locations for Bison parsers in C++
+
+// Copyright (C) 2002-2015 Free Software Foundation, Inc.
+
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU General Public License for more details.
+
+// You should have received a copy of the GNU General Public License
+// along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+// As a special exception, you may create a larger work that contains
+// part or all of the Bison parser skeleton and distribute that work
+// under terms of your choice, so long as that work isn't itself a
+// parser generator using the skeleton or a modified version thereof
+// as a parser skeleton.  Alternatively, if you modify or redistribute
+// the parser skeleton itself, you may (at your option) remove this
+// special exception, which will cause the skeleton and the resulting
+// Bison output files to be licensed under the GNU General Public
+// License without this special exception.
+
+// This special exception was added by the Free Software Foundation in
+// version 2.2 of Bison.
+
+/**
+ ** \file location.hh
+ ** Define the isc::agent::location class.
+ */
+
+#ifndef YY_AGENT_LOCATION_HH_INCLUDED
+# define YY_AGENT_LOCATION_HH_INCLUDED
+
+# include "position.hh"
+
+#line 14 "agent_parser.yy" // location.cc:296
+namespace isc { namespace agent {
+#line 46 "location.hh" // location.cc:296
+  /// Abstract a location.
+  class location
+  {
+  public:
+
+    /// Construct a location from \a b to \a e.
+    location (const position& b, const position& e)
+      : begin (b)
+      , end (e)
+    {
+    }
+
+    /// Construct a 0-width location in \a p.
+    explicit location (const position& p = position ())
+      : begin (p)
+      , end (p)
+    {
+    }
+
+    /// Construct a 0-width location in \a f, \a l, \a c.
+    explicit location (std::string* f,
+                       unsigned int l = 1u,
+                       unsigned int c = 1u)
+      : begin (f, l, c)
+      , end (f, l, c)
+    {
+    }
+
+
+    /// Initialization.
+    void initialize (std::string* f = YY_NULLPTR,
+                     unsigned int l = 1u,
+                     unsigned int c = 1u)
+    {
+      begin.initialize (f, l, c);
+      end = begin;
+    }
+
+    /** \name Line and Column related manipulators
+     ** \{ */
+  public:
+    /// Reset initial location to final location.
+    void step ()
+    {
+      begin = end;
+    }
+
+    /// Extend the current location to the COUNT next columns.
+    void columns (int count = 1)
+    {
+      end += count;
+    }
+
+    /// Extend the current location to the COUNT next lines.
+    void lines (int count = 1)
+    {
+      end.lines (count);
+    }
+    /** \} */
+
+
+  public:
+    /// Beginning of the located region.
+    position begin;
+    /// End of the located region.
+    position end;
+  };
+
+  /// Join two locations, in place.
+  inline location& operator+= (location& res, const location& end)
+  {
+    res.end = end.end;
+    return res;
+  }
+
+  /// Join two locations.
+  inline location operator+ (location res, const location& end)
+  {
+    return res += end;
+  }
+
+  /// Add \a width columns to the end position, in place.
+  inline location& operator+= (location& res, int width)
+  {
+    res.columns (width);
+    return res;
+  }
+
+  /// Add \a width columns to the end position.
+  inline location operator+ (location res, int width)
+  {
+    return res += width;
+  }
+
+  /// Subtract \a width columns to the end position, in place.
+  inline location& operator-= (location& res, int width)
+  {
+    return res += -width;
+  }
+
+  /// Subtract \a width columns to the end position.
+  inline location operator- (location res, int width)
+  {
+    return res -= width;
+  }
+
+  /// Compare two location objects.
+  inline bool
+  operator== (const location& loc1, const location& loc2)
+  {
+    return loc1.begin == loc2.begin && loc1.end == loc2.end;
+  }
+
+  /// Compare two location objects.
+  inline bool
+  operator!= (const location& loc1, const location& loc2)
+  {
+    return !(loc1 == loc2);
+  }
+
+  /** \brief Intercept output stream redirection.
+   ** \param ostr the destination output stream
+   ** \param loc a reference to the location to redirect
+   **
+   ** Avoid duplicate information.
+   */
+  template <typename YYChar>
+  inline std::basic_ostream<YYChar>&
+  operator<< (std::basic_ostream<YYChar>& ostr, const location& loc)
+  {
+    unsigned int end_col = 0 < loc.end.column ? loc.end.column - 1 : 0;
+    ostr << loc.begin;
+    if (loc.end.filename
+        && (!loc.begin.filename
+            || *loc.begin.filename != *loc.end.filename))
+      ostr << '-' << loc.end.filename << ':' << loc.end.line << '.' << end_col;
+    else if (loc.begin.line < loc.end.line)
+      ostr << '-' << loc.end.line << '.' << end_col;
+    else if (loc.begin.column < end_col)
+      ostr << '-' << end_col;
+    return ostr;
+  }
+
+#line 14 "agent_parser.yy" // location.cc:296
+} } // isc::agent
+#line 192 "location.hh" // location.cc:296
+#endif // !YY_AGENT_LOCATION_HH_INCLUDED

+ 144 - 0
src/bin/agent/parser_context.cc

@@ -0,0 +1,144 @@
+// Copyright (C) 2017 Internet Systems Consortium, Inc. ("ISC")
+//
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#include <agent/parser_context.h>
+#include <agent/agent_parser.h>
+#include <exceptions/exceptions.h>
+#include <cc/dhcp_config_error.h>
+#include <cc/data.h>
+#include <fstream>
+#include <limits>
+
+namespace isc {
+namespace agent {
+
+ParserContext::ParserContext()
+  : ctx_(NO_KEYWORDS), trace_scanning_(false), trace_parsing_(false)
+{
+}
+
+ParserContext::~ParserContext()
+{
+}
+
+isc::data::ElementPtr
+ParserContext::parseString(const std::string& str, ParserType parser_type)
+{
+    scanStringBegin(str, parser_type);
+    return (parseCommon());
+}
+
+isc::data::ElementPtr
+ParserContext::parseFile(const std::string& filename, ParserType parser_type) {
+    FILE* f = fopen(filename.c_str(), "r");
+    if (!f) {
+        isc_throw(ParseError, "Unable to open file " << filename);
+    }
+    scanFileBegin(f, filename, parser_type);
+    return (parseCommon());
+}
+
+isc::data::ElementPtr
+ParserContext::parseCommon() {
+    isc::agent::AgentParser parser(*this);
+    // Uncomment this to get detailed parser logs.
+    // trace_parsing_ = true;
+    parser.set_debug_level(trace_parsing_);
+    try {
+        int res = parser.parse();
+        if (res != 0) {
+            isc_throw(ParseError, "Parser abort");
+        }
+        scanEnd();
+    }
+    catch (...) {
+        scanEnd();
+        throw;
+    }
+    if (stack_.size() == 1) {
+        return (stack_[0]);
+    } else {
+        isc_throw(ParseError, "Expected exactly one terminal Element, found "
+                  << stack_.size());
+    }
+}
+
+
+void
+ParserContext::error(const isc::agent::location& loc, const std::string& what)
+{
+    isc_throw(ParseError, loc << ": " << what);
+}
+
+void
+ParserContext::error(const std::string& what)
+{
+    isc_throw(ParseError, what);
+}
+
+void
+ParserContext::fatal(const std::string& what)
+{
+    isc_throw(ParseError, what);
+}
+
+isc::data::Element::Position
+ParserContext::loc2pos(isc::agent::location& loc)
+{
+    const std::string& file = *loc.begin.filename;
+    const uint32_t line = loc.begin.line;
+    const uint32_t pos = loc.begin.column;
+    return (isc::data::Element::Position(file, line, pos));
+}
+
+void
+ParserContext::enter(const LexerContext& ctx)
+{
+    cstack_.push_back(ctx_);
+    ctx_ = ctx;
+}
+
+void
+ParserContext::leave()
+{
+    if (cstack_.empty()) {
+        fatal("unbalanced syntactic context");
+    }
+    ctx_ = cstack_.back();
+    cstack_.pop_back();
+}
+
+const std::string
+ParserContext::contextName()
+{
+    switch (ctx_) {
+    case NO_KEYWORDS:
+        return ("__no keywords__");
+    case CONFIG:
+        return ("toplevel");
+    case AGENT:
+        return ("Control-agent");
+    case LOGGING:
+        return ("Logging");
+    case CONTROL_SOCKETS:
+        return ("control-sockets");
+    case SERVER:
+        return ("xxx-server");
+    case SOCKET_TYPE:
+        return ("socket-type");
+    case HOOKS_LIBRARIES:
+        return ("hooks-libraries");
+    case LOGGERS:
+        return ("loggers");
+    case OUTPUT_OPTIONS:
+        return ("output-options");
+    default:
+        return ("__unknown__");
+    }
+}
+
+};
+};
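To show how the interface implemented above is meant to be driven end to end, here is a small sketch (hypothetical, not part of the commit; the file path and the error handling are illustrative only):

    #include <agent/parser_context.h>
    #include <cc/data.h>
    #include <iostream>

    int main() {
        isc::agent::ParserContext ctx;
        try {
            // PARSER_AGENT expects the full config: an outer map with Control-agent in it.
            isc::data::ElementPtr cfg =
                ctx.parseFile("/usr/local/etc/kea/kea-ca.conf",
                              isc::agent::ParserContext::PARSER_AGENT);
            std::cout << cfg->str() << std::endl;
        } catch (const std::exception& ex) {
            // ParseError, like other isc exceptions, ultimately derives from std::exception.
            std::cerr << "parse failed: " << ex.what() << std::endl;
            return (1);
        }
        return (0);
    }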

+ 267 - 0
src/bin/agent/parser_context.h

@@ -0,0 +1,267 @@
+// Copyright (C) 2017 Internet Systems Consortium, Inc. ("ISC")
+//
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#ifndef PARSER_CONTEXT_H
+#define PARSER_CONTEXT_H
+#include <string>
+#include <map>
+#include <vector>
+#include <agent/agent_parser.h>
+#include <agent/parser_context_decl.h>
+#include <exceptions/exceptions.h>
+
+// Tell Flex the lexer's prototype ...
+#define YY_DECL isc::agent::AgentParser::symbol_type agent_lex (ParserContext& driver)
+
+// ... and declare it for the parser's sake.
+YY_DECL;
+
+namespace isc {
+namespace agent {
+
+/// @brief Parser context is a wrapper around flex/bison instances dedicated to
+///        Control-agent config file parser.
+///
+/// It follows the same principle as other components. The primary interfaces
+/// are the @ref parseString and @ref parseFile methods. All other methods are
+/// public for testing purposes only. This interface allows parsing the
+/// whole configuration with syntactic checking (which is by far the most
+/// frequent use), but it also allows parsing the input as generic JSON or
+/// parsing only the content of the Control-agent object, which is a subset
+/// of the full grammar (this is very useful for unit tests, so they do not
+/// need to duplicate unnecessary parts of the config file).
+class ParserContext
+{
+public:
+
+    /// @brief Defines currently supported scopes
+    ///
+    /// AgentParser is able to parse several types of scope. Usually,
+    /// when it parses a config file, it expects the data to have a map
+    /// with Control-agent in it and all the parameters within that map.
+    /// However, sometimes the parser is expected to parse only a subset
+    /// of that information.
+    typedef enum {
+        /// This parser will parse the content as generic JSON.
+        PARSER_JSON,
+
+        /// This parser will expect the content as Control-agent config wrapped
+        /// in a map (that's the regular config file)
+        PARSER_AGENT,
+
+        /// This parser will expect only the content of Control-agent.
+        PARSER_SUB_AGENT
+    } ParserType;
+
+    /// @brief Default constructor.
+    ParserContext();
+
+    /// @brief destructor
+    virtual ~ParserContext();
+
+    /// @brief JSON elements being parsed.
+    std::vector<isc::data::ElementPtr> stack_;
+
+    /// @brief Method called before scanning starts on a string.
+    ///
+    /// @param str string to be parsed
+    /// @param type specifies expected content
+    void scanStringBegin(const std::string& str, ParserType type);
+
+    /// @brief Method called before scanning starts on a file.
+    ///
+    /// @param f stdio FILE pointer
+    /// @param filename file to be parsed
+    /// @param type specifies expected content
+    void scanFileBegin(FILE* f, const std::string& filename, ParserType type);
+
+    /// @brief Method called after the last tokens are scanned.
+    void scanEnd();
+
+    /// @brief Divert input to an include file.
+    ///
+    /// @param filename file to be included
+    void includeFile(const std::string& filename);
+
+    /// @brief Run the parser on the string specified.
+    ///
+    /// This method parses specified string. Depending on the value of
+    /// parser_type, parser may either check only that the input is valid
+    /// JSON, or may do more specific syntax checking. See @ref ParserType
+    /// for supported syntax checkers.
+    ///
+    /// @param str string to be parsed
+    /// @param parser_type specifies expected content (usually AGENT or generic JSON)
+    /// @return Element structure representing parsed text.
+    isc::data::ElementPtr parseString(const std::string& str,
+                                      ParserType parser_type);
+
+    /// @brief Run the parser on the file specified.
+    ///
+    /// This method parses the specified file. Depending on the value of
+    /// parser_type, the parser may either check only that the input is valid
+    /// JSON, or it may do more specific syntax checking. See @ref ParserType
+    /// for the supported syntax checkers.
+    ///
+    /// @param filename file to be parsed
+    /// @param parser_type specifies the expected content (usually PARSER_AGENT
+    ///                    or PARSER_JSON)
+    /// @return Element structure representing parsed text.
+    isc::data::ElementPtr parseFile(const std::string& filename,
+                                    ParserType parser_type);
+
+    /// @brief Error handler
+    ///
+    /// @param loc location within the parsed file where the problem was experienced.
+    /// @param what string explaining the nature of the error.
+    /// @throw ParseError
+    void error(const isc::agent::location& loc, const std::string& what);
+
+    /// @brief Error handler
+    ///
+    /// This is a simplified error reporting tool for possible future
+    /// cases when the AgentParser is not able to handle the input.
+    ///
+    /// @param what string explaining the nature of the error.
+    /// @throw ParseError
+    void error(const std::string& what);
+
+    /// @brief Fatal error handler
+    ///
+    /// This is for errors that should not happen, but are fatal.
+    /// It is used by the YY_FATAL_ERROR macro, so it is required to be static.
+    ///
+    /// @param what string explaining the nature of the error.
+    /// @throw ParseError
+    static void fatal(const std::string& what);
+
+    /// @brief Converts bison's position to one understandable by isc::data::Element
+    ///
+    /// Convert a bison location into an element position
+    /// (the beginning of the location is kept, the end is lost)
+    ///
+    /// @param loc location in bison format
+    /// @return Position in format accepted by Element
+    isc::data::Element::Position loc2pos(isc::agent::location& loc);
+
+    /// @brief Defines syntactic contexts for lexical tie-ins
+    typedef enum {
+        /// This one is used in pure JSON mode.
+        NO_KEYWORDS,
+
+        /// Used while parsing the top level (which contains Control-agent,
+        /// Logging and others).
+        CONFIG,
+
+        /// Used while parsing the content of Control-agent.
+        AGENT,
+
+        /// Used while parsing the content of Logging.
+        LOGGING,
+
+        /// Used while parsing Control-agent/control-sockets.
+        CONTROL_SOCKETS,
+
+        /// Used while parsing Control-agent/control-sockets/*-server.
+        SERVER,
+
+        /// Used while parsing Control-agent/control-sockets/*-server/socket-type.
+        SOCKET_TYPE,
+
+        /// Used while parsing Control-agent/hooks-libraries.
+        HOOKS_LIBRARIES,
+
+        /// Used while parsing Logging/loggers structures.
+        LOGGERS,
+
+        /// Used while parsing Logging/loggers/output_options structures.
+        OUTPUT_OPTIONS
+
+    } LexerContext;
+
+    /// @brief File name
+    std::string file_;
+
+    /// @brief File name stack
+    std::vector<std::string> files_;
+
+    /// @brief Location of the current token
+    ///
+    /// The lexer will keep updating it. This variable will be useful
+    /// for logging errors.
+    isc::agent::location loc_;
+
+    /// @brief Location stack
+    std::vector<isc::agent::location> locs_;
+
+    /// @brief Lexer state stack
+    std::vector<struct yy_buffer_state*> states_;
+
+    /// @brief Currently parsed file (stdio FILE pointer)
+    FILE* sfile_;
+
+    /// @brief Stack of stdio FILE pointers
+    ///
+    /// This is a stack of files. Typically there is only one file (the
+    /// one currently being parsed), but there may be more if one
+    /// file includes another.
+    std::vector<FILE*> sfiles_;
+
+    /// @brief Current syntactic context
+    LexerContext ctx_;
+
+    /// @brief Enter a new syntactic context
+    ///
+    /// Entering a new syntactic context is useful in several ways.
+    /// First, it allows the parser to avoid conflicts. Second, it
+    /// allows the lexer to return different tokens depending on the
+    /// context (e.g. when the "http-host" string is detected, the lexer
+    /// returns a generic STRING token in the NO_KEYWORDS context, but a
+    /// dedicated keyword token when the agent configuration is being
+    /// parsed). Finally, the syntactic context allows the error message
+    /// to be more descriptive if the input string does not parse
+    /// properly. The contexts recognized by the Control Agent parser are
+    /// listed in @ref LexerContext.
+    ///
+    /// Make sure to call @ref leave() once the parsing of your
+    /// context is complete.
+    ///
+    /// @param ctx the syntactic context to enter into
+    void enter(const LexerContext& ctx);
+
+    /// @brief Leave a syntactic context
+    ///
+    /// @ref enter() must be called before (when entering a new scope
+    /// or context). Once you complete the parsing, this method
+    /// should be called.
+    ///
+    /// @throw isc::Unexpected if unbalanced (more leave() than enter() calls)
+    void leave();
+
+    /// @brief Get the syntactic context name
+    ///
+    /// @return printable name of the context.
+    const std::string contextName();
+
+ private:
+    /// @brief Flag determining scanner debugging.
+    bool trace_scanning_;
+
+    /// @brief Flag determining parser debugging.
+    bool trace_parsing_;
+
+    /// @brief Syntactic context stack
+    std::vector<LexerContext> cstack_;
+
+    /// @brief Common part of parseXXX
+    ///
+    /// @return Element structure representing parsed text.
+    isc::data::ElementPtr parseCommon();
+};
+
+}; // end of isc::agent namespace
+}; // end of isc namespace
+
+#endif
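For orientation, a minimal sketch of how the interface declared above can be driven; the wrapper function name and the config path are illustrative assumptions:

    #include <agent/parser_context.h>
    #include <cc/data.h>
    #include <string>

    // Hypothetical helper: parse a Control-agent config file with full
    // syntactic checking and return the resulting Element tree.
    isc::data::ElementPtr
    loadAgentConfig(const std::string& path) {
        isc::agent::ParserContext ctx;
        // PARSER_AGENT expects the whole config file (a top-level map with
        // Control-agent inside); PARSER_JSON accepts any valid JSON and
        // PARSER_SUB_AGENT only the content of the Control-agent map.
        return (ctx.parseFile(path, isc::agent::ParserContext::PARSER_AGENT));
    }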

+ 20 - 0
src/bin/agent/parser_context_decl.h

@@ -0,0 +1,20 @@
+// Copyright (C) 2017 Internet Systems Consortium, Inc. ("ISC")
+//
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#ifndef AGENT_CONTEXT_DECL_H
+#define AGENT_CONTEXT_DECL_H
+
+/// @file agent/parser_context_decl.h Forward declaration of the ParserContext class
+
+namespace isc {
+namespace agent {
+
+class ParserContext;
+
+}; // end of isc::agent namespace
+}; // end of isc namespace
+
+#endif

+ 181 - 0
src/bin/agent/position.hh

@@ -0,0 +1,181 @@
+// Generated 201702082310
+// A Bison parser, made by GNU Bison 3.0.4.
+
+// Positions for Bison parsers in C++
+
+// Copyright (C) 2002-2015 Free Software Foundation, Inc.
+
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU General Public License for more details.
+
+// You should have received a copy of the GNU General Public License
+// along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+// As a special exception, you may create a larger work that contains
+// part or all of the Bison parser skeleton and distribute that work
+// under terms of your choice, so long as that work isn't itself a
+// parser generator using the skeleton or a modified version thereof
+// as a parser skeleton.  Alternatively, if you modify or redistribute
+// the parser skeleton itself, you may (at your option) remove this
+// special exception, which will cause the skeleton and the resulting
+// Bison output files to be licensed under the GNU General Public
+// License without this special exception.
+
+// This special exception was added by the Free Software Foundation in
+// version 2.2 of Bison.
+
+/**
+ ** \file position.hh
+ ** Define the isc::agent::position class.
+ */
+
+#ifndef YY_AGENT_POSITION_HH_INCLUDED
+# define YY_AGENT_POSITION_HH_INCLUDED
+
+# include <algorithm> // std::max
+# include <iostream>
+# include <string>
+
+# ifndef YY_NULLPTR
+#  if defined __cplusplus && 201103L <= __cplusplus
+#   define YY_NULLPTR nullptr
+#  else
+#   define YY_NULLPTR 0
+#  endif
+# endif
+
+#line 14 "agent_parser.yy" // location.cc:296
+namespace isc { namespace agent {
+#line 56 "position.hh" // location.cc:296
+  /// Abstract a position.
+  class position
+  {
+  public:
+    /// Construct a position.
+    explicit position (std::string* f = YY_NULLPTR,
+                       unsigned int l = 1u,
+                       unsigned int c = 1u)
+      : filename (f)
+      , line (l)
+      , column (c)
+    {
+    }
+
+
+    /// Initialization.
+    void initialize (std::string* fn = YY_NULLPTR,
+                     unsigned int l = 1u,
+                     unsigned int c = 1u)
+    {
+      filename = fn;
+      line = l;
+      column = c;
+    }
+
+    /** \name Line and Column related manipulators
+     ** \{ */
+    /// (line related) Advance to the COUNT next lines.
+    void lines (int count = 1)
+    {
+      if (count)
+        {
+          column = 1u;
+          line = add_ (line, count, 1);
+        }
+    }
+
+    /// (column related) Advance to the COUNT next columns.
+    void columns (int count = 1)
+    {
+      column = add_ (column, count, 1);
+    }
+    /** \} */
+
+    /// File name to which this position refers.
+    std::string* filename;
+    /// Current line number.
+    unsigned int line;
+    /// Current column number.
+    unsigned int column;
+
+  private:
+    /// Compute max(min, lhs+rhs) (provided min <= lhs).
+    static unsigned int add_ (unsigned int lhs, int rhs, unsigned int min)
+    {
+      return (0 < rhs || -static_cast<unsigned int>(rhs) < lhs
+              ? rhs + lhs
+              : min);
+    }
+  };
+
+  /// Add \a width columns, in place.
+  inline position&
+  operator+= (position& res, int width)
+  {
+    res.columns (width);
+    return res;
+  }
+
+  /// Add \a width columns.
+  inline position
+  operator+ (position res, int width)
+  {
+    return res += width;
+  }
+
+  /// Subtract \a width columns, in place.
+  inline position&
+  operator-= (position& res, int width)
+  {
+    return res += -width;
+  }
+
+  /// Subtract \a width columns.
+  inline position
+  operator- (position res, int width)
+  {
+    return res -= width;
+  }
+
+  /// Compare two position objects.
+  inline bool
+  operator== (const position& pos1, const position& pos2)
+  {
+    return (pos1.line == pos2.line
+            && pos1.column == pos2.column
+            && (pos1.filename == pos2.filename
+                || (pos1.filename && pos2.filename
+                    && *pos1.filename == *pos2.filename)));
+  }
+
+  /// Compare two position objects.
+  inline bool
+  operator!= (const position& pos1, const position& pos2)
+  {
+    return !(pos1 == pos2);
+  }
+
+  /** \brief Intercept output stream redirection.
+   ** \param ostr the destination output stream
+   ** \param pos a reference to the position to redirect
+   */
+  template <typename YYChar>
+  inline std::basic_ostream<YYChar>&
+  operator<< (std::basic_ostream<YYChar>& ostr, const position& pos)
+  {
+    if (pos.filename)
+      ostr << *pos.filename << ':';
+    return ostr << pos.line << '.' << pos.column;
+  }
+
+#line 14 "agent_parser.yy" // location.cc:296
+} } // isc::agent
+#line 180 "position.hh" // location.cc:296
+#endif // !YY_AGENT_POSITION_HH_INCLUDED
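As a quick illustration of the generated position class, a small sketch (the values follow from the definitions of lines(), columns() and operator<< above):

    #include <iostream>
    #include "position.hh"  // the generated header shown above

    void positionDemo() {
        isc::agent::position pos;       // defaults: no filename, line 1, column 1
        pos.columns(4);                 // advance four columns     -> 1.5
        pos.lines();                    // next line, column resets -> 2.1
        std::cout << pos << std::endl;  // prints "2.1" (a "file:" prefix appears only when filename is set)
    }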

+ 158 - 0
src/bin/agent/stack.hh

@@ -0,0 +1,158 @@
+// Generated 201702082310
+// A Bison parser, made by GNU Bison 3.0.4.
+
+// Stack handling for Bison parsers in C++
+
+// Copyright (C) 2002-2015 Free Software Foundation, Inc.
+
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU General Public License for more details.
+
+// You should have received a copy of the GNU General Public License
+// along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+// As a special exception, you may create a larger work that contains
+// part or all of the Bison parser skeleton and distribute that work
+// under terms of your choice, so long as that work isn't itself a
+// parser generator using the skeleton or a modified version thereof
+// as a parser skeleton.  Alternatively, if you modify or redistribute
+// the parser skeleton itself, you may (at your option) remove this
+// special exception, which will cause the skeleton and the resulting
+// Bison output files to be licensed under the GNU General Public
+// License without this special exception.
+
+// This special exception was added by the Free Software Foundation in
+// version 2.2 of Bison.
+
+/**
+ ** \file stack.hh
+ ** Define the isc::agent::stack class.
+ */
+
+#ifndef YY_AGENT_STACK_HH_INCLUDED
+# define YY_AGENT_STACK_HH_INCLUDED
+
+# include <vector>
+
+#line 14 "agent_parser.yy" // stack.hh:132
+namespace isc { namespace agent {
+#line 46 "stack.hh" // stack.hh:132
+  template <class T, class S = std::vector<T> >
+  class stack
+  {
+  public:
+    // Hide our reversed order.
+    typedef typename S::reverse_iterator iterator;
+    typedef typename S::const_reverse_iterator const_iterator;
+
+    stack ()
+      : seq_ ()
+    {
+      seq_.reserve (200);
+    }
+
+    stack (unsigned int n)
+      : seq_ (n)
+    {}
+
+    inline
+    T&
+    operator[] (unsigned int i)
+    {
+      return seq_[seq_.size () - 1 - i];
+    }
+
+    inline
+    const T&
+    operator[] (unsigned int i) const
+    {
+      return seq_[seq_.size () - 1 - i];
+    }
+
+    /// Steal the contents of \a t.
+    ///
+    /// Close to move-semantics.
+    inline
+    void
+    push (T& t)
+    {
+      seq_.push_back (T());
+      operator[](0).move (t);
+    }
+
+    inline
+    void
+    pop (unsigned int n = 1)
+    {
+      for (; n; --n)
+        seq_.pop_back ();
+    }
+
+    void
+    clear ()
+    {
+      seq_.clear ();
+    }
+
+    inline
+    typename S::size_type
+    size () const
+    {
+      return seq_.size ();
+    }
+
+    inline
+    const_iterator
+    begin () const
+    {
+      return seq_.rbegin ();
+    }
+
+    inline
+    const_iterator
+    end () const
+    {
+      return seq_.rend ();
+    }
+
+  private:
+    stack (const stack&);
+    stack& operator= (const stack&);
+    /// The wrapped container.
+    S seq_;
+  };
+
+  /// Present a slice of the top of a stack.
+  template <class T, class S = stack<T> >
+  class slice
+  {
+  public:
+    slice (const S& stack, unsigned int range)
+      : stack_ (stack)
+      , range_ (range)
+    {}
+
+    inline
+    const T&
+    operator [] (unsigned int i) const
+    {
+      return stack_[range_ - i];
+    }
+
+  private:
+    const S& stack_;
+    unsigned int range_;
+  };
+
+#line 14 "agent_parser.yy" // stack.hh:132
+} } // isc::agent
+#line 156 "stack.hh" // stack.hh:132
+
+#endif // !YY_AGENT_STACK_HH_INCLUDED

+ 34 - 31
src/bin/agent/tests/Makefile.am

@@ -22,6 +22,7 @@ AM_CPPFLAGS += -I$(top_srcdir)/src/bin
 AM_CPPFLAGS += $(BOOST_INCLUDES)
 AM_CPPFLAGS += -DTEST_DATA_BUILDDIR=\"$(abs_top_builddir)/src/bin/agent/tests\"
 AM_CPPFLAGS += -DINSTALL_PROG=\"$(abs_top_srcdir)/install-sh\"
+AM_CPPFLAGS += -DCFG_EXAMPLES=\"$(abs_top_srcdir)/doc/examples/agent\"
 
 CLEANFILES = $(builddir)/interfaces.txt $(builddir)/logger_lockfile
 
@@ -39,37 +40,39 @@ TESTS_ENVIRONMENT = \
 TESTS =
 if HAVE_GTEST
 
-TESTS += ctrl_agent_unittests
-
-ctrl_agent_unittests_SOURCES  = ctrl_agent_cfg_mgr_unittests.cc
-ctrl_agent_unittests_SOURCES += ctrl_agent_controller_unittests.cc
-ctrl_agent_unittests_SOURCES += ctrl_agent_process_unittests.cc
-ctrl_agent_unittests_SOURCES += ctrl_agent_unittests.cc
-
-ctrl_agent_unittests_CPPFLAGS = $(AM_CPPFLAGS) $(GTEST_INCLUDES)
-ctrl_agent_unittests_LDFLAGS = $(AM_LDFLAGS) $(CRYPTO_LDFLAGS) $(GTEST_LDFLAGS)
-
-ctrl_agent_unittests_LDADD = $(top_builddir)/src/bin/agent/libagent.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/process/testutils/libprocesstest.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/cfgrpt/libcfgrpt.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/process/libkea-process.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/dhcpsrv/libkea-dhcpsrv.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/eval/libkea-eval.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/dhcp_ddns/libkea-dhcp_ddns.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/stats/libkea-stats.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/config/libkea-cfgclient.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/dhcp/libkea-dhcp++.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/asiolink/libkea-asiolink.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/cc/libkea-cc.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/dns/libkea-dns++.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/cryptolink/libkea-cryptolink.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/hooks/libkea-hooks.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/log/libkea-log.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/util/threads/libkea-threads.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/util/libkea-util.la
-ctrl_agent_unittests_LDADD += $(top_builddir)/src/lib/exceptions/libkea-exceptions.la
-ctrl_agent_unittests_LDADD += $(LOG4CPLUS_LIBS) $(CRYPTO_LIBS)
-ctrl_agent_unittests_LDADD += $(BOOST_LIBS) $(GTEST_LDADD)
+TESTS += ctrl_agent_unittest
+
+ctrl_agent_unittest_SOURCES  = ctrl_agent_cfg_mgr_unittest.cc
+ctrl_agent_unittest_SOURCES += ctrl_agent_controller_unittest.cc
+ctrl_agent_unittest_SOURCES += parser_unittest.cc
+ctrl_agent_unittest_SOURCES += ctrl_agent_process_unittest.cc
+ctrl_agent_unittest_SOURCES += ctrl_agent_unittest.cc
+
+ctrl_agent_unittest_CPPFLAGS = $(AM_CPPFLAGS) $(GTEST_INCLUDES)
+ctrl_agent_unittest_LDFLAGS = $(AM_LDFLAGS) $(CRYPTO_LDFLAGS) $(GTEST_LDFLAGS)
+
+ctrl_agent_unittest_LDADD = $(top_builddir)/src/bin/agent/libagent.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/process/testutils/libprocesstest.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/cfgrpt/libcfgrpt.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/process/libkea-process.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/dhcpsrv/libkea-dhcpsrv.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/eval/libkea-eval.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/dhcp_ddns/libkea-dhcp_ddns.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/stats/libkea-stats.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/config/libkea-cfgclient.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/dhcp/libkea-dhcp++.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/asiolink/libkea-asiolink.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/cc/libkea-cc.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/dns/libkea-dns++.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/cryptolink/libkea-cryptolink.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/hooks/libkea-hooks.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/log/libkea-log.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/util/threads/libkea-threads.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/testutils/libkea-testutils.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/util/libkea-util.la
+ctrl_agent_unittest_LDADD += $(top_builddir)/src/lib/exceptions/libkea-exceptions.la
+ctrl_agent_unittest_LDADD += $(LOG4CPLUS_LIBS) $(CRYPTO_LIBS)
+ctrl_agent_unittest_LDADD += $(BOOST_LIBS) $(GTEST_LDADD)
 
 endif
 

src/bin/agent/tests/ctrl_agent_cfg_mgr_unittests.cc → src/bin/agent/tests/ctrl_agent_cfg_mgr_unittest.cc


src/bin/agent/tests/ctrl_agent_controller_unittests.cc → src/bin/agent/tests/ctrl_agent_controller_unittest.cc


src/bin/agent/tests/ctrl_agent_process_unittests.cc → src/bin/agent/tests/ctrl_agent_process_unittest.cc


src/bin/agent/tests/ctrl_agent_unittests.cc → src/bin/agent/tests/ctrl_agent_unittest.cc


+ 592 - 0
src/bin/agent/tests/parser_unittest.cc

@@ -0,0 +1,592 @@
+// Copyright (C) 2017 Internet Systems Consortium, Inc. ("ISC")
+//
+// This Source Code Form is subject to the terms of the Mozilla Public
+// License, v. 2.0. If a copy of the MPL was not distributed with this
+// file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+#include <gtest/gtest.h>
+#include <cc/data.h>
+#include <agent/parser_context.h>
+#include <cc/dhcp_config_error.h>
+#include <testutils/io_utils.h>
+
+using namespace isc::data;
+using namespace isc::test;
+using namespace std;
+
+namespace isc {
+namespace agent {
+namespace test {
+
+/// @brief Compares two JSON trees
+///
+/// If differences are discovered, a gtest failure is reported (using EXPECT_EQ).
+///
+/// @param a first element to be compared
+/// @param b second element to be compared
+void compareJSON(ConstElementPtr a, ConstElementPtr b) {
+    ASSERT_TRUE(a);
+    ASSERT_TRUE(b);
+    EXPECT_EQ(a->str(), b->str());
+}
+
+/// @brief Tests if the input string can be parsed with a specific parser
+///
+/// The input text will be passed to the bison parser of the specified type.
+/// Then the same input text is passed to the legacy JSON parser and the
+/// outputs from both parsers are compared. The legacy comparison can be
+/// disabled if the tested feature is not supported by the old parser (e.g.
+/// the new comment styles).
+///
+/// @param txt text to be parsed
+/// @param parser_type bison parser type to be instantiated
+/// @param compare whether to compare the output with the legacy JSON parser
+void testParser(const std::string& txt, ParserContext::ParserType parser_type,
+    bool compare = true) {
+    ConstElementPtr test_json;
+
+    ASSERT_NO_THROW({
+            try {
+                ParserContext ctx;
+                test_json = ctx.parseString(txt, parser_type);
+            } catch (const std::exception &e) {
+                cout << "EXCEPTION: " << e.what() << endl;
+                throw;
+            }
+
+    });
+
+    if (!compare) {
+        return;
+    }
+
+    // Now compare if both representations are the same.
+    ElementPtr reference_json;
+    ASSERT_NO_THROW(reference_json = Element::fromJSON(txt, true));
+    compareJSON(reference_json, test_json);
+}
+
+TEST(ParserTest, mapInMap) {
+    string txt = "{ \"xyzzy\": { \"foo\": 123, \"baz\": 456 } }";
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+TEST(ParserTest, listInList) {
+    string txt = "[ [ \"Britain\", \"Wales\", \"Scotland\" ], "
+                 "[ \"Pomorze\", \"Wielkopolska\", \"Tatry\"] ]";
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+TEST(ParserTest, nestedMaps) {
+    string txt = "{ \"europe\": { \"UK\": { \"London\": { \"street\": \"221B Baker\" }}}}";
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+TEST(ParserTest, nestedLists) {
+    string txt = "[ \"half\", [ \"quarter\", [ \"eighth\", [ \"sixteenth\" ]]]]";
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+TEST(ParserTest, listsInMaps) {
+    string txt = "{ \"constellations\": { \"orion\": [ \"rigel\", \"betelgeuse\" ], "
+                    "\"cygnus\": [ \"deneb\", \"albireo\"] } }";
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+TEST(ParserTest, mapsInLists) {
+    string txt = "[ { \"body\": \"earth\", \"gravity\": 1.0 },"
+                 " { \"body\": \"mars\", \"gravity\": 0.376 } ]";
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+TEST(ParserTest, types) {
+    string txt = "{ \"string\": \"foo\","
+                   "\"integer\": 42,"
+                   "\"boolean\": true,"
+                   "\"map\": { \"foo\": \"bar\" },"
+                   "\"list\": [ 1, 2, 3 ],"
+                   "\"null\": null }";
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+TEST(ParserTest, keywordJSON) {
+    string txt = "{ \"name\": \"user\","
+                   "\"type\": \"password\","
+                   "\"user\": \"name\","
+                   "\"password\": \"type\" }";
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+// This test checks if a full config (with the top level map and Control-agent
+// object) can be parsed with syntactic checking (and as pure JSON).
+TEST(ParserTest, keywordAgent) {
+    string txt = "{ \"Control-agent\": {\n"
+        "    \"http-host\": \"localhost\",\n"
+        "    \"http-port\": 8000,\n"
+        "    \"control-sockets\": {"
+        "        \"dhcp4-server\": {"
+        "            \"socket-type\": \"unix\","
+        "            \"socket-name\": \"/path/to/the/unix/socket-v4\""
+        "        },"
+        "        \"dhcp6-server\": {"
+        "            \"socket-type\": \"unix\","
+        "            \"socket-name\": \"/path/to/the/unix/socket-v6\""
+        "        },"
+        "        \"d2-server\": {"
+        "            \"socket-type\": \"unix\","
+        "            \"socket-name\": \"/path/to/the/unix/socket-d2\""
+        "        }"
+        "    },"
+        "    \"hooks-libraries\": ["
+        "    {"
+        "        \"library\": \"/opt/local/control-agent-commands.so\","
+        "        \"parameters\": {"
+        "            \"param1\": \"foo\""
+        "        }"
+        "    }"
+        "   ]"
+        "} }";
+    // This is a full config, so we'll parse it as full config (PARSER_AGENT)
+    testParser(txt, ParserContext::PARSER_AGENT);
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+// This test checks if a simplified config (without the top level map and
+// Control-agent object) can be parsed with syntactic checking (and as pure JSON).
+TEST(ParserTest, keywordSubAgent) {
+
+    // This is similar to previous test, but note the lack of outer
+    // map and Control-agent.
+    string txt = "{\n"
+        "    \"http-host\": \"localhost\",\n"
+        "    \"http-port\": 8000,\n"
+        "    \"control-sockets\": {"
+        "        \"dhcp4-server\": {"
+        "            \"socket-type\": \"unix\","
+        "            \"socket-name\": \"/path/to/the/unix/socket-v4\""
+        "        },"
+        "        \"dhcp6-server\": {"
+        "            \"socket-type\": \"unix\","
+        "            \"socket-name\": \"/path/to/the/unix/socket-v6\""
+        "        },"
+        "        \"d2-server\": {"
+        "            \"socket-type\": \"unix\","
+        "            \"socket-name\": \"/path/to/the/unix/socket-d2\""
+        "        }"
+        "    },"
+        "    \"hooks-libraries\": ["
+        "    {"
+        "        \"library\": \"/opt/local/control-agent-commands.so\","
+        "        \"parameters\": {"
+        "            \"param1\": \"foo\""
+        "        }"
+        "    }"
+        "   ]"
+        "}";
+
+    // This is only a subset of full config, so we'll parse with PARSER_SUB_AGENT.
+    testParser(txt, ParserContext::PARSER_SUB_AGENT);
+    testParser(txt, ParserContext::PARSER_JSON);
+}
+
+// Tests if bash (#) comments are supported. That's the only comment type that
+// was supported by the old parser.
+TEST(ParserTest, bashComments) {
+    string txt= "{ \"Control-agent\": {"
+                "  \"http-host\": \"localhost\","
+                "  \"http-port\": 9000,\n"
+                "  \"control-sockets\": {\n"
+                "    \"d2-server\": {\n"
+                "# this is a comment\n"
+                "\"socket-type\": \"unix\", \n"
+                "# This socket is mine. I can name it whatever\n"
+                "# I like, ok?\n"
+                "\"socket-name\": \"Hector\" \n"
+                "} } } }";
+    testParser(txt, ParserContext::PARSER_AGENT);
+}
+
+// Tests if C++ (//) comments can start anywhere, not just in the first line.
+TEST(ParserTest, cppComments) {
+    string txt= "{ \"Control-agent\": {"
+                "  \"http-host\": \"localhost\","
+                "  \"http-port\": 9001, // the level is over 9000!\n"
+                "  \"control-sockets\": {\n"
+                "    // Let's try talking to D2. Sadly, it never talks"
+                "    // to us back :( Maybe he doesn't like his name?\n"
+                "    \"d2-server\": {"
+                "\"socket-type\": \"unix\", \n"
+                "\"socket-name\": \"Hector\" \n"
+                "} } } }";
+
+    testParser(txt, ParserContext::PARSER_AGENT, false);
+}
+
+// Tests if bash (#) comments can start anywhere, not just in the first line.
+TEST(ParserTest, bashCommentsInline) {
+    string txt= "{ \"Control-agent\": {"
+                "  \"http-host\": \"localhost\","
+                "  \"http-port\": 9000,\n"
+                "  \"control-sockets\": {\n"
+                "    \"d2-server\": {"
+                "\"socket-type\": \"unix\", # Maybe Hector is not really a \n"
+                "\"socket-name\": \"Hector\" # Unix process?\n"
+                "# Oh no! He's a windows one and just pretending!\n"
+                "} } } }";
+    testParser(txt, ParserContext::PARSER_AGENT, false);
+}
+
+// Tests if multi-line C style comments are handled correctly.
+TEST(ParserTest, multilineComments) {
+    string txt= "{ \"Control-agent\": {"
+                "  \"http-host\": \"localhost\","
+                "  \"http-port\": 9000,\n"
+                "  \"control-sockets\": {\n"
+                "    \"dhcp4-server\": {\n"
+                "        \"socket-type\": \"unix\"\n"
+                "    }\n"
+                "  /* Ok, forget about it. If Hector doesn't want to talk,\n"
+                "     we won't talk to him either. We now have quiet days. */\n"
+                "  /* \"d2-server\": {"
+                "  \"socket-type\": \"unix\",\n"
+                "\"socket-name\": \"Hector\"\n"
+                "}*/ } } }";
+    testParser(txt, ParserContext::PARSER_AGENT, false);
+}
+
+/// @brief Loads the specified example config file
+///
+/// This test loads the specified example file twice: first using the legacy
+/// JSON parser, and then a second time using the bison parser. The two
+/// resulting Element trees are then compared. The input is decommented before
+/// it is passed to the legacy parser (as its support for comments is very
+/// limited).
+///
+/// @param fname name of the file to be loaded
+void testFile(const std::string& fname) {
+    ElementPtr reference_json;
+    ConstElementPtr test_json;
+
+    string decommented = decommentJSONfile(fname);
+
+    cout << "Parsing file " << fname << "(" << decommented << ")" << endl;
+
+    EXPECT_NO_THROW(reference_json = Element::fromJSONFile(decommented, true));
+
+    // remove the temporary file
+    EXPECT_NO_THROW(::remove(decommented.c_str()));
+
+    EXPECT_NO_THROW(
+    try {
+        ParserContext ctx;
+        test_json = ctx.parseFile(fname, ParserContext::PARSER_AGENT);
+    } catch (const std::exception &x) {
+        cout << "EXCEPTION: " << x.what() << endl;
+        throw;
+    });
+
+    ASSERT_TRUE(reference_json);
+    ASSERT_TRUE(test_json);
+
+    compareJSON(reference_json, test_json);
+}
+
+// This test loads all available example config files. Each config is loaded
+// twice: first with the existing Element::fromJSONFile() and then
+// the second time with AgentParser. Both JSON trees are then compared.
+// Hopefully the list of example configs will grow over time.
+TEST(ParserTest, file) {
+    vector<string> configs;
+    configs.push_back("simple.json");
+
+    for (size_t i = 0; i < configs.size(); ++i) {
+        testFile(string(CFG_EXAMPLES) + "/" + configs[i]);
+    }
+}
+
+/// @brief Tests error conditions in AgentParser
+///
+/// @param txt text to be parsed
+/// @param parser_type type of the parser to be used in the test
+/// @param msg expected content of the exception
+void testError(const std::string& txt,
+               ParserContext::ParserType parser_type,
+               const std::string& msg)
+{
+    try {
+        ParserContext ctx;
+        ConstElementPtr parsed = ctx.parseString(txt, parser_type);
+        FAIL() << "Expected ParseError but nothing was raised (expected: "
+               << msg << ")";
+    }
+    catch (const ParseError& ex) {
+        EXPECT_EQ(msg, ex.what());
+    }
+    catch (...) {
+        FAIL() << "Expected ParseError but something else was raised";
+    }
+}
+
+// Verify that error conditions are handled correctly.
+TEST(ParserTest, errors) {
+    // no input
+    testError("", ParserContext::PARSER_JSON,
+              "<string>:1.1: syntax error, unexpected end of file");
+    testError(" ", ParserContext::PARSER_JSON,
+              "<string>:1.2: syntax error, unexpected end of file");
+    testError("\n", ParserContext::PARSER_JSON,
+              "<string>:2.1: syntax error, unexpected end of file");
+    testError("\t", ParserContext::PARSER_JSON,
+              "<string>:1.2: syntax error, unexpected end of file");
+    testError("\r", ParserContext::PARSER_JSON,
+              "<string>:1.2: syntax error, unexpected end of file");
+
+    // comments
+    testError("# nothing\n",
+              ParserContext::PARSER_JSON,
+              "<string>:2.1: syntax error, unexpected end of file");
+    testError(" #\n",
+              ParserContext::PARSER_JSON,
+              "<string>:2.1: syntax error, unexpected end of file");
+    testError("// nothing\n",
+              ParserContext::PARSER_JSON,
+              "<string>:2.1: syntax error, unexpected end of file");
+    testError("/* nothing */\n",
+              ParserContext::PARSER_JSON,
+              "<string>:2.1: syntax error, unexpected end of file");
+    testError("/* no\nthing */\n",
+              ParserContext::PARSER_JSON,
+              "<string>:3.1: syntax error, unexpected end of file");
+    testError("/* no\nthing */\n\n",
+              ParserContext::PARSER_JSON,
+              "<string>:4.1: syntax error, unexpected end of file");
+    testError("/* nothing\n",
+              ParserContext::PARSER_JSON,
+              "Comment not closed. (/* in line 1");
+    testError("\n\n\n/* nothing\n",
+              ParserContext::PARSER_JSON,
+              "Comment not closed. (/* in line 4");
+    testError("{ /* */*/ }\n",
+              ParserContext::PARSER_JSON,
+              "<string>:1.3-8: Invalid character: *");
+    testError("{ /* // *// }\n",
+              ParserContext::PARSER_JSON,
+              "<string>:1.3-11: Invalid character: /");
+    testError("{ /* // *///  }\n",
+              ParserContext::PARSER_JSON,
+              "<string>:2.1: syntax error, unexpected end of file, "
+              "expecting }");
+
+    // includes
+    testError("<?\n",
+              ParserContext::PARSER_JSON,
+              "Directive not closed.");
+    testError("<?include\n",
+              ParserContext::PARSER_JSON,
+              "Directive not closed.");
+    string file = string(CFG_EXAMPLES) + "/" + "simple.json";
+    testError("<?include \"" + file + "\"\n",
+              ParserContext::PARSER_JSON,
+              "Directive not closed.");
+    testError("<?include \"/foo/bar\" ?>/n",
+              ParserContext::PARSER_JSON,
+              "Can't open include file /foo/bar");
+
+    // JSON keywords
+    testError("{ \"foo\": True }",
+              ParserContext::PARSER_JSON,
+              "<string>:1.10-13: JSON true reserved keyword is lower case only");
+    testError("{ \"foo\": False }",
+              ParserContext::PARSER_JSON,
+              "<string>:1.10-14: JSON false reserved keyword is lower case only");
+    testError("{ \"foo\": NULL }",
+              ParserContext::PARSER_JSON,
+              "<string>:1.10-13: JSON null reserved keyword is lower case only");
+    testError("{ \"foo\": Tru }",
+              ParserContext::PARSER_JSON,
+              "<string>:1.10: Invalid character: T");
+    testError("{ \"foo\": nul }",
+              ParserContext::PARSER_JSON,
+              "<string>:1.10: Invalid character: n");
+
+    // numbers
+    testError("123",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.1-3: syntax error, unexpected integer, "
+              "expecting {");
+    testError("-456",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.1-4: syntax error, unexpected integer, "
+              "expecting {");
+    testError("-0001",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.1-5: syntax error, unexpected integer, "
+              "expecting {");
+    testError("1234567890123456789012345678901234567890",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1-40: Failed to convert "
+              "1234567890123456789012345678901234567890"
+              " to an integer.");
+    testError("-3.14e+0",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.1-8: syntax error, unexpected floating point, "
+              "expecting {");
+    testError("1e50000",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1-7: Failed to convert 1e50000 "
+              "to a floating point.");
+
+    // strings
+    testError("\"aabb\"",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.1-6: syntax error, unexpected constant string, "
+              "expecting {");
+    testError("{ \"aabb\"err",
+              ParserContext::PARSER_JSON,
+              "<string>:1.9: Invalid character: e");
+    testError("{ err\"aabb\"",
+              ParserContext::PARSER_JSON,
+              "<string>:1.3: Invalid character: e");
+    testError("\"a\n\tb\"",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1-6: Invalid control in \"a\n\tb\"");
+    testError("\"a\\n\\tb\"",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.1-8: syntax error, unexpected constant string, "
+              "expecting {");
+    testError("\"a\\x01b\"",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1-8: Bad escape in \"a\\x01b\"");
+    testError("\"a\\u0162\"",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1-9: Unsupported unicode escape in \"a\\u0162\"");
+    testError("\"a\\u062z\"",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1-9: Bad escape in \"a\\u062z\"");
+    testError("\"abc\\\"",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1-6: Overflow escape in \"abc\\\"");
+
+    // from data_unittest.c
+    testError("\\a",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1: Invalid character: \\");
+    testError("\\",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1: Invalid character: \\");
+    testError("\\\"\\\"",
+              ParserContext::PARSER_JSON,
+              "<string>:1.1: Invalid character: \\");
+
+    // want a map
+    testError("[]\n",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.1: syntax error, unexpected [, "
+              "expecting {");
+    testError("[]\n",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.1: syntax error, unexpected [, "
+              "expecting {");
+    testError("{ 123 }\n",
+              ParserContext::PARSER_JSON,
+              "<string>:1.3-5: syntax error, unexpected integer, "
+              "expecting }");
+    testError("{ 123 }\n",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.3-5: syntax error, unexpected integer");
+    testError("{ \"foo\" }\n",
+              ParserContext::PARSER_JSON,
+              "<string>:1.9: syntax error, unexpected }, "
+              "expecting :");
+    testError("{ \"foo\" }\n",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.9: syntax error, unexpected }, expecting :");
+    testError("{ \"foo\":null }\n",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.3-7: got unexpected keyword "
+              "\"foo\" in toplevel map.");
+    testError("{ \"Control-agent\" }\n",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.19: syntax error, unexpected }, "
+              "expecting :");
+    testError("{ \"Control-agent\":",
+              ParserContext::PARSER_AGENT,
+              "<string>:1.19: syntax error, unexpected end of file, "
+              "expecting {");
+    testError("{}{}\n",
+              ParserContext::PARSER_JSON,
+              "<string>:1.3: syntax error, unexpected {, "
+              "expecting end of file");
+
+    // bad commas
+    testError("{ , }\n",
+              ParserContext::PARSER_JSON,
+              "<string>:1.3: syntax error, unexpected \",\", "
+              "expecting }");
+    testError("{ , \"foo\":true }\n",
+              ParserContext::PARSER_JSON,
+              "<string>:1.3: syntax error, unexpected \",\", "
+              "expecting }");
+    testError("{ \"foo\":true, }\n",
+              ParserContext::PARSER_JSON,
+              "<string>:1.15: syntax error, unexpected }, "
+              "expecting constant string");
+
+    // bad type
+    testError("{ \"Control-agent\":{\n"
+              "  \"http-port\":false }}\n",
+              ParserContext::PARSER_AGENT,
+              "<string>:2.15-19: syntax error, unexpected boolean, "
+              "expecting integer");
+
+    // unknown keyword
+    testError("{ \"Control-agent\":{\n"
+              " \"topping\": \"Mozarella\" }}\n",
+              ParserContext::PARSER_AGENT,
+              "<string>:2.2-10: got unexpected keyword "
+              "\"topping\" in Control-agent map.");
+}
+
+// Check unicode escapes
+TEST(ParserTest, unicodeEscapes) {
+    ConstElementPtr result;
+    string json;
+
+    // check we can reread output
+    for (char c = -128; c < 127; ++c) {
+        string ins(" ");
+        ins[1] = c;
+        ConstElementPtr e(new StringElement(ins));
+        json = e->str();
+        ASSERT_NO_THROW(
+        try {
+            ParserContext ctx;
+            result = ctx.parseString(json, ParserContext::PARSER_JSON);
+        } catch (const std::exception &x) {
+            cout << "EXCEPTION: " << x.what() << endl;
+            throw;
+        });
+        ASSERT_EQ(Element::string, result->getType());
+        EXPECT_EQ(ins, result->stringValue());
+    }
+}
+
+// This test checks that all representations of a slash are recognized properly.
+TEST(ParserTest, unicodeSlash) {
+    // check the 4 possible encodings of solidus '/'
+    ConstElementPtr result;
+    string json = "\"/\\/\\u002f\\u002F\"";
+    ASSERT_NO_THROW(
+    try {
+        ParserContext ctx;
+        result = ctx.parseString(json, ParserContext::PARSER_JSON);
+    } catch (const std::exception &x) {
+        cout << "EXCEPTION: " << x.what() << endl;
+        throw;
+    });
+    ASSERT_EQ(Element::string, result->getType());
+    EXPECT_EQ("////", result->stringValue());
+}
+
+};
+};
+};

+ 1 - 0
src/bin/dhcp4/tests/ctrl_dhcp4_srv_unittest.cc

@@ -42,6 +42,7 @@ using namespace isc::dhcp;
 using namespace isc::dhcp::test;
 using namespace isc::hooks;
 using namespace isc::stats;
+using namespace isc::test;
 
 namespace {
 

+ 1 - 0
src/bin/dhcp4/tests/parser_unittest.cc

@@ -10,6 +10,7 @@
 #include <testutils/io_utils.h>
 
 using namespace isc::data;
+using namespace isc::test;
 using namespace std;
 
 namespace isc {

+ 1 - 0
src/bin/dhcp6/tests/ctrl_dhcp6_srv_unittest.cc

@@ -39,6 +39,7 @@ using namespace isc::dhcp;
 using namespace isc::dhcp::test;
 using namespace isc::hooks;
 using namespace isc::stats;
+using namespace isc::test;
 
 namespace {
 

+ 1 - 0
src/bin/dhcp6/tests/parser_unittest.cc

@@ -11,6 +11,7 @@
 
 using namespace isc::data;
 using namespace std;
+using namespace isc::test;
 
 namespace isc {
 namespace dhcp {

+ 11 - 1
src/lib/cc/dhcp_config_error.h

@@ -10,6 +10,17 @@
 #include <exceptions/exceptions.h>
 
 namespace isc {
+
+/// @brief Exception raised when parsing of the input configuration fails.
+///
+/// This exception is expected to be thrown when parsing of the input
+/// configuration has failed. This exception is used by parsers.
+class ParseError : public isc::Exception {
+public:
+    ParseError(const char* file, size_t line, const char* what) :
+        isc::Exception(file, line, what) { };
+};
+
 namespace dhcp {
 
 /// An exception that is thrown if an error occurs while configuring
@@ -48,4 +59,3 @@ public:
 }; // end of isc namespace
 
 #endif // DHCP_CONFIG_ERROR_H
-
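A brief sketch of how the new top-level ParseError is expected to be raised; the helper function below is illustrative, while isc_throw is the standard Kea exception macro that records the throwing file and line:

    #include <cc/dhcp_config_error.h>
    #include <exceptions/exceptions.h>
    #include <string>

    // Hypothetical reporting helper: the streamed message becomes the
    // exception's what() text.
    void reportParseFailure(const std::string& detail) {
        isc_throw(isc::ParseError, "configuration parse failed: " << detail);
    }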

+ 1 - 1
src/lib/dhcp/tests/duid_factory_unittest.cc

@@ -180,7 +180,7 @@ DUIDFactoryTest::removeDefaultFile() const {
 
 std::string
 DUIDFactoryTest::readDefaultFile() const {
-    return (dhcp::test::readFile(absolutePath(DEFAULT_DUID_FILE)));
+    return (isc::test::readFile(absolutePath(DEFAULT_DUID_FILE)));
 }
 
 std::vector<uint8_t>

+ 4 - 4
src/lib/dhcpsrv/tests/cfg_duid_unittest.cc

@@ -165,7 +165,7 @@ TEST_F(CfgDUIDTest, createLLT) {
               duid->toText());
 
     // Verify that the DUID file has been created.
-    EXPECT_TRUE(dhcp::test::fileExists(absolutePath(DUID_FILE_NAME)));
+    EXPECT_TRUE(isc::test::fileExists(absolutePath(DUID_FILE_NAME)));
 }
 
 // This method checks that the DUID-EN can be created from the
@@ -185,7 +185,7 @@ TEST_F(CfgDUIDTest, createEN) {
     EXPECT_EQ("00:02:00:00:10:10:25:0f:3e:26:a7:62", duid->toText());
 
     // Verify that the DUID file has been created.
-    EXPECT_TRUE(dhcp::test::fileExists(absolutePath(DUID_FILE_NAME)));
+    EXPECT_TRUE(isc::test::fileExists(absolutePath(DUID_FILE_NAME)));
 }
 
 // This method checks that the DUID-LL can be created from the
@@ -205,7 +205,7 @@ TEST_F(CfgDUIDTest, createLL) {
     EXPECT_EQ("00:03:00:02:12:41:34:a4:b3:67", duid->toText());
 
     // Verify that the DUID file has been created.
-    EXPECT_TRUE(dhcp::test::fileExists(absolutePath(DUID_FILE_NAME)));
+    EXPECT_TRUE(isc::test::fileExists(absolutePath(DUID_FILE_NAME)));
 }
 
 // This test verifies that it is possible to disable storing
@@ -226,7 +226,7 @@ TEST_F(CfgDUIDTest, createDisableWrite) {
     EXPECT_EQ("00:02:00:00:10:10:25:0f:3e:26:a7:62", duid->toText());
 
     // DUID persistence is disabled so there should be no DUID file.
-    EXPECT_FALSE(dhcp::test::fileExists(absolutePath(DUID_FILE_NAME)));
+    EXPECT_FALSE(isc::test::fileExists(absolutePath(DUID_FILE_NAME)));
 }
 
 } // end of anonymous namespace

+ 1 - 3
src/lib/testutils/io_utils.cc

@@ -12,7 +12,6 @@
 #include <string>
 
 namespace isc {
-namespace dhcp {
 namespace test {
 
 bool fileExists(const std::string& file_path) {
@@ -109,6 +108,5 @@ std::string decommentJSONfile(const std::string& input_file) {
     return (outfile);
 }
 
-}; // end of isc::dhcp::test namespace
-}; // end of isc::dhcp namespace
+}; // end of isc::test namespace
 }; // end of isc namespace

+ 1 - 3
src/lib/testutils/io_utils.h

@@ -11,7 +11,6 @@
 #include <sys/stat.h>
 
 namespace isc {
-namespace dhcp {
 namespace test {
 
 /// @brief Checks if specified file exists.
@@ -40,8 +39,7 @@ std::string readFile(const std::string& file_path);
 /// @throw BadValue if the input file cannot be opened
 std::string decommentJSONfile(const std::string& input_file);
 
-}; // end of isc::dhcp::test namespace
-}; // end of isc::dhcp namespace
+}; // end of isc::test namespace
 }; // end of isc namespace
 
 #endif // TEST_IO_UTILS_H