
[3413] src/lib/python removed :)

Tomek Mrugalski, 11 years ago · commit bcdf6647ef
100 changed files with 1 addition and 11579 deletions
  1. configure.ac (+0 -28)
  2. src/lib/Makefile.am (+1 -1)
  3. src/lib/python/.gitignore (+0 -1)
  4. src/lib/python/Makefile.am (+0 -10)
  5. src/lib/python/bind10_config.py.in (+0 -111)
  6. src/lib/python/isc/Makefile.am (+0 -12)
  7. src/lib/python/isc/__init__.py (+0 -3)
  8. src/lib/python/isc/bind10/Makefile.am (+0 -10)
  9. src/lib/python/isc/bind10/__init__.py (+0 -0)
  10. src/lib/python/isc/bind10/component.py (+0 -714)
  11. src/lib/python/isc/bind10/sockcreator.py (+0 -264)
  12. src/lib/python/isc/bind10/socket_cache.py (+0 -303)
  13. src/lib/python/isc/bind10/special_component.py (+0 -124)
  14. src/lib/python/isc/bind10/tests/Makefile.am (+0 -30)
  15. src/lib/python/isc/bind10/tests/component_test.py (+0 -1078)
  16. src/lib/python/isc/bind10/tests/sockcreator_test.py (+0 -333)
  17. src/lib/python/isc/bind10/tests/socket_cache_test.py (+0 -396)
  18. src/lib/python/isc/cc/Makefile.am (+0 -24)
  19. src/lib/python/isc/cc/__init__.py (+0 -2)
  20. src/lib/python/isc/cc/cc_generated/.gitignore (+0 -2)
  21. src/lib/python/isc/cc/cc_generated/Makefile.am (+0 -32)
  22. src/lib/python/isc/cc/data.py (+0 -276)
  23. src/lib/python/isc/cc/logger.py (+0 -26)
  24. src/lib/python/isc/cc/message.py (+0 -42)
  25. src/lib/python/isc/cc/proto_defs.py (+0 -2)
  26. src/lib/python/isc/cc/pycc_messages.mes (+0 -20)
  27. src/lib/python/isc/cc/session.py (+0 -337)
  28. src/lib/python/isc/cc/tests/.gitignore (+0 -1)
  29. src/lib/python/isc/cc/tests/Makefile.am (+0 -29)
  30. src/lib/python/isc/cc/tests/cc_test.in (+0 -27)
  31. src/lib/python/isc/cc/tests/data_test.py (+0 -244)
  32. src/lib/python/isc/cc/tests/message_test.py (+0 -66)
  33. src/lib/python/isc/cc/tests/sendcmd.py (+0 -37)
  34. src/lib/python/isc/cc/tests/session_test.py (+0 -465)
  35. src/lib/python/isc/cc/tests/test_session.py (+0 -75)
  36. src/lib/python/isc/config/Makefile.am (+0 -32)
  37. src/lib/python/isc/config/__init__.py (+0 -3)
  38. src/lib/python/isc/config/ccsession.py (+0 -869)
  39. src/lib/python/isc/config/cfgmgr.py (+0 -612)
  40. src/lib/python/isc/config/cfgmgr_messages.mes (+0 -79)
  41. src/lib/python/isc/config/config_data.py (+0 -927)
  42. src/lib/python/isc/config/config_messages.mes (+0 -39)
  43. src/lib/python/isc/config/module_spec.py (+0 -455)
  44. src/lib/python/isc/config/tests/.gitignore (+0 -1)
  45. src/lib/python/isc/config/tests/Makefile.am (+0 -35)
  46. src/lib/python/isc/config/tests/config_data_test.py (+0 -871)
  47. src/lib/python/isc/config/tests/config_test.in (+0 -36)
  48. src/lib/python/isc/config/tests/module_spec_test.py (+0 -447)
  49. src/lib/python/isc/config/tests/unittest_fakesession.py (+0 -112)
  50. src/lib/python/isc/dns/Makefile.am (+0 -8)
  51. src/lib/python/isc/dns/__init__.py (+0 -1)
  52. src/lib/python/isc/log/Makefile.am (+0 -39)
  53. src/lib/python/isc/log/__init__.py (+0 -33)
  54. src/lib/python/isc/log/log.cc (+0 -779)
  55. src/lib/python/isc/log/tests/.gitignore (+0 -1)
  56. src/lib/python/isc/log/tests/Makefile.am (+0 -43)
  57. src/lib/python/isc/log/tests/check_output.sh (+0 -3)
  58. src/lib/python/isc/log/tests/console.out (+0 -4)
  59. src/lib/python/isc/log/tests/log_console.py.in (+0 -15)
  60. src/lib/python/isc/log_messages/Makefile.am (+0 -52)
  61. src/lib/python/isc/log_messages/README (+0 -68)
  62. src/lib/python/isc/log_messages/__init__.py (+0 -3)
  63. src/lib/python/isc/log_messages/cfgmgr_messages.py (+0 -1)
  64. src/lib/python/isc/log_messages/cmdctl_messages.py (+0 -1)
  65. src/lib/python/isc/log_messages/config_messages.py (+0 -1)
  66. src/lib/python/isc/log_messages/dbutil_messages.py (+0 -1)
  67. src/lib/python/isc/log_messages/ddns_messages.py (+0 -1)
  68. src/lib/python/isc/log_messages/gen-forwarder.sh (+0 -29)
  69. src/lib/python/isc/log_messages/init_messages.py (+0 -1)
  70. src/lib/python/isc/log_messages/libddns_messages.py (+0 -1)
  71. src/lib/python/isc/log_messages/libmemmgr_messages.py (+0 -1)
  72. src/lib/python/isc/log_messages/libxfrin_messages.py (+0 -1)
  73. src/lib/python/isc/log_messages/loadzone_messages.py (+0 -1)
  74. src/lib/python/isc/log_messages/memmgr_messages.py (+0 -1)
  75. src/lib/python/isc/log_messages/msgq_messages.py (+0 -1)
  76. src/lib/python/isc/log_messages/notify_out_messages.py (+0 -1)
  77. src/lib/python/isc/log_messages/pycc_messages.py (+0 -1)
  78. src/lib/python/isc/log_messages/server_common_messages.py (+0 -1)
  79. src/lib/python/isc/log_messages/stats_httpd_messages.py (+0 -1)
  80. src/lib/python/isc/log_messages/stats_messages.py (+0 -1)
  81. src/lib/python/isc/log_messages/util_messages.py (+0 -1)
  82. src/lib/python/isc/log_messages/work/.gitignore (+0 -2)
  83. src/lib/python/isc/log_messages/work/Makefile.am (+0 -14)
  84. src/lib/python/isc/log_messages/work/README (+0 -5)
  85. src/lib/python/isc/log_messages/work/__init__.py.in (+0 -3)
  86. src/lib/python/isc/log_messages/xfrin_messages.py (+0 -1)
  87. src/lib/python/isc/log_messages/xfrout_messages.py (+0 -1)
  88. src/lib/python/isc/log_messages/zonemgr_messages.py (+0 -1)
  89. src/lib/python/isc/net/Makefile.am (+0 -10)
  90. src/lib/python/isc/net/__init__.py (+0 -3)
  91. src/lib/python/isc/net/addr.py (+0 -46)
  92. src/lib/python/isc/net/parse.py (+0 -48)
  93. src/lib/python/isc/net/tests/Makefile.am (+0 -24)
  94. src/lib/python/isc/net/tests/addr_test.py (+0 -48)
  95. src/lib/python/isc/net/tests/parse_test.py (+0 -85)
  96. src/lib/python/isc/sysinfo/Makefile.am (+0 -11)
  97. src/lib/python/isc/sysinfo/__init__.py (+0 -1)
  98. src/lib/python/isc/sysinfo/sysinfo.py (+0 -514)
  99. src/lib/python/isc/sysinfo/tests/Makefile.am (+0 -23)
  100. src/lib/python/isc/sysinfo/tests/sysinfo_test.py (+0 -0)

+ 0 - 28
configure.ac

@@ -1488,34 +1488,6 @@ AC_CONFIG_FILES([compatcheck/Makefile
                  src/lib/log/tests/severity_test.sh
                  src/lib/log/tests/tempdir.h
                  src/lib/Makefile
-                 src/lib/python/bind10_config.py
-                 src/lib/python/isc/bind10/Makefile
-                 src/lib/python/isc/bind10/tests/Makefile
-                 src/lib/python/isc/cc/cc_generated/Makefile
-                 src/lib/python/isc/cc/Makefile
-                 src/lib/python/isc/cc/tests/cc_test
-                 src/lib/python/isc/cc/tests/Makefile
-                 src/lib/python/isc/config/Makefile
-                 src/lib/python/isc/config/tests/config_test
-                 src/lib/python/isc/config/tests/Makefile
-                 src/lib/python/isc/dns/Makefile
-                 src/lib/python/isc/log/Makefile
-                 src/lib/python/isc/log_messages/Makefile
-                 src/lib/python/isc/log_messages/work/__init__.py
-                 src/lib/python/isc/log_messages/work/Makefile
-                 src/lib/python/isc/log/tests/log_console.py
-                 src/lib/python/isc/log/tests/Makefile
-                 src/lib/python/isc/Makefile
-                 src/lib/python/isc/net/Makefile
-                 src/lib/python/isc/net/tests/Makefile
-                 src/lib/python/isc/sysinfo/Makefile
-                 src/lib/python/isc/sysinfo/tests/Makefile
-                 src/lib/python/isc/testutils/Makefile
-                 src/lib/python/isc/util/cio/Makefile
-                 src/lib/python/isc/util/cio/tests/Makefile
-                 src/lib/python/isc/util/Makefile
-                 src/lib/python/isc/util/tests/Makefile
-                 src/lib/python/Makefile
                  src/lib/testutils/Makefile
                  src/lib/testutils/testdata/Makefile
                  src/lib/util/io/Makefile

+ 1 - 1
src/lib/Makefile.am

@@ -1,4 +1,4 @@
 # The following build order must be maintained.
 SUBDIRS = exceptions util log hooks cryptolink dns cc config \
-          asiolink asiodns testutils python dhcp dhcp_ddns \
+          asiolink asiodns testutils dhcp dhcp_ddns \
           dhcpsrv

+ 0 - 1
src/lib/python/.gitignore

@@ -1 +0,0 @@
-/bind10_config.py

+ 0 - 10
src/lib/python/Makefile.am

@@ -1,10 +0,0 @@
-SUBDIRS = isc
-
-nodist_python_PYTHON =	bind10_config.py
-pythondir = $(pyexecdir)
-
-CLEANFILES = bind10_config.pyc bind10_config.pyo
-CLEANDIRS = __pycache__
-
-clean-local:
-	rm -rf $(CLEANDIRS)

+ 0 - 111
src/lib/python/bind10_config.py.in

@@ -1,111 +0,0 @@
-# Copyright (C) 2010  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-# This is a base-level module intended to provide configure-time
-# variables to python scripts and libraries.
-import os
-
-def get_specfile_location(module_name):
-    """Return the path to the module spec file following the common convention.
-
-    This method generates the path commonly used by most BIND 10
-    modules, determined by a well known prefix and the module name.
-
-    A specific module can override this method if it uses a different
-    path for the spec file.
-
-    """
-    # First check if it's running under an 'in-source' environment,
-    # then try commonly used paths and file names.  If found, use it.
-    for ev in ['B10_FROM_SOURCE', 'B10_FROM_BUILD']:
-        if ev in os.environ:
-            specfile = os.environ[ev] + '/src/bin/' + module_name + \
-                       '/' + module_name + '.spec'
-            if os.path.exists(specfile):
-                return specfile
-    # Otherwise, just use the installed path, whether or not it really
-    # exists; leave error handling to the caller.
-    specfile_path = '@datadir@/@PACKAGE@'\
-        .replace('${datarootdir}', '@datarootdir@')\
-        .replace('${prefix}', '@prefix@')
-    return specfile_path + '/' + module_name + '.spec'
-
-def reload():
-    # In a function, for testing purposes
-    global BIND10_MSGQ_SOCKET_FILE
-    global DATA_PATH
-    global PLUGIN_PATHS
-    global PREFIX
-    global LIBEXECPATH
-    global SYSCONFPATH
-    BIND10_MSGQ_SOCKET_FILE = os.path.join("@localstatedir@",
-                                           "@PACKAGE_NAME@",
-                                           "msgq_socket").replace("${prefix}",
-                                                                  "@prefix@")
-    PREFIX = "@prefix@"
-    SYSCONFPATH="@sysconfdir@/@PACKAGE@".replace('${prefix}', PREFIX)
-
-    # B10_FROM_SOURCE is set in the environment for internal tests and
-    # an experimental run without installation.  In that case we need to
-    # specialize some configuration variables, generally so that they refer
-    # to somewhere in the source tree instead of the appropriate places
-    # after installation.
-    #
-    # DATA_PATH: used by the config manager to find configuration files.
-    #  When "FROM_SOURCE", we use data files from a directory relative to the
-    #  value of that variable, or, if defined, relative to the value of
-    #  B10_FROM_SOURCE_LOCALSTATEDIR.  Otherwise we use the ones installed on
-    #  the system.
-    # PLUGIN_PATHS: configuration modules that are not associated to specific
-    #  process
-    # LIBEXECPATH: Paths to programs invoked by the b10-init process
-    #  The b10-init process (directly or via a helper module) uses this as
-    #  the preferred PATH before starting a child process.
-    #  When "FROM_SOURCE", it lists the directories where the programs are
-    #  built so that when BIND 10 is experimentally started on the source
-    #  tree the programs in the tree (not installed ones) will be used.
-    # SYSCONFPATH: Path where the system-wide configuration files are
-    # stored (e.g. <prefix>/var/<package name>). This value is *not*
-    # overwritten if B10_FROM_SOURCE is specified.
-    #
-    # B10_FROM_SOURCE_LOCALSTATEDIR is specifically intended to be used for
-    # tests where we want to use various types of configuration within the test
-    # environment.  (We may want to make it even more generic so that the path
-    # is passed from the b10-init process)
-    if "B10_FROM_SOURCE" in os.environ:
-        if "B10_FROM_SOURCE_LOCALSTATEDIR" in os.environ:
-            DATA_PATH = os.environ["B10_FROM_SOURCE_LOCALSTATEDIR"]
-        else:
-            DATA_PATH = os.environ["B10_FROM_SOURCE"]
-        PLUGIN_PATHS = [os.environ["B10_FROM_SOURCE"] +
-                            '/src/bin/cfgmgr/local_plugins',
-                             os.environ["B10_FROM_SOURCE"] +
-                            '/src/bin/cfgmgr/plugins']
-        programdirs = ['auth', 'cfgmgr', 'cmdctl', 'ddns', 'dhcp6', 'msgq',
-                       'resolver', 'sockcreator', 'stats', 'xfrin', 'xfrout',
-                       'zonemgr']
-        LIBEXECPATH = ':'.join(['@abs_top_builddir@/src/bin/' + p for p in
-                                programdirs])
-    else:
-        DATA_PATH = "@localstatedir@/@PACKAGE@".replace("${prefix}", PREFIX)
-        PLUGIN_PATHS = ["@prefix@/share/@PACKAGE@/config_plugins"]
-        LIBEXECPATH = ("@libexecdir@/@PACKAGE@"). \
-            replace("${exec_prefix}", "@exec_prefix@"). \
-            replace("${prefix}", "@prefix@")
-    # For testing the plugins so they can find their own spec files
-    if "B10_TEST_PLUGIN_DIR" in os.environ:
-        PLUGIN_PATHS = os.environ["B10_TEST_PLUGIN_DIR"].split(':')
-
-reload()
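
For readers who relied on the removed bind10_config helper: its spec-file lookup preferred an in-source tree named by B10_FROM_SOURCE or B10_FROM_BUILD and otherwise fell back to the installed data directory. Below is a minimal sketch of that pattern in plain Python; the find_specfile name and the installed_dir default are placeholders, not the configured @datadir@/@PACKAGE@ value.

    import os

    def find_specfile(module_name, installed_dir="/usr/local/share/bind10"):
        # Prefer an in-source tree, as the removed helper did.
        for ev in ("B10_FROM_SOURCE", "B10_FROM_BUILD"):
            if ev in os.environ:
                candidate = os.path.join(os.environ[ev], "src", "bin",
                                         module_name, module_name + ".spec")
                if os.path.exists(candidate):
                    return candidate
        # Otherwise return the installed path; error handling is the caller's job.
        return os.path.join(installed_dir, module_name + ".spec")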

+ 0 - 12
src/lib/python/isc/Makefile.am

@@ -1,12 +0,0 @@
-# The following build order must be maintained.
-SUBDIRS = util cc config dns log net testutils bind10 log_messages \
-          sysinfo
-
-python_PYTHON = __init__.py
-
-pythondir = $(pyexecdir)/isc
-
-CLEANDIRS = __pycache__
-
-clean-local:
-	rm -rf $(CLEANDIRS)

+ 0 - 3
src/lib/python/isc/__init__.py

@@ -1,3 +0,0 @@
-"""
-This is the top directory for common BIND 10 Python modules and packages.
-"""

+ 0 - 10
src/lib/python/isc/bind10/Makefile.am

@@ -1,10 +0,0 @@
-SUBDIRS = . tests
-
-python_PYTHON = __init__.py sockcreator.py component.py special_component.py \
-		socket_cache.py
-pythondir = $(pyexecdir)/isc/bind10
-
-CLEANDIRS = __pycache__
-
-clean-local:
-	rm -rf $(CLEANDIRS)

+ 0 - 0
src/lib/python/isc/bind10/__init__.py


+ 0 - 714
src/lib/python/isc/bind10/component.py

@@ -1,714 +0,0 @@
-# Copyright (C) 2011  Internet Systems Consortium, Inc. ("ISC")
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-"""
-Module for managing components (abstraction of process). It allows starting
-them in given order, handling when they crash (what happens depends on kind
-of component) and shutting down. It also handles the configuration of this.
-
-Dependencies between them are not yet handled. It might turn out they are
-needed, in that case they will be added sometime in future.
-
-This framework allows for a single process to be started multiple times (by
-specifying multiple components with the same configuration). We might want
-to add a more convenient support (like providing a count argument to the
-configuration). This is yet to be designed.
-"""
-
-import isc.log
-from isc.log_messages.init_messages import *
-import time
-import os
-import signal
-
-logger = isc.log.Logger("init")
-DBG_TRACE_DATA = 20
-DBG_TRACE_DETAILED = 80
-
-START_CMD = 'start'
-STOP_CMD = 'stop'
-
-STARTED_OK_TIME = 10
-COMPONENT_RESTART_DELAY = 10
-
-STATE_DEAD = 'dead'
-STATE_STOPPED = 'stopped'
-STATE_RESTARTING = 'restarting'
-STATE_RUNNING = 'running'
-
-def get_signame(signal_number):
-    """Return the symbolic name for a signal."""
-    for sig in dir(signal):
-        if sig.startswith("SIG") and sig[3].isalnum():
-            if getattr(signal, sig) == signal_number:
-                return sig
-    return "unknown signal"
-
-class BaseComponent:
-    """
-    This represents a single component. This one is an abstract base class.
-    There are some methods which should be left untouched, but there are
-    others which define the interface only and should be overridden in
-    concrete implementations.
-
-    The component is in one of the following states:
-    - Stopped - it is either not started yet or it was explicitly stopped.
-      The component is created in this state (it must be asked to start
-      explicitly).
-    - Running - after start() was called, it started successfully and is
-      now running.
-    - Restarting - the component failed (crashed) and is waiting for a restart
-    - Dead - it failed and can not be resurrected.
-
-    Init
-      |            stop()
-      |  +-----------------------+
-      |  |                       |
-      v  |  start()  success     |
-    Stopped --------+--------> Running <----------+
-                    |            |                |
-                    |failure     | failed()       |
-                    |            |                |
-                    v            |                | start()/restart()
-                    +<-----------+                |
-                    |                             |
-                    |  kind == dispensable or kind|== needed and failed late
-                    +-----------------------> Restarting
-                    |
-                    | kind == core or kind == needed and it failed too soon
-                    v
-                  Dead
-
-    Note that there are still situations which are not handled properly here.
-    We don't recognize a component that is starting up, but not ready yet, one
-    that is already shutting down, impossible to stop, etc. We need to add more
-    states in future to handle it properly.
-    """
-    def __init__(self, b10_init, kind):
-        """
-        Creates the component in not running mode.
-
-        The parameters are:
-        - `b10_init` the b10_init object to plug into. The component needs
-           to plug into it to know when it failed, etc.
-        - `kind` is the kind of component. It may be one of:
-          * 'core' means the system can't run without it and it can't be
-            safely restarted. If it does not start, the system is brought
-            down. If it crashes, the system is turned off as well (with
-            non-zero exit status).
-          * 'needed' means the system is able to restart the component,
-            but it is vital part of the service (like auth server). If
-            it fails to start or crashes in less than 10s after the first
-            startup, the system is brought down. If it crashes later on,
-            it is restarted (see below).
-          * 'dispensable' means the component should be running, but if it
-            doesn't start or crashes for some reason, the system simply tries
-            to restart it and keeps running.
-
-        For components that are restarted, the restarts are not always
-        immediate; if the component has run for more than
-        COMPONENT_RESTART_DELAY (10) seconds, they are restarted right
-        away. If the component has not run that long, the system waits
-        until that time has passed (since the last start) until the
-        component is restarted.
-
-        Note that the __init__ method of child class should have these
-        parameters:
-
-        __init__(self, process, b10_init, kind, address=None, params=None)
-
-        The extra parameters are:
-        - `process` - which program should be started.
-        - `address` - the address on message bus, used to talk to the
-           component.
-        - `params` - parameters to the program.
-
-        The methods you should not override are:
-        - start
-        - stop
-        - failed
-        - running
-
-        You should override:
-        - _start_internal
-        - _stop_internal
-        - _failed_internal (if you like, the empty default might be suitable)
-        - name
-        - pid
-        - kill
-        """
-        if kind not in ['core', 'needed', 'dispensable']:
-            raise ValueError('Component kind can not be ' + kind)
-        self.__state = STATE_STOPPED
-        self._kind = kind
-        self._b10_init = b10_init
-        self._original_start_time = None
-
-    def start(self):
-        """
-        Start the component for the first time or restart it. It runs
-        _start_internal to actually start the component.
-
-        If you try to start an already running component, it raises ValueError.
-        """
-        if self.__state == STATE_DEAD:
-            raise ValueError("Can't resurrect already dead component")
-        if self.is_running():
-            raise ValueError("Can't start already running component")
-        logger.info(BIND10_COMPONENT_START, self.name())
-        self.__state = STATE_RUNNING
-        self.__start_time = time.time()
-        if self._original_start_time is None:
-            self._original_start_time = self.__start_time
-        self._restart_time = None
-        try:
-            self._start_internal()
-        except Exception as e:
-            logger.error(BIND10_COMPONENT_START_EXCEPTION, self.name(), e)
-            try:
-                self.failed(None)
-            finally:
-                # Even failed() can fail if this happens during initial startup
-                # time.  In that case we'd rather propagate the original reason
-                # for the failure than the fact that failed() failed.  So we
-                # always re-raise the original exception.
-                raise e
-
-    def stop(self):
-        """
-        Stop the component. It calls _stop_internal to do the actual
-        stopping.
-
-        If you try to stop a component that is not running, it raises
-        ValueError.
-        """
-        # This is not tested. It talks with the outside world, which is out
-        # of scope of unittests.
-        if not self.is_running():
-            raise ValueError("Can't stop a component which is not running")
-        logger.info(BIND10_COMPONENT_STOP, self.name())
-        self.__state = STATE_STOPPED
-        self._stop_internal()
-
-    def failed(self, exit_code):
-        """
-        Notify the component it crashed. This will be called from b10_init
-        object.
-
-        If you try to call failed on a component that is not running,
-        a ValueError is raised.
-
-        If it is a core component or needed component and it was started only
-        recently, the component will become dead and will ask b10_init to shut
-        down with error exit status. A dead component can't be started again.
-
-        Otherwise the component will try to restart.
-
-        The exit code is used for logging. It might be None.
-
-        It calls _failed_internal internally.
-
-        Returns True if the process was immediately restarted, returns
-                False is the process was not restarted, either because
-                False if the process was not restarted, either because
-                the component is to be restarted later.
-        """
-
-        if exit_code is not None:
-            if os.WIFEXITED(exit_code):
-                exit_str = "process exited normally with exit status %d" % (exit_code)
-            elif os.WIFSIGNALED(exit_code):
-                sig = os.WTERMSIG(exit_code)
-                signame = get_signame(sig)
-                if os.WCOREDUMP(exit_code):
-                    exit_str = "process dumped core with exit status %d (killed by signal %d: %s)" % (exit_code, sig, signame)
-                else:
-                    exit_str = "process terminated with exit status %d (killed by signal %d: %s)" % (exit_code, sig, signame)
-            else:
-                exit_str = "unknown condition with exit status %d" % (exit_code)
-        else:
-            exit_str = "unknown condition"
-
-        logger.error(BIND10_COMPONENT_FAILED, self.name(), self.pid(),
-                     exit_str)
-        if not self.is_running():
-            raise ValueError("Can't fail component that isn't running")
-        self.__state = STATE_RESTARTING # tentatively set, maybe changed to DEAD
-        self._failed_internal()
-        # If it is a core component or the needed component failed to start
-        # (including it stopped really soon)
-        if self._kind == 'core' or \
-            (self._kind == 'needed' and time.time() - STARTED_OK_TIME <
-             self._original_start_time):
-            self.__state = STATE_DEAD
-            logger.fatal(BIND10_COMPONENT_UNSATISFIED, self.name())
-            self._b10_init.component_shutdown(1)
-            return False
-        # This means we want to restart
-        else:
-            # if the component was only running for a short time, don't
-            # restart right away, but set a time it wants to restarted,
-            # and return that it wants to be restarted later
-            self.set_restart_time()
-            return self.restart()
-
-    def set_restart_time(self):
-        """Calculates and sets the time this component should be restarted.
-           Currently, it uses a very basic algorithm; start time +
-           COMPONENT_RESTART_DELAY (10 seconds). This algorithm may be improved upon
-           in the future.
-        """
-        self._restart_at = self.__start_time + COMPONENT_RESTART_DELAY
-
-    def get_restart_time(self):
-        """Returns the time at which this component should be restarted."""
-        return self._restart_at
-
-    def restart(self, now = None):
-        """Restarts the component if it has a restart_time and if the value
-           of the restart_time is smaller than 'now'.
-
-           If the parameter 'now' is given, its value will be used instead
-           of calling time.time().
-
-           Returns True if the component is restarted, False if not."""
-        if now is None:
-            now = time.time()
-        if self.get_restart_time() is not None and\
-           self.get_restart_time() < now:
-            self.start()
-            return True
-        else:
-            return False
-
-    def is_running(self):
-        """
-        Informs if the component is currently running. It assumes the failed
-        is called whenever the component really fails and there might be some
-        time in between actual failure and the call, so this might be
-        inaccurate (it corresponds to the thing the object thinks is true, not
-        to the real "external" state).
-
-        It is not expected for this method to be overridden.
-        """
-        return self.__state == STATE_RUNNING
-
-    def is_restarting(self):
-        """Informs if the component has failed and is waiting for a restart.
-
-        Unlike the case of is_running(), if this returns True it always means
-        the corresponding process has died and not yet restarted.
-
-        """
-        return self.__state == STATE_RESTARTING
-
-    def _start_internal(self):
-        """
-        This method does the actual starting of a process. You need to override
-        this method to do the actual starting.
-
-        The ability to override this method presents some flexibility. It
-        allows processes started in a strange way, as well as components that
-        have no processes at all or components with multiple processes (in case
-        of multiple processes, care should be taken to make their
-        started/stopped state in sync and all the processes that can fail
-        should be registered).
-
-        You should register all the processes created by calling
-        self._b10_init.register_process.
-        """
-        pass
-
-    def _stop_internal(self):
-        """
-        This is the method that does the actual stopping of a component.
-        You need to provide it in a concrete implementation.
-
-        Also, note that it is a bad idea to raise exceptions from here.
-        Under such circumstance, the component will be considered stopped,
-        and the exception propagated, but we can't be sure it really is
-        dead.
-        """
-        pass
-
-    def _failed_internal(self):
-        """
-        This method is called from failed. You can replace it if you need
-        some specific behaviour when the component crashes. The default
-        implementation is empty.
-
-        Do not raise exceptions from here, please. The proper shutdown
-        would not have happened.
-        """
-        pass
-
-    def name(self):
-        """
-        Provides human readable name of the component, for logging and similar
-        purposes.
-
-        You need to provide this method in a concrete implementation.
-        """
-        pass
-
-    def pid(self):
-        """
-        Provides a PID of a process, if the component is a real running process.
-        This may return None in cases when there's no process involved with the
-        component or in case the component is not started yet.
-
-        However, it is expected the component preserves the pid after it was
-        stopped, to ensure we can log it when we ask it to be killed (in case
-        the process refused to stop willingly).
-
-        You need to provide this method in a concrete implementation.
-        """
-        pass
-
-    def kill(self, forceful=False):
-        """
-        Kills the component.
-
-        If forceful is true, it should do so in a more direct and aggressive way
-        (for example by using SIGKILL or some equivalent). If it is false, a more
-        peaceful way should be used (SIGTERM or equivalent).
-
-        You need to provide this method in a concrete implementation.
-        """
-        pass
-
-    def address(self):
-        """
-        Provides the name of the address used on the message bus
-
-        You need to provide this method in a concrete implementation.
-        """
-        pass
-
-class Component(BaseComponent):
-    """
-    The most common implementation of a component. It can be used either
-    directly, and it will just start the process without anything special,
-    or slightly customised by passing a start_func hook to the __init__
-    to change the way it starts.
-
-    If such customisation isn't enough, you should inherit BaseComponent
-    directly. It is not recommended to override methods of this class
-    on one-by-one basis.
-    """
-    def __init__(self, process, b10_init, kind, address=None, params=None,
-                 start_func=None):
-        """
-        Creates the component in not running mode.
-
-        The parameters are:
-        - `process` is the name of the process to start.
-        - `b10_init` the b10-init object to plug into. The component needs to
-          plug into it to know when it failed, etc.
-        - `kind` is the kind of component. Refer to the documentation of
-          BaseComponent for details.
-        - `address` is the address on message bus. It is used to ask it to
-            shut down at the end. If you specialize the class for a component
-            that is shut down differently, it might be None.
-        - `params` is a list of parameters to pass to the process when it
-           starts. It is currently unused and this support is left out for
-           now.
-        - `start_func` is a function called when it is started. It is supposed
-           to start up the process and return a ProcInfo object describing it.
-           There's a sensible default if not provided, which just launches
-           the program without any special care.
-        """
-        BaseComponent.__init__(self, b10_init, kind)
-        self._process = process
-        self._start_func = start_func
-        self._address = address
-        self._params = params
-        self._procinfo = None
-
-    def _start_internal(self):
-        """
-        You can change the "core" of this function by setting self._start_func
-        to a function without parameters. Such function should start the
-        process and return the procinfo object describing the running process.
-
-        If you don't provide the _start_func, the usual startup by calling
-        b10_init.start_simple is performed.
-        """
-        # This one is not tested. For one, it starts a real process
-        # which is out of scope of unit tests, for another, it just
-        # delegates the starting to other function in b10_init (if a derived
-        # class does not provide an override function), which is tested
-        # by use.
-        if self._start_func is not None:
-            procinfo = self._start_func()
-        else:
-            # TODO Handle params, etc
-            procinfo = self._b10_init.start_simple(self._process)
-        self._procinfo = procinfo
-        self._b10_init.register_process(self.pid(), self)
-
-    def _stop_internal(self):
-        self._b10_init.stop_process(self._process, self._address, self.pid())
-        # TODO Some way to wait for the process that doesn't want to
-        # terminate and kill it would prove nice (or add it to b10_init
-        # somewhere?)
-
-    def name(self):
-        """
-        Returns the name, derived from the process name.
-        """
-        return self._process
-
-    def pid(self):
-        return self._procinfo.pid if self._procinfo is not None else None
-
-    def kill(self, forcefull=False):
-        if self._procinfo is not None:
-            if forcefull:
-                self._procinfo.process.kill()
-            else:
-                self._procinfo.process.terminate()
-
-    def address(self):
-        """
-        Returns the name of the address used on the message bus
-        """
-        return self._address
-
-class Configurator:
-    """
-    This thing keeps track of configuration changes and starts and stops
-    components as it goes. It also handles the initial startup and final
-    shutdown.
-
-    Note that this will allow you to stop (by invoking reconfigure) a core
-    component. There should be some kind of layer protecting users from ever
-    doing so (users must not stop the config manager, message queue and stuff
-    like that or the system won't start again). However, if a user specifies
-    b10-auth as core, it is safe to stop that one.
-
-    The parameters are:
-    * `b10_init`: The b10-init we are managing for.
-    * `specials`: Dict of specially started components. Each item is a class
-      representing the component.
-
-    The configuration passed to it (by startup() and reconfigure()) is a
-    dictionary, each item represents one component that should be running.
-    The key is a unique identifier used to reference the component. The
-    value is a dictionary describing the component. All items in the
-    description are optional unless told otherwise and they are as follows:
-    * `special` - Some components are started in a special way. If it is
-      present, it specifies which class from the specials parameter should
-      be used to create the component. In that case, some of the following
-      items might be irrelevant, depending on the special component chosen.
-      If it is not there, the basic Component class is used.
-    * `process` - Name of the executable to start. If it is not present,
-      it defaults to the identifier of the component.
-    * `kind` - The kind of component, one of 'core', 'needed' and
-      'dispensable'. This specifies what happens if the component fails.
-      This one is required.
-    * `address` - The address of the component on message bus. It is used
-      to shut down the component. All special components currently either
-      know their own address or don't need one and ignore it. The common
-      components should provide this.
-    * `params` - The command line parameters of the executable. Defaults
-      to no parameters. It is currently unused.
-    * `priority` - When starting the component, the components with higher
-      priority are started before the ones with lower priority. If it is
-      not present, it defaults to 0.
-    """
-    def __init__(self, b10_init, specials = {}):
-        """
-        Initializes the configurator, but nothing is started yet.
-
-        The b10_init parameter is the b10-init object used to start and stop
-        processes.
-        """
-        self.__b10_init = b10_init
-        # These could be __private, but as we access them from within unittest,
-        # it's more comfortable to have them just _protected.
-
-        # They are tuples (configuration, component)
-        self._components = {}
-        self._running = False
-        self.__specials = specials
-
-    def __reconfigure_internal(self, old, new):
-        """
-        Does a switch from one configuration to another.
-        """
-        self._run_plan(self._build_plan(old, new))
-
-    def startup(self, configuration):
-        """
-        Starts the first set of processes. This configuration is expected
-        to be hardcoded from the b10-init itself to start the configuration
-        manager and other similar things.
-        """
-        if self._running:
-            raise ValueError("Trying to start the component configurator " +
-                             "twice")
-        logger.info(BIND10_CONFIGURATOR_START)
-        self.__reconfigure_internal(self._components, configuration)
-        self._running = True
-
-    def shutdown(self):
-        """
-        Shuts everything down.
-
-        It is not expected that anyone would want to shutdown and then start
-        the configurator again, so we don't explicitly make sure that would
-        work. However, we are not aware of anything that would make it not
-        work either.
-        """
-        if not self._running:
-            raise ValueError("Trying to shutdown the component " +
-                             "configurator while it's not yet running")
-        logger.info(BIND10_CONFIGURATOR_STOP)
-        self._running = False
-        self.__reconfigure_internal(self._components, {})
-
-    def has_component(self, component):
-        '''Return if a specified component is configured.'''
-        # Values of self._components are tuples of (config, component).
-        # Extract the components of the tuples and see if the given one
-        # is included.
-        return component in map(lambda x: x[1], self._components.values())
-
-    def reconfigure(self, configuration):
-        """
-        Changes configuration from the current one to the provided. It
-        starts and stops all the components as needed (eg. if there's
-        a component that was not in the original configuration, it is
-        started, any component that was in the old and is not in the
-        new one is stopped).
-        """
-        if not self._running:
-            raise ValueError("Trying to reconfigure the component " +
-                             "configurator while it's not yet running")
-        logger.info(BIND10_CONFIGURATOR_RECONFIGURE)
-        self.__reconfigure_internal(self._components, configuration)
-
-    def _build_plan(self, old, new):
-        """
-        Builds a plan how to transfer from the old configuration to the new
-        one. It'll be sorted by priority and it will contain the components
-        (already created, but not started). Each command in the plan is a dict,
-        so it can be extended any time in future to include whatever
-        parameters each operation might need.
-
-        Any configuration problems are expected to be handled here, so the
-        plan is not yet run.
-        """
-        logger.debug(DBG_TRACE_DATA, BIND10_CONFIGURATOR_BUILD, old, new)
-        plan = []
-        # Handle removals of old components
-        for cname in old.keys():
-            if cname not in new:
-                component = self._components[cname][1]
-                if component.is_running() or component.is_restarting():
-                    plan.append({
-                        'command': STOP_CMD,
-                        'component': component,
-                        'name': cname
-                    })
-        # Handle transitions of configuration of what is here
-        for cname in new.keys():
-            if cname in old:
-                for option in ['special', 'process', 'kind', 'address',
-                               'params']:
-                    if new[cname].get(option) != old[cname][0].get(option):
-                        raise NotImplementedError('Changing configuration of' +
-                                                  ' a running component is ' +
-                                                  'not yet supported. Remove' +
-                                                  ' and re-add ' + cname +
-                                                  ' to get the same effect')
-        # Handle introduction of new components
-        plan_add = []
-        for cname in new.keys():
-            if cname not in old:
-                component_config = new[cname]
-                creator = Component
-                if 'special' in component_config:
-                    # TODO: Better error handling
-                    creator = self.__specials[component_config['special']]
-                component = creator(component_config.get('process', cname),
-                                    self.__b10_init, component_config['kind'],
-                                    component_config.get('address'),
-                                    component_config.get('params'))
-                priority = component_config.get('priority', 0)
-                # We store tuples, priority first, so we can easily sort
-                plan_add.append((priority, {
-                    'component': component,
-                    'command': START_CMD,
-                    'name': cname,
-                    'config': component_config
-                }))
-        # Push the starts there sorted by priority
-        plan.extend([command for (_, command) in sorted(plan_add,
-                                                        reverse=True,
-                                                        key=lambda command:
-                                                            command[0])])
-        return plan
-
-    def running(self):
-        """
-        Returns if the configurator is running (eg. was started by startup and
-        not yet stopped by shutdown).
-        """
-        return self._running
-
-    def _run_plan(self, plan):
-        """
-        Run a plan, created beforehand by _build_plan.
-
-        With the start and stop commands, it also adds and removes components
-        in _components.
-
-        Currently implemented commands are:
-        * start
-        * stop
-
-        The plan is a list of tasks, each task is a dictionary. It must contain
-        at last 'component' (a component object to work with) and 'command'
-        (the command to do). Currently, both existing commands need 'name' of
-        the component as well (the identifier from configuration). The 'start'
-        one needs the 'config' to be there, which is the configuration description
-        of the component.
-        """
-        done = 0
-        try:
-            logger.debug(DBG_TRACE_DATA, BIND10_CONFIGURATOR_RUN, len(plan))
-            for task in plan:
-                component = task['component']
-                command = task['command']
-                logger.debug(DBG_TRACE_DETAILED, BIND10_CONFIGURATOR_TASK,
-                             command, component.name())
-                if command == START_CMD:
-                    component.start()
-                    self._components[task['name']] = (task['config'],
-                                                      component)
-                elif command == STOP_CMD:
-                    if component.is_running():
-                        component.stop()
-                    del self._components[task['name']]
-                else:
-                    # Can Not Happen (as the plans are generated by ourselves).
-                    # Therefore not tested.
-                    raise NotImplementedError("Command unknown: " + command)
-                done += 1
-        except:
-            logger.error(BIND10_CONFIGURATOR_PLAN_INTERRUPTED, done, len(plan))
-            raise
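
The removed Configurator consumed a dictionary keyed by component identifier, as its docstring above describes. A hypothetical example of such a configuration follows; the component names, addresses and the b10_init/specials objects are illustrative only and are not taken from this commit.

    # Configuration in the shape the removed Configurator expected; only 'kind'
    # is required, the remaining keys are optional (see the docstring above).
    component_config = {
        "b10-cfgmgr": {"kind": "core", "priority": 90},
        "b10-auth": {"kind": "needed", "address": "Auth", "priority": 50},
        "b10-stats": {"kind": "dispensable", "address": "Stats"},
    }

    # configurator = Configurator(b10_init, specials={})  # b10_init assumed to exist
    # configurator.startup(component_config)              # starts in priority order
    # configurator.reconfigure(updated_config)            # stops/starts the difference
    # configurator.shutdown()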

+ 0 - 264
src/lib/python/isc/bind10/sockcreator.py

@@ -1,264 +0,0 @@
-# Copyright (C) 2011  Internet Systems Consortium, Inc. ("ISC")
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-import socket
-import struct
-import os
-import errno
-import copy
-import subprocess
-import copy
-from isc.log_messages.init_messages import *
-from libutil_io_python import recv_fd
-
-logger = isc.log.Logger("init")
-
-"""
-Module that communicates with the privileged socket creator (b10-sockcreator).
-"""
-
-class CreatorError(Exception):
-    """
-    Exception for socket creator related errors.
-
-    It has two members: fatal and errno and they are just holding the values
-    passed to the __init__ function.
-    """
-
-    def __init__(self, message, fatal, error_num=None):
-        """
-        Creates the exception. The message argument is the usual string.
-        The fatal one tells if the error is fatal (eg. the creator crashed)
-        and error_num is the errno value returned from socket creator, if
-        applicable.
-        """
-        Exception.__init__(self, message)
-        self.fatal = fatal
-        self.errno = error_num
-
-class Parser:
-    """
-    This class knows the sockcreator language. It creates commands, sends them
-    and receives the answers and parses them.
-
-    It does not start it, the communication channel must be provided.
-
-    In theory, anything here can throw a fatal CreatorError exception, but it
-    happens only in case something like the creator process crashes. Any other
-    occasions are mentioned explicitly.
-    """
-
-    def __init__(self, creator_socket):
-        """
-        Creates the parser. The creator_socket is socket to the socket creator
-        process that will be used for communication. However, the object must
-        have a read_fd() method to read the file descriptor. This slightly
-        unusual trick with modifying an object is used to ease testing.
-
-        You can use WrappedSocket in production code to add the method to any
-        ordinary socket.
-        """
-        self.__socket = creator_socket
-        logger.info(BIND10_SOCKCREATOR_INIT)
-
-    def terminate(self):
-        """
-        Asks the creator process to terminate and waits for it to close the
-        socket. Does not return anything. Raises a CreatorError if there is
-        still data on the socket, if there is an error closing the socket,
-        or if the socket had already been closed.
-        """
-        if self.__socket is None:
-            raise CreatorError('Terminated already', True)
-        logger.info(BIND10_SOCKCREATOR_TERMINATE)
-        try:
-            self.__socket.sendall(b'T')
-            # Wait for an EOF - it will return empty data
-            eof = self.__socket.recv(1)
-            if len(eof) != 0:
-                raise CreatorError('Protocol error - data after terminated',
-                                   True)
-            self.__socket = None
-        except socket.error as se:
-            self.__socket = None
-            raise CreatorError(str(se), True)
-
-    def __addrport_str(self, address, port):
-        '''Convert a pair of IP address and port to common form for logging.'''
-        if address.family == socket.AF_INET:
-            return str(address) + ':' + str(port)
-        else:
-            return '[' + str(address) + ']:' + str(port)
-
-    def get_socket(self, address, port, socktype):
-        """
-        Asks the socket creator process to create a socket. Pass an address
-        (the isc.net.IPaddr object), port number and socket type (either
-        string "UDP", "TCP" or constant socket.SOCK_DGRAM or
-        socket.SOCK_STREAM.
-
-        Blocks until it is provided by the socket creator process (which
-        should be fast, as it is on localhost) and returns the file descriptor
-        number. It raises a CreatorError exception if the creation fails.
-        """
-        if self.__socket is None:
-            raise CreatorError('Socket requested on terminated creator', True)
-        # First, assemble the request from parts
-        logger.info(BIND10_SOCKET_GET, address, port, socktype)
-        data = b'S'
-        if socktype == 'UDP' or socktype == socket.SOCK_DGRAM:
-            data += b'U'
-        elif socktype == 'TCP' or socktype == socket.SOCK_STREAM:
-            data += b'T'
-        else:
-            raise ValueError('Unknown socket type: ' + str(socktype))
-        if address.family == socket.AF_INET:
-            data += b'4'
-        elif address.family == socket.AF_INET6:
-            data += b'6'
-        else:
-            raise ValueError('Unknown address family in address')
-        data += struct.pack('!H', port)
-        data += address.addr
-        try:
-            # Send the request
-            self.__socket.sendall(data)
-            answer = self.__socket.recv(1)
-            if answer == b'S':
-                # Success!
-                result = self.__socket.read_fd()
-                logger.info(BIND10_SOCKET_CREATED, result)
-                return result
-            elif answer == b'E':
-                # There was an error, read the error as well
-                error = self.__socket.recv(1)
-                rcv_errno = struct.unpack('i',
-                                          self.__read_all(len(struct.pack('i',
-                                                                          0))))
-                if error == b'S':
-                    cause = 'socket'
-                elif error == b'B':
-                    cause = 'bind'
-                else:
-                    self.__socket = None
-                    logger.fatal(BIND10_SOCKCREATOR_BAD_CAUSE, error)
-                    raise CreatorError('Unknown error cause' + str(answer), True)
-                logger.error(BIND10_SOCKET_ERROR, cause, rcv_errno[0],
-                             os.strerror(rcv_errno[0]))
-
-                # Provide as detailed information as possible on the error,
-                # as error related to socket creation is a common operation
-                # trouble.  In particular, we are intentionally very verbose
-                # if it fails due to "permission denied" so the administrator
-                # can easily identify what is wrong and how to fix it.
-                addrport = self.__addrport_str(address, port)
-                error_text = 'Error creating socket on ' + cause + \
-                    ' to be bound to ' + addrport + ': ' + \
-                    os.strerror(rcv_errno[0])
-                if rcv_errno[0] == errno.EACCES:
-                    error_text += ' - probably need to restart BIND 10 ' + \
-                        'as a super user'
-                raise CreatorError(error_text, False, rcv_errno[0])
-            else:
-                self.__socket = None
-                logger.fatal(BIND10_SOCKCREATOR_BAD_RESPONSE, answer)
-                raise CreatorError('Unknown response ' + str(answer), True)
-        except socket.error as se:
-            self.__socket = None
-            logger.fatal(BIND10_SOCKCREATOR_TRANSPORT_ERROR, str(se))
-            raise CreatorError(str(se), True)
-
-    def __read_all(self, length):
-        """
-        Keeps reading until length data is read or EOF or error happens.
-
-        EOF is considered error as well and throws a CreatorError.
-        """
-        result = b''
-        while len(result) < length:
-            data = self.__socket.recv(length - len(result))
-            if len(data) == 0:
-                self.__socket = None
-                logger.fatal(BIND10_SOCKCREATOR_EOF)
-                raise CreatorError('Unexpected EOF', True)
-            result += data
-        return result
-
-class WrappedSocket:
-    """
-    This class wraps a socket and adds a read_fd method, so it can be used
-    for the Parser class conveniently. It simply copies all its guts into
-    itself and implements the method.
-    """
-    def __init__(self, socket):
-        # Copy whatever can be copied from the socket
-        for name in dir(socket):
-            if name not in ['__class__', '__weakref__']:
-                setattr(self, name, getattr(socket, name))
-        # Keep the socket, so we can prevent it from being garbage-collected
-        # and closed before we are removed ourself
-        self.__orig_socket = socket
-
-    def read_fd(self):
-        """
-        Read the file descriptor from the socket.
-        """
-        return recv_fd(self.fileno())
-
-# FIXME: Any idea how to test this? Starting an external process doesn't sound
-# OK
-class Creator(Parser):
-    """
-    This starts the socket creator and allows asking for the sockets.
-
-    Note: __process shouldn't be reset once created.  See the note
-    of the SockCreator class for details.
-    """
-    def __init__(self, path):
-        (local, remote) = socket.socketpair(socket.AF_UNIX, socket.SOCK_STREAM)
-        # Popen does not like, for some reason, having the same socket for
-        # stdin as well as stdout, so we dup it before passing it there.
-        remote2 = socket.fromfd(remote.fileno(), socket.AF_UNIX,
-                                socket.SOCK_STREAM)
-        env = copy.deepcopy(os.environ)
-        env['PATH'] = path
-        # We explicitly set close_fds to True; it's False by default before
-        # Python 3.2.  If we don't close the remaining FDs, the copy of
-        # 'local' will prevent the child process from terminating when
-        # the parent process dies abruptly.
-        self.__process = subprocess.Popen(['b10-sockcreator'], env=env,
-                                          stdin=remote.fileno(),
-                                          stdout=remote2.fileno(),
-                                          close_fds=True,
-                                          preexec_fn=self.__preexec_work)
-        remote.close()
-        remote2.close()
-        Parser.__init__(self, WrappedSocket(local))
-
-    def __preexec_work(self):
-        """Function used before running a program that needs to run as a
-        different user."""
-        # Put us into a separate process group so we don't get
-        # SIGINT signals on Ctrl-C (b10-init will shut everything down by
-        # other means).
-        os.setpgrp()
-
-    def pid(self):
-        return self.__process.pid
-
-    def kill(self):
-        logger.warn(BIND10_SOCKCREATOR_KILL)
-        if self.__process is not None:
-            self.__process.kill()
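
For reference, this is roughly how the removed creator API was driven. It is a minimal sketch based only on the methods visible in this diff and in the unit tests further below; the libexec path is a placeholder, not a value from the original code.

    from isc.net.addr import IPAddr
    from isc.bind10.sockcreator import Creator, CreatorError

    # Start the b10-sockcreator helper found on the given PATH (placeholder).
    creator = Creator('/usr/libexec/bind10')
    try:
        # Ask the helper for a UDP socket bound to 192.0.2.1:53; the call
        # returns the file descriptor of the newly created socket.
        fd = creator.get_socket(IPAddr('192.0.2.1'), 53, 'UDP')
    except CreatorError as ce:
        # Non-fatal errors carry the errno reported by the creator process.
        print('creation failed:', ce, 'errno:', ce.errno, 'fatal:', ce.fatal)
    finally:
        # Ask the helper to exit cleanly; kill() is the forceful variant.
        creator.terminate()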

+ 0 - 303
src/lib/python/isc/bind10/socket_cache.py

@@ -1,303 +0,0 @@
-# Copyright (C) 2011  Internet Systems Consortium, Inc. ("ISC")
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-"""
-The cache for sockets obtained from the socket creator.
-"""
-
-import os
-import random
-import isc.bind10.sockcreator
-from copy import copy
-
-class SocketError(Exception):
-    """
-    Exception raised when the socket creator is unable to create the
-    requested socket. Possible reasons include: the address it should be
-    bound to is already taken, the permissions are insufficient, or the
-    address family is not supported on this computer, among others.
-
-    The errno, if not None, is passed from the socket creator.
-    """
-    def __init__(self, message, errno):
-        Exception.__init__(self, message)
-        self.errno = errno
-
-class ShareError(Exception):
-    """
-    The requested socket is already taken by another component and the sharing
-    parameters don't allow sharing with the new request.
-    """
-    pass
-
-class Socket:
-    """
-    This represents one socket cached by the Cache. It should never be used
-    directly by a user; it is used internally by the Cache. Therefore many
-    member variables are accessed directly instead of through accessor
-    methods.
-
-    Be warned that this object implements the __del__ method. It closes the
-    socket held inside it. But this poses various problems with the garbage
-    collector. In short, do not make reference cycles with this object and
-    generally leave this class alone to live peacefully.
-    """
-    def __init__(self, protocol, address, port, fileno):
-        """
-        Creates the socket.
-
-        The protocol, address and port are preserved for informational purposes.
-        """
-        self.protocol = protocol
-        self.address = address
-        self.port = port
-        self.fileno = fileno
-        # Mapping from token -> application
-        self.active_tokens = {}
-        # The tokens which were not yet picked up
-        self.waiting_tokens = set()
-        # Share modes and names by the tokens (token -> (mode, name))
-        self.shares = {}
-
-    def __del__(self):
-        """
-        Closes the file descriptor.
-        """
-        os.close(self.fileno)
-
-    def share_compatible(self, mode, name):
-        """
-        Checks if the given share mode and name are compatible with the ones
-        already installed here.
-
-        The allowed values for mode are listed in the Cache.get_token
-        function.
-        """
-        if mode not in ['NO', 'SAMEAPP', 'ANY']:
-            raise ValueError("Mode " + mode + " is invalid")
-
-        # Go through the existing ones
-        for (emode, ename) in self.shares.values():
-            if emode == 'NO' or mode == 'NO':
-                # One of them can't live together with anything
-                return False
-            if (emode == 'SAMEAPP' or mode == 'SAMEAPP') and \
-                ename != name:
-                # One of them can't live together with someone of different
-                # name
-                return False
-            # else both are ANY or SAMEAPP with the same name, which is OK
-        # No problem found, so we consider it OK
-        return True
-
-class Cache:
-    """
-    This is the cache for sockets from the socket creator. The purpose of the
-    cache is to hold the sockets that were requested until they are no longer
-    needed. One reason is that the socket is created before it is sent over
-    the unix domain socket in b10-init, so we need to keep it somewhere for
-    a while.
-
-    The other reason is that a single socket might be requested multiple
-    times, so we keep it here in case someone else asks for it.
-
-    Each socket kept here has a reference count and when it drops to zero,
-    it is removed from cache and closed.
-
-    This is expected to be part of Init; it is not a general utility class.
-
-    It is not expected to be subclassed. The methods and members are named
-    as protected so that tests can access them more easily.
-    """
-    def __init__(self, creator):
-        """
-        Initialization. The creator is the socket creator object
-        (isc.bind10.sockcreator.Creator) which will be used to create
-        sockets that are not yet cached.
-        """
-        self._creator = creator
-        # The sockets we hold live here; these dicts are various ways to
-        # look them up. Each of them contains the Socket objects in some form.
-
-        # This one is a dict of token -> socket for the ones that were not
-        # yet picked up by an application.
-        self._waiting_tokens = {}
-        # This format is the same as above, but for the tokens that were
-        # already picked up by the application and not yet released.
-        self._active_tokens = {}
-        # This is a dict from applications to set of tokens used by the
-        # application, for the sockets already picked up by an application
-        self._active_apps = {}
-        # The sockets live here to be indexed by protocol, address and
-        # subsequently by port
-        self._sockets = {}
-        # These are just the tokens actually in use, so we don't generate
-        # dupes. If one is dropped, it can be potentially reclaimed.
-        self._live_tokens = set()
-
-    def get_token(self, protocol, address, port, share_mode, share_name):
-        """
-        This requests a token representing a socket. The socket is either
-        found in the cache already or requested from the creator at this time
-        (and cached for later time).
-
-        The parameters are:
-        - protocol: either 'UDP' or 'TCP'
-        - address: the IPAddr object representing the address to bind to
-        - port: integer saying which port to bind to
-        - share_mode: either 'NO', 'SAMEAPP' or 'ANY', specifying how the
-          socket can be shared with others. See bin/bind10/creatorapi.txt
-          for details.
-        - share_name: the name of application, in case of 'SAMEAPP' share
-          mode. Only requests with the same name can share the socket.
-
-        If the call is successful, it returns a string token which can be
-        used to pick up the socket later. The socket is created with a
-        reference count of zero, and if it isn't picked up soon enough (the
-        timeout has yet to be set), it will be removed and the token becomes
-        invalid.
-
-        It can fail in various ways. Explicitly listed exceptions are:
-        - SocketError: this one is raised if the socket creator couldn't
-          provide the socket and it is not yet cached (it belongs to another
-          application, for example).
-        - ShareError: the socket is already in the cache, but it can't be
-          shared due to the share_mode and share_name combination (the
-          restrictions of both this request and of all copies of the socket
-          already handed out are considered,
-          so it can be raised even if you call it with share_mode 'ANY').
-        - isc.bind10.sockcreator.CreatorError: fatal creator errors are
-          propagated. They should cause b10-init to exit if ever encountered.
-
-        Note that the tokens are not guaranteed to be unique; they should be
-        used as opaque handles only.
-        """
-        addr_str = str(address)
-        try:
-            socket = self._sockets[protocol][addr_str][port]
-        except KeyError:
-            # Something in the dicts is not there, so socket is to be
-            # created
-            try:
-                fileno = self._creator.get_socket(address, port, protocol)
-            except isc.bind10.sockcreator.CreatorError as ce:
-                if ce.fatal:
-                    raise
-                else:
-                    raise SocketError(str(ce), ce.errno)
-            socket = Socket(protocol, address, port, fileno)
-            # And cache it
-            if protocol not in self._sockets:
-                self._sockets[protocol] = {}
-            if addr_str not in self._sockets[protocol]:
-                self._sockets[protocol][addr_str] = {}
-            self._sockets[protocol][addr_str][port] = socket
-        # Now we get the token, check it is compatible
-        if not socket.share_compatible(share_mode, share_name):
-            raise ShareError("Cached socket not compatible with mode " +
-                             share_mode + " and name " + share_name)
-        # Grab yet unused token
-        token = 't' + str(random.randint(0, 2 ** 32-1))
-        while token in self._live_tokens:
-            token = 't' + str(random.randint(0, 2 ** 32-1))
-        self._waiting_tokens[token] = socket
-        self._live_tokens.add(token)
-        socket.shares[token] = (share_mode, share_name)
-        socket.waiting_tokens.add(token)
-        return token
-
-    def get_socket(self, token, application):
-        """
-        This returns the socket created by get_token. The token should be the
-        one returned from a previous call to get_token. The token can be used
-        only once to receive the socket.
-
-        The application is a token representing the application that
-        requested it. Currently, b10-init uses the file descriptor of
-        connection from the application, but anything which can be a key in
-        a dict is OK from the cache's point of view. You just need to use
-        the same thing in drop_application.
-
-        In case the token is considered invalid (it doesn't come from
-        get_token, it was already used, the socket wasn't picked up soon
-        enough, ...), it raises ValueError.
-        """
-        try:
-            socket = self._waiting_tokens[token]
-        except KeyError:
-            raise ValueError("Token " + token +
-                             " isn't waiting to be picked up")
-        del self._waiting_tokens[token]
-        self._active_tokens[token] = socket
-        if application not in self._active_apps:
-            self._active_apps[application] = set()
-        self._active_apps[application].add(token)
-        socket.waiting_tokens.remove(token)
-        socket.active_tokens[token] = application
-        return socket.fileno
-
-    def drop_socket(self, token):
-        """
-        This signals the application no longer uses the socket which was
-        requested by the given token. It decreases the reference count for
-        the socket and closes and removes the cached copy if it was the last
-        one.
-
-        It raises ValueError if the token doesn't exist.
-        """
-        try:
-            socket = self._active_tokens[token]
-        except KeyError:
-            raise ValueError("Token " + token + " doesn't represent an " +
-                             "active socket")
-        # Now, remove everything from the bookkeeping
-        del socket.shares[token]
-        app = socket.active_tokens[token]
-        del socket.active_tokens[token]
-        del self._active_tokens[token]
-        self._active_apps[app].remove(token)
-        if len(self._active_apps[app]) == 0:
-            del self._active_apps[app]
-        self._live_tokens.remove(token)
-        # The socket is not used by anything now, so remove it
-        if len(socket.active_tokens) == 0 and len(socket.waiting_tokens) == 0:
-            addr = str(socket.address)
-            port = socket.port
-            proto = socket.protocol
-            del self._sockets[proto][addr][port]
-            # Clean up empty branches of the structure
-            if len(self._sockets[proto][addr]) == 0:
-                del self._sockets[proto][addr]
-            if len(self._sockets[proto]) == 0:
-                del self._sockets[proto]
-
-    def drop_application(self, application):
-        """
-        This signals that the application terminated and that all sockets it
-        picked up should now be considered unused by it. It effectively calls
-        drop_socket on each of the sockets the application picked up and
-        didn't drop yet.
-
-        If the application is invalid (no get_socket was successful with this
-        value of application), it raises ValueError.
-        """
-        try:
-            # Get a copy. Iterating over a set while deleting from it is
-            # asking for trouble, so we'll just have our own copy to
-            # iterate over.
-            to_drop = copy(self._active_apps[application])
-        except KeyError:
-            raise ValueError("Application " + str(application) +
-                             " doesn't hold any sockets")
-        for token in to_drop:
-            self.drop_socket(token)
-        # We don't call del now. The last drop_socket should have
-        # removed the application key as well.
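
To make the token workflow described in get_token(), get_socket() and drop_socket() above concrete, here is a hedged usage sketch. The creator object, the share name 'Auth' and the application key are illustrative; b10-init used the file descriptor of the application's connection as the key.

    import isc.bind10.socket_cache
    from isc.net.addr import IPAddr

    cache = isc.bind10.socket_cache.Cache(creator)  # creator as in sockcreator.py

    # 1. Reserve the socket and obtain an opaque token for it.
    token = cache.get_token('UDP', IPAddr('192.0.2.1'), 53, 'SAMEAPP', 'Auth')

    # 2. The application picks the socket up; 'app' is any hashable key
    #    identifying the application.
    app = 7
    fd = cache.get_socket(token, app)

    # 3. Release the socket when done, or release everything the
    #    application held when it terminates.
    cache.drop_socket(token)
    # cache.drop_application(app)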

+ 0 - 124
src/lib/python/isc/bind10/special_component.py

@@ -1,124 +0,0 @@
-# Copyright (C) 2011  Internet Systems Consortium, Inc. ("ISC")
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-from isc.bind10.component import Component, BaseComponent
-import isc.bind10.sockcreator
-from bind10_config import LIBEXECPATH
-import os
-import isc.log
-
-class SockCreator(BaseComponent):
-    """
-    The socket creator component. Will start and stop the socket creator
-    accordingly.
-
-    Note: _creator shouldn't be reset explicitly once created.  The
-    underlying Popen object would then wait() on the child process
-    internally, which breaks the assumption of b10-init, which expects
-    to see the process die in waitpid().
-    """
-    def __init__(self, process, b10_init, kind, address=None, params=None):
-        BaseComponent.__init__(self, b10_init, kind)
-        self.__creator = None
-
-    def _start_internal(self):
-        self._b10_init.curproc = 'b10-sockcreator'
-        self.__creator = isc.bind10.sockcreator.Creator(LIBEXECPATH + ':' +
-                                                        os.environ['PATH'])
-        self._b10_init.register_process(self.pid(), self)
-        self._b10_init.set_creator(self.__creator)
-        self._b10_init.log_started(self.pid())
-
-        # We are now ready for switching user.
-        self._b10_init.change_user()
-
-    def _stop_internal(self):
-        self.__creator.terminate()
-
-    def name(self):
-        return "Socket creator"
-
-    def pid(self):
-        """
-        Pid of the socket creator. It is provided differently from a usual
-        component.
-        """
-        return self.__creator.pid() if self.__creator else None
-
-    def kill(self, forceful=False):
-        # We don't really care about forceful here
-        if self.__creator:
-            self.__creator.kill()
-
-class Msgq(Component):
-    """
-    The message queue. Starting is delegated to b10-init; stopping is not
-    supported and we let b10-init kill it by signal.
-    """
-    def __init__(self, process, b10_init, kind, address=None, params=None):
-        Component.__init__(self, process, b10_init, kind, None, None,
-                           b10_init.start_msgq)
-
-    def _stop_internal(self):
-        """
-        We can't really stop the message queue, as many processes may need
-        it for their shutdown and it doesn't have a shutdown command anyway.
-        But as it is stateless, it's OK to kill it.
-
-        So we disable this method (as the only time it could be called is
-        during shutdown) and wait for b10-init to kill it in the next shutdown
-        step.
-
-        This actually breaks the recommendation in Component that we shouldn't
-        override its methods one by one. This is a special case, because
-        we don't provide a different implementation; we completely disable
-        the method by providing an empty one. This can't hurt the internals.
-        """
-        pass
-
-class CfgMgr(Component):
-    def __init__(self, process, b10_init, kind, address=None, params=None):
-        Component.__init__(self, process, b10_init, kind, 'ConfigManager',
-                           None, b10_init.start_cfgmgr)
-
-class Auth(Component):
-    def __init__(self, process, b10_init, kind, address=None, params=None):
-        Component.__init__(self, process, b10_init, kind, 'Auth', None,
-                           b10_init.start_auth)
-
-class Resolver(Component):
-    def __init__(self, process, b10_init, kind, address=None, params=None):
-        Component.__init__(self, process, b10_init, kind, 'Resolver', None,
-                           b10_init.start_resolver)
-
-class CmdCtl(Component):
-    def __init__(self, process, b10_init, kind, address=None, params=None):
-        Component.__init__(self, process, b10_init, kind, 'Cmdctl', None,
-                           b10_init.start_cmdctl)
-def get_specials():
-    """
-    Mapping of specially started components. Each value is the class that
-    should be instantiated for that component.
-    """
-    return {
-        'sockcreator': SockCreator,
-        'msgq': Msgq,
-        'cfgmgr': CfgMgr,
-        # TODO: Should these be replaced by configuration in config manager only?
-        # They should not have any parameters anyway
-        'auth': Auth,
-        'resolver': Resolver,
-        'cmdctl': CmdCtl
-    }
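
A brief sketch of how the mapping returned by get_specials() was consumed; the process name and the 'core' kind below are illustrative values, not taken from this file.

    from isc.bind10.special_component import get_specials

    specials = get_specials()
    if 'cfgmgr' in specials:
        # Every special class shares the constructor signature
        # (process, b10_init, kind, address=None, params=None).
        component = specials['cfgmgr']('b10-cfgmgr', b10_init, 'core')
        # b10-init would then drive the component's lifecycle as usual.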

File diff suppressed because it is too large
+ 0 - 30
src/lib/python/isc/bind10/tests/Makefile.am


File diff suppressed because it is too large
+ 0 - 1078
src/lib/python/isc/bind10/tests/component_test.py


+ 0 - 333
src/lib/python/isc/bind10/tests/sockcreator_test.py

@@ -1,333 +0,0 @@
-# Copyright (C) 2011  Internet Systems Consortium, Inc. ("ISC")
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-"""
-Tests for the bind10.sockcreator module.
-"""
-
-import unittest
-import struct
-import socket
-from isc.net.addr import IPAddr
-import isc.log
-from libutil_io_python import send_fd
-from isc.bind10.sockcreator import Parser, CreatorError, WrappedSocket
-
-class FakeCreator:
-    """
-    Class emulating the socket to the socket creator. It can be given expected
-    data to receive (and check) and responses to give to the Parser class
-    during testing.
-    """
-
-    class InvalidPlan(Exception):
-        """
-        Raised when someone wants to recv when sending is planned or vice
-        versa.
-        """
-        pass
-
-    class InvalidData(Exception):
-        """
-        Raised when the data passed to sendall is not the same as expected.
-        """
-        pass
-
-    def __init__(self, plan):
-        """
-        Create the object. The plan variable contains a list of expected
-        actions, in the form:
-
-        [('r', 'Data to return from recv'), ('s', 'Data expected on sendall'),
-         ('f', 'File descriptor number to return from read_fd'), ('e',
-         None), ...]
-
-        It modifies the list as it goes.
-        """
-        self.__plan = plan
-
-    def __get_plan(self, expected):
-        if len(self.__plan) == 0:
-            raise self.InvalidPlan('Nothing more planned')
-        (kind, data) = self.__plan[0]
-        if kind == 'e':
-            self.__plan.pop(0)
-            raise socket.error('False socket error')
-        if kind != expected:
-            raise self.InvalidPlan('Planned ' + kind + ', but ' + expected +
-                ' requested')
-        return data
-
-    def recv(self, maxsize):
-        """
-        Emulate recv. Returns at most maxsize bytes from the current recv
-        plan. If there is data left from a previous recv call, it is used
-        first.
-
-        If no recv is planned, raises InvalidPlan.
-        """
-        data = self.__get_plan('r')
-        result, rest = data[:maxsize], data[maxsize:]
-        if len(rest) > 0:
-            self.__plan[0] = ('r', rest)
-        else:
-            self.__plan.pop(0)
-        return result
-
-    def read_fd(self):
-        """
-        Emulate the reading of a file descriptor. Returns one from the plan.
-
-        It raises InvalidPlan if no file descriptor is planned now.
-        """
-        fd = self.__get_plan('f')
-        self.__plan.pop(0)
-        return fd
-
-    def sendall(self, data):
-        """
-        Checks that the data passed is correct according to the plan. It
-        raises InvalidData if the data differs, or InvalidPlan when sendall
-        is not expected.
-        """
-        planned = self.__get_plan('s')
-        dlen = len(data)
-        prefix, rest = planned[:dlen], planned[dlen:]
-        if prefix != data:
-            raise InvalidData('Expected "' + str(prefix)+ '", got "' +
-                str(data) + '"')
-        if len(rest) > 0:
-            self.__plan[0] = ('s', rest)
-        else:
-            self.__plan.pop(0)
-
-    def all_used(self):
-        """
-        Returns whether the whole plan was consumed.
-        """
-        return len(self.__plan) == 0
-
-class ParserTests(unittest.TestCase):
-    """
-    Testcases for the Parser class.
-
-    A lot of these tests could be done with
-    `with self.assertRaises(CreatorError) as cm`. But some versions of Python
-    get the scope wrong and it doesn't work, so we use the primitive
-    try-except way.
-    """
-    def __terminate(self):
-        creator = FakeCreator([('s', b'T'), ('r', b'')])
-        parser = Parser(creator)
-        self.assertEqual(None, parser.terminate())
-        self.assertTrue(creator.all_used())
-        return parser
-
-    def test_terminate(self):
-        """
-        Test that the command to terminate is correct and that it waits to
-        read the EOF.
-        """
-        self.__terminate()
-
-    def __terminate_raises(self, parser):
-        """
-        Check that terminate() raises a fatal exception.
-        """
-        try:
-            parser.terminate()
-            self.fail("Not raised")
-        except CreatorError as ce:
-            self.assertTrue(ce.fatal)
-            self.assertEqual(None, ce.errno)
-
-    def test_terminate_error1(self):
-        """
-        Test it reports an exception when there's an error terminating the creator.
-        This one raises an error when receiving the EOF.
-        """
-        creator = FakeCreator([('s', b'T'), ('e', None)])
-        parser = Parser(creator)
-        self.__terminate_raises(parser)
-
-    def test_terminate_error2(self):
-        """
-        Test it reports an exception when there's an error terminating the creator.
-        This one raises an error when sending data.
-        """
-        creator = FakeCreator([('e', None)])
-        parser = Parser(creator)
-        self.__terminate_raises(parser)
-
-    def test_terminate_error3(self):
-        """
-        Test it reports an exception when there's an error terminating the creator.
-        This one sends data when it should have terminated.
-        """
-        creator = FakeCreator([('s', b'T'), ('r', b'Extra data')])
-        parser = Parser(creator)
-        self.__terminate_raises(parser)
-
-    def test_terminate_twice(self):
-        """
-        Test we can't terminate twice.
-        """
-        parser = self.__terminate()
-        self.__terminate_raises(parser)
-
-    def test_crash(self):
-        """
-        Tests that the parser correctly raises an exception when the creator
-        crashes unexpectedly.
-        """
-        creator = FakeCreator([('s', b'SU4\0\0\0\0\0\0'), ('r', b'')])
-        parser = Parser(creator)
-        try:
-            parser.get_socket(IPAddr('0.0.0.0'), 0, 'UDP')
-            self.fail("Not raised")
-        except CreatorError as ce:
-            self.assertTrue(creator.all_used())
-            # Is the exception correct?
-            self.assertTrue(ce.fatal)
-            self.assertEqual(None, ce.errno)
-
-    def test_error(self):
-        """
-        Tests that the parser correctly raises a non-fatal exception when
-        the socket cannot be created.
-        """
-        # We split the int to see if it can cope with data coming in
-        # different packets
-        intpart = struct.pack('@i', 42)
-        creator = FakeCreator([('s', b'SU4\0\0\0\0\0\0'), ('r', b'ES' +
-            intpart[:1]), ('r', intpart[1:])])
-        parser = Parser(creator)
-        try:
-            parser.get_socket(IPAddr('0.0.0.0'), 0, 'UDP')
-            self.fail("Not raised")
-        except CreatorError as ce:
-            self.assertTrue(creator.all_used())
-            # Is the exception correct?
-            self.assertFalse(ce.fatal)
-            self.assertEqual(42, ce.errno)
-
-    def __error(self, plan):
-        creator = FakeCreator(plan)
-        parser = Parser(creator)
-        try:
-            parser.get_socket(IPAddr('0.0.0.0'), 0, socket.SOCK_DGRAM)
-            self.fail("Not raised")
-        except CreatorError as ce:
-            self.assertTrue(creator.all_used())
-            self.assertTrue(ce.fatal)
-
-    def test_error_send(self):
-        self.__error([('e', None)])
-
-    def test_error_recv(self):
-        self.__error([('s', b'SU4\0\0\0\0\0\0'), ('e', None)])
-
-    def test_error_read_fd(self):
-        self.__error([('s', b'SU4\0\0\0\0\0\0'), ('r', b'S'), ('e', None)])
-
-    def __create(self, addr, socktype, encoded):
-        creator = FakeCreator([('s', b'S' + encoded), ('r', b'S'), ('f', 42)])
-        parser = Parser(creator)
-        self.assertEqual(42, parser.get_socket(IPAddr(addr), 42, socktype))
-
-    def test_create1(self):
-        self.__create('192.0.2.0', 'UDP', b'U4\0\x2A\xC0\0\x02\0')
-
-    def test_create2(self):
-        self.__create('2001:db8::', socket.SOCK_STREAM,
-            b'T6\0\x2A\x20\x01\x0d\xb8\0\0\0\0\0\0\0\0\0\0\0\0')
-
-    def test_create_terminated(self):
-        """
-        Test we can't request sockets after the parser was terminated.
-        """
-        parser = self.__terminate()
-        try:
-            parser.get_socket(IPAddr('0.0.0.0'), 0, 'UDP')
-            self.fail("Not raised")
-        except CreatorError as ce:
-            self.assertTrue(ce.fatal)
-            self.assertEqual(None, ce.errno)
-
-    def test_invalid_socktype(self):
-        """
-        Test invalid socket type is rejected
-        """
-        self.assertRaises(ValueError, Parser(FakeCreator([])).get_socket,
-                          IPAddr('0.0.0.0'), 42, 'RAW')
-
-    def test_invalid_family(self):
-        """
-        Test it rejects invalid address family.
-        """
-        # Note: this produces a bad logger output, since this address
-        # can not be converted to string, so the original message with
-        # placeholders is output. This should not happen in practice, so
-        # it is harmless.
-        addr = IPAddr('0.0.0.0')
-        addr.family = 42
-        self.assertRaises(ValueError, Parser(FakeCreator([])).get_socket,
-                          addr, 42, socket.SOCK_DGRAM)
-
-class WrapTests(unittest.TestCase):
-    """
-    Tests for the WrappedSocket class.
-    """
-    def test_wrap(self):
-        # We construct two pairs of sockets. The receiving side of one pair
-        # will be wrapped. Then we send one socket of the other pair through
-        # this pair and check the received one can be used as a socket.
-
-        # The transport socket
-        (t1, t2) = socket.socketpair()
-        # The payload socket
-        (p1, p2) = socket.socketpair()
-
-        t2 = WrappedSocket(t2)
-
-        # Transfer the descriptor
-        send_fd(t1.fileno(), p1.fileno())
-        p1.close()
-        p1 = socket.fromfd(t2.read_fd(), socket.AF_UNIX, socket.SOCK_STREAM)
-
-        # Now, pass some data through the socket
-        p1.send(b'A')
-        data = p2.recv(1)
-        self.assertEqual(b'A', data)
-
-        # Test the wrapping didn't hurt the socket's usual methods
-        t1.send(b'B')
-        data = t2.recv(1)
-        self.assertEqual(b'B', data)
-        t2.send(b'C')
-        data = t1.recv(1)
-        self.assertEqual(b'C', data)
-
-        # Explicitly close temporary socket pair as the Python
-        # interpreter expects it.  It may not be 100% exception safe,
-        # but since this is only for tests we prefer brevity.
-        p1.close()
-        p2.close()
-        t1.close()
-        t2.close()
-
-if __name__ == '__main__':
-    isc.log.init("bind10") # FIXME Should this be needed?
-    isc.log.resetUnitTestRootLogger()
-    unittest.main()
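
The byte strings used in ParserTests above encode the request format spoken to b10-sockcreator: 'S', then 'U'/'T' for UDP/TCP, '4'/'6' for the address family, the port in network byte order, and the raw address bytes. The helper below is reverse-engineered from those test vectors only; the function name and field breakdown are mine, not the removed parser's.

    import socket
    import struct

    def build_socket_request(address, port, protocol):
        """Rebuild the 'create socket' request message (string protocol form)."""
        family = socket.AF_INET6 if ':' in address else socket.AF_INET
        msg = b'S'                                   # 'S' = create a socket
        msg += b'U' if protocol == 'UDP' else b'T'   # UDP or TCP
        msg += b'6' if family == socket.AF_INET6 else b'4'
        msg += struct.pack('!H', port)               # port, network byte order
        msg += socket.inet_pton(family, address)     # raw address bytes
        return msg

    # Matches the vector in test_create1 above:
    assert build_socket_request('192.0.2.0', 42, 'UDP') == b'SU4\0\x2A\xC0\0\x02\0'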

+ 0 - 396
src/lib/python/isc/bind10/tests/socket_cache_test.py

@@ -1,396 +0,0 @@
-# Copyright (C) 2011  Internet Systems Consortium, Inc. ("ISC")
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-import unittest
-import isc.log
-import isc.bind10.socket_cache
-import isc.bind10.sockcreator
-from isc.net.addr import IPAddr
-import os
-
-class Test(unittest.TestCase):
-    """
-    Base for the tests here. It replaces the os.close method.
-    """
-    def setUp(self):
-        self._closes = []
-        isc.bind10.socket_cache.os.close = self.__close
-
-    def tearDown(self):
-        # This is not a very clean solution. But once the test ceases
-        # to exist, the method must no longer be used to destroy the
-        # object. And we can't restore the real os.close here, as we
-        # never work with real sockets in these tests.
-        isc.bind10.socket_cache.os.close = lambda fd: None
-
-    def __close(self, fd):
-        """
-        Just log a close was called.
-        """
-        self._closes.append(fd)
-
-class SocketTest(Test):
-    """
-    Test for the Socket class.
-    """
-    def setUp(self):
-        """
-        Creates the socket to be tested.
-
-        It also creates other useful test variables.
-        """
-        Test.setUp(self)
-        self.__address = IPAddr("192.0.2.1")
-        self.__socket = isc.bind10.socket_cache.Socket('UDP', self.__address,
-                                                       1024, 42)
-
-    def test_init(self):
-        """
-        Checks the internals of the socket just after creation.
-        """
-        self.assertEqual('UDP', self.__socket.protocol)
-        self.assertEqual(self.__address, self.__socket.address)
-        self.assertEqual(1024, self.__socket.port)
-        self.assertEqual(42, self.__socket.fileno)
-        self.assertEqual({}, self.__socket.active_tokens)
-        self.assertEqual({}, self.__socket.shares)
-        self.assertEqual(set(), self.__socket.waiting_tokens)
-
-    def test_del(self):
-        """
-        Check it closes the socket when removed.
-        """
-        # This should make the refcount 0 and call the destructor
-        # right away
-        self.__socket = None
-        self.assertEqual([42], self._closes)
-
-    def test_share_modes(self):
-        """
-        Test the share mode compatibility check function.
-        """
-        modes = ['NO', 'SAMEAPP', 'ANY']
-        # If there are no shares, it is compatible with everything.
-        for mode in modes:
-            self.assertTrue(self.__socket.share_compatible(mode, 'anything'))
-
-        # There's an NO already, so it is incompatible with everything.
-        self.__socket.shares = {'token': ('NO', 'anything')}
-        for mode in modes:
-            self.assertFalse(self.__socket.share_compatible(mode, 'anything'))
-
-        # If there's SAMEAPP, it is compatible with ANY and SAMEAPP with the
-        # same name.
-        self.__socket.shares = {'token': ('SAMEAPP', 'app')}
-        self.assertFalse(self.__socket.share_compatible('NO', 'app'))
-        self.assertFalse(self.__socket.share_compatible('SAMEAPP',
-                                                        'something'))
-        self.assertTrue(self.__socket.share_compatible('SAMEAPP', 'app'))
-        self.assertTrue(self.__socket.share_compatible('ANY', 'app'))
-        self.assertFalse(self.__socket.share_compatible('ANY', 'something'))
-
-        # If there's ANY, then ANY (with any name) and SAMEAPP with the
-        # same name are compatible
-        self.__socket.shares = {'token': ('ANY', 'app')}
-        self.assertFalse(self.__socket.share_compatible('NO', 'app'))
-        self.assertFalse(self.__socket.share_compatible('SAMEAPP',
-                                                        'something'))
-        self.assertTrue(self.__socket.share_compatible('SAMEAPP', 'app'))
-        self.assertTrue(self.__socket.share_compatible('ANY', 'something'))
-
-        # In case there are multiple already inside
-        self.__socket.shares = {
-            'token': ('ANY', 'app'),
-            'another': ('SAMEAPP', 'app')
-        }
-        self.assertFalse(self.__socket.share_compatible('NO', 'app'))
-        self.assertFalse(self.__socket.share_compatible('SAMEAPP',
-                                                        'something'))
-        self.assertTrue(self.__socket.share_compatible('SAMEAPP', 'app'))
-        self.assertFalse(self.__socket.share_compatible('ANY', 'something'))
-        self.assertTrue(self.__socket.share_compatible('ANY', 'app'))
-
-        # Invalid inputs are rejected
-        self.assertRaises(ValueError, self.__socket.share_compatible, 'bad',
-                          'bad')
-
-class SocketCacheTest(Test):
-    """
-    Some tests for the isc.bind10.socket_cache.Cache.
-
-    This class, as well as being the testcase, pretends to be the
-    socket creator so it can hijack all the requests for sockets.
-    """
-    def setUp(self):
-        """
-        Creates the cache for tests with us being the socket creator.
-
-        Also creates some more variables for testing.
-        """
-        Test.setUp(self)
-        self.__cache = isc.bind10.socket_cache.Cache(self)
-        self.__address = IPAddr("192.0.2.1")
-        self.__socket = isc.bind10.socket_cache.Socket('UDP', self.__address,
-                                                       1024, 42)
-        self.__get_socket_called = False
-
-    def test_init(self):
-        """
-        Checks the internals of the cache just after the creation.
-        """
-        self.assertEqual(self, self.__cache._creator)
-        self.assertEqual({}, self.__cache._waiting_tokens)
-        self.assertEqual({}, self.__cache._active_tokens)
-        self.assertEqual({}, self.__cache._active_apps)
-        self.assertEqual({}, self.__cache._sockets)
-        self.assertEqual(set(), self.__cache._live_tokens)
-
-    def get_socket(self, address, port, socktype):
-        """
-        Pretend to be a socket creator.
-
-        This expects to be called with the test's address, port 1024 and
-        'UDP'.
-
-        Returns 42 and notes down that it was called.
-        """
-        self.assertEqual(self.__address, address)
-        self.assertEqual(1024, port)
-        self.assertEqual('UDP', socktype)
-        self.__get_socket_called = True
-        return 42
-
-    def test_get_token_cached(self):
-        """
-        Check the behaviour of get_token when the requested socket is already
-        cached inside.
-        """
-        self.__cache._sockets = {
-            'UDP': {'192.0.2.1': {1024: self.__socket}}
-        }
-        token = self.__cache.get_token('UDP', self.__address, 1024, 'ANY',
-                                       'test')
-        # It didn't call get_socket
-        self.assertFalse(self.__get_socket_called)
-        # It returned something
-        self.assertIsNotNone(token)
-        # The token is both in the waiting sockets and the live tokens
-        self.assertEqual({token: self.__socket}, self.__cache._waiting_tokens)
-        self.assertEqual(set([token]), self.__cache._live_tokens)
-        # The token got the new share to block any relevant queries
-        self.assertEqual({token: ('ANY', 'test')}, self.__socket.shares)
-        # The socket knows the token is waiting in it
-        self.assertEqual(set([token]), self.__socket.waiting_tokens)
-
-        # If we request one more, with incompatible share, it is rejected
-        self.assertRaises(isc.bind10.socket_cache.ShareError,
-                          self.__cache.get_token, 'UDP', self.__address, 1024,
-                          'NO', 'test')
-        # The internals are not changed, so the same checks
-        self.assertEqual({token: self.__socket}, self.__cache._waiting_tokens)
-        self.assertEqual(set([token]), self.__cache._live_tokens)
-        self.assertEqual({token: ('ANY', 'test')}, self.__socket.shares)
-        self.assertEqual(set([token]), self.__socket.waiting_tokens)
-
-    def test_get_token_uncached(self):
-        """
-        Check a new socket is created when a corresponding one is missing.
-        """
-        token = self.__cache.get_token('UDP', self.__address, 1024, 'ANY',
-                                       'test')
-        # The get_socket was called
-        self.assertTrue(self.__get_socket_called)
-        # It returned something
-        self.assertIsNotNone(token)
-        # Get the socket and check it looks OK
-        socket = self.__cache._waiting_tokens[token]
-        self.assertEqual(self.__address, socket.address)
-        self.assertEqual(1024, socket.port)
-        self.assertEqual(42, socket.fileno)
-        self.assertEqual('UDP', socket.protocol)
-        # The socket is properly cached
-        self.assertEqual({
-            'UDP': {'192.0.2.1': {1024: socket}}
-        }, self.__cache._sockets)
-        # The token is both in the waiting sockets and the live tokens
-        self.assertEqual({token: socket}, self.__cache._waiting_tokens)
-        self.assertEqual(set([token]), self.__cache._live_tokens)
-        # The token got the new share to block any relevant queries
-        self.assertEqual({token: ('ANY', 'test')}, socket.shares)
-        # The socket knows the token is waiting in it
-        self.assertEqual(set([token]), socket.waiting_tokens)
-
-    def test_get_token_excs(self):
-        """
-        Test that it is handled properly if the socket creator raises
-        some exceptions.
-        """
-        def raiseCreatorError(fatal):
-            raise isc.bind10.sockcreator.CreatorError('test error', fatal)
-        # First, fatal socket creator errors are passed through
-        self.get_socket = lambda addr, port, proto: raiseCreatorError(True)
-        self.assertRaises(isc.bind10.sockcreator.CreatorError,
-                          self.__cache.get_token, 'UDP', self.__address, 1024,
-                          'NO', 'test')
-        # And nonfatal are converted to SocketError
-        self.get_socket = lambda addr, port, proto: raiseCreatorError(False)
-        self.assertRaises(isc.bind10.socket_cache.SocketError,
-                          self.__cache.get_token, 'UDP', self.__address, 1024,
-                          'NO', 'test')
-
-    def test_get_socket(self):
-        """
-        Test that we can pick up a socket if we know a token.
-        """
-        token = "token"
-        app = 13
-        # No socket prepared there
-        self.assertRaises(ValueError, self.__cache.get_socket, token, app)
-        # Not changed
-        self.assertEqual({}, self.__cache._active_tokens)
-        self.assertEqual({}, self.__cache._active_apps)
-        self.assertEqual({}, self.__cache._sockets)
-        self.assertEqual(set(), self.__cache._live_tokens)
-        # Prepare a token there
-        self.__socket.waiting_tokens = set([token])
-        self.__socket.shares = {token: ('ANY', 'app')}
-        self.__cache._waiting_tokens = {token: self.__socket}
-        self.__cache._sockets = {'UDP': {'192.0.2.1': {1024: self.__socket}}}
-        self.__cache._live_tokens = set([token])
-        socket = self.__cache.get_socket(token, app)
-        # Received the fileno
-        self.assertEqual(42, socket)
-        # It moved from waiting to active ones
-        self.assertEqual({}, self.__cache._waiting_tokens)
-        self.assertEqual({token: self.__socket}, self.__cache._active_tokens)
-        self.assertEqual({13: set([token])}, self.__cache._active_apps)
-        self.assertEqual(set([token]), self.__cache._live_tokens)
-        self.assertEqual(set(), self.__socket.waiting_tokens)
-        self.assertEqual({token: 13}, self.__socket.active_tokens)
-        # Trying to get it again fails
-        self.assertRaises(ValueError, self.__cache.get_socket, token, app)
-
-    def test_drop_application(self):
-        """
-        Test that a drop_application calls drop_socket on all the sockets
-        held by the application.
-        """
-        sockets = set()
-        def drop_socket(token):
-            sockets.add(token)
-        # Mock the drop_socket so we know it is called
-        self.__cache.drop_socket = drop_socket
-        self.assertRaises(ValueError, self.__cache.drop_application,
-                          13)
-        self.assertEqual(set(), sockets)
-        # Put the tokens into active_apps. Nothing else should be touched
-        # by this call, so leave it alone.
-        self.__cache._active_apps = {
-            1: set(['t1', 't2']),
-            2: set(['t3'])
-        }
-        self.__cache.drop_application(1)
-        # We don't check the _active_apps, as it would normally be cleaned
-        # up by drop_socket, which we mocked out above.
-        self.assertEqual(set(['t1', 't2']), sockets)
-
-    def test_drop_socket(self):
-        """
-        Test the drop_socket call. It tests:
-        * That a socket that still has something to keep it alive is left alive
-          (both waiting and active).
-        * If not, it is deleted.
-        * That all the surrounding bookkeeping data is properly removed.
-        * And, of course, the exception for an unknown token.
-        """
-        self.assertRaises(ValueError, self.__cache.drop_socket, "bad token")
-        self.__socket.active_tokens = {'t1': 1}
-        self.__socket.waiting_tokens = set(['t2'])
-        self.__socket.shares = {'t1': ('ANY', 'app1'), 't2': ('ANY', 'app2')}
-        self.__cache._waiting_tokens = {'t2': self.__socket}
-        self.__cache._active_tokens = {'t1': self.__socket}
-        self.__cache._sockets = {'UDP': {'192.0.2.1': {1024: self.__socket}}}
-        self.__cache._live_tokens = set(['t1', 't2'])
-        self.__cache._active_apps = {1: set(['t1'])}
-        # We can't drop what wasn't picked up yet
-        self.assertRaises(ValueError, self.__cache.drop_socket, 't2')
-        self.assertEqual({'t1': 1}, self.__socket.active_tokens)
-        self.assertEqual(set(['t2']), self.__socket.waiting_tokens)
-        self.assertEqual({'t1': ('ANY', 'app1'), 't2': ('ANY', 'app2')},
-                         self.__socket.shares)
-        self.assertEqual({'t2': self.__socket}, self.__cache._waiting_tokens)
-        self.assertEqual({'t1': self.__socket}, self.__cache._active_tokens)
-        self.assertEqual({'UDP': {'192.0.2.1': {1024: self.__socket}}},
-                         self.__cache._sockets)
-        self.assertEqual(set(['t1', 't2']), self.__cache._live_tokens)
-        self.assertEqual({1: set(['t1'])}, self.__cache._active_apps)
-        self.assertEqual([], self._closes)
-        # If we drop this, it survives because it waits for being picked up
-        self.__cache.drop_socket('t1')
-        self.assertEqual({}, self.__socket.active_tokens)
-        self.assertEqual(set(['t2']), self.__socket.waiting_tokens)
-        self.assertEqual({'t2': ('ANY', 'app2')}, self.__socket.shares)
-        self.assertEqual({}, self.__cache._active_tokens)
-        self.assertEqual({'UDP': {'192.0.2.1': {1024: self.__socket}}},
-                         self.__cache._sockets)
-        self.assertEqual(set(['t2']), self.__cache._live_tokens)
-        self.assertEqual({}, self.__cache._active_apps)
-        self.assertEqual([], self._closes)
-        # Fill it again, now two applications having the same socket
-        self.__socket.active_tokens = {'t1': 1, 't2': 2}
-        self.__socket.waiting_tokens = set()
-        self.__socket.shares = {'t1': ('ANY', 'app1'), 't2': ('ANY', 'app2')}
-        self.__cache._waiting_tokens = {}
-        self.__cache._active_tokens = {
-            't1': self.__socket,
-            't2': self.__socket
-        }
-        self.__cache._live_tokens = set(['t1', 't2', 't3'])
-        self.assertEqual([], self._closes)
-        # We cheat here a little bit: t3 doesn't exist anywhere else, but
-        # we need to check the app isn't removed too soon. It shouldn't
-        # matter anywhere else, so we just avoid the tiresome filling in.
-        self.__cache._active_apps = {1: set(['t1', 't3']), 2: set(['t2'])}
-        # Drop it as t1. It should still live.
-        self.__cache.drop_socket('t1')
-        self.assertEqual({'t2': 2}, self.__socket.active_tokens)
-        self.assertEqual(set(), self.__socket.waiting_tokens)
-        self.assertEqual({'t2': ('ANY', 'app2')}, self.__socket.shares)
-        self.assertEqual({}, self.__cache._waiting_tokens)
-        self.assertEqual({'t2': self.__socket}, self.__cache._active_tokens)
-        self.assertEqual({'UDP': {'192.0.2.1': {1024: self.__socket}}},
-                         self.__cache._sockets)
-        self.assertEqual(set(['t3', 't2']), self.__cache._live_tokens)
-        self.assertEqual({1: set(['t3']), 2: set(['t2'])},
-                         self.__cache._active_apps)
-        self.assertEqual([], self._closes)
-        # Drop it again, from the other application. It should get removed
-        # and closed.
-        self.__cache.drop_socket('t2')
-        self.assertEqual({}, self.__socket.active_tokens)
-        self.assertEqual(set(), self.__socket.waiting_tokens)
-        self.assertEqual({}, self.__socket.shares)
-        self.assertEqual({}, self.__cache._waiting_tokens)
-        self.assertEqual({}, self.__cache._active_tokens)
-        self.assertEqual({}, self.__cache._sockets)
-        self.assertEqual(set(['t3']), self.__cache._live_tokens)
-        self.assertEqual({1: set(['t3'])}, self.__cache._active_apps)
-        # The cache doesn't hold the socket. So when we remove it ourselves,
-        # it should get closed.
-        self.__socket = None
-        self.assertEqual([42], self._closes)
-
-if __name__ == '__main__':
-    isc.log.init("bind10")
-    isc.log.resetUnitTestRootLogger()
-    unittest.main()
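
The compatibility matrix exercised in test_share_modes above can be restated in a few lines. This is only a summary of the rules implemented by the removed share_compatible() method, checked against the assertions in that test:

    def pair_compatible(existing, requested):
        """Can two (mode, name) share entries coexist on one socket?"""
        (emode, ename), (mode, name) = existing, requested
        if 'NO' in (emode, mode):
            return False              # 'NO' never shares with anything
        if 'SAMEAPP' in (emode, mode):
            return ename == name      # the application names must match
        return True                   # both are 'ANY'

    assert pair_compatible(('ANY', 'app'), ('ANY', 'other'))
    assert pair_compatible(('ANY', 'app'), ('SAMEAPP', 'app'))
    assert not pair_compatible(('SAMEAPP', 'app'), ('ANY', 'other'))
    assert not pair_compatible(('NO', 'app'), ('ANY', 'app'))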

+ 0 - 24
src/lib/python/isc/cc/Makefile.am

@@ -1,24 +0,0 @@
-SUBDIRS = . cc_generated tests
-
-python_PYTHON =	__init__.py data.py session.py message.py logger.py
-BUILT_SOURCES = $(PYTHON_LOGMSGPKG_DIR)/work/pycc_messages.py
-nodist_pylogmessage_PYTHON = $(PYTHON_LOGMSGPKG_DIR)/work/pycc_messages.py
-pylogmessagedir = $(pyexecdir)/isc/log_messages/
-
-CLEANFILES = $(PYTHON_LOGMSGPKG_DIR)/work/pycc_messages.py
-CLEANFILES += $(PYTHON_LOGMSGPKG_DIR)/work/pycc_messages.pyc
-CLEANFILES += $(PYTHON_LOGMSGPKG_DIR)/work/pycc_messages.pyo
-
-EXTRA_DIST = pycc_messages.mes proto_defs.py
-
-# Define rule to build logging source files from message file
-$(PYTHON_LOGMSGPKG_DIR)/work/pycc_messages.py: pycc_messages.mes
-	$(top_builddir)/src/lib/log/compiler/message \
-		-d $(PYTHON_LOGMSGPKG_DIR)/work -p $(srcdir)/pycc_messages.mes
-
-pythondir = $(pyexecdir)/isc/cc
-
-CLEANDIRS = __pycache__
-
-clean-local:
-	rm -rf $(CLEANDIRS)

+ 0 - 2
src/lib/python/isc/cc/__init__.py

@@ -1,2 +0,0 @@
-import isc.cc.message
-from isc.cc.session import *

+ 0 - 2
src/lib/python/isc/cc/cc_generated/.gitignore

@@ -1,2 +0,0 @@
-/__init__.py
-/proto_defs.py

+ 0 - 32
src/lib/python/isc/cc/cc_generated/Makefile.am

@@ -1,32 +0,0 @@
-# This makefile is a hack to enable tests to run with one module generated
-# while the rest is just used. The generated file is created under the build
-# dir, not the src dir, which means it is not found when these are different.
-#
-# We have a forwarder module in the src dir and build the real one in a
-# different location. This is similar to what happens in log_messages/work.
-# We can't reuse the name `work`, since it would collide, so we use a less
-# generic name.
-
-nodist_python_PYTHON = proto_defs.py
-BUILT_SOURCES = proto_defs.py __init__.py
-noinst_SCRIPTS = __init__.py
-
-proto_defs.py: $(top_srcdir)/src/lib/cc/proto_defs.cc \
-	$(top_srcdir)/src/lib/util/python/pythonize_constants.py
-	$(PYTHON) $(top_srcdir)/src/lib/util/python/pythonize_constants.py \
-		$(top_srcdir)/src/lib/cc/proto_defs.cc $@
-
-# We need to create an __init__.py, so it is recognized as module.
-# But it may be empty.
-__init__.py:
-	touch $@
-
-pythondir = $(pyexecdir)/isc/cc
-
-CLEANDIRS = __pycache__
-
-CLEANFILES = proto_defs.py __init__.py
-CLEANFILES += proto_defs.pyc __init__.pyc
-CLEANFILES += proto_defs.pyo __init__.pyo
-
-clean-local:
-	rm -rf $(CLEANDIRS)

+ 0 - 276
src/lib/python/isc/cc/data.py

@@ -1,276 +0,0 @@
-# Copyright (C) 2010  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-#
-# Helper functions for data elements as used in cc-channel and
-# configuration. There is no python equivalent for the cpp Element
-# class, since data elements are represented by native python types
-# (int, real, bool, string, list and dict respectively)
-#
-
-import json
-import re
-
-class DataNotFoundError(Exception):
-    """Raised if an identifier does not exist according to a spec file,
-       or if an item is addressed that is not in the current (or default)
-       config (such as a nonexistent list or map element)"""
-    pass
-
-class DataAlreadyPresentError(Exception):
-    """Raised if there is an attemt to add an element to a list or a
-       map that is already present in that list or map (i.e. if 'add'
-       is used when it should be 'set')"""
-    pass
-
-class DataTypeError(Exception):
-    """Raised if there is an attempt to set an element that is of a
-       different type than the type specified in the specification."""
-    pass
-
-def remove_identical(a, b):
-    """Removes the values from dict a that are the same as in dict b.
-       Raises a DataTypeError if a or b is not a dict"""
-    to_remove = []
-    if type(a) != dict or type(b) != dict:
-        raise DataTypeError("Not a dict in remove_identical()")
-    duplicate_keys = [key for key in a.keys() if key in b and a[key] == b[key]]
-    for id in duplicate_keys:
-        del(a[id])
-
-def merge(orig, new):
-    """Merges the contents of new into orig, think recursive update()
-       orig and new must both be dicts. If an element value is None in
-       new it will be removed in orig."""
-    if type(orig) != dict or type(new) != dict:
-        raise DataTypeError("Not a dict in merge()")
-    orig.update(new)
-    remove_null_items(orig)
-
-def remove_null_items(d):
-    """Recursively removes all (key,value) pairs from d where the
-       value is None"""
-    null_keys = []
-    for key in d.keys():
-        if type(d[key]) == dict:
-            remove_null_items(d[key])
-        elif d[key] is None:
-            null_keys.append(key)
-    for k in null_keys:
-        del d[k]
-
-def _concat_identifier(id_parts):
-    """Concatenates the given identifier parts into a string,
-       delimited with the '/' character.
-    """
-    return '/'.join(id_parts)
-
-def split_identifier(identifier):
-    """Splits the given identifier into a list of identifier parts,
-       as delimited by the '/' character.
-       Raises a DataTypeError if identifier is not a string."""
-    if type(identifier) != str:
-        raise DataTypeError("identifier is not a string")
-    id_parts = identifier.split('/')
-    id_parts[:] = (value for value in id_parts if value != "")
-    return id_parts
-
-def identifier_has_list_index(identifier):
-    """Returns True if the given identifier string has at least one
-       list index (with [I], where I is a number"""
-    return (type(identifier) == str and
-            re.search("\[\d+\]", identifier) is not None)
-
-
-def split_identifier_list_indices(identifier):
-    """Finds list indexes in the given identifier, which are of the
-       format [integer].
-       Identifier must be a string.
-       This will only give the list index for the last 'part' of the
-       given identifier (as delimited by the '/' sign).
-       Raises a DataTypeError if the identifier is not a string,
-       or if the format is bad.
-       Returns a tuple, where the first element is the string part of
-       the identifier, and the second element is a list of (nested) list
-       indices.
-       Examples:
-       'a/b/c' will return ('a/b/c', None)
-       'a/b/c[1]' will return ('a/b/c', [1])
-       'a/b/c[1][2][3]' will return ('a/b/c', [1, 2, 3])
-       'a[0]/b[1]/c[2]' will return ('a[0]/b[1]/c', [2])
-    """
-    if type(identifier) != str:
-        raise DataTypeError("identifier in "
-                            "split_identifier_list_indices() "
-                            "not a string: " + str(identifier))
-
-    # We only work on the final 'part' of the identifier
-    id_parts = split_identifier(identifier)
-    id_str = id_parts[-1]
-
-    i = id_str.find('[')
-    if i < 0:
-        if id_str.find(']') >= 0:
-            raise DataTypeError("Bad format in identifier (] but no [): " + str(identifier))
-        return identifier, None
-
-    # keep the non-index part of that to replace later
-    id = id_str[:i]
-    indices = []
-    while i >= 0:
-        e = id_str.find(']')
-        if e < i + 1:
-            raise DataTypeError("Bad format in identifier (] before [): " + str(identifier))
-        try:
-            indices.append(int(id_str[i+1:e]))
-        except ValueError:
-            raise DataTypeError("List index in " + identifier + " not an integer")
-        id_str = id_str[e + 1:]
-        i = id_str.find('[')
-        if i > 0:
-            raise DataTypeError("Bad format in identifier ([ within []): " + str(identifier))
-    if id.find(']') >= 0 or len(id_str) > 0:
-        raise DataTypeError("Bad format in identifier (extra ]): " + str(identifier))
-
-    # we replace the final part of the original identifier with
-    # the stripped string
-    id_parts[-1] = id
-    id = _concat_identifier(id_parts)
-    return id, indices
-
-def _find_child_el(element, id):
-    """Finds the child of element with the given id. If the id contains
-       [i], where i is a number, and the child element is a list, the
-       i-th element of that list is returned instead of the list itself.
-       Raises a DataTypeError if the element is of wrong type, if id
-       is not a string, or if the id string contains a bad value.
-       Raises a DataNotFoundError if the element at id could not be
-       found.
-    """
-    id, list_indices = split_identifier_list_indices(id)
-    if type(element) == dict and id in element.keys():
-        result = element[id]
-    else:
-        raise DataNotFoundError(id + " in " + str(element))
-    if type(result) == list and list_indices is not None:
-        for list_index in list_indices:
-            if list_index >= len(result):
-                raise DataNotFoundError("Element " + str(list_index) + " in " + str(result))
-            result = result[list_index]
-    return result
-
-def find(element, identifier):
-    """Returns the subelement in the given data element, raises
-       DataNotFoundError if not found.
-       Returns the given element if the identifier is an empty string.
-       Raises a DataTypeError if identifier is not a string, or if
-       identifier is not empty and element is not a dict.
-    """
-    if type(identifier) != str:
-        raise DataTypeError("identifier in find() is not a str")
-    if identifier == "":
-        return element
-    if type(element) != dict:
-        raise DataTypeError("element in find() is not a dict")
-    id_parts = split_identifier(identifier)
-    cur_el = element
-    for id in id_parts:
-        cur_el = _find_child_el(cur_el, id)
-    return cur_el
-
-def set(element, identifier, value):
-    """Sets the value at the element specified by identifier to value.
-       If the value is None, it is removed from the dict. If element
-       is not a dict, or if the identifier points to something that is
-       not a dict, a DataTypeError is raised. The element is updated inline,
-       so if the original needs to be kept, you must make a copy before
-       calling set(). The updated base element is returned (so that
-       el.set().set().set() is possible)"""
-    if type(element) != dict:
-        raise DataTypeError("element in set() is not a dict")
-    if type(identifier) != str:
-        raise DataTypeError("identifier in set() is not a str")
-    id_parts = split_identifier(identifier)
-    cur_el = element
-    for id in id_parts[:-1]:
-        try:
-            cur_el = _find_child_el(cur_el, id)
-        except DataNotFoundError:
-            if value is None:
-                # ok, we are unsetting a value that wasn't set in
-                # the first place. Simply stop, but still return the
-                # base element as documented.
-                return element
-            cur_el[id] = {}
-            cur_el = cur_el[id]
-
-    id, list_indices = split_identifier_list_indices(id_parts[-1])
-    if list_indices is None:
-        # value can be an empty list or dict, so check for None explicitly
-        if value is not None:
-            cur_el[id] = value
-        else:
-            del cur_el[id]
-    else:
-        cur_el = cur_el[id]
-        # in case of nested lists, we need to get to the next to last
-        for list_index in list_indices[:-1]:
-            if type(cur_el) != list:
-                raise DataTypeError("Element at " + identifier + " is not a list")
-            if len(cur_el) <= list_index:
-                raise DataNotFoundError("List index at " + identifier + " out of range")
-            cur_el = cur_el[list_index]
-        # value can be an empty list or dict, so check for None explicitly
-        list_index = list_indices[-1]
-        if type(cur_el) != list:
-            raise DataTypeError("Element at " + identifier + " is not a list")
-        if len(cur_el) <= list_index:
-            raise DataNotFoundError("List index at " + identifier + " out of range")
-        if value is not None:
-            cur_el[list_index] = value
-        else:
-            del cur_el[list_index]
-    return element
-
-def unset(element, identifier):
-    """Removes the element at the given identifier if it exists. Raises
-       a DataTypeError if element is not a dict or if identifier is not
-       a string. Returns the base element."""
-    # perhaps we can simply do with set none, and remove this whole
-    # function
-    return set(element, identifier, None)
-
-def find_no_exc(element, identifier):
-    """Returns the subelement in the given data element, returns None
-       if not found, or if an error occurred (i.e. this function should
-       never raise an exception)"""
-    try:
-        return find(element, identifier)
-    except DataNotFoundError:
-        return None
-    except DataTypeError:
-        return None
-
-def parse_value_str(value_str):
-    """Parses the given string to a native python object. If the
-       string cannot be parsed, it is returned. If it is not a string,
-       None is returned"""
-    if type(value_str) != str:
-        return None
-    try:
-        return json.loads(value_str)
-    except ValueError:
-        # simply return the string itself
-        return value_str
-
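
A minimal usage sketch of the identifier-based access API defined above (find(), set(), unset() with '/'-delimited paths and optional [N] list indices); the sample element contents are made up for illustration:

from isc.cc import data

cfg = {'a': {'b': [{'c': 1}, {'c': 2}]}}
data.find(cfg, 'a/b[1]/c')      # -> 2
data.set(cfg, 'a/b[0]/c', 9)    # updates cfg in place and returns it
data.unset(cfg, 'a/b[1]/c')     # equivalent to set(cfg, 'a/b[1]/c', None)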

+ 0 - 26
src/lib/python/isc/cc/logger.py

@@ -1,26 +0,0 @@
-# Copyright (C) 2013  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-""" This is a logging utility module for other modules of the cc library
-package.
-
-"""
-
-import isc.log
-
-# C++ version of the CC module uses 'cc'; using the same name does not
-# necessarily cause disruption, but we use a different name to minimize
-# possible confusion.
-logger = isc.log.Logger('pycc')

+ 0 - 42
src/lib/python/isc/cc/message.py

@@ -1,42 +0,0 @@
-# Copyright (C) 2009  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-#
-# Functions for reading and parsing cc messages
-# Currently these are only abstraction functions for JSON conversion.
-#
-
-import sys
-import struct
-
-import json
-
-def to_wire(items):
-    '''Encodes the given python structure in JSON, and converts the
-       result to bytes. Raises a TypeError if the given structure is
-       not serializable with JSON.'''
-    return json.dumps(items).encode('utf8')
-
-def from_wire(data):
-    '''Decodes the given bytes and parses it with the builtin JSON
-       parser. Raises a ValueError if the data is not valid JSON.
-       Raises an AttributeError if the given object has no decode()
-       method (which should return a string).
-       '''
-    return json.loads(data.decode('utf8'), strict=False)
-
-if __name__ == "__main__":
-    import doctest
-    doctest.testmod()
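
The to_wire()/from_wire() pair above is a thin JSON layer: encode to UTF-8 bytes, decode back. A minimal round-trip sketch (the payload is an arbitrary example):

import isc.cc.message as message

wire = message.to_wire({'command': ['shutdown', None]})   # UTF-8 encoded JSON bytes
assert message.from_wire(wire) == {'command': ['shutdown', None]}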

+ 0 - 2
src/lib/python/isc/cc/proto_defs.py

@@ -1,2 +0,0 @@
-# Forwarder module. Look into cc_generated/Makefile.am for details.
-from cc_generated.proto_defs import *

+ 0 - 20
src/lib/python/isc/cc/pycc_messages.mes

@@ -1,20 +0,0 @@
-# Copyright (C) 2013  Internet Systems Consortium, Inc. ("ISC")
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
-# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
-# AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-# PERFORMANCE OF THIS SOFTWARE.
-
-# No namespace declaration - these constants go in the global namespace
-# of the pycc_messages python module.
-
-% PYCC_LNAME_RECEIVED received local name: %1
-Debug message: the local module received its unique identifier (name)
-from msgq on completion of establishing the session with msgq.
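
For context, the message ID defined above is emitted through the 'pycc' logger from logger.py; session.py (below) does exactly this when it learns its local name. A minimal sketch, with a made-up lname value and assuming the logging framework has already been initialized:

from isc.cc.logger import logger
from isc.log_messages.pycc_messages import PYCC_LNAME_RECEIVED

lname = "x1234"  # illustrative value only
logger.debug(logger.DBGLVL_TRACE_BASIC, PYCC_LNAME_RECEIVED, lname)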

+ 0 - 337
src/lib/python/isc/cc/session.py

@@ -1,337 +0,0 @@
-# Copyright (C) 2009  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-import sys
-import socket
-import struct
-import errno
-import os
-import threading
-import bind10_config
-
-import isc.cc.message
-import isc.log
-from isc.cc.logger import logger
-from isc.log_messages.pycc_messages import *
-from isc.cc.proto_defs import *
-
-class ProtocolError(Exception): pass
-class NetworkError(Exception): pass
-class SessionError(Exception): pass
-class SessionTimeout(Exception): pass
-
-class Session:
-    MSGQ_DEFAULT_TIMEOUT = 4000
-
-    def __init__(self, socket_file=None):
-        self._socket = None
-        self._lname = None
-        self._sequence = 1
-        self._closed = False
-        self._queue = []
-        self._lock = threading.RLock()
-        self.set_timeout(self.MSGQ_DEFAULT_TIMEOUT);
-        self._recv_len_size = 0
-        self._recv_size = 0
-
-        if socket_file is None:
-            if "BIND10_MSGQ_SOCKET_FILE" in os.environ:
-                self.socket_file = os.environ["BIND10_MSGQ_SOCKET_FILE"]
-            else:
-                self.socket_file = bind10_config.BIND10_MSGQ_SOCKET_FILE
-        else:
-            self.socket_file = socket_file
-
-        try:
-            self._socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
-            self._socket.connect(self.socket_file)
-            self.sendmsg({ CC_HEADER_TYPE: CC_COMMAND_GET_LNAME })
-            env, msg = self.recvmsg(False)
-            if not env:
-                raise ProtocolError("Could not get local name")
-            self._lname = msg[CC_PAYLOAD_LNAME]
-            if not self._lname:
-                raise ProtocolError("Could not get local name")
-            logger.debug(logger.DBGLVL_TRACE_BASIC, PYCC_LNAME_RECEIVED,
-                         self._lname)
-        except socket.error as se:
-            if self._socket:
-                self._socket.close()
-            raise SessionError(se)
-
-    @property
-    def lname(self):
-        return self._lname
-
-    def close(self):
-        self._socket.close()
-        self._lname = None
-        self._closed = True
-
-    def sendmsg(self, env, msg=None):
-        with self._lock:
-            if self._closed:
-                raise SessionError("Session has been closed.")
-            if type(env) == dict:
-                env = isc.cc.message.to_wire(env)
-            if len(env) > 65535:
-                raise ProtocolError("Envelope too large")
-            if type(msg) == dict:
-                msg = isc.cc.message.to_wire(msg)
-            length = 2 + len(env);
-            if msg is not None:
-                length += len(msg)
-
-            # Build entire message.
-            data = struct.pack("!I", length)
-            data += struct.pack("!H", len(env))
-            data += env
-            if msg is not None:
-                data += msg
-
-            # Send it in the blocking mode.  On some systems send() may
-            # actually send only part of the data, so we need to repeat it
-            # until all data have been sent out.
-            self._socket.setblocking(1)
-            while len(data) > 0:
-                cc = self._socket.send(data)
-                data = data[cc:]
-
-    def recvmsg(self, nonblock = True, seq = None):
-        """Reads a message. If nonblock is true, and there is no
-           message to read, it returns (None, None).
-           If seq is not None, it should be a value as returned by
-           group_sendmsg(), in which case only the response to
-           that message is returned, and others will be queued until
-           the next call to this method.
-           If seq is None, only messages that are *not* responses
-           will be returned, and responses will be queued.
-           The queue is checked for relevant messages before data
-           is read from the socket.
-           Raises a SessionError if there is a JSON decode problem in
-           the message that is read, or if the session has been closed
-           prior to the call of recvmsg()"""
-        with self._lock:
-            if len(self._queue) > 0:
-                i = 0;
-                for env, msg in self._queue:
-                    if seq != None and CC_HEADER_REPLY in env and \
-                        seq == env[CC_HEADER_REPLY]:
-                        return self._queue.pop(i)
-                    elif seq == None and CC_HEADER_REPLY not in env:
-                        return self._queue.pop(i)
-                    else:
-                        i = i + 1
-            if self._closed:
-                raise SessionError("Session has been closed.")
-            data = self._receive_full_buffer(nonblock)
-            if data and len(data) > 2:
-                header_length = struct.unpack('>H', data[0:2])[0]
-                data_length = len(data) - 2 - header_length
-                try:
-                    if data_length > 0:
-                        env = isc.cc.message.from_wire(data[2:header_length+2])
-                        msg = isc.cc.message.from_wire(data[header_length + 2:])
-                        if (seq == None and CC_HEADER_REPLY not in env) or \
-                            (seq != None and CC_HEADER_REPLY in env and
-                             seq == env[CC_HEADER_REPLY]):
-                            return env, msg
-                        else:
-                            self._queue.append((env,msg))
-                            return self.recvmsg(nonblock, seq)
-                    else:
-                        return isc.cc.message.from_wire(data[2:header_length+2]), None
-                except ValueError as ve:
-                    # TODO: when we have logging here, add a debug
-                    # message printing the data that we were unable
-                    # to parse as JSON
-                    raise SessionError(ve)
-            return None, None
-
-    def _receive_bytes(self, size):
-        """Try to get size bytes of data from the socket.
-           Raises a ProtocolError if the read returns 0 bytes (connection closed).
-           Raises any error from recv().
-           Returns whatever data was available (if >0 bytes).
-           """
-        data = self._socket.recv(size)
-        if len(data) == 0: # server closed connection
-            raise ProtocolError("Read of 0 bytes: connection closed")
-        return data
-
-    def _receive_len_data(self):
-        """Reads self._recv_len_size bytes of data from the socket into
-           self._recv_len_data
-           This is done through class variables so in the case of
-           an EAGAIN we can continue on a subsequent call.
-           Raises a ProtocolError, a socket.error (which may be
-           timeout or eagain), or reads until we have all data we need.
-           """
-        while self._recv_len_size > 0:
-            new_data = self._receive_bytes(self._recv_len_size)
-            self._recv_len_data += new_data
-            self._recv_len_size -= len(new_data)
-
-    def _receive_data(self):
-        """Reads self._recv_size bytes of data from the socket into
-           self._recv_data.
-           This is done through instance variables so that in the case
-           of an EAGAIN we can continue on a subsequent call.
-           Reads until all required data has been received; may raise
-           a ProtocolError or a socket.error (which may indicate a
-           timeout or EAGAIN).
-        """
-        while self._recv_size > 0:
-            new_data = self._receive_bytes(self._recv_size)
-            self._recv_data += new_data
-            self._recv_size -= len(new_data)
-
-    def _receive_full_buffer(self, nonblock):
-        if nonblock:
-            self._socket.setblocking(0)
-        else:
-            self._socket.setblocking(1)
-            if self._socket_timeout == 0.0:
-                self._socket.settimeout(None)
-            else:
-                self._socket.settimeout(self._socket_timeout)
-
-        try:
-            # We might be in a call following an EAGAIN, in which case
-            # we simply continue; then either recv_size or recv_len_size
-            # is non-zero. They may never both be non-zero (we are either
-            # starting a full read, or continuing one of the two reads).
-            assert self._recv_size == 0 or self._recv_len_size == 0
-
-            if self._recv_size == 0:
-                if self._recv_len_size == 0:
-                    # both zero, start a new full read
-                    self._recv_len_size = 4
-                    self._recv_len_data = bytearray()
-                self._receive_len_data()
-
-                self._recv_size = struct.unpack('>I', self._recv_len_data)[0]
-                self._recv_data = bytearray()
-            self._receive_data()
-
-            # no EAGAIN, so copy data and reset internal counters
-            data = self._recv_data
-
-            self._recv_len_size = 0
-            self._recv_size = 0
-
-            return (data)
-
-        except socket.timeout:
-            raise SessionTimeout("recv() on cc session timed out")
-        except socket.error as se:
-            # Only keep data in case of EAGAIN
-            if se.errno == errno.EAGAIN:
-                return None
-            # unknown state otherwise, best to drop data
-            self._recv_len_size = 0
-            self._recv_size = 0
-            # ctrl-c can result in EINTR, return None to prevent
-            # stacktrace output
-            if se.errno == errno.EINTR:
-                return None
-            raise se
-
-    def _next_sequence(self):
-        self._sequence += 1
-        return self._sequence
-
-    def group_subscribe(self, group, instance=CC_INSTANCE_WILDCARD):
-        self.sendmsg({
-            CC_HEADER_TYPE: CC_COMMAND_SUBSCRIBE,
-            CC_HEADER_GROUP: group,
-            CC_HEADER_INSTANCE: instance,
-        })
-
-    def group_unsubscribe(self, group, instance=CC_INSTANCE_WILDCARD):
-        self.sendmsg({
-            CC_HEADER_TYPE: CC_COMMAND_UNSUBSCRIBE,
-            CC_HEADER_GROUP: group,
-            CC_HEADER_INSTANCE: instance,
-        })
-
-    def group_sendmsg(self, msg, group, instance=CC_INSTANCE_WILDCARD,
-                      to=CC_TO_WILDCARD, want_answer=False):
-        '''
-        Send a message over the CC session.
-
-        Parameters:
-        - msg The message to send, encoded as python structures (dicts,
-          lists, etc).
-        - group The recipient group to send to.
-        - instance Instance in the group.
-        - to Direct recipient (overrides the above, should contain the
-          lname of the recipient).
-        - want_answer If an answer is requested. If there's no recipient
-          and this is true, the message queue would send an error message
-          instead of the answer.
-
-        Return:
-          A sequence number that can be used to wait for an answer
-          (see group_recvmsg).
-        '''
-        seq = self._next_sequence()
-        self.sendmsg({
-            CC_HEADER_TYPE: CC_COMMAND_SEND,
-            CC_HEADER_FROM: self._lname,
-            CC_HEADER_TO: to,
-            CC_HEADER_GROUP: group,
-            CC_HEADER_INSTANCE: instance,
-            CC_HEADER_SEQ: seq,
-            CC_HEADER_WANT_ANSWER: want_answer
-        }, isc.cc.message.to_wire(msg))
-        return seq
-
-    def has_queued_msgs(self):
-        return len(self._queue) > 0
-
-    def group_recvmsg(self, nonblock = True, seq = None):
-        env, msg  = self.recvmsg(nonblock, seq)
-        if env == None:
-            # return none twice to match normal return value
-            # (so caller won't get a type error on no data)
-            return (None, None)
-        return (msg, env)
-
-    def group_reply(self, routing, msg):
-        seq = self._next_sequence()
-        self.sendmsg({
-            CC_HEADER_TYPE: CC_COMMAND_SEND,
-            CC_HEADER_FROM: self._lname,
-            CC_HEADER_TO: routing[CC_HEADER_FROM],
-            CC_HEADER_GROUP: routing[CC_HEADER_GROUP],
-            CC_HEADER_INSTANCE: routing[CC_HEADER_INSTANCE],
-            CC_HEADER_SEQ: seq,
-            CC_HEADER_REPLY: routing[CC_HEADER_SEQ],
-        }, isc.cc.message.to_wire(msg))
-        return seq
-
-    def set_timeout(self, milliseconds):
-        """Sets the socket timeout for blocking reads to the given
-           number of milliseconds"""
-        self._socket_timeout = milliseconds / 1000.0
-
-    def get_timeout(self):
-        """Returns the current timeout for blocking reads (in milliseconds)"""
-        return self._socket_timeout * 1000.0
-
-if __name__ == "__main__":
-    import doctest
-    doctest.testmod()
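
Session frames every message as a 4-byte big-endian total length, a 2-byte big-endian envelope length, the JSON envelope, and an optional JSON payload (see sendmsg() and _receive_full_buffer() above). A minimal request/response sketch, assuming a running msgq; the group name and command are illustrative only:

from isc.cc.session import Session

sess = Session()                        # connects to msgq and learns its lname
sess.group_subscribe('ConfigManager')   # hypothetical group name
seq = sess.group_sendmsg({'command': ['get_config']}, 'ConfigManager',
                         want_answer=True)
msg, env = sess.group_recvmsg(nonblock=False, seq=seq)  # block for the reply
sess.close()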

+ 0 - 1
src/lib/python/isc/cc/tests/.gitignore

@@ -1 +0,0 @@
-/cc_test

+ 0 - 29
src/lib/python/isc/cc/tests/Makefile.am

@@ -1,29 +0,0 @@
-PYCOVERAGE_RUN = @PYCOVERAGE_RUN@          
-
-PYTESTS = message_test.py data_test.py session_test.py
-# NOTE: test_session.py is to be run manually, so not automated.
-EXTRA_DIST = $(PYTESTS)
-EXTRA_DIST += sendcmd.py
-EXTRA_DIST += test_session.py
-
-# If necessary (rare cases), explicitly specify paths to dynamic libraries
-# required by loadable python modules.
-LIBRARY_PATH_PLACEHOLDER =
-if SET_ENV_LIBRARY_PATH
-LIBRARY_PATH_PLACEHOLDER += $(ENV_LIBRARY_PATH)=$(abs_top_builddir)/src/lib/cryptolink/.libs:$(abs_top_builddir)/src/lib/dns/.libs:$(abs_top_builddir)/src/lib/dns/python/.libs:$(abs_top_builddir)/src/lib/cc/.libs:$(abs_top_builddir)/src/lib/config/.libs:$(abs_top_builddir)/src/lib/log/.libs:$(abs_top_builddir)/src/lib/util/.libs:$(abs_top_builddir)/src/lib/util/threads/.libs:$(abs_top_builddir)/src/lib/exceptions/.libs:$(abs_top_builddir)/src/lib/datasrc/.libs:$$$(ENV_LIBRARY_PATH)
-endif
-
-# test using command-line arguments, so use check-local target instead of TESTS
-check-local:
-if ENABLE_PYTHON_COVERAGE
-	touch $(abs_top_srcdir)/.coverage 
-	rm -f .coverage
-	${LN_S} $(abs_top_srcdir)/.coverage .coverage
-endif
-	for pytest in $(PYTESTS) ; do \
-	echo Running test: $$pytest ; \
-	$(LIBRARY_PATH_PLACEHOLDER) \
-	PYTHONPATH=$(COMMON_PYTHON_PATH) \
-	BIND10_TEST_SOCKET_FILE=$(builddir)/test_socket.sock \
-	$(PYCOVERAGE_RUN) $(abs_srcdir)/$$pytest || exit ; \
-	done

+ 0 - 27
src/lib/python/isc/cc/tests/cc_test.in

@@ -1,27 +0,0 @@
-#! /bin/sh
-
-# Copyright (C) 2010  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-PYTHON_EXEC=${PYTHON_EXEC:-@PYTHON@}
-export PYTHON_EXEC
-
-CONFIG_PATH=@abs_top_srcdir@/src/lib/python/isc/cc/tests
-
-PYTHONPATH=@abs_top_srcdir@/src/lib/python
-export PYTHONPATH
-
-cd ${BIND10_PATH}
-${PYTHON_EXEC} -O ${CONFIG_PATH}/session_test.py $*

+ 0 - 244
src/lib/python/isc/cc/tests/data_test.py

@@ -1,244 +0,0 @@
-# Copyright (C) 2010  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-#
-# Tests for the functions in data.py
-#
-
-import unittest
-import os
-from isc.cc import data
-
-class TestData(unittest.TestCase):
-    def test_remove_identical(self):
-        a = {}
-        b = {}
-        c = {}
-        data.remove_identical(a, b)
-        self.assertEqual(a, c)
-
-        a = { "a": 1 }
-        b = { "a": 1 }
-        c = {}
-        data.remove_identical(a, b)
-        self.assertEqual(a, c)
-    
-        a = { "a": 1, "b": [ 1, 2 ] }
-        b = {}
-        c = { "a": 1, "b": [ 1, 2 ] }
-        data.remove_identical(a, b)
-        self.assertEqual(a, c)
-    
-        a = { "a": 1, "b": [ 1, 2 ] }
-        b = { "a": 1, "b": [ 1, 2 ] }
-        c = {}
-        data.remove_identical(a, b)
-        self.assertEqual(a, c)
-    
-        a = { "a": 1, "b": [ 1, 2 ] }
-        b = { "a": 1, "b": [ 1, 3 ] }
-        c = { "b": [ 1, 2 ] }
-        data.remove_identical(a, b)
-        self.assertEqual(a, c)
-    
-        a = { "a": { "b": "c" } }
-        b = {}
-        c = { "a": { "b": "c" } }
-        data.remove_identical(a, b)
-        self.assertEqual(a, c)
-    
-        a = { "a": { "b": "c" } }
-        b = { "a": { "b": "c" } }
-        c = {}
-        data.remove_identical(a, b)
-        self.assertEqual(a, c)
-    
-        a = { "a": { "b": "c" } }
-        b = { "a": { "b": "d" } }
-        c = { "a": { "b": "c" } }
-        data.remove_identical(a, b)
-        self.assertEqual(a, c)
-
-        self.assertRaises(data.DataTypeError, data.remove_identical,
-                          a, 1)
-        self.assertRaises(data.DataTypeError, data.remove_identical,
-                          1, b)
-        
-    def test_merge(self):
-        d1 = { 'a': 'a', 'b': 1, 'c': { 'd': 'd', 'e': 2 } }
-        d2 = { 'a': None, 'c': { 'd': None, 'e': 3, 'f': [ 1 ] } }
-        d12 = { 'b': 1, 'c': { 'e': 3, 'f': [ 1 ] } }
-        m12 = d1
-        data.merge(m12, d2)
-        self.assertEqual(d12, m12)
-        self.assertRaises(data.DataTypeError, data.merge, d1, "a")
-        self.assertRaises(data.DataTypeError, data.merge, 1, d2)
-        self.assertRaises(data.DataTypeError, data.merge, None, None)
-
-
-    def test_split_identifier_list_indices(self):
-        id, indices = data.split_identifier_list_indices('a')
-        self.assertEqual(id, 'a')
-        self.assertEqual(indices, None)
-        id, indices = data.split_identifier_list_indices('a[0]')
-        self.assertEqual(id, 'a')
-        self.assertEqual(indices, [0])
-        id, indices = data.split_identifier_list_indices('a[0][1]')
-        self.assertEqual(id, 'a')
-        self.assertEqual(indices, [0, 1])
-
-        # examples from the docstring
-        id, indices = data.split_identifier_list_indices('a/b/c')
-        self.assertEqual(id, 'a/b/c')
-        self.assertEqual(indices, None)
-        
-        id, indices = data.split_identifier_list_indices('a/b/c[1]')
-        self.assertEqual(id, 'a/b/c')
-        self.assertEqual(indices, [1])
-       
-        id, indices = data.split_identifier_list_indices('a/b/c[1][2][3]')
-        self.assertEqual(id, 'a/b/c')
-        self.assertEqual(indices, [1, 2, 3])
-        
-        id, indices = data.split_identifier_list_indices('a[0]/b[1]/c[2]')
-        self.assertEqual(id, 'a[0]/b[1]/c')
-        self.assertEqual(indices, [2])
-
-        # bad formats
-        self.assertRaises(data.DataTypeError, data.split_identifier_list_indices, 'a[')
-        self.assertRaises(data.DataTypeError, data.split_identifier_list_indices, 'a]')
-        self.assertRaises(data.DataTypeError, data.split_identifier_list_indices, 'a[[0]]')
-        self.assertRaises(data.DataTypeError, data.split_identifier_list_indices, 'a[0]a')
-        self.assertRaises(data.DataTypeError, data.split_identifier_list_indices, 'a[0]a[1]')
-
-        self.assertRaises(data.DataTypeError, data.split_identifier_list_indices, 1)
-        
-
-    def test_find(self):
-        d1 = { 'a': 'a', 'b': 1, 'c': { 'd': 'd', 'e': 2, 'more': { 'data': 'here' } } }
-        self.assertEqual(data.find(d1, ''), d1)
-        self.assertEqual(data.find(d1, 'a'), 'a')
-        self.assertEqual(data.find(d1, 'c/e'), 2)
-        self.assertEqual(data.find(d1, 'c/more/'), { 'data': 'here' })
-        self.assertEqual(data.find(d1, 'c/more/data'), 'here')
-        self.assertRaises(data.DataNotFoundError, data.find, d1, 'c/f')
-        self.assertRaises(data.DataNotFoundError, data.find, d1, 'f')
-        self.assertRaises(data.DataTypeError, data.find, d1, 1)
-        self.assertRaises(data.DataTypeError, data.find, None, 1)
-        self.assertRaises(data.DataTypeError, data.find, None, "foo")
-        self.assertRaises(data.DataTypeError, data.find, "123", "123")
-        self.assertEqual(data.find("123", ""), "123")
-
-        d2 = { 'a': [ 1, 2, 3 ] }
-        self.assertEqual(data.find(d2, 'a[0]'), 1)
-        self.assertEqual(data.find(d2, 'a[1]'), 2)
-        self.assertEqual(data.find(d2, 'a[2]'), 3)
-        self.assertRaises(data.DataNotFoundError, data.find, d2, 'a[3]')
-        self.assertRaises(data.DataTypeError, data.find, d2, 'a[a]')
-
-        d3 = { 'a': [ { 'b': [ {}, { 'c': 'd' } ] } ] }
-        self.assertEqual(data.find(d3, 'a[0]/b[1]/c'), 'd')
-        self.assertRaises(data.DataNotFoundError, data.find, d3, 'a[1]/b[1]/c')
-        
-    def test_set(self):
-        d1 = { 'a': 'a', 'b': 1, 'c': { 'd': 'd', 'e': 2 } }
-        d12 = { 'b': 1, 'c': { 'e': 3, 'f': [ 1 ] } }
-        d13 = { 'b': 1, 'c': { 'e': 3, 'f': [ 2 ] } }
-        d14 = { 'b': 1, 'c': { 'e': 3, 'f': [ { 'g': [ 1, 2 ] } ] } }
-        d15 = { 'b': 1, 'c': { 'e': 3, 'f': [ { 'g': [ 1, 3 ] } ] } }
-        data.set(d1, 'a', None)
-        data.set(d1, 'c/d', None)
-        data.set(d1, 'c/e/', 3)
-        data.set(d1, 'c/f', [ 1 ] )
-        self.assertEqual(d1, d12)
-        data.set(d1, 'c/f[0]', 2 )
-        self.assertEqual(d1, d13)
-
-        data.set(d1, 'c/f[0]', { 'g': [ 1, 2] } )
-        self.assertEqual(d1, d14)
-        data.set(d1, 'c/f[0]/g[1]', 3)
-        self.assertEqual(d1, d15)
-        
-        self.assertRaises(data.DataTypeError, data.set, d1, 1, 2)
-        self.assertRaises(data.DataTypeError, data.set, 1, "", 2)
-        self.assertRaises(data.DataTypeError, data.set, d1, 'c[1]', 2)
-        self.assertRaises(data.DataTypeError, data.set, d1, 'c[1][2]', 2)
-        self.assertRaises(data.DataNotFoundError, data.set, d1, 'c/f[5]', 2)
-        self.assertRaises(data.DataNotFoundError, data.set, d1, 'c/f[5][2]', 2)
-
-        d3 = {}
-        e3 = data.set(d3, "does/not/exist", 123)
-        self.assertEqual(d3,
-                         { 'does': { 'not': { 'exist': 123 } } })
-        self.assertEqual(e3,
-                         { 'does': { 'not': { 'exist': 123 } } })
-
-    def test_unset(self):
-        d1 = { 'a': 'a', 'b': 1, 'c': { 'd': 'd', 'e': [ 1, 2, 3 ] } }
-        data.unset(d1, 'a')
-        data.unset(d1, 'c/d')
-        data.unset(d1, 'does/not/exist')
-        self.assertEqual(d1, { 'b': 1, 'c': { 'e': [ 1, 2, 3 ] } })
-        data.unset(d1, 'c/e[0]')
-        self.assertEqual(d1, { 'b': 1, 'c': { 'e': [ 2, 3 ] } })
-        data.unset(d1, 'c/e[1]')
-        self.assertEqual(d1, { 'b': 1, 'c': { 'e': [ 2 ] } })
-        # index 1 should now be out of range
-        self.assertRaises(data.DataNotFoundError, data.unset, d1, 'c/e[1]')
-        d2 = { 'a': [ { 'b': [ 1, 2 ] } ] }
-        data.unset(d2, 'a[0]/b[1]')
-        self.assertEqual(d2, { 'a': [ { 'b': [ 1 ] } ] })
-        d3 = { 'a': [ [ 1, 2 ] ] }
-        data.set(d3, "a[0][1]", 3)
-        self.assertEqual(d3, { 'a': [ [ 1, 3 ] ] })
-        data.unset(d3, 'a[0][1]')
-        self.assertEqual(d3, { 'a': [ [ 1 ] ] })
-        
-    def test_find_no_exc(self):
-        d1 = { 'a': 'a', 'b': 1, 'c': { 'd': 'd', 'e': 2, 'more': { 'data': 'here' } } }
-        self.assertEqual(data.find_no_exc(d1, ''), d1)
-        self.assertEqual(data.find_no_exc(d1, 'a'), 'a')
-        self.assertEqual(data.find_no_exc(d1, 'c/e'), 2)
-        self.assertEqual(data.find_no_exc(d1, 'c/more/'), { 'data': 'here' })
-        self.assertEqual(data.find_no_exc(d1, 'c/more/data'), 'here')
-        self.assertEqual(data.find_no_exc(d1, 'c/f'), None)
-        self.assertEqual(data.find_no_exc(d1, 'f'), None)
-        self.assertEqual(data.find_no_exc(d1, 1), None)
-        self.assertEqual(data.find_no_exc(d1, 'more/data/here'), None)
-        self.assertEqual(data.find_no_exc(None, 1), None)
-        self.assertEqual(data.find_no_exc("123", ""), "123")
-        self.assertEqual(data.find_no_exc("123", ""), "123")
-        
-    def test_parse_value_str(self):
-        self.assertEqual(data.parse_value_str("1"), 1)
-        self.assertEqual(data.parse_value_str("true"), True)
-        self.assertEqual(data.parse_value_str("null"), None)
-        self.assertEqual(data.parse_value_str("1.1"), 1.1)
-        self.assertEqual(data.parse_value_str("[]"), [])
-        self.assertEqual(data.parse_value_str("[ 1, null, \"asdf\" ]"), [ 1, None, "asdf" ])
-        self.assertEqual(data.parse_value_str("{}"), {})
-        self.assertEqual(data.parse_value_str("{ \"a\": \"b\", \"c\": 1 }"), { 'a': 'b', 'c': 1 })
-        self.assertEqual(data.parse_value_str("[ a c"), "[ a c")
-
-        self.assertEqual(data.parse_value_str(1), None)
-
-
-if __name__ == '__main__':
-    #if not 'CONFIG_TESTDATA_PATH' in os.environ:
-    #    print("You need to set the environment variable CONFIG_TESTDATA_PATH to point to the directory containing the test data files")
-    #    exit(1)
-    unittest.main()
-
-

+ 0 - 66
src/lib/python/isc/cc/tests/message_test.py

@@ -1,66 +0,0 @@
-
-# Copyright (C) 2010  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-#
-# Tests for the functions in message.py
-#
-
-import unittest
-import isc.cc
-
-class MessageTest(unittest.TestCase):
-    def setUp(self):
-        self.msg1 = { "just": [ "an", "arbitrary", "structure" ] }
-        self.msg1_str = "{\"just\": [\"an\", \"arbitrary\", \"structure\"]}";
-        self.msg1_wire = self.msg1_str.encode()
-
-        self.msg2 = { "aaa": [ 1, True, False, None ] }
-        self.msg2_str = "{\"aaa\": [1, true, false, null]}";
-        self.msg2_wire = self.msg2_str.encode()
-
-        self.msg3 = { "aaa": [ 1, 1.1, True, False, "string\n" ] }
-        self.msg3_str = "{\"aaa\": [1, 1.1, true, false, \"string\n\" ]}";
-        self.msg3_wire = self.msg3_str.encode()
-
-        # Due to the inherent impreciseness of floating point values,
-        # we test this one separately (with AlmostEqual)
-        self.msg_float = 1.1
-        self.msg_float_str = "1.1";
-        self.msg_float_wire = self.msg_float_str.encode()
-
-    def test_encode_json(self):
-        self.assertEqual(self.msg1_wire, isc.cc.message.to_wire(self.msg1))
-        self.assertEqual(self.msg2_wire, isc.cc.message.to_wire(self.msg2))
-        self.assertAlmostEqual(float(self.msg_float_wire),
-                               float(isc.cc.message.to_wire(self.msg_float)))
-        self.assertRaises(TypeError, isc.cc.message.to_wire, NotImplemented)
-
-    def test_decode_json(self):
-        self.assertEqual(self.msg1, isc.cc.message.from_wire(self.msg1_wire))
-        self.assertEqual(self.msg2, isc.cc.message.from_wire(self.msg2_wire))
-        self.assertEqual(self.msg3, isc.cc.message.from_wire(self.msg3_wire))
-
-        self.assertRaises(AttributeError, isc.cc.message.from_wire, 1)
-        self.assertRaises(ValueError, isc.cc.message.from_wire, b'\x001')
-        self.assertRaises(ValueError, isc.cc.message.from_wire, b'')
-        self.assertRaises(ValueError, isc.cc.message.from_wire, b'{"a": ')
-        self.assertRaises(ValueError, isc.cc.message.from_wire, b'[ 1 ')
-        self.assertRaises(ValueError, isc.cc.message.from_wire, b']')
-
-if __name__ == '__main__':
-    unittest.main()
-
-

+ 0 - 37
src/lib/python/isc/cc/tests/sendcmd.py

@@ -1,37 +0,0 @@
-#!/usr/bin/python3
-
-# Copyright (C) 2010  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-import isc, sys
-
-cc = isc.cc.Session()
-if len(sys.argv) < 3:
-    sys.stderr.write('Usage: ' + sys.argv[0] + ' <channel> <command> [arg]\n')
-    sys.exit(1)
-
-channel = sys.argv[1]
-command = sys.argv[2]
-if len(sys.argv) >= 4:
-    argument = sys.argv[3]
-else:
-    argument = ""
-
-cmd = { "command": [ command, argument ] }
-
-cc.group_subscribe(channel)
-print("Sending:")
-print(cmd)
-cc.group_sendmsg(cmd, channel)

+ 0 - 465
src/lib/python/isc/cc/tests/session_test.py

@@ -1,465 +0,0 @@
-# Copyright (C) 2010  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-#
-# Tests for the Session class in isc.cc.session
-#
-
-import unittest
-import os
-import json
-from isc.cc.session import *
-
-# our fake socket, where we can read and insert messages
-class MySocket():
-    def __init__(self, family, type):
-        self.family = family
-        self.type = type
-        self.recvqueue = bytearray()
-        self.sendqueue = bytearray()
-        self._blocking = True
-        self.send_limit = None
-
-    def connect(self, to):
-        pass
-
-    def close(self):
-        pass
-
-    def setblocking(self, val):
-        self._blocking = val
-
-    def send(self, data):
-        # If the upper limit is specified, only "send" up to the specified
-        # limit
-        if self.send_limit is not None and len(data) > self.send_limit:
-            self.sendqueue.extend(data[0:self.send_limit])
-            return self.send_limit
-        else:
-            self.sendqueue.extend(data)
-            return len(data)
-
-    def readsent(self, length):
-        if length > len(self.sendqueue):
-            raise Exception("readsent(" + str(length) + ") called, but only " + str(len(self.sendqueue)) + " in queue")
-        result = self.sendqueue[:length]
-        del self.sendqueue[:length]
-        return result
-
-    def readsentmsg(self):
-        """return bytearray of the full message include length specifiers"""
-        result = bytearray()
-
-        length_buf = self.readsent(4)
-        result.extend(length_buf)
-        length = struct.unpack('>I', length_buf)[0]
-
-        header_length_buf = self.readsent(2)
-        header_length = struct.unpack('>H', header_length_buf)[0]
-        result.extend(header_length_buf)
-
-        data_length = length - 2 - header_length
-
-        result.extend(self.readsent(header_length))
-        result.extend(self.readsent(data_length))
-
-        return result
-
-    def readsentmsg_parsed(self):
-        length_buf = self.readsent(4)
-        length = struct.unpack('>I', length_buf)[0]
-        header_length_buf = self.readsent(2)
-        header_length = struct.unpack('>H', header_length_buf)[0]
-        data_length = length - 2 - header_length
-
-        env = json.loads(self.readsent(header_length).decode('utf-8'), strict=False)
-        if (data_length > 0):
-            msg = json.loads(self.readsent(data_length).decode('utf-8'), strict=False)
-        else:
-            msg = {}
-        return (env, msg)
-
-    def recv(self, length):
-        if len(self.recvqueue) == 0:
-            if self._blocking:
-                return bytes()
-            else:
-                raise socket.error(errno.EAGAIN, "Resource temporarily unavailable")
-        if length > len(self.recvqueue):
-            raise Exception("Buffer underrun in test, does the test provide the right data?")
-        result = self.recvqueue[:length]
-        del self.recvqueue[:length]
-        #print("[XX] returning: " + str(result))
-        #print("[XX] queue now: " + str(self.recvqueue))
-        return result
-
-    def addrecv(self, env, msg = None):
-        if type(env) == dict:
-            env = isc.cc.message.to_wire(env)
-        if type(msg) == dict:
-            msg = isc.cc.message.to_wire(msg)
-        length = 2 + len(env);
-        if msg:
-            length += len(msg)
-        self.recvqueue.extend(struct.pack("!I", length))
-        self.recvqueue.extend(struct.pack("!H", len(env)))
-        self.recvqueue.extend(env)
-        if msg:
-            self.recvqueue.extend(msg)
-
-    def settimeout(self, val):
-        pass
-
-    def gettimeout(self):
-        return 0
-
-    def set_send_limit(self, limit):
-        '''Specify the upper limit of the transmittable data at once.
-
-        By default, the send() method of this class "sends" all given data.
-        If this method is called and the its parameter is not None,
-        subsequent calls to send() will only transmit the specified amount
-        of data.  This can be used to emulate the situation where send()
-        on a real socket object results in partial write.
-        '''
-        self.send_limit = limit
-
-#
-# We subclass the Session class we're testing here, only
-# to override the __init__() method, which wants a socket,
-# and we need to use our fake socket
-class MySession(Session):
-    def __init__(self, port=9912, s=None):
-        self._socket = None
-        self._socket_timeout = 1
-        self._lname = None
-        self._recvbuffer = bytearray()
-        self._recv_len_size = 0
-        self._recv_size = 0
-        self._sequence = 1
-        self._closed = False
-        self._queue = []
-        self._lock = threading.RLock()
-
-        if s is not None:
-            self._socket = s
-        else:
-            try:
-                self._socket = MySocket(socket.AF_INET, socket.SOCK_STREAM)
-                self._socket.connect(tuple(['127.0.0.1', port]))
-                self._lname = "test_name"
-                # testing getlname here isn't useful, code removed
-            except socket.error as se:
-                raise SessionError(se)
-
-class testSession(unittest.TestCase):
-
-    def test_session_close(self):
-        sess = MySession()
-        self.assertEqual("test_name", sess.lname)
-        sess.close()
-        self.assertRaises(SessionError, sess.sendmsg, {}, {"hello": "a"})
-
-    def test_env_too_large(self):
-        sess = MySession()
-        largeenv = { "a": "b"*65535 }
-        self.assertRaises(ProtocolError, sess.sendmsg, largeenv, {"hello": "a"})
-
-    def test_session_sendmsg(self):
-        sess = MySession()
-        sess.sendmsg({}, {"hello": "a"})
-        sent = sess._socket.readsentmsg();
-        self.assertEqual(sent, b'\x00\x00\x00\x12\x00\x02{}{"hello": "a"}')
-        sess.close()
-        self.assertRaises(SessionError, sess.sendmsg, {}, {"hello": "a"})
-
-    def test_session_sendmsg2(self):
-        sess = MySession()
-        sess.sendmsg({'to': 'someone', 'reply': 1}, {"hello": "a"})
-        sent = sess._socket.readsentmsg();
-        #print(sent)
-        #self.assertRaises(SessionError, sess.sendmsg, {}, {"hello": "a"})
-
-    def test_session_sendmsg_shortwrite(self):
-        sess = MySession()
-        # Specify the upper limit of the size that can be transmitted at
-        # a single send() call on the faked socket (10 is an arbitrary choice,
-        # just reasonably small).
-        sess._socket.set_send_limit(10)
-        sess.sendmsg({'to': 'someone', 'reply': 1}, {"hello": "a"})
-        # The complete message should still have been transmitted in the end.
-        sent = sess._socket.readsentmsg();
-
-    def recv_and_compare(self, session, bytes, env, msg):
-        """Adds bytes to the recvqueue (which will be read by the
-           session object, and compare the resultinv env and msg to
-           the ones given."""
-        session._socket.addrecv(bytes)
-        s_env, s_msg = session.recvmsg(False)
-        self.assertEqual(env, s_env)
-        self.assertEqual(msg, s_msg)
-        # clear the recv buffer in case a malformed message left garbage
-        # (actually, shouldn't that case provide some error instead of
-        # None?)
-        session._socket.recvqueue = bytearray()
-
-    def test_session_recvmsg(self):
-        sess = MySession()
-        # {'to': "someone"}, {"hello": "a"}
-        #self.recv_and_compare(sess,
-        #                      b'\x00\x00\x00\x1f\x00\x10Skan\x02to(\x07someoneSkan\x05hello(\x01a',
-        #                      {'to': "someone"}, {"hello": "a"})
-
-        # 'malformed' messages
-        # shouldn't some of these raise exceptions?
-        #self.recv_and_compare(sess,
-        #                      b'\x00',
-        #                      None, None)
-        #self.recv_and_compare(sess,
-        #                      b'\x00\x00\x00\x10',
-        #                      None, None)
-        #self.recv_and_compare(sess,
-        #                      b'\x00\x00\x00\x02\x00\x00',
-        #                      None, None)
-        #self.recv_and_compare(sess,
-        #                      b'\x00\x00\x00\x02\x00\x02',
-        #                      None, None)
-        #self.recv_and_compare(sess,
-        #                      b'',
-        #                      None, None)
-
-        # need to clear
-        sess._socket.recvqueue = bytearray()
-
-        # 'queueing' system
-        # sending message {'to': 'someone', 'reply': 1}, {"hello": "a"}
-        #print("sending message {'to': 'someone', 'reply': 1}, {'hello': 'a'}")
-        # get no message without asking for a specific sequence number reply
-        self.assertFalse(sess.has_queued_msgs())
-        sess._socket.addrecv({'to': 'someone', 'reply': 1}, {"hello": "a"})
-        env, msg = sess.recvmsg(True)
-        self.assertEqual(None, env)
-        self.assertTrue(sess.has_queued_msgs())
-        env, msg = sess.recvmsg(True, 1)
-        self.assertEqual({'to': 'someone', 'reply': 1}, env)
-        self.assertEqual({"hello": "a"}, msg)
-        self.assertFalse(sess.has_queued_msgs())
-
-        # ask for a different sequence number reply (that doesn't exist)
-        # then ask for the one that is there
-        self.assertFalse(sess.has_queued_msgs())
-        sess._socket.addrecv({'to': 'someone', 'reply': 1}, {"hello": "a"})
-        env, msg = sess.recvmsg(True, 2)
-        self.assertEqual(None, env)
-        self.assertEqual(None, msg)
-        self.assertTrue(sess.has_queued_msgs())
-        env, msg = sess.recvmsg(True, 1)
-        self.assertEqual({'to': 'someone', 'reply': 1}, env)
-        self.assertEqual({"hello": "a"}, msg)
-        self.assertFalse(sess.has_queued_msgs())
-
-        # ask for a different sequence number reply (that doesn't exist)
-        # then ask for any message
-        self.assertFalse(sess.has_queued_msgs())
-        sess._socket.addrecv({'to': 'someone', 'reply': 1}, {"hello": "a"})
-        env, msg = sess.recvmsg(True, 2)
-        self.assertEqual(None, env)
-        self.assertEqual(None, msg)
-        self.assertTrue(sess.has_queued_msgs())
-        env, msg = sess.recvmsg(True, 1)
-        self.assertEqual({'to': 'someone', 'reply': 1}, env)
-        self.assertEqual({"hello": "a"}, msg)
-        self.assertFalse(sess.has_queued_msgs())
-
-        #print("sending message {'to': 'someone', 'reply': 1}, {'hello': 'a'}")
-
-        # ask for a different sequence number reply (that doesn't exist)
-        # send a new message, ask for specific message (get the first)
-        # then ask for any message (get the second)
-        self.assertFalse(sess.has_queued_msgs())
-        sess._socket.addrecv({'to': 'someone', 'reply': 1}, {'hello': 'a'})
-        env, msg = sess.recvmsg(True, 2)
-        self.assertEqual(None, env)
-        self.assertEqual(None, msg)
-        self.assertTrue(sess.has_queued_msgs())
-        sess._socket.addrecv({'to': 'someone' }, {'hello': 'b'})
-        env, msg = sess.recvmsg(True, 1)
-        self.assertEqual({'to': 'someone', 'reply': 1 }, env)
-        self.assertEqual({"hello": "a"}, msg)
-        self.assertFalse(sess.has_queued_msgs())
-        env, msg = sess.recvmsg(True)
-        self.assertEqual({'to': 'someone'}, env)
-        self.assertEqual({"hello": "b"}, msg)
-        self.assertFalse(sess.has_queued_msgs())
-
-        # send a message, then one with specific reply value
-        # ask for that specific message (get the second)
-        # then ask for any message (get the first)
-        self.assertFalse(sess.has_queued_msgs())
-        sess._socket.addrecv({'to': 'someone' }, {'hello': 'b'})
-        sess._socket.addrecv({'to': 'someone', 'reply': 1}, {'hello': 'a'})
-        env, msg = sess.recvmsg(True, 1)
-        self.assertEqual({'to': 'someone', 'reply': 1}, env)
-        self.assertEqual({"hello": "a"}, msg)
-        self.assertTrue(sess.has_queued_msgs())
-        env, msg = sess.recvmsg(True)
-        self.assertEqual({'to': 'someone'}, env)
-        self.assertEqual({"hello": "b"}, msg)
-        self.assertFalse(sess.has_queued_msgs())
-
-    def test_recv_bad_msg(self):
-        sess = MySession()
-        self.assertFalse(sess.has_queued_msgs())
-        sess._socket.addrecv({'to': 'someone' }, {'hello': 'b'})
-        sess._socket.addrecv({'to': 'someone', 'reply': 1}, {'hello': 'a'})
-        # mangle the bytes a bit
-        sess._socket.recvqueue[5] = sess._socket.recvqueue[5] - 2
-        sess._socket.recvqueue = sess._socket.recvqueue[:-2]
-        self.assertRaises(SessionError, sess.recvmsg, True, 1)
-
-    def test_next_sequence(self):
-        sess = MySession()
-        self.assertEqual(sess._sequence, 1)
-        self.assertEqual(sess._next_sequence(), 2)
-        self.assertEqual(sess._sequence, 2)
-        sess._sequence = 56175
-        self.assertEqual(sess._sequence, 56175)
-        self.assertEqual(sess._next_sequence(), 56176)
-        self.assertEqual(sess._sequence, 56176)
-
-    def test_group_subscribe(self):
-        sess = MySession()
-        sess.group_subscribe("mygroup")
-        sent = sess._socket.readsentmsg_parsed()
-        self.assertEqual(sent, ({"group": "mygroup", "type": "subscribe",
-                                 "instance": "*"}, {}))
-
-        sess.group_subscribe("mygroup")
-        sent = sess._socket.readsentmsg_parsed()
-        self.assertEqual(sent, ({"group": "mygroup", "type": "subscribe",
-                                 "instance": "*"}, {}))
-
-        sess.group_subscribe("mygroup", "my_instance")
-        sent = sess._socket.readsentmsg_parsed()
-        self.assertEqual(sent, ({"group": "mygroup", "type": "subscribe",
-                                 "instance": "my_instance"}, {}))
-
-    def test_group_unsubscribe(self):
-        sess = MySession()
-        sess.group_unsubscribe("mygroup")
-        sent = sess._socket.readsentmsg_parsed()
-        self.assertEqual(sent, ({"group": "mygroup", "type": "unsubscribe",
-                                 "instance": "*"}, {}))
-
-        sess.group_unsubscribe("mygroup")
-        sent = sess._socket.readsentmsg_parsed()
-        self.assertEqual(sent, ({"group": "mygroup", "type": "unsubscribe",
-                                 "instance": "*"}, {}))
-
-        sess.group_unsubscribe("mygroup", "my_instance")
-        sent = sess._socket.readsentmsg_parsed()
-        self.assertEqual(sent, ({"group": "mygroup", "type": "unsubscribe",
-                                 "instance": "my_instance"}, {}))
-
-    def test_group_sendmsg(self):
-        sess = MySession()
-        self.assertEqual(sess._sequence, 1)
-
-        msg = { "hello": "a" }
-
-        def check_sent(additional_headers, sequence):
-            sent = sess._socket.readsentmsg_parsed()
-            headers = dict({"from": "test_name",
-                            "seq": sequence,
-                            "to": "*",
-                            "type": "send"},
-                           **additional_headers)
-            self.assertEqual(sent, (headers, msg))
-            self.assertEqual(sess._sequence, sequence)
-
-        sess.group_sendmsg(msg, "my_group")
-        check_sent({"instance": "*", "group": "my_group",
-                    "want_answer": False}, 2)
-
-        sess.group_sendmsg(msg, "my_group", "my_instance")
-        check_sent({"instance": "my_instance", "group": "my_group",
-                    "want_answer": False}, 3)
-
-        sess.group_sendmsg(msg, "your_group", "your_instance")
-        check_sent({"instance": "your_instance", "group": "your_group",
-                    "want_answer": False}, 4)
-
-        # Test the optional want_answer parameter
-        sess.group_sendmsg(msg, "group", want_answer=True)
-        check_sent({"instance": "*", "group": "group", "want_answer": True}, 5)
-
-
-        sess.group_sendmsg(msg, "group", want_answer=False)
-        check_sent({"instance": "*", "group": "group", "want_answer": False},
-                   6)
-
-    def test_group_recvmsg(self):
-        # must this one do anything except not return messages with
-        # no header?
-        pass
-
-    def test_group_reply(self):
-        sess = MySession()
-        sess.group_reply({ 'from': 'me', 'group': 'our_group',
-                           'instance': 'other_instance', 'seq': 4},
-                         {"hello": "a"})
-        sent = sess._socket.readsentmsg_parsed()
-        self.assertEqual(sent, ({"from": "test_name", "seq": 2,
-                                 "to": "me", "instance": "other_instance",
-                                 "reply": 4, "group": "our_group",
-                                 "type": "send"},
-                                {"hello": "a"}))
-
-        sess.group_reply({ 'from': 'me', 'group': 'our_group',
-                           'instance': 'other_instance', 'seq': 9},
-                         {"hello": "a"})
-        sent = sess._socket.readsentmsg_parsed()
-        self.assertEqual(sent, ({"from": "test_name", "seq": 3,
-                                 "to": "me", "instance": "other_instance",
-                                 "reply": 9, "group": "our_group",
-                                 "type": "send"},
-                                {"hello": "a"}))
-
-    def test_timeout(self):
-        if "BIND10_TEST_SOCKET_FILE" not in os.environ:
-            self.assertEqual("", "This test can only run if the value BIND10_TEST_SOCKET_FILE is set in the environment")
-        TEST_SOCKET_FILE = os.environ["BIND10_TEST_SOCKET_FILE"]
-
-        # create a unix domain socket to pass into the session
-        s1 = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
-        if os.path.exists(TEST_SOCKET_FILE):
-            os.remove(TEST_SOCKET_FILE)
-        s1.bind(TEST_SOCKET_FILE)
-
-        try:
-            s1.listen(1)
-
-            s2 = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
-            s2.connect(TEST_SOCKET_FILE)
-            sess = MySession(1, s2)
-            # set timeout to 100 msec, so test does not take too long
-            sess.set_timeout(100)
-            self.assertRaises(SessionTimeout, sess.group_recvmsg, False)
-        finally:
-            os.remove(TEST_SOCKET_FILE)
-
-if __name__ == "__main__":
-    unittest.main()
-
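
For reference, the caller-side pattern the removed tests above exercise: group_sendmsg() returns the sequence number assigned to the outgoing message, and group_recvmsg() can wait for the reply that carries that sequence in its 'reply' header while unrelated messages stay queued. A minimal sketch, assuming the pre-removal isc.cc API shown in this diff, a running MsgQ daemon, and some other module that actually answers; the group name and payload are illustrative:

    import isc.cc

    sess = isc.cc.Session()                    # connects to the running MsgQ daemon
    sess.group_subscribe("g1")
    seq = sess.group_sendmsg({"data": "foo"}, "g1", want_answer=True)
    # Block until the message whose 'reply' header equals seq arrives; other
    # incoming messages stay queued for later recvmsg()/group_recvmsg() calls.
    reply, env = sess.group_recvmsg(nonblock=False, seq=seq)
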

+ 0 - 75
src/lib/python/isc/cc/tests/test_session.py

@@ -1,75 +0,0 @@
-# Copyright (C) 2010  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-import isc
-
-import time
-import pprint
-import unittest
-
-#
-# This test requires the MsgQ daemon to be running.  We are doing nasty
-# tricks here, so we insert sleeps to give messages time to travel from
-# this process to the MsgQ and back to this process.
-#
-
-class TestCCWireEncoding(unittest.TestCase):
-    def setUp(self):
-        self.s1 = isc.cc.Session()
-        self.s2 = isc.cc.Session()
-
-    def tearDown(self):
-        self.s1.close()
-        self.s2.close()
-
-    def test_lname(self):
-        self.assertTrue(self.s1.lname)
-        self.assertTrue(self.s2.lname)
-
-    def test_subscribe(self):
-        self.s1.group_subscribe("g1", "i1")
-        self.s2.group_subscribe("g1", "i1")
-        time.sleep(0.5)
-        outmsg = { "data" : "foo" }
-        self.s1.group_sendmsg(outmsg, "g1", "i1")
-        time.sleep(0.5)
-        msg, env = self.s2.group_recvmsg()
-        self.assertEqual(env["from"], self.s1.lname)
-
-    def test_unsubscribe(self):
-        self.s1.group_subscribe("g1", "i1")
-        self.s2.group_subscribe("g1", "i1")
-        time.sleep(0.5)
-        self.s2.group_unsubscribe("g1", "i1")
-        outmsg = { "data" : "foo" }
-        self.s1.group_sendmsg(outmsg, "g1", "i1")
-        time.sleep(0.5)
-        msg, env = self.s2.group_recvmsg()
-        self.assertFalse(env)
-
-    def test_directed_recipient(self):
-        self.s1.group_subscribe("g1", "i1")
-        time.sleep(0.5)
-        outmsg = { "data" : "foo" }
-        self.s1.group_sendmsg(outmsg, "g4", "i4", self.s2.lname)
-        time.sleep(0.5)
-        msg, env = self.s2.group_recvmsg()
-        self.assertEqual(env["from"], self.s1.lname)
-        self.assertEqual(env["to"], self.s2.lname)
-        self.assertEqual(env["group"], "g4")
-        self.assertEqual(env["instance"], "i4")
-
-if __name__ == '__main__':
-    unittest.main()
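
The directed-recipient test above relies on MsgQ assigning each session a unique local name (lname) and delivering messages addressed to an lname even when the recipient is not subscribed to the group in the envelope. A minimal sketch of that addressing, again assuming the pre-removal API and a running MsgQ daemon:

    import isc.cc

    s1 = isc.cc.Session()
    s2 = isc.cc.Session()
    # Address the message directly to s2's lname; the group/instance pair is
    # carried in the envelope, but s2 needs no subscription to receive it.
    s1.group_sendmsg({"data": "foo"}, "g4", "i4", s2.lname)
    msg, env = s2.group_recvmsg(False)         # blocking receive
    # env["from"] == s1.lname, env["to"] == s2.lname,
    # env["group"] == "g4", env["instance"] == "i4"
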

+ 0 - 32
src/lib/python/isc/config/Makefile.am

@@ -1,32 +0,0 @@
-SUBDIRS = . tests
-
-python_PYTHON = __init__.py ccsession.py cfgmgr.py config_data.py module_spec.py
-pythondir = $(pyexecdir)/isc/config
-
-BUILT_SOURCES = $(PYTHON_LOGMSGPKG_DIR)/work/cfgmgr_messages.py
-BUILT_SOURCES += $(PYTHON_LOGMSGPKG_DIR)/work/config_messages.py
-nodist_pylogmessage_PYTHON = $(PYTHON_LOGMSGPKG_DIR)/work/cfgmgr_messages.py
-nodist_pylogmessage_PYTHON += $(PYTHON_LOGMSGPKG_DIR)/work/config_messages.py
-pylogmessagedir = $(pyexecdir)/isc/log_messages/
-
-CLEANFILES = $(PYTHON_LOGMSGPKG_DIR)/work/cfgmgr_messages.py
-CLEANFILES += $(PYTHON_LOGMSGPKG_DIR)/work/cfgmgr_messages.pyc
-CLEANFILES += $(PYTHON_LOGMSGPKG_DIR)/work/config_messages.py
-CLEANFILES += $(PYTHON_LOGMSGPKG_DIR)/work/config_messages.pyc
-CLEANFILES += $(PYTHON_LOGMSGPKG_DIR)/work/config_messages.pyo
-
-CLEANDIRS = __pycache__
-
-EXTRA_DIST = cfgmgr_messages.mes config_messages.mes
-
-# Define rule to build logging source files from message file
-$(PYTHON_LOGMSGPKG_DIR)/work/cfgmgr_messages.py : cfgmgr_messages.mes
-	$(top_builddir)/src/lib/log/compiler/message \
-	-d $(PYTHON_LOGMSGPKG_DIR)/work -p $(srcdir)/cfgmgr_messages.mes
-
-$(PYTHON_LOGMSGPKG_DIR)/work/config_messages.py : config_messages.mes
-	$(top_builddir)/src/lib/log/compiler/message \
-	-d $(PYTHON_LOGMSGPKG_DIR)/work -p $(srcdir)/config_messages.mes
-
-clean-local:
-	rm -rf $(CLEANDIRS)

+ 0 - 3
src/lib/python/isc/config/__init__.py

@@ -1,3 +0,0 @@
-from isc.config.ccsession import *
-from isc.config.config_data import *
-from isc.config.module_spec import *

+ 0 - 869
src/lib/python/isc/config/ccsession.py

@@ -1,869 +0,0 @@
-# Copyright (C) 2009  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-#
-# Client-side functionality for configuration and commands
-#
-# It keeps a cc-channel session with the configuration manager daemon,
-# and handles configuration updates and direct commands
-
-# Modeled after ccsession.h/.cc; protocol changes here need to be
-# made there as well.
-"""Classes and functions for handling configuration and commands
-
-   This module provides the ModuleCCSession and UIModuleCCSession
-   classes, as well as a set of utility functions to create and parse
-   messages related to commands and configuration
-
-   Modules should use the ModuleCCSession class to connect to the
-   configuration manager, and receive updates and commands from
-   other modules.
-
-   Configuration user interfaces should use the UIModuleCCSession
-   to connect to b10-cmdctl, and receive and send configuration and
-   commands through that to the configuration manager.
-"""
-
-from isc.cc import Session
-from isc.cc.proto_defs import *
-from isc.config.config_data import ConfigData, MultiConfigData, BIND10_CONFIG_DATA_VERSION
-import isc.config.module_spec
-import isc
-from isc.util.file import path_search
-import bind10_config
-from isc.log import log_config_update
-import json
-from isc.log_messages.config_messages import *
-
-logger = isc.log.Logger("config")
-
-class ModuleCCSessionError(Exception): pass
-
-class RPCError(ModuleCCSessionError):
-    """
-    An exception raised by rpc_call in case the remote side reports
-    an error. It can be used to distinguish remote errors from protocol errors.
-    Also, it holds the code as well as the error message.
-    """
-    def __init__(self, code, message):
-        ModuleCCSessionError.__init__(self, message)
-        self.__code = code
-
-    def code(self):
-        """
-        The code as sent over the CC.
-        """
-        return self.__code
-
-class RPCRecipientMissing(RPCError):
-    """
-    Special version of the RPCError, for cases the recipient of the call
-    isn't connected to the bus. The code is always
-    isc.cc.proto_defs.CC_REPLY_NO_RECPT.
-    """
-    def __init__(self, message):
-        RPCError.__init__(self, CC_REPLY_NO_RECPT, message)
-
-def parse_answer(msg):
-    """Returns a tuple (rcode, value), where value depends on the
-       command that was called. If rcode != 0, value is a string
-       containing an error message"""
-    if type(msg) != dict:
-        raise ModuleCCSessionError("Answer message is not a dict: " + str(msg))
-    if CC_PAYLOAD_RESULT not in msg:
-        raise ModuleCCSessionError("answer message does not contain 'result' element")
-    elif type(msg[CC_PAYLOAD_RESULT]) != list:
-        raise ModuleCCSessionError("wrong result type in answer message")
-    elif len(msg[CC_PAYLOAD_RESULT]) < 1:
-        raise ModuleCCSessionError("empty result list in answer message")
-    elif type(msg[CC_PAYLOAD_RESULT][0]) != int:
-        raise ModuleCCSessionError("wrong rcode type in answer message")
-    else:
-        if len(msg[CC_PAYLOAD_RESULT]) > 1:
-            if (msg[CC_PAYLOAD_RESULT][0] != CC_REPLY_SUCCESS and
-                type(msg[CC_PAYLOAD_RESULT][1]) != str):
-                raise ModuleCCSessionError("rcode in answer message is non-zero, value is not a string")
-            return msg[CC_PAYLOAD_RESULT][0], msg[CC_PAYLOAD_RESULT][1]
-        else:
-            return msg[CC_PAYLOAD_RESULT][0], None
-
-def create_answer(rcode, arg = None):
-    """Creates an answer packet for config&commands. rcode must be an
-       integer. If rcode == 0, arg is an optional value that depends
-       on what the command or option was. If rcode != 0, arg must be
-       a string containing an error message"""
-    if type(rcode) != int:
-        raise ModuleCCSessionError("rcode in create_answer() must be an integer")
-    if rcode != CC_REPLY_SUCCESS and type(arg) != str:
-        raise ModuleCCSessionError("arg in create_answer for rcode != 0 must be a string describing the error")
-    if arg != None:
-        return { CC_PAYLOAD_RESULT: [ rcode, arg ] }
-    else:
-        return { CC_PAYLOAD_RESULT: [ rcode ] }
-
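
For reference, a round trip through the two helpers above, assuming the pre-removal import layout (isc.config.ccsession) and that CC_REPLY_SUCCESS is the zero rcode, as the docstrings imply:

    from isc.config.ccsession import create_answer, parse_answer

    ok = create_answer(0, {"pid": 1234})       # success answer carrying a value
    rcode, value = parse_answer(ok)            # rcode == 0, value == {"pid": 1234}

    err = create_answer(1, "no such module")   # non-zero rcode requires a string message
    rcode, value = parse_answer(err)           # rcode == 1, value == "no such module"
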
-# 'fixed' commands
-# Fixed names for command and configuration messages
-COMMAND_CONFIG_UPDATE = "config_update"
-COMMAND_MODULE_SPECIFICATION_UPDATE = "module_specification_update"
-
-COMMAND_GET_COMMANDS_SPEC = "get_commands_spec"
-COMMAND_GET_STATISTICS_SPEC = "get_statistics_spec"
-COMMAND_GET_CONFIG = "get_config"
-COMMAND_SET_CONFIG = "set_config"
-COMMAND_GET_MODULE_SPEC = "get_module_spec"
-COMMAND_MODULE_SPEC = "module_spec"
-COMMAND_SHUTDOWN = "shutdown"
-COMMAND_MODULE_STOPPING = "stopping"
-
-def parse_command(msg):
-    """Parses what may be a command message. If it looks like one,
-       the function returns (command, value) where command is a
-       string. If it is not, this function returns None, None"""
-    if type(msg) == dict and len(msg.items()) == 1:
-        cmd, value = msg.popitem()
-        if cmd == CC_PAYLOAD_COMMAND and type(value) == list:
-            if len(value) == 1 and type(value[0]) == str:
-                return value[0], None
-            elif len(value) > 1 and type(value[0]) == str:
-                return value[0], value[1]
-    return None, None
-
-def create_command(command_name, params = None):
-    """Creates a module command message with the given command name (as
-       specified in the module's specification, and an optional params
-       object"""
-    # TODO: validate_command with spec
-    if type(command_name) != str:
-        raise ModuleCCSessionError("command in create_command() not a string")
-    cmd = [ command_name ]
-    if params:
-        cmd.append(params)
-    msg = { CC_PAYLOAD_COMMAND: cmd }
-    return msg
-
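
And the command-side counterpart, under the same assumptions; note that parse_command() consumes the command entry from the dict it is given, since it uses popitem():

    from isc.config.ccsession import create_command, parse_command

    msg = create_command("shutdown", {"pid": 42})
    cmd, arg = parse_command(msg)              # cmd == "shutdown", arg == {"pid": 42}

    cmd, arg = parse_command({"not": "a command"})   # (None, None): not a command message
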
-def default_logconfig_handler(new_config, config_data):
-    errors = []
-
-    if config_data.get_module_spec().validate_config(False, new_config, errors):
-        isc.log.log_config_update(json.dumps(new_config),
-            json.dumps(config_data.get_module_spec().get_full_spec()))
-    else:
-        logger.error(CONFIG_LOG_CONFIG_ERRORS, errors)
-
-class ModuleCCSession(ConfigData):
-    """This class maintains a connection to the command channel, as
-       well as configuration options for modules. The module provides
-       a specification file that contains the module name, configuration
-       options, and commands. It also gives the ModuleCCSession two callback
-       functions, one to call when there is a direct command to the
-       module, and one to update the configuration at run time. These
-       callbacks are called when 'check_command' is called on the
-       ModuleCCSession"""
-
-    def __init__(self, spec_file_name, config_handler, command_handler,
-                 cc_session=None, handle_logging_config=True,
-                 socket_file = None):
-        """Initialize a ModuleCCSession. This does *NOT* send the
-           specification and request the configuration yet. Use start()
-           for that once the ModuleCCSession has been initialized.
-
-           spec_file_name is the path to the specification file.
-
-           config_handler and command_handler are callback functions,
-           see set_config_handler and set_command_handler for more
-           information on their signatures.
-
-           cc_session can be used to pass in an existing CCSession,
-           if it is None, one will be set up. This is mainly intended
-           for testing purposes.
-
-           handle_logging_config: if True, the module session will
-           automatically handle logging configuration for the module;
-           it will read the system-wide Logging configuration and call
-           the logger manager to apply it. It will also inform the
-           logger manager when the logging configuration gets updated.
-           The module does not need to do anything except initializing
-           its loggers, and provide log messages. Defaults to true.
-
-           socket_file: If cc_session was none, this optional argument
-           specifies which socket file to use to connect to msgq. It
-           will be overridden by the environment variable
-           MSGQ_SOCKET_FILE. If None, and no environment variable is
-           set, it will use the system default.
-        """
-        module_spec = isc.config.module_spec_from_file(spec_file_name)
-        ConfigData.__init__(self, module_spec)
-
-        self._module_name = module_spec.get_module_name()
-
-        self.set_config_handler(config_handler)
-        self.set_command_handler(command_handler)
-
-        if not cc_session:
-            self._session = Session(socket_file)
-        else:
-            self._session = cc_session
-        self._session.group_subscribe(self._module_name, CC_INSTANCE_WILDCARD)
-
-        self._remote_module_configs = {}
-        self._remote_module_callbacks = {}
-
-        self._notification_callbacks = {}
-        self._last_notif_id = 0
-
-        if handle_logging_config:
-            self.add_remote_config(path_search('logging.spec', bind10_config.PLUGIN_PATHS),
-                                   default_logconfig_handler)
-
-    def __del__(self):
-        # If the CC Session object has already been closed, return
-        # immediately.
-        if self._session._closed: return
-        self._session.group_unsubscribe(self._module_name,
-                                        CC_INSTANCE_WILDCARD)
-        for module_name in self._remote_module_configs:
-            self._session.group_unsubscribe(module_name)
-
-    def start(self):
-        """Send the specification for this module to the configuration
-           manager, and request the current non-default configuration.
-           The config_handler will be called with that configuration"""
-        self.__send_spec()
-        self.__request_config()
-
-    def send_stopping(self):
-        """Sends a 'stopping' message to the configuration manager. This
-           message is just an FYI, and no response is expected. Any errors
-           when sending this message (for instance if the msgq session has
-           previously been closed) are logged, but ignored."""
-        # create_command could raise an exception as well, but except for
-        # out of memory related errors, these should all be programming
-        # failures and are not caught
-        msg = create_command(COMMAND_MODULE_STOPPING,
-                             self.get_module_spec().get_full_spec())
-        try:
-            self._session.group_sendmsg(msg, "ConfigManager")
-        except Exception as se:
-            # If the session was previously closed, obviously trying to send
-            # a message fails. (TODO: check if session is open so we can
-            # error on real problems?)
-            logger.error(CONFIG_SESSION_STOPPING_FAILED, se)
-
-    def get_socket(self):
-        """Returns the socket from the command channel session. This
-           should *only* be used for select() loops to see if there
-           is anything on the channel. If that loop is not completely
-           time-critical, it is strongly recommended to only use
-           check_command(), and not look at the socket at all."""
-        return self._session._socket
-
-    def close(self):
-        """Close the session to the command channel"""
-        self._session.close()
-
-    def check_command(self, nonblock=True):
-        """Check whether there is a command or configuration update on
-           the channel. This function does a read on the cc session, and
-           returns nothing.
-           It calls check_command_without_recvmsg()
-           to parse the received message.
-
-           If nonblock is True, it just checks if there's a command
-           and does nothing if there isn't. If nonblock is False, it
-           waits until one arrives. It temporarily sets the timeout to infinity,
-           because a command may not arrive for an arbitrarily long time."""
-        timeout_orig = self._session.get_timeout()
-        self._session.set_timeout(0)
-        try:
-            msg, env = self._session.group_recvmsg(nonblock)
-        finally:
-            self._session.set_timeout(timeout_orig)
-        self.check_command_without_recvmsg(msg, env)
-
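
A minimal event-loop sketch tying together get_socket() and check_command(); the spec file path and the trivial handlers are placeholders, and a real module would actually apply the configuration it receives:

    import select
    from isc.config.ccsession import ModuleCCSession, create_answer

    def config_handler(new_config):
        return create_answer(0)                # accept the new configuration

    def command_handler(cmd, args):
        return create_answer(0)

    cc = ModuleCCSession("/path/to/mymodule.spec", config_handler, command_handler)
    cc.start()
    while True:
        # Sleep until something arrives on the command channel, then let
        # check_command() dispatch it to the handlers above.
        select.select([cc.get_socket()], [], [])
        cc.check_command(nonblock=True)
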
-    def check_command_without_recvmsg(self, msg, env):
-        """Parse the given message to see if there is a command or a
-           configuration update. Calls the corresponding handler
-           functions if present. Responds on the channel if the
-           handler returns a message."""
-        if msg is None:
-            return
-        if CC_PAYLOAD_NOTIFICATION in msg:
-            group_s = env[CC_HEADER_GROUP].split('/', 1)
-            # What to do with these bogus inputs? We just ignore them for now.
-            if len(group_s) != 2:
-                return
-            [prefix, group] = group_s
-            if prefix + '/' != CC_GROUP_NOTIFICATION_PREFIX:
-                return
-            # Now, get the callbacks and call one by one
-            callbacks = self._notification_callbacks.get(group, {})
-            event = msg[CC_PAYLOAD_NOTIFICATION][0]
-            params = None
-            if len(msg[CC_PAYLOAD_NOTIFICATION]) > 1:
-                params = msg[CC_PAYLOAD_NOTIFICATION][1]
-            for key in sorted(callbacks.keys()):
-                callbacks[key](event, params)
-        elif not CC_PAYLOAD_RESULT in msg:
-            # should we default to an answer? success-by-default? unhandled
-            # error?
-            answer = None
-            try:
-                module_name = env[CC_HEADER_GROUP]
-                cmd, arg = isc.config.ccsession.parse_command(msg)
-                if cmd == COMMAND_CONFIG_UPDATE:
-                    new_config = arg
-                    # If the target channel was not this module
-                    # it might be in the remote_module_configs
-                    if module_name != self._module_name:
-                        if module_name in self._remote_module_configs:
-                            # no checking for validity, that's up to the
-                            # module itself.
-                            newc = self._remote_module_configs[module_name].get_local_config()
-                            isc.cc.data.merge(newc, new_config)
-                            self._remote_module_configs[module_name].set_local_config(newc)
-                            if self._remote_module_callbacks[module_name] != None:
-                                self._remote_module_callbacks[module_name](new_config,
-                                                                           self._remote_module_configs[module_name])
-                        # For other modules, we're not supposed to answer
-                        return
-
-                    # ok, so apparently this update is for us.
-                    errors = []
-                    if not self._config_handler:
-                        answer = create_answer(2, self._module_name + " has no config handler")
-                    elif not self.get_module_spec().validate_config(False, new_config, errors):
-                        answer = create_answer(1, ", ".join(errors))
-                    else:
-                        isc.cc.data.remove_identical(new_config, self.get_local_config())
-                        answer = self._config_handler(new_config)
-                        rcode, val = parse_answer(answer)
-                        if rcode == CC_REPLY_SUCCESS:
-                            newc = self.get_local_config()
-                            isc.cc.data.merge(newc, new_config)
-                            self.set_local_config(newc)
-                else:
-                    # ignore commands for 'remote' modules
-                    if module_name == self._module_name:
-                        if self._command_handler:
-                            answer = self._command_handler(cmd, arg)
-                        else:
-                            answer = create_answer(2, self._module_name + " has no command handler")
-            except Exception as exc:
-                answer = create_answer(1, str(exc))
-            if answer:
-                self._session.group_reply(env, answer)
-
-    def set_config_handler(self, config_handler):
-        """Set the config handler for this module. The handler is a
-           function that takes the full configuration and handles it.
-           It should return an answer created with create_answer()"""
-        self._config_handler = config_handler
-        # should we run this right now since we've changed the handler?
-
-    def set_command_handler(self, command_handler):
-        """Set the command handler for this module. The handler is a
-           function that takes a command as defined in the .spec file
-           and returns an answer created with create_answer()"""
-        self._command_handler = command_handler
-
-    def _add_remote_config_internal(self, module_spec,
-                                    config_update_callback=None):
-        """The guts of add_remote_config and add_remote_config_by_name"""
-        module_cfg = ConfigData(module_spec)
-        module_name = module_spec.get_module_name()
-
-        self._session.group_subscribe(module_name)
-
-        # Get the current config for that module now
-        seq = self._session.group_sendmsg(create_command(COMMAND_GET_CONFIG, { "module_name": module_name }), "ConfigManager")
-
-        try:
-            answer, _ = self._session.group_recvmsg(False, seq)
-        except isc.cc.SessionTimeout:
-            raise ModuleCCSessionError("No answer from ConfigManager when "
-                                       "asking about Remote module " +
-                                       module_name)
-        call_callback = False
-        if answer:
-            rcode, value = parse_answer(answer)
-            if rcode == 0:
-                if value != None:
-                    if module_spec.validate_config(False, value):
-                        module_cfg.set_local_config(value)
-                        call_callback = True
-                    else:
-                        raise ModuleCCSessionError("Bad config data for " +
-                                                   module_name + ": " +
-                                                   str(value))
-            else:
-                raise ModuleCCSessionError("Failure requesting remote " +
-                                           "configuration data for " +
-                                           module_name)
-
-        # all done, add it
-        self._remote_module_configs[module_name] = module_cfg
-        self._remote_module_callbacks[module_name] = config_update_callback
-        if call_callback and config_update_callback is not None:
-            config_update_callback(value, module_cfg)
-
-    def add_remote_config_by_name(self, module_name,
-                                  config_update_callback=None):
-        """
-        This does the same as add_remote_config, but you provide the module name
-        instead of the name of the spec file.
-        """
-        seq = self._session.group_sendmsg(create_command(COMMAND_GET_MODULE_SPEC,
-                                                         { "module_name":
-                                                         module_name }),
-                                          "ConfigManager")
-        try:
-            answer, env = self._session.group_recvmsg(False, seq)
-        except isc.cc.SessionTimeout:
-            raise ModuleCCSessionError("No answer from ConfigManager when " +
-                                       "asking about for spec of Remote " +
-                                       "module " + module_name)
-        if answer:
-            rcode, value = parse_answer(answer)
-            if rcode == 0:
-                module_spec = isc.config.module_spec.ModuleSpec(value)
-                if module_spec.get_module_name() != module_name:
-                    raise ModuleCCSessionError("Module name mismatch: " +
-                                               module_name + " and " +
-                                               module_spec.get_module_name())
-                self._add_remote_config_internal(module_spec,
-                                                 config_update_callback)
-            else:
-                raise ModuleCCSessionError("Error code " + str(rcode) +
-                                           "when asking for module spec of " +
-                                           module_name)
-        else:
-            raise ModuleCCSessionError("No answer when asking for module " +
-                                       "spec of " + module_name)
-        # Just to be consistent with the add_remote_config
-        return module_name
-
-    def add_remote_config(self, spec_file_name, config_update_callback=None):
-        """Gives access to the configuration of a different module.
-           These remote module options can at this moment only be
-           accessed through get_remote_config_value(). This function
-           also subscribes to the channel of the remote module name
-           to receive the relevant updates. It is not possible to
-           specify your own handler for this right now, but you can
-           specify a callback that is called after the change happened.
-           start() must have been called on this CCSession
-           prior to the call to this method.
-           Returns the name of the module."""
-        module_spec = isc.config.module_spec_from_file(spec_file_name)
-        self._add_remote_config_internal(module_spec, config_update_callback)
-        return module_spec.get_module_name()
-
-    def remove_remote_config(self, module_name):
-        """Removes the remote configuration access for this module"""
-        if module_name in self._remote_module_configs:
-            self._session.group_unsubscribe(module_name)
-            del self._remote_module_configs[module_name]
-            del self._remote_module_callbacks[module_name]
-
-    def get_remote_config_value(self, module_name, identifier):
-        """Returns the current setting for the given identifier at the
-           given module. If the module has not been added with
-           add_remote_config, a ModuleCCSessionError is raised"""
-        if module_name in self._remote_module_configs:
-            return self._remote_module_configs[module_name].get_value(identifier)
-        else:
-            raise ModuleCCSessionError("Remote module " + module_name +
-                                       " not found")
-
-    def __send_spec(self):
-        """Sends the data specification to the configuration manager"""
-        msg = create_command(COMMAND_MODULE_SPEC, self.get_module_spec().get_full_spec())
-        seq = self._session.group_sendmsg(msg, "ConfigManager")
-        try:
-            answer, env = self._session.group_recvmsg(False, seq)
-        except isc.cc.SessionTimeout:
-            # TODO: log an error?
-            pass
-
-    def __request_config(self):
-        """Asks the configuration manager for the current configuration, and call the config handler if set.
-           Raises a ModuleCCSessionError if there is no answer from the configuration manager"""
-        seq = self._session.group_sendmsg(create_command(COMMAND_GET_CONFIG, { "module_name": self._module_name }), "ConfigManager")
-        try:
-            answer, env = self._session.group_recvmsg(False, seq)
-            if answer:
-                rcode, value = parse_answer(answer)
-                if rcode == 0:
-                    errors = []
-                    if value != None:
-                        if self.get_module_spec().validate_config(False,
-                                                                  value,
-                                                                  errors):
-                            self.set_local_config(value)
-                            if self._config_handler:
-                                self._config_handler(value)
-                        else:
-                            raise ModuleCCSessionError(
-                                "Wrong data in configuration: " +
-                                " ".join(errors))
-                else:
-                    logger.error(CONFIG_GET_FAILED, value)
-            else:
-                raise ModuleCCSessionError("No answer from configuration manager")
-        except isc.cc.SessionTimeout:
-            raise ModuleCCSessionError("CC Session timeout waiting for configuration manager")
-
-    def rpc_call(self, command, group, instance=CC_INSTANCE_WILDCARD,
-                 to=CC_TO_WILDCARD, params=None):
-        """
-        Create a command with the given name and parameters. Send it to a
-        recipient, wait for the answer and parse it.
-
-        This is a wrapper around the group_sendmsg and group_recvmsg on the CC
-        session. It exists mostly for convenience.
-
-        Params:
-        - command: Name of the command to call on the remote side.
-        - group, instance, to: Address specification of the recipient.
-        - params: Parameters to pass to the command (a single object, typically a dict).
-
-        Return: The return value of the remote call (just the value, no status
-          code or anything). May be None.
-
-        Raise:
-        - RPCRecipientMissing if the given recipient doesn't exist.
-        - RPCError if the other side sent an error response. The error string
-          is in the exception.
-        - ModuleCCSessionError in case of protocol errors, like malformed
-          answer.
-        """
-        cmd = create_command(command, params)
-        seq = self._session.group_sendmsg(cmd, group, instance=instance,
-                                          to=to, want_answer=True)
-        # For non-blocking, we'll have rpc_call_async (once the nonblock
-        # actually works)
-        reply, rheaders = self._session.group_recvmsg(nonblock=False, seq=seq)
-        code, value = parse_answer(reply)
-        if code == CC_REPLY_NO_RECPT:
-            raise RPCRecipientMissing(value)
-        elif code != CC_REPLY_SUCCESS:
-            raise RPCError(code, value)
-        return value
-
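
Building on the ModuleCCSession sketch further above, a sketch of this convenience wrapper; the command, group, and parameter names are illustrative:

    from isc.config.ccsession import RPCError, RPCRecipientMissing

    # cc is a started ModuleCCSession, as in the earlier sketch
    try:
        result = cc.rpc_call("some_command", "SomeModule", params={"arg": 1})
    except RPCRecipientMissing:
        result = None                          # the addressed module is not on the bus
    except RPCError as exc:
        result = None                          # remote side answered with rcode exc.code()
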
-    def notify(self, notification_group, event_name, params=None):
-        """
-        Send a notification to subscribed users.
-
-        Send a notification message to all users subscribed to the given
-        notification group.
-
-        This method does not block.
-
-        See docs/design/ipc-high.txt for details about notifications
-        and the format of messages sent.
-
-        Throws:
-        - CCSessionError: for low-level communication errors.
-        Params:
-        - notification_group (string): This parameter (indirectly) signifies
-          what users should receive the notification. Only users that
-          subscribed to notifications on the same group receive it.
-        - event_name (string): The name of the event to notify about (for
-          example `new_group_member`).
-        - params: Other parameters that describe the event. This might be, for
-          example, the ID of the new member and the name of the group. This can
-          be any data that can be sent over the isc.cc.Session, but it is
-          common for it to be a dict.
-        Returns: Nothing
-        """
-        notification = [event_name]
-        if params is not None:
-            notification.append(params)
-        self._session.group_sendmsg({CC_PAYLOAD_NOTIFICATION: notification},
-                                    CC_GROUP_NOTIFICATION_PREFIX +
-                                    notification_group,
-                                    instance=CC_INSTANCE_WILDCARD,
-                                    to=CC_TO_WILDCARD,
-                                    want_answer=False)
-
-    def subscribe_notification(self, notification_group, callback):
-        """
-        Subscribe to receive notifications in the given notification group. When a
-        notification comes to the group, the callback is called with two
-        parameters, the name of the event (the value of `event_name` parameter
-        passed to `notify`) and the parameters of the event (the value
-        of `params` passed to `notify`).
-
-        This is a fast operation (there may be communication with the message
-        queue daemon, but it does not wait for any remote process).
-
-        The callback may get called multiple times (once for each notification).
-        Multiple callbacks may be subscribed to the same notification group by
-        calling this method repeatedly; they are called in the order of
-        registration when a notification comes.
-
-        Throws:
-        - CCSessionError: for low-level communication errors.
-        Params:
-        - notification_group (string): Notification group to subscribe to.
-          Notification with the same value of the same parameter of `notify`
-          will be received.
-        - callback (callable): The callback to be called whenever the
-          notification comes.
-
-          The callback should not raise exceptions; such exceptions are
-          likely to propagate through the loop and terminate the program.
-        Returns: Opaque id of the subscription. It can be used to cancel
-          the subscription by unsubscribe_notification.
-        """
-        self._last_notif_id += 1
-        my_id = self._last_notif_id
-        if notification_group in self._notification_callbacks:
-            self._notification_callbacks[notification_group][my_id] = callback
-        else:
-            self._session.group_subscribe(CC_GROUP_NOTIFICATION_PREFIX +
-                                          notification_group)
-            self._notification_callbacks[notification_group] = \
-                { my_id: callback }
-        return (notification_group, my_id)
-
-    def unsubscribe_notification(self, nid):
-        """
-        Remove previous subscription for notifications. Pass the id returned
-        from subscribe_notification.
-
-        Throws:
-        - CCSessionError: for low-level communication errors.
-        - KeyError: The id does not correspond to a valid subscription.
-        """
-        (group, cid) = nid
-        del self._notification_callbacks[group][cid]
-        if not self._notification_callbacks[group]:
-            # Removed the last one
-            self._session.group_unsubscribe(CC_GROUP_NOTIFICATION_PREFIX +
-                                            group)
-            del self._notification_callbacks[group]
-
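
A sketch of the notification round trip on the same ModuleCCSession; the group, event, and parameter names echo the illustrative ones in the docstrings above, and the callback only fires when incoming messages are processed via check_command():

    def on_member_change(event, params):
        # Called for every notification received in the "members" group.
        print(event, params)

    nid = cc.subscribe_notification("members", on_member_change)
    # Any connected module (including this one) can now emit the event:
    cc.notify("members", "new_group_member", {"id": 42})
    # ... run the check_command() loop so the callback gets invoked ...
    cc.unsubscribe_notification(nid)
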
-class UIModuleCCSession(MultiConfigData):
-    """This class is used in a configuration user interface. It contains
-       specific functions for getting, displaying, and sending
-       configuration settings through the b10-cmdctl module."""
-    def __init__(self, conn):
-        """Initialize a UIModuleCCSession. The conn object that is
-           passed must have send_GET and send_POST functions"""
-        MultiConfigData.__init__(self)
-        self._conn = conn
-        self.update_specs_and_config()
-
-    def request_specifications(self):
-        """Clears the current list of specifications, and requests a new
-            list from b10-cmdctl. As other actions may have caused modules
-            to be stopped, or new modules to be added, this is expected to
-            be run after each interaction (at this moment). It is usually
-            also combined with request_current_config(). For that reason,
-            we provide update_specs_and_config() which calls both."""
-        specs = self._conn.send_GET('/module_spec')
-        self.clear_specifications()
-        for module in specs.keys():
-            self.set_specification(isc.config.ModuleSpec(specs[module]))
-
-    def request_current_config(self):
-        """Requests the current configuration from the configuration
-           manager through b10-cmdctl, and stores those as CURRENT. This
-           does not modify any local changes, it just updates to the current
-           state of the server itself."""
-        config = self._conn.send_GET('/config_data')
-        if 'version' not in config or config['version'] != BIND10_CONFIG_DATA_VERSION:
-            raise ModuleCCSessionError("Bad config version")
-        self._set_current_config(config)
-
-    def update_specs_and_config(self):
-        """Convenience function to both clear and update the known list of
-           module specifications, and update the current configuration on
-           the server side. There are a few cases where the caller might only
-           want to run one of these tasks, but often they are both needed."""
-        self.request_specifications()
-        self.request_current_config()
-
-    def _add_value_to_list(self, identifier, value, module_spec):
-        cur_list, status = self.get_value(identifier)
-        if not cur_list:
-            cur_list = []
-
-        if value is None and "list_item_spec" in module_spec:
-            if "item_default" in module_spec["list_item_spec"]:
-                value = module_spec["list_item_spec"]["item_default"]
-
-        if value is None:
-            raise isc.cc.data.DataNotFoundError(
-                "No value given and no default for " + str(identifier))
-
-        if value not in cur_list:
-            cur_list.append(value)
-            self.set_value(identifier, cur_list)
-        else:
-            raise isc.cc.data.DataAlreadyPresentError(str(value) +
-                                                      " already in "
-                                                      + str(identifier))
-
-    def _add_value_to_named_set(self, identifier, value, item_value):
-        if type(value) != str:
-            raise isc.cc.data.DataTypeError("Name for named_set " +
-                                            identifier +
-                                            " must be a string")
-        # fail on both None and empty string
-        if not value:
-            raise isc.cc.data.DataNotFoundError(
-                    "Need a name to add a new item to named_set " +
-                    str(identifier))
-        else:
-            cur_map, status = self.get_value(identifier)
-            if not cur_map:
-                cur_map = {}
-            if value not in cur_map:
-                cur_map[value] = item_value
-                self.set_value(identifier, cur_map)
-            else:
-                raise isc.cc.data.DataAlreadyPresentError(value +
-                                                          " already in " +
-                                                          identifier)
-
-    def add_value(self, identifier, value_str = None, set_value_str = None):
-        """Add a value to a configuration list. Raises a DataTypeError
-           if the value does not conform to the list_item_spec field
-           of the module config data specification. If value_str is
-           not given, we add the default as specified by the .spec
-           file. Raises a DataNotFoundError if the given identifier
-           is not specified in the specification as a map or list.
-           Raises a DataAlreadyPresentError if the specified element
-           already exists."""
-        module_spec = self.find_spec_part(identifier)
-        if module_spec is None:
-            raise isc.cc.data.DataNotFoundError("Unknown item " + str(identifier))
-
-        # for type any, we determine the 'type' by what value is set
-        # (which would be either list or dict)
-        cur_value, _ = self.get_value(identifier)
-        type_any = isc.config.config_data.spec_part_is_any(module_spec)
-
-        # the specified element must be a list or a named_set
-        if 'list_item_spec' in module_spec or\
-           (type_any and type(cur_value) == list):
-            value = None
-            # in lists, we might get the value with spaces, making it
-            # the third argument. In that case we interpret both as
-            # one big string meant as the value
-            if value_str is not None:
-                if set_value_str is not None:
-                    value_str += set_value_str
-                value = isc.cc.data.parse_value_str(value_str)
-            self._add_value_to_list(identifier, value, module_spec)
-        elif 'named_set_item_spec' in module_spec or\
-           (type_any and type(cur_value) == dict):
-            item_name = None
-            item_value = None
-            if value_str is not None:
-                item_name = value_str
-            if set_value_str is not None:
-                item_value = isc.cc.data.parse_value_str(set_value_str)
-            else:
-                if 'item_default' in module_spec['named_set_item_spec']:
-                    item_value = module_spec['named_set_item_spec']['item_default']
-            self._add_value_to_named_set(identifier, item_name,
-                                         item_value)
-        else:
-            raise isc.cc.data.DataTypeError(str(identifier) + " is not a list or a named set")
-
-    def _remove_value_from_list(self, identifier, value):
-        if value is None:
-            # we are directly removing a list index
-            id, list_indices = isc.cc.data.split_identifier_list_indices(identifier)
-            if list_indices is None:
-                raise isc.cc.data.DataTypeError("identifier in remove_value() does not contain a list index, and no value to remove")
-            else:
-                self.set_value(identifier, None)
-        else:
-            cur_list, status = self.get_value(identifier)
-            if not cur_list:
-                cur_list = []
-            elif value in cur_list:
-                cur_list.remove(value)
-            self.set_value(identifier, cur_list)
-
-    def _remove_value_from_named_set(self, identifier, value):
-        if value is None:
-            raise isc.cc.data.DataNotFoundError("Need a name to remove an item from named_set " + str(identifier))
-        elif type(value) != str:
-            raise isc.cc.data.DataTypeError("Name for named_set " + identifier + " must be a string")
-        else:
-            cur_map, status = self.get_value(identifier)
-            if not cur_map:
-                cur_map = {}
-            if value in cur_map:
-                del cur_map[value]
-                self.set_value(identifier, cur_map)
-            else:
-                raise isc.cc.data.DataNotFoundError(value + " not found in named_set " + str(identifier))
-
-    def remove_value(self, identifier, value_str):
-        """Remove a value from a configuration list or named set.
-        The value string must be a string representation of the full
-        item. Raises a DataTypeError if the value at the identifier
-        is not a list, or if the given value_str does not match the
-        list_item_spec """
-        module_spec = self.find_spec_part(identifier)
-        if module_spec is None:
-            raise isc.cc.data.DataNotFoundError("Unknown item " + str(identifier))
-
-        value = None
-        if value_str is not None:
-            value = isc.cc.data.parse_value_str(value_str)
-
-        # for type any, we determine the 'type' by what value is set
-        # (which would be either list or dict)
-        cur_value, _ = self.get_value(identifier)
-        type_any = isc.config.config_data.spec_part_is_any(module_spec)
-
-        # There are two forms of 'remove from list': the remove-value-from-list
-        # form and the remove-by-index form. We can recognize the second
-        # case by value being None.
-        if 'list_item_spec' in module_spec or\
-           (type_any and type(cur_value) == list) or\
-           value is None:
-            if not type_any and value is not None:
-                isc.config.config_data.check_type(module_spec['list_item_spec'], value)
-            self._remove_value_from_list(identifier, value)
-        elif 'named_set_item_spec' in module_spec or\
-           (type_any and type(cur_value) == dict):
-            self._remove_value_from_named_set(identifier, value_str)
-        else:
-            raise isc.cc.data.DataTypeError(str(identifier) + " is not a list or a named_set")
-
-
-
-    def commit(self):
-        """Commit all local changes, send them through b10-cmdctl to
-           the configuration manager"""
-        if self.get_local_changes():
-            response = self._conn.send_POST('/ConfigManager/set_config',
-                                            [ self.get_local_changes() ])
-            answer = isc.cc.data.parse_value_str(response.read().decode())
-            # answer is either an empty dict (on success), or one
-            # containing errors
-            if answer == {}:
-                self.clear_local_changes()
-            elif "error" in answer:
-                raise ModuleCCSessionError("Error: " + str(answer["error"]) + "\n" + "Configuration not committed")
-            else:
-                raise ModuleCCSessionError("Unknown format of answer in commit(): " + str(answer))

+ 0 - 612
src/lib/python/isc/config/cfgmgr.py

@@ -1,612 +0,0 @@
-# Copyright (C) 2010  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-"""This is the BIND 10 configuration manager, run by b10-cfgmgr.
-
-   It stores the system configuration, and sends updates of the
-   configuration to the modules that need them.
-"""
-
-import isc
-import signal
-import ast
-import os
-import copy
-import tempfile
-import json
-import errno
-from isc.cc import data
-from isc.cc.proto_defs import *
-from isc.config import ccsession, config_data, module_spec
-from isc.util.file import path_search
-import bind10_config
-import isc.log
-from isc.log_messages.cfgmgr_messages import *
-
-logger = isc.log.Logger("cfgmgr", buffer=True)
-
-class ConfigManagerDataReadError(Exception):
-    """This exception is thrown when there is an error while reading
-       the current configuration on startup."""
-    pass
-
-class ConfigManagerDataEmpty(Exception):
-    """This exception is thrown when the currently stored configuration
-       is not found, or appears empty."""
-    pass
-
-class ConfigManagerData:
-    """This class hold the actual configuration information, and
-       reads it from and writes it to persistent storage"""
-
-    def __init__(self, data_path, file_name):
-        """Initialize the data for the configuration manager, and
-           set the version and path for the data store. Initializing
-           this does not yet read the database; a call to
-           read_from_file is needed for that.
-
-           In case the file_name is absolute, data_path is ignored
-           and the directory where the file_name lives is used instead.
-           """
-        self.data = {}
-        self.data['version'] = config_data.BIND10_CONFIG_DATA_VERSION
-        if os.path.isabs(file_name):
-            self.db_filename = file_name
-            self.data_path = os.path.dirname(file_name)
-        else:
-            self.db_filename = data_path + os.sep + file_name
-            self.data_path = data_path
-
-    def check_for_updates(file_config):
-        """
-        Given the parsed JSON data from the config file,
-        check whether it needs updating due to version changes.
-        Return the data with updates (or the original data if no
-        updates were necessary).
-        Even though it is at this moment not technically necessary, this
-        function makes and returns a copy of the given data.
-        """
-        config = copy.deepcopy(file_config)
-        if 'version' in config:
-            data_version = config['version']
-        else:
-            # If it is not present, assume latest or earliest?
-            data_version = 1
-
-        # For efficiency, if up-to-date, return now
-        if data_version == config_data.BIND10_CONFIG_DATA_VERSION:
-            return config
-
-        # Don't know what to do if it is more recent
-        if data_version > config_data.BIND10_CONFIG_DATA_VERSION:
-            raise ConfigManagerDataReadError(
-                      "Cannot load configuration file: version "
-                      "%d not yet supported" % config['version'])
-
-        # At some point we might give up supporting older versions
-        if data_version < 1:
-            raise ConfigManagerDataReadError(
-                      "Cannot load configuration file: version "
-                      "%d no longer supported" % config['version'])
-
-        # Ok, so we have a still-supported older version. Apply all
-        # updates
-        new_data_version = data_version
-        if new_data_version == 1:
-            # only format change, no other changes necessary
-            new_data_version = 2
-        if new_data_version == 2:
-            # 'Boss' got changed to 'Init'; If for some reason both are
-            # present, simply ignore the old one
-            if 'Boss' in config:
-                if not 'Init' in config:
-                    config['Init'] = config['Boss']
-                    del config['Boss']
-                else:
-                    # This should not happen, but we don't want to overwrite
-                    # any config in this case, so warn about it
-                    logger.warn(CFGMGR_CONFIG_UPDATE_BOSS_AND_INIT_FOUND)
-            new_data_version = 3
-
-        config['version'] = new_data_version
-        logger.info(CFGMGR_AUTOMATIC_CONFIG_DATABASE_UPDATE, data_version,
-                    new_data_version)
-        return config
-
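
A worked example of the upgrade chain above (like read_from_file, it is called as a plain function on the class); this assumes BIND10_CONFIG_DATA_VERSION is 3, which the chain implies, and that logging is set up as cfgmgr expects:

    from isc.config.cfgmgr import ConfigManagerData

    old = {"version": 2, "Boss": {"components": {}}}
    new = ConfigManagerData.check_for_updates(old)
    # new == {"version": 3, "Init": {"components": {}}}; 'old' is untouched
    # because check_for_updates() works on a deep copy.
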
-    def read_from_file(data_path, file_name):
-        """Read the current configuration found in the file file_name.
-           If file_name is absolute, data_path is ignored. Otherwise
-           we look for the file_name in data_path directory.
-
-           If the file does not exist, a ConfigManagerDataEmpty exception is
-           raised. If there is a parse error, or if the data in the file has
-           the wrong version, a ConfigManagerDataReadError is raised. In the
-           first case, it is probably safe to log and ignore. In the case of
-           the second exception, the best way is probably to report the error
-           and stop loading the system.
-           """
-        config = ConfigManagerData(data_path, file_name)
-        logger.info(CFGMGR_CONFIG_FILE, config.db_filename)
-        file = None
-        try:
-            file = open(config.db_filename, 'r')
-            file_config = json.loads(file.read())
-            # handle different versions here
-            # If possible, we automatically convert to the new
-            # scheme and update the configuration
-            # If not, we raise an exception
-            config.data = ConfigManagerData.check_for_updates(file_config)
-        except IOError as ioe:
-            # if IOError is 'no such file or directory', then continue
-            # (raise empty), otherwise fail (raise error)
-            if ioe.errno == errno.ENOENT:
-                raise ConfigManagerDataEmpty("No configuration file found")
-            else:
-                raise ConfigManagerDataReadError("Can't read configuration file: " + str(ioe))
-        except ValueError:
-            raise ConfigManagerDataReadError("Configuration file out of date or corrupt, please update or remove " + config.db_filename)
-        finally:
-            if file:
-                file.close()
-        return config
-
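A short, hedged usage sketch of the read path and its two documented failure modes; it assumes the isc.config package removed by this commit is still importable, and the data path and file name are made up.

    from isc.config.cfgmgr import (ConfigManagerData, ConfigManagerDataEmpty,
                                   ConfigManagerDataReadError)

    try:
        cfg = ConfigManagerData.read_from_file('/tmp', 'b10-config.db')
    except ConfigManagerDataEmpty:
        # no file yet: safe to start with an empty configuration
        cfg = ConfigManagerData('/tmp', 'b10-config.db')
    except ConfigManagerDataReadError as e:
        # corrupt file or unsupported version: report and stop
        raise SystemExit("cannot load configuration: " + str(e))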
-    def write_to_file(self, output_file_name = None):
-        """Writes the current configuration data to a file. If
-           output_file_name is not specified, the file used in
-           read_from_file is used."""
-        filename = None
-
-        try:
-            file = tempfile.NamedTemporaryFile(mode='w',
-                                               prefix="b10-config.db.",
-                                               dir=self.data_path,
-                                               delete=False)
-            filename = file.name
-            file.write(json.dumps(self.data))
-            file.write("\n")
-            file.close()
-            if output_file_name:
-                os.rename(filename, output_file_name)
-            else:
-                os.rename(filename, self.db_filename)
-        except IOError as ioe:
-            logger.error(CFGMGR_IOERROR_WHILE_WRITING_CONFIGURATION, ioe)
-        except OSError as ose:
-            logger.error(CFGMGR_OSERROR_WHILE_WRITING_CONFIGURATION, ose)
-        try:
-            if filename and os.path.exists(filename):
-                os.remove(filename)
-        except OSError:
-            # Ok if we really can't delete it anymore, leave it
-            pass
-
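write_to_file() relies on the usual write-to-a-temporary-file-then-rename pattern, so a crash mid-write never leaves a truncated configuration behind. A self-contained sketch of just that pattern (the helper name, path and data are made up):

    import json, os, tempfile

    def atomic_write(path, data):
        # write next to the destination, then rename over it in one step
        tmp = tempfile.NamedTemporaryFile(mode='w', delete=False,
                                          dir=os.path.dirname(path) or '.')
        try:
            tmp.write(json.dumps(data) + "\n")
            tmp.close()
            os.rename(tmp.name, path)
        finally:
            if not tmp.closed:
                tmp.close()
            if os.path.exists(tmp.name):
                os.remove(tmp.name)   # only reached if the rename did not happen

    atomic_write('/tmp/b10-config.db', {'version': 3})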
-    def rename_config_file(self, old_file_name=None, new_file_name=None):
-        """Renames the given configuration file to the given new file name,
-           if it exists. If it does not exist, nothing happens.
-           If old_file_name is None (default), the file used in
-           read_from_file is used. If new_file_name is None (default), the
-           file old_file_name appended with .bak is used. If that file exists
-           already, .1 is appended. If that file exists, .2 is appended, etc.
-        """
-        if old_file_name is None:
-            old_file_name = self.db_filename
-        if new_file_name is None:
-            new_file_name = old_file_name + ".bak"
-        if os.path.exists(new_file_name):
-            i = 1
-            while os.path.exists(new_file_name + "." + str(i)):
-                i += 1
-            new_file_name = new_file_name + "." + str(i)
-        if os.path.exists(old_file_name):
-            logger.info(CFGMGR_BACKED_UP_CONFIG_FILE, old_file_name, new_file_name)
-            os.rename(old_file_name, new_file_name)
-
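The backup-name selection described in the docstring, as a tiny standalone sketch (the helper name is made up): the first backup is <name>.bak, later ones get .bak.1, .bak.2 and so on.

    import os

    def next_backup_name(path):
        name = path + ".bak"
        if os.path.exists(name):
            i = 1
            while os.path.exists(name + "." + str(i)):
                i += 1
            name = name + "." + str(i)
        return name

    print(next_backup_name('/tmp/b10-config.db'))   # e.g. /tmp/b10-config.db.bak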
-    def __eq__(self, other):
-        """Returns True if the data contained is equal. data_path and
-           db_filename may be different."""
-        if type(other) != type(self):
-            return False
-        return self.data == other.data
-
-class ConfigManager:
-    """Creates a configuration manager. The data_path is the path
-       to the directory containing the configuration file,
-       database_filename points to the configuration file.
-       If session is set, this will be used as the communication
-       channel session. If not, a new session will be created.
-       The ability to specify a custom session is for testing purposes
-       and should not be needed for normal usage."""
-    def __init__(self, data_path, database_filename, session=None,
-                 clear_config=False):
-        """Initialize the configuration manager. The data_path string
-           is the path to the directory where the configuration is
-           stored (in <data_path>/<database_filename> or in
-           <database_filename>, if it is absolute). The database_filename
-           is the config file to load. Session is an optional
-           cc-channel session. If this is not given, a new one is
-           created. If clear_config is True, the configuration file is
-           renamed and a new one is created."""
-        self.data_path = data_path
-        self.database_filename = database_filename
-        self.module_specs = {}
-        # Virtual modules are the ones which have no process running. The
-        # checking of validity is done by functions presented here instead
-        # of some other process
-        self.virtual_modules = {}
-        self.config = ConfigManagerData(data_path, database_filename)
-        if clear_config:
-            self.config.rename_config_file()
-        if session:
-            self.cc = session
-        else:
-            self.cc = isc.cc.Session()
-        self.cc.group_subscribe("ConfigManager")
-        self.cc.group_subscribe("Init", "ConfigManager")
-        self.running = False
-        # As a core module, CfgMgr is different than other modules,
-        # as it does not use a ModuleCCSession, and hence needs
-        # to handle logging config on its own
-        self.log_config_data = config_data.ConfigData(
-            isc.config.module_spec_from_file(
-                path_search('logging.spec',
-                bind10_config.PLUGIN_PATHS)))
-        # store the logging 'module' name for easier reference
-        self.log_module_name = self.log_config_data.get_module_spec().get_module_name()
-
-    def check_logging_config(self, config):
-        if self.log_module_name in config:
-            # If there is logging config, apply it.
-            ccsession.default_logconfig_handler(config[self.log_module_name],
-                                                self.log_config_data)
-        else:
-            # If there is no logging config, we still need to trigger the
-            # handler, so make it use defaults (and flush any buffered logs)
-            ccsession.default_logconfig_handler({}, self.log_config_data)
-
-    def notify_b10_init(self):
-        """Notifies the Init module that the Config Manager is running"""
-        # TODO: Use a real, broadcast notification here.
-        self.cc.group_sendmsg({"running": "ConfigManager"}, "Init")
-
-    def set_module_spec(self, spec):
-        """Adds a ModuleSpec"""
-        self.module_specs[spec.get_module_name()] = spec
-
-    def set_virtual_module(self, spec, check_func):
-        """Adds a virtual module with its spec and checking function."""
-        self.module_specs[spec.get_module_name()] = spec
-        self.virtual_modules[spec.get_module_name()] = check_func
-
-    def remove_module_spec(self, module_name):
-        """Removes the full ModuleSpec for the given module_name.
-           Also removes the virtual module check function if it
-           was present.
-           Does nothing if the module was not present."""
-        if module_name in self.module_specs:
-            del self.module_specs[module_name]
-        if module_name in self.virtual_modules:
-            del self.virtual_modules[module_name]
-
-    def get_module_spec(self, module_name = None):
-        """Returns the full ModuleSpec for the module with the given
-           module_name. If no module name is given, a dict will
-           be returned with 'name': module_spec values. If the
-           module name is given, but does not exist, an empty dict
-           is returned"""
-        if module_name:
-            if module_name in self.module_specs:
-                return self.module_specs[module_name].get_full_spec()
-            else:
-                # TODO: log error?
-                return {}
-        else:
-            result = {}
-            for module in self.module_specs:
-                result[module] = self.module_specs[module].get_full_spec()
-            return result
-
-    def get_config_spec(self, name = None):
-        """Returns a dict containing 'module_name': config_spec for
-           all modules. If name is specified, only that module will
-           be included"""
-        config_data = {}
-        if name:
-            if name in self.module_specs:
-                config_data[name] = self.module_specs[name].get_config_spec()
-        else:
-            for module_name in self.module_specs.keys():
-                config_data[module_name] = self.module_specs[module_name].get_config_spec()
-        return config_data
-
-    def get_commands_spec(self, name = None):
-        """Returns a dict containing 'module_name': commands_spec for
-           all modules. If name is specified, only that module will
-           be included"""
-        commands = {}
-        if name:
-            if name in self.module_specs:
-                commands[name] = self.module_specs[name].get_commands_spec()
-        else:
-            for module_name in self.module_specs.keys():
-                commands[module_name] = self.module_specs[module_name].get_commands_spec()
-        return commands
-
-    def get_statistics_spec(self, name = None):
-        """Returns a dict containing 'module_name': statistics_spec for
-           all modules. If name is specified, only that module will
-           be included"""
-        statistics = {}
-        if name:
-            if name in self.module_specs:
-                statistics[name] = self.module_specs[name].get_statistics_spec()
-        else:
-            for module_name in self.module_specs.keys():
-                statistics[module_name] = self.module_specs[module_name].get_statistics_spec()
-        return statistics
-
-    def read_config(self):
-        """Read the current configuration from the file specificied at init()"""
-        try:
-            self.config = ConfigManagerData.read_from_file(
-                self.data_path, self.database_filename)
-        except ConfigManagerDataEmpty:
-            # ok, just start with an empty config
-            self.config = ConfigManagerData(self.data_path,
-                                            self.database_filename)
-        self.check_logging_config(self.config.data)
-
-    def write_config(self):
-        """Write the current configuration to the file specificied at init()"""
-        self.config.write_to_file()
-
-    def __handle_get_module_spec(self, cmd):
-        """Private function that handles the 'get_module_spec' command"""
-        answer = {}
-        if cmd != None:
-            if type(cmd) == dict:
-                if 'module_name' in cmd and cmd['module_name'] != '':
-                    module_name = cmd['module_name']
-                    spec = self.get_module_spec(cmd['module_name'])
-                    if type(spec) != type({}):
-                        # this is a ModuleSpec object.  Extract the
-                        # internal spec.
-                        spec = spec.get_full_spec()
-                    answer = ccsession.create_answer(0, spec)
-                else:
-                    answer = ccsession.create_answer(1, "Bad module_name in get_module_spec command")
-            else:
-                answer = ccsession.create_answer(1, "Bad get_module_spec command, argument not a dict")
-        else:
-            answer = ccsession.create_answer(0, self.get_module_spec())
-        return answer
-
-    def __handle_get_config_dict(self, cmd):
-        """Private function that handles the 'get_config' command
-           where the command has been checked to be a dict"""
-        if 'module_name' in cmd and cmd['module_name'] != '':
-            module_name = cmd['module_name']
-            try:
-                return ccsession.create_answer(0, data.find(self.config.data, module_name))
-            except data.DataNotFoundError as dnfe:
-                # no data is ok, that means we have nothing that
-                # deviates from default values
-                return ccsession.create_answer(0, { 'version': config_data.BIND10_CONFIG_DATA_VERSION })
-        else:
-            return ccsession.create_answer(1, "Bad module_name in get_config command")
-
-    def __handle_get_config(self, cmd):
-        """Private function that handles the 'get_config' command"""
-        if cmd != None:
-            if type(cmd) == dict:
-                return self.__handle_get_config_dict(cmd)
-            else:
-                return ccsession.create_answer(1, "Bad get_config command, argument not a dict")
-        else:
-            return ccsession.create_answer(0, self.config.data)
-
-    def __handle_set_config_module(self, module_name, cmd):
-        # the answer comes (or does not come) from the relevant module
-        # so we need a variable to see if we got it
-        answer = None
-        # todo: use api (and check the data against the definition?)
-        old_data = copy.deepcopy(self.config.data)
-        conf_part = data.find_no_exc(self.config.data, module_name)
-        update_cmd = None
-        use_part = None
-        if conf_part:
-            data.merge(conf_part, cmd)
-            use_part = conf_part
-        else:
-            conf_part = data.set(self.config.data, module_name, {})
-            data.merge(conf_part[module_name], cmd)
-            use_part = conf_part[module_name]
-
-        # The command to send
-        update_cmd = ccsession.create_command(ccsession.COMMAND_CONFIG_UPDATE,
-                                              use_part)
-
-        # TODO: This design might need some revisiting. We might want some
-        # polymorphism instead of branching. But it just might turn out it
-        # will get solved by itself when we move everything to virtual modules
-        # (which is possible solution to the offline configuration problem)
-        # or when we solve the incorrect behaviour here when a config is
-        # rejected (spying modules don't know it was rejected and some modules
-        # might have been committed already).
-        if module_name in self.virtual_modules:
-            # The module is virtual, so call it to get the answer
-            try:
-                error = self.virtual_modules[module_name](use_part)
-                if error is None:
-                    answer = ccsession.create_answer(0)
-                    # OK, it is successful, send the notify, but don't wait
-                    # for answer
-                    seq = self.cc.group_sendmsg(update_cmd, module_name)
-                else:
-                    answer = ccsession.create_answer(1, error)
-            # Make sure a merely validating plugin doesn't kill the whole manager
-            except Exception as excp:
-                # Provide answer
-                answer = ccsession.create_answer(1, "Exception: " + str(excp))
-        else:
-            # Real module, send it over the wire to it
-            # send out changed info and wait for answer
-            seq = self.cc.group_sendmsg(update_cmd, module_name)
-            try:
-                # replace 'our' answer with that of the module
-                answer, env = self.cc.group_recvmsg(False, seq)
-            except isc.cc.SessionTimeout:
-                answer = ccsession.create_answer(1, "Timeout waiting for answer from " + module_name)
-            except isc.cc.SessionError as se:
-                logger.error(CFGMGR_BAD_UPDATE_RESPONSE_FROM_MODULE, module_name, se)
-                answer = ccsession.create_answer(1, "Unable to parse response from " + module_name + ": " + str(se))
-        if answer:
-            rcode, val = ccsession.parse_answer(answer)
-            if rcode == 0:
-                self.write_config()
-            else:
-                self.config.data = old_data
-        return answer
-
-    def __handle_set_config_all(self, cmd):
-        old_data = copy.deepcopy(self.config.data)
-        got_error = False
-        err_list = []
-        # The format of the command is a dict with module->newconfig
-        # sets, so we simply call set_config_module for each of those
-        for module in cmd:
-            if module != "version":
-                answer = self.__handle_set_config_module(module, cmd[module])
-                if answer == None:
-                    got_error = True
-                    err_list.append("No answer message from " + module)
-                else:
-                    rcode, val = ccsession.parse_answer(answer)
-                    if rcode != 0:
-                        got_error = True
-                        err_list.append(val)
-        if not got_error:
-            # if Logging config is in there, update our config as well
-            self.check_logging_config(cmd)
-            self.write_config()
-            return ccsession.create_answer(0)
-        else:
-            # TODO rollback changes that did get through, should we re-send update?
-            self.config.data = old_data
-            return ccsession.create_answer(1, " ".join(err_list))
-
-    def __handle_set_config(self, cmd):
-        """Private function that handles the 'set_config' command"""
-        answer = None
-
-        if cmd == None:
-            return ccsession.create_answer(1, "Wrong number of arguments")
-        if len(cmd) == 2:
-            answer = self.__handle_set_config_module(cmd[0], cmd[1])
-        elif len(cmd) == 1:
-            answer = self.__handle_set_config_all(cmd[0])
-        else:
-            answer = ccsession.create_answer(1, "Wrong number of arguments")
-        if not answer:
-            answer = ccsession.create_answer(1, "No answer message from " + cmd[0])
-
-        return answer
-
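As a hedged illustration of the two argument shapes this handler accepts (the module name and settings are invented), here are the corresponding messages a client would build with ccsession:

    from isc.config import ccsession

    # per-module form, handled by the len(cmd) == 2 branch
    per_module = ccsession.create_command(ccsession.COMMAND_SET_CONFIG,
                                          ["Init", {"components": {}}])
    # whole-config form, handled by the len(cmd) == 1 branch
    whole_config = ccsession.create_command(ccsession.COMMAND_SET_CONFIG,
                                            [{"version": 3,
                                              "Init": {"components": {}}}])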
-    def __handle_module_spec(self, spec):
-        """Private function that handles the 'module_spec' command"""
-        # todo: validate? (no direct access to spec as
-        # todo: use ModuleSpec class
-        # todo: error checking (like keyerrors)
-        answer = {}
-        self.set_module_spec(spec)
-        self._send_module_spec_to_cmdctl(spec.get_module_name(),
-                                         spec.get_full_spec())
-        return ccsession.create_answer(0)
-
-    def __handle_module_stopping(self, arg):
-        """Private function that handles a 'stopping' command;
-           The argument is of the form { 'module_name': <name> }.
-           If the module is known, it is removed from the known list,
-           and a message is sent to the Cmdctl channel to remove it as well.
-           If it is unknown, the message is ignored."""
-        if arg['module_name'] in self.module_specs:
-            del self.module_specs[arg['module_name']]
-            self._send_module_spec_to_cmdctl(arg['module_name'], None)
-        # This command is not expected to be answered
-        return None
-
-    def _send_module_spec_to_cmdctl(self, module_name, spec):
-        """Sends the given module spec for the given module name to Cmdctl.
-           Parameters:
-           module_name: A string with the name of the module
-           spec: dict containing full module specification, as returned by
-                 ModuleSpec.get_full_spec(). This argument may also be None,
-                 in which case it signals Cmdctl to remove said module from
-                 its list.
-           No response from Cmdctl is expected."""
-        spec_update = ccsession.create_command(ccsession.COMMAND_MODULE_SPECIFICATION_UPDATE,
-                                               [ module_name, spec ])
-        self.cc.group_sendmsg(spec_update, "Cmdctl")
-
-    def handle_msg(self, msg):
-        """Handle a command from the cc channel to the configuration manager"""
-        answer = {}
-        cmd, arg = ccsession.parse_command(msg)
-        if cmd:
-            if cmd == ccsession.COMMAND_GET_COMMANDS_SPEC:
-                answer = ccsession.create_answer(0, self.get_commands_spec())
-            elif cmd == ccsession.COMMAND_GET_STATISTICS_SPEC:
-                answer = ccsession.create_answer(0, self.get_statistics_spec())
-            elif cmd == ccsession.COMMAND_GET_MODULE_SPEC:
-                answer = self.__handle_get_module_spec(arg)
-            elif cmd == ccsession.COMMAND_GET_CONFIG:
-                answer = self.__handle_get_config(arg)
-            elif cmd == ccsession.COMMAND_SET_CONFIG:
-                answer = self.__handle_set_config(arg)
-            elif cmd == ccsession.COMMAND_MODULE_STOPPING:
-                answer = self.__handle_module_stopping(arg)
-            elif cmd == ccsession.COMMAND_SHUTDOWN:
-                self.running = False
-                answer = ccsession.create_answer(0)
-            elif cmd == ccsession.COMMAND_MODULE_SPEC:
-                try:
-                    answer = self.__handle_module_spec(isc.config.ModuleSpec(arg))
-                except isc.config.ModuleSpecError as dde:
-                    answer = ccsession.create_answer(1, "Error in data definition: " + str(dde))
-            else:
-                answer = ccsession.create_answer(1, "Unknown command: " + str(cmd))
-        else:
-            answer = ccsession.create_answer(1, "Unknown message format: " + str(msg))
-        return answer
-
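A hedged sketch of driving the dispatcher directly: cm stands for a ConfigManager instance created elsewhere (for example in a test using a fake session), and the package removed by this commit is assumed to be importable.

    from isc.config import ccsession

    msg = ccsession.create_command(ccsession.COMMAND_GET_COMMANDS_SPEC, None)
    answer = cm.handle_msg(msg)        # cm: an existing ConfigManager instance
    rcode, value = ccsession.parse_answer(answer)
    assert rcode == 0                  # value holds the command specs per module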
-    def run(self):
-        """Runs the configuration manager."""
-        self.running = True
-        while self.running:
-            # we just wait eternally for any command here, so disable
-            # timeouts for this specific recv
-            self.cc.set_timeout(0)
-            msg, env = self.cc.group_recvmsg(False)
-            # and set it back to whatever we default to
-            self.cc.set_timeout(isc.cc.Session.MSGQ_DEFAULT_TIMEOUT)
-            # ignore 'None' values (even though they should not occur)
-            # and messages that are answers to questions we did
-            # not ask
-            if msg is not None and CC_PAYLOAD_RESULT not in msg:
-                answer = self.handle_msg(msg)
-                # Only respond if there actually is something to respond with
-                if answer is not None:
-                    self.cc.group_reply(env, answer)
-        logger.info(CFGMGR_STOPPED_BY_COMMAND)

+ 0 - 79
src/lib/python/isc/config/cfgmgr_messages.mes

@@ -1,79 +0,0 @@
-# Copyright (C) 2011  Internet Systems Consortium, Inc. ("ISC")
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
-# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
-# AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-# PERFORMANCE OF THIS SOFTWARE.
-
-# No namespace declaration - these constants go in the global namespace
-# of the cfgmgr messages python module.
-
-% CFGMGR_AUTOMATIC_CONFIG_DATABASE_UPDATE Updating configuration database from version %1 to %2
-An older version of the configuration database has been found, from which
-there was an automatic upgrade path to the current version. These changes
-are now applied, and no action from the administrator is necessary.
-
-% CFGMGR_BACKED_UP_CONFIG_FILE Config file %1 was removed; a backup was made at %2
-BIND 10 has been started with the command to clear the configuration
-file.  The existing file has been backed up (moved) to the given file
-name. A new configuration file will be created in the original location
-when necessary.
-
-% CFGMGR_BAD_UPDATE_RESPONSE_FROM_MODULE Unable to parse response from module %1: %2
-The configuration manager sent a configuration update to a module, but
-the module responded with an answer that could not be parsed. The answer
-message appears to be invalid JSON data, or not decodable to a string.
-This is likely to be a problem in the module in question. The update is
-assumed to have failed, and will not be stored.
-
-% CFGMGR_CC_SESSION_ERROR Error connecting to command channel: %1
-The configuration manager daemon was unable to connect to the messaging
-system. The most likely cause is that msgq is not running.
-
-% CFGMGR_CONFIG_FILE Configuration manager starting with configuration file: %1
-The configuration manager is starting, reading and saving the configuration
-settings to the shown file.
-
-% CFGMGR_CONFIG_UPDATE_BOSS_AND_INIT_FOUND Configuration found for both 'Boss' and 'Init', ignoring 'Boss'
-In the process of updating the configuration from version 2 to version 3,
-the configuration manager has found that there are existing configurations
-for both the old value 'Boss' and the new value 'Init'. This should in
-theory not happen, as in older versions 'Init' does not exist, and in newer
-versions 'Boss' does not exist. The configuration manager will continue
-with the update process, leaving the values for both as they are, so as not
-to overwrite any settings. However, the values for 'Boss' are ignored by
-BIND 10, and it is probably wise to check the configuration file manually.
-
-% CFGMGR_DATA_READ_ERROR error reading configuration database from disk: %1
-There was a problem reading the persistent configuration data as stored
-on disk. The file may be corrupted, or it is of a version from where
-there is no automatic upgrade path. The file needs to be repaired or
-removed. The configuration manager daemon will now shut down.
-
-% CFGMGR_IOERROR_WHILE_WRITING_CONFIGURATION Unable to write configuration file; configuration not stored: %1
-There was an IO error from the system while the configuration manager
-was trying to write the configuration database to disk. The specific
-error is given. The most likely cause is that the directory where
-the file is stored does not exist, or is not writable. The updated
-configuration is not stored.
-
-% CFGMGR_OSERROR_WHILE_WRITING_CONFIGURATION Unable to write configuration file; configuration not stored: %1
-There was an OS error from the system while the configuration manager
-was trying to write the configuration database to disk. The specific
-error is given. The most likely cause is that the system does not have
-write access to the configuration database file. The updated
-configuration is not stored.
-
-% CFGMGR_STOPPED_BY_COMMAND received shutdown command, shutting down
-The configuration manager received a shutdown command, and is exiting.
-
-% CFGMGR_STOPPED_BY_KEYBOARD keyboard interrupt, shutting down
-There was a keyboard interrupt signal to stop the cfgmgr daemon. The
-daemon will now shut down.

+ 0 - 927
src/lib/python/isc/config/config_data.py

@@ -1,927 +0,0 @@
-# Copyright (C) 2010  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-"""
-Classes to store configuration data and module specifications
-
-Used by the config manager, (python) modules, and UIs (those last
-two through the classes in ccsession)
-"""
-
-import isc.cc.data
-import isc.config.module_spec
-import ast
-import copy
-import sys
-
-class ConfigDataError(Exception): pass
-
-BIND10_CONFIG_DATA_VERSION = 3
-
-# Helper functions
-def spec_part_is_list(spec_part):
-    """Returns True if the given spec_part is a dict that contains a
-       list specification, and False otherwise."""
-    return (type(spec_part) == dict and 'list_item_spec' in spec_part)
-
-def spec_part_is_map(spec_part):
-    """Returns True if the given spec_part is a dict that contains a
-       map specification, and False otherwise."""
-    return (type(spec_part) == dict and 'map_item_spec' in spec_part)
-
-def spec_part_is_named_set(spec_part):
-    """Returns True if the given spec_part is a dict that contains a
-       named_set specification, and False otherwise."""
-    return (type(spec_part) == dict and 'named_set_item_spec' in spec_part)
-
-def spec_part_is_any(spec_part):
-    """Returns true if the given spec_part specifies an element of type
-       any, and False otherwise.
-    """
-    return (type(spec_part) == dict and 'item_type' in spec_part and
-            spec_part['item_type'] == "any")
-
-def _type_as_string(value):
-    if type(value) == int:
-        return 'integer'
-    elif type(value) == float:
-        return 'real'
-    elif type(value) == bool:
-        return 'boolean'
-    elif type(value) == str:
-        return 'string'
-    elif type(value) == list:
-        return 'list'
-    elif type(value) == dict:
-        return 'map'
-    else:
-        return '<unknown>'
-
-def check_type(spec_part, value):
-    """Does nothing if the value is of the correct type given the
-       specification part relevant for the value. Raises an
-       isc.cc.data.DataTypeError exception if not. spec_part can be
-       retrieved with find_spec_part()"""
-    if type(spec_part) == dict and 'item_type' in spec_part:
-        data_type = spec_part['item_type']
-    else:
-        raise isc.cc.data.DataTypeError("Incorrect specification part for type checking")
-
-    if data_type == "integer":
-        if type(value) != int:
-            raise isc.cc.data.DataTypeError('%s is not an integer (%s was passed)' % \
-                                            (str(value), _type_as_string(value)))
-        if value > sys.maxsize:
-            raise isc.cc.data.DataTypeError('%s is too large an integer' % \
-                                            (str(value)))
-    elif data_type == "real":
-        if type(value) != float:
-            raise isc.cc.data.DataTypeError('%s is not a real (%s was passed)' % \
-                                            (str(value), _type_as_string(value)))
-        if float(value) > sys.float_info.max:
-            raise isc.cc.data.DataTypeError('%s is too large for a float' % \
-                                            (str(value)))
-    elif data_type == "boolean" and type(value) != bool:
-        raise isc.cc.data.DataTypeError('%s is not a boolean (%s was passed)' % \
-                                        (str(value), _type_as_string(value)))
-    elif data_type == "string" and type(value) != str:
-        raise isc.cc.data.DataTypeError('%s is not a string (%s was passed)' % \
-                                        (str(value), _type_as_string(value)))
-    elif data_type == "list":
-        if type(value) != list:
-            raise isc.cc.data.DataTypeError('%s is not a list (%s was passed)' % \
-                                            (str(value), _type_as_string(value)))
-        else:
-            for element in value:
-                check_type(spec_part['list_item_spec'], element)
-    elif data_type == "map" and type(value) != dict:
-        # todo: check types of map contents too
-        raise isc.cc.data.DataTypeError('%s is not a map (%s was passed)' % \
-                                        (str(value), _type_as_string(value)))
-
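A brief usage sketch of check_type() with hand-written spec parts (the item name is made up); it assumes the isc packages removed by this commit are importable:

    import isc.cc.data
    from isc.config.config_data import check_type

    int_spec = {'item_name': 'port', 'item_type': 'integer'}
    check_type(int_spec, 5300)          # correct type: returns silently
    try:
        check_type(int_spec, "5300")    # wrong type
    except isc.cc.data.DataTypeError as e:
        print(e)                        # 5300 is not an integer (string was passed)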
-def convert_type(spec_part, value):
-    """Convert the given value(type is string) according specification
-    part relevant for the value. Raises an isc.cc.data.DataTypeError
-    exception if conversion failed.
-    """
-    if type(spec_part) == dict and 'item_type' in spec_part:
-        data_type = spec_part['item_type']
-    else:
-        raise isc.cc.data.DataTypeError("Incorrect specification part for type conversion")
-
-    try:
-        if data_type == "integer":
-            return int(value)
-        elif data_type == "real":
-            return float(value)
-        elif data_type == "boolean":
-            return str.lower(str(value)) != 'false'
-        elif data_type == "string":
-            return str(value)
-        elif data_type == "list":
-            ret = []
-            if type(value) == list:
-                for item in value:
-                    ret.append(convert_type(spec_part['list_item_spec'], item))
-            elif type(value) == str:
-                value = value.split(',')
-                for item in value:
-                    sub_value = item.split()
-                    for sub_item in sub_value:
-                        ret.append(convert_type(spec_part['list_item_spec'],
-                                                sub_item))
-
-            if ret == []:
-                raise isc.cc.data.DataTypeError(str(value) + " is not a list")
-
-            return ret
-        elif data_type == "map":
-            try:
-                map = ast.literal_eval(value)
-                if type(map) == dict:
-                    # todo: check types of map contents too
-                    return map
-                else:
-                    raise isc.cc.data.DataTypeError(
-                               "Value in convert_type not a string "
-                               "specifying a dict")
-            except SyntaxError as se:
-                raise isc.cc.data.DataTypeError("Error parsing map: " + str(se))
-        else:
-            return value
-    except ValueError as err:
-        raise isc.cc.data.DataTypeError(str(err))
-    except TypeError as err:
-        raise isc.cc.data.DataTypeError(str(err))
-
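And the string-to-typed-value direction, again with made-up spec parts and assuming the package is importable:

    from isc.config.config_data import convert_type

    list_spec = {'item_type': 'list',
                 'list_item_spec': {'item_type': 'integer'}}
    convert_type(list_spec, "1, 2 3")                # -> [1, 2, 3]
    convert_type({'item_type': 'boolean'}, "False")  # -> False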
-def _get_map_or_list(spec_part):
-    """Returns the list or map specification if this is a list or a
-       map specification part. If not, returns the given spec_part
-       itself"""
-    if spec_part_is_map(spec_part):
-        return spec_part["map_item_spec"]
-    elif spec_part_is_list(spec_part):
-        return spec_part["list_item_spec"]
-    else:
-        return spec_part
-
-def _find_spec_part_single(cur_spec, id_part):
-    """Find the spec part for the given (partial) name. This partial
-       name does not contain separators ('/'), and the specification
-       part should be a direct child of the given specification part.
-       id_part may contain list selectors, which will be ignored.
-       Returns the child part.
-       Raises DataNotFoundError if it was not found."""
-    # strip list selector part
-    # don't need it for the spec part, so just drop it
-    id, list_indices = isc.cc.data.split_identifier_list_indices(id_part)
-
-    # The specification we want a sub-part for should be either a
-    # list or a map, which is internally represented by a dict with
-    # an element 'map_item_spec', a dict with an element 'list_item_spec',
-    # or a list (when it is the 'main' config_data element of a module).
-    if spec_part_is_map(cur_spec):
-        for cur_spec_item in cur_spec['map_item_spec']:
-            if cur_spec_item['item_name'] == id:
-                return cur_spec_item
-        # not found
-        raise isc.cc.data.DataNotFoundError(id + " not found")
-    elif spec_part_is_list(cur_spec):
-        if cur_spec['item_name'] == id:
-            return cur_spec['list_item_spec']
-        # not found
-        raise isc.cc.data.DataNotFoundError(id + " not found")
-    elif type(cur_spec) == dict and 'named_set_item_spec' in cur_spec.keys():
-        return cur_spec['named_set_item_spec']
-    elif type(cur_spec) == list:
-        for cur_spec_item in cur_spec:
-            if cur_spec_item['item_name'] == id:
-                return cur_spec_item
-        # not found
-        raise isc.cc.data.DataNotFoundError(id + " not found")
-    else:
-        raise isc.cc.data.DataNotFoundError("Not a correct config specification")
-
-def find_spec_part(element, identifier, strict_identifier = True):
-    """find the data definition for the given identifier
-       returns either a map with 'item_name' etc, or a list of those
-       Parameters:
-       element: The specification element to start the search in
-       identifier: The element to find (relative to element above)
-       strict_identifier: If True (the default), additional checking occurs.
-                          Currently the only check is whether a list index is
-                          specified (except for the last part of the
-                          identifier)
-       Raises a DataNotFoundError if the data is not found, or if
-       strict_identifier is True and any non-final identifier parts
-       (i.e. before the last /) identify a list element and do not contain
-       an index.
-       Returns the spec element identified by the given identifier.
-    """
-    if identifier == "":
-        return element
-    id_parts = identifier.split("/")
-    id_parts[:] = (value for value in id_parts if value != "")
-    cur_el = element
-
-    # up to the last element, if the result is a map or a list,
-    # we want its subspecification (i.e. list_item_spec or
-    # map_item_spec). For the last element in the identifier we
-    # always want the 'full' spec of the item
-    for id_part in id_parts[:-1]:
-        cur_el = _find_spec_part_single(cur_el, id_part)
-        # As soon as we find 'any', return that
-        if cur_el["item_type"] == "any":
-            return cur_el
-        if strict_identifier and spec_part_is_list(cur_el) and\
-           not isc.cc.data.identifier_has_list_index(id_part):
-            raise isc.cc.data.DataNotFoundError(id_part +
-                                                " is a list and needs an index")
-        cur_el = _get_map_or_list(cur_el)
-
-    cur_el = _find_spec_part_single(cur_el, id_parts[-1])
-    # Due to the raw datatypes we use, it is safer to return a deep copy here
-    return copy.deepcopy(cur_el)
-
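A small hedged example of walking a specification with find_spec_part() (and spec_name_list(), defined below); the spec itself is invented for illustration:

    from isc.config.config_data import find_spec_part, spec_name_list

    config_spec = [{'item_name': 'listen', 'item_type': 'map',
                    'item_optional': False, 'item_default': {},
                    'map_item_spec': [{'item_name': 'port',
                                       'item_type': 'integer',
                                       'item_optional': False,
                                       'item_default': 5300}]}]
    find_spec_part(config_spec, "listen/port")['item_default']   # -> 5300
    spec_name_list(config_spec, "", True)                        # -> ['listen/port']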
-def spec_name_list(spec, prefix="", recurse=False):
-    """Returns a full list of all possible item identifiers in the
-       specification (part). Raises a ConfigDataError if spec is not
-       a correct spec (as returned by ModuleSpec.get_config_spec())"""
-    result = []
-    if prefix != "" and not prefix.endswith("/"):
-        prefix += "/"
-    if type(spec) == dict:
-        if spec_part_is_map(spec):
-            for map_el in spec['map_item_spec']:
-                name = map_el['item_name']
-                if map_el['item_type'] == 'map':
-                    name += "/"
-                if recurse and spec_part_is_map(map_el):
-                    result.extend(spec_name_list(map_el['map_item_spec'], prefix + map_el['item_name'], recurse))
-                else:
-                    result.append(prefix + name)
-        elif 'named_set_item_spec' in spec:
-            # we added a '/' above, but in this one case we don't want it
-            result.append(prefix[:-1])
-        # ignore any
-        elif not spec_part_is_any(spec):
-            for name in spec:
-                result.append(prefix + name + "/")
-                if recurse:
-                    result.extend(spec_name_list(spec[name], name, recurse))
-    elif type(spec) == list:
-        for list_el in spec:
-            if 'item_name' in list_el:
-                if list_el['item_type'] == "map" and recurse:
-                    result.extend(spec_name_list(list_el['map_item_spec'], prefix + list_el['item_name'], recurse))
-                else:
-                    name = list_el['item_name']
-                    result.append(prefix + name)
-            else:
-                raise ConfigDataError("Bad specification")
-    else:
-        raise ConfigDataError("Bad specification")
-    return result
-
-class ConfigData:
-    """This class stores the module specs and the current non-default
-       config values. It provides functions to get the actual value or
-       the default value if no non-default value has been set"""
-
-    def __init__(self, specification):
-        """Initialize a ConfigData instance. If specification is not
-           of type ModuleSpec, a ConfigDataError is raised."""
-        if type(specification) != isc.config.ModuleSpec:
-            raise ConfigDataError("specification is of type " + str(type(specification)) + ", not ModuleSpec")
-        self.specification = specification
-        self.data = {}
-
-    def get_value(self, identifier):
-        """Returns a tuple where the first item is the value at the
-           given identifier, and the second item is a bool which is
-           true if the value is an unset default. Raises an
-           isc.cc.data.DataNotFoundError if the identifier is bad"""
-        value = isc.cc.data.find_no_exc(self.data, identifier)
-        if value != None:
-            return value, False
-        spec = find_spec_part(self.specification.get_config_spec(), identifier)
-        if spec and 'item_default' in spec:
-            return spec['item_default'], True
-        return None, False
-
-    def get_default_value(self, identifier):
-        """Returns the default from the specification, or None if there
-           is no default"""
-        # We are searching for the default value, so we can set
-        # strict_identifier to false (in fact, we need to; we may not know
-        # some list indices, or they may not exist, we are looking for
-        # a default value for a reason here).
-        spec = find_spec_part(self.specification.get_config_spec(),
-                              identifier, False)
-        if spec and 'item_default' in spec:
-            return spec['item_default']
-        else:
-            return None
-
-    def get_module_spec(self):
-        """Returns the ModuleSpec object associated with this ConfigData"""
-        return self.specification
-
-    def set_local_config(self, data):
-        """Set the non-default config values, as passed by cfgmgr"""
-        self.data = data
-
-    def get_local_config(self):
-        """Returns the non-default config values in a dict"""
-        return self.data
-
-    def get_item_list(self, identifier = None, recurse = False):
-        """Returns a list of strings containing the full identifiers of
-           all 'sub'options at the given identifier. If recurse is True,
-           it will also add all identifiers of all children, if any"""
-        if identifier:
-            spec = find_spec_part(self.specification.get_config_spec(), identifier)
-            return spec_name_list(spec, identifier + "/")
-        return spec_name_list(self.specification.get_config_spec(), "", recurse)
-
-    def get_full_config(self):
-        """Returns a dict containing identifier: value elements, for
-           all configuration options for this module. If there is
-           a local setting, that will be used. Otherwise the value
-           will be the default as specified by the module specification.
-           If there is no default and no local setting, the value will
-           be None"""
-        items = self.get_item_list(None, True)
-        result = {}
-        for item in items:
-            value, default = self.get_value(item)
-            result[item] = value
-        return result
-
-# should we just make a class for these?
-def _create_value_map_entry(name, type, value, status = None):
-    entry = {}
-    entry['name'] = name
-    entry['type'] = type
-    entry['value'] = value
-    entry['modified'] = False
-    entry['default'] = False
-    if status == MultiConfigData.LOCAL:
-        entry['modified'] = True
-    if status == MultiConfigData.DEFAULT:
-        entry['default'] = True
-    return entry
-
-class MultiConfigData:
-    """This class stores the module specs, current non-default
-       configuration values and 'local' (uncommitted) changes for
-       multiple modules"""
-    LOCAL   = 1
-    CURRENT = 2
-    DEFAULT = 3
-    NONE    = 4
-
-    def __init__(self):
-        self._specifications = {}
-        self._current_config = {}
-        self._local_changes = {}
-
-    def clear_specifications(self):
-        """Remove all known module specifications"""
-        self._specifications = {}
-
-    def set_specification(self, spec):
-        """Add or update a ModuleSpec. Raises a ConfigDataError is spec is not a ModuleSpec"""
-        if type(spec) != isc.config.ModuleSpec:
-            raise ConfigDataError("not a datadef: " + str(type(spec)))
-        self._specifications[spec.get_module_name()] = spec
-
-    def remove_specification(self, module_name):
-        """Removes the specification with the given module name. Does nothing if it wasn't there."""
-        if module_name in self._specifications:
-            del self._specifications[module_name]
-
-    def have_specification(self, module_name):
-        """Returns True if we have a specification for the module with the given name.
-           Returns False if we do not."""
-        return module_name in self._specifications
-
-    def get_module_spec(self, module):
-        """Returns the ModuleSpec for the module with the given name.
-           If there is no such module, it returns None"""
-        if module in self._specifications:
-            return self._specifications[module]
-        else:
-            return None
-
-    def find_spec_part(self, identifier):
-        """Returns the specification for the item at the given
-           identifier, or None if not found. The first part of the
-           identifier (up to the first /) is interpreted as the module
-           name. Returns None if not found, or if identifier is not a
-           string.
-           If an index is given for a List-type element, it returns
-           the specification of the list elements, not of the list itself
-           """
-        if type(identifier) != str or identifier == "":
-            return None
-        if identifier[0] == '/':
-            identifier = identifier[1:]
-        module, sep, id = identifier.partition("/")
-        if id != "":
-            id, indices = isc.cc.data.split_identifier_list_indices(id)
-        else:
-            indices = None
-        try:
-            spec_part = find_spec_part(self._specifications[module].get_config_spec(), id)
-            if indices is not None and spec_part_is_list(spec_part):
-                return spec_part['list_item_spec']
-            else:
-                return spec_part
-        except isc.cc.data.DataNotFoundError as dnfe:
-            return None
-        except KeyError as ke:
-            return None
-
-    # this function should only be called by __request_config
-    def _set_current_config(self, config):
-        """Replace the full current config values."""
-        self._current_config = config
-
-    def get_current_config(self):
-        """Returns the current configuration as it is known by the
-           configuration manager. It is a dict where the first level is
-           the module name, and the value is the config values for
-           that module"""
-        return self._current_config
-
-    def get_local_changes(self):
-        """Returns the local config changes, i.e. those that have not
-           been committed yet and are not known by the configuration
-           manager or the modules."""
-        return self._local_changes
-
-    def set_local_changes(self, new_local_changes):
-        """Sets the entire set of local changes, used when reverting
-           changes done automatically in case there was a problem (e.g.
-           when executing commands from a script that fails halfway
-           through).
-        """
-        self._local_changes = new_local_changes
-
-    def clear_local_changes(self):
-        """Reverts all local changes"""
-        self._local_changes = {}
-
-    def get_local_value(self, identifier):
-        """Returns a specific local (uncommitted) configuration value,
-           as specified by the identifier. If the local changes do not
-           contain a new setting for this identifier, or if the
-           identifier cannot be found, None is returned. See
-           get_value() for a general way to find a configuration value
-           """
-        return isc.cc.data.find_no_exc(self._local_changes, identifier)
-
-    def get_current_value(self, identifier):
-        """Returns the current non-default value as known by the
-           configuration manager, or None if it is not set.
-           See get_value() for a general way to find a configuration
-           value
-        """
-        return isc.cc.data.find_no_exc(self._current_config, identifier)
-
-    def get_default_value(self, identifier):
-        """Returns the default value for the given identifier as
-           specified by the module specification, or None if there is
-           no default or the identifier could not be found.
-           See get_value() for a general way to find a configuration
-           value
-        """
-        try:
-            if identifier[0] == '/':
-                identifier = identifier[1:]
-            module, sep, id = identifier.partition("/")
-            # if there is a 'higher-level' list index specified, we need
-            # to check if that list specification has a default that
-            # overrides the more specific default in the final spec item
-            # (i.e. with list_default = [1, 2, 3], list_item_spec=int and
-            # default=0, the default for list[1] should return 2, not 0)
-            id_parts = isc.cc.data.split_identifier(id)
-            id_prefix = ""
-            while len(id_parts) > 0:
-                id_part = id_parts.pop(0)
-                item_id, list_indices = isc.cc.data.split_identifier_list_indices(id_part)
-                id_list = module + "/" + id_prefix + "/" + item_id
-                id_prefix += "/" + id_part
-                part_spec = find_spec_part(self._specifications[module].get_config_spec(), id_prefix)
-                if part_spec['item_type'] == 'named_set':
-                    # For named sets, the identifier is partly defined
-                    # by which values are actually present, and not
-                    # purely by the specification.
-                    # So if there is a part of the identifier left,
-                    # we need to look up the value, then see if that
-                    # contains the next part of the identifier we got
-                    if len(id_parts) == 0:
-                        if 'item_default' in part_spec:
-                            return part_spec['item_default']
-                        else:
-                            return None
-                    id_part = id_parts.pop(0)
-                    item_id, list_indices =\
-                        isc.cc.data.split_identifier_list_indices(id_part)
-
-                    named_set_value, type = self.get_value(id_list)
-                    if item_id in named_set_value.keys():
-                        result = named_set_value[item_id]
-                        # If the item is a list and we have indices in the
-                        # identifier part, continue with the item pointed to
-                        # by those indices
-                        if list_indices is not None:
-                            while len(list_indices) > 0:
-                                result = result[list_indices.pop(0)]
-
-                        if len(id_parts) > 0:
-                            # we are looking for the *default* value.
-                            # so if not present in here, we need to
-                            # lookup the one from the spec
-                            rest_of_id = "/".join(id_parts)
-                            result = isc.cc.data.find_no_exc(result, rest_of_id)
-                            if result is None:
-                                spec_part = self.find_spec_part(identifier)
-                                if 'item_default' in spec_part:
-                                    return spec_part['item_default']
-                            return result
-                        else:
-                            return result
-                    else:
-                        return None
-                elif list_indices is not None:
-                    # there are actually two kinds of default here for
-                    # lists; they can have a default value (like an
-                    # empty list), but their elements can also have
-                    # default values.
-                    # So if the list item *itself* is a default,
-                    # we need to get the value out of that. If not, we
-                    # need to find the default for the specific element.
-                    list_value, type = self.get_value(id_list)
-                    list_spec = find_spec_part(self._specifications[module].get_config_spec(), id_prefix)
-                    if type == self.DEFAULT:
-                        if 'item_default' in list_spec:
-                            list_value = list_spec['item_default']
-                            for i in list_indices:
-                                if i < len(list_value):
-                                    list_value = list_value[i]
-                                else:
-                                    # out of range, return None
-                                    return None
-
-                            if len(id_parts) > 0:
-                                rest_of_id = "/".join(id_parts)
-                                return isc.cc.data.find(list_value, rest_of_id)
-                            else:
-                                return list_value
-                    else:
-                        # we do have a non-default list, see if our indices
-                        # exist
-                        for i in list_indices:
-                            if i < len(list_value):
-                                list_value = list_value[i]
-                            else:
-                                # out of range, return None
-                                return None
-
-            spec = find_spec_part(self._specifications[module].get_config_spec(), id)
-            if 'item_default' in spec:
-                # named_set and plain items alike return the spec default here
-                return spec['item_default']
-            else:
-                return None
-
-        except isc.cc.data.DataNotFoundError as dnfe:
-            return None
-
-    def get_value(self, identifier, default = True):
-        """Returns a tuple containing value,status.
-           The value contains the configuration value for the given
-           identifier. The status reports where this value came from;
-           it is one of: LOCAL, CURRENT, DEFAULT or NONE, corresponding to
-           (local change, current setting, default as specified by the
-           specification, or not found at all). Does not check and
-           set DEFAULT if the argument 'default' is False (default
-           defaults to True)"""
-        value = self.get_local_value(identifier)
-        if value != None:
-            return value, self.LOCAL
-        value = self.get_current_value(identifier)
-        if value != None:
-            return value, self.CURRENT
-        if default:
-            value = self.get_default_value(identifier)
-            if value is not None:
-                return value, self.DEFAULT
-            else:
-                # get_default_value returns None both when there is
-                # no default and when the default is explicitly null,
-                # so we need to catch the latter case here
-                spec_part = self.find_spec_part(identifier)
-                if spec_part and 'item_default' in spec_part and\
-                   spec_part['item_default'] is None:
-                    return None, self.DEFAULT
-        return None, self.NONE
-
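The LOCAL > CURRENT > DEFAULT precedence implemented by get_value(), reduced to a standalone sketch with invented data (the real class consults the module specifications for the defaults):

    local   = {}                       # uncommitted changes
    current = {'Init/verbose': True}   # values known to the config manager
    default = {'Init/verbose': False}  # from the module specification

    def lookup(identifier):
        if identifier in local:
            return local[identifier], 'LOCAL'
        if identifier in current:
            return current[identifier], 'CURRENT'
        return default.get(identifier), 'DEFAULT'

    print(lookup('Init/verbose'))      # (True, 'CURRENT')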
-    def _append_value_item(self, result, spec_part, identifier, all, first = False):
-        # Look at the spec; it is a list of items, or a map containing 'item_name' etc
-        if type(spec_part) == list:
-            for spec_part_element in spec_part:
-                spec_part_element_name = spec_part_element['item_name']
-                self._append_value_item(result, spec_part_element, identifier + "/" + spec_part_element_name, all)
-        elif type(spec_part) == dict:
-            # depending on item type, and the value of argument 'all'
-            # we need to either add an item, or recursively go on
-            # In the case of a list that is empty, we do need to show that
-            item_name = spec_part['item_name']
-            item_type = spec_part['item_type']
-            if item_type == "list" and (all or first):
-                spec_part_list = spec_part['list_item_spec']
-                list_value, status = self.get_value(identifier)
-                # If not set and there is no default, lists show up as
-                # 'None'; treat that as an empty list instead
-                if list_value is None:
-                    list_value = []
-
-                if type(list_value) != list:
-                    # the identifier specified a single element
-                    self._append_value_item(result, spec_part_list, identifier, all)
-                else:
-                    list_len = len(list_value)
-                    if len(list_value) == 0 and (all or first):
-                        entry = _create_value_map_entry(identifier,
-                                                        item_type,
-                                                        [], status)
-                        result.append(entry)
-                    else:
-                        for i in range(len(list_value)):
-                            self._append_value_item(result, spec_part_list, "%s[%d]" % (identifier, i), all)
-            elif item_type == "map":
-                value, status = self.get_value(identifier)
-                # just show the specific contents of a map, we are
-                # almost never interested in just its name
-                spec_part_map = spec_part['map_item_spec']
-                self._append_value_item(result, spec_part_map, identifier, all)
-            elif item_type == "named_set":
-                value, status = self.get_value(identifier)
-
-                # show just the one entry, when either the map is empty,
-                # or when this element is not requested specifically
-                if len(value.keys()) == 0:
-                    entry = _create_value_map_entry(identifier,
-                                                    item_type,
-                                                    {}, status)
-                    result.append(entry)
-                elif not first and not all:
-                    entry = _create_value_map_entry(identifier,
-                                                    item_type,
-                                                    None, status)
-                    result.append(entry)
-                else:
-                    spec_part_named_set = spec_part['named_set_item_spec']
-                    for entry in value:
-                        self._append_value_item(result,
-                                                spec_part_named_set,
-                                                identifier + "/" + entry,
-                                                all)
-            else:
-                value, status = self.get_value(identifier)
-                if status == self.NONE and not spec_part['item_optional']:
-                    raise isc.cc.data.DataNotFoundError(identifier + " not found")
-
-                entry = _create_value_map_entry(identifier,
-                                                item_type,
-                                                value, status)
-                result.append(entry)
-        return
-
-
-    def get_value_maps(self, identifier = None, all = False):
-        """Returns a list of dicts, containing the following values:
-           name: name of the entry (string)
-           type: string containing the type of the value (or 'module')
-           value: value of the entry if it is a string, int, double or bool
-           modified: true if the value is a local change that has not
-                     been committed
-           default: true if the value has not been changed (i.e. the
-                    value is the default from the specification)
-           TODO: use the consts for those last ones
-           Throws DataNotFoundError if the identifier is bad
-        """
-        result = []
-        if not identifier or identifier == "/":
-            # No identifier, so we need the list of current modules
-            for module in self._specifications.keys():
-                if all:
-                    spec = self.get_module_spec(module)
-                    if spec:
-                        spec_part = spec.get_config_spec()
-                        self._append_value_item(result, spec_part, module, all, True)
-                else:
-                    entry = _create_value_map_entry(module, 'module', None)
-                    result.append(entry)
-        else:
-            # Strip off start and end slashes, if they are there
-            if len(identifier) > 0 and identifier[0] == '/':
-                identifier = identifier[1:]
-            if len(identifier) > 0 and identifier[-1] == '/':
-                identifier = identifier[:-1]
-            module, sep, id = identifier.partition('/')
-            spec = self.get_module_spec(module)
-            if spec:
-                spec_part = find_spec_part(spec.get_config_spec(), id)
-                self._append_value_item(result, spec_part, identifier, all, True)
-        return result
-
-    def unset(self, identifier):
-        """
-        Reset the value to default.
-        """
-        spec_part = self.find_spec_part(identifier)
-        if spec_part is not None:
-            isc.cc.data.unset(self._local_changes, identifier)
-        else:
-            raise isc.cc.data.DataNotFoundError(identifier + " not found")
-
-    def set_value(self, identifier, value):
-        """Set the local value at the given identifier to value. If
-           there is a specification for the given identifier, the type
-           is checked."""
-        spec_part = self.find_spec_part(identifier)
-        if spec_part is not None:
-            if value is not None:
-                id, list_indices = isc.cc.data.split_identifier_list_indices(identifier)
-                if list_indices is not None \
-                   and spec_part['item_type'] == 'list':
-                    spec_part = spec_part['list_item_spec']
-                check_type(spec_part, value)
-        else:
-            raise isc.cc.data.DataNotFoundError(identifier + " not found")
-
-        # Since we do not support list diffs (yet?), we need to
-        # copy the currently set list of items to _local_changes
-        # if we want to modify an element in there
-        # (for any list indices specified in the full identifier)
-        id_parts = isc.cc.data.split_identifier(identifier)
-        cur_id_part = '/'
-        for id_part in id_parts:
-            id, list_indices = isc.cc.data.split_identifier_list_indices(id_part)
-            cur_value, status = self.get_value(cur_id_part + id)
-            # Check if the value was there in the first place
-            # If we are at the final element, we do not care whether we found
-            # it, since if we have reached this point and it did not exist,
-            # it was apparently an optional value without a default.
-            if status == MultiConfigData.NONE and cur_id_part != "/" and\
-               cur_id_part + id != identifier:
-                raise isc.cc.data.DataNotFoundError(id_part +
-                                                    " not found in " +
-                                                    cur_id_part)
-            if list_indices is not None:
-                # And check if we don't set something outside of any
-                # list
-                cur_list = cur_value
-                for list_index in list_indices:
-                    if type(cur_list) != list:
-                        raise isc.cc.data.DataTypeError(id + " is not a list")
-                    if list_index >= len(cur_list):
-                        raise isc.cc.data.DataNotFoundError("No item " +
-                                  str(list_index) + " in " + id_part)
-                    else:
-                        cur_list = cur_list[list_index]
-                if status != MultiConfigData.LOCAL:
-                    isc.cc.data.set(self._local_changes,
-                                    cur_id_part + id,
-                                    cur_value)
-            cur_id_part = cur_id_part + id_part + "/"
-
-            # We also need to copy to local if we are changing a named set,
-            # so that the other items in the set do not disappear
-            if spec_part_is_named_set(self.find_spec_part(cur_id_part)):
-                ns_value, ns_status = self.get_value(cur_id_part)
-                if ns_status != MultiConfigData.LOCAL:
-                    isc.cc.data.set(self._local_changes,
-                                    cur_id_part,
-                                    ns_value)
-        isc.cc.data.set(self._local_changes, identifier, value)
-
-    def _get_list_items(self, item_name):
-        """This method is used in get_config_item_list, to add list
-           indices and named_set names to the completion list. If
-           the given item_name is for a list or named_set, it'll
-           return a list of those (appended to item_name), otherwise
-           the list will only contain the item_name itself.
-
-           If the item is a named set, and its contents are maps
-           or named_sets as well, a / is appended to the result
-           strings.
-
-           If the item is a list, this method is then called recursively
-           for each list entry.
-
-           This behaviour is slightly arbitrary, and currently reflects
-           the most probable way the resulting data should look;
-           for lists, bindctl would always expect their contents to
-           be added as well. For named_sets, however, we do not
-           recurse, since the resulting list may be too long.
-           This will probably change in a revision of the way this
-           data is handled; ideally, the result should always recurse,
-           but only up to a limited depth, and the resulting list
-           should probably be paginated client-side.
-
-           Parameters:
-           item_name (string): the (full) identifier for the list or
-                               named_set to enumerate.
-
-           Returns a list of strings with item names
-
-           Examples:
-           _get_list_items("Module/some_item")
-               where the item is not a list or a named_set, or where
-               said list or named set is empty, returns
-               ["Module/some_item"]
-           _get_list_items("Module/named_set")
-               where the named_set contains items with names 'a'
-               and 'b', returns
-               [ "Module/named_set/a", "Module/named_set/b" ]
-           _get_list_items("Module/named_set_of_maps")
-               where the named_set contains items with names 'a'
-               and 'b', and those items are maps themselves
-               (or other named_sets), returns
-               [ "Module/named_set/a/", "Module/named_set/b/" ]
-           _get_list_items("Module/list")
-               where the list contains 2 elements, returns
-               [ "Module/list[0]", "Module/list[1]" ]
-        """
-        spec_part = self.find_spec_part(item_name)
-        if spec_part_is_named_set(spec_part):
-            values, _ = self.get_value(item_name)
-            if values is not None and len(values) > 0:
-                subslash = ""
-                if spec_part['named_set_item_spec']['item_type'] == 'map' or\
-                   spec_part['named_set_item_spec']['item_type'] == 'named_set':
-                    subslash = "/"
-                # Don't recurse for named_sets (so as not to return too
-                # much data), but do add a / so that the user can
-                # immediately tab-complete further if needed.
-                return [ item_name + "/" + v + subslash for v in values.keys() ]
-            else:
-                return [ item_name ]
-        elif spec_part_is_list(spec_part):
-            values, _ = self.get_value(item_name)
-            if values is not None and len(values) > 0:
-                result = []
-                for i in range(len(values)):
-                    name = item_name + '[%d]' % i
-                    # Recurse for list entries, so that their sub-contents
-                    # are also added to the result
-                    result.extend(self._get_list_items(name))
-                return result
-            else:
-                return [ item_name ]
-        else:
-            return [ item_name ]
-
-    def get_config_item_list(self, identifier = None, recurse = False):
-        """Returns a list of strings containing the item_names of
-           the child items at the given identifier. If no identifier is
-           specified, returns a list of module names. The first part of
-           the identifier (up to the first /) is interpreted as the
-           module name"""
-        if identifier and identifier != "/":
-            if identifier.startswith("/"):
-                identifier = identifier[1:]
-            spec = self.find_spec_part(identifier)
-            spec_list = spec_name_list(spec, identifier + "/", recurse)
-            result_list = []
-            for spec_name in spec_list:
-                result_list.extend(self._get_list_items(spec_name))
-            return result_list
-        else:
-            if recurse:
-                id_list = []
-                for module in self._specifications.keys():
-                    id_list.extend(spec_name_list(self.find_spec_part(module), module, recurse))
-                return id_list
-            else:
-                return list(self._specifications.keys())
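
For reference, a minimal sketch of how the MultiConfigData API removed above was typically driven. The spec file name (Example.spec) and the item identifiers are illustrative assumptions; only the method names and signatures (module_spec_from_file, set_specification, get_value, set_value, get_config_item_list, get_value_maps) are taken from the removed code.

    from isc.config.module_spec import module_spec_from_file
    from isc.config.config_data import MultiConfigData

    # Load a module specification and register it with a MultiConfigData
    # instance (the spec file and its contents are hypothetical).
    spec = module_spec_from_file("Example.spec")
    mcd = MultiConfigData()
    mcd.set_specification(spec)

    # Read a value; the status indicates whether it came from a local
    # change (LOCAL), the current config (CURRENT), the specification
    # default (DEFAULT), or was not found at all (NONE).
    value, status = mcd.get_value("Example/some_item")

    # Stage a local change; set_value() type-checks against the spec and
    # copies any surrounding list or named_set into _local_changes.
    mcd.set_value("Example/some_item", 42)

    # Enumerate child identifiers, including list indices such as "[0]".
    print(mcd.get_config_item_list("Example", recurse=True))

    # get_value_maps() returns dicts with 'name', 'type', 'value',
    # 'modified' and 'default' keys, as described in its docstring.
    for entry in mcd.get_value_maps("Example", all=True):
        print(entry['name'], entry['value'])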

+ 0 - 39
src/lib/python/isc/config/config_messages.mes

@@ -1,39 +0,0 @@
-# Copyright (C) 2011  Internet Systems Consortium, Inc. ("ISC")
-#
-# Permission to use, copy, modify, and/or distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
-# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
-# AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-# OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-# PERFORMANCE OF THIS SOFTWARE.
-
-# No namespace declaration - these constants go in the global namespace
-# of the config_messages python module.
-
-# since these messages are for the python config library, care must
-# be taken that names do not conflict with the messages from the c++
-# config library. A checker script should verify that, but we do not
-# have that at this moment. So when adding a message, make sure that
-# the name is not already used in src/lib/config/config_messages.mes
-
-% CONFIG_GET_FAILED error getting configuration from cfgmgr: %1
-The configuration manager returned an error response when the module
-requested its configuration. The full error response from the
-configuration manager is appended to the log message.
-
-% CONFIG_LOG_CONFIG_ERRORS error(s) in logging configuration: %1
-There was a logging configuration update, but the internal validator
-for logging configuration found that it contained errors. The errors
-are shown, and the update is ignored.
-
-% CONFIG_SESSION_STOPPING_FAILED error sending stopping message: %1
-There was a problem when sending a message signaling that the module using
-this CCSession is stopping. This message is sent so that the rest of the
-system is aware that the module is no longer running. Apart from logging
-this message, the error itself is ignored, and the ModuleCCSession is
-still stopped. The specific exception message is printed.

+ 0 - 455
src/lib/python/isc/config/module_spec.py

@@ -1,455 +0,0 @@
-# Copyright (C) 2009  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-"""Module Specifications
-
-   A module specification holds the information about what configuration
-   a module can have, and what commands it understands. It provides
-   functions to read it from a .spec file, and to validate a given
-   set of data against the specification
-"""
-
-import json
-import sys
-import time
-
-import isc.cc.data
-
-# file objects are passed around as _io.TextIOWrapper objects
-# import that so we can check those types
-
-class ModuleSpecError(Exception):
-    """This exception is raised it the ModuleSpec fails to initialize
-       or if there is a failure or parse error reading the specification
-       file"""
-    pass
-
-def module_spec_from_file(spec_file, check = True):
-    """Returns a ModuleSpec object defined by the file at spec_file.
-       If check is True, the contents are verified. If there is an error
-       in those contents, a ModuleSpecError is raised.
-       A ModuleSpecError is also raised if the file cannot be read, or
-       if it is not valid JSON."""
-    module_spec = None
-    try:
-        if hasattr(spec_file, 'read'):
-            json_str = spec_file.read()
-            module_spec = json.loads(json_str)
-        elif type(spec_file) == str:
-            file = open(spec_file)
-            json_str = file.read()
-            module_spec = json.loads(json_str)
-            file.close()
-        else:
-            raise ModuleSpecError("spec_file not a str or file-like object")
-    except ValueError as ve:
-        raise ModuleSpecError("JSON parse error: " + str(ve))
-    except IOError as ioe:
-        raise ModuleSpecError("JSON read error: " + str(ioe))
-
-    if 'module_spec' not in module_spec:
-        raise ModuleSpecError("Data definition has no module_spec element")
-
-    result = ModuleSpec(module_spec['module_spec'], check)
-    return result
-
-class ModuleSpec:
-    def __init__(self, module_spec, check = True):
-        """Initializes a ModuleSpec object from the specification in
-           the given module_spec (which must be a dict). If check is
-           True, the contents are verified. Raises a ModuleSpec error
-           if there is something wrong with the contents of the dict"""
-        if type(module_spec) != dict:
-            raise ModuleSpecError("module_spec is of type " + str(type(module_spec)) + ", not dict")
-        if check:
-            _check(module_spec)
-        self._module_spec = module_spec
-
-    def validate_config(self, full, data, errors = None):
-        """Check whether the given piece of data conforms to this
-           data definition. If so, it returns True. If not, it will
-           return false. If errors is given, and is an array, a string
-           describing the error will be appended to it. The current
-           version stops as soon as there is one error so this list
-           will not be exhaustive. If 'full' is true, it also errors on
-           non-optional missing values. Set this to False if you want to
-           validate only a part of a configuration tree (like a list of
-           non-default values)"""
-        data_def = self.get_config_spec()
-        if data_def is not None:
-            return _validate_spec_list(data_def, full, data, errors)
-        else:
-            # no spec, always bad
-            if errors is not None:
-                errors.append("No config_data specification")
-            return False
-
-    def validate_command(self, cmd_name, cmd_params, errors = None):
-        '''Check whether the given command conforms to this command
-        definition. If so, it returns True. If not, it will return
-        False. If errors is given, and is an array, a string
-        describing the error will be appended to it. The current version
-        stops as soon as there is one error.
-           cmd_name is the command name to be validated; cmd_params
-        contains the command's parameters to be validated. cmd_params
-        must be a map, in the format:
-        {param1_name: param1_value, param2_name: param2_value}
-        '''
-        cmd_spec = self.get_commands_spec()
-        if not cmd_spec:
-            return False
-
-        for cmd in cmd_spec:
-            if cmd['command_name'] != cmd_name:
-                continue
-            return _validate_spec_list(cmd['command_args'], True, cmd_params, errors)
-
-        return False
-
-    def validate_statistics(self, full, stat, errors = None):
-        """Check whether the given piece of data conforms to this
-           data definition. If so, it returns True. If not, it will
-           return false. If errors is given, and is an array, a string
-           describing the error will be appended to it. The current
-           version stops as soon as there is one error so this list
-           will not be exhaustive. If 'full' is true, it also errors on
-           non-optional missing values. Set this to False if you want to
-           validate only a part of a statistics tree (like a list of
-           non-default values). It also checks 'item_format' for
-           time values."""
-        stat_spec = self.get_statistics_spec()
-        if stat_spec is not None:
-            return _validate_spec_list(stat_spec, full, stat, errors)
-        else:
-            # no spec, always bad
-            if errors is not None:
-                errors.append("No statistics specification")
-            return False
-
-    def get_module_name(self):
-        """Returns a string containing the name of the module as
-           specified by the specification given at __init__()"""
-        return self._module_spec['module_name']
-
-    def get_module_description(self):
-        """Returns a string containing the description of the module as
-           specified by the specification given at __init__().
-           Returns an empty string if there is no description.
-        """
-        if 'module_description' in self._module_spec:
-            return self._module_spec['module_description']
-        else:
-            return ""
-
-    def get_full_spec(self):
-        """Returns a dict representation of the full module specification"""
-        return self._module_spec
-
-    def get_config_spec(self):
-        """Returns a dict representation of the configuration data part
-           of the specification, or None if there is none."""
-        if 'config_data' in self._module_spec:
-            return self._module_spec['config_data']
-        else:
-            return None
-    
-    def get_commands_spec(self):
-        """Returns a dict representation of the commands part of the
-           specification, or None if there is none."""
-        if 'commands' in self._module_spec:
-            return self._module_spec['commands']
-        else:
-            return None
-    
-    def get_statistics_spec(self):
-        """Returns a dict representation of the statistics part of the
-           specification, or None if there is none."""
-        if 'statistics' in self._module_spec:
-            return self._module_spec['statistics']
-        else:
-            return None
-    
-    def __str__(self):
-        """Returns a string representation of the full specification"""
-        return self._module_spec.__str__()
-
-def _check(module_spec):
-    """Checks the full specification. This is a dict that contains the
-       element "module_spec", which is in itself a dict that
-       must contain at least a "module_name" (string) and optionally
-       a "config_data", a "commands" and a "statistics" element, all
-       of which are lists of dicts. Raises a ModuleSpecError if there
-       is a problem."""
-    if type(module_spec) != dict:
-        raise ModuleSpecError("data specification not a dict")
-    if "module_name" not in module_spec:
-        raise ModuleSpecError("no module_name in module_spec")
-    if "module_description" in module_spec and \
-       type(module_spec["module_description"]) != str:
-        raise ModuleSpecError("module_description is not a string")
-    if "config_data" in module_spec:
-        _check_config_spec(module_spec["config_data"])
-    if "commands" in module_spec:
-        _check_command_spec(module_spec["commands"])
-    if "statistics" in module_spec:
-        _check_statistics_spec(module_spec["statistics"])
-
-def _check_config_spec(config_data):
-    # config data is a list of items represented by dicts that contain
-    # things like "item_name", depending on the type they can have
-    # specific subitems
-    """Checks a list that contains the configuration part of the
-       specification. Raises a ModuleSpecError if there is a
-       problem."""
-    if type(config_data) != list:
-        raise ModuleSpecError("config_data is of type " + str(type(config_data)) + ", not a list of items")
-    for config_item in config_data:
-        _check_item_spec(config_item)
-
-def _check_command_spec(commands):
-    """Checks the list that contains a set of commands. Raises a
-       ModuleSpecError if there is an error"""
-    if type(commands) != list:
-        raise ModuleSpecError("commands is not a list of commands")
-    for command in commands:
-        if type(command) != dict:
-            raise ModuleSpecError("command in commands list is not a dict")
-        if "command_name" not in command:
-            raise ModuleSpecError("no command_name in command item")
-        command_name = command["command_name"]
-        if type(command_name) != str:
-            raise ModuleSpecError("command_name not a string: " + str(type(command_name)))
-        if "command_description" in command:
-            if type(command["command_description"]) != str:
-                raise ModuleSpecError("command_description not a string in " + command_name)
-        if "command_args" in command:
-            if type(command["command_args"]) != list:
-                raise ModuleSpecError("command_args is not a list in " + command_name)
-            for command_arg in command["command_args"]:
-                if type(command_arg) != dict:
-                    raise ModuleSpecError("command argument not a dict in " + command_name)
-                _check_item_spec(command_arg)
-        else:
-            raise ModuleSpecError("command_args missing in " + command_name)
-
-def _check_item_spec(config_item):
-    """Checks the dict that defines one config item
-       (i.e. containing "item_name", "item_type", etc.
-       Raises a ModuleSpecError if there is an error"""
-    if type(config_item) != dict:
-        raise ModuleSpecError("item spec not a dict")
-    if "item_name" not in config_item:
-        raise ModuleSpecError("no item_name in config item")
-    if type(config_item["item_name"]) != str:
-        raise ModuleSpecError("item_name is not a string: " + str(config_item["item_name"]))
-    item_name = config_item["item_name"]
-    if "item_type" not in config_item:
-        raise ModuleSpecError("no item_type in config item")
-    item_type = config_item["item_type"]
-    if type(item_type) != str:
-        raise ModuleSpecError("item_type in " + item_name + " is not a string: " + str(type(item_type)))
-    if item_type not in ["integer", "real", "boolean", "string", "list", "map", "named_set", "any"]:
-        raise ModuleSpecError("unknown item_type in " + item_name + ": " + item_type)
-    if "item_optional" in config_item:
-        if type(config_item["item_optional"]) != bool:
-            raise ModuleSpecError("item_default in " + item_name + " is not a boolean")
-        if not config_item["item_optional"] and "item_default" not in config_item:
-            raise ModuleSpecError("no default value for non-optional item " + item_name)
-    else:
-        raise ModuleSpecError("item_optional not in item " + item_name)
-    if "item_default" in config_item:
-        item_default = config_item["item_default"]
-        if (item_type == "integer" and type(item_default) != int) or \
-           (item_type == "real" and type(item_default) != float) or \
-           (item_type == "boolean" and type(item_default) != bool) or \
-           (item_type == "string" and type(item_default) != str) or \
-           (item_type == "list" and type(item_default) != list) or \
-           (item_type == "map" and type(item_default) != dict):
-            raise ModuleSpecError("Wrong type for item_default in " + item_name)
-    # TODO: once we have check_type, run the item default through that with the list|map_item_spec
-    if item_type == "list":
-        if "list_item_spec" not in config_item:
-            raise ModuleSpecError("no list_item_spec in list item " + item_name)
-        if type(config_item["list_item_spec"]) != dict:
-            raise ModuleSpecError("list_item_spec in " + item_name + " is not a dict")
-        _check_item_spec(config_item["list_item_spec"])
-    if item_type == "map":
-        if "map_item_spec" not in config_item:
-            raise ModuleSpecError("no map_item_sepc in map item " + item_name)
-        if type(config_item["map_item_spec"]) != list:
-            raise ModuleSpecError("map_item_spec in " + item_name + " is not a list")
-        for map_item in config_item["map_item_spec"]:
-            if type(map_item) != dict:
-                raise ModuleSpecError("map_item_spec element is not a dict")
-            _check_item_spec(map_item)
-    if 'item_format' in config_item and 'item_default' in config_item:
-        item_format = config_item["item_format"]
-        item_default = config_item["item_default"]
-        if not _check_format(item_default, item_format):
-            raise ModuleSpecError(
-                "Wrong format for " + str(item_default) + " in " + str(item_name))
-
-def _check_statistics_spec(statistics):
-    # statistics is a list of items represented by dicts that contain
-    # things like "item_name", depending on the type they can have
-    # specific subitems
-    """Checks a list that contains the statistics part of the
-       specification. Raises a ModuleSpecError if there is a
-       problem."""
-    if type(statistics) != list:
-        raise ModuleSpecError("statistics is of type " + str(type(statistics))
-                              + ", not a list of items")
-    for stat_item in statistics:
-        _check_item_spec(stat_item)
-        # Additionally checks if there are 'item_title' and
-        # 'item_description'
-        for item in [ 'item_title',  'item_description' ]:
-            if item not in stat_item:
-                raise ModuleSpecError("no " + item + " in statistics item")
-
-def _check_format(value, format_name):
-    """Check if specified value and format are correct. Return True if
-       is is correct."""
-    # TODO: should be added other format types if necessary
-    time_formats = { 'date-time' : "%Y-%m-%dT%H:%M:%SZ",
-                     'date'      : "%Y-%m-%d",
-                     'time'      : "%H:%M:%S" }
-    for fmt in time_formats:
-        if format_name == fmt:
-            try:
-                # reverse check
-                return value == time.strftime(
-                    time_formats[fmt],
-                    time.strptime(value, time_formats[fmt]))
-            except (ValueError, TypeError):
-                break
-    return False
-
-def _validate_type(spec, value, errors):
-    """Returns true if the value is of the correct type given the
-       specification"""
-    data_type = spec['item_type']
-    if data_type == "integer" and type(value) != int:
-        if errors is not None:
-            errors.append(str(value) + " should be an integer")
-        return False
-    elif data_type == "real" and type(value) != float:
-        if errors is not None:
-            errors.append(str(value) + " should be a real")
-        return False
-    elif data_type == "boolean" and type(value) != bool:
-        if errors is not None:
-            errors.append(str(value) + " should be a boolean")
-        return False
-    elif data_type == "string" and type(value) != str:
-        if errors is not None:
-            errors.append(str(value) + " should be a string")
-        return False
-    elif data_type == "list" and type(value) != list:
-        if errors is not None:
-            errors.append(str(value) + " should be a list")
-        return False
-    elif data_type == "map" and type(value) != dict:
-        if errors is not None:
-            errors.append(str(value) + " should be a map")
-        return False
-    elif data_type == "named_set" and type(value) != dict:
-        if errors is not None:
-            errors.append(str(value) + " should be a map")
-        return False
-    else:
-        return True
-
-def _validate_format(spec, value, errors):
-    """Returns true if the value is of the correct format given the
-       specification. And also return true if no 'item_format'"""
-    if "item_format" in spec:
-        item_format = spec['item_format']
-        if not _check_format(value, item_format):
-            if errors is not None:
-                errors.append("format type of " + str(value)
-                              + " should be " + item_format)
-            return False
-    return True
-
-def _validate_item(spec, full, data, errors):
-    if spec.get('item_type') == 'any':
-        return True
-    if not _validate_type(spec, data, errors):
-        return False
-    elif type(data) == list:
-        list_spec = spec['list_item_spec']
-        for data_el in data:
-            if not _validate_type(list_spec, data_el, errors):
-                return False
-            if not _validate_format(list_spec, data_el, errors):
-                return False
-            if list_spec['item_type'] == "map":
-                if not _validate_item(list_spec, full, data_el, errors):
-                    return False
-    elif type(data) == dict:
-        if 'map_item_spec' in spec:
-            if not _validate_spec_list(spec['map_item_spec'], full, data, errors):
-                return False
-        else:
-            named_set_spec = spec['named_set_item_spec']
-            for data_el in data.values():
-                if not _validate_type(named_set_spec, data_el, errors):
-                    return False
-                if not _validate_item(named_set_spec, full, data_el, errors):
-                    return False
-    elif not _validate_format(spec, data, errors):
-        return False
-    return True
-
-def _validate_spec(spec, full, data, errors):
-    item_name = spec['item_name']
-    item_optional = spec['item_optional']
-
-    if not data and item_optional:
-        return True
-    elif item_name in data:
-        return _validate_item(spec, full, data[item_name], errors)
-    elif full and not item_optional:
-        if errors is not None:
-            errors.append("non-optional item " + item_name + " missing")
-        return False
-    else:
-        return True
-
-def _validate_spec_list(module_spec, full, data, errors):
-    # we do not return immediately, there may be more errors
-    # so we keep a boolean to keep track if we found errors
-    validated = True
-
-    # check if the known items are correct
-    for spec_item in module_spec:
-        if not _validate_spec(spec_item, full, data, errors):
-            validated = False
-
-    # check if there are items in our data that are not in the
-    # specification
-    if data is not None:
-        for item_name in data:
-            found = False
-            for spec_item in module_spec:
-                if spec_item["item_name"] == item_name:
-                    found = True
-            if not found and item_name != "version":
-                if errors is not None:
-                    errors.append("unknown item " + item_name)
-                validated = False
-    return validated
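
A small driver makes the validation flow removed above (ModuleSpec._check() at construction, then _validate_spec_list()/_validate_item() on data) easier to follow. The spec dict below is a hand-written illustration with invented module and item names; the class, constructor, and validate_config() signature are those of the removed module_spec.py.

    from isc.config.module_spec import ModuleSpec

    # A minimal specification: one non-optional integer item with a default.
    spec_dict = {
        "module_name": "Example",
        "config_data": [
            { "item_name": "port",
              "item_type": "integer",
              "item_optional": False,
              "item_default": 5300 }
        ]
    }

    spec = ModuleSpec(spec_dict)       # check=True runs _check() on the dict

    errors = []
    # Correct type and all non-optional items present: validates.
    print(spec.validate_config(True, {"port": 8080}, errors))    # True
    # Wrong type: fails, and the reason is appended to 'errors'.
    print(spec.validate_config(True, {"port": "high"}, errors))  # False
    print(errors)                      # ['high should be an integer']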

+ 0 - 1
src/lib/python/isc/config/tests/.gitignore

@@ -1 +0,0 @@
-/config_test

+ 0 - 35
src/lib/python/isc/config/tests/Makefile.am

@@ -1,35 +0,0 @@
-PYCOVERAGE_RUN=@PYCOVERAGE_RUN@
-PYTESTS = config_data_test.py
-PYTESTS += module_spec_test.py
-EXTRA_DIST = $(PYTESTS)
-EXTRA_DIST += unittest_fakesession.py
-
-# If necessary (rare cases), explicitly specify paths to dynamic libraries
-# required by loadable python modules.
-LIBRARY_PATH_PLACEHOLDER =
-if SET_ENV_LIBRARY_PATH
-LIBRARY_PATH_PLACEHOLDER += $(ENV_LIBRARY_PATH)=$(abs_top_builddir)/src/lib/cryptolink/.libs:$(abs_top_builddir)/src/lib/dns/.libs:$(abs_top_builddir)/src/lib/dns/python/.libs:$(abs_top_builddir)/src/lib/cc/.libs:$(abs_top_builddir)/src/lib/config/.libs:$(abs_top_builddir)/src/lib/log/.libs:$(abs_top_builddir)/src/lib/util/.libs:$(abs_top_builddir)/src/lib/util/threads/.libs:$(abs_top_builddir)/src/lib/exceptions/.libs:$(abs_top_builddir)/src/lib/datasrc/.libs:$$$(ENV_LIBRARY_PATH)
-endif
-
-# test using command-line arguments, so use check-local target instead of TESTS
-check-local:
-if ENABLE_PYTHON_COVERAGE
-	touch $(abs_top_srcdir)/.coverage
-	rm -f .coverage
-	${LN_S} $(abs_top_srcdir)/.coverage .coverage
-endif
-	for pytest in $(PYTESTS) ; do \
-	echo Running test: $$pytest ; \
-	$(LIBRARY_PATH_PLACEHOLDER) \
-	PYTHONPATH=$(COMMON_PYTHON_PATH):$(abs_top_builddir)/src/lib/python/isc/config \
-	B10_LOCKFILE_DIR_FROM_BUILD=$(abs_top_builddir) \
-	B10_TEST_PLUGIN_DIR=$(abs_top_srcdir)/src/bin/cfgmgr/plugins \
-	CONFIG_TESTDATA_PATH=$(abs_top_srcdir)/src/lib/config/tests/testdata \
-	CONFIG_WR_TESTDATA_PATH=$(abs_top_builddir)/src/lib/config/tests/testdata \
-	$(PYCOVERAGE_RUN) $(abs_srcdir)/$$pytest || exit ; \
-	done
-
-CLEANDIRS = __pycache__
-
-clean-local:
-	rm -rf $(CLEANDIRS)

+ 0 - 871
src/lib/python/isc/config/tests/config_data_test.py

@@ -1,871 +0,0 @@
-# Copyright (C) 2010  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-#
-# Tests for the ConfigData and MultiConfigData classes
-#
-
-import unittest
-import os
-from isc.config.config_data import *
-from isc.config.module_spec import *
-
-class TestConfigData(unittest.TestCase):
-    def setUp(self):
-        if 'CONFIG_TESTDATA_PATH' in os.environ:
-            self.data_path = os.environ['CONFIG_TESTDATA_PATH']
-        else:
-            self.data_path = "../../../testdata"
-        spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec2.spec")
-        self.cd = ConfigData(spec)
-
-    #def test_module_spec_from_file(self):
-    #    spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec1.spec")
-    #    cd = ConfigData(spec)
-    #    self.assertEqual(cd.specification, spec)
-    #    self.assertEqual(cd.data, {})
-    #    self.assertRaises(ConfigDataError, ConfigData, 1)
-
-    def test_check_type(self):
-        config_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec22.spec").get_config_spec()
-        spec_part = find_spec_part(config_spec, "value1")
-        check_type(spec_part, 1)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, 1.1)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, True)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, "a")
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, [ 1, 2 ])
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, { "a": 1 })
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, 10000000000000000000000)
-
-        spec_part = find_spec_part(config_spec, "value2")
-        check_type(spec_part, 1.1)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, 1)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, True)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, "a")
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, [ 1, 2 ])
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, { "a": 1 })
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, 2.0000000e+308)
-
-        spec_part = find_spec_part(config_spec, "value3")
-        check_type(spec_part, True)
-        check_type(spec_part, False)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, 1)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, 1.1)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, "a")
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, [ 1, 2 ])
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, { "a": 1 })
-
-        spec_part = find_spec_part(config_spec, "value4")
-        check_type(spec_part, "asdf")
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, 1)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, 1.1)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, True)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, [ 1, 2 ])
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, { "a": 1 })
-
-        spec_part = find_spec_part(config_spec, "value5")
-        check_type(spec_part, [1, 2])
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, 1)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, 1.1)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, True)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, "a")
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, [ "a", "b" ])
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, { "a": 1 })
-
-        spec_part = find_spec_part(config_spec, "value6")
-        check_type(spec_part, { "value1": "aaa", "value2": 2 })
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, 1)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, 1.1)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, True)
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, "a")
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, [ 1, 2 ])
-        #self.assertRaises(isc.cc.data.DataTypeError, check_type, spec_part, { "value1": 1 })
-
-        self.assertRaises(isc.cc.data.DataTypeError, check_type, config_spec, 1)
-
-    def test_convert_type(self):
-        config_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec22.spec").get_config_spec()
-        spec_part = find_spec_part(config_spec, "value1")
-        self.assertEqual(1, convert_type(spec_part, '1'))
-        self.assertEqual(2, convert_type(spec_part, 2.1))
-        self.assertEqual(2, convert_type(spec_part, '2'))
-        self.assertEqual(3, convert_type(spec_part, '3'))
-        self.assertEqual(1, convert_type(spec_part, True))
-
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, "a")
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, [ 1, 2 ])
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, { "a": 1 })
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, 1, "a")
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, { 'somedict': 'somevalue' }, "a")
-
-        spec_part = find_spec_part(config_spec, "value2")
-        self.assertEqual(1.1, convert_type(spec_part, '1.1'))
-        self.assertEqual(123.0, convert_type(spec_part, '123'))
-        self.assertEqual(1.0, convert_type(spec_part, True))
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, "a")
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, [ 1, 2 ])
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, { "a": 1 })
-
-        spec_part = find_spec_part(config_spec, "value3")
-        self.assertEqual(True, convert_type(spec_part, 'True'))
-        self.assertEqual(False, convert_type(spec_part, 'False'))
-        self.assertEqual(True, convert_type(spec_part, 1))
-        self.assertEqual(True, convert_type(spec_part, 1.1))
-        self.assertEqual(True, convert_type(spec_part, 'a'))
-        self.assertEqual(True, convert_type(spec_part, [1, 2]))
-        self.assertEqual(True, convert_type(spec_part, {'a' : 1}))
-
-        spec_part = find_spec_part(config_spec, "value4")
-        self.assertEqual('asdf', convert_type(spec_part, "asdf"))
-        self.assertEqual('1', convert_type(spec_part, 1))
-        self.assertEqual('1.1', convert_type(spec_part, 1.1))
-        self.assertEqual('True', convert_type(spec_part, True))
-
-        spec_part = find_spec_part(config_spec, "value5")
-        self.assertEqual([1, 2], convert_type(spec_part, '1, 2'))
-        self.assertEqual([1, 2, 3], convert_type(spec_part, '1 2  3'))
-        self.assertEqual([1, 2, 3,4], convert_type(spec_part, '1 2  3, 4'))
-        self.assertEqual([1], convert_type(spec_part, [1,]))
-        self.assertEqual([1,2], convert_type(spec_part, [1,2]))
-        self.assertEqual([1,2], convert_type(spec_part, ['1', '2']))
-
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, 1.1)
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, True)
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, "a")
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, [ "a", "b" ])
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, [ "1", "b" ])
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, { "a": 1 })
-
-        spec_part = find_spec_part(config_spec, "value6")
-        self.assertEqual({}, convert_type(spec_part, '{}'))
-        self.assertEqual({ 'v61': 'a' }, convert_type(spec_part, '{ \'v61\': \'a\' }'))
-
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, 1.1)
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, True)
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, "a")
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, "1")
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, [ "a", "b" ])
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, [ "1", "b" ])
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, { "a": 1 })
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, "\"{ \"a\": 1 }\"")
-
-        spec_part = find_spec_part(config_spec, "value7")
-        self.assertEqual(['1', '2'], convert_type(spec_part, '1, 2'))
-        self.assertEqual(['1', '2', '3'], convert_type(spec_part, '1 2  3'))
-        self.assertEqual(['1', '2', '3','4'], convert_type(spec_part, '1 2  3, 4'))
-        self.assertEqual([1], convert_type(spec_part, [1,]))
-        self.assertEqual([1,2], convert_type(spec_part, [1,2]))
-        self.assertEqual(['1','2'], convert_type(spec_part, ['1', '2']))
-
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, 1.1)
-        self.assertRaises(isc.cc.data.DataTypeError, convert_type, spec_part, True)
-        self.assertEqual(['a'], convert_type(spec_part, "a"))
-        self.assertEqual(['a', 'b'], convert_type(spec_part, ["a", "b" ]))
-        self.assertEqual([1, 'b'], convert_type(spec_part, [1, "b" ]))
-
-    def test_find_spec_part(self):
-        config_spec = self.cd.get_module_spec().get_config_spec()
-        spec_part = find_spec_part(config_spec, "item1")
-        self.assertEqual({'item_name': 'item1', 'item_type': 'integer', 'item_optional': False, 'item_default': 1, }, spec_part)
-        spec_part = find_spec_part(config_spec, "/item1")
-        self.assertEqual({'item_name': 'item1', 'item_type': 'integer', 'item_optional': False, 'item_default': 1, }, spec_part)
-        self.assertRaises(isc.cc.data.DataNotFoundError, find_spec_part, config_spec, "no_such_item")
-        self.assertRaises(isc.cc.data.DataNotFoundError, find_spec_part, config_spec, "no_such_item/multilevel")
-        self.assertRaises(isc.cc.data.DataNotFoundError, find_spec_part, config_spec, "item6/multilevel")
-        self.assertRaises(isc.cc.data.DataNotFoundError, find_spec_part, 1, "item6/multilevel")
-        spec_part = find_spec_part(config_spec, "item6/value1")
-        self.assertEqual({'item_name': 'value1', 'item_type': 'string', 'item_optional': True, 'item_default': 'default'}, spec_part)
-
-        # make sure the returned data is a copy
-        spec_part['item_default'] = 'foo'
-        self.assertNotEqual(spec_part, find_spec_part(config_spec, "item6/value1"))
-
-    def test_find_spec_part_lists(self):
-        # A few specific tests for list data
-        module_spec = isc.config.module_spec_from_file(self.data_path +
-                                                       os.sep +
-                                                       "spec31.spec")
-        config_spec = module_spec.get_config_spec()
-
-        expected_spec_part = {'item_name': 'number',
-                              'item_type': 'integer',
-                              'item_default': 1,
-                              'item_optional': False}
-
-        # First a check for a correct fetch
-        spec_part = find_spec_part(config_spec,
-                                   "/first_list_items[0]/second_list_items[1]/"
-                                   "map_element/list1[1]/list2[1]")
-        self.assertEqual(expected_spec_part, spec_part)
-
-        # Leaving out an index should fail by default
-        self.assertRaises(isc.cc.data.DataNotFoundError,
-                          find_spec_part, config_spec,
-                          "/first_list_items[0]/second_list_items/"
-                          "map_element/list1[1]/list2[1]")
-
-        # But not for the last element
-        spec_part = find_spec_part(config_spec,
-                                   "/first_list_items[0]/second_list_items[1]/"
-                                   "map_element/list1[1]/list2")
-        self.assertEqual(expected_spec_part, spec_part)
-
-        # And also not if strict_identifier is false (third argument)
-        spec_part = find_spec_part(config_spec,
-                                   "/first_list_items[0]/second_list_items/"
-                                   "map_element/list1[1]/list2[1]", False)
-        self.assertEqual(expected_spec_part, spec_part)
-
-
-    def test_spec_name_list(self):
-        name_list = spec_name_list(self.cd.get_module_spec().get_config_spec())
-        self.assertEqual(['item1', 'item2', 'item3', 'item4', 'item5', 'item6'], name_list)
-        name_list = spec_name_list(self.cd.get_module_spec().get_config_spec(), "", True)
-        self.assertEqual(['item1', 'item2', 'item3', 'item4', 'item5', 'item6/value1', 'item6/value2'], name_list)
-        spec_part = find_spec_part(self.cd.get_module_spec().get_config_spec(), "item6")
-        name_list = spec_name_list(spec_part, "item6", True)
-        self.assertEqual(['item6/value1', 'item6/value2'], name_list)
-        spec_part = find_spec_part(self.cd.get_module_spec().get_config_spec(), "item6")
-        name_list = spec_name_list(spec_part, "item6", True)
-        self.assertEqual(['item6/value1', 'item6/value2'], name_list)
-
-        config_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec22.spec").get_config_spec()
-        spec_part = find_spec_part(config_spec, "value9")
-        name_list = spec_name_list(spec_part, "value9", True)
-        self.assertEqual(['value9/v91', 'value9/v92/v92a', 'value9/v92/v92b'], name_list)
-
-        name_list = spec_name_list({ "myModule": config_spec }, "", False)
-        self.assertEqual(['myModule/'], name_list)
-        name_list = spec_name_list({ "myModule": config_spec }, "", True)
-        self.assertEqual(['myModule/', 'myModule/value1', 'myModule/value2', 'myModule/value3', 'myModule/value4', 'myModule/value5', 'myModule/value6/v61', 'myModule/value6/v62', 'myModule/value7', 'myModule/value8', 'myModule/value9/v91', 'myModule/value9/v92/v92a', 'myModule/value9/v92/v92b'], name_list)
-
-        self.assertRaises(ConfigDataError, spec_name_list, 1)
-        self.assertRaises(ConfigDataError, spec_name_list, [ 'a' ])
-
-        # Test one with type any as well
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec40.spec")
-        spec_part = module_spec.get_config_spec()
-        name_list = spec_name_list(module_spec.get_config_spec())
-        self.assertEqual(['item1', 'item2', 'item3'], name_list)
-
-        # item3 itself is 'empty'
-        spec_part = find_spec_part(spec_part, 'item3')
-        name_list = spec_name_list(spec_part)
-        self.assertEqual([], name_list)
-
-
-    def test_init(self):
-        self.assertRaises(ConfigDataError, ConfigData, "asdf")
-
-    def test_get_value(self):
-        value, default = self.cd.get_value("item1")
-        self.assertEqual(1, value)
-        self.assertEqual(True, default)
-        value, default = self.cd.get_value("item2")
-        self.assertEqual(1.1, value)
-        self.assertEqual(True, default)
-        value, default = self.cd.get_value("item3")
-        self.assertEqual(True, value)
-        self.assertEqual(True, default)
-        value, default = self.cd.get_value("item4")
-        self.assertEqual("test", value)
-        self.assertEqual(True, default)
-        value, default = self.cd.get_value("item5")
-        self.assertEqual(["a", "b"], value)
-        self.assertEqual(True, default)
-        value, default = self.cd.get_value("item6")
-        self.assertEqual({}, value)
-        self.assertEqual(True, default)
-        self.assertRaises(isc.cc.data.DataNotFoundError, self.cd.get_value, "no_such_item")
-        value, default = self.cd.get_value("item6/value2")
-        self.assertEqual(None, value)
-        self.assertEqual(False, default)
-        self.assertRaises(isc.cc.data.DataNotFoundError, self.cd.get_value, "item6/no_such_item")
-
-    def test_get_default_value(self):
-        self.assertEqual(1, self.cd.get_default_value("item1"))
-        self.assertEqual('default', self.cd.get_default_value("item6/value1"))
-        self.assertEqual(None, self.cd.get_default_value("item6/value2"))
-
-        # set some local values to something else, and see if we
-        # still get the default
-        self.cd.set_local_config({"item1": 2, "item6": { "value1": "asdf" } })
-
-        self.assertEqual((2, False), self.cd.get_value("item1"))
-        self.assertEqual(1, self.cd.get_default_value("item1"))
-        self.assertEqual(('asdf', False), self.cd.get_value("item6/value1"))
-        self.assertEqual('default', self.cd.get_default_value("item6/value1"))
-
-        self.assertRaises(isc.cc.data.DataNotFoundError,
-                          self.cd.get_default_value,
-                          "does_not_exist/value1")
-        self.assertRaises(isc.cc.data.DataNotFoundError,
-                          self.cd.get_default_value,
-                          "item6/doesnotexist")
-
-    def test_set_local_config(self):
-        self.cd.set_local_config({"item1": 2})
-        value, default = self.cd.get_value("item1")
-        self.assertEqual(2, value)
-        self.assertEqual(False, default)
-
-    def test_get_local_config(self):
-        local_config = self.cd.get_local_config()
-        self.assertEqual({}, local_config)
-        my_config = { "item1": 2, "item2": 2.2, "item3": False, "item4": "asdf", "item5": [ "c", "d" ] }
-        self.cd.set_local_config(my_config)
-        self.assertEqual(my_config, self.cd.get_local_config())
-
-    def test_get_item_list(self):
-        name_list = self.cd.get_item_list()
-        self.assertEqual(['item1', 'item2', 'item3', 'item4', 'item5', 'item6'], name_list)
-        name_list = self.cd.get_item_list("", True)
-        self.assertEqual(['item1', 'item2', 'item3', 'item4', 'item5', 'item6/value1', 'item6/value2'], name_list)
-        name_list = self.cd.get_item_list("item6", False)
-        self.assertEqual(['item6/value1', 'item6/value2'], name_list)
-
-    def test_get_full_config(self):
-        full_config = self.cd.get_full_config()
-        self.assertEqual({ "item1": 1, "item2": 1.1, "item3": True, "item4": "test", "item5": ['a', 'b'], "item6/value1": 'default', 'item6/value2': None}, full_config)
-        my_config = { "item1": 2, "item2": 2.2, "item3": False, "item4": "asdf", "item5": [ "c", "d" ] }
-        self.cd.set_local_config(my_config)
-        full_config = self.cd.get_full_config()
-        self.assertEqual({ "item1": 2, "item2": 2.2, "item3": False, "item4": "asdf", "item5": [ "c", "d" ], "item6/value1": 'default', 'item6/value2': None}, full_config)
-
-class TestMultiConfigData(unittest.TestCase):
-    def setUp(self):
-        if 'CONFIG_TESTDATA_PATH' in os.environ:
-            self.data_path = os.environ['CONFIG_TESTDATA_PATH']
-        else:
-            self.data_path = "../../../testdata"
-        self.mcd = MultiConfigData()
-
-    def test_init(self):
-        self.assertEqual({}, self.mcd._specifications)
-        self.assertEqual({}, self.mcd._current_config)
-        self.assertEqual({}, self.mcd._local_changes)
-
-    def test_set_remove_specification(self):
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec1.spec")
-        self.assertFalse(self.mcd.have_specification(module_spec.get_module_name()))
-        self.mcd.set_specification(module_spec)
-        self.assertTrue(self.mcd.have_specification(module_spec.get_module_name()))
-        self.assertIn(module_spec.get_module_name(), self.mcd._specifications)
-        self.assertEqual(module_spec, self.mcd._specifications[module_spec.get_module_name()])
-        self.assertRaises(ConfigDataError, self.mcd.set_specification, "asdf")
-        self.mcd.remove_specification(module_spec.get_module_name())
-        self.assertFalse(self.mcd.have_specification(module_spec.get_module_name()))
-
-    def test_clear_specifications(self):
-        self.assertEqual(0, len(self.mcd._specifications))
-        module_spec = isc.config.module_spec_from_file(self.data_path +
-                                                       os.sep +
-                                                       "spec1.spec")
-        self.mcd.set_specification(module_spec)
-        self.assertEqual(1, len(self.mcd._specifications))
-        self.mcd.clear_specifications()
-        self.assertEqual(0, len(self.mcd._specifications))
-
-    def test_get_module_spec(self):
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec1.spec")
-        self.mcd.set_specification(module_spec)
-        module_spec2 = self.mcd.get_module_spec(module_spec.get_module_name())
-        self.assertEqual(module_spec, module_spec2)
-        module_spec3 = self.mcd.get_module_spec("no_such_module")
-        self.assertEqual(None, module_spec3)
-
-    def test_find_spec_part(self):
-        spec_part = self.mcd.find_spec_part("Spec2/item1")
-        self.assertEqual(None, spec_part)
-        spec_part = self.mcd.find_spec_part("/Spec2/item1")
-        self.assertEqual(None, spec_part)
-        module_spec = isc.config.module_spec_from_file(self.data_path +
-                                                       os.sep + "spec2.spec")
-        self.mcd.set_specification(module_spec)
-        spec_part = self.mcd.find_spec_part("Spec2/item1")
-        self.assertEqual({'item_name': 'item1', 'item_type': 'integer',
-                          'item_optional': False, 'item_default': 1, },
-                         spec_part)
-
-        # For lists, either the spec of the list itself, or the
-        # spec for the list contents should be returned (the
-        # latter when an index is given in the identifier)
-        spec_part = self.mcd.find_spec_part("Spec2/item5")
-        self.assertEqual({'item_default': ['a', 'b'],
-                          'item_name': 'item5',
-                          'item_optional': False,
-                          'item_type': 'list',
-                          'list_item_spec': {'item_default': '',
-                                             'item_name': 'list_element',
-                                             'item_optional': False,
-                                             'item_type': 'string'}},
-                         spec_part)
-        spec_part = self.mcd.find_spec_part("Spec2/item5[0]")
-        self.assertEqual({'item_default': '',
-                          'item_name': 'list_element',
-                          'item_optional': False,
-                          'item_type': 'string'},
-                         spec_part)
-
-
-    def test_find_spec_part_nested(self):
-        # Check that find_spec_part works for nested lists
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec30.spec")
-        self.mcd.set_specification(module_spec)
-        spec_part = self.mcd.find_spec_part("/lists/first_list_items[0]/second_list_items[1]/final_element")
-        self.assertEqual({'item_name': 'final_element', 'item_type': 'string', 'item_default': 'hello', 'item_optional': False}, spec_part)
-        spec_part = self.mcd.find_spec_part("/BAD_NAME/first_list_items[0]/second_list_items[1]/final_element")
-        self.assertEqual(None, spec_part)
-
-    def test_find_spec_part_nested2(self):
-        # Check that find_spec_part works for nested lists and maps
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec31.spec")
-        self.mcd.set_specification(module_spec)
-        spec_part = self.mcd.find_spec_part("/lists/first_list_items[0]/second_list_items[1]/map_element/list1[1]/list2[2]")
-        self.assertEqual({"item_name": "number", "item_type": "integer", "item_optional": False, "item_default": 1}, spec_part)
-
-        spec_part = self.mcd.find_spec_part("/DOESNOTEXIST")
-        self.assertEqual(None, spec_part)
-        spec_part = self.mcd.find_spec_part("/lists/DOESNOTEXIST")
-        self.assertEqual(None, spec_part)
-        spec_part = self.mcd.find_spec_part("/lists/first_list_items[0]/DOESNOTEXIST")
-        self.assertEqual(None, spec_part)
-        spec_part = self.mcd.find_spec_part("/lists/first_list_items[0]/second_list_items[1]/DOESNOTEXIST")
-        self.assertEqual(None, spec_part)
-        spec_part = self.mcd.find_spec_part("/lists/first_list_items[0]/second_list_items[1]/map_element/DOESNOTEXIST")
-        self.assertEqual(None, spec_part)
-        spec_part = self.mcd.find_spec_part("/lists/first_list_items[0]/second_list_items[1]/map_element/list1[1]/DOESNOTEXIST")
-        self.assertEqual(None, spec_part)
-        spec_part = self.mcd.find_spec_part("/lists/first_list_items[0]/second_list_items[1]/map_element/list1[1]/list2[1]/DOESNOTEXIST")
-        self.assertEqual(None, spec_part)
-
-    def test_get_current_config(self):
-        cf = { 'module1': { 'item1': 2, 'item2': True } }
-        self.mcd._set_current_config(cf)
-        self.assertEqual(cf, self.mcd.get_current_config())
-
-    def test_get_local_changes(self):
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec2.spec")
-        self.mcd.set_specification(module_spec)
-        local_changes = self.mcd.get_local_changes()
-        self.assertEqual({}, local_changes)
-        self.mcd.set_value("Spec2/item1", 2)
-        local_changes = self.mcd.get_local_changes()
-        self.assertEqual({"Spec2": { "item1": 2}}, local_changes)
-
-    def test_set_local_changes(self):
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec2.spec")
-        self.mcd.set_specification(module_spec)
-        self.assertEqual({}, self.mcd.get_local_changes())
-        new_local_changes = {"Spec2": { "item1": 2}}
-        self.mcd.set_local_changes(new_local_changes)
-        self.assertEqual(new_local_changes, self.mcd.get_local_changes())
-
-    def test_clear_local_changes(self):
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec2.spec")
-        self.mcd.set_specification(module_spec)
-        self.mcd.set_value("Spec2/item1", 2)
-        self.mcd.clear_local_changes()
-        local_changes = self.mcd.get_local_changes()
-        self.assertEqual({}, local_changes)
-
-    def test_get_local_value(self):
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec2.spec")
-        self.mcd.set_specification(module_spec)
-        value = self.mcd.get_local_value("Spec2/item1")
-        self.assertEqual(None, value)
-        self.mcd.set_value("Spec2/item1", 2)
-        value = self.mcd.get_local_value("Spec2/item1")
-        self.assertEqual(2, value)
-
-    def test_get_current_value(self):
-        value = self.mcd.get_current_value("Spec2/item1")
-        self.assertEqual(None, value)
-        self.mcd._current_config = { "Spec2": { "item1": 3 } }
-        value = self.mcd.get_current_value("Spec2/item1")
-        self.assertEqual(3, value)
-
-    def test_get_default_value(self):
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec2.spec")
-        self.mcd.set_specification(module_spec)
-        value = self.mcd.get_default_value("Spec2/item1")
-        self.assertEqual(1, value)
-        value = self.mcd.get_default_value("/Spec2/item1")
-        self.assertEqual(1, value)
-        value = self.mcd.get_default_value("Spec2/item5[0]")
-        self.assertEqual('a', value)
-        value = self.mcd.get_default_value("Spec2/item5[1]")
-        self.assertEqual('b', value)
-        value = self.mcd.get_default_value("Spec2/item5[5]")
-        self.assertEqual(None, value)
-        value = self.mcd.get_default_value("Spec2/item5[0][1]")
-        self.assertEqual(None, value)
-        value = self.mcd.get_default_value("Spec2/item6/value1")
-        self.assertEqual('default', value)
-        value = self.mcd.get_default_value("Spec2/item6/value2")
-        self.assertEqual(None, value)
-        value = self.mcd.get_default_value("Spec2/no_such_item/asdf")
-        self.assertEqual(None, value)
-
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec32.spec")
-        self.mcd.set_specification(module_spec)
-        value = self.mcd.get_default_value("Spec32/named_set_item")
-        self.assertEqual({ 'a': 1, 'b': 2}, value)
-        value = self.mcd.get_default_value("Spec32/named_set_item/a")
-        self.assertEqual(1, value)
-        value = self.mcd.get_default_value("Spec32/named_set_item/b")
-        self.assertEqual(2, value)
-        value = self.mcd.get_default_value("Spec32/named_set_item/no_such_item")
-        self.assertEqual(None, value)
-        # Check that top-level default value works when named set contains list
-        # (issue #2114)
-        value = self.mcd.get_default_value("Spec32/named_set_item3/values[2]")
-        self.assertEqual(3, value)
-        self.assertRaises(IndexError, self.mcd.get_default_value,
-                          "Spec32/named_set_item3/values[5]")
-
-    def test_get_value(self):
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec2.spec")
-        self.mcd.set_specification(module_spec)
-        self.mcd.set_value("Spec2/item1", 2)
-
-        value, status = self.mcd.get_value("Spec2/item1")
-        self.assertEqual(2, value)
-        self.assertEqual(MultiConfigData.LOCAL, status)
-
-        value, status = self.mcd.get_value("Spec2/item2")
-        self.assertEqual(1.1, value)
-        self.assertEqual(MultiConfigData.DEFAULT, status)
-
-        self.mcd._current_config = { "Spec2": { "item3": False } }
-
-        value, status = self.mcd.get_value("Spec2/item3")
-        self.assertEqual(False, value)
-        self.assertEqual(MultiConfigData.CURRENT, status)
-
-        value, status = self.mcd.get_value("Spec2/no_such_item")
-        self.assertEqual(None, value)
-        self.assertEqual(MultiConfigData.NONE, status)
-
-        value, status = self.mcd.get_value("Spec2/item5")
-        self.assertEqual(['a', 'b'], value)
-        self.assertEqual(MultiConfigData.DEFAULT, status)
-
-        value, status = self.mcd.get_value("Spec2/item5[0]")
-        self.assertEqual("a", value)
-        self.assertEqual(MultiConfigData.DEFAULT, status)
-
-        value, status = self.mcd.get_value("Spec2/item5[0]", False)
-        self.assertEqual(None, value)
-        self.assertEqual(MultiConfigData.NONE, status)
-
-        value, status = self.mcd.get_value("Spec2/item5[1]")
-        self.assertEqual("b", value)
-        self.assertEqual(MultiConfigData.DEFAULT, status)
-
-
-    def test_get_value_maps(self):
-        maps = self.mcd.get_value_maps()
-        self.assertEqual([], maps)
-
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec1.spec")
-        self.mcd.set_specification(module_spec)
-
-        expected = [{'default': False,
-                     'type': 'module',
-                     'name': 'Spec1',
-                     'value': None,
-                     'modified': False}]
-
-        maps = self.mcd.get_value_maps()
-        self.assertEqual(expected, maps)
-
-        maps = self.mcd.get_value_maps("/")
-        self.assertEqual(expected, maps)
-
-        maps = self.mcd.get_value_maps('Spec2')
-        self.assertEqual([], maps)
-        maps = self.mcd.get_value_maps('Spec1')
-        self.assertEqual([], maps)
-        self.mcd.remove_specification("Spec1")
-        self.mcd.remove_specification("foo")
-
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec2.spec")
-        self.mcd.set_specification(module_spec)
-        maps = self.mcd.get_value_maps()
-        self.assertEqual([{'default': False, 'type': 'module', 'name': 'Spec2', 'value': None, 'modified': False}], maps)
-        self.mcd._set_current_config({ "Spec2": { "item1": 2 } })
-        self.mcd.set_value("Spec2/item3", False)
-        maps = self.mcd.get_value_maps("/Spec2")
-        self.assertEqual([{'default': False, 'type': 'integer', 'name': 'Spec2/item1', 'value': 2, 'modified': False},
-                          {'default': True, 'type': 'real', 'name': 'Spec2/item2', 'value': 1.1, 'modified': False},
-                          {'default': False, 'type': 'boolean', 'name': 'Spec2/item3', 'value': False, 'modified': True},
-                          {'default': True, 'type': 'string', 'name': 'Spec2/item4', 'value': 'test', 'modified': False},
-                          {'default': True, 'type': 'list', 'name': 'Spec2/item5', 'value': ['a', 'b'], 'modified': False},
-                          {'default': True, 'type': 'string', 'name': 'Spec2/item6/value1', 'value': 'default', 'modified': False},
-                          {'default': False, 'type': 'integer', 'name': 'Spec2/item6/value2', 'value': None, 'modified': False}], maps)
-        maps = self.mcd.get_value_maps("Spec2")
-        self.assertEqual([{'default': False, 'type': 'integer', 'name': 'Spec2/item1', 'value': 2, 'modified': False},
-                          {'default': True, 'type': 'real', 'name': 'Spec2/item2', 'value': 1.1, 'modified': False},
-                          {'default': False, 'type': 'boolean', 'name': 'Spec2/item3', 'value': False, 'modified': True},
-                          {'default': True, 'type': 'string', 'name': 'Spec2/item4', 'value': 'test', 'modified': False},
-                          {'default': True, 'type': 'list', 'name': 'Spec2/item5', 'value': ['a', 'b'], 'modified': False},
-                          {'default': True, 'type': 'string', 'name': 'Spec2/item6/value1', 'value': 'default', 'modified': False},
-                          {'default': False, 'type': 'integer', 'name': 'Spec2/item6/value2', 'value': None, 'modified': False}], maps)
-        maps = self.mcd.get_value_maps("/Spec2/item5")
-        self.assertEqual([{'default': True, 'type': 'string', 'name': 'Spec2/item5[0]', 'value': 'a', 'modified': False},
-                          {'default': True, 'type': 'string', 'name': 'Spec2/item5[1]', 'value': 'b', 'modified': False}], maps)
-        maps = self.mcd.get_value_maps("/Spec2/item5[0]")
-        self.assertEqual([{'default': True, 'modified': False, 'name': 'Spec2/item5[0]', 'type': 'string', 'value': 'a'}], maps)
-        maps = self.mcd.get_value_maps("/Spec2/item1")
-        self.assertEqual([{'default': False, 'type': 'integer', 'name': 'Spec2/item1', 'value': 2, 'modified': False}], maps)
-        maps = self.mcd.get_value_maps("/Spec2/item2")
-        self.assertEqual([{'default': True, 'type': 'real', 'name': 'Spec2/item2', 'value': 1.1, 'modified': False}], maps)
-        maps = self.mcd.get_value_maps("/Spec2/item3")
-        self.assertEqual([{'default': False, 'type': 'boolean', 'name': 'Spec2/item3', 'value': False, 'modified': True}], maps)
-        maps = self.mcd.get_value_maps("/Spec2/item4")
-        self.assertEqual([{'default': True, 'type': 'string', 'name': 'Spec2/item4', 'value': 'test', 'modified': False}], maps)
-
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec24.spec")
-        self.mcd.set_specification(module_spec)
-        # optional list item that is not set should return as empty list
-        maps = self.mcd.get_value_maps("/Spec24/item", 4)
-        self.assertEqual([{'default': False, 'type': 'list', 'name': 'Spec24/item', 'value': [], 'modified': False}], maps)
-
-        self.mcd._set_current_config({ "Spec24": { "item": [] } })
-        maps = self.mcd.get_value_maps("/Spec24/item")
-        self.assertEqual([{'default': False, 'modified': False, 'name': 'Spec24/item', 'type': 'list', 'value': []}], maps)
-
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec22.spec")
-        self.mcd.set_specification(module_spec)
-        expected = [{'default': True,
-                     'modified': False,
-                     'name': 'Spec22/value9/v91',
-                     'type': 'string',
-                     'value': 'def'},
-                    {'default': True,
-                     'modified': False,
-                     'name': 'Spec22/value9/v92/v92a',
-                     'type': 'string',
-                     'value': 'Hello'
-                    },
-                    {'default': True,
-                     'modified': False,
-                     'name': 'Spec22/value9/v92/v92b',
-                     'type': 'integer',
-                     'value': 56176
-                    }
-                   ]
-        maps = self.mcd.get_value_maps("/Spec22/value9")
-        self.assertEqual(expected, maps)
-
-        # A slash at the end should not produce different output
-        maps = self.mcd.get_value_maps("/Spec22/value9/")
-        self.assertEqual(expected, maps)
-
-        # A slash at the end should not produce different output with
-        # indices either
-        expected2 = [{'default': True,
-                      'type': 'integer',
-                      'name': 'Spec22/value5[1]',
-                      'value': 'b',
-                      'modified': False}]
-        maps = self.mcd.get_value_maps("/Spec22/value5[1]/")
-        self.assertEqual(expected2, maps)
-
-    def test_get_value_maps_named_set(self):
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec32.spec")
-        self.mcd.set_specification(module_spec)
-        maps = self.mcd.get_value_maps()
-        self.assertEqual([{'default': False, 'type': 'module',
-                           'name': 'Spec32', 'value': None,
-                           'modified': False}], maps)
-        maps = self.mcd.get_value_maps("/Spec32/named_set_item")
-        self.assertEqual(len(maps), 2)
-        self.assertIn({'default': True, 'type': 'integer',
-                       'name': 'Spec32/named_set_item/a',
-                       'value': 1, 'modified': False},
-                      maps)
-        self.assertIn({'default': True, 'type': 'integer',
-                       'name': 'Spec32/named_set_item/b',
-                       'value': 2, 'modified': False},
-                      maps)
-        maps = self.mcd.get_value_maps("/Spec32/named_set_item/a")
-        self.assertEqual([{'default': True, 'type': 'integer',
-                           'name': 'Spec32/named_set_item/a',
-                           'value': 1, 'modified': False}], maps)
-        maps = self.mcd.get_value_maps("/Spec32/named_set_item/b")
-        self.assertEqual([{'default': True, 'type': 'integer',
-                           'name': 'Spec32/named_set_item/b',
-                           'value': 2, 'modified': False}], maps)
-
-    def test_set_value(self):
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec2.spec")
-        self.mcd.set_specification(module_spec)
-        self.mcd.set_value("Spec2/item1", 2)
-        self.assertRaises(isc.cc.data.DataTypeError,
-                          self.mcd.set_value, "Spec2/item1", "asdf")
-
-        self.assertRaises(isc.cc.data.DataNotFoundError,
-                          self.mcd.set_value, "Spec2/no_such_item", 4)
-
-        self.mcd.set_value("Spec2/item5[0]", "c")
-        value, status = self.mcd.get_value("Spec2/item5[0]")
-        self.assertEqual(value, "c")
-        self.assertEqual(MultiConfigData.LOCAL, status)
-
-        self.assertRaises(isc.cc.data.DataTypeError, self.mcd.set_value, "Spec2/item5[a]", "asdf")
-
-
-    def test_unset(self):
-        """
-        Test the unset command works.
-        """
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec2.spec")
-        self.mcd.set_specification(module_spec)
-        self.mcd.set_specification(module_spec)
-        value, status = self.mcd.get_value("Spec2/item1")
-        # This is the default first
-        self.assertEqual(1, value)
-        self.assertEqual(MultiConfigData.DEFAULT, status)
-        # Unsetting a default item does nothing.
-        self.mcd.unset("Spec2/item1")
-        value, status = self.mcd.get_value("Spec2/item1")
-        # This should be the default
-        self.assertEqual(1, value)
-        self.assertEqual(MultiConfigData.DEFAULT, status)
-        # Set it to something else
-        self.mcd.set_value("Spec2/item1", 42)
-        value, status = self.mcd.get_value("Spec2/item1")
-        self.assertEqual(42, value)
-        self.assertEqual(MultiConfigData.LOCAL, status)
-        # Try to unset it
-        self.mcd.unset("Spec2/item1")
-        value, status = self.mcd.get_value("Spec2/item1")
-        # This should be the default
-        self.assertEqual(1, value)
-        self.assertEqual(MultiConfigData.DEFAULT, status)
-        # Unset a non-existent item. Should raise.
-        self.assertRaises(isc.cc.data.DataNotFoundError, self.mcd.unset, "Spec2/doesnotexist")
-
-    def test_get_config_item_list(self):
-        # Test get_config_item_list(), which returns a list of the config
-        # items in a specification.
-        config_items = self.mcd.get_config_item_list()
-        self.assertEqual([], config_items)
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec2.spec")
-        self.mcd.set_specification(module_spec)
-        config_items = self.mcd.get_config_item_list()
-        self.assertEqual(['Spec2'], config_items)
-        config_items = self.mcd.get_config_item_list(None, False)
-        self.assertEqual(['Spec2'], config_items)
-        config_items = self.mcd.get_config_item_list(None, True)
-        self.assertEqual(['Spec2/item1', 'Spec2/item2', 'Spec2/item3',
-                          'Spec2/item4', 'Spec2/item5', 'Spec2/item6/value1',
-                          'Spec2/item6/value2'], config_items)
-        config_items = self.mcd.get_config_item_list("Spec2", True)
-        self.assertEqual(['Spec2/item1', 'Spec2/item2', 'Spec2/item3',
-                          'Spec2/item4', 'Spec2/item5[0]', 'Spec2/item5[1]',
-                          'Spec2/item6/value1', 'Spec2/item6/value2'],
-                          config_items)
-        config_items = self.mcd.get_config_item_list("Spec2")
-        self.assertEqual(['Spec2/item1', 'Spec2/item2', 'Spec2/item3',
-                          'Spec2/item4', 'Spec2/item5[0]', 'Spec2/item5[1]',
-                          'Spec2/item6'], config_items)
-        config_items = self.mcd.get_config_item_list("/Spec2")
-        self.assertEqual(['Spec2/item1', 'Spec2/item2', 'Spec2/item3',
-                          'Spec2/item4', 'Spec2/item5[0]', 'Spec2/item5[1]',
-                          'Spec2/item6'], config_items)
-        config_items = self.mcd.get_config_item_list("Spec2", True)
-        self.assertEqual(['Spec2/item1', 'Spec2/item2', 'Spec2/item3',
-                          'Spec2/item4', 'Spec2/item5[0]', 'Spec2/item5[1]',
-                          'Spec2/item6/value1', 'Spec2/item6/value2'],
-                          config_items)
-
-        # When lists are empty, it should only show the name
-        self.mcd.set_value('Spec2/item5', [])
-        config_items = self.mcd.get_config_item_list("Spec2", True)
-        self.assertEqual(['Spec2/item1', 'Spec2/item2', 'Spec2/item3',
-                          'Spec2/item4', 'Spec2/item5', 'Spec2/item6/value1',
-                          'Spec2/item6/value2'], config_items)
-
-        # Also if the list is None (optional value and no default)
-        module_spec = isc.config.module_spec_from_file(self.data_path
-                                                       + os.sep
-                                                       + "spec42.spec")
-        self.mcd.set_specification(module_spec)
-        config_items = self.mcd.get_config_item_list("Spec42", True)
-        self.assertEqual(['Spec42/list_item'], config_items)
-
-    def test_is_named_set(self):
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec32.spec")
-        self.mcd.set_specification(module_spec)
-        spec_part = self.mcd.find_spec_part("Spec32/named_set_item")
-        self.assertTrue(spec_part_is_named_set(spec_part))
-
-    def test_get_config_item_list_named_set(self):
-        config_items = self.mcd.get_config_item_list()
-        self.assertEqual([], config_items)
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + "spec32.spec")
-        self.mcd.set_specification(module_spec)
-        config_items = self.mcd.get_config_item_list()
-        self.assertEqual(['Spec32'], config_items)
-        config_items = self.mcd.get_config_item_list(None, False)
-        self.assertEqual(['Spec32'], config_items)
-        config_items = self.mcd.get_config_item_list(None, True)
-        self.assertEqual(['Spec32/named_set_item', 'Spec32/named_set_item2',
-                          'Spec32/named_set_item3', 'Spec32/named_set_item4'],
-                         config_items)
-        self.mcd.set_value('Spec32/named_set_item', { "aaaa": 4, "aabb": 5,
-                                                      "bbbb": 6})
-        config_items = self.mcd.get_config_item_list("/Spec32/named_set_item",
-                                                     True)
-        self.assertEqual(len(config_items), 3)
-        self.assertIn('Spec32/named_set_item/aaaa', config_items)
-        self.assertIn('Spec32/named_set_item/aabb', config_items)
-        self.assertIn('Spec32/named_set_item/bbbb', config_items)
-
-        self.mcd.set_value('Spec32/named_set_item', {})
-        config_items = self.mcd.get_config_item_list("/Spec32/named_set_item",
-                                                     True)
-        self.assertEqual(['Spec32/named_set_item'], config_items)
-
-        self.mcd.set_value('Spec32/named_set_item4', { "a": { "aa": 4 } } )
-        config_items = self.mcd.get_config_item_list("/Spec32/named_set_item4",
-                                                     True)
-        self.assertEqual(['Spec32/named_set_item4/a/'], config_items)
-        config_items = self.mcd.get_config_item_list("/Spec32/named_set_item4/a",
-                                                     True)
-        self.assertEqual(['Spec32/named_set_item4/a/aa'], config_items)
-
-    def test_set_named_set_nonlocal(self):
-        # Test whether a default named set is copied to local if a subitem
-        # is changed, and that other items in the set do not get lost
-        module_spec = isc.config.module_spec_from_file(self.data_path + os.sep + 'spec32.spec')
-        self.mcd.set_specification(module_spec)
-        value, status = self.mcd.get_value('Spec32/named_set_item')
-        self.assertEqual({'a': 1, 'b': 2}, value)
-        self.assertEqual(MultiConfigData.DEFAULT, status)
-
-        self.mcd.set_value('Spec32/named_set_item/b', 3)
-        value, status = self.mcd.get_value('Spec32/named_set_item')
-        self.assertEqual({'a': 1, 'b': 3}, value)
-        self.assertEqual(MultiConfigData.LOCAL, status)
-
-if __name__ == '__main__':
-    unittest.main()
-
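The file above tested ConfigData and MultiConfigData from isc.config. For orientation, here is a minimal sketch of the set_value()/get_value() pattern those tests exercise; the import path, the spec2.spec fixture and the CONFIG_TESTDATA_PATH fallback are taken from the test code itself and are assumptions, not a documented public API.

# Minimal MultiConfigData sketch, assuming the in-tree isc.config package
# and the spec2.spec fixture used by the removed tests.
import os
import isc.config
from isc.config.config_data import MultiConfigData

data_path = os.environ.get('CONFIG_TESTDATA_PATH', '../../../testdata')
spec = isc.config.module_spec_from_file(os.path.join(data_path, 'spec2.spec'))

mcd = MultiConfigData()
mcd.set_specification(spec)

# With no local or current value, the spec default is reported.
value, status = mcd.get_value("Spec2/item1")
assert (value, status) == (1, MultiConfigData.DEFAULT)

# A local change overrides the default and is flagged as LOCAL.
mcd.set_value("Spec2/item1", 2)
value, status = mcd.get_value("Spec2/item1")
assert (value, status) == (2, MultiConfigData.LOCAL)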

+ 0 - 36
src/lib/python/isc/config/tests/config_test.in

@@ -1,36 +0,0 @@
-#! /bin/sh
-
-# Copyright (C) 2010  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-PYTHON_EXEC=${PYTHON_EXEC:-@PYTHON@}
-export PYTHON_EXEC
-
-CONFIG_PATH=@abs_top_srcdir@/src/lib/python/isc/config/tests
-
-PYTHONPATH=@abs_top_srcdir@/src/lib/python
-export PYTHONPATH
-
-CONFIG_TESTDATA_PATH=@abs_top_srcdir@/src/lib/config/testdata
-export CONFIG_TESTDATA_PATH
-
-cd ${BIND10_PATH}
-${PYTHON_EXEC} -O ${CONFIG_PATH}/config_data_test.py $*
-
-${PYTHON_EXEC} -O ${CONFIG_PATH}/module_spec_test.py $*
-
-${PYTHON_EXEC} -O ${CONFIG_PATH}/ccsession_test.py $*
-
-${PYTHON_EXEC} -O ${CONFIG_PATH}/cfgmgr_test.py $*
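The shell driver above was autoconf-templated: it exported PYTHON_EXEC, PYTHONPATH and CONFIG_TESTDATA_PATH and then ran each suite with python -O. A rough Python stand-in for one such invocation is sketched here; the two directory paths are placeholders for the @abs_top_srcdir@ substitutions, not real locations.

# Hypothetical equivalent of running one suite with the environment the
# shell wrapper exported (paths are placeholders).
import os
import subprocess

env = dict(os.environ)
env['PYTHONPATH'] = '/path/to/src/lib/python'                    # @abs_top_srcdir@/src/lib/python
env['CONFIG_TESTDATA_PATH'] = '/path/to/src/lib/config/testdata' # @abs_top_srcdir@/src/lib/config/testdata

subprocess.check_call(['python3', '-O', 'config_data_test.py'], env=env)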

+ 0 - 447
src/lib/python/isc/config/tests/module_spec_test.py

@@ -1,447 +0,0 @@
-# Copyright (C) 2009  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-#
-# Tests for the module_spec module
-#
-
-import unittest
-import os
-from isc.config import ModuleSpec, ModuleSpecError
-import isc.cc.data
-
-class TestModuleSpec(unittest.TestCase):
-
-    def setUp(self):
-        if 'CONFIG_TESTDATA_PATH' in os.environ:
-            self.data_path = os.environ['CONFIG_TESTDATA_PATH']
-        else:
-            self.data_path = "../../../testdata"
-
-    def spec_file(self, filename):
-        return(self.data_path + os.sep + filename)
-
-    def read_spec_file(self, filename):
-        return isc.config.module_spec_from_file(self.spec_file(filename))
-
-    def spec1(self, dd):
-        module_spec = dd.get_full_spec()
-        self.assertIn('module_name', module_spec)
-        self.assertEqual(module_spec['module_name'], "Spec1")
-        
-    def test_open_file_name(self):
-        dd = self.read_spec_file("spec1.spec")
-        self.spec1(dd)
-
-    def test_open_file_obj(self):
-        with open(self.spec_file("spec1.spec")) as file1:
-            dd = isc.config.module_spec_from_file(file1)
-        self.spec1(dd)
-
-    def test_open_bad_file_obj(self):
-        self.assertRaises(ModuleSpecError, isc.config.module_spec_from_file, 1)
-        # spec28.spec contains single quotes, which the JSON parser does not accept
-        self.assertRaises(ModuleSpecError, isc.config.module_spec_from_file, self.spec_file("spec28.spec"), False)
-        my_spec_file = open(self.spec_file("spec28.spec"))
-        self.assertRaises(ModuleSpecError, isc.config.module_spec_from_file, my_spec_file, False)
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec_from_file, self.spec_file("does_not_exist"), False)
-
-    def test_bad_specfiles(self):
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec3.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec4.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec5.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec6.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec7.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec8.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec9.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec10.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec11.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec12.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec13.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec14.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec15.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec16.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec17.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec18.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec19.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec20.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec21.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec26.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec34.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec35.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec36.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec37.spec")
-        self.assertRaises(ModuleSpecError, self.read_spec_file, "spec38.spec")
-
-    def validate_data(self, specfile_name, datafile_name):
-        dd = self.read_spec_file(specfile_name);
-        with open(self.spec_file(datafile_name)) as data_file:
-            data_str = data_file.read()
-        data = isc.cc.data.parse_value_str(data_str)
-        return dd.validate_config(True, data)
-        
-    def test_data_validation(self):
-        self.assertEqual(True, self.validate_data("spec22.spec", "data22_1.data"))
-        self.assertEqual(False, self.validate_data("spec22.spec", "data22_2.data"))
-        self.assertEqual(False, self.validate_data("spec22.spec", "data22_3.data"))
-        self.assertEqual(False, self.validate_data("spec22.spec", "data22_4.data"))
-        self.assertEqual(False, self.validate_data("spec22.spec", "data22_5.data"))
-        self.assertEqual(True, self.validate_data("spec22.spec", "data22_6.data"))
-        self.assertEqual(True, self.validate_data("spec22.spec", "data22_7.data"))
-        self.assertEqual(False, self.validate_data("spec22.spec", "data22_8.data"))
-        self.assertEqual(True, self.validate_data("spec32.spec", "data32_1.data"))
-        self.assertEqual(False, self.validate_data("spec32.spec", "data32_2.data"))
-        self.assertEqual(False, self.validate_data("spec32.spec", "data32_3.data"))
-
-    def validate_command_params(self, specfile_name, datafile_name, cmd_name):
-        dd = self.read_spec_file(specfile_name);
-        with open(self.spec_file(datafile_name)) as data_file:
-            data_str = data_file.read()
-        params = isc.cc.data.parse_value_str(data_str)
-        return dd.validate_command(cmd_name, params)
-
-    def test_command_validation(self):
-        # tests for a command that doesn't take an argument
-        self.assertEqual(True, self.read_spec_file("spec2.spec").validate_command("shutdown", None));
-        self.assertEqual(False, self.read_spec_file("spec2.spec").validate_command("shutdown", '{"val": 1}'));
-        self.assertEqual(True, self.validate_command_params("spec27.spec", "data22_1.data", 'cmd1'))
-        self.assertEqual(False, self.validate_command_params("spec27.spec", "data22_2.data",'cmd1'))
-        self.assertEqual(False, self.validate_command_params("spec27.spec", "data22_3.data", 'cmd1'))
-        self.assertEqual(False, self.validate_command_params("spec27.spec", "data22_4.data", 'cmd1'))
-        self.assertEqual(False, self.validate_command_params("spec27.spec", "data22_5.data", 'cmd1'))
-        self.assertEqual(True, self.validate_command_params("spec27.spec", "data22_6.data", 'cmd1'))
-        self.assertEqual(True, self.validate_command_params("spec27.spec", "data22_7.data", 'cmd1'))
-        self.assertEqual(False, self.validate_command_params("spec27.spec", "data22_8.data", 'cmd1'))
-        self.assertEqual(False, self.validate_command_params("spec27.spec", "data22_8.data", 'cmd2'))
-
-    def test_statistics_validation(self):
-        def _validate_stat(specfile_name, datafile_name):
-            dd = self.read_spec_file(specfile_name);
-            with open(self.spec_file(datafile_name)) as data_file:
-                data_str = data_file.read()
-            data = isc.cc.data.parse_value_str(data_str)
-            return dd.validate_statistics(True, data, [])
-        self.assertFalse(self.read_spec_file("spec1.spec").validate_statistics(True, None, None));
-        self.assertTrue(_validate_stat("spec33.spec", "data33_1.data"))
-        self.assertFalse(_validate_stat("spec33.spec", "data33_2.data"))
-        self.assertTrue(_validate_stat("spec41.spec", "data41_1.data"))
-        self.assertFalse(_validate_stat("spec41.spec", "data41_2.data"))
-
-    def test_init(self):
-        self.assertRaises(ModuleSpecError, ModuleSpec, 1)
-        module_spec = isc.config.module_spec_from_file(self.spec_file("spec1.spec"), False)
-        self.spec1(module_spec)
-
-        module_spec = isc.config.module_spec_from_file(self.spec_file("spec25.spec"), True)
-        self.assertEqual("Spec25", module_spec.get_module_name())
-        self.assertEqual("Just an empty module", module_spec.get_module_description())
-
-    def test_str(self):
-        module_spec = isc.config.module_spec_from_file(self.spec_file("spec1.spec"), False)
-        self.assertEqual(module_spec.__str__(), "{'module_name': 'Spec1'}")
-
-    def test_check_module_spec(self):
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check, 1)
-        
-    def test_check_command_spec(self):
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_command_spec, 1 )
-        
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_command_spec, [ 1 ] )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_command_spec,
-                          [ { 'command_name': 1,
-                              'command_description': 'just for testing',
-                              'command_args': [
-                                { 'item_name': 'arg1',
-                                  'item_type': 'string',
-                                  'item_optional': True
-                                }
-                              ]
-                            }
-                          ]
-                         )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_command_spec,
-                          [ { 'command_name': 'test_command',
-                              'command_description': 1,
-                              'command_args': [
-                                { 'item_name': 'arg1',
-                                  'item_type': 'string',
-                                  'item_optional': True
-                                }
-                              ]
-                            }
-                          ]
-                         )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_command_spec,
-                          [  { 'command_name': 'test_command',
-                              'command_args': [ 1 ]
-                            }
-                          ]
-                         )
-
-    def test_check_item_spec(self):
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec, 1 )
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec,
-                          { 'item_name': 1,
-                            'item_type': "string",
-                            'item_optional': False,
-                            'item_default': "asdf"
-                          }
-                         )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec,
-                          { 'item_name': "an_item",
-                            'item_type': "bad_type",
-                            'item_optional': False,
-                            'item_default': "asdf"
-                          }
-                         )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec,
-                          { 'item_name': "an_item",
-                            'item_type': 1,
-                            'item_optional': False,
-                            'item_default': "asdf"
-                          }
-                         )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec,
-                          { 'item_name': "an_item",
-                            'item_type': "string",
-                            'item_optional': False,
-                          }
-                         )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec,
-                          { 'item_name': "an_item",
-                            'item_type': "string",
-                            'item_optional': 1,
-                          }
-                         )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec,
-                          { 'item_name': "an_item",
-                            'item_type': "string",
-                            'item_optional': False,
-                            'item_default': 1
-                          }
-                         )
-
-        
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec,
-                          { 'item_name': "an_item",
-                            'item_type': "list",
-                            'item_optional': False,
-                            'item_default': []
-                          }
-                         )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec,
-                          { 'item_name': "an_item",
-                            'item_type': "list",
-                            'item_optional': False,
-                            'item_default': [],
-                            'list_item_spec': 1
-                          }
-                         )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec,
-                          { 'item_name': "an_item",
-                            'item_type': "map",
-                            'item_optional': False,
-                            'item_default': {}
-                          }
-                         )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec,
-                          { 'item_name': "an_item",
-                            'item_type': "map",
-                            'item_optional': False,
-                            'item_default': {},
-                            'map_item_spec': 1
-                          }
-                         )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec,
-                          { 'item_name': "an_item",
-                            'item_type': "map",
-                            'item_optional': False,
-                            'item_default': {},
-                            'map_item_spec': [ 1 ]
-                          }
-                         )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec,
-                          { 'item_name': "a_datetime",
-                            'item_type': "string",
-                            'item_optional': False,
-                            'item_default': 1,
-                            'item_format': "date-time"
-                          }
-                         )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec,
-                          { 'item_name': "a_date",
-                            'item_type': "string",
-                            'item_optional': False,
-                            'item_default': 1,
-                            'item_format': "date"
-                          }
-                         )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec,
-                          { 'item_name': "a_time",
-                            'item_type': "string",
-                            'item_optional': False,
-                            'item_default': 1,
-                            'item_format': "time"
-                          }
-                         )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec,
-                          { 'item_name': "a_datetime",
-                            'item_type': "string",
-                            'item_optional': False,
-                            'item_default': "2011-05-27T19:42:57Z",
-                            'item_format': "dummy-format"
-                          }
-                         )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec,
-                          { 'item_name': "a_date",
-                            'item_type': "string",
-                            'item_optional': False,
-                            'item_default': "2011-05-27",
-                            'item_format': "dummy-format"
-                          }
-                         )
-
-        self.assertRaises(ModuleSpecError, isc.config.module_spec._check_item_spec,
-                          { 'item_name': "a_time",
-                            'item_type': "string",
-                            'item_optional': False,
-                            'item_default': "19:42:57Z",
-                            'item_format': "dummy-format"
-                          }
-                         )
-
-    def test_check_format(self):
-        self.assertTrue(isc.config.module_spec._check_format('2011-05-27T19:42:57Z', 'date-time'))
-        self.assertTrue(isc.config.module_spec._check_format('2011-05-27', 'date'))
-        self.assertTrue(isc.config.module_spec._check_format('19:42:57', 'time'))
-        self.assertFalse(isc.config.module_spec._check_format('2011-05-27T19:42:57Z', 'dummy'))
-        self.assertFalse(isc.config.module_spec._check_format('2011-05-27', 'dummy'))
-        self.assertFalse(isc.config.module_spec._check_format('19:42:57', 'dummy'))
-        self.assertFalse(isc.config.module_spec._check_format('2011-13-99T99:99:99Z', 'date-time'))
-        self.assertFalse(isc.config.module_spec._check_format('2011-13-99', 'date'))
-        self.assertFalse(isc.config.module_spec._check_format('99:99:99', 'time'))
-        self.assertFalse(isc.config.module_spec._check_format('', 'date-time'))
-        self.assertFalse(isc.config.module_spec._check_format(None, 'date-time'))
-        self.assertFalse(isc.config.module_spec._check_format(None, None))
-        # wrong date-time-type format not ending with "Z"
-        self.assertFalse(isc.config.module_spec._check_format('2011-05-27T19:42:57', 'date-time'))
-        # wrong date-type format ending with "T"
-        self.assertFalse(isc.config.module_spec._check_format('2011-05-27T', 'date'))
-        # wrong time-type format ending with "Z"
-        self.assertFalse(isc.config.module_spec._check_format('19:42:57Z', 'time'))
-
-    def test_validate_type(self):
-        errors = []
-        self.assertEqual(True, isc.config.module_spec._validate_type({ 'item_type': 'integer' }, 1, errors))
-        self.assertEqual(False, isc.config.module_spec._validate_type({ 'item_type': 'integer' }, "a", None))
-        self.assertEqual(False, isc.config.module_spec._validate_type({ 'item_type': 'integer' }, "a", errors))
-        self.assertEqual(['a should be an integer'], errors)
-
-        errors = []
-        self.assertEqual(True, isc.config.module_spec._validate_type({ 'item_type': 'real' }, 1.1, errors))
-        self.assertEqual(False, isc.config.module_spec._validate_type({ 'item_type': 'real' }, "a", None))
-        self.assertEqual(False, isc.config.module_spec._validate_type({ 'item_type': 'real' }, "a", errors))
-        self.assertEqual(['a should be a real'], errors)
-
-        errors = []
-        self.assertEqual(True, isc.config.module_spec._validate_type({ 'item_type': 'boolean' }, True, errors))
-        self.assertEqual(False, isc.config.module_spec._validate_type({ 'item_type': 'boolean' }, "a", None))
-        self.assertEqual(False, isc.config.module_spec._validate_type({ 'item_type': 'boolean' }, "a", errors))
-        self.assertEqual(['a should be a boolean'], errors)
-
-        errors = []
-        self.assertEqual(True, isc.config.module_spec._validate_type({ 'item_type': 'string' }, "a", errors))
-        self.assertEqual(False, isc.config.module_spec._validate_type({ 'item_type': 'string' }, 1, None))
-        self.assertEqual(False, isc.config.module_spec._validate_type({ 'item_type': 'string' }, 1, errors))
-        self.assertEqual(['1 should be a string'], errors)
-
-        errors = []
-        self.assertEqual(True, isc.config.module_spec._validate_type({ 'item_type': 'list' }, [ 1, 1], errors))
-        self.assertEqual(False, isc.config.module_spec._validate_type({ 'item_type': 'list' }, 1, None))
-        self.assertEqual(False, isc.config.module_spec._validate_type({ 'item_type': 'list' }, 1, errors))
-        self.assertEqual(['1 should be a list'], errors)
-
-        errors = []
-        self.assertEqual(True, isc.config.module_spec._validate_type({ 'item_type': 'map' }, {"a": 1}, errors))
-        self.assertEqual(False, isc.config.module_spec._validate_type({ 'item_type': 'map' }, 1, None))
-        self.assertEqual(False, isc.config.module_spec._validate_type({ 'item_type': 'map' }, 1, errors))
-        self.assertEqual(['1 should be a map'], errors)
-
-    def test_validate_format(self):
-        errors = []
-        self.assertEqual(True, isc.config.module_spec._validate_format({ 'item_format': 'date-time' }, "2011-05-27T19:42:57Z", errors))
-        self.assertEqual(False, isc.config.module_spec._validate_format({ 'item_format': 'date-time' }, "a", None))
-        self.assertEqual(False, isc.config.module_spec._validate_format({ 'item_format': 'date-time' }, "a", errors))
-        self.assertEqual(['format type of a should be date-time'], errors)
-
-        errors = []
-        self.assertEqual(True, isc.config.module_spec._validate_format({ 'item_format': 'date' }, "2011-05-27", errors))
-        self.assertEqual(False, isc.config.module_spec._validate_format({ 'item_format': 'date' }, "a", None))
-        self.assertEqual(False, isc.config.module_spec._validate_format({ 'item_format': 'date' }, "a", errors))
-        self.assertEqual(['format type of a should be date'], errors)
-
-        errors = []
-        self.assertEqual(True, isc.config.module_spec._validate_format({ 'item_format': 'time' }, "19:42:57", errors))
-        self.assertEqual(False, isc.config.module_spec._validate_format({ 'item_format': 'time' }, "a", None))
-        self.assertEqual(False, isc.config.module_spec._validate_format({ 'item_format': 'time' }, "a", errors))
-        self.assertEqual(['format type of a should be time'], errors)
-
-    def test_validate_spec(self):
-        spec = { 'item_name': "an_item",
-                 'item_type': "string",
-                 'item_optional': False,
-                 'item_default': "asdf"
-               }
-        errors = []
-        self.assertEqual(False, isc.config.module_spec._validate_spec(spec, True, {}, None))
-        self.assertEqual(False, isc.config.module_spec._validate_spec(spec, True, {}, errors))
-        self.assertEqual(['non-optional item an_item missing'], errors)
-
-    def test_validate_unknown_items(self):
-        spec = [{ 'item_name': "an_item",
-                 'item_type': "string",
-                 'item_optional': True,
-                 'item_default': "asdf"
-               }]
-
-        errors = []
-        self.assertEqual(True, isc.config.module_spec._validate_spec_list(spec, True, None, None))
-        self.assertEqual(False, isc.config.module_spec._validate_spec_list(spec, True, { 'does_not_exist': 1 }, None))
-        self.assertEqual(False, isc.config.module_spec._validate_spec_list(spec, True, { 'does_not_exist': 1 }, errors))
-        self.assertEqual(['unknown item does_not_exist'], errors)
-        
-
-
-if __name__ == '__main__':
-    unittest.main()
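The module_spec tests above revolve around loading a .spec file and validating data against it. Condensed from the tests' validate_data() helper, a sketch of that flow follows; spec22.spec and data22_1.data are fixtures referenced by the tests, located via the same CONFIG_TESTDATA_PATH assumption the tests make.

# Load a module spec and validate a parsed config value against it,
# mirroring the validate_data() helper in the removed tests.
import os
import isc.config
import isc.cc.data

data_path = os.environ.get('CONFIG_TESTDATA_PATH', '../../../testdata')
spec = isc.config.module_spec_from_file(os.path.join(data_path, 'spec22.spec'))

with open(os.path.join(data_path, 'data22_1.data')) as data_file:
    data = isc.cc.data.parse_value_str(data_file.read())

# validate_config(True, data) checks the full config and returns True/False.
print(spec.validate_config(True, data))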

+ 0 - 112
src/lib/python/isc/config/tests/unittest_fakesession.py

@@ -1,112 +0,0 @@
-# Copyright (C) 2010  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-import isc
-
-class WouldBlockForever(Exception):
-    """
-    This is thrown by the FakeModuleCCSession if it would need
-    to block forever for incoming message.
-    """
-    pass
-
-#
-# We can probably use a more general version of this
-#
-class FakeModuleCCSession:
-    def __init__(self):
-        self.subscriptions = {}
-        # each entry is of the form [ channel, instance, message, want_answer ]
-        self.message_queue = []
-        self._socket = "ok we just need something not-None here atm"
-        # if self.timeout is set to anything other than 0, and
-        # the message_queue is empty when receive is called, throw
-        # a SessionTimeout
-        self._timeout = 0
-        self._closed = False
-
-    def group_subscribe(self, group_name, instance_name = None):
-        if not group_name in self.subscriptions:
-            self.subscriptions[group_name] = []
-        if instance_name:
-            self.subscriptions[group_name].append(instance_name)
-            
-    def group_unsubscribe(self, group_name, instance_name = None):
-
-        # raises SessionError if the session has already been closed.
-        if self._closed:
-            raise isc.cc.SessionError("Session has been closed.")        
-
-        if group_name in self.subscriptions:
-            if instance_name:
-                if len(self.subscriptions[group_name]) > 1:
-                    # the subscription entry is a list of instance names,
-                    # so remove this instance by value
-                    self.subscriptions[group_name].remove(instance_name)
-                else:
-                    del self.subscriptions[group_name]
-            else:
-                del self.subscriptions[group_name]
-            
-
-    def has_subscription(self, group_name, instance_name = None):
-        if group_name in self.subscriptions:
-            if instance_name:
-                return instance_name in self.subscriptions[group_name]
-            else:
-                return True
-        else:
-            return False
-
-    def group_sendmsg(self, msg, group, instance=None, to=None,
-                      want_answer=False):
-        self.message_queue.append([ group, instance, msg, want_answer ])
-        return 42
-
-    def group_reply(self, env, msg):
-        if 'group' in env:
-            self.message_queue.append([ env['group'], None, msg, False])
-
-    def group_recvmsg(self, nonblock=True, seq = None):
-        for qm in self.message_queue:
-            if qm[0] in self.subscriptions and (qm[1] == None or qm[1] in
-                self.subscriptions[qm[0]]):
-                self.message_queue.remove(qm)
-                return qm[2], {'group': qm[0], 'from': qm[1]}
-        if self._timeout == 0:
-            if nonblock:
-                return None, None
-            else:
-                raise WouldBlockForever(
-                    "Blocking read without timeout and no message ready")
-        else:
-            raise isc.cc.SessionTimeout("Timeout set but no data to "
-                                 "return to group_recvmsg()")
-
-    def get_message(self, channel, target = None):
-        for qm in self.message_queue:
-            if qm[0] == channel and qm[1] == target:
-                self.message_queue.remove(qm)
-                return qm[2]
-        return None
-
-    def close(self):
-        # need to pass along somehow that this function has been called,
-        self._socket = None
-        self._closed = True
-
-    def set_timeout(self, timeout):
-        self._timeout = timeout
-
-    def get_timeout(self):
-        return self._timeout
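
For reference, a minimal usage sketch (not part of the removed test suite) of how a
unit test could drive this fake session; it assumes the class is importable from the
test's PYTHONPATH, and the group name and message content are made up:

from unittest_fakesession import FakeModuleCCSession

fake = FakeModuleCCSession()
fake.group_subscribe("ConfigManager")

# Queue a message the way a module under test would send it.
fake.group_sendmsg({"command": ["get_config"]}, "ConfigManager")

# The fake hands back the queued message plus a minimal envelope.
msg, env = fake.group_recvmsg(nonblock=True)
assert msg == {"command": ["get_config"]}
assert env["group"] == "ConfigManager"

# With an empty queue and nonblock=True it returns (None, None).
assert fake.group_recvmsg(nonblock=True) == (None, None)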

+ 0 - 8
src/lib/python/isc/dns/Makefile.am

@@ -1,8 +0,0 @@
-python_PYTHON = __init__.py
-pythondir = $(pyexecdir)/isc/dns
-
-CLEANDIRS = __pycache__
-
-clean-local:
-	rm -rf $(CLEANDIRS)
-

+ 0 - 1
src/lib/python/isc/dns/__init__.py

@@ -1 +0,0 @@
-from pydnspp import *

+ 0 - 39
src/lib/python/isc/log/Makefile.am

@@ -1,39 +0,0 @@
-SUBDIRS = . tests
-
-AM_CPPFLAGS = -I$(top_srcdir)/src/lib -I$(top_builddir)/src/lib
-AM_CPPFLAGS += $(BOOST_INCLUDES)
-AM_CXXFLAGS = $(B10_CXXFLAGS)
-
-# There is a build problem with python3.4 (a new field, tp_finalize, has been
-# added) and now the compiler complains about it not being properly
-# initialized in the constructor. Since the whole python thing goes away,
-# it's counter-productive to spend any time on making this work on both
-# python3.3 and 3.4, so ignoring the warning seems the way to go.
-AM_CXXFLAGS += -Wno-error
-
-pythondir = $(pyexecdir)/isc
-python_LTLIBRARIES = log.la
-log_la_SOURCES = log.cc
-
-log_la_CPPFLAGS = $(AM_CPPFLAGS) $(PYTHON_INCLUDES)
-# Note: PYTHON_CXXFLAGS may have some -Wno... workaround, which must be
-# placed after -Wextra defined in AM_CXXFLAGS
-log_la_CXXFLAGS = $(AM_CXXFLAGS) $(PYTHON_CXXFLAGS)
-log_la_LDFLAGS = $(PYTHON_LDFLAGS)
-log_la_LDFLAGS += -module -avoid-version
-log_la_LIBADD = $(top_builddir)/src/lib/log/libkea-log.la
-log_la_LIBADD += $(top_builddir)/src/lib/cc/libkea-cc.la
-log_la_LIBADD += $(top_builddir)/src/lib/config/libkea-cfgclient.la
-log_la_LIBADD += $(top_builddir)/src/lib/exceptions/libkea-exceptions.la
-log_la_LIBADD += $(PYTHON_LIB)
-
-# This is not installed, it helps locate the module during tests
-EXTRA_DIST = __init__.py
-
-pytest:
-	$(SHELL) tests/log_test
-
-CLEANDIRS = __pycache__
-
-clean-local:
-	rm -rf $(CLEANDIRS)

+ 0 - 33
src/lib/python/isc/log/__init__.py

@@ -1,33 +0,0 @@
-# Copyright (C) 2011  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-# This file is not installed. The log.so is installed into the right place.
-# It is only to find it in the .libs directory when we run as a test or
-# from the build directory.
-# But as nobody gives us the builddir explicitly (and we can't use generation
-# from .in file, as it would put us into the builddir and we wouldn't be found)
-# we guess from current directory. Any idea for something better? This should
-# be enough for the tests, but would it work for B10_FROM_SOURCE as well?
-# Should we look there? Or define something in bind10_config?
-
-import os
-import sys
-
-for base in sys.path[:]:
-    loglibdir = os.path.join(base, 'isc/log/.libs')
-    if os.path.exists(loglibdir):
-        sys.path.insert(0, loglibdir)
-
-from log import *

+ 0 - 779
src/lib/python/isc/log/log.cc

@@ -1,779 +0,0 @@
-// Copyright (C) 2011  Internet Systems Consortium, Inc. ("ISC")
-//
-// Permission to use, copy, modify, and/or distribute this software for any
-// purpose with or without fee is hereby granted, provided that the above
-// copyright notice and this permission notice appear in all copies.
-//
-// THE SOFTWARE IS PROVIDED "AS IS" AND ISC DISCLAIMS ALL WARRANTIES WITH
-// REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
-// AND FITNESS.  IN NO EVENT SHALL ISC BE LIABLE FOR ANY SPECIAL, DIRECT,
-// INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
-// LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE
-// OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
-// PERFORMANCE OF THIS SOFTWARE.
-
-#define PY_SSIZE_T_CLEAN
-#include <Python.h>
-#include <structmember.h>
-
-#include <config.h>
-
-#include <log/message_dictionary.h>
-#include <log/logger_manager.h>
-#include <log/logger_support.h>
-#include <log/logger.h>
-
-#include <config/ccsession.h>
-
-#include <string>
-#include <boost/bind.hpp>
-
-#include <util/python/pycppwrapper_util.h>
-#include <log/log_dbglevels.h>
-
-using namespace isc::log;
-using namespace isc::util::python;
-using std::string;
-using boost::bind;
-
-// We encountered a strange problem with Clang (clang version 2.8
-// (tags/RELEASE_28 115909)) on OSX, where unwinding the stack
-// segfaults the moment this exception was thrown and caught.
-//
-// Placing it in a named namespace instead of the original
-// unnamed namespace appears to solve this, so as a temporary
-// workaround, we create a local randomly named namespace here
-// to solve this issue.
-namespace clang_unnamed_namespace_workaround {
-    // To propagate python exceptions through our code
-    // This exception is used to signal to the calling function that a
-    // proper Python Exception has already been set, and the caller
-    // should now return NULL.
-    // Since it is only used internally and should not pass any
-    // information itself, it carries no members of its own.
-    class InternalError : public std::exception {};
-}
-using namespace clang_unnamed_namespace_workaround;
-
-namespace {
-
-// This is for testing only. The real module will have it always set as
-// NULL and will use the global dictionary.
-MessageDictionary* testDictionary = NULL;
-
-PyObject*
-setTestDictionary(PyObject*, PyObject* args) {
-    PyObject* enableO;
-    // The API doesn't seem to provide conversion to bool,
-    // so we do it a little bit manually
-    if (!PyArg_ParseTuple(args, "O", &enableO)) {
-        return (NULL);
-    }
-    int enableI(PyObject_IsTrue(enableO));
-    if (enableI == -1) {
-        return (NULL);
-    }
-    bool enable(enableI != 0);
-
-    try {
-        delete testDictionary;
-        testDictionary = NULL;
-        if (enable) {
-            testDictionary = new MessageDictionary;
-        }
-    }
-    catch (const std::exception& e) {
-        PyErr_SetString(PyExc_RuntimeError, e.what());
-        return (NULL);
-    }
-    catch (...) {
-        PyErr_SetString(PyExc_RuntimeError, "Unknown C++ exception");
-        return (NULL);
-    }
-    Py_RETURN_NONE;
-}
-
-PyObject*
-createMessage(PyObject*, PyObject* args) {
-    const char* mid;
-    const char* text;
-    // We parse the strings
-    if (!PyArg_ParseTuple(args, "ss", &mid, &text)) {
-        return (NULL);
-    }
-    PyObject* origMid;
-    // And extract the original representation of the message
-    // ID, so we can return it instead of creating another instance.
-    // This call shouldn't fail if the previous one succeeded.
-    if (!PyArg_ParseTuple(args, "Os", &origMid, &text)) {
-        return (NULL);
-    }
-
-    try {
-        MessageDictionary* dict = testDictionary ? testDictionary :
-            &MessageDictionary::globalDictionary();
-
-        // We ignore the result, they will be in some kind of dupe list
-        // if there's a problem
-        dict->add(mid, text);
-    }
-    catch (const std::exception& e) {
-        PyErr_SetString(PyExc_RuntimeError, e.what());
-        return (NULL);
-    }
-    catch (...) {
-        PyErr_SetString(PyExc_RuntimeError, "Unknown C++ exception");
-        return (NULL);
-    }
-
-    // Return the ID
-    Py_INCREF(origMid);
-    return (origMid);
-}
-
-PyObject*
-getMessage(PyObject*, PyObject* args) {
-    const char* mid;
-    if (!PyArg_ParseTuple(args, "s", &mid)) {
-        return (NULL);
-    }
-
-    try {
-        MessageDictionary* dict = testDictionary ? testDictionary :
-            &MessageDictionary::globalDictionary();
-
-        const std::string& result(dict->getText(mid));
-        if (result.empty()) {
-            Py_RETURN_NONE;
-        } else {
-            return (Py_BuildValue("s", result.c_str()));
-        }
-    }
-    catch (const std::exception& e) {
-        PyErr_SetString(PyExc_RuntimeError, e.what());
-        return (NULL);
-    }
-    catch (...) {
-        PyErr_SetString(PyExc_RuntimeError, "Unknown C++ exception");
-        return (NULL);
-    }
-}
-
-PyObject*
-reset(PyObject*, PyObject*) {
-    LoggerManager::reset();
-    Py_RETURN_NONE;
-}
-
-PyObject*
-init(PyObject*, PyObject* args, PyObject* arg_keywords) {
-    const char* root;
-    const char* file(NULL);
-    const char* severity("INFO");
-    bool buffer = false;
-    int dbglevel(0);
-    const char* const keywords[] = { "name", "severity", "debuglevel", "file",
-                                     "buffer", NULL };
-    if (!PyArg_ParseTupleAndKeywords(args, arg_keywords, "s|sizb",
-                                     const_cast<char**>(keywords), &root,
-                                     &severity, &dbglevel, &file, &buffer)) {
-        return (NULL);
-    }
-
-    try {
-        LoggerManager::init(root, getSeverity(severity), dbglevel, file,
-                            buffer);
-    }
-    catch (const std::exception& e) {
-        PyErr_SetString(PyExc_RuntimeError, e.what());
-        return (NULL);
-    }
-    catch (...) {
-        PyErr_SetString(PyExc_RuntimeError, "Unknown C++ exception");
-        return (NULL);
-    }
-    Py_RETURN_NONE;
-}
-
-// This initialization is for unit tests.  It allows message settings to
-// be determined by a set of B10_xxx environment variables.  (See the
-// description of initLogger() for more details.)  The function has been named
-// resetUnitTestRootLogger() here as being more descriptive and
-// trying to avoid confusion.
-PyObject*
-resetUnitTestRootLogger(PyObject*, PyObject*) {
-    try {
-        isc::log::resetUnitTestRootLogger();
-    }
-    catch (const std::exception& e) {
-        PyErr_SetString(PyExc_RuntimeError, e.what());
-        return (NULL);
-    }
-    catch (...) {
-        PyErr_SetString(PyExc_RuntimeError, "Unknown C++ exception");
-        return (NULL);
-    }
-    Py_RETURN_NONE;
-}
-
-PyObject*
-logConfigUpdate(PyObject*, PyObject* args) {
-    // we have no wrappers for ElementPtr and ConfigData,
-    // So we expect JSON strings and convert them.
-    // The new_config object is assumed to have been validated.
-
-    const char* new_config_json;
-    const char* mod_spec_json;
-    if (!PyArg_ParseTuple(args, "ss",
-                          &new_config_json, &mod_spec_json)) {
-        return (NULL);
-    }
-
-    try {
-        isc::data::ConstElementPtr new_config =
-            isc::data::Element::fromJSON(new_config_json);
-        isc::data::ConstElementPtr mod_spec_e =
-            isc::data::Element::fromJSON(mod_spec_json);
-        isc::config::ModuleSpec mod_spec(mod_spec_e);
-        isc::config::ConfigData config_data(mod_spec);
-        isc::config::default_logconfig_handler("logging", new_config,
-                                               config_data);
-
-        Py_RETURN_NONE;
-    } catch (const isc::data::JSONError& je) {
-        std::string error_msg = std::string("JSON format error: ") + je.what();
-        PyErr_SetString(PyExc_TypeError, error_msg.c_str());
-    } catch (const isc::data::TypeError&) {
-        PyErr_SetString(PyExc_TypeError, "argument 1 of log_config_update "
-                                         "is not a map of config data");
-    } catch (const isc::config::ModuleSpecError&) {
-        PyErr_SetString(PyExc_TypeError, "argument 2 of log_config_update "
-                                         "is not a correct module specification");
-    } catch (const std::exception& e) {
-        PyErr_SetString(PyExc_RuntimeError, e.what());
-    } catch (...) {
-        PyErr_SetString(PyExc_RuntimeError, "Unknown C++ exception");
-    }
-    return (NULL);
-}
-
-PyMethodDef methods[] = {
-    {"set_test_dictionary", setTestDictionary, METH_VARARGS,
-        "Set or unset testing mode for message dictionary. In testing, "
-        "the create_message and get_message functions work on different "
-        "than the logger-global dictionary, not polluting it."},
-    {"create_message", createMessage, METH_VARARGS,
-        "Creates a new message in the dictionary. You shouldn't need to "
-        "call this directly, it should be called by the generated message "
-        "file. Returns the identifier to be used in logging. The text "
-        "shouldn't be empty."},
-    {"get_message", getMessage, METH_VARARGS,
-        "Get a message. This function is for testing purposes and you don't "
-        "need to call it. It returns None if the message does not exist."},
-    {"reset", reset, METH_NOARGS,
-        "Reset all logging. For testing purposes only, do not use."},
-    {"init", reinterpret_cast<PyCFunction>(init), METH_VARARGS | METH_KEYWORDS,
-        "Run-time initialization. You need to call this before you do any "
-        "logging, to configure the root logger name. You may also provide "
-        "Arguments:\n"
-        "name: root logger name\n"
-        "severity (optional): one of 'DEBUG', 'INFO', 'WARN', 'ERROR' or "
-        "'FATAL'\n"
-        "debuglevel (optional): a debug level (integer in the range 0-99) "
-        "file (optional): a file name of a dictionary with message text "
-        "translations\n"
-        "buffer (optional), boolean, when True, causes all log messages "
-        "to be stored internally until log_config_update is called, at "
-        "which point they shall be logged."},
-    {"resetUnitTestRootLogger", resetUnitTestRootLogger, METH_VARARGS,
-        "Resets the configuration of the root logger to that set by the "
-        "B10_XXX environment variables.  It is aimed at unit tests, where "
-        "the logging is initialized by the code under test; called before "
-        "the unit test starts, this function resets the logging configuration "
-        "to that in use for the C++ unit tests."},
-    {"log_config_update", logConfigUpdate, METH_VARARGS,
-        "Update logger settings. This method is automatically used when "
-        "ModuleCCSession is initialized with handle_logging_config set "
-        "to True. When called, the first argument is the new logging "
-        "configuration (in JSON format). The second argument is "
-        "the raw specification (as returned from "
-        "ConfigData.get_module_spec().get_full_spec(), and converted to "
-        "JSON format).\n"
-        "Raises a TypeError if either argument is not a (correct) JSON "
-        "string, or if the spec is not a correct spec.\n"
-        "If this call succeeds, the global logger settings have "
-        "been updated."
-    },
-    {NULL, NULL, 0, NULL}
-};
-
-class LoggerWrapper : public PyObject {
-// Everything is public here, as it is accessible only inside this .cc file.
-public:
-    Logger *logger_;
-};
-
-extern PyTypeObject logger_type;
-
-int
-Logger_init(PyObject* po_self, PyObject* args, PyObject*) {
-    LoggerWrapper* self = static_cast<LoggerWrapper*>(po_self);
-    const char* name;
-    if (!PyArg_ParseTuple(args, "s", &name)) {
-        return (-1);
-    }
-    try {
-        self->logger_ = new Logger(name);
-        return (0);
-    }
-    catch (const std::exception& e) {
-        PyErr_SetString(PyExc_RuntimeError, e.what());
-        return (-1);
-    }
-    catch (...) {
-        PyErr_SetString(PyExc_RuntimeError, "Unknown C++ exception");
-        return (-1);
-    }
-}
-
-void
-//Logger_destroy(LoggerWrapper* const self) {
-Logger_destroy(PyObject* po_self) {
-    LoggerWrapper* self = static_cast<LoggerWrapper*>(po_self);
-    delete self->logger_;
-    self->logger_ = NULL;
-    Py_TYPE(self)->tp_free(self);
-}
-
-// isc::log doesn't contain a function for converting in this direction
-const char*
-severityToText(const Severity& severity) {
-    switch (severity) {
-        case DEFAULT:
-            return ("DEFAULT");
-        case DEBUG:
-            return ("DEBUG");
-        case INFO:
-            return ("INFO");
-        case WARN:
-            return ("WARN");
-        case ERROR:
-            return ("ERROR");
-        case FATAL:
-            return ("FATAL");
-        default:
-            return (NULL);
-    }
-}
-
-PyObject*
-Logger_getEffectiveSeverity(PyObject* po_self, PyObject*) {
-    LoggerWrapper* self = static_cast<LoggerWrapper*>(po_self);
-    try {
-        return (Py_BuildValue("s",
-                              severityToText(
-                                  self->logger_->getEffectiveSeverity())));
-    }
-    catch (const std::exception& e) {
-        PyErr_SetString(PyExc_RuntimeError, e.what());
-        return (NULL);
-    }
-    catch (...) {
-        PyErr_SetString(PyExc_RuntimeError, "Unknown C++ exception");
-        return (NULL);
-    }
-}
-
-PyObject*
-Logger_getEffectiveDebugLevel(PyObject* po_self, PyObject*) {
-    LoggerWrapper* self = static_cast<LoggerWrapper*>(po_self);
-    try {
-        return (Py_BuildValue("i", self->logger_->getEffectiveDebugLevel()));
-    }
-    catch (const std::exception& e) {
-        PyErr_SetString(PyExc_RuntimeError, e.what());
-        return (NULL);
-    }
-    catch (...) {
-        PyErr_SetString(PyExc_RuntimeError, "Unknown C++ exception");
-        return (NULL);
-    }
-}
-
-PyObject*
-Logger_setSeverity(PyObject* po_self, PyObject* args) {
-    LoggerWrapper* self = static_cast<LoggerWrapper*>(po_self);
-    const char* severity;
-    int dbgLevel = 0;
-    if (!PyArg_ParseTuple(args, "z|i", &severity, &dbgLevel)) {
-        return (NULL);
-    }
-    try {
-        self->logger_->setSeverity((severity == NULL) ? DEFAULT :
-                                   getSeverity(severity), dbgLevel);
-    }
-    catch (const std::exception& e) {
-        PyErr_SetString(PyExc_RuntimeError, e.what());
-        return (NULL);
-    }
-    catch (...) {
-        PyErr_SetString(PyExc_RuntimeError, "Unknown C++ exception");
-        return (NULL);
-    }
-    Py_RETURN_NONE;
-}
-
-template<class FPtr> // Who should remember the pointer-to-method syntax
-PyObject*
-Logger_isLevelEnabled(LoggerWrapper* self, FPtr function) {
-    try {
-        if ((self->logger_->*function)()) {
-            Py_RETURN_TRUE;
-        } else {
-            Py_RETURN_FALSE;
-        }
-    }
-    catch (const std::exception& e) {
-        PyErr_SetString(PyExc_RuntimeError, e.what());
-        return (NULL);
-    }
-    catch (...) {
-        PyErr_SetString(PyExc_RuntimeError, "Unknown C++ exception");
-        return (NULL);
-    }
-}
-
-PyObject*
-Logger_isInfoEnabled(PyObject* po_self, PyObject*) {
-    LoggerWrapper* self = static_cast<LoggerWrapper*>(po_self);
-    return (Logger_isLevelEnabled(self, &Logger::isInfoEnabled));
-}
-
-PyObject*
-Logger_isWarnEnabled(PyObject* po_self, PyObject*) {
-    LoggerWrapper* self = static_cast<LoggerWrapper*>(po_self);
-    return (Logger_isLevelEnabled(self, &Logger::isWarnEnabled));
-}
-
-PyObject*
-Logger_isErrorEnabled(PyObject* po_self, PyObject*) {
-    LoggerWrapper* self = static_cast<LoggerWrapper*>(po_self);
-    return (Logger_isLevelEnabled(self, &Logger::isErrorEnabled));
-}
-
-PyObject*
-Logger_isFatalEnabled(PyObject* po_self, PyObject*) {
-    LoggerWrapper* self = static_cast<LoggerWrapper*>(po_self);
-    return (Logger_isLevelEnabled(self, &Logger::isFatalEnabled));
-}
-
-PyObject*
-Logger_isDebugEnabled(PyObject* po_self, PyObject* args) {
-    LoggerWrapper* self = static_cast<LoggerWrapper*>(po_self);
-    int level = MIN_DEBUG_LEVEL;
-    if (!PyArg_ParseTuple(args, "|i", &level)) {
-        return (NULL);
-    }
-
-    try {
-        if (self->logger_->isDebugEnabled(level)) {
-            Py_RETURN_TRUE;
-        } else {
-            Py_RETURN_FALSE;
-        }
-    }
-    catch (const std::exception& e) {
-        PyErr_SetString(PyExc_RuntimeError, e.what());
-        return (NULL);
-    }
-    catch (...) {
-        PyErr_SetString(PyExc_RuntimeError, "Unknown C++ exception");
-        return (NULL);
-    }
-}
-
-string
-objectToStr(PyObject* object, bool convert) {
-    PyObjectContainer objstr_container;
-    if (convert) {
-        PyObject* text_obj = PyObject_Str(object);
-        if (text_obj == NULL) {
-            // PyObject_Str could fail for various reasons, including because
-            // the object cannot be converted to a string.  We exit with
-            // InternalError to preserve the PyErr set in PyObject_Str.
-            throw InternalError();
-        }
-        objstr_container.reset(text_obj);
-        object = objstr_container.get();
-    }
-
-    PyObjectContainer tuple_container(Py_BuildValue("(O)", object));
-    const char* value;
-    if (!PyArg_ParseTuple(tuple_container.get(), "s", &value)) {
-        throw InternalError();
-    }
-    return (string(value));
-}
-
-// Generic function to output the logging message. Called by the real functions.
-template <class Function>
-PyObject*
-Logger_performOutput(Function function, PyObject* args, bool dbgLevel) {
-    try {
-        const Py_ssize_t number(PyObject_Length(args));
-        if (number < 0) {
-            return (NULL);
-        }
-
-        // Which argument is the first to format?
-        const size_t start = dbgLevel ? 2 : 1;
-        if (number < start) {
-            return (PyErr_Format(PyExc_TypeError, "Too few arguments to "
-                                 "logging call, at least %zu needed and %zd "
-                                 "given", start, number));
-        }
-
-        // Extract the fixed arguments
-        long dbg(0);
-        if (dbgLevel) {
-            PyObjectContainer dbg_container(PySequence_GetItem(args, 0));
-            dbg = PyLong_AsLong(dbg_container.get());
-            if (PyErr_Occurred()) {
-                return (NULL);
-            }
-        }
-
-        // We create the logging message right now. If we fail to convert a
-        // parameter to string, at least the part that we already did will
-        // be output
-        PyObjectContainer msgid_container(PySequence_GetItem(args, start - 1));
-        const string mid(objectToStr(msgid_container.get(), false));
-        Logger::Formatter formatter(function(dbg, mid.c_str()));
-
-        // Now process the rest of parameters, convert each to string and put
-        // into the formatter. It will print itself in the end.
-        for (size_t i(start); i < number; ++ i) {
-            PyObjectContainer param_container(PySequence_GetItem(args, i));
-            try {
-                formatter = formatter.arg(objectToStr(param_container.get(),
-                                                      true));
-            }
-            catch (...) {
-                formatter.deactivate();
-                throw;
-            }
-        }
-        Py_RETURN_NONE;
-    }
-    catch (const InternalError&) {
-        return (NULL);
-    }
-    catch (const std::exception& e) {
-        PyErr_SetString(PyExc_RuntimeError, e.what());
-        return (NULL);
-    }
-    catch (...) {
-        PyErr_SetString(PyExc_RuntimeError, "Unknown C++ exception");
-        return (NULL);
-    }
-}
-
-// Now map the functions into the performOutput. I wish C++ could do
-// functional programming.
-PyObject*
-Logger_debug(PyObject* po_self, PyObject* args) {
-    LoggerWrapper* self = static_cast<LoggerWrapper*>(po_self);
-    return (Logger_performOutput(bind(&Logger::debug, self->logger_, _1, _2),
-                                 args, true));
-}
-
-PyObject*
-Logger_info(PyObject* po_self, PyObject* args) {
-    LoggerWrapper* self = static_cast<LoggerWrapper*>(po_self);
-    return (Logger_performOutput(bind(&Logger::info, self->logger_, _2),
-                                 args, false));
-}
-
-PyObject*
-Logger_warn(PyObject* po_self, PyObject* args) {
-    LoggerWrapper* self = static_cast<LoggerWrapper*>(po_self);
-    return (Logger_performOutput(bind(&Logger::warn, self->logger_, _2),
-                                 args, false));
-}
-
-PyObject*
-Logger_error(PyObject* po_self, PyObject* args) {
-    LoggerWrapper* self = static_cast<LoggerWrapper*>(po_self);
-    return (Logger_performOutput(bind(&Logger::error, self->logger_, _2),
-                                 args, false));
-}
-
-PyObject*
-Logger_fatal(PyObject* po_self, PyObject* args) {
-    LoggerWrapper* self = static_cast<LoggerWrapper*>(po_self);
-    return (Logger_performOutput(bind(&Logger::fatal, self->logger_, _2),
-                                 args, false));
-}
-
-PyMethodDef loggerMethods[] = {
-    { "get_effective_severity", Logger_getEffectiveSeverity, METH_NOARGS,
-        "Returns the effective logging severity as string" },
-    { "get_effective_debug_level", Logger_getEffectiveDebugLevel, METH_NOARGS,
-        "Returns the current debug level." },
-    { "set_severity", Logger_setSeverity, METH_VARARGS,
-        "Sets the severity of a logger. The parameters are severity as a "
-        "string and, optionally, a debug level (integer in range 0-99). "
-        "The severity may be NULL, in which case an inherited value is taken."
-    },
-    { "is_debug_enabled", Logger_isDebugEnabled, METH_VARARGS,
-      "Returns if the logger would log debug message now. "
-            "You can provide a desired debug level." },
-    { "is_info_enabled", Logger_isInfoEnabled, METH_NOARGS,
-      "Returns if the logger would log info message now." },
-    { "is_warn_enabled", Logger_isWarnEnabled, METH_NOARGS,
-      "Returns if the logger would log warn message now." },
-    { "is_error_enabled", Logger_isErrorEnabled, METH_NOARGS,
-      "Returns if the logger would log error message now." },
-    { "is_fatal_enabled", Logger_isFatalEnabled, METH_NOARGS,
-      "Returns if the logger would log fatal message now." },
-    { "debug", Logger_debug, METH_VARARGS,
-        "Logs a debug-severity message. It takes the debug level, message ID "
-        "and any number of stringifiable arguments to the message." },
-    { "info", Logger_info, METH_VARARGS,
-        "Logs a info-severity message. It taskes the message ID and any "
-        "number of stringifiable arguments to the message." },
-    { "warn", Logger_warn, METH_VARARGS,
-        "Logs a warn-severity message. It taskes the message ID and any "
-        "number of stringifiable arguments to the message." },
-    { "error", Logger_error, METH_VARARGS,
-        "Logs a error-severity message. It taskes the message ID and any "
-        "number of stringifiable arguments to the message." },
-    { "fatal", Logger_fatal, METH_VARARGS,
-        "Logs a fatal-severity message. It taskes the message ID and any "
-        "number of stringifiable arguments to the message." },
-    { NULL, NULL, 0, NULL }
-};
-
-PyTypeObject logger_type = {
-    PyVarObject_HEAD_INIT(NULL, 0)
-    "isc.log.Logger",
-    sizeof(LoggerWrapper),                 // tp_basicsize
-    0,                                  // tp_itemsize
-    Logger_destroy,                     // tp_dealloc
-    NULL,                               // tp_print
-    NULL,                               // tp_getattr
-    NULL,                               // tp_setattr
-    NULL,                               // tp_reserved
-    NULL,                               // tp_repr
-    NULL,                               // tp_as_number
-    NULL,                               // tp_as_sequence
-    NULL,                               // tp_as_mapping
-    NULL,                               // tp_hash
-    NULL,                               // tp_call
-    NULL,                               // tp_str
-    NULL,                               // tp_getattro
-    NULL,                               // tp_setattro
-    NULL,                               // tp_as_buffer
-    Py_TPFLAGS_DEFAULT,                 // tp_flags
-    "Wrapper around the C++ isc::log::Logger class."
-    "It is not complete, but everything important should be here.",
-    NULL,                               // tp_traverse
-    NULL,                               // tp_clear
-    NULL,                               // tp_richcompare
-    0,                                  // tp_weaklistoffset
-    NULL,                               // tp_iter
-    NULL,                               // tp_iternext
-    loggerMethods,                      // tp_methods
-    NULL,                               // tp_members
-    NULL,                               // tp_getset
-    NULL,                               // tp_base
-    NULL,                               // tp_dict
-    NULL,                               // tp_descr_get
-    NULL,                               // tp_descr_set
-    0,                                  // tp_dictoffset
-    Logger_init,                        // tp_init
-    NULL,                               // tp_alloc
-    PyType_GenericNew,                  // tp_new
-    NULL,                               // tp_free
-    NULL,                               // tp_is_gc
-    NULL,                               // tp_bases
-    NULL,                               // tp_mro
-    NULL,                               // tp_cache
-    NULL,                               // tp_subclasses
-    NULL,                               // tp_weaklist
-    NULL,                               // tp_del
-    0                                   // tp_version_tag
-};
-
-PyModuleDef iscLog = {
-    { PyObject_HEAD_INIT(NULL) NULL, 0, NULL},
-    "log",
-    "Python bindings for the classes in the isc::log namespace.\n\n"
-    "These bindings are close match to the C++ API, but they are not complete "
-    "(some parts are not needed) and some are done in more python-like ways.",
-    -1,
-    methods,
-    NULL,
-    NULL,
-    NULL,
-    NULL
-};
-
-} // end anonymous namespace
-
-PyMODINIT_FUNC
-PyInit_log(void) {
-    PyObject* mod = PyModule_Create(&iscLog);
-    if (mod == NULL) {
-        return (NULL);
-    }
-
-    // Finalize logger class and add in the definitions of the standard debug
-    // levels.  These can then be referred to in Python through the constants
-    // log.DBGLVL_XXX.
-    // N.B. These should be kept in sync with the constants defined in
-    // log_dbglevels.h.
-    try {
-        if (PyType_Ready(&logger_type) < 0) {
-            throw InternalError();
-        }
-        void* p = &logger_type;
-        if (PyModule_AddObject(mod, "Logger",
-                               static_cast<PyObject*>(p)) < 0) {
-            throw InternalError();
-        }
-
-        installClassVariable(logger_type, "DBGLVL_START_SHUT",
-                             Py_BuildValue("I", DBGLVL_START_SHUT));
-        installClassVariable(logger_type, "DBGLVL_COMMAND",
-                             Py_BuildValue("I", DBGLVL_COMMAND));
-        installClassVariable(logger_type, "DBGLVL_COMMAND_DATA",
-                             Py_BuildValue("I", DBGLVL_COMMAND_DATA));
-        installClassVariable(logger_type, "DBGLVL_TRACE_BASIC",
-                             Py_BuildValue("I", DBGLVL_TRACE_BASIC));
-        installClassVariable(logger_type, "DBGLVL_TRACE_BASIC_DATA",
-                             Py_BuildValue("I", DBGLVL_TRACE_BASIC_DATA));
-        installClassVariable(logger_type, "DBGLVL_TRACE_DETAIL",
-                             Py_BuildValue("I", DBGLVL_TRACE_DETAIL));
-        installClassVariable(logger_type, "DBGLVL_TRACE_DETAIL_DATA",
-                             Py_BuildValue("I", DBGLVL_TRACE_DETAIL_DATA));
-    } catch (const InternalError&) {
-        Py_DECREF(mod);
-        return (NULL);
-    } catch (const std::exception& ex) {
-        const std::string ex_what =
-            "Unexpected failure in Log initialization: " +
-            std::string(ex.what());
-        PyErr_SetString(PyExc_SystemError, ex_what.c_str());
-        Py_DECREF(mod);
-        return (NULL);
-    } catch (...) {
-        PyErr_SetString(PyExc_SystemError,
-                        "Unexpected failure in Log initialization");
-        Py_DECREF(mod);
-        return (NULL);
-    }
-
-    Py_INCREF(&logger_type);
-    return (mod);
-}
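
For orientation, a small sketch of the Python-side API these bindings exposed; it
mirrors the log_console.py.in test further below, and the logger name, message ID
and message text are made up for illustration:

import isc.log

# create_message() registers the text and returns the ID to log with; in the
# tree these IDs normally come from the generated *_messages.py modules.
MSG_HELLO = isc.log.create_message("MSG_HELLO", "hello %1")

# Configure the root logger before any logging happens.
isc.log.init("example", severity="DEBUG", debuglevel=10)

logger = isc.log.Logger("example-module")
if logger.is_debug_enabled(isc.log.Logger.DBGLVL_TRACE_BASIC):
    logger.debug(isc.log.Logger.DBGLVL_TRACE_BASIC, MSG_HELLO, "world")
logger.info(MSG_HELLO, "world")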

+ 0 - 1
src/lib/python/isc/log/tests/.gitignore

@@ -1 +0,0 @@
-/log_console.py

+ 0 - 43
src/lib/python/isc/log/tests/Makefile.am

@@ -1,43 +0,0 @@
-PYCOVERAGE_RUN = @PYCOVERAGE_RUN@
-PYTESTS_GEN = log_console.py
-PYTESTS_NOGEN = 
-noinst_SCRIPTS = $(PYTESTS_GEN)
-EXTRA_DIST = console.out check_output.sh $(PYTESTS_NOGEN)
-
-# If necessary (rare cases), explicitly specify paths to dynamic libraries
-# required by loadable python modules.
-LIBRARY_PATH_PLACEHOLDER =
-if SET_ENV_LIBRARY_PATH
-LIBRARY_PATH_PLACEHOLDER += $(ENV_LIBRARY_PATH)=$(abs_top_builddir)/src/lib/cryptolink/.libs:$(abs_top_builddir)/src/lib/dns/.libs:$(abs_top_builddir)/src/lib/dns/python/.libs:$(abs_top_builddir)/src/lib/cc/.libs:$(abs_top_builddir)/src/lib/config/.libs:$(abs_top_builddir)/src/lib/log/.libs:$(abs_top_builddir)/src/lib/util/.libs:$(abs_top_builddir)/src/lib/util/threads/.libs:$(abs_top_builddir)/src/lib/exceptions/.libs:$(abs_top_builddir)/src/lib/datasrc/.libs:$$$(ENV_LIBRARY_PATH)
-endif
-
-# test using command-line arguments, so use check-local target instead of TESTS
-# We need to run the loop twice, because the generated tests live in the builddir and the others in the srcdir
-check-local:
-	chmod +x $(abs_builddir)/log_console.py
-	$(LIBRARY_PATH_PLACEHOLDER) \
-	PYTHONPATH=$(COMMON_PYTHON_PATH):$(abs_top_builddir)/src/lib/python/isc/log \
-	B10_LOCKFILE_DIR_FROM_BUILD=$(abs_top_builddir) \
-	$(abs_srcdir)/check_output.sh $(abs_builddir)/log_console.py $(abs_srcdir)/console.out
-if ENABLE_PYTHON_COVERAGE
-	touch $(abs_top_srcdir)/.coverage
-	rm -f .coverage
-	${LN_S} $(abs_top_srcdir)/.coverage .coverage
-endif
-	for pytest in $(PYTESTS_NOGEN) ; do \
-	echo Running test: $$pytest ; \
-	$(LIBRARY_PATH_PLACEHOLDER) \
-	PYTHONPATH=$(COMMON_PYTHON_PATH):$(abs_top_builddir)/src/lib/python/isc/log:$(abs_top_builddir)/src/lib/log/python/.libs \
-	B10_TEST_PLUGIN_DIR=$(abs_top_srcdir)/src/bin/cfgmgr/plugins \
-	B10_LOCKFILE_DIR_FROM_BUILD=$(abs_top_builddir) \
-	$(PYCOVERAGE_RUN) $(abs_srcdir)/$$pytest || exit ; \
-	done ; \
-	for pytest in $(PYTESTS_GEN) ; do \
-	echo Running test: $$pytest ; \
-	chmod +x $(abs_builddir)/$$pytest ; \
-	$(LIBRARY_PATH_PLACEHOLDER) \
-	PYTHONPATH=$(COMMON_PYTHON_PATH):$(abs_top_builddir)/src/lib/python/isc/log:$(abs_top_builddir)/src/lib/log/python/.libs \
-	B10_TEST_PLUGIN_DIR=$(abs_top_srcdir)/src/bin/cfgmgr/plugins \
-	B10_LOCKFILE_DIR_FROM_BUILD=$(abs_top_builddir) \
-	$(PYCOVERAGE_RUN) $(abs_builddir)/$$pytest || exit ; \
-	done

+ 0 - 3
src/lib/python/isc/log/tests/check_output.sh

@@ -1,3 +0,0 @@
-#!/bin/sh
-
-"$1" 2>&1 | sed -e 's/\[\([a-z0-9\.]\{1,\}\)\/\([0-9]\{1,\}\)\]/[\1]/' | cut -d\  -f3- | diff - "$2" 1>&2

+ 0 - 4
src/lib/python/isc/log/tests/console.out

@@ -1,4 +0,0 @@
-INFO  [test.output] MSG_ID Message with list [1, 2, 3, 4]
-WARN  [test.output] DIFFERENT Different message
-FATAL [test.output] MSG_ID Message with 2 1
-DEBUG [test.output] MSG_ID Message with 3 2

+ 0 - 15
src/lib/python/isc/log/tests/log_console.py.in

@@ -1,15 +0,0 @@
-#!@PYTHON@
-
-import isc.log
-# This would come from a dictionary in real life
-MSG_ID = isc.log.create_message("MSG_ID", "Message with %2 %1")
-DIFFERENT = isc.log.create_message("DIFFERENT", "Different message")
-isc.log.init("test")
-logger = isc.log.Logger("output")
-
-logger.debug(20, MSG_ID, "test", "no output")
-logger.info(MSG_ID, [1, 2, 3, 4], "list")
-logger.warn(DIFFERENT)
-logger.fatal(MSG_ID, 1, 2)
-logger.set_severity("DEBUG", 99)
-logger.debug(1, MSG_ID, 2, 3)

+ 0 - 52
src/lib/python/isc/log_messages/Makefile.am

@@ -1,52 +0,0 @@
-SUBDIRS = work
-
-EXTRA_DIST = __init__.py
-EXTRA_DIST += init_messages.py
-EXTRA_DIST += cmdctl_messages.py
-EXTRA_DIST += ddns_messages.py
-EXTRA_DIST += libmemmgr_messages.py
-EXTRA_DIST += memmgr_messages.py
-EXTRA_DIST += stats_messages.py
-EXTRA_DIST += stats_httpd_messages.py
-EXTRA_DIST += xfrin_messages.py
-EXTRA_DIST += xfrout_messages.py
-EXTRA_DIST += zonemgr_messages.py
-EXTRA_DIST += cfgmgr_messages.py
-EXTRA_DIST += config_messages.py
-EXTRA_DIST += notify_out_messages.py
-EXTRA_DIST += libddns_messages.py
-EXTRA_DIST += libxfrin_messages.py
-EXTRA_DIST += loadzone_messages.py
-EXTRA_DIST += server_common_messages.py
-EXTRA_DIST += dbutil_messages.py
-EXTRA_DIST += msgq_messages.py
-EXTRA_DIST += pycc_messages.py
-EXTRA_DIST += util_messages.py
-
-CLEANFILES = __init__.pyc
-CLEANFILES += init_messages.pyc
-CLEANFILES += cmdctl_messages.pyc
-CLEANFILES += ddns_messages.pyc
-CLEANFILES += libmemmgr_messages.pyc
-CLEANFILES += memmgr_messages.pyc
-CLEANFILES += stats_messages.pyc
-CLEANFILES += stats_httpd_messages.pyc
-CLEANFILES += xfrin_messages.pyc
-CLEANFILES += xfrout_messages.pyc
-CLEANFILES += zonemgr_messages.pyc
-CLEANFILES += cfgmgr_messages.pyc
-CLEANFILES += config_messages.pyc
-CLEANFILES += notify_out_messages.pyc
-CLEANFILES += libddns_messages.pyc
-CLEANFILES += libxfrin_messages.pyc
-CLEANFILES += loadzone_messages.pyc
-CLEANFILES += server_common_messages.pyc
-CLEANFILES += dbutil_messages.pyc
-CLEANFILES += msgq_messages.pyc
-CLEANFILES += pycc_messages.pyc
-CLEANFILES += util_messages.pyc
-
-CLEANDIRS = __pycache__
-
-clean-local:
-	rm -rf $(CLEANDIRS)

+ 0 - 68
src/lib/python/isc/log_messages/README

@@ -1,68 +0,0 @@
-This is a placeholder package for logging messages of various modules
-in the form of python scripts.  This package is expected to be installed
-somewhere like <top-install-dir>/python3.x/site-packages/isc/log_messages
-and each message script is expected to be imported as
-"isc.log_messages.some_module_messages".
-
-We also need to allow in-source test code to get access to the message
-scripts in the same manner.  That's why the package is stored in the
-directory that shares the same trailing part as the install directory,
-i.e., isc/log_messages.
-
-Furthermore, we need to support a build mode using a separate build
-tree (such as in the case with 'make distcheck').  In that case if an
-application (via a test script) imports "isc.log_messages.xxx", it
-would try to import the module under the source tree, where the
-generated message script doesn't exist.  So, in the source directory
-(i.e., here) we provide dummy scripts that subsequently import the
-same name of module under the "work" sub-package.  The caller
-application is assumed to have <top_builddir>/src/lib/python/isc/log_messages
-in its module search path (this is done by including
-$(COMMON_PYTHON_PATH) in the PYTHONPATH environment variable),
-which ensures the right directory is chosen.
-
-A python module or program that defines its own log messages needs to
-make sure that the setup described above is implemented.  It's a
-complicated process, but can generally be done by following a common
-pattern:
-
-1. Create the dummy script (see above) for the module and update
-   Makefile.am in this directory accordingly.  See (and use)
-   a helper shell script named gen-forwarder.sh.
-2. Update Makefile.am of the module that defines the log messages.  The
-   following is a sample snippet for Makefile.am for a module named
-   "mymodule" (whose messages are supposed to be generated from a file
-   "mymodule_messages.mes").  In many cases it should work simply by
-   replacing 'mymodule' with the actual module name.
-
-====================  begin Makefile.am additions ===================
-nodist_pylogmessage_PYTHON = $(PYTHON_LOGMSGPKG_DIR)/work/mymodule_messages.py
-pylogmessagedir = $(pyexecdir)/isc/log_messages/
-
-CLEANFILES = $(PYTHON_LOGMSGPKG_DIR)/work/mymodule_messages.py
-CLEANFILES += $(PYTHON_LOGMSGPKG_DIR)/work/mymodule_messages.pyc
-
-EXTRA_DIST = mymodule_messages.mes
-
-$(PYTHON_LOGMSGPKG_DIR)/work/mymodule_messages.py : mymodule_messages.mes
-	$(top_builddir)/src/lib/log/compiler/message \
-	-d $(PYTHON_LOGMSGPKG_DIR)/work -p $(srcdir)/mymodule_messages.mes
-
-# This rule ensures mymodule_messages.py is (re)generated as a result of
-# 'make'.  If there's no other appropriate target, specify
-# mymodule_messages.py in BUILT_SOURCES.
-mymodule: <other source files> $(PYTHON_LOGMSGPKG_DIR)/work/mymodule_messages.py
-=====================  end Makefile.am additions ====================
-
-Notes:
-- "nodist_" prefix is important.  Without this, 'make distcheck' tries
-  to make _messages.py before actually starting the main build, which
-  would fail because the message compiler isn't built yet.
-- "pylogmessage" is a prefix for python scripts that define log
-  messages and are expected to be installed in the common isc/log_messages
-  directory.   It's intentionally named differently from the common
-  "python" prefix (as in python_PYTHON), because the latter may be
-  used for other scripts in the same Makefile.am file.
-- $(PYTHON_LOGMSGPKG_DIR) should be set to point to this directory (or
-  the corresponding build directory if it's different) by the
-  configure script.
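
The forwarder scripts removed below are exactly the one-liners this README describes.
A sketch of the pattern for the hypothetical "mymodule" used above (the message ID
MYMODULE_STARTED is likewise made up):

# src/lib/python/isc/log_messages/mymodule_messages.py, the in-source dummy
# produced by gen-forwarder.sh:
from work.mymodule_messages import *

# A consumer then imports the generated message IDs through the package:
from isc.log_messages.mymodule_messages import MYMODULE_STARTED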

+ 0 - 3
src/lib/python/isc/log_messages/__init__.py

@@ -1,3 +0,0 @@
-"""
-This is an in-source forwarder package redirecting to work/* scripts.
-"""

+ 0 - 1
src/lib/python/isc/log_messages/cfgmgr_messages.py

@@ -1 +0,0 @@
-from work.cfgmgr_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/cmdctl_messages.py

@@ -1 +0,0 @@
-from work.cmdctl_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/config_messages.py

@@ -1 +0,0 @@
-from work.config_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/dbutil_messages.py

@@ -1 +0,0 @@
-from work.dbutil_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/ddns_messages.py

@@ -1 +0,0 @@
-from work.ddns_messages import *

+ 0 - 29
src/lib/python/isc/log_messages/gen-forwarder.sh

@@ -1,29 +0,0 @@
-#!/bin/sh
-
-# Copyright (C) 2011  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-MODULE_NAME=$1
-if test -z $MODULE_NAME; then
-	echo 'Usage: gen-forwarder.sh module_name'
-	exit 1
-fi
-
-echo "from work.${MODULE_NAME}_messages import *" > ${MODULE_NAME}_messages.py
-echo "Forwarder python script is generated.  Make sure to perform:"
-echo "git add ${MODULE_NAME}_messages.py"
-echo "and add the following to Makefile.am:"
-echo "EXTRA_DIST += ${MODULE_NAME}_messages.py"
-echo "CLEANFILES += ${MODULE_NAME}_messages.pyc"

+ 0 - 1
src/lib/python/isc/log_messages/init_messages.py

@@ -1 +0,0 @@
-from work.init_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/libddns_messages.py

@@ -1 +0,0 @@
-from work.libddns_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/libmemmgr_messages.py

@@ -1 +0,0 @@
-from work.libmemmgr_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/libxfrin_messages.py

@@ -1 +0,0 @@
-from work.libxfrin_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/loadzone_messages.py

@@ -1 +0,0 @@
-from work.loadzone_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/memmgr_messages.py

@@ -1 +0,0 @@
-from work.memmgr_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/msgq_messages.py

@@ -1 +0,0 @@
-from work.msgq_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/notify_out_messages.py

@@ -1 +0,0 @@
-from work.notify_out_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/pycc_messages.py

@@ -1 +0,0 @@
-from work.pycc_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/server_common_messages.py

@@ -1 +0,0 @@
-from work.server_common_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/stats_httpd_messages.py

@@ -1 +0,0 @@
-from work.stats_httpd_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/stats_messages.py

@@ -1 +0,0 @@
-from work.stats_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/util_messages.py

@@ -1 +0,0 @@
-from work.util_messages import *

+ 0 - 2
src/lib/python/isc/log_messages/work/.gitignore

@@ -1,2 +0,0 @@
-/__init__.py
-/*_messages.py

+ 0 - 14
src/lib/python/isc/log_messages/work/Makefile.am

@@ -1,14 +0,0 @@
-# __init__.py is generated in the builddir by the configure script so that test
-# scripts can refer to it when a separate builddir is used.
-
-python_PYTHON = __init__.py
-
-pythondir = $(pyexecdir)/isc/log_messages/
-
-CLEANFILES = __init__.pyc __init__.pyo
-CLEANDIRS = __pycache__
-
-EXTRA_DIST = README
-
-clean-local:
-	rm -rf $(CLEANDIRS)

+ 0 - 5
src/lib/python/isc/log_messages/work/README

@@ -1,5 +0,0 @@
-The __init__.py.in in this directory is meant to be processed by
-configure so that the generated __init__.py ends up in the builddir, and
-not the srcdir. This is because Python associates a module with a
-directory, and you can't have portions of the module in two separate
-directories.

+ 0 - 3
src/lib/python/isc/log_messages/work/__init__.py.in

@@ -1,3 +0,0 @@
-"""
-This package is a placeholder for python scripts of log messages.
-"""

+ 0 - 1
src/lib/python/isc/log_messages/xfrin_messages.py

@@ -1 +0,0 @@
-from work.xfrin_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/xfrout_messages.py

@@ -1 +0,0 @@
-from work.xfrout_messages import *

+ 0 - 1
src/lib/python/isc/log_messages/zonemgr_messages.py

@@ -1 +0,0 @@
-from work.zonemgr_messages import *

+ 0 - 10
src/lib/python/isc/net/Makefile.am

@@ -1,10 +0,0 @@
-SUBDIRS = tests
-
-python_PYTHON = __init__.py addr.py parse.py
-
-pythondir = $(pyexecdir)/isc/net
-
-CLEANDIRS = __pycache__
-
-clean-local:
-	rm -rf $(CLEANDIRS)

+ 0 - 3
src/lib/python/isc/net/__init__.py

@@ -1,3 +0,0 @@
-"""
-Here are functions and classes related to networking.
-"""

+ 0 - 46
src/lib/python/isc/net/addr.py

@@ -1,46 +0,0 @@
-# Copyright (C) 2010  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-"""Module where address representations live."""
-import socket
-import re
-
-class InvalidAddress(ValueError):
-    """Exception for invalid addresses."""
-    pass
-
-class IPAddr:
-    """Stores an IPv4 or IPv6 address."""
-    family = None
-    addr = None
-
-    def __init__(self, addr):
-        """
-        Creates the address object from a string representation. It raises
-        an InvalidAddress exception if the provided string isn't a valid address.
-        """
-        try:
-            addrinfo = socket.getaddrinfo(addr, None, 0, 0, 0,
-                socket.AI_NUMERICHOST)[0]
-            self.family = addrinfo[0]
-            if not self.family in [socket.AF_INET, socket.AF_INET6]:
-                raise InvalidAddress(
-                    'IPAddr can hold only IPv4 or IPv6 address')
-            self.addr = socket.inet_pton(self.family, addr)
-        except socket.error as e:
-            raise InvalidAddress(str(e))
-
-    def __str__(self):
-        return socket.inet_ntop(self.family, self.addr)
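
A short, illustrative usage sketch of the IPAddr class defined above:

import socket
from isc.net.addr import IPAddr, InvalidAddress

addr = IPAddr("192.0.2.1")
assert addr.family == socket.AF_INET
assert str(addr) == "192.0.2.1"

try:
    IPAddr("not-an-address")
except InvalidAddress as err:
    print("rejected:", err)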

+ 0 - 48
src/lib/python/isc/net/parse.py

@@ -1,48 +0,0 @@
-# Copyright (C) 2010  Internet Systems Consortium, Inc. ("ISC")
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-"""
-Checking and parsing of ports and IP addresses.
-"""
-
-from isc.net.addr import IPAddr, InvalidAddress
-import socket
-
-def port_parse(port):
-    """
-    Takes a port as an int or string and checks if it is valid. It returns
-    the port as int. If it is not a valid port (the string doesn't contain
-    number or it is not in the valid range), it raises ValueError.
-    """
-    inted = int(port)
-    if inted < 0:
-        raise ValueError("Port value " + str(inted) +
-            " too small, allower range is 0-65535")
-    if inted > 65535:
-        raise ValueError("Port value " + str(inted) +
-            " too large, allowed range is 0-65535")
-    return inted
-
-def addr_parse(addr):
-    """
-    Checks and parses an IP address (either IPv4 or IPv6) and returns
-    the IPAddr object. It raises ValueError if the passed string is not
-    a valid IP address.
-    """
-    try:
-        return IPAddr(addr)
-    except InvalidAddress:
-        raise ValueError('Value ' + addr +
-            ' is not a valid IPv4 or IPv6 address')
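
And a matching, illustrative sketch for the two helpers above:

from isc.net.parse import port_parse, addr_parse

assert port_parse("8053") == 8053     # accepts ints or numeric strings
addr = addr_parse("2001:db8::1")      # returns an IPAddr instance

try:
    port_parse(70000)                 # outside 0-65535
except ValueError as err:
    print(err)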

+ 0 - 24
src/lib/python/isc/net/tests/Makefile.am

@@ -1,24 +0,0 @@
-PYCOVERAGE_RUN = @PYCOVERAGE_RUN@
-PYTESTS = addr_test.py parse_test.py
-EXTRA_DIST = $(PYTESTS)
-
-# If necessary (rare cases), explicitly specify paths to dynamic libraries
-# required by loadable python modules.
-LIBRARY_PATH_PLACEHOLDER =
-if SET_ENV_LIBRARY_PATH
-LIBRARY_PATH_PLACEHOLDER += $(ENV_LIBRARY_PATH)=$(abs_top_builddir)/src/lib/cryptolink/.libs:$(abs_top_builddir)/src/lib/dns/.libs:$(abs_top_builddir)/src/lib/dns/python/.libs:$(abs_top_builddir)/src/lib/cc/.libs:$(abs_top_builddir)/src/lib/config/.libs:$(abs_top_builddir)/src/lib/log/.libs:$(abs_top_builddir)/src/lib/util/.libs:$(abs_top_builddir)/src/lib/util/threads/.libs:$(abs_top_builddir)/src/lib/exceptions/.libs:$(abs_top_builddir)/src/lib/datasrc/.libs:$$$(ENV_LIBRARY_PATH)
-endif
-
-# test using command-line arguments, so use check-local target instead of TESTS
-check-local:
-if ENABLE_PYTHON_COVERAGE
-	touch $(abs_top_srcdir)/.coverage 
-	rm -f .coverage
-	${LN_S} $(abs_top_srcdir)/.coverage .coverage
-endif
-	for pytest in $(PYTESTS) ; do \
-	echo Running test: $$pytest ; \
-	$(LIBRARY_PATH_PLACEHOLDER) \
-	PYTHONPATH=$(COMMON_PYTHON_PATH):$(abs_top_builddir)/src/lib/dns/python/.libs \
-	$(PYCOVERAGE_RUN) $(abs_srcdir)/$$pytest || exit ; \
-	done

+ 0 - 48
src/lib/python/isc/net/tests/addr_test.py

@@ -1,48 +0,0 @@
-# Copyright (C) 2010  Internet Systems Consortium.
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-"""Tests for the isc.net.addr module."""
-import unittest
-import socket
-from isc.net.addr import IPAddr, InvalidAddress
-
-class TestIPAddr(unittest.TestCase):
-    """Test for the IPAddr class."""
-    def test_v6ok(self):
-        """Test if we can parse IPv6."""
-        addr = IPAddr('2001:4f8::1')
-        self.assertEqual(addr.family, socket.AF_INET6)
-        self.assertEqual(addr.addr, socket.inet_pton(socket.AF_INET6, '2001:4f8::1'))
-
-    def test_v4ok(self):
-        """Test if we can parse IPv4."""
-        addr = IPAddr('127.127.127.127')
-        self.assertEqual(addr.family, socket.AF_INET)
-        self.assertEqual(addr.addr, socket.inet_aton('127.127.127.127'))
-
-    def test_badaddr(self):
-        """Test if we raise on wrong address."""
-        self.assertRaises(InvalidAddress, IPAddr, 'foobar')
-        self.assertRaises(InvalidAddress, IPAddr, 'foo::bar')
-        self.assertRaises(InvalidAddress, IPAddr, '123')
-        self.assertRaises(InvalidAddress, IPAddr, '123.456.789.0')
-        self.assertRaises(InvalidAddress, IPAddr, '127/8')
-        self.assertRaises(InvalidAddress, IPAddr, '0/0')
-        self.assertRaises(InvalidAddress, IPAddr, '1.2.3.4/32')
-        self.assertRaises(InvalidAddress, IPAddr, '0')
-        self.assertRaises(InvalidAddress, IPAddr, '')
-
-if __name__ == '__main__':
-    unittest.main()

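For reference, a minimal usage sketch (not part of this commit) of the isc.net.addr API removed above, inferred from the deleted tests; it only runs against a tree that still ships the module:

    import socket
    from isc.net.addr import IPAddr, InvalidAddress

    addr = IPAddr('192.0.2.1')
    assert addr.family == socket.AF_INET                # detected address family
    assert addr.addr == socket.inet_aton('192.0.2.1')   # packed binary form

    try:
        IPAddr('not-an-address')                        # malformed input
    except InvalidAddress:
        pass                                            # raised instead of returning a value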
+ 0 - 85
src/lib/python/isc/net/tests/parse_test.py

@@ -1,85 +0,0 @@
-# Copyright (C) 2010  Internet Systems Consortium, Inc. ("ISC")
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-"""Tests for isc.net.parse."""
-import unittest
-import socket
-from isc.net.parse import port_parse, addr_parse
-
-class TestCheckPort(unittest.TestCase):
-    """
-    Testcases for the isc.net.parse.port_parse function
-    """
-    def test_fail(self):
-        """
-        Test if it fails on invalid input in the correct way.
-        """
-        self.assertRaises(ValueError, port_parse, "not a number")
-        self.assertRaises(ValueError, port_parse, -1)
-        self.assertRaises(ValueError, port_parse, 65536)
-
-    def test_success(self):
-        """
-        Test if it succeeds on valid inputs and returns the correct output
-        """
-        self.assertEqual(port_parse(0), 0)
-        self.assertEqual(port_parse("65535"), 65535)
-        self.assertEqual(port_parse(1234), 1234)
-
-class TestCheckIP(unittest.TestCase):
-    """
-    Testcases for the isc.net.parse.addr_parse function
-    """
-    def test_fail(self):
-        """
-        Test if it fails on invalid input in the correct way.
-        """
-        self.assertRaises(ValueError, addr_parse, "not an address")
-        self.assertRaises(ValueError, addr_parse, "123.456.789.012")
-        self.assertRaises(ValueError, addr_parse, "123.0.0.")
-        # Address range not allowed
-        self.assertRaises(ValueError, addr_parse, "192.0.2.0/24")
-        try:
-            # XXX: MacOS X's inet_pton() doesn't reject this form, so we
-            # check the behavior of the underlying library implementation
-            # before the actual test
-            socket.inet_pton(socket.AF_INET, "0000.0.0.0")
-        except socket.error:
-            self.assertRaises(ValueError, addr_parse, "0000.0.0.0")
-        self.assertRaises(ValueError, addr_parse, "bada:ddr0::")
-        self.assertRaises(ValueError, addr_parse, "2001:db8::/32")
-        # This address is one segment too long (9 segments in total)
-        self.assertRaises(ValueError, addr_parse, "2001:db8:0:0:0:0:0:0:0")
-        # Only one :: allowed
-        self.assertRaises(ValueError, addr_parse, "2001::db8::c")
-
-    def test_success(self):
-        """
-        Test if it succeeds on valid inputs and returns addresses that look
-        the same.
-        """
-        self.assertEqual("192.0.2.0", str(addr_parse("192.0.2.0")))
-        # The OS could return something other than the canonical form, in
-        # which case the test would fail. However, there is no easy way to
-        # make the test robust against that, so it is left this way unless
-        # someone finds an OS that actually returns a different form.
-        self.assertEqual("2001:bd8::", str(addr_parse("2001:bd8::")))
-        # It should strip the unnecessary parts
-        self.assertEqual("2001:bd8::", str(addr_parse("2001:bd8:0:0:0:0:0:0")))
-        self.assertEqual("::", str(addr_parse("::")))
-        self.assertEqual("2001:bd8::", str(addr_parse("2001:bd8::0.0.0.0")))
-
-if __name__ == "__main__":
-    unittest.main()

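Similarly, a small usage sketch (again not part of the diff) of the removed isc.net.parse helpers exercised by the tests above, assuming the pre-removal tree:

    from isc.net.parse import port_parse, addr_parse

    port = port_parse("53")                    # accepts int or string, returns int
    addr = addr_parse("2001:db8:0:0:0:0:0:0")  # returns an address object
    print(port, str(addr))                     # 53 2001:db8::  (canonical text form)

    try:
        addr_parse("192.0.2.0/24")             # address ranges are rejected
    except ValueError:
        pass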
+ 0 - 11
src/lib/python/isc/sysinfo/Makefile.am

@@ -1,11 +0,0 @@
-SUBDIRS = . tests
-
-python_PYTHON = __init__.py
-python_PYTHON += sysinfo.py
-
-pythondir = $(pyexecdir)/isc/sysinfo
-
-CLEANDIRS = __pycache__
-
-clean-local:
-	rm -rf $(CLEANDIRS)

+ 0 - 1
src/lib/python/isc/sysinfo/__init__.py

@@ -1 +0,0 @@
-from isc.sysinfo.sysinfo import *

+ 0 - 514
src/lib/python/isc/sysinfo/sysinfo.py

@@ -1,514 +0,0 @@
-# Copyright (C) 2012  Internet Systems Consortium, Inc. ("ISC")
-#
-# Permission to use, copy, modify, and distribute this software for any
-# purpose with or without fee is hereby granted, provided that the above
-# copyright notice and this permission notice appear in all copies.
-#
-# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
-# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
-# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
-# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
-# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
-# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
-# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
-# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-
-'''This module returns system information.'''
-
-import os
-import sys
-import re
-import subprocess
-import os.path
-import platform
-import time
-from datetime import timedelta
-
-class SysInfo:
-    def __init__(self):
-        self._num_processors = None
-        self._endianness = 'Unknown'
-        self._hostname = ''
-        self._platform_name = 'Unknown'
-        self._platform_version = 'Unknown'
-        self._platform_machine = 'Unknown'
-        self._platform_is_smp = None
-        self._uptime = None
-        self._loadavg = None
-        self._mem_total = None
-        self._mem_free = None
-        self._mem_swap_total = None
-        self._mem_swap_free = None
-        self._net_interfaces = 'Unknown\n'
-        self._net_routing_table = 'Unknown\n'
-        self._net_stats = 'Unknown\n'
-        self._net_connections = 'Unknown\n'
-
-        # The following are Linux specific, and should eventually be removed
-        # from this level; for now we simply default to None (so they won't
-        # be printed)
-        self._platform_distro = None
-        self._mem_cached = None
-        self._mem_buffers = None
-
-    def get_num_processors(self):
-        """Returns the number of processors. This is the number of
-        hyperthreads when hyper-threading is enabled.
-        """
-        return self._num_processors
-
-    def get_endianness(self):
-        """Returns 'big' or 'little'."""
-        return self._endianness
-
-    def get_platform_hostname(self):
-        """Returns the hostname of the system."""
-        return self._hostname
-
-    def get_platform_name(self):
-        """Returns the platform name (uname -s)."""
-        return self._platform_name
-
-    def get_platform_version(self):
-        """Returns the platform version (uname -v)."""
-        return self._platform_version
-
-    def get_platform_machine(self):
-        """Returns the platform machine architecture."""
-        return self._platform_machine
-
-    def get_platform_is_smp(self):
-        """Returns True if an SMP kernel is being used, False otherwise."""
-        return self._platform_is_smp
-
-    def get_platform_distro(self):
-        """Returns the name of the OS distribution in use.
-
-        Note: the concept of 'distribution' is Linux specific.  This shouldn't
-        be at this level.
-
-        """
-        return self._platform_distro
-
-    def get_uptime(self):
-        """Returns the uptime in seconds."""
-        return self._uptime
-
-    def get_uptime_desc(self):
-        """Returns the uptime in human readable form.
-
-        The format is the result of the str() method of the standard library
-        datetime.timedelta class.  It returns None if _uptime is None.
-
-        """
-        if self._uptime is None:
-            return None
-
-        return str(timedelta(seconds=self._uptime))
-
-    def get_loadavg(self):
-        """Returns the load average as 3 floating point values in an array."""
-        return self._loadavg
-
-    def get_mem_total(self):
-        """Returns the total amount of memory in bytes."""
-        return self._mem_total
-
-    def get_mem_free(self):
-        """Returns the amount of free memory in bytes."""
-        return self._mem_free
-
-    def get_mem_cached(self):
-        """Returns the amount of cached memory in bytes."""
-        return self._mem_cached
-
-    def get_mem_buffers(self):
-        """Returns the amount of buffer in bytes."""
-        return self._mem_buffers
-
-    def get_mem_swap_total(self):
-        """Returns the total amount of swap in bytes."""
-        return self._mem_swap_total
-
-    def get_mem_swap_free(self):
-        """Returns the amount of free swap in bytes."""
-        return self._mem_swap_free
-
-    def get_net_interfaces(self):
-        """Returns information about network interfaces (as a multi-line string)."""
-        return self._net_interfaces
-
-    def get_net_routing_table(self):
-        """Returns information about network routing table (as a multi-line string)."""
-        return self._net_routing_table
-
-    def get_net_stats(self):
-        """Returns network statistics (as a multi-line string)."""
-        return self._net_stats
-
-    def get_net_connections(self):
-        """Returns network connection information (as a multi-line string)."""
-        return self._net_connections
-
-class SysInfoPOSIX(SysInfo):
-    """Common POSIX implementation of the SysInfo class.
-    See the SysInfo class documentation for more information.
-    """
-    def __init__(self):
-        super().__init__()
-
-        self._num_processors = os.sysconf('SC_NPROCESSORS_CONF')
-        self._endianness = sys.byteorder
-
-        u = os.uname()
-        self._platform_name = u[0]
-        self._hostname = u[1]
-        self._platform_version = u[2]
-        self._platform_machine = u[4]
-
-        self._loadavg = os.getloadavg()
-
-class SysInfoLinux(SysInfoPOSIX):
-    """Linux implementation of the SysInfo class.
-    See the SysInfo class documentation for more information.
-    """
-    def __init__(self):
-        super().__init__()
-
-        with open('/proc/sys/kernel/hostname') as f:
-            self._hostname = f.read().strip()
-
-        with open('/proc/version') as f:
-            self._platform_is_smp = ' SMP ' in f.read().strip()
-
-        with open('/proc/uptime') as f:
-            u = f.read().strip().split(' ')
-            if len(u) > 1:
-                self._uptime = int(round(float(u[0])))
-
-        with open('/proc/loadavg') as f:
-            l = f.read().strip().split(' ')
-            if len(l) >= 3:
-                self._loadavg = (float(l[0]), float(l[1]), float(l[2]))
-
-        with open('/proc/meminfo') as f:
-            m = f.readlines()
-            for line in m:
-                r = re.match('^MemTotal:\s+(.*)\s*kB', line)
-                if r:
-                    self._mem_total = int(r.group(1).strip()) * 1024
-                    continue
-                r = re.match('^MemFree:\s+(.*)\s*kB', line)
-                if r:
-                    self._mem_free = int(r.group(1).strip()) * 1024
-                    continue
-                r = re.match('^Cached:\s+(.*)\s*kB', line)
-                if r:
-                    self._mem_cached = int(r.group(1).strip()) * 1024
-                    continue
-                r = re.match('^Buffers:\s+(.*)\s*kB', line)
-                if r:
-                    self._mem_buffers = int(r.group(1).strip()) * 1024
-                    continue
-                r = re.match('^SwapTotal:\s+(.*)\s*kB', line)
-                if r:
-                    self._mem_swap_total = int(r.group(1).strip()) * 1024
-                    continue
-                r = re.match('^SwapFree:\s+(.*)\s*kB', line)
-                if r:
-                    self._mem_swap_free = int(r.group(1).strip()) * 1024
-                    continue
-
-        self._platform_distro = None
-
-        try:
-            s = subprocess.check_output(['lsb_release', '-a'])
-            for line in s.decode('utf-8').split('\n'):
-                r = re.match('^Description:(.*)', line)
-                if r:
-                    self._platform_distro = r.group(1).strip()
-                    break
-        except (subprocess.CalledProcessError, OSError):
-            pass
-
-        if self._platform_distro is None:
-            files = ['/etc/debian_release',
-                     '/etc/debian_version',
-                     '/etc/SuSE-release',
-                     '/etc/UnitedLinux-release',
-                     '/etc/mandrake-release',
-                     '/etc/gentoo-release',
-                     '/etc/fedora-release',
-                     '/etc/redhat-release',
-                     '/etc/redhat_version',
-                     '/etc/slackware-release',
-                     '/etc/slackware-version',
-                     '/etc/arch-release',
-                     '/etc/lsb-release',
-                     '/etc/mageia-release']
-            for fn in files:
-                if os.path.exists(fn):
-                    with open(fn) as f:
-                        self._platform_distro = f.read().strip()
-                    break
-
-        if self._platform_distro is None:
-            self._platform_distro = 'Unknown'
-
-        try:
-            s = subprocess.check_output(['ip', 'addr'])
-            self._net_interfaces = s.decode('utf-8')
-        except (subprocess.CalledProcessError, OSError):
-            self._net_interfaces = 'Warning: "ip addr" command failed.\n'
-
-        try:
-            s = subprocess.check_output(['ip', 'route'])
-            self._net_routing_table = s.decode('utf-8')
-            self._net_routing_table += '\n'
-            s = subprocess.check_output(['ip', '-f', 'inet6', 'route'])
-            self._net_routing_table += s.decode('utf-8')
-        except (subprocess.CalledProcessError, OSError):
-            self._net_routing_table = 'Warning: "ip route" or "ip -f inet6 route" command failed.\n'
-
-        try:
-            s = subprocess.check_output(['netstat', '-s'])
-            self._net_stats = s.decode('utf-8')
-        except (subprocess.CalledProcessError, OSError):
-            self._net_stats = 'Warning: "netstat -s" command failed.\n'
-
-        try:
-            s = subprocess.check_output(['netstat', '-apn'])
-            self._net_connections = s.decode('utf-8')
-        except (subprocess.CalledProcessError, OSError):
-            self._net_connections = 'Warning: "netstat -apn" command failed.\n'
-
-class SysInfoBSD(SysInfoPOSIX):
-    """Common BSD implementation of the SysInfo class.
-    See the SysInfo class documentation for more information.
-    """
-    def __init__(self):
-        super().__init__()
-
-        try:
-            s = subprocess.check_output(['hostname'])
-            self._hostname = s.decode('utf-8').strip()
-        except (subprocess.CalledProcessError, OSError):
-            pass
-
-        try:
-            s = subprocess.check_output(['sysctl', '-n', 'hw.physmem'])
-            self._mem_total = int(s.decode('utf-8').strip())
-        except (subprocess.CalledProcessError, OSError):
-            pass
-
-        try:
-            s = subprocess.check_output(['ifconfig'])
-            self._net_interfaces = s.decode('utf-8')
-        except (subprocess.CalledProcessError, OSError):
-            self._net_interfaces = 'Warning: "ifconfig" command failed.\n'
-
-        try:
-            s = subprocess.check_output(['netstat', '-s'])
-            self._net_stats = s.decode('utf-8')
-        except (subprocess.CalledProcessError, OSError):
-            self._net_stats = 'Warning: "netstat -s" command failed.\n'
-
-        try:
-            s = subprocess.check_output(['netstat', '-an'])
-            self._net_connections = s.decode('utf-8')
-        except (subprocess.CalledProcessError, OSError):
-            self._net_connections = 'Warning: "netstat -an" command failed.\n'
-
-        try:
-            s = subprocess.check_output(['netstat', '-nr'])
-            self._net_routing_table = s.decode('utf-8')
-        except (subprocess.CalledProcessError, OSError):
-            self._net_connections = 'Warning: "netstat -nr" command failed.\n'
-
-class SysInfoNetBSD(SysInfoBSD):
-    """NetBSD and OpenBSD implementation of the SysInfo class.
-    See the SysInfo class documentation for more information.
-    """
-    def __init__(self):
-        super().__init__()
-
-        try:
-            s = subprocess.check_output(['sysctl', '-n', 'kern.boottime'])
-            t = s.decode('utf-8').strip()
-            sec = time.time() - int(t)
-            self._uptime = int(round(sec))
-        except (subprocess.CalledProcessError, OSError):
-            pass
-
-        try:
-            s = subprocess.check_output(['sysctl', '-n', 'vm.loadavg'])
-            l = s.decode('utf-8').strip().split(' ')
-            if len(l) >= 3:
-                self._loadavg = (float(l[0]), float(l[1]), float(l[2]))
-        except (subprocess.CalledProcessError, OSError):
-            pass
-
-        try:
-            # We use the size of free-list from the vmstat result.
-            s = subprocess.check_output(['vmstat'])
-            lines = s.decode('utf-8').split('\n')
-            v = re.split('\s+', lines[2])
-            self._mem_free = int(v[5]) * 1024
-        except (subprocess.CalledProcessError, OSError):
-            pass
-
-        try:
-            s = subprocess.check_output(['swapctl', '-s', '-k'])
-            l = s.decode('utf-8').strip()
-            r = re.match('^total: (\d+) 1K-blocks allocated, (\d+) used, (\d+) available', l)
-            if r:
-                self._mem_swap_total = int(r.group(1).strip()) * 1024
-                self._mem_swap_free = int(r.group(3).strip()) * 1024
-        except (subprocess.CalledProcessError, OSError):
-            pass
-
-class SysInfoFreeBSDOSX(SysInfoBSD):
-    """Shared code for the FreeBSD and OS X implementations of the SysInfo
-    class. See the SysInfo class documentation for more information.
-    """
-    def __init__(self):
-        super().__init__()
-
-        try:
-            s = subprocess.check_output(['sysctl', '-n', 'kern.boottime'])
-            t = s.decode('utf-8').strip()
-            r = re.match('^\{\s+sec\s+\=\s+(\d+),.*', t)
-            if r:
-                sec = time.time() - int(r.group(1))
-                self._uptime = int(round(sec))
-        except (subprocess.CalledProcessError, OSError):
-            pass
-
-        try:
-            s = subprocess.check_output(['sysctl', '-n', 'vm.loadavg'])
-            l = s.decode('utf-8').strip()
-            r = re.match('^\{(.*)\}$', l)
-            if r:
-                la = r.group(1).strip().split(' ')
-            else:
-                la = l.split(' ')
-            if len(la) >= 3:
-                self._loadavg = (float(la[0]), float(la[1]), float(la[2]))
-        except (subprocess.CalledProcessError, OSError):
-            pass
-
-class SysInfoFreeBSD(SysInfoFreeBSDOSX):
-    """FreeBSD implementation of the SysInfo class.
-    See the SysInfo class documentation for more information.
-    """
-    def __init__(self):
-        super().__init__()
-
-        try:
-            # There doesn't seem to be an easy way to reliably detect whether
-            # the kernel was built with SMP support on FreeBSD.  We use
-            # a sysctl variable that is only defined in SMP kernels.
-            # This assumption seems to hold for several recent versions of
-            # FreeBSD, but it may not always be so for future versions.
-            s = subprocess.check_output(['sysctl', '-n',
-                                         'kern.smp.forward_signal_enabled'])
-            self._platform_is_smp = True # the value doesn't matter
-        except subprocess.CalledProcessError:
-            # If this variable isn't defined, we should see this exception;
-            # interpret it as an indication of a non-SMP kernel.
-            self._platform_is_smp = False
-        except OSError:
-            pass
-
-        try:
-            # We use the size of free-list from the vmstat result.
-            s = subprocess.check_output(['vmstat', '-H'])
-            lines = s.decode('utf-8').split('\n')
-            v = re.split('\s+', lines[2])
-            self._mem_free = int(v[5]) * 1024
-        except (subprocess.CalledProcessError, OSError):
-            pass
-
-        try:
-            s = subprocess.check_output(['swapctl', '-s', '-k'])
-            l = s.decode('utf-8').strip()
-            r = re.match('^Total:\s+(\d+)\s+(\d+)', l)
-            if r:
-                self._mem_swap_total = int(r.group(1).strip()) * 1024
-                self._mem_swap_free = self._mem_swap_total - (int(r.group(2).strip()) * 1024)
-        except (subprocess.CalledProcessError, OSError):
-            pass
-
-
-
-class SysInfoOSX(SysInfoFreeBSDOSX):
-    """OS X (Darwin) implementation of the SysInfo class.
-    See the SysInfo class documentation for more information.
-    """
-    def __init__(self):
-        super().__init__()
-
-        # Note: this overrides the value already set when hw.physmem was
-        # read. On OS X, hw.physmem is not necessarily the correct value,
-        # but since reading it does not fail and works on most BSDs, it is
-        # left in the base class and overridden here.
-        self._mem_total = None
-        try:
-            s = subprocess.check_output(['sysctl', '-n', 'hw.memsize'])
-            self._mem_total = int(s.decode('utf-8').strip())
-        except (subprocess.CalledProcessError, OSError):
-            pass
-
-        try:
-            s = subprocess.check_output(['vm_stat'])
-            lines = s.decode('utf-8').split('\n')
-            # store all values in a dict
-            values = {}
-            page_size = None
-            page_size_re = re.compile('.*page size of ([0-9]+) bytes')
-            for line in lines:
-                page_size_m = page_size_re.match(line)
-                if page_size_m:
-                    page_size = int(page_size_m.group(1))
-                else:
-                    key, _, value = line.partition(':')
-                    values[key] = value.strip()[:-1]
-            # Only calculate memory if page size is known
-            if page_size is not None:
-                self._mem_free = int(values['Pages free']) * page_size +\
-                                 int(values['Pages speculative']) * page_size
-        except (subprocess.CalledProcessError, OSError):
-            pass
-
-        try:
-            s = subprocess.check_output(['sysctl', '-n', 'vm.swapusage'])
-            l = s.decode('utf-8').strip()
-            r = re.match('^total = (\d+\.\d+)M\s+used = (\d+\.\d+)M\s+free = (\d+\.\d+)M', l)
-            if r:
-                self._mem_swap_total = float(r.group(1).strip()) * 1024
-                self._mem_swap_free = float(r.group(3).strip()) * 1024
-        except (subprocess.CalledProcessError, OSError):
-            pass
-
-
-class SysInfoTestcase(SysInfo):
-    def __init__(self):
-        super().__init__()
-        self._endianness = 'bigrastafarian'
-        self._platform_name = 'b10test'
-        self._uptime = 131072
-
-def SysInfoFromFactory():
-    osname = platform.system()
-    if osname == 'Linux':
-        return SysInfoLinux()
-    elif (osname == 'NetBSD') or (osname == 'OpenBSD'):
-        return SysInfoNetBSD()
-    elif osname == 'FreeBSD':
-        return SysInfoFreeBSD()
-    elif osname == 'Darwin':
-        return SysInfoOSX()
-    elif osname == 'BIND10Testcase':
-        return SysInfoTestcase()
-    else:
-        return SysInfoPOSIX()

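A brief usage sketch (not part of this commit) of the removed sysinfo module, based on the factory function and getters deleted above; it assumes a tree that still contains isc.sysinfo:

    from isc.sysinfo import SysInfoFromFactory

    si = SysInfoFromFactory()                      # picks the platform-specific subclass
    print('platform:', si.get_platform_name(), si.get_platform_version())
    print('processors:', si.get_num_processors())
    print('uptime:', si.get_uptime_desc())         # None if uptime could not be determined
    print('free memory (bytes):', si.get_mem_free())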
File diff suppressed because it is too large
+ 0 - 23
src/lib/python/isc/sysinfo/tests/Makefile.am


+ 0 - 0
src/lib/python/isc/sysinfo/tests/sysinfo_test.py


Some files were not shown because too many files changed in this diff