loadzone.py.in

#!@PYTHON@

# Copyright (C) 2012 Internet Systems Consortium.
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND INTERNET SYSTEMS CONSORTIUM
# DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
# INTERNET SYSTEMS CONSORTIUM BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
# FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
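
'''Load the contents of a master zone file into a BIND 10 data source
(the b10-loadzone tool).

See set_cmd_options() and LoadZoneRunner below for the supported command
line options and the main load logic.
'''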

import sys
sys.path.append('@@PYTHONPATH@@')
import time
import signal
from optparse import OptionParser
from isc.dns import *
from isc.datasrc import *
import isc.log
from isc.log_messages.loadzone_messages import *
# These are needed for logger settings
import bind10_config
import json
from isc.config import module_spec_from_file
from isc.config.ccsession import path_search

isc.log.init("b10-loadzone")
logger = isc.log.Logger("loadzone")

# The default value for the interval of progress report in terms of the
# number of RRs loaded in that interval.  Arbitrary choice, but intended to
# be reasonably small to handle emergency exit.
LOAD_INTERVAL_DEFAULT = 10000

class BadArgument(Exception):
    '''An exception indicating an error in a command line argument.

    '''
    pass

class LoadFailure(Exception):
    '''An exception indicating a failure in the loading operation.

    '''
    pass

def set_cmd_options(parser):
    '''Helper function to set command-line options.

    '''
    parser.add_option("-c", "--datasrc-conf", dest="conf", action="store",
                      help="""(Mandatory) configuration of datasrc to load
                      the zone in.  Example:
                      '{"database_file": "/path/to/dbfile/db.sqlite3"}'""",
                      metavar='CONFIG')
    parser.add_option("-d", "--debug", dest="debug_level",
                      type='int', action="store", default=None,
                      help="enable debug logs with the specified level")
    parser.add_option("-i", "--report-interval", dest="report_interval",
                      type='int', action="store",
                      default=LOAD_INTERVAL_DEFAULT,
                      help="""report logs progress per specified number of RRs
                      (specify 0 to suppress report) [default: %default]""")
    parser.add_option("-t", "--datasrc-type", dest="datasrc_type",
                      action="store", default='sqlite3',
                      help="""type of data source (e.g., 'sqlite3')\n
                      [default: %default]""")
    parser.add_option("-C", "--class", dest="zone_class", action="store",
                      default='IN',
                      help="""RR class of the zone; currently must be 'IN'
                      [default: %default]""")
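    # For illustration, a command line using these options could look like
    # the following (the database path and file names are placeholders,
    # not defaults):
    #   b10-loadzone -c '{"database_file": "/path/to/zone.sqlite3"}' \
    #       example.org /path/to/example.org.zone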

class LoadZoneRunner:
    '''Main logic for the loadzone.

    This is implemented as a class mainly for the convenience of tests.

    '''
    def __init__(self, command_args):
        self.__command_args = command_args
        self.__loaded_rrs = 0
        self.__interrupted = False  # will be set to True on receiving signal

        # system-wide log configuration.  We need to configure logging this
        # way so that the logging policy applies to underlying libraries, too.
        self.__log_spec = json.dumps(isc.config.module_spec_from_file(
            path_search('logging.spec', bind10_config.PLUGIN_PATHS)).
            get_full_spec())
        # "severity" and "debuglevel" are the tunable parameters, which will
        # be set in _config_log().
        self.__log_conf_base = {"loggers":
                                    [{"name": "*",
                                      "output_options":
                                          [{"output": "stderr",
                                            "destination": "console"}]}]}

        # These are essentially private, and defined as "protected" for the
        # convenience of tests inspecting them
        self._zone_class = None
        self._zone_name = None
        self._zone_file = None
        self._datasrc_config = None
        self._datasrc_type = None
        self._log_severity = 'INFO'
        self._log_debuglevel = 0
        self._report_interval = LOAD_INTERVAL_DEFAULT

        self._config_log()

    def _config_log(self):
        '''Configure logging policy.

        This is essentially private, but defined as "protected" for tests.

        '''
        self.__log_conf_base['loggers'][0]['severity'] = self._log_severity
        self.__log_conf_base['loggers'][0]['debuglevel'] = self._log_debuglevel
        isc.log.log_config_update(json.dumps(self.__log_conf_base),
                                  self.__log_spec)
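        # With the default settings this passes, in effect, the following
        # logging configuration (shown here only for illustration):
        #   {"loggers": [{"name": "*", "severity": "INFO", "debuglevel": 0,
        #                 "output_options": [{"output": "stderr",
        #                                     "destination": "console"}]}]}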

    def _parse_args(self):
        '''Parse command line options and other arguments.

        This is essentially private, but defined as "protected" for tests.

        '''
        usage_txt = \
            'usage: %prog [options] -c datasrc_config zonename zonefile'
        parser = OptionParser(usage=usage_txt)
        set_cmd_options(parser)
        (options, args) = parser.parse_args(args=self.__command_args)

        # Configure logging policy as early as possible
        if options.debug_level is not None:
            self._log_severity = 'DEBUG'
            # optparse performs type check
            self._log_debuglevel = int(options.debug_level)
            if self._log_debuglevel < 0:
                raise BadArgument(
                    'Invalid debug level (must be non negative): %d' %
                    self._log_debuglevel)
        self._config_log()

        if options.conf is None:
            raise BadArgument('data source config option cannot be omitted')
        self._datasrc_config = options.conf
        self._datasrc_type = options.datasrc_type

        try:
            self._zone_class = RRClass(options.zone_class)
        except isc.dns.InvalidRRClass as ex:
            raise BadArgument('Invalid zone class: ' + str(ex))
        if self._zone_class != RRClass.IN():
            raise BadArgument("RR class is not supported: " +
                              str(self._zone_class))

        self._report_interval = int(options.report_interval)
        if self._report_interval < 0:
            raise BadArgument(
                'Invalid report interval (must be non negative): %d' %
                self._report_interval)

        if len(args) != 2:
            raise BadArgument('Unexpected number of arguments: %d (must be 2)'
                              % (len(args)))
        try:
            self._zone_name = Name(args[0])
        except Exception as ex:  # too broad, but there's no better granularity
            raise BadArgument("Invalid zone name '" + args[0] + "': " +
                              str(ex))
        self._zone_file = args[1]

    def __cancel_create(self):
        '''sqlite3-only hack: delete the zone just created on load failure.

        This should eventually be done via the generic datasrc API, but right
        now we don't have that interface.  Leaving the newly created (empty)
        zone in the database after a failed load would be confusing, so we
        handle it with a workaround.

        '''
        if self._datasrc_type != 'sqlite3':
            return

        import sqlite3  # we need the module only here
        import json

        # If we are here, the following should basically succeed; since
        # this is considered a temporary workaround we don't bother to catch
        # and recover rare failure cases.
        dbfile = json.loads(self._datasrc_config)['database_file']
        with sqlite3.connect(dbfile) as conn:
            cur = conn.cursor()
            cur.execute("DELETE FROM zones WHERE name = ?",
                        [self._zone_name.to_text()])

    def _report_progress(self, loaded_rrs):
        '''Dump the current progress report to stdout.

        This is essentially private, but defined as "protected" for tests.

        '''
        elapsed = time.time() - self.__start_time
        sys.stdout.write("\r" + (80 * " "))
        sys.stdout.write("\r%d RRs loaded in %.2f seconds" %
                         (loaded_rrs, elapsed))
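        # A typical line written here looks like (the figures are only
        # illustrative):
        #   "12340000 RRs loaded in 152.42 seconds"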

    def _do_load(self):
        '''Main part of the load logic.

        This is essentially private, but defined as "protected" for tests.

        '''
        created = False
        try:
            datasrc_client = DataSourceClient(self._datasrc_type,
                                              self._datasrc_config)
            created = datasrc_client.create_zone(self._zone_name)
            if created:
                logger.info(LOADZONE_ZONE_CREATED, self._zone_name,
                            self._zone_class)
            loader = ZoneLoader(datasrc_client, self._zone_name,
                                self._zone_file)
            self.__start_time = time.time()
            if self._report_interval > 0:
                limit = self._report_interval
            else:
                # Even if progress report is suppressed, we still load
                # incrementally so we won't delay catching signals too long.
                limit = LOAD_INTERVAL_DEFAULT
            while (not self.__interrupted and
                   not loader.load_incremental(limit)):
                self.__loaded_rrs += limit  # count the chunk just loaded
                if self._report_interval > 0:
                    self._report_progress(self.__loaded_rrs)
            if self.__interrupted:
                raise LoadFailure('loading interrupted by signal')
        except Exception as ex:
            # release any remaining lock held in the client/loader
            loader, datasrc_client = None, None
            if created:
                self.__cancel_create()
                logger.error(LOADZONE_CANCEL_CREATE_ZONE, self._zone_name,
                             self._zone_class)
            raise LoadFailure(str(ex))

    def _post_load_checks(self):
        '''Perform minimal validity checks on the loaded zone.

        We do this ourselves because the underlying library currently
        doesn't do any checks.  Once the library supports post-load
        validation, these checks should be removed.

        '''
        datasrc_client = DataSourceClient(self._datasrc_type,
                                          self._datasrc_config)
        _, finder = datasrc_client.find_zone(self._zone_name)  # should succeed
        result = finder.find(self._zone_name, RRType.SOA())[0]
        if result != finder.SUCCESS:
            self._post_load_warning('zone has no SOA')
        result = finder.find(self._zone_name, RRType.NS())[0]
        if result != finder.SUCCESS:
            self._post_load_warning('zone has no NS')

    def _post_load_warning(self, msg):
        logger.warn(LOADZONE_POSTLOAD_ISSUE, self._zone_name,
                    self._zone_class, msg)

    def _set_signal_handlers(self):
        signal.signal(signal.SIGINT, self._interrupt_handler)
        signal.signal(signal.SIGTERM, self._interrupt_handler)

    def _interrupt_handler(self, signal, frame):
        self.__interrupted = True

    def run(self):
        '''Top-level method, simply calling other helpers'''

        try:
            self._set_signal_handlers()
            self._parse_args()
            self._do_load()
            total_elapsed_txt = "%.2f" % (time.time() - self.__start_time)
            logger.info(LOADZONE_DONE, self.__loaded_rrs, self._zone_name,
                        self._zone_class, total_elapsed_txt)
            self._post_load_checks()
            return 0
        except BadArgument as ex:
            logger.error(LOADZONE_ARGUMENT_ERROR, ex)
        except LoadFailure as ex:
            logger.error(LOADZONE_LOAD_ERROR, self._zone_name,
                         self._zone_class, ex)
        except Exception as ex:
            logger.error(LOADZONE_UNEXPECTED_FAILURE, ex)
        return 1

if '__main__' == __name__:
    runner = LoadZoneRunner(sys.argv[1:])
    ret = runner.run()
    sys.exit(ret)

## Local Variables:
## mode: python
## End: