# crawler.py

import io
import cgi
import pytz
from datetime import datetime, timedelta

from werkzeug.http import parse_date
from flask import escape, json, current_app
import requests

from ispformat.validator import validate_isp

from .models import ISP
from .utils import check_geojson_spatialite, utcnow
from . import db


def get_encoding(content_type):
    """
    >>> get_encoding('wat/ever; charset=hey')
    'hey'
    """
    content_type, params = cgi.parse_header(content_type)
    if 'charset' in params:
        return params['charset'].strip("'\"")
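

# Base crawler: fetch an ISP's JSON file, inspect its HTTP caching headers and
# validate the payload against the ispformat schema. All user-visible output
# goes through the m()/err()/warn()/info() helpers so subclasses can render it
# as plain text or HTML.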
class Crawler(object):

    MAX_JSON_SIZE=1*1024*1024

    escape=staticmethod(lambda x: unicode(str(x), 'utf8') if type(x) != unicode else x)

    def __init__(self):
        self.success=False
        self.modified=True
        self.jdict={}
        self.cache_info=None
        self.jdict_max_age=self.config('DEFAULT_CACHE_TIME')

    def m(self, msg, evt=None):
        if not evt:
            return u'%s\n'%msg
        else:
            return u''

    def err(self, msg, *args):
        return self.m(u'! %s'%msg, *args)

    def warn(self, msg):
        return self.m(u'@ %s'%msg)

    def info(self, msg):
        return self.m(u'\u2013 %s'%msg)

    def abort(self, msg):
        raise NotImplementedError

    def color(self, color, msg):
        return msg

    def bold(self, msg):
        return msg

    def italics(self, msg):
        return msg

    def nl(self):
        return self.m('')

    def format_validation_errors(self, errs):
        r=[]
        for e in errs:
            r.append(u' %s: %s'%('.'.join(list(e.schema_path)[1:]), e.message))
        return u'\n'.join(r)+'\n'

    def pre_done_cb(self, *args):
        pass

    def done_cb(self):
        pass

    def config(self, name):
        return current_app.config.get('CRAWLER_'+name)
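
    # Keep only the Cache-Control directives we care about, e.g.
    #   'public, max-age=3600'  ->  {'max-age': '3600'}
    # A valueless directive would be stored as True; anything other than
    # max-age and s-maxage is ignored.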
    def parse_cache_control(self, _cachectl):
        cachectl={}
        for cc in _cachectl.split(','):
            cc=cc.strip()
            if not cc:
                continue
            cc=cc.split('=')
            if cc[0] not in ('max-age', 's-maxage'):
                continue
            try:
                cachectl[cc[0]]=cc[1]
            except IndexError:
                cachectl[cc[0]]=True
        return cachectl
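
    # The crawler is a generator: every yield is one chunk of the validation
    # report, so callers can stream progress while the checks run. cache_info
    # may carry the ETag / Last-Modified values saved from a previous run and
    # is used to issue a conditional request.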
    def __call__(self, url, cache_info={}):
        esc=self.escape
        yield self.m('Starting the validation process...')

        r=None
        try:
            yield self.m('* Attempting to retrieve %s'%self.bold(url))
            headers={'User-Agent': 'FFDN DB validator'}
            if cache_info.get('etag'):
                headers['If-None-Match'] = cache_info['etag']
            if cache_info.get('last-modified'):
                headers['If-Modified-Since'] = cache_info['last-modified']
            r=requests.get(url, verify='/etc/ssl/certs/ca-certificates.crt',
                           headers=headers, stream=True, timeout=10)
        except requests.exceptions.SSLError as e:
            yield self.err('Unable to connect, SSL Error: '+self.color('#dd1144', esc(e)))
        except requests.exceptions.ConnectionError as e:
            yield self.err('Unable to connect: '+self.color('#dd1144', esc(e)))
        except requests.exceptions.Timeout as e:
            yield self.err('Connection timeout')
        except requests.exceptions.TooManyRedirects as e:
            yield self.err('Too many redirects')
        except requests.exceptions.RequestException as e:
            yield self.err('Internal request exception')
        # except Exception as e:
        #     yield self.err('Unexpected request exception')

        if r is None:
            yield self.abort('Connection could not be established, aborting')
            return

        yield self.info('Connection established')
        yield self.info('Response code: '+self.bold(str(r.status_code)+' '+esc(r.reason)))
        try:
            r.raise_for_status()
        except requests.exceptions.HTTPError as e:
            yield self.err('Response code indicates an error')
            yield self.abort('Invalid response code')
            return

        _cachecontrol=r.headers.get('cache-control')
        cachecontrol=self.parse_cache_control(_cachecontrol) if _cachecontrol else None
        max_age=None
        if cachecontrol:
            try:
                _maxage=cachecontrol.get('max-age')
                _maxage=cachecontrol.get('s-maxage', _maxage) # s-maxage takes precedence
                max_age=int(_maxage)
            except ValueError:
                yield self.warn('Invalid max-age '+esc(_maxage))

            yield self.info('Cache control: '+self.bold(esc(
                ', '.join([k+'='+v if type(v) != bool else k for k, v in cachecontrol.iteritems()]))
            ))
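
        # Work out how long the record may be cached: prefer Cache-Control
        # max-age/s-maxage, fall back to Expires, then to the configured
        # default; the final value is clamped to [MIN_CACHE_TIME, MAX_CACHE_TIME].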
        _expires=r.headers.get('expires')
        expires=parse_date(_expires)
        if expires:
            _now=r.headers.get('date')
            if _now: # use server date when possible
                now=parse_date(_now)
            else:
                now=datetime.utcnow()
            if expires > now:
                expires=(expires-now).total_seconds()
                yield self.info('Expires: '+self.bold(esc(_expires)))
            else:
                yield self.warn('Invalid Expires header. Expiry date must be in the future.')
                expires=None
        elif _expires and not expires:
            yield self.warn('Invalid Expires header %r'%esc(_expires))

        if not max_age and not expires:
            yield self.warn('No valid expiration time provided ! Please provide it either '
                            'with a Cache-Control or Expires header.')
            max_age=self.config('DEFAULT_CACHE_TIME')
            yield self.info('Using default expiration time of %d seconds'%(max_age))

        self.jdict_max_age = max_age if max_age else expires
        self.jdict_max_age = min(
            self.config('MAX_CACHE_TIME'),
            max(self.config('MIN_CACHE_TIME'), self.jdict_max_age)
        )

        yield self.info('Next update will be in %s'%(timedelta(seconds=self.jdict_max_age)))
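
        # Remember the validators sent by the server so that the next crawl
        # can be a cheap conditional request (If-None-Match / If-Modified-Since).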
        etag=r.headers.get('etag')
        last_modified=r.headers.get('last-modified')
        if not etag and not last_modified:
            yield self.warn('Please provide at least an ETag or Last-Modified header for '
                            'conditional requests')

        self.cache_info={}
        if etag:
            self.cache_info['etag']=etag
        if last_modified:
            self.cache_info['last-modified']=last_modified

        if cache_info and r.status_code == 304: # not modified
            yield self.m('== '+self.color('forestgreen', 'Response not modified. All good !'))
            self.modified=False
            self.success=True
            self.done_cb()
            return

        yield self.info('Content type: '+self.bold(esc(r.headers.get('content-type', 'not defined'))))
        if not r.headers.get('content-type'):
            yield self.err('Content-type '+self.bold('MUST')+' be defined')
            yield self.abort('The file must have a proper content-type to continue')
            return
        elif r.headers.get('content-type').lower() != 'application/json':
            yield self.warn('Content-type '+self.italics('SHOULD')+' be application/json')

        encoding=get_encoding(r.headers.get('content-type'))
        if not encoding:
            yield self.warn('Encoding not set. Assuming it\'s unicode, as per RFC4627 section 3')

        yield self.info('Content length: %s'%(self.bold(esc(r.headers.get('content-length', 'not set')))))

        cl=r.headers.get('content-length')
        if not cl:
            yield self.warn('No content-length. Note that we will not process a file whose size exceeds 1MiB')
        elif int(cl) > self.MAX_JSON_SIZE:
            yield self.abort('File too big ! File size must be less than 1MiB')
            return

        yield self.info('Reading response into memory...')
        b=io.BytesIO()
        for d in r.iter_content(requests.models.CONTENT_CHUNK_SIZE):
            b.write(d)
            if b.tell() > self.MAX_JSON_SIZE:
                yield self.abort('File too big ! File size must be less than 1MiB')
                return
        r._content=b.getvalue()
        del b

        yield self.info('Successfully read %d bytes'%len(r.content))
        yield self.nl()+self.m('* Parsing the JSON file')

        if not encoding:
            charset=requests.utils.guess_json_utf(r.content)
            if not charset:
                yield self.err('Unable to guess unicode charset')
                yield self.abort('The file MUST be unicode-encoded when no explicit charset is in the content-type')
                return
            yield self.info('Guessed charset: '+self.bold(charset))

        try:
            txt=r.content.decode(encoding or charset)
            yield self.info('Successfully decoded file as %s'%esc(encoding or charset))
        except LookupError as e:
            yield self.err('Invalid/unknown charset: %s'%esc(e))
            yield self.abort('Charset error, cannot continue')
            return
        except UnicodeDecodeError as e:
            yield self.err('Unicode decode error: %s'%e)
            yield self.abort('Charset error, cannot continue')
            return
        except Exception:
            yield self.abort('Unexpected charset error')
            return

        jdict=None
        try:
            jdict=json.loads(txt)
        except ValueError as e:
            yield self.err('Error while parsing JSON: %s'%esc(e))
        except Exception as e:
            yield self.err('Unexpected error while parsing JSON: %s'%esc(e))

        if not jdict:
            yield self.abort('Could not parse JSON')
            return

        yield self.info('JSON parsed successfully')
        yield self.nl()+self.m('* Validating the JSON against the schema')

        v=list(validate_isp(jdict))
        if v:
            yield self.err('Validation errors:')+self.format_validation_errors(v)
            yield self.abort('Your JSON file does not follow the schema, please fix it')
            return
        else:
            yield self.info('Done. No errors encountered \o/')

        for ca in jdict.get('coveredAreas', []):
            if 'area' not in ca:
                continue
            if not check_geojson_spatialite(ca['area']):
                yield self.err('GeoJSON data for covered area "%s" cannot '
                               'be handled by our database'%esc(ca['name']))
                yield self.abort('Please fix your GeoJSON')
                return

        ret=self.pre_done_cb(jdict)
        if ret:
            yield ret
            return

        yield (self.nl()+self.m('== '+self.color('forestgreen', 'All good ! You can click on Confirm now'))+
               self.m(json.dumps({'passed': 1}), 'control'))

        self.jdict=jdict
        self.success=True
        self.done_cb()
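

# HTML-flavoured crawler: every message is framed as a Server-Sent Events
# record ("event: ...\ndata: ...\n\n") so the report can be streamed live to
# the browser, and the formatting helpers emit inline-styled HTML.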
class PrettyValidator(Crawler):

    def __init__(self, session=None, sesskey=None, *args, **kwargs):
        super(PrettyValidator, self).__init__(*args, **kwargs)
        self.session=session
        self.sesskey=sesskey
        self.escape=lambda x: escape(unicode(str(x), 'utf8') if type(x) != unicode else x)

    def m(self, msg, evt=None):
        return u'%sdata: %s\n\n'%(u'event: %s\n'%evt if evt else '', msg)

    def err(self, msg, *args):
        return self.m(u'<strong style="color: crimson">!</strong> %s'%msg, *args)

    def warn(self, msg):
        return self.m(u'<strong style="color: dodgerblue">@</strong> %s'%msg)

    def info(self, msg):
        return self.m(u'&ndash; %s'%msg)

    def abort(self, msg):
        return (self.m(u'<br />== <span style="color: crimson">%s</span>'%msg)+
                self.m(json.dumps({'closed': 1}), 'control'))

    def bold(self, msg):
        return u'<strong>%s</strong>'%msg

    def italics(self, msg):
        return u'<em>%s</em>'%msg

    def color(self, color, msg):
        return u'<span style="color: %s">%s</span>'%(color, msg)

    def format_validation_errors(self, errs):
        lns=super(PrettyValidator, self).format_validation_errors(errs)
        buf=u''
        for l in lns.split('\n'):
            buf+=self.m(self.escape(l))
        return buf

    def done_cb(self):
        self.session[self.sesskey]['validated']=True
        self.session[self.sesskey]['jdict']=self.jdict
        self.session[self.sesskey]['cache_info']=self.cache_info
        self.session[self.sesskey]['last_update']=utcnow()
        self.session[self.sesskey]['next_update']=utcnow()+timedelta(seconds=self.jdict_max_age)
        self.session.save()
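

# On top of PrettyValidator, reject submissions whose ISP name (or shortname)
# already exists in the database; pre_done_cb runs just before the crawl is
# marked successful.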
class WebValidator(PrettyValidator):

    def pre_done_cb(self, jdict):
        # check name uniqueness
        where = (ISP.name == jdict['name'])
        if 'shortname' in jdict and jdict['shortname']:
            where |= (ISP.shortname == jdict.get('shortname'))
        if ISP.query.filter(where).count() > 0:
            ret = self.nl()
            ret += self.err('An ISP named "%s" already exists in our database'%self.escape(
                jdict['name']+(' ('+jdict['shortname']+')' if jdict.get('shortname') else '')
            ))
            ret += self.abort('The name of your ISP must be unique')
            return ret
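

# Plain-text variant: keeps the default formatting and only overrides abort()
# to render a framed FATAL ERROR banner.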
class TextValidator(Crawler):

    def abort(self, msg):
        res=u'FATAL ERROR: %s\n'%msg
        pad=u'='*(len(res)-1)+'\n'
        return self.m(pad+res+pad)