Add all scripts as they are (some need cleanup, most need documentation)

zorun committed 9 years ago
commit 3a9a2cbd6b
13 changed files with 686 additions and 0 deletions
  1. bgp-lastseen.py          +140 -0
  2. bgp-lastseen.sh            +9 -0
  3. bgp-origin-as-bird.sh      +6 -0
  4. bgp-origin-as-json.py     +59 -0
  5. bgp-origin-as.sh          +25 -0
  6. bird-update.py            +19 -0
  7. dn42-registry-routes.py  +141 -0
  8. dn42.py                   +72 -0
  9. fritz-usage-report.py     +77 -0
 10. mtn-stats.py              +79 -0
 11. mtn-stats.sh              +16 -0
 12. registry-json.py          +17 -0
 13. unregistered-asn.py       +26 -0

+ 140 - 0
bgp-lastseen.py

@@ -0,0 +1,140 @@
+#!/usr/bin/env python3
+
+import io
+import sys
+import json
+import time
+from netaddr import IPNetwork, IPSet
+
+from utils import read_json, write_json
+from registry import Inetnum, AutNum
+
+DBFILE = "/srv/http/dn42/tower-bird.json"
+REGISTRY = "/home/zorun/net.dn42.registry"
+HTMLOUT = "/srv/http/dn42/lastseen/index.html"
+
+# Where the data comes from
+ASN = 76142
+
+#TIMEFMT = '%F %H:%M'
+TIMEFMT = '%c UTC'
+
+DN42 = IPSet(["172.22.0.0/15"])
+
+def prefix_components(prefix):
+    """Can be used as a key for sorting collections of prefixes"""
+    ip, cidr = prefix.split("/")
+    return tuple(int(part) for part in ip.split('.')) + (int(cidr),)
+
+
+class LastSeen():
+    # Database of all previously seen prefixes, with dates
+    prefixes_db = dict()
+    # Prefixes currently announced
+    current_prefixes = list()
+    # From the registry
+    autnums = dict()
+    inetnums = dict()
+    # Optimised version for inclusion testing
+    networks = list()
+
+    def __init__(self, db_filename):
+        self.prefixes_db = read_json(db_filename)
+        self.current_prefixes = [prefix for prefix in self.prefixes_db if self.prefixes_db[prefix]["current"]]
+        # Registry
+        self.inetnums = Inetnum(REGISTRY).data
+        self.autnums = AutNum(REGISTRY).data
+        # Precompute this
+        self.networks = [IPNetwork(net) for net in self.inetnums]
+
+    def stats(self):
+        known = IPSet(self.prefixes_db)
+        current = IPSet(self.current_prefixes)
+        ratio_known = float(known.size) / float(DN42.size)
+        ratio_current = float(current.size) / float(DN42.size)
+        return {"known": ratio_known, "active": ratio_current}
+
+    def whois(self, prefix):
+        """Returns the name associated to a prefix, according to the registry.  We
+        look for the most specific prefix containing the argument.
+        """
+        prefix = IPNetwork(prefix)
+        relevant_nets = [net for net in self.networks if prefix in net]
+        if relevant_nets:
+            final_net = str(max(relevant_nets, key=(lambda p: p.prefixlen)))
+            if "netname" in self.inetnums[final_net]:
+                netname = self.inetnums[final_net]["netname"][0]
+                #print("{} -> {} [{}]".format(prefix, final_net, netname))
+                return netname
+            else:
+                return None
+        else:
+            #print("No whois for {}".format(prefix))
+            return None
+
+    def as_name(self, asn):
+        """Returns a tuple (as-name, descr), any of which can be the empty string,
+        or None if the AS is not found in the registry.
+        """
+        if isinstance(asn, int):
+            query = 'AS' + str(asn)
+        elif isinstance(asn, str):
+            asn = asn.upper()
+            # Make sure 'query' is always defined, even when the string
+            # already starts with 'AS'.
+            query = asn if asn.startswith('AS') else 'AS' + asn
+        else:
+            return None
+
+        if query in self.autnums:
+            return (self.autnums[query].get('as-name', [""])[0], self.autnums[query].get('descr', [""])[0])
+
+
+    def gen_html(self, out):
+        stats = self.stats()
+        out.write("<html><head><style type='text/css'>table { text-align: center} tr td.good { background-color: #00AA00 } tr td.bad { background-color: #AA0000 }</style>")
+        out.write("<h1>Last seen in dn42 BGP (from AS {})</h1>".format(ASN))
+        out.write("<p><a href='../tower-bird.json'>Raw JSON data</a>, <a href='../scripts/bgp-lastseen.py'>Python script</a></p>")
+        out.write("<p><strong>Last update:</strong> {} (data collection started on 26th January 2014)</p>".format(time.strftime('%c UTC', time.gmtime())))
+        out.write("<p><strong>Number of prefixes currently announced:</strong> {} (totaling {:.2%} of dn42 address space)</p>".format(len(self.current_prefixes), stats['active']))
+        out.write("<p><strong>Number of known prefixes since January 2014:</strong> {} (totaling {:.2%} of dn42 address space)</p>".format(len(self.prefixes_db), stats['known']))
+        out.write("<p><em>Data comes from BGP (AS {}) and is sampled every 10 minutes. \"UP\" means that the prefix is currently announced in dn42. \"DOWN\" means that the prefix has been announced at some point, but is not announced anymore.</em></p>".format(ASN))
+        out.write("<p><table border=1>"
+                  "<tr>"
+                  "<th>Status</th>"
+                  "<th>Prefix</th>"
+                  "<th>Netname</th>"
+                  "<th>Origin</th>"
+                  "<th>Last seen</th>"
+#                  "<th>First seen</th>"
+                  "</tr>")
+        for (prefix, data) in sorted(self.prefixes_db.items(), key=(lambda d : (d[1]["last_updated"],) + prefix_components(d[0]))):
+            out.write("<tr>")
+            good = data["current"]
+            netname = self.whois(prefix)
+            asn = data["origin_as"]
+            as_string = 'AS' + str(asn)
+            as_name = self.as_name(asn)
+            if as_name:
+                as_string += ' | ' + as_name[0]
+            last_seen = data["last_updated"]
+#            first_seen = time.strftime(TIMEFMT,
+#                                       time.gmtime(int(data["first_seen"]))) \
+#                if "first_seen" in data else "?"
+            out.write("<td style='font-weight: bold' {}>{}</td>".format("class='good'" if good else "class='bad'",
+                                         "UP" if good else "DOWN"))
+            out.write("<td>{}</td>".format(prefix))
+            out.write("<td>{}</td>".format(netname if netname else "?"))
+            out.write("<td>{}</td>".format(as_string))
+            out.write("<td>{}</td>".format(time.strftime(TIMEFMT, time.gmtime(int(last_seen)))))
+#            out.write("<td>{}</td>".format(first_seen))
+            out.write("</tr>")
+        out.write("</table></p></html>")
+
+
+if __name__ == '__main__':
+    l = LastSeen(DBFILE)
+    buf = io.StringIO()
+    l.gen_html(buf)
+    with open(HTMLOUT, "w+") as htmlfile:
+        htmlfile.write(buf.getvalue())
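
For reference, a minimal sketch of the prefix database this script reads from
tower-bird.json, with field names taken from the accesses above; the prefixes,
AS number and timestamps are invented for illustration:

    # hypothetical contents of tower-bird.json, as returned by read_json()
    prefixes_db = {
        "172.22.150.0/27": {"current": True,  "origin_as": 64712, "last_updated": 1390000000},
        "172.23.0.0/24":   {"current": False, "origin_as": "?",   "last_updated": 1389000000},
    }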

+ 9 - 0
bgp-lastseen.sh

@@ -0,0 +1,9 @@
+#!/bin/sh
+
+now=$(date +%s)
+
+birdc 'show route' | grep "^[0-9]" | cut -d ' ' -f 1 | while read prefix
+do
+    echo "$prefix" "$now"
+done > /tmp/bgp_dn42_lastseen
+

+ 6 - 0
bgp-origin-as-bird.sh

@@ -0,0 +1,6 @@
+#!/bin/sh
+#
+# List all known BGP prefixes along with their origin AS. For anycast
+# prefixes, there may be multiple ASes announcing the same prefix.
+
+ssh root@tower.polyno.me "birdc 'show route'" | grep "^[0-9]" | awk '{ print $1, substr($NF, 4, length($NF) - 5); }'

+ 59 - 0
bgp-origin-as-json.py

@@ -0,0 +1,59 @@
+#!/usr/bin/env python
+
+import json
+import sys
+import time
+from netaddr import IPNetwork, IPSet
+
+MY_AS = 76184
+
+import dn42
+from dn42 import Prefix, UsedPrefix
+
+
+if __name__ == '__main__':
+    asdata_dir = sys.argv[1]
+    prefixes_out = sys.argv[2]
+    asdata_out = sys.argv[3]
+
+    asdata = dn42.parse_records(asdata_dir)
+    asdata = { int(asn[2:]): d for asn, d in asdata.items() }
+
+    lines = sys.stdin.readlines()
+    prefixes = [l.split()[0] for l in lines]
+
+    # IPSets are great, but they "compact" adjacent or overlapping
+    # subnets, which we don't want for used prefixes: merging two
+    # subnets is not correct when they belong to different people.
+    # For unused subnets, however, we don't care, so an IPSet is fine
+    # there (illustrated after this file).
+    used_set = IPSet(prefixes)
+    used_set = used_set & dn42.ADDRSPACE
+    unused_set = dn42.ADDRSPACE ^ used_set
+
+    # Work with lists of our custom classes
+    used = [UsedPrefix(l.split()[0], {"asn": l.split()[1]}) for l in lines]
+    used = [p for p in used if p.prefix in dn42.ADDRSPACE]
+    unused = [Prefix(str(p)) for p in unused_set.iter_cidrs()]
+
+    all_prefixes = used + unused
+    all_prefixes.sort(key=lambda p : p.prefix)
+    assert(IPSet([p.prefix for p in all_prefixes]) == dn42.ADDRSPACE)
+
+    # TODO: handle prefix inclusion, this generates incorrect data
+    # right now.
+    result = dict()
+    result["name"] = "addrspace"
+    result["display"] = "none"
+    result["date"] = time.time()
+    result["origin"] = MY_AS
+    result["prefixes"] = len(used)
+    data = {
+        "name": "prefixes",
+        "prefix": dn42.ROOT_PREFIX,
+        "children": [p.to_dict() for p in all_prefixes]
+    }
+    result["children"] = [{"name": "empty", "size": 1}, data]
+    with open(prefixes_out, "w") as f:
+        json.dump(result, f)
+    with open(asdata_out, "w") as f:
+        json.dump(asdata, f)
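
The "compacting" mentioned in the comment above refers to netaddr merging
adjacent CIDRs inside an IPSet; a quick illustration (the two /24s are
arbitrary):

    from netaddr import IPSet

    # Two adjacent /24s that may belong to different people...
    s = IPSet(["172.22.0.0/24", "172.22.1.0/24"])
    # ...come back out as a single /23, so per-owner boundaries are lost.
    print(s.iter_cidrs())   # [IPNetwork('172.22.0.0/23')]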

+ 25 - 0
bgp-origin-as.sh

@@ -0,0 +1,25 @@
+#!/bin/sh
+#
+# List all known BGP prefixes along with their origin AS. For anycast
+# prefixes, there may be multiple ASes announcing the same prefix; we
+# display the one belonging to the best route we have.
+#
+# TODO: this script currently doesn't grab our own announced prefix.
+
+MY_AS=76184
+
+ssh -N -M -S '~/.ssh/master_%r-%h-%p' -o ControlPersist=10 mejis.polynome.dn42 >/dev/null &
+
+sleep 1
+
+prefixes="$(ssh mejis.polynome.dn42 -S '~/.ssh/master_%r-%h-%p' "vtysh -c 'show ip route'" | grep '^B>*' | cut -d ' ' -f 2)"
+
+for prefix in $prefixes
+do
+    echo -n "$prefix "
+    ssh mejis.polynome.dn42 -S '~/.ssh/master_%r-%h-%p' "vtysh -c 'show ip bgp $prefix'" \
+        | grep -B 2 'best$' | head -1 \
+        | sed -e 's/^ *//' -e 's/,.*//' \
+        | awk '{ print $(NF); }' \
+        | sed -e "s/Local/$MY_AS/"
+done

+ 19 - 0
bird-update.py

@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+
+import sys
+import time
+
+import bird
+from utils import read_json, write_json
+
+DBFILE = '/srv/http/dn42/tower-bird.json'
+
+
+def update(dbfile, stream):
+    new = bird.parse_bird(stream)
+    db = read_json(dbfile)
+    bird.update(db, new)
+    write_json(db, dbfile)
+
+if __name__ == '__main__':
+    update(DBFILE, sys.stdin)

+ 141 - 0
dn42-registry-routes.py

@@ -0,0 +1,141 @@
+#!/usr/bin/env python3
+
+"""Parses the inetnum data from the registry to turn it into a tree-like
+structure in JSON, that can be used for instance with d3js.
+"""
+
+import json
+import sys
+import os
+import time
+from netaddr import IPNetwork, IPSet
+
+import dn42
+
+# A tree is represented by a dictionary 'node' → [children], where [children]
+# is the list of *direct* children of 'node'
+def print_tree(tree, indent=0):
+    for p, children in tree.items():
+        print("{}{}{}".format(' ' * indent, p, ':' if children else ''))
+        for child in children:
+            print_tree(child, indent + 2)
+
+def print_json_tree(tree, indent=0):
+    print("{}{}{}".format(' ' * indent,
+                          '[' + str(tree['prefix']) + ']'
+                          if "display" in tree else tree['prefix'],
+                          ':' if tree['children'] else ''))
+    if 'children' in tree:
+        for p in tree["children"]:
+            print_json_tree(p, indent + 2)
+
+
+def empty_prefix_tree():
+    return { 'prefix': IPNetwork('0.0.0.0/0'),
+             'children': [] }
+
+# Since we work with subnets, we know that given two subnets 'a' and 'b',
+# either 'a' includes 'b', or 'b' includes 'a', or 'a' and 'b' don't
+# overlap.
+def insert_prefix(tree, prefix):
+    """Each node of the tree has a variable number of children. A tree always has
+    a (dummy) root, otherwise we could have multiple roots. The root
+    compares greater than any prefix.
+
+    Algo: start with an empty tree (only a root). For each prefix $p$, try to
+    insert it in the current tree:
+    - if the current node $n$ has no child, insert $p$ as a child of $n$
+    - else, for each child $s$ of the current node $n$:
+        - if $p$ is a subset of $s$, recursively insert $p$ in $s$ and return
+        - if $p$ is a superset of $s$, collect all children $s'$ of $n$ that are
+          also subsets of $p$, and move them as children of $p$. Then insert $p$
+          as a child of $n$ and return.
+    - if neither case happened, just insert $p$ as a child of $n$.
+
+    """
+    for child in tree['children']:
+        if prefix in child['prefix']:
+            insert_prefix(child, prefix)
+            return
+
+    subsets = [child for child in tree['children']
+               if child['prefix'] in prefix]
+    for c in subsets:
+        tree['children'].remove(c)
+    tree['children'].append({ 'prefix': prefix, 'children': subsets})
+
+
+def filter_tree(tree, prefix):
+    """Keep only the parts of the tree that are included in the given prefix."""
+    tree['children'] = [c for c in tree['children'] if c['prefix'] in prefix]
+    for c in tree['children']:
+        filter_tree(c, prefix)
+
+def sort_tree(tree):
+    """Recursively sorts all children of the tree. This does not work if
+    prefixes are represented as IPSet()."""
+    for c in tree["children"]:
+        sort_tree(c)
+    tree["children"].sort(key=lambda t : t['prefix'])
+
+
+def fill_blanks(tree):
+    for c in tree["children"]:
+        fill_blanks(c)
+    used_set = IPSet([c['prefix'] for c in tree['children']])
+    if used_set.size != 0:
+        unused_set = IPSet([tree['prefix']]) ^ used_set
+        tree["children"].extend([{ "prefix": p, "children": [], "display": "none"}
+                                 for p in unused_set.iter_cidrs()])
+
+
+def prefixes_tree(prefixes):
+    tree = empty_prefix_tree()
+    for p in (IPNetwork(p) for p in prefixes):
+        #print_json_tree(tree)
+        #print()
+        insert_prefix(tree, p)
+    # Only keep dn42 range
+    filter_tree(tree, dn42.ADDRSPACE)
+    # Then sort subnets
+    sort_tree(tree)
+    # Add dummy subnets for the visualization
+    for child in tree["children"]:
+        fill_blanks(child)
+    # And resort!
+    sort_tree(tree)
+    return tree
+
+def compute_size(tree):
+    for c in tree["children"]:
+        compute_size(c)
+    tree['size'] = tree["prefix"].size
+
+
+def serialize(tree):
+    """Transforms all IPSet() and IPNetwork() to strings"""
+    for c in tree["children"]:
+        serialize(c)
+    tree["prefix"] = str(tree["prefix"])
+
+
+if __name__ == '__main__':
+    inetnums_dir = sys.argv[1]
+    prefixtree_out = sys.argv[2]
+    inetnums_out = sys.argv[3]
+
+    inetnums = dn42.parse_records(inetnums_dir)
+    tree = prefixes_tree(inetnums.keys())
+    #print_json_tree(tree)
+    compute_size(tree)
+    serialize(tree)
+    # Don't display the root
+    tree["display"] = "none"
+    tree["date"] = time.time()
+    tree["prefixes"] = len(inetnums)
+    # TODO: add mtn revision
+    tree["origin"] = "registry"
+    with open(prefixtree_out, "w") as f:
+        json.dump(tree, f)
+    with open(inetnums_out, "w") as f:
+        json.dump(inetnums, f)
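
A small usage sketch of the tree construction above (the prefixes are chosen
arbitrarily within the dn42 range; insertion order does not matter thanks to
the superset handling in insert_prefix):

    from netaddr import IPNetwork

    tree = empty_prefix_tree()
    for p in ["172.22.0.0/16", "172.22.1.0/24", "172.22.1.128/25", "172.23.0.0/16"]:
        insert_prefix(tree, IPNetwork(p))
    print_json_tree(tree)
    # 0.0.0.0/0:
    #   172.22.0.0/16:
    #     172.22.1.0/24:
    #       172.22.1.128/25
    #   172.23.0.0/16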

+ 72 - 0
dn42.py

@@ -0,0 +1,72 @@
+import os
+from netaddr import IPNetwork, IPSet
+
+ROOT_PREFIX = "172.22.0.0/15"
+ADDRSPACE = IPSet([ROOT_PREFIX])
+
+class Prefix(object):
+    prefix = None
+    display = "none"
+
+    def __init__(self, prefix):
+        self.prefix = IPNetwork(prefix)
+
+    def __str__(self):
+        return str(self.prefix)
+
+    def to_dict(self):
+        d = dict()
+        d["name"] = d["prefix"] = str(self.prefix)
+        d["size"] = self.prefix.size
+        if self.display:
+            d["display"] = self.display
+        return d
+
+
+class UsedPrefix(Prefix):
+    properties = dict()
+    reserved_properties = {"name", "prefix", "size", "display"}
+
+    def __init__(self, prefix, properties):
+        self.properties = properties
+        self.check_properties()
+        super(UsedPrefix, self).__init__(prefix)
+        self.display = None
+
+    def check_properties(self):
+        if self.reserved_properties.intersection(self.properties.keys()):
+            raise ValueError
+
+    def to_dict(self):
+        d = super(UsedPrefix, self).to_dict()
+        d.update(self.properties)
+        return d
+
+
+def parse_record(stream):
+    """General parsing of the "key: value" syntax. Returns a key → [values]
+    dictionary.
+    """
+    d = dict()
+    for entry in stream.readlines():
+        try:
+            key, value = [s.strip() for s in entry.split(':', 1)]
+            if key not in d:
+                d[key] = list()
+            d[key].append(value)
+        except ValueError:
+            pass
+    return d
+
+
+def parse_records(records_dir, fix_underscore=True):
+    """Takes a directory containing records, and builds a dictionary mapping the
+    filename of each record to its parsed data. By default, we transform '_'
+    to '/' in the name of the records.
+    """
+    records = dict()
+    for record in os.listdir(records_dir):
+        record_path = os.path.join(records_dir, record)
+        record_key = record.replace('_', '/') if fix_underscore else record
+        with open(record_path, "r") as f:
+            records[record_key] = parse_record(f)
+    return records
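
A sketch of what parse_record() returns for a registry-style "key: value"
object; the field names and values are invented for illustration:

    import io

    sample = io.StringIO(
        "inetnum:  172.22.150.0 - 172.22.150.31\n"
        "netname:  EXAMPLE-NET\n"
        "descr:    example allocation\n"
        "descr:    second description line\n"
    )
    print(parse_record(sample))
    # {'inetnum': ['172.22.150.0 - 172.22.150.31'],
    #  'netname': ['EXAMPLE-NET'],
    #  'descr': ['example allocation', 'second description line']}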

+ 77 - 0
fritz-usage-report.py

@@ -0,0 +1,77 @@
+# coding: utf-8
+
+import netaddr
+import datetime
+import lglass.bird
+import lglass.route
+import lglass.database.file
+
+with open("routes.bird") as fh:
+	routes = lglass.route.RoutingTable(lglass.bird.parse_routes(fh))
+
+db = lglass.database.file.FileDatabase("/home/zorun/net.dn42.registry/data")
+
+dn42_native = netaddr.IPNetwork("172.22.0.0/15")
+db_nets = netaddr.IPSet()
+route_nets = netaddr.IPSet()
+
+for type, primary_key in db.list():
+	if type == "inetnum":
+		addr = netaddr.IPNetwork(primary_key)
+		if addr not in dn42_native:
+			continue
+		obj = db.get(type, primary_key)
+		if "BLK" in obj.getfirst("netname", ""):
+			continue
+		db_nets.add(addr)
+
+for route in routes:
+	if route.prefix not in dn42_native:
+		continue
+	route_nets.add(route.prefix)
+
+all_nets = netaddr.IPSet()
+all_nets.add(dn42_native)
+announced_nets = route_nets
+registered_nets = db_nets
+unused_nets = all_nets - announced_nets
+unregistered_nets = all_nets - registered_nets
+free_nets = unused_nets.intersection(unregistered_nets)
+
+print("DN42 network usage report")
+print("Date: {}".format(datetime.datetime.now()))
+print()
+
+print("Statistics:")
+print("  Total        {}".format(len(all_nets)))
+print("-" * 80)
+print("  Announced    {}\t{:.2f}%".format(len(announced_nets), len(announced_nets)/len(all_nets)*100))
+print("  Registered   {}\t{:.2f}%".format(len(registered_nets), len(registered_nets)/len(all_nets)*100))
+print("  Unregistered {}\t{:.2f}%".format(len(unregistered_nets), len(unregistered_nets)/len(all_nets)*100))
+print("  Unused       {}\t{:.2f}%".format(len(unused_nets), len(unused_nets)/len(all_nets)*100))
+print("  Free         {}\t{:.2f}%".format(len(free_nets), len(free_nets)/len(all_nets)*100))
+print()
+
+print("Announced networks:")
+for net in announced_nets.iter_cidrs():
+	print(" * {}".format(net))
+print()
+
+print("Registered networks:")
+for net in registered_nets.iter_cidrs():
+	print(" * {}".format(net))
+print()
+
+print("Unregistered networks:")
+for net in unregistered_nets.iter_cidrs():
+	print(" * {}".format(net))
+print()
+
+print("Unused networks:")
+for net in unused_nets.iter_cidrs():
+	print(" * {}".format(net))
+print()
+
+print("Free networks:")
+for net in free_nets.iter_cidrs():
+	print(" * {}".format(net))
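
The report boils down to set arithmetic on netaddr IPSets; in isolation, with
arbitrary example prefixes standing in for the real routing table and registry
data:

    from netaddr import IPNetwork, IPSet

    dn42_native = IPSet([IPNetwork("172.22.0.0/15")])
    announced   = IPSet(["172.22.0.0/24"])                    # seen in BGP
    registered  = IPSet(["172.22.0.0/24", "172.22.1.0/24"])   # in the registry

    unused       = dn42_native - announced       # not announced
    unregistered = dn42_native - registered      # not in the registry
    free         = unused & unregistered         # neither announced nor registered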

+ 79 - 0
mtn-stats.py

@@ -0,0 +1,79 @@
+#!/usr/bin/env python3
+
+from collections import defaultdict
+from pprint import pprint
+import sys
+
+from netaddr import IPSet, IPNetwork
+from registry import Inetnum
+from utils import read_json
+
+class Count():
+    # timestamp -> int (deletion or addition)
+    count = defaultdict(int)
+
+    def feed(self, line):
+        data = line.split()
+        timestamp = int(data[0])
+        if data[1] == "add_file":
+            self.count[timestamp] += 1
+        elif data[1] == "delete":
+            self.count[timestamp] -= 1
+
+    def dump(self):
+        return self.count
+
+    def history(self):
+        count = 0
+        for timestamp in sorted(self.count):
+            count += self.count[timestamp]
+            print("{} {}".format(timestamp, count))
+
+class Subnets():
+    # timestamp -> (added subnets, removed subnets)
+    space = defaultdict(lambda: (IPSet([]), IPSet([])))
+    dn42 = IPSet(["172.22.0.0/15"])
+    registry = Inetnum("/home/zorun/net.dn42.registry")
+
+    def feed(self, timestamp, type, subnet):
+        if subnet not in self.dn42:
+            return
+        if subnet in self.registry.data and not self.registry.data[subnet]["status"][0].lower().startswith("assigned"):
+            return
+        # Use the 'type' argument; 'data' is not defined in this scope.
+        if type == "add_file":
+            self.space[timestamp][0].add(subnet)
+        elif type == "delete":
+            self.space[timestamp][1].add(subnet)
+
+    def history(self):
+        current = IPSet(read_json("/srv/http/dn42/tower-bird.json"))
+        used = IPSet([])
+        for timestamp in sorted(self.space):
+            used = used.difference(self.space[timestamp][1])
+            used = used.union(self.space[timestamp][0])
+            announced = used.intersection(current)
+            print("{} {} {}".format(timestamp,
+                                    float(used.size) / float(self.dn42.size),
+                                    float(announced.size) / float(self.dn42.size)))
+        return used
+        
+
+if __name__ == '__main__':
+    persons = Count()
+    inetnums = Subnets()
+    for line in sys.stdin:
+        data = line.split()
+        if data[2].startswith('"data/person/'):
+            persons.feed(line)
+        elif data[2].startswith('"data/inetnum/'):
+            subnet = data[2][14:-1].replace('_', '/')
+            inetnums.feed(int(data[0]), data[1], subnet)
+
+    #persons.history()
+    result = inetnums.history()
+    registry = (subnet for subnet in inetnums.registry.data.keys() if inetnums.registry.data[subnet]["status"][0].lower().startswith("assigned"))
+    registry = IPSet(registry)
+    
+    pprint(result)
+    print("\n\n")
+    pprint(registry.intersection(inetnums.dn42).difference(result))

+ 16 - 0
mtn-stats.sh

@@ -0,0 +1,16 @@
+#!/bin/bash
+
+# Default to the current directory when no repository path is given.
+repo="${1:-.}"
+
+cd "$repo" || exit 1
+
+mtn automate log --no-merges | while read revision
+do
+    date="$(mtn automate certs "$revision" \
+        | grep -A1 'name "date"' | tail -n 1 \
+        | sed -e 's/.*\"\(.*\)\"/\1/')"
+    timestamp="$(date --date="$date" +%s)"
+    mtn automate get_revision "$revision" \
+        | grep '^add_file\|^delete\|^rename\|^patch' \
+        | sed -e "s/^/$timestamp /"
+done
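
mtn-stats.sh apparently produces the stdin consumed by mtn-stats.py; a sketch
of the intermediate line format, inferred from the parsing done in
mtn-stats.py above (the timestamp and path are invented):

    # hypothetical line emitted by mtn-stats.sh: a timestamp prepended to a
    # monotone "add_file" entry, consumed by mtn-stats.py on stdin
    line = '1390000000 add_file "data/inetnum/172.22.150.0_27"'
    data = line.split()
    timestamp = int(data[0])                    # 1390000000
    action = data[1]                            # 'add_file'
    subnet = data[2][14:-1].replace('_', '/')   # '172.22.150.0/27'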

+ 17 - 0
registry-json.py

@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+
+import os
+
+from registry import Registry
+
+OUTDIR = '/srv/http/dn42/registry'
+
+if __name__ == '__main__':
+    dn42 = Registry("/home/zorun/net.dn42.registry")
+    for attr in ["dns", "inetnum", "inet6num", "route", "route6", "person",
+                 "organisation", "mntner", "asblock", "asset", "autnum"]:
+        d = getattr(dn42, attr)
+        outfile = os.path.join(OUTDIR, attr + '.json')
+        with open(outfile, "w") as f:
+            print("Writing {}...".format(outfile))
+            d.write_json(f)

+ 26 - 0
unregistered-asn.py

@@ -0,0 +1,26 @@
+#!/usr/bin/env python3
+
+from registry import AutNum, Inetnum, Route
+from utils import read_json
+
+d = read_json("/srv/http/dn42/tower-bird.json")
+autnum = AutNum("/home/zorun/net.dn42.registry")
+inetnum = Inetnum("/home/zorun/net.dn42.registry")
+route = Route("/home/zorun/net.dn42.registry")
+unregistered_asn = set()
+for prefix, data in d.items():
+    if data["origin_as"] == "?":
+        continue
+    asn = "AS" + str(data["origin_as"])
+    if asn not in autnum.data:
+        unregistered_asn.add(data["origin_as"])
+
+for asn in sorted(unregistered_asn):
+    print("***********")
+    print("* AS{} *".format(asn))
+    print("***********\n")
+    for prefix in sorted((pref for pref in d if d[pref]["origin_as"] == asn)):
+        inum = inetnum.data[prefix]["netname"][0] if prefix in inetnum.data else ""
+        rout = route.data[prefix]["descr"][0] if prefix in route.data else ""
+        print("{} | {} | {}".format(prefix, inum, rout))
+    print("\n")