Compare commits

...

14 Commits

Author SHA1 Message Date
Sebastian Lohff 9784d4dc7c Add ASN last seen tab 2020-06-10 03:12:23 +02:00
Sebastian Lohff f898cd8d40 Add stubs, neighbors, etc to crawl data
* mark networks as stubs that are never seen in the middle of an AS path (see the sketch after the commit list)
* add number of neighbors to each node
* fix a bug where route was an unbound variable
2020-06-10 03:09:49 +02:00
Sebastian Lohff 99c02a84d6 Remove some prints 2020-06-10 03:09:16 +02:00
Sebastian Lohff 214f9680c0 WIP: new map 2020-06-10 03:07:55 +02:00
Sebastian Lohff bbf4061292 Add all migrations, leftover from migration 2020-06-07 02:27:58 +02:00
Sebastian Lohff dc0111ce6d Add restframework based API 2020-06-07 02:27:39 +02:00
Sebastian Lohff 15f4971bde Move API from /map/api/ to /api/v1/ 2020-06-07 02:20:51 +02:00
Sebastian Lohff a609eff143 Add restframework, grpcio and protobuf to deps 2020-06-06 22:22:54 +02:00
Sebastian Lohff 31d4620adf Django 2 migration 2020-06-06 22:01:51 +02:00
Sebastian Lohff 285ee74560 WIP: Rework complete backend
New crawler, new gobgp based backend, crawls are now based on networkx
2020-06-06 17:46:08 +02:00
Sebastian Lohff 87642cc4d9 Add GoBGP data source 2020-05-31 22:10:26 +02:00
Sebastian Lohff b9956c0246 Python3: Use __str__ instead of __unicode__
...also no unicode strings.
2020-05-31 01:49:03 +02:00
Sebastian Lohff 525d8a724d Migrate from tabs to spaces
Bow to the python best practices...
2020-05-31 01:07:44 +02:00
Sebastian Lohff e98400e1b5 Use non-relative import for python3 migration 2020-05-31 01:01:38 +02:00
53 changed files with 18147 additions and 767 deletions
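The stub and neighbor-count logic from commit f898cd8d40, and the networkx-based graph that commit 285ee74560 introduces, can be illustrated in isolation. This is a minimal sketch, not code from the repository: the build_graph helper and the sample AS paths are invented, and the stub rule is simplified to "an AS stays a stub unless it is seen between the endpoints of some AS path" (the crawler in this diff additionally exempts a path's own first and last hop).

import networkx as nx

def build_graph(as_paths):
    """Build an AS-level graph from AS paths, marking stubs and neighbor counts."""
    net = nx.Graph()
    for path in as_paths:
        for asn in path:
            if asn not in net.nodes:
                net.add_node(asn, stub=True)
        # an AS seen in the middle of a path provides transit, so it is not a stub
        for asn in path[1:-1]:
            net.nodes[asn]['stub'] = False
        # consecutive, distinct path entries form an edge (a peering)
        for a, b in zip(path, path[1:]):
            if a != b:
                net.add_edge(a, b)
    # per-node neighbor count, as also added to the crawl data
    for asn in net.nodes:
        net.nodes[asn]['neighbors'] = len(list(net.neighbors(asn)))
    return net

# invented example paths: 65010 provides transit, the other ASes stay stubs
print(build_graph([[65001, 65010, 65002], [65002, 65010, 65003]]).nodes(data=True))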

.gitignore (vendored, 1 line changed)

@@ -4,3 +4,4 @@
.*.swo
db.sqlite3
dnmapper/settings.py
__pycache__

apiv2/__init__.py (new empty file)

apiv2/admin.py (new file, 3 lines)

@@ -0,0 +1,3 @@
from django.contrib import admin
# Register your models here.

apiv2/apps.py (new file, 5 lines)

@@ -0,0 +1,5 @@
from django.apps import AppConfig
class Apiv2Config(AppConfig):
name = 'apiv2'


apiv2/models.py (new file, 3 lines)

@@ -0,0 +1,3 @@
from django.db import models
# Create your models here.

apiv2/serializers.py (new file, 32 lines)

@@ -0,0 +1,32 @@
import json
from rest_framework import serializers
from bgpdata.models import CrawlRun, ASLastSeen
class CrawlRunSerializer(serializers.ModelSerializer):
graph = serializers.ReadOnlyField()
class Meta:
model = CrawlRun
fields = ('id', 'startTime', 'endTime', 'asCount', 'asOnlineCount', 'asOfflineCount', 'peeringCount', 'graph')
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if "with_graph" not in self.context['request'].query_params:
self.fields.pop("graph")
def to_representation(self, instance):
data = super().to_representation(instance)
# decode the JSON-encoded graph exactly once, if the field is present
if "graph" in data and isinstance(data['graph'], str):
data['graph'] = json.loads(data['graph'])
return data
class ASLastSeenSerializer(serializers.ModelSerializer):
class Meta:
model = ASLastSeen
fields = ('id', 'asn', 'directlyCrawled', 'online', 'lastSeen', 'crawlLastSeen')

apiv2/tests.py (new file, 3 lines)

@@ -0,0 +1,3 @@
from django.test import TestCase
# Create your tests here.

apiv2/urls.py (new file, 12 lines)

@@ -0,0 +1,12 @@
from django.conf.urls import include, url
from rest_framework import routers
from apiv2.views import CrawlRunViewSet, ASLastSeenViewSet
router = routers.DefaultRouter()
router.register('crawlrun', CrawlRunViewSet)
router.register('asn', ASLastSeenViewSet)
urlpatterns = [
url('', include(router.urls)),
]

apiv2/views.py (new file, 30 lines)

@@ -0,0 +1,30 @@
from rest_framework import viewsets
from apiv2.serializers import CrawlRunSerializer, ASLastSeenSerializer
from backend import crawler
from bgpdata.models import CrawlRun, ASLastSeen
class CrawlRunViewSet(viewsets.ReadOnlyModelViewSet):
"""Represents a CrawlRun.
Graph is shown if with_graph is passed as query arg.
With /live/ the current network is shown (internally, a crawl is triggered for each request)"""
queryset = CrawlRun.objects.all()
serializer_class = CrawlRunSerializer
def get_object(self):
lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
if self.kwargs.get(lookup_url_kwarg) == "live":
net = crawler.get_current_network()
obj = crawler.make_crawl_from_net(net)
self.check_object_permissions(self.request, obj)
else:
obj = super().get_object()
return obj
class ASLastSeenViewSet(viewsets.ReadOnlyModelViewSet):
queryset = ASLastSeen.objects.all()
serializer_class = ASLastSeenSerializer
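The viewsets above, registered in apiv2/urls.py and mounted under /api/v1/ (commit 15f4971bde), can be exercised with a few HTTP calls. A minimal sketch assuming a local development server; host, port and the example primary key are assumptions, not part of this diff:

import requests
import networkx as nx

BASE = "http://localhost:8000/api/v1"  # assumed dev-server address

# crawl runs without the potentially large graph field
runs = requests.get(BASE + "/crawlrun/").json()

# a single run including its graph; with_graph only has to be present
run = requests.get(BASE + "/crawlrun/1/", params={"with_graph": "1"}).json()
graph = nx.readwrite.json_graph.node_link_graph(run["graph"])

# /live/ triggers an ad-hoc crawl and returns the current network state
live = requests.get(BASE + "/crawlrun/live/", params={"with_graph": "1"}).json()

# last-seen information per ASN
asns = requests.get(BASE + "/asn/").json()
print(len(runs), len(asns), graph.number_of_nodes())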

backend/__init__.py (new empty file)

backend/cmk_parser.py (new file, 354 lines)

@@ -0,0 +1,354 @@
#!/usr/bin/env python
# This file is part of dnmapper, an AS--level mapping tool
# Licensed under GNU General Public License v3 or later
# Written by Sebastian Lohff (seba@someserver.de)
from __future__ import print_function
from collections import OrderedDict
import re
import socket
from backend.exceptions import RouterParserException
def err(msg):
raise RouterParserException(msg)
def getBGPData(ip, asno):
rawData = getDataFromHost(ip)
if not rawData:
err("Could not get data from host (empty response)")
router = parseBGPData(rawData, asno)
router["ip"] = ip
return router
def getDataFromHost(ip):
socket.setdefaulttimeout(5)
x = socket.socket()
x.connect((ip, 6556))
f = x.makefile()
data = f.read()
x.close()
return data
def parseBGPData(raw, asno):
d = re.search(r"(?:^|\n)<<<(quagga|bird)>>>\n(.*?)(?:$|<<<[^\n]+>>>)", raw, re.DOTALL)
if not d:
err("Data not found in check mk output")
# mkify
raw = d.group(2).split("\n")
# split each line into whitespace-separated tokens; materialise as lists so the
# rows can be indexed and iterated more than once under Python 3
arr = [_row for _row in ([_x for _x in re.split(r"\s+", _y) if _x] for _y in raw) if _row]
# parse for bird/quagga
result = None
if d.group(1) == "quagga":
result = parseQuagga(arr, raw, asno)
else:
result = parseBird(arr, raw, asno)
return result
def parseQuagga(data, raw, asno):
status = _quaggaFindCommand(data, "show ip bgp sum")
if status[0][0:3] == ['IPv4', 'Unicast', 'Summary:']:
del(status[0])
if status[0][0:3] != ['BGP', 'router', 'identifier']:
print(status)
err("Couldn't find router id in quagga output")
peers = _quaggaFindNeighbors(data)
if asno and int(asno) != int(status[0][7]):
err("AS number (%s) does not match as number from quagga (%s)" % (asno, status[0][7]))
routes = _quaggaFindRoutes(raw)
return {"local_id": status[0][3].strip(","), "local_as": int(status[0][7]), "peers": peers, "routes": routes}
def parseBird(data, raw, asno):
status = _birdFindTable(data, "show status")
if status[2][0] != "1011-Router":
err("Couldn't find router id in bird output")
peers = [_x for _x in _birdMakeProtocols(data) if _x["type"] == "BGP"]
if asno is None:
err("Host is bird")
# FIXME
routes = _birdFindRoutes(data)
return {"local_id": status[2][3], "local_as": int(asno), "peers": peers, "routes": routes}
def _birdFindTable(info, command):
""" find command output of a bird command, e.g. "show bgp neighbors" """
command = ["bird>"] + command.split(" ")
commandInfo = []
editNextLine = False
for line in info:
if not commandInfo:
if line == command:
commandInfo.append(line)
editNextLine = True
else:
if editNextLine:
editNextLine = False
commandInfo.append(line[1:])
elif line[0] == "bird>":
return commandInfo
else:
commandInfo.append(line)
return []
def _birdFindProtocols(info):
""" return a list of tuples (protoname, protoinfo) """
protocolTable = _birdFindTable(info, "show protocols all")
protocols = OrderedDict()
currProto = None
for line in protocolTable[2:]:
if line[0][0:4] == "1002":
currProto = line[0][5:]
protocols[currProto] = [[currProto] + line[1:]]
elif currProto is None:
err("No proto selected, couldn't parse line: %s" % line)
else:
protocols[currProto].append(line)
return protocols
def _birdMakeProtocols(info):
""" Parse birds show protocols all output """
# proto: name, type, description, state (up/down?), up-since
# routes imported, exported, preferred
# also: routing stats (
# bgp special stuff: state, neighbor (address, as, id) (id not available when down)
# state (established, active)
# if error, last error is available
protocols = []
for proto, data in _birdFindProtocols(info).items():  # .iteritems() is Python 2 only
protoInfo = {
"name": proto,
"type": data[0][1],
"table": data[0][2],
"state": data[0][3],
"last_change": data[0][4],
"info": " ".join(data[0][5:]),
"description": " ".join(data[1][2:]),
"routes": {
"imported": data[5][1],
"exported": data[5][3],
"preferred": data[5][5],
}
}
if protoInfo["type"] == "BGP":
found = False
for n, line in enumerate(data):
if line[0:2] == ["BGP", "state:"]:
found = True
protoInfo["BGP"] = {
"state": data[n][2],
"online": data[n][2] == "Established",
"neighbor_address": data[n+1][2],
"neighbor_as": int(data[n+2][2]),
"neighbor_id": data[n+3][2] if len(data) > n+3 and data[n+3][0:2] == ["Neighbor", "ID:"] else None,
"last_error": " ".join(data[n+3][2:]) if len(data) > n+3 and data[n+3][0:2] == ["Last", "error:"] else None,
}
if not found:
protoInfo["BGP"] = None
protocols.append(protoInfo)
return protocols
def _birdFindRoutes(info):
output = _birdFindTable(info, "show route all")
if len(output) < 1:
# no data found
return None
def handleCandidate(routes, candidate):
if candidate:
# path, nexthop, network
for key in ["path", "nexthop", "network", "iBGP"]:
if key not in candidate:
return
route = {"prefix": candidate["network"], "nexthop": candidate["nexthop"],
"path": list(map(int, candidate["path"])), "iBGP": candidate["iBGP"]}
routes.append(route)
routes = []
candidate = None
lastIP = None
for line in output:
if line[0].startswith("1007-"):
# new route!
handleCandidate(routes, candidate)
if line[0] != "1007-":
# line has a network, use it!
lastIP = line[0][5:]
candidate = {"network": lastIP, "iBGP": None}
elif candidate is not None:
# search bgp attributes
if line[0] == "1012-":
pass
k, v = line[1], line[2:]
else:
k, v = line[0], line[1:]
k = k.rstrip(":")
if k == "BGP.next_hop":
candidate["nexthop"] = v[0]
elif k == "BGP.as_path":
candidate["path"] = v
handleCandidate(routes, candidate)
return routes
def _quaggaFindCommand(info, cmd):
# ['core-frunde#', 'show', 'ip', 'bgp', 'sum']
# ['core-frunde#', 'show', 'ip', 'bgp', 'neighbors']
output = []
cmd = cmd.split(" ")
prompt = None
for line in info:
if line[1:] == cmd:
prompt = line[0]
elif line[0] == prompt:
# done
return output
elif prompt != None:
output.append(line)
err("Could not find command '%s' in output" % " ".join(cmd))
def _quaggaFindNeighbors(info):
#['BGP', 'neighbor', 'is', '10.50.1.2,', 'remote', 'AS', '65001,', 'local', 'AS', '65001,', 'internal', 'link']
output = _quaggaFindCommand(info, "show ip bgp neighbors")
start = ["BGP", "neighbor", "is"]
curr = None
rawNeighbors = []
for line in output:
if line[0:3] == start:
if curr:
rawNeighbors.append(curr)
curr = [line]
elif curr:
curr.append(line)
else:
err("Could not find start of neighbors")
if curr:
rawNeighbors.append(curr)
curr = None
neighbors = []
neighborDict = OrderedDict()
for raw in rawNeighbors:
descrIdx = 1 if raw[1][0] == "Description:" else 0
if raw[descrIdx + 1][0] == "Hostname:":
descrIdx += 1
peerdict = {
"neighbor_address": raw[0][3].rstrip(","),
"neighbor_as": int(raw[0][6].rstrip(",")),
"local_as": int(raw[0][9].rstrip(",")),
"description": " ".join(raw[1][1:]) if descrIdx else "No description",
"neighbor_id": raw[1+descrIdx][6].strip(","),
"state": raw[2+descrIdx][3].strip(","),
"routes": {
"imported": 0,
},
"BGP": {
"state": raw[2+descrIdx][3].strip(","),
"online": raw[2+descrIdx][3].strip(",") == "Established",
"neighbor_id": raw[1+descrIdx][6].strip(","),
"neighbor_address": raw[0][3].rstrip(","),
"neighbor_as": int(raw[0][6].rstrip(",")),
"state": raw[2+descrIdx][3].strip(","),
},
}
for line in raw:
if line[1:3] == ["accepted", "prefixes"]:
# woooo
peerdict["routes"]["imported"] = int(line[0])
break
neighbors.append(peerdict)
neighborDict[peerdict["neighbor_address"]] = peerdict
return neighbors
def _quaggaFindRoutes(raw):
# from # show ip bgp to Total number of prefixes XX
# BGP table version is 0, local router ID is 10.50.0.1
# *> 10.3.14.0/27 10.75.0.22 0 65002 65112 i
cmdre = re.compile(r"^([^\s#]+#) show ip bgp$")
routere = re.compile(r"^(?P<status>.)(?P<status2>.)(?P<origin>.)(?P<network>[0-9./]+)?\s+(?P<nexthop>[0-9./]+)[\s0-9i?]+$")
# find output
output = []
prompt = None
for line in raw:
if not prompt:
m = cmdre.match(line)
if m:
prompt = m.group(1) + " "
else:
if line.startswith(prompt):
break
else:
output.append(line)
if len(output) < 1:
# no data found
return None
routes = []
foundTable = False
lastIP = None
for line in output:
if not foundTable:
if line.endswith("Metric LocPrf Weight Path"):
foundTable = True
else:
if line != '':
if line.startswith("Total number of prefixes") or line.startswith("Displayed "):
break
else:
# parse one route line
#print(line)
m = routere.match(line)
d = m.groupdict()
if d["network"]:
lastIP = d["network"]
else:
d["network"] = lastIP
# "parse" path (everything after 61 chars, but no i)
path = filter(lambda _x: _x not in ('', 'i'), line[61:].split(" "))
# currently skip incomplete routes
if '?' not in path:
route = {"prefix": d["network"], "nexthop": d["nexthop"],
"path": list(map(int, path)), "iBGP": d["origin"] == "i"}
routes.append(route)
return routes
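For orientation when reading the parsers above: getBGPData() returns one dict per router, combining the router's identity with its peers and learned routes. A hand-written example of the shape, with all values invented purely for illustration (the peer entry follows the quagga shape; bird peers carry a slightly different key set):

example_router = {
    "ip": "192.0.2.1",          # added by getBGPData()
    "local_id": "192.0.2.1",
    "local_as": 65001,
    "peers": [
        {
            "neighbor_address": "192.0.2.2",
            "neighbor_as": 65002,
            "local_as": 65001,
            "description": "uplink to AS65002",
            "neighbor_id": "192.0.2.2",
            "state": "Established",
            "routes": {"imported": 23},
            "BGP": {
                "state": "Established",
                "online": True,
                "neighbor_id": "192.0.2.2",
                "neighbor_address": "192.0.2.2",
                "neighbor_as": 65002,
            },
        },
    ],
    "routes": [
        {"prefix": "10.3.14.0/27", "nexthop": "192.0.2.2",
         "path": [65002, 65112], "iBGP": False},
    ],
}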

backend/crawler.py (new file, 199 lines)

@@ -0,0 +1,199 @@
import json
import logging
import socket
import time
from django.utils import timezone
import networkx as nx
from backend import gobgp, cmk_parser
from backend.exceptions import RouterParserException
from bgpdata.models import ConfigHost, ASLastSeen, ASLastSeenNeighbor, CrawlRun
log = logging.getLogger(__name__)
FORMAT = '%(asctime)-15s %(message)s'
logging.basicConfig(level=logging.INFO, format=FORMAT)
log.setLevel(logging.INFO)
def crawl():
net = get_current_network()
crawl = make_crawl_from_net(net)
crawl.save()
# handle last seen ASNs
log.info("Updating last seen info")
last_seen = {}
for asls in ASLastSeen.objects.all():
last_seen[asls.asn] = asls
for asn, node in net.nodes(data=True):
if asn not in last_seen:
last_seen[asn] = ASLastSeen(asn=asn)
asls = last_seen[asn]
asls.online = node['online']
if node['online']:
asls.directlyCrawled = node['directly_crawled']
asls.lastSeen = timezone.now()
asls.crawlLastSeen = crawl
else:
asls.directlyCrawled = False
asls.save()
if asls.online:
neighs = list(net.neighbors(asn))
db_neighs = set()
for db_neigh in asls.aslastseenneighbor_set.all():
if db_neigh.asn in neighs:
db_neighs.add(db_neigh.asn)
else:
db_neigh.delete()
for neigh in neighs:
if neigh not in db_neighs:
asneigh = ASLastSeenNeighbor(asn=neigh, neighbor=asls)
asneigh.save()
db_neighs.add(neigh)
crawl.endTime = timezone.now()
crawl.save()
log.info("Automated crawl done")
def make_crawl_from_net(net):
"""Create a CrawlRun, but don't save it"""
asCount = asOnlineCount = asOfflineCount = 0
for asn, node in net.nodes(data=True):
asCount += 1
if node['online']:
asOnlineCount += 1
else:
asOfflineCount += 1
crawl = CrawlRun()
crawl.startTime = timezone.now()
crawl.graph = net_to_json(net)
crawl.asCount = asCount
crawl.asOnlineCount = asOnlineCount
crawl.asOfflineCount = asOfflineCount
crawl.peeringCount = len(net.edges)
return crawl
def get_current_network():
net = nx.Graph()
crawl_start = time.time()
log.info("Crawl run started")
for host in ConfigHost.objects.all():
try:
if host.checkMethod == 'CMK':
data = cmk_parser.getBGPData(host.ip, host.number)
_add_data_to_net(net, data)
elif host.checkMethod == 'GOBGP':
for entry in gobgp.get_bgp_data(host.ip):
_add_data_to_net(net, entry)
except (RouterParserException, socket.error):
log.exception("Could not get data from host %s method %s", host, host.checkMethod)
continue
# add neighbor count
for node, data in net.nodes(data=True):
data["neighbors"] = len(list(net.neighbors(node)))
log.info("Adding last seen neighbor info")
for asls in ASLastSeen.objects.all():
if asls.asn not in net.nodes:
if any(neigh.asn in net.nodes for neigh in asls.aslastseenneighbor_set.all()):
_populate_node(net, asls.asn)
net.nodes[asls.asn]['online'] = False
for neigh in asls.aslastseenneighbor_set.all():
if neigh.asn not in net.nodes:
_populate_node(net, neigh.asn)
net.nodes[asls.asn]['online'] = False
log.info("Crawl done in %.2fs", time.time() - crawl_start)
# add id to edges
for n, (_, _, data) in enumerate(net.edges(data=True)):
data['id'] = n
# import IPython
# IPython.embed()
return net
def net_to_json(net):
"""Dum net to json, will replace all sets from the graph"""
# replace all sets with lists for better dumpability
for node in net.nodes.values():
for key, val in node.items():
if isinstance(val, set):
node[key] = list(val)
return json.dumps(nx.readwrite.json_graph.node_link_data(net))
def _populate_node(net, asn):
net.add_node(asn)
node = net.nodes[asn]
node.setdefault("prefixes", set())
node.setdefault("router_ids", set())
node.setdefault("routing_table", set())
node.setdefault("directly_crawled", False)
node.setdefault("online", True)
node.setdefault("stub", True)
return node
def _add_data_to_net(net, data):
asn = data['local_as']
as_node = _populate_node(net, asn)
as_node['router_ids'].add(data['local_id'])
as_node['directly_crawled'] = True
for peer in data['peers']:
pass
for route in data['routes']:
as_node['routing_table'].add((route['prefix'], tuple(route['path'])))
as_path = route['path']
if not as_path:
continue
orig_node = _populate_node(net, as_path[0])
orig_node['prefixes'].add(route['prefix'])
for n in range(len(as_path) - 1):
if as_path[n] != as_path[n + 1]:
if as_path[n + 1] not in net.nodes:
_populate_node(net, as_path[n + 1])
if as_path[n + 1] not in (as_path[-1], as_path[0]):
net.nodes[as_path[n + 1]]['stub'] = False
net.add_edge(as_path[n], as_path[n + 1])
def convert_crawl(crawl):
net = nx.Graph()
for asn in crawl.as_set.all():
if asn.number not in net.nodes:
_populate_node(net, asn.number)
d = net.nodes[asn.number]
d['online'] = asn.online
d['directly_crawled'] = asn.directlyCrawled
for br in asn.borderrouter_set.all():
d['router_ids'].add(br.routerID)
for asn in crawl.as_set.all():
for peering in asn.getPeerings():
net.add_edge(peering.as1.number, peering.as2.number)
for ann in asn.announcement_set.all():
prefix = "{}/{}".format(ann.ip, ann.prefix)
path = list(map(int, ann.ASPath.split()))
net.nodes[asn.number]['routing_table'].add((prefix, tuple(path)))
net.nodes[path[-1]]['prefixes'].add(prefix)
return net
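crawl() drives the Django ORM, so it needs a configured Django environment before backend.crawler is imported. A minimal sketch of a standalone runner; the script name is hypothetical and the settings module name dnmapper.settings is inferred from the .gitignore entry above:

# run_crawl.py (hypothetical helper, not part of this diff)
import os

import django

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dnmapper.settings")
django.setup()

from backend import crawler  # imported after django.setup() so the models are ready

if __name__ == "__main__":
    crawler.crawl()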

backend/exceptions.py (new file, 2 lines)

@@ -0,0 +1,2 @@
class RouterParserException(Exception):
pass

backend/gobgp.py (new file, 65 lines)

@@ -0,0 +1,65 @@
import grpc
from backend.gobgp_api import gobgp_pb2_grpc, attribute_pb2
from backend.gobgp_api.gobgp_pb2 import TableType, Family, ListPeerRequest, ListPathRequest
def get_bgp_data(gobgp_host):
_timeout = 10
channel = grpc.insecure_channel('{}:50051'.format(gobgp_host))
stub = gobgp_pb2_grpc.GobgpApiStub(channel)
data = []
peers = stub.ListPeer(ListPeerRequest(), _timeout)
for peer in peers:
entry = {
"local_id": peer.peer.state.router_id,
"local_as": peer.peer.conf.peer_as,
"peers": [], # we don't export any peers
"routes": [],
}
neigh = peer.peer.conf.neighbor_address
for af in [Family.Afi.AFI_IP, Family.Afi.AFI_IP6]:
req = ListPathRequest(
name=neigh,
table_type=TableType.ADJ_IN,
family=Family(afi=af, safi=Family.Safi.SAFI_UNICAST),
sort_type=ListPathRequest.SortType.PREFIX)
rib = stub.ListPath(req)
for dest in rib:
prefix = dest.destination.prefix
for path in dest.destination.paths:
as_path = []
next_hop = "<unknown>" # currently here to prevent error from NOT NULL constraint
# parse attrs
for pattr in path.pattrs:
if pattr.type_url == "type.googleapis.com/gobgpapi.NextHopAttribute":
nh = attribute_pb2.NextHopAttribute()
nh.ParseFromString(pattr.value)
next_hop = nh.next_hop
elif pattr.type_url == "type.googleapis.com/gobgpapi.AsPathAttribute":
asp = attribute_pb2.AsPathAttribute()
asp.ParseFromString(pattr.value)
for seg in asp.segments:
if seg.type == 2:
as_path = seg.numbers
elif pattr.type_url == "type.googleapis.com/gobgpapi.MpReachNLRIAttribute":
mpreach = attribute_pb2.MpReachNLRIAttribute()
mpreach.ParseFromString(pattr.value)
next_hop = mpreach.next_hops[0]
if not as_path:
continue
route = {
"prefix": prefix,
"path": as_path,
"nexthop": next_hop,
}
entry["routes"].append(route)
data.append(entry)
return data
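A quick way to exercise the helper above is to point it at a running gobgpd with its gRPC API enabled (the port 50051 used above is gobgpd's default). A short sketch; the address is an example value:

from pprint import pprint

from backend import gobgp

for entry in gobgp.get_bgp_data("192.0.2.10"):
    print(entry["local_id"], entry["local_as"], len(entry["routes"]))
    pprint(entry["routes"][:3])  # a few of the learned routes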


File diff suppressed because one or more lines are too long


@@ -0,0 +1,3 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc


@@ -0,0 +1,692 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: capability.proto
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
import gobgp_pb2 as gobgp__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='capability.proto',
package='gobgpapi',
syntax='proto3',
serialized_options=None,
serialized_pb=b'\n\x10\x63\x61pability.proto\x12\x08gobgpapi\x1a\x0bgobgp.proto\";\n\x17MultiProtocolCapability\x12 \n\x06\x66\x61mily\x18\x01 \x01(\x0b\x32\x10.gobgpapi.Family\"\x18\n\x16RouteRefreshCapability\"\x1d\n\x1b\x43\x61rryingLabelInfoCapability\"q\n\x1e\x45xtendedNexthopCapabilityTuple\x12%\n\x0bnlri_family\x18\x01 \x01(\x0b\x32\x10.gobgpapi.Family\x12(\n\x0enexthop_family\x18\x02 \x01(\x0b\x32\x10.gobgpapi.Family\"U\n\x19\x45xtendedNexthopCapability\x12\x38\n\x06tuples\x18\x01 \x03(\x0b\x32(.gobgpapi.ExtendedNexthopCapabilityTuple\"Q\n\x1eGracefulRestartCapabilityTuple\x12 \n\x06\x66\x61mily\x18\x01 \x01(\x0b\x32\x10.gobgpapi.Family\x12\r\n\x05\x66lags\x18\x02 \x01(\r\"r\n\x19GracefulRestartCapability\x12\r\n\x05\x66lags\x18\x01 \x01(\r\x12\x0c\n\x04time\x18\x02 \x01(\r\x12\x38\n\x06tuples\x18\x03 \x03(\x0b\x32(.gobgpapi.GracefulRestartCapabilityTuple\")\n\x1b\x46ourOctetASNumberCapability\x12\n\n\x02\x61s\x18\x01 \x01(\r\"_\n\x16\x41\x64\x64PathCapabilityTuple\x12 \n\x06\x66\x61mily\x18\x01 \x01(\x0b\x32\x10.gobgpapi.Family\x12#\n\x04mode\x18\x02 \x01(\x0e\x32\x15.gobgpapi.AddPathMode\"E\n\x11\x41\x64\x64PathCapability\x12\x30\n\x06tuples\x18\x01 \x03(\x0b\x32 .gobgpapi.AddPathCapabilityTuple\" \n\x1e\x45nhancedRouteRefreshCapability\"h\n\'LongLivedGracefulRestartCapabilityTuple\x12 \n\x06\x66\x61mily\x18\x01 \x01(\x0b\x32\x10.gobgpapi.Family\x12\r\n\x05\x66lags\x18\x02 \x01(\r\x12\x0c\n\x04time\x18\x03 \x01(\r\"g\n\"LongLivedGracefulRestartCapability\x12\x41\n\x06tuples\x18\x01 \x03(\x0b\x32\x31.gobgpapi.LongLivedGracefulRestartCapabilityTuple\"\x1d\n\x1bRouteRefreshCiscoCapability\"0\n\x11UnknownCapability\x12\x0c\n\x04\x63ode\x18\x01 \x01(\r\x12\r\n\x05value\x18\x02 \x01(\x0c*L\n\x0b\x41\x64\x64PathMode\x12\r\n\tMODE_NONE\x10\x00\x12\x10\n\x0cMODE_RECEIVE\x10\x01\x12\r\n\tMODE_SEND\x10\x02\x12\r\n\tMODE_BOTH\x10\x03\x62\x06proto3'
,
dependencies=[gobgp__pb2.DESCRIPTOR,])
_ADDPATHMODE = _descriptor.EnumDescriptor(
name='AddPathMode',
full_name='gobgpapi.AddPathMode',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='MODE_NONE', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MODE_RECEIVE', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MODE_SEND', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MODE_BOTH', index=3, number=3,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=1099,
serialized_end=1175,
)
_sym_db.RegisterEnumDescriptor(_ADDPATHMODE)
AddPathMode = enum_type_wrapper.EnumTypeWrapper(_ADDPATHMODE)
MODE_NONE = 0
MODE_RECEIVE = 1
MODE_SEND = 2
MODE_BOTH = 3
_MULTIPROTOCOLCAPABILITY = _descriptor.Descriptor(
name='MultiProtocolCapability',
full_name='gobgpapi.MultiProtocolCapability',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='family', full_name='gobgpapi.MultiProtocolCapability.family', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=43,
serialized_end=102,
)
_ROUTEREFRESHCAPABILITY = _descriptor.Descriptor(
name='RouteRefreshCapability',
full_name='gobgpapi.RouteRefreshCapability',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=104,
serialized_end=128,
)
_CARRYINGLABELINFOCAPABILITY = _descriptor.Descriptor(
name='CarryingLabelInfoCapability',
full_name='gobgpapi.CarryingLabelInfoCapability',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=130,
serialized_end=159,
)
_EXTENDEDNEXTHOPCAPABILITYTUPLE = _descriptor.Descriptor(
name='ExtendedNexthopCapabilityTuple',
full_name='gobgpapi.ExtendedNexthopCapabilityTuple',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='nlri_family', full_name='gobgpapi.ExtendedNexthopCapabilityTuple.nlri_family', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='nexthop_family', full_name='gobgpapi.ExtendedNexthopCapabilityTuple.nexthop_family', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=161,
serialized_end=274,
)
_EXTENDEDNEXTHOPCAPABILITY = _descriptor.Descriptor(
name='ExtendedNexthopCapability',
full_name='gobgpapi.ExtendedNexthopCapability',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='tuples', full_name='gobgpapi.ExtendedNexthopCapability.tuples', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=276,
serialized_end=361,
)
_GRACEFULRESTARTCAPABILITYTUPLE = _descriptor.Descriptor(
name='GracefulRestartCapabilityTuple',
full_name='gobgpapi.GracefulRestartCapabilityTuple',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='family', full_name='gobgpapi.GracefulRestartCapabilityTuple.family', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='flags', full_name='gobgpapi.GracefulRestartCapabilityTuple.flags', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=363,
serialized_end=444,
)
_GRACEFULRESTARTCAPABILITY = _descriptor.Descriptor(
name='GracefulRestartCapability',
full_name='gobgpapi.GracefulRestartCapability',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='flags', full_name='gobgpapi.GracefulRestartCapability.flags', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='time', full_name='gobgpapi.GracefulRestartCapability.time', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='tuples', full_name='gobgpapi.GracefulRestartCapability.tuples', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=446,
serialized_end=560,
)
_FOUROCTETASNUMBERCAPABILITY = _descriptor.Descriptor(
name='FourOctetASNumberCapability',
full_name='gobgpapi.FourOctetASNumberCapability',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='as', full_name='gobgpapi.FourOctetASNumberCapability.as', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=562,
serialized_end=603,
)
_ADDPATHCAPABILITYTUPLE = _descriptor.Descriptor(
name='AddPathCapabilityTuple',
full_name='gobgpapi.AddPathCapabilityTuple',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='family', full_name='gobgpapi.AddPathCapabilityTuple.family', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='mode', full_name='gobgpapi.AddPathCapabilityTuple.mode', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=605,
serialized_end=700,
)
_ADDPATHCAPABILITY = _descriptor.Descriptor(
name='AddPathCapability',
full_name='gobgpapi.AddPathCapability',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='tuples', full_name='gobgpapi.AddPathCapability.tuples', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=702,
serialized_end=771,
)
_ENHANCEDROUTEREFRESHCAPABILITY = _descriptor.Descriptor(
name='EnhancedRouteRefreshCapability',
full_name='gobgpapi.EnhancedRouteRefreshCapability',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=773,
serialized_end=805,
)
_LONGLIVEDGRACEFULRESTARTCAPABILITYTUPLE = _descriptor.Descriptor(
name='LongLivedGracefulRestartCapabilityTuple',
full_name='gobgpapi.LongLivedGracefulRestartCapabilityTuple',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='family', full_name='gobgpapi.LongLivedGracefulRestartCapabilityTuple.family', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='flags', full_name='gobgpapi.LongLivedGracefulRestartCapabilityTuple.flags', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='time', full_name='gobgpapi.LongLivedGracefulRestartCapabilityTuple.time', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=807,
serialized_end=911,
)
_LONGLIVEDGRACEFULRESTARTCAPABILITY = _descriptor.Descriptor(
name='LongLivedGracefulRestartCapability',
full_name='gobgpapi.LongLivedGracefulRestartCapability',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='tuples', full_name='gobgpapi.LongLivedGracefulRestartCapability.tuples', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=913,
serialized_end=1016,
)
_ROUTEREFRESHCISCOCAPABILITY = _descriptor.Descriptor(
name='RouteRefreshCiscoCapability',
full_name='gobgpapi.RouteRefreshCiscoCapability',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1018,
serialized_end=1047,
)
_UNKNOWNCAPABILITY = _descriptor.Descriptor(
name='UnknownCapability',
full_name='gobgpapi.UnknownCapability',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='gobgpapi.UnknownCapability.code', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='value', full_name='gobgpapi.UnknownCapability.value', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1049,
serialized_end=1097,
)
_MULTIPROTOCOLCAPABILITY.fields_by_name['family'].message_type = gobgp__pb2._FAMILY
_EXTENDEDNEXTHOPCAPABILITYTUPLE.fields_by_name['nlri_family'].message_type = gobgp__pb2._FAMILY
_EXTENDEDNEXTHOPCAPABILITYTUPLE.fields_by_name['nexthop_family'].message_type = gobgp__pb2._FAMILY
_EXTENDEDNEXTHOPCAPABILITY.fields_by_name['tuples'].message_type = _EXTENDEDNEXTHOPCAPABILITYTUPLE
_GRACEFULRESTARTCAPABILITYTUPLE.fields_by_name['family'].message_type = gobgp__pb2._FAMILY
_GRACEFULRESTARTCAPABILITY.fields_by_name['tuples'].message_type = _GRACEFULRESTARTCAPABILITYTUPLE
_ADDPATHCAPABILITYTUPLE.fields_by_name['family'].message_type = gobgp__pb2._FAMILY
_ADDPATHCAPABILITYTUPLE.fields_by_name['mode'].enum_type = _ADDPATHMODE
_ADDPATHCAPABILITY.fields_by_name['tuples'].message_type = _ADDPATHCAPABILITYTUPLE
_LONGLIVEDGRACEFULRESTARTCAPABILITYTUPLE.fields_by_name['family'].message_type = gobgp__pb2._FAMILY
_LONGLIVEDGRACEFULRESTARTCAPABILITY.fields_by_name['tuples'].message_type = _LONGLIVEDGRACEFULRESTARTCAPABILITYTUPLE
DESCRIPTOR.message_types_by_name['MultiProtocolCapability'] = _MULTIPROTOCOLCAPABILITY
DESCRIPTOR.message_types_by_name['RouteRefreshCapability'] = _ROUTEREFRESHCAPABILITY
DESCRIPTOR.message_types_by_name['CarryingLabelInfoCapability'] = _CARRYINGLABELINFOCAPABILITY
DESCRIPTOR.message_types_by_name['ExtendedNexthopCapabilityTuple'] = _EXTENDEDNEXTHOPCAPABILITYTUPLE
DESCRIPTOR.message_types_by_name['ExtendedNexthopCapability'] = _EXTENDEDNEXTHOPCAPABILITY
DESCRIPTOR.message_types_by_name['GracefulRestartCapabilityTuple'] = _GRACEFULRESTARTCAPABILITYTUPLE
DESCRIPTOR.message_types_by_name['GracefulRestartCapability'] = _GRACEFULRESTARTCAPABILITY
DESCRIPTOR.message_types_by_name['FourOctetASNumberCapability'] = _FOUROCTETASNUMBERCAPABILITY
DESCRIPTOR.message_types_by_name['AddPathCapabilityTuple'] = _ADDPATHCAPABILITYTUPLE
DESCRIPTOR.message_types_by_name['AddPathCapability'] = _ADDPATHCAPABILITY
DESCRIPTOR.message_types_by_name['EnhancedRouteRefreshCapability'] = _ENHANCEDROUTEREFRESHCAPABILITY
DESCRIPTOR.message_types_by_name['LongLivedGracefulRestartCapabilityTuple'] = _LONGLIVEDGRACEFULRESTARTCAPABILITYTUPLE
DESCRIPTOR.message_types_by_name['LongLivedGracefulRestartCapability'] = _LONGLIVEDGRACEFULRESTARTCAPABILITY
DESCRIPTOR.message_types_by_name['RouteRefreshCiscoCapability'] = _ROUTEREFRESHCISCOCAPABILITY
DESCRIPTOR.message_types_by_name['UnknownCapability'] = _UNKNOWNCAPABILITY
DESCRIPTOR.enum_types_by_name['AddPathMode'] = _ADDPATHMODE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
MultiProtocolCapability = _reflection.GeneratedProtocolMessageType('MultiProtocolCapability', (_message.Message,), {
'DESCRIPTOR' : _MULTIPROTOCOLCAPABILITY,
'__module__' : 'capability_pb2'
# @@protoc_insertion_point(class_scope:gobgpapi.MultiProtocolCapability)
})
_sym_db.RegisterMessage(MultiProtocolCapability)
RouteRefreshCapability = _reflection.GeneratedProtocolMessageType('RouteRefreshCapability', (_message.Message,), {
'DESCRIPTOR' : _ROUTEREFRESHCAPABILITY,
'__module__' : 'capability_pb2'
# @@protoc_insertion_point(class_scope:gobgpapi.RouteRefreshCapability)
})
_sym_db.RegisterMessage(RouteRefreshCapability)
CarryingLabelInfoCapability = _reflection.GeneratedProtocolMessageType('CarryingLabelInfoCapability', (_message.Message,), {
'DESCRIPTOR' : _CARRYINGLABELINFOCAPABILITY,
'__module__' : 'capability_pb2'
# @@protoc_insertion_point(class_scope:gobgpapi.CarryingLabelInfoCapability)
})
_sym_db.RegisterMessage(CarryingLabelInfoCapability)
ExtendedNexthopCapabilityTuple = _reflection.GeneratedProtocolMessageType('ExtendedNexthopCapabilityTuple', (_message.Message,), {
'DESCRIPTOR' : _EXTENDEDNEXTHOPCAPABILITYTUPLE,
'__module__' : 'capability_pb2'
# @@protoc_insertion_point(class_scope:gobgpapi.ExtendedNexthopCapabilityTuple)
})
_sym_db.RegisterMessage(ExtendedNexthopCapabilityTuple)
ExtendedNexthopCapability = _reflection.GeneratedProtocolMessageType('ExtendedNexthopCapability', (_message.Message,), {
'DESCRIPTOR' : _EXTENDEDNEXTHOPCAPABILITY,
'__module__' : 'capability_pb2'
# @@protoc_insertion_point(class_scope:gobgpapi.ExtendedNexthopCapability)
})
_sym_db.RegisterMessage(ExtendedNexthopCapability)
GracefulRestartCapabilityTuple = _reflection.GeneratedProtocolMessageType('GracefulRestartCapabilityTuple', (_message.Message,), {
'DESCRIPTOR' : _GRACEFULRESTARTCAPABILITYTUPLE,
'__module__' : 'capability_pb2'
# @@protoc_insertion_point(class_scope:gobgpapi.GracefulRestartCapabilityTuple)
})
_sym_db.RegisterMessage(GracefulRestartCapabilityTuple)
GracefulRestartCapability = _reflection.GeneratedProtocolMessageType('GracefulRestartCapability', (_message.Message,), {
'DESCRIPTOR' : _GRACEFULRESTARTCAPABILITY,
'__module__' : 'capability_pb2'
# @@protoc_insertion_point(class_scope:gobgpapi.GracefulRestartCapability)
})
_sym_db.RegisterMessage(GracefulRestartCapability)
FourOctetASNumberCapability = _reflection.GeneratedProtocolMessageType('FourOctetASNumberCapability', (_message.Message,), {
'DESCRIPTOR' : _FOUROCTETASNUMBERCAPABILITY,
'__module__' : 'capability_pb2'
# @@protoc_insertion_point(class_scope:gobgpapi.FourOctetASNumberCapability)
})
_sym_db.RegisterMessage(FourOctetASNumberCapability)
AddPathCapabilityTuple = _reflection.GeneratedProtocolMessageType('AddPathCapabilityTuple', (_message.Message,), {
'DESCRIPTOR' : _ADDPATHCAPABILITYTUPLE,
'__module__' : 'capability_pb2'
# @@protoc_insertion_point(class_scope:gobgpapi.AddPathCapabilityTuple)
})
_sym_db.RegisterMessage(AddPathCapabilityTuple)
AddPathCapability = _reflection.GeneratedProtocolMessageType('AddPathCapability', (_message.Message,), {
'DESCRIPTOR' : _ADDPATHCAPABILITY,
'__module__' : 'capability_pb2'
# @@protoc_insertion_point(class_scope:gobgpapi.AddPathCapability)
})
_sym_db.RegisterMessage(AddPathCapability)
EnhancedRouteRefreshCapability = _reflection.GeneratedProtocolMessageType('EnhancedRouteRefreshCapability', (_message.Message,), {
'DESCRIPTOR' : _ENHANCEDROUTEREFRESHCAPABILITY,
'__module__' : 'capability_pb2'
# @@protoc_insertion_point(class_scope:gobgpapi.EnhancedRouteRefreshCapability)
})
_sym_db.RegisterMessage(EnhancedRouteRefreshCapability)
LongLivedGracefulRestartCapabilityTuple = _reflection.GeneratedProtocolMessageType('LongLivedGracefulRestartCapabilityTuple', (_message.Message,), {
'DESCRIPTOR' : _LONGLIVEDGRACEFULRESTARTCAPABILITYTUPLE,
'__module__' : 'capability_pb2'
# @@protoc_insertion_point(class_scope:gobgpapi.LongLivedGracefulRestartCapabilityTuple)
})
_sym_db.RegisterMessage(LongLivedGracefulRestartCapabilityTuple)
LongLivedGracefulRestartCapability = _reflection.GeneratedProtocolMessageType('LongLivedGracefulRestartCapability', (_message.Message,), {
'DESCRIPTOR' : _LONGLIVEDGRACEFULRESTARTCAPABILITY,
'__module__' : 'capability_pb2'
# @@protoc_insertion_point(class_scope:gobgpapi.LongLivedGracefulRestartCapability)
})
_sym_db.RegisterMessage(LongLivedGracefulRestartCapability)
RouteRefreshCiscoCapability = _reflection.GeneratedProtocolMessageType('RouteRefreshCiscoCapability', (_message.Message,), {
'DESCRIPTOR' : _ROUTEREFRESHCISCOCAPABILITY,
'__module__' : 'capability_pb2'
# @@protoc_insertion_point(class_scope:gobgpapi.RouteRefreshCiscoCapability)
})
_sym_db.RegisterMessage(RouteRefreshCiscoCapability)
UnknownCapability = _reflection.GeneratedProtocolMessageType('UnknownCapability', (_message.Message,), {
'DESCRIPTOR' : _UNKNOWNCAPABILITY,
'__module__' : 'capability_pb2'
# @@protoc_insertion_point(class_scope:gobgpapi.UnknownCapability)
})
_sym_db.RegisterMessage(UnknownCapability)
# @@protoc_insertion_point(module_scope)


@@ -0,0 +1,3 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large


@@ -3,7 +3,7 @@
# Written by Sebastian Lohff (seba@someserver.de)
from django.contrib import admin
from bgpdata.models import ConfigHost, CrawlRun, CrawlLog, AS, BorderRouter, Announcement, Peering, BorderRouterPair
from bgpdata.models import ConfigHost, CrawlRun, CrawlLog, AS, BorderRouter, Announcement, Peering, BorderRouterPair, ASLastSeen, ASLastSeenNeighbor
# Register your models here.
admin.site.register(ConfigHost)
@@ -14,3 +14,5 @@ admin.site.register(BorderRouter)
admin.site.register(Announcement)
admin.site.register(Peering)
admin.site.register(BorderRouterPair)
admin.site.register(ASLastSeen)
admin.site.register(ASLastSeenNeighbor)


@@ -6,35 +6,49 @@ from tastypie.resources import ModelResource, ALL_WITH_RELATIONS, ALL
from tastypie import fields
from bgpdata.models import AS, CrawlRun, Announcement, BorderRouter
class ASResource(ModelResource):
crawl = fields.ForeignKey("bgpdata.api.CrawlResource", "crawl")
class Meta:
list_allowed_methods = ['get']
detail_allowed_methods = ['get']
filtering = {'crawl': ALL_WITH_RELATIONS, 'number': ALL}
queryset = AS.objects.all()
resource_name = "as"
class ASResource(ModelResource):
crawl = fields.ForeignKey("bgpdata.api.CrawlResource", "crawl")
class Meta:
list_allowed_methods = ['get']
detail_allowed_methods = ['get']
filtering = {'crawl': ALL_WITH_RELATIONS, 'number': ALL}
queryset = AS.objects.all()
resource_name = "as"
class CrawlResource(ModelResource):
class Meta:
queryset = CrawlRun.objects.all()
resource_name = "crawl"
class Meta:
queryset = CrawlRun.objects.all()
resource_name = "crawl"
excludes = ["graph"]
class CrawlGraphResource(ModelResource):
class Meta:
queryset = CrawlRun.objects.all()
resource_name = "crawl_graph"
class BorderRouterResource(ModelResource):
AS = fields.ForeignKey("bgpdata.api.ASResource", "AS")
class Meta:
list_allowed_methods = ['get']
detail_allowed_methods = ['get']
filtering = {'AS': ALL_WITH_RELATIONS}
AS = fields.ForeignKey("bgpdata.api.ASResource", "AS")
class Meta:
list_allowed_methods = ['get']
detail_allowed_methods = ['get']
filtering = {'AS': ALL_WITH_RELATIONS}
queryset = BorderRouter.objects.all()
resource_name = "borderrouter"
queryset = BorderRouter.objects.all()
resource_name = "borderrouter"
class AnnouncementResource(ModelResource):
router = fields.ForeignKey("bgpdata.api.BorderRouterResource", "router")
class Meta:
list_allowed_methods = ['get']
detail_allowed_methods = ['get']
filtering = {'originAS': ALL_WITH_RELATIONS, 'crawlAS': ALL_WITH_RELATIONS, 'router': ALL_WITH_RELATIONS}
queryset = Announcement.objects.all()
router = fields.ForeignKey("bgpdata.api.BorderRouterResource", "router")
class Meta:
list_allowed_methods = ['get']
detail_allowed_methods = ['get']
filtering = {'originAS': ALL_WITH_RELATIONS, 'crawlAS': ALL_WITH_RELATIONS, 'router': ALL_WITH_RELATIONS}
queryset = Announcement.objects.all()

bgpdata/api_urls.py (new file, 21 lines)

@@ -0,0 +1,21 @@
# This file is part of dnmapper, an AS--level mapping tool
# Licensed under GNU General Public License v3 or later
# Written by Sebastian Lohff (seba@someserver.de)
from django.conf.urls import url, include
from bgpdata.api import ASResource, CrawlResource, CrawlGraphResource, BorderRouterResource, AnnouncementResource
asResource = ASResource()
crawlResource = CrawlResource()
crawlGraphResource = CrawlGraphResource()
borderRouterResource = BorderRouterResource()
announcementResource = AnnouncementResource()
urlpatterns = (
url('', include(asResource.urls)),
url('', include(crawlResource.urls)),
url('', include(crawlGraphResource.urls)),
url('', include(borderRouterResource.urls)),
url('', include(announcementResource.urls)),
)


@@ -39,7 +39,7 @@ class Migration(migrations.Migration):
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('pingable', models.BooleanField(default=False)),
('reachable', models.BooleanField(default=False)),
('AS', models.ForeignKey(to='bgpdata.AS')),
('AS', models.ForeignKey(to='bgpdata.AS', on_delete=models.CASCADE)),
],
options={
},
@@ -73,10 +73,10 @@
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('origin', models.CharField(max_length=10, choices=[(b'path', b'BGP Path'), (b'direct', b'Direct Connection')])),
('as1', models.ForeignKey(related_name='peering1', to='bgpdata.AS')),
('as2', models.ForeignKey(related_name='peering2', to='bgpdata.AS')),
('router1', models.ForeignKey(related_name='peering1', default=None, to='bgpdata.BorderRouter', null=True)),
('router2', models.ForeignKey(related_name='peering2', default=None, to='bgpdata.BorderRouter', null=True)),
('as1', models.ForeignKey(related_name='peering1', to='bgpdata.AS', on_delete=models.CASCADE)),
('as2', models.ForeignKey(related_name='peering2', to='bgpdata.AS', on_delete=models.CASCADE)),
('router1', models.ForeignKey(related_name='peering1', default=None, to='bgpdata.BorderRouter', null=True, on_delete=models.CASCADE)),
('router2', models.ForeignKey(related_name='peering2', default=None, to='bgpdata.BorderRouter', null=True, on_delete=models.CASCADE)),
],
options={
},
@@ -85,13 +85,13 @@
migrations.AddField(
model_name='as',
name='crawl',
field=models.ForeignKey(to='bgpdata.CrawlRun'),
field=models.ForeignKey(to='bgpdata.CrawlRun', on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='announcement',
name='originAS',
field=models.ForeignKey(to='bgpdata.AS'),
field=models.ForeignKey(to='bgpdata.AS', on_delete=models.CASCADE),
preserve_default=True,
),
]


@@ -18,8 +18,8 @@ class Migration(migrations.Migration):
('logtime', models.DateTimeField(auto_now_add=True)),
('severity', models.CharField(max_length=10, choices=[(b'INFO', b'info'), (b'ERROR', b'error'), (b'DEBUG', b'debug'), (b'WARN', b'warning')])),
('message', models.TextField()),
('crawl', models.ForeignKey(to='bgpdata.CrawlRun')),
('host', models.ForeignKey(to='bgpdata.ConfigHost')),
('crawl', models.ForeignKey(to='bgpdata.CrawlRun', on_delete=models.CASCADE)),
('host', models.ForeignKey(to='bgpdata.ConfigHost', on_delete=models.SET_NULL)),
],
options={
},


@@ -14,7 +14,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='crawllog',
name='host',
field=models.ForeignKey(to='bgpdata.ConfigHost', null=True),
field=models.ForeignKey(to='bgpdata.ConfigHost', null=True, on_delete=models.SET_NULL),
preserve_default=True,
),
]


@@ -20,7 +20,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='crawllog',
name='host',
field=models.ForeignKey(blank=True, to='bgpdata.ConfigHost', null=True),
field=models.ForeignKey(blank=True, to='bgpdata.ConfigHost', null=True, on_delete=models.CASCADE),
preserve_default=True,
),
]


@@ -15,8 +15,8 @@ class Migration(migrations.Migration):
name='BorderRouterPair',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('router1', models.ForeignKey(related_name='routerpair1', default=None, blank=True, to='bgpdata.BorderRouter', null=True)),
('router2', models.ForeignKey(related_name='routerpair2', default=None, blank=True, to='bgpdata.BorderRouter', null=True)),
('router1', models.ForeignKey(related_name='routerpair1', default=None, blank=True, to='bgpdata.BorderRouter', null=True, on_delete=models.CASCADE)),
('router2', models.ForeignKey(related_name='routerpair2', default=None, blank=True, to='bgpdata.BorderRouter', null=True, on_delete=models.CASCADE)),
],
options={
},
@@ -33,7 +33,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='announcement',
name='router',
field=models.ForeignKey(default=None, to='bgpdata.BorderRouter'),
field=models.ForeignKey(default=None, to='bgpdata.BorderRouter', on_delete=models.CASCADE),
preserve_default=False,
),
migrations.AddField(


@@ -18,7 +18,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='borderrouterpair',
name='peering',
field=models.ForeignKey(default=None, to='bgpdata.Peering'),
field=models.ForeignKey(default=None, to='bgpdata.Peering', on_delete=models.CASCADE),
preserve_default=False,
),
]


@@ -14,7 +14,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='as',
name='lastSeen',
field=models.ForeignKey(related_name='as_lastseen', default=None, blank=True, to='bgpdata.CrawlRun', null=True),
field=models.ForeignKey(related_name='as_lastseen', default=None, blank=True, to='bgpdata.CrawlRun', null=True, on_delete=models.SET_NULL),
preserve_default=True,
),
migrations.AddField(


@@ -14,7 +14,7 @@ class Migration(migrations.Migration):
migrations.AddField(
model_name='announcement',
name='crawlAS',
field=models.ForeignKey(related_name='crawl_as', to='bgpdata.AS', null=True),
field=models.ForeignKey(related_name='crawl_as', to='bgpdata.AS', null=True, on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AlterField(


@@ -14,7 +14,7 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='announcement',
name='originAS',
field=models.ForeignKey(to='bgpdata.AS', null=True),
field=models.ForeignKey(to='bgpdata.AS', null=True, on_delete=models.CASCADE),
preserve_default=True,
),
]


@@ -0,0 +1,30 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.25 on 2020-05-31 00:00
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bgpdata', '0016_auto_20170117_0103'),
]
operations = [
migrations.AlterField(
model_name='confighost',
name='checkMethod',
field=models.CharField(choices=[('CMK', 'Check MK'), ('PLAIN', 'Plain'), ('GOBGP', 'GoBGP')], max_length=10),
),
migrations.AlterField(
model_name='crawllog',
name='severity',
field=models.CharField(choices=[('INFO', 'info'), ('ERROR', 'error'), ('DEBUG', 'debug'), ('WARN', 'warning')], max_length=10),
),
migrations.AlterField(
model_name='peering',
name='origin',
field=models.CharField(choices=[('path', 'BGP Path'), ('direct', 'Direct Connection')], max_length=10),
),
]


@@ -0,0 +1,45 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.25 on 2020-06-03 22:45
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('bgpdata', '0017_auto_20200531_0200'),
]
operations = [
migrations.CreateModel(
name='ASLastSeen',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('asn', models.IntegerField(db_index=True, unique=True)),
('directlyCrawled', models.BooleanField(default=False)),
('online', models.BooleanField()),
('lastSeen', models.DateTimeField(blank=True, null=True)),
],
),
migrations.CreateModel(
name='ASLastSeenNeighbor',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('asn', models.IntegerField(unique=True)),
('neighbor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='bgpdata.ASLastSeen')),
],
),
migrations.AddField(
model_name='crawlrun',
name='graph',
field=models.TextField(default=''),
preserve_default=False,
),
migrations.AddField(
model_name='aslastseen',
name='crawlLastSeen',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='bgpdata.CrawlRun'),
),
]


@@ -0,0 +1,20 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.25 on 2020-06-03 23:45
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('bgpdata', '0018_auto_20200604_0045'),
]
operations = [
migrations.AlterField(
model_name='aslastseenneighbor',
name='asn',
field=models.IntegerField(),
),
]


@@ -0,0 +1,26 @@
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-06-06 18:40
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('bgpdata', '0019_auto_20200604_0145'),
]
operations = [
migrations.AlterField(
model_name='as',
name='lastSeen',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='as_lastseen', to='bgpdata.CrawlRun'),
),
migrations.AlterField(
model_name='aslastseen',
name='crawlLastSeen',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='bgpdata.CrawlRun'),
),
]


@@ -8,185 +8,216 @@ from django.db.models import Q
# Create your models here.
class ConfigHost(models.Model):
CHECK_CHOICES = (
('CMK', "Check MK"),
('PLAIN', "Plain"),
)
CHECK_CHOICES = (
('CMK', "Check MK"),
('PLAIN', "Plain"),
('GOBGP', "GoBGP"),
)
# asno, ip, check method,
name = models.CharField(max_length=50)
number = models.IntegerField()
ip = models.GenericIPAddressField()
checkMethod = models.CharField(max_length=4, choices=CHECK_CHOICES)
# asno, ip, check method,
name = models.CharField(max_length=50)
number = models.IntegerField()
ip = models.GenericIPAddressField()
checkMethod = models.CharField(max_length=10, choices=CHECK_CHOICES)
def __str__(self):
return "%s (%s / %s)" % (self.name, self.number, self.ip)
def __unicode__(self):
return u"%s (%s / %s)" % (self.name, self.number, self.ip)
class CrawlRun(models.Model):
# time start, time end,
startTime = models.DateTimeField()
endTime = models.DateTimeField(null=True, blank=True)
# time start, time end,
startTime = models.DateTimeField()
endTime = models.DateTimeField(null=True, blank=True)
hostsCrawled = models.ManyToManyField(ConfigHost, null=True, blank=True)
hostsCrawled = models.ManyToManyField(ConfigHost, blank=True)
graph = models.TextField()
asCount = models.IntegerField(default=0)
asOnlineCount = models.IntegerField(default=0)
asOfflineCount = models.IntegerField(default=0)
peeringCount = models.IntegerField(default=0)
asCount = models.IntegerField(default=0)
asOnlineCount = models.IntegerField(default=0)
asOfflineCount = models.IntegerField(default=0)
peeringCount = models.IntegerField(default=0)
def __unicode__(self):
return u"Run %d - %s to %s" % (self.pk, self.startTime, self.endTime if self.endTime else "?")
def __str__(self):
return "Run %d - %s to %s" % (self.pk, self.startTime, self.endTime if self.endTime else "?")
def countAS(self):
return self.asCount
def countASOnline(self):
return self.asOnlineCount
def countASOffline(self):
return self.asOfflineCount
def countPeerings(self):
return self.peeringCount
def countAS(self):
return self.asCount
def countASOnline(self):
return self.asOnlineCount
def countASOffline(self):
return self.asOfflineCount
#return self.as_set.filter(online=False).count()
def countPeerings(self):
return self.peeringCount
#return Peering.objects.filter(Q(as1__crawl=self)|Q(as2__crawl=self)).count()
class CrawlLog(models.Model):
INFO = 'INFO'
ERROR = 'ERROR'
DEBUG = 'DEBUG'
WARN = 'WARN'
SEVERITY = (
(INFO, 'info'),
(ERROR, 'error'),
(DEBUG, 'debug'),
(WARN, 'warning'),
)
INFO = 'INFO'
ERROR = 'ERROR'
DEBUG = 'DEBUG'
WARN = 'WARN'
SEVERITY = (
(INFO, 'info'),
(ERROR, 'error'),
(DEBUG, 'debug'),
(WARN, 'warning'),
)
crawl = models.ForeignKey(CrawlRun)
host = models.ForeignKey(ConfigHost, null=True, blank=True, on_delete=models.SET_NULL)
logtime = models.DateTimeField(auto_now_add=True)
severity = models.CharField(max_length=10, choices=SEVERITY)
message = models.TextField()
crawl = models.ForeignKey(CrawlRun, on_delete=models.CASCADE)
host = models.ForeignKey(ConfigHost, null=True, blank=True, on_delete=models.SET_NULL)
logtime = models.DateTimeField(auto_now_add=True)
severity = models.CharField(max_length=10, choices=SEVERITY)
message = models.TextField()
@staticmethod
def log(crawl, msg, severity=None, host=None):
if not severity:
severity = CrawlLog.ERROR
@staticmethod
def log(crawl, msg, severity=None, host=None):
if not severity:
severity = CrawlLog.ERROR
log = CrawlLog()
log.crawl = crawl
log.message = msg
log.severity = severity
log.host = host
log.save()
log = CrawlLog()
log.crawl = crawl
log.message = msg
log.severity = severity
log.host = host
log.save()
def __str__(self):
host = "host %s - " % self.host.name if self.host else ""
return "Log %s %s: %s%s" % (self.get_severity_display(), self.logtime, host, self.message)
def __unicode__(self):
host = "host %s - " % self.host.name if self.host else ""
return u"Log %s %s: %s%s" % (self.get_severity_display(), self.logtime, host, self.message)
class AS(models.Model):
# asno
crawl = models.ForeignKey(CrawlRun)
number = models.IntegerField(db_index=True)
# asno
crawl = models.ForeignKey(CrawlRun, on_delete=models.CASCADE)
number = models.IntegerField(db_index=True)
directlyCrawled = models.BooleanField(default=False)
online = models.BooleanField(default=True, db_index=True)
lastSeen = models.ForeignKey(CrawlRun, blank=True, null=True, default=None, related_name='as_lastseen')
directlyCrawled = models.BooleanField(default=False)
online = models.BooleanField(default=True, db_index=True)
lastSeen = models.ForeignKey(CrawlRun, blank=True, null=True, default=None, related_name='as_lastseen',
on_delete=models.CASCADE)
class Meta:
unique_together = (('crawl', 'number'),)
index_together = (
('crawl', 'number'),
)
class Meta:
unique_together = (('crawl', 'number'),)
index_together = (
('crawl', 'number'),
)
def __unicode__(self):
return u"AS %s (crawl %d)" % (self.number, self.crawl.pk)
def __str__(self):
return "AS %s (crawl %d)" % (self.number, self.crawl.pk)
def setOnline(self):
if not self.online:
self.online = True
self.lastSeen = None
self.save()
def setOnline(self):
if not self.online:
self.online = True
self.lastSeen = None
self.save()
def getPeerings(self):
return Peering.objects.filter(Q(as1=self)|Q(as2=self))
def getPeerings(self):
return Peering.objects.filter(Q(as1=self) | Q(as2=self))
def getAnnouncedPrefixes(self):
return list(set(map(lambda _x: "%(ip)s/%(prefix)s" % _x, self.announcement_set.all().values('ip', 'prefix'))))
def getAnnouncedPrefixes(self):
return list(set(map(lambda _x: "%(ip)s/%(prefix)s" % _x, self.announcement_set.all().values('ip', 'prefix'))))
def formatLastSeen(self):
if self.lastSeen:
return self.lastSeen.startTime.strftime("%d.%m.%Y %H:%M")
def formatLastSeen(self):
if self.lastSeen:
return self.lastSeen.startTime.strftime("%d.%m.%Y %H:%M")
class BorderRouter(models.Model):
# as id, ip, check method, pingable, reachable
# unique: (crawl_id, asno, as id)
AS = models.ForeignKey(AS)
routerID = models.GenericIPAddressField()
pingable = models.BooleanField(default=False)
reachable = models.BooleanField(default=False)
# as id, ip, check method, pingable, reachable
# unique: (crawl_id, asno, as id)
AS = models.ForeignKey(AS, on_delete=models.CASCADE)
routerID = models.GenericIPAddressField()
pingable = models.BooleanField(default=False)
reachable = models.BooleanField(default=False)
def __str__(self):
p = "p" if self.pingable else "!p"
r = "r" if self.reachable else "!r"
return "Router %s (AS %s, %s%s)" % (self.routerID, self.AS.number, p, r)
def __unicode__(self):
p = "p" if self.pingable else "!p"
r = "r" if self.reachable else "!r"
return u"Router %s (AS %s, %s%s)" % (self.routerID, self.AS.number, p, r)
class Announcement(models.Model):
router = models.ForeignKey(BorderRouter)
router = models.ForeignKey(BorderRouter, on_delete=models.CASCADE)
ip = models.GenericIPAddressField()
prefix = models.IntegerField()
ip = models.GenericIPAddressField()
prefix = models.IntegerField()
# NOTE: increase length for longer paths (currently supports a length of ~85)
ASPath = models.CharField(max_length=512)
nextHop = models.GenericIPAddressField()
originAS = models.ForeignKey(AS, null=True)
crawlAS = models.ForeignKey(AS, related_name='crawl_as', null=True)
# NOTE: increase length for longer paths (currently supports a length of ~85)
ASPath = models.CharField(max_length=512)
nextHop = models.GenericIPAddressField()
originAS = models.ForeignKey(AS, null=True, on_delete=models.CASCADE)
crawlAS = models.ForeignKey(AS, related_name='crawl_as', null=True, on_delete=models.CASCADE)
def __str__(self):
return "%s/%s via %s (crawl %s)" % (self.ip, self.prefix, self.ASPath, self.router.AS.crawl.pk)
def __unicode__(self):
return u"%s/%s via %s (crawl %s)" % (self.ip, self.prefix, self.ASPath, self.router.AS.crawl.pk)
class Peering(models.Model):
DIRECT = 'direct'
PATH = 'path'
DIRECT = 'direct'
PATH = 'path'
ORIGIN = (
(PATH, 'BGP Path'),
(DIRECT, 'Direct Connection'),
)
ORIGIN = (
(PATH, 'BGP Path'),
(DIRECT, 'Direct Connection'),
)
index_together = (
('as1', 'as2'),
)
index_together = (
('as1', 'as2'),
)
as1 = models.ForeignKey(AS, related_name='peering1')
as2 = models.ForeignKey(AS, related_name='peering2')
origin = models.CharField(max_length=10, choices=ORIGIN)
as1 = models.ForeignKey(AS, related_name='peering1', on_delete=models.CASCADE)
as2 = models.ForeignKey(AS, related_name='peering2', on_delete=models.CASCADE)
origin = models.CharField(max_length=10, choices=ORIGIN)
def __unicode__(self):
return u"AS %s <--> AS %s (%s, crawl %s)" % (self.as1.number, self.as2.number, self.get_origin_display(), self.as1.crawl.pk)
def __str__(self):
return "AS %s <--> AS %s (%s, crawl %s)" % (self.as1.number, self.as2.number,
self.get_origin_display(), self.as1.crawl.pk)
def containsAS(self, AS):
return AS in (self.as1, self.as2)
def containsAS(self, AS):
return AS in (self.as1, self.as2)
@staticmethod
def getPeering(as1, as2):
""" Find matching peering """
try:
return Peering.objects.get(as1=as1, as2=as2)
except Peering.DoesNotExist:
return Peering.objects.get(as1=as2, as2=as1)
@staticmethod
def getPeering(as1, as2):
""" Find matching peering """
try:
return Peering.objects.get(as1=as1, as2=as2)
except Peering.DoesNotExist:
return Peering.objects.get(as1=as2, as2=as1)
class BorderRouterPair(models.Model):
peering = models.ForeignKey(Peering)
router1 = models.ForeignKey(BorderRouter, default=None, blank=True, null=True, related_name='routerpair1')
router2 = models.ForeignKey(BorderRouter, default=None, blank=True, null=True, related_name='routerpair2')
peering = models.ForeignKey(Peering, on_delete=models.CASCADE)
router1 = models.ForeignKey(BorderRouter, default=None, blank=True, null=True, related_name='routerpair1',
on_delete=models.CASCADE)
router2 = models.ForeignKey(BorderRouter, default=None, blank=True, null=True, related_name='routerpair2',
on_delete=models.CASCADE)
def __unicode__(self):
return u"%s <--> %s (crawl %d)" % (self.router1, self.router2, self.router1.AS.crawl.pk)
def __str__(self):
return "%s <--> %s (crawl %d)" % (self.router1, self.router2, self.router1.AS.crawl.pk)
@staticmethod
def getPairing(peering, router1, router2):
try:
return BorderRouterPair.objects.get(peering=peering, router1=router1, router2=router2)
except BorderRouterPair.DoesNotExist:
return BorderRouterPair.objects.get(peering=peering, router1=router2, router2=router1)
@staticmethod
def getPairing(peering, router1, router2):
try:
return BorderRouterPair.objects.get(peering=peering, router1=router1, router2=router2)
except BorderRouterPair.DoesNotExist:
return BorderRouterPair.objects.get(peering=peering, router1=router2, router2=router1)
class ASLastSeen(models.Model):
asn = models.IntegerField(db_index=True, unique=True)
directlyCrawled = models.BooleanField(default=False)
online = models.BooleanField()
lastSeen = models.DateTimeField(blank=True, null=True)
crawlLastSeen = models.ForeignKey(CrawlRun, null=True, on_delete=models.SET_NULL)
def __str__(self):
return ("AS{} {}, last seen {} (crawl {})"
.format(self.asn, "online" if self.online else "offline", self.lastSeen, self.crawlLastSeen.pk))
class ASLastSeenNeighbor(models.Model):
asn = models.IntegerField()
neighbor = models.ForeignKey(ASLastSeen, on_delete=models.CASCADE)
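The new ASLastSeen/ASLastSeenNeighbor models keep per-ASN state across crawl runs. A minimal sketch of how a crawler pass might maintain them, mirroring the setOnline() semantics above (clear lastSeen while online, stamp it when an AS drops off); the helper name and the update policy are assumptions, not the actual backend code:

from django.utils import timezone
from bgpdata.models import ASLastSeen

def update_last_seen(crawl, online_asns):
    """Hypothetical helper: online_asns is the list of AS numbers seen in this crawl."""
    for asn in online_asns:
        entry, _ = ASLastSeen.objects.get_or_create(asn=asn, defaults={'online': True})
        entry.online = True
        entry.lastSeen = None           # no "last seen" timestamp needed while online
        entry.crawlLastSeen = crawl
        entry.save()
    # everything not seen this time goes offline, keeping its last crawl reference
    ASLastSeen.objects.filter(online=True).exclude(asn__in=online_asns).update(
        online=False, lastSeen=timezone.now())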

View File

@ -0,0 +1,28 @@
{% extends "base.html" %}
{% block body %}
<h3>ASN Last Seen</h3>
<table class="table">
<thead>
<tr>
<th>#</th>
<th>Status</th>
<th>Directly Crawled</th>
<th>Last Seen</th>
<th>Last Seen Crawl</th>
</tr>
</thead>
<tbody>
{% for asn in last_seen %}
<tr class="{% if asn.online %}success{% else %}danger{% endif %}">
<td>{{ asn.asn }}</td>
<td>{%if not asn.online %}Offline{% endif %}</td>
<td>{% if asn.directlyCrawled %}Yes{% endif %}</td>
<td>{% if not asn.online %}{{ asn.lastSeen }}{% endif %}</td>
<td><a href="/map/{{ asn.crawlLastSeen.id }}/">Crawl {{ asn.crawlLastSeen.id }}</a></td>
</tr>
{% endfor %}
</tbody>
</table>
{% endblock %}

View File

@ -238,12 +238,12 @@ function click(d) {
//});
if(d.crawled) {
$.ajax({url: "/map/api/borderrouter/?AS__crawl={{crawl.pk}}&AS__number=" + d.asnumber, success: function(result) {
$.ajax({url: "/api/v1/borderrouter/?AS__crawl={{crawl.pk}}&AS__number=" + d.asnumber, success: function(result) {
$("#infowin").html('');
$("#infowin").fadeIn('fast', function() {});
for(var i=0; i<result.objects.length; i++) {
(function(currRouter) {
$.ajax({url:"/map/api/announcement/?router="+currRouter.id, success: function(result2) {
$.ajax({url:"/api/v1/announcement/?router="+currRouter.id, success: function(result2) {
astable = 'Table for AS'+d.asnumber+' router ID ' + currRouter.routerID;
astable += '<table class="table table-bordered">';
astable += '<tr><th>Network</th><th>Next Hop</th><th>AS Path</th></tr>';

View File

@ -0,0 +1,215 @@
{% extends "base.html" %}
{% block container_class %}container-fluid{% endblock %}
{% block head %}
{% load static from staticfiles %}
<!-- <script src="{% static "js/d3.js" %}" charset="utf-8"></script> -->
<script src="https://d3js.org/d3.v5.min.js"></script>
<style>
.node {
stroke: #fff;
stroke-width: 1.5px;
}
.link {
stroke: #999;
stroke-opacity: .6;
}
#plotwin {
height: 75vh;
}
</style>
{% endblock %}
{% block body %}
<div class="page-header">
<h3>{% if crawl.pk %}Crawl run {{crawl.pk}}{% else %}Live Crawl {% endif %} from {{crawl.startTime|date:"d.m.Y H:i"}}</h3>
</div>
<div id="plotwin" class="container-fluid"></div>
<!-- <div id="infowin"><div class="alert alert-info" role="alert">Click on a node for more information</div></div> -->
<script type="text/javascript" charset="utf-8">
const margin = {
top: 40,
bottom: 10,
left: 20,
right: 20,
};
let plotwin = document.getElementById("plotwin")
const width = plotwin.offsetWidth - margin.left - margin.right;
const height = plotwin.offsetHeight - margin.top - margin.bottom;
let drag = d3.drag()
.on("drag", dragged);
function dragged() {
let current = d3.select(this);
current
.attr('cx', d3.event.x)
.attr('cy', d3.event.y);
console.log(`${d3.event.x}, ${d3.event.y}`);
}
function dragmove(d) {
let x = d3.event.x;
let y = d3.event.y;
d3.select(this).attr("transform", "translate(" + x + "," + y + ")");
}
const parent_svg = d3
.select("#plotwin")
.append("svg")
.attr("width", width + margin.left + margin.right)
.attr("height", height + margin.top + margin.bottom)
//.attr("viewBox", [width / 2, height / 2, width, height])
.attr("viewBox", [0, 0, width, height])
const svg = parent_svg.append("g")
// .append("g")
// .attr("transform", `translate(${margin.left}, ${margin.top})`);
parent_svg.call(
d3.zoom()
.scaleExtent([.1, 4])
.on("zoom", function() { svg.attr("transform", d3.event.transform); })
);
//const simulation = d3
// .forceSimulation()
// .force(
// "link",
// d3.forceLink().id((d) => d.id)
// )
// .force("charge", d3.forceManyBody().distanceMin(50).strength(-300))
// .force("center", d3.forceCenter(width / 2, height / 2));
const color = d3.scaleOrdinal(d3.schemeCategory10);
drag = simulation => {
function dragstarted(d) {
if (!d3.event.active) simulation.alphaTarget(0.3).restart();
d.fx = d.x;
d.fy = d.y;
}
function dragged(d) {
d.fx = d3.event.x;
d.fy = d3.event.y;
}
function dragended(d) {
if (!d3.event.active) simulation.alphaTarget(0);
d.fx = null;
d.fy = null;
}
return d3.drag()
.on("start", dragstarted)
.on("drag", dragged)
.on("end", dragended);
}
d3.json("/api/v2/crawlrun/live/?with_graph").then((data) => {
// Links data join
data = data.graph
const simulation = d3.forceSimulation(data.nodes)
//.force("link", d3.forceLink(data.links).id(d => d.id).distance(150).strength(1))
//.force("link", d3.forceLink().id(function(d) { return d.id; }))
.force("link", d3.forceLink().id(d => d.id)
.distance(l => {
const neighs = Math.min(l.source.neighbors, l.target.neighbors);
switch(neighs) {
case 0: return 40;
case 1: return 40;
case 2: return 120;
case 3:
case 4: return 200;
default: return 300;
}
})
.strength(l => {
const neighs = Math.min(l.source.neighbors, l.target.neighbors);
return 1 / Math.max(neighs, 1);  // guard against a neighbor count of 0
}))
.force("charge", d3.forceManyBody()
.strength(-200)
.theta(1.1))
//.force("collision", d3.forceCollide(50).strength(0.2).iterations(100))
//.force("collision", d3.forceCollide(40).strength(0.2))
.force("collision", d3.forceCollide(40).strength(0.2))
//.force("x", d3.forceX())
//.force("y", d3.forceY())
.force("center", d3.forceCenter(width / 2, height / 2));
//.force("center", d3.forceRadial(100, width / 2, height / 2));
const link = svg
.selectAll(".link")
.data(data.links)
.join((enter) =>
enter.append("line")
.attr("class", "link"));
// Nodes data join
let node = svg.selectAll('.node')
.data(data.nodes)
.enter()
.append("g")
.attr("id", d => "node-" + d.id)
.call(drag(simulation));
node.append("ellipse")
//.attr("rx", d => d.stub ? 35 : 40)
//.attr("ry", d => d.stub ? 14 : 20)
.attr("rx", d => d.stub ? 34 : (40 + (d.neighbors > 5 ? 5 : 0)))
.attr("ry", d => d.stub ? 12 : (20 + (d.neighbors > 5 ? 2 : 0)))
.attr("fill", function(d) {
if(d.directly_crawled)
return "#94FF70";
else if(!d.online)
return "#FFCCCC";
// return "#F0FFEB";
else if(d.stub)
return "#3291A8"
else
return "#D1FFC2";
})
.attr("stroke", "black")
.attr("stroke-width", "1px");
node.append('text')
.attr("fill", "black")
.attr("font-family", "sans-serif")
.attr("font-size", "13px")
.attr("font-weight", "bold")
.attr("dy", "4")
.attr("text-anchor", "middle")
.text(d => d.id)
simulation.nodes(data.nodes).force("link").links(data.links);
simulation.on("tick", (e) => {
link
.attr("x1", (d) => d.source.x)
.attr("y1", (d) => d.source.y)
.attr("x2", (d) => d.target.x)
.attr("y2", (d) => d.target.y);
node.attr("transform", d => "translate(" + d.x + "," + d.y + ")");
});
});
</script>
{% endblock %}
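The template above expects the crawl graph in node-link form: nodes carrying id, neighbors, stub, directly_crawled and online, and links referencing node ids via source/target. A rough sketch of how such a structure could be produced with networkx (which requirements.txt now pulls in); the function name, its inputs and the stub heuristic are illustrative, the real builder in the backend may differ:

import json
import networkx as nx
from networkx.readwrite import json_graph

def build_graph_json(as_attrs, peerings):
    """Hypothetical builder: as_attrs maps ASN -> {"online": ..., "directly_crawled": ...},
    peerings is an iterable of (asn1, asn2) tuples."""
    graph = nx.Graph()
    for asn, attrs in as_attrs.items():
        graph.add_node(asn, **attrs)
    graph.add_edges_from(peerings)
    for asn in graph.nodes:
        graph.nodes[asn]["neighbors"] = graph.degree(asn)
        # crude degree-based stand-in; the real crawler has its own stub detection
        graph.nodes[asn]["stub"] = graph.degree(asn) <= 1
    # yields {"nodes": [{"id": ...}, ...], "links": [{"source": ..., "target": ...}, ...]}
    return json.dumps(json_graph.node_link_data(graph))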

View File

@ -1,26 +1,14 @@
# This file is part of dnmapper, an AS--level mapping tool
# Licensed under GNU General Public License v3 or later
# Written by Sebastian Lohff (seba@someserver.de)
from django.conf.urls import url
from django.conf.urls import url, include
from api import ASResource, CrawlResource, BorderRouterResource, AnnouncementResource
from bgpdata import views as bgpdata_views
asResource = ASResource()
crawlResource = CrawlResource()
borderRouterResource = BorderRouterResource()
announcementResource = AnnouncementResource()
urlpatterns = (
url(r'^$', bgpdata_views.overview),
url(r'^([0-9]+)/$', bgpdata_views.showMap),
#url(r'^api/crawl/(?P<crawlID>\d+)/asses/$', 'bgpdata.api.asses'),
#(r'^api/', include(asResource.urls)),
url(r'^api/', include(asResource.urls)),
url(r'^api/', include(crawlResource.urls)),
url(r'^api/', include(borderRouterResource.urls)),
url(r'^api/', include(announcementResource.urls)),
url(r'^$', bgpdata_views.overview, name='overview'),
url(r'^([0-9]+)/$', bgpdata_views.showMap, name='show_map'),
url(r'^new/([0-9]+)/$', bgpdata_views.show_new_map, name='show_map'),
url(r'^new/(?P<crawl_id>live)/$', bgpdata_views.show_new_map, name='show_live_map'),
url(r'^lastseen/$', bgpdata_views.show_asn_last_seen, name='lastseen'),
)

View File

@ -1,24 +1,45 @@
# This file is part of dnmapper, an AS--level mapping tool
# Licensed under GNU General Public License v3 or later
# Written by Sebastian Lohff (seba@someserver.de)
from django.shortcuts import render
from bgpdata.models import CrawlRun, AS, Peering
from django.core.paginator import Paginator
from bgpdata.models import CrawlRun, AS, Peering, ASLastSeen
from backend import crawler
def overview(request):
crawls = CrawlRun.objects.order_by("-startTime")
crawlsPage = Paginator(crawls, 200)
return render(request, 'bgpdata/overview.html', {"crawls": crawlsPage.page(1)})
crawls = CrawlRun.objects.order_by("-startTime")
crawlsPage = Paginator(crawls, 200)
return render(request, 'bgpdata/overview.html', {"crawls": crawlsPage.page(1)})
def showMap(request, crawlId):
crawl = None
try:
crawl = CrawlRun.objects.get(id=crawlId)
except CrawlRun.DoesNotExist:
return render(request, "bgpdata/no-map-found.html", {"crawl_id": crawlId})
crawl = None
try:
crawl = CrawlRun.objects.get(id=crawlId)
except CrawlRun.DoesNotExist:
return render(request, "bgpdata/no-map-found.html", {"crawl_id": crawlId})
ASses = AS.objects.filter(crawl=crawl)
peerings = Peering.objects.filter(as1__crawl=crawl)
ASses = AS.objects.filter(crawl=crawl)
peerings = Peering.objects.filter(as1__crawl=crawl)
return render(request, 'bgpdata/map.html', {"crawl": crawl, 'ASses': ASses, 'peerings': peerings})
return render(request, 'bgpdata/map.html', {"crawl": crawl, 'ASses': ASses, 'peerings': peerings})
def show_new_map(request, crawl_id):
crawl = None
if crawl_id == 'live':
net = crawler.get_current_network()
crawl = crawler.make_crawl_from_net(net)
else:
try:
crawl = CrawlRun.objects.get(id=crawl_id)
except CrawlRun.DoesNotExist:
return render(request, "bgpdata/no-map-found.html", {"crawl_id": crawl_id})
return render(request, 'bgpdata/new_new_map.html', {"crawl": crawl})
def show_asn_last_seen(request):
return render(request, 'bgpdata/asn_last_seen.html', {'last_seen': ASLastSeen.objects.order_by("asn")})
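The live data the new map view renders is also reachable for other consumers via the v2 API; a quick sketch with requests, assuming a local development server on localhost:8000 and the field names defined on CrawlRun above:

import requests

# same endpoint the map template fetches; ?with_graph includes the graph JSON
resp = requests.get("http://localhost:8000/api/v2/crawlrun/live/?with_graph")
resp.raise_for_status()
crawl = resp.json()
print(crawl["asCount"], "ASes /", crawl["peeringCount"], "peerings")
print(len(crawl["graph"]["nodes"]), "nodes,", len(crawl["graph"]["links"]), "links")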

28
bin/conv.py Executable file
View File

@ -0,0 +1,28 @@
#!/usr/bin/env python
import argparse
import os
import sys
sys.path.append("..")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dnmapper.settings")
import django
django.setup()
from backend.crawler import convert_crawl
from bgpdata.models import CrawlRun
def main():
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--crawl-id", type=int)
args = parser.parse_args()
try:
crawl = CrawlRun.objects.get(pk=args.crawl_id)
except CrawlRun.DoesNotExist:
parser.error("CrawlRun with id {} does not exist".format(args.crawl_id))
convert_crawl(crawl)
if __name__ == '__main__':
main()

View File

@ -1,13 +1,10 @@
#!/usr/bin/env python2
#!/usr/bin/env python
# This file is part of dnmapper, an AS--level mapping tool
# Licensed under GNU General Public License v3 or later
# Written by Sebastian Lohff (seba@someserver.de)
from __future__ import print_function
# config
LAST_SEEN_DAYS = 7
# prepare environment
import sys
sys.path.append("..")
@ -22,197 +19,213 @@ from django.utils import timezone
from django.db.models import Q, Max
from bgpdata.models import ConfigHost, CrawlRun, CrawlLog, AS, BorderRouter, Announcement, Peering, BorderRouterPair
from routerparsers import getBGPData, RouterParserException
from backend import gobgp, cmk_parser
# config
LAST_SEEN_DAYS = 7
def getOrCreateAS(crawl, number, online=True):
currAS = None
try:
currAS = AS.objects.get(crawl=crawl, number=number)
if online:
currAS.setOnline()
except AS.DoesNotExist:
currAS = AS(crawl=crawl, number=number, online=online)
currAS.save()
currAS = None
try:
currAS = AS.objects.get(crawl=crawl, number=number)
if online:
currAS.setOnline()
except AS.DoesNotExist:
currAS = AS(crawl=crawl, number=number, online=online)
currAS.save()
return currAS
return currAS
def main():
# 1. create crawl run
crawl = CrawlRun()
crawl.startTime = timezone.now()
crawl.save()
# 1. create crawl run
crawl = CrawlRun()
crawl.startTime = timezone.now()
crawl.save()
CrawlLog.log(crawl, "Starting crawl run!", severity=CrawlLog.INFO)
CrawlLog.log(crawl, "Starting crawl run!", severity=CrawlLog.INFO)
# 2. get data from all hosts, put it in the database
for host in ConfigHost.objects.all():
crawl.hostsCrawled.add(host)
data = None
print(" -- Getting data for host %s" % host)
try:
if host.checkMethod == 'CMK':
data = getBGPData(host.ip, host.number)
else:
CrawlLog.log(crawl, "Method %s is not currently supported, skipping host" % host.checkMethod, host=host, severity=CrawlLog.ERROR)
continue
except RouterParserException as e:
msg = "Could not parse data for host: %s" % str(e)
print("%s: %s" % (host, msg))
CrawlLog.log(crawl, msg, host=host, severity=CrawlLog.ERROR)
continue
except socket.error as e:
msg = "Could not reach host: %s" % (e,)
print("%s: %s" % (host, msg))
CrawlLog.log(crawl, msg, host=host, severity=CrawlLog.ERROR)
continue
# 2. get data from all hosts, put it in the database
for host in ConfigHost.objects.all():
crawl.hostsCrawled.add(host)
data = None
print(" -- Getting data for host %s" % host)
try:
if host.checkMethod == 'CMK':
data = [cmk_parser.getBGPData(host.ip, host.number)]
elif host.checkMethod == 'GOBGP':
data = gobgp.get_bgp_data(host.ip)
else:
CrawlLog.log(crawl, "Method %s is not currently supported, skipping host" % host.checkMethod, host=host, severity=CrawlLog.ERROR)
continue
except cmk_parser.RouterParserException as e:
msg = "Could not parse data for host: %s" % str(e)
print("%s: %s" % (host, msg))
CrawlLog.log(crawl, msg, host=host, severity=CrawlLog.ERROR)
continue
except socket.error as e:
msg = "Could not reach host: %s" % (e,)
print("%s: %s" % (host, msg))
CrawlLog.log(crawl, msg, host=host, severity=CrawlLog.ERROR)
continue
print(" -- parsing...")
for elem in data:
parseData(crawl, host, elem)
currASno = int(data["local_as"])
currAS = getOrCreateAS(crawl, currASno)
# 3. calculate missing data
print(" -- Adding extra data from announcements...")
# 3.1. use announcement data to find hidden peerings
for announcement in Announcement.objects.filter(router__AS__crawl=crawl):
path = announcement.ASPath.split(" ")
if len(path) > 1:
firstASno = path.pop(0)
firstAS = getOrCreateAS(crawl, firstASno)
while len(path) > 0:
secondASno = path.pop(0)
secondAS = getOrCreateAS(crawl, secondASno)
currAS.directlyCrawled = True
currAS.save()
try:
Peering.getPeering(firstAS, secondAS)
except Peering.DoesNotExist:
peering = Peering(as1=firstAS, as2=secondAS, origin=Peering.PATH)
peering.save()
currRouter = None
try:
currRouter = BorderRouter.objects.get(AS=currAS, routerID=data["local_id"])
currRouter.pingable = True
currRouter.reachable = True
currRouter.save()
except BorderRouter.DoesNotExist:
currRouter = BorderRouter(AS=currAS, routerID=data["local_id"], pingable=True, reachable=True)
currRouter.save()
firstAS = secondAS
print(" --> peers")
for peer in data["peers"]:
# peerings
# data: BGP{state, neighbor_id, neighbor_as}, description
# 3.2 add ASses, routers and peerings from old crawlruns (last should suffice)
# find
print(" --> copy old ASses")
timerangeStart = crawl.startTime - datetime.timedelta(LAST_SEEN_DAYS)
oldASses = AS.objects.filter(online=True, crawl__startTime__gte=timerangeStart) \
.values("number").annotate(lastSeen=Max('crawl_id')).filter(~Q(lastSeen=crawl.pk))
# a) find/create neighbor
print(" ----> Peer:", int(peer["BGP"]["neighbor_as"]))
neighAS = getOrCreateAS(crawl, int(peer["BGP"]["neighbor_as"]), online=peer["BGP"]["online"])
# 3.2.1. copy old asses
print(" ----> create ASses")
for oldASdata in oldASses:
print(" ------> AS", oldASdata["number"])
oldAS = AS.objects.get(number=oldASdata["number"], crawl=oldASdata["lastSeen"])
# b) find out if a peering already exists (maybe where we only need to add our router id?)
peering = None
try:
peering = Peering.getPeering(currAS, neighAS)
except Peering.DoesNotExist:
peering = Peering(as1=currAS, as2=neighAS, origin=Peering.DIRECT)
peering.save()
try:
newAS = AS.objects.get(number=oldAS.number, crawl=crawl)
if not newAS.online and not newAS.lastSeen:
newAS.lastSeen = oldAS.crawl
newAS.save()
except Exception:
newAS = AS(number=oldAS.number, crawl=crawl, lastSeen=oldAS.crawl, directlyCrawled=False, online=False)
newAS.save()
# c) look for router/peering pairs
if peer["BGP"]["neighbor_id"]:
try:
neighRouter = BorderRouter.objects.get(AS=neighAS, routerID=peer["BGP"]["neighbor_id"])
except BorderRouter.DoesNotExist:
neighRouter = BorderRouter(AS=neighAS, routerID=peer["BGP"]["neighbor_id"], pingable=False, reachable=False)
neighRouter.save()
try:
BorderRouterPair.getPairing(peering, currRouter, neighRouter)
except BorderRouterPair.DoesNotExist:
pairs = BorderRouterPair.objects.filter(Q(peering=peering) & (Q(router1=neighRouter, router2=None)|Q(router1=None, router2=neighRouter)))
if pairs.count() > 0:
pair = pairs[0]
if pair.router1 == None:
pair.router1 = currRouter
else:
pair.router2 = currRouter
pair.save()
else:
pair = BorderRouterPair(peering=peering, router1=currRouter, router2=neighRouter)
pair.save()
# 3.2.2 copy peerings between old asses
print(" ----> copy peerings")
for oldASdata in oldASses:
print(" ------> AS", oldASdata["number"])
oldAS = AS.objects.get(number=oldASdata["number"], crawl=oldASdata["lastSeen"])
for peering in oldAS.getPeerings():
print(" --------> Peering %s <--> %s" % (peering.as1.number, peering.as2.number))
peering = Peering(
as1=AS.objects.get(number=peering.as1.number, crawl=crawl),
as2=AS.objects.get(number=peering.as2.number, crawl=crawl),
origin=peering.origin)
peering.save()
print(" --> Announcements")
if "routes" in data and data["routes"]:
for route in data["routes"]:
print(" ---->", route["prefix"])
if "/" not in route["prefix"]:
continue
# 3.3 FIXME: do we also want to have old peerings which do not exist anymore?
crawlAS = currAS
if len(route["path"]) > 0:
crawlAS = getOrCreateAS(crawl, route["path"][0])
originAS = getOrCreateAS(crawl, route["path"][-1])
ip, prefix = route["prefix"].split("/")
a = Announcement(router=currRouter, ip=ip, prefix=prefix,
ASPath=" ".join(route["path"]), nextHop=route["nexthop"],
crawlAS=crawlAS, originAS=originAS)
a.save()
else:
print(" !! No routes found in host output")
CrawlLog.log(crawl, "No routes found in host output (no bgp feed included?)", host=host, severity=CrawlLog.WARN)
# 4. end crawl run
crawl.endTime = timezone.now()
crawl.save()
# 3. calculate missing data
print(" -- Adding extra data from announcements...")
# 3.1. use announcement data to find hidden peerings
for announcement in Announcement.objects.filter(router__AS__crawl=crawl):
path = announcement.ASPath.split(" ")
if len(path) > 1:
firstASno = path.pop(0)
firstAS = getOrCreateAS(crawl, firstASno)
while len(path) > 0:
secondASno = path.pop(0)
secondAS = getOrCreateAS(crawl, secondASno)
# additional data
crawl.asCount = crawl.as_set.count()
crawl.asOnlineCount = crawl.as_set.filter(online=True).count()
crawl.asOfflineCount = crawl.as_set.filter(online=False).count()
crawl.peeringCount = Peering.objects.filter(Q(as1__crawl=crawl) | Q(as2__crawl=crawl)).count()
crawl.save()
try:
Peering.getPeering(firstAS, secondAS)
except Peering.DoesNotExist:
peering = Peering(as1=firstAS, as2=secondAS, origin=Peering.PATH)
peering.save()
print(" !! Done")
CrawlLog.log(crawl, "Crawl completed", severity=CrawlLog.INFO)
firstAS = secondAS
# 3.2 add ASses, routers and peerings from old crawlruns (last should suffice)
# find
print(" --> copy old ASses")
timerangeStart = crawl.startTime - datetime.timedelta(LAST_SEEN_DAYS)
oldASses = AS.objects.filter(online=True, crawl__startTime__gte=timerangeStart).values("number").annotate(lastSeen=Max('crawl_id')).filter(~Q(lastSeen=crawl.pk))
def parseData(crawl, host, data):
print(" -- parsing...")
# 3.2.1. copy old asses
print(" ----> create ASses")
for oldASdata in oldASses:
print(" ------> AS", oldASdata["number"])
oldAS = AS.objects.get(number=oldASdata["number"], crawl=oldASdata["lastSeen"])
currASno = int(data["local_as"])
currAS = getOrCreateAS(crawl, currASno)
try:
newAS = AS.objects.get(number=oldAS.number, crawl=crawl)
if not newAS.online and not newAS.lastSeen:
newAS.lastSeen = oldAS.crawl
newAS.save()
except:
newAS = AS(number=oldAS.number, crawl=crawl, lastSeen=oldAS.crawl, directlyCrawled=False, online=False)
newAS.save()
currAS.directlyCrawled = True
currAS.save()
# 3.2.2 copy peerings between old asses
print(" ----> copy peerings")
for oldASdata in oldASses:
print(" ------> AS", oldASdata["number"])
oldAS = AS.objects.get(number=oldASdata["number"], crawl=oldASdata["lastSeen"])
for peering in oldAS.getPeerings():
print(" --------> Peering %s <--> %s" % (peering.as1.number, peering.as2.number))
peering = Peering(
as1=AS.objects.get(number=peering.as1.number, crawl=crawl),
as2=AS.objects.get(number=peering.as2.number, crawl=crawl),
origin=peering.origin)
peering.save()
currRouter = None
try:
currRouter = BorderRouter.objects.get(AS=currAS, routerID=data["local_id"])
currRouter.pingable = True
currRouter.reachable = True
currRouter.save()
except BorderRouter.DoesNotExist:
currRouter = BorderRouter(AS=currAS, routerID=data["local_id"], pingable=True, reachable=True)
currRouter.save()
# 3.3 FIXME: do we also want to have old peerings which do not exist anymore?
print(" --> peers")
for peer in data["peers"]:
# peerings
# data: BGP{state, neighbor_id, neighbor_as}, description
# 4. end crawl run
crawl.endTime = timezone.now()
crawl.save()
# a) find/create neighbor
print(" ----> Peer:", int(peer["BGP"]["neighbor_as"]))
neighAS = getOrCreateAS(crawl, int(peer["BGP"]["neighbor_as"]), online=peer["BGP"]["online"])
# additional data
crawl.asCount = crawl.as_set.count()
crawl.asOnlineCount = crawl.as_set.filter(online=True).count()
crawl.asOfflineCount = crawl.as_set.filter(online=False).count()
crawl.peeringCount = Peering.objects.filter(Q(as1__crawl=crawl)|Q(as2__crawl=crawl)).count()
crawl.save()
# b) find out if a peering already exists (maybe where we only need to add our router id?)
peering = None
try:
peering = Peering.getPeering(currAS, neighAS)
except Peering.DoesNotExist:
peering = Peering(as1=currAS, as2=neighAS, origin=Peering.DIRECT)
peering.save()
# c) look for router/peering pairs
if peer["BGP"]["neighbor_id"]:
try:
neighRouter = BorderRouter.objects.get(AS=neighAS, routerID=peer["BGP"]["neighbor_id"])
except BorderRouter.DoesNotExist:
neighRouter = BorderRouter(AS=neighAS, routerID=peer["BGP"]["neighbor_id"],
pingable=False, reachable=False)
neighRouter.save()
try:
BorderRouterPair.getPairing(peering, currRouter, neighRouter)
except BorderRouterPair.DoesNotExist:
pairs = BorderRouterPair.objects.filter(Q(peering=peering) & (Q(router1=neighRouter, router2=None) |
Q(router1=None, router2=neighRouter)))
if pairs.count() > 0:
pair = pairs[0]
if pair.router1 is None:
pair.router1 = currRouter
else:
pair.router2 = currRouter
pair.save()
else:
pair = BorderRouterPair(peering=peering, router1=currRouter, router2=neighRouter)
pair.save()
print(" --> Announcements")
if "routes" in data and data["routes"]:
for route in data["routes"]:
print(" ---->", route["prefix"])
if "/" not in route["prefix"]:
continue
crawlAS = currAS
if len(route["path"]) > 0:
route["path"] = list(map(str, route["path"]))
crawlAS = getOrCreateAS(crawl, route["path"][0])
originAS = getOrCreateAS(crawl, route["path"][-1])
ip, prefix = route["prefix"].split("/")
a = Announcement(router=currRouter, ip=ip, prefix=prefix,
ASPath=" ".join(route["path"]), nextHop=route["nexthop"],
crawlAS=crawlAS, originAS=originAS)
a.save()
else:
print(" !! No routes found in host output")
CrawlLog.log(crawl, "No routes found in host output (no bgp feed included?)",
host=host, severity=CrawlLog.WARN)
print(" !! Done")
CrawlLog.log(crawl, "Crawl completed", severity=CrawlLog.INFO)
if __name__ == '__main__':
main()
main()
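Step 3.1 above derives peerings that were never directly observed by walking every announced AS path pairwise. Stripped of the database bookkeeping, the core of that loop looks like this (a simplified sketch, not the literal crawler code):

def peerings_from_path(as_path):
    """Yield adjacent AS pairs from an announcement's AS path, e.g. "65001 65002 65003"."""
    hops = as_path.split(" ")
    return zip(hops, hops[1:])

# the crawler then looks each pair up via Peering.getPeering(), which checks both
# directions, and only creates a new Peering with origin=PATH when none exists yet
for left, right in peerings_from_path("65001 65002 65003"):
    print(left, "<-->", right)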

17
bin/netcrawl.py Executable file
View File

@ -0,0 +1,17 @@
#!/usr/bin/env python
import os
import sys
sys.path.append("..")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dnmapper.settings")
import django
django.setup()
from backend.crawler import crawl
def main():
crawl()
if __name__ == '__main__':
main()

View File

@ -1,353 +0,0 @@
#!/usr/bin/env python
# This file is part of dnmapper, an AS--level mapping tool
# Licensed under GNU General Public License v3 or later
# Written by Sebastian Lohff (seba@someserver.de)
from __future__ import print_function
import re
import socket
from collections import OrderedDict
class RouterParserException(Exception):
pass
def err(msg):
raise RouterParserException(msg)
def getBGPData(ip, asno):
rawData = getDataFromHost(ip)
if not rawData:
err("Could not get data from host (empty response)")
router = parseBGPData(rawData, asno)
router["ip"] = ip
return router
def getDataFromHost(ip):
socket.setdefaulttimeout(5)
x = socket.socket()
x.connect((ip, 6556))
f = x.makefile()
data = f.read()
x.close()
return data
def parseBGPData(raw, asno):
d = re.search(r"(?:^|\n)<<<(quagga|bird)>>>\n(.*?)(?:$|<<<[^\n]+>>>)", raw, re.DOTALL)
if not d:
err("Data not found in check mk output")
# mkify
raw = d.group(2).split("\n")
arr = filter(lambda _z: _z, map(lambda _y: filter(lambda _x: _x, re.split(r"\s+", _y)), raw))
# parse for bird/quagga
result = None
if d.group(1) == "quagga":
result = parseQuagga(arr, raw, asno)
else:
result = parseBird(arr, raw, asno)
return result
def parseQuagga(data, raw, asno):
status = _quaggaFindCommand(data, "show ip bgp sum")
if status[0][0:3] == ['IPv4', 'Unicast', 'Summary:']:
del(status[0])
if status[0][0:3] != ['BGP', 'router', 'identifier']:
print(status)
err("Couldn't find router id in quagga output")
peers = _quaggaFindNeighbors(data)
if asno and int(asno) != int(status[0][7]):
err("AS number (%s) does not match as number from quagga (%s)" % (asno, status[0][7]))
routes = _quaggaFindRoutes(raw)
return {"local_id": status[0][3].strip(","), "local_as": int(status[0][7]), "peers": peers, "routes": routes}
def parseBird(data, raw, asno):
status = _birdFindTable(data, "show status")
if status[2][0] != "1011-Router":
err("Couldn't find router id in bird output")
peers = filter(lambda _x: _x["type"] == "BGP", _birdMakeProtocols(data))
if asno == None:
err("Host is bird")
# FIXME
routes = _birdFindRoutes(data)
return {"local_id": status[2][3], "local_as": int(asno), "peers": peers, "routes": routes}
def _birdFindTable(info, command):
""" find command output of a bird command, e.g. "show bgp neighbors" """
command = ["bird>"] + command.split(" ")
commandInfo = []
editNextLine = False
for line in info:
if not commandInfo:
if line == command:
commandInfo.append(line)
editNextLine = True
else:
if editNextLine:
editNextLine = False
commandInfo.append(line[1:])
elif line[0] == "bird>":
return commandInfo
else:
commandInfo.append(line)
return []
def _birdFindProtocols(info):
""" return a list of tuples (protoname, protoinfo) """
protocolTable = _birdFindTable(info, "show protocols all")
protocols = OrderedDict()
currProto = None
for line in protocolTable[2:]:
if line[0][0:4] == "1002":
currProto = line[0][5:]
protocols[currProto] = [[currProto] + line[1:]]
elif currProto == None:
err("No proto selected, couldn't parse line:", line)
else:
protocols[currProto].append(line)
return protocols
def _birdMakeProtocols(info):
""" Parse birds show protocols all output """
# proto: name, type, description, state (up/down?), up-since
# routes imported, exported, preferred
# also: routing stats (
# bgp special stuff: state, neighbor (address, as, id) (id not available when down)
# state (established, active)
# if error, last error is avilable
protocols = []
for proto, data in _birdFindProtocols(info).iteritems():
protoInfo = {
"name": proto,
"type": data[0][1],
"table": data[0][2],
"state": data[0][3],
"last_change": data[0][4],
"info": " ".join(data[0][5:]),
"description": " ".join(data[1][2:]),
"routes": {
"imported": data[5][1],
"exported": data[5][3],
"preferred": data[5][5],
}
}
if protoInfo["type"] == "BGP":
found = False
for n, line in enumerate(data):
if line[0:2] == ["BGP", "state:"]:
found = True
protoInfo["BGP"] = {
"state": data[n][2],
"online": data[n][2] == "Established",
"neighbor_address": data[n+1][2],
"neighbor_as": int(data[n+2][2]),
"neighbor_id": data[n+3][2] if len(data) > n+3 and data[n+3][0:2] == ["Neighbor", "ID:"] else None,
"last_error": " ".join(data[n+3][2:]) if len(data) > n+3 and data[n+3][0:2] == ["Last", "error:"] else None,
}
if not found:
protoInfo["BGP"] = None
protocols.append(protoInfo)
return protocols
def _birdFindRoutes(info):
output = _birdFindTable(info, "show route all")
if len(output) < 1:
# no data found
return None
def handleCandidate(routes, candidate):
if candidate:
# path, nexthop, network
for key in ["path", "nexthop", "network", "iBGP"]:
if key not in candidate:
return
route = {"prefix": candidate["network"], "nexthop": candidate["nexthop"], "path": candidate["path"], "iBGP": candidate["iBGP"]}
routes.append(route)
pass
routes = []
candidate = None
lastIP = None
for line in output:
if line[0].startswith("1007-"):
# new route!
handleCandidate(routes, candidate)
if line[0] != "1007-":
# line has a network, use it!
lastIP = line[0][5:]
candidate = {"network": lastIP, "iBGP": None}
elif candidate is not None:
# search bgp attributes
if line[0] == "1012-":
pass
k, v = line[1], line[2:]
else:
k, v = line[0], line[1:]
k = k.rstrip(":")
if k == "BGP.next_hop":
candidate["nexthop"] = v[0]
elif k == "BGP.as_path":
candidate["path"] = v
handleCandidate(routes, candidate)
return routes
def _quaggaFindCommand(info, cmd):
# ['core-frunde#', 'show', 'ip', 'bgp', 'sum']
# ['core-frunde#', 'show', 'ip', 'bgp', 'neighbors']
output = []
cmd = cmd.split(" ")
prompt = None
for line in info:
if line[1:] == cmd:
prompt = line[0]
elif line[0] == prompt:
# done
return output
elif prompt != None:
output.append(line)
err("Could not find command '%s' in output" % " ".join(cmd))
def _quaggaFindNeighbors(info):
#['BGP', 'neighbor', 'is', '10.50.1.2,', 'remote', 'AS', '65001,', 'local', 'AS', '65001,', 'internal', 'link']
output = _quaggaFindCommand(info, "show ip bgp neighbors")
start = ["BGP", "neighbor", "is"]
curr = None
rawNeighbors = []
for line in output:
if line[0:3] == start:
if curr:
rawNeighbors.append(curr)
curr = [line]
elif curr:
curr.append(line)
else:
err("Could not find start of neighbors")
if curr:
rawNeighbors.append(curr)
curr = None
neighbors = []
neighborDict = OrderedDict()
for raw in rawNeighbors:
descrIdx = 1 if raw[1][0] == "Description:" else 0
if raw[descrIdx + 1][0] == "Hostname:":
descrIdx += 1
peerdict = {
"neighbor_address": raw[0][3].rstrip(","),
"neighbor_as": int(raw[0][6].rstrip(",")),
"local_as": int(raw[0][9].rstrip(",")),
"description": " ".join(raw[1][1:]) if descrIdx else "No description",
"neighbor_id": raw[1+descrIdx][6].strip(","),
"state": raw[2+descrIdx][3].strip(","),
"routes": {
"imported": 0,
},
"BGP": {
"state": raw[2+descrIdx][3].strip(","),
"online": raw[2+descrIdx][3].strip(",") == "Established",
"neighbor_id": raw[1+descrIdx][6].strip(","),
"neighbor_address": raw[0][3].rstrip(","),
"neighbor_as": int(raw[0][6].rstrip(",")),
"state": raw[2+descrIdx][3].strip(","),
},
}
for line in raw:
if line[1:3] == ["accepted", "prefixes"]:
# woooo
peerdict["routes"]["imported"] = int(line[0])
break
neighbors.append(peerdict)
neighborDict[peerdict["neighbor_address"]] = peerdict
return neighbors
def _quaggaFindRoutes(raw):
# from # show ip bgp to Total number of prefixes XX
# BGP table version is 0, local router ID is 10.50.0.1
# *> 10.3.14.0/27 10.75.0.22 0 65002 65112 i
cmdre = re.compile(r"^([^\s#]+#) show ip bgp$")
routere = re.compile(r"^(?P<status>.)(?P<status2>.)(?P<origin>.)(?P<network>[0-9./]+)?\s+(?P<nexthop>[0-9./]+)[\s0-9i?]+$")
# find output
output = []
prompt = None
for line in raw:
if not prompt:
m = cmdre.match(line)
if m:
prompt = m.group(1) + " "
else:
if line.startswith(prompt):
break
else:
output.append(line)
if len(output) < 1:
# no data found
return None
routes = []
foundTable = False
lastIP = None
for line in output:
if not foundTable:
if line.endswith("Metric LocPrf Weight Path"):
foundTable = True
else:
if line != '':
if line.startswith("Total number of prefixes") or line.startswith("Displayed "):
break
else:
# parse one route line
#print(line)
m = routere.match(line)
d = m.groupdict()
if d["network"]:
lastIP = d["network"]
else:
d["network"] = lastIP
# "parse" path (everything after 61 chars, but no i)
path = filter(lambda _x: _x not in ('', 'i'), line[61:].split(" "))
# currently skip incomplete routes
if '?' not in path:
route = {"prefix": d["network"], "nexthop": d["nexthop"], "path": path, "iBGP": d["origin"] == "i"}
routes.append(route)
return routes

View File

@ -30,7 +30,7 @@ TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
STATICFILES_DIRS = (
'static/',
'static/',
)
# Application definition
@ -42,8 +42,8 @@ INSTALLED_APPS = (
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'bgpdata',
'tastypie',
'bgpdata',
'tastypie',
)
API_LIMIT_PER_PAGE = 100
@ -86,7 +86,7 @@ USE_L10N = True
USE_TZ = True
TEMPLATE_DIRS = (
'templates/',
'templates/',
)

View File

@ -29,7 +29,7 @@ DEBUG = True
ALLOWED_HOSTS = []
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static/'),
os.path.join(BASE_DIR, 'static/'),
)
# Application definition
@ -41,18 +41,18 @@ INSTALLED_APPS = (
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'bgpdata',
'tastypie',
'rest_framework',
'bgpdata',
'tastypie',
)
API_LIMIT_PER_PAGE = 100
MIDDLEWARE_CLASSES = (
MIDDLEWARE = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

View File

@ -5,14 +5,19 @@
from django.conf.urls import include, url
from django.contrib import admin
from django.views.generic import RedirectView
import bgpdata.urls
import bgpdata.api_urls
import apiv2.urls
urlpatterns = (
# Examples:
# url(r'^$', 'dnmapper.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^$', RedirectView.as_view(url='/map/')),
url(r'^map/', include(bgpdata.urls)),
url(r'^$', RedirectView.as_view(url='/map/')),
url(r'^map/', include(bgpdata.urls)),
url(r'^api/v1/', include(bgpdata.api_urls)),
url(r'^api/v2/', include(apiv2.urls)),
url(r'^admin/', include(admin.site.urls)),
url(r'^admin/', admin.site.urls),
)

View File

@ -1,3 +1,6 @@
django==1.11.25
django<3
django-tastypie
django-rest-framework
django-filter
networkx
grpcio

View File

@ -26,29 +26,15 @@
</div>
<div id="navbar" class="navbar-collapse collapse">
<ul class="nav navbar-nav">
<li class="active"><a href="/">Crawl Data</a></li>
<!--
<li><a href="#about">About</a></li>
<li><a href="#contact">Contact</a></li>
<li class="dropdown">
<a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button" aria-expanded="false">Dropdown <span class="caret"></span></a>
<ul class="dropdown-menu" role="menu">
<li><a href="#">Action</a></li>
<li><a href="#">Another action</a></li>
<li><a href="#">Something else here</a></li>
<li class="divider"></li>
<li class="dropdown-header">Nav header</li>
<li><a href="#">Separated link</a></li>
<li><a href="#">One more separated link</a></li>
</ul>
</li>
-->
<li{% if request.resolver_match.url_name == "show_map" or request.resolver_match.url_name == "overview" %} class="active"{%endif%}><a href="/">Crawl Data</a></li>
<li{% if request.resolver_match.url_name == "show_live_map" %} class="active"{%endif%}><a href="{% url "show_live_map" crawl_id="live" %}">Live View</a></li>
<li{% if request.resolver_match.url_name == 'lastseen' %} class="active"{%endif%}><a href="{% url "lastseen" %}">AS Status</a></li>
</ul>
</div><!--/.nav-collapse -->
</div>
</nav>
<div class="container">
<div class="{% block container_class %}container{% endblock %}">
{% block body %}{% endblock %}
</div>
</body>