#!/usr/bin/env python2
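"""Crawl BGP state from all configured hosts and store the result as a CrawlRun snapshot.

Reads peer and route data from each ConfigHost, records ASes, border routers,
peerings and announcements, and finally derives additional peerings from AS paths.
"""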
from __future__ import print_function

# prepare environment
import sys
sys.path.append("..")
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "dnmapper.settings")
import django
django.setup()

from django.utils import timezone
from django.db.models import Q

from bgpdata.models import ConfigHost, CrawlRun, CrawlLog, AS, BorderRouter, Announcement, Peering, BorderRouterPair
from routerparsers import getBGPData, RouterParserException
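

# Return the AS object for (crawl, number), creating and saving it on first encounter.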
def getOrCreateAS(crawl, number):
    try:
        currAS = AS.objects.get(crawl=crawl, number=number)
    except AS.DoesNotExist:
        currAS = AS(crawl=crawl, number=number)
        currAS.save()
    return currAS


def main():
    # 1. create crawl run
    crawl = CrawlRun()
    crawl.startTime = timezone.now()
    crawl.save()

    CrawlLog.log(crawl, "Starting crawl run!", severity=CrawlLog.INFO)

    # 2. get data from all hosts, put it in the database
    for host in ConfigHost.objects.all():
        data = None
        print(" -- Getting data for host %s" % host)
        try:
            if host.checkMethod == 'CMK':
                data = getBGPData(host.ip, host.number)
            else:
                CrawlLog.log(crawl, "Method %s is not currently supported, skipping host" % host.checkMethod,
                             host=host, severity=CrawlLog.ERROR)
                continue
        except RouterParserException as e:
            msg = "Could not parse data for host: %s" % str(e)
            print("%s: %s" % (host, msg))
            CrawlLog.log(crawl, msg, host=host, severity=CrawlLog.ERROR)
            continue
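
        # data now holds the parsed router state: "local_as", "local_id", "peers" and (optionally) "routes"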
print(" -- parsing...")
currASno = int(data["local_as"])
currAS = getOrCreateAS(crawl, currASno)
currRouter = None
try:
currRouter = BorderRouter.objects.get(AS=currAS, routerID=data["local_id"])
currRouter.pingable = True
currRouter.reachable = True
currRouter.save()
except BorderRouter.DoesNotExist:
currRouter = BorderRouter(AS=currAS, routerID=data["local_id"], pingable=True, reachable=True)
currRouter.save()
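
        # Record all peerings and neighbor routers this host reports.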
print(" --> peers")
for peer in data["peers"]:
# peerings
# data: BGP{state, neighbor_id, neighbor_as}, description
# a) find/create neighbor
print(" ----> Peer:", int(peer["BGP"]["neighbor_as"]))
neighAS = None
try:
neighAS = AS.objects.get(crawl=crawl, number=int(peer["BGP"]["neighbor_as"]))
except AS.DoesNotExist:
neighAS = AS(crawl=crawl, number=int(peer["BGP"]["neighbor_as"]))
neighAS.save()

            # b) find out if a peering already exists (maybe we only need to add our router ID?)
            try:
                peering = Peering.getPeering(currAS, neighAS)
            except Peering.DoesNotExist:
                peering = Peering(as1=currAS, as2=neighAS, origin=Peering.DIRECT)
                peering.save()

            # c) look for router/peering pairs
            if peer["BGP"]["neighbor_id"]:
                try:
                    neighRouter = BorderRouter.objects.get(AS=neighAS, routerID=peer["BGP"]["neighbor_id"])
                except BorderRouter.DoesNotExist:
                    # the neighbor router belongs to the neighbor AS, not to our own
                    neighRouter = BorderRouter(AS=neighAS, routerID=peer["BGP"]["neighbor_id"],
                                               pingable=False, reachable=False)
                    neighRouter.save()

                try:
                    BorderRouterPair.getPairing(peering, currRouter, neighRouter)
                except BorderRouterPair.DoesNotExist:
                    # a half-filled pair may already exist from crawling the other side first
                    pairs = BorderRouterPair.objects.filter(
                        Q(peering=peering) & (Q(router1=neighRouter, router2=None) | Q(router1=None, router2=neighRouter)))
                    if pairs.count() > 0:
                        pair = pairs[0]
                        if pair.router1 is None:
                            pair.router1 = currRouter
                        else:
                            pair.router2 = currRouter
                        pair.save()
                    else:
                        pair = BorderRouterPair(peering=peering, router1=currRouter, router2=neighRouter)
                        pair.save()
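
        # Store the routes this router announces; entries without a prefix length are skipped.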
print(" --> Announcements")
if "routes" in data and data["routes"]:
for route in data["routes"]:
print(" ---->", route["prefix"])
if "/" not in route["prefix"]:
continue
ip, prefix = route["prefix"].split("/")
a = Announcement(router=currRouter, ip=ip, prefix=prefix,
ASPath=" ".join(route["path"]), nextHop=route["nexthop"],
originAS=currAS)
a.save()
else:
print(" !! No routes found in host output")
CrawlLog.log(crawl, "No routes found in host output (no bgp feed included?)", host=host, severity=CrawlLog.WARN)

    # 3. calculate missing data
    print(" -- Adding extra data from announcements...")

    # 3.1. use announcement data to find hidden peerings
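    # Every adjacent AS pair on an AS path implies a peering, even between ASes we never crawled directly.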
    for announcement in Announcement.objects.filter(router__AS__crawl=crawl):
        path = announcement.ASPath.split(" ")
        if len(path) > 1:
            firstAS = getOrCreateAS(crawl, int(path.pop(0)))
            while len(path) > 0:
                secondAS = getOrCreateAS(crawl, int(path.pop(0)))
                try:
                    Peering.getPeering(firstAS, secondAS)
                except Peering.DoesNotExist:
                    peering = Peering(as1=firstAS, as2=secondAS, origin=Peering.PATH)
                    peering.save()
                firstAS = secondAS

    # 4. end crawl run
    crawl.endTime = timezone.now()
    crawl.save()

    print(" !! Done")
    CrawlLog.log(crawl, "Crawl completed", severity=CrawlLog.INFO)


if __name__ == '__main__':
    main()